diff --git a/builder/frameworks/arduino.py b/builder/frameworks/arduino.py index d619fae8c..724271fc6 100644 --- a/builder/frameworks/arduino.py +++ b/builder/frameworks/arduino.py @@ -24,263 +24,1019 @@ import subprocess import json -import semantic_version import os import sys import shutil -from os.path import join, exists +import hashlib +import logging +import threading +from contextlib import suppress +from os.path import join, exists, isabs, splitdrive, commonpath, relpath +from pathlib import Path +from typing import Union, List -from SCons.Script import COMMAND_LINE_TARGETS, DefaultEnvironment, SConscript +import semantic_version +from SCons.Script import DefaultEnvironment, SConscript from platformio import fs from platformio.package.version import pepver_to_semver -from platformio.project.config import ProjectConfig from platformio.package.manager.tool import ToolPackageManager +IS_WINDOWS = sys.platform.startswith("win") + +python_deps = { + "wheel": ">=0.35.1", + "rich-click": ">=1.8.6", + "PyYAML": ">=6.0.2", + "intelhex": ">=2.3.0", + "rich": ">=14.0.0", + "esp-idf-size": ">=1.6.1" +} + + +def setup_logging(): + """Setup logging with optional file output""" + handlers = [logging.StreamHandler()] + + # Only add file handler if writable and not disabled + log_file = os.environ.get('ARDUINO_FRAMEWORK_LOG_FILE') + if log_file: + with suppress(OSError, PermissionError): + handlers.append(logging.FileHandler(log_file)) + + logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s', + handlers=handlers + ) + + +# Only setup logging if enabled via environment variable +if os.environ.get('ARDUINO_FRAMEWORK_ENABLE_LOGGING'): + setup_logging() + +# Constants for better performance +UNICORE_FLAGS = { + "CORE32SOLO1", + "CONFIG_FREERTOS_UNICORE=y" +} + +# Thread-safe global flags to prevent message spam +_PATH_SHORTENING_LOCK = threading.Lock() +_PATH_SHORTENING_MESSAGES = { + 'shortening_applied': False, + 'no_framework_paths_warning': False, + 'long_path_warning_shown': False +} + + +def get_platform_default_threshold(mcu): + """ + Platform-specific max performance default values for + INCLUDE_PATH_LENGTH_THRESHOLD + These values push the limits for maximum performance and minimal path + shortening + + Args: + mcu: MCU type (esp32, esp32s2, esp32s3, etc.) + + Returns: + int: Platform-specific default threshold + """ + # Max. performance values - pushing Windows command line limits + # Windows CMD has ~32768 character limit, we use aggressive values close + # to this + platform_defaults = { + "esp32": 32000, # Standard ESP32 + "esp32s2": 32000, # ESP32-S2 + "esp32s3": 32766, # ESP32-S3 + "esp32c3": 32000, # ESP32-C3 + "esp32c2": 32000, # ESP32-C2 + "esp32c6": 31600, # ESP32-C6 + "esp32h2": 32000, # ESP32-H2 + "esp32p4": 32000, # ESP32-P4 + } + + default_value = platform_defaults.get(mcu, 31600) + + # Debug output only in verbose mode + if logging.getLogger().isEnabledFor(logging.DEBUG): + logging.debug( + f"Max. possible platform default threshold for {mcu}: " + f"{default_value}") + + return default_value + + +def validate_threshold(threshold, mcu): + """ + Validates threshold value with max. 
performance limits + Uses aggressive boundaries for maximum performance + + Args: + threshold: Threshold value to validate + mcu: MCU type for context-specific validation + + Returns: + int: Validated threshold value + """ + # Absolute limits - pushing boundaries + min_threshold = 20000 # Minimum reasonable value for complex projects + # Maximum aggressive value (beyond Windows CMD limit for testing) + max_threshold = 32767 + + # MCU-specific adjustments - all values are aggressive + mcu_adjustments = { + "esp32c2": {"min": 30000, "max": 32767}, + "esp32c3": {"min": 30000, "max": 32767}, + "esp32": {"min": 30000, "max": 32767}, + "esp32s2": {"min": 30000, "max": 32767}, + "esp32s3": {"min": 30000, "max": 32767}, + "esp32p4": {"min": 30000, "max": 32767}, + "esp32c6": {"min": 30000, "max": 32767}, + "esp32h2": {"min": 30000, "max": 32767}, + } + + # Apply MCU-specific max. limits + if mcu in mcu_adjustments: + min_threshold = max(min_threshold, mcu_adjustments[mcu]["min"]) + max_threshold = min(max_threshold, mcu_adjustments[mcu]["max"]) + + original_threshold = threshold + + if threshold < min_threshold: + print(f"*** Warning: Include path threshold {threshold} too " + f"conservative for {mcu}, using safe minimum " + f"{min_threshold} ***") + threshold = min_threshold + elif threshold > max_threshold: + print(f"*** Warning: Include path threshold {threshold} exceeds " + f"possible maximum for {mcu}, using {max_threshold} ***") + threshold = max_threshold + + # Warning for conservative values (opposite of original - warn if too low) + platform_default = get_platform_default_threshold(mcu) + if threshold < platform_default * 0.7: # More than 30% below max. default + print(f"*** Info: Include path threshold {threshold} is conservative " + f"compared to maximum default {platform_default} for " + f"{mcu} ***") + print("*** Consider using higher values for maximum performance ***") + + if original_threshold != threshold: + logging.warning(f"Threshold adjusted from {original_threshold} to " + f"max. possible value {threshold} for {mcu}") + + return threshold + + +def get_include_path_threshold(env, config, current_env_section): + """ + Determines Windows INCLUDE_PATH_LENGTH_THRESHOLD from various sources + with priority order and max. possible validation + + Priority order: + 1. Environment variable PLATFORMIO_INCLUDE_PATH_THRESHOLD + 2. Environment-specific setting in platformio.ini + 3. Global setting in [env] section + 4. Setting in [platformio] section + 5. MCU-specific max. possible default value + + Args: + env: PlatformIO Environment + config: Project Configuration + current_env_section: Current environment section + + Returns: + int: Validated max. threshold value + """ + mcu = env.BoardConfig().get("build.mcu", "esp32") + default_threshold = get_platform_default_threshold(mcu) + setting_name = "custom_include_path_length_threshold" + + try: + # 1. Check environment variable (highest priority) + env_var = os.environ.get("PLATFORMIO_INCLUDE_PATH_THRESHOLD") + if env_var: + try: + threshold = int(env_var) + threshold = validate_threshold(threshold, mcu) + print(f"*** Using environment variable max. possible include " + f"path threshold: {threshold} (MCU: {mcu}) ***") + return threshold + except ValueError: + print(f"*** Warning: Invalid environment variable " + f"PLATFORMIO_INCLUDE_PATH_THRESHOLD='{env_var}', " + f"ignoring ***") + + # 2. 
Check environment-specific setting + if config.has_option(current_env_section, setting_name): + threshold = config.getint(current_env_section, setting_name) + threshold = validate_threshold(threshold, mcu) + print(f"*** Using environment-specific max. possible include " + f"path threshold: {threshold} (MCU: {mcu}) ***") + return threshold + + # 3. Check global setting in [env] section + if config.has_option("env", setting_name): + threshold = config.getint("env", setting_name) + threshold = validate_threshold(threshold, mcu) + print(f"*** Using global [env] max. possible include path " + f"threshold: {threshold} (MCU: {mcu}) ***") + return threshold + + # 4. Check setting in [platformio] section + if config.has_option("platformio", setting_name): + threshold = config.getint("platformio", setting_name) + threshold = validate_threshold(threshold, mcu) + print(f"*** Using [platformio] section max. possible include " + f"path threshold: {threshold} (MCU: {mcu}) ***") + return threshold + + # 5. Use MCU-specific max. possible default value + threshold = validate_threshold(default_threshold, mcu) + if env.get("VERBOSE"): + print(f"*** Using platform-specific max. possible default " + f"include path threshold: {threshold} (MCU: {mcu}) ***") + + return threshold + + except (ValueError, TypeError) as e: + print(f"*** Warning: Invalid include path threshold value, using " + f"max. possible platform default {default_threshold} for " + f"{mcu}: {e} ***") + return validate_threshold(default_threshold, mcu) + + +def get_threshold_info(env, config, current_env_section): + """ + Helper function for debug information about max. possible threshold + configuration + + Args: + env: PlatformIO Environment + config: Project Configuration + current_env_section: Current environment section + + Returns: + dict: Information about threshold configuration + """ + mcu = env.BoardConfig().get("build.mcu", "esp32") + setting_name = "custom_include_path_length_threshold" + + info = { + "mcu": mcu, + "platform_default": get_platform_default_threshold(mcu), + "env_variable": os.environ.get("PLATFORMIO_INCLUDE_PATH_THRESHOLD"), + "env_specific": None, + "global_env": None, + "platformio_section": None, + "final_threshold": None, + "source": "bleeding_edge_platform_default", + "is_bleeding_edge": True + } + + # Collect all possible sources + if config.has_option(current_env_section, setting_name): + with suppress(ValueError): + info["env_specific"] = config.getint(current_env_section, + setting_name) + + if config.has_option("env", setting_name): + with suppress(ValueError): + info["global_env"] = config.getint("env", setting_name) + + if config.has_option("platformio", setting_name): + with suppress(ValueError): + info["platformio_section"] = config.getint("platformio", + setting_name) + + # Determine final threshold and source + info["final_threshold"] = get_include_path_threshold(env, config, + current_env_section) + + # Determine source + if info["env_variable"]: + info["source"] = "environment_variable" + elif info["env_specific"] is not None: + info["source"] = "env_specific" + elif info["global_env"] is not None: + info["source"] = "global_env" + elif info["platformio_section"] is not None: + info["source"] = "platformio_section" + + return info + + +# Cache class for frequently used paths +class PathCache: + def __init__(self, platform, mcu): + self.platform = platform + self.mcu = mcu + self._framework_dir = None + self._framework_lib_dir = None + self._sdk_dir = None + + @property + def framework_dir(self): + if 
self._framework_dir is None: + self._framework_dir = self.platform.get_package_dir( + "framework-arduinoespressif32") + return self._framework_dir + + @property + def framework_lib_dir(self): + if self._framework_lib_dir is None: + self._framework_lib_dir = self.platform.get_package_dir( + "framework-arduinoespressif32-libs") + return self._framework_lib_dir + + @property + def sdk_dir(self): + if self._sdk_dir is None: + self._sdk_dir = fs.to_unix_path( + join(self.framework_lib_dir, self.mcu, "include") + ) + return self._sdk_dir + + +def check_and_warn_long_path_support(): + """Checks Windows long path support and issues warning if disabled""" + with _PATH_SHORTENING_LOCK: # Thread-safe access + if not IS_WINDOWS or _PATH_SHORTENING_MESSAGES[ + 'long_path_warning_shown']: + return + + try: + import winreg + key = winreg.OpenKey( + winreg.HKEY_LOCAL_MACHINE, + r"SYSTEM\CurrentControlSet\Control\FileSystem" + ) + value, _ = winreg.QueryValueEx(key, "LongPathsEnabled") + winreg.CloseKey(key) + + if value != 1: + print("*** WARNING: Windows Long Path Support is disabled ***") + print("*** Enable it for better performance: ***") + print("*** 1. Run as Administrator: gpedit.msc ***") + print("*** 2. Navigate to: Computer Configuration > " + "Administrative Templates > System > Filesystem ***") + print("*** 3. Enable 'Enable Win32 long paths' ***") + print("*** OR run PowerShell as Admin: ***") + print("*** New-ItemProperty -Path " + "'HKLM:\\SYSTEM\\CurrentControlSet\\Control\\FileSystem' " + "-Name 'LongPathsEnabled' -Value 1 -PropertyType DWORD " + "-Force ***") + print("*** Restart required after enabling ***") + except Exception: + print("*** WARNING: Could not check Long Path Support status ***") + print("*** Consider enabling Windows Long Path Support for " + "better performance ***") + + _PATH_SHORTENING_MESSAGES['long_path_warning_shown'] = True + + +# Secure deletion functions +def safe_delete_file(file_path: Union[str, Path], + force: bool = False) -> bool: + """ + Secure file deletion + + Args: + file_path: Path to file to be deleted + force: Forces deletion even for write-protected files + + Returns: + bool: True if successfully deleted + """ + file_path = Path(file_path) + + try: + # Check existence + if not file_path.exists(): + logging.warning(f"File does not exist: {file_path}") + return False + + # Remove write protection if necessary + if force and not os.access(file_path, os.W_OK): + file_path.chmod(0o666) + + # Delete file + file_path.unlink() + logging.info(f"File deleted: {file_path}") + return True + + except PermissionError: + logging.error(f"No permission to delete: {file_path}") + return False + except Exception as e: + logging.error(f"Error deleting {file_path}: {e}") + return False + + +def safe_delete_directory(dir_path: Union[str, Path]) -> bool: + """ + Secure directory deletion + """ + dir_path = Path(dir_path) + + try: + if not dir_path.exists(): + logging.warning(f"Directory does not exist: {dir_path}") + return False + + shutil.rmtree(dir_path) + logging.info(f"Directory deleted: {dir_path}") + return True + + except Exception as e: + logging.error(f"Error deleting {dir_path}: {e}") + return False + + +def validate_platformio_path(path: Union[str, Path]) -> bool: + """ + Enhanced validation for PlatformIO package paths + """ + try: + path = Path(path).resolve() + path_str = str(path) + + # Must be within .platformio directory structure + if ".platformio" not in path_str: + return False + + # Must be a packages directory + if "packages" not in path_str: + 
return False + + # Must be framework-related + framework_indicators = [ + "framework-arduinoespressif32", + "framework-arduinoespressif32-libs" + ] + + if not any(indicator in path_str for indicator in framework_indicators): + return False + + # Must not be a critical system path + critical_paths = ["/usr", "/bin", "/sbin", "/etc", "/boot", + "C:\\Windows", "C:\\Program Files"] + return not any(critical in path_str for critical in critical_paths) + + except Exception as e: + logging.error(f"Path validation error: {e}") + return False + + +def validate_deletion_path(path: Union[str, Path], + allowed_patterns: List[str]) -> bool: + """ + Validates if a path can be safely deleted + + Args: + path: Path to be checked + allowed_patterns: Allowed path patterns + + Returns: + bool: True if deletion is safe + """ + path = Path(path).resolve() + + # Check against critical system paths + critical_paths = [ + Path.home(), + Path("/"), + Path("C:\\") if IS_WINDOWS else None, + Path("/usr"), + Path("/etc"), + Path("/bin"), + Path("/sbin") + ] + + for critical in filter(None, critical_paths): + try: + normalized_path = path.resolve() + normalized_critical = critical.resolve() + if (normalized_path == normalized_critical or + normalized_critical in normalized_path.parents): + logging.error(f"Critical system path detected: {path}") + return False + except (OSError, ValueError): + # Path comparison failed, reject for safety + logging.error(f"Path comparison failed for: {path}") + return False + + # Check against allowed patterns + path_str = str(path) + is_allowed = any(pattern in path_str for pattern in allowed_patterns) + + if not is_allowed: + logging.error(f"Path does not match allowed patterns: {path}") + logging.error(f"Allowed patterns: {allowed_patterns}") + else: + logging.info(f"Path validation successful: {path}") + + return is_allowed + + +def safe_framework_cleanup(): + """Secure cleanup of Arduino Framework with enhanced error handling""" + success = True + + # Framework directory cleanup + if exists(FRAMEWORK_DIR): + logging.info(f"Attempting to validate framework path: " + f"{FRAMEWORK_DIR}") + + if validate_platformio_path(FRAMEWORK_DIR): + logging.info(f"Framework path validated successfully: " + f"{FRAMEWORK_DIR}") + + if safe_delete_directory(FRAMEWORK_DIR): + print("Framework successfully removed") + else: + print("Error removing framework") + success = False + else: + logging.error(f"PlatformIO path validation failed: " + f"{FRAMEWORK_DIR}") + success = False + + # Framework libs directory cleanup + if exists(FRAMEWORK_LIB_DIR): + logging.info(f"Attempting to validate framework lib path: " + f"{FRAMEWORK_LIB_DIR}") + + if validate_platformio_path(FRAMEWORK_LIB_DIR): + logging.info(f"Framework lib path validated successfully: " + f"{FRAMEWORK_LIB_DIR}") + + if safe_delete_directory(FRAMEWORK_LIB_DIR): + print("Framework libs successfully removed") + else: + print("Error removing framework libs") + success = False + else: + logging.error(f"PlatformIO path validation failed: " + f"{FRAMEWORK_LIB_DIR}") + success = False + + return success + + +def safe_remove_sdkconfig_files(): + """Secure removal of SDKConfig files""" + envs = [section.replace("env:", "") for section in config.sections() + if section.startswith("env:")] + for env_name in envs: + file_path = join(project_dir, f"sdkconfig.{env_name}") + if exists(file_path): + safe_delete_file(file_path) + + +# Initialization env = DefaultEnvironment() pm = ToolPackageManager() platform = env.PioPlatform() config = env.GetProjectConfig() 
board = env.BoardConfig() + +# Cached values mcu = board.get("build.mcu", "esp32") +pioenv = env["PIOENV"] +project_dir = env.subst("$PROJECT_DIR") +path_cache = PathCache(platform, mcu) +current_env_section = f"env:{pioenv}" + +# Board configuration board_sdkconfig = board.get("espidf.custom_sdkconfig", "") entry_custom_sdkconfig = "\n" flag_custom_sdkconfig = False -IS_WINDOWS = sys.platform.startswith("win") +flag_custom_component_remove = False +flag_custom_component_add = False +flag_lib_ignore = False + +if mcu == "esp32c2": + flag_custom_sdkconfig = True + +# pio lib_ignore check +if config.has_option(current_env_section, "lib_ignore"): + flag_lib_ignore = True -if config.has_option("env:"+env["PIOENV"], "custom_sdkconfig"): +# Custom Component remove check +if config.has_option(current_env_section, "custom_component_remove"): + flag_custom_component_remove = True + +# Custom SDKConfig check +if config.has_option(current_env_section, "custom_sdkconfig"): entry_custom_sdkconfig = env.GetProjectOption("custom_sdkconfig") flag_custom_sdkconfig = True -if len(str(board_sdkconfig)) > 2: +if board_sdkconfig: flag_custom_sdkconfig = True -extra_flags = (''.join([element for element in board.get("build.extra_flags", "")])).replace("-D", " ") +extra_flags_raw = board.get("build.extra_flags", []) +if isinstance(extra_flags_raw, list): + extra_flags = " ".join(extra_flags_raw).replace("-D", " ") +else: + extra_flags = str(extra_flags_raw).replace("-D", " ") + framework_reinstall = False -flag_any_custom_sdkconfig = False -FRAMEWORK_LIB_DIR = platform.get_package_dir("framework-arduinoespressif32-libs") +FRAMEWORK_DIR = path_cache.framework_dir +FRAMEWORK_LIB_DIR = path_cache.framework_lib_dir SConscript("_embed_files.py", exports="env") -flag_any_custom_sdkconfig = os.path.exists(join(platform.get_package_dir("framework-arduinoespressif32-libs"),"sdkconfig")) +flag_any_custom_sdkconfig = exists(join( + platform.get_package_dir("framework-arduinoespressif32-libs"), + "sdkconfig")) + -# Esp32-solo1 libs needs adopted settings -if flag_custom_sdkconfig == True and ("CORE32SOLO1" in extra_flags or "CONFIG_FREERTOS_UNICORE=y" in entry_custom_sdkconfig or "CONFIG_FREERTOS_UNICORE=y" in board_sdkconfig): - if len(str(env.GetProjectOption("build_unflags"))) == 2: # No valid env, needs init - env['BUILD_UNFLAGS'] = {} - build_unflags = " ".join(env['BUILD_UNFLAGS']) - build_unflags = build_unflags + " -mdisable-hardware-atomics -ustart_app_other_cores" +def has_unicore_flags(): + """Check if any UNICORE flags are present in configuration""" + return any(flag in extra_flags or flag in entry_custom_sdkconfig + or flag in board_sdkconfig for flag in UNICORE_FLAGS) + + +# Esp32-solo1 libs settings +if flag_custom_sdkconfig and has_unicore_flags(): + if not env.get('BUILD_UNFLAGS'): # Initialize if not set + env['BUILD_UNFLAGS'] = [] + + build_unflags = (" ".join(env['BUILD_UNFLAGS']) + + " -mdisable-hardware-atomics -ustart_app_other_cores") new_build_unflags = build_unflags.split() - env.Replace( - BUILD_UNFLAGS=new_build_unflags - ) + env.Replace(BUILD_UNFLAGS=new_build_unflags) + + +def get_packages_to_install(deps, installed_packages): + """Generator for packages to install""" + for package, spec in deps.items(): + if package not in installed_packages: + yield package + else: + version_spec = semantic_version.Spec(spec) + if not version_spec.match(installed_packages[package]): + yield package + def install_python_deps(): def _get_installed_pip_packages(): result = {} - packages = {} - pip_output = 
subprocess.check_output( - [ - env.subst("$PYTHONEXE"), - "-m", - "pip", - "list", - "--format=json", - "--disable-pip-version-check", - ] - ) try: + pip_output = subprocess.check_output([ + env.subst("$PYTHONEXE"), + "-m", "pip", "list", "--format=json", + "--disable-pip-version-check" + ]) packages = json.loads(pip_output) - except: - print("Warning! Couldn't extract the list of installed Python packages.") - return {} - for p in packages: - result[p["name"]] = pepver_to_semver(p["version"]) + for p in packages: + result[p["name"]] = pepver_to_semver(p["version"]) + except Exception: + print("Warning! Couldn't extract the list of installed Python " + "packages.") return result - deps = { - "wheel": ">=0.35.1", - "rich-click": ">=1.8.6", - "PyYAML": ">=6.0.2", - "intelhex": ">=2.3.0", - "rich": ">=14.0.0", - "esp-idf-size": ">=1.6.1" - } - installed_packages = _get_installed_pip_packages() - packages_to_install = [] - for package, spec in deps.items(): - if package not in installed_packages: - packages_to_install.append(package) - else: - version_spec = semantic_version.Spec(spec) - if not version_spec.match(installed_packages[package]): - packages_to_install.append(package) + packages_to_install = list(get_packages_to_install(python_deps, + installed_packages)) if packages_to_install: + packages_str = " ".join(f'"{p}{python_deps[p]}"' + for p in packages_to_install) env.Execute( env.VerboseAction( - ( - '"$PYTHONEXE" -m pip install -U -q -q -q ' - + " ".join( - [ - '"%s%s"' % (p, deps[p]) - for p in packages_to_install - ] - ) - ), + f'"$PYTHONEXE" -m pip install -U -q -q -q {packages_str}', "Installing Arduino Python dependencies", ) ) - return + install_python_deps() + def get_MD5_hash(phrase): - import hashlib - return hashlib.md5((phrase).encode('utf-8')).hexdigest()[:16] + return hashlib.md5(phrase.encode('utf-8')).hexdigest()[:16] def matching_custom_sdkconfig(): - # check if current env is matching to existing sdkconfig + """Checks if current environment matches existing sdkconfig""" cust_sdk_is_present = False - matching_sdkconfig = False - last_sdkconfig_path = join(env.subst("$PROJECT_DIR"),"sdkconfig.defaults") - if flag_any_custom_sdkconfig == False: - matching_sdkconfig = True - return matching_sdkconfig, cust_sdk_is_present - if os.path.exists(last_sdkconfig_path) == False: - return matching_sdkconfig, cust_sdk_is_present - if flag_custom_sdkconfig == False: - matching_sdkconfig = False - return matching_sdkconfig, cust_sdk_is_present - with open(last_sdkconfig_path) as src: - line = src.readline() - if line.startswith("# TASMOTA__"): - cust_sdk_is_present = True; - costum_options = entry_custom_sdkconfig - if (line.split("__")[1]).strip() == get_MD5_hash((costum_options).strip() + mcu): - matching_sdkconfig = True - - return matching_sdkconfig, cust_sdk_is_present + + if not flag_any_custom_sdkconfig: + return True, cust_sdk_is_present + + last_sdkconfig_path = join(project_dir, "sdkconfig.defaults") + if not exists(last_sdkconfig_path): + return False, cust_sdk_is_present + + if not flag_custom_sdkconfig: + return False, cust_sdk_is_present + + try: + with open(last_sdkconfig_path) as src: + line = src.readline() + if line.startswith("# TASMOTA__"): + cust_sdk_is_present = True + custom_options = entry_custom_sdkconfig + expected_hash = get_MD5_hash(custom_options.strip() + mcu) + if line.split("__")[1].strip() == expected_hash: + return True, cust_sdk_is_present + except (IOError, IndexError): + pass + + return False, cust_sdk_is_present + def check_reinstall_frwrk(): - 
framework_reinstall = False - cust_sdk_is_present = False - matching_sdkconfig = False - if flag_custom_sdkconfig == True: - matching_sdkconfig, cust_sdk_is_present = matching_custom_sdkconfig() - if flag_custom_sdkconfig == False and flag_any_custom_sdkconfig == True: - # case custom sdkconfig exists and a env without "custom_sdkconfig" - framework_reinstall = True - if flag_custom_sdkconfig == True and matching_sdkconfig == False: - # check if current custom sdkconfig is different from existing - framework_reinstall = True - return framework_reinstall - - -FRAMEWORK_SDK_DIR = fs.to_unix_path( - os.path.join( - FRAMEWORK_LIB_DIR, - mcu, - "include", - ) -) + if not flag_custom_sdkconfig and flag_any_custom_sdkconfig: + # case custom sdkconfig exists and an env without "custom_sdkconfig" + return True + + if flag_custom_sdkconfig: + matching_sdkconfig, _ = matching_custom_sdkconfig() + if not matching_sdkconfig: + # check if current custom sdkconfig is different from existing + return True + + return False + +def call_compile_libs(): + print(f"*** Compile Arduino IDF libs for {pioenv} ***") + SConscript("espidf.py") + + +FRAMEWORK_SDK_DIR = path_cache.sdk_dir IS_INTEGRATION_DUMP = env.IsIntegrationDump() def is_framework_subfolder(potential_subfolder): - if not os.path.isabs(potential_subfolder): + """Check if a path is a subfolder of the framework SDK directory""" + # carefully check before change this function + if not isabs(potential_subfolder): return False - if ( - os.path.splitdrive(FRAMEWORK_SDK_DIR)[0] - != os.path.splitdrive(potential_subfolder)[0] - ): + if (splitdrive(FRAMEWORK_SDK_DIR)[0] != + splitdrive(potential_subfolder)[0]): return False - return os.path.commonpath([FRAMEWORK_SDK_DIR]) == os.path.commonpath( - [FRAMEWORK_SDK_DIR, potential_subfolder] - ) + return (commonpath([FRAMEWORK_SDK_DIR]) == + commonpath([FRAMEWORK_SDK_DIR, potential_subfolder])) -def shorthen_includes(env, node): - if IS_INTEGRATION_DUMP: - # Don't shorten include paths for IDE integrations - return node +# Performance optimization with caching +def calculate_include_path_length(includes): + """Calculate total character count of all include paths with caching""" + if not hasattr(calculate_include_path_length, '_cache'): + calculate_include_path_length._cache = {} + + cache_key = tuple(includes) + if cache_key not in calculate_include_path_length._cache: + calculate_include_path_length._cache[cache_key] = sum( + len(str(inc)) for inc in includes) + + return calculate_include_path_length._cache[cache_key] + + +def analyze_path_distribution(includes): + """Analyze the distribution of include path lengths for optimization + insights""" + if not includes: + return {} + + lengths = [len(str(inc)) for inc in includes] + framework_lengths = [len(str(inc)) for inc in includes + if is_framework_subfolder(inc)] + + return { + 'total_paths': len(includes), + 'total_length': sum(lengths), + 'average_length': sum(lengths) / len(lengths), + 'max_length': max(lengths), + 'min_length': min(lengths), + 'framework_paths': len(framework_lengths), + 'framework_total_length': sum(framework_lengths), + 'framework_avg_length': (sum(framework_lengths) / + len(framework_lengths) + if framework_lengths else 0) + } - includes = [fs.to_unix_path(inc) for inc in env.get("CPPPATH", [])] + +def debug_framework_paths(env, include_count, total_length): + """Debug framework paths to understand the issue (verbose mode only)""" + if not env.get("VERBOSE"): + return + + print("*** Debug Framework Paths ***") + print(f"*** MCU: {mcu} 
***") + print(f"*** FRAMEWORK_DIR: {FRAMEWORK_DIR} ***") + print(f"*** FRAMEWORK_SDK_DIR: {FRAMEWORK_SDK_DIR} ***") + print(f"*** SDK exists: {exists(FRAMEWORK_SDK_DIR)} ***") + print(f"*** Include count: {include_count} ***") + print(f"*** Total path length: {total_length} ***") + + includes = env.get("CPPPATH", []) + framework_count = 0 + longest_paths = sorted(includes, key=len, reverse=True)[:5] + + print("*** Longest include paths: ***") + for i, inc in enumerate(longest_paths): + is_fw = is_framework_subfolder(inc) + if is_fw: + framework_count += 1 + print(f"*** {i+1}: {inc} (length: {len(str(inc))}) -> " + f"Framework: {is_fw} ***") + + print(f"*** Framework includes found: {framework_count}/" + f"{len(includes)} ***") + + # Show path distribution analysis + analysis = analyze_path_distribution(includes) + print(f"*** Path Analysis: Avg={analysis.get('average_length', 0):.1f}, " + f"Max={analysis.get('max_length', 0)}, " + f"Framework Avg={analysis.get('framework_avg_length', 0):.1f} ***") + + +def apply_include_shortening(env, node, includes, total_length): + """Applies include path shortening technique""" + env_get = env.get + to_unix_path = fs.to_unix_path + ccflags = env["CCFLAGS"] + asflags = env["ASFLAGS"] + + includes = [to_unix_path(inc) for inc in env_get("CPPPATH", [])] shortened_includes = [] generic_includes = [] + + original_length = total_length + saved_chars = 0 + for inc in includes: if is_framework_subfolder(inc): - shortened_includes.append( - "-iwithprefix/" - + fs.to_unix_path(os.path.relpath(inc, FRAMEWORK_SDK_DIR)) - ) + relative_path = to_unix_path(relpath(inc, FRAMEWORK_SDK_DIR)) + shortened_path = "-iwithprefix/" + relative_path + shortened_includes.append(shortened_path) + + # Calculate character savings + # Original: full path in -I flag + # New: -iprefix + shortened relative path + original_chars = len(f"-I{inc}") + new_chars = len(shortened_path) + saved_chars += max(0, original_chars - new_chars) else: generic_includes.append(inc) + # Show result message only once with thread safety + with _PATH_SHORTENING_LOCK: + if not _PATH_SHORTENING_MESSAGES['shortening_applied']: + if shortened_includes: + # Each -I is 2 chars + removed_i_flags = len(shortened_includes) * 2 + new_total_length = (original_length - saved_chars + + len(f"-iprefix{FRAMEWORK_SDK_DIR}") - + removed_i_flags) + print(f"*** Applied include path shortening for " + f"{len(shortened_includes)} framework paths ***") + print(f"*** Path length reduced from {original_length} to " + f"~{new_total_length} characters ***") + print(f"*** Estimated savings: {saved_chars} characters ***") + else: + if not _PATH_SHORTENING_MESSAGES[ + 'no_framework_paths_warning']: + print("*** Warning: Path length high but no framework " + "paths found for shortening ***") + print("*** This may indicate an architecture-specific " + "issue ***") + print("*** Run with -v (verbose) for detailed path " + "analysis ***") + _PATH_SHORTENING_MESSAGES[ + 'no_framework_paths_warning'] = True + _PATH_SHORTENING_MESSAGES['shortening_applied'] = True + + common_flags = ["-iprefix", FRAMEWORK_SDK_DIR] + shortened_includes + return env.Object( node, CPPPATH=generic_includes, - CCFLAGS=env["CCFLAGS"] - + ["-iprefix", FRAMEWORK_SDK_DIR] - + shortened_includes, - ASFLAGS=env["ASFLAGS"] - + ["-iprefix", FRAMEWORK_SDK_DIR] - + shortened_includes, + CCFLAGS=ccflags + common_flags, + ASFLAGS=asflags + common_flags, ) -# Check if framework = arduino, espidf is set -> compile Arduino as an component of IDF -# using platformio.ini entry 
since we modify the framework env var for Hybrid Compile! + +def smart_include_length_shorten(env, node): + """ + Include path shortening based on bleeding edge configurable threshold + with enhanced MCU support + Uses aggressive thresholds for maximum performance + """ + if IS_INTEGRATION_DUMP: + return node + + if not IS_WINDOWS: + return env.Object(node) + + # Get dynamically configurable bleeding edge threshold + include_path_threshold = get_include_path_threshold(env, config, + current_env_section) + + check_and_warn_long_path_support() + + includes = env.get("CPPPATH", []) + include_count = len(includes) + total_path_length = calculate_include_path_length(includes) + + # Debug information in verbose mode + if env.get("VERBOSE"): + debug_framework_paths(env, include_count, total_path_length) + + # Extended debug information about bleeding edge threshold + # configuration + threshold_info = get_threshold_info(env, config, current_env_section) + print("*** Bleeding Edge Threshold Configuration Debug ***") + print(f"*** MCU: {threshold_info['mcu']} ***") + print(f"*** Bleeding Edge Platform Default: " + f"{threshold_info['platform_default']} ***") + print(f"*** Final Bleeding Edge Threshold: " + f"{threshold_info['final_threshold']} ***") + print(f"*** Source: {threshold_info['source']} ***") + print("*** Performance Mode: Maximum Aggressive ***") + if threshold_info['env_variable']: + print(f"*** Env Variable: {threshold_info['env_variable']} ***") + if threshold_info['env_specific']: + print(f"*** Env Specific: {threshold_info['env_specific']} ***") + if threshold_info['global_env']: + print(f"*** Global Env: {threshold_info['global_env']} ***") + if threshold_info['platformio_section']: + print(f"*** PlatformIO Section: " + f"{threshold_info['platformio_section']} ***") + + # Use the configurable and validated bleeding edge threshold + if total_path_length <= include_path_threshold: + return env.Object(node) + + return apply_include_shortening(env, node, includes, total_path_length) + + def get_frameworks_in_current_env(): - current_env_section = "env:" + env["PIOENV"] + """Determines the frameworks of the current environment""" if "framework" in config.options(current_env_section): - frameworks = config.get(current_env_section, "framework", "") - return frameworks + return config.get(current_env_section, "framework", "") return [] + +# Framework check current_env_frameworks = get_frameworks_in_current_env() if "arduino" in current_env_frameworks and "espidf" in current_env_frameworks: # Arduino as component is set, switch off Hybrid compile flag_custom_sdkconfig = False -def call_compile_libs(): - if mcu == "esp32c2": - ARDUINO_FRMWRK_C2_LIB_DIR = join(platform.get_package_dir("framework-arduinoespressif32-libs"),mcu) - if not os.path.exists(ARDUINO_FRMWRK_C2_LIB_DIR): - ARDUINO_C2_DIR = join(platform.get_package_dir("framework-arduino-c2-skeleton-lib"),mcu) - shutil.copytree(ARDUINO_C2_DIR, ARDUINO_FRMWRK_C2_LIB_DIR, dirs_exist_ok=True) - print("*** Compile Arduino IDF libs for %s ***" % env["PIOENV"]) - SConscript("espidf.py") +# Framework reinstallation if required - Enhanced with secure deletion and +# error handling +if check_reinstall_frwrk(): + # Secure removal of SDKConfig files + safe_remove_sdkconfig_files() -if check_reinstall_frwrk() == True: - envs = [section.replace("env:", "") for section in config.sections() if section.startswith("env:")] - for env_name in envs: - file_path = join(env.subst("$PROJECT_DIR"), f"sdkconfig.{env_name}") - if exists(file_path): - 
os.remove(file_path) print("*** Reinstall Arduino framework ***") - shutil.rmtree(platform.get_package_dir("framework-arduinoespressif32")) - shutil.rmtree(platform.get_package_dir("framework-arduinoespressif32-libs")) - ARDUINO_FRMWRK_URL = str(platform.get_package_spec("framework-arduinoespressif32")).split("uri=",1)[1][:-1] - ARDUINO_FRMWRK_LIB_URL = str(platform.get_package_spec("framework-arduinoespressif32-libs")).split("uri=",1)[1][:-1] - pm.install(ARDUINO_FRMWRK_URL) - pm.install(ARDUINO_FRMWRK_LIB_URL) - if flag_custom_sdkconfig == True: - call_compile_libs() - flag_custom_sdkconfig = False - -if flag_custom_sdkconfig == True and flag_any_custom_sdkconfig == False: + + # Secure framework cleanup with enhanced error handling + if safe_framework_cleanup(): + arduino_frmwrk_url = str(platform.get_package_spec( + "framework-arduinoespressif32")).split("uri=", 1)[1][:-1] + arduino_frmwrk_lib_url = str(platform.get_package_spec( + "framework-arduinoespressif32-libs")).split("uri=", 1)[1][:-1] + pm.install(arduino_frmwrk_url) + pm.install(arduino_frmwrk_lib_url) + + if flag_custom_sdkconfig: + call_compile_libs() + flag_custom_sdkconfig = False + else: + logging.error("Framework cleanup failed - installation aborted") + sys.exit(1) + +if flag_custom_sdkconfig and not flag_any_custom_sdkconfig: call_compile_libs() -if "arduino" in env.subst("$PIOFRAMEWORK") and "espidf" not in env.subst("$PIOFRAMEWORK") and env.subst("$ARDUINO_LIB_COMPILE_FLAG") in ("Inactive", "True"): +# Main logic for Arduino Framework +pioframework = env.subst("$PIOFRAMEWORK") +arduino_lib_compile_flag = env.subst("$ARDUINO_LIB_COMPILE_FLAG") + +if ("arduino" in pioframework and "espidf" not in pioframework and + arduino_lib_compile_flag in ("Inactive", "True")): + # try to remove not needed include path if an lib_ignore entry exists + from component_manager import ComponentManager + component_manager = ComponentManager(env) + component_manager.handle_component_settings() + silent_action = env.Action(component_manager.restore_pioarduino_build_py) + # hack to silence scons command output + silent_action.strfunction = lambda target, source, env: '' + env.AddPostAction("checkprogsize", silent_action) + if IS_WINDOWS: - env.AddBuildMiddleware(shorthen_includes) - if os.path.exists(join(platform.get_package_dir( - "framework-arduinoespressif32"), "tools", "platformio-build.py")): - PIO_BUILD = "platformio-build.py" - else: - PIO_BUILD = "pioarduino-build.py" - SConscript(join(platform.get_package_dir("framework-arduinoespressif32"), "tools", PIO_BUILD)) + # Smart include path optimization based on bleeding edge configurable + # threshold + env.AddBuildMiddleware(smart_include_length_shorten) + + build_script_path = join(FRAMEWORK_DIR, "tools", "pioarduino-build.py") + SConscript(build_script_path) diff --git a/builder/frameworks/component_manager.py b/builder/frameworks/component_manager.py new file mode 100644 index 000000000..ae5da555d --- /dev/null +++ b/builder/frameworks/component_manager.py @@ -0,0 +1,1411 @@ +# component_manager.py +""" +ESP32 Arduino Framework Component Manager + +This module provides a comprehensive system for managing IDF components in ESP32 Arduino +framework builds. It handles component addition/removal, library mapping, project analysis, +and build file management with extensive logging support. 
+ +Classes: + ComponentLogger: Handles logging functionality for component operations + ComponentYamlHandler: Manages YAML file operations for component configuration + ProjectAnalyzer: Analyzes project dependencies and component usage + LibraryMapper: Maps library names to include paths + BuildFileManager: Manages backup and restoration of build files + ComponentManager: Main manager class that coordinates all component operations + +Author: Jason2866 ESP32 pioarduino Framework maintainer +License: Apache 2.0 +""" + +import os +import shutil +import re +import yaml +from yaml import SafeLoader +from os.path import join +from typing import Set, Optional, Dict, Any, List + + +class ComponentLogger: + """ + Handles logging functionality for component operations. + + This class provides a centralized logging mechanism for tracking all component + management operations, including changes, errors, and status updates. + + Attributes: + component_changes (List[str]): List of all logged change messages + + Example: + >>> logger = ComponentLogger() + >>> logger.log_change("Component added successfully") + >>> print(logger.get_change_count()) + 1 + """ + + def __init__(self): + """ + Initialize the logger with an empty change list. + + Creates a new ComponentLogger instance with an empty list to track + all component-related changes during the session. + """ + self.component_changes: List[str] = [] + + def log_change(self, message: str) -> None: + """ + Log a component change with simple console output. + + Records a change message both in the internal list and outputs it + to the console with a standardized format. + + Args: + message (str): The message to log describing the change + + Example: + >>> logger = ComponentLogger() + >>> logger.log_change("Added WiFi component") + [ComponentManager] Added WiFi component + """ + self.component_changes.append(message) + print(f"[ComponentManager] {message}") + + def get_changes(self) -> List[str]: + """ + Get all logged changes. + + Returns a copy of all change messages that have been logged during + the current session. + + Returns: + List[str]: List of all logged change messages + + Example: + >>> logger = ComponentLogger() + >>> logger.log_change("First change") + >>> logger.log_change("Second change") + >>> changes = logger.get_changes() + >>> len(changes) + 2 + """ + return self.component_changes + + def get_change_count(self) -> int: + """ + Get the number of changes logged. + + Returns the total count of changes that have been logged during + the current session. + + Returns: + int: Number of logged changes + + Example: + >>> logger = ComponentLogger() + >>> logger.log_change("Change 1") + >>> logger.get_change_count() + 1 + """ + return len(self.component_changes) + + +class ComponentYamlHandler: + """ + Handles YAML file operations for component configuration. + + This class manages all operations related to the idf_component.yml file, + including creation, loading, saving, and backup operations. It provides + a clean interface for component configuration management. + + Attributes: + logger (ComponentLogger): Logger instance for recording operations + + Example: + >>> logger = ComponentLogger() + >>> handler = ComponentYamlHandler(logger) + >>> data = handler.load_component_yml("path/to/component.yml") + """ + + def __init__(self, logger: ComponentLogger): + """ + Initialize the YAML handler. + + Creates a new ComponentYamlHandler with a reference to a logger + for recording all YAML-related operations. 
+ + Args: + logger (ComponentLogger): Logger instance for recording operations + """ + self.logger = logger + + def get_or_create_component_yml(self, arduino_framework_dir: str, project_src_dir: str) -> str: + """ + Get path to idf_component.yml, creating it if necessary. + + Searches for an existing idf_component.yml file in the Arduino framework + directory first, then in the project source directory. If neither exists, + creates a new default file in the project source directory. + + Args: + arduino_framework_dir (str): Path to Arduino framework directory + project_src_dir (str): Path to project source directory + + Returns: + str: Path to the component YAML file + + Example: + >>> handler = ComponentYamlHandler(logger) + >>> yml_path = handler.get_or_create_component_yml("/framework", "/project/src") + >>> os.path.exists(yml_path) + True + """ + # Try Arduino framework first + framework_yml = join(arduino_framework_dir, "idf_component.yml") + if os.path.exists(framework_yml): + self._create_backup(framework_yml) + return framework_yml + + # Try project source directory + project_yml = join(project_src_dir, "idf_component.yml") + if os.path.exists(project_yml): + self._create_backup(project_yml) + return project_yml + + # Create new file in project source + self._create_default_component_yml(project_yml) + self.logger.log_change(f"Created new component.yml file at {project_yml}") + return project_yml + + def load_component_yml(self, file_path: str) -> Dict[str, Any]: + """ + Load and parse idf_component.yml file. + + Attempts to load and parse a YAML file containing component configuration. + If the file doesn't exist or cannot be parsed, returns a default structure + with an empty dependencies section. + + Args: + file_path (str): Path to the YAML file to load + + Returns: + Dict[str, Any]: Parsed YAML data as dictionary with at least a 'dependencies' key + + Example: + >>> handler = ComponentYamlHandler(logger) + >>> data = handler.load_component_yml("component.yml") + >>> 'dependencies' in data + True + """ + try: + with open(file_path, "r") as f: + return yaml.load(f, Loader=SafeLoader) or {"dependencies": {}} + except Exception: + return {"dependencies": {}} + + def save_component_yml(self, file_path: str, data: Dict[str, Any]) -> None: + """ + Save component data to YAML file. + + Writes component configuration data to a YAML file with proper formatting. + Logs the operation result, including any errors that occur during saving. + + Args: + file_path (str): Path where to save the YAML file + data (Dict[str, Any]): Component data to save + + Example: + >>> handler = ComponentYamlHandler(logger) + >>> data = {"dependencies": {"esp_wifi": {"version": "*"}}} + >>> handler.save_component_yml("component.yml", data) + """ + try: + with open(file_path, "w") as f: + yaml.dump(data, f) + self.logger.log_change(f"Saved component configuration to {file_path}") + except Exception as e: + self.logger.log_change(f"Error saving component configuration: {str(e)}") + + def _create_backup(self, file_path: str) -> None: + """ + Create backup of a file. + + Creates a backup copy of the specified file by appending '.orig' to the filename. + Only creates the backup if it doesn't already exist to preserve the original. 
+ + Args: + file_path (str): Path to the file to backup + + Example: + >>> handler._create_backup("component.yml") + # Creates component.yml.orig if it doesn't exist + """ + backup_path = f"{file_path}.orig" + if not os.path.exists(backup_path): + shutil.copy(file_path, backup_path) + self.logger.log_change(f"Created backup: {backup_path}") + + def _create_default_component_yml(self, file_path: str) -> None: + """ + Create a default idf_component.yml file. + + Creates a new component YAML file with minimal default configuration + that includes only the IDF version requirement. + + Args: + file_path (str): Path where to create the default file + + Example: + >>> handler._create_default_component_yml("new_component.yml") + # Creates file with default IDF dependency + """ + default_content = { + "dependencies": { + "idf": ">=5.1" + } + } + + with open(file_path, 'w') as f: + yaml.dump(default_content, f) + + +class ProjectAnalyzer: + """ + Analyzes project dependencies and component usage. + + This class provides functionality to analyze project source files and + configuration to determine which ESP-IDF components are actually being + used. This helps prevent removal of critical components and optimizes + the build process. + + Attributes: + env: PlatformIO environment object + _project_components_cache (Optional[Set[str]]): Cached analysis results + + Example: + >>> analyzer = ProjectAnalyzer(env) + >>> used_components = analyzer.analyze_project_dependencies() + >>> analyzer.is_component_used_in_project("esp_wifi") + True + """ + + def __init__(self, env): + """ + Initialize the project analyzer. + + Creates a new ProjectAnalyzer with a reference to the PlatformIO + environment for accessing project configuration and files. + + Args: + env: PlatformIO environment object containing project information + """ + self.env = env + self._project_components_cache = None + + def analyze_project_dependencies(self) -> Set[str]: + """ + Analyze project files to detect actually used components/libraries. + + Performs a comprehensive analysis of project source files and library + dependencies to identify which ESP-IDF components are actually being + used in the project. This includes parsing source code for includes + and function calls, as well as analyzing lib_deps entries. + + Returns: + Set[str]: Set of component names that are used in the project + + Example: + >>> analyzer = ProjectAnalyzer(env) + >>> components = analyzer.analyze_project_dependencies() + >>> "esp_wifi" in components # If project uses WiFi + True + """ + used_components = set() + + try: + # Analyze project source files + src_dir = self.env.subst("$PROJECT_SRC_DIR") + if os.path.exists(src_dir): + for root, dirs, files in os.walk(src_dir): + for file in files: + if file.endswith(('.cpp', '.c', '.h', '.hpp', '.ino')): + file_path = os.path.join(root, file) + used_components.update(self._extract_components_from_file(file_path)) + + # Analyze lib_deps for explicit dependencies (if present) + lib_deps = self.env.GetProjectOption("lib_deps", []) + if isinstance(lib_deps, str): + lib_deps = [lib_deps] + + for dep in lib_deps: + used_components.update(self._extract_components_from_lib_dep(str(dep))) + + except Exception: + pass + + return used_components + + def is_component_used_in_project(self, lib_name: str) -> bool: + """ + Check if a component/library is actually used in the project. + + Determines whether a specific component or library is being used in the + project by checking against the cached analysis results. 
Uses both direct + matching and partial matching for related components. + + Args: + lib_name (str): Name of the library/component to check + + Returns: + bool: True if the component is used in the project, False otherwise + + Example: + >>> analyzer = ProjectAnalyzer(env) + >>> analyzer.is_component_used_in_project("esp_wifi") + True # If WiFi functionality is detected in project + """ + # Cache project analysis for performance + if self._project_components_cache is None: + self._project_components_cache = self.analyze_project_dependencies() + + lib_name_lower = lib_name.lower() + + # Direct match + if lib_name_lower in self._project_components_cache: + return True + + # Partial match for related components + for used_component in self._project_components_cache: + if lib_name_lower in used_component or used_component in lib_name_lower: + return True + + return False + + def _extract_components_from_file(self, file_path: str) -> Set[str]: + """ + Extract component usage from a single file by analyzing includes and function calls. + + Analyzes a source file to detect which ESP-IDF components are being used + by looking for specific patterns in the code such as include statements, + function calls, and API usage patterns. + + Args: + file_path (str): Path to the source file to analyze + + Returns: + Set[str]: Set of component names found in the file + + Example: + >>> analyzer = ProjectAnalyzer(env) + >>> components = analyzer._extract_components_from_file("main.cpp") + >>> "esp_wifi" in components # If file contains WiFi code + True + """ + components = set() + + # Component detection patterns - maps component names to code patterns + component_patterns = { + 'bt': ['bluetooth', 'ble', 'nimble', 'bt_', 'esp_bt', 'esp_ble'], + 'esp_wifi': ['wifi', 'esp_wifi', 'tcpip_adapter'], + 'esp_dsp': ['dsps_', 'esp_dsp', 'fft2r', 'dsps_fft2r'], # Enhanced DSP detection + 'esp_http_client': ['esp_http_client', 'http_client'], + 'esp_https_ota': ['esp_https_ota', 'esp_ota'], + 'mdns': ['mdns', 'esp_mdns'], + 'mqtt': ['mqtt', 'esp_mqtt'], + 'spiffs': ['spiffs', 'esp_spiffs'], + 'fatfs': ['fatfs', 'ff.h'], + 'nvs_flash': ['nvs', 'nvs_flash'], + 'esp_timer': ['esp_timer', 'timer_'], + 'driver': ['gpio_', 'uart_', 'spi_', 'i2c_', 'adc_', 'dac_'], + 'esp_camera': ['esp_camera', 'camera.h'], + 'esp_now': ['esp_now', 'espnow'], + 'esp_smartconfig': ['smartconfig', 'esp_smartconfig'], + 'esp_eth': ['esp_eth', 'ethernet'], + 'esp_websocket_client': ['websocket', 'esp_websocket'], + 'cjson': ['cjson', 'json'], + 'mbedtls': ['mbedtls', 'ssl'], + 'openssl': ['openssl'] + } + + try: + with open(file_path, 'r', encoding='utf-8', errors='ignore') as f: + content = f.read().lower() + + for component, patterns in component_patterns.items(): + if any(pattern in content for pattern in patterns): + components.add(component) + + except Exception: + pass + + return components + + def _extract_components_from_lib_dep(self, lib_dep: str) -> Set[str]: + """ + Extract components from lib_deps entry by mapping library names to ESP-IDF components. + + Analyzes a library dependency string from platformio.ini and maps it to + corresponding ESP-IDF components that would be required to support that library. 
+ + Args: + lib_dep (str): Library dependency string from platformio.ini + + Returns: + Set[str]: Set of ESP-IDF component names that correspond to the library + + Example: + >>> analyzer = ProjectAnalyzer(env) + >>> components = analyzer._extract_components_from_lib_dep("WiFi") + >>> "esp_wifi" in components + True + """ + components = set() + lib_dep_upper = lib_dep.upper() + + # Map lib_deps entries to ESP-IDF components + lib_dep_mapping = { + 'bt': ['BLE', 'BT', 'BLUETOOTH', 'NIMBLE'], + 'esp_wifi': ['WIFI', 'ASYNCTCP', 'ESPASYNCWEBSERVER'], + 'esp_dsp': ['DSP', 'FFT', 'JPEG'], + 'esp_http_client': ['HTTP', 'HTTPCLIENT'], + 'mqtt': ['MQTT', 'PUBSUB'], + 'esp_camera': ['CAMERA', 'ESP32CAM'], + 'esp_now': ['ESPNOW', 'ESP_NOW'], + 'mdns': ['MDNS'], + 'esp_eth': ['ETHERNET'] + } + + for component, keywords in lib_dep_mapping.items(): + if any(keyword in lib_dep_upper for keyword in keywords): + components.add(component) + + return components + + +class LibraryMapper: + """ + Handles mapping between library names and include paths. + + This class provides functionality to map Arduino library names to their + corresponding ESP-IDF component include paths. It maintains a comprehensive + mapping database and can analyze Arduino library properties to determine + the correct include paths. + + Attributes: + arduino_framework_dir (str): Path to Arduino framework directory + _arduino_libraries_cache (Optional[Dict[str, str]]): Cached library mappings + + Example: + >>> mapper = LibraryMapper("/path/to/arduino/framework") + >>> include_path = mapper.convert_lib_name_to_include("WiFi") + >>> include_path + "esp_wifi" + """ + + def __init__(self, arduino_framework_dir: str): + """ + Initialize the library mapper. + + Creates a new LibraryMapper with the path to the Arduino framework + directory for analyzing available libraries and their properties. + + Args: + arduino_framework_dir (str): Path to Arduino framework directory + """ + self.arduino_framework_dir = arduino_framework_dir + self._arduino_libraries_cache = None + + def convert_lib_name_to_include(self, lib_name: str) -> str: + """ + Convert library name to potential include directory name. + + Takes an Arduino library name and converts it to the corresponding + ESP-IDF component include path. This involves checking against known + Arduino libraries, applying naming conventions, and using fallback + mapping rules. 
+ + Args: + lib_name (str): Name of the library to convert + + Returns: + str: Converted include directory name + + Example: + >>> mapper = LibraryMapper("/arduino/framework") + >>> mapper.convert_lib_name_to_include("WiFi") + "esp_wifi" + >>> mapper.convert_lib_name_to_include("BluetoothSerial") + "bt" + """ + # Load Arduino Core Libraries on first call + if self._arduino_libraries_cache is None: + self._arduino_libraries_cache = self._get_arduino_core_libraries() + + lib_name_lower = lib_name.lower() + + # Check Arduino Core Libraries first + if lib_name_lower in self._arduino_libraries_cache: + return self._arduino_libraries_cache[lib_name_lower] + + # Remove common prefixes and suffixes + cleaned_name = lib_name_lower + + # Remove common prefixes + prefixes_to_remove = ['lib', 'arduino-', 'esp32-', 'esp-'] + for prefix in prefixes_to_remove: + if cleaned_name.startswith(prefix): + cleaned_name = cleaned_name[len(prefix):] + + # Remove common suffixes + suffixes_to_remove = ['-lib', '-library', '.h'] + for suffix in suffixes_to_remove: + if cleaned_name.endswith(suffix): + cleaned_name = cleaned_name[:-len(suffix)] + + # Check again with cleaned name + if cleaned_name in self._arduino_libraries_cache: + return self._arduino_libraries_cache[cleaned_name] + + # Direct mapping for common cases not in Arduino libraries + direct_mapping = { + 'ble': 'bt', + 'bluetooth': 'bt', + 'bluetoothserial': 'bt' + } + + if cleaned_name in direct_mapping: + return direct_mapping[cleaned_name] + + return cleaned_name + + def _get_arduino_core_libraries(self) -> Dict[str, str]: + """ + Get all Arduino core libraries and their corresponding include paths. + + Scans the Arduino framework libraries directory to build a comprehensive + mapping of library names to their corresponding include paths. This + includes reading library.properties files to get official library names. + + Returns: + Dict[str, str]: Dictionary mapping library names to include paths + + Example: + >>> mapper = LibraryMapper("/arduino/framework") + >>> libraries = mapper._get_arduino_core_libraries() + >>> "wifi" in libraries + True + """ + libraries_mapping = {} + + # Path to Arduino Core Libraries + arduino_libs_dir = join(self.arduino_framework_dir, "libraries") + + if not os.path.exists(arduino_libs_dir): + return libraries_mapping + + try: + for entry in os.listdir(arduino_libs_dir): + lib_path = join(arduino_libs_dir, entry) + if os.path.isdir(lib_path): + lib_name = self._get_library_name_from_properties(lib_path) + if lib_name: + include_path = self._map_library_to_include_path(lib_name, entry) + libraries_mapping[lib_name.lower()] = include_path + libraries_mapping[entry.lower()] = include_path # Also use directory name as key + except Exception: + pass + + return libraries_mapping + + def _get_library_name_from_properties(self, lib_dir: str) -> Optional[str]: + """ + Extract library name from library.properties file. + + Reads the library.properties file in an Arduino library directory + to extract the official library name as specified by the library author. 
+ + Args: + lib_dir (str): Library directory path + + Returns: + Optional[str]: Library name if found, None otherwise + + Example: + >>> mapper = LibraryMapper("/arduino/framework") + >>> name = mapper._get_library_name_from_properties("/path/to/WiFi") + >>> name + "WiFi" + """ + prop_path = join(lib_dir, "library.properties") + if not os.path.isfile(prop_path): + return None + + try: + with open(prop_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line.startswith('name='): + return line.split('=', 1)[1].strip() + except Exception: + pass + + return None + + def _map_library_to_include_path(self, lib_name: str, dir_name: str) -> str: + """ + Map library name to corresponding include path. + + Takes a library name and directory name and maps them to the appropriate + ESP-IDF component include path using an extensive mapping table that + covers both core ESP32 components and Arduino-specific libraries. + + Args: + lib_name (str): Official library name from properties file + dir_name (str): Directory name of the library + + Returns: + str: Mapped include path for the ESP-IDF component + + Example: + >>> mapper = LibraryMapper("/arduino/framework") + >>> path = mapper._map_library_to_include_path("WiFi", "WiFi") + >>> path + "esp_wifi" + """ + lib_name_lower = lib_name.lower().replace(' ', '').replace('-', '_') + dir_name_lower = dir_name.lower() + + # Extended mapping list with Arduino Core Libraries + extended_mapping = { + # Core ESP32 mappings + 'wifi': 'esp_wifi', + 'bluetooth': 'bt', + 'bluetoothserial': 'bt', + 'ble': 'bt', + 'bt': 'bt', + 'ethernet': 'esp_eth', + 'websocket': 'esp_websocket_client', + 'http': 'esp_http_client', + 'https': 'esp_https_ota', + 'ota': 'esp_https_ota', + 'spiffs': 'spiffs', + 'fatfs': 'fatfs', + 'mesh': 'esp_wifi_mesh', + 'smartconfig': 'esp_smartconfig', + 'mdns': 'mdns', + 'coap': 'coap', + 'mqtt': 'mqtt', + 'json': 'cjson', + 'mbedtls': 'mbedtls', + 'openssl': 'openssl', + + # Arduino Core specific mappings (safe mappings that don't conflict with critical components) + 'esp32blearduino': 'bt', + 'esp32_ble_arduino': 'bt', + 'esp32': 'esp32', + 'wire': 'driver', + 'spi': 'driver', + 'i2c': 'driver', + 'uart': 'driver', + 'serial': 'driver', + 'analogwrite': 'driver', + 'ledc': 'driver', + 'pwm': 'driver', + 'dac': 'driver', + 'adc': 'driver', + 'touch': 'driver', + 'hall': 'driver', + 'rtc': 'driver', + 'timer': 'esp_timer', + 'preferences': 'arduino_preferences', + 'eeprom': 'arduino_eeprom', + 'update': 'esp_https_ota', + 'httpupdate': 'esp_https_ota', + 'httpclient': 'esp_http_client', + 'httpsclient': 'esp_https_ota', + 'wifimanager': 'esp_wifi', + 'wificlientsecure': 'esp_wifi', + 'wifiserver': 'esp_wifi', + 'wifiudp': 'esp_wifi', + 'wificlient': 'esp_wifi', + 'wifiap': 'esp_wifi', + 'wifimulti': 'esp_wifi', + 'esp32webserver': 'esp_http_server', + 'webserver': 'esp_http_server', + 'asyncwebserver': 'esp_http_server', + 'dnsserver': 'lwip', + 'netbios': 'netbios', + 'simpletime': 'lwip', + 'fs': 'vfs', + 'sd': 'fatfs', + 'sd_mmc': 'fatfs', + 'littlefs': 'esp_littlefs', + 'ffat': 'fatfs', + 'camera': 'esp32_camera', + 'esp_camera': 'esp32_camera', + 'arducam': 'esp32_camera', + 'rainmaker': 'esp_rainmaker', + 'esp_rainmaker': 'esp_rainmaker', + 'provisioning': 'wifi_provisioning', + 'wifiprovisioning': 'wifi_provisioning', + 'espnow': 'esp_now', + 'esp_now': 'esp_now', + 'esptouch': 'esp_smartconfig', + 'ping': 'lwip', + 'netif': 'lwip', + 'tcpip': 'lwip' + } + + # Check extended mapping first + if lib_name_lower in 
extended_mapping: + return extended_mapping[lib_name_lower] + + # Check directory name + if dir_name_lower in extended_mapping: + return extended_mapping[dir_name_lower] + + # Fallback: Use directory name as include path + return dir_name_lower + + +class BuildFileManager: + """ + Manages backup and restoration of build files. + + This class handles all operations related to the pioarduino-build.py file, + including creating backups, restoring from backups, and modifying the file + to remove unwanted include entries for ignored libraries and components. + + Attributes: + arduino_libs_mcu (str): Path to Arduino libraries for specific MCU + mcu (str): MCU type (e.g., esp32, esp32s3, esp32c3) + logger (ComponentLogger): Logger instance for recording operations + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> manager.backup_pioarduino_build_py(env) + >>> manager.restore_pioarduino_build_py() + """ + + def __init__(self, arduino_libs_mcu: str, mcu: str, logger: ComponentLogger): + """ + Initialize the build file manager. + + Creates a new BuildFileManager with paths and configuration needed + to manage build file operations for a specific MCU type. + + Args: + arduino_libs_mcu (str): Path to Arduino libraries for specific MCU + mcu (str): MCU type (e.g., esp32, esp32s3, esp32c3) + logger (ComponentLogger): Logger instance for recording operations + """ + self.arduino_libs_mcu = arduino_libs_mcu + self.mcu = mcu + self.logger = logger + + def backup_pioarduino_build_py(self, env) -> None: + """ + Create backup of the original pioarduino-build.py. + + Creates a backup copy of the pioarduino-build.py file before making + any modifications. The backup is only created if it doesn't already + exist and only for Arduino framework projects. + + Args: + env: PlatformIO environment object for framework detection + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> manager.backup_pioarduino_build_py(env) + # Creates pioarduino-build.py.esp32 backup file + """ + if "arduino" not in env.subst("$PIOFRAMEWORK"): + return + + build_py_path = join(self.arduino_libs_mcu, "pioarduino-build.py") + backup_path = join(self.arduino_libs_mcu, f"pioarduino-build.py.{self.mcu}") + + if os.path.exists(build_py_path) and not os.path.exists(backup_path): + shutil.copy2(build_py_path, backup_path) + self.logger.log_change(f"Created backup of pioarduino-build.py for {self.mcu}") + + def restore_pioarduino_build_py(self) -> None: + """ + Restore the original pioarduino-build.py from backup. + + Restores the pioarduino-build.py file from its backup copy and removes + the backup file. This effectively undoes all modifications made to + the build file during the session. + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> manager.restore_pioarduino_build_py() + # Restores original file and removes backup + """ + build_py_path = join(self.arduino_libs_mcu, "pioarduino-build.py") + backup_path = join(self.arduino_libs_mcu, f"pioarduino-build.py.{self.mcu}") + + if os.path.exists(backup_path): + shutil.copy2(backup_path, build_py_path) + os.remove(backup_path) + self.logger.log_change("Restored original pioarduino-build.py from backup") + + def remove_ignored_lib_includes(self, ignored_libs: Set[str], project_analyzer: ProjectAnalyzer) -> None: + """ + Remove include entries for ignored libraries from pioarduino-build.py. + + Modifies the pioarduino-build.py file to remove CPPPATH entries for + libraries that are marked to be ignored. 
Includes safety checks to + prevent removal of libraries that are actually used in the project. + + Args: + ignored_libs (Set[str]): Set of library names to ignore + project_analyzer (ProjectAnalyzer): Analyzer to check if components are used + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> ignored = {"unused_lib", "another_lib"} + >>> manager.remove_ignored_lib_includes(ignored, analyzer) + """ + build_py_path = join(self.arduino_libs_mcu, "pioarduino-build.py") + + if not os.path.exists(build_py_path): + return + + try: + with open(build_py_path, 'r') as f: + content = f.read() + + original_content = content + total_removed = 0 + + # Remove CPPPATH entries for each ignored library + for lib_name in ignored_libs: + # Universal protection: Skip if component is actually used in project + if project_analyzer.is_component_used_in_project(lib_name): + self.logger.log_change(f"Skipping removal of library '{lib_name}' - detected as used in project") + continue + + # Multiple patterns to catch different include formats + patterns = [ + rf'.*join\([^,]*,\s*"include",\s*"{re.escape(lib_name)}"[^)]*\),?\n', + rf'.*"include/{re.escape(lib_name)}"[^,\n]*,?\n', + rf'.*"[^"]*include[^"]*{re.escape(lib_name)}[^"]*"[^,\n]*,?\n', + rf'.*"[^"]*/{re.escape(lib_name)}/include[^"]*"[^,\n]*,?\n', + rf'.*"[^"]*{re.escape(lib_name)}[^"]*include[^"]*"[^,\n]*,?\n', + rf'.*join\([^)]*"include"[^)]*"{re.escape(lib_name)}"[^)]*\),?\n', + rf'.*"{re.escape(lib_name)}/include"[^,\n]*,?\n', + rf'\s*"[^"]*/{re.escape(lib_name)}/[^"]*",?\n' + ] + + for pattern in patterns: + matches = re.findall(pattern, content) + if matches: + content = re.sub(pattern, '', content) + total_removed += len(matches) + self.logger.log_change(f"Removed {len(matches)} include entries for library '{lib_name}'") + + # Clean up empty lines and trailing commas + content = re.sub(r'\n\s*\n', '\n', content) + content = re.sub(r',\s*\n\s*\]', '\n]', content) + + # Validate and write changes + if self._validate_changes(original_content, content) and content != original_content: + with open(build_py_path, 'w') as f: + f.write(content) + self.logger.log_change(f"Successfully updated build file with {total_removed} total removals") + + except Exception as e: + self.logger.log_change(f"Error processing ignored library includes: {str(e)}") + + def remove_cpppath_entries(self, removed_components: Set[str]) -> None: + """ + Remove CPPPATH entries for removed components from pioarduino-build.py. + + Removes include path entries from the build file for components that + have been explicitly removed from the project configuration. This + helps clean up the build environment after component removal. 
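+
+        For illustration, a CPPPATH entry of the form
+        join(<libs dir>, "include", "esp_camera") would match the removal
+        patterns once the "esp_camera" component has been removed.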
+ + Args: + removed_components (Set[str]): Set of component names that were removed + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> removed = {"esp_camera", "esp_dsp"} + >>> manager.remove_cpppath_entries(removed) + """ + build_py_path = join(self.arduino_libs_mcu, "pioarduino-build.py") + + if not os.path.exists(build_py_path): + return + + try: + with open(build_py_path, 'r') as f: + content = f.read() + + original_content = content + + # Remove CPPPATH entries for each removed component + for component in removed_components: + patterns = [ + rf'.*join\([^,]*,\s*"include",\s*"{re.escape(component)}"[^)]*\),?\n', + rf'.*"include/{re.escape(component)}"[^,\n]*,?\n', + rf'.*"[^"]*include[^"]*{re.escape(component)}[^"]*"[^,\n]*,?\n' + ] + + for pattern in patterns: + content = re.sub(pattern, '', content) + + if content != original_content: + with open(build_py_path, 'w') as f: + f.write(content) + self.logger.log_change(f"Cleaned up CPPPATH entries for removed components") + + except Exception as e: + self.logger.log_change(f"Error cleaning up CPPPATH entries: {str(e)}") + + def _validate_changes(self, original_content: str, new_content: str) -> bool: + """ + Validate that the changes are reasonable. + + Performs safety checks on file modifications to ensure that the changes + are reasonable and won't break the build system. Prevents removal of + more than 50% of the file content. + + Args: + original_content (str): Original file content before modifications + new_content (str): Modified file content after changes + + Returns: + bool: True if changes are valid and safe, False otherwise + + Example: + >>> manager = BuildFileManager("/libs/esp32", "esp32", logger) + >>> is_valid = manager._validate_changes(original, modified) + >>> is_valid + True # If less than 50% of content was removed + """ + original_lines = len(original_content.splitlines()) + new_lines = len(new_content.splitlines()) + removed_lines = original_lines - new_lines + + # Don't allow removing more than 50% of the file or negative changes + return not (removed_lines > original_lines * 0.5 or removed_lines < 0) + + +class ComponentManager: + """ + Manages IDF components for ESP32 Arduino framework builds with logging support. + + This is the main class that coordinates all component management operations + for ESP32 Arduino framework projects. It handles component addition and removal, + library ignore processing, build file management, and provides comprehensive + logging of all operations. + + The ComponentManager integrates multiple specialized classes to provide a + complete solution for managing ESP-IDF components in PlatformIO Arduino + framework projects. 
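+
+    A typical platformio.ini fragment driving this class might look like
+    this (values are illustrative only):
+
+        [env:esp32dev]
+        custom_component_add = espressif/esp_dsp
+        custom_component_remove = esp_camera
+        lib_ignore = BluetoothSerial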
+ + Attributes: + env: PlatformIO environment object + platform: PlatformIO platform object + config: Project configuration object + board: Board configuration object + mcu (str): MCU type (e.g., esp32, esp32s3) + project_src_dir (str): Path to project source directory + arduino_framework_dir (str): Path to Arduino framework directory + arduino_libs_mcu (str): Path to Arduino libraries for specific MCU + removed_components (Set[str]): Set of removed component names + ignored_libs (Set[str]): Set of ignored library names + logger (ComponentLogger): Logger for all operations + yaml_handler (ComponentYamlHandler): YAML file operations handler + project_analyzer (ProjectAnalyzer): Project dependency analyzer + library_mapper (LibraryMapper): Library name to include path mapper + build_file_manager (BuildFileManager): Build file operations manager + + Example: + >>> manager = ComponentManager(env) + >>> manager.handle_component_settings(add_components=True, remove_components=True) + >>> manager.handle_lib_ignore() + >>> manager.restore_pioarduino_build_py() + """ + + def __init__(self, env): + """ + Initialize the ComponentManager with all required dependencies. + + Creates a new ComponentManager instance with all necessary helper classes + and configuration. Extracts essential information from the PlatformIO + environment and sets up the component tracking system. + + Args: + env: PlatformIO environment object containing project and build information + + Example: + >>> from component_manager import ComponentManager + >>> manager = ComponentManager(env) + >>> # Manager is now ready to handle component operations + """ + # Core PlatformIO environment attributes + self.env = env + self.platform = env.PioPlatform() + self.config = env.GetProjectConfig() + self.board = env.BoardConfig() + self.mcu = self.board.get("build.mcu", "esp32").lower() + + # Path configurations + self.project_src_dir = env.subst("$PROJECT_SRC_DIR") + self.arduino_framework_dir = self.platform.get_package_dir("framework-arduinoespressif32") + self.arduino_libs_mcu = join(self.platform.get_package_dir("framework-arduinoespressif32-libs"), self.mcu) + + # Component tracking sets + self.removed_components: Set[str] = set() + self.ignored_libs: Set[str] = set() + + # Initialize helper classes for different responsibilities + self.logger = ComponentLogger() + self.yaml_handler = ComponentYamlHandler(self.logger) + self.project_analyzer = ProjectAnalyzer(env) + self.library_mapper = LibraryMapper(self.arduino_framework_dir) + self.build_file_manager = BuildFileManager(self.arduino_libs_mcu, self.mcu, self.logger) + + def _log_change(self, message: str) -> None: + """ + Delegate to logger for backward compatibility. + + Provides backward compatibility by delegating logging calls to the + dedicated ComponentLogger instance. This maintains the same API + while using the refactored logging system. + + Args: + message (str): Message to log describing the change or operation + + Example: + >>> manager = ComponentManager(env) + >>> manager._log_change("Component operation completed") + """ + self.logger.log_change(message) + + def handle_component_settings(self, add_components: bool = False, remove_components: bool = False) -> None: + """ + Handle adding and removing IDF components based on project configuration. + + This is the main method for processing component additions and removals + based on the custom_component_add and custom_component_remove options + in the project configuration. 
It coordinates YAML file operations, + component cleanup, and build file management. + + Args: + add_components (bool): Whether to process component additions from config + remove_components (bool): Whether to process component removals from config + + Example: + >>> manager = ComponentManager(env) + >>> # Add and remove components based on platformio.ini settings + >>> manager.handle_component_settings(add_components=True, remove_components=True) + >>> # Only add components + >>> manager.handle_component_settings(add_components=True) + """ + # Create backup before first component removal and always when a component is added + if remove_components and not self.removed_components or add_components: + self.build_file_manager.backup_pioarduino_build_py(self.env) + self._log_change("Created backup of build file") + + # Check if env and GetProjectOption are available + if hasattr(self, 'env') and hasattr(self.env, 'GetProjectOption'): + component_yml_path = self.yaml_handler.get_or_create_component_yml( + self.arduino_framework_dir, self.project_src_dir) + component_data = self.yaml_handler.load_component_yml(component_yml_path) + + if remove_components: + try: + remove_option = self.env.GetProjectOption("custom_component_remove", None) + if remove_option: + components_to_remove = remove_option.splitlines() + self._remove_components(component_data, components_to_remove) + except Exception as e: + self._log_change(f"Error removing components: {str(e)}") + + if add_components: + try: + add_option = self.env.GetProjectOption("custom_component_add", None) + if add_option: + components_to_add = add_option.splitlines() + self._add_components(component_data, components_to_add) + except Exception as e: + self._log_change(f"Error adding components: {str(e)}") + + self.yaml_handler.save_component_yml(component_yml_path, component_data) + + # Clean up removed components + if self.removed_components: + self._cleanup_removed_components() + + self.handle_lib_ignore() + + # Print summary + if self.logger.get_change_count() > 0: + self._log_change(f"Session completed with {self.logger.get_change_count()} changes") + + def handle_lib_ignore(self) -> None: + """ + Handle lib_ignore entries from platformio.ini and remove corresponding includes. + + Processes the lib_ignore configuration option to remove unwanted library + includes from the build system. This helps reduce build time and binary + size by excluding unused libraries while protecting critical components. + + Example: + >>> manager = ComponentManager(env) + >>> # Process lib_ignore entries from platformio.ini + >>> manager.handle_lib_ignore() + """ + # Create backup before processing lib_ignore + if not self.ignored_libs: + self.build_file_manager.backup_pioarduino_build_py(self.env) + + # Get lib_ignore entries from current environment only + lib_ignore_entries = self._get_lib_ignore_entries() + + if lib_ignore_entries: + self.ignored_libs.update(lib_ignore_entries) + self.build_file_manager.remove_ignored_lib_includes(self.ignored_libs, self.project_analyzer) + self._log_change(f"Processed {len(lib_ignore_entries)} ignored libraries") + + def restore_pioarduino_build_py(self, source=None, target=None, env=None) -> None: + """ + Restore the original pioarduino-build.py from backup. + + Restores the build file to its original state, undoing all modifications + made during the session. This method maintains compatibility with + PlatformIO's callback system by accepting unused parameters. 
+ + Args: + source: Unused parameter for PlatformIO compatibility + target: Unused parameter for PlatformIO compatibility + env: Unused parameter for PlatformIO compatibility + + Example: + >>> manager = ComponentManager(env) + >>> # Restore original build file + >>> manager.restore_pioarduino_build_py() + >>> # Can also be used as PlatformIO callback + >>> env.AddPostAction("buildprog", manager.restore_pioarduino_build_py) + """ + self.build_file_manager.restore_pioarduino_build_py() + + def _get_lib_ignore_entries(self) -> List[str]: + """ + Get lib_ignore entries from current environment configuration only. + + Extracts and processes lib_ignore entries from the project configuration, + converting library names to include directory names and filtering out + critical ESP32 components that should never be ignored. + + Returns: + List[str]: List of library names to ignore after processing and filtering + + Example: + >>> manager = ComponentManager(env) + >>> ignored = manager._get_lib_ignore_entries() + >>> "esp_wifi" in ignored # Only if explicitly ignored and not critical + False # WiFi is typically critical + """ + try: + # Get lib_ignore from current environment only + lib_ignore = self.env.GetProjectOption("lib_ignore", []) + + if isinstance(lib_ignore, str): + lib_ignore = [lib_ignore] + elif lib_ignore is None: + lib_ignore = [] + + # Clean and normalize entries + cleaned_entries = [] + for entry in lib_ignore: + entry = str(entry).strip() + if entry: + # Convert library names to potential include directory names + include_name = self.library_mapper.convert_lib_name_to_include(entry) + cleaned_entries.append(include_name) + + # Filter out critical ESP32 components that should never be ignored + critical_components = [ + 'lwip', # Network stack + 'freertos', # Real-time OS + 'esp_system', # System functions + 'esp_common', # Common ESP functions + 'driver', # Hardware drivers + 'nvs_flash', # Non-volatile storage + 'spi_flash', # Flash memory access + 'esp_timer', # Timer functions + 'esp_event', # Event system + 'log' # Logging system + ] + + filtered_entries = [] + for entry in cleaned_entries: + if entry not in critical_components: + filtered_entries.append(entry) + + return filtered_entries + + except Exception: + return [] + + def _remove_components(self, component_data: Dict[str, Any], components_to_remove: list) -> None: + """ + Remove specified components from the configuration. + + Removes components from the idf_component.yml dependencies section + and tracks them for filesystem cleanup. Empty component names are + automatically skipped. 
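+
+        Registry-style names are also tracked in filesystem form for cleanup,
+        so removing "espressif/esp_camera" (illustrative) later schedules the
+        "espressif__esp_camera" include directory for deletion.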
+ + Args: + component_data (Dict[str, Any]): Component configuration data from YAML + components_to_remove (list): List of component names to remove + + Example: + >>> manager = ComponentManager(env) + >>> data = {"dependencies": {"esp_camera": {"version": "*"}}} + >>> manager._remove_components(data, ["esp_camera"]) + >>> "esp_camera" in data["dependencies"] + False + """ + dependencies = component_data.setdefault("dependencies", {}) + + for component in components_to_remove: + component = component.strip() + if not component: + continue + + if component in dependencies: + del dependencies[component] + self._log_change(f"Removed component: {component}") + + # Track for cleanup + filesystem_name = self._convert_component_name_to_filesystem(component) + self.removed_components.add(filesystem_name) + + def _add_components(self, component_data: Dict[str, Any], components_to_add: list) -> None: + """ + Add specified components to the configuration. + + Adds components to the idf_component.yml dependencies section with + version specifications. Components that are too short (≤4 characters) + or already exist are automatically skipped. + + Args: + component_data (Dict[str, Any]): Component configuration data from YAML + components_to_add (list): List of component entries to add (can include versions) + + Example: + >>> manager = ComponentManager(env) + >>> data = {"dependencies": {}} + >>> manager._add_components(data, ["esp_camera@1.0.0", "esp_dsp"]) + >>> "esp_camera" in data["dependencies"] + True + """ + dependencies = component_data.setdefault("dependencies", {}) + + for component in components_to_add: + component = component.strip() + if len(component) <= 4: # Skip too short entries + continue + + component_name, version = self._parse_component_entry(component) + + if component_name not in dependencies: + dependencies[component_name] = {"version": version} + self._log_change(f"Added component: {component_name} (version: {version})") + + def _parse_component_entry(self, entry: str) -> tuple[str, str]: + """ + Parse component entry into name and version. + + Parses a component specification string that may include version + information separated by '@' symbol. If no version is specified, + defaults to wildcard version. + + Args: + entry (str): Component entry string (e.g., "component@1.0.0" or "component") + + Returns: + tuple[str, str]: Tuple of (component_name, version) + + Example: + >>> manager = ComponentManager(env) + >>> name, version = manager._parse_component_entry("esp_camera@1.0.0") + >>> name, version + ("esp_camera", "1.0.0") + >>> name, version = manager._parse_component_entry("esp_dsp") + >>> name, version + ("esp_dsp", "*") + """ + if "@" in entry: + name, version = entry.split("@", 1) + return (name.strip(), version.strip()) + return (entry.strip(), "*") + + def _convert_component_name_to_filesystem(self, component_name: str) -> str: + """ + Convert component name from registry format to filesystem format. + + Converts component names from ESP-IDF component registry format + (which uses forward slashes) to filesystem-safe format (using + double underscores) for directory operations. 
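+
+        This mirrors the "namespace__name" directory naming used by the IDF
+        component manager for managed components, so later include-directory
+        cleanup can locate the right folder.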
+ + Args: + component_name (str): Component name in registry format + + Returns: + str: Component name in filesystem-safe format + + Example: + >>> manager = ComponentManager(env) + >>> fs_name = manager._convert_component_name_to_filesystem("espressif/esp_camera") + >>> fs_name + "espressif__esp_camera" + """ + return component_name.replace("/", "__") + + def _cleanup_removed_components(self) -> None: + """ + Clean up removed components and restore original build file. + + Performs cleanup operations for components that have been removed, + including removing their include directories and cleaning up + CPPPATH entries from the build file. + + Example: + >>> manager = ComponentManager(env) + >>> # After removing components + >>> manager._cleanup_removed_components() + # Removes include directories and cleans build file + """ + for component in self.removed_components: + self._remove_include_directory(component) + + self.build_file_manager.remove_cpppath_entries(self.removed_components) + + def _remove_include_directory(self, component: str) -> None: + """ + Remove include directory for a component. + + Removes the include directory for a specific component from the + Arduino libraries directory. This helps clean up the filesystem + after component removal. + + Args: + component (str): Component name whose include directory should be removed + + Example: + >>> manager = ComponentManager(env) + >>> manager._remove_include_directory("esp_camera") + # Removes /path/to/libs/esp32/include/esp_camera directory + """ + include_path = join(self.arduino_libs_mcu, "include", component) + + if os.path.exists(include_path): + shutil.rmtree(include_path) + self._log_change(f"Removed include directory: {include_path}") diff --git a/builder/frameworks/espidf.py b/builder/frameworks/espidf.py index 5859d0720..c8581b55f 100644 --- a/builder/frameworks/espidf.py +++ b/builder/frameworks/espidf.py @@ -155,12 +155,17 @@ def _get_installed_standard_pip_packages(): assert os.path.isdir(FRAMEWORK_DIR) assert os.path.isdir(TOOLCHAIN_DIR) +def create_silent_action(action_func): + """Create a silent SCons action that suppresses output""" + silent_action = env.Action(action_func) + silent_action.strfunction = lambda target, source, env: '' + return silent_action if "arduino" in env.subst("$PIOFRAMEWORK"): ARDUINO_FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoespressif32") ARDUINO_FRMWRK_LIB_DIR = platform.get_package_dir("framework-arduinoespressif32-libs") if mcu == "esp32c2": - ARDUINO_FRMWRK_C2_LIB_DIR = join(platform.get_package_dir("framework-arduinoespressif32-libs"),mcu) + ARDUINO_FRMWRK_C2_LIB_DIR = join(ARDUINO_FRMWRK_LIB_DIR, mcu) if not os.path.exists(ARDUINO_FRMWRK_C2_LIB_DIR): ARDUINO_C2_DIR = join(platform.get_package_dir("framework-arduino-c2-skeleton-lib"),mcu) shutil.copytree(ARDUINO_C2_DIR, ARDUINO_FRMWRK_C2_LIB_DIR, dirs_exist_ok=True) @@ -173,6 +178,7 @@ def _get_installed_standard_pip_packages(): os.rename(ARDUINO_FRAMEWORK_DIR, new_path) ARDUINO_FRAMEWORK_DIR = new_path assert ARDUINO_FRAMEWORK_DIR and os.path.isdir(ARDUINO_FRAMEWORK_DIR) + arduino_libs_mcu = join(platform.get_package_dir("framework-arduinoespressif32-libs"),mcu) BUILD_DIR = env.subst("$BUILD_DIR") PROJECT_DIR = env.subst("$PROJECT_DIR") @@ -183,6 +189,18 @@ def _get_installed_standard_pip_packages(): os.path.join(PROJECT_DIR, "sdkconfig.%s" % env.subst("$PIOENV")), )) +def contains_path_traversal(url): + """Check for Path Traversal patterns""" + dangerous_patterns = [ + '../', '..\\', # Standard Path Traversal + 
'%2e%2e%2f', '%2e%2e%5c', # URL-encoded + '..%2f', '..%5c', # Mixed + '%252e%252e%252f', # Double encoded + ] + + url_lower = url.lower() + return any(pattern in url_lower for pattern in dangerous_patterns) + # # generate modified Arduino IDF sdkconfig, applying settings from "custom_sdkconfig" # @@ -197,177 +215,205 @@ def _get_installed_standard_pip_packages(): flag_custom_sdkonfig = True def HandleArduinoIDFsettings(env): + """ + Handles Arduino IDF settings configuration with custom sdkconfig support. + """ + def get_MD5_hash(phrase): + """Generate MD5 hash for checksum validation.""" import hashlib - return hashlib.md5((phrase).encode('utf-8')).hexdigest()[:16] + return hashlib.md5(phrase.encode('utf-8')).hexdigest()[:16] - def custom_sdkconfig_file(string): - if not config.has_option("env:"+env["PIOENV"], "custom_sdkconfig"): + def load_custom_sdkconfig_file(): + """Load custom sdkconfig from file or URL if specified.""" + if not config.has_option("env:" + env["PIOENV"], "custom_sdkconfig"): return "" - sdkconfig_entrys = env.GetProjectOption("custom_sdkconfig").splitlines() - for file in sdkconfig_entrys: - if "http" in file and "://" in file: - response = requests.get(file.split(" ")[0]) - if response.ok: - target = str(response.content.decode('utf-8')) + + sdkconfig_entries = env.GetProjectOption("custom_sdkconfig").splitlines() + + for file_entry in sdkconfig_entries: + # Handle HTTP/HTTPS URLs + if "http" in file_entry and "://" in file_entry: + url = file_entry.split(" ")[0] + # Path Traversal protection + if contains_path_traversal(url): + print(f"Path Traversal detected: {url} check your URL path") else: - print("Failed to download:", file) - return "" - return target - if "file://" in file: - file_path = join(PROJECT_DIR,file.lstrip("file://").split(os.path.sep)[-1]) + try: + response = requests.get(file_entry.split(" ")[0], timeout=10) + if response.ok: + return response.content.decode('utf-8') + except requests.RequestException as e: + print(f"Error downloading {file_entry}: {e}") + except UnicodeDecodeError as e: + print(f"Error decoding response from {file_entry}: {e}") + return "" + + # Handle local files + if "file://" in file_entry: + file_ref = file_entry[7:] if file_entry.startswith("file://") else file_entry + filename = os.path.basename(file_ref) + file_path = join(PROJECT_DIR, filename) if os.path.exists(file_path): - with open(file_path, 'r') as file: - target = file.read() + try: + with open(file_path, 'r') as f: + return f.read() + except IOError as e: + print(f"Error reading file {file_path}: {e}") + return "" else: - print("File not found:", file_path) + print("File not found, check path:", file_path) return "" - return target + return "" + def extract_flag_name(line): + """Extract flag name from sdkconfig line.""" + line = line.strip() + if line.startswith("#") and "is not set" in line: + return line.split(" ")[1] + elif not line.startswith("#") and "=" in line: + return line.split("=")[0] + return None - custom_sdk_config_flags = "" - board_idf_config_flags = "" - sdkconfig_file_flags = "" - custom_sdkconfig_file_str = "" - - if config.has_option("env:"+env["PIOENV"], "custom_sdkconfig"): - flag_custom_sdkonfig = True - custom_sdk_config_flags = (env.GetProjectOption("custom_sdkconfig").rstrip("\n")) + "\n" - custom_sdkconfig_file_str = custom_sdkconfig_file(sdkconfig_file_flags) - - if "espidf.custom_sdkconfig" in board: - board_idf_config_flags = ('\n'.join([element for element in board.get("espidf.custom_sdkconfig", "")])).rstrip("\n") + "\n" - 
flag_custom_sdkonfig = True - - if flag_custom_sdkonfig == True: # TDOO duplicated - print("*** Add \"custom_sdkconfig\" settings to IDF sdkconfig.defaults ***") - idf_config_flags = custom_sdk_config_flags - if custom_sdkconfig_file_str != "": - sdkconfig_file_flags = custom_sdkconfig_file_str + "\n" - idf_config_flags = sdkconfig_file_flags + idf_config_flags - idf_config_flags = board_idf_config_flags + idf_config_flags + def build_idf_config_flags(): + """Build complete IDF configuration flags from all sources.""" + flags = [] + + # Add board-specific flags first + if "espidf.custom_sdkconfig" in board: + board_flags = board.get("espidf.custom_sdkconfig", []) + if board_flags: + flags.extend(board_flags) + + # Add custom sdkconfig file content + custom_file_content = load_custom_sdkconfig_file() + if custom_file_content: + flags.append(custom_file_content) + + # Add project-level custom sdkconfig + if config.has_option("env:" + env["PIOENV"], "custom_sdkconfig"): + custom_flags = env.GetProjectOption("custom_sdkconfig").rstrip("\n") + if custom_flags: + flags.append(custom_flags) + + return "\n".join(flags) + "\n" if flags else "" + + def add_flash_configuration(config_flags): + """Add flash frequency and mode configuration.""" if flash_frequency != "80m": - idf_config_flags = idf_config_flags + "# CONFIG_ESPTOOLPY_FLASHFREQ_80M is not set\n" - esptool_flashfreq_y = "CONFIG_ESPTOOLPY_FLASHFREQ_%s=y\n" % flash_frequency.upper() - esptool_flashfreq_M = "CONFIG_ESPTOOLPY_FLASHFREQ=\"%s\"\n" % flash_frequency - idf_config_flags = idf_config_flags + esptool_flashfreq_y + esptool_flashfreq_M + config_flags += "# CONFIG_ESPTOOLPY_FLASHFREQ_80M is not set\n" + config_flags += f"CONFIG_ESPTOOLPY_FLASHFREQ_{flash_frequency.upper()}=y\n" + config_flags += f"CONFIG_ESPTOOLPY_FLASHFREQ=\"{flash_frequency}\"\n" + if flash_mode != "qio": - idf_config_flags = idf_config_flags + "# CONFIG_ESPTOOLPY_FLASHMODE_QIO is not set\n" - esptool_flashmode = "CONFIG_ESPTOOLPY_FLASHMODE_%s=y\n" % flash_mode.upper() - if esptool_flashmode not in idf_config_flags: - idf_config_flags = idf_config_flags + esptool_flashmode - if mcu in ("esp32") and "CONFIG_FREERTOS_UNICORE=y" in idf_config_flags: - idf_config_flags = idf_config_flags + "# CONFIG_SPIRAM is not set\n" - - idf_config_flags = idf_config_flags.splitlines() - sdkconfig_src = join(ARDUINO_FRMWRK_LIB_DIR,mcu,"sdkconfig") - - def get_flag(line): - if line.startswith("#") and "is not set" in line: - return line.split(" ")[1] - elif not line.startswith("#") and len(line.split("=")) > 1: - return line.split("=")[0] - else: - return None - - with open(sdkconfig_src) as src: - sdkconfig_dst = os.path.join(PROJECT_DIR, "sdkconfig.defaults") - dst = open(sdkconfig_dst,"w") - dst.write("# TASMOTA__"+ get_MD5_hash(''.join(custom_sdk_config_flags).strip() + mcu) +"\n") - while line := src.readline(): - flag = get_flag(line) - if flag is None: + config_flags += "# CONFIG_ESPTOOLPY_FLASHMODE_QIO is not set\n" + + flash_mode_flag = f"CONFIG_ESPTOOLPY_FLASHMODE_{flash_mode.upper()}=y\n" + if flash_mode_flag not in config_flags: + config_flags += flash_mode_flag + + # ESP32 specific SPIRAM configuration + if mcu == "esp32" and "CONFIG_FREERTOS_UNICORE=y" in config_flags: + config_flags += "# CONFIG_SPIRAM is not set\n" + + return config_flags + + def write_sdkconfig_file(idf_config_flags, checksum_source): + if "arduino" not in env.subst("$PIOFRAMEWORK"): + print("Error: Arduino framework required for sdkconfig processing") + return + """Write the final sdkconfig.defaults 
file with checksum.""" + sdkconfig_src = join(arduino_libs_mcu, "sdkconfig") + sdkconfig_dst = join(PROJECT_DIR, "sdkconfig.defaults") + + # Generate checksum for validation (maintains original logic) + checksum = get_MD5_hash(checksum_source.strip() + mcu) + + with open(sdkconfig_src, 'r', encoding='utf-8') as src, open(sdkconfig_dst, 'w', encoding='utf-8') as dst: + # Write checksum header (critical for compilation decision logic) + dst.write(f"# TASMOTA__{checksum}\n") + + processed_flags = set() + + # Process each line from source sdkconfig + for line in src: + flag_name = extract_flag_name(line) + + if flag_name is None: dst.write(line) - else: - no_match = True - for item in idf_config_flags: - if flag == get_flag(item.replace("\'", "")): - dst.write(item.replace("\'", "")+"\n") - no_match = False - print("Replace:",line,"with:",item.replace("\'", "")) - idf_config_flags.remove(item) - if no_match: - dst.write(line) - for item in idf_config_flags: # are there new flags? - print("Add:",item.replace("\'", "")) - dst.write(item.replace("\'", "")+"\n") - dst.close() - return - else: + continue + + # Check if we have a custom replacement for this flag + flag_replaced = False + for custom_flag in idf_config_flags[:]: # Create copy for safe removal + custom_flag_name = extract_flag_name(custom_flag.replace("'", "")) + + if flag_name == custom_flag_name: + cleaned_flag = custom_flag.replace("'", "") + dst.write(cleaned_flag + "\n") + print(f"Replace: {line.strip()} with: {cleaned_flag}") + idf_config_flags.remove(custom_flag) + processed_flags.add(custom_flag_name) + flag_replaced = True + break + + if not flag_replaced: + dst.write(line) + + # Add any remaining new flags + for remaining_flag in idf_config_flags: + cleaned_flag = remaining_flag.replace("'", "") + print(f"Add: {cleaned_flag}") + dst.write(cleaned_flag + "\n") + + # Main execution logic + has_custom_config = ( + config.has_option("env:" + env["PIOENV"], "custom_sdkconfig") or + "espidf.custom_sdkconfig" in board + ) + + if not has_custom_config: return + + print("*** Add \"custom_sdkconfig\" settings to IDF sdkconfig.defaults ***") + + # Build complete configuration + idf_config_flags = build_idf_config_flags() + idf_config_flags = add_flash_configuration(idf_config_flags) + + # Convert to list for processing + idf_config_list = [line for line in idf_config_flags.splitlines() if line.strip()] + + # Write final configuration file with checksum + custom_sdk_config_flags = "" + if config.has_option("env:" + env["PIOENV"], "custom_sdkconfig"): + custom_sdk_config_flags = env.GetProjectOption("custom_sdkconfig").rstrip("\n") + "\n" + + write_sdkconfig_file(idf_config_list, custom_sdk_config_flags) + + def HandleCOMPONENTsettings(env): - if flag_custom_component_add == True or flag_custom_component_remove == True: # todo remove duplicated - import yaml - from yaml import SafeLoader - print("*** \"custom_component\" is used to (de)select managed idf components ***") - if flag_custom_component_remove == True: - idf_custom_component_remove = env.GetProjectOption("custom_component_remove").splitlines() - else: - idf_custom_component_remove = "" - if flag_custom_component_add == True: - idf_custom_component_add = env.GetProjectOption("custom_component_add").splitlines() - else: - idf_custom_component_add = "" - - # search "idf_component.yml" file - try: # 1.st in Arduino framework - idf_component_yml_src = os.path.join(ARDUINO_FRAMEWORK_DIR, "idf_component.yml") - 
shutil.copy(join(ARDUINO_FRAMEWORK_DIR,"idf_component.yml"),join(ARDUINO_FRAMEWORK_DIR,"idf_component.yml.orig")) - yml_file_dir = idf_component_yml_src - except: # 2.nd Project source - try: - idf_component_yml_src = os.path.join(PROJECT_SRC_DIR, "idf_component.yml") - shutil.copy(join(PROJECT_SRC_DIR,"idf_component.yml"),join(PROJECT_SRC_DIR,"idf_component.yml.orig")) - yml_file_dir = idf_component_yml_src - except: # no idf_component.yml in Project source -> create - idf_component_yml_src = os.path.join(PROJECT_SRC_DIR, "idf_component.yml") - yml_file_dir = idf_component_yml_src - idf_component_yml_str = """ - dependencies: - idf: \">=5.1\" - """ - idf_component_yml = yaml.safe_load(idf_component_yml_str) - with open(idf_component_yml_src, 'w',) as f : - yaml.dump(idf_component_yml,f) - - yaml_file=open(idf_component_yml_src,"r") - idf_component=yaml.load(yaml_file, Loader=SafeLoader) - idf_component_str=json.dumps(idf_component) # convert to json string - idf_component_json=json.loads(idf_component_str) # convert string to json dict - - if idf_custom_component_remove != "": - for entry in idf_custom_component_remove: - # checking if the entry exists before removing - if entry in idf_component_json["dependencies"]: - print("*** Removing component:",entry) - del idf_component_json["dependencies"][entry] - - if idf_custom_component_add != "": - for entry in idf_custom_component_add: - if len(str(entry)) > 4: # too short or empty entry - # add new entrys to json - if "@" in entry: - idf_comp_entry = str(entry.split("@")[0]).replace(" ", "") - idf_comp_vers = str(entry.split("@")[1]).replace(" ", "") - else: - idf_comp_entry = str(entry).replace(" ", "") - idf_comp_vers = "*" - if idf_comp_entry not in idf_component_json["dependencies"]: - print("*** Adding component:", idf_comp_entry, idf_comp_vers) - new_entry = {idf_comp_entry: {"version": idf_comp_vers}} - idf_component_json["dependencies"].update(new_entry) - - idf_component_yml_file = open(yml_file_dir,"w") - yaml.dump(idf_component_json, idf_component_yml_file) - idf_component_yml_file.close() - # print("JSON from modified idf_component.yml:") - # print(json.dumps(idf_component_json)) + from component_manager import ComponentManager + component_manager = ComponentManager(env) + + if flag_custom_component_add or flag_custom_component_remove: + actions = [action for flag, action in [ + (flag_custom_component_add, "select"), + (flag_custom_component_remove, "deselect") + ] if flag] + action_text = " and ".join(actions) + print(f"*** \"custom_component\" is used to {action_text} managed idf components ***") + + component_manager.handle_component_settings( + add_components=flag_custom_component_add, + remove_components=flag_custom_component_remove + ) return return -if flag_custom_component_add == True or flag_custom_component_remove == True: +if "arduino" in env.subst("$PIOFRAMEWORK"): HandleCOMPONENTsettings(env) if flag_custom_sdkonfig == True and "arduino" in env.subst("$PIOFRAMEWORK") and "espidf" not in env.subst("$PIOFRAMEWORK"): @@ -2172,7 +2218,12 @@ def idf_lib_copy(source, target, env): print("*** Original Arduino \"idf_component.yml\" restored ***") except: print("*** Original Arduino \"idf_component.yml\" couldnt be restored ***") - env.AddPostAction("checkprogsize", idf_lib_copy) + # Restore original pioarduino-build.py + from component_manager import ComponentManager + component_manager = ComponentManager(env) + component_manager.restore_pioarduino_build_py() + silent_action = create_silent_action(idf_lib_copy) + 
env.AddPostAction("checkprogsize", silent_action) if "espidf" in env.subst("$PIOFRAMEWORK") and (flag_custom_component_add == True or flag_custom_component_remove == True): def idf_custom_component(source, target, env): @@ -2188,8 +2239,14 @@ def idf_custom_component(source, target, env): os.remove(join(PROJECT_SRC_DIR,"idf_component.yml")) print("*** pioarduino generated \"idf_component.yml\" removed ***") except: - print("*** \"idf_component.yml\" couldnt be removed ***") - env.AddPostAction("checkprogsize", idf_custom_component) + print("*** no custom \"idf_component.yml\" found for removing ***") + if "arduino" in env.subst("$PIOFRAMEWORK"): + # Restore original pioarduino-build.py, only used with Arduino + from component_manager import ComponentManager + component_manager = ComponentManager(env) + component_manager.restore_pioarduino_build_py() + silent_action = create_silent_action(idf_custom_component) + env.AddPostAction("checkprogsize", silent_action) # # Process OTA partition and image # @@ -2244,7 +2301,7 @@ def _parse_size(value): partitions_csv = env.subst("$PARTITIONS_TABLE_CSV") result = [] next_offset = 0 -bound = int(board.get("upload.offset_address", "0x10000"), 16) # default 0x10000 +bound = 0x10000 with open(partitions_csv) as fp: for line in fp.readlines(): line = line.strip() diff --git a/builder/main.py b/builder/main.py index ee2ae007d..dde87dfde 100644 --- a/builder/main.py +++ b/builder/main.py @@ -12,31 +12,41 @@ # See the License for the specific language governing permissions and # limitations under the License. +import locale import os import re -import locale +import shlex +import subprocess +import sys from os.path import isfile, join from SCons.Script import ( - ARGUMENTS, COMMAND_LINE_TARGETS, AlwaysBuild, Builder, Default, - DefaultEnvironment) + ARGUMENTS, + COMMAND_LINE_TARGETS, + AlwaysBuild, + Builder, + Default, + DefaultEnvironment, +) -from platformio.util import get_serial_ports from platformio.project.helpers import get_project_dir +from platformio.util import get_serial_ports - +# Initialize environment and configuration env = DefaultEnvironment() platform = env.PioPlatform() projectconfig = env.GetProjectConfig() terminal_cp = locale.getpreferredencoding().lower() -# -# Helpers -# - +# Framework directory path FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoespressif32") + def BeforeUpload(target, source, env): + """ + Prepare the environment before uploading firmware. + Handles port detection and special upload configurations. + """ upload_options = {} if "BOARD" in env: upload_options = env.BoardConfig().get("upload", {}) @@ -53,6 +63,10 @@ def BeforeUpload(target, source, env): def _get_board_memory_type(env): + """ + Determine the memory type configuration for the board. + Returns the appropriate memory type string based on board configuration. + """ board_config = env.BoardConfig() default_type = "%s_%s" % ( board_config.get("build.flash_mode", "dio"), @@ -68,33 +82,46 @@ def _get_board_memory_type(env): ), ) + def _normalize_frequency(frequency): + """ + Convert frequency value to normalized string format (e.g., "40m"). + Removes 'L' suffix and converts to MHz format. 
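+    For example, 40000000 (or "40000000L") becomes "40m".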
+ """ frequency = str(frequency).replace("L", "") return str(int(int(frequency) / 1000000)) + "m" + def _get_board_f_flash(env): + """Get the flash frequency for the board.""" frequency = env.subst("$BOARD_F_FLASH") return _normalize_frequency(frequency) + def _get_board_f_image(env): + """Get the image frequency for the board, fallback to flash frequency.""" board_config = env.BoardConfig() if "build.f_image" in board_config: return _normalize_frequency(board_config.get("build.f_image")) return _get_board_f_flash(env) + def _get_board_f_boot(env): + """Get the boot frequency for the board, fallback to flash frequency.""" board_config = env.BoardConfig() if "build.f_boot" in board_config: return _normalize_frequency(board_config.get("build.f_boot")) return _get_board_f_flash(env) + def _get_board_flash_mode(env): - if _get_board_memory_type(env) in ( - "opi_opi", - "opi_qspi", - ): + """ + Determine the appropriate flash mode for the board. + Handles special cases for OPI memory types. + """ + if _get_board_memory_type(env) in ("opi_opi", "opi_qspi"): return "dout" mode = env.subst("$BOARD_FLASH_MODE") @@ -102,14 +129,24 @@ def _get_board_flash_mode(env): return "dio" return mode + def _get_board_boot_mode(env): + """ + Determine the boot mode for the board. + Handles special cases for OPI memory types. + """ memory_type = env.BoardConfig().get("build.arduino.memory_type", "") build_boot = env.BoardConfig().get("build.boot", "$BOARD_FLASH_MODE") if memory_type in ("opi_opi", "opi_qspi"): build_boot = "opi" return build_boot + def _parse_size(value): + """ + Parse size values from various formats (int, hex, K/M suffixes). + Returns the size in bytes as an integer. + """ if isinstance(value, int): return value elif value.isdigit(): @@ -121,17 +158,25 @@ def _parse_size(value): return int(value[:-1]) * base return value + def _parse_partitions(env): + """ + Parse the partition table CSV file and return partition information. + Also sets the application offset for the environment. + """ partitions_csv = env.subst("$PARTITIONS_TABLE_CSV") if not isfile(partitions_csv): - sys.stderr.write("Could not find the file %s with partitions " - "table.\n" % partitions_csv) + sys.stderr.write( + "Could not find the file %s with partitions table.\n" + % partitions_csv + ) env.Exit(1) return result = [] next_offset = 0 - app_offset = 0x10000 # default address for firmware + app_offset = 0x10000 # Default address for firmware + with open(partitions_csv) as fp: for line in fp.readlines(): line = line.strip() @@ -148,24 +193,34 @@ def _parse_partitions(env): "subtype": tokens[2], "offset": tokens[3] or calculated_offset, "size": tokens[4], - "flags": tokens[5] if len(tokens) > 5 else None + "flags": tokens[5] if len(tokens) > 5 else None, } result.append(partition) next_offset = _parse_size(partition["offset"]) - if (partition["subtype"] == "ota_0"): + if partition["subtype"] == "ota_0": app_offset = next_offset next_offset = next_offset + _parse_size(partition["size"]) + # Configure application partition offset env.Replace(ESP32_APP_OFFSET=str(hex(app_offset))) # Propagate application offset to debug configurations - env["INTEGRATION_EXTRA_DATA"].update({"application_offset": str(hex(app_offset))}) + env["INTEGRATION_EXTRA_DATA"].update( + {"application_offset": str(hex(app_offset))} + ) return result + def _update_max_upload_size(env): + """ + Update the maximum upload size based on partition table configuration. + Prioritizes user-specified partition names. 
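+    If the requested partition is missing from the table, a warning is
+    printed and the default application partition size is used instead.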
+ """ if not env.get("PARTITIONS_TABLE_CSV"): return + sizes = { - p["subtype"]: _parse_size(p["size"]) for p in _parse_partitions(env) + p["subtype"]: _parse_size(p["size"]) + for p in _parse_partitions(env) if p["type"] in ("0", "app") } @@ -176,12 +231,15 @@ def _update_max_upload_size(env): if custom_app_partition_name: selected_partition = partitions.get(custom_app_partition_name, {}) if selected_partition: - board.update("upload.maximum_size", _parse_size(selected_partition["size"])) + board.update( + "upload.maximum_size", _parse_size(selected_partition["size"]) + ) return else: print( - "Warning! Selected partition `%s` is not available in the partition " \ - "table! Default partition will be used!" % custom_app_partition_name + "Warning! Selected partition `%s` is not available in the " + "partition table! Default partition will be used!" + % custom_app_partition_name ) for p in partitions.values(): @@ -189,17 +247,24 @@ def _update_max_upload_size(env): board.update("upload.maximum_size", _parse_size(p["size"])) break + def _to_unix_slashes(path): + """Convert Windows-style backslashes to Unix-style forward slashes.""" return path.replace("\\", "/") -# -# Filesystem helpers -# def fetch_fs_size(env): + """ + Extract filesystem size and offset information from partition table. + Sets FS_START, FS_SIZE, FS_PAGE, and FS_BLOCK environment variables. + """ fs = None for p in _parse_partitions(env): - if p["type"] == "data" and p["subtype"] in ("spiffs", "fat", "littlefs"): + if p["type"] == "data" and p["subtype"] in ( + "spiffs", + "fat", + "littlefs", + ): fs = p if not fs: sys.stderr.write( @@ -208,6 +273,7 @@ def fetch_fs_size(env): ) env.Exit(1) return + env["FS_START"] = _parse_size(fs["offset"]) env["FS_SIZE"] = _parse_size(fs["size"]) env["FS_PAGE"] = int("0x100", 16) @@ -219,20 +285,39 @@ def fetch_fs_size(env): env["FS_START"] += 4096 env["FS_SIZE"] -= 4096 + def __fetch_fs_size(target, source, env): + """Wrapper function for fetch_fs_size to be used as SCons emitter.""" fetch_fs_size(env) return (target, source) + +def check_lib_archive_exists(): + """ + Check if lib_archive is set in platformio.ini configuration. + Returns True if found, False otherwise. 
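+    Only the presence of the option matters here; when it is absent from
+    every section, the build later forces lib_archive = False for the
+    active environment.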
+ """ + for section in projectconfig.sections(): + if "lib_archive" in projectconfig.options(section): + return True + return False + + +# Initialize board configuration and MCU settings board = env.BoardConfig() mcu = board.get("build.mcu", "esp32") toolchain_arch = "xtensa-%s" % mcu filesystem = board.get("build.filesystem", "littlefs") + +# Set toolchain architecture for RISC-V based ESP32 variants if mcu in ("esp32c2", "esp32c3", "esp32c5", "esp32c6", "esp32h2", "esp32p4"): toolchain_arch = "riscv32-esp" +# Initialize integration extra data if not present if "INTEGRATION_EXTRA_DATA" not in env: env["INTEGRATION_EXTRA_DATA"] = {} +# Configure build tools and environment variables env.Replace( __get_board_boot_mode=_get_board_boot_mode, __get_board_f_flash=_get_board_f_flash, @@ -240,7 +325,6 @@ def __fetch_fs_size(target, source, env): __get_board_f_boot=_get_board_f_boot, __get_board_flash_mode=_get_board_flash_mode, __get_board_memory_type=_get_board_memory_type, - AR="%s-elf-gcc-ar" % toolchain_arch, AS="%s-elf-as" % toolchain_arch, CC="%s-elf-gcc" % toolchain_arch, @@ -248,7 +332,14 @@ def __fetch_fs_size(target, source, env): GDB=join( platform.get_package_dir( "tool-riscv32-esp-elf-gdb" - if mcu in ("esp32c2", "esp32c3", "esp32c5", "esp32c6", "esp32h2", "esp32p4") + if mcu in ( + "esp32c2", + "esp32c3", + "esp32c5", + "esp32c6", + "esp32h2", + "esp32p4", + ) else "tool-xtensa-esp-elf-gdb" ) or "", @@ -258,20 +349,14 @@ def __fetch_fs_size(target, source, env): OBJCOPY=join(platform.get_package_dir("tool-esptoolpy") or "", "esptool.py"), RANLIB="%s-elf-gcc-ranlib" % toolchain_arch, SIZETOOL="%s-elf-size" % toolchain_arch, - ARFLAGS=["rc"], - - SIZEPROGREGEXP=r"^(?:\.iram0\.text|\.iram0\.vectors|\.dram0\.data|\.flash\.text|\.flash\.rodata|)\s+([0-9]+).*", + SIZEPROGREGEXP=r"^(?:\.iram0\.text|\.iram0\.vectors|\.dram0\.data|" + r"\.flash\.text|\.flash\.rodata|)\s+([0-9]+).*", SIZEDATAREGEXP=r"^(?:\.dram0\.data|\.dram0\.bss|\.noinit)\s+([0-9]+).*", SIZECHECKCMD="$SIZETOOL -A -d $SOURCES", SIZEPRINTCMD="$SIZETOOL -B -d $SOURCES", - - ERASEFLAGS=[ - "--chip", mcu, - "--port", '"$UPLOAD_PORT"' - ], + ERASEFLAGS=["--chip", mcu, "--port", '"$UPLOAD_PORT"'], ERASECMD='"$PYTHONEXE" "$OBJCOPY" $ERASEFLAGS erase-flash', - # mkspiffs package contains two different binaries for IDF and Arduino MKFSTOOL="mk%s" % filesystem + ( @@ -286,61 +371,61 @@ def __fetch_fs_size(target, source, env): if filesystem == "spiffs" else "" ), - # Legacy `ESP32_SPIFFS_IMAGE_NAME` is used as the second fallback value for - # backward compatibility + # Legacy `ESP32_SPIFFS_IMAGE_NAME` is used as the second fallback value + # for backward compatibility ESP32_FS_IMAGE_NAME=env.get( - "ESP32_FS_IMAGE_NAME", env.get("ESP32_SPIFFS_IMAGE_NAME", filesystem) + "ESP32_FS_IMAGE_NAME", + env.get("ESP32_SPIFFS_IMAGE_NAME", filesystem), + ), + ESP32_APP_OFFSET=env.get("INTEGRATION_EXTRA_DATA").get( + "application_offset" ), - - ESP32_APP_OFFSET=env.get("INTEGRATION_EXTRA_DATA").get("application_offset"), ARDUINO_LIB_COMPILE_FLAG="Inactive", - - PROGSUFFIX=".elf" + PROGSUFFIX=".elf", ) # Check if lib_archive is set in platformio.ini and set it to False # if not found. This makes weak defs in framework and libs possible. 
-def check_lib_archive_exists(): - for section in projectconfig.sections(): - if "lib_archive" in projectconfig.options(section): - #print(f"lib_archive in [{section}] found with value: {projectconfig.get(section, 'lib_archive')}") - return True - #print("lib_archive was not found in platformio.ini") - return False - if not check_lib_archive_exists(): env_section = "env:" + env["PIOENV"] projectconfig.set(env_section, "lib_archive", "False") - #print(f"lib_archive is set to False in [{env_section}]") # Allow user to override via pre:script if env.get("PROGNAME", "program") == "program": env.Replace(PROGNAME="firmware") +# Configure build actions and builders env.Append( BUILDERS=dict( ElfToBin=Builder( - action=env.VerboseAction(" ".join([ - '"$PYTHONEXE" "$OBJCOPY"', - "--chip", mcu, "elf2image", - "--flash-mode", "${__get_board_flash_mode(__env__)}", - "--flash-freq", "${__get_board_f_image(__env__)}", - "--flash-size", board.get("upload.flash_size", "4MB"), - "-o", "$TARGET", "$SOURCES" - ]), "Building $TARGET"), - suffix=".bin" + action=env.VerboseAction( + " ".join( + [ + '"$PYTHONEXE" "$OBJCOPY"', + "--chip", + mcu, + "elf2image", + "--flash-mode", + "${__get_board_flash_mode(__env__)}", + "--flash-freq", + "${__get_board_f_image(__env__)}", + "--flash-size", + board.get("upload.flash_size", "4MB"), + "-o", + "$TARGET", + "$SOURCES", + ] + ), + "Building $TARGET", + ), + suffix=".bin", ), DataToBin=Builder( action=env.VerboseAction( " ".join( ['"$MKFSTOOL"', "-c", "$SOURCES", "-s", "$FS_SIZE"] + ( - [ - "-p", - "$FS_PAGE", - "-b", - "$FS_BLOCK", - ] + ["-p", "$FS_PAGE", "-b", "$FS_BLOCK"] if filesystem in ("littlefs", "spiffs") else [] ) @@ -355,10 +440,10 @@ def check_lib_archive_exists(): ) ) +# Load framework-specific configuration if not env.get("PIOFRAMEWORK"): env.SConscript("frameworks/_bare.py", exports="env") - def firmware_metrics(target, source, env): """ Custom target to run esp-idf-size with support for command line parameters @@ -440,7 +525,8 @@ def firmware_metrics(target, source, env): else: target_elf = env.BuildProgram() silent_action = env.Action(firmware_metrics) - silent_action.strfunction = lambda target, source, env: '' # hack to silence scons command output + # Hack to silence scons command output + silent_action.strfunction = lambda target, source, env: "" env.AddPostAction(target_elf, silent_action) if set(["buildfs", "uploadfs", "uploadfsota"]) & set(COMMAND_LINE_TARGETS): target_firm = env.DataToBin( @@ -449,26 +535,27 @@ def firmware_metrics(target, source, env): env.NoCache(target_firm) AlwaysBuild(target_firm) else: - target_firm = env.ElfToBin( - join("$BUILD_DIR", "${PROGNAME}"), target_elf) + target_firm = env.ElfToBin(join("$BUILD_DIR", "${PROGNAME}"), target_elf) env.Depends(target_firm, "checkprogsize") -env.AddPlatformTarget("buildfs", target_firm, target_firm, "Build Filesystem Image") +# Configure platform targets +env.AddPlatformTarget( + "buildfs", target_firm, target_firm, "Build Filesystem Image" +) AlwaysBuild(env.Alias("nobuild", target_firm)) target_buildprog = env.Alias("buildprog", target_firm, target_firm) -# update max upload size based on CSV file +# Update max upload size based on CSV file if env.get("PIOMAINPROG"): env.AddPreAction( "checkprogsize", env.VerboseAction( lambda source, target, env: _update_max_upload_size(env), - "Retrieving maximum program size $SOURCES")) + "Retrieving maximum program size $SOURCES", + ), + ) -# # Target: Print binary size -# - target_size = env.AddPlatformTarget( "size", target_elf, @@ -477,25 
+564,25 @@ def firmware_metrics(target, source, env): "Calculate program size", ) -# # Target: Upload firmware or FS image -# - upload_protocol = env.subst("$UPLOAD_PROTOCOL") debug_tools = board.get("debug.tools", {}) upload_actions = [] # Compatibility with old OTA configurations -if (upload_protocol != "espota" - and re.match(r"\"?((([0-9]{1,3}\.){3}[0-9]{1,3})|[^\\/]+\.local)\"?$", - env.get("UPLOAD_PORT", ""))): +if upload_protocol != "espota" and re.match( + r"\"?((([0-9]{1,3}\.){3}[0-9]{1,3})|[^\\/]+\.local)\"?$", + env.get("UPLOAD_PORT", ""), +): upload_protocol = "espota" sys.stderr.write( "Warning! We have just detected `upload_port` as IP address or host " "name of ESP device. `upload_protocol` is switched to `espota`.\n" "Please specify `upload_protocol = espota` in `platformio.ini` " - "project configuration file.\n") + "project configuration file.\n" + ) +# Configure upload protocol: ESP OTA if upload_protocol == "espota": if not env.subst("$UPLOAD_PORT"): sys.stderr.write( @@ -503,32 +590,45 @@ def firmware_metrics(target, source, env): "using `upload_port` for build environment or use " "global `--upload-port` option.\n" "See https://docs.platformio.org/page/platforms/" - "espressif32.html#over-the-air-ota-update\n") + "espressif32.html#over-the-air-ota-update\n" + ) env.Replace( - UPLOADER=join(FRAMEWORK_DIR,"tools", "espota.py"), + UPLOADER=join(FRAMEWORK_DIR, "tools", "espota.py"), UPLOADERFLAGS=["--debug", "--progress", "-i", "$UPLOAD_PORT"], - UPLOADCMD='"$PYTHONEXE" "$UPLOADER" $UPLOADERFLAGS -f $SOURCE' + UPLOADCMD='"$PYTHONEXE" "$UPLOADER" $UPLOADERFLAGS -f $SOURCE', ) if set(["uploadfs", "uploadfsota"]) & set(COMMAND_LINE_TARGETS): env.Append(UPLOADERFLAGS=["--spiffs"]) upload_actions = [env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE")] +# Configure upload protocol: esptool elif upload_protocol == "esptool": env.Replace( UPLOADER=join( - platform.get_package_dir("tool-esptoolpy") or "", "esptool.py"), + platform.get_package_dir("tool-esptoolpy") or "", "esptool.py" + ), UPLOADERFLAGS=[ - "--chip", mcu, - "--port", '"$UPLOAD_PORT"', - "--baud", "$UPLOAD_SPEED", - "--before", board.get("upload.before_reset", "default-reset"), - "--after", board.get("upload.after_reset", "hard-reset"), - "write-flash", "-z", - "--flash-mode", "${__get_board_flash_mode(__env__)}", - "--flash-freq", "${__get_board_f_image(__env__)}", - "--flash-size", "detect" + "--chip", + mcu, + "--port", + '"$UPLOAD_PORT"', + "--baud", + "$UPLOAD_SPEED", + "--before", + board.get("upload.before_reset", "default-reset"), + "--after", + board.get("upload.after_reset", "hard-reset"), + "write-flash", + "-z", + "--flash-mode", + "${__get_board_flash_mode(__env__)}", + "--flash-freq", + "${__get_board_f_image(__env__)}", + "--flash-size", + "detect", ], - UPLOADCMD='"$PYTHONEXE" "$UPLOADER" $UPLOADERFLAGS $ESP32_APP_OFFSET $SOURCE' + UPLOADCMD='"$PYTHONEXE" "$UPLOADER" $UPLOADERFLAGS ' + "$ESP32_APP_OFFSET $SOURCE", ) for image in env.get("FLASH_EXTRA_IMAGES", []): env.Append(UPLOADERFLAGS=[image[0], env.subst(image[1])]) @@ -536,27 +636,36 @@ def firmware_metrics(target, source, env): if "uploadfs" in COMMAND_LINE_TARGETS: env.Replace( UPLOADERFLAGS=[ - "--chip", mcu, - "--port", '"$UPLOAD_PORT"', - "--baud", "$UPLOAD_SPEED", - "--before", board.get("upload.before_reset", "default-reset"), - "--after", board.get("upload.after_reset", "hard-reset"), - "write-flash", "-z", - "--flash-mode", "${__get_board_flash_mode(__env__)}", - "--flash-freq", "${__get_board_f_image(__env__)}", - "--flash-size", 
"detect", - "$FS_START" + "--chip", + mcu, + "--port", + '"$UPLOAD_PORT"', + "--baud", + "$UPLOAD_SPEED", + "--before", + board.get("upload.before_reset", "default-reset"), + "--after", + board.get("upload.after_reset", "hard-reset"), + "write-flash", + "-z", + "--flash-mode", + "${__get_board_flash_mode(__env__)}", + "--flash-freq", + "${__get_board_f_image(__env__)}", + "--flash-size", + "detect", + "$FS_START", ], UPLOADCMD='"$PYTHONEXE" "$UPLOADER" $UPLOADERFLAGS $SOURCE', ) upload_actions = [ env.VerboseAction(BeforeUpload, "Looking for upload port..."), - env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE") + env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE"), ] +# Configure upload protocol: DFU elif upload_protocol == "dfu": - hwids = board.get("build.hwids", [["0x2341", "0x0070"]]) vid = hwids[0][0] pid = hwids[0][1] @@ -571,16 +680,18 @@ def firmware_metrics(target, source, env): "-d", ",".join(["%s:%s" % (hwid[0], hwid[1]) for hwid in hwids]), "-Q", - "-D" + "-D", ], UPLOADCMD='"$UPLOADER" $UPLOADERFLAGS "$SOURCE"', ) +# Configure upload protocol: Debug tools (OpenOCD) elif upload_protocol in debug_tools: _parse_partitions(env) openocd_args = ["-d%d" % (2 if int(ARGUMENTS.get("PIOVERBOSE", 0)) else 1)] openocd_args.extend( - debug_tools.get(upload_protocol).get("server").get("arguments", [])) + debug_tools.get(upload_protocol).get("server").get("arguments", []) + ) openocd_args.extend( [ "-c", @@ -608,7 +719,9 @@ def firmware_metrics(target, source, env): f.replace( "$PACKAGE_DIR", _to_unix_slashes( - platform.get_package_dir("tool-openocd-esp32") or "")) + platform.get_package_dir("tool-openocd-esp32") or "" + ), + ) for f in openocd_args ] env.Replace( @@ -618,81 +731,71 @@ def firmware_metrics(target, source, env): ) upload_actions = [env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE")] -# custom upload tool +# Configure upload protocol: Custom elif upload_protocol == "custom": upload_actions = [env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE")] else: sys.stderr.write("Warning! 
Unknown upload protocol %s\n" % upload_protocol) - +# Register upload targets env.AddPlatformTarget("upload", target_firm, upload_actions, "Upload") -env.AddPlatformTarget("uploadfs", target_firm, upload_actions, "Upload Filesystem Image") env.AddPlatformTarget( - "uploadfsota", target_firm, upload_actions, "Upload Filesystem Image OTA") + "uploadfs", target_firm, upload_actions, "Upload Filesystem Image" +) +env.AddPlatformTarget( + "uploadfsota", + target_firm, + upload_actions, + "Upload Filesystem Image OTA", +) -# # Target: Erase Flash and Upload -# - env.AddPlatformTarget( "erase_upload", target_firm, [ env.VerboseAction(BeforeUpload, "Looking for upload port..."), env.VerboseAction("$ERASECMD", "Erasing..."), - env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE") + env.VerboseAction("$UPLOADCMD", "Uploading $SOURCE"), ], "Erase Flash and Upload", ) -# # Target: Erase Flash -# - env.AddPlatformTarget( "erase", None, [ env.VerboseAction(BeforeUpload, "Looking for upload port..."), - env.VerboseAction("$ERASECMD", "Erasing...") + env.VerboseAction("$ERASECMD", "Erasing..."), ], "Erase Flash", ) -# -# Register Custom Target -# +# Register Custom Target for firmware metrics env.AddCustomTarget( name="metrics", dependencies="$BUILD_DIR/${PROGNAME}.elf", actions=firmware_metrics, title="Firmware Size Metrics", - description="Analyze firmware size using esp-idf-size (supports CLI args after --)", - always_build=True + description="Analyze firmware size using esp-idf-size " + "(supports CLI args after --)", + always_build=True, ) -# # Additional Target without Build-Dependency when already compiled -# env.AddCustomTarget( name="metrics-only", dependencies=None, actions=firmware_metrics, title="Firmware Size Metrics (No Build)", description="Analyze firmware size without building first", - always_build=True + always_build=True, ) - -# # Override memory inspection behavior -# - env.SConscript("sizedata.py", exports="env") -# -# Default targets -# - +# Set default targets Default([target_buildprog, target_size]) diff --git a/examples/arduino-blink/platformio.ini b/examples/arduino-blink/platformio.ini index 5a1d46f12..14ee644d1 100644 --- a/examples/arduino-blink/platformio.ini +++ b/examples/arduino-blink/platformio.ini @@ -8,31 +8,44 @@ ; http://docs.platformio.org/page/projectconf.html [env:esp32solo1] -platform = espressif32 -framework = arduino -board = esp32-solo1 -build_flags = -DLED_BUILTIN=2 -custom_component_remove = - espressif/esp_hosted - espressif/esp_wifi_remote - espressif/esp-dsp - espressif/esp32-camera - espressif/libsodium - espressif/esp-modbus - espressif/qrcode - espressif/esp_insights - espressif/esp_diag_data_store - espressif/esp_diagnostics - espressif/esp_rainmaker - espressif/rmaker_common +platform = espressif32 +framework = arduino +board = esp32-solo1 +build_flags = -DLED_BUILTIN=2 +lib_ignore = wifi + spiffs + NetworkClientSecure + bt +custom_component_remove = espressif/esp_hosted + espressif/esp_wifi_remote + espressif/esp_modem + chmorgan/esp-libhelix-mp3 + espressif/esp-dsp + espressif/esp32-camera + espressif/libsodium + espressif/esp-modbus + espressif/qrcode + espressif/esp_insights + espressif/esp_diag_data_store + espressif/esp_diagnostics + espressif/esp_rainmaker + espressif/rmaker_common + espressif/network_provisioning + joltwallet/littlefs [env:esp32-c2-devkitm-1] -platform = espressif32 -framework = arduino -board = esp32-c2-devkitm-1 -monitor_speed = 115200 +platform = espressif32 +framework = arduino +board = esp32-c2-devkitm-1 +monitor_speed = 
115200 +lib_ignore = wifi + spiffs + NetworkClientSecure + bt custom_component_remove = espressif/esp_hosted espressif/esp_wifi_remote + espressif/esp_modem + chmorgan/esp-libhelix-mp3 espressif/esp-dsp espressif/esp32-camera espressif/libsodium @@ -43,13 +56,18 @@ custom_component_remove = espressif/esp_hosted espressif/esp_diagnostics espressif/esp_rainmaker espressif/rmaker_common -custom_component_add = espressif/cmake_utilities @ 0.* + espressif/network_provisioning + joltwallet/littlefs +custom_component_add = espressif/cmake_utilities @ 0.* [env:esp32-s3-arduino_nano_esp32] platform = espressif32 framework = arduino board = arduino_nano_esp32 monitor_speed = 115200 +lib_ignore = wifi + spiffs + NetworkClientSecure custom_component_remove = espressif/esp_hosted espressif/esp_wifi_remote espressif/esp-dsp @@ -68,6 +86,9 @@ custom_component_remove = espressif/esp_hosted platform = espressif32 framework = arduino board = esp32s3_120_16_8-qio_opi +lib_ignore = + spiffs + NetworkClientSecure custom_sdkconfig = CONFIG_SPIRAM_MODE_OCT=y CONFIG_SPIRAM_SPEED_120M=y CONFIG_LCD_RGB_ISR_IRAM_SAFE=y @@ -95,6 +116,9 @@ framework = arduino build_type = debug board = esp32-c6-devkitc-1 monitor_speed = 115200 +lib_ignore = wifi + spiffs + NetworkClientSecure custom_component_remove = espressif/esp_hosted espressif/esp_wifi_remote espressif/mdns @@ -107,6 +131,9 @@ platform = espressif32 framework = arduino board = esp32-h2-devkitm-1 monitor_speed = 115200 +lib_ignore = + spiffs + NetworkClientSecure custom_component_remove = espressif/esp_hosted espressif/esp_wifi_remote espressif/mdns @@ -119,6 +146,9 @@ platform = espressif32 framework = arduino board = esp32-p4 build_flags = -DLED_BUILTIN=2 +lib_ignore = wifi + spiffs + NetworkClientSecure monitor_speed = 115200 custom_component_remove = espressif/esp_hosted espressif/esp_wifi_remote diff --git a/examples/arduino-rmt-blink/platformio.ini b/examples/arduino-rmt-blink/platformio.ini index e3c6beacd..c40a21bd1 100644 --- a/examples/arduino-rmt-blink/platformio.ini +++ b/examples/arduino-rmt-blink/platformio.ini @@ -2,6 +2,9 @@ platform = espressif32 framework = arduino board = esp32-s2-saola-1 +lib_ignore = wifi + spiffs + NetworkClientSecure build_flags = -DBUILTIN_RGBLED_PIN=18 -DNR_OF_LEDS=1 @@ -9,6 +12,9 @@ build_flags = -DBUILTIN_RGBLED_PIN=18 platform = espressif32 framework = arduino board = esp32-s3-devkitc-1 +lib_ignore = wifi + spiffs + NetworkClientSecure build_flags = -DBUILTIN_RGBLED_PIN=48 -DNR_OF_LEDS=1 @@ -16,6 +22,9 @@ build_flags = -DBUILTIN_RGBLED_PIN=48 platform = espressif32 framework = arduino board = esp32-c3-devkitm-1 +lib_ignore = wifi + spiffs + NetworkClientSecure build_flags = -DBUILTIN_RGBLED_PIN=8 -DNR_OF_LEDS=1 @@ -23,5 +32,8 @@ build_flags = -DBUILTIN_RGBLED_PIN=8 platform = espressif32 framework = arduino board = esp32-c6-devkitm-1 +lib_ignore = wifi + spiffs + NetworkClientSecure build_flags = -DBUILTIN_RGBLED_PIN=8 -DNR_OF_LEDS=1 diff --git a/examples/tasmota_platformio_override.ini b/examples/tasmota_platformio_override.ini index 8b477e297..4ba00f20b 100644 --- a/examples/tasmota_platformio_override.ini +++ b/examples/tasmota_platformio_override.ini @@ -38,7 +38,6 @@ custom_sdkconfig = https://raw.githubusercontent.com/pioarduino/sdkconfig '# CONFIG_ETH_RMII_CLK_INPUT is not set' '# CONFIG_ETH_RMII_CLK_IN_GPIO is not set' custom_component_remove = - espressif/esp-dsp espressif/network_provisioning espressif/esp-zboss-lib espressif/esp-zigbee-lib diff --git a/platform.py b/platform.py index 399115a5b..44c264306 
100644 --- a/platform.py +++ b/platform.py @@ -13,21 +13,64 @@ # limitations under the License. import os -import socket import contextlib -import requests import json +import requests +import socket import subprocess import sys import shutil -from os.path import join +import logging +from typing import Optional, Dict, List, Any from platformio.public import PlatformBase, to_unix_path from platformio.proc import get_pythonexe_path from platformio.project.config import ProjectConfig from platformio.package.manager.tool import ToolPackageManager - +# Constants +RETRY_LIMIT = 3 +SUBPROCESS_TIMEOUT = 300 +MKLITTLEFS_VERSION_320 = "3.2.0" +MKLITTLEFS_VERSION_400 = "4.0.0" +DEFAULT_DEBUG_SPEED = "5000" +DEFAULT_APP_OFFSET = "0x10000" + +# MCUs that support ESP-builtin debug +ESP_BUILTIN_DEBUG_MCUS = frozenset([ + "esp32c3", "esp32c5", "esp32c6", "esp32s3", "esp32h2", "esp32p4" +]) + +# MCU configuration mapping +MCU_TOOLCHAIN_CONFIG = { + "xtensa": { + "mcus": frozenset(["esp32", "esp32s2", "esp32s3"]), + "toolchains": ["toolchain-xtensa-esp-elf"], + "debug_tools": ["tool-xtensa-esp-elf-gdb"] + }, + "riscv": { + "mcus": frozenset([ + "esp32c2", "esp32c3", "esp32c5", "esp32c6", "esp32h2", "esp32p4" + ]), + "toolchains": ["toolchain-riscv32-esp"], + "debug_tools": ["tool-riscv32-esp-elf-gdb"] + } +} + +COMMON_IDF_PACKAGES = [ + "tool-cmake", + "tool-ninja", + "tool-scons", + "tool-esp-rom-elfs" +] + +CHECK_PACKAGES = [ + "tool-cppcheck", + "tool-clangtidy", + "tool-pvs-studio" +] + +# System-specific configuration IS_WINDOWS = sys.platform.startswith("win") # Set Platformio env var to use windows_amd64 for all windows architectures # only windows_amd64 native espressif toolchains are available @@ -35,9 +78,13 @@ if IS_WINDOWS: os.environ["PLATFORMIO_SYSTEM_TYPE"] = "windows_amd64" +# Global variables python_exe = get_pythonexe_path() pm = ToolPackageManager() +# Configure logger +logger = logging.getLogger(__name__) + def is_internet_available(): """Check if connected to Internet""" try: @@ -46,227 +93,432 @@ def is_internet_available(): except OSError: return False +def safe_file_operation(operation_func): + """Decorator for safe filesystem operations with error handling.""" + def wrapper(*args, **kwargs): + try: + return operation_func(*args, **kwargs) + except (OSError, IOError, FileNotFoundError) as e: + logger.error(f"Filesystem error in {operation_func.__name__}: {e}") + return False + except Exception as e: + logger.error(f"Unexpected error in {operation_func.__name__}: {e}") + raise # Re-raise unexpected exceptions + return wrapper + + +@safe_file_operation +def safe_remove_directory(path: str) -> bool: + """Safely remove directories with error handling.""" + if os.path.exists(path) and os.path.isdir(path): + shutil.rmtree(path) + logger.debug(f"Directory removed: {path}") + return True + + +@safe_file_operation +def safe_copy_file(src: str, dst: str) -> bool: + """Safely copy files with error handling.""" + os.makedirs(os.path.dirname(dst), exist_ok=True) + shutil.copyfile(src, dst) + logger.debug(f"File copied: {src} -> {dst}") + return True + + class Espressif32Platform(PlatformBase): - def configure_default_packages(self, variables, targets): - if not variables.get("board"): - return super().configure_default_packages(variables, targets) + """ESP32 platform implementation for PlatformIO with optimized toolchain management.""" + + def __init__(self, *args, **kwargs): + """Initialize the ESP32 platform with caching mechanisms.""" + super().__init__(*args, **kwargs) + self._packages_dir = 
None + self._tools_cache = {} + self._mcu_config_cache = {} + + @property + def packages_dir(self) -> str: + """Get cached packages directory path.""" + if self._packages_dir is None: + config = ProjectConfig.get_instance() + self._packages_dir = config.get("platformio", "packages_dir") + return self._packages_dir + + def _get_tool_paths(self, tool_name: str) -> Dict[str, str]: + """Get centralized path calculation for tools with caching.""" + if tool_name not in self._tools_cache: + tool_path = os.path.join(self.packages_dir, tool_name) + self._tools_cache[tool_name] = { + 'tool_path': tool_path, + 'package_path': os.path.join(tool_path, "package.json"), + 'tools_json_path': os.path.join(tool_path, "tools.json"), + 'piopm_path': os.path.join(tool_path, ".piopm"), + 'idf_tools_path': os.path.join( + self.packages_dir, "tl-install", "tools", "idf_tools.py" + ) + } + return self._tools_cache[tool_name] + + def _check_tool_status(self, tool_name: str) -> Dict[str, bool]: + """Check the installation status of a tool.""" + paths = self._get_tool_paths(tool_name) + return { + 'has_idf_tools': os.path.exists(paths['idf_tools_path']), + 'has_tools_json': os.path.exists(paths['tools_json_path']), + 'has_piopm': os.path.exists(paths['piopm_path']), + 'tool_exists': os.path.exists(paths['tool_path']) + } - board_config = self.board_config(variables.get("board")) - mcu = variables.get("board_build.mcu", board_config.get("build.mcu", "esp32")) - board_sdkconfig = variables.get("board_espidf.custom_sdkconfig", board_config.get("espidf.custom_sdkconfig", "")) - frameworks = variables.get("pioframework", []) - - def install_tool(TOOL, retry_count=0): - self.packages[TOOL]["optional"] = False - TOOL_PATH = os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), TOOL) - TOOL_PACKAGE_PATH = os.path.join(TOOL_PATH, "package.json") - TOOLS_PATH_DEFAULT = os.path.join(os.path.expanduser("~"), ".platformio") - IDF_TOOLS = os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), "tl-install", "tools", "idf_tools.py") - TOOLS_JSON_PATH = os.path.join(TOOL_PATH, "tools.json") - TOOLS_PIO_PATH = os.path.join(TOOL_PATH, ".piopm") - IDF_TOOLS_CMD = ( - python_exe, - IDF_TOOLS, - "--quiet", - "--non-interactive", - "--tools-json", - TOOLS_JSON_PATH, - "install" + def _run_idf_tools_install(self, tools_json_path: str, idf_tools_path: str) -> bool: + """Execute idf_tools.py install command with timeout and error handling.""" + cmd = [ + python_exe, + idf_tools_path, + "--quiet", + "--non-interactive", + "--tools-json", + tools_json_path, + "install" + ] + + try: + result = subprocess.run( + cmd, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + timeout=SUBPROCESS_TIMEOUT, + check=False + ) + + if result.returncode != 0: + logger.error("idf_tools.py installation failed") + return False + + logger.debug("idf_tools.py executed successfully") + return True + + except subprocess.TimeoutExpired: + logger.error(f"Timeout in idf_tools.py after {SUBPROCESS_TIMEOUT}s") + return False + except (subprocess.SubprocessError, OSError) as e: + logger.error(f"Error in idf_tools.py: {e}") + return False + + def _check_tool_version(self, tool_name: str) -> bool: + """Check if the installed tool version matches the required version.""" + paths = self._get_tool_paths(tool_name) + + try: + with open(paths['package_path'], 'r', encoding='utf-8') as f: + package_data = json.load(f) + + required_version = self.packages.get(tool_name, {}).get("package-version") + installed_version = 
package_data.get("version") + + if not required_version: + logger.debug(f"No version check required for {tool_name}") + return True + + if not installed_version: + logger.warning(f"Installed version for {tool_name} unknown") + return False + + version_match = required_version == installed_version + if not version_match: + logger.info( + f"Version mismatch for {tool_name}: " + f"{installed_version} != {required_version}" + ) + + return version_match + + except (json.JSONDecodeError, FileNotFoundError) as e: + logger.error(f"Error reading package data for {tool_name}: {e}") + return False + + def install_tool(self, tool_name: str, retry_count: int = 0) -> bool: + """Install a tool with optimized retry mechanism.""" + if retry_count >= RETRY_LIMIT: + logger.error( + f"Installation of {tool_name} failed after {RETRY_LIMIT} attempts" ) + return False + + self.packages[tool_name]["optional"] = False + paths = self._get_tool_paths(tool_name) + status = self._check_tool_status(tool_name) + + # Case 1: New installation with idf_tools + if status['has_idf_tools'] and status['has_tools_json']: + return self._install_with_idf_tools(tool_name, paths) + + # Case 2: Tool already installed, version check + if (status['has_idf_tools'] and status['has_piopm'] and + not status['has_tools_json']): + return self._handle_existing_tool(tool_name, paths, retry_count) + + logger.debug(f"Tool {tool_name} already configured") + return True + + def _install_with_idf_tools(self, tool_name: str, paths: Dict[str, str]) -> bool: + """Install tool using idf_tools.py installation method.""" + if not self._run_idf_tools_install( + paths['tools_json_path'], paths['idf_tools_path'] + ): + return False + + # Copy tool files + tools_path_default = os.path.join( + os.path.expanduser("~"), ".platformio" + ) + target_package_path = os.path.join( + tools_path_default, "tools", tool_name, "package.json" + ) + + if not safe_copy_file(paths['package_path'], target_package_path): + return False + + safe_remove_directory(paths['tool_path']) + + tl_path = f"file://{os.path.join(tools_path_default, 'tools', tool_name)}" + pm.install(tl_path) + + logger.info(f"Tool {tool_name} successfully installed") + return True - tl_flag = bool(os.path.exists(IDF_TOOLS)) - json_flag = bool(os.path.exists(TOOLS_JSON_PATH)) - pio_flag = bool(os.path.exists(TOOLS_PIO_PATH)) - if tl_flag and json_flag: - with open(os.devnull, 'w') as devnull, \ - contextlib.redirect_stdout(devnull), \ - contextlib.redirect_stderr(devnull): - rc = subprocess.run( - IDF_TOOLS_CMD, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL - ).returncode - if rc != 0: - sys.stderr.write("Error: Couldn't execute 'idf_tools.py install'\n") - else: - tl_path = "file://" + join(TOOLS_PATH_DEFAULT, "tools", TOOL) - try: - shutil.copyfile(TOOL_PACKAGE_PATH, join(TOOLS_PATH_DEFAULT, "tools", TOOL, "package.json")) - except FileNotFoundError as e: - sys.stderr.write(f"Error copying tool package file: {e}\n") - if os.path.exists(TOOL_PATH) and os.path.isdir(TOOL_PATH): - try: - shutil.rmtree(TOOL_PATH) - except Exception as e: - print(f"Error while removing the tool folder: {e}") - pm.install(tl_path) - # tool is already installed, just activate it - if tl_flag and pio_flag and not json_flag: - with open(TOOL_PACKAGE_PATH, "r") as file: - package_data = json.load(file) - # check installed tool version against listed in platforms.json - if "package-version" in self.packages[TOOL] \ - and "version" in package_data \ - and self.packages[TOOL]["package-version"] == package_data["version"]: 
- self.packages[TOOL]["version"] = TOOL_PATH - self.packages[TOOL]["optional"] = False - elif "package-version" not in self.packages[TOOL]: - # No version check needed, just use the installed tool - self.packages[TOOL]["version"] = TOOL_PATH - self.packages[TOOL]["optional"] = False - elif "version" not in package_data: - print(f"Warning: Cannot determine installed version for {TOOL}. Reinstalling...") - else: # Installed version does not match required version, deinstall existing and install needed - if os.path.exists(TOOL_PATH) and os.path.isdir(TOOL_PATH): - try: - shutil.rmtree(TOOL_PATH) - except Exception as e: - print(f"Error while removing the tool folder: {e}") - if retry_count >= 3: # Limit to 3 retries - print(f"Failed to install {TOOL} after multiple attempts. Please check your network connection and try again manually.") - return - print(f"Wrong version for {TOOL}. Installing needed version...") - install_tool(TOOL, retry_count + 1) + def _handle_existing_tool( + self, tool_name: str, paths: Dict[str, str], retry_count: int + ) -> bool: + """Handle already installed tools with version checking.""" + if self._check_tool_version(tool_name): + # Version matches, use tool + self.packages[tool_name]["version"] = paths['tool_path'] + self.packages[tool_name]["optional"] = False + logger.debug(f"Tool {tool_name} found with correct version") + return True + + # Wrong version, reinstall + logger.info(f"Reinstalling {tool_name} due to version mismatch") + safe_remove_directory(paths['tool_path']) + return self.install_tool(tool_name, retry_count + 1) + def _configure_arduino_framework(self, frameworks: List[str]) -> None: + """Configure Arduino framework with dynamic library URL fetching.""" + if "arduino" not in frameworks: return - # Installer only needed for setup, deactivate when installed - if bool(os.path.exists(os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), "tl-install", "tools", "idf_tools.py"))): - self.packages["tl-install"]["optional"] = True + self.packages["framework-arduinoespressif32"]["optional"] = False + self.packages["framework-arduinoespressif32-libs"]["optional"] = False + + if is_internet_available(): + # Use branch master + url = ("https://raw.githubusercontent.com/espressif/arduino-esp32/" + "master/package/package_esp32_index.template.json") + try: + response = requests.get(url, timeout=30) + response.raise_for_status() + packjdata = response.json() + dyn_lib_url = packjdata['packages'][0]['tools'][0]['systems'][0]['url'] + self.packages["framework-arduinoespressif32-libs"]["version"] = dyn_lib_url + except (requests.RequestException, KeyError, IndexError) as e: + logger.error(f"Failed to fetch Arduino framework library URL: {e}") + + def _configure_espidf_framework( + self, frameworks: List[str], variables: Dict, board_config: Dict, mcu: str + ) -> None: + """Configure ESP-IDF framework based on custom sdkconfig settings.""" + custom_sdkconfig = variables.get("custom_sdkconfig") + board_sdkconfig = variables.get( + "board_espidf.custom_sdkconfig", + board_config.get("espidf.custom_sdkconfig", "") + ) - if "arduino" in frameworks: - self.packages["framework-arduinoespressif32"]["optional"] = False - self.packages["framework-arduinoespressif32-libs"]["optional"] = False - if is_internet_available(): - try: - # use branch master - URL = ( - "https://raw.githubusercontent.com/espressif/arduino-esp32/master/" - "package/package_esp32_index.template.json" - ) - response = requests.get(URL, timeout=10) - response.raise_for_status() - 
packjdata = response.json() - dyn_lib_url = packjdata['packages'][0]['tools'][0]['systems'][0]['url'] - self.packages["framework-arduinoespressif32-libs"]["version"] = dyn_lib_url - except (requests.RequestException, ValueError, KeyError, IndexError) as e: - print(f"Error loading latest Arduino ESP32 libs: {e}") - else: - print("No Internet connection - using local/standard configuration") - - if variables.get("custom_sdkconfig") is not None or len(str(board_sdkconfig)) > 3: + if custom_sdkconfig is not None or len(str(board_sdkconfig)) > 3: frameworks.append("espidf") self.packages["framework-espidf"]["optional"] = False if mcu == "esp32c2": self.packages["framework-arduino-c2-skeleton-lib"]["optional"] = False - MCU_TOOLCHAIN_MAPPING = { - # Xtensa based and FSM toolchain - ("esp32", "esp32s2", "esp32s3"): { - "toolchains": ["toolchain-xtensa-esp-elf"], - "ulp_toolchain": ["toolchain-esp32ulp"] + (["toolchain-riscv32-esp"] if mcu != "esp32" else []), - "debug_tools": ["tool-xtensa-esp-elf-gdb"] - }, - # RISC-V based toolchain - ("esp32c2", "esp32c3", "esp32c5", "esp32c6", "esp32h2", "esp32p4"): { - "toolchains": ["toolchain-riscv32-esp"], - "ulp_toolchain": None, - "debug_tools": ["tool-riscv32-esp-elf-gdb"] - } + def _get_mcu_config(self, mcu: str) -> Optional[Dict]: + """Get MCU configuration with optimized caching and search.""" + if mcu in self._mcu_config_cache: + return self._mcu_config_cache[mcu] + + for _, config in MCU_TOOLCHAIN_CONFIG.items(): + if mcu in config["mcus"]: + # Dynamically add ULP toolchain + result = config.copy() + result["ulp_toolchain"] = ["toolchain-esp32ulp"] + if mcu != "esp32": + result["ulp_toolchain"].append("toolchain-riscv32-esp") + self._mcu_config_cache[mcu] = result + return result + return None + + def _needs_debug_tools(self, variables: Dict, targets: List[str]) -> bool: + """Check if debug tools are needed based on build configuration.""" + return bool( + variables.get("build_type") or + "debug" in targets or + variables.get("upload_protocol") + ) + + def _configure_mcu_toolchains( + self, mcu: str, variables: Dict, targets: List[str] + ) -> None: + """Configure MCU-specific toolchains with optimized installation.""" + mcu_config = self._get_mcu_config(mcu) + if not mcu_config: + logger.warning(f"Unknown MCU: {mcu}") + return + + # Install base toolchains + for toolchain in mcu_config["toolchains"]: + self.install_tool(toolchain) + + # ULP toolchain if ULP directory exists + if mcu_config.get("ulp_toolchain") and os.path.isdir("ulp"): + for toolchain in mcu_config["ulp_toolchain"]: + self.install_tool(toolchain) + + # Debug tools when needed + if self._needs_debug_tools(variables, targets): + for debug_tool in mcu_config["debug_tools"]: + self.install_tool(debug_tool) + self.install_tool("tool-openocd-esp32") + + def _configure_installer(self) -> None: + """Configure the ESP-IDF tools installer.""" + installer_path = os.path.join( + self.packages_dir, "tl-install", "tools", "idf_tools.py" + ) + if os.path.exists(installer_path): + self.packages["tl-install"]["optional"] = True + + def _install_common_idf_packages(self) -> None: + """Install common ESP-IDF packages required for all builds.""" + for package in COMMON_IDF_PACKAGES: + self.install_tool(package) + + def _configure_check_tools(self, variables: Dict) -> None: + """Configure static analysis and check tools based on configuration.""" + check_tools = variables.get("check_tool", []) + if not check_tools: + return + + for package in CHECK_PACKAGES: + if any(tool in package for tool in 
check_tools): + self.install_tool(package) + + def _ensure_mklittlefs_version(self) -> None: + """Ensure correct mklittlefs version is installed.""" + piopm_path = os.path.join(self.packages_dir, "tool-mklittlefs", ".piopm") + + if os.path.exists(piopm_path): + try: + with open(piopm_path, 'r', encoding='utf-8') as f: + package_data = json.load(f) + if package_data.get('version') != MKLITTLEFS_VERSION_320: + os.remove(piopm_path) + logger.info("Outdated mklittlefs version removed") + except (json.JSONDecodeError, KeyError) as e: + logger.error(f"Error reading mklittlefs package data: {e}") + + def _setup_mklittlefs_for_download(self) -> None: + """Setup mklittlefs for download functionality with version 4.0.0.""" + mklittlefs_dir = os.path.join(self.packages_dir, "tool-mklittlefs") + mklittlefs400_dir = os.path.join( + self.packages_dir, "tool-mklittlefs-4.0.0" + ) + + # Ensure mklittlefs 3.2.0 is installed + if not os.path.exists(mklittlefs_dir): + self.install_tool("tool-mklittlefs") + if os.path.exists(os.path.join(mklittlefs_dir, "tools.json")): + self.install_tool("tool-mklittlefs") + + # Install mklittlefs 4.0.0 + if not os.path.exists(mklittlefs400_dir): + self.install_tool("tool-mklittlefs-4.0.0") + if os.path.exists(os.path.join(mklittlefs400_dir, "tools.json")): + self.install_tool("tool-mklittlefs-4.0.0") + + # Copy mklittlefs 4.0.0 over 3.2.0 + if os.path.exists(mklittlefs400_dir): + package_src = os.path.join(mklittlefs_dir, "package.json") + package_dst = os.path.join(mklittlefs400_dir, "package.json") + safe_copy_file(package_src, package_dst) + shutil.copytree(mklittlefs400_dir, mklittlefs_dir, dirs_exist_ok=True) + self.packages.pop("tool-mkfatfs", None) + + def _handle_littlefs_tool(self, for_download: bool) -> None: + """Handle LittleFS tool installation with special download configuration.""" + if for_download: + self._setup_mklittlefs_for_download() + else: + self._ensure_mklittlefs_version() + self.install_tool("tool-mklittlefs") + + def _install_filesystem_tool(self, filesystem: str, for_download: bool = False) -> None: + """Install filesystem-specific tools based on the filesystem type.""" + tool_mapping = { + "default": lambda: self._handle_littlefs_tool(for_download), + "fatfs": lambda: self.install_tool("tool-mkfatfs"), + "spiffs": lambda: self.install_tool("tool-mkspiffs") } - # Iterate through MCU mappings - for supported_mcus, toolchain_data in MCU_TOOLCHAIN_MAPPING.items(): - if mcu in supported_mcus: - # Set mandatory toolchains - for toolchain in toolchain_data["toolchains"]: - install_tool(toolchain) - # Set ULP toolchain if applicable - ulp_toolchain = toolchain_data.get("ulp_toolchain") - if ulp_toolchain and os.path.isdir("ulp"): - for toolchain in ulp_toolchain: - install_tool(toolchain) - # Install debug tools if conditions match - if (variables.get("build_type") or "debug" in "".join(targets)) or variables.get("upload_protocol"): - for debug_tool in toolchain_data["debug_tools"]: - install_tool(debug_tool) - install_tool("tool-openocd-esp32") - break # Exit loop once MCU is matched - - # Common packages for IDF and mixed Arduino+IDF projects - COMMON_IDF_PACKAGES = [ - "tool-cmake", - "tool-ninja", - "tool-scons", - "tool-esp-rom-elfs" - ] - if "espidf" in frameworks: - for package in COMMON_IDF_PACKAGES: - install_tool(package) - - CHECK_PACKAGES = [ - "tool-cppcheck", - "tool-clangtidy", - "tool-pvs-studio" - ] - # Install check tool listed in pio entry "check_tool" - if variables.get("check_tool") is not None: - for package in CHECK_PACKAGES: - 
for check_tool in variables.get("check_tool", ""): - if check_tool in package: - install_tool(package) - - if "buildfs" or "uploadfs" in targets: - filesystem = variables.get("board_build.filesystem", "littlefs") - if filesystem == "littlefs": - # ensure use of mklittlefs 3.2.0 - piopm_path = os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), "tool-mklittlefs", ".piopm") - if os.path.exists(piopm_path): - with open(piopm_path, "r") as file: - package_data = json.load(file) - if package_data['version'] != "3.2.0": - os.remove(piopm_path) - install_tool("tool-mklittlefs") - elif filesystem == "fatfs": - install_tool("tool-mkfatfs") - else: - install_tool("tool-mkspiffs") - if "downloadfs" in targets: - filesystem = variables.get("board_build.filesystem", "littlefs") - if filesystem == "littlefs": - # Use Tasmota mklittlefs v4.0.0 to unpack, older version is incompatible - # make sure mklittlefs 3.2.0 is installed - mklittlefs_dir = os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), "tool-mklittlefs") - if not os.path.exists(mklittlefs_dir): - install_tool("tool-mklittlefs") - if os.path.exists(os.path.join(mklittlefs_dir, "tools.json")): - install_tool("tool-mklittlefs") - mklittlefs400_dir = os.path.join(ProjectConfig.get_instance().get("platformio", "packages_dir"), "tool-mklittlefs-4.0.0") - if not os.path.exists(mklittlefs400_dir): - # install mklittlefs 4.0.0 - install_tool("tool-mklittlefs-4.0.0") - if os.path.exists(os.path.join(mklittlefs400_dir, "tools.json")): - install_tool("tool-mklittlefs-4.0.0") - # use mklittlefs 4.0.0 instead of 3.2.0 by copying over - if os.path.exists(mklittlefs400_dir): - shutil.copyfile( - os.path.join(mklittlefs_dir, "package.json"), - os.path.join(mklittlefs400_dir, "package.json"), - ) - shutil.copytree(mklittlefs400_dir, mklittlefs_dir, dirs_exist_ok=True) - del self.packages["tool-mkfatfs"] - elif filesystem == "fatfs": - install_tool("tool-mkfatfs") + handler = tool_mapping.get(filesystem, tool_mapping["default"]) + handler() + def _handle_dfuutil_tool(self, variables: Dict, for_download: bool = False) -> None: + """Install dfuutil tool for Arduino Nano ESP32 board.""" # Currently only Arduino Nano ESP32 uses the dfuutil tool as uploader if variables.get("board") == "arduino_nano_esp32": - install_tool("tool-dfuutil-arduino") - else: - del self.packages["tool-dfuutil-arduino"] + self.install_tool("tool-dfuutil-arduino") + + def _configure_filesystem_tools(self, variables: Dict, targets: List[str]) -> None: + """Configure filesystem tools based on build targets and filesystem type.""" + filesystem = variables.get("board_build.filesystem", "littlefs") + + if any(target in targets for target in ["buildfs", "uploadfs"]): + self._install_filesystem_tool(filesystem, for_download=False) + + if "downloadfs" in targets: + self._install_filesystem_tool(filesystem, for_download=True) + + def configure_default_packages(self, variables: Dict, targets: List[str]) -> Any: + """Main configuration method with optimized package management.""" + if not variables.get("board"): + return super().configure_default_packages(variables, targets) + + # Base configuration + board_config = self.board_config(variables.get("board")) + mcu = variables.get("board_build.mcu", board_config.get("build.mcu", "esp32")) + frameworks = list(variables.get("pioframework", [])) # Create copy + + try: + # Configuration steps + self._configure_installer() + self._configure_arduino_framework(frameworks) + 
self._configure_espidf_framework(frameworks, variables, board_config, mcu) + self._configure_mcu_toolchains(mcu, variables, targets) + + if "espidf" in frameworks: + self._install_common_idf_packages() + + self._configure_check_tools(variables) + self._configure_filesystem_tools(variables, targets) + self._handle_dfuutil_tool(variables) + + logger.info("Package configuration completed successfully") + + except Exception as e: + logger.error(f"Error in package configuration: {type(e).__name__}: {e}") + # Don't re-raise to maintain compatibility return super().configure_default_packages(variables, targets) def get_boards(self, id_=None): + """Get board configuration with dynamic options.""" result = super().get_boards(id_) if not result: return result @@ -278,13 +530,14 @@ def get_boards(self, id_=None): return result def _add_dynamic_options(self, board): - # upload protocols + """Add dynamic board options for upload protocols and debug tools.""" + # Upload protocols if not board.get("upload.protocols", []): board.manifest["upload"]["protocols"] = ["esptool", "espota"] if not board.get("upload.protocol", ""): board.manifest["upload"]["protocol"] = "esptool" - # debug tools + # Debug tools debug = board.manifest.get("debug", {}) non_debug_protocols = ["esptool", "espota"] supported_debug_tools = [ @@ -298,17 +551,21 @@ def _add_dynamic_options(self, board): "olimex-arm-usb-ocd-h", "olimex-arm-usb-ocd", "olimex-jtag-tiny", - "tumpa", + "tumpa" ] - # A special case for the Kaluga board that has a separate interface config + # Special configuration for Kaluga board if board.id == "esp32-s2-kaluga-1": supported_debug_tools.append("ftdi") - if board.get("build.mcu", "") in ("esp32c3", "esp32c5", "esp32c6", "esp32s3", "esp32h2", "esp32p4"): + + # ESP-builtin for certain MCUs + mcu = board.get("build.mcu", "") + if mcu in ESP_BUILTIN_DEBUG_MCUS: supported_debug_tools.append("esp-builtin") upload_protocol = board.manifest.get("upload", {}).get("protocol") upload_protocols = board.manifest.get("upload", {}).get("protocols", []) + if debug: upload_protocols.extend(supported_debug_tools) if upload_protocol and upload_protocol not in upload_protocols: @@ -318,37 +575,13 @@ def _add_dynamic_options(self, board): if "tools" not in debug: debug["tools"] = {} + # Debug tool configuration for link in upload_protocols: if link in non_debug_protocols or link in debug["tools"]: continue - if link in ("jlink", "cmsis-dap"): - openocd_interface = link - elif link in ("esp-prog", "ftdi"): - if board.id == "esp32-s2-kaluga-1": - openocd_interface = "ftdi/esp32s2_kaluga_v1" - else: - openocd_interface = "ftdi/esp32_devkitj_v1" - elif link == "esp-bridge": - openocd_interface = "esp_usb_bridge" - elif link == "esp-builtin": - openocd_interface = "esp_usb_jtag" - else: - openocd_interface = "ftdi/" + link - - server_args = [ - "-s", - "$PACKAGE_DIR/share/openocd/scripts", - "-f", - "interface/%s.cfg" % openocd_interface, - "-f", - "%s/%s" - % ( - ("target", debug.get("openocd_target")) - if "openocd_target" in debug - else ("board", debug.get("openocd_board")) - ), - ] + openocd_interface = self._get_openocd_interface(link, board) + server_args = self._get_debug_server_args(openocd_interface, debug) debug["tools"][link] = { "server": { @@ -380,14 +613,43 @@ def _add_dynamic_options(self, board): board.manifest["debug"] = debug return board + def _get_openocd_interface(self, link: str, board) -> str: + """Determine OpenOCD interface configuration for debug link.""" + if link in ("jlink", "cmsis-dap"): + return link + 
if link in ("esp-prog", "ftdi"): + if board.id == "esp32-s2-kaluga-1": + return "ftdi/esp32s2_kaluga_v1" + return "ftdi/esp32_devkitj_v1" + if link == "esp-bridge": + return "esp_usb_bridge" + if link == "esp-builtin": + return "esp_usb_jtag" + return f"ftdi/{link}" + + def _get_debug_server_args(self, openocd_interface: str, debug: Dict) -> List[str]: + """Generate debug server arguments for OpenOCD configuration.""" + if 'openocd_target' in debug: + config_type = 'target' + config_name = debug.get('openocd_target') + else: + config_type = 'board' + config_name = debug.get('openocd_board') + return [ + "-s", "$PACKAGE_DIR/share/openocd/scripts", + "-f", f"interface/{openocd_interface}.cfg", + "-f", f"{config_type}/{config_name}.cfg" + ] + def configure_debug_session(self, debug_config): + """Configure debug session with flash image loading.""" build_extra_data = debug_config.build_data.get("extra", {}) flash_images = build_extra_data.get("flash_images", []) if "openocd" in (debug_config.server or {}).get("executable", ""): - debug_config.server["arguments"].extend( - ["-c", "adapter speed %s" % (debug_config.speed or "5000")] - ) + debug_config.server["arguments"].extend([ + "-c", f"adapter speed {debug_config.speed or DEFAULT_DEBUG_SPEED}" + ]) ignore_conds = [ debug_config.load_cmds != ["load"], @@ -399,16 +661,13 @@ def configure_debug_session(self, debug_config): return load_cmds = [ - 'monitor program_esp "{{{path}}}" {offset} verify'.format( - path=to_unix_path(item["path"]), offset=item["offset"] - ) + f'monitor program_esp "{to_unix_path(item["path"])}" ' + f'{item["offset"]} verify' for item in flash_images ] load_cmds.append( - 'monitor program_esp "{%s.bin}" %s verify' - % ( - to_unix_path(debug_config.build_data["prog_path"][:-4]), - build_extra_data.get("application_offset", "0x10000"), - ) + f'monitor program_esp ' + f'"{to_unix_path(debug_config.build_data["prog_path"][:-4])}.bin" ' + f'{build_extra_data.get("application_offset", DEFAULT_APP_OFFSET)} verify' ) debug_config.load_cmds = load_cmds
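
As a rough standalone sketch (assuming hypothetical build paths, a hypothetical "esp32dev" environment, and illustrative flash offsets), the refactored load-command formatting in configure_debug_session should render OpenOCD "program_esp" commands along these lines; this is not part of the patch, only a minimal illustration of the expected output.

    # Illustrative sketch only: mirrors the f-string formatting used in
    # configure_debug_session. The image list, paths and offsets below are
    # hypothetical placeholders, not values produced by the platform code.
    flash_images = [
        {"path": "C:/proj/.pio/build/esp32dev/bootloader.bin", "offset": "0x1000"},
        {"path": "C:/proj/.pio/build/esp32dev/partitions.bin", "offset": "0x8000"},
    ]
    app_bin = "C:/proj/.pio/build/esp32dev/firmware.bin"
    app_offset = "0x10000"  # corresponds to DEFAULT_APP_OFFSET in the patch

    # Build one "monitor program_esp" load command per extra flash image,
    # then append the application image at its offset.
    load_cmds = [
        f'monitor program_esp "{img["path"]}" {img["offset"]} verify'
        for img in flash_images
    ]
    load_cmds.append(f'monitor program_esp "{app_bin}" {app_offset} verify')

    for cmd in load_cmds:
        print(cmd)
    # Expected output (with the hypothetical values above):
    # monitor program_esp "C:/proj/.pio/build/esp32dev/bootloader.bin" 0x1000 verify
    # monitor program_esp "C:/proj/.pio/build/esp32dev/partitions.bin" 0x8000 verify
    # monitor program_esp "C:/proj/.pio/build/esp32dev/firmware.bin" 0x10000 verify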