Add build info to image (#12425)
Co-authored-by: J. Nick Koston <nick+github@koston.org>
Co-authored-by: J. Nick Koston <nick@home-assistant.io>
Co-authored-by: J. Nick Koston <nick@koston.org>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
@@ -518,10 +518,49 @@ def compile_program(args: ArgsProtocol, config: ConfigType) -> int:
    rc = platformio_api.run_compile(config, CORE.verbose)
    if rc != 0:
        return rc

    # Check if firmware was rebuilt and emit build_info + create manifest
    _check_and_emit_build_info()

    idedata = platformio_api.get_idedata(config)
    return 0 if idedata is not None else 1


def _check_and_emit_build_info() -> None:
    """Check if firmware was rebuilt and emit build_info."""
    import json

    firmware_path = CORE.firmware_bin
    build_info_json_path = CORE.relative_build_path("build_info.json")

    # Check if both files exist
    if not firmware_path.exists() or not build_info_json_path.exists():
        return

    # Check if firmware is newer than build_info (indicating a relink occurred)
    if firmware_path.stat().st_mtime <= build_info_json_path.stat().st_mtime:
        return

    # Read build_info from JSON
    try:
        with open(build_info_json_path, encoding="utf-8") as f:
            build_info = json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        _LOGGER.debug("Failed to read build_info: %s", e)
        return

    config_hash = build_info.get("config_hash")
    build_time_str = build_info.get("build_time_str")

    if config_hash is None or build_time_str is None:
        return

    # Emit build_info with human-readable time
    _LOGGER.info(
        "Build Info: config_hash=0x%08x build_time_str=%s", config_hash, build_time_str
    )


def upload_using_esptool(
    config: ConfigType, port: str, file: str, speed: int
) -> str | int:
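Editor's note: a minimal illustrative sketch (not part of this commit) of the build_info.json shape that _check_and_emit_build_info() reads and the log line it emits; key names and the log format come from the code above and from copy_src_tree() further down, the concrete values are made up.

import json
import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger("esphome")

# Example payload mirroring what copy_src_tree() writes to build_info.json
# (305419896 == 0x12345678).
build_info = json.loads(
    '{"config_hash": 305419896, "build_time": 1700000000,'
    ' "build_time_str": "2023-11-14 22:13:20 +0000", "esphome_version": "2025.1.0"}'
)
_LOGGER.info(
    "Build Info: config_hash=0x%08x build_time_str=%s",
    build_info["config_hash"],
    build_info["build_time_str"],
)
# Prints: Build Info: config_hash=0x12345678 build_time_str=2023-11-14 22:13:20 +0000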
@@ -1472,7 +1472,10 @@ bool APIConnection::send_device_info_response(const DeviceInfoRequest &msg) {

  resp.set_esphome_version(ESPHOME_VERSION_REF);

  resp.set_compilation_time(App.get_compilation_time_ref());
  // Stack buffer for build time string
  char build_time_str[Application::BUILD_TIME_STR_SIZE];
  App.get_build_time_string(build_time_str);
  resp.set_compilation_time(StringRef(build_time_str));

  // Manufacturer string - define once, handle ESP8266 PROGMEM separately
#if defined(USE_ESP8266) || defined(USE_ESP32)
@@ -154,7 +154,15 @@ bool MQTTComponent::send_discovery_() {
  device_info[MQTT_DEVICE_MANUFACTURER] =
      model == nullptr ? ESPHOME_PROJECT_NAME : std::string(ESPHOME_PROJECT_NAME, model - ESPHOME_PROJECT_NAME);
#else
  device_info[MQTT_DEVICE_SW_VERSION] = ESPHOME_VERSION " (" + App.get_compilation_time_ref() + ")";
  static const char ver_fmt[] PROGMEM = ESPHOME_VERSION " (config hash 0x%08" PRIx32 ")";
#ifdef USE_ESP8266
  char fmt_buf[sizeof(ver_fmt)];
  strcpy_P(fmt_buf, ver_fmt);
  const char *fmt = fmt_buf;
#else
  const char *fmt = ver_fmt;
#endif
  device_info[MQTT_DEVICE_SW_VERSION] = str_sprintf(fmt, App.get_config_hash());
  device_info[MQTT_DEVICE_MODEL] = ESPHOME_BOARD;
#if defined(USE_ESP8266) || defined(USE_ESP32)
  device_info[MQTT_DEVICE_MANUFACTURER] = "Espressif";
@@ -1,4 +1,5 @@
#include "sen5x.h"
#include "esphome/core/application.h"
#include "esphome/core/hal.h"
#include "esphome/core/helpers.h"
#include "esphome/core/log.h"

@@ -154,10 +155,10 @@ void SEN5XComponent::setup() {
  if (this->voc_sensor_ && this->store_baseline_) {
    uint32_t combined_serial =
        encode_uint24(this->serial_number_[0], this->serial_number_[1], this->serial_number_[2]);
    // Hash with compilation time and serial number
    // Hash with config hash, version, and serial number
    // This ensures the baseline storage is cleared after OTA
    // Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
    uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(combined_serial));
    // Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
    uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(combined_serial));
    this->pref_ = global_preferences->make_preference<Sen5xBaselines>(hash, true);

    if (this->pref_.load(&this->voc_baselines_storage_)) {
@@ -72,10 +72,10 @@ void SGP30Component::setup() {
    return;
  }

  // Hash with compilation time and serial number
  // Hash with config hash, version, and serial number
  // This ensures the baseline storage is cleared after OTA
  // Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
  uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(this->serial_number_));
  // Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
  uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(this->serial_number_));
  this->pref_ = global_preferences->make_preference<SGP30Baselines>(hash, true);

  if (this->store_baseline_ && this->pref_.load(&this->baselines_storage_)) {
@@ -1,4 +1,5 @@
#include "sgp4x.h"
#include "esphome/core/application.h"
#include "esphome/core/log.h"
#include "esphome/core/hal.h"
#include <cinttypes>

@@ -56,10 +57,10 @@ void SGP4xComponent::setup() {
  ESP_LOGD(TAG, "Version 0x%0X", featureset);

  if (this->store_baseline_) {
    // Hash with compilation time and serial number
    // Hash with config hash, version, and serial number
    // This ensures the baseline storage is cleared after OTA
    // Serial numbers are unique to each sensor, so mulitple sensors can be used without conflict
    uint32_t hash = fnv1_hash(App.get_compilation_time_ref() + std::to_string(this->serial_number_));
    // Serial numbers are unique to each sensor, so multiple sensors can be used without conflict
    uint32_t hash = fnv1a_hash_extend(App.get_config_version_hash(), std::to_string(this->serial_number_));
    this->pref_ = global_preferences->make_preference<SGP4xBaselines>(hash, true);

    if (this->pref_.load(&this->voc_baselines_storage_)) {
@@ -1,8 +1,9 @@
#include "version_text_sensor.h"
#include "esphome/core/log.h"
#include "esphome/core/application.h"
#include "esphome/core/log.h"
#include "esphome/core/version.h"
#include "esphome/core/helpers.h"
#include "esphome/core/progmem.h"

namespace esphome {
namespace version {

@@ -10,11 +11,26 @@ namespace version {
static const char *const TAG = "version.text_sensor";

void VersionTextSensor::setup() {
  if (this->hide_timestamp_) {
    this->publish_state(ESPHOME_VERSION);
  } else {
    this->publish_state(str_sprintf(ESPHOME_VERSION " %s", App.get_compilation_time_ref().c_str()));
  static const char PREFIX[] PROGMEM = ESPHOME_VERSION " (config hash 0x";
  static const char BUILT_STR[] PROGMEM = ", built ";
  // Buffer size: PREFIX + 8 hex chars + BUILT_STR + BUILD_TIME_STR_SIZE + ")" + null
  constexpr size_t buf_size = sizeof(PREFIX) + 8 + sizeof(BUILT_STR) + esphome::Application::BUILD_TIME_STR_SIZE + 2;
  char version_str[buf_size];

  ESPHOME_strncpy_P(version_str, PREFIX, sizeof(version_str));

  size_t len = strlen(version_str);
  snprintf(version_str + len, sizeof(version_str) - len, "%08" PRIx32, App.get_config_hash());

  if (!this->hide_timestamp_) {
    size_t len = strlen(version_str);
    ESPHOME_strncat_P(version_str, BUILT_STR, sizeof(version_str) - len - 1);
    ESPHOME_strncat_P(version_str, ESPHOME_BUILD_TIME_STR, sizeof(version_str) - strlen(version_str) - 1);
  }

  strncat(version_str, ")", sizeof(version_str) - strlen(version_str) - 1);
  version_str[sizeof(version_str) - 1] = '\0';
  this->publish_state(version_str);
}
float VersionTextSensor::get_setup_priority() const { return setup_priority::DATA; }
void VersionTextSensor::set_hide_timestamp(bool hide_timestamp) { this->hide_timestamp_ = hide_timestamp; }
@@ -375,7 +375,7 @@ void WiFiComponent::start() {
           get_mac_address_pretty_into_buffer(mac_s));
  this->last_connected_ = millis();

  uint32_t hash = this->has_sta() ? fnv1_hash(App.get_compilation_time_ref().c_str()) : 88491487UL;
  uint32_t hash = this->has_sta() ? App.get_config_version_hash() : 88491487UL;

  this->pref_ = global_preferences->make_preference<wifi::SavedWifiSettings>(hash, true);
#ifdef USE_WIFI_FAST_CONNECT
@@ -608,6 +608,8 @@ class EsphomeCore:
        self.current_component: str | None = None
        # Address cache for DNS and mDNS lookups from command line arguments
        self.address_cache: AddressCache | None = None
        # Cached config hash (computed lazily)
        self._config_hash: int | None = None

    def reset(self):
        from esphome.pins import PIN_SCHEMA_REGISTRY

@@ -636,6 +638,7 @@ class EsphomeCore:
        self.unique_ids = {}
        self.current_component = None
        self.address_cache = None
        self._config_hash = None
        PIN_SCHEMA_REGISTRY.reset()

    @contextmanager

@@ -685,6 +688,21 @@ class EsphomeCore:

        return None

    @property
    def config_hash(self) -> int:
        """Get the FNV-1a 32-bit hash of the config.

        The hash is computed lazily and cached for performance.
        Uses sort_keys=True to ensure deterministic ordering.
        """
        if self._config_hash is None:
            from esphome import yaml_util
            from esphome.helpers import fnv1a_32bit_hash

            config_str = yaml_util.dump(self.config, show_secrets=True, sort_keys=True)
            self._config_hash = fnv1a_32bit_hash(config_str)
        return self._config_hash

    @property
    def config_dir(self) -> Path:
        if self.config_path.is_dir():
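Editor's note: as a rough illustration (not the actual esphome.helpers implementation), the property above amounts to a standard 32-bit FNV-1a hash over a deterministic, sorted-keys dump of the config. The helper name fnv1a_32bit_hash and the dump call come from the diff; the function body below is an assumption based on the standard algorithm.

FNV1A_OFFSET_BASIS_32 = 2166136261
FNV1A_PRIME_32 = 16777619


def fnv1a_32bit_hash_sketch(data: str) -> int:
    """Standard 32-bit FNV-1a over the UTF-8 bytes of ``data`` (illustrative only)."""
    hash_ = FNV1A_OFFSET_BASIS_32
    for byte in data.encode("utf-8"):
        hash_ = ((hash_ ^ byte) * FNV1A_PRIME_32) & 0xFFFFFFFF
    return hash_


# Stands in for yaml_util.dump(config, show_secrets=True, sort_keys=True)
config_str = "esphome:\n  name: test\n"
print(f"config_hash=0x{fnv1a_32bit_hash_sketch(config_str):08x}")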
@@ -1,5 +1,12 @@
#include "esphome/core/application.h"
#include "esphome/core/build_info_data.h"
#include "esphome/core/log.h"
#include "esphome/core/progmem.h"
#include <cstring>

#ifdef USE_ESP8266
#include <pgmspace.h>
#endif
#include "esphome/core/version.h"
#include "esphome/core/hal.h"
#include <algorithm>

@@ -191,7 +198,9 @@ void Application::loop() {

  if (this->dump_config_at_ < this->components_.size()) {
    if (this->dump_config_at_ == 0) {
      ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", this->compilation_time_);
      char build_time_str[Application::BUILD_TIME_STR_SIZE];
      this->get_build_time_string(build_time_str);
      ESP_LOGI(TAG, "ESPHome version " ESPHOME_VERSION " compiled on %s", build_time_str);
#ifdef ESPHOME_PROJECT_NAME
      ESP_LOGI(TAG, "Project " ESPHOME_PROJECT_NAME " version " ESPHOME_PROJECT_VERSION);
#endif

@@ -711,4 +720,9 @@ void Application::wake_loop_threadsafe() {
}
#endif // defined(USE_SOCKET_SELECT_SUPPORT) && defined(USE_WAKE_LOOP_THREADSAFE)

void Application::get_build_time_string(std::span<char, BUILD_TIME_STR_SIZE> buffer) {
  ESPHOME_strncpy_P(buffer.data(), ESPHOME_BUILD_TIME_STR, buffer.size());
  buffer[buffer.size() - 1] = '\0';
}

}  // namespace esphome
@@ -1,9 +1,12 @@
#pragma once

#include <algorithm>
#include <ctime>
#include <limits>
#include <span>
#include <string>
#include <vector>
#include "esphome/core/build_info_data.h"
#include "esphome/core/component.h"
#include "esphome/core/defines.h"
#include "esphome/core/hal.h"

@@ -11,6 +14,7 @@
#include "esphome/core/preferences.h"
#include "esphome/core/scheduler.h"
#include "esphome/core/string_ref.h"
#include "esphome/core/version.h"

#ifdef USE_DEVICES
#include "esphome/core/device.h"

@@ -101,7 +105,7 @@ static const uint32_t TEARDOWN_TIMEOUT_REBOOT_MS = 1000; // 1 second for quick
class Application {
 public:
  void pre_setup(const std::string &name, const std::string &friendly_name, const char *comment,
                 const char *compilation_time, bool name_add_mac_suffix) {
                 bool name_add_mac_suffix) {
    arch_init();
    this->name_add_mac_suffix_ = name_add_mac_suffix;
    if (name_add_mac_suffix) {

@@ -121,7 +125,6 @@ class Application {
      this->friendly_name_ = friendly_name;
    }
    this->comment_ = comment;
    this->compilation_time_ = compilation_time;
  }

#ifdef USE_DEVICES

@@ -261,9 +264,30 @@ class Application {

  bool is_name_add_mac_suffix_enabled() const { return this->name_add_mac_suffix_; }

  std::string get_compilation_time() const { return this->compilation_time_; }
  /// Get the compilation time as StringRef (for API usage)
  StringRef get_compilation_time_ref() const { return StringRef(this->compilation_time_); }
  /// Size of buffer required for build time string (including null terminator)
  static constexpr size_t BUILD_TIME_STR_SIZE = 26;

  /// Get the config hash as a 32-bit integer
  constexpr uint32_t get_config_hash() { return ESPHOME_CONFIG_HASH; }

  /// Get the config hash extended with ESPHome version
  constexpr uint32_t get_config_version_hash() { return fnv1a_hash_extend(ESPHOME_CONFIG_HASH, ESPHOME_VERSION); }

  /// Get the build time as a Unix timestamp
  constexpr time_t get_build_time() { return ESPHOME_BUILD_TIME; }

  /// Copy the build time string into the provided buffer
  /// Buffer must be BUILD_TIME_STR_SIZE bytes (compile-time enforced)
  void get_build_time_string(std::span<char, BUILD_TIME_STR_SIZE> buffer);

  /// Get the build time as a string (deprecated, use get_build_time_string() instead)
  // Remove before 2026.7.0
  ESPDEPRECATED("Use get_build_time_string() instead. Removed in 2026.7.0", "2026.1.0")
  std::string get_compilation_time() {
    char buf[BUILD_TIME_STR_SIZE];
    this->get_build_time_string(buf);
    return std::string(buf);
  }

  /// Get the cached time in milliseconds from when the current component started its loop execution
  inline uint32_t IRAM_ATTR HOT get_loop_component_start_time() const { return this->loop_component_start_time_; }

@@ -478,7 +502,6 @@ class Application {
  // Pointer-sized members first
  Component *current_component_{nullptr};
  const char *comment_{nullptr};
  const char *compilation_time_{nullptr};

  // std::vector (3 pointers each: begin, end, capacity)
  // Partitioned vector design for looping components
esphome/core/build_info_data.h (new file, 10 lines)
@@ -0,0 +1,10 @@
#pragma once

// This file is not used by the runtime, instead, a version is generated during
// compilation with the actual build info values.
//
// This file is only used by static analyzers and IDEs.

#define ESPHOME_CONFIG_HASH 0x12345678U // NOLINT
#define ESPHOME_BUILD_TIME 1700000000 // NOLINT
static const char ESPHOME_BUILD_TIME_STR[] = "2024-01-01 00:00:00 +0000";
@@ -501,7 +501,6 @@ async def to_code(config: ConfigType) -> None:
            config[CONF_NAME],
            config[CONF_FRIENDLY_NAME],
            config.get(CONF_COMMENT, ""),
            cg.RawExpression('__DATE__ ", " __TIME__'),
            config[CONF_NAME_ADD_MAC_SUFFIX],
        )
    )
@@ -155,17 +155,6 @@ uint32_t fnv1_hash(const char *str) {
  return hash;
}

// FNV-1a hash - preferred for new code
uint32_t fnv1a_hash_extend(uint32_t hash, const char *str) {
  if (str) {
    while (*str) {
      hash ^= *str++;
      hash *= FNV1_PRIME;
    }
  }
  return hash;
}

float random_float() { return static_cast<float>(random_uint32()) / static_cast<float>(UINT32_MAX); }

// Strings
@@ -388,12 +388,20 @@ constexpr uint32_t FNV1_OFFSET_BASIS = 2166136261UL;
constexpr uint32_t FNV1_PRIME = 16777619UL;

/// Extend a FNV-1a hash with additional string data.
uint32_t fnv1a_hash_extend(uint32_t hash, const char *str);
constexpr uint32_t fnv1a_hash_extend(uint32_t hash, const char *str) {
  if (str) {
    while (*str) {
      hash ^= *str++;
      hash *= FNV1_PRIME;
    }
  }
  return hash;
}
inline uint32_t fnv1a_hash_extend(uint32_t hash, const std::string &str) {
  return fnv1a_hash_extend(hash, str.c_str());
}
/// Calculate a FNV-1a hash of \p str.
inline uint32_t fnv1a_hash(const char *str) { return fnv1a_hash_extend(FNV1_OFFSET_BASIS, str); }
constexpr uint32_t fnv1a_hash(const char *str) { return fnv1a_hash_extend(FNV1_OFFSET_BASIS, str); }
inline uint32_t fnv1a_hash(const std::string &str) { return fnv1a_hash(str.c_str()); }

/// Return a random 32-bit unsigned integer.
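Editor's note: a small Python model (illustrative only) of the constexpr fnv1a_hash_extend() above, using the same offset basis and prime. It demonstrates the property Application::get_config_version_hash() relies on: extending an existing hash with more data gives the same result as hashing the concatenated input. The version string below is just an example value.

FNV1_OFFSET_BASIS = 2166136261
FNV1_PRIME = 16777619


def fnv1a_hash_extend(hash_: int, data: str) -> int:
    # Same update step as the C++ loop above, kept to 32 bits.
    for byte in data.encode("utf-8"):
        hash_ = ((hash_ ^ byte) * FNV1_PRIME) & 0xFFFFFFFF
    return hash_


def fnv1a_hash(data: str) -> int:
    return fnv1a_hash_extend(FNV1_OFFSET_BASIS, data)


config_hash = fnv1a_hash("example config dump")  # stands in for ESPHOME_CONFIG_HASH
version_hash = fnv1a_hash_extend(config_hash, "2026.1.0")  # example version string
assert version_hash == fnv1a_hash("example config dump" + "2026.1.0")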
@@ -9,8 +9,10 @@
#define ESPHOME_F(string_literal) F(string_literal)
#define ESPHOME_PGM_P PGM_P
#define ESPHOME_strncpy_P strncpy_P
#define ESPHOME_strncat_P strncat_P
#else
#define ESPHOME_F(string_literal) (string_literal)
#define ESPHOME_PGM_P const char *
#define ESPHOME_strncpy_P strncpy
#define ESPHOME_strncat_P strncat
#endif
@@ -424,9 +424,13 @@ def write_file_if_changed(path: Path, text: str) -> bool:
    return True


def copy_file_if_changed(src: Path, dst: Path) -> None:
def copy_file_if_changed(src: Path, dst: Path) -> bool:
    """Copy file from src to dst if contents differ.

    Returns True if file was copied, False if files already matched.
    """
    if file_compare(src, dst):
        return
        return False
    dst.parent.mkdir(parents=True, exist_ok=True)
    try:
        shutil.copyfile(src, dst)

@@ -441,11 +445,12 @@ def copy_file_if_changed(src: Path, dst: Path) -> None:
        with suppress(OSError):
            os.unlink(dst)
            shutil.copyfile(src, dst)
            return
            return True

        from esphome.core import EsphomeError

        raise EsphomeError(f"Error copying file {src} to {dst}: {err}") from err
    return True


def list_starts_with(list_, sub):
@@ -1,11 +1,13 @@
from collections.abc import Callable
import importlib
import json
import logging
import os
from pathlib import Path
import re
import shutil
import stat
import time
from types import TracebackType

from esphome import loader

@@ -23,6 +25,7 @@ from esphome.helpers import (
    is_ha_addon,
    read_file,
    walk_files,
    write_file,
    write_file_if_changed,
)
from esphome.storage_json import StorageJSON, storage_path

@@ -173,6 +176,7 @@ VERSION_H_FORMAT = """\
"""
DEFINES_H_TARGET = "esphome/core/defines.h"
VERSION_H_TARGET = "esphome/core/version.h"
BUILD_INFO_DATA_H_TARGET = "esphome/core/build_info_data.h"
ESPHOME_README_TXT = """
THIS DIRECTORY IS AUTO-GENERATED, DO NOT MODIFY

@@ -206,10 +210,16 @@ def copy_src_tree():
    include_s = "\n".join(include_l)

    source_files_copy = source_files_map.copy()
    ignore_targets = [Path(x) for x in (DEFINES_H_TARGET, VERSION_H_TARGET)]
    ignore_targets = [
        Path(x) for x in (DEFINES_H_TARGET, VERSION_H_TARGET, BUILD_INFO_DATA_H_TARGET)
    ]
    for t in ignore_targets:
        source_files_copy.pop(t)
        source_files_copy.pop(t, None)

    # Files to exclude from sources_changed tracking (generated files)
    generated_files = {Path("esphome/core/build_info_data.h")}

    sources_changed = False
    for fname in walk_files(CORE.relative_src_path("esphome")):
        p = Path(fname)
        if p.suffix not in SOURCE_FILE_EXTENSIONS:
@@ -223,28 +233,80 @@ def copy_src_tree():
        if target not in source_files_copy:
            # Source file removed, delete target
            p.unlink()
            if target not in generated_files:
                sources_changed = True
        else:
            src_file = source_files_copy.pop(target)
            with src_file.path() as src_path:
                copy_file_if_changed(src_path, p)
                if copy_file_if_changed(src_path, p) and target not in generated_files:
                    sources_changed = True

    # Now copy new files
    for target, src_file in source_files_copy.items():
        dst_path = CORE.relative_src_path(*target.parts)
        with src_file.path() as src_path:
            copy_file_if_changed(src_path, dst_path)
            if (
                copy_file_if_changed(src_path, dst_path)
                and target not in generated_files
            ):
                sources_changed = True

    # Finally copy defines
    write_file_if_changed(
    if write_file_if_changed(
        CORE.relative_src_path("esphome", "core", "defines.h"), generate_defines_h()
    )
    ):
        sources_changed = True
    write_file_if_changed(CORE.relative_build_path("README.txt"), ESPHOME_README_TXT)
    write_file_if_changed(
    if write_file_if_changed(
        CORE.relative_src_path("esphome.h"), ESPHOME_H_FORMAT.format(include_s)
    )
    write_file_if_changed(
    ):
        sources_changed = True
    if write_file_if_changed(
        CORE.relative_src_path("esphome", "core", "version.h"), generate_version_h()
    ):
        sources_changed = True

    # Generate new build_info files if needed
    build_info_data_h_path = CORE.relative_src_path(
        "esphome", "core", "build_info_data.h"
    )
    build_info_json_path = CORE.relative_build_path("build_info.json")
    config_hash, build_time, build_time_str = get_build_info()

    # Defensively force a rebuild if the build_info files don't exist, or if
    # there was a config change which didn't actually cause a source change
    if not build_info_data_h_path.exists():
        sources_changed = True
    else:
        try:
            existing = json.loads(build_info_json_path.read_text(encoding="utf-8"))
            if (
                existing.get("config_hash") != config_hash
                or existing.get("esphome_version") != __version__
            ):
                sources_changed = True
        except (json.JSONDecodeError, KeyError, OSError):
            sources_changed = True

    # Write build_info header and JSON metadata
    if sources_changed:
        write_file(
            build_info_data_h_path,
            generate_build_info_data_h(config_hash, build_time, build_time_str),
        )
        write_file(
            build_info_json_path,
            json.dumps(
                {
                    "config_hash": config_hash,
                    "build_time": build_time,
                    "build_time_str": build_time_str,
                    "esphome_version": __version__,
                },
                indent=2,
            )
            + "\n",
        )

    platform = "esphome.components." + CORE.target_platform
    try:
@@ -270,6 +332,35 @@ def generate_version_h():
    )


def get_build_info() -> tuple[int, int, str]:
    """Calculate build_info values from current config.

    Returns:
        Tuple of (config_hash, build_time, build_time_str)
    """
    config_hash = CORE.config_hash
    build_time = int(time.time())
    build_time_str = time.strftime("%Y-%m-%d %H:%M:%S %z", time.localtime(build_time))
    return config_hash, build_time, build_time_str


def generate_build_info_data_h(
    config_hash: int, build_time: int, build_time_str: str
) -> str:
    """Generate build_info_data.h header with config hash and build time."""
    return f"""#pragma once
// Auto-generated build_info data
#define ESPHOME_CONFIG_HASH 0x{config_hash:08x}U // NOLINT
#define ESPHOME_BUILD_TIME {build_time} // NOLINT
#ifdef USE_ESP8266
#include <pgmspace.h>
static const char ESPHOME_BUILD_TIME_STR[] PROGMEM = "{build_time_str}";
#else
static const char ESPHOME_BUILD_TIME_STR[] = "{build_time_str}";
#endif
"""


def write_cpp(code_s):
    path = CORE.relative_src_path("main.cpp")
    if path.is_file():
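Editor's note: a quick usage sketch of generate_build_info_data_h() with example values; the same expectations appear in the unit tests near the end of this diff.

from esphome.writer import generate_build_info_data_h

header = generate_build_info_data_h(
    0x12345678, 1700000000, "2023-11-14 22:13:20 +0000"
)
assert "#define ESPHOME_CONFIG_HASH 0x12345678U" in header
assert "#define ESPHOME_BUILD_TIME 1700000000" in header
assert 'ESPHOME_BUILD_TIME_STR[] = "2023-11-14 22:13:20 +0000"' in header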
@@ -1,6 +1,7 @@
from __future__ import annotations

from collections.abc import Callable
from contextlib import suppress
import functools
import inspect
from io import BytesIO, TextIOBase, TextIOWrapper

@@ -501,13 +502,17 @@ def _load_yaml_internal_with_type(
    loader.dispose()


def dump(dict_, show_secrets=False):
def dump(dict_, show_secrets=False, sort_keys=False):
    """Dump YAML to a string and remove null."""
    if show_secrets:
        _SECRET_VALUES.clear()
        _SECRET_CACHE.clear()
    return yaml.dump(
        dict_, default_flow_style=False, allow_unicode=True, Dumper=ESPHomeDumper
        dict_,
        default_flow_style=False,
        allow_unicode=True,
        Dumper=ESPHomeDumper,
        sort_keys=sort_keys,
    )


@@ -543,6 +548,9 @@ class ESPHomeDumper(yaml.SafeDumper):
        best_style = True
        if hasattr(mapping, "items"):
            mapping = list(mapping.items())
        if self.sort_keys:
            with suppress(TypeError):
                mapping = sorted(mapping)
        for item_key, item_value in mapping:
            node_key = self.represent_data(item_key)
            node_value = self.represent_data(item_value)
@@ -12,7 +12,7 @@
using namespace esphome;

void setup() {
  App.pre_setup("livingroom", "LivingRoom", "comment", __DATE__ ", " __TIME__, false);
  App.pre_setup("livingroom", "LivingRoom", "comment", false);
  auto *log = new logger::Logger(115200, 512); // NOLINT
  log->pre_setup();
  log->set_uart_selection(logger::UART_SELECTION_UART0);
tests/integration/fixtures/build_info.yaml (new file, 31 lines)
@@ -0,0 +1,31 @@
esphome:
  name: build-info-test
host:
api:
logger:

text_sensor:
  - platform: template
    name: "Config Hash"
    id: config_hash_sensor
    update_interval: 100ms
    lambda: |-
      char buf[16];
      snprintf(buf, sizeof(buf), "0x%08x", App.get_config_hash());
      return std::string(buf);
  - platform: template
    name: "Build Time"
    id: build_time_sensor
    update_interval: 100ms
    lambda: |-
      char buf[32];
      snprintf(buf, sizeof(buf), "%ld", (long)App.get_build_time());
      return std::string(buf);
  - platform: template
    name: "Build Time String"
    id: build_time_str_sensor
    update_interval: 100ms
    lambda: |-
      char buf[Application::BUILD_TIME_STR_SIZE];
      App.get_build_time_string(buf);
      return std::string(buf);
tests/integration/test_build_info.py (new file, 117 lines)
@@ -0,0 +1,117 @@
"""Integration test for build_info values."""

from __future__ import annotations

import asyncio
from datetime import datetime
import re
import time

from aioesphomeapi import EntityState, TextSensorState
import pytest

from .types import APIClientConnectedFactory, RunCompiledFunction


@pytest.mark.asyncio
async def test_build_info(
    yaml_config: str,
    run_compiled: RunCompiledFunction,
    api_client_connected: APIClientConnectedFactory,
) -> None:
    """Test that build_info values are sane."""
    async with run_compiled(yaml_config), api_client_connected() as client:
        device_info = await client.device_info()
        assert device_info is not None
        assert device_info.name == "build-info-test"

        # Verify compilation_time from device_info is present and parseable
        # The format is ISO 8601 with timezone: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        compilation_time = device_info.compilation_time
        assert compilation_time is not None

        # Validate the ISO format: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        parsed = datetime.strptime(compilation_time, "%Y-%m-%d %H:%M:%S %z")
        assert parsed.year >= time.localtime().tm_year

        # Get entities
        entities, _ = await client.list_entities_services()

        # Find our text sensors by object_id
        config_hash_entity = next(
            (e for e in entities if e.object_id == "config_hash"), None
        )
        build_time_entity = next(
            (e for e in entities if e.object_id == "build_time"), None
        )
        build_time_str_entity = next(
            (e for e in entities if e.object_id == "build_time_string"), None
        )

        assert config_hash_entity is not None, "Config Hash sensor not found"
        assert build_time_entity is not None, "Build Time sensor not found"
        assert build_time_str_entity is not None, "Build Time String sensor not found"

        # Wait for all three text sensors to have valid states
        loop = asyncio.get_running_loop()
        states: dict[int, TextSensorState] = {}
        all_received = loop.create_future()
        expected_keys = {
            config_hash_entity.key,
            build_time_entity.key,
            build_time_str_entity.key,
        }

        def on_state(state: EntityState) -> None:
            if isinstance(state, TextSensorState) and not state.missing_state:
                states[state.key] = state
                if expected_keys <= states.keys() and not all_received.done():
                    all_received.set_result(True)

        client.subscribe_states(on_state)

        try:
            await asyncio.wait_for(all_received, timeout=5.0)
        except TimeoutError:
            pytest.fail(
                f"Timeout waiting for text sensor states. Got: {list(states.keys())}"
            )

        config_hash_state = states[config_hash_entity.key]
        build_time_state = states[build_time_entity.key]
        build_time_str_state = states[build_time_str_entity.key]

        # Validate config_hash format (0x followed by 8 hex digits)
        config_hash = config_hash_state.state
        assert re.match(r"^0x[0-9a-f]{8}$", config_hash), (
            f"config_hash should be 0x followed by 8 hex digits, got: {config_hash}"
        )

        # Validate build_time is a reasonable Unix timestamp
        build_time = int(build_time_state.state)
        current_time = int(time.time())
        # Build time should be within last hour and not in the future
        assert build_time <= current_time, (
            f"build_time {build_time} should not be in the future (current: {current_time})"
        )
        assert build_time > current_time - 3600, (
            f"build_time {build_time} should be within the last hour"
        )

        # Validate build_time_str matches the new ISO format
        build_time_str = build_time_str_state.state
        # Format: "YYYY-MM-DD HH:MM:SS +ZZZZ"
        parsed_build_time = datetime.strptime(build_time_str, "%Y-%m-%d %H:%M:%S %z")
        assert parsed_build_time.year >= time.localtime().tm_year

        # Verify build_time_str matches what we get from build_time timestamp
        expected_str = time.strftime("%Y-%m-%d %H:%M:%S %z", time.localtime(build_time))
        assert build_time_str == expected_str, (
            f"build_time_str '{build_time_str}' should match timestamp '{expected_str}'"
        )

        # Verify compilation_time matches build_time_str (they should be the same)
        assert compilation_time == build_time_str, (
            f"compilation_time '{compilation_time}' should match "
            f"build_time_str '{build_time_str}'"
        )
@@ -58,6 +58,7 @@ def mock_write_file_if_changed() -> Generator[Mock, None, None]:
def mock_copy_file_if_changed() -> Generator[Mock, None, None]:
    """Mock copy_file_if_changed for core.config."""
    with patch("esphome.core.config.copy_file_if_changed") as mock:
        mock.return_value = True
        yield mock
@@ -892,3 +892,74 @@ async def test_add_includes_overwrites_existing_files(
    mock_copy_file_if_changed.assert_called_once_with(
        include_file, CORE.build_path / "src" / "header.h"
    )


def test_config_hash_returns_int() -> None:
    """Test that config_hash returns an integer."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}
    assert isinstance(CORE.config_hash, int)


def test_config_hash_is_cached() -> None:
    """Test that config_hash is computed once and cached."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}

    # First access computes the hash
    hash1 = CORE.config_hash

    # Modify config (without resetting cache)
    CORE.config = {"esphome": {"name": "different"}}

    # Second access returns cached value
    hash2 = CORE.config_hash

    assert hash1 == hash2


def test_config_hash_reset_clears_cache() -> None:
    """Test that reset() clears the cached config_hash."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test"}}
    hash1 = CORE.config_hash

    # Reset clears the cache
    CORE.reset()
    CORE.config = {"esphome": {"name": "different"}}

    hash2 = CORE.config_hash

    # After reset, hash should be recomputed
    assert hash1 != hash2


def test_config_hash_deterministic_key_order() -> None:
    """Test that config_hash is deterministic regardless of key insertion order."""
    CORE.reset()
    # Create two configs with same content but different key order
    config1 = {"z_key": 1, "a_key": 2, "nested": {"z_nested": "z", "a_nested": "a"}}
    config2 = {"a_key": 2, "z_key": 1, "nested": {"a_nested": "a", "z_nested": "z"}}

    CORE.config = config1
    hash1 = CORE.config_hash

    CORE.reset()
    CORE.config = config2
    hash2 = CORE.config_hash

    # Hashes should be equal because keys are sorted during serialization
    assert hash1 == hash2


def test_config_hash_different_for_different_configs() -> None:
    """Test that different configs produce different hashes."""
    CORE.reset()
    CORE.config = {"esphome": {"name": "test1"}}
    hash1 = CORE.config_hash

    CORE.reset()
    CORE.config = {"esphome": {"name": "test2"}}
    hash2 = CORE.config_hash

    assert hash1 != hash2
@@ -4,9 +4,11 @@ from __future__ import annotations

from collections.abc import Generator
from dataclasses import dataclass
import json
import logging
from pathlib import Path
import re
import time
from typing import Any
from unittest.mock import MagicMock, Mock, patch

@@ -22,6 +24,7 @@ from esphome.__main__ import (
    command_rename,
    command_update_all,
    command_wizard,
    compile_program,
    detect_external_components,
    get_port_type,
    has_ip_address,
@@ -2605,3 +2608,197 @@ def test_command_analyze_memory_no_idedata(

    assert result == 1
    assert "Failed to get IDE data for memory analysis" in caplog.text


@pytest.fixture
def mock_compile_build_info_run_compile() -> Generator[Mock]:
    """Mock platformio_api.run_compile for build_info tests."""
    with patch("esphome.platformio_api.run_compile", return_value=0) as mock:
        yield mock


@pytest.fixture
def mock_compile_build_info_get_idedata() -> Generator[Mock]:
    """Mock platformio_api.get_idedata for build_info tests."""
    mock_idedata = MagicMock()
    with patch("esphome.platformio_api.get_idedata", return_value=mock_idedata) as mock:
        yield mock


def _setup_build_info_test(
    tmp_path: Path,
    *,
    create_firmware: bool = True,
    create_build_info: bool = True,
    build_info_content: str | None = None,
    firmware_first: bool = False,
) -> tuple[Path, Path]:
    """Set up build directory structure for build_info tests.

    Args:
        tmp_path: Temporary directory path.
        create_firmware: Whether to create firmware.bin file.
        create_build_info: Whether to create build_info.json file.
        build_info_content: Custom content for build_info.json, or None for default.
        firmware_first: If True, create firmware before build_info (makes firmware older).

    Returns:
        Tuple of (build_info_path, firmware_path).
    """
    setup_core(platform=PLATFORM_ESP32, tmp_path=tmp_path, name="test_device")

    build_path = tmp_path / ".esphome" / "build" / "test_device"
    pioenvs_path = build_path / ".pioenvs" / "test_device"
    pioenvs_path.mkdir(parents=True, exist_ok=True)

    build_info_path = build_path / "build_info.json"
    firmware_path = pioenvs_path / "firmware.bin"

    default_build_info = json.dumps(
        {
            "config_hash": 0x12345678,
            "build_time": int(time.time()),
            "build_time_str": "Dec 13 2025, 12:00:00",
            "esphome_version": "2025.1.0",
        }
    )

    def create_build_info_file() -> None:
        if create_build_info:
            content = (
                build_info_content
                if build_info_content is not None
                else default_build_info
            )
            build_info_path.write_text(content)

    def create_firmware_file() -> None:
        if create_firmware:
            firmware_path.write_bytes(b"fake firmware")

    if firmware_first:
        create_firmware_file()
        time.sleep(0.01)  # Ensure different timestamps
        create_build_info_file()
    else:
        create_build_info_file()
        time.sleep(0.01)  # Ensure different timestamps
        create_firmware_file()

    return build_info_path, firmware_path


def test_compile_program_emits_build_info_when_firmware_rebuilt(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program logs build_info when firmware is rebuilt."""
    _setup_build_info_test(tmp_path, firmware_first=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info: config_hash=0x12345678" in caplog.text


def test_compile_program_no_build_info_when_firmware_not_rebuilt(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when firmware wasn't rebuilt."""
    _setup_build_info_test(tmp_path, firmware_first=True)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_firmware_missing(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when firmware.bin doesn't exist."""
    _setup_build_info_test(tmp_path, create_firmware=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_missing(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json doesn't exist."""
    _setup_build_info_test(tmp_path, create_build_info=False)

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_invalid(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json is invalid."""
    _setup_build_info_test(tmp_path, build_info_content="not valid json {{{")

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.DEBUG):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text


def test_compile_program_no_build_info_when_json_missing_keys(
    tmp_path: Path,
    caplog: pytest.LogCaptureFixture,
    mock_compile_build_info_run_compile: Mock,
    mock_compile_build_info_get_idedata: Mock,
) -> None:
    """Test that compile_program doesn't log build_info when build_info.json is missing required keys."""
    _setup_build_info_test(
        tmp_path, build_info_content=json.dumps({"build_time": 1234567890})
    )

    config: dict[str, Any] = {CONF_ESPHOME: {CONF_NAME: "test_device"}}
    args = MockArgs()

    with caplog.at_level(logging.INFO):
        result = compile_program(args, config)

    assert result == 0
    assert "Build Info:" not in caplog.text
@@ -1,6 +1,10 @@
"""Test writer module functionality."""

from collections.abc import Callable
from contextlib import contextmanager
from dataclasses import dataclass
from datetime import datetime
import json
import os
from pathlib import Path
import stat

@@ -20,6 +24,9 @@ from esphome.writer import (
    clean_all,
    clean_build,
    clean_cmake_cache,
    copy_src_tree,
    generate_build_info_data_h,
    get_build_info,
    storage_should_clean,
    update_storage_json,
    write_cpp,
@@ -1165,3 +1172,721 @@ def test_clean_build_reraises_for_other_errors(
    finally:
        # Cleanup - restore write permission so tmp_path cleanup works
        os.chmod(subdir, stat.S_IRWXU)


# Tests for get_build_info()


@patch("esphome.writer.CORE")
def test_get_build_info_new_build(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when no existing build_info.json."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0
    assert isinstance(build_time_str, str)
    # Verify build_time_str format matches expected pattern
    assert len(build_time_str) >= 19  # e.g., "2025-12-15 16:27:44 +0000"


@patch("esphome.writer.CORE")
def test_get_build_info_always_returns_current_time(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info always returns current build_time."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create existing build_info.json with matching config_hash and version
    existing_build_time = 1700000000
    existing_build_time_str = "2023-11-14 22:13:20 +0000"
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,
                "build_time": existing_build_time,
                "build_time_str": existing_build_time_str,
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    # get_build_info now always returns current time
    assert build_time != existing_build_time
    assert build_time > existing_build_time
    assert build_time_str != existing_build_time_str


@patch("esphome.writer.CORE")
def test_get_build_info_config_changed(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when config hash changed."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0xABCDEF00  # Different from existing

    # Create existing build_info.json with different config_hash
    existing_build_time = 1700000000
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,  # Different
                "build_time": existing_build_time,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0xABCDEF00
    assert build_time != existing_build_time  # New time generated
    assert build_time > existing_build_time


@patch("esphome.writer.CORE")
def test_get_build_info_version_changed(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns new build_time when ESPHome version changed."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create existing build_info.json with different version
    existing_build_time = 1700000000
    build_info_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,
                "build_time": existing_build_time,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2024.12.0",  # Old version
            }
        )
    )

    with patch("esphome.writer.__version__", "2025.1.0-dev"):  # New version
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert build_time != existing_build_time  # New time generated
    assert build_time > existing_build_time


@patch("esphome.writer.CORE")
def test_get_build_info_invalid_json(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info handles invalid JSON gracefully."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create invalid JSON file
    build_info_path.write_text("not valid json {{{")

    config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0


@patch("esphome.writer.CORE")
def test_get_build_info_missing_keys(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info handles missing keys gracefully."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    # Create JSON with missing keys
    build_info_path.write_text(json.dumps({"config_hash": 0x12345678}))

    with patch("esphome.writer.__version__", "2025.1.0-dev"):
        config_hash, build_time, build_time_str = get_build_info()

    assert config_hash == 0x12345678
    assert isinstance(build_time, int)
    assert build_time > 0


@patch("esphome.writer.CORE")
def test_get_build_info_build_time_str_format(
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test get_build_info returns correctly formatted build_time_str."""
    build_info_path = tmp_path / "build_info.json"
    mock_core.relative_build_path.return_value = build_info_path
    mock_core.config_hash = 0x12345678

    config_hash, build_time, build_time_str = get_build_info()

    # Verify the format matches "%Y-%m-%d %H:%M:%S %z"
    # e.g., "2025-12-15 16:27:44 +0000"
    parsed = datetime.strptime(build_time_str, "%Y-%m-%d %H:%M:%S %z")
    assert parsed.year >= 2024

def test_generate_build_info_data_h_format() -> None:
    """Test generate_build_info_data_h produces correct header content."""
    config_hash = 0x12345678
    build_time = 1700000000
    build_time_str = "2023-11-14 22:13:20 +0000"

    result = generate_build_info_data_h(config_hash, build_time, build_time_str)

    assert "#pragma once" in result
    assert "#define ESPHOME_CONFIG_HASH 0x12345678U" in result
    assert "#define ESPHOME_BUILD_TIME 1700000000" in result
    assert 'ESPHOME_BUILD_TIME_STR[] = "2023-11-14 22:13:20 +0000"' in result


def test_generate_build_info_data_h_esp8266_progmem() -> None:
    """Test generate_build_info_data_h includes PROGMEM for ESP8266."""
    result = generate_build_info_data_h(0xABCDEF01, 1700000000, "test")

    # Should have ESP8266 PROGMEM conditional
    assert "#ifdef USE_ESP8266" in result
    assert "#include <pgmspace.h>" in result
    assert "PROGMEM" in result


def test_generate_build_info_data_h_hash_formatting() -> None:
    """Test generate_build_info_data_h formats hash with leading zeros."""
    # Test with small hash value that needs leading zeros
    result = generate_build_info_data_h(0x00000001, 0, "test")
    assert "#define ESPHOME_CONFIG_HASH 0x00000001U" in result

    # Test with larger hash value
    result = generate_build_info_data_h(0xFFFFFFFF, 0, "test")
    assert "#define ESPHOME_CONFIG_HASH 0xffffffffU" in result

@patch("esphome.writer.CORE")
|
||||
@patch("esphome.writer.iter_components")
|
||||
@patch("esphome.writer.walk_files")
|
||||
def test_copy_src_tree_writes_build_info_files(
|
||||
mock_walk_files: MagicMock,
|
||||
mock_iter_components: MagicMock,
|
||||
mock_core: MagicMock,
|
||||
tmp_path: Path,
|
||||
) -> None:
|
||||
"""Test copy_src_tree writes build_info_data.h and build_info.json."""
|
||||
# Setup directory structure
|
||||
src_path = tmp_path / "src"
|
||||
src_path.mkdir()
|
||||
esphome_core_path = src_path / "esphome" / "core"
|
||||
esphome_core_path.mkdir(parents=True)
|
||||
build_path = tmp_path / "build"
|
||||
build_path.mkdir()
|
||||
|
||||
# Create mock source files for defines.h and version.h
|
||||
mock_defines_h = esphome_core_path / "defines.h"
|
||||
mock_defines_h.write_text("// mock defines.h")
|
||||
mock_version_h = esphome_core_path / "version.h"
|
||||
mock_version_h.write_text("// mock version.h")
|
||||
|
||||
# Create mock FileResource that returns our temp files
|
||||
@dataclass(frozen=True)
|
||||
class MockFileResource:
|
||||
package: str
|
||||
resource: str
|
||||
_path: Path
|
||||
|
||||
@contextmanager
|
||||
def path(self):
|
||||
yield self._path
|
||||
|
||||
# Create mock resources for defines.h and version.h (required by copy_src_tree)
|
||||
mock_resources = [
|
||||
MockFileResource(
|
||||
package="esphome.core",
|
||||
resource="defines.h",
|
||||
_path=mock_defines_h,
|
||||
),
|
||||
MockFileResource(
|
||||
package="esphome.core",
|
||||
resource="version.h",
|
||||
_path=mock_version_h,
|
||||
),
|
||||
]
|
||||
|
||||
# Create mock component with resources
|
||||
mock_component = MagicMock()
|
||||
mock_component.resources = mock_resources
|
||||
|
||||
# Setup mocks
|
||||
mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
|
||||
mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
|
||||
mock_core.defines = []
|
||||
mock_core.config_hash = 0xDEADBEEF
|
||||
mock_core.target_platform = "test_platform"
|
||||
mock_core.config = {}
|
||||
mock_iter_components.return_value = [("core", mock_component)]
|
||||
mock_walk_files.return_value = []
|
||||
|
||||
# Create mock module without copy_files attribute (causes AttributeError which is caught)
|
||||
mock_module = MagicMock(spec=[]) # Empty spec = no copy_files attribute
|
||||
|
||||
with (
|
||||
patch("esphome.writer.__version__", "2025.1.0-dev"),
|
||||
patch("esphome.writer.importlib.import_module", return_value=mock_module),
|
||||
):
|
||||
copy_src_tree()
|
||||
|
||||
# Verify build_info_data.h was written
|
||||
build_info_h_path = esphome_core_path / "build_info_data.h"
|
||||
assert build_info_h_path.exists()
|
||||
build_info_h_content = build_info_h_path.read_text()
|
||||
assert "#define ESPHOME_CONFIG_HASH 0xdeadbeefU" in build_info_h_content
|
||||
assert "#define ESPHOME_BUILD_TIME" in build_info_h_content
|
||||
assert "ESPHOME_BUILD_TIME_STR" in build_info_h_content
|
||||
|
||||
# Verify build_info.json was written
|
||||
build_info_json_path = build_path / "build_info.json"
|
||||
assert build_info_json_path.exists()
|
||||
build_info_json = json.loads(build_info_json_path.read_text())
|
||||
assert build_info_json["config_hash"] == 0xDEADBEEF
|
||||
assert "build_time" in build_info_json
|
||||
assert "build_time_str" in build_info_json
|
||||
assert build_info_json["esphome_version"] == "2025.1.0-dev"
|
||||
|
||||
|
||||
@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_detects_config_hash_change(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test copy_src_tree detects when config_hash changes."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_core_path = src_path / "esphome" / "core"
    esphome_core_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create existing build_info.json with different config_hash
    build_info_json_path = build_path / "build_info.json"
    build_info_json_path.write_text(
        json.dumps(
            {
                "config_hash": 0x12345678,  # Different from current
                "build_time": 1700000000,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    # Create existing build_info_data.h
    build_info_h_path = esphome_core_path / "build_info_data.h"
    build_info_h_path.write_text("// old build_info_data.h")

    # Setup mocks
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF  # Different from existing
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = []
    mock_walk_files.return_value = []

    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Verify build_info files were updated due to config_hash change
    assert build_info_h_path.exists()
    new_content = build_info_h_path.read_text()
    assert "0xdeadbeef" in new_content.lower()

    new_json = json.loads(build_info_json_path.read_text())
    assert new_json["config_hash"] == 0xDEADBEEF

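# The staleness check exercised above and in the next two tests amounts to
# comparing the stored build_info.json against the current config hash and
# ESPHome version, regenerating whenever the file is missing, unreadable, or
# out of date. A minimal sketch of that idea (build_info_needs_update is a
# hypothetical helper, not the actual esphome.writer code):
import json
from pathlib import Path


def build_info_needs_update(
    build_info_json: Path, config_hash: int, esphome_version: str
) -> bool:
    """Return True when the stored build info is missing, corrupt, or stale."""
    try:
        stored = json.loads(build_info_json.read_text())
    except (OSError, json.JSONDecodeError):
        return True  # Missing or invalid JSON: regenerate from scratch.
    return (
        stored.get("config_hash") != config_hash
        or stored.get("esphome_version") != esphome_version
    )
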
@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_detects_version_change(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test copy_src_tree detects when esphome_version changes."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_core_path = src_path / "esphome" / "core"
    esphome_core_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create existing build_info.json with different version
    build_info_json_path = build_path / "build_info.json"
    build_info_json_path.write_text(
        json.dumps(
            {
                "config_hash": 0xDEADBEEF,
                "build_time": 1700000000,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2024.12.0",  # Old version
            }
        )
    )

    # Create existing build_info_data.h
    build_info_h_path = esphome_core_path / "build_info_data.h"
    build_info_h_path.write_text("// old build_info_data.h")

    # Setup mocks
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = []
    mock_walk_files.return_value = []

    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),  # New version
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Verify build_info files were updated due to version change
    assert build_info_h_path.exists()
    new_json = json.loads(build_info_json_path.read_text())
    assert new_json["esphome_version"] == "2025.1.0-dev"

@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_handles_invalid_build_info_json(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test copy_src_tree handles invalid build_info.json gracefully."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_core_path = src_path / "esphome" / "core"
    esphome_core_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create invalid build_info.json
    build_info_json_path = build_path / "build_info.json"
    build_info_json_path.write_text("invalid json {{{")

    # Create existing build_info_data.h
    build_info_h_path = esphome_core_path / "build_info_data.h"
    build_info_h_path.write_text("// old build_info_data.h")

    # Setup mocks
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = []
    mock_walk_files.return_value = []

    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Verify build_info files were created despite invalid JSON
    assert build_info_h_path.exists()
    new_json = json.loads(build_info_json_path.read_text())
    assert new_json["config_hash"] == 0xDEADBEEF

@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_build_info_timestamp_behavior(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test build_info behaviour: regenerated on change, preserved when unchanged."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_core_path = src_path / "esphome" / "core"
    esphome_core_path.mkdir(parents=True)
    esphome_components_path = src_path / "esphome" / "components"
    esphome_components_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create a source file
    source_file = tmp_path / "source" / "test.cpp"
    source_file.parent.mkdir()
    source_file.write_text("// version 1")

    # Create destination file in build tree
    dest_file = esphome_components_path / "test.cpp"

    # Create mock FileResource
    @dataclass(frozen=True)
    class MockFileResource:
        package: str
        resource: str
        _path: Path

        @contextmanager
        def path(self):
            yield self._path

    mock_resources = [
        MockFileResource(
            package="esphome.components",
            resource="test.cpp",
            _path=source_file,
        ),
    ]

    mock_component = MagicMock()
    mock_component.resources = mock_resources

    # Setup mocks
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = [("test", mock_component)]

    build_info_json_path = build_path / "build_info.json"

    # First run: initial setup, should create build_info
    mock_walk_files.return_value = []
    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Manually set an old timestamp for testing
    old_timestamp = 1700000000
    old_timestamp_str = "2023-11-14 22:13:20 +0000"
    build_info_json_path.write_text(
        json.dumps(
            {
                "config_hash": 0xDEADBEEF,
                "build_time": old_timestamp,
                "build_time_str": old_timestamp_str,
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    # Second run: no changes, should NOT regenerate build_info
    mock_walk_files.return_value = [str(dest_file)]
    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    second_json = json.loads(build_info_json_path.read_text())
    second_timestamp = second_json["build_time"]

    # Verify timestamp was NOT changed
    assert second_timestamp == old_timestamp, (
        f"build_info should not be regenerated when no files change: "
        f"{old_timestamp} != {second_timestamp}"
    )

    # Third run: change source file, should regenerate build_info with new timestamp
    source_file.write_text("// version 2")
    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    third_json = json.loads(build_info_json_path.read_text())
    third_timestamp = third_json["build_time"]

    # Verify timestamp WAS changed
    assert third_timestamp != old_timestamp, (
        f"build_info should be regenerated when source file changes: "
        f"{old_timestamp} == {third_timestamp}"
    )
    assert third_timestamp > old_timestamp

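# The timestamp behaviour verified above can be summarised as: reuse the stored
# build time when nothing changed, stamp a new one only when sources differ.
# Sketch under that assumption (update_build_time is a hypothetical name, not
# part of esphome.writer):
import time


def update_build_time(previous: dict | None, sources_changed: bool) -> dict:
    """Return build-time fields, preserving the previous ones when possible."""
    if previous is not None and not sources_changed:
        return previous  # No source changes: keep the old build_time untouched.
    now = int(time.time())
    return {
        "build_time": now,
        "build_time_str": time.strftime("%Y-%m-%d %H:%M:%S +0000", time.gmtime(now)),
    }
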
@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_detects_removed_source_file(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test copy_src_tree detects when a non-generated source file is removed."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_components_path = src_path / "esphome" / "components"
    esphome_components_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create an existing source file in the build tree
    existing_file = esphome_components_path / "test.cpp"
    existing_file.write_text("// test file")

    # Setup mocks - no components, so the file should be removed
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = []  # No components = file should be removed
    mock_walk_files.return_value = [str(existing_file)]

    # Create existing build_info.json
    build_info_json_path = build_path / "build_info.json"
    old_timestamp = 1700000000
    build_info_json_path.write_text(
        json.dumps(
            {
                "config_hash": 0xDEADBEEF,
                "build_time": old_timestamp,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Verify file was removed
    assert not existing_file.exists()

    # Verify build_info was regenerated due to source file removal
    new_json = json.loads(build_info_json_path.read_text())
    assert new_json["build_time"] != old_timestamp

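# Removal detection, as exercised above, reduces to a set difference between the
# files currently present in the build tree and the files the configured
# components still provide; anything left over is deleted and counts as a source
# change. Illustrative sketch only (stale_build_files is a hypothetical helper,
# not the actual esphome.writer code):
from pathlib import Path


def stale_build_files(existing: set[Path], expected: set[Path]) -> set[Path]:
    """Files in the build tree that no configured component provides any more."""
    return existing - expected
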
@patch("esphome.writer.CORE")
@patch("esphome.writer.iter_components")
@patch("esphome.writer.walk_files")
def test_copy_src_tree_ignores_removed_generated_file(
    mock_walk_files: MagicMock,
    mock_iter_components: MagicMock,
    mock_core: MagicMock,
    tmp_path: Path,
) -> None:
    """Test copy_src_tree doesn't mark sources_changed when only a generated file is removed."""
    # Setup directory structure
    src_path = tmp_path / "src"
    src_path.mkdir()
    esphome_core_path = src_path / "esphome" / "core"
    esphome_core_path.mkdir(parents=True)
    build_path = tmp_path / "build"
    build_path.mkdir()

    # Create existing build_info_data.h (a generated file)
    build_info_h = esphome_core_path / "build_info_data.h"
    build_info_h.write_text("// old generated file")

    # Setup mocks
    mock_core.relative_src_path.side_effect = lambda *args: src_path.joinpath(*args)
    mock_core.relative_build_path.side_effect = lambda *args: build_path.joinpath(*args)
    mock_core.defines = []
    mock_core.config_hash = 0xDEADBEEF
    mock_core.target_platform = "test_platform"
    mock_core.config = {}
    mock_iter_components.return_value = []
    # walk_files returns the generated file, but it's not in source_files_copy
    mock_walk_files.return_value = [str(build_info_h)]

    # Create existing build_info.json with old timestamp
    build_info_json_path = build_path / "build_info.json"
    old_timestamp = 1700000000
    build_info_json_path.write_text(
        json.dumps(
            {
                "config_hash": 0xDEADBEEF,
                "build_time": old_timestamp,
                "build_time_str": "2023-11-14 22:13:20 +0000",
                "esphome_version": "2025.1.0-dev",
            }
        )
    )

    with (
        patch("esphome.writer.__version__", "2025.1.0-dev"),
        patch("esphome.writer.importlib.import_module") as mock_import,
    ):
        mock_import.side_effect = AttributeError
        copy_src_tree()

    # Verify build_info_data.h was regenerated (not removed)
    assert build_info_h.exists()

    # Note: build_info.json will have a new timestamp because get_build_info()
    # always returns the current time. The key check is that the old
    # build_info_data.h was removed and regenerated without triggering sources_changed.
    new_json = json.loads(build_info_json_path.read_text())
    assert new_json["config_hash"] == 0xDEADBEEF

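# The test above relies on generated artifacts such as build_info_data.h being
# excluded from the "did a real source change?" decision, because they are
# rewritten on every run anyway. A sketch of that filtering; GENERATED_NAMES and
# source_files_changed are hypothetical names used only for illustration.
from pathlib import Path

GENERATED_NAMES = {"build_info_data.h"}  # Assumed set of always-regenerated files.


def source_files_changed(removed_files: set[Path]) -> bool:
    """Only removals of non-generated files should count as a source change."""
    return any(path.name not in GENERATED_NAMES for path in removed_files)
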
@@ -278,3 +278,31 @@ def test_secret_values_tracking(fixture_path: Path) -> None:
    assert yaml_util._SECRET_VALUES["super_secret_wifi"] == "wifi_password"
    assert "0123456789abcdef" in yaml_util._SECRET_VALUES
    assert yaml_util._SECRET_VALUES["0123456789abcdef"] == "api_key"


def test_dump_sort_keys() -> None:
    """Test that dump with sort_keys=True produces sorted output."""
    # Create a dict with unsorted keys
    data = {
        "zebra": 1,
        "alpha": 2,
        "nested": {
            "z_key": "z_value",
            "a_key": "a_value",
        },
    }

    # Without sort_keys, keys are in insertion order
    unsorted = yaml_util.dump(data, sort_keys=False)
    lines_unsorted = unsorted.strip().split("\n")
    # First key should be "zebra" (insertion order)
    assert lines_unsorted[0].startswith("zebra:")

    # With sort_keys, keys are alphabetically sorted
    sorted_dump = yaml_util.dump(data, sort_keys=True)
    lines_sorted = sorted_dump.strip().split("\n")
    # First key should be "alpha" (alphabetical order)
    assert lines_sorted[0].startswith("alpha:")
    # nested keys should also be sorted
    assert "a_key:" in sorted_dump
    assert sorted_dump.index("a_key:") < sorted_dump.index("z_key:")

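# For context, sort_keys is a standard option on PyYAML's dumper, so a thin
# wrapper can simply forward it. The sketch below only assumes the public PyYAML
# API (yaml.dump), not the internals of esphome.yaml_util; dump_sorted is a
# hypothetical name.
import yaml


def dump_sorted(data: dict, sort_keys: bool = False) -> str:
    """Serialize data to YAML, optionally sorting mapping keys alphabetically."""
    return yaml.dump(data, default_flow_style=False, sort_keys=sort_keys)


# Example: dump_sorted({"zebra": 1, "alpha": 2}, sort_keys=True) starts with "alpha: 2".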