Compare commits
119 Commits
2022.6.0b4
...
dev
Author | SHA1 | Date | |
---|---|---|---|
|
e0555e140f | ||
|
093989406f | ||
|
cdb16f08f6 | ||
|
53139c293b | ||
|
6a8bdcc315 | ||
|
fe535939a3 | ||
|
09e6c11d73 | ||
|
8112bdfaa8 | ||
|
f7db9aaa9f | ||
|
435f972357 | ||
|
f82b46c16b | ||
|
bca96f91b2 | ||
|
6f83a49c63 | ||
|
72cce391ab | ||
|
28d2949ebe | ||
|
c4a0015997 | ||
|
f564be6aea | ||
|
988f15e6af | ||
|
37b6d442bd | ||
|
fb2467f6f0 | ||
|
29045b0435 | ||
|
311a48c64e | ||
|
01b3815f27 | ||
|
b0d1c801bd | ||
|
5aaac06f5b | ||
|
34adbf0588 | ||
|
0a4213182e | ||
|
b0c0258e70 | ||
|
8110e591d0 | ||
|
fe05d7aec1 | ||
|
57f5884070 | ||
|
f329c74a15 | ||
|
7c86f3fa9e | ||
|
203b8b01bf | ||
|
8a1034a92f | ||
|
aa0c2dedd9 | ||
|
d045908e05 | ||
|
f002a23d2d | ||
|
29d6d0a906 | ||
|
c8b58b5c23 | ||
|
01bfafc5f1 | ||
|
8c9948bb56 | ||
|
2d1abaa68e | ||
|
664a3df0b4 | ||
|
06440d0202 | ||
|
0ecf9f4f2f | ||
|
8998c5f6dd | ||
|
3a9ab50dd2 | ||
|
5abd91d6d5 | ||
|
c3da42516b | ||
|
ec1fae6883 | ||
|
77f322166e | ||
|
f3f6e54818 | ||
|
fb0fec1f25 | ||
|
b66af9fb4d | ||
|
6617d576a7 | ||
|
420dacb22d | ||
|
ae2f6ad4d1 | ||
|
2c28d79bf8 | ||
|
993044c870 | ||
|
a8c1b63edb | ||
|
db7d946e1b | ||
|
fc7348d46d | ||
|
8be2456c7e | ||
|
bb5f7249a6 | ||
|
fc94a5d0ee | ||
|
24029cc918 | ||
|
9a9d5964ee | ||
|
4e4a512107 | ||
|
0729ed538e | ||
|
24b75b7ed6 | ||
|
ec3618ecb8 | ||
|
792a24f38d | ||
|
652e8a015b | ||
|
1ef6fd8fb0 | ||
|
942b0de7fd | ||
|
859cca49d1 | ||
|
8f7ff25624 | ||
|
97aca8e54c | ||
|
95acf19067 | ||
|
3d0899aa58 | ||
|
138d6e505b | ||
|
2748e6ba29 | ||
|
dbd4e927d8 | ||
|
e73d47918f | ||
|
b881bc071e | ||
|
1d0395d1c7 | ||
|
616c787e37 | ||
|
0c4de2bc97 | ||
|
c2f5ac9eba | ||
|
5764c988af | ||
|
ccc2fbfd67 | ||
|
10b4adb8e6 | ||
|
83b7181bcb | ||
|
8886b7e141 | ||
|
7dcc4d030b | ||
|
b9398897c1 | ||
|
657b1c60ae | ||
|
dc54b17778 | ||
|
1fb214165b | ||
|
81b2fd78f5 | ||
|
69002fb1e6 | ||
|
75332a752d | ||
|
09ed1aed93 | ||
|
53d3718028 | ||
|
2b5dce5232 | ||
|
9ad84150aa | ||
|
c0523590b4 | ||
|
c7f091ab10 | ||
|
7479e0aada | ||
|
5bbee1a1fe | ||
|
bdb9546ca3 | ||
|
46af4cad6e | ||
|
76a238912b | ||
|
909a526967 | ||
|
cd6f4fb93f | ||
|
c19458696e | ||
|
318b930e9f | ||
|
9296a078a7 |
5
.github/dependabot.yml
vendored
5
.github/dependabot.yml
vendored
@ -7,3 +7,8 @@ updates:
|
||||
ignore:
|
||||
# Hypotehsis is only used for testing and is updated quite often
|
||||
- dependency-name: hypothesis
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: daily
|
||||
open-pull-requests-limit: 10
|
||||
|
8
.github/workflows/ci-docker.yml
vendored
8
.github/workflows/ci-docker.yml
vendored
@ -30,15 +30,15 @@ jobs:
|
||||
arch: [amd64, armv7, aarch64]
|
||||
build_type: ["ha-addon", "docker", "lint"]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set TAG
|
||||
run: |
|
||||
|
8
.github/workflows/ci.yml
vendored
8
.github/workflows/ci.yml
vendored
@ -75,15 +75,15 @@ jobs:
|
||||
pio_cache_key: tidyesp32-idf
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
id: python
|
||||
with:
|
||||
python-version: '3.8'
|
||||
|
||||
- name: Cache virtualenv
|
||||
uses: actions/cache@v2
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: .venv
|
||||
key: venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements*.txt') }}
|
||||
@ -102,7 +102,7 @@ jobs:
|
||||
|
||||
# Use per check platformio cache because checks use different parts
|
||||
- name: Cache platformio
|
||||
uses: actions/cache@v2
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/.platformio
|
||||
key: platformio-${{ matrix.pio_cache_key }}-${{ hashFiles('platformio.ini') }}
|
||||
|
26
.github/workflows/release.yml
vendored
26
.github/workflows/release.yml
vendored
@ -17,7 +17,7 @@ jobs:
|
||||
outputs:
|
||||
tag: ${{ steps.tag.outputs.tag }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Get tag
|
||||
id: tag
|
||||
run: |
|
||||
@ -35,9 +35,9 @@ jobs:
|
||||
if: github.repository == 'esphome/esphome' && github.event_name == 'release'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v1
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.x'
|
||||
- name: Set up python environment
|
||||
@ -65,24 +65,24 @@ jobs:
|
||||
arch: [amd64, armv7, aarch64]
|
||||
build_type: ["ha-addon", "docker", "lint"]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Log in to docker hub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log in to the GitHub container registry
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@ -108,9 +108,9 @@ jobs:
|
||||
matrix:
|
||||
build_type: ["ha-addon", "docker", "lint"]
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v3
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.9'
|
||||
- name: Enable experimental manifest support
|
||||
@ -119,12 +119,12 @@ jobs:
|
||||
echo "{\"experimental\": \"enabled\"}" > ~/.docker/config.json
|
||||
|
||||
- name: Log in to docker hub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log in to the GitHub container registry
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
|
4
.github/workflows/stale.yml
vendored
4
.github/workflows/stale.yml
vendored
@ -16,7 +16,7 @@ jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v4
|
||||
- uses: actions/stale@v5
|
||||
with:
|
||||
days-before-pr-stale: 90
|
||||
days-before-pr-close: 7
|
||||
@ -35,7 +35,7 @@ jobs:
|
||||
close-issues:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/stale@v4
|
||||
- uses: actions/stale@v5
|
||||
with:
|
||||
days-before-pr-stale: -1
|
||||
days-before-pr-close: -1
|
||||
|
@ -2,7 +2,7 @@
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/ambv/black
|
||||
rev: 22.3.0
|
||||
rev: 22.6.0
|
||||
hooks:
|
||||
- id: black
|
||||
args:
|
||||
@ -26,7 +26,7 @@ repos:
|
||||
- --branch=release
|
||||
- --branch=beta
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v2.31.1
|
||||
rev: v2.34.0
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py38-plus]
|
||||
|
@ -52,6 +52,7 @@ esphome/components/cs5460a/* @balrog-kun
|
||||
esphome/components/cse7761/* @berfenger
|
||||
esphome/components/ct_clamp/* @jesserockz
|
||||
esphome/components/current_based/* @djwmarcx
|
||||
esphome/components/dac7678/* @NickB1
|
||||
esphome/components/daly_bms/* @s1lvi0
|
||||
esphome/components/dashboard_import/* @esphome/core
|
||||
esphome/components/debug/* @OttoWinter
|
||||
|
@ -46,12 +46,10 @@ RUN \
|
||||
# Ubuntu python3-pip is missing wheel
|
||||
pip3 install --no-cache-dir \
|
||||
wheel==0.37.1 \
|
||||
platformio==5.2.5 \
|
||||
platformio==6.0.2 \
|
||||
# Change some platformio settings
|
||||
&& platformio settings set enable_telemetry No \
|
||||
&& platformio settings set check_libraries_interval 1000000 \
|
||||
&& platformio settings set check_platformio_interval 1000000 \
|
||||
&& platformio settings set check_platforms_interval 1000000 \
|
||||
&& mkdir -p /piolibs
|
||||
|
||||
|
||||
|
@ -12,7 +12,7 @@ from esphome.const import (
|
||||
CONF_TYPE_ID,
|
||||
CONF_TIME,
|
||||
)
|
||||
from esphome.jsonschema import jschema_extractor
|
||||
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
|
||||
from esphome.util import Registry
|
||||
|
||||
|
||||
@ -23,11 +23,10 @@ def maybe_simple_id(*validators):
|
||||
def maybe_conf(conf, *validators):
|
||||
validator = cv.All(*validators)
|
||||
|
||||
@jschema_extractor("maybe")
|
||||
@schema_extractor("maybe")
|
||||
def validate(value):
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
return validator
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return (validator, conf)
|
||||
|
||||
if isinstance(value, dict):
|
||||
return validator(value)
|
||||
@ -111,11 +110,9 @@ def validate_automation(extra_schema=None, extra_validators=None, single=False):
|
||||
# This should only happen with invalid configs, but let's have a nice error message.
|
||||
return [schema(value)]
|
||||
|
||||
@jschema_extractor("automation")
|
||||
@schema_extractor("automation")
|
||||
def validator(value):
|
||||
# hack to get the schema
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return schema
|
||||
|
||||
value = validator_(value)
|
||||
|
@ -92,7 +92,7 @@ void Anova::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t gattc_
|
||||
}
|
||||
if (this->codec_->has_unit()) {
|
||||
this->fahrenheit_ = (this->codec_->unit_ == 'f');
|
||||
ESP_LOGD(TAG, "Anova units is %s", this->fahrenheit_ ? "fahrenheit" : "celcius");
|
||||
ESP_LOGD(TAG, "Anova units is %s", this->fahrenheit_ ? "fahrenheit" : "celsius");
|
||||
this->current_request_++;
|
||||
}
|
||||
this->publish_state();
|
||||
|
@ -270,7 +270,7 @@ APIError APINoiseFrameHelper::try_read_frame_(ParsedFrame *frame) {
|
||||
*
|
||||
* If the handshake is still active when this method returns and a read/write can't take place at
|
||||
* the moment, returns WOULD_BLOCK.
|
||||
* If an error occured, returns that error. Only returns OK if the transport is ready for data
|
||||
* If an error occurred, returns that error. Only returns OK if the transport is ready for data
|
||||
* traffic.
|
||||
*/
|
||||
APIError APINoiseFrameHelper::state_action_() {
|
||||
@ -586,7 +586,7 @@ APIError APINoiseFrameHelper::write_raw_(const struct iovec *iov, int iovcnt) {
|
||||
}
|
||||
return APIError::OK;
|
||||
} else if (sent == -1) {
|
||||
// an error occured
|
||||
// an error occurred
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Socket write failed with errno %d", errno);
|
||||
return APIError::SOCKET_WRITE_FAILED;
|
||||
@ -980,7 +980,7 @@ APIError APIPlaintextFrameHelper::write_raw_(const struct iovec *iov, int iovcnt
|
||||
}
|
||||
return APIError::OK;
|
||||
} else if (sent == -1) {
|
||||
// an error occured
|
||||
// an error occurred
|
||||
state_ = State::FAILED;
|
||||
HELPER_LOG("Socket write failed with errno %d", errno);
|
||||
return APIError::SOCKET_WRITE_FAILED;
|
||||
|
@ -7,7 +7,7 @@ namespace bl0939 {
|
||||
static const char *const TAG = "bl0939";
|
||||
|
||||
// https://www.belling.com.cn/media/file_object/bel_product/BL0939/datasheet/BL0939_V1.2_cn.pdf
|
||||
// (unfortunatelly chinese, but the protocol can be understood with some translation tool)
|
||||
// (unfortunately chinese, but the protocol can be understood with some translation tool)
|
||||
static const uint8_t BL0939_READ_COMMAND = 0x55; // 0x5{A4,A3,A2,A1}
|
||||
static const uint8_t BL0939_FULL_PACKET = 0xAA;
|
||||
static const uint8_t BL0939_PACKET_HEADER = 0x55;
|
||||
|
@ -8,7 +8,7 @@ namespace esphome {
|
||||
namespace bl0939 {
|
||||
|
||||
// https://datasheet.lcsc.com/lcsc/2108071830_BL-Shanghai-Belling-BL0939_C2841044.pdf
|
||||
// (unfortunatelly chinese, but the formulas can be easily understood)
|
||||
// (unfortunately chinese, but the formulas can be easily understood)
|
||||
// Sonoff Dual R3 V2 has the exact same resistor values for the current shunts (RL=1miliOhm)
|
||||
// and for the voltage divider (R1=0.51kOhm, R2=5*390kOhm)
|
||||
// as in the manufacturer's reference circuit, so the same formulas were used here (Vref=1.218V)
|
||||
|
@ -113,6 +113,7 @@ void BLEClient::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t es
|
||||
}
|
||||
case ESP_GATTC_OPEN_EVT: {
|
||||
ESP_LOGV(TAG, "[%s] ESP_GATTC_OPEN_EVT", this->address_str().c_str());
|
||||
this->conn_id = param->open.conn_id;
|
||||
if (param->open.status != ESP_GATT_OK) {
|
||||
ESP_LOGW(TAG, "connect to %s failed, status=%d", this->address_str().c_str(), param->open.status);
|
||||
this->set_states_(espbt::ClientState::IDLE);
|
||||
@ -122,7 +123,10 @@ void BLEClient::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t es
|
||||
}
|
||||
case ESP_GATTC_CONNECT_EVT: {
|
||||
ESP_LOGV(TAG, "[%s] ESP_GATTC_CONNECT_EVT", this->address_str().c_str());
|
||||
this->conn_id = param->connect.conn_id;
|
||||
if (this->conn_id != param->connect.conn_id) {
|
||||
ESP_LOGD(TAG, "[%s] Unexpected conn_id in CONNECT_EVT: param conn=%d, open conn=%d",
|
||||
this->address_str().c_str(), param->connect.conn_id, this->conn_id);
|
||||
}
|
||||
auto ret = esp_ble_gattc_send_mtu_req(this->gattc_if, param->connect.conn_id);
|
||||
if (ret) {
|
||||
ESP_LOGW(TAG, "esp_ble_gattc_send_mtu_req failed, status=%x", ret);
|
||||
@ -183,9 +187,10 @@ void BLEClient::gattc_event_handler(esp_gattc_cb_event_t event, esp_gatt_if_t es
|
||||
descr->uuid.to_string().c_str());
|
||||
break;
|
||||
}
|
||||
uint8_t notify_en = 1;
|
||||
auto status = esp_ble_gattc_write_char_descr(this->gattc_if, this->conn_id, descr->handle, sizeof(notify_en),
|
||||
¬ify_en, ESP_GATT_WRITE_TYPE_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
uint16_t notify_en = 1;
|
||||
auto status =
|
||||
esp_ble_gattc_write_char_descr(this->gattc_if, this->conn_id, descr->handle, sizeof(notify_en),
|
||||
(uint8_t *) ¬ify_en, ESP_GATT_WRITE_TYPE_RSP, ESP_GATT_AUTH_REQ_NONE);
|
||||
if (status) {
|
||||
ESP_LOGW(TAG, "esp_ble_gattc_write_char_descr error, status=%d", status);
|
||||
}
|
||||
|
@ -131,7 +131,7 @@ void CurrentBasedCover::dump_config() {
|
||||
ESP_LOGCONFIG(TAG, " Close Duration: %.1fs", this->close_duration_ / 1e3f);
|
||||
ESP_LOGCONFIG(TAG, "Obstacle Rollback: %.1f%%", this->obstacle_rollback_ * 100);
|
||||
if (this->max_duration_ != UINT32_MAX) {
|
||||
ESP_LOGCONFIG(TAG, "Maximun duration: %.1fs", this->max_duration_ / 1e3f);
|
||||
ESP_LOGCONFIG(TAG, "Maximum duration: %.1fs", this->max_duration_ / 1e3f);
|
||||
}
|
||||
ESP_LOGCONFIG(TAG, "Start sensing delay: %.1fs", this->start_sensing_delay_ / 1e3f);
|
||||
ESP_LOGCONFIG(TAG, "Malfunction detection: %s", YESNO(this->malfunction_detection_));
|
||||
|
32
esphome/components/dac7678/__init__.py
Normal file
32
esphome/components/dac7678/__init__.py
Normal file
@ -0,0 +1,32 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import i2c
|
||||
from esphome.const import CONF_ID
|
||||
|
||||
AUTO_LOAD = ["output"]
|
||||
CODEOWNERS = ["@NickB1"]
|
||||
DEPENDENCIES = ["i2c"]
|
||||
MULTI_CONF = True
|
||||
|
||||
dac7678_ns = cg.esphome_ns.namespace("dac7678")
|
||||
DAC7678Output = dac7678_ns.class_("DAC7678Output", cg.Component, i2c.I2CDevice)
|
||||
CONF_INTERNAL_REFERENCE = "internal_reference"
|
||||
|
||||
CONFIG_SCHEMA = (
|
||||
cv.Schema(
|
||||
{
|
||||
cv.GenerateID(): cv.declare_id(DAC7678Output),
|
||||
cv.Optional(CONF_INTERNAL_REFERENCE, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
.extend(cv.COMPONENT_SCHEMA)
|
||||
.extend(i2c.i2c_device_schema(0x48))
|
||||
)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
await cg.register_component(var, config)
|
||||
cg.add(var.set_internal_reference(config[CONF_INTERNAL_REFERENCE]))
|
||||
await i2c.register_i2c_device(var, config)
|
||||
return var
|
83
esphome/components/dac7678/dac7678_output.cpp
Normal file
83
esphome/components/dac7678/dac7678_output.cpp
Normal file
@ -0,0 +1,83 @@
|
||||
#include "dac7678_output.h"
|
||||
#include "esphome/core/log.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/core/hal.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace dac7678 {
|
||||
|
||||
static const char *const TAG = "dac7678";
|
||||
|
||||
static const uint8_t DAC7678_REG_INPUT_N = 0x00;
|
||||
static const uint8_t DAC7678_REG_SELECT_UPDATE_N = 0x10;
|
||||
static const uint8_t DAC7678_REG_WRITE_N_UPDATE_ALL = 0x20;
|
||||
static const uint8_t DAC7678_REG_WRITE_N_UPDATE_N = 0x30;
|
||||
static const uint8_t DAC7678_REG_POWER = 0x40;
|
||||
static const uint8_t DAC7678_REG_CLEAR_CODE = 0x50;
|
||||
static const uint8_t DAC7678_REG_LDAC = 0x60;
|
||||
static const uint8_t DAC7678_REG_SOFTWARE_RESET = 0x70;
|
||||
static const uint8_t DAC7678_REG_INTERNAL_REF_0 = 0x80;
|
||||
static const uint8_t DAC7678_REG_INTERNAL_REF_1 = 0x90;
|
||||
|
||||
void DAC7678Output::setup() {
|
||||
ESP_LOGCONFIG(TAG, "Setting up DAC7678OutputComponent...");
|
||||
|
||||
ESP_LOGV(TAG, "Resetting device...");
|
||||
|
||||
// Reset device
|
||||
if (!this->write_byte_16(DAC7678_REG_SOFTWARE_RESET, 0x0000)) {
|
||||
ESP_LOGE(TAG, "Reset failed");
|
||||
this->mark_failed();
|
||||
return;
|
||||
} else
|
||||
ESP_LOGV(TAG, "Reset succeeded");
|
||||
|
||||
delayMicroseconds(1000);
|
||||
|
||||
// Set internal reference
|
||||
if (this->internal_reference_) {
|
||||
if (!this->write_byte_16(DAC7678_REG_INTERNAL_REF_0, 1 << 4)) {
|
||||
ESP_LOGE(TAG, "Set internal reference failed");
|
||||
this->mark_failed();
|
||||
return;
|
||||
} else
|
||||
ESP_LOGV(TAG, "Internal reference enabled");
|
||||
}
|
||||
}
|
||||
|
||||
void DAC7678Output::dump_config() {
|
||||
if (this->is_failed()) {
|
||||
ESP_LOGE(TAG, "Setting up DAC7678 failed!");
|
||||
} else
|
||||
ESP_LOGCONFIG(TAG, "DAC7678 initialised");
|
||||
}
|
||||
|
||||
void DAC7678Output::register_channel(DAC7678Channel *channel) {
|
||||
auto c = channel->channel_;
|
||||
this->min_channel_ = std::min(this->min_channel_, c);
|
||||
this->max_channel_ = std::max(this->max_channel_, c);
|
||||
channel->set_parent(this);
|
||||
ESP_LOGV(TAG, "Registered channel: %01u", channel->channel_);
|
||||
}
|
||||
|
||||
void DAC7678Output::set_channel_value_(uint8_t channel, uint16_t value) {
|
||||
if (this->dac_input_reg_[channel] != value) {
|
||||
ESP_LOGV(TAG, "Channel %01u: input_reg=%04u ", channel, value);
|
||||
|
||||
if (!this->write_byte_16(DAC7678_REG_WRITE_N_UPDATE_N | channel, value << 4)) {
|
||||
this->status_set_warning();
|
||||
return;
|
||||
}
|
||||
}
|
||||
this->dac_input_reg_[channel] = value;
|
||||
this->status_clear_warning();
|
||||
}
|
||||
|
||||
void DAC7678Channel::write_state(float state) {
|
||||
const float input_rounded = roundf(state * this->full_scale_);
|
||||
auto input = static_cast<uint16_t>(input_rounded);
|
||||
this->parent_->set_channel_value_(this->channel_, input);
|
||||
}
|
||||
|
||||
} // namespace dac7678
|
||||
} // namespace esphome
|
55
esphome/components/dac7678/dac7678_output.h
Normal file
55
esphome/components/dac7678/dac7678_output.h
Normal file
@ -0,0 +1,55 @@
|
||||
#pragma once
|
||||
|
||||
#include "esphome/core/component.h"
|
||||
#include "esphome/core/helpers.h"
|
||||
#include "esphome/components/output/float_output.h"
|
||||
#include "esphome/components/i2c/i2c.h"
|
||||
|
||||
namespace esphome {
|
||||
namespace dac7678 {
|
||||
|
||||
class DAC7678Output;
|
||||
|
||||
class DAC7678Channel : public output::FloatOutput, public Parented<DAC7678Output> {
|
||||
public:
|
||||
void set_channel(uint8_t channel) { channel_ = channel; }
|
||||
|
||||
protected:
|
||||
friend class DAC7678Output;
|
||||
|
||||
const uint16_t full_scale_ = 0xFFF;
|
||||
|
||||
void write_state(float state) override;
|
||||
|
||||
uint8_t channel_;
|
||||
};
|
||||
|
||||
/// DAC7678 float output component.
|
||||
class DAC7678Output : public Component, public i2c::I2CDevice {
|
||||
public:
|
||||
DAC7678Output() {}
|
||||
|
||||
void register_channel(DAC7678Channel *channel);
|
||||
|
||||
void set_internal_reference(const bool value) { this->internal_reference_ = value; }
|
||||
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
float get_setup_priority() const override { return setup_priority::HARDWARE; }
|
||||
|
||||
protected:
|
||||
friend DAC7678Channel;
|
||||
|
||||
bool internal_reference_;
|
||||
|
||||
void set_channel_value_(uint8_t channel, uint16_t value);
|
||||
|
||||
uint8_t min_channel_{0xFF};
|
||||
uint8_t max_channel_{0x00};
|
||||
uint16_t dac_input_reg_[8] = {
|
||||
0,
|
||||
};
|
||||
};
|
||||
|
||||
} // namespace dac7678
|
||||
} // namespace esphome
|
27
esphome/components/dac7678/output.py
Normal file
27
esphome/components/dac7678/output.py
Normal file
@ -0,0 +1,27 @@
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome.components import output
|
||||
from esphome.const import CONF_CHANNEL, CONF_ID
|
||||
from . import DAC7678Output, dac7678_ns
|
||||
|
||||
DEPENDENCIES = ["dac7678"]
|
||||
|
||||
DAC7678Channel = dac7678_ns.class_("DAC7678Channel", output.FloatOutput)
|
||||
CONF_DAC7678_ID = "dac7678_id"
|
||||
|
||||
CONFIG_SCHEMA = output.FLOAT_OUTPUT_SCHEMA.extend(
|
||||
{
|
||||
cv.Required(CONF_ID): cv.declare_id(DAC7678Channel),
|
||||
cv.GenerateID(CONF_DAC7678_ID): cv.use_id(DAC7678Output),
|
||||
cv.Required(CONF_CHANNEL): cv.int_range(min=0, max=7),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def to_code(config):
|
||||
paren = await cg.get_variable(config[CONF_DAC7678_ID])
|
||||
var = cg.new_Pvariable(config[CONF_ID])
|
||||
cg.add(var.set_channel(config[CONF_CHANNEL]))
|
||||
cg.add(paren.register_channel(var))
|
||||
await output.register_output(var, config)
|
||||
return var
|
@ -134,7 +134,6 @@ void DallasComponent::update() {
|
||||
return;
|
||||
}
|
||||
if (!sensor->check_scratch_pad()) {
|
||||
ESP_LOGW(TAG, "'%s' - Scratch pad checksum invalid!", sensor->get_name().c_str());
|
||||
sensor->publish_state(NAN);
|
||||
this->status_set_warning();
|
||||
return;
|
||||
@ -241,13 +240,29 @@ bool DallasTemperatureSensor::setup_sensor() {
|
||||
return true;
|
||||
}
|
||||
bool DallasTemperatureSensor::check_scratch_pad() {
|
||||
bool chksum_validity = (crc8(this->scratch_pad_, 8) == this->scratch_pad_[8]);
|
||||
bool config_validity = false;
|
||||
|
||||
switch (this->get_address8()[0]) {
|
||||
case DALLAS_MODEL_DS18B20:
|
||||
config_validity = ((this->scratch_pad_[4] & 0x9F) == 0x1F);
|
||||
break;
|
||||
default:
|
||||
config_validity = ((this->scratch_pad_[4] & 0x10) == 0x10);
|
||||
}
|
||||
|
||||
#ifdef ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
ESP_LOGVV(TAG, "Scratch pad: %02X.%02X.%02X.%02X.%02X.%02X.%02X.%02X.%02X (%02X)", this->scratch_pad_[0],
|
||||
this->scratch_pad_[1], this->scratch_pad_[2], this->scratch_pad_[3], this->scratch_pad_[4],
|
||||
this->scratch_pad_[5], this->scratch_pad_[6], this->scratch_pad_[7], this->scratch_pad_[8],
|
||||
crc8(this->scratch_pad_, 8));
|
||||
#endif
|
||||
return crc8(this->scratch_pad_, 8) == this->scratch_pad_[8];
|
||||
if (!chksum_validity) {
|
||||
ESP_LOGW(TAG, "'%s' - Scratch pad checksum invalid!", this->get_name().c_str());
|
||||
} else if (!config_validity) {
|
||||
ESP_LOGW(TAG, "'%s' - Scratch pad config register invalid!", this->get_name().c_str());
|
||||
}
|
||||
return chksum_validity && config_validity;
|
||||
}
|
||||
float DallasTemperatureSensor::get_temp_c() {
|
||||
int16_t temp = (int16_t(this->scratch_pad_[1]) << 11) | (int16_t(this->scratch_pad_[0]) << 3);
|
||||
|
@ -348,7 +348,7 @@ async def dfplayer_random_to_code(config, action_id, template_arg, args):
|
||||
}
|
||||
),
|
||||
)
|
||||
async def dfplyaer_is_playing_to_code(config, condition_id, template_arg, args):
|
||||
async def dfplayer_is_playing_to_code(config, condition_id, template_arg, args):
|
||||
var = cg.new_Pvariable(condition_id, template_arg)
|
||||
await cg.register_parented(var, config[CONF_ID])
|
||||
return var
|
||||
|
@ -132,7 +132,7 @@ class ColorUtil {
|
||||
int16_t plt_r = (int16_t) palette[i * 3 + 0];
|
||||
int16_t plt_g = (int16_t) palette[i * 3 + 1];
|
||||
int16_t plt_b = (int16_t) palette[i * 3 + 2];
|
||||
// Calculate euclidian distance (linear distance in rgb cube).
|
||||
// Calculate euclidean distance (linear distance in rgb cube).
|
||||
x = (uint32_t) std::abs(tgt_r - plt_r);
|
||||
y = (uint32_t) std::abs(tgt_g - plt_g);
|
||||
z = (uint32_t) std::abs(tgt_b - plt_b);
|
||||
|
@ -79,7 +79,7 @@ async def to_code(config):
|
||||
cg.add(var.set_request_interval(config[CONF_REQUEST_INTERVAL].total_milliseconds))
|
||||
cg.add(var.set_receive_timeout(config[CONF_RECEIVE_TIMEOUT].total_milliseconds))
|
||||
|
||||
cg.add_define("DSMR_GAS_MBUS_ID", config[CONF_GAS_MBUS_ID])
|
||||
cg.add_build_flag("-DDSMR_GAS_MBUS_ID=" + str(config[CONF_GAS_MBUS_ID]))
|
||||
|
||||
# DSMR Parser
|
||||
cg.add_library("glmnet/Dsmr", "0.5")
|
||||
|
@ -171,7 +171,7 @@ void Dsmr::receive_telegram_() {
|
||||
this->telegram_[this->bytes_read_] = c;
|
||||
this->bytes_read_++;
|
||||
|
||||
// Check for a footer, i.e. exlamation mark, followed by a hex checksum.
|
||||
// Check for a footer, i.e. exclamation mark, followed by a hex checksum.
|
||||
if (c == '!') {
|
||||
ESP_LOGV(TAG, "Footer of telegram found");
|
||||
this->footer_found_ = true;
|
||||
|
@ -199,7 +199,7 @@ void ENS210Component::update() {
|
||||
});
|
||||
}
|
||||
|
||||
// Extracts measurement 'data' and 'status' from a 'val' obtained from measurment.
|
||||
// Extracts measurement 'data' and 'status' from a 'val' obtained from measurement.
|
||||
void ENS210Component::extract_measurement_(uint32_t val, int *data, int *status) {
|
||||
*data = (val >> 0) & 0xffff;
|
||||
int valid = (val >> 16) & 0x1;
|
||||
|
@ -521,6 +521,33 @@ ESP32_BOARD_PINS = {
|
||||
},
|
||||
"lolin32": {"LED": 5},
|
||||
"lolin32_lite": {"LED": 22},
|
||||
"lolin_c3_mini": {
|
||||
"TX": 21,
|
||||
"RX": 20,
|
||||
"SDA": 8,
|
||||
"SCL": 10,
|
||||
"SS": 5,
|
||||
"MOSI": 4,
|
||||
"MISO": 3,
|
||||
"SCK": 2,
|
||||
"A0": 0,
|
||||
"A1": 1,
|
||||
"A2": 2,
|
||||
"A3": 3,
|
||||
"A4": 4,
|
||||
"A5": 5,
|
||||
"D0": 1,
|
||||
"D1": 10,
|
||||
"D2": 8,
|
||||
"D3": 7,
|
||||
"D4": 6,
|
||||
"D5": 2,
|
||||
"D6": 3,
|
||||
"D7": 4,
|
||||
"D8": 5,
|
||||
"LED": 7,
|
||||
"BUTTON": 9,
|
||||
},
|
||||
"lolin_d32": {"LED": 5, "_VBAT": 35},
|
||||
"lolin_d32_pro": {"LED": 5, "_VBAT": 35},
|
||||
"lopy": {
|
||||
@ -1026,6 +1053,7 @@ BOARD_TO_VARIANT = {
|
||||
"labplus_mpython": VARIANT_ESP32,
|
||||
"lolin32_lite": VARIANT_ESP32,
|
||||
"lolin32": VARIANT_ESP32,
|
||||
"lolin_c3_mini": VARIANT_ESP32C3,
|
||||
"lolin_d32_pro": VARIANT_ESP32,
|
||||
"lolin_d32": VARIANT_ESP32,
|
||||
"lopy4": VARIANT_ESP32,
|
||||
|
@ -36,6 +36,7 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
|
||||
save.key = key;
|
||||
save.data.assign(data, data + len);
|
||||
s_pending_save.emplace_back(save);
|
||||
ESP_LOGVV(TAG, "s_pending_save: key: %s, len: %d", key.c_str(), len);
|
||||
return true;
|
||||
}
|
||||
bool load(uint8_t *data, size_t len) override {
|
||||
@ -65,6 +66,8 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
|
||||
if (err != 0) {
|
||||
ESP_LOGV(TAG, "nvs_get_blob('%s') failed: %s", key.c_str(), esp_err_to_name(err));
|
||||
return false;
|
||||
} else {
|
||||
ESP_LOGVV(TAG, "nvs_get_blob: key: %s, len: %d", key.c_str(), len);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@ -73,7 +76,6 @@ class ESP32PreferenceBackend : public ESPPreferenceBackend {
|
||||
class ESP32Preferences : public ESPPreferences {
|
||||
public:
|
||||
uint32_t nvs_handle;
|
||||
uint32_t current_offset = 0;
|
||||
|
||||
void open() {
|
||||
nvs_flash_init();
|
||||
@ -97,12 +99,9 @@ class ESP32Preferences : public ESPPreferences {
|
||||
ESPPreferenceObject make_preference(size_t length, uint32_t type) override {
|
||||
auto *pref = new ESP32PreferenceBackend(); // NOLINT(cppcoreguidelines-owning-memory)
|
||||
pref->nvs_handle = nvs_handle;
|
||||
current_offset += length;
|
||||
|
||||
uint32_t keyval = current_offset ^ type;
|
||||
char keybuf[16];
|
||||
snprintf(keybuf, sizeof(keybuf), "%d", keyval);
|
||||
pref->key = keybuf; // copied to std::string
|
||||
uint32_t keyval = type;
|
||||
pref->key = str_sprintf("%u", keyval);
|
||||
|
||||
return ESPPreferenceObject(pref);
|
||||
}
|
||||
@ -121,6 +120,7 @@ class ESP32Preferences : public ESPPreferences {
|
||||
ESP_LOGVV(TAG, "Checking if NVS data %s has changed", save.key.c_str());
|
||||
if (is_changed(nvs_handle, save)) {
|
||||
esp_err_t err = nvs_set_blob(nvs_handle, save.key.c_str(), save.data.data(), save.data.size());
|
||||
ESP_LOGV(TAG, "sync: key: %s, len: %d", save.key.c_str(), save.data.size());
|
||||
if (err != 0) {
|
||||
ESP_LOGV(TAG, "nvs_set_blob('%s', len=%u) failed: %s", save.key.c_str(), save.data.size(),
|
||||
esp_err_to_name(err));
|
||||
|
@ -317,7 +317,7 @@ void ESP32Camera::update_camera_parameters() {
|
||||
s->set_gainceiling(s, (gainceiling_t) this->agc_gain_ceiling_);
|
||||
/* update white balance mode */
|
||||
s->set_wb_mode(s, (int) this->wb_mode_); // 0 to 4
|
||||
/* update test patern */
|
||||
/* update test pattern */
|
||||
s->set_colorbar(s, this->test_pattern_);
|
||||
}
|
||||
|
||||
|
@ -44,7 +44,14 @@ template<typename T> class RestoringGlobalsComponent : public Component {
|
||||
|
||||
float get_setup_priority() const override { return setup_priority::HARDWARE; }
|
||||
|
||||
void loop() override {
|
||||
void loop() override { store_value_(); }
|
||||
|
||||
void on_shutdown() override { store_value_(); }
|
||||
|
||||
void set_name_hash(uint32_t name_hash) { this->name_hash_ = name_hash; }
|
||||
|
||||
protected:
|
||||
void store_value_() {
|
||||
int diff = memcmp(&this->value_, &this->prev_value_, sizeof(T));
|
||||
if (diff != 0) {
|
||||
this->rtc_.save(&this->value_);
|
||||
@ -52,9 +59,6 @@ template<typename T> class RestoringGlobalsComponent : public Component {
|
||||
}
|
||||
}
|
||||
|
||||
void set_name_hash(uint32_t name_hash) { this->name_hash_ = name_hash; }
|
||||
|
||||
protected:
|
||||
T value_{};
|
||||
T prev_value_{};
|
||||
uint32_t name_hash_{};
|
||||
|
@ -118,7 +118,7 @@ def _relocate_fields_to_subfolder(config, subfolder, subschema):
|
||||
fields = [k.schema for k in subschema.schema.keys()]
|
||||
fields.remove(CONF_ID)
|
||||
if subfolder in config:
|
||||
# Ensure no ambigious fields in base of config
|
||||
# Ensure no ambiguous fields in base of config
|
||||
for f in fields:
|
||||
if f in config:
|
||||
raise cv.Invalid(
|
||||
|
@ -224,7 +224,7 @@ void ArduinoI2CBus::recover_() {
|
||||
digitalWrite(sda_pin_, LOW); // NOLINT
|
||||
|
||||
// By now, any stuck device ought to have sent all remaining bits of its
|
||||
// transation, meaning that it should have freed up the SDA line, resulting
|
||||
// transaction, meaning that it should have freed up the SDA line, resulting
|
||||
// in SDA being pulled up.
|
||||
if (digitalRead(sda_pin_) == LOW) { // NOLINT
|
||||
ESP_LOGE(TAG, "Recovery failed: SDA is held LOW after clock pulse cycle");
|
||||
|
@ -285,7 +285,7 @@ void IDFI2CBus::recover_() {
|
||||
}
|
||||
|
||||
// By now, any stuck device ought to have sent all remaining bits of its
|
||||
// transation, meaning that it should have freed up the SDA line, resulting
|
||||
// transaction, meaning that it should have freed up the SDA line, resulting
|
||||
// in SDA being pulled up.
|
||||
if (gpio_get_level(sda_pin) == 0) {
|
||||
ESP_LOGE(TAG, "Recovery failed: SDA is held LOW after clock pulse cycle");
|
||||
|
@ -109,6 +109,10 @@ void I2SAudioMediaPlayer::setup() {
|
||||
this->audio_ = make_unique<Audio>(false);
|
||||
this->audio_->setPinout(this->bclk_pin_, this->lrclk_pin_, this->dout_pin_);
|
||||
this->audio_->forceMono(this->external_dac_channels_ == 1);
|
||||
if (this->mute_pin_ != nullptr) {
|
||||
this->mute_pin_->setup();
|
||||
this->mute_pin_->digital_write(false);
|
||||
}
|
||||
}
|
||||
this->state = media_player::MEDIA_PLAYER_STATE_IDLE;
|
||||
}
|
||||
|
@ -10,14 +10,13 @@ static const char *const TAG = "integration";
|
||||
|
||||
void IntegrationSensor::setup() {
|
||||
if (this->restore_) {
|
||||
this->rtc_ = global_preferences->make_preference<float>(this->get_object_id_hash());
|
||||
this->pref_ = global_preferences->make_preference<float>(this->get_object_id_hash());
|
||||
float preference_value = 0;
|
||||
this->rtc_.load(&preference_value);
|
||||
this->pref_.load(&preference_value);
|
||||
this->result_ = preference_value;
|
||||
}
|
||||
|
||||
this->last_update_ = millis();
|
||||
this->last_save_ = this->last_update_;
|
||||
|
||||
this->publish_and_save_(this->result_);
|
||||
this->sensor_->add_on_state_callback([this](float state) { this->process_sensor_value_(state); });
|
||||
|
@ -28,7 +28,6 @@ class IntegrationSensor : public sensor::Sensor, public Component {
|
||||
void setup() override;
|
||||
void dump_config() override;
|
||||
float get_setup_priority() const override { return setup_priority::DATA; }
|
||||
void set_min_save_interval(uint32_t min_interval) { this->min_save_interval_ = min_interval; }
|
||||
void set_sensor(Sensor *sensor) { sensor_ = sensor; }
|
||||
void set_time(IntegrationSensorTime time) { time_ = time; }
|
||||
void set_method(IntegrationMethod method) { method_ = method; }
|
||||
@ -56,22 +55,18 @@ class IntegrationSensor : public sensor::Sensor, public Component {
|
||||
void publish_and_save_(double result) {
|
||||
this->result_ = result;
|
||||
this->publish_state(result);
|
||||
float result_f = result;
|
||||
const uint32_t now = millis();
|
||||
if (now - this->last_save_ < this->min_save_interval_)
|
||||
return;
|
||||
this->last_save_ = now;
|
||||
this->rtc_.save(&result_f);
|
||||
if (this->restore_) {
|
||||
float result_f = result;
|
||||
this->pref_.save(&result_f);
|
||||
}
|
||||
}
|
||||
|
||||
sensor::Sensor *sensor_;
|
||||
IntegrationSensorTime time_;
|
||||
IntegrationMethod method_;
|
||||
bool restore_;
|
||||
ESPPreferenceObject rtc_;
|
||||
ESPPreferenceObject pref_;
|
||||
|
||||
uint32_t last_save_{0};
|
||||
uint32_t min_save_interval_{0};
|
||||
uint32_t last_update_;
|
||||
double result_{0.0f};
|
||||
float last_value_{0.0f};
|
||||
|
@ -35,7 +35,6 @@ INTEGRATION_METHODS = {
|
||||
|
||||
CONF_TIME_UNIT = "time_unit"
|
||||
CONF_INTEGRATION_METHOD = "integration_method"
|
||||
CONF_MIN_SAVE_INTERVAL = "min_save_interval"
|
||||
|
||||
|
||||
def inherit_unit_of_measurement(uom, config):
|
||||
@ -58,9 +57,9 @@ CONFIG_SCHEMA = sensor.SENSOR_SCHEMA.extend(
|
||||
INTEGRATION_METHODS, lower=True
|
||||
),
|
||||
cv.Optional(CONF_RESTORE, default=False): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_MIN_SAVE_INTERVAL, default="0s"
|
||||
): cv.positive_time_period_milliseconds,
|
||||
cv.Optional("min_save_interval"): cv.invalid(
|
||||
"min_save_interval was removed in 2022.8.0. Please use the `preferences` -> `flash_write_interval` to adjust."
|
||||
),
|
||||
}
|
||||
).extend(cv.COMPONENT_SCHEMA)
|
||||
|
||||
@ -97,7 +96,6 @@ async def to_code(config):
|
||||
cg.add(var.set_time(config[CONF_TIME_UNIT]))
|
||||
cg.add(var.set_method(config[CONF_INTEGRATION_METHOD]))
|
||||
cg.add(var.set_restore(config[CONF_RESTORE]))
|
||||
cg.add(var.set_min_save_interval(config[CONF_MIN_SAVE_INTERVAL]))
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
|
@ -1,4 +1,4 @@
|
||||
from esphome.jsonschema import jschema_extractor
|
||||
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
|
||||
import esphome.codegen as cg
|
||||
import esphome.config_validation as cv
|
||||
from esphome import automation
|
||||
@ -479,11 +479,11 @@ async def addressable_flicker_effect_to_code(config, effect_id):
|
||||
|
||||
|
||||
def validate_effects(allowed_effects):
|
||||
@jschema_extractor("effects")
|
||||
@schema_extractor("effects")
|
||||
def validator(value):
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return (allowed_effects, EFFECTS_REGISTRY)
|
||||
|
||||
value = cv.validate_registry("effect", EFFECTS_REGISTRY)(value)
|
||||
errors = []
|
||||
names = set()
|
||||
|
@ -203,7 +203,7 @@ class LightColorValues {
|
||||
*color_temperature =
|
||||
(this->color_temperature_ - color_temperature_cw) / (color_temperature_ww - color_temperature_cw);
|
||||
*white_brightness = gamma_correct(this->state_ * this->brightness_ * white_level, gamma);
|
||||
} else { // Probably wont get here but put this here anyway.
|
||||
} else { // Probably won't get here but put this here anyway.
|
||||
*white_brightness = 0;
|
||||
}
|
||||
}
|
||||
|
@ -121,7 +121,7 @@ void LightJSONSchema::parse_color_json(LightState &state, LightCall &call, JsonO
|
||||
call.set_cold_white(float(color["c"]) / 255.0f);
|
||||
}
|
||||
if (color.containsKey("w")) {
|
||||
// the HA scheme is ambigious here, the same key is used for white channel in RGBW and warm
|
||||
// the HA scheme is ambiguous here, the same key is used for white channel in RGBW and warm
|
||||
// white channel in RGBWW.
|
||||
if (color.containsKey("c")) {
|
||||
call.set_warm_white(float(color["w"]) / 255.0f);
|
||||
|
@ -3,7 +3,7 @@ import esphome.config_validation as cv
|
||||
import esphome.codegen as cg
|
||||
|
||||
from esphome.automation import maybe_simple_id
|
||||
from esphome.const import CONF_ID
|
||||
from esphome.const import CONF_ID, CONF_ON_STATE, CONF_TRIGGER_ID
|
||||
from esphome.core import CORE
|
||||
from esphome.coroutine import coroutine_with_priority
|
||||
from esphome.cpp_helpers import setup_entity
|
||||
@ -20,6 +20,9 @@ MediaPlayer = media_player_ns.class_("MediaPlayer")
|
||||
PlayAction = media_player_ns.class_(
|
||||
"PlayAction", automation.Action, cg.Parented.template(MediaPlayer)
|
||||
)
|
||||
PlayMediaAction = media_player_ns.class_(
|
||||
"PlayMediaAction", automation.Action, cg.Parented.template(MediaPlayer)
|
||||
)
|
||||
ToggleAction = media_player_ns.class_(
|
||||
"ToggleAction", automation.Action, cg.Parented.template(MediaPlayer)
|
||||
)
|
||||
@ -39,11 +42,35 @@ VolumeSetAction = media_player_ns.class_(
|
||||
"VolumeSetAction", automation.Action, cg.Parented.template(MediaPlayer)
|
||||
)
|
||||
|
||||
|
||||
CONF_VOLUME = "volume"
|
||||
CONF_ON_IDLE = "on_idle"
|
||||
CONF_ON_PLAY = "on_play"
|
||||
CONF_ON_PAUSE = "on_pause"
|
||||
CONF_MEDIA_URL = "media_url"
|
||||
|
||||
StateTrigger = media_player_ns.class_("StateTrigger", automation.Trigger.template())
|
||||
IdleTrigger = media_player_ns.class_("IdleTrigger", automation.Trigger.template())
|
||||
PlayTrigger = media_player_ns.class_("PlayTrigger", automation.Trigger.template())
|
||||
PauseTrigger = media_player_ns.class_("PauseTrigger", automation.Trigger.template())
|
||||
IsIdleCondition = media_player_ns.class_("IsIdleCondition", automation.Condition)
|
||||
IsPlayingCondition = media_player_ns.class_("IsPlayingCondition", automation.Condition)
|
||||
|
||||
|
||||
async def setup_media_player_core_(var, config):
|
||||
await setup_entity(var, config)
|
||||
for conf in config.get(CONF_ON_STATE, []):
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
await automation.build_automation(trigger, [], conf)
|
||||
for conf in config.get(CONF_ON_IDLE, []):
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
await automation.build_automation(trigger, [], conf)
|
||||
for conf in config.get(CONF_ON_PLAY, []):
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
await automation.build_automation(trigger, [], conf)
|
||||
for conf in config.get(CONF_ON_PAUSE, []):
|
||||
trigger = cg.new_Pvariable(conf[CONF_TRIGGER_ID], var)
|
||||
await automation.build_automation(trigger, [], conf)
|
||||
|
||||
|
||||
async def register_media_player(var, config):
|
||||
@ -53,12 +80,54 @@ async def register_media_player(var, config):
|
||||
await setup_media_player_core_(var, config)
|
||||
|
||||
|
||||
MEDIA_PLAYER_SCHEMA = cv.ENTITY_BASE_SCHEMA.extend(cv.Schema({}))
|
||||
MEDIA_PLAYER_SCHEMA = cv.ENTITY_BASE_SCHEMA.extend(
|
||||
{
|
||||
cv.Optional(CONF_ON_STATE): automation.validate_automation(
|
||||
{
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(StateTrigger),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_ON_IDLE): automation.validate_automation(
|
||||
{
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(IdleTrigger),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_ON_PLAY): automation.validate_automation(
|
||||
{
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(PlayTrigger),
|
||||
}
|
||||
),
|
||||
cv.Optional(CONF_ON_PAUSE): automation.validate_automation(
|
||||
{
|
||||
cv.GenerateID(CONF_TRIGGER_ID): cv.declare_id(PauseTrigger),
|
||||
}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
MEDIA_PLAYER_ACTION_SCHEMA = maybe_simple_id({cv.GenerateID(): cv.use_id(MediaPlayer)})
|
||||
|
||||
|
||||
@automation.register_action(
|
||||
"media_player.play_media",
|
||||
PlayMediaAction,
|
||||
cv.maybe_simple_value(
|
||||
{
|
||||
cv.GenerateID(): cv.use_id(MediaPlayer),
|
||||
cv.Required(CONF_MEDIA_URL): cv.templatable(cv.url),
|
||||
},
|
||||
key=CONF_MEDIA_URL,
|
||||
),
|
||||
)
|
||||
async def media_player_play_media_action(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
await cg.register_parented(var, config[CONF_ID])
|
||||
media_url = await cg.templatable(config[CONF_MEDIA_URL], args, cg.std_string)
|
||||
cg.add(var.set_media_url(media_url))
|
||||
return var
|
||||
|
||||
|
||||
@automation.register_action("media_player.play", PlayAction, MEDIA_PLAYER_ACTION_SCHEMA)
|
||||
@automation.register_action(
|
||||
"media_player.toggle", ToggleAction, MEDIA_PLAYER_ACTION_SCHEMA
|
||||
@ -73,6 +142,12 @@ MEDIA_PLAYER_ACTION_SCHEMA = maybe_simple_id({cv.GenerateID(): cv.use_id(MediaPl
|
||||
@automation.register_action(
|
||||
"media_player.volume_down", VolumeDownAction, MEDIA_PLAYER_ACTION_SCHEMA
|
||||
)
|
||||
@automation.register_condition(
|
||||
"media_player.is_idle", IsIdleCondition, MEDIA_PLAYER_ACTION_SCHEMA
|
||||
)
|
||||
@automation.register_condition(
|
||||
"media_player.is_playing", IsPlayingCondition, MEDIA_PLAYER_ACTION_SCHEMA
|
||||
)
|
||||
async def media_player_action(config, action_id, template_arg, args):
|
||||
var = cg.new_Pvariable(action_id, template_arg)
|
||||
await cg.register_parented(var, config[CONF_ID])
|
||||
|
@ -14,6 +14,17 @@ namespace media_player {
|
||||
} \
|
||||
};
|
||||
|
||||
#define MEDIA_PLAYER_SIMPLE_STATE_TRIGGER(TRIGGER_CLASS, TRIGGER_STATE) \
|
||||
class TRIGGER_CLASS : public Trigger<> { \
|
||||
public: \
|
||||
explicit TRIGGER_CLASS(MediaPlayer *player) { \
|
||||
player->add_on_state_callback([this, player]() { \
|
||||
if (player->state == MediaPlayerState::MEDIA_PLAYER_STATE_##TRIGGER_STATE) \
|
||||
this->trigger(); \
|
||||
}); \
|
||||
} \
|
||||
};
|
||||
|
||||
MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(PlayAction, PLAY)
|
||||
MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(PauseAction, PAUSE)
|
||||
MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(StopAction, STOP)
|
||||
@ -21,10 +32,36 @@ MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(ToggleAction, TOGGLE)
|
||||
MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(VolumeUpAction, VOLUME_UP)
|
||||
MEDIA_PLAYER_SIMPLE_COMMAND_ACTION(VolumeDownAction, VOLUME_DOWN)
|
||||
|
||||
template<typename... Ts> class PlayMediaAction : public Action<Ts...>, public Parented<MediaPlayer> {
|
||||
TEMPLATABLE_VALUE(std::string, media_url)
|
||||
void play(Ts... x) override { this->parent_->make_call().set_media_url(this->media_url_.value(x...)).perform(); }
|
||||
};
|
||||
|
||||
template<typename... Ts> class VolumeSetAction : public Action<Ts...>, public Parented<MediaPlayer> {
|
||||
TEMPLATABLE_VALUE(float, volume)
|
||||
void play(Ts... x) override { this->parent_->make_call().set_volume(this->volume_.value(x...)).perform(); }
|
||||
};
|
||||
|
||||
class StateTrigger : public Trigger<> {
|
||||
public:
|
||||
explicit StateTrigger(MediaPlayer *player) {
|
||||
player->add_on_state_callback([this]() { this->trigger(); });
|
||||
}
|
||||
};
|
||||
|
||||
MEDIA_PLAYER_SIMPLE_STATE_TRIGGER(IdleTrigger, IDLE)
|
||||
MEDIA_PLAYER_SIMPLE_STATE_TRIGGER(PlayTrigger, PLAYING)
|
||||
MEDIA_PLAYER_SIMPLE_STATE_TRIGGER(PauseTrigger, PAUSED)
|
||||
|
||||
template<typename... Ts> class IsIdleCondition : public Condition<Ts...>, public Parented<MediaPlayer> {
|
||||
public:
|
||||
bool check(Ts... x) override { return this->parent_->state == MediaPlayerState::MEDIA_PLAYER_STATE_IDLE; }
|
||||
};
|
||||
|
||||
template<typename... Ts> class IsPlayingCondition : public Condition<Ts...>, public Parented<MediaPlayer> {
|
||||
public:
|
||||
bool check(Ts... x) override { return this->parent_->state == MediaPlayerState::MEDIA_PLAYER_STATE_PLAYING; }
|
||||
};
|
||||
|
||||
} // namespace media_player
|
||||
} // namespace esphome
|
||||
|
@ -76,7 +76,12 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
|
||||
// installed, but wait, there is the CRC, and if we get a hit there is a good
|
||||
// chance that this is a complete message ... admittedly there is a small chance is
|
||||
// isn't but that is quite small given the purpose of the CRC in the first place
|
||||
data_len = at;
|
||||
|
||||
// Fewer than 2 bytes can't calc CRC
|
||||
if (at < 2)
|
||||
return true;
|
||||
|
||||
data_len = at - 2;
|
||||
data_offset = 1;
|
||||
|
||||
uint16_t computed_crc = crc16(raw, data_offset + data_len);
|
||||
@ -95,7 +100,7 @@ bool Modbus::parse_modbus_byte_(uint8_t byte) {
|
||||
}
|
||||
|
||||
// Error ( msb indicates error )
|
||||
// response format: Byte[0] = device address, Byte[1] function code | 0x80 , Byte[2] excpetion code, Byte[3-4] crc
|
||||
// response format: Byte[0] = device address, Byte[1] function code | 0x80 , Byte[2] exception code, Byte[3-4] crc
|
||||
if ((function_code & 0x80) == 0x80) {
|
||||
data_offset = 2;
|
||||
data_len = 1;
|
||||
|
@ -70,7 +70,7 @@ void ModbusController::on_modbus_error(uint8_t function_code, uint8_t exception_
|
||||
auto ¤t_command = this->command_queue_.front();
|
||||
if (current_command != nullptr) {
|
||||
ESP_LOGE(TAG,
|
||||
"Modbus error - last command: function code=0x%X register adddress = 0x%X "
|
||||
"Modbus error - last command: function code=0x%X register address = 0x%X "
|
||||
"registers count=%d "
|
||||
"payload size=%zu",
|
||||
function_code, current_command->register_address, current_command->register_count,
|
||||
@ -105,7 +105,7 @@ void ModbusController::on_register_data(ModbusRegisterType register_type, uint16
|
||||
}
|
||||
|
||||
void ModbusController::queue_command(const ModbusCommandItem &command) {
|
||||
// check if this commmand is already qeued.
|
||||
// check if this command is already qeued.
|
||||
// not very effective but the queue is never really large
|
||||
for (auto &item : command_queue_) {
|
||||
if (item->register_address == command.register_address && item->register_count == command.register_count &&
|
||||
@ -299,7 +299,7 @@ void ModbusController::loop() {
|
||||
incoming_queue_.pop();
|
||||
|
||||
} else {
|
||||
// all messages processed send pending commmands
|
||||
// all messages processed send pending commands
|
||||
send_next_command_();
|
||||
}
|
||||
}
|
||||
|
@ -185,8 +185,8 @@ inline bool coil_from_vector(int coil, const std::vector<uint8_t> &data) {
|
||||
|
||||
/** Extract bits from value and shift right according to the bitmask
|
||||
* if the bitmask is 0x00F0 we want the values frrom bit 5 - 8.
|
||||
* the result is then shifted right by the postion if the first right set bit in the mask
|
||||
* Usefull for modbus data where more than one value is packed in a 16 bit register
|
||||
* the result is then shifted right by the position if the first right set bit in the mask
|
||||
* Useful for modbus data where more than one value is packed in a 16 bit register
|
||||
* Example: on Epever the "Length of night" register 0x9065 encodes values of the whole night length of time as
|
||||
* D15 - D8 = hour, D7 - D0 = minute
|
||||
* To get the hours use mask 0xFF00 and 0x00FF for the minute
|
||||
@ -447,7 +447,7 @@ class ModbusController : public PollingComponent, public modbus::ModbusDevice {
|
||||
void dump_sensors_();
|
||||
/// Collection of all sensors for this component
|
||||
SensorSet sensorset_;
|
||||
/// Continous range of modbus registers
|
||||
/// Continuous range of modbus registers
|
||||
std::vector<RegisterRange> register_ranges_;
|
||||
/// Hold the pending requests to be sent
|
||||
std::list<std::unique_ptr<ModbusCommandItem>> command_queue_;
|
||||
|
@ -68,7 +68,7 @@ CONFIG_SCHEMA = cv.All(
|
||||
),
|
||||
cv.Optional(CONF_VALUE_TYPE, default="U_WORD"): cv.enum(SENSOR_VALUE_TYPE),
|
||||
cv.Optional(CONF_WRITE_LAMBDA): cv.returning_lambda,
|
||||
# 24 bits are the maximum value for fp32 before precison is lost
|
||||
# 24 bits are the maximum value for fp32 before precision is lost
|
||||
# 0x00FFFFFF = 16777215
|
||||
cv.Optional(CONF_MAX_VALUE, default=16777215.0): cv.float_,
|
||||
cv.Optional(CONF_MIN_VALUE, default=-16777215.0): cv.float_,
|
||||
|
@ -187,7 +187,7 @@ void MQTTClientComponent::start_connect_() {
|
||||
|
||||
this->mqtt_backend_.set_credentials(username, password);
|
||||
|
||||
this->mqtt_backend_.set_server((uint32_t) this->ip_, this->credentials_.port);
|
||||
this->mqtt_backend_.set_server(this->credentials_.address.c_str(), this->credentials_.port);
|
||||
if (!this->last_will_.topic.empty()) {
|
||||
this->mqtt_backend_.set_will(this->last_will_.topic.c_str(), this->last_will_.qos, this->last_will_.retain,
|
||||
this->last_will_.payload.c_str());
|
||||
|
@ -12,7 +12,7 @@ NdefRecord::NdefRecord(std::vector<uint8_t> payload_data) {
|
||||
std::vector<uint8_t> NdefRecord::encode(bool first, bool last) {
|
||||
std::vector<uint8_t> data;
|
||||
|
||||
// Get encoded payload, this is overriden by more specific record classes
|
||||
// Get encoded payload, this is overridden by more specific record classes
|
||||
std::vector<uint8_t> payload_data = get_encoded_payload();
|
||||
|
||||
size_t payload_length = payload_data.size();
|
||||
|
@ -6,8 +6,17 @@ namespace number {
|
||||
|
||||
static const char *const TAG = "number.automation";
|
||||
|
||||
union convert {
|
||||
float from;
|
||||
uint32_t to;
|
||||
};
|
||||
|
||||
void ValueRangeTrigger::setup() {
|
||||
this->rtc_ = global_preferences->make_preference<bool>(this->parent_->get_object_id_hash());
|
||||
float local_min = this->min_.value(0.0);
|
||||
float local_max = this->max_.value(0.0);
|
||||
convert hash = {.from = (local_max - local_min)};
|
||||
uint32_t myhash = hash.to ^ this->parent_->get_object_id_hash();
|
||||
this->rtc_ = global_preferences->make_preference<bool>(myhash);
|
||||
bool initial_state;
|
||||
if (this->rtc_.load(&initial_state)) {
|
||||
this->previous_in_range_ = initial_state;
|
||||
|
@ -768,7 +768,7 @@ uint8_t Pipsolar::check_incoming_length_(uint8_t length) {
|
||||
|
||||
uint8_t Pipsolar::check_incoming_crc_() {
|
||||
uint16_t crc16;
|
||||
crc16 = calc_crc_(read_buffer_, read_pos_ - 3);
|
||||
crc16 = cal_crc_half_(read_buffer_, read_pos_ - 3);
|
||||
ESP_LOGD(TAG, "checking crc on incoming message");
|
||||
if (((uint8_t)((crc16) >> 8)) == read_buffer_[read_pos_ - 3] &&
|
||||
((uint8_t)((crc16) &0xff)) == read_buffer_[read_pos_ - 2]) {
|
||||
@ -797,7 +797,7 @@ uint8_t Pipsolar::send_next_command_() {
|
||||
this->command_start_millis_ = millis();
|
||||
this->empty_uart_buffer_();
|
||||
this->read_pos_ = 0;
|
||||
crc16 = calc_crc_(byte_command, length);
|
||||
crc16 = cal_crc_half_(byte_command, length);
|
||||
this->write_str(command);
|
||||
// checksum
|
||||
this->write(((uint8_t)((crc16) >> 8))); // highbyte
|
||||
@ -824,8 +824,8 @@ void Pipsolar::send_next_poll_() {
|
||||
this->command_start_millis_ = millis();
|
||||
this->empty_uart_buffer_();
|
||||
this->read_pos_ = 0;
|
||||
crc16 = calc_crc_(this->used_polling_commands_[this->last_polling_command_].command,
|
||||
this->used_polling_commands_[this->last_polling_command_].length);
|
||||
crc16 = cal_crc_half_(this->used_polling_commands_[this->last_polling_command_].command,
|
||||
this->used_polling_commands_[this->last_polling_command_].length);
|
||||
this->write_array(this->used_polling_commands_[this->last_polling_command_].command,
|
||||
this->used_polling_commands_[this->last_polling_command_].length);
|
||||
// checksum
|
||||
@ -892,29 +892,41 @@ void Pipsolar::add_polling_command_(const char *command, ENUMPollingCommand poll
|
||||
}
|
||||
}
|
||||
|
||||
uint16_t Pipsolar::calc_crc_(uint8_t *msg, int n) {
|
||||
// Initial value. xmodem uses 0xFFFF but this example
|
||||
// requires an initial value of zero.
|
||||
uint16_t x = 0;
|
||||
while (n--) {
|
||||
x = crc_xmodem_update_(x, (uint16_t) *msg++);
|
||||
}
|
||||
return (x);
|
||||
}
|
||||
uint16_t Pipsolar::cal_crc_half_(uint8_t *msg, uint8_t len) {
|
||||
uint16_t crc;
|
||||
|
||||
// See bottom of this page: http://www.nongnu.org/avr-libc/user-manual/group__util__crc.html
|
||||
// Polynomial: x^16 + x^12 + x^5 + 1 (0x1021)
|
||||
uint16_t Pipsolar::crc_xmodem_update_(uint16_t crc, uint8_t data) {
|
||||
int i;
|
||||
crc = crc ^ ((uint16_t) data << 8);
|
||||
for (i = 0; i < 8; i++) {
|
||||
if (crc & 0x8000) {
|
||||
crc = (crc << 1) ^ 0x1021; //(polynomial = 0x1021)
|
||||
} else {
|
||||
crc <<= 1;
|
||||
}
|
||||
uint8_t da;
|
||||
uint8_t *ptr;
|
||||
uint8_t b_crc_hign;
|
||||
uint8_t b_crc_low;
|
||||
|
||||
uint16_t crc_ta[16] = {0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50a5, 0x60c6, 0x70e7,
|
||||
0x8108, 0x9129, 0xa14a, 0xb16b, 0xc18c, 0xd1ad, 0xe1ce, 0xf1ef};
|
||||
|
||||
ptr = msg;
|
||||
crc = 0;
|
||||
|
||||
while (len-- != 0) {
|
||||
da = ((uint8_t)(crc >> 8)) >> 4;
|
||||
crc <<= 4;
|
||||
crc ^= crc_ta[da ^ (*ptr >> 4)];
|
||||
da = ((uint8_t)(crc >> 8)) >> 4;
|
||||
crc <<= 4;
|
||||
crc ^= crc_ta[da ^ (*ptr & 0x0f)];
|
||||
ptr++;
|
||||
}
|
||||
return crc;
|
||||
|
||||
b_crc_low = crc;
|
||||
b_crc_hign = (uint8_t)(crc >> 8);
|
||||
|
||||
if (b_crc_low == 0x28 || b_crc_low == 0x0d || b_crc_low == 0x0a)
|
||||
b_crc_low++;
|
||||
if (b_crc_hign == 0x28 || b_crc_hign == 0x0d || b_crc_hign == 0x0a)
|
||||
b_crc_hign++;
|
||||
|
||||
crc = ((uint16_t) b_crc_hign) << 8;
|
||||
crc += b_crc_low;
|
||||
return (crc);
|
||||
}
|
||||
|
||||
} // namespace pipsolar
|
||||
|
@ -193,8 +193,7 @@ class Pipsolar : public uart::UARTDevice, public PollingComponent {
|
||||
void empty_uart_buffer_();
|
||||
uint8_t check_incoming_crc_();
|
||||
uint8_t check_incoming_length_(uint8_t length);
|
||||
uint16_t calc_crc_(uint8_t *msg, int n);
|
||||
uint16_t crc_xmodem_update_(uint16_t crc, uint8_t data);
|
||||
uint16_t cal_crc_half_(uint8_t *msg, uint8_t len);
|
||||
uint8_t send_next_command_();
|
||||
void send_next_poll_();
|
||||
void queue_command_(const char *command, uint8_t length);
|
||||
|
@ -32,7 +32,7 @@ from esphome.const import (
|
||||
CONF_LEVEL,
|
||||
)
|
||||
from esphome.core import coroutine
|
||||
from esphome.jsonschema import jschema_extractor
|
||||
from esphome.schema_extractors import SCHEMA_EXTRACT, schema_extractor
|
||||
from esphome.util import Registry, SimpleRegistry
|
||||
|
||||
AUTO_LOAD = ["binary_sensor"]
|
||||
@ -195,14 +195,14 @@ def validate_dumpers(value):
|
||||
def validate_triggers(base_schema):
|
||||
assert isinstance(base_schema, cv.Schema)
|
||||
|
||||
@jschema_extractor("triggers")
|
||||
@schema_extractor("triggers")
|
||||
def validator(config):
|
||||
added_keys = {}
|
||||
for key, (_, valid) in TRIGGER_REGISTRY.items():
|
||||
added_keys[cv.Optional(key)] = valid
|
||||
new_schema = base_schema.extend(added_keys)
|
||||
# pylint: disable=comparison-with-callable
|
||||
if config == jschema_extractor:
|
||||
|
||||
if config == SCHEMA_EXTRACT:
|
||||
return new_schema
|
||||
return new_schema(config)
|
||||
|
||||
|
@ -106,7 +106,7 @@ optional<NexaData> NexaProtocol::decode(RemoteReceiveData src) {
|
||||
SHHHH HHHH HHHH HHHH HHHH HHHH HHGO EE BB DDDD 0 P
|
||||
|
||||
S = Sync bit.
|
||||
H = The first 26 bits are transmitter unique codes, and it is this code that the reciever "learns" to recognize.
|
||||
H = The first 26 bits are transmitter unique codes, and it is this code that the receiver "learns" to recognize.
|
||||
G = Group code, set to one for the whole group.
|
||||
O = On/Off bit. Set to 1 for on, 0 for off.
|
||||
E = Unit to be turned on or off. The code is inverted, i.e. '11' equals 1, '00' equals 4.
|
||||
|
@ -102,7 +102,7 @@ bool RCSwitchBase::expect_sync(RemoteReceiveData &src) const {
|
||||
if (!src.peek_space(this->sync_low_, 1))
|
||||
return false;
|
||||
} else {
|
||||
// We cant peek a space at the beginning because signals starts with a low to high transition.
|
||||
// We can't peek a space at the beginning because signals starts with a low to high transition.
|
||||
// this long space at the beginning is the separation between the transmissions itself, so it is actually
|
||||
// added at the end kind of artificially (by the value given to "idle:" option by the user in the yaml)
|
||||
if (!src.peek_mark(this->sync_low_))
|
||||
|
@ -205,7 +205,7 @@ void SCD4XComponent::update() {
|
||||
bool SCD4XComponent::perform_forced_calibration(uint16_t current_co2_concentration) {
|
||||
/*
|
||||
Operate the SCD4x in the operation mode later used in normal sensor operation (periodic measurement, low power
|
||||
periodic measurement or single shot) for > 3 minutes in an environment with homogenous and constant CO2
|
||||
periodic measurement or single shot) for > 3 minutes in an environment with homogeneous and constant CO2
|
||||
concentration before performing a forced recalibration.
|
||||
*/
|
||||
if (!this->write_command(SCD4X_CMD_STOP_MEASUREMENTS)) {
|
||||
@ -217,7 +217,7 @@ bool SCD4XComponent::perform_forced_calibration(uint16_t current_co2_concentrati
|
||||
ESP_LOGD(TAG, "setting forced calibration Co2 level %d ppm", current_co2_concentration);
|
||||
// frc takes 400 ms
|
||||
// because this method will be used very rarly
|
||||
// the simple aproach with delay is ok
|
||||
// the simple approach with delay is ok
|
||||
delay(400); // NOLINT'
|
||||
if (!this->start_measurement_()) {
|
||||
return false;
|
||||
|
@ -33,7 +33,7 @@ bool SensirionI2CDevice::read_data(uint16_t *data, uint8_t len) {
|
||||
}
|
||||
/***
|
||||
* write command with parameters and insert crc
|
||||
* use stack array for less than 4 paramaters. Most sensirion i2c commands have less parameters
|
||||
* use stack array for less than 4 parameters. Most sensirion i2c commands have less parameters
|
||||
*/
|
||||
bool SensirionI2CDevice::write_command_(uint16_t command, CommandLen command_len, const uint16_t *data,
|
||||
uint8_t data_len) {
|
||||
@ -63,7 +63,7 @@ bool SensirionI2CDevice::write_command_(uint16_t command, CommandLen command_len
|
||||
temp[raw_idx++] = command >> 8;
|
||||
#endif
|
||||
}
|
||||
// add parameters folllowed by crc
|
||||
// add parameters followed by crc
|
||||
// skipped if len == 0
|
||||
for (size_t i = 0; i < data_len; i++) {
|
||||
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
|
||||
|
@ -20,13 +20,13 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* handles crc check used by Sensirion sensors
|
||||
* @param data pointer to raw result
|
||||
* @param len number of words to read
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool read_data(uint16_t *data, uint8_t len);
|
||||
|
||||
/** Read 1 data word from i2c device.
|
||||
* @param data reference to raw result
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool read_data(uint16_t &data) { return this->read_data(&data, 1); }
|
||||
|
||||
@ -35,8 +35,8 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* @param i2c register
|
||||
* @param data pointer to raw result
|
||||
* @param len number of words to read
|
||||
* @param delay milliseconds to to wait between sending the i2c commmand and reading the result
|
||||
* @return true if reading succeded
|
||||
* @param delay milliseconds to to wait between sending the i2c command and reading the result
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool get_register(uint16_t command, uint16_t *data, uint8_t len, uint8_t delay = 0) {
|
||||
return get_register_(command, ADDR_16_BIT, data, len, delay);
|
||||
@ -44,8 +44,8 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
/** Read 1 data word from 16 bit i2c register.
|
||||
* @param i2c register
|
||||
* @param data reference to raw result
|
||||
* @param delay milliseconds to to wait between sending the i2c commmand and reading the result
|
||||
* @return true if reading succeded
|
||||
* @param delay milliseconds to to wait between sending the i2c command and reading the result
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool get_register(uint16_t i2c_register, uint16_t &data, uint8_t delay = 0) {
|
||||
return this->get_register_(i2c_register, ADDR_16_BIT, &data, 1, delay);
|
||||
@ -56,8 +56,8 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* @param i2c register
|
||||
* @param data pointer to raw result
|
||||
* @param len number of words to read
|
||||
* @param delay milliseconds to to wait between sending the i2c commmand and reading the result
|
||||
* @return true if reading succeded
|
||||
* @param delay milliseconds to to wait between sending the i2c command and reading the result
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool get_8bit_register(uint8_t i2c_register, uint16_t *data, uint8_t len, uint8_t delay = 0) {
|
||||
return get_register_(i2c_register, ADDR_8_BIT, data, len, delay);
|
||||
@ -66,8 +66,8 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
/** Read 1 data word from 8 bit i2c register.
|
||||
* @param i2c register
|
||||
* @param data reference to raw result
|
||||
* @param delay milliseconds to to wait between sending the i2c commmand and reading the result
|
||||
* @return true if reading succeded
|
||||
* @param delay milliseconds to to wait between sending the i2c command and reading the result
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool get_8bit_register(uint8_t i2c_register, uint16_t &data, uint8_t delay = 0) {
|
||||
return this->get_register_(i2c_register, ADDR_8_BIT, &data, 1, delay);
|
||||
@ -75,21 +75,21 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
|
||||
/** Write a command to the i2c device.
|
||||
* @param command i2c command to send
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
template<class T> bool write_command(T i2c_register) { return write_command(i2c_register, nullptr, 0); }
|
||||
|
||||
/** Write a command and one data word to the i2c device .
|
||||
* @param command i2c command to send
|
||||
* @param data argument for the i2c command
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
template<class T> bool write_command(T i2c_register, uint16_t data) { return write_command(i2c_register, &data, 1); }
|
||||
|
||||
/** Write a command with arguments as words
|
||||
* @param i2c_register i2c command to send - an be uint8_t or uint16_t
|
||||
* @param data vector<uint16> arguments for the i2c command
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
template<class T> bool write_command(T i2c_register, const std::vector<uint16_t> &data) {
|
||||
return write_command_(i2c_register, sizeof(T), data.data(), data.size());
|
||||
@ -99,7 +99,7 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* @param i2c_register i2c command to send - an be uint8_t or uint16_t
|
||||
* @param data arguments for the i2c command
|
||||
* @param len number of arguments (words)
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
template<class T> bool write_command(T i2c_register, const uint16_t *data, uint8_t len) {
|
||||
// limit to 8 or 16 bit only
|
||||
@ -115,7 +115,7 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* @param command_len either 1 for short 8 bit command or 2 for 16 bit command codes
|
||||
* @param data arguments for the i2c command
|
||||
* @param data_len number of arguments (words)
|
||||
* @return true if reading succeded
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool write_command_(uint16_t command, CommandLen command_len, const uint16_t *data, uint8_t data_len);
|
||||
|
||||
@ -125,8 +125,8 @@ class SensirionI2CDevice : public i2c::I2CDevice {
|
||||
* @param command_len either 1 for short 8 bit command or 2 for 16 bit command codes
|
||||
* @param data pointer to raw result
|
||||
* @param len number of words to read
|
||||
* @param delay milliseconds to to wait between sending the i2c commmand and reading the result
|
||||
* @return true if reading succeded
|
||||
* @param delay milliseconds to to wait between sending the i2c command and reading the result
|
||||
* @return true if reading succeeded
|
||||
*/
|
||||
bool get_register_(uint16_t reg, CommandLen command_len, uint16_t *data, uint8_t len, uint8_t delay);
|
||||
|
||||
|
@ -31,12 +31,15 @@ from esphome.const import (
|
||||
CONF_FORCE_UPDATE,
|
||||
DEVICE_CLASS_DURATION,
|
||||
DEVICE_CLASS_EMPTY,
|
||||
DEVICE_CLASS_APPARENT_POWER,
|
||||
DEVICE_CLASS_AQI,
|
||||
DEVICE_CLASS_BATTERY,
|
||||
DEVICE_CLASS_CARBON_DIOXIDE,
|
||||
DEVICE_CLASS_CARBON_MONOXIDE,
|
||||
DEVICE_CLASS_CURRENT,
|
||||
DEVICE_CLASS_DATE,
|
||||
DEVICE_CLASS_ENERGY,
|
||||
DEVICE_CLASS_FREQUENCY,
|
||||
DEVICE_CLASS_GAS,
|
||||
DEVICE_CLASS_HUMIDITY,
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
@ -51,6 +54,7 @@ from esphome.const import (
|
||||
DEVICE_CLASS_POWER,
|
||||
DEVICE_CLASS_POWER_FACTOR,
|
||||
DEVICE_CLASS_PRESSURE,
|
||||
DEVICE_CLASS_REACTIVE_POWER,
|
||||
DEVICE_CLASS_SIGNAL_STRENGTH,
|
||||
DEVICE_CLASS_SULPHUR_DIOXIDE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
@ -66,13 +70,16 @@ from esphome.util import Registry
|
||||
CODEOWNERS = ["@esphome/core"]
|
||||
DEVICE_CLASSES = [
|
||||
DEVICE_CLASS_EMPTY,
|
||||
DEVICE_CLASS_APPARENT_POWER,
|
||||
DEVICE_CLASS_AQI,
|
||||
DEVICE_CLASS_BATTERY,
|
||||
DEVICE_CLASS_CARBON_DIOXIDE,
|
||||
DEVICE_CLASS_CARBON_MONOXIDE,
|
||||
DEVICE_CLASS_CURRENT,
|
||||
DEVICE_CLASS_DATE,
|
||||
DEVICE_CLASS_DURATION,
|
||||
DEVICE_CLASS_ENERGY,
|
||||
DEVICE_CLASS_FREQUENCY,
|
||||
DEVICE_CLASS_GAS,
|
||||
DEVICE_CLASS_HUMIDITY,
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
@ -87,6 +94,7 @@ DEVICE_CLASSES = [
|
||||
DEVICE_CLASS_POWER,
|
||||
DEVICE_CLASS_POWER_FACTOR,
|
||||
DEVICE_CLASS_PRESSURE,
|
||||
DEVICE_CLASS_REACTIVE_POWER,
|
||||
DEVICE_CLASS_SIGNAL_STRENGTH,
|
||||
DEVICE_CLASS_SULPHUR_DIOXIDE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
|
@ -140,5 +140,7 @@ async def to_code(config):
|
||||
)
|
||||
)
|
||||
cg.add_library(
|
||||
None, None, "https://github.com/Sensirion/arduino-gas-index-algorithm.git"
|
||||
None,
|
||||
None,
|
||||
"https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1",
|
||||
)
|
||||
|
@ -39,7 +39,7 @@ void SGP4xComponent::setup() {
|
||||
ESP_LOGE(TAG, "Measuring NOx requires a SGP41 sensor but a SGP40 sensor is detected");
|
||||
// disable the sensor
|
||||
this->nox_sensor_->set_disabled_by_default(true);
|
||||
// make sure it's not visiable in HA
|
||||
// make sure it's not visible in HA
|
||||
this->nox_sensor_->set_internal(true);
|
||||
this->nox_sensor_->state = NAN;
|
||||
// remove pointer to sensor
|
||||
@ -104,8 +104,8 @@ void SGP4xComponent::setup() {
|
||||
https://github.com/Sensirion/embedded-sgp/issues/136 indicate the algorithm should be a bit resilient to slight
|
||||
timing variations so the software timer should be accurate enough for this.
|
||||
|
||||
This block starts sampling from the sensor at 1Hz, and is done seperately from the call
|
||||
to the update method. This seperation is to support getting accurate measurements but
|
||||
This block starts sampling from the sensor at 1Hz, and is done separately from the call
|
||||
to the update method. This separation is to support getting accurate measurements but
|
||||
limit the amount of communication done over wifi for power consumption or to keep the
|
||||
number of records reported from being overwhelming.
|
||||
*/
|
||||
@ -170,8 +170,8 @@ bool SGP4xComponent::measure_gas_indices_(int32_t &voc, int32_t &nox) {
|
||||
// much
|
||||
if (this->store_baseline_ && this->seconds_since_last_store_ > SHORTEST_BASELINE_STORE_INTERVAL) {
|
||||
voc_algorithm_.get_states(this->voc_state0_, this->voc_state1_);
|
||||
if ((uint32_t) abs(this->voc_baselines_storage_.state0 - this->voc_state0_) > MAXIMUM_STORAGE_DIFF ||
|
||||
(uint32_t) abs(this->voc_baselines_storage_.state1 - this->voc_state1_) > MAXIMUM_STORAGE_DIFF) {
|
||||
if (std::abs(this->voc_baselines_storage_.state0 - this->voc_state0_) > MAXIMUM_STORAGE_DIFF ||
|
||||
std::abs(this->voc_baselines_storage_.state1 - this->voc_state1_) > MAXIMUM_STORAGE_DIFF) {
|
||||
this->seconds_since_last_store_ = 0;
|
||||
this->voc_baselines_storage_.state0 = this->voc_state0_;
|
||||
this->voc_baselines_storage_.state1 = this->voc_state1_;
|
||||
@ -236,9 +236,9 @@ bool SGP4xComponent::measure_raw_(uint16_t &voc_raw, uint16_t &nox_raw) {
|
||||
}
|
||||
uint16_t rhticks = llround((uint16_t)((humidity * 65535) / 100));
|
||||
uint16_t tempticks = (uint16_t)(((temperature + 45) * 65535) / 175);
|
||||
// first paramater are the relative humidity ticks
|
||||
// first parameter are the relative humidity ticks
|
||||
data[0] = rhticks;
|
||||
// secomd paramater are the temperature ticks
|
||||
// secomd parameter are the temperature ticks
|
||||
data[1] = tempticks;
|
||||
|
||||
if (!this->write_command(command, data, 2)) {
|
||||
|
@ -49,7 +49,7 @@ static const uint16_t SPG41_SELFTEST_TIME = 320; // 320 ms for self test
|
||||
static const uint16_t SGP40_MEASURE_TIME = 30;
|
||||
static const uint16_t SGP41_MEASURE_TIME = 55;
|
||||
// Store anyway if the baseline difference exceeds the max storage diff value
|
||||
const uint32_t MAXIMUM_STORAGE_DIFF = 50;
|
||||
const float MAXIMUM_STORAGE_DIFF = 50.0f;
|
||||
|
||||
class SGP4xComponent;
|
||||
|
||||
@ -120,8 +120,8 @@ class SGP4xComponent : public PollingComponent, public sensor::Sensor, public se
|
||||
sensor::Sensor *voc_sensor_{nullptr};
|
||||
VOCGasIndexAlgorithm voc_algorithm_;
|
||||
optional<GasTuning> voc_tuning_params_;
|
||||
int32_t voc_state0_;
|
||||
int32_t voc_state1_;
|
||||
float voc_state0_;
|
||||
float voc_state1_;
|
||||
int32_t voc_index_ = 0;
|
||||
|
||||
sensor::Sensor *nox_sensor_{nullptr};
|
||||
|
@ -49,7 +49,7 @@ constexpr float POWER_SCALING_FACTOR = 880373;
|
||||
constexpr float VOLTAGE_SCALING_FACTOR = 347800;
|
||||
constexpr float CURRENT_SCALING_FACTOR = 1448;
|
||||
|
||||
// Esentially std::size() for pre c++17
|
||||
// Essentially std::size() for pre c++17
|
||||
template<typename T, size_t N> constexpr size_t size(const T (&/*unused*/)[N]) noexcept { return N; }
|
||||
|
||||
} // Anonymous namespace
|
||||
|
@ -42,12 +42,12 @@ void SlowPWMOutput::loop() {
|
||||
uint32_t now = millis();
|
||||
float scaled_state = this->state_ * this->period_;
|
||||
|
||||
if (now >= this->period_start_time_ + this->period_) {
|
||||
if (now - this->period_start_time_ >= this->period_) {
|
||||
ESP_LOGVV(TAG, "End of period. State: %f, Scaled state: %f", this->state_, scaled_state);
|
||||
this->period_start_time_ += this->period_;
|
||||
}
|
||||
|
||||
this->set_output_state_(now < this->period_start_time_ + scaled_state);
|
||||
this->set_output_state_(scaled_state > now - this->period_start_time_);
|
||||
}
|
||||
|
||||
void SlowPWMOutput::dump_config() {
|
||||
|
@ -161,7 +161,7 @@ bool SonoffD1Output::write_command_(uint8_t *cmd, const size_t len, bool needs_a
|
||||
return false;
|
||||
}
|
||||
if ((cmd[5] + 7 /*mandatory header + suffix length*/) != len) {
|
||||
ESP_LOGW(TAG, "[%04d] Payload length field does not match packet lenght (%d, expected %d)", this->write_count_,
|
||||
ESP_LOGW(TAG, "[%04d] Payload length field does not match packet length (%d, expected %d)", this->write_count_,
|
||||
cmd[5], len - 7);
|
||||
return false;
|
||||
}
|
||||
|
@ -163,7 +163,7 @@ class ThermostatClimate : public climate::Climate, public Component {
|
||||
/// Change to a provided custom preset setting; will reset temperature, mode, fan, and swing modes accordingly
|
||||
void change_custom_preset_(const std::string &custom_preset);
|
||||
|
||||
/// Applies the temperature, mode, fan, and swing modes of the provded config.
|
||||
/// Applies the temperature, mode, fan, and swing modes of the provided config.
|
||||
/// This is agnostic of custom vs built in preset
|
||||
void change_preset_internal_(const ThermostatClimateTargetTempConfig &config);
|
||||
|
||||
|
@ -126,10 +126,10 @@ def _parse_cron_part(part, min_value, max_value, special_mapping):
|
||||
)
|
||||
begin, end = data
|
||||
begin_n = _parse_cron_int(
|
||||
begin, special_mapping, "Number for time range must be integer, " "got {}"
|
||||
begin, special_mapping, "Number for time range must be integer, got {}"
|
||||
)
|
||||
end_n = _parse_cron_int(
|
||||
end, special_mapping, "Number for time range must be integer, " "got {}"
|
||||
end, special_mapping, "Number for time range must be integer, got {}"
|
||||
)
|
||||
if end_n < begin_n:
|
||||
return set(range(end_n, max_value + 1)) | set(range(min_value, begin_n + 1))
|
||||
@ -139,7 +139,7 @@ def _parse_cron_part(part, min_value, max_value, special_mapping):
|
||||
_parse_cron_int(
|
||||
part,
|
||||
special_mapping,
|
||||
"Number for time expression must be an " "integer, got {}",
|
||||
"Number for time expression must be an integer, got {}",
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -17,7 +17,6 @@ from esphome.core.entity_helpers import inherit_property_from
|
||||
DEPENDENCIES = ["time"]
|
||||
|
||||
CONF_POWER_ID = "power_id"
|
||||
CONF_MIN_SAVE_INTERVAL = "min_save_interval"
|
||||
total_daily_energy_ns = cg.esphome_ns.namespace("total_daily_energy")
|
||||
TotalDailyEnergyMethod = total_daily_energy_ns.enum("TotalDailyEnergyMethod")
|
||||
TOTAL_DAILY_ENERGY_METHODS = {
|
||||
@ -49,9 +48,9 @@ CONFIG_SCHEMA = (
|
||||
cv.GenerateID(CONF_TIME_ID): cv.use_id(time.RealTimeClock),
|
||||
cv.Required(CONF_POWER_ID): cv.use_id(sensor.Sensor),
|
||||
cv.Optional(CONF_RESTORE, default=True): cv.boolean,
|
||||
cv.Optional(
|
||||
CONF_MIN_SAVE_INTERVAL, default="0s"
|
||||
): cv.positive_time_period_milliseconds,
|
||||
cv.Optional("min_save_interval"): cv.invalid(
|
||||
"`min_save_interval` was removed in 2022.6.0. Please use the `preferences` -> `flash_write_interval` to adjust."
|
||||
),
|
||||
cv.Optional(CONF_METHOD, default="right"): cv.enum(
|
||||
TOTAL_DAILY_ENERGY_METHODS, lower=True
|
||||
),
|
||||
@ -90,5 +89,4 @@ async def to_code(config):
|
||||
time_ = await cg.get_variable(config[CONF_TIME_ID])
|
||||
cg.add(var.set_time(time_))
|
||||
cg.add(var.set_restore(config[CONF_RESTORE]))
|
||||
cg.add(var.set_min_save_interval(config[CONF_MIN_SAVE_INTERVAL]))
|
||||
cg.add(var.set_method(config[CONF_METHOD]))
|
||||
|
@ -16,7 +16,6 @@ void TotalDailyEnergy::setup() {
|
||||
this->publish_state_and_save(initial_value);
|
||||
|
||||
this->last_update_ = millis();
|
||||
this->last_save_ = this->last_update_;
|
||||
|
||||
this->parent_->add_on_state_callback([this](float state) { this->process_new_state_(state); });
|
||||
}
|
||||
@ -43,12 +42,9 @@ void TotalDailyEnergy::loop() {
|
||||
void TotalDailyEnergy::publish_state_and_save(float state) {
|
||||
this->total_energy_ = state;
|
||||
this->publish_state(state);
|
||||
const uint32_t now = millis();
|
||||
if (now - this->last_save_ < this->min_save_interval_) {
|
||||
return;
|
||||
if (this->restore_) {
|
||||
this->pref_.save(&state);
|
||||
}
|
||||
this->last_save_ = now;
|
||||
this->pref_.save(&state);
|
||||
}
|
||||
|
||||
void TotalDailyEnergy::process_new_state_(float state) {
|
||||
|
@ -18,7 +18,6 @@ enum TotalDailyEnergyMethod {
|
||||
class TotalDailyEnergy : public sensor::Sensor, public Component {
|
||||
public:
|
||||
void set_restore(bool restore) { restore_ = restore; }
|
||||
void set_min_save_interval(uint32_t min_interval) { this->min_save_interval_ = min_interval; }
|
||||
void set_time(time::RealTimeClock *time) { time_ = time; }
|
||||
void set_parent(Sensor *parent) { parent_ = parent; }
|
||||
void set_method(TotalDailyEnergyMethod method) { method_ = method; }
|
||||
@ -39,7 +38,6 @@ class TotalDailyEnergy : public sensor::Sensor, public Component {
|
||||
uint16_t last_day_of_year_{};
|
||||
uint32_t last_update_{0};
|
||||
uint32_t last_save_{0};
|
||||
uint32_t min_save_interval_{0};
|
||||
bool restore_;
|
||||
float total_energy_{0.0f};
|
||||
float last_power_state_{0.0f};
|
||||
|
@ -38,7 +38,7 @@ class UARTDebugger : public Component, public Trigger<UARTDirection, std::vector
|
||||
|
||||
/// Add a delimiter byte. This can be called multiple times to setup a
|
||||
/// multi-byte delimiter (a typical example would be '\r\n').
|
||||
/// When the constructued byte sequence is found in the data stream,
|
||||
/// When the constructed byte sequence is found in the data stream,
|
||||
/// logging will be triggered.
|
||||
void add_delimiter_byte(uint8_t byte) { this->after_delimiter_.push_back(byte); }
|
||||
|
||||
|
@ -257,6 +257,7 @@ void VL53L0XSensor::setup() {
|
||||
|
||||
ESP_LOGD(TAG, "'%s' - setup END", this->name_.c_str());
|
||||
}
|
||||
|
||||
void VL53L0XSensor::update() {
|
||||
if (this->initiated_read_ || this->waiting_for_interrupt_) {
|
||||
this->publish_state(NAN);
|
||||
@ -280,6 +281,7 @@ void VL53L0XSensor::update() {
|
||||
this->initiated_read_ = true;
|
||||
// wait for timeout
|
||||
}
|
||||
|
||||
void VL53L0XSensor::loop() {
|
||||
if (this->initiated_read_) {
|
||||
if (reg(0x00).get() & 0x01) {
|
||||
@ -311,5 +313,222 @@ void VL53L0XSensor::loop() {
|
||||
}
|
||||
}
|
||||
|
||||
uint32_t VL53L0XSensor::get_measurement_timing_budget_() {
|
||||
SequenceStepEnables enables{};
|
||||
SequenceStepTimeouts timeouts{};
|
||||
|
||||
uint16_t start_overhead = 1910;
|
||||
uint16_t end_overhead = 960;
|
||||
uint16_t msrc_overhead = 660;
|
||||
uint16_t tcc_overhead = 590;
|
||||
uint16_t dss_overhead = 690;
|
||||
uint16_t pre_range_overhead = 660;
|
||||
uint16_t final_range_overhead = 550;
|
||||
|
||||
// "Start and end overhead times always present"
|
||||
uint32_t budget_us = start_overhead + end_overhead;
|
||||
|
||||
get_sequence_step_enables_(&enables);
|
||||
get_sequence_step_timeouts_(&enables, &timeouts);
|
||||
|
||||
if (enables.tcc)
|
||||
budget_us += (timeouts.msrc_dss_tcc_us + tcc_overhead);
|
||||
|
||||
if (enables.dss) {
|
||||
budget_us += 2 * (timeouts.msrc_dss_tcc_us + dss_overhead);
|
||||
} else if (enables.msrc) {
|
||||
budget_us += (timeouts.msrc_dss_tcc_us + msrc_overhead);
|
||||
}
|
||||
|
||||
if (enables.pre_range)
|
||||
budget_us += (timeouts.pre_range_us + pre_range_overhead);
|
||||
|
||||
if (enables.final_range)
|
||||
budget_us += (timeouts.final_range_us + final_range_overhead);
|
||||
|
||||
measurement_timing_budget_us_ = budget_us; // store for internal reuse
|
||||
return budget_us;
|
||||
}
|
||||
|
||||
bool VL53L0XSensor::set_measurement_timing_budget_(uint32_t budget_us) {
|
||||
SequenceStepEnables enables{};
|
||||
SequenceStepTimeouts timeouts{};
|
||||
|
||||
uint16_t start_overhead = 1320; // note that this is different than the value in get_
|
||||
uint16_t end_overhead = 960;
|
||||
uint16_t msrc_overhead = 660;
|
||||
uint16_t tcc_overhead = 590;
|
||||
uint16_t dss_overhead = 690;
|
||||
uint16_t pre_range_overhead = 660;
|
||||
uint16_t final_range_overhead = 550;
|
||||
|
||||
uint32_t min_timing_budget = 20000;
|
||||
|
||||
if (budget_us < min_timing_budget) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t used_budget_us = start_overhead + end_overhead;
|
||||
|
||||
get_sequence_step_enables_(&enables);
|
||||
get_sequence_step_timeouts_(&enables, &timeouts);
|
||||
|
||||
if (enables.tcc) {
|
||||
used_budget_us += (timeouts.msrc_dss_tcc_us + tcc_overhead);
|
||||
}
|
||||
|
||||
if (enables.dss) {
|
||||
used_budget_us += 2 * (timeouts.msrc_dss_tcc_us + dss_overhead);
|
||||
} else if (enables.msrc) {
|
||||
used_budget_us += (timeouts.msrc_dss_tcc_us + msrc_overhead);
|
||||
}
|
||||
|
||||
if (enables.pre_range) {
|
||||
used_budget_us += (timeouts.pre_range_us + pre_range_overhead);
|
||||
}
|
||||
|
||||
if (enables.final_range) {
|
||||
used_budget_us += final_range_overhead;
|
||||
|
||||
// "Note that the final range timeout is determined by the timing
|
||||
// budget and the sum of all other timeouts within the sequence.
|
||||
// If there is no room for the final range timeout, then an error
|
||||
// will be set. Otherwise the remaining time will be applied to
|
||||
// the final range."
|
||||
|
||||
if (used_budget_us > budget_us) {
|
||||
// "Requested timeout too big."
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t final_range_timeout_us = budget_us - used_budget_us;
|
||||
|
||||
// set_sequence_step_timeout() begin
|
||||
// (SequenceStepId == VL53L0X_SEQUENCESTEP_FINAL_RANGE)
|
||||
|
||||
// "For the final range timeout, the pre-range timeout
|
||||
// must be added. To do this both final and pre-range
|
||||
// timeouts must be expressed in macro periods MClks
|
||||
// because they have different vcsel periods."
|
||||
|
||||
uint16_t final_range_timeout_mclks =
|
||||
timeout_microseconds_to_mclks_(final_range_timeout_us, timeouts.final_range_vcsel_period_pclks);
|
||||
|
||||
if (enables.pre_range) {
|
||||
final_range_timeout_mclks += timeouts.pre_range_mclks;
|
||||
}
|
||||
|
||||
write_byte_16(0x71, encode_timeout_(final_range_timeout_mclks));
|
||||
|
||||
// set_sequence_step_timeout() end
|
||||
|
||||
measurement_timing_budget_us_ = budget_us; // store for internal reuse
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void VL53L0XSensor::get_sequence_step_enables_(SequenceStepEnables *enables) {
|
||||
uint8_t sequence_config = reg(0x01).get();
|
||||
enables->tcc = (sequence_config >> 4) & 0x1;
|
||||
enables->dss = (sequence_config >> 3) & 0x1;
|
||||
enables->msrc = (sequence_config >> 2) & 0x1;
|
||||
enables->pre_range = (sequence_config >> 6) & 0x1;
|
||||
enables->final_range = (sequence_config >> 7) & 0x1;
|
||||
}
|
||||
|
||||
void VL53L0XSensor::get_sequence_step_timeouts_(SequenceStepEnables const *enables, SequenceStepTimeouts *timeouts) {
|
||||
timeouts->pre_range_vcsel_period_pclks = get_vcsel_pulse_period_(VCSEL_PERIOD_PRE_RANGE);
|
||||
|
||||
timeouts->msrc_dss_tcc_mclks = reg(0x46).get() + 1;
|
||||
timeouts->msrc_dss_tcc_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->msrc_dss_tcc_mclks, timeouts->pre_range_vcsel_period_pclks);
|
||||
|
||||
uint16_t value;
|
||||
read_byte_16(0x51, &value);
|
||||
timeouts->pre_range_mclks = decode_timeout_(value);
|
||||
timeouts->pre_range_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->pre_range_mclks, timeouts->pre_range_vcsel_period_pclks);
|
||||
|
||||
timeouts->final_range_vcsel_period_pclks = get_vcsel_pulse_period_(VCSEL_PERIOD_FINAL_RANGE);
|
||||
|
||||
read_byte_16(0x71, &value);
|
||||
timeouts->final_range_mclks = decode_timeout_(value);
|
||||
|
||||
if (enables->pre_range) {
|
||||
timeouts->final_range_mclks -= timeouts->pre_range_mclks;
|
||||
}
|
||||
|
||||
timeouts->final_range_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->final_range_mclks, timeouts->final_range_vcsel_period_pclks);
|
||||
}
|
||||
|
||||
uint8_t VL53L0XSensor::get_vcsel_pulse_period_(VcselPeriodType type) {
|
||||
uint8_t vcsel;
|
||||
if (type == VCSEL_PERIOD_PRE_RANGE) {
|
||||
vcsel = reg(0x50).get();
|
||||
} else if (type == VCSEL_PERIOD_FINAL_RANGE) {
|
||||
vcsel = reg(0x70).get();
|
||||
} else {
|
||||
return 255;
|
||||
}
|
||||
|
||||
return (vcsel + 1) << 1;
|
||||
}
|
||||
|
||||
uint32_t VL53L0XSensor::get_macro_period_(uint8_t vcsel_period_pclks) {
|
||||
return ((2304UL * vcsel_period_pclks * 1655UL) + 500UL) / 1000UL;
|
||||
}
|
||||
|
||||
uint32_t VL53L0XSensor::timeout_mclks_to_microseconds_(uint16_t timeout_period_mclks, uint8_t vcsel_period_pclks) {
|
||||
uint32_t macro_period_ns = get_macro_period_(vcsel_period_pclks);
|
||||
return ((timeout_period_mclks * macro_period_ns) + (macro_period_ns / 2)) / 1000;
|
||||
}
|
||||
|
||||
uint32_t VL53L0XSensor::timeout_microseconds_to_mclks_(uint32_t timeout_period_us, uint8_t vcsel_period_pclks) {
|
||||
uint32_t macro_period_ns = get_macro_period_(vcsel_period_pclks);
|
||||
return (((timeout_period_us * 1000) + (macro_period_ns / 2)) / macro_period_ns);
|
||||
}
|
||||
|
||||
uint16_t VL53L0XSensor::decode_timeout_(uint16_t reg_val) {
|
||||
// format: "(LSByte * 2^MSByte) + 1"
|
||||
uint8_t msb = (reg_val >> 8) & 0xFF;
|
||||
uint8_t lsb = (reg_val >> 0) & 0xFF;
|
||||
return (uint16_t(lsb) << msb) + 1;
|
||||
}
|
||||
|
||||
uint16_t VL53L0XSensor::encode_timeout_(uint16_t timeout_mclks) {
|
||||
// format: "(LSByte * 2^MSByte) + 1"
|
||||
uint32_t ls_byte = 0;
|
||||
uint16_t ms_byte = 0;
|
||||
|
||||
if (timeout_mclks <= 0)
|
||||
return 0;
|
||||
|
||||
ls_byte = timeout_mclks - 1;
|
||||
|
||||
while ((ls_byte & 0xFFFFFF00) > 0) {
|
||||
ls_byte >>= 1;
|
||||
ms_byte++;
|
||||
}
|
||||
|
||||
return (ms_byte << 8) | (ls_byte & 0xFF);
|
||||
}
|
||||
|
||||
bool VL53L0XSensor::perform_single_ref_calibration_(uint8_t vhv_init_byte) {
|
||||
reg(0x00) = 0x01 | vhv_init_byte; // VL53L0X_REG_SYSRANGE_MODE_START_STOP
|
||||
|
||||
uint32_t start = millis();
|
||||
while ((reg(0x13).get() & 0x07) == 0) {
|
||||
if (millis() - start > 1000)
|
||||
return false;
|
||||
yield();
|
||||
}
|
||||
|
||||
reg(0x0B) = 0x01;
|
||||
reg(0x00) = 0x00;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace vl53l0x
|
||||
} // namespace esphome
|
||||
|
@ -21,6 +21,8 @@ struct SequenceStepTimeouts {
|
||||
uint32_t msrc_dss_tcc_us, pre_range_us, final_range_us;
|
||||
};
|
||||
|
||||
enum VcselPeriodType { VCSEL_PERIOD_PRE_RANGE, VCSEL_PERIOD_FINAL_RANGE };
|
||||
|
||||
class VL53L0XSensor : public sensor::Sensor, public PollingComponent, public i2c::I2CDevice {
|
||||
public:
|
||||
VL53L0XSensor();
|
||||
@ -39,222 +41,20 @@ class VL53L0XSensor : public sensor::Sensor, public PollingComponent, public i2c
|
||||
void set_enable_pin(GPIOPin *enable) { this->enable_pin_ = enable; }
|
||||
|
||||
protected:
|
||||
uint32_t get_measurement_timing_budget_() {
|
||||
SequenceStepEnables enables{};
|
||||
SequenceStepTimeouts timeouts{};
|
||||
uint32_t get_measurement_timing_budget_();
|
||||
bool set_measurement_timing_budget_(uint32_t budget_us);
|
||||
void get_sequence_step_enables_(SequenceStepEnables *enables);
|
||||
void get_sequence_step_timeouts_(SequenceStepEnables const *enables, SequenceStepTimeouts *timeouts);
|
||||
uint8_t get_vcsel_pulse_period_(VcselPeriodType type);
|
||||
uint32_t get_macro_period_(uint8_t vcsel_period_pclks);
|
||||
|
||||
uint16_t start_overhead = 1910;
|
||||
uint16_t end_overhead = 960;
|
||||
uint16_t msrc_overhead = 660;
|
||||
uint16_t tcc_overhead = 590;
|
||||
uint16_t dss_overhead = 690;
|
||||
uint16_t pre_range_overhead = 660;
|
||||
uint16_t final_range_overhead = 550;
|
||||
uint32_t timeout_mclks_to_microseconds_(uint16_t timeout_period_mclks, uint8_t vcsel_period_pclks);
|
||||
uint32_t timeout_microseconds_to_mclks_(uint32_t timeout_period_us, uint8_t vcsel_period_pclks);
|
||||
|
||||
// "Start and end overhead times always present"
|
||||
uint32_t budget_us = start_overhead + end_overhead;
|
||||
uint16_t decode_timeout_(uint16_t reg_val);
|
||||
uint16_t encode_timeout_(uint16_t timeout_mclks);
|
||||
|
||||
get_sequence_step_enables_(&enables);
|
||||
get_sequence_step_timeouts_(&enables, &timeouts);
|
||||
|
||||
if (enables.tcc)
|
||||
budget_us += (timeouts.msrc_dss_tcc_us + tcc_overhead);
|
||||
|
||||
if (enables.dss) {
|
||||
budget_us += 2 * (timeouts.msrc_dss_tcc_us + dss_overhead);
|
||||
} else if (enables.msrc) {
|
||||
budget_us += (timeouts.msrc_dss_tcc_us + msrc_overhead);
|
||||
}
|
||||
|
||||
if (enables.pre_range)
|
||||
budget_us += (timeouts.pre_range_us + pre_range_overhead);
|
||||
|
||||
if (enables.final_range)
|
||||
budget_us += (timeouts.final_range_us + final_range_overhead);
|
||||
|
||||
measurement_timing_budget_us_ = budget_us; // store for internal reuse
|
||||
return budget_us;
|
||||
}
|
||||
|
||||
bool set_measurement_timing_budget_(uint32_t budget_us) {
|
||||
SequenceStepEnables enables{};
|
||||
SequenceStepTimeouts timeouts{};
|
||||
|
||||
uint16_t start_overhead = 1320; // note that this is different than the value in get_
|
||||
uint16_t end_overhead = 960;
|
||||
uint16_t msrc_overhead = 660;
|
||||
uint16_t tcc_overhead = 590;
|
||||
uint16_t dss_overhead = 690;
|
||||
uint16_t pre_range_overhead = 660;
|
||||
uint16_t final_range_overhead = 550;
|
||||
|
||||
uint32_t min_timing_budget = 20000;
|
||||
|
||||
if (budget_us < min_timing_budget) {
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t used_budget_us = start_overhead + end_overhead;
|
||||
|
||||
get_sequence_step_enables_(&enables);
|
||||
get_sequence_step_timeouts_(&enables, &timeouts);
|
||||
|
||||
if (enables.tcc) {
|
||||
used_budget_us += (timeouts.msrc_dss_tcc_us + tcc_overhead);
|
||||
}
|
||||
|
||||
if (enables.dss) {
|
||||
used_budget_us += 2 * (timeouts.msrc_dss_tcc_us + dss_overhead);
|
||||
} else if (enables.msrc) {
|
||||
used_budget_us += (timeouts.msrc_dss_tcc_us + msrc_overhead);
|
||||
}
|
||||
|
||||
if (enables.pre_range) {
|
||||
used_budget_us += (timeouts.pre_range_us + pre_range_overhead);
|
||||
}
|
||||
|
||||
if (enables.final_range) {
|
||||
used_budget_us += final_range_overhead;
|
||||
|
||||
// "Note that the final range timeout is determined by the timing
|
||||
// budget and the sum of all other timeouts within the sequence.
|
||||
// If there is no room for the final range timeout, then an error
|
||||
// will be set. Otherwise the remaining time will be applied to
|
||||
// the final range."
|
||||
|
||||
if (used_budget_us > budget_us) {
|
||||
// "Requested timeout too big."
|
||||
return false;
|
||||
}
|
||||
|
||||
uint32_t final_range_timeout_us = budget_us - used_budget_us;
|
||||
|
||||
// set_sequence_step_timeout() begin
|
||||
// (SequenceStepId == VL53L0X_SEQUENCESTEP_FINAL_RANGE)
|
||||
|
||||
// "For the final range timeout, the pre-range timeout
|
||||
// must be added. To do this both final and pre-range
|
||||
// timeouts must be expressed in macro periods MClks
|
||||
// because they have different vcsel periods."
|
||||
|
||||
uint16_t final_range_timeout_mclks =
|
||||
timeout_microseconds_to_mclks_(final_range_timeout_us, timeouts.final_range_vcsel_period_pclks);
|
||||
|
||||
if (enables.pre_range) {
|
||||
final_range_timeout_mclks += timeouts.pre_range_mclks;
|
||||
}
|
||||
|
||||
write_byte_16(0x71, encode_timeout_(final_range_timeout_mclks));
|
||||
|
||||
// set_sequence_step_timeout() end
|
||||
|
||||
measurement_timing_budget_us_ = budget_us; // store for internal reuse
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
void get_sequence_step_enables_(SequenceStepEnables *enables) {
|
||||
uint8_t sequence_config = reg(0x01).get();
|
||||
enables->tcc = (sequence_config >> 4) & 0x1;
|
||||
enables->dss = (sequence_config >> 3) & 0x1;
|
||||
enables->msrc = (sequence_config >> 2) & 0x1;
|
||||
enables->pre_range = (sequence_config >> 6) & 0x1;
|
||||
enables->final_range = (sequence_config >> 7) & 0x1;
|
||||
}
|
||||
|
||||
enum VcselPeriodType { VCSEL_PERIOD_PRE_RANGE, VCSEL_PERIOD_FINAL_RANGE };
|
||||
|
||||
void get_sequence_step_timeouts_(SequenceStepEnables const *enables, SequenceStepTimeouts *timeouts) {
|
||||
timeouts->pre_range_vcsel_period_pclks = get_vcsel_pulse_period_(VCSEL_PERIOD_PRE_RANGE);
|
||||
|
||||
timeouts->msrc_dss_tcc_mclks = reg(0x46).get() + 1;
|
||||
timeouts->msrc_dss_tcc_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->msrc_dss_tcc_mclks, timeouts->pre_range_vcsel_period_pclks);
|
||||
|
||||
uint16_t value;
|
||||
read_byte_16(0x51, &value);
|
||||
timeouts->pre_range_mclks = decode_timeout_(value);
|
||||
timeouts->pre_range_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->pre_range_mclks, timeouts->pre_range_vcsel_period_pclks);
|
||||
|
||||
timeouts->final_range_vcsel_period_pclks = get_vcsel_pulse_period_(VCSEL_PERIOD_FINAL_RANGE);
|
||||
|
||||
read_byte_16(0x71, &value);
|
||||
timeouts->final_range_mclks = decode_timeout_(value);
|
||||
|
||||
if (enables->pre_range) {
|
||||
timeouts->final_range_mclks -= timeouts->pre_range_mclks;
|
||||
}
|
||||
|
||||
timeouts->final_range_us =
|
||||
timeout_mclks_to_microseconds_(timeouts->final_range_mclks, timeouts->final_range_vcsel_period_pclks);
|
||||
}
|
||||
|
||||
uint8_t get_vcsel_pulse_period_(VcselPeriodType type) {
|
||||
uint8_t vcsel;
|
||||
if (type == VCSEL_PERIOD_PRE_RANGE) {
|
||||
vcsel = reg(0x50).get();
|
||||
} else if (type == VCSEL_PERIOD_FINAL_RANGE) {
|
||||
vcsel = reg(0x70).get();
|
||||
} else {
|
||||
return 255;
|
||||
}
|
||||
|
||||
return (vcsel + 1) << 1;
|
||||
}
|
||||
|
||||
uint32_t get_macro_period_(uint8_t vcsel_period_pclks) {
|
||||
return ((2304UL * vcsel_period_pclks * 1655UL) + 500UL) / 1000UL;
|
||||
}
|
||||
|
||||
uint32_t timeout_mclks_to_microseconds_(uint16_t timeout_period_mclks, uint8_t vcsel_period_pclks) {
|
||||
uint32_t macro_period_ns = get_macro_period_(vcsel_period_pclks);
|
||||
return ((timeout_period_mclks * macro_period_ns) + (macro_period_ns / 2)) / 1000;
|
||||
}
|
||||
uint32_t timeout_microseconds_to_mclks_(uint32_t timeout_period_us, uint8_t vcsel_period_pclks) {
|
||||
uint32_t macro_period_ns = get_macro_period_(vcsel_period_pclks);
|
||||
return (((timeout_period_us * 1000) + (macro_period_ns / 2)) / macro_period_ns);
|
||||
}
|
||||
|
||||
uint16_t decode_timeout_(uint16_t reg_val) {
|
||||
// format: "(LSByte * 2^MSByte) + 1"
|
||||
uint8_t msb = (reg_val >> 8) & 0xFF;
|
||||
uint8_t lsb = (reg_val >> 0) & 0xFF;
|
||||
return (uint16_t(lsb) << msb) + 1;
|
||||
}
|
||||
uint16_t encode_timeout_(uint16_t timeout_mclks) {
|
||||
// format: "(LSByte * 2^MSByte) + 1"
|
||||
uint32_t ls_byte = 0;
|
||||
uint16_t ms_byte = 0;
|
||||
|
||||
if (timeout_mclks <= 0)
|
||||
return 0;
|
||||
|
||||
ls_byte = timeout_mclks - 1;
|
||||
|
||||
while ((ls_byte & 0xFFFFFF00) > 0) {
|
||||
ls_byte >>= 1;
|
||||
ms_byte++;
|
||||
}
|
||||
|
||||
return (ms_byte << 8) | (ls_byte & 0xFF);
|
||||
}
|
||||
|
||||
bool perform_single_ref_calibration_(uint8_t vhv_init_byte) {
|
||||
reg(0x00) = 0x01 | vhv_init_byte; // VL53L0X_REG_SYSRANGE_MODE_START_STOP
|
||||
|
||||
uint32_t start = millis();
|
||||
while ((reg(0x13).get() & 0x07) == 0) {
|
||||
if (millis() - start > 1000)
|
||||
return false;
|
||||
yield();
|
||||
}
|
||||
|
||||
reg(0x0B) = 0x01;
|
||||
reg(0x00) = 0x00;
|
||||
|
||||
return true;
|
||||
}
|
||||
bool perform_single_ref_calibration_(uint8_t vhv_init_byte);
|
||||
|
||||
float signal_rate_limit_;
|
||||
bool long_range_;
|
||||
|
@ -195,7 +195,7 @@ bool WiFiComponent::wifi_sta_connect_(const WiFiAP &ap) {
|
||||
|
||||
#if ESP_IDF_VERSION_MAJOR >= 4
|
||||
// Protected Management Frame
|
||||
// Device will prefer to connect in PMF mode if other device also advertizes PMF capability.
|
||||
// Device will prefer to connect in PMF mode if other device also advertises PMF capability.
|
||||
conf.sta.pmf_cfg.capable = true;
|
||||
conf.sta.pmf_cfg.required = false;
|
||||
#endif
|
||||
|
@ -303,7 +303,7 @@ bool WiFiComponent::wifi_sta_connect_(const WiFiAP &ap) {
|
||||
|
||||
#if ESP_IDF_VERSION_MAJOR >= 4
|
||||
// Protected Management Frame
|
||||
// Device will prefer to connect in PMF mode if other device also advertizes PMF capability.
|
||||
// Device will prefer to connect in PMF mode if other device also advertises PMF capability.
|
||||
conf.sta.pmf_cfg.capable = true;
|
||||
conf.sta.pmf_cfg.required = false;
|
||||
#endif
|
||||
|
@ -38,7 +38,7 @@ bool XiaomiMiscale::parse_device(const esp32_ble_tracker::ESPBTDevice &device) {
|
||||
|
||||
if (this->impedance_ != nullptr) {
|
||||
if (res->version == 1) {
|
||||
ESP_LOGW(TAG, "Impedance is only supported on version 2. Your scale was identified as verison 1.");
|
||||
ESP_LOGW(TAG, "Impedance is only supported on version 2. Your scale was identified as version 1.");
|
||||
} else {
|
||||
if (res->impedance.has_value()) {
|
||||
this->impedance_->publish_state(*res->impedance);
|
||||
|
@ -679,6 +679,7 @@ def validate_config(config, command_line_substitutions) -> Config:
|
||||
result.add_output_path([CONF_SUBSTITUTIONS], CONF_SUBSTITUTIONS)
|
||||
try:
|
||||
substitutions.do_substitution_pass(config, command_line_substitutions)
|
||||
substitutions.do_substitution_pass(config, command_line_substitutions)
|
||||
except vol.Invalid as err:
|
||||
result.add_error(err)
|
||||
return result
|
||||
|
@ -57,11 +57,12 @@ from esphome.core import (
|
||||
TimePeriodMinutes,
|
||||
)
|
||||
from esphome.helpers import list_starts_with, add_class_to_obj
|
||||
from esphome.jsonschema import (
|
||||
jschema_list,
|
||||
jschema_extractor,
|
||||
jschema_registry,
|
||||
jschema_typed,
|
||||
from esphome.schema_extractors import (
|
||||
SCHEMA_EXTRACT,
|
||||
schema_extractor_list,
|
||||
schema_extractor,
|
||||
schema_extractor_registry,
|
||||
schema_extractor_typed,
|
||||
)
|
||||
from esphome.util import parse_esphome_version
|
||||
from esphome.voluptuous_schema import _Schema
|
||||
@ -327,7 +328,7 @@ def boolean(value):
|
||||
)
|
||||
|
||||
|
||||
@jschema_list
|
||||
@schema_extractor_list
|
||||
def ensure_list(*validators):
|
||||
"""Validate this configuration option to be a list.
|
||||
|
||||
@ -452,7 +453,11 @@ def validate_id_name(value):
|
||||
def use_id(type):
|
||||
"""Declare that this configuration option should point to an ID with the given type."""
|
||||
|
||||
@schema_extractor("use_id")
|
||||
def validator(value):
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return type
|
||||
|
||||
check_not_templatable(value)
|
||||
if value is None:
|
||||
return core.ID(None, is_declaration=False, type=type)
|
||||
@ -475,7 +480,11 @@ def declare_id(type):
|
||||
If two IDs with the same name exist, a validation error is thrown.
|
||||
"""
|
||||
|
||||
@schema_extractor("declare_id")
|
||||
def validator(value):
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return type
|
||||
|
||||
check_not_templatable(value)
|
||||
if value is None:
|
||||
return core.ID(None, is_declaration=True, type=type)
|
||||
@ -494,11 +503,11 @@ def templatable(other_validators):
|
||||
"""
|
||||
schema = Schema(other_validators)
|
||||
|
||||
@jschema_extractor("templatable")
|
||||
@schema_extractor("templatable")
|
||||
def validator(value):
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return other_validators
|
||||
|
||||
if isinstance(value, Lambda):
|
||||
return returning_lambda(value)
|
||||
if isinstance(other_validators, dict):
|
||||
@ -963,9 +972,9 @@ def ipv4(value):
|
||||
elif isinstance(value, IPAddress):
|
||||
return value
|
||||
else:
|
||||
raise Invalid("IPv4 address must consist of either string or " "integer list")
|
||||
raise Invalid("IPv4 address must consist of either string or integer list")
|
||||
if len(parts) != 4:
|
||||
raise Invalid("IPv4 address must consist of four point-separated " "integers")
|
||||
raise Invalid("IPv4 address must consist of four point-separated integers")
|
||||
parts_ = list(map(int, parts))
|
||||
if not all(0 <= x < 256 for x in parts_):
|
||||
raise Invalid("IPv4 address parts must be in range from 0 to 255")
|
||||
@ -985,10 +994,10 @@ def _valid_topic(value):
|
||||
raise Invalid("MQTT topic name/filter must not be empty.")
|
||||
if len(raw_value) > 65535:
|
||||
raise Invalid(
|
||||
"MQTT topic name/filter must not be longer than " "65535 encoded bytes."
|
||||
"MQTT topic name/filter must not be longer than 65535 encoded bytes."
|
||||
)
|
||||
if "\0" in value:
|
||||
raise Invalid("MQTT topic name/filter must not contain null " "character.")
|
||||
raise Invalid("MQTT topic name/filter must not contain null character.")
|
||||
return value
|
||||
|
||||
|
||||
@ -1000,7 +1009,7 @@ def subscribe_topic(value):
|
||||
i < len(value) - 1 and value[i + 1] != "/"
|
||||
):
|
||||
raise Invalid(
|
||||
"Single-level wildcard must occupy an entire " "level of the filter"
|
||||
"Single-level wildcard must occupy an entire level of the filter"
|
||||
)
|
||||
|
||||
index = value.find("#")
|
||||
@ -1012,9 +1021,7 @@ def subscribe_topic(value):
|
||||
"character in the topic filter."
|
||||
)
|
||||
if len(value) > 1 and value[index - 1] != "/":
|
||||
raise Invalid(
|
||||
"Multi-level wildcard must be after a topic " "level separator."
|
||||
)
|
||||
raise Invalid("Multi-level wildcard must be after a topic level separator.")
|
||||
|
||||
return value
|
||||
|
||||
@ -1177,10 +1184,9 @@ def one_of(*values, **kwargs):
|
||||
if kwargs:
|
||||
raise ValueError
|
||||
|
||||
@jschema_extractor("one_of")
|
||||
@schema_extractor("one_of")
|
||||
def validator(value):
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return values
|
||||
|
||||
if string_:
|
||||
@ -1220,10 +1226,9 @@ def enum(mapping, **kwargs):
|
||||
assert isinstance(mapping, dict)
|
||||
one_of_validator = one_of(*mapping, **kwargs)
|
||||
|
||||
@jschema_extractor("enum")
|
||||
@schema_extractor("enum")
|
||||
def validator(value):
|
||||
# pylint: disable=comparison-with-callable
|
||||
if value == jschema_extractor:
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return mapping
|
||||
|
||||
value = one_of_validator(value)
|
||||
@ -1396,7 +1401,7 @@ def extract_keys(schema):
|
||||
return keys
|
||||
|
||||
|
||||
@jschema_typed
|
||||
@schema_extractor_typed
|
||||
def typed_schema(schemas, **kwargs):
|
||||
"""Create a schema that has a key to distinguish between schemas"""
|
||||
key = kwargs.pop("key", CONF_TYPE)
|
||||
@ -1510,7 +1515,7 @@ def validate_registry_entry(name, registry):
|
||||
)
|
||||
ignore_keys = extract_keys(base_schema)
|
||||
|
||||
@jschema_registry(registry)
|
||||
@schema_extractor_registry(registry)
|
||||
def validator(value):
|
||||
if isinstance(value, str):
|
||||
value = {value: {}}
|
||||
@ -1555,12 +1560,15 @@ def validate_registry(name, registry):
|
||||
return ensure_list(validate_registry_entry(name, registry))
|
||||
|
||||
|
||||
@jschema_list
|
||||
def maybe_simple_value(*validators, **kwargs):
|
||||
key = kwargs.pop("key", CONF_VALUE)
|
||||
validator = All(*validators)
|
||||
|
||||
@schema_extractor("maybe")
|
||||
def validate(value):
|
||||
if value == SCHEMA_EXTRACT:
|
||||
return (validator, key)
|
||||
|
||||
if isinstance(value, dict) and key in value:
|
||||
return validator(value)
|
||||
return validator({key: value})
|
||||
|
@ -1,6 +1,6 @@
|
||||
"""Constants used by esphome."""
|
||||
|
||||
__version__ = "2022.6.0b4"
|
||||
__version__ = "2022.7.0-dev"
|
||||
|
||||
ALLOWED_NAME_CHARS = "abcdefghijklmnopqrstuvwxyz0123456789-_"
|
||||
|
||||
@ -905,12 +905,15 @@ DEVICE_CLASS_BATTERY = "battery"
|
||||
DEVICE_CLASS_GAS = "gas"
|
||||
DEVICE_CLASS_POWER = "power"
|
||||
# device classes of sensor component
|
||||
DEVICE_CLASS_APPARENT_POWER = "apparent_power"
|
||||
DEVICE_CLASS_AQI = "aqi"
|
||||
DEVICE_CLASS_CARBON_DIOXIDE = "carbon_dioxide"
|
||||
DEVICE_CLASS_CARBON_MONOXIDE = "carbon_monoxide"
|
||||
DEVICE_CLASS_CURRENT = "current"
|
||||
DEVICE_CLASS_DATE = "date"
|
||||
DEVICE_CLASS_DURATION = "duration"
|
||||
DEVICE_CLASS_ENERGY = "energy"
|
||||
DEVICE_CLASS_FREQUENCY = "frequency"
|
||||
DEVICE_CLASS_HUMIDITY = "humidity"
|
||||
DEVICE_CLASS_ILLUMINANCE = "illuminance"
|
||||
DEVICE_CLASS_MONETARY = "monetary"
|
||||
@ -923,6 +926,7 @@ DEVICE_CLASS_PM10 = "pm10"
|
||||
DEVICE_CLASS_PM25 = "pm25"
|
||||
DEVICE_CLASS_POWER_FACTOR = "power_factor"
|
||||
DEVICE_CLASS_PRESSURE = "pressure"
|
||||
DEVICE_CLASS_REACTIVE_POWER = "reactive_power"
|
||||
DEVICE_CLASS_SIGNAL_STRENGTH = "signal_strength"
|
||||
DEVICE_CLASS_SULPHUR_DIOXIDE = "sulphur_dioxide"
|
||||
DEVICE_CLASS_TEMPERATURE = "temperature"
|
||||
|
@ -651,7 +651,7 @@ class EsphomeCore:
|
||||
continue
|
||||
if other.repository is not None:
|
||||
if library.repository is None or other.repository == library.repository:
|
||||
# Other is using a/the same repository, takes precendence
|
||||
# Other is using a/the same repository, takes precedence
|
||||
break
|
||||
raise ValueError(
|
||||
f"Adding named Library with repository failed! Libraries {library} and {other} "
|
||||
|
@ -125,19 +125,26 @@ void IRAM_ATTR HOT Application::feed_wdt() {
|
||||
}
|
||||
void Application::reboot() {
|
||||
ESP_LOGI(TAG, "Forcing a reboot...");
|
||||
for (auto *comp : this->components_)
|
||||
comp->on_shutdown();
|
||||
for (auto it = this->components_.rbegin(); it != this->components_.rend(); ++it) {
|
||||
(*it)->on_shutdown();
|
||||
}
|
||||
arch_restart();
|
||||
}
|
||||
void Application::safe_reboot() {
|
||||
ESP_LOGI(TAG, "Rebooting safely...");
|
||||
for (auto *comp : this->components_)
|
||||
comp->on_safe_shutdown();
|
||||
for (auto *comp : this->components_)
|
||||
comp->on_shutdown();
|
||||
run_safe_shutdown_hooks();
|
||||
arch_restart();
|
||||
}
|
||||
|
||||
void Application::run_safe_shutdown_hooks() {
|
||||
for (auto it = this->components_.rbegin(); it != this->components_.rend(); ++it) {
|
||||
(*it)->on_safe_shutdown();
|
||||
}
|
||||
for (auto it = this->components_.rbegin(); it != this->components_.rend(); ++it) {
|
||||
(*it)->on_shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
void Application::calculate_looping_components_() {
|
||||
for (auto *obj : this->components_) {
|
||||
if (obj->has_overridden_loop())
|
||||
|
@ -161,14 +161,7 @@ class Application {
|
||||
|
||||
void safe_reboot();
|
||||
|
||||
void run_safe_shutdown_hooks() {
|
||||
for (auto *comp : this->components_) {
|
||||
comp->on_safe_shutdown();
|
||||
}
|
||||
for (auto *comp : this->components_) {
|
||||
comp->on_shutdown();
|
||||
}
|
||||
}
|
||||
void run_safe_shutdown_hooks();
|
||||
|
||||
uint32_t get_app_state() const { return this->app_state_; }
|
||||
|
||||
|
@ -9,7 +9,7 @@ namespace esphome {
|
||||
ESP_LOGCONFIG(TAG, prefix "%s", (pin)->dump_summary().c_str()); \
|
||||
}
|
||||
|
||||
// put GPIO flags in a namepsace to not pollute esphome namespace
|
||||
// put GPIO flags in a namespace to not pollute esphome namespace
|
||||
namespace gpio {
|
||||
|
||||
enum Flags : uint8_t {
|
||||
|
@ -258,10 +258,10 @@ inline std::string to_string(const std::string &val) { return val; }
|
||||
/// Truncate a string to a specific length.
|
||||
std::string str_truncate(const std::string &str, size_t length);
|
||||
|
||||
/// Extract the part of the string until either the first occurence of the specified character, or the end (requires str
|
||||
/// to be null-terminated).
|
||||
/// Extract the part of the string until either the first occurrence of the specified character, or the end
|
||||
/// (requires str to be null-terminated).
|
||||
std::string str_until(const char *str, char ch);
|
||||
/// Extract the part of the string until either the first occurence of the specified character, or the end.
|
||||
/// Extract the part of the string until either the first occurrence of the specified character, or the end.
|
||||
std::string str_until(const std::string &str, char ch);
|
||||
|
||||
/// Convert the string to lower case.
|
||||
@ -600,7 +600,7 @@ template<class T> class ExternalRAMAllocator {
|
||||
|
||||
ExternalRAMAllocator() = default;
|
||||
ExternalRAMAllocator(Flags flags) : flags_{flags} {}
|
||||
template<class U> constexpr ExternalRAMAllocator(const ExternalRAMAllocator<U> &other) : flags_{other.flags} {}
|
||||
template<class U> constexpr ExternalRAMAllocator(const ExternalRAMAllocator<U> &other) : flags_{other.flags_} {}
|
||||
|
||||
T *allocate(size_t n) {
|
||||
size_t size = n * sizeof(T);
|
||||
|
@ -60,7 +60,6 @@ class ProgressBar:
|
||||
sys.stderr.write(text)
|
||||
sys.stderr.flush()
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
def done(self):
|
||||
sys.stderr.write("\n")
|
||||
sys.stderr.flush()
|
||||
|
@ -296,7 +296,7 @@ _TYPE_OVERLOADS = {
|
||||
int: type("EInt", (int,), {}),
|
||||
float: type("EFloat", (float,), {}),
|
||||
str: type("EStr", (str,), {}),
|
||||
dict: type("EDict", (str,), {}),
|
||||
dict: type("EDict", (dict,), {}),
|
||||
list: type("EList", (list,), {}),
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ def patch_structhash():
|
||||
# removed/added. This might have unintended consequences, but this improves compile
|
||||
# times greatly when adding/removing components and a simple clean build solves
|
||||
# all issues
|
||||
from platformio.commands.run import helpers, command
|
||||
from platformio.run import helpers, cli
|
||||
from os.path import join, isdir, getmtime
|
||||
from os import makedirs
|
||||
|
||||
@ -39,7 +39,7 @@ def patch_structhash():
|
||||
|
||||
# pylint: disable=protected-access
|
||||
helpers.clean_build_dir = patched_clean_build_dir
|
||||
command.clean_build_dir = patched_clean_build_dir
|
||||
cli.clean_build_dir = patched_clean_build_dir
|
||||
|
||||
|
||||
IGNORE_LIB_WARNINGS = f"(?:{'|'.join(['Hash', 'Update'])})"
|
||||
|
@ -9,9 +9,9 @@ However there is a property to further disable decorator
|
||||
impact."""
|
||||
|
||||
|
||||
# This is set to true by script/build_jsonschema.py
|
||||
# This is set to true by script/build_language_schema.py
|
||||
# only, so data is collected (again functionality is not modified)
|
||||
EnableJsonSchemaCollect = False
|
||||
EnableSchemaExtraction = False
|
||||
|
||||
extended_schemas = {}
|
||||
list_schemas = {}
|
||||
@ -19,9 +19,12 @@ registry_schemas = {}
|
||||
hidden_schemas = {}
|
||||
typed_schemas = {}
|
||||
|
||||
# This key is used to generate schema files of Esphome configuration.
|
||||
SCHEMA_EXTRACT = object()
|
||||
|
||||
def jschema_extractor(validator_name):
|
||||
if EnableJsonSchemaCollect:
|
||||
|
||||
def schema_extractor(validator_name):
|
||||
if EnableSchemaExtraction:
|
||||
|
||||
def decorator(func):
|
||||
hidden_schemas[repr(func)] = validator_name
|
||||
@ -35,8 +38,8 @@ def jschema_extractor(validator_name):
|
||||
return dummy
|
||||
|
||||
|
||||
def jschema_extended(func):
|
||||
if EnableJsonSchemaCollect:
|
||||
def schema_extractor_extended(func):
|
||||
if EnableSchemaExtraction:
|
||||
|
||||
def decorate(*args, **kwargs):
|
||||
ret = func(*args, **kwargs)
|
||||
@ -49,8 +52,8 @@ def jschema_extended(func):
|
||||
return func
|
||||
|
||||
|
||||
def jschema_list(func):
|
||||
if EnableJsonSchemaCollect:
|
||||
def schema_extractor_list(func):
|
||||
if EnableSchemaExtraction:
|
||||
|
||||
def decorate(*args, **kwargs):
|
||||
ret = func(*args, **kwargs)
|
||||
@ -63,8 +66,8 @@ def jschema_list(func):
|
||||
return func
|
||||
|
||||
|
||||
def jschema_registry(registry):
|
||||
if EnableJsonSchemaCollect:
|
||||
def schema_extractor_registry(registry):
|
||||
if EnableSchemaExtraction:
|
||||
|
||||
def decorator(func):
|
||||
registry_schemas[repr(func)] = registry
|
||||
@ -78,8 +81,8 @@ def jschema_registry(registry):
|
||||
return dummy
|
||||
|
||||
|
||||
def jschema_typed(func):
|
||||
if EnableJsonSchemaCollect:
|
||||
def schema_extractor_typed(func):
|
||||
if EnableSchemaExtraction:
|
||||
|
||||
def decorate(*args, **kwargs):
|
||||
ret = func(*args, **kwargs)
|
@ -152,7 +152,6 @@ class RedirectText:
|
||||
# any caller.
|
||||
return len(s)
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
def isatty(self):
|
||||
return True
|
||||
|
||||
|
@ -2,7 +2,7 @@ import difflib
|
||||
import itertools
|
||||
|
||||
import voluptuous as vol
|
||||
from esphome.jsonschema import jschema_extended
|
||||
from esphome.schema_extractors import schema_extractor_extended
|
||||
|
||||
|
||||
class ExtraKeysInvalid(vol.Invalid):
|
||||
@ -203,7 +203,7 @@ class _Schema(vol.Schema):
|
||||
self._extra_schemas.append(validator)
|
||||
return self
|
||||
|
||||
@jschema_extended
|
||||
@schema_extractor_extended
|
||||
# pylint: disable=signature-differs
|
||||
def extend(self, *schemas, **kwargs):
|
||||
extra = kwargs.pop("extra", None)
|
||||
|
@ -343,7 +343,7 @@ def wizard(path):
|
||||
sleep(1)
|
||||
|
||||
safe_print_step(3, WIFI_BIG)
|
||||
safe_print("In this step, I'm going to create the configuration for " "WiFi.")
|
||||
safe_print("In this step, I'm going to create the configuration for WiFi.")
|
||||
safe_print()
|
||||
sleep(1)
|
||||
safe_print(
|
||||
|
@ -40,7 +40,7 @@ lib_deps =
|
||||
wjtje/qr-code-generator-library@1.7.0 ; qr_code
|
||||
functionpointer/arduino-MLX90393@1.0.0 ; mlx90393
|
||||
; This is using the repository until a new release is published to PlatformIO
|
||||
https://github.com/Sensirion/arduino-gas-index-algorithm.git ; Sensirion Gas Index Algorithm Arduino Library
|
||||
https://github.com/Sensirion/arduino-gas-index-algorithm.git#3.2.1 ; Sensirion Gas Index Algorithm Arduino Library
|
||||
build_flags =
|
||||
-DESPHOME_LOG_LEVEL=ESPHOME_LOG_LEVEL_VERY_VERBOSE
|
||||
src_filter =
|
||||
|
1
pylintrc
1
pylintrc
@ -24,7 +24,6 @@ disable=
|
||||
undefined-loop-variable,
|
||||
useless-object-inheritance,
|
||||
stop-iteration-return,
|
||||
no-self-use,
|
||||
import-outside-toplevel,
|
||||
# Broken
|
||||
unsupported-membership-test,
|
||||
|
@ -1,17 +1,17 @@
|
||||
voluptuous==0.13.1
|
||||
PyYAML==6.0
|
||||
paho-mqtt==1.6.1
|
||||
colorama==0.4.4
|
||||
colorama==0.4.5
|
||||
tornado==6.1
|
||||
tzlocal==4.2 # from time
|
||||
tzdata>=2021.1 # from time
|
||||
pyserial==3.5
|
||||
platformio==5.2.5 # When updating platformio, also update Dockerfile
|
||||
platformio==6.0.2 # When updating platformio, also update Dockerfile
|
||||
esptool==3.3.1
|
||||
click==8.1.3
|
||||
esphome-dashboard==20220508.0
|
||||
aioesphomeapi==10.8.2
|
||||
zeroconf==0.38.4
|
||||
aioesphomeapi==10.10.0
|
||||
zeroconf==0.38.7
|
||||
|
||||
# esp-idf requires this, but doesn't bundle it by default
|
||||
# https://github.com/espressif/esp-idf/blob/220590d599e134d7a5e7f1e683cc4550349ffbf8/requirements.txt#L24
|
||||
|
@ -1,13 +1,13 @@
|
||||
pylint==2.13.9
|
||||
pylint==2.14.4
|
||||
flake8==4.0.1
|
||||
black==22.3.0
|
||||
pyupgrade==2.32.1
|
||||
black==22.6.0 # also change in .pre-commit-config.yaml when updating
|
||||
pyupgrade==2.34.0 # also change in .pre-commit-config.yaml when updating
|
||||
pre-commit
|
||||
|
||||
# Unit tests
|
||||
pytest==7.1.1
|
||||
pytest-cov==3.0.0
|
||||
pytest-mock==3.7.0
|
||||
pytest-mock==3.8.1
|
||||
pytest-asyncio==0.18.3
|
||||
asyncmock==0.4.2
|
||||
hypothesis==5.49.0
|
||||
|
@ -1,828 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from esphome.cpp_generator import MockObj
|
||||
import json
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
import voluptuous as vol
|
||||
|
||||
# NOTE: Cannot import other esphome components globally as a modification in jsonschema
|
||||
# is needed before modules are loaded
|
||||
import esphome.jsonschema as ejs
|
||||
|
||||
ejs.EnableJsonSchemaCollect = True
|
||||
|
||||
DUMP_COMMENTS = False
|
||||
|
||||
JSC_ACTION = "automation.ACTION_REGISTRY"
|
||||
JSC_ALLOF = "allOf"
|
||||
JSC_ANYOF = "anyOf"
|
||||
JSC_COMMENT = "$comment"
|
||||
JSC_CONDITION = "automation.CONDITION_REGISTRY"
|
||||
JSC_DESCRIPTION = "description"
|
||||
JSC_ONEOF = "oneOf"
|
||||
JSC_PROPERTIES = "properties"
|
||||
JSC_REF = "$ref"
|
||||
|
||||
# this should be required, but YAML Language server completion does not work properly if required are specified.
|
||||
# still needed for other features / checks
|
||||
JSC_REQUIRED = "required_"
|
||||
|
||||
SIMPLE_AUTOMATION = "simple_automation"
|
||||
|
||||
schema_names = {}
|
||||
schema_registry = {}
|
||||
components = {}
|
||||
modules = {}
|
||||
registries = []
|
||||
pending_refs = []
|
||||
|
||||
definitions = {}
|
||||
base_props = {}
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"--output", default="esphome.json", help="Output filename", type=os.path.abspath
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
def get_ref(definition):
|
||||
return {JSC_REF: "#/definitions/" + definition}
|
||||
|
||||
|
||||
def is_ref(jschema):
|
||||
return isinstance(jschema, dict) and JSC_REF in jschema
|
||||
|
||||
|
||||
def unref(jschema):
|
||||
return definitions.get(jschema[JSC_REF][len("#/definitions/") :])
|
||||
|
||||
|
||||
def add_definition_array_or_single_object(ref):
|
||||
return {JSC_ANYOF: [{"type": "array", "items": ref}, ref]}
|
||||
|
||||
|
||||
def add_core():
|
||||
from esphome.core.config import CONFIG_SCHEMA
|
||||
|
||||
base_props["esphome"] = get_jschema("esphome", CONFIG_SCHEMA)
|
||||
|
||||
|
||||
def add_buses():
|
||||
# uart
|
||||
from esphome.components.uart import UART_DEVICE_SCHEMA
|
||||
|
||||
get_jschema("uart_bus", UART_DEVICE_SCHEMA)
|
||||
|
||||
# spi
|
||||
from esphome.components.spi import spi_device_schema
|
||||
|
||||
get_jschema("spi_bus", spi_device_schema(False))
|
||||
|
||||
# i2c
|
||||
from esphome.components.i2c import i2c_device_schema
|
||||
|
||||
get_jschema("i2c_bus", i2c_device_schema(None))
|
||||
|
||||
|
||||
def add_registries():
|
||||
for domain, module in modules.items():
|
||||
add_module_registries(domain, module)
|
||||
|
||||
|
||||
def add_module_registries(domain, module):
|
||||
from esphome.util import Registry
|
||||
|
||||
for c in dir(module):
|
||||
m = getattr(module, c)
|
||||
if isinstance(m, Registry):
|
||||
add_registry(domain + "." + c, m)
|
||||
|
||||
|
||||
def add_registry(registry_name, registry):
|
||||
validators = []
|
||||
registries.append((registry, registry_name))
|
||||
for name in registry.keys():
|
||||
schema = get_jschema(str(name), registry[name].schema, create_return_ref=False)
|
||||
if not schema:
|
||||
schema = {"type": "null"}
|
||||
o_schema = {"type": "object", JSC_PROPERTIES: {name: schema}}
|
||||
o_schema = create_ref(
|
||||
registry_name + "-" + name, str(registry[name].schema) + "x", o_schema
|
||||
)
|
||||
validators.append(o_schema)
|
||||
definitions[registry_name] = {JSC_ANYOF: validators}
|
||||
|
||||
|
||||
def get_registry_ref(registry):
|
||||
# we don't know yet
|
||||
ref = {JSC_REF: "pending"}
|
||||
pending_refs.append((ref, registry))
|
||||
return ref
|
||||
|
||||
|
||||
def solve_pending_refs():
|
||||
for ref, registry in pending_refs:
|
||||
for registry_match, name in registries:
|
||||
if registry == registry_match:
|
||||
ref[JSC_REF] = "#/definitions/" + name
|
||||
|
||||
|
||||
def add_module_schemas(name, module):
|
||||
import esphome.config_validation as cv
|
||||
|
||||
for c in dir(module):
|
||||
v = getattr(module, c)
|
||||
if isinstance(v, cv.Schema):
|
||||
get_jschema(name + "." + c, v)
|
||||
|
||||
|
||||
def get_dirs():
|
||||
from esphome.loader import CORE_COMPONENTS_PATH
|
||||
|
||||
dir_names = [
|
||||
d
|
||||
for d in os.listdir(CORE_COMPONENTS_PATH)
|
||||
if not d.startswith("__")
|
||||
and os.path.isdir(os.path.join(CORE_COMPONENTS_PATH, d))
|
||||
]
|
||||
return dir_names
|
||||
|
||||
|
||||
def get_logger_tags():
|
||||
from esphome.loader import CORE_COMPONENTS_PATH
|
||||
import glob
|
||||
|
||||
pattern = re.compile(r'^static const char(\*\s|\s\*)TAG = "(\w.*)";', re.MULTILINE)
|
||||
tags = [
|
||||
"app",
|
||||
"component",
|
||||
"esphal",
|
||||
"helpers",
|
||||
"preferences",
|
||||
"scheduler",
|
||||
"api.service",
|
||||
]
|
||||
for x in os.walk(CORE_COMPONENTS_PATH):
|
||||
for y in glob.glob(os.path.join(x[0], "*.cpp")):
|
||||
with open(y) as file:
|
||||
data = file.read()
|
||||
match = pattern.search(data)
|
||||
if match:
|
||||
tags.append(match.group(2))
|
||||
return tags
|
||||
|
||||
|
||||
def load_components():
|
||||
import esphome.config_validation as cv
|
||||
from esphome.config import get_component
|
||||
|
||||
modules["cv"] = cv
|
||||
from esphome import automation
|
||||
|
||||
modules["automation"] = automation
|
||||
|
||||
for domain in get_dirs():
|
||||
components[domain] = get_component(domain)
|
||||
modules[domain] = components[domain].module
|
||||
|
||||
|
||||
def add_components():
|
||||
from esphome.config import get_platform
|
||||
|
||||
for domain, c in components.items():
|
||||
if c.is_platform_component:
|
||||
# this is a platform_component, e.g. binary_sensor
|
||||
platform_schema = [
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {"platform": {"type": "string"}},
|
||||
}
|
||||
]
|
||||
if domain not in ("output", "display"):
|
||||
# output bases are either FLOAT or BINARY so don't add common base for this
|
||||
# display bases are either simple or FULL so don't add common base for this
|
||||
platform_schema = [
|
||||
{"$ref": f"#/definitions/{domain}.{domain.upper()}_SCHEMA"}
|
||||
] + platform_schema
|
||||
|
||||
base_props[domain] = {"type": "array", "items": {"allOf": platform_schema}}
|
||||
|
||||
add_module_registries(domain, c.module)
|
||||
add_module_schemas(domain, c.module)
|
||||
|
||||
# need first to iterate all platforms then iterate components
|
||||
# a platform component can have other components as properties,
|
||||
# e.g. climate components usually have a temperature sensor
|
||||
|
||||
for domain, c in components.items():
|
||||
if (c.config_schema is not None) or c.is_platform_component:
|
||||
if c.is_platform_component:
|
||||
platform_schema = base_props[domain]["items"]["allOf"]
|
||||
for platform in get_dirs():
|
||||
p = get_platform(domain, platform)
|
||||
if p is not None:
|
||||
# this is a platform element, e.g.
|
||||
# - platform: gpio
|
||||
schema = get_jschema(
|
||||
domain + "-" + platform,
|
||||
p.config_schema,
|
||||
create_return_ref=False,
|
||||
)
|
||||
if (
|
||||
schema
|
||||
): # for invalid schemas, None is returned thus is deprecated
|
||||
platform_schema.append(
|
||||
{
|
||||
"if": {
|
||||
JSC_PROPERTIES: {
|
||||
"platform": {"const": platform}
|
||||
}
|
||||
},
|
||||
"then": schema,
|
||||
}
|
||||
)
|
||||
|
||||
elif c.config_schema is not None:
|
||||
# adds root components which are not platforms, e.g. api: logger:
|
||||
if c.multi_conf:
|
||||
schema = get_jschema(domain, c.config_schema)
|
||||
schema = add_definition_array_or_single_object(schema)
|
||||
else:
|
||||
schema = get_jschema(domain, c.config_schema, False)
|
||||
base_props[domain] = schema
|
||||
|
||||
|
||||
def get_automation_schema(name, vschema):
|
||||
from esphome.automation import AUTOMATION_SCHEMA
|
||||
|
||||
# ensure SIMPLE_AUTOMATION
|
||||
if SIMPLE_AUTOMATION not in definitions:
|
||||
simple_automation = add_definition_array_or_single_object(get_ref(JSC_ACTION))
|
||||
simple_automation[JSC_ANYOF].append(
|
||||
get_jschema(AUTOMATION_SCHEMA.__module__, AUTOMATION_SCHEMA)
|
||||
)
|
||||
|
||||
definitions[schema_names[str(AUTOMATION_SCHEMA)]][JSC_PROPERTIES][
|
||||
"then"
|
||||
] = add_definition_array_or_single_object(get_ref(JSC_ACTION))
|
||||
definitions[SIMPLE_AUTOMATION] = simple_automation
|
||||
|
||||
extra_vschema = None
|
||||
if AUTOMATION_SCHEMA == ejs.extended_schemas[str(vschema)][0]:
|
||||
extra_vschema = ejs.extended_schemas[str(vschema)][1]
|
||||
|
||||
if not extra_vschema:
|
||||
return get_ref(SIMPLE_AUTOMATION)
|
||||
|
||||
# add then property
|
||||
extra_jschema = get_jschema(name, extra_vschema, False)
|
||||
|
||||
if is_ref(extra_jschema):
|
||||
return extra_jschema
|
||||
|
||||
if not JSC_PROPERTIES in extra_jschema:
|
||||
# these are interval: and exposure_notifications, featuring automations a component
|
||||
extra_jschema[JSC_ALLOF][0][JSC_PROPERTIES][
|
||||
"then"
|
||||
] = add_definition_array_or_single_object(get_ref(JSC_ACTION))
|
||||
ref = create_ref(name, extra_vschema, extra_jschema)
|
||||
return add_definition_array_or_single_object(ref)
|
||||
|
||||
# automations can be either
|
||||
# * a single action,
|
||||
# * an array of action,
|
||||
# * an object with automation's schema and a then key
|
||||
# with again a single action or an array of actions
|
||||
|
||||
if len(extra_jschema[JSC_PROPERTIES]) == 0:
|
||||
return get_ref(SIMPLE_AUTOMATION)
|
||||
|
||||
extra_jschema[JSC_PROPERTIES]["then"] = add_definition_array_or_single_object(
|
||||
get_ref(JSC_ACTION)
|
||||
)
|
||||
# if there is a required element in extra_jschema then this automation does not support
|
||||
# directly a list of actions
|
||||
if JSC_REQUIRED in extra_jschema:
|
||||
return create_ref(name, extra_vschema, extra_jschema)
|
||||
|
||||
jschema = add_definition_array_or_single_object(get_ref(JSC_ACTION))
|
||||
jschema[JSC_ANYOF].append(extra_jschema)
|
||||
|
||||
return create_ref(name, extra_vschema, jschema)
|
||||
|
||||
|
||||
def get_entry(parent_key, vschema):
    """Convert a single voluptuous value-validator into a JSON schema entry.

    Dispatches on the runtime type / repr of ``vschema`` against the various
    extraction registries (``schema_registry``, ``ejs.registry_schemas``,
    ``ejs.list_schemas``, ``ejs.typed_schemas``, ``ejs.hidden_schemas``).

    :param parent_key: path of the key owning this value, used to name
        generated definitions (e.g. ``"sensor-filters"``).
    :param vschema: the voluptuous validator (schema, list, callable, ...).
    :return: a dict JSON-schema fragment, or ``None`` for deprecated options
        that must not appear as valid schema.
    """
    from esphome.voluptuous_schema import _Schema as schema_type

    entry = {}
    # annotate schema validator info (debug aid, only when DUMP_COMMENTS is on)
    if DUMP_COMMENTS:
        entry[JSC_COMMENT] = "entry: " + parent_key + "/" + str(vschema)

    if isinstance(vschema, dict):
        # NOTE(review): placeholder marker dict — unclear which input reaches
        # this branch; confirm whether it is dead code or needs real handling.
        entry = {"what": "is_this"}
    elif isinstance(vschema, list):
        # A bare list validator means "array of the first element's schema".
        ref = get_jschema(parent_key + "[]", vschema[0])
        entry = {"type": "array", "items": ref}
    elif isinstance(vschema, schema_type) and hasattr(vschema, "schema"):
        entry = get_jschema(parent_key, vschema, False)
    elif hasattr(vschema, "validators"):
        # vol.All-like composite validator.
        entry = get_jschema(parent_key, vschema, False)
    elif vschema in schema_registry:
        # Known primitive validator (cv.boolean, cv.int_, ...); copy so callers
        # can annotate the entry without mutating the registry.
        entry = schema_registry[vschema].copy()
    elif str(vschema) in ejs.registry_schemas:
        entry = get_registry_ref(ejs.registry_schemas[str(vschema)])
    elif str(vschema) in ejs.list_schemas:
        # ensure_list-style: accept either one item or an array of items.
        ref = get_jschema(parent_key, ejs.list_schemas[str(vschema)][0])
        entry = {JSC_ANYOF: [ref, {"type": "array", "items": ref}]}
    elif str(vschema) in ejs.typed_schemas:
        # typed_schema: a "type" discriminator selects one of several schemas,
        # expressed as JSON-schema if/then conditionals under allOf.
        schema_types = [{"type": "object", "properties": {"type": {"type": "string"}}}]
        entry = {"allOf": schema_types}
        for schema_key, vschema_type in ejs.typed_schemas[str(vschema)][0][0].items():
            schema_types.append(
                {
                    "if": {"properties": {"type": {"const": schema_key}}},
                    "then": get_jschema(f"{parent_key}-{schema_key}", vschema_type),
                }
            )

    elif str(vschema) in ejs.hidden_schemas:
        # get the schema from the automation schema
        # (note: `type` shadows the builtin here; kept as-is in this doc pass)
        type = ejs.hidden_schemas[str(vschema)]
        # Calling the validator with the extractor sentinel yields the wrapped
        # inner schema instead of performing validation.
        inner_vschema = vschema(ejs.jschema_extractor)
        if type == "automation":
            entry = get_automation_schema(parent_key, inner_vschema)
        elif type == "maybe":
            entry = get_jschema(parent_key, inner_vschema)
        elif type == "one_of":
            entry = {"enum": list(inner_vschema)}
        elif type == "enum":
            entry = {"enum": list(inner_vschema.keys())}
        elif type == "effects":
            # Like list schema but subset from list.
            subset_list = inner_vschema[0]
            # get_jschema('strobex', registry['strobe'].schema)
            registry_schemas = []
            for name in subset_list:
                registry_schemas.append(get_ref("light.EFFECTS_REGISTRY-" + name))

            entry = {
                JSC_ANYOF: [{"type": "array", "items": {JSC_ANYOF: registry_schemas}}]
            }

        else:
            raise ValueError("Unknown extracted schema type")
    elif str(vschema).startswith("<function invalid."):
        # deprecated options, don't list as valid schema
        return None
    else:
        # everything else just accept string and let ESPHome validate
        try:
            from esphome.core import ID
            from esphome.automation import Trigger, Automation

            # Probe the validator with None to learn what it produces.
            v = vschema(None)
            if isinstance(v, ID):
                # Trigger/Automation ids (except scripts and switches) are
                # internal and must not be offered as configurable options.
                if (
                    v.type.base != "script::Script"
                    and v.type.base != "switch_::Switch"
                    and (v.type.inherits_from(Trigger) or v.type == Automation)
                ):
                    return None
                entry = {"type": "string", "id_type": v.type.base}
            elif isinstance(v, str):
                entry = {"type": "string"}
            elif isinstance(v, list):
                entry = {"type": "array"}
            else:
                entry = default_schema()
        except:
            # Probing with None commonly raises vol.Invalid; fall back to the
            # permissive schema and let ESPHome do the real validation.
            entry = default_schema()

    return entry
|
||||
|
||||
|
||||
def default_schema():
    """Return a permissive JSON schema fragment that accepts any YAML value.

    Used as the fallback whenever a more specific schema cannot be derived
    from a voluptuous validator. A fresh dict/list is built on every call so
    callers may mutate the result safely.
    """
    accepted_types = ["null", "object", "string", "array", "number"]
    return {"type": accepted_types}
|
||||
|
||||
|
||||
def is_default_schema(jschema):
    """Return True when *jschema* is (or refers, via $ref, to) the permissive
    ``default_schema()`` fragment; False for None, dangling refs, or anything
    more specific."""
    if jschema is None:
        return False
    # Follow a $ref indirection before comparing against the default shape.
    if is_ref(jschema):
        target = unref(jschema)
        return bool(target) and is_default_schema(target)
    return jschema.get("type") == default_schema()["type"]
|
||||
|
||||
|
||||
def get_jschema(path, vschema, create_return_ref=True):
    """Convert a voluptuous schema into a JSON schema fragment, with caching.

    :param path: dotted/hyphenated name used to register a new definition.
    :param vschema: the voluptuous validator to convert.
    :param create_return_ref: when True (default) register the result under
        ``path`` in the shared definitions and return a ``$ref`` to it;
        when False return the inline fragment.
    :return: a ``$ref`` dict, an inline schema dict, or ``None`` when the
        validator maps to a deprecated/invalid option.
    """
    # Reuse an already-registered definition for an identical validator
    # (keyed by its normalized string form, see get_schema_str).
    name = schema_names.get(get_schema_str(vschema))
    if name:
        return get_ref(name)

    jschema = convert_schema(path, vschema)

    if jschema is None:
        # Deprecated options are dropped entirely.
        return None

    if is_ref(jschema):
        # this can happen when returned extended
        # schemas where all properties found in previous extended schema
        return jschema

    if not create_return_ref:
        return jschema

    return create_ref(path, vschema, jschema)
|
||||
|
||||
|
||||
def get_schema_str(vschema):
    """Return the canonical string form of *vschema*, used as a dedupe key.

    Hack on cv.use_id: its validator repr embeds a memory address, which would
    make otherwise-identical partial schemas (e.g. i2c) compare different, so
    the address is normalized away. In the future this can be improved by
    tracking which type is required by the id (not expressible in jsonschema
    today) so completion can list valid ids only.
    """
    raw = str(vschema)
    return re.sub(
        "function use_id.<locals>.validator at 0[xX][0-9a-fA-F]+>",
        "function use_id.<locals>.validator<>",
        raw,
    )
|
||||
|
||||
|
||||
def create_ref(name, vschema, jschema):
    """Register *jschema* under *name* in the shared definitions table and
    return a ``$ref`` pointing at it.

    :raises ValueError: if *jschema* is None, or *name* is already registered.
    """
    if jschema is None:
        raise ValueError("Cannot create a ref with null jschema for " + name)
    if name in schema_names:
        raise ValueError("Not supported")

    # Remember which definition serves this validator so that identical
    # validators encountered later resolve to the same ref (see get_jschema).
    schema_names[get_schema_str(vschema)] = name
    definitions[name] = jschema
    return get_ref(name)
|
||||
|
||||
|
||||
def get_all_properties(jschema):
    """Collect every property name declared by *jschema*.

    Follows ``$ref`` indirections and recurses into ``allOf`` / ``anyOf``
    combinator arrays.

    :param jschema: a JSON schema fragment (dict).
    :return: list of property-name strings (may contain duplicates from
        overlapping combinator members).
    """
    if JSC_PROPERTIES in jschema:
        return list(jschema[JSC_PROPERTIES].keys())
    if is_ref(jschema):
        return get_all_properties(unref(jschema))
    # Fix: a leaf schema with neither allOf nor anyOf previously left arr as
    # None and crashed with "TypeError: 'NoneType' object is not iterable";
    # treat such a schema as declaring no properties.
    arr = jschema.get(JSC_ALLOF, jschema.get(JSC_ANYOF)) or []
    props = []
    for member in arr:
        props = props + get_all_properties(member)

    return props
|
||||
|
||||
|
||||
def merge(arr, element):
    """Merge *element* into the allOf array *arr* in place.

    arr is an array of dicts; dicts can have keys like properties, $ref,
    required:[], etc. element is a single dict which might have several keys.
    The result should be an array with only one element containing properties,
    required, etc., and other elements for needed $ref entries.
    NOTE: json schema supports allOf with properties spread over different
    elements, but that makes it complex to later add docs to the schema.

    :param arr: list of JSON-schema dicts (an allOf body); mutated in place.
    :param element: single JSON-schema dict to fold in.
    """
    for k, v in element.items():
        if k == JSC_PROPERTIES:
            props_found = False
            for a_dict in arr:
                if JSC_PROPERTIES in a_dict:
                    # Found an existing properties holder: fold ours into it.
                    arr_props = a_dict[JSC_PROPERTIES]
                    for v_k, v_v in v.items():
                        arr_props[v_k] = v_v  # add or overwrite
                    props_found = True
            if not props_found:
                # No element carries properties yet; append element wholesale.
                arr.append(element)
        elif k == JSC_REF:
            # Only append the $ref if an identical one is not already present.
            ref_found = False
            for a_dict in arr:
                if k in a_dict and a_dict[k] == v:
                    ref_found = True
                    # NOTE(review): `continue` keeps scanning after a match;
                    # `break` would be behaviorally equivalent and cheaper —
                    # confirm before changing.
                    continue
            if not ref_found:
                arr.append(element)
        else:
            # TODO: Required might require special handling
            pass
|
||||
|
||||
|
||||
def convert_schema(path, vschema, un_extend=True):
    """Recursively convert a voluptuous schema to an inline JSON schema dict.

    :param path: name path used when registering nested definitions.
    :param vschema: the voluptuous schema / validator to convert.
    :param un_extend: when True, resolve cv.Schema.extend chains found in
        ``ejs.extended_schemas`` by converting and merging both sides.
    :return: a JSON schema dict ({} for unconvertible inputs such as MockObj).
    """
    import esphome.config_validation as cv

    # analyze input key, if it is not a Required or Optional, then it is an array
    output = {}

    # Hidden (wrapped) schemas: unwrap via the extractor sentinel first.
    if str(vschema) in ejs.hidden_schemas:
        if ejs.hidden_schemas[str(vschema)] == "automation":
            vschema = vschema(ejs.jschema_extractor)
            jschema = get_jschema(path, vschema, True)
            return add_definition_array_or_single_object(jschema)
        else:
            vschema = vschema(ejs.jschema_extractor)

    if un_extend:
        extended = ejs.extended_schemas.get(str(vschema))
        if extended:
            lhs = get_jschema(path, extended[0], False)

            # The midea actions are extending an empty schema (resulted in the templatize not templatizing anything)
            # this causes a recursion in that this extended looks the same in extended schema as the extended[1]
            if ejs.extended_schemas.get(str(vschema)) == ejs.extended_schemas.get(
                str(extended[1])
            ):
                assert path.startswith("midea_ac")
                return convert_schema(path, extended[1], False)

            rhs = get_jschema(path, extended[1], False)

            # check if we are not merging properties which are already in base component
            lprops = get_all_properties(lhs)
            rprops = get_all_properties(rhs)

            # One side subsumes the other: no merge needed.
            if all(item in lprops for item in rprops):
                return lhs
            if all(item in rprops for item in lprops):
                return rhs

            # merge both sides into a single allOf array (see merge()).
            if JSC_ALLOF in lhs and JSC_ALLOF in rhs:
                output = lhs
                for k in rhs[JSC_ALLOF]:
                    merge(output[JSC_ALLOF], k)
            elif JSC_ALLOF in lhs:
                output = lhs
                merge(output[JSC_ALLOF], rhs)
            elif JSC_ALLOF in rhs:
                output = rhs
                merge(output[JSC_ALLOF], lhs)
            else:
                output = {JSC_ALLOF: [lhs]}
                merge(output[JSC_ALLOF], rhs)

            return output

    # When schema contains all, all also has a schema which points
    # back to the containing schema

    if isinstance(vschema, MockObj):
        return output

    # Unwrap nested .schema layers until we reach validators or a raw mapping.
    while hasattr(vschema, "schema") and not hasattr(vschema, "validators"):
        vschema = vschema.schema

    if hasattr(vschema, "validators"):
        output = default_schema()
        for v in vschema.validators:
            if v:
                # we should take the valid schema,
                # commonly all is used to validate a schema, and then a function which
                # is not a schema es also given, get_schema will then return a default_schema()
                if v == dict:
                    continue  # this is a dict in the SCHEMA of packages
                val_schema = get_jschema(path, v, False)
                if is_default_schema(val_schema):
                    if not output:
                        output = val_schema
                else:
                    if is_default_schema(output):
                        output = val_schema
                    else:
                        # Both specific: shallow-merge, later keys win.
                        output = {**output, **val_schema}
        return output

    if not vschema:
        return output

    if not hasattr(vschema, "keys"):
        # Not a mapping: treat as a single value validator.
        return get_entry(path, vschema)

    key = list(vschema.keys())[0]

    # used for platformio_options in core_config
    # pylint: disable=comparison-with-callable
    if key == cv.string_strict:
        output["type"] = "object"
        return output

    props = output[JSC_PROPERTIES] = {}
    required = []

    output["type"] = ["object", "null"]
    if DUMP_COMMENTS:
        output[JSC_COMMENT] = "converted: " + path + "/" + str(vschema)

    if path == "logger-logs":
        # Special case: logger-logs keys are the component TAG names, each
        # accepting one of the fixed log levels.
        tags = get_logger_tags()
        for k in tags:
            props[k] = {
                "enum": [
                    "NONE",
                    "ERROR",
                    "WARN",
                    "INFO",
                    "DEBUG",
                    "VERBOSE",
                    "VERY_VERBOSE",
                ]
            }

    else:
        for k in vschema:
            if str(k).startswith("<function"):
                # generate all logger tags

                # TODO handle key functions

                continue

            v = vschema[k]
            prop = {}

            if isinstance(v, vol.Schema):
                prop = get_jschema(path + "-" + str(k), v.schema)
            elif hasattr(v, "validators"):
                prop = convert_schema(path + "-" + str(k), v, False)
            else:
                prop = get_entry(path + "-" + str(k), v)

            if prop:  # Deprecated (cv.Invalid) properties not added
                props[str(k)] = prop
                # TODO: see required, sometimes completions doesn't show up because of this...
                if isinstance(k, cv.Required):
                    required.append(str(k))
                try:
                    # "..." (vol.UNDEFINED repr) means no default was declared.
                    if str(k.default) != "...":
                        default_value = k.default()
                        # Yaml validator fails if `"default": null` ends up in the json schema
                        if default_value is not None:
                            if prop["type"] == "string":
                                default_value = str(default_value)
                            prop["default"] = default_value
                except:
                    # Best-effort: keys without a callable default are skipped.
                    pass

    if len(required) > 0:
        output[JSC_REQUIRED] = required
    return output
|
||||
|
||||
|
||||
def add_pin_schema():
    """Register all schemas exported by esphome.pins under the "PIN" key."""
    from esphome import pins

    add_module_schemas("PIN", pins)
|
||||
|
||||
|
||||
def add_pin_registry():
    """Add JSON schemas for pins provided via the pin schema registry.

    Covers pcf8574, mcp23xxx and other port expanders which add gpio
    registers. ESPHome validates a pin schema when it finds its key in the pin
    configuration; that key is added as required in jsonschema, and all
    options are part of a oneOf section so only one is selected. The internal
    schema additionally requires "number".

    Mutates the module-level ``definitions`` table in place.
    """
    from esphome import pins

    pin_registry = pins.PIN_SCHEMA_REGISTRY
    assert len(pin_registry) > 0

    for mode in ("INPUT", "OUTPUT"):
        schema_name = f"PIN.GPIO_FULL_{mode}_PIN_SCHEMA"

        # TODO: get pin definitions properly
        if schema_name not in definitions:
            definitions[schema_name] = {"type": ["object", "null"], JSC_PROPERTIES: {}}

        # NOTE: `internal` aliases the same dict also stored under
        # PIN.{mode}_INTERNAL, so the mutations below affect both names until
        # schema_name is rebound to the oneOf wrapper further down.
        internal = definitions[schema_name]
        definitions[schema_name]["additionalItems"] = False
        definitions[f"PIN.{mode}_INTERNAL"] = internal
        internal[JSC_PROPERTIES]["number"] = {"type": ["number", "string"]}
        schemas = [get_ref(f"PIN.{mode}_INTERNAL")]
        schemas[0]["required"] = ["number"]
        # accept string and object, for internal shorthand pin IO:
        definitions[schema_name] = {"oneOf": schemas, "type": ["string", "object"]}

        for k, v in pin_registry.items():
            if isinstance(v[1], vol.validators.All):
                pin_jschema = get_jschema(f"PIN.{mode}_" + k, v[1])
                if unref(pin_jschema):
                    # Selecting this expander's schema requires its key.
                    pin_jschema["required"] = [k]
                # Appending to `schemas` extends the oneOf stored above.
                schemas.append(pin_jschema)
|
||||
|
||||
|
||||
def dump_schema():
    """Build the complete ESPHome JSON schema document and write it to
    ``args.output``.

    Seeds ``schema_registry`` with mappings from primitive cv validators to
    JSON types, registers module/pin/registry schemas, assembles the
    draft-07 document and writes it only when its content changed.
    """
    import esphome.config_validation as cv

    from esphome import automation
    from esphome.automation import validate_potentially_and_condition
    from esphome import pins
    from esphome.core import CORE
    from esphome.helpers import write_file_if_changed
    from esphome.components import remote_base

    # The root directory of the repo
    root = Path(__file__).parent.parent

    # Fake some directory so that get_component works
    CORE.config_path = str(root)

    file_path = args.output

    schema_registry[cv.boolean] = {"type": "boolean"}

    # Numeric validators all map to a plain JSON "number".
    # (fix: cv.positive_float was listed twice; the duplicate re-registration
    # was redundant and has been removed)
    for v in [
        cv.int_,
        cv.int_range,
        cv.positive_int,
        cv.float_,
        cv.positive_float,
        cv.positive_not_null_int,
        cv.negative_one_to_one_float,
        cv.port,
    ]:
        schema_registry[v] = {"type": "number"}

    # String-like validators (including time periods, which YAML users write
    # as strings like "60s") map to JSON "string".
    for v in [
        cv.string,
        cv.string_strict,
        cv.valid_name,
        cv.hex_int,
        cv.hex_int_range,
        pins.gpio_output_pin_schema,
        pins.gpio_input_pin_schema,
        pins.gpio_input_pullup_pin_schema,
        cv.float_with_unit,
        cv.subscribe_topic,
        cv.publish_topic,
        cv.mqtt_payload,
        cv.ssid,
        cv.percentage_int,
        cv.percentage,
        cv.possibly_negative_percentage,
        cv.positive_time_period,
        cv.positive_time_period_microseconds,
        cv.positive_time_period_milliseconds,
        cv.positive_time_period_minutes,
        cv.positive_time_period_seconds,
    ]:
        schema_registry[v] = {"type": "string"}

    schema_registry[validate_potentially_and_condition] = get_ref("condition_list")

    for v in [pins.gpio_input_pin_schema, pins.gpio_input_pullup_pin_schema]:
        schema_registry[v] = get_ref("PIN.GPIO_FULL_INPUT_PIN_SCHEMA")
    # NOTE(review): gpio_input_pin_schema also appears in the loop below, so
    # this next assignment overwrites its FULL ref with the INTERNAL one
    # (same for the output loops) — confirm this last-wins order is intended.
    for v in [pins.internal_gpio_input_pin_schema, pins.gpio_input_pin_schema]:
        schema_registry[v] = get_ref("PIN.INPUT_INTERNAL")

    for v in [pins.gpio_output_pin_schema, pins.internal_gpio_output_pin_schema]:
        schema_registry[v] = get_ref("PIN.GPIO_FULL_OUTPUT_PIN_SCHEMA")
    for v in [pins.internal_gpio_output_pin_schema, pins.gpio_output_pin_schema]:
        schema_registry[v] = get_ref("PIN.OUTPUT_INTERNAL")

    add_module_schemas("CONFIG", cv)
    get_jschema("POLLING_COMPONENT", cv.polling_component_schema("60s"))

    add_pin_schema()

    add_module_schemas("REMOTE_BASE", remote_base)
    add_module_schemas("AUTOMATION", automation)

    load_components()
    add_registries()

    # A condition list accepts a single condition or an array of conditions.
    definitions["condition_list"] = {
        JSC_ONEOF: [
            {"type": "array", "items": get_ref(JSC_CONDITION)},
            get_ref(JSC_CONDITION),
        ]
    }

    output = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "type": "object",
        "definitions": definitions,
        JSC_PROPERTIES: base_props,
    }

    add_core()
    add_buses()
    add_components()

    add_registries()  # need second pass, e.g. climate.pid.autotune
    add_pin_registry()
    solve_pending_refs()

    # Avoid touching the file (and downstream rebuilds) when nothing changed.
    write_file_if_changed(file_path, json.dumps(output))
    print(f"Wrote {file_path}")
|
||||
|
||||
|
||||
# Script entry point: build and write the JSON schema when executed directly.
dump_schema()
|
@ -1,18 +1,19 @@
|
||||
import inspect
|
||||
import json
|
||||
import argparse
|
||||
from operator import truediv
|
||||
import os
|
||||
import glob
|
||||
import re
|
||||
import voluptuous as vol
|
||||
|
||||
# NOTE: Cannot import other esphome components globally as a modification in jsonschema
|
||||
# NOTE: Cannot import other esphome components globally as a modification in vol_schema
|
||||
# is needed before modules are loaded
|
||||
import esphome.jsonschema as ejs
|
||||
import esphome.schema_extractors as ejs
|
||||
|
||||
ejs.EnableJsonSchemaCollect = True
|
||||
ejs.EnableSchemaExtraction = True
|
||||
|
||||
# schema format:
|
||||
# Schemas are splitted in several files in json format, one for core stuff, one for each platform (sensor, binary_sensor, etc) and
|
||||
# Schemas are split in several files in json format, one for core stuff, one for each platform (sensor, binary_sensor, etc) and
|
||||
# one for each component (dallas, sim800l, etc.) component can have schema for root component/hub and also for platform component,
|
||||
# e.g. dallas has hub component which has pin and then has the sensor platform which has sensor name, index, etc.
|
||||
# When files are loaded they are merged in a single object.
|
||||
@ -60,15 +61,6 @@ solve_registry = []
|
||||
|
||||
|
||||
def get_component_names():
|
||||
# return [
|
||||
# "esphome",
|
||||
# "esp32",
|
||||
# "esp8266",
|
||||
# "logger",
|
||||
# "sensor",
|
||||
# "remote_receiver",
|
||||
# "binary_sensor",
|
||||
# ]
|
||||
from esphome.loader import CORE_COMPONENTS_PATH
|
||||
|
||||
component_names = ["esphome", "sensor"]
|
||||
@ -100,7 +92,7 @@ from esphome import automation
|
||||
from esphome import pins
|
||||
from esphome.components import remote_base
|
||||
from esphome.const import CONF_TYPE
|
||||
from esphome.loader import get_platform
|
||||
from esphome.loader import get_platform, CORE_COMPONENTS_PATH
|
||||
from esphome.helpers import write_file_if_changed
|
||||
from esphome.util import Registry
|
||||
|
||||
@ -120,10 +112,12 @@ def write_file(name, obj):
|
||||
def register_module_schemas(key, module, manifest=None):
|
||||
for name, schema in module_schemas(module):
|
||||
register_known_schema(key, name, schema)
|
||||
if (
|
||||
manifest and manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]
|
||||
): # not sure about 2nd part of the if, might be useless config (e.g. as3935)
|
||||
output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True
|
||||
|
||||
if manifest:
|
||||
# Multi conf should allow list of components
|
||||
# not sure about 2nd part of the if, might be useless config (e.g. as3935)
|
||||
if manifest.multi_conf and S_CONFIG_SCHEMA in output[key][S_SCHEMAS]:
|
||||
output[key][S_SCHEMAS][S_CONFIG_SCHEMA]["is_list"] = True
|
||||
|
||||
|
||||
def register_known_schema(module, name, schema):
|
||||
@ -265,13 +259,58 @@ def do_esp8266():
|
||||
|
||||
|
||||
def fix_remote_receiver():
|
||||
output["remote_receiver.binary_sensor"]["schemas"]["CONFIG_SCHEMA"] = {
|
||||
remote_receiver_schema = output["remote_receiver.binary_sensor"]["schemas"]
|
||||
remote_receiver_schema["CONFIG_SCHEMA"] = {
|
||||
"type": "schema",
|
||||
"schema": {
|
||||
"extends": ["binary_sensor.BINARY_SENSOR_SCHEMA", "core.COMPONENT_SCHEMA"],
|
||||
"config_vars": output["remote_base"]["binary"],
|
||||
"config_vars": output["remote_base"].pop("binary"),
|
||||
},
|
||||
}
|
||||
remote_receiver_schema["CONFIG_SCHEMA"]["schema"]["config_vars"]["receiver_id"] = {
|
||||
"key": "GeneratedID",
|
||||
"use_id_type": "remote_base::RemoteReceiverBase",
|
||||
"type": "use_id",
|
||||
}
|
||||
|
||||
|
||||
def fix_script():
|
||||
output["script"][S_SCHEMAS][S_CONFIG_SCHEMA][S_TYPE] = S_SCHEMA
|
||||
config_schema = output["script"][S_SCHEMAS][S_CONFIG_SCHEMA]
|
||||
config_schema[S_SCHEMA][S_CONFIG_VARS]["id"]["id_type"] = {
|
||||
"class": "script::Script"
|
||||
}
|
||||
config_schema["is_list"] = True
|
||||
|
||||
|
||||
def get_logger_tags():
|
||||
pattern = re.compile(r'^static const char \*const TAG = "(\w.*)";', re.MULTILINE)
|
||||
# tags not in components dir
|
||||
tags = [
|
||||
"app",
|
||||
"component",
|
||||
"entity_base",
|
||||
"scheduler",
|
||||
"api.service",
|
||||
]
|
||||
for x in os.walk(CORE_COMPONENTS_PATH):
|
||||
for y in glob.glob(os.path.join(x[0], "*.cpp")):
|
||||
with open(y, encoding="utf-8") as file:
|
||||
data = file.read()
|
||||
match = pattern.search(data)
|
||||
if match:
|
||||
tags.append(match.group(1))
|
||||
return tags
|
||||
|
||||
|
||||
def add_logger_tags():
|
||||
tags = get_logger_tags()
|
||||
logs = output["logger"]["schemas"]["CONFIG_SCHEMA"]["schema"]["config_vars"][
|
||||
"logs"
|
||||
]["schema"]["config_vars"]
|
||||
for t in tags:
|
||||
logs[t] = logs["string"].copy()
|
||||
logs.pop("string")
|
||||
|
||||
|
||||
def add_referenced_recursive(referenced_schemas, config_var, path, eat_schema=False):
|
||||
@ -401,7 +440,7 @@ def shrink():
|
||||
else:
|
||||
print("expected extends here!" + x)
|
||||
arr_s = merge(key_s, arr_s)
|
||||
if arr_s[S_TYPE] == "enum":
|
||||
if arr_s[S_TYPE] in ["enum", "typed"]:
|
||||
arr_s.pop(S_SCHEMA)
|
||||
else:
|
||||
arr_s.pop(S_EXTENDS)
|
||||
@ -491,14 +530,20 @@ def build_schema():
|
||||
if domain not in platforms:
|
||||
if manifest.config_schema is not None:
|
||||
core_components[domain] = {}
|
||||
if len(manifest.dependencies) > 0:
|
||||
core_components[domain]["dependencies"] = manifest.dependencies
|
||||
register_module_schemas(domain, manifest.module, manifest)
|
||||
|
||||
for platform in platforms:
|
||||
platform_manifest = get_platform(domain=platform, platform=domain)
|
||||
if platform_manifest is not None:
|
||||
output[platform][S_COMPONENTS][domain] = {}
|
||||
if len(platform_manifest.dependencies) > 0:
|
||||
output[platform][S_COMPONENTS][domain][
|
||||
"dependencies"
|
||||
] = platform_manifest.dependencies
|
||||
register_module_schemas(
|
||||
f"{domain}.{platform}", platform_manifest.module
|
||||
f"{domain}.{platform}", platform_manifest.module, platform_manifest
|
||||
)
|
||||
|
||||
# Do registries
|
||||
@ -517,6 +562,8 @@ def build_schema():
|
||||
do_esp8266()
|
||||
do_esp32()
|
||||
fix_remote_receiver()
|
||||
fix_script()
|
||||
add_logger_tags()
|
||||
shrink()
|
||||
|
||||
# aggregate components, so all component info is in same file, otherwise we have dallas.json, dallas.sensor.json, etc.
|
||||
@ -585,7 +632,7 @@ def convert_1(schema, config_var, path):
|
||||
assert S_EXTENDS not in config_var
|
||||
if not S_TYPE in config_var:
|
||||
config_var[S_TYPE] = S_SCHEMA
|
||||
assert config_var[S_TYPE] == S_SCHEMA
|
||||
# assert config_var[S_TYPE] == S_SCHEMA
|
||||
|
||||
if S_SCHEMA not in config_var:
|
||||
config_var[S_SCHEMA] = {}
|
||||
@ -662,7 +709,7 @@ def convert_1(schema, config_var, path):
|
||||
elif repr_schema in ejs.hidden_schemas:
|
||||
schema_type = ejs.hidden_schemas[repr_schema]
|
||||
|
||||
data = schema(ejs.jschema_extractor)
|
||||
data = schema(ejs.SCHEMA_EXTRACT)
|
||||
|
||||
# enums, e.g. esp32/variant
|
||||
if schema_type == "one_of":
|
||||
@ -672,8 +719,9 @@ def convert_1(schema, config_var, path):
|
||||
config_var[S_TYPE] = "enum"
|
||||
config_var["values"] = list(data.keys())
|
||||
elif schema_type == "maybe":
|
||||
config_var[S_TYPE] = "maybe"
|
||||
config_var["schema"] = convert_config(data, path + "/maybe")["schema"]
|
||||
config_var[S_TYPE] = S_SCHEMA
|
||||
config_var["maybe"] = data[1]
|
||||
config_var["schema"] = convert_config(data[0], path + "/maybe")["schema"]
|
||||
# esphome/on_boot
|
||||
elif schema_type == "automation":
|
||||
extra_schema = None
|
||||
@ -717,8 +765,50 @@ def convert_1(schema, config_var, path):
|
||||
elif schema_type == "sensor":
|
||||
schema = data
|
||||
convert_1(data, config_var, path + "/trigger")
|
||||
elif schema_type == "declare_id":
|
||||
# pylint: disable=protected-access
|
||||
parents = data._parents
|
||||
|
||||
config_var["id_type"] = {
|
||||
"class": str(data.base),
|
||||
"parents": [str(x.base) for x in parents]
|
||||
if isinstance(parents, list)
|
||||
else None,
|
||||
}
|
||||
elif schema_type == "use_id":
|
||||
if inspect.ismodule(data):
|
||||
m_attr_obj = getattr(data, "CONFIG_SCHEMA")
|
||||
use_schema = known_schemas.get(repr(m_attr_obj))
|
||||
if use_schema:
|
||||
[output_module, output_name] = use_schema[0][1].split(".")
|
||||
use_id_config = output[output_module][S_SCHEMAS][output_name]
|
||||
config_var["use_id_type"] = use_id_config["schema"]["config_vars"][
|
||||
"id"
|
||||
]["id_type"]["class"]
|
||||
config_var[S_TYPE] = "use_id"
|
||||
else:
|
||||
print("TODO deferred?")
|
||||
else:
|
||||
if isinstance(data, str):
|
||||
# TODO: Figure out why pipsolar does this
|
||||
config_var["use_id_type"] = data
|
||||
else:
|
||||
config_var["use_id_type"] = str(data.base)
|
||||
config_var[S_TYPE] = "use_id"
|
||||
else:
|
||||
raise Exception("Unknown extracted schema type")
|
||||
elif config_var.get("key") == "GeneratedID":
|
||||
if path == "i2c/CONFIG_SCHEMA/extL/all/id":
|
||||
config_var["id_type"] = {"class": "i2c::I2CBus", "parents": ["Component"]}
|
||||
elif path == "uart/CONFIG_SCHEMA/val 1/extL/all/id":
|
||||
config_var["id_type"] = {
|
||||
"class": "uart::UARTComponent",
|
||||
"parents": ["Component"],
|
||||
}
|
||||
elif path == "pins/esp32/val 1/id":
|
||||
config_var["id_type"] = "pin"
|
||||
else:
|
||||
raise Exception("Cannot determine id_type for " + path)
|
||||
|
||||
elif repr_schema in ejs.registry_schemas:
|
||||
solve_registry.append((ejs.registry_schemas[repr_schema], config_var))
|
||||
@ -787,7 +877,13 @@ def convert_keys(converted, schema, path):
|
||||
result["key"] = "Optional"
|
||||
else:
|
||||
converted["key"] = "String"
|
||||
converted["key_dump"] = str(k)
|
||||
key_string_match = re.search(
|
||||
r"<function (\w*) at \w*>", str(k), re.IGNORECASE
|
||||
)
|
||||
if key_string_match:
|
||||
converted["key_type"] = key_string_match.group(1)
|
||||
else:
|
||||
converted["key_type"] = str(k)
|
||||
|
||||
esphome_core.CORE.data = {
|
||||
esphome_core.KEY_CORE: {esphome_core.KEY_TARGET_PLATFORM: "esp8266"}
|
||||
@ -808,6 +904,12 @@ def convert_keys(converted, schema, path):
|
||||
if base_k in result and base_v == result[base_k]:
|
||||
result.pop(base_k)
|
||||
converted["schema"][S_CONFIG_VARS][str(k)] = result
|
||||
if "key" in converted and converted["key"] == "String":
|
||||
config_vars = converted["schema"]["config_vars"]
|
||||
assert len(config_vars) == 1
|
||||
key = list(config_vars.keys())[0]
|
||||
assert key.startswith("<")
|
||||
config_vars["string"] = config_vars.pop(key)
|
||||
|
||||
|
||||
build_schema()
|
||||
|
@ -452,7 +452,7 @@ def lint_no_removed_in_idf_conversions(fname, match):
|
||||
replacement = IDF_CONVERSION_FORBIDDEN[match.group(1)]
|
||||
return (
|
||||
f"The macro {highlight(match.group(1))} can no longer be used in ESPHome directly. "
|
||||
f"Plese use {highlight(replacement)} instead."
|
||||
f"Please use {highlight(replacement)} instead."
|
||||
)
|
||||
|
||||
|
||||
|
@ -600,7 +600,6 @@ sensor:
|
||||
sensor: hlw8012_power
|
||||
name: "Integration Sensor lazy"
|
||||
time_unit: s
|
||||
min_save_interval: 60s
|
||||
- platform: hmc5883l
|
||||
address: 0x68
|
||||
field_strength_x:
|
||||
|
@ -80,6 +80,11 @@ sx1509:
|
||||
mcp3204:
|
||||
cs_pin: GPIO23
|
||||
|
||||
dac7678:
|
||||
address: 0x4A
|
||||
id: dac7678_hub1
|
||||
internal_reference: true
|
||||
|
||||
sensor:
|
||||
- platform: homeassistant
|
||||
entity_id: sensor.hello_world
|
||||
@ -518,6 +523,38 @@ output:
|
||||
pipsolar_id: inverter0
|
||||
battery_recharge_voltage:
|
||||
id: inverter0_battery_recharge_voltage_out
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 0
|
||||
id: 'dac7678_1_ch0'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 1
|
||||
id: 'dac7678_1_ch1'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 2
|
||||
id: 'dac7678_1_ch2'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 3
|
||||
id: 'dac7678_1_ch3'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 4
|
||||
id: 'dac7678_1_ch4'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 5
|
||||
id: 'dac7678_1_ch5'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 6
|
||||
id: 'dac7678_1_ch6'
|
||||
- platform: dac7678
|
||||
dac7678_id: 'dac7678_hub1'
|
||||
channel: 7
|
||||
id: 'dac7678_1_ch7'
|
||||
esp32_camera:
|
||||
name: ESP-32 Camera
|
||||
data_pins: [GPIO17, GPIO35, GPIO34, GPIO5, GPIO39, GPIO18, GPIO36, GPIO19]
|
||||
@ -604,14 +641,6 @@ touchscreen:
|
||||
- logger.log:
|
||||
format: Touch at (%d, %d)
|
||||
args: ["touch.x", "touch.y"]
|
||||
- media_player.play:
|
||||
- media_player.pause:
|
||||
- media_player.stop:
|
||||
- media_player.toggle:
|
||||
- media_player.volume_up:
|
||||
- media_player.volume_down:
|
||||
- media_player.volume_set: 50%
|
||||
|
||||
|
||||
media_player:
|
||||
- platform: i2s_audio
|
||||
@ -621,3 +650,20 @@ media_player:
|
||||
i2s_dout_pin: GPIO25
|
||||
i2s_bclk_pin: GPIO27
|
||||
mute_pin: GPIO14
|
||||
on_state:
|
||||
- media_player.play:
|
||||
- media_player.play_media: http://localhost/media.mp3
|
||||
- media_player.play_media: !lambda 'return "http://localhost/media.mp3";'
|
||||
on_idle:
|
||||
- media_player.pause:
|
||||
on_play:
|
||||
- media_player.stop:
|
||||
on_pause:
|
||||
- media_player.toggle:
|
||||
- wait_until:
|
||||
media_player.is_idle:
|
||||
- wait_until:
|
||||
media_player.is_playing:
|
||||
- media_player.volume_up:
|
||||
- media_player.volume_down:
|
||||
- media_player.volume_set: 50%
|
||||
|
Loading…
x
Reference in New Issue
Block a user