Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
211 changes: 10 additions & 201 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,208 +1,17 @@
name: CI

# yamllint disable-line rule:truthy
on:
push:
pull_request: ~

env:
CACHE_VERSION: 1
DEFAULT_PYTHON: 3.8
PRE_COMMIT_HOME: ~/.cache/pre-commit

jobs:
# Separate job to pre-populate the base dependency cache
# This prevents upcoming jobs from doing the same work individually
prepare-base:
name: Prepare base dependencies
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.8, 3.9, "3.10", "3.11"]
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3
with:
path: venv
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('pyproject.toml') }}
restore-keys: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
pip install -U pip setuptools pre-commit
pip install -e '.[testing]'

pre-commit:
name: Prepare pre-commit environment
runs-on: ubuntu-latest
needs: prepare-base
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3
with:
path: venv
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('pyproject.toml') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: ${{ env.CACHE_VERSION}}-${{ runner.os }}-pre-commit-
- name: Install pre-commit dependencies
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
. venv/bin/activate
pre-commit install-hooks

lint-pre-commit:
name: Check pre-commit
runs-on: ubuntu-latest
needs: pre-commit
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3
with:
path: venv
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('pyproject.toml') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pre-commit
run: |
. venv/bin/activate
pre-commit run --all-files --show-diff-on-failure

pytest:
runs-on: ubuntu-latest
needs: prepare-base
strategy:
matrix:
python-version: [3.8, 3.9, "3.10", "3.11"]
name: >-
Run tests Python ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
id: python
with:
python-version: ${{ matrix.python-version }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3
with:
path: venv
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('pyproject.toml') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
- name: Install Pytest Annotation plugin
run: |
. venv/bin/activate
# Ideally this should be part of our dependencies
# However this plugin is fairly new and doesn't run correctly
# on a non-GitHub environment.
pip install pytest-github-actions-annotate-failures
- name: Run pytest
run: |
. venv/bin/activate
pytest \
-qq \
--timeout=20 \
--durations=10 \
--cov zigpy_cli \
--cov-config pyproject.toml \
-o console_output_style=count \
-p no:sugar \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v3
with:
name: coverage-${{ matrix.python-version }}
path: .coverage


coverage:
name: Process test coverage
runs-on: ubuntu-latest
needs: pytest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@v3
with:
path: venv
key: ${{ env.CACHE_VERSION}}-${{ runner.os }}-base-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('pyproject.toml') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v3
- name: Combine coverage results
run: |
. venv/bin/activate
coverage combine coverage*/.coverage*
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
shared-ci:
uses: zigpy/workflows/.github/workflows/ci.yml@main
with:
CODE_FOLDER: zigpy_cli
CACHE_VERSION: 3
PRE_COMMIT_CACHE_PATH: ~/.cache/pre-commit
PYTHON_VERSION_DEFAULT: 3.9.15
MINIMUM_COVERAGE_PERCENTAGE: 1
secrets:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
26 changes: 6 additions & 20 deletions .github/workflows/publish-to-pypi.yml
Original file line number Diff line number Diff line change
@@ -1,26 +1,12 @@
name: Publish distributions to PyPI and TestPyPI
name: Publish distributions to PyPI

on:
release:
types:
- published

jobs:
build-and-publish:
name: Build and publish distributions to PyPI and TestPyPI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.8
uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Install wheel
run: >-
pip install wheel build
- name: Build
run: >-
python3 -m build
- name: Publish distribution to PyPI
uses: pypa/gh-action-pypi-publish@release/v1
with:
password: ${{ secrets.PYPI_API_TOKEN }}
shared-build-and-publish:
uses: zigpy/workflows/.github/workflows/publish-to-pypi.yml@main
secrets:
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ dependencies = [
"coloredlogs",
"scapy",
"zigpy>=0.55.0",
"bellows>=0.35.1",
"bellows>=0.43.0",
"zigpy-deconz>=0.21.0",
"zigpy-xbee>=0.18.0",
"zigpy-zboss>=1.1.0",
Expand Down
5 changes: 5 additions & 0 deletions requirements_test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
coverage[toml]
pytest
pytest-asyncio
pytest-cov
pytest-timeout
15 changes: 15 additions & 0 deletions zigpy_cli/common.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import click
from zigpy.types import Channels


class HexOrDecIntParamType(click.ParamType):
Expand All @@ -17,4 +18,18 @@ def convert(self, value, param, ctx):
self.fail(f"{value!r} is not a valid integer", param, ctx)


class ChannelsType(click.ParamType):
    """Click parameter type that parses a comma-separated list of radio
    channel numbers (e.g. ``"11,15,20"``) into a ``Channels`` bitmap."""

    name = "channels"

    def convert(self, value, param, ctx):
        # Click may call `convert` with an already-converted value; pass it through.
        if isinstance(value, Channels):
            return value

        try:
            numbers = (int(chunk) for chunk in value.split(","))
            return Channels.from_channel_list(numbers)
        except ValueError:
            # Raised either by `int()` on a non-numeric chunk or by
            # `from_channel_list` on an out-of-range channel.
            self.fail(f"{value!r} is not a valid channel list", param, ctx)


# Reusable singleton instances, intended for use as `type=` arguments to click options.
HEX_OR_DEC_INT = HexOrDecIntParamType()
CHANNELS_LIST = ChannelsType()
77 changes: 77 additions & 0 deletions zigpy_cli/helpers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import struct

import zigpy.types as t


class PcapWriter:
    """Writes captured IEEE 802.15.4 packets to a file in PCAP format.

    The caller is expected to invoke `write_header` once before any
    `write_packet` calls.
    """

    def __init__(self, file):
        """Store the destination file object (opened in binary write mode)."""
        self.file = file

    def write_header(self):
        """Write the PCAP global header for LINKTYPE_IEEE802_15_4_TAP."""
        # Single little-endian pack is byte-identical to packing each field
        # separately ("<" disables alignment padding).
        header = struct.pack(
            "<LHHLLLL",
            0xA1B2C3D4,  # magic number
            2,  # version major
            4,  # version minor
            0,  # thiszone
            0,  # sigfigs
            65535,  # snaplen
            283,  # LINKTYPE_IEEE802_15_4_TAP
        )
        self.file.write(header)

    def write_packet(self, packet: t.CapturedPacket) -> None:
        """Write one captured packet as a PCAP record with TAP TLV metadata."""
        seconds = int(packet.timestamp.timestamp())
        microseconds = int(packet.timestamp.microsecond)

        # Each sub-TLV is type:u16 + length:u16 + value, zero-padded to a
        # 4-byte boundary.

        # RSS (type 1): a 4-byte float, no padding needed
        rss_tlv = (
            t.uint16_t(1).serialize()
            + t.uint16_t(4).serialize()
            + t.Single(packet.rssi).serialize()
        )

        # LQI (type 10): one byte + 3 bytes of padding
        lqi_tlv = (
            t.uint16_t(10).serialize()
            + t.uint16_t(1).serialize()
            + t.uint8_t(packet.lqi).serialize()
            + b"\x00\x00\x00"
        )

        # Channel assignment (type 3): channel u16 + page u8 + 1 byte padding
        channel_tlv = (
            t.uint16_t(3).serialize()
            + t.uint16_t(3).serialize()
            + t.uint16_t(packet.channel).serialize()
            + t.uint8_t(0).serialize()  # page 0
            + b"\x00"
        )

        # FCS type (type 0): one byte + 3 bytes of padding
        fcs_tlv = (
            t.uint16_t(0).serialize()
            + t.uint16_t(1).serialize()
            + t.uint8_t(1).serialize()  # FCS type 1
            + b"\x00\x00\x00"
        )

        sub_tlvs = rss_tlv + lqi_tlv + channel_tlv + fcs_tlv
        assert len(sub_tlvs) % 4 == 0

        # TAP header: version:u8, reserved:u8, total length (header + TLVs):u16
        tap_header = struct.pack("<BBH", 0, 0, 4 + len(sub_tlvs))

        payload = tap_header + sub_tlvs + packet.data + packet.compute_fcs()

        # PCAP record header: ts_sec, ts_usec, captured length, original length
        record_header = struct.pack(
            "<LLLL", seconds, microseconds, len(payload), len(payload)
        )
        self.file.write(record_header + payload)
Loading