Mirror of https://github.com/craigerl/aprsd.git (synced 2025-06-13 20:02:26 -04:00)
Changed to ruff

This patch switches the project to the ruff linter. SO MUCH quicker. Removed gray and mypy as well.

parent 30d1eb57dd
commit 72d068c0b8
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer

@@ -10,13 +10,19 @@ repos:
       - id: check-case-conflict
       - id: check-docstring-first
       - id: check-builtin-literals
+      - id: check-illegal-windows-names

   - repo: https://github.com/asottile/setup-cfg-fmt
     rev: v2.5.0
     hooks:
       - id: setup-cfg-fmt

-  - repo: https://github.com/dizballanze/gray
-    rev: v0.14.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.8.4
     hooks:
-      - id: gray
+      - id: ruff
###### Relevant part below ######
+      - id: ruff
+        args: ["check", "--select", "I", "--fix"]
###### Relevant part above ######
+      - id: ruff-format
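For reference, the ruff hooks added by the hunk above amount to the following block of .pre-commit-config.yaml. This is a sketch reassembled from the diff, so the exact indentation and line order may not match the committed file byte for byte:

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.8.4
    hooks:
      - id: ruff
      - id: ruff
        args: ["check", "--select", "I", "--fix"]
      - id: ruff-format

Running "pre-commit run --all-files" executes these hooks locally; the second ruff hook, invoked with "--select", "I", "--fix", applies ruff's isort-compatible import-sorting rules, which is what produces the import reordering visible throughout the Python hunks below.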
@@ -1,7 +1,7 @@
-from functools import update_wrapper
 import logging
-from pathlib import Path
 import typing as t
+from functools import update_wrapper
+from pathlib import Path

 import click
 from oslo_config import cfg

@@ -11,7 +11,6 @@ from aprsd import conf  # noqa: F401
 from aprsd.log import log
 from aprsd.utils import trace

-
 CONF = cfg.CONF
 home = str(Path.home())
 DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"

@@ -58,6 +57,7 @@ class AliasedGroup(click.Group):
         calling into :meth:`add_command`.
         Copied from `click` and extended for `aliases`.
         """
+
         def decorator(f):
             aliases = kwargs.pop("aliases", [])
             cmd = click.decorators.command(*args, **kwargs)(f)

@@ -65,6 +65,7 @@ class AliasedGroup(click.Group):
             for alias in aliases:
                 self.add_command(cmd, name=alias)
             return cmd
+
         return decorator

     def group(self, *args, **kwargs):

@@ -74,6 +75,7 @@ class AliasedGroup(click.Group):
         calling into :meth:`add_command`.
         Copied from `click` and extended for `aliases`.
         """
+
         def decorator(f):
             aliases = kwargs.pop("aliases", [])
             cmd = click.decorators.group(*args, **kwargs)(f)

@@ -81,6 +83,7 @@ class AliasedGroup(click.Group):
             for alias in aliases:
                 self.add_command(cmd, name=alias)
             return cmd
+
         return decorator


@@ -89,6 +92,7 @@ def add_options(options):
         for option in reversed(options):
             func = option(func)
         return func
+
     return _add_options


@@ -103,7 +107,9 @@ def process_standard_options(f: F) -> F:
         default_config_files = None
         try:
             CONF(
-                [], project="aprsd", version=aprsd.__version__,
+                [],
+                project="aprsd",
+                version=aprsd.__version__,
                 default_config_files=default_config_files,
             )
         except cfg.ConfigFilesNotFoundError:

@@ -119,7 +125,7 @@
         trace.setup_tracing(["method", "api"])

         if not config_file_found:
             LOG = logging.getLogger("APRSD")  # noqa: N806
             LOG.error("No config file found!! run 'aprsd sample-config'")

         del kwargs["loglevel"]

@@ -132,6 +138,7 @@ def process_standard_options(f: F) -> F:

 def process_standard_options_no_config(f: F) -> F:
     """Use this as a decorator when config isn't needed."""
+
     def new_func(*args, **kwargs):
         ctx = args[0]
         ctx.ensure_object(dict)
@@ -2,24 +2,22 @@ import datetime
 import logging
 import time

+import timeago
 from aprslib.exceptions import LoginError
 from loguru import logger
 from oslo_config import cfg
-import timeago

 from aprsd import client, exception
 from aprsd.client import base
 from aprsd.client.drivers import aprsis
 from aprsd.packets import core

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
 LOGU = logger


 class APRSISClient(base.APRSClient):
-
     _client = None
     _checks = False


@@ -106,6 +104,7 @@ class APRSISClient(base.APRSClient):
             LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
             return False
         return self._client.is_alive() and not self._is_stale_connection()

     def close(self):
         if self._client:
             self._client.stop()

@@ -134,8 +133,12 @@ class APRSISClient(base.APRSClient):
             if retry_count >= retries:
                 break
             try:
-                LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
-                aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
+                LOG.info(
+                    f"Creating aprslib client({host}:{port}) and logging in {user}."
+                )
+                aprs_client = aprsis.Aprsdis(
+                    user, passwd=password, host=host, port=port
+                )
                 # Force the log to be the same
                 aprs_client.logger = LOG
                 aprs_client.connect()

@@ -166,8 +169,10 @@ class APRSISClient(base.APRSClient):
         if self._client:
             try:
                 self._client.consumer(
-                    callback, blocking=blocking,
-                    immortal=immortal, raw=raw,
+                    callback,
+                    blocking=blocking,
+                    immortal=immortal,
+                    raw=raw,
                 )
             except Exception as e:
                 LOG.error(e)
@@ -2,12 +2,11 @@ import abc
 import logging
 import threading

-from oslo_config import cfg
 import wrapt
+from oslo_config import cfg

 from aprsd.packets import core
-from aprsd.threads import keepalive_collector
+from aprsd.utils import keepalive_collector

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
@@ -4,17 +4,20 @@ import select
 import threading

 import aprslib
+import wrapt
 from aprslib import is_py3
 from aprslib.exceptions import (
-    ConnectionDrop, ConnectionError, GenericError, LoginError, ParseError,
+    ConnectionDrop,
+    ConnectionError,
+    GenericError,
+    LoginError,
+    ParseError,
     UnknownFormat,
 )
-import wrapt

 import aprsd
 from aprsd.packets import core


 LOG = logging.getLogger("APRSD")

@@ -3,20 +3,19 @@ import threading
 import time

 import aprslib
-from oslo_config import cfg
 import wrapt
+from oslo_config import cfg

 from aprsd import conf  # noqa
 from aprsd.packets import core
 from aprsd.utils import trace

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")


 class APRSDFakeClient(metaclass=trace.TraceWrapperMetaclass):
-    '''Fake client for testing.'''
+    """Fake client for testing."""

     # flag to tell us to stop
     thread_stop = False
@@ -4,13 +4,11 @@ from typing import Callable, Protocol, runtime_checkable
 from aprsd import exception
 from aprsd.packets import core

-
 LOG = logging.getLogger("APRSD")


 @runtime_checkable
 class Client(Protocol):
-
     def __init__(self):
         pass

@@ -7,13 +7,11 @@ from aprsd.client import base
 from aprsd.client.drivers import fake as fake_driver
 from aprsd.utils import trace

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")


 class APRSDFakeClient(base.APRSClient, metaclass=trace.TraceWrapperMetaclass):
-
     def stats(self, serializable=False) -> dict:
         return {
             "transport": "Fake",
@@ -2,23 +2,21 @@ import datetime
 import logging

 import aprslib
+import timeago
 from loguru import logger
 from oslo_config import cfg
-import timeago

 from aprsd import client, exception
 from aprsd.client import base
 from aprsd.client.drivers import kiss
 from aprsd.packets import core

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
 LOGU = logger


 class KISSClient(base.APRSClient):
-
     _client = None
     keepalive = datetime.datetime.now()

@@ -1,18 +1,16 @@
 import threading

-from oslo_config import cfg
 import wrapt
+from oslo_config import cfg

 from aprsd import client
 from aprsd.utils import singleton

-
 CONF = cfg.CONF


 @singleton
 class APRSClientStats:
-
     lock = threading.Lock()

     @wrapt.synchronized(lock)
@@ -3,12 +3,13 @@ import click.shell_completion

 from aprsd.main import cli


 CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])


 @cli.command()
-@click.argument("shell", type=click.Choice(list(click.shell_completion._available_shells)))
+@click.argument(
+    "shell", type=click.Choice(list(click.shell_completion._available_shells))
+)
 def completion(shell):
     """Show the shell completion code"""
     from click.utils import _detect_program_name

@@ -17,6 +18,8 @@ def completion(shell):
     prog_name = _detect_program_name()
     complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
     print(cls(cli, {}, prog_name, complete_var).source())
-    print("# Add the following line to your shell configuration file to have aprsd command line completion")
+    print(
+        "# Add the following line to your shell configuration file to have aprsd command line completion"
+    )
     print("# but remove the leading '#' character.")
-    print(f"# eval \"$(aprsd completion {shell})\"")
+    print(f'# eval "$(aprsd completion {shell})"')
@@ -9,12 +9,12 @@ import click
 from oslo_config import cfg

 from aprsd import cli_helper, conf, packets, plugin

 # local imports here
 from aprsd.client import base
 from aprsd.main import cli
 from aprsd.utils import trace

 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
 CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])

@@ -112,7 +112,8 @@ def test_plugin(
     # Register the plugin they wanted tested.
     LOG.info(
         "Testing plugin {} Version {}".format(
-            obj.__class__, obj.version,
+            obj.__class__,
+            obj.version,
         ),
     )
     pm.register_msg(obj)
@@ -2,8 +2,8 @@
 import logging

 import click
-from oslo_config import cfg
 import requests
+from oslo_config import cfg
 from rich.console import Console
 from rich.table import Table

@@ -13,7 +13,6 @@ from aprsd import cli_helper
 from aprsd.main import cli
 from aprsd.threads.stats import StatsStore

-
 # setup the global logger
 # log.basicConfig(level=log.DEBUG) # level=10
 LOG = logging.getLogger("APRSD")

@@ -23,12 +22,14 @@ CONF = cfg.CONF
 @cli.command()
 @cli_helper.add_options(cli_helper.common_options)
 @click.option(
-    "--host", type=str,
+    "--host",
+    type=str,
     default=None,
     help="IP address of the remote aprsd admin web ui fetch stats from.",
 )
 @click.option(
-    "--port", type=int,
+    "--port",
+    type=int,
     default=None,
     help="Port of the remote aprsd web admin interface to fetch stats from.",
 )

@@ -169,8 +170,8 @@ def fetch_stats(ctx, host, port):
     "--show-section",
     default=["All"],
     help="Show specific sections of the stats. "
     " Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,"
     " PacketList, SeenList, WatchList",
     multiple=True,
     type=click.Choice(
         [
@@ -13,13 +13,15 @@ from oslo_config import cfg
 from rich.console import Console

 import aprsd
-from aprsd import cli_helper
-from aprsd import conf  # noqa
+from aprsd import (
+    cli_helper,
+    conf,  # noqa
+)

 # local imports here
 from aprsd.main import cli
 from aprsd.threads import stats as stats_threads

 # setup the global logger
 # log.basicConfig(level=log.DEBUG) # level=10
 CONF = cfg.CONF
@@ -9,9 +9,9 @@ import sys
 from traceback import print_tb
 from urllib.parse import urljoin

-from bs4 import BeautifulSoup
 import click
 import requests
+from bs4 import BeautifulSoup
 from rich.console import Console
 from rich.table import Table
 from rich.text import Text

@@ -22,7 +22,6 @@ from aprsd import plugin as aprsd_plugin
 from aprsd.main import cli
 from aprsd.plugins import fortune, notify, ping, time, version, weather

-
 LOG = logging.getLogger("APRSD")
 PYPI_URL = "https://pypi.org/search/"

@@ -79,7 +78,8 @@ def get_module_info(package_name, module_name, module_path):
             obj_list.append(
                 {
                     "package": package_name,
-                    "name": mem_name, "obj": obj,
+                    "name": mem_name,
+                    "obj": obj,
                     "version": obj.version,
                     "path": f"{'.'.join([module_name, obj.__name__])}",
                 },

@@ -99,7 +99,9 @@ def _get_installed_aprsd_items():
             module = importlib.import_module(name)
             pkgs = walk_package(module)
             for pkg in pkgs:
-                pkg_info = get_module_info(module.__name__, pkg.name, module.__path__[0])
+                pkg_info = get_module_info(
+                    module.__name__, pkg.name, module.__path__[0]
+                )
                 if "plugin" in name:
                     plugins[name] = pkg_info
                 elif "extension" in name:

@@ -193,10 +195,18 @@ def show_pypi_plugins(installed_plugins, console):
     table.add_column("Installed?", style="red", justify="center")
     for snippet in snippets:
         link = urljoin(PYPI_URL, snippet.get("href"))
-        package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
-        version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
-        created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
-        description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
+        package = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip()
+        )
+        version = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip()
+        )
+        created = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip()
+        )
+        description = re.sub(
+            r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip()
+        )
         emoji = ":open_file_folder:"

         if "aprsd-" not in package or "-plugin" not in package:

@@ -210,7 +220,10 @@
         table.add_row(
             f"[link={link}]{emoji}[/link] {package}",
-            description, version, created, installed,
+            description,
+            version,
+            created,
+            installed,
         )

     console.print("\n")

@@ -234,10 +247,18 @@ def show_pypi_extensions(installed_extensions, console):
     table.add_column("Installed?", style="red", justify="center")
     for snippet in snippets:
         link = urljoin(PYPI_URL, snippet.get("href"))
-        package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
-        version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
-        created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
-        description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
+        package = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip()
+        )
+        version = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip()
+        )
+        created = re.sub(
+            r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip()
+        )
+        description = re.sub(
+            r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip()
+        )
         emoji = ":open_file_folder:"

         if "aprsd-" not in package or "-extension" not in package:

@@ -251,7 +272,10 @@
         table.add_row(
             f"[link={link}]{emoji}[/link] {package}",
-            description, version, created, installed,
+            description,
+            version,
+            created,
+            installed,
         )

     console.print("\n")
@@ -27,7 +27,6 @@ from aprsd.threads import keepalive, rx
 from aprsd.threads import stats as stats_thread
 from aprsd.threads.aprsd import APRSDThread

-
 # setup the global logger
 # log.basicConfig(level=log.DEBUG) # level=10
 LOG = logging.getLogger("APRSD")

@@ -51,8 +50,12 @@ def signal_handler(sig, frame):

 class APRSDListenThread(rx.APRSDRXThread):
     def __init__(
-        self, packet_queue, packet_filter=None, plugin_manager=None,
-        enabled_plugins=[], log_packets=False,
+        self,
+        packet_queue,
+        packet_filter=None,
+        plugin_manager=None,
+        enabled_plugins=[],
+        log_packets=False,
     ):
         super().__init__(packet_queue)
         self.packet_filter = packet_filter

@@ -126,7 +129,7 @@ class ListenStatsThread(APRSDThread):
                 thread_hex = f"fg {utils.hex_from_name(k)}"
                 LOGU.opt(colors=True).info(
                     f"<{thread_hex}>{k:<15}</{thread_hex}> "
-                    f"<blue>RX: {v["rx"]}</blue> <red>TX: {v["tx"]}</red>",
+                    f"<blue>RX: {v['rx']}</blue> <red>TX: {v['tx']}</red>",
                 )

             time.sleep(1)

@@ -265,7 +268,7 @@ def listen(
         LOG.debug(f"Filter by '{filter}'")
         aprs_client.set_filter(filter)

-    keepalive = keepalive.KeepAliveThread()
+    keepalive_thread = keepalive.KeepAliveThread()

     if not CONF.enable_seen_list:
         # just deregister the class from the packet collector

@@ -309,9 +312,9 @@ def listen(
     listen_stats = ListenStatsThread()
     listen_stats.start()

-    keepalive.start()
+    keepalive_thread.start()
     LOG.debug("keepalive Join")
-    keepalive.join()
+    keepalive_thread.join()
     LOG.debug("listen_thread Join")
     listen_thread.join()
     stats.join()
@@ -6,20 +6,17 @@ import click
 from oslo_config import cfg

 import aprsd
-from aprsd import cli_helper
+from aprsd import cli_helper, plugin, threads, utils
 from aprsd import main as aprsd_main
-from aprsd import plugin, threads, utils
 from aprsd.client import client_factory
 from aprsd.main import cli
 from aprsd.packets import collector as packet_collector
 from aprsd.packets import seen_list
 from aprsd.threads import aprsd as aprsd_threads
-from aprsd.threads import keepalive, registry, rx
+from aprsd.threads import keepalive, registry, rx, tx
 from aprsd.threads import stats as stats_thread
-from aprsd.threads import tx
 from aprsd.utils import singleton

-
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -32,6 +29,7 @@ class ServerThreads:
     the server command.

     """
+
     def __init__(self):
         self.threads: list[aprsd_threads.APRSDThread] = []

@@ -4,7 +4,6 @@ The options for log setup

 from oslo_config import cfg

-
 DEFAULT_LOGIN = "NOCALL"

 aprs_group = cfg.OptGroup(

@@ -31,7 +30,7 @@ aprs_opts = [
         "enabled",
         default=True,
         help="Set enabled to False if there is no internet connectivity."
         "This is useful for a direwolf KISS aprs connection only.",
     ),
     cfg.StrOpt(
         "login",

@@ -42,8 +41,8 @@ aprs_opts = [
         "password",
         secret=True,
         help="APRS Password "
         "Get the passcode for your callsign here: "
         "https://apps.magicbug.co.uk/passcode",
     ),
     cfg.HostAddressOpt(
         "host",
@@ -2,7 +2,6 @@ from pathlib import Path

 from oslo_config import cfg

-
 home = str(Path.home())
 DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
 APRSD_DEFAULT_MAGIC_WORD = "CHANGEME!!!"

@@ -47,15 +46,15 @@ aprsd_opts = [
         "ack_rate_limit_period",
         default=1,
         help="The wait period in seconds per Ack packet being sent."
         "1 means 1 ack packet per second allowed."
         "2 means 1 pack packet every 2 seconds allowed",
     ),
     cfg.IntOpt(
         "msg_rate_limit_period",
         default=2,
         help="Wait period in seconds per non AckPacket being sent."
         "2 means 1 packet every 2 seconds allowed."
         "5 means 1 pack packet every 5 seconds allowed",
     ),
     cfg.IntOpt(
         "packet_dupe_timeout",

@@ -66,7 +65,7 @@ aprsd_opts = [
         "enable_beacon",
         default=False,
         help="Enable sending of a GPS Beacon packet to locate this service. "
         "Requires latitude and longitude to be set.",
     ),
     cfg.IntOpt(
         "beacon_interval",

@@ -93,8 +92,8 @@ aprsd_opts = [
         choices=["compact", "multiline", "both"],
         default="compact",
         help="When logging packets 'compact' will use a single line formatted for each packet."
         "'multiline' will use multiple lines for each packet and is the traditional format."
         "both will log both compact and multiline.",
     ),
     cfg.IntOpt(
         "default_packet_send_count",

@@ -120,7 +119,7 @@ aprsd_opts = [
         "enable_seen_list",
         default=True,
         help="Enable the Callsign seen list tracking feature. This allows aprsd to keep track of "
         "callsigns that have been seen and when they were last seen.",
     ),
     cfg.BoolOpt(
         "enable_packet_logging",

@@ -136,7 +135,7 @@ aprsd_opts = [
         "enable_sending_ack_packets",
         default=True,
         help="Set this to False, to disable sending of ack packets. This will entirely stop"
         "APRSD from sending ack packets.",
     ),
 ]

@@ -145,8 +144,8 @@ watch_list_opts = [
         "enabled",
         default=False,
         help="Enable the watch list feature. Still have to enable "
         "the correct plugin. Built-in plugin to use is "
         "aprsd.plugins.notify.NotifyPlugin",
     ),
     cfg.ListOpt(
         "callsigns",

@@ -165,7 +164,7 @@ watch_list_opts = [
         "alert_time_seconds",
         default=3600,
         help="Time to wait before alert is sent on new message for "
         "users in callsigns.",
     ),
 ]

@@ -183,8 +182,8 @@ enabled_plugins_opts = [
             "aprsd.plugins.notify.NotifySeenPlugin",
         ],
         help="Comma separated list of enabled plugins for APRSD."
         "To enable installed external plugins add them here."
         "The full python path to the class name must be used",
     ),
 ]

@@ -193,16 +192,16 @@ registry_opts = [
         "enabled",
         default=False,
         help="Enable sending aprs registry information. This will let the "
         "APRS registry know about your service and it's uptime. "
         "No personal information is sent, just the callsign, uptime and description. "
         "The service callsign is the callsign set in [DEFAULT] section.",
     ),
     cfg.StrOpt(
         "description",
         default=None,
         help="Description of the service to send to the APRS registry. "
         "This is what will show up in the APRS registry."
         "If not set, the description will be the same as the callsign.",
     ),
     cfg.StrOpt(
         "registry_url",
@@ -1,11 +1,11 @@
 """
 The options for log setup
 """

 import logging

 from oslo_config import cfg


 LOG_LEVELS = {
     "CRITICAL": logging.CRITICAL,
     "ERROR": logging.ERROR,

@@ -59,7 +59,5 @@ def register_opts(config):

 def list_opts():
     return {
-        logging_group.name: (
-            logging_opts
-        ),
+        logging_group.name: (logging_opts),
     }
@@ -31,7 +31,6 @@ import importlib
 import os
 import pkgutil

-
 LIST_OPTS_FUNC_NAME = "list_opts"


@@ -64,9 +63,11 @@ def _import_modules(module_names):
     for modname in module_names:
         mod = importlib.import_module("aprsd.conf." + modname)
         if not hasattr(mod, LIST_OPTS_FUNC_NAME):
-            msg = "The module 'aprsd.conf.%s' should have a '%s' "\
-                  "function which returns the config options." % \
-                  (modname, LIST_OPTS_FUNC_NAME)
+            msg = (
+                "The module 'aprsd.conf.%s' should have a '%s' "
+                "function which returns the config options."
+                % (modname, LIST_OPTS_FUNC_NAME)
+            )
             raise Exception(msg)
         else:
             imported_modules.append(mod)
@@ -1,6 +1,5 @@
 from oslo_config import cfg

-
 aprsfi_group = cfg.OptGroup(
     name="aprs_fi",
     title="APRS.FI website settings",

@@ -21,8 +20,7 @@ owm_wx_group = cfg.OptGroup(
 aprsfi_opts = [
     cfg.StrOpt(
         "apiKey",
-        help="Get the apiKey from your aprs.fi account here:"
-             "http://aprs.fi/account",
+        help="Get the apiKey from your aprs.fi account here:" "http://aprs.fi/account",
     ),
 ]

@@ -30,11 +28,11 @@ owm_wx_opts = [
     cfg.StrOpt(
         "apiKey",
         help="OWMWeatherPlugin api key to OpenWeatherMap's API."
         "This plugin uses the openweathermap API to fetch"
         "location and weather information."
         "To use this plugin you need to get an openweathermap"
         "account and apikey."
         "https://home.openweathermap.org/api_keys",
     ),
 ]

@@ -42,16 +40,16 @@ avwx_opts = [
     cfg.StrOpt(
         "apiKey",
         help="avwx-api is an opensource project that has"
         "a hosted service here: https://avwx.rest/"
         "You can launch your own avwx-api in a container"
         "by cloning the githug repo here:"
         "https://github.com/avwx-rest/AVWX-API",
     ),
     cfg.StrOpt(
         "base_url",
         default="https://avwx.rest",
         help="The base url for the avwx API. If you are hosting your own"
         "Here is where you change the url to point to yours.",
     ),
 ]

@@ -1,11 +1,13 @@
 class MissingConfigOptionException(Exception):
     """Missing a config option."""
+
     def __init__(self, config_option):
         self.message = f"Option '{config_option}' was not in config file"


 class ConfigOptionBogusDefaultException(Exception):
     """Missing a config option."""
+
     def __init__(self, config_option, default_fail):
         self.message = (
             f"Config file option '{config_option}' needs to be "
@@ -7,7 +7,6 @@ from oslo_config import cfg

 from aprsd.conf import log as conf_log

-
 CONF = cfg.CONF
 # LOG = logging.getLogger("APRSD")
 LOG = logger

@@ -18,6 +17,7 @@ class QueueLatest(queue.Queue):

     This prevents the queue from blowing up in size.
     """
+
     def put(self, *args, **kwargs):
         try:
             super().put(*args, **kwargs)

@@ -43,7 +43,9 @@ class InterceptHandler(logging.Handler):
             frame = frame.f_back
             depth += 1

-        logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
+        logger.opt(depth=depth, exception=record.exc_info).log(
+            level, record.getMessage()
+        )


 # Setup the log faciility
@@ -22,11 +22,11 @@
 # python included libs
 import datetime
 import importlib.metadata as imp
-from importlib.metadata import version as metadata_version
 import logging
 import signal
 import sys
 import time
+from importlib.metadata import version as metadata_version

 import click
 from oslo_config import cfg, generator

@@ -36,7 +36,6 @@ import aprsd
 from aprsd import cli_helper, packets, threads, utils
 from aprsd.stats import collector

-
 # setup the global logger
 # log.basicConfig(level=log.DEBUG) # level=10
 CONF = cfg.CONF

@@ -54,8 +53,14 @@ def cli(ctx):

 def load_commands():
     from .cmds import (  # noqa
-        completion, dev, fetch_stats, healthcheck, list_plugins, listen,
-        send_message, server,
+        completion,
+        dev,
+        fetch_stats,
+        healthcheck,
+        list_plugins,
+        listen,
+        send_message,
+        server,
     )


@@ -115,6 +120,7 @@ def sample_config(ctx):

 def _get_selected_entry_points():
     import sys
+
     if sys.version_info < (3, 10):
         all = imp.entry_points()
         selected = []
@@ -1,15 +1,25 @@
 from aprsd.packets import collector
 from aprsd.packets.core import (  # noqa: F401
-    AckPacket, BeaconPacket, BulletinPacket, GPSPacket, MessagePacket,
-    MicEPacket, ObjectPacket, Packet, RejectPacket, StatusPacket,
-    ThirdPartyPacket, UnknownPacket, WeatherPacket, factory,
+    AckPacket,
+    BeaconPacket,
+    BulletinPacket,
+    GPSPacket,
+    MessagePacket,
+    MicEPacket,
+    ObjectPacket,
+    Packet,
+    RejectPacket,
+    StatusPacket,
+    ThirdPartyPacket,
+    UnknownPacket,
+    WeatherPacket,
+    factory,
 )
 from aprsd.packets.packet_list import PacketList  # noqa: F401
 from aprsd.packets.seen_list import SeenList  # noqa: F401
 from aprsd.packets.tracker import PacketTrack  # noqa: F401
 from aprsd.packets.watch_list import WatchList  # noqa: F401


 # Register all the packet tracking objects.
 collector.PacketCollector().register(PacketList)
 collector.PacketCollector().register(SeenList)
@ -1,20 +1,23 @@
|
|||||||
from dataclasses import dataclass, field
|
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import time
|
import time
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
# Due to a failure in python 3.8
|
# Due to a failure in python 3.8
|
||||||
from typing import Any, List, Optional, Type, TypeVar, Union
|
from typing import Any, List, Optional, Type, TypeVar, Union
|
||||||
|
|
||||||
from aprslib import util as aprslib_util
|
from aprslib import util as aprslib_util
|
||||||
from dataclasses_json import (
|
from dataclasses_json import (
|
||||||
CatchAll, DataClassJsonMixin, Undefined, dataclass_json,
|
CatchAll,
|
||||||
|
DataClassJsonMixin,
|
||||||
|
Undefined,
|
||||||
|
dataclass_json,
|
||||||
)
|
)
|
||||||
from loguru import logger
|
from loguru import logger
|
||||||
|
|
||||||
from aprsd.utils import counter
|
from aprsd.utils import counter
|
||||||
|
|
||||||
|
|
||||||
# For mypy to be happy
|
# For mypy to be happy
|
||||||
A = TypeVar("A", bound="DataClassJsonMixin")
|
A = TypeVar("A", bound="DataClassJsonMixin")
|
||||||
Json = Union[dict, list, str, int, float, bool, None]
|
Json = Union[dict, list, str, int, float, bool, None]
|
||||||
@ -51,7 +54,7 @@ def _init_send_time():
|
|||||||
return NO_DATE
|
return NO_DATE
|
||||||
|
|
||||||
|
|
||||||
def _init_msgNo(): # noqa: N802
|
def _init_msgNo(): # noqa: N802
|
||||||
"""For some reason __post__init doesn't get called.
|
"""For some reason __post__init doesn't get called.
|
||||||
|
|
||||||
So in order to initialize the msgNo field in the packet
|
So in order to initialize the msgNo field in the packet
|
||||||
@ -84,14 +87,16 @@ class Packet:
|
|||||||
to_call: Optional[str] = field(default=None)
|
to_call: Optional[str] = field(default=None)
|
||||||
addresse: Optional[str] = field(default=None)
|
addresse: Optional[str] = field(default=None)
|
||||||
format: Optional[str] = field(default=None)
|
format: Optional[str] = field(default=None)
|
||||||
msgNo: Optional[str] = field(default=None) # noqa: N815
|
msgNo: Optional[str] = field(default=None) # noqa: N815
|
||||||
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
||||||
packet_type: Optional[str] = field(default=None)
|
packet_type: Optional[str] = field(default=None)
|
||||||
timestamp: float = field(default_factory=_init_timestamp, compare=False, hash=False)
|
timestamp: float = field(default_factory=_init_timestamp, compare=False, hash=False)
|
||||||
# Holds the raw text string to be sent over the wire
|
# Holds the raw text string to be sent over the wire
|
||||||
# or holds the raw string from input packet
|
# or holds the raw string from input packet
|
||||||
raw: Optional[str] = field(default=None, compare=False, hash=False)
|
raw: Optional[str] = field(default=None, compare=False, hash=False)
|
||||||
raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
|
raw_dict: dict = field(
|
||||||
|
repr=False, default_factory=lambda: {}, compare=False, hash=False
|
||||||
|
)
|
||||||
# Built by calling prepare(). raw needs this built first.
|
# Built by calling prepare(). raw needs this built first.
|
||||||
payload: Optional[str] = field(default=None)
|
payload: Optional[str] = field(default=None)
|
||||||
|
|
||||||
@ -140,12 +145,12 @@ class Packet:
|
|||||||
def _build_payload(self) -> None:
|
def _build_payload(self) -> None:
|
||||||
"""The payload is the non headers portion of the packet."""
|
"""The payload is the non headers portion of the packet."""
|
||||||
if not self.to_call:
|
if not self.to_call:
|
||||||
raise ValueError("to_call isn't set. Must set to_call before calling prepare()")
|
raise ValueError(
|
||||||
|
"to_call isn't set. Must set to_call before calling prepare()"
|
||||||
|
)
|
||||||
|
|
||||||
# The base packet class has no real payload
|
# The base packet class has no real payload
|
||||||
self.payload = (
|
self.payload = f":{self.to_call.ljust(9)}"
|
||||||
f":{self.to_call.ljust(9)}"
|
|
||||||
)
|
|
||||||
|
|
||||||
def _build_raw(self) -> None:
|
def _build_raw(self) -> None:
|
||||||
"""Build the self.raw which is what is sent over the air."""
|
"""Build the self.raw which is what is sent over the air."""
|
||||||
@ -166,8 +171,10 @@ class Packet:
|
|||||||
message = msg[:67]
|
message = msg[:67]
|
||||||
# We all miss George Carlin
|
# We all miss George Carlin
|
||||||
return re.sub(
|
return re.sub(
|
||||||
"fuck|shit|cunt|piss|cock|bitch", "****",
|
"fuck|shit|cunt|piss|cock|bitch",
|
||||||
message, flags=re.IGNORECASE,
|
"****",
|
||||||
|
message,
|
||||||
|
flags=re.IGNORECASE,
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
@ -214,10 +221,7 @@ class BulletinPacket(Packet):
|
|||||||
return f"BLN{self.bid} {self.message_text}"
|
return f"BLN{self.bid} {self.message_text}"
|
||||||
|
|
||||||
def _build_payload(self) -> None:
|
def _build_payload(self) -> None:
|
||||||
self.payload = (
|
self.payload = f":BLN{self.bid:<9}" f":{self.message_text}"
|
||||||
f":BLN{self.bid:<9}"
|
|
||||||
f":{self.message_text}"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass_json
|
@dataclass_json
|
||||||
@ -335,10 +339,7 @@ class GPSPacket(Packet):
|
|||||||
self.payload = "".join(payload)
|
self.payload = "".join(payload)
|
||||||
|
|
||||||
def _build_raw(self):
|
def _build_raw(self):
|
||||||
self.raw = (
|
self.raw = f"{self.from_call}>{self.to_call},WIDE2-1:" f"{self.payload}"
|
||||||
f"{self.from_call}>{self.to_call},WIDE2-1:"
|
|
||||||
f"{self.payload}"
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def human_info(self) -> str:
|
def human_info(self) -> str:
|
||||||
@ -370,10 +371,7 @@ class BeaconPacket(GPSPacket):
|
|||||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||||
lon = aprslib_util.longitude_to_ddm(self.longitude)
|
lon = aprslib_util.longitude_to_ddm(self.longitude)
|
||||||
|
|
||||||
self.payload = (
|
self.payload = f"@{time_zulu}z{lat}{self.symbol_table}" f"{lon}"
|
||||||
f"@{time_zulu}z{lat}{self.symbol_table}"
|
|
||||||
f"{lon}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.comment:
|
if self.comment:
|
||||||
comment = self._filter_for_send(self.comment)
|
comment = self._filter_for_send(self.comment)
|
||||||
@ -382,10 +380,7 @@ class BeaconPacket(GPSPacket):
|
|||||||
self.payload = f"{self.payload}{self.symbol}APRSD Beacon"
|
self.payload = f"{self.payload}{self.symbol}APRSD Beacon"
|
||||||
|
|
||||||
def _build_raw(self):
|
def _build_raw(self):
|
||||||
self.raw = (
|
self.raw = f"{self.from_call}>APZ100:" f"{self.payload}"
|
||||||
f"{self.from_call}>APZ100:"
|
|
||||||
f"{self.payload}"
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def key(self) -> str:
|
def key(self) -> str:
|
||||||
@ -474,10 +469,7 @@ class ObjectPacket(GPSPacket):
|
|||||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||||
long = aprslib_util.longitude_to_ddm(self.longitude)
|
long = aprslib_util.longitude_to_ddm(self.longitude)
|
||||||
|
|
||||||
self.payload = (
|
self.payload = f"*{time_zulu}z{lat}{self.symbol_table}" f"{long}{self.symbol}"
|
||||||
f"*{time_zulu}z{lat}{self.symbol_table}"
|
|
||||||
f"{long}{self.symbol}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if self.comment:
|
if self.comment:
|
||||||
comment = self._filter_for_send(self.comment)
|
comment = self._filter_for_send(self.comment)
|
||||||
@ -494,10 +486,7 @@ class ObjectPacket(GPSPacket):
|
|||||||
The frequency, uplink_tone, offset is part of the comment
|
The frequency, uplink_tone, offset is part of the comment
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.raw = (
|
self.raw = f"{self.from_call}>APZ100:;{self.to_call:9s}" f"{self.payload}"
|
||||||
f"{self.from_call}>APZ100:;{self.to_call:9s}"
|
|
||||||
f"{self.payload}"
|
|
||||||
)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def human_info(self) -> str:
|
def human_info(self) -> str:
|
||||||
@@ -547,11 +536,13 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
             if "speed" in raw:
                 del raw["speed"]
             # Let's adjust the rain numbers as well, since it's wrong
-            raw["rain_1h"] = round((raw.get("rain_1h", 0) / .254) * .01, 3)
+            raw["rain_1h"] = round((raw.get("rain_1h", 0) / 0.254) * 0.01, 3)
             raw["weather"]["rain_1h"] = raw["rain_1h"]
-            raw["rain_24h"] = round((raw.get("rain_24h", 0) / .254) * .01, 3)
+            raw["rain_24h"] = round((raw.get("rain_24h", 0) / 0.254) * 0.01, 3)
             raw["weather"]["rain_24h"] = raw["rain_24h"]
-            raw["rain_since_midnight"] = round((raw.get("rain_since_midnight", 0) / .254) * .01, 3)
+            raw["rain_since_midnight"] = round(
+                (raw.get("rain_since_midnight", 0) / 0.254) * 0.01, 3
+            )
             raw["weather"]["rain_since_midnight"] = raw["rain_since_midnight"]

             if "wind_direction" not in raw:
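The rain adjustment above only changes formatting (leading zeros on the float literals and a wrapped call); the arithmetic is untouched. A small worked example of that conversion, with made-up input values:

    def rain_counts_to_inches(value: float) -> float:
        # Same arithmetic as the hunk above: value / 0.254 * 0.01, rounded to 3 places.
        return round((value / 0.254) * 0.01, 3)

    print(rain_counts_to_inches(2.54))   # 0.1
    print(rain_counts_to_inches(25.4))   # 1.0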
@@ -593,26 +584,26 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
     def _build_payload(self):
         """Build an uncompressed weather packet

        Format =

       _CSE/SPDgXXXtXXXrXXXpXXXPXXXhXXbXXXXX%type   NEW FORMAT APRS793 June 97
                                                     NOT BACKWARD COMPATIBLE


        Where: CSE/SPD is wind direction and sustained 1 minute speed
        t is in degrees F

        r is Rain per last 60 minutes
            1.04 inches of rain will show as r104
        p is precipitation per last 24 hours (sliding 24 hour window)
        P is precip per last 24 hours since midnight
        b is Baro in tenths of a mb
        h is humidity in percent. 00=100
        g is Gust (peak winds in last 5 minutes)
        # is the raw rain counter for remote WX stations
        See notes on remotes below
        % shows software type d=Dos, m=Mac, w=Win, etc
        type shows type of WX instrument

        """
        time_zulu = self._build_time_zulu()
@@ -622,7 +613,8 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
             f"{self.longitude}{self.symbol}",
             f"{self.wind_direction:03d}",
             # Speed = sustained 1 minute wind speed in mph
-            f"{self.symbol_table}", f"{self.wind_speed:03.0f}",
+            f"{self.symbol_table}",
+            f"{self.wind_speed:03.0f}",
             # wind gust (peak wind speed in mph in the last 5 minutes)
             f"g{self.wind_gust:03.0f}",
             # Temperature in degrees F
@@ -644,11 +636,7 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
         self.payload = "".join(contents)

     def _build_raw(self):
-        self.raw = (
-            f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:"
-            f"{self.payload}"
-        )
+        self.raw = f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:" f"{self.payload}"


 @dataclass(unsafe_hash=True)
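Putting the weather hunks together: the uncompressed payload is plain concatenation of fixed-width fields, and _build_raw prepends the "FROM>TO,path:" header. A rough sketch of the resulting frame using invented station values; the "/" symbol table, "_" weather symbol, and the exact field order here are simplifying assumptions, not a copy of the class:

    from_call = "N0CALL"
    to_call = "APZ100"
    time_zulu = "221450"
    latitude = "3724.15N"
    longitude = "10748.58W"
    wind_direction, wind_speed, wind_gust = 90, 5.0, 8.0

    payload = "".join(
        [
            f"@{time_zulu}z{latitude}",
            f"/{longitude}_",           # assumed table '/' and symbol '_'
            f"{wind_direction:03d}",
            f"/{wind_speed:03.0f}",
            f"g{wind_gust:03.0f}",
        ]
    )
    raw = f"{from_call}>{to_call},WIDE1-1,WIDE2-1:" f"{payload}"
    print(raw)
    # N0CALL>APZ100,WIDE1-1,WIDE2-1:@221450z3724.15N/10748.58W_090/005g008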
@@ -692,14 +680,17 @@ class UnknownPacket:

     All of the unknown attributes are stored in the unknown_fields
     """

     unknown_fields: CatchAll
     _type: str = "UnknownPacket"
     from_call: Optional[str] = field(default=None)
     to_call: Optional[str] = field(default=None)
     msgNo: str = field(default_factory=_init_msgNo)  # noqa: N815
     format: Optional[str] = field(default=None)
     raw: Optional[str] = field(default=None)
-    raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
+    raw_dict: dict = field(
+        repr=False, default_factory=lambda: {}, compare=False, hash=False
+    )
     path: List[str] = field(default_factory=list, compare=False, hash=False)
     packet_type: Optional[str] = field(default=None)
     via: Optional[str] = field(default=None, compare=False, hash=False)
@@ -8,7 +8,6 @@ from oslo_config import cfg
 from aprsd import utils
 from aprsd.packets.core import AckPacket, GPSPacket, RejectPacket
-

 LOG = logging.getLogger()
 LOGU = logger
 CONF = cfg.CONF

@@ -22,7 +21,9 @@ DISTANCE_COLOR = "fg #FF5733"
 DEGREES_COLOR = "fg #FFA900"


-def log_multiline(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> None:
+def log_multiline(
+    packet, tx: Optional[bool] = False, header: Optional[bool] = True
+) -> None:
     """LOG a packet to the logfile."""
     if not CONF.enable_packet_logging:
         return
@@ -121,8 +122,7 @@ def log(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> No
         via_color = "green"
         arrow = f"<{via_color}>-></{via_color}>"
         logit.append(
-            f"<cyan>{name}</cyan>"
-            f":{packet.msgNo}",
+            f"<cyan>{name}</cyan>" f":{packet.msgNo}",
         )

     tmp = None
@@ -1,18 +1,18 @@
-from collections import OrderedDict
 import logging
+from collections import OrderedDict

 from oslo_config import cfg

 from aprsd.packets import core
 from aprsd.utils import objectstore


 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")


 class PacketList(objectstore.ObjectStoreMixin):
     """Class to keep track of the packets we tx/rx."""

     _instance = None
     _total_rx: int = 0
     _total_tx: int = 0
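This hunk and several that follow are pure import reordering: ruff's import-sorting rules group standard-library imports first, third-party packages second, and local aprsd modules last, each group alphabetized. A hedged illustration of the convention (the module names are just the ones visible in the hunk above):

    # standard library
    import logging
    from collections import OrderedDict

    # third-party
    from oslo_config import cfg

    # local package
    from aprsd.packets import core
    from aprsd.utils import objectstore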
@@ -38,7 +38,8 @@ class PacketList(objectstore.ObjectStoreMixin):
             self._add(packet)
             ptype = packet.__class__.__name__
             type_stats = self.data["types"].setdefault(
-                ptype, {"tx": 0, "rx": 0},
+                ptype,
+                {"tx": 0, "rx": 0},
             )
             type_stats["rx"] += 1

@@ -49,7 +50,8 @@ class PacketList(objectstore.ObjectStoreMixin):
             self._add(packet)
             ptype = packet.__class__.__name__
             type_stats = self.data["types"].setdefault(
-                ptype, {"tx": 0, "rx": 0},
+                ptype,
+                {"tx": 0, "rx": 0},
             )
             type_stats["tx"] += 1

@@ -86,10 +88,11 @@ class PacketList(objectstore.ObjectStoreMixin):
         with self.lock:
             # Get last N packets directly using list slicing
             packets_list = list(self.data.get("packets", {}).values())
-            pkts = packets_list[-CONF.packet_list_stats_maxlen:][::-1]
+            pkts = packets_list[-CONF.packet_list_stats_maxlen :][::-1]

             stats = {
-                "total_tracked": self._total_rx + self._total_tx,  # Fixed typo: was rx + rx
+                "total_tracked": self._total_rx
+                + self._total_tx,  # Fixed typo: was rx + rx
                 "rx": self._total_rx,
                 "tx": self._total_tx,
                 "types": self.data.get("types", {}),  # Changed default from [] to {}
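The slice above keeps only the most recent packets and reverses them so the newest comes first; the space ruff inserts before the ":" is purely cosmetic. A small standalone example of the same slice, with an arbitrary maxlen:

    packets_list = list(range(10))      # stand-in for the stored packets
    packet_list_stats_maxlen = 3

    # Take the last N items, newest first.
    pkts = packets_list[-packet_list_stats_maxlen:][::-1]
    print(pkts)  # [9, 8, 7]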
@@ -8,14 +8,13 @@ import re
 import textwrap
 import threading

-from oslo_config import cfg
 import pluggy
+from oslo_config import cfg

 import aprsd
 from aprsd import client, packets, threads
 from aprsd.packets import watch_list


 # setup the global logger
 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
@@ -166,7 +165,8 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
             except Exception as ex:
                 LOG.error(
                     "Plugin {} failed to process packet {}".format(
-                        self.__class__, ex,
+                        self.__class__,
+                        ex,
                     ),
                 )
         if result:

@@ -214,7 +214,9 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
             return result

         if not isinstance(packet, packets.MessagePacket):
-            LOG.warning(f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring")
+            LOG.warning(
+                f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring"
+            )
             return packets.NULL_MESSAGE

         result = None
@@ -236,7 +238,8 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
             except Exception as ex:
                 LOG.error(
                     "Plugin {} failed to process packet {}".format(
-                        self.__class__, ex,
+                        self.__class__,
+                        ex,
                     ),
                 )
                 LOG.exception(ex)

@@ -286,7 +289,8 @@ class HelpPlugin(APRSDRegexCommandPluginBase):
         reply = None
         for p in pm.get_plugins():
             if (
-                p.enabled and isinstance(p, APRSDRegexCommandPluginBase)
+                p.enabled
+                and isinstance(p, APRSDRegexCommandPluginBase)
                 and p.command_name.lower() == command_name
             ):
                 reply = p.help()
@@ -345,6 +349,7 @@ class PluginManager:

     def stats(self, serializable=False) -> dict:
         """Collect and return stats for all plugins."""
+
         def full_name_with_qualname(obj):
             return "{}.{}".format(
                 obj.__class__.__module__,

@@ -354,7 +359,6 @@ class PluginManager:
         plugin_stats = {}
         plugins = self.get_plugins()
         if plugins:
-
             for p in plugins:
                 plugin_stats[full_name_with_qualname(p)] = {
                     "enabled": p.enabled,
@@ -439,7 +443,9 @@ class PluginManager:
                     )
                     self._watchlist_pm.register(plugin_obj)
                 else:
-                    LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
+                    LOG.warning(
+                        f"Plugin {plugin_obj.__class__.__name__} is disabled"
+                    )
             elif isinstance(plugin_obj, APRSDRegexCommandPluginBase):
                 if plugin_obj.enabled:
                     LOG.info(

@@ -451,7 +457,9 @@ class PluginManager:
                     )
                     self._pluggy_pm.register(plugin_obj)
                 else:
-                    LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
+                    LOG.warning(
+                        f"Plugin {plugin_obj.__class__.__name__} is disabled"
+                    )
             elif isinstance(plugin_obj, APRSDPluginBase):
                 if plugin_obj.enabled:
                     LOG.info(

@@ -462,7 +470,9 @@ class PluginManager:
                     )
                     self._pluggy_pm.register(plugin_obj)
                 else:
-                    LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
+                    LOG.warning(
+                        f"Plugin {plugin_obj.__class__.__name__} is disabled"
+                    )
         except Exception as ex:
             LOG.error(f"Couldn't load plugin '{plugin_name}'")
             LOG.exception(ex)
@@ -473,7 +483,8 @@ class PluginManager:
         self.setup_plugins(load_help_plugin=CONF.load_help_plugin)

     def setup_plugins(
-        self, load_help_plugin=True,
+        self,
+        load_help_plugin=True,
         plugin_list=[],
     ):
         """Create the plugin manager and register plugins."""
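For context on the plugin classes being reshuffled in these hunks: a regex command plugin only needs a command regex, a command name, and a process() method that returns the reply text. A minimal sketch assuming the APRSDRegexCommandPluginBase interface shown above; the PingPlugin name, regex, and reply are invented for illustration:

    from aprsd import packets, plugin


    class PingPlugin(plugin.APRSDRegexCommandPluginBase):
        """Reply to 'ping' messages (illustrative only)."""

        command_regex = r"^[pP]ing"
        command_name = "ping"

        def process(self, packet: packets.MessagePacket):
            # The returned string is sent back to the calling station.
            return "pong"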
@@ -4,7 +4,6 @@ from oslo_config import cfg

 from aprsd import packets, plugin
-

 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -43,9 +42,7 @@ class NotifySeenPlugin(plugin.APRSDWatchListPluginBase):
             pkt = packets.MessagePacket(
                 from_call=CONF.callsign,
                 to_call=notify_callsign,
-                message_text=(
-                    f"{fromcall} was just seen by type:'{packet_type}'"
-                ),
+                message_text=(f"{fromcall} was just seen by type:'{packet_type}'"),
                 allow_delay=False,
             )
             pkt.allow_delay = False
@@ -2,13 +2,12 @@ import json
 import logging
 import re

-from oslo_config import cfg
 import requests
+from oslo_config import cfg

 from aprsd import plugin, plugin_utils
 from aprsd.utils import trace
-

 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -205,8 +204,9 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):

     def help(self):
         _help = [
-            "openweathermap: Send {} to get weather "
-            "from your location".format(self.command_regex),
+            "openweathermap: Send {} to get weather " "from your location".format(
+                self.command_regex
+            ),
             "openweathermap: Send {} <callsign> to get "
             "weather from <callsign>".format(self.command_regex),
         ]
@@ -327,10 +327,12 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):

     def help(self):
         _help = [
-            "avwxweather: Send {} to get weather "
-            "from your location".format(self.command_regex),
-            "avwxweather: Send {} <callsign> to get "
-            "weather from <callsign>".format(self.command_regex),
+            "avwxweather: Send {} to get weather " "from your location".format(
+                self.command_regex
+            ),
+            "avwxweather: Send {} <callsign> to get " "weather from <callsign>".format(
+                self.command_regex
+            ),
         ]
         return _help

@@ -3,13 +3,13 @@ from typing import Callable, Protocol, runtime_checkable

 from aprsd.utils import singleton


 LOG = logging.getLogger("APRSD")


 @runtime_checkable
 class StatsProducer(Protocol):
     """The StatsProducer protocol is used to define the interface for collecting stats."""

     def stats(self, serializable=False) -> dict:
         """provide stats in a dictionary format."""
         ...

@@ -18,6 +18,7 @@ class StatsProducer(Protocol):
 @singleton
 class Collector:
     """The Collector class is used to collect stats from multiple StatsProducer instances."""
+
     def __init__(self):
         self.producers: list[Callable] = []

@@ -26,7 +27,9 @@ class Collector:
         for name in self.producers:
             cls = name()
             try:
-                stats[cls.__class__.__name__] = cls.stats(serializable=serializable).copy()
+                stats[cls.__class__.__name__] = cls.stats(
+                    serializable=serializable
+                ).copy()
             except Exception as e:
                 LOG.error(f"Error in producer {name} (stats): {e}")
         return stats
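The Collector above simply instantiates every registered producer class, calls its stats(), and files the result under the class name. A hedged usage sketch: DummyStats is invented, and the name of the collecting method (written as collect() below) is an assumption; only the self.producers attribute and the loop body are shown in the hunk:

    from aprsd.stats.collector import Collector


    class DummyStats:
        """Illustrative producer following the StatsProducer protocol."""

        def stats(self, serializable=False) -> dict:
            return {"loops": 42}


    collector = Collector()                  # singleton, per the decorator above
    collector.producers.append(DummyStats)   # each entry is instantiated per pass
    print(collector.collect())               # assumed name: {'DummyStats': {'loops': 42}}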
@@ -4,8 +4,9 @@ import queue

 # aprsd.threads
 from .aprsd import APRSDThread, APRSDThreadList  # noqa: F401
 from .rx import (  # noqa: F401
-    APRSDDupeRXThread, APRSDProcessPacketThread, APRSDRXThread,
+    APRSDDupeRXThread,
+    APRSDProcessPacketThread,
+    APRSDRXThread,
 )


 packet_queue = queue.Queue(maxsize=20)
@@ -7,7 +7,6 @@ from typing import List

 import wrapt
-

 LOG = logging.getLogger("APRSD")

@@ -25,7 +24,7 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
         self._last_loop = datetime.datetime.now()

     def _should_quit(self):
-        """ see if we have a quit message from the global queue."""
+        """see if we have a quit message from the global queue."""
         if self.thread_stop:
             return True

@@ -51,7 +50,9 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
         """Add code to subclass to do any cleanup"""

     def __str__(self):
-        out = f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
+        out = (
+            f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
+        )
         return out

     def loop_age(self):
@@ -124,7 +125,7 @@ class APRSDThreadList:
         for th in self.threads_list:
             LOG.info(f"Stopping Thread {th.name}")
             if hasattr(th, "packet"):
-                LOG.info(F"{th.name} packet {th.packet}")
+                LOG.info(f"{th.name} packet {th.packet}")
             th.stop()

     @wrapt.synchronized

@@ -133,7 +134,7 @@ class APRSDThreadList:
         for th in self.threads_list:
             LOG.info(f"Pausing Thread {th.name}")
             if hasattr(th, "packet"):
-                LOG.info(F"{th.name} packet {th.packet}")
+                LOG.info(f"{th.name} packet {th.packet}")
             th.pause()

     @wrapt.synchronized

@@ -142,7 +143,7 @@ class APRSDThreadList:
         for th in self.threads_list:
             LOG.info(f"Resuming Thread {th.name}")
             if hasattr(th, "packet"):
-                LOG.info(F"{th.name} packet {th.packet}")
+                LOG.info(f"{th.name} packet {th.packet}")
             th.unpause()

     @wrapt.synchronized(lock)
@@ -153,7 +154,11 @@ class APRSDThreadList:
             alive = thread.is_alive()
             age = thread.loop_age()
             key = thread.__class__.__name__
-            info[key] = {"alive": True if alive else False, "age": age, "name": thread.name}
+            info[key] = {
+                "alive": True if alive else False,
+                "age": age,
+                "name": thread.name,
+            }
         return info

     @wrapt.synchronized(lock)
@@ -9,8 +9,8 @@ from oslo_config import cfg
 from aprsd import packets, utils
 from aprsd.log import log as aprsd_log
 from aprsd.stats import collector
-from aprsd.threads import APRSDThread, APRSDThreadList, keepalive_collector
+from aprsd.threads import APRSDThread, APRSDThreadList
+from aprsd.utils import keepalive_collector

 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -34,9 +34,14 @@ class KeepAliveThread(APRSDThread):
         thread_list = APRSDThreadList()
         now = datetime.datetime.now()

-        if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
+        if (
+            "APRSClientStats" in stats_json
+            and stats_json["APRSClientStats"].get("transport") == "aprsis"
+        ):
             if stats_json["APRSClientStats"].get("server_keepalive"):
-                last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
+                last_msg_time = utils.strfdelta(
+                    now - stats_json["APRSClientStats"]["server_keepalive"]
+                )
             else:
                 last_msg_time = "N/A"
         else:
@@ -1,19 +1,19 @@
 import logging
 import time

-from oslo_config import cfg
 import requests
+from oslo_config import cfg

 import aprsd
 from aprsd import threads as aprsd_threads


 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")


 class APRSRegistryThread(aprsd_threads.APRSDThread):
     """This sends service information to the configured APRS Registry."""

     _loop_cnt: int = 1

     def __init__(self):

@@ -41,7 +41,7 @@ class APRSRegistryThread(aprsd_threads.APRSDThread):
             "description": CONF.aprs_registry.description,
             "service_website": CONF.aprs_registry.service_website,
             "software": f"APRSD version {aprsd.__version__} "
             "https://github.com/craigerl/aprsd",
         }
         try:
             requests.post(
@@ -13,7 +13,6 @@ from aprsd.packets import log as packet_log
 from aprsd.threads import APRSDThread, tx
 from aprsd.utils import trace
-

 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -53,7 +52,9 @@ class APRSDRXThread(APRSDThread):
             # kwargs. :(
             # https://github.com/rossengeorgiev/aprs-python/pull/56
             self._client.consumer(
-                self._process_packet, raw=False, blocking=False,
+                self._process_packet,
+                raw=False,
+                blocking=False,
             )
         except (
             aprslib.exceptions.ConnectionDrop,

@@ -138,7 +139,9 @@ class APRSDDupeRXThread(APRSDRXThread):
         elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
             # If the packet came in within N seconds of the
             # Last time seeing the packet, then we drop it as a dupe.
-            LOG.warning(f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.")
+            LOG.warning(
+                f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping."
+            )
         else:
             LOG.warning(
                 f"Packet {packet.from_call}:{packet.msgNo} already tracked "

@@ -149,7 +152,7 @@ class APRSDDupeRXThread(APRSDRXThread):


 class APRSDPluginRXThread(APRSDDupeRXThread):
-    """"Process received packets.
+    """ "Process received packets.

     For backwards compatibility, we keep the APRSDPluginRXThread.
     """

@@ -249,7 +252,8 @@ class APRSDProcessPacketThread(APRSDThread):
                 self.process_other_packet(packet, for_us=False)
         else:
             self.process_other_packet(
-                packet, for_us=(to_call.lower() == our_call),
+                packet,
+                for_us=(to_call.lower() == our_call),
             )
         LOG.debug(f"Packet processing complete for pkt '{packet.key}'")
         return False

@@ -349,7 +353,6 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
             # If the message was for us and we didn't have a
             # response, then we send a usage statement.
             if to_call == CONF.callsign and not replied:
-
                 # Tailor the messages accordingly
                 if CONF.load_help_plugin:
                     LOG.warning("Sending help!")
@@ -2,20 +2,20 @@ import logging
 import threading
 import time

-from oslo_config import cfg
 import wrapt
+from oslo_config import cfg

 from aprsd.stats import collector
 from aprsd.threads import APRSDThread
 from aprsd.utils import objectstore


 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")


 class StatsStore(objectstore.ObjectStoreMixin):
     """Container to save the stats from the collector."""

     lock = threading.Lock()
     data = {}
@@ -2,20 +2,18 @@ import logging
 import threading
 import time

+import wrapt
 from oslo_config import cfg
 from rush import quota, throttle
 from rush.contrib import decorator
 from rush.limiters import periodic
 from rush.stores import dictionary
-import wrapt

 from aprsd import conf  # noqa
 from aprsd import threads as aprsd_threads
 from aprsd.client import client_factory
-from aprsd.packets import collector, core
+from aprsd.packets import collector, core, tracker
 from aprsd.packets import log as packet_log
-from aprsd.packets import tracker


 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")

@@ -238,6 +236,7 @@ class BeaconSendThread(aprsd_threads.APRSDThread):

     Settings are in the [DEFAULT] section of the config file.
     """
+
     _loop_cnt: int = 1

     def __init__(self):
@@ -13,11 +13,11 @@ import update_checker

 import aprsd

 from .fuzzyclock import fuzzy  # noqa: F401

 # Make these available by anyone importing
 # aprsd.utils
 from .ring_buffer import RingBuffer  # noqa: F401

 if sys.version_info.major == 3 and sys.version_info.minor >= 3:
     from collections.abc import MutableMapping
 else:

@@ -26,11 +26,13 @@ else:

 def singleton(cls):
     """Make a class a Singleton class (only one instance)"""
+
     @functools.wraps(cls)
     def wrapper_singleton(*args, **kwargs):
         if wrapper_singleton.instance is None:
             wrapper_singleton.instance = cls(*args, **kwargs)
         return wrapper_singleton.instance
+
     wrapper_singleton.instance = None
     return wrapper_singleton

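The blank lines ruff adds inside singleton() above are cosmetic; the behavior is unchanged: every call to a decorated class returns the same instance. A quick usage sketch, where Config is an invented example class:

    from aprsd.utils import singleton


    @singleton
    class Config:
        def __init__(self):
            self.values = {}


    a = Config()
    b = Config()
    assert a is b  # only one instance is ever created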
@@ -170,7 +172,10 @@ def load_entry_points(group):
     try:
         ep.load()
     except Exception as e:
-        print(f"Extension {ep.name} of group {group} failed to load with {e}", file=sys.stderr)
+        print(
+            f"Extension {ep.name} of group {group} failed to load with {e}",
+            file=sys.stderr,
+        )
         print(traceback.format_exc(), file=sys.stderr)

@@ -200,8 +205,7 @@ def calculate_initial_compass_bearing(point_a, point_b):

     x = math.sin(diff_long) * math.cos(lat2)
     y = math.cos(lat1) * math.sin(lat2) - (
-        math.sin(lat1)
-        * math.cos(lat2) * math.cos(diff_long)
+        math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
     )

     initial_bearing = math.atan2(x, y)
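calculate_initial_compass_bearing() uses the standard forward-azimuth formula; the reflowed y term above is the same expression on one line. A self-contained version of that math for two sample points (the coordinates are arbitrary):

    import math


    def initial_bearing(point_a, point_b):
        # Forward azimuth: atan2(x, y), normalized to 0-360 degrees.
        lat1, lat2 = math.radians(point_a[0]), math.radians(point_b[0])
        diff_long = math.radians(point_b[1] - point_a[1])
        x = math.sin(diff_long) * math.cos(lat2)
        y = math.cos(lat1) * math.sin(lat2) - (
            math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
        )
        return (math.degrees(math.atan2(x, y)) + 360) % 360


    print(round(initial_bearing((37.0, -107.0), (38.0, -107.0))))  # 0 (due north)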
@@ -218,15 +222,43 @@ def calculate_initial_compass_bearing(point_a, point_b):
 def degrees_to_cardinal(bearing, full_string=False):
     if full_string:
         directions = [
-            "North", "North-Northeast", "Northeast", "East-Northeast", "East", "East-Southeast",
-            "Southeast", "South-Southeast", "South", "South-Southwest", "Southwest", "West-Southwest",
-            "West", "West-Northwest", "Northwest", "North-Northwest", "North",
+            "North",
+            "North-Northeast",
+            "Northeast",
+            "East-Northeast",
+            "East",
+            "East-Southeast",
+            "Southeast",
+            "South-Southeast",
+            "South",
+            "South-Southwest",
+            "Southwest",
+            "West-Southwest",
+            "West",
+            "West-Northwest",
+            "Northwest",
+            "North-Northwest",
+            "North",
         ]
     else:
         directions = [
-            "N", "NNE", "NE", "ENE", "E", "ESE",
-            "SE", "SSE", "S", "SSW", "SW", "WSW",
-            "W", "WNW", "NW", "NNW", "N",
+            "N",
+            "NNE",
+            "NE",
+            "ENE",
+            "E",
+            "ESE",
+            "SE",
+            "SSE",
+            "S",
+            "SSW",
+            "SW",
+            "WSW",
+            "W",
+            "WNW",
+            "NW",
+            "NNW",
+            "N",
         ]

     cardinal = directions[round(bearing / 22.5)]
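The exploded direction lists above still map a bearing onto one of sixteen 22.5-degree sectors, with "N"/"North" repeated at the end so bearings near 360 wrap cleanly. A worked example of the index arithmetic:

    directions = [
        "N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE",
        "S", "SSW", "SW", "WSW", "W", "WNW", "NW", "NNW", "N",
    ]

    for bearing in (0, 101, 350):
        # round(bearing / 22.5) picks the nearest 22.5-degree sector.
        print(bearing, directions[round(bearing / 22.5)])
    # 0 N / 101 E / 350 N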
@@ -10,8 +10,13 @@ class EnhancedJSONEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime.datetime):
             args = (
-                "year", "month", "day", "hour", "minute",
-                "second", "microsecond",
+                "year",
+                "month",
+                "day",
+                "hour",
+                "minute",
+                "second",
+                "microsecond",
             )
             return {
                 "__type__": "datetime.datetime",

@@ -63,10 +68,10 @@ class SimpleJSONEncoder(json.JSONEncoder):


 class EnhancedJSONDecoder(json.JSONDecoder):

     def __init__(self, *args, **kwargs):
         super().__init__(
-            *args, object_hook=self.object_hook,
+            *args,
+            object_hook=self.object_hook,
             **kwargs,
         )
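The EnhancedJSONEncoder/EnhancedJSONDecoder pair reformatted above round-trips datetime objects through a tagged dict. A hedged sketch of how the pair is typically used; the dumps/loads calls are plain json, and the aprsd.utils.json module path is an assumption based on the class names in this hunk:

    import datetime
    import json

    from aprsd.utils.json import EnhancedJSONDecoder, EnhancedJSONEncoder

    now = datetime.datetime(2024, 12, 14, 22, 14, 50)
    blob = json.dumps({"seen": now}, cls=EnhancedJSONEncoder)
    restored = json.loads(blob, cls=EnhancedJSONDecoder)
    assert restored["seen"] == now  # datetime survives the round trip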
@@ -3,13 +3,13 @@ from typing import Callable, Protocol, runtime_checkable

 from aprsd.utils import singleton


 LOG = logging.getLogger("APRSD")


 @runtime_checkable
 class KeepAliveProducer(Protocol):
     """The KeepAliveProducer protocol is used to define the interface for running Keepalive checks."""

     def keepalive_check(self) -> dict:
         """Check for keepalive."""
         ...

@@ -22,6 +22,7 @@ class KeepAliveProducer(Protocol):
 @singleton
 class KeepAliveCollector:
     """The Collector class is used to collect stats from multiple StatsProducer instances."""
+
     def __init__(self):
         self.producers: list[Callable] = []
@@ -5,7 +5,6 @@ import logging
 import time
 import types
-

 VALID_TRACE_FLAGS = {"method", "api"}
 TRACE_API = False
 TRACE_METHOD = False

@@ -27,7 +26,6 @@ def trace(*dec_args, **dec_kwargs):
     """

     def _decorator(f):
-
         func_name = f.__qualname__
         func_file = "/".join(f.__code__.co_filename.split("/")[-4:])

@@ -18,7 +18,7 @@ description = "APRSd is a APRS-IS server that can be used to connect to APRS-IS
 # 'Programming Language' classifiers in this file, 'pip install' will check this
 # and refuse to install the project if the version does not match. See
 # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
-requires-python = ">=3.8"
+requires-python = ">=3.9"

 dynamic = ["version", "dependencies", "optional-dependencies"]

@@ -1,12 +1,4 @@
 build
-check-manifest
-flake8
-gray
-isort
-mypy
-pep8-naming
-pytest
-pytest-cov
 pip
 pip-tools
 pre-commit
@@ -1,65 +1,40 @@
 #
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
 #    pip-compile --annotation-style=line requirements-dev.in
 #
-add-trailing-comma==3.1.0  # via gray
 alabaster==1.0.0  # via sphinx
-autoflake==1.5.3  # via gray
 babel==2.16.0  # via sphinx
-black==24.10.0  # via gray
-build==1.2.2.post1  # via -r requirements-dev.in, check-manifest, pip-tools
+build==1.2.2.post1  # via -r requirements-dev.in, pip-tools
 cachetools==5.5.0  # via tox
-certifi==2024.8.30  # via requests
+certifi==2024.12.14  # via requests
 cfgv==3.4.0  # via pre-commit
 chardet==5.2.0  # via tox
 charset-normalizer==3.4.0  # via requests
-check-manifest==0.50  # via -r requirements-dev.in
-click==8.1.7  # via black, fixit, moreorless, pip-tools
+click==8.1.7  # via pip-tools
 colorama==0.4.6  # via tox
-commonmark==0.9.1  # via rich
-configargparse==1.7  # via gray
-coverage[toml]==7.6.9  # via pytest-cov
 distlib==0.3.9  # via virtualenv
 docutils==0.21.2  # via m2r, sphinx
 filelock==3.16.1  # via tox, virtualenv
-fixit==2.1.0  # via gray
-flake8==7.1.1  # via -r requirements-dev.in, pep8-naming
-gray==0.15.0  # via -r requirements-dev.in
 identify==2.6.3  # via pre-commit
 idna==3.10  # via requests
 imagesize==1.4.1  # via sphinx
-iniconfig==2.0.0  # via pytest
-isort==5.13.2  # via -r requirements-dev.in, gray
 jinja2==3.1.4  # via sphinx
-libcst==1.5.1  # via fixit
 m2r==0.3.1  # via -r requirements-dev.in
 markupsafe==3.0.2  # via jinja2
-mccabe==0.7.0  # via flake8
 mistune==0.8.4  # via m2r
-moreorless==0.4.0  # via fixit
-mypy==1.13.0  # via -r requirements-dev.in
-mypy-extensions==1.0.0  # via black, mypy
 nodeenv==1.9.1  # via pre-commit
-packaging==24.2  # via black, build, fixit, pyproject-api, pytest, sphinx, tox
-pathspec==0.12.1  # via black, trailrunner
-pep8-naming==0.14.1  # via -r requirements-dev.in
+packaging==24.2  # via build, pyproject-api, sphinx, tox
 pip-tools==7.4.1  # via -r requirements-dev.in
-platformdirs==4.3.6  # via black, tox, virtualenv
-pluggy==1.5.0  # via pytest, tox
+platformdirs==4.3.6  # via tox, virtualenv
+pluggy==1.5.0  # via tox
 pre-commit==4.0.1  # via -r requirements-dev.in
-pycodestyle==2.12.1  # via flake8
-pyflakes==3.2.0  # via autoflake, flake8
-pygments==2.18.0  # via rich, sphinx
+pygments==2.18.0  # via sphinx
 pyproject-api==1.8.0  # via tox
 pyproject-hooks==1.2.0  # via build, pip-tools
-pytest==8.3.4  # via -r requirements-dev.in, pytest-cov
-pytest-cov==6.0.0  # via -r requirements-dev.in
-pyupgrade==3.19.0  # via gray
-pyyaml==6.0.2  # via libcst, pre-commit
+pyyaml==6.0.2  # via pre-commit
 requests==2.32.3  # via sphinx
-rich==12.6.0  # via gray
 snowballstemmer==2.2.0  # via sphinx
 sphinx==8.1.3  # via -r requirements-dev.in
 sphinxcontrib-applehelp==2.0.0  # via sphinx
@@ -68,13 +43,9 @@ sphinxcontrib-htmlhelp==2.1.0  # via sphinx
 sphinxcontrib-jsmath==1.0.1  # via sphinx
 sphinxcontrib-qthelp==2.0.0  # via sphinx
 sphinxcontrib-serializinghtml==2.0.0  # via sphinx
-tokenize-rt==6.1.0  # via add-trailing-comma, pyupgrade
-toml==0.10.2  # via autoflake
+tomli==2.2.1  # via build, pip-tools, pyproject-api, sphinx, tox
 tox==4.23.2  # via -r requirements-dev.in
-trailrunner==1.4.0  # via fixit
-typing-extensions==4.12.2  # via mypy
-unify==0.5  # via gray
-untokenize==0.1.1  # via unify
+typing-extensions==4.12.2  # via tox
 urllib3==2.2.3  # via requests
 virtualenv==20.28.0  # via pre-commit, tox
 wheel==0.45.1  # via -r requirements-dev.in, pip-tools
@@ -1,15 +1,15 @@
 #
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
 #    pip-compile --annotation-style=line requirements.in
 #
 aprslib==0.7.2  # via -r requirements.in
-attrs==24.2.0  # via ax253, kiss3, rush
+attrs==24.3.0  # via ax253, kiss3, rush
 ax253==0.1.5.post1  # via kiss3
 beautifulsoup4==4.12.3  # via -r requirements.in
 bitarray==3.0.0  # via ax253, kiss3
-certifi==2024.8.30  # via requests
+certifi==2024.12.14  # via requests
 charset-normalizer==3.4.0  # via requests
 click==8.1.7  # via -r requirements.in
 commonmark==0.9.1  # via rich
@@ -20,7 +20,7 @@ idna==3.10  # via requests
 importlib-metadata==8.5.0  # via ax253, kiss3
 kiss3==8.0.0  # via -r requirements.in
 loguru==0.7.3  # via -r requirements.in
-marshmallow==3.23.1  # via dataclasses-json
+marshmallow==3.23.2  # via dataclasses-json
 mypy-extensions==1.0.0  # via typing-inspect
 netaddr==1.3.0  # via oslo-config
 oslo-config==9.7.0  # via -r requirements.in
@@ -54,6 +54,7 @@ class TestAPRSISClient(unittest.TestCase):
         with mock.patch.object(APRSISClient, "is_configured", return_value=True):
             stats = self.client.stats()
             from rich.console import Console
+
             c = Console()
             c.print(stats)
             self.assertEqual(

@@ -109,7 +109,8 @@ class TestAPRSClient(unittest.TestCase):
         """Test handling of client creation failure."""
         # Make setup_connection raise an exception
         with mock.patch.object(
-            self.client, "setup_connection",
+            self.client,
+            "setup_connection",
             side_effect=Exception("Connection failed"),
         ):
             with self.assertRaises(Exception):
@@ -11,13 +11,11 @@ from aprsd.main import cli

 from .. import fake
-

 CONF = cfg.CONF
 F = t.TypeVar("F", bound=t.Callable[..., t.Any])


 class TestSendMessageCommand(unittest.TestCase):
-
     def config_and_init(self, login=None, password=None):
         CONF.callsign = fake.FAKE_TO_CALLSIGN
         CONF.trace_enabled = False

@@ -41,7 +39,8 @@ class TestSendMessageCommand(unittest.TestCase):
         runner = CliRunner()

         result = runner.invoke(
-            cli, ["send-message"],
+            cli,
+            ["send-message"],
             catch_exceptions=False,
         )
         assert result.exit_code == 2

@@ -58,7 +57,8 @@ class TestSendMessageCommand(unittest.TestCase):
         runner = CliRunner()

         result = runner.invoke(
-            cli, ["send-message", "WB4BOR"],
+            cli,
+            ["send-message", "WB4BOR"],
             catch_exceptions=False,
         )
         assert result.exit_code == 2
@@ -1,7 +1,6 @@
 from aprsd import plugin, threads
 from aprsd.packets import core
-

 FAKE_MESSAGE_TEXT = "fake MeSSage"
 FAKE_FROM_CALLSIGN = "KFAKE"
 FAKE_TO_CALLSIGN = "KMINE"

@@ -88,6 +87,5 @@ class FakeRegexCommandPlugin(plugin.APRSDRegexCommandPluginBase):


 class FakeWatchListPlugin(plugin.APRSDWatchListPluginBase):
-
     def process(self, packet):
         return FAKE_MESSAGE_TEXT
@@ -2,13 +2,15 @@ from unittest import mock

 from oslo_config import cfg

-from aprsd import client, packets
-from aprsd import conf  # noqa: F401
+from aprsd import (
+    client,
+    conf,  # noqa: F401
+    packets,
+)
 from aprsd.plugins import notify as notify_plugin

 from .. import fake, test_plugin
-

 CONF = cfg.CONF
 DEFAULT_WATCHLIST_CALLSIGNS = fake.FAKE_FROM_CALLSIGN

@@ -49,7 +51,6 @@ class TestWatchListPlugin(test_plugin.TestPlugin):


 class TestAPRSDWatchListPluginBase(TestWatchListPlugin):
-
     def test_watchlist_not_enabled(self):
         self.config_and_init(watchlist_enabled=False)
         plugin = fake.FakeWatchListPlugin()

@@ -79,7 +80,6 @@ class TestAPRSDWatchListPluginBase(TestWatchListPlugin):


 class TestNotifySeenPlugin(TestWatchListPlugin):
-
     def test_disabled(self):
         self.config_and_init(watchlist_enabled=False)
         plugin = notify_plugin.NotifySeenPlugin()

@@ -128,7 +128,9 @@ class TestNotifySeenPlugin(TestWatchListPlugin):

     @mock.patch("aprsd.client.factory.ClientFactory", autospec=True)
     @mock.patch("aprsd.packets.WatchList.is_old")
-    def test_callsign_in_watchlist_old_same_alert_callsign(self, mock_is_old, mock_factory):
+    def test_callsign_in_watchlist_old_same_alert_callsign(
+        self, mock_is_old, mock_factory
+    ):
         client.client_factory = mock_factory
         mock_is_old.return_value = True
         self.config_and_init(
@@ -1,19 +1,17 @@
 from unittest import mock

-from oslo_config import cfg
 import pytz
+from oslo_config import cfg

 from aprsd.plugins import time as time_plugin
 from aprsd.utils import fuzzy

 from .. import fake, test_plugin
-

 CONF = cfg.CONF


 class TestTimePlugins(test_plugin.TestPlugin):
-
     @mock.patch("aprsd.plugins.time.TimePlugin._get_local_tz")
     @mock.patch("aprsd.plugins.time.TimePlugin._get_utcnow")
     def test_time(self, mock_utcnow, mock_localtz):
@@ -7,12 +7,10 @@ from aprsd.plugins import version as version_plugin

 from .. import fake, test_plugin
-

 CONF = cfg.CONF


 class TestVersionPlugin(test_plugin.TestPlugin):
-
     @mock.patch("aprsd.stats.app.APRSDStats.uptime")
     def test_version(self, mock_stats):
         mock_stats.return_value = "00:00:00"

@@ -7,12 +7,10 @@ from aprsd.plugins import weather as weather_plugin

 from .. import fake, test_plugin
-

 CONF = cfg.CONF


 class TestUSWeatherPlugin(test_plugin.TestPlugin):
-
     def test_not_enabled_missing_aprs_fi_key(self):
         # When the aprs.fi api key isn't set, then
         # the LocationPlugin will be disabled.
@ -108,7 +106,6 @@ class TestUSWeatherPlugin(test_plugin.TestPlugin):
|
|||||||
|
|
||||||
|
|
||||||
class TestUSMetarPlugin(test_plugin.TestPlugin):
|
class TestUSMetarPlugin(test_plugin.TestPlugin):
|
||||||
|
|
||||||
def test_not_enabled_missing_aprs_fi_key(self):
|
def test_not_enabled_missing_aprs_fi_key(self):
|
||||||
# When the aprs.fi api key isn't set, then
|
# When the aprs.fi api key isn't set, then
|
||||||
# the LocationPlugin will be disabled.
|
# the LocationPlugin will be disabled.
|
||||||
@ -161,9 +158,9 @@ class TestUSMetarPlugin(test_plugin.TestPlugin):
|
|||||||
|
|
||||||
@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
|
@mock.patch("aprsd.plugin_utils.get_weather_gov_metar")
|
||||||
def test_airport_works(self, mock_metar):
|
def test_airport_works(self, mock_metar):
|
||||||
|
|
||||||
class Response:
|
class Response:
|
||||||
text = '{"properties": {"rawMessage": "BOGUSMETAR"}}'
|
text = '{"properties": {"rawMessage": "BOGUSMETAR"}}'
|
||||||
|
|
||||||
mock_metar.return_value = Response()
|
mock_metar.return_value = Response()
|
||||||
|
|
||||||
CONF.aprs_fi.apiKey = "abc123"
|
CONF.aprs_fi.apiKey = "abc123"
|
||||||

@@ -11,7 +11,6 @@ from . import fake


 class TestPacketBase(unittest.TestCase):
-
     def _fake_dict(
         self,
         from_call=fake.FAKE_FROM_CALLSIGN,
@@ -79,7 +78,6 @@ class TestPacketBase(unittest.TestCase):

     @mock.patch("aprsd.packets.core.GPSPacket._build_time_zulu")
     def test_packet_format_rain_1h(self, mock_time_zulu):
-
         mock_time_zulu.return_value = "221450"

         wx = packets.WeatherPacket(
@@ -106,7 +104,9 @@ class TestPacketBase(unittest.TestCase):

     def test_beacon_factory(self):
         """Test to ensure a beacon packet is created."""
-        packet_raw = "WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
+        packet_raw = (
+            "WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
+        )
         packet_dict = aprslib.parse(packet_raw)
         packet = packets.factory(packet_dict)
         self.assertIsInstance(packet, packets.BeaconPacket)
@@ -162,7 +162,9 @@ class TestPacketBase(unittest.TestCase):

         # Packet with telemetry and DAO
         # http://www.aprs.org/datum.txt
-        packet_raw = 'KD9YIL>T0PX9W,WIDE1-1,WIDE2-1,qAO,NU9R-10:`sB,l#P>/\'"6+}|#*%U\'a|!whl!|3'
+        packet_raw = (
+            "KD9YIL>T0PX9W,WIDE1-1,WIDE2-1,qAO,NU9R-10:`sB,l#P>/'\"6+}|#*%U'a|!whl!|3"
+        )
         packet_dict = aprslib.parse(packet_raw)
         packet = packets.factory(packet_dict)
         self.assertIsInstance(packet, packets.MicEPacket)
@@ -175,7 +177,9 @@ class TestPacketBase(unittest.TestCase):
             msgNo=123,
         )

-        expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123"
+        expected = (
+            f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:ack123"
+        )
         self.assertEqual(expected, str(ack))

     def test_reject_format(self):
@@ -186,7 +190,9 @@ class TestPacketBase(unittest.TestCase):
             msgNo=123,
         )

-        expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123"
+        expected = (
+            f"{fake.FAKE_FROM_CALLSIGN}>APZ100::{fake.FAKE_TO_CALLSIGN:<9}:rej123"
+        )
         self.assertEqual(expected, str(reject))

     def test_beacon_format(self):
@@ -240,7 +246,9 @@ class TestPacketBase(unittest.TestCase):
             bid=0,
         )

-        expected = f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}"
+        expected = (
+            f"{fake.FAKE_FROM_CALLSIGN}>APZ100::BLN{bid:<9}:{packet.message_text}"
+        )
         self.assertEqual(expected, str(packet))

         # bulletin id = 1
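
The parenthesized rewrites of packet_raw and expected in the hunks above are ruff format wrapping literals that overflow its default 88-column line length onto their own line inside parentheses; the layout changes, the value does not. A minimal sketch (not from the repo) showing the wrapped form, and the implicitly concatenated form, build the identical string:

# Minimal sketch: a long literal moved inside parentheses onto its own line
# (as ruff format does above) keeps its value; adjacent literals inside the
# parentheses would be implicitly concatenated to the same result.
one_line = "WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
wrapped = (
    "WB4BOR-12>APZ100,WIDE2-1:@161647z3724.15N107847.58W$ APRSD WebChat"
)
split = (
    "WB4BOR-12>APZ100,WIDE2-1:"
    "@161647z3724.15N107847.58W$ APRSD WebChat"
)
assert one_line == wrapped == split
print("all three forms are equal")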

@@ -3,20 +3,20 @@ from unittest import mock

 from oslo_config import cfg

-from aprsd import conf  # noqa: F401
-from aprsd import packets
+from aprsd import (
+    conf,  # noqa: F401
+    packets,
+    plugins,
+)
 from aprsd import plugin as aprsd_plugin
-from aprsd import plugins
 from aprsd.packets import core

 from . import fake

-
 CONF = cfg.CONF


 class TestPluginManager(unittest.TestCase):
-
     def setUp(self) -> None:
         self.fromcall = fake.FAKE_FROM_CALLSIGN
         self.config_and_init()
@@ -82,7 +82,6 @@ class TestPluginManager(unittest.TestCase):


 class TestPlugin(unittest.TestCase):
-
     def setUp(self) -> None:
         self.fromcall = fake.FAKE_FROM_CALLSIGN
         self.ack = 1
@@ -103,7 +102,6 @@ class TestPlugin(unittest.TestCase):


 class TestPluginBase(TestPlugin):
-
     @mock.patch.object(fake.FakeBaseNoThreadsPlugin, "process")
     def test_base_plugin_no_threads(self, mock_process):
         p = fake.FakeBaseNoThreadsPlugin()
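
The merged from aprsd import (...) block in the first hunk above comes from ruff's isort-style import sorting, which collapses the separate from aprsd import statements into one parenthesized import while keeping the per-name "# noqa: F401" on conf. A small sketch of the same pattern using stdlib names (not aprsd's):

# Sketch only: a merged, parenthesized import; an inline "# noqa: F401"
# can still suppress the unused-import warning for a single name.
from collections import (
    defaultdict,
    OrderedDict,  # noqa: F401
)

counts = defaultdict(int)
counts["aprsd"] += 1
print(dict(counts))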

tox.ini
@@ -2,7 +2,7 @@
 minversion = 2.9.0
 skipdist = True
 skip_missing_interpreters = true
-envlist = pep8,py{310}
+envlist = pep8,py{310,311}
 #requires = tox-pipenv
 # pip==22.0.4
 # pip-tools==5.4.0
@@ -21,10 +21,9 @@ setenv =
 usedevelop = True
 install_command = pip install {opts} {packages}
 extras = tests
-deps = coverage: coverage
-    -r{toxinidir}/requirements.txt
-    -r{toxinidir}/requirements-dev.txt
-    pytestmain: git+https://github.com/pytest-dev/pytest.git@main
+deps =
+    pytest-cov
+    pytest
 commands =
     pytest -v --cov-report term-missing --cov=aprsd {posargs}
     coverage: coverage report -m
@@ -43,6 +42,8 @@ commands =
     sphinx-build -a -W . _build

 [testenv:pep8]
+deps =
+    flake8
 commands =
     flake8 {posargs} aprsd tests

@@ -57,9 +58,9 @@ passenv = FAST8_NUM_COMMITS
 [testenv:lint]
 skip_install = true
 deps =
-    -r{toxinidir}/requirements-dev.txt
+    ruff
 commands =
-    flake8 aprsd tests
+    ruff check aprsd tests

 [flake8]
 max-line-length = 99
@@ -74,25 +75,26 @@ exclude = .venv,.git,.tox,dist,doc,.ropeproject
 # This section is not needed if not using GitHub Actions for CI.
 [gh-actions]
 python =
-    3.6: py36, pep8
-    3.7: py38, pep8
-    3.8: py38, pep8
     3.9: py39, pep8, type-check, docs
     3.10: py39, pep8, type-check, docs
+    3.11: py311, pep8, type-check, docs

 [testenv:fmt]
 # This will reformat your code to comply with pep8
 # and standard formatting
 skip_install = true
 deps =
-    -r{toxinidir}/requirements-dev.txt
+    ruff
 commands =
-    gray aprsd tests
+    ruff format aprsd tests

 [testenv:type-check]
 skip_install = true
-deps = -r{toxinidir}/requirements.txt
-    -r{toxinidir}/requirements-dev.txt
+deps =
+    mypy
+    types-pytz
+    types-requests
+    types-tzlocal
 commands =
     mypy --ignore-missing-imports --install-types aprsd

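
With the tox changes above, the lint and fmt environments install ruff and call it directly, while pep8 and type-check keep flake8 and mypy. A hypothetical helper (not part of the repo) that runs the same ruff commands locally, assuming ruff is installed on PATH:

# Hypothetical convenience script: mirrors what "tox -e lint" and
# "tox -e fmt" now run, per the [testenv:lint] and [testenv:fmt] sections.
import subprocess

for args in (
    ["ruff", "check", "aprsd", "tests"],   # [testenv:lint]
    ["ruff", "format", "aprsd", "tests"],  # [testenv:fmt]
):
    print("running:", " ".join(args))
    subprocess.run(args, check=False)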