Merge pull request #183 from craigerl/refactor-extraction

Migrate admin web out of aprsd.

.github/workflows/authors.yml (new file, 15 lines, vendored)
@@ -0,0 +1,15 @@
name: Update Authors
on:
  push:
    branches:
      - master
jobs:
  run:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: wow-actions/update-authors@v1
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          template: '{{email}} : {{commits}}'
          path: 'AUTHORS'
.github/workflows/manual_build.yml (9 changed lines, vendored)
@@ -18,7 +18,6 @@ jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Get Branch Name
        id: branch-name
        uses: tj-actions/branch-names@v8
@@ -30,16 +29,16 @@ jobs:
        run: |
          echo "Selected Branch '${{ steps.extract_branch.outputs.branch }}'"
      - name: Setup QEMU
-       uses: docker/setup-qemu-action@v2
+       uses: docker/setup-qemu-action@v3
      - name: Setup Docker Buildx
-       uses: docker/setup-buildx-action@v2
+       uses: docker/setup-buildx-action@v3
      - name: Login to Docker HUB
-       uses: docker/login-action@v2
+       uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build the Docker image
-       uses: docker/build-push-action@v3
+       uses: docker/build-push-action@v6
        with:
          context: "{{defaultContext}}:docker"
          platforms: linux/amd64,linux/arm64
.github/workflows/master-build.yml (13 changed lines, vendored)
@@ -7,7 +7,7 @@ on:
    branches:
      - "**"
    tags:
-     - "v*.*.*"
+     - "*.*.*"
  pull_request:
    branches:
      - "master"
@@ -17,7 +17,7 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-       python-version: ["3.10", "3.11"]
+       python-version: ["3.10", "3.11", "3.12"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
@@ -35,21 +35,20 @@ jobs:
    needs: tox
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Get Branch Name
        id: branch-name
        uses: tj-actions/branch-names@v8
      - name: Setup QEMU
-       uses: docker/setup-qemu-action@v2
+       uses: docker/setup-qemu-action@v3
      - name: Setup Docker Buildx
-       uses: docker/setup-buildx-action@v2
+       uses: docker/setup-buildx-action@v3
      - name: Login to Docker HUB
-       uses: docker/login-action@v2
+       uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Build the Docker image
-       uses: docker/build-push-action@v3
+       uses: docker/build-push-action@v6
        with:
          context: "{{defaultContext}}:docker"
          platforms: linux/amd64,linux/arm64
.github/workflows/python.yml (4 changed lines, vendored)
@@ -9,9 +9,9 @@ jobs:
      matrix:
        python-version: ["3.10", "3.11"]
    steps:
-     - uses: actions/checkout@v2
+     - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
-       uses: actions/setup-python@v2
+       uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
.mailmap (new file, 4 lines)
@@ -0,0 +1,4 @@
Craig Lamparter <craig@craiger.org> <craiger@hpe.com>
Craig Lamparter <craig@craiger.org> craigerl <craig@craiger.org>
Craig Lamparter <craig@craiger.org> craigerl <craiger@hpe.com>
Walter A. Boring IV <waboring@hemna.com> Hemna <waboring@hemna.com>
.pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v4.5.0
+   rev: v5.0.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer
@@ -10,13 +10,32 @@ repos:
      - id: check-case-conflict
      - id: check-docstring-first
      - id: check-builtin-literals
      - id: check-illegal-windows-names

  - repo: https://github.com/asottile/setup-cfg-fmt
-   rev: v2.5.0
+   rev: v2.7.0
    hooks:
      - id: setup-cfg-fmt

- - repo: https://github.com/dizballanze/gray
-   rev: v0.14.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+   rev: v0.9.0
    hooks:
-     - id: gray
+     - id: ruff
      ###### Relevant part below ######
      - id: ruff
        args: ["check", "--select", "I", "--fix"]
      ###### Relevant part above ######
      - id: ruff-format

  - repo: https://github.com/astral-sh/uv-pre-commit
    # uv version.
    rev: 0.5.16
    hooks:
      # Compile requirements
      - id: pip-compile
        name: pip-compile requirements.in
        args: [--resolver, backtracking, --annotation-style=line, requirements.in, -o, requirements.txt]
      - id: pip-compile
        name: pip-compile requirements-dev.in
        args: [--resolver, backtracking, --annotation-style=line, requirements-dev.in, -o, requirements-dev.txt]
        files: ^requirements-dev\.(in|txt)$
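For reference, a sketch of how these hooks are typically exercised locally. This uses standard pre-commit CLI invocations with the hook ids defined above; it is not part of the PR itself.

```console
# Run only the ruff hooks defined above against the whole tree
pre-commit run ruff --all-files
pre-commit run ruff-format --all-files

# Re-resolve requirements.txt via the uv pip-compile hook
pre-commit run pip-compile --all-files
```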
CONTRIBUTING.md (new file, 31 lines)
@@ -0,0 +1,31 @@
# CONTRIBUTING

Code contributions are welcomed and appreciated. Just submit a PR!

The current build environment uses `pre-commit` and `uv`.

### Environment setup:

```console
pip install uv
uv venv
uv pip install pip-tools
git clone git@github.com:craigerl/aprsd.git
cd aprsd
pre-commit install

# Optionally run the pre-commit scripts at any time
pre-commit run --all-files
```

### Running and testing:

From the aprsd directory:

```console
cd aprsd
uv pip install -e .

# Running
uv run aprsd
```
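A sketch of running the checks before pushing; the tox environment names are the ones referenced in the Workflow section of the README later in this PR, and it assumes tox is installed into the same virtualenv:

```console
uv pip install tox

# style / lint environment
tox -epep8

# run the test environments in parallel
tox -p
```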
Makefile (2 changed lines)
@@ -32,7 +32,7 @@ docs: changelog
	mv ChangeLog.rst docs/changelog.rst
	tox -edocs

-clean: clean-build clean-pyc clean-test clean-dev ## remove all build, test, coverage and Python artifacts
+clean: clean-dev clean-test clean-build clean-pyc ## remove all build, test, coverage and Python artifacts

clean-build: ## remove build artifacts
	rm -fr build/
README.md (new file, 454 lines)
@@ -0,0 +1,454 @@
# APRSD - Ham radio APRS-IS Message platform software

## KM6LYW and WB4BOR

[![PyPI](https://badge.fury.io/py/aprsd.svg)](https://badge.fury.io/py/aprsd)
[![Python versions](https://img.shields.io/pypi/pyversions/aprsd.svg)](https://pypi.org/pypi/aprsd)
[![Slack](https://img.shields.io/badge/slack-@hemna/aprsd-blue.svg?logo=slack)](https://hemna.slack.com/app_redirect?channel=C01KQSCP5RP)
![Issues](https://img.shields.io/github/issues/craigerl/aprsd)
![Last commit](https://img.shields.io/github/last-commit/craigerl/aprsd)
[![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://timothycrosley.github.io/isort/)
[![Downloads](https://static.pepy.tech/personalized-badge/aprsd?period=month&units=international_system&left_color=black&right_color=orange&left_text=Downloads)](https://pepy.tech/project/aprsd)

[APRSD](http://github.com/craigerl/aprsd) is a Ham radio
[APRS](http://aprs.org) message platform built with python.

# Table of Contents

1. [APRSD - Ham radio APRS-IS Message platform software](#aprsd---ham-radio-aprs-is-message-platform-software)
2. [What is APRSD](#what-is-aprsd)
3. [APRSD Plugins/Extensions](#aprsd-pluginsextensions)
4. [List of existing plugins - APRS Message processing/responders](#list-of-existing-plugins---aprs-message-processingresponders)
5. [List of existing extensions - Add new capabilities to APRSD](#list-of-existing-extensions---add-new-capabilities-to-aprsd)
6. [APRSD Overview Diagram](#aprsd-overview-diagram)
7. [Typical use case](#typical-use-case)
8. [Installation](#installation)
9. [Example usage](#example-usage)
10. [Help](#help)
11. [Commands](#commands)
12. [Configuration](#configuration)
13. [server](#server)
14. [Current list plugins](#current-list-plugins)
15. [Current list extensions](#current-list-extensions)
16. [send-message](#send-message)
17. [Development](#development)
18. [Release](#release)
19. [Building your own APRSD plugins](#building-your-own-aprsd-plugins)
20. [Overview](#overview)
21. [Docker Container](#docker-container)
22. [Building](#building)
23. [Official Build](#official-build)
24. [Development Build](#development-build)
25. [Running the container](#running-the-container)
26. [Activity](#activity)
27. [Star History](#star-history)

---

> [!WARNING]
> Legal operation of this software requires an amateur radio license and a valid call sign.

> [!NOTE]
> Star this repo to follow our progress! This code is under active development, and contributions are both welcomed and appreciated. See [CONTRIBUTING.md](<https://github.com/craigerl/aprsd/blob/master/CONTRIBUTING.md>) for details.

### What is APRSD

APRSD is a python application for interacting with the APRS network and with Ham radios
that have KISS interfaces, and for providing APRS services to HAM radio operators.

APRSD currently provides these main commands:

- server - Connect to APRS and listen/respond to APRS messages
- send-message - Send a message to a callsign via APRS_IS.
- listen - Listen to packets on the APRS-IS Network based on FILTER.
- check-version - check the version of aprsd
- sample-config - generate a sample config file
- dev - helpful for testing new aprsd plugins under development
- dump-stats - output the stats of a running aprsd server command
- list-plugins - list the built in plugins, available plugins on pypi.org and installed plugins
- list-extensions - list the available extensions on pypi.org and installed extensions

Each of those commands can connect to the APRS-IS network if internet
connectivity is available. If internet is not available, then APRS can
be configured to talk to a TCP KISS TNC for radio connectivity directly.

Please [read the docs](https://aprsd.readthedocs.io) to learn more!

### APRSD Plugins/Extensions

APRSD has the ability to add plugins and extensions. Plugins add new message filters that can look for specific messages and respond. For example, the aprsd-email-plugin adds the ability to send/receive email to/from an APRS callsign. Extensions add new unique capabilities to APRSD itself. For example, the aprsd-admin-extension adds a web interface command that shows the running status of the aprsd server command, and aprsd-webchat-extension is a new web based APRS 'chat' command.

You can see the available plugins/extensions on pypi here: [https://pypi.org/search/?q=aprsd](https://pypi.org/search/?q=aprsd)
> [!NOTE]
> aprsd admin and webchat commands have been extracted into separate extensions.

* [See admin extension here](https://github.com/hemna/aprsd-admin-extension) <div id="admin logo" align="left"><img src="https://raw.githubusercontent.com/hemna/aprsd-admin-extension/refs/heads/master/screenshot.png" alt="Web Admin" width="340"/></div>

* [See webchat extension here](https://github.com/hemna/aprsd-webchat-extension) <div id="webchat logo" align="left"><img src="https://raw.githubusercontent.com/hemna/aprsd-webchat-extension/master/screenshot.png" alt="Webchat" width="340"/></div>

### List of existing plugins - APRS Message processing/responders

- [aprsd-email-plugin](https://github.com/hemna/aprsd-email-plugin) - send/receive email!
- [aprsd-location-plugin](https://github.com/hemna/aprsd-location-plugin) - get latest GPS location.
- [aprsd-locationdata-plugin](https://github.com/hemna/aprsd-locationdata-plugin) - get latest GPS location
- [aprsd-digipi-plugin](https://github.com/hemna/aprsd-digipi-plugin) - Look for digipi beacon packets
- [aprsd-w3w-plugin](https://github.com/hemna/aprsd-w3w-plugin) - get your w3w coordinates
- [aprsd-mqtt-plugin](https://github.com/hemna/aprsd-mqtt-plugin) - send aprs packets to an MQTT topic
- [aprsd-telegram-plugin](https://github.com/hemna/aprsd-telegram-plugin) - send/receive messages to telegram
- [aprsd-borat-plugin](https://github.com/hemna/aprsd-borat-plugin) - get Borat quotes
- [aprsd-wxnow-plugin](https://github.com/hemna/aprsd-wxnow-plugin) - get closest N weather station reports
- [aprsd-weewx-plugin](https://github.com/hemna/aprsd-weewx-plugin) - get weather from your weewx weather station
- [aprsd-slack-plugin](https://github.com/hemna/aprsd-slack-plugin) - send/receive messages to a slack channel
- [aprsd-sentry-plugin](https://github.com/hemna/aprsd-sentry-plugin) -
- [aprsd-repeat-plugins](https://github.com/hemna/aprsd-repeat-plugins) - plugins for the REPEAT service. Get nearest Ham radio repeaters!
- [aprsd-twitter-plugin](https://github.com/hemna/aprsd-twitter-plugin) - make tweets from your Ham Radio!
- [aprsd-timeopencage-plugin](https://github.com/hemna/aprsd-timeopencage-plugin) - Get local time for a callsign
- [aprsd-stock-plugin](https://github.com/hemna/aprsd-stock-plugin) - get stock quotes from your Ham radio

### List of existing extensions - Add new capabilities to APRSD

- [aprsd-admin-extension](https://github.com/hemna/aprsd-admin-extension) - Web Administration page for APRSD
- [aprsd-webchat-extension](https://github.com/hemna/aprsd-webchat-extension) - Web page for APRS Messaging
- [aprsd-irc-extension](https://github.com/hemna/aprsd-irc-extension) - an IRC like server command for APRS

### APRSD Overview Diagram

![APRSD overview](https://raw.githubusercontent.com/craigerl/aprsd/master/docs/_static/aprsd_overview.svg?sanitize=true)

### Typical use case

APRSD's typical use case is that of providing an APRS wide service to
all HAM radio operators. For example, the callsign 'REPEAT' on the APRS
network is actually an instance of APRSD that can provide a list of HAM
repeaters in the area of the callsign that sent the message.

A Ham radio operator using an APRS enabled HAM radio sends a message to
check the weather. An APRS message is sent, and then picked up by APRSD.
The APRS packet is decoded, and the message is sent through the list of
plugins for processing. For example, the WeatherPlugin picks up the
message, fetches the weather for the area around the user who sent the
request, and then responds with the weather conditions in that area.
APRSD also includes a watch list of HAM callsigns to look out for. The
watch list can notify you when a HAM callsign in the list is seen and is
now available to message on the APRS network.
### Installation

To install `aprsd`, use Pip:

`pip install aprsd`

### Example usage

`aprsd -h`

### Help

    └─> aprsd -h
    Usage: aprsd [OPTIONS] COMMAND [ARGS]...

    Options:
      --version  Show the version and exit.
      -h, --help Show this message and exit.

    Commands:
      check-version    Check this version against the latest in pypi.org.
      completion       Show the shell completion code
      dev              Development type subcommands
      fetch-stats      Fetch stats from a APRSD admin web interface.
      healthcheck      Check the health of the running aprsd server.
      list-extensions  List the built in plugins available to APRSD.
      list-plugins     List the built in plugins available to APRSD.
      listen           Listen to packets on the APRS-IS Network based on FILTER.
      sample-config    Generate a sample Config file from aprsd and all...
      send-message     Send a message to a callsign via APRS_IS.
      server           Start the aprsd server gateway process.
      version          Show the APRSD version.

### Commands

### Configuration

This command outputs a sample config yml formatted block that you can
edit and use to pass in to `aprsd` with `-c`. By default aprsd looks in
`~/.config/aprsd/aprsd.yml`

`aprsd sample-config`

    └─> aprsd sample-config
    ...

### server

This is the main server command that will listen to APRS-IS servers and
look for incoming commands to the callsign configured in the config
file.

    └─[$] > aprsd server --help
    Usage: aprsd server [OPTIONS]

      Start the aprsd server gateway process.

    Options:
      --loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
                            The log level to use for aprsd.log
                            [default: INFO]
      -c, --config TEXT     The aprsd config file to use for options.
                            [default:
                            /Users/i530566/.config/aprsd/aprsd.yml]
      --quiet               Don't log to stdout
      -f, --flush           Flush out all old aged messages on disk.
                            [default: False]
      -h, --help            Show this message and exit.

    └─> aprsd server
    Registering LogMonitorThread
    2025-01-06 16:27:12.398 | MainThread | INFO | APRSD is up to date | aprsd.cmds.server:server:82
    2025-01-06 16:27:12.398 | MainThread | INFO | APRSD Started version: 3.5.1.dev0+g72d068c.d20250102 | aprsd.cmds.server:server:83
    2025-01-06 16:27:12.398 | MainThread | INFO | Creating client connection | aprsd.cmds.server:server:101
    2025-01-06 16:27:12.398 | MainThread | INFO | Creating aprslib client(noam.aprs2.net:14580) and logging in WB4BOR-1. | aprsd.client.aprsis:setup_connection:136
    2025-01-06 16:27:12.398 | MainThread | INFO | Attempting connection to noam.aprs2.net:14580 | aprslib.inet:_connect:226
    2025-01-06 16:27:12.473 | MainThread | INFO | Connected to ('44.135.208.225', 14580) | aprslib.inet:_connect:233
    2025-01-06 16:27:12.617 | MainThread | INFO | Login successful | aprsd.client.drivers.aprsis:_send_login:154
    2025-01-06 16:27:12.618 | MainThread | INFO | Connected to T2BC | aprsd.client.drivers.aprsis:_send_login:156
    2025-01-06 16:27:12.618 | MainThread | INFO | <aprsd.client.aprsis.APRSISClient object at 0x103a36480> | aprsd.cmds.server:server:103
    2025-01-06 16:27:12.618 | MainThread | INFO | Loading Plugin Manager and registering plugins | aprsd.cmds.server:server:117
    2025-01-06 16:27:12.619 | MainThread | INFO | Loading APRSD Plugins | aprsd.plugin:setup_plugins:492
#### Current list plugins

`└─> aprsd list-plugins`

🐍 APRSD Built-in Plugins 🐍

| Plugin Name | Info | Type | Plugin Path |
|---|---|---|---|
| AVWXWeatherPlugin | AVWX weather of GPS Beacon location | RegexCommand | aprsd.plugins.weather.AVWXWeatherPlugin |
| FortunePlugin | Give me a fortune | RegexCommand | aprsd.plugins.fortune.FortunePlugin |
| NotifySeenPlugin | Notify me when a CALLSIGN is recently seen on APRS-IS | WatchList | aprsd.plugins.notify.NotifySeenPlugin |
| OWMWeatherPlugin | OpenWeatherMap weather of GPS Beacon location | RegexCommand | aprsd.plugins.weather.OWMWeatherPlugin |
| PingPlugin | reply with a Pong! | RegexCommand | aprsd.plugins.ping.PingPlugin |
| TimeOWMPlugin | Current time of GPS beacon's timezone. Uses OpenWeatherMap | RegexCommand | aprsd.plugins.time.TimeOWMPlugin |
| TimePlugin | What is the current local time. | RegexCommand | aprsd.plugins.time.TimePlugin |
| USMetarPlugin | USA only METAR of GPS Beacon location | RegexCommand | aprsd.plugins.weather.USMetarPlugin |
| USWeatherPlugin | Provide USA only weather of GPS Beacon location | RegexCommand | aprsd.plugins.weather.USWeatherPlugin |
| VersionPlugin | What is the APRSD Version | RegexCommand | aprsd.plugins.version.VersionPlugin |

Pypi.org APRSD Installable Plugin Packages

Install any of the following plugins with `pip install <Plugin Package Name>`

| Plugin Package Name | Description | Version | Released | Installed? |
|---|---|---|---|---|
| 📂 aprsd-assistant-plugin | APRSd plugin for hosting the APRS Assistant chatbot (aprs-assistant) | 0.0.3 | 2024-10-20T02:59:39 | No |
| 📂 aprsd-borat-plugin | Borat quotes for aprsd plugin | 0.1.1.dev1 | 2024-01-19T16:04:38 | No |
| 📂 aprsd-locationdata-plugin | Fetch location information from a callsign | 0.3.0 | 2024-02-06T17:20:43 | No |
| 📂 aprsd-mqtt-plugin | APRSD MQTT Plugin sends APRS packets to mqtt queue | 0.2.0 | 2023-04-17T16:01:50 | No |
| 📂 aprsd-repeat-plugins | APRSD Plugins for the REPEAT service | 1.2.0 | 2023-01-10T17:15:36 | No |
| 📂 aprsd-sentry-plugin | Ham radio APRSD plugin that does.... | 0.1.2 | 2022-12-02T19:07:33 | No |
| 📂 aprsd-slack-plugin | Amateur radio APRS daemon which listens for messages and responds | 1.2.0 | 2023-01-10T19:21:33 | No |
| 📂 aprsd-stock-plugin | Ham Radio APRSD Plugin for fetching stock quotes | 0.1.3 | 2022-12-02T18:56:19 | Yes |
| 📂 aprsd-telegram-plugin | Ham Radio APRS APRSD plugin for Telegram IM service | 0.1.3 | 2022-12-02T19:07:15 | No |
| 📂 aprsd-timeopencage-plugin | APRSD plugin for fetching time based on GPS location | 0.2.0 | 2023-01-10T17:07:11 | No |
| 📂 aprsd-twitter-plugin | Python APRSD plugin to send tweets | 0.5.0 | 2023-01-10T16:51:47 | No |
| 📂 aprsd-weewx-plugin | HAM Radio APRSD that reports weather from a weewx weather station. | 0.3.2 | 2023-04-20T20:16:19 | No |
| 📂 aprsd-wxnow-plugin | APRSD Plugin for getting the closest wx reports to last beacon | 0.2.0 | 2023-10-08T01:27:29 | Yes |

🐍 APRSD Installed 3rd party Plugins 🐍

| Package Name | Plugin Name | Version | Type | Plugin Path |
|---|---|---|---|---|
| aprsd-stock-plugin | YahooStockQuote | 0.1.3 | RegexCommand | aprsd_stock_plugin.stock.YahooStockQuote |
| aprsd-wxnow-plugin | WXNowPlugin | 0.2.0 | RegexCommand | aprsd_wxnow_plugin.conf.opts.WXNowPlugin |

#### Current list extensions

`└─> aprsd list-extensions`

Pypi.org APRSD Installable Extension Packages

Install any of the following extensions with `pip install <Plugin Package Name>`

| Extension Package Name | Description | Version | Released | Installed? |
|---|---|---|---|---|
| 📂 aprsd-admin-extension | Administration extension for the Ham radio APRSD Server | 1.0.1 | 2025-01-06T21:57:24 | Yes |
| 📂 aprsd-irc-extension | An Extension to Ham radio APRSD Daemon to act like an irc server for APRS | 0.0.5 | 2024-04-09T11:28:47 | No |
### send-message

This command is typically used for development to send another aprsd
instance test messages.

    └─[$] > aprsd send-message -h
    Usage: aprsd send-message [OPTIONS] TOCALLSIGN COMMAND...

      Send a message to a callsign via APRS_IS.

    Options:
      --loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
                            The log level to use for aprsd.log
                            [default: INFO]
      -c, --config TEXT     The aprsd config file to use for options.
                            [default:
                            /Users/i530566/.config/aprsd/aprsd.yml]
      --quiet               Don't log to stdout
      --aprs-login TEXT     What callsign to send the message from.
                            [env var: APRS_LOGIN]
      --aprs-password TEXT  the APRS-IS password for APRS_LOGIN [env
                            var: APRS_PASSWORD]
      -n, --no-ack          Don't wait for an ack, just sent it to APRS-
                            IS and bail. [default: False]
      -w, --wait-response   Wait for a response to the message?
                            [default: False]
      --raw TEXT            Send a raw message. Implies --no-ack
      -h, --help            Show this message and exit.
### Development

- `git clone git@github.com:craigerl/aprsd.git`
- `cd aprsd`
- `make`

#### Workflow

While working on aprsd, the workflow is as follows:

- Checkout a new branch to work on by running

  `git checkout -b mybranch`

- Make your changes to the code

- Run Tox with the following options:

  - `tox -epep8`
  - `tox -efmt`
  - `tox -p`

- Commit your changes. This will run the pre-commit hooks, which do
  checks too.

  `git commit`

- Once you are done with all of your commits, then push up the branch
  to github with:

  `git push -u origin mybranch`

- Create a pull request from your branch so github tests can run and
  we can do a code review.

#### Release

To do a release to pypi:

- Tag the release with:

  `git tag -v1.XX -m "New release"`

- Push the release tag:

  `git push origin master --tags`

- Do a test build and verify the build is valid by running:

  `make build`

- Once twine is happy, upload the release to pypi:

  `make upload`

#### Building your own APRSD plugins

APRSD plugins are the mechanism by which APRSD can respond to APRS
Messages. The plugins are loaded at server startup and can also be
loaded at listen startup. When a packet is received by APRSD, it is
passed to each of the plugins in the order they were registered in the
config file. The plugins can then decide what to do with the packet.
When a plugin is called, it is passed an APRSD Packet object. The plugin
can then do something with the packet and return a reply message if
desired. If a plugin does not want to reply to the packet, it can just
return None. When a plugin does return a reply message, APRSD will send
the reply message to the appropriate destination.

For example, when a 'ping' message is received, the PingPlugin will
return a reply message of 'pong'. When APRSD receives the 'pong'
message, it will be sent back to the original caller of the ping
message.

APRSD plugins are simply python packages that can be installed from
pypi.org. They are installed into the aprsd virtualenv and can be
imported by APRSD at runtime. The plugins are registered in the config
file and loaded at startup of the aprsd server command or the aprsd
listen command.

#### Overview

You can build your own plugins by following the instructions in the
[Building your own APRSD plugins](#building-your-own-aprsd-plugins)
section.

Plugins are called by APRSD when packets are received.
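To make the mechanism described above concrete, here is a minimal plugin sketch. The base class name, class attributes, and packet fields are assumptions inferred from the built-in plugins listed earlier (for example `aprsd.plugins.ping.PingPlugin`); check the aprsd plugin documentation for the actual interface.

```python
# Hypothetical minimal APRSD plugin sketch; names are assumptions, not taken
# from this PR.
import logging

from aprsd import plugin  # assumed module providing the plugin base classes

LOG = logging.getLogger("APRSD")


class HelloPlugin(plugin.APRSDRegexCommandPluginBase):
    """Reply with a greeting when a message matching 'hello' is received."""

    # Assumed attributes: the regex that triggers this plugin and a short name.
    command_regex = r"^hello"
    command_name = "hello"

    def process(self, packet):
        # packet is the APRSD Packet object described above; from_call is
        # assumed to be the sender's callsign field.
        LOG.info(f"HelloPlugin called for {packet.from_call}")
        # Returning a string makes APRSD send it back to the sender.
        # Returning None means "no reply from this plugin".
        return f"Hello {packet.from_call}!"
```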
### Docker Container

### Building

There are 2 versions of the container Dockerfile that can be used: the
main Dockerfile, which is for building the official release container
based off of the pip install version of aprsd, and the Dockerfile-dev,
which is used for building a container based off of a git branch of the
repo.

### Official Build

`docker build -t hemna6969/aprsd:latest .`

### Development Build

`docker build -t hemna6969/aprsd:latest -f Dockerfile-dev .`

### Running the container

There is a `docker-compose.yml` file in the `docker/` directory that can
be used to run your container. To provide the container an `aprsd.conf`
configuration file, change your `docker-compose.yml` as shown below:

    volumes:
        - $HOME/.config/aprsd:/config

To install plugins at container start time, pass in a comma-separated
list of plugins on PyPI using the `APRSD_PLUGINS` environment variable
in the `docker-compose.yml` file. Note that version constraints may
also be provided. For example:

    environment:
        - APRSD_PLUGINS=aprsd-slack-plugin>=1.0.2,aprsd-twitter-plugin
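Putting those two snippets together, a minimal `docker-compose.yml` service entry might look like the sketch below. The service name and `restart` policy are illustrative assumptions; only the image tag, `volumes`, and `APRSD_PLUGINS` settings come from the text above.

```yaml
# Hypothetical docker-compose.yml sketch; service name and restart policy are
# assumptions, the volume and environment entries mirror the examples above.
services:
  aprsd:
    image: hemna6969/aprsd:latest
    volumes:
      - $HOME/.config/aprsd:/config
    environment:
      - APRSD_PLUGINS=aprsd-slack-plugin>=1.0.2,aprsd-twitter-plugin
    restart: unless-stopped
```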
### Activity

## Star History

[Star History Chart](https://star-history.com/#craigerl/aprsd&Date)
README.rst (deleted file, 502 lines)
@@ -1,502 +0,0 @@
|
||||
===============================================
|
||||
APRSD - Ham radio APRS-IS Message plugin server
|
||||
===============================================
|
||||
|
||||
KM6LYW and WB4BOR
|
||||
____________________
|
||||
|
||||
|pypi| |pytest| |versions| |slack| |issues| |commit| |imports| |down|
|
||||
|
||||
|
||||
`APRSD <http://github.com/craigerl/aprsd>`_ is a Ham radio `APRS <http://aprs.org>`_ message command gateway built on python.
|
||||
|
||||
|
||||
Table of Contents
|
||||
=================
|
||||
|
||||
1. `What is APRSD <#what-is-aprsd>`_
|
||||
2. `APRSD Overview Diagram <#aprsd-overview-diagram>`_
|
||||
3. `Typical Use Case <#typical-use-case>`_
|
||||
4. `Installation <#installation>`_
|
||||
5. `Example Usage <#example-usage>`_
|
||||
6. `Help <#help>`_
|
||||
7. `Commands <#commands>`_
|
||||
- `Configuration <#configuration>`_
|
||||
- `Server <#server>`_
|
||||
- `Current List of Built-in Plugins <#current-list-of-built-in-plugins>`_
|
||||
- `Pypi.org APRSD Installable Plugin Packages <#pypiorg-aprsd-installable-plugin-packages>`_
|
||||
- `🐍 APRSD Installed 3rd Party Plugins <#aprsd-installed-3rd-party-plugins>`_
|
||||
- `Send Message <#send-message>`_
|
||||
- `Send Email (Radio to SMTP Server) <#send-email-radio-to-smtp-server>`_
|
||||
- `Receive Email (IMAP Server to Radio) <#receive-email-imap-server-to-radio>`_
|
||||
- `Location <#location>`_
|
||||
- `Web Admin Interface <#web-admin-interface>`_
|
||||
8. `Development <#development>`_
|
||||
- `Building Your Own APRSD Plugins <#building-your-own-aprsd-plugins>`_
|
||||
9. `Workflow <#workflow>`_
|
||||
10. `Release <#release>`_
|
||||
11. `Docker Container <#docker-container>`_
|
||||
- `Building <#building-1>`_
|
||||
- `Official Build <#official-build>`_
|
||||
- `Development Build <#development-build>`_
|
||||
- `Running the Container <#running-the-container>`_
|
||||
|
||||
|
||||
What is APRSD
|
||||
=============
|
||||
APRSD is a python application for interacting with the APRS network and providing
|
||||
APRS services for HAM radio operators.
|
||||
|
||||
APRSD currently has 4 main commands to use.
|
||||
* server - Connect to APRS and listen/respond to APRS messages
|
||||
* webchat - web based chat program over APRS
|
||||
* send-message - Send a message to a callsign via APRS_IS.
|
||||
* listen - Listen to packets on the APRS-IS Network based on FILTER.
|
||||
|
||||
Each of those commands can connect to the APRS-IS network if internet connectivity
|
||||
is available. If internet is not available, then APRS can be configured to talk
|
||||
to a TCP KISS TNC for radio connectivity.
|
||||
|
||||
Please `read the docs`_ to learn more!
|
||||
|
||||
APRSD Overview Diagram
|
||||
======================
|
||||
|
||||
.. image:: https://raw.githubusercontent.com/craigerl/aprsd/master/docs/_static/aprsd_overview.svg?sanitize=true
|
||||
|
||||
Typical use case
|
||||
================
|
||||
|
||||
APRSD's typical use case is that of providing an APRS wide service to all HAM
|
||||
radio operators. For example the callsign 'REPEAT' on the APRS network is actually
|
||||
an instance of APRSD that can provide a list of HAM repeaters in the area of the
|
||||
callsign that sent the message.
|
||||
|
||||
|
||||
Ham radio operator using an APRS enabled HAM radio sends a message to check
|
||||
the weather. An APRS message is sent, and then picked up by APRSD. The
|
||||
APRS packet is decoded, and the message is sent through the list of plugins
|
||||
for processing. For example, the WeatherPlugin picks up the message, fetches the weather
|
||||
for the area around the user who sent the request, and then responds with
|
||||
the weather conditions in that area. Also includes a watch list of HAM
|
||||
callsigns to look out for. The watch list can notify you when a HAM callsign
|
||||
in the list is seen and now available to message on the APRS network.
|
||||
|
||||
|
||||
|
||||
Installation
|
||||
=============
|
||||
|
||||
To install ``aprsd``, use Pip:
|
||||
|
||||
``pip install aprsd``
|
||||
|
||||
Example usage
|
||||
==============
|
||||
|
||||
``aprsd -h``
|
||||
|
||||
Help
|
||||
====
|
||||
::
|
||||
|
||||
|
||||
└─> aprsd -h
|
||||
Usage: aprsd [OPTIONS] COMMAND [ARGS]...
|
||||
|
||||
Options:
|
||||
--version Show the version and exit.
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
Commands:
|
||||
check-version Check this version against the latest in pypi.org.
|
||||
completion Show the shell completion code
|
||||
dev Development type subcommands
|
||||
fetch-stats Fetch stats from a APRSD admin web interface.
|
||||
healthcheck Check the health of the running aprsd server.
|
||||
list-extensions List the built in plugins available to APRSD.
|
||||
list-plugins List the built in plugins available to APRSD.
|
||||
listen Listen to packets on the APRS-IS Network based on FILTER.
|
||||
sample-config Generate a sample Config file from aprsd and all...
|
||||
send-message Send a message to a callsign via APRS_IS.
|
||||
server Start the aprsd server gateway process.
|
||||
version Show the APRSD version.
|
||||
webchat Web based HAM Radio chat program!
|
||||
|
||||
|
||||
Commands
|
||||
========
|
||||
|
||||
Configuration
|
||||
=============
|
||||
This command outputs a sample config yml formatted block that you can edit
|
||||
and use to pass in to ``aprsd`` with ``-c``. By default aprsd looks in ``~/.config/aprsd/aprsd.yml``
|
||||
|
||||
``aprsd sample-config``
|
||||
|
||||
::
|
||||
|
||||
└─> aprsd sample-config
|
||||
...
|
||||
|
||||
server
|
||||
======
|
||||
|
||||
This is the main server command that will listen to APRS-IS servers and
|
||||
look for incomming commands to the callsign configured in the config file
|
||||
|
||||
::
|
||||
|
||||
└─[$] > aprsd server --help
|
||||
Usage: aprsd server [OPTIONS]
|
||||
|
||||
Start the aprsd server gateway process.
|
||||
|
||||
Options:
|
||||
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
|
||||
The log level to use for aprsd.log
|
||||
[default: INFO]
|
||||
-c, --config TEXT The aprsd config file to use for options.
|
||||
[default:
|
||||
/Users/i530566/.config/aprsd/aprsd.yml]
|
||||
--quiet Don't log to stdout
|
||||
-f, --flush Flush out all old aged messages on disk.
|
||||
[default: False]
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
└─> aprsd server
|
||||
Load config
|
||||
12/07/2021 03:16:17 PM MainThread INFO APRSD is up to date server.py:51
|
||||
12/07/2021 03:16:17 PM MainThread INFO APRSD Started version: 2.5.6 server.py:52
|
||||
12/07/2021 03:16:17 PM MainThread INFO Using CONFIG values: server.py:55
|
||||
12/07/2021 03:16:17 PM MainThread INFO ham.callsign = WB4BOR server.py:60
|
||||
12/07/2021 03:16:17 PM MainThread INFO aprs.login = WB4BOR-12 server.py:60
|
||||
12/07/2021 03:16:17 PM MainThread INFO aprs.password = XXXXXXXXXXXXXXXXXXX server.py:58
|
||||
12/07/2021 03:16:17 PM MainThread INFO aprs.host = noam.aprs2.net server.py:60
|
||||
12/07/2021 03:16:17 PM MainThread INFO aprs.port = 14580 server.py:60
|
||||
12/07/2021 03:16:17 PM MainThread INFO aprs.logfile = /tmp/aprsd.log server.py:60
|
||||
|
||||
|
||||
Current list of built-in plugins
|
||||
--------------------------------
|
||||
::
|
||||
|
||||
└─> aprsd list-plugins
|
||||
🐍 APRSD Built-in Plugins 🐍
|
||||
┏━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ Plugin Name ┃ Info ┃ Type ┃ Plugin Path ┃
|
||||
┡━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
|
||||
│ AVWXWeatherPlugin │ AVWX weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.AVWXWeatherPlugin │
|
||||
│ EmailPlugin │ Send and Receive email │ RegexCommand │ aprsd.plugins.email.EmailPlugin │
|
||||
│ FortunePlugin │ Give me a fortune │ RegexCommand │ aprsd.plugins.fortune.FortunePlugin │
|
||||
│ LocationPlugin │ Where in the world is a CALLSIGN's last GPS beacon? │ RegexCommand │ aprsd.plugins.location.LocationPlugin │
|
||||
│ NotifySeenPlugin │ Notify me when a CALLSIGN is recently seen on APRS-IS │ WatchList │ aprsd.plugins.notify.NotifySeenPlugin │
|
||||
│ OWMWeatherPlugin │ OpenWeatherMap weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.OWMWeatherPlugin │
|
||||
│ PingPlugin │ reply with a Pong! │ RegexCommand │ aprsd.plugins.ping.PingPlugin │
|
||||
│ QueryPlugin │ APRSD Owner command to query messages in the MsgTrack │ RegexCommand │ aprsd.plugins.query.QueryPlugin │
|
||||
│ TimeOWMPlugin │ Current time of GPS beacon's timezone. Uses OpenWeatherMap │ RegexCommand │ aprsd.plugins.time.TimeOWMPlugin │
|
||||
│ TimePlugin │ What is the current local time. │ RegexCommand │ aprsd.plugins.time.TimePlugin │
|
||||
│ USMetarPlugin │ USA only METAR of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USMetarPlugin │
|
||||
│ USWeatherPlugin │ Provide USA only weather of GPS Beacon location │ RegexCommand │ aprsd.plugins.weather.USWeatherPlugin │
|
||||
│ VersionPlugin │ What is the APRSD Version │ RegexCommand │ aprsd.plugins.version.VersionPlugin │
|
||||
└───────────────────┴────────────────────────────────────────────────────────────┴──────────────┴─────────────────────────────────────────┘
|
||||
|
||||
|
||||
Pypi.org APRSD Installable Plugin Packages
|
||||
|
||||
Install any of the following plugins with 'pip install <Plugin Package Name>'
|
||||
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━┓
|
||||
┃ Plugin Package Name ┃ Description ┃ Version ┃ Released ┃ Installed? ┃
|
||||
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━┩
|
||||
│ 📂 aprsd-stock-plugin │ Ham Radio APRSD Plugin for fetching stock quotes │ 0.1.3 │ Dec 2, 2022 │ No │
|
||||
│ 📂 aprsd-sentry-plugin │ Ham radio APRSD plugin that does.... │ 0.1.2 │ Dec 2, 2022 │ No │
|
||||
│ 📂 aprsd-timeopencage-plugin │ APRSD plugin for fetching time based on GPS location │ 0.1.0 │ Dec 2, 2022 │ No │
|
||||
│ 📂 aprsd-weewx-plugin │ HAM Radio APRSD that reports weather from a weewx weather station. │ 0.1.4 │ Dec 7, 2021 │ Yes │
|
||||
│ 📂 aprsd-repeat-plugins │ APRSD Plugins for the REPEAT service │ 1.0.12 │ Dec 2, 2022 │ No │
|
||||
│ 📂 aprsd-telegram-plugin │ Ham Radio APRS APRSD plugin for Telegram IM service │ 0.1.3 │ Dec 2, 2022 │ No │
|
||||
│ 📂 aprsd-twitter-plugin │ Python APRSD plugin to send tweets │ 0.3.0 │ Dec 7, 2021 │ No │
|
||||
│ 📂 aprsd-slack-plugin │ Amateur radio APRS daemon which listens for messages and responds │ 1.0.5 │ Dec 18, 2022 │ No │
|
||||
└──────────────────────────────┴────────────────────────────────────────────────────────────────────┴─────────┴──────────────┴────────────┘
|
||||
|
||||
|
||||
🐍 APRSD Installed 3rd party Plugins 🐍
|
||||
┏━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
|
||||
┃ Package Name ┃ Plugin Name ┃ Version ┃ Type ┃ Plugin Path ┃
|
||||
┡━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
|
||||
│ aprsd-weewx-plugin │ WeewxMQTTPlugin │ 1.0 │ RegexCommand │ aprsd_weewx_plugin.weewx.WeewxMQTTPlugin │
|
||||
└────────────────────┴─────────────────┴─────────┴──────────────┴──────────────────────────────────────────┘
|
||||
|
||||
|
||||
|
||||
send-message
|
||||
============
|
||||
|
||||
This command is typically used for development to send another aprsd instance
|
||||
test messages
|
||||
|
||||
::
|
||||
|
||||
└─[$] > aprsd send-message -h
|
||||
Usage: aprsd send-message [OPTIONS] TOCALLSIGN COMMAND...
|
||||
|
||||
Send a message to a callsign via APRS_IS.
|
||||
|
||||
Options:
|
||||
--loglevel [CRITICAL|ERROR|WARNING|INFO|DEBUG]
|
||||
The log level to use for aprsd.log
|
||||
[default: INFO]
|
||||
-c, --config TEXT The aprsd config file to use for options.
|
||||
[default:
|
||||
/Users/i530566/.config/aprsd/aprsd.yml]
|
||||
--quiet Don't log to stdout
|
||||
--aprs-login TEXT What callsign to send the message from.
|
||||
[env var: APRS_LOGIN]
|
||||
--aprs-password TEXT the APRS-IS password for APRS_LOGIN [env
|
||||
var: APRS_PASSWORD]
|
||||
-n, --no-ack Don't wait for an ack, just sent it to APRS-
|
||||
IS and bail. [default: False]
|
||||
-w, --wait-response Wait for a response to the message?
|
||||
[default: False]
|
||||
--raw TEXT Send a raw message. Implies --no-ack
|
||||
-h, --help Show this message and exit.
|
||||
|
||||
|
||||
SEND EMAIL (radio to smtp server)
|
||||
=================================
|
||||
|
||||
::
|
||||
|
||||
Received message______________
|
||||
Raw : KM6XXX>APY400,WIDE1-1,qAO,KM6XXX-1::KM6XXX-9 :-user@host.com test new shortcuts global, radio to pc{29
|
||||
From : KM6XXX
|
||||
Message : -user@host.com test new shortcuts global, radio to pc
|
||||
Msg number : 29
|
||||
|
||||
Sending Email_________________
|
||||
To : user@host.com
|
||||
Subject : KM6XXX
|
||||
Body : test new shortcuts global, radio to pc
|
||||
|
||||
Sending ack __________________ Tx(3)
|
||||
Raw : KM6XXX-9>APRS::KM6XXX :ack29
|
||||
To : KM6XXX
|
||||
Ack number : 29
|
||||
|
||||
|
||||
RECEIVE EMAIL (imap server to radio)
|
||||
====================================
|
||||
|
||||
::
|
||||
|
||||
Sending message_______________ 6(Tx3)
|
||||
Raw : KM6XXX-9>APRS::KM6XXX :-somebody@gmail.com email from internet to radio{6
|
||||
To : KM6XXX
|
||||
Message : -somebody@gmail.com email from internet to radio
|
||||
|
||||
Received message______________
|
||||
Raw : KM6XXX>APY400,WIDE1-1,qAO,KM6XXX-1::KM6XXX-9 :ack6
|
||||
From : KM6XXX
|
||||
Message : ack6
|
||||
Msg number : 0
|
||||
|
||||
|
||||
LOCATION
|
||||
========
|
||||
|
||||
::
|
||||
|
||||
Received Message _______________
|
||||
Raw : KM6XXX-6>APRS,TCPIP*,qAC,T2CAEAST::KM6XXX-14:location{2
|
||||
From : KM6XXX-6
|
||||
Message : location
|
||||
Msg number : 2
|
||||
Received Message _______________ Complete
|
||||
|
||||
Sending Message _______________
|
||||
Raw : KM6XXX-14>APRS::KM6XXX-6 :KM6XXX-6: 8 Miles E Auburn CA 0' 0,-120.93584 1873.7h ago{2
|
||||
To : KM6XXX-6
|
||||
Message : KM6XXX-6: 8 Miles E Auburn CA 0' 0,-120.93584 1873.7h ago
|
||||
Msg number : 2
|
||||
Sending Message _______________ Complete
|
||||
|
||||
Sending ack _______________
|
||||
Raw : KM6XXX-14>APRS::KM6XXX-6 :ack2
|
||||
To : KM6XXX-6
|
||||
Ack : 2
|
||||
Sending ack _______________ Complete
|
||||
|
||||
AND... ping, fortune, time.....
|
||||
|
||||
|
||||
Web Admin Interface
|
||||
===================
|
||||
APRSD has a web admin interface that allows you to view the status of the running APRSD server instance.
|
||||
The web admin interface shows graphs of packet counts, packet types, number of threads running, the latest
|
||||
packets sent and received, and the status of each of the plugins that are loaded. You can also view the logfile
|
||||
and view the raw APRSD configuration file.
|
||||
|
||||
To start the web admin interface, You have to install gunicorn in your virtualenv that already has aprsd installed.
|
||||
|
||||
::
|
||||
|
||||
source <path to APRSD's virtualenv>/bin/activate
|
||||
aprsd admin --loglevel INFO
|
||||
|
||||
The web admin interface will be running on port 8080 on the local machine. http://localhost:8080
|
||||
|
||||
|
||||
Development
|
||||
===========
|
||||
|
||||
* ``git clone git@github.com:craigerl/aprsd.git``
|
||||
* ``cd aprsd``
|
||||
* ``make``
|
||||
|
||||
Workflow
|
||||
--------
|
||||
|
||||
While working aprsd, The workflow is as follows:
|
||||
|
||||
* Checkout a new branch to work on by running
|
||||
|
||||
``git checkout -b mybranch``
|
||||
|
||||
* Make your changes to the code
|
||||
* Run Tox with the following options:
|
||||
|
||||
- ``tox -epep8``
|
||||
- ``tox -efmt``
|
||||
- ``tox -p``
|
||||
|
||||
* Commit your changes. This will run the pre-commit hooks which does checks too
|
||||
|
||||
``git commit``
|
||||
|
||||
* Once you are done with all of your commits, then push up the branch to
|
||||
github with:
|
||||
|
||||
``git push -u origin mybranch``
|
||||
|
||||
* Create a pull request from your branch so github tests can run and we can do
|
||||
a code review.
|
||||
|
||||
|
||||
Release
|
||||
-------
|
||||
|
||||
To do release to pypi:
|
||||
|
||||
* Tag release with:
|
||||
|
||||
``git tag -v1.XX -m "New release"``
|
||||
|
||||
* Push release tag:
|
||||
|
||||
``git push origin master --tags``
|
||||
|
||||
* Do a test build and verify build is valid by running:
|
||||
|
||||
``make build``
|
||||
|
||||
* Once twine is happy, upload release to pypi:
|
||||
|
||||
``make upload``
|
||||
|
||||
|
||||
Building your own APRSD plugins
|
||||
-------------------------------
|
||||
|
||||
APRSD plugins are the mechanism by which APRSD can respond to APRS Messages. The plugins are loaded at server startup
|
||||
and can also be loaded at listen startup. When a packet is received by APRSD, it is passed to each of the plugins
|
||||
in the order they were registered in the config file. The plugins can then decide what to do with the packet.
|
||||
When a plugin is called, it is passed a APRSD Packet object. The plugin can then do something with the packet and
|
||||
return a reply message if desired. If a plugin does not want to reply to the packet, it can just return None.
|
||||
When a plugin does return a reply message, APRSD will send the reply message to the appropriate destination.
|
||||
|
||||
For example, when a 'ping' message is received, the PingPlugin will return a reply message of 'pong'. When APRSD
|
||||
receives the 'pong' message, it will be sent back to the original caller of the ping message.
|
||||
|
||||
APRSD plugins are simply python packages that can be installed from pypi.org. They are installed into the
|
||||
aprsd virtualenv and can be imported by APRSD at runtime. The plugins are registered in the config file and loaded
|
||||
at startup of the aprsd server command or the aprsd listen command.
|
||||
|
||||
Overview
|
||||
--------
|
||||
You can build your own plugins by following the instructions in the `Building your own APRSD plugins`_ section.
|
||||
|
||||
Plugins are called by APRSD when packe
|
||||
|
||||
Docker Container
|
||||
================
|
||||
|
||||
Building
|
||||
========
|
||||
|
||||
There are 2 versions of the container Dockerfile that can be used.
|
||||
The main Dockerfile, which is for building the official release container
|
||||
based off of the pip install version of aprsd and the Dockerfile-dev,
|
||||
which is used for building a container based off of a git branch of
|
||||
the repo.
|
||||
|
||||
Official Build
|
||||
==============
|
||||
|
||||
``docker build -t hemna6969/aprsd:latest .``
|
||||
|
||||
Development Build
|
||||
=================
|
||||
|
||||
``docker build -t hemna6969/aprsd:latest -f Dockerfile-dev .``
|
||||
|
||||
|
||||
Running the container
|
||||
=====================
|
||||
|
||||
There is a ``docker-compose.yml`` file in the ``docker/`` directory
|
||||
that can be used to run your container. To provide the container
|
||||
an ``aprsd.conf`` configuration file, change your
|
||||
``docker-compose.yml`` as shown below:
|
||||
|
||||
::
|
||||
|
||||
volumes:
|
||||
- $HOME/.config/aprsd:/config
|
||||
|
||||
To install plugins at container start time, pass in a list of
|
||||
comma-separated list of plugins on PyPI using the ``APRSD_PLUGINS``
|
||||
environment variable in the ``docker-compose.yml`` file. Note that
|
||||
version constraints may also be provided. For example:
|
||||
|
||||
::
|
||||
|
||||
environment:
|
||||
- APRSD_PLUGINS=aprsd-slack-plugin>=1.0.2,aprsd-twitter-plugin
|
||||
|
||||
|
||||
.. badges
|
||||
|
||||
.. |pypi| image:: https://badge.fury.io/py/aprsd.svg
|
||||
:target: https://badge.fury.io/py/aprsd
|
||||
|
||||
.. |pytest| image:: https://github.com/craigerl/aprsd/workflows/python/badge.svg
|
||||
:target: https://github.com/craigerl/aprsd/actions
|
||||
|
||||
.. |versions| image:: https://img.shields.io/pypi/pyversions/aprsd.svg
|
||||
:target: https://pypi.org/pypi/aprsd
|
||||
|
||||
.. |slack| image:: https://img.shields.io/badge/slack-@hemna/aprsd-blue.svg?logo=slack
|
||||
:target: https://hemna.slack.com/app_redirect?channel=C01KQSCP5RP
|
||||
|
||||
.. |imports| image:: https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336
|
||||
:target: https://timothycrosley.github.io/isort/
|
||||
|
||||
.. |issues| image:: https://img.shields.io/github/issues/craigerl/aprsd
|
||||
|
||||
.. |commit| image:: https://img.shields.io/github/last-commit/craigerl/aprsd
|
||||
|
||||
.. |down| image:: https://static.pepy.tech/personalized-badge/aprsd?period=month&units=international_system&left_color=black&right_color=orange&left_text=Downloads
|
||||
:target: https://pepy.tech/project/aprsd
|
||||
|
||||
.. links
|
||||
.. _read the docs:
|
||||
https://aprsd.readthedocs.io
|
@@ -1,7 +1,7 @@
-from functools import update_wrapper
 import logging
-from pathlib import Path
 import typing as t
+from functools import update_wrapper
+from pathlib import Path

 import click
 from oslo_config import cfg
@@ -11,7 +11,6 @@ from aprsd import conf  # noqa: F401
 from aprsd.log import log
 from aprsd.utils import trace

-
 CONF = cfg.CONF
 home = str(Path.home())
 DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
@@ -58,6 +57,7 @@ class AliasedGroup(click.Group):
         calling into :meth:`add_command`.
         Copied from `click` and extended for `aliases`.
         """
+
         def decorator(f):
             aliases = kwargs.pop("aliases", [])
             cmd = click.decorators.command(*args, **kwargs)(f)
@@ -65,6 +65,7 @@ class AliasedGroup(click.Group):
             for alias in aliases:
                 self.add_command(cmd, name=alias)
             return cmd
+
         return decorator

     def group(self, *args, **kwargs):
@@ -74,6 +75,7 @@ class AliasedGroup(click.Group):
         calling into :meth:`add_command`.
         Copied from `click` and extended for `aliases`.
         """
+
         def decorator(f):
             aliases = kwargs.pop("aliases", [])
             cmd = click.decorators.group(*args, **kwargs)(f)
@@ -81,6 +83,7 @@ class AliasedGroup(click.Group):
             for alias in aliases:
                 self.add_command(cmd, name=alias)
             return cmd
+
         return decorator


@@ -89,6 +92,7 @@ def add_options(options):
         for option in reversed(options):
             func = option(func)
         return func
+
     return _add_options


@@ -103,7 +107,9 @@ def process_standard_options(f: F) -> F:
         default_config_files = None
         try:
             CONF(
-                [], project="aprsd", version=aprsd.__version__,
+                [],
+                project="aprsd",
+                version=aprsd.__version__,
                 default_config_files=default_config_files,
             )
         except cfg.ConfigFilesNotFoundError:
@@ -119,7 +125,7 @@
         trace.setup_tracing(["method", "api"])

         if not config_file_found:
-            LOG = logging.getLogger("APRSD")  # noqa: N806
+            LOG = logging.getLogger("APRSD")  # noqa: N806
             LOG.error("No config file found!! run 'aprsd sample-config'")

         del kwargs["loglevel"]
@@ -132,6 +138,7 @@

 def process_standard_options_no_config(f: F) -> F:
     """Use this as a decorator when config isn't needed."""
+
     def new_func(*args, **kwargs):
         ctx = args[0]
         ctx.ensure_object(dict)
@@ -2,7 +2,9 @@ import datetime
 import logging
 import time

+import timeago
 from aprslib.exceptions import LoginError
 from loguru import logger
 from oslo_config import cfg

 from aprsd import client, exception
@@ -10,14 +12,14 @@ from aprsd.client import base
 from aprsd.client.drivers import aprsis
 from aprsd.packets import core


 CONF = cfg.CONF
 LOG = logging.getLogger("APRSD")
 LOGU = logger


 class APRSISClient(base.APRSClient):

     _client = None
     _checks = False

     def __init__(self):
         max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
@@ -45,6 +47,20 @@

         return stats

+    def keepalive_check(self):
+        # Don't check the first time through.
+        if not self.is_alive() and self._checks:
+            LOG.warning("Resetting client. It's not alive.")
+            self.reset()
+        self._checks = True
+
+    def keepalive_log(self):
+        if ka := self._client.aprsd_keepalive:
+            keepalive = timeago.format(ka)
+        else:
+            keepalive = "N/A"
+        LOGU.opt(colors=True).info(f"<green>Client keepalive {keepalive}</green>")
+
     @staticmethod
     def is_enabled():
         # Defaults to True if the enabled flag is non existent
@@ -81,13 +97,13 @@
         if delta > self.max_delta:
             LOG.error(f"Connection is stale, last heard {delta} ago.")
             return True
         return False

     def is_alive(self):
-        if self._client:
-            return self._client.is_alive() and not self._is_stale_connection()
-        else:
+        if not self._client:
             LOG.warning(f"APRS_CLIENT {self._client} alive? NO!!!")
             return False
+        return self._client.is_alive() and not self._is_stale_connection()

     def close(self):
         if self._client:
@@ -117,8 +133,12 @@
             if retry_count >= retries:
                 break
             try:
-                LOG.info(f"Creating aprslib client({host}:{port}) and logging in {user}.")
-                aprs_client = aprsis.Aprsdis(user, passwd=password, host=host, port=port)
+                LOG.info(
+                    f"Creating aprslib client({host}:{port}) and logging in {user}."
+                )
+                aprs_client = aprsis.Aprsdis(
+                    user, passwd=password, host=host, port=port
+                )
                 # Force the log to be the same
                 aprs_client.logger = LOG
                 aprs_client.connect()
@@ -149,8 +169,10 @@
         if self._client:
             try:
                 self._client.consumer(
-                    callback, blocking=blocking,
-                    immortal=immortal, raw=raw,
+                    callback,
+                    blocking=blocking,
+                    immortal=immortal,
+                    raw=raw,
                 )
             except Exception as e:
                 LOG.error(e)
@ -2,11 +2,11 @@ import abc
import logging
import threading

from oslo_config import cfg
import wrapt
from oslo_config import cfg

from aprsd.packets import core

from aprsd.utils import keepalive_collector

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")
@ -30,6 +30,7 @@ class APRSClient:
"""This magic turns this into a singleton."""
if cls._instance is None:
cls._instance = super().__new__(cls)
keepalive_collector.KeepAliveCollector().register(cls)
# Put any initialization here.
cls._instance._create_client()
return cls._instance
@ -42,6 +43,16 @@ class APRSClient:
dict: Statistics about the connection and packet handling
"""

@abc.abstractmethod
def keepalive_check(self) -> None:
"""Called during keepalive run to check status."""
...

@abc.abstractmethod
def keepalive_log(self) -> None:
"""Log any keepalive information."""
...

@property
def is_connected(self):
return self.connected
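The `__new__` override above combines the singleton pattern with one-time registration against `keepalive_collector.KeepAliveCollector`. A self-contained toy version of that pattern, with a stand-in collector since the collector's full API is not shown in this diff:

```python
class Collector:
    """Stand-in for KeepAliveCollector; only register() is assumed here."""

    registered = []

    def register(self, cls):
        self.registered.append(cls)


class FakeClient:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            Collector().register(cls)      # runs exactly once per class
            cls._instance._create_client()
        return cls._instance

    def _create_client(self):
        self.connected = False


assert FakeClient() is FakeClient()        # every construction returns the same object
assert Collector.registered == [FakeClient]
```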
@ -4,17 +4,20 @@ import select
import threading

import aprslib
import wrapt
from aprslib import is_py3
from aprslib.exceptions import (
ConnectionDrop, ConnectionError, GenericError, LoginError, ParseError,
ConnectionDrop,
ConnectionError,
GenericError,
LoginError,
ParseError,
UnknownFormat,
)
import wrapt

import aprsd
from aprsd.packets import core


LOG = logging.getLogger("APRSD")

@ -3,20 +3,19 @@ import threading
|
||||
import time
|
||||
|
||||
import aprslib
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import conf # noqa
|
||||
from aprsd.packets import core
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
LOG = logging.getLogger('APRSD')
|
||||
|
||||
|
||||
class APRSDFakeClient(metaclass=trace.TraceWrapperMetaclass):
|
||||
'''Fake client for testing.'''
|
||||
"""Fake client for testing."""
|
||||
|
||||
# flag to tell us to stop
|
||||
thread_stop = False
|
||||
@ -25,12 +24,12 @@ class APRSDFakeClient(metaclass=trace.TraceWrapperMetaclass):
|
||||
path = []
|
||||
|
||||
def __init__(self):
|
||||
LOG.info("Starting APRSDFakeClient client.")
|
||||
self.path = ["WIDE1-1", "WIDE2-1"]
|
||||
LOG.info('Starting APRSDFakeClient client.')
|
||||
self.path = ['WIDE1-1', 'WIDE2-1']
|
||||
|
||||
def stop(self):
|
||||
self.thread_stop = True
|
||||
LOG.info("Shutdown APRSDFakeClient client.")
|
||||
LOG.info('Shutdown APRSDFakeClient client.')
|
||||
|
||||
def is_alive(self):
|
||||
"""If the connection is alive or not."""
|
||||
@ -39,35 +38,31 @@ class APRSDFakeClient(metaclass=trace.TraceWrapperMetaclass):
|
||||
@wrapt.synchronized(lock)
|
||||
def send(self, packet: core.Packet):
|
||||
"""Send an APRS Message object."""
|
||||
LOG.info(f"Sending packet: {packet}")
|
||||
LOG.info(f'Sending packet: {packet}')
|
||||
payload = None
|
||||
if isinstance(packet, core.Packet):
|
||||
packet.prepare()
|
||||
payload = packet.payload.encode("US-ASCII")
|
||||
if packet.path:
|
||||
packet.path
|
||||
else:
|
||||
self.path
|
||||
payload = packet.payload.encode('US-ASCII')
|
||||
else:
|
||||
msg_payload = f"{packet.raw}{{{str(packet.msgNo)}"
|
||||
msg_payload = f'{packet.raw}{{{str(packet.msgNo)}'
|
||||
payload = (
|
||||
":{:<9}:{}".format(
|
||||
':{:<9}:{}'.format(
|
||||
packet.to_call,
|
||||
msg_payload,
|
||||
)
|
||||
).encode("US-ASCII")
|
||||
).encode('US-ASCII')
|
||||
|
||||
LOG.debug(
|
||||
f"FAKE::Send '{payload}' TO '{packet.to_call}' From "
|
||||
f"'{packet.from_call}' with PATH \"{self.path}\"",
|
||||
f'\'{packet.from_call}\' with PATH "{self.path}"',
|
||||
)
|
||||
|
||||
def consumer(self, callback, blocking=False, immortal=False, raw=False):
|
||||
LOG.debug("Start non blocking FAKE consumer")
|
||||
LOG.debug('Start non blocking FAKE consumer')
|
||||
# Generate packets here?
|
||||
raw = "GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW"
|
||||
raw = 'GTOWN>APDW16,WIDE1-1,WIDE2-1:}KM6LYW-9>APZ100,TCPIP,GTOWN*::KM6LYW :KM6LYW: 19 Miles SW'
|
||||
pkt_raw = aprslib.parse(raw)
|
||||
pkt = core.factory(pkt_raw)
|
||||
callback(packet=pkt)
|
||||
LOG.debug(f"END blocking FAKE consumer {self}")
|
||||
LOG.debug(f'END blocking FAKE consumer {self}')
|
||||
time.sleep(8)
|
||||
|
@ -4,13 +4,11 @@ from typing import Callable, Protocol, runtime_checkable
|
||||
from aprsd import exception
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
@runtime_checkable
|
||||
class Client(Protocol):
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
|
@ -7,13 +7,11 @@ from aprsd.client import base
|
||||
from aprsd.client.drivers import fake as fake_driver
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class APRSDFakeClient(base.APRSClient, metaclass=trace.TraceWrapperMetaclass):
|
||||
|
||||
def stats(self, serializable=False) -> dict:
|
||||
return {
|
||||
"transport": "Fake",
|
||||
|
@ -2,6 +2,8 @@ import datetime
|
||||
import logging
|
||||
|
||||
import aprslib
|
||||
import timeago
|
||||
from loguru import logger
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import client, exception
|
||||
@ -9,13 +11,12 @@ from aprsd.client import base
|
||||
from aprsd.client.drivers import kiss
|
||||
from aprsd.packets import core
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
LOGU = logger
|
||||
|
||||
|
||||
class KISSClient(base.APRSClient):
|
||||
|
||||
_client = None
|
||||
keepalive = datetime.datetime.now()
|
||||
|
||||
@ -79,6 +80,20 @@ class KISSClient(base.APRSClient):
|
||||
if self._client:
|
||||
self._client.stop()
|
||||
|
||||
def keepalive_check(self):
|
||||
# Don't check the first time through.
|
||||
if not self.is_alive() and self._checks:
|
||||
LOG.warning("Resetting client. It's not alive.")
|
||||
self.reset()
|
||||
self._checks = True
|
||||
|
||||
def keepalive_log(self):
|
||||
if ka := self._client.aprsd_keepalive:
|
||||
keepalive = timeago.format(ka)
|
||||
else:
|
||||
keepalive = "N/A"
|
||||
LOGU.opt(colors=True).info(f"<green>Client keepalive {keepalive}</green>")
|
||||
|
||||
@staticmethod
|
||||
def transport():
|
||||
if CONF.kiss_serial.enabled:
|
||||
|
@ -1,18 +1,16 @@
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import client
|
||||
from aprsd.utils import singleton
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
@singleton
|
||||
class APRSClientStats:
|
||||
|
||||
lock = threading.Lock()
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
|
@ -1,57 +0,0 @@
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
|
||||
import click
|
||||
from oslo_config import cfg
|
||||
import socketio
|
||||
|
||||
import aprsd
|
||||
from aprsd import cli_helper
|
||||
from aprsd import main as aprsd_main
|
||||
from aprsd import utils
|
||||
from aprsd.main import cli
|
||||
|
||||
|
||||
os.environ["APRSD_ADMIN_COMMAND"] = "1"
|
||||
# this import has to happen AFTER we set the
|
||||
# above environment variable, so that the code
|
||||
# inside the wsgi.py has the value
|
||||
from aprsd import wsgi as aprsd_wsgi # noqa
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
# main() ###
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.pass_context
|
||||
@cli_helper.process_standard_options
|
||||
def admin(ctx):
|
||||
"""Start the aprsd admin interface."""
|
||||
signal.signal(signal.SIGINT, aprsd_main.signal_handler)
|
||||
signal.signal(signal.SIGTERM, aprsd_main.signal_handler)
|
||||
|
||||
level, msg = utils._check_version()
|
||||
if level:
|
||||
LOG.warning(msg)
|
||||
else:
|
||||
LOG.info(msg)
|
||||
LOG.info(f"APRSD Started version: {aprsd.__version__}")
|
||||
# Dump all the config options now.
|
||||
CONF.log_opt_values(LOG, logging.DEBUG)
|
||||
|
||||
async_mode = "threading"
|
||||
sio = socketio.Server(logger=True, async_mode=async_mode)
|
||||
aprsd_wsgi.app.wsgi_app = socketio.WSGIApp(sio, aprsd_wsgi.app.wsgi_app)
|
||||
aprsd_wsgi.init_app()
|
||||
sio.register_namespace(aprsd_wsgi.LoggingNamespace("/logs"))
|
||||
CONF.log_opt_values(LOG, logging.DEBUG)
|
||||
aprsd_wsgi.app.run(
|
||||
threaded=True,
|
||||
debug=False,
|
||||
port=CONF.admin.web_port,
|
||||
host=CONF.admin.web_ip,
|
||||
)
|
@ -3,12 +3,13 @@ import click.shell_completion

from aprsd.main import cli


CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])


@cli.command()
@click.argument("shell", type=click.Choice(list(click.shell_completion._available_shells)))
@click.argument(
"shell", type=click.Choice(list(click.shell_completion._available_shells))
)
def completion(shell):
"""Show the shell completion code"""
from click.utils import _detect_program_name
@ -17,6 +18,8 @@ def completion(shell):
prog_name = _detect_program_name()
complete_var = f"_{prog_name}_COMPLETE".replace("-", "_").upper()
print(cls(cli, {}, prog_name, complete_var).source())
print("# Add the following line to your shell configuration file to have aprsd command line completion")
print(
"# Add the following line to your shell configuration file to have aprsd command line completion"
)
print("# but remove the leading '#' character.")
print(f"# eval \"$(aprsd completion {shell})\"")
print(f'# eval "$(aprsd completion {shell})"')
@ -9,18 +9,18 @@ import click
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import cli_helper, conf, packets, plugin
|
||||
|
||||
# local imports here
|
||||
from aprsd.client import base
|
||||
from aprsd.main import cli
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
|
||||
LOG = logging.getLogger('APRSD')
|
||||
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
|
||||
|
||||
|
||||
@cli.group(help="Development type subcommands", context_settings=CONTEXT_SETTINGS)
|
||||
@cli.group(help='Development type subcommands', context_settings=CONTEXT_SETTINGS)
|
||||
@click.pass_context
|
||||
def dev(ctx):
|
||||
pass
|
||||
@ -29,37 +29,37 @@ def dev(ctx):
|
||||
@dev.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.option(
|
||||
"--aprs-login",
|
||||
envvar="APRS_LOGIN",
|
||||
'--aprs-login',
|
||||
envvar='APRS_LOGIN',
|
||||
show_envvar=True,
|
||||
help="What callsign to send the message from.",
|
||||
help='What callsign to send the message from.',
|
||||
)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--plugin",
|
||||
"plugin_path",
|
||||
'-p',
|
||||
'--plugin',
|
||||
'plugin_path',
|
||||
show_default=True,
|
||||
default=None,
|
||||
help="The plugin to run. Ex: aprsd.plugins.ping.PingPlugin",
|
||||
help='The plugin to run. Ex: aprsd.plugins.ping.PingPlugin',
|
||||
)
|
||||
@click.option(
|
||||
"-a",
|
||||
"--all",
|
||||
"load_all",
|
||||
'-a',
|
||||
'--all',
|
||||
'load_all',
|
||||
show_default=True,
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Load all the plugins in config?",
|
||||
help='Load all the plugins in config?',
|
||||
)
|
||||
@click.option(
|
||||
"-n",
|
||||
"--num",
|
||||
"number",
|
||||
'-n',
|
||||
'--num',
|
||||
'number',
|
||||
show_default=True,
|
||||
default=1,
|
||||
help="Number of times to call the plugin",
|
||||
help='Number of times to call the plugin',
|
||||
)
|
||||
@click.argument("message", nargs=-1, required=True)
|
||||
@click.argument('message', nargs=-1, required=True)
|
||||
@click.pass_context
|
||||
@cli_helper.process_standard_options
|
||||
def test_plugin(
|
||||
@ -76,7 +76,7 @@ def test_plugin(
|
||||
|
||||
if not aprs_login:
|
||||
if CONF.aprs_network.login == conf.client.DEFAULT_LOGIN:
|
||||
click.echo("Must set --aprs_login or APRS_LOGIN")
|
||||
click.echo('Must set --aprs_login or APRS_LOGIN')
|
||||
ctx.exit(-1)
|
||||
return
|
||||
else:
|
||||
@ -86,16 +86,16 @@ def test_plugin(
|
||||
|
||||
if not plugin_path:
|
||||
click.echo(ctx.get_help())
|
||||
click.echo("")
|
||||
click.echo("Failed to provide -p option to test a plugin")
|
||||
click.echo('')
|
||||
click.echo('Failed to provide -p option to test a plugin')
|
||||
ctx.exit(-1)
|
||||
return
|
||||
|
||||
if type(message) is tuple:
|
||||
message = " ".join(message)
|
||||
message = ' '.join(message)
|
||||
|
||||
if CONF.trace_enabled:
|
||||
trace.setup_tracing(["method", "api"])
|
||||
trace.setup_tracing(['method', 'api'])
|
||||
|
||||
base.APRSClient()
|
||||
|
||||
@ -105,14 +105,15 @@ def test_plugin(
|
||||
obj = pm._create_class(plugin_path, plugin.APRSDPluginBase)
|
||||
if not obj:
|
||||
click.echo(ctx.get_help())
|
||||
click.echo("")
|
||||
click.echo('')
|
||||
ctx.fail(f"Failed to create object from plugin path '{plugin_path}'")
|
||||
ctx.exit()
|
||||
|
||||
# Register the plugin they wanted tested.
|
||||
LOG.info(
|
||||
"Testing plugin {} Version {}".format(
|
||||
obj.__class__, obj.version,
|
||||
'Testing plugin {} Version {}'.format(
|
||||
obj.__class__,
|
||||
obj.version,
|
||||
),
|
||||
)
|
||||
pm.register_msg(obj)
|
||||
@ -125,7 +126,7 @@ def test_plugin(
|
||||
)
|
||||
LOG.info(f"P'{plugin_path}' F'{fromcall}' C'{message}'")
|
||||
|
||||
for x in range(number):
|
||||
for _ in range(number):
|
||||
replies = pm.run(packet)
|
||||
# Plugin might have threads, so lets stop them so we can exit.
|
||||
# obj.stop_threads()
|
||||
@ -146,17 +147,12 @@ def test_plugin(
|
||||
elif isinstance(reply, packets.Packet):
|
||||
# We have a message based object.
|
||||
LOG.info(reply)
|
||||
else:
|
||||
# A plugin can return a null message flag which signals
|
||||
# us that they processed the message correctly, but have
|
||||
# nothing to reply with, so we avoid replying with a
|
||||
# usage string
|
||||
if reply is not packets.NULL_MESSAGE:
|
||||
LOG.info(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=fromcall,
|
||||
message_text=reply,
|
||||
),
|
||||
)
|
||||
elif reply is not packets.NULL_MESSAGE:
|
||||
LOG.info(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=fromcall,
|
||||
message_text=reply,
|
||||
),
|
||||
)
|
||||
pm.stop()
|
||||
|
@ -2,8 +2,8 @@
|
||||
import logging
|
||||
|
||||
import click
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
from oslo_config import cfg
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
|
||||
@ -13,31 +13,32 @@ from aprsd import cli_helper
|
||||
from aprsd.main import cli
|
||||
from aprsd.threads.stats import StatsStore
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
LOG = logging.getLogger("APRSD")
|
||||
LOG = logging.getLogger('APRSD')
|
||||
CONF = cfg.CONF
|
||||
|
||||
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.option(
|
||||
"--host", type=str,
|
||||
'--host',
|
||||
type=str,
|
||||
default=None,
|
||||
help="IP address of the remote aprsd admin web ui fetch stats from.",
|
||||
help='IP address of the remote aprsd admin web ui fetch stats from.',
|
||||
)
|
||||
@click.option(
|
||||
"--port", type=int,
|
||||
'--port',
|
||||
type=int,
|
||||
default=None,
|
||||
help="Port of the remote aprsd web admin interface to fetch stats from.",
|
||||
help='Port of the remote aprsd web admin interface to fetch stats from.',
|
||||
)
|
||||
@click.pass_context
|
||||
@cli_helper.process_standard_options
|
||||
def fetch_stats(ctx, host, port):
|
||||
"""Fetch stats from a APRSD admin web interface."""
|
||||
console = Console()
|
||||
console.print(f"APRSD Fetch-Stats started version: {aprsd.__version__}")
|
||||
console.print(f'APRSD Fetch-Stats started version: {aprsd.__version__}')
|
||||
|
||||
CONF.log_opt_values(LOG, logging.DEBUG)
|
||||
if not host:
|
||||
@ -45,114 +46,110 @@ def fetch_stats(ctx, host, port):
|
||||
if not port:
|
||||
port = CONF.admin.web_port
|
||||
|
||||
msg = f"Fetching stats from {host}:{port}"
|
||||
msg = f'Fetching stats from {host}:{port}'
|
||||
console.print(msg)
|
||||
with console.status(msg):
|
||||
response = requests.get(f"http://{host}:{port}/stats", timeout=120)
|
||||
response = requests.get(f'http://{host}:{port}/stats', timeout=120)
|
||||
if not response:
|
||||
console.print(
|
||||
f"Failed to fetch stats from {host}:{port}?",
|
||||
style="bold red",
|
||||
f'Failed to fetch stats from {host}:{port}?',
|
||||
style='bold red',
|
||||
)
|
||||
return
|
||||
|
||||
stats = response.json()
|
||||
if not stats:
|
||||
console.print(
|
||||
f"Failed to fetch stats from aprsd admin ui at {host}:{port}",
|
||||
style="bold red",
|
||||
f'Failed to fetch stats from aprsd admin ui at {host}:{port}',
|
||||
style='bold red',
|
||||
)
|
||||
return
|
||||
|
||||
aprsd_title = (
|
||||
"APRSD "
|
||||
f"[bold cyan]v{stats['APRSDStats']['version']}[/] "
|
||||
f"Callsign [bold green]{stats['APRSDStats']['callsign']}[/] "
|
||||
f"Uptime [bold yellow]{stats['APRSDStats']['uptime']}[/]"
|
||||
'APRSD '
|
||||
f'[bold cyan]v{stats["APRSDStats"]["version"]}[/] '
|
||||
f'Callsign [bold green]{stats["APRSDStats"]["callsign"]}[/] '
|
||||
f'Uptime [bold yellow]{stats["APRSDStats"]["uptime"]}[/]'
|
||||
)
|
||||
|
||||
console.rule(f"Stats from {host}:{port}")
|
||||
console.print("\n\n")
|
||||
console.rule(f'Stats from {host}:{port}')
|
||||
console.print('\n\n')
|
||||
console.rule(aprsd_title)
|
||||
|
||||
# Show the connection to APRS
|
||||
# It can be a connection to an APRS-IS server or a local TNC via KISS or KISSTCP
|
||||
if "aprs-is" in stats:
|
||||
title = f"APRS-IS Connection {stats['APRSClientStats']['server_string']}"
|
||||
if 'aprs-is' in stats:
|
||||
title = f'APRS-IS Connection {stats["APRSClientStats"]["server_string"]}'
|
||||
table = Table(title=title)
|
||||
table.add_column("Key")
|
||||
table.add_column("Value")
|
||||
for key, value in stats["APRSClientStats"].items():
|
||||
table.add_column('Key')
|
||||
table.add_column('Value')
|
||||
for key, value in stats['APRSClientStats'].items():
|
||||
table.add_row(key, value)
|
||||
console.print(table)
|
||||
|
||||
threads_table = Table(title="Threads")
|
||||
threads_table.add_column("Name")
|
||||
threads_table.add_column("Alive?")
|
||||
for name, alive in stats["APRSDThreadList"].items():
|
||||
threads_table = Table(title='Threads')
|
||||
threads_table.add_column('Name')
|
||||
threads_table.add_column('Alive?')
|
||||
for name, alive in stats['APRSDThreadList'].items():
|
||||
threads_table.add_row(name, str(alive))
|
||||
|
||||
console.print(threads_table)
|
||||
|
||||
packet_totals = Table(title="Packet Totals")
|
||||
packet_totals.add_column("Key")
|
||||
packet_totals.add_column("Value")
|
||||
packet_totals.add_row("Total Received", str(stats["PacketList"]["rx"]))
|
||||
packet_totals.add_row("Total Sent", str(stats["PacketList"]["tx"]))
|
||||
packet_totals = Table(title='Packet Totals')
|
||||
packet_totals.add_column('Key')
|
||||
packet_totals.add_column('Value')
|
||||
packet_totals.add_row('Total Received', str(stats['PacketList']['rx']))
|
||||
packet_totals.add_row('Total Sent', str(stats['PacketList']['tx']))
|
||||
console.print(packet_totals)
|
||||
|
||||
# Show each of the packet types
|
||||
packets_table = Table(title="Packets By Type")
|
||||
packets_table.add_column("Packet Type")
|
||||
packets_table.add_column("TX")
|
||||
packets_table.add_column("RX")
|
||||
for key, value in stats["PacketList"]["packets"].items():
|
||||
packets_table.add_row(key, str(value["tx"]), str(value["rx"]))
|
||||
packets_table = Table(title='Packets By Type')
|
||||
packets_table.add_column('Packet Type')
|
||||
packets_table.add_column('TX')
|
||||
packets_table.add_column('RX')
|
||||
for key, value in stats['PacketList']['packets'].items():
|
||||
packets_table.add_row(key, str(value['tx']), str(value['rx']))
|
||||
|
||||
console.print(packets_table)
|
||||
|
||||
if "plugins" in stats:
|
||||
count = len(stats["PluginManager"])
|
||||
plugins_table = Table(title=f"Plugins ({count})")
|
||||
plugins_table.add_column("Plugin")
|
||||
plugins_table.add_column("Enabled")
|
||||
plugins_table.add_column("Version")
|
||||
plugins_table.add_column("TX")
|
||||
plugins_table.add_column("RX")
|
||||
plugins = stats["PluginManager"]
|
||||
for key, value in plugins.items():
|
||||
if 'plugins' in stats:
|
||||
count = len(stats['PluginManager'])
|
||||
plugins_table = Table(title=f'Plugins ({count})')
|
||||
plugins_table.add_column('Plugin')
|
||||
plugins_table.add_column('Enabled')
|
||||
plugins_table.add_column('Version')
|
||||
plugins_table.add_column('TX')
|
||||
plugins_table.add_column('RX')
|
||||
plugins = stats['PluginManager']
|
||||
for key, _ in plugins.items():
|
||||
plugins_table.add_row(
|
||||
key,
|
||||
str(plugins[key]["enabled"]),
|
||||
plugins[key]["version"],
|
||||
str(plugins[key]["tx"]),
|
||||
str(plugins[key]["rx"]),
|
||||
str(plugins[key]['enabled']),
|
||||
plugins[key]['version'],
|
||||
str(plugins[key]['tx']),
|
||||
str(plugins[key]['rx']),
|
||||
)
|
||||
|
||||
console.print(plugins_table)
|
||||
|
||||
seen_list = stats.get("SeenList")
|
||||
|
||||
if seen_list:
|
||||
if seen_list := stats.get('SeenList'):
|
||||
count = len(seen_list)
|
||||
seen_table = Table(title=f"Seen List ({count})")
|
||||
seen_table.add_column("Callsign")
|
||||
seen_table.add_column("Message Count")
|
||||
seen_table.add_column("Last Heard")
|
||||
seen_table = Table(title=f'Seen List ({count})')
|
||||
seen_table.add_column('Callsign')
|
||||
seen_table.add_column('Message Count')
|
||||
seen_table.add_column('Last Heard')
|
||||
for key, value in seen_list.items():
|
||||
seen_table.add_row(key, str(value["count"]), value["last"])
|
||||
seen_table.add_row(key, str(value['count']), value['last'])
|
||||
|
||||
console.print(seen_table)
|
||||
|
||||
watch_list = stats.get("WatchList")
|
||||
|
||||
if watch_list:
|
||||
if watch_list := stats.get('WatchList'):
|
||||
count = len(watch_list)
|
||||
watch_table = Table(title=f"Watch List ({count})")
|
||||
watch_table.add_column("Callsign")
|
||||
watch_table.add_column("Last Heard")
|
||||
watch_table = Table(title=f'Watch List ({count})')
|
||||
watch_table.add_column('Callsign')
|
||||
watch_table.add_column('Last Heard')
|
||||
for key, value in watch_list.items():
|
||||
watch_table.add_row(key, value["last"])
|
||||
watch_table.add_row(key, value['last'])
|
||||
|
||||
console.print(watch_table)
|
||||
|
||||
@ -160,27 +157,27 @@ def fetch_stats(ctx, host, port):
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.option(
|
||||
"--raw",
|
||||
'--raw',
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Dump raw stats instead of formatted output.",
|
||||
help='Dump raw stats instead of formatted output.',
|
||||
)
|
||||
@click.option(
|
||||
"--show-section",
|
||||
default=["All"],
|
||||
help="Show specific sections of the stats. "
|
||||
" Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,"
|
||||
" PacketList, SeenList, WatchList",
|
||||
'--show-section',
|
||||
default=['All'],
|
||||
help='Show specific sections of the stats. '
|
||||
' Choices: All, APRSDStats, APRSDThreadList, APRSClientStats,'
|
||||
' PacketList, SeenList, WatchList',
|
||||
multiple=True,
|
||||
type=click.Choice(
|
||||
[
|
||||
"All",
|
||||
"APRSDStats",
|
||||
"APRSDThreadList",
|
||||
"APRSClientStats",
|
||||
"PacketList",
|
||||
"SeenList",
|
||||
"WatchList",
|
||||
'All',
|
||||
'APRSDStats',
|
||||
'APRSDThreadList',
|
||||
'APRSClientStats',
|
||||
'PacketList',
|
||||
'SeenList',
|
||||
'WatchList',
|
||||
],
|
||||
case_sensitive=False,
|
||||
),
|
||||
@ -190,122 +187,122 @@ def fetch_stats(ctx, host, port):
|
||||
def dump_stats(ctx, raw, show_section):
|
||||
"""Dump the current stats from the running APRSD instance."""
|
||||
console = Console()
|
||||
console.print(f"APRSD Dump-Stats started version: {aprsd.__version__}")
|
||||
console.print(f'APRSD Dump-Stats started version: {aprsd.__version__}')
|
||||
|
||||
with console.status("Dumping stats"):
|
||||
with console.status('Dumping stats'):
|
||||
ss = StatsStore()
|
||||
ss.load()
|
||||
stats = ss.data
|
||||
if raw:
|
||||
if "All" in show_section:
|
||||
if 'All' in show_section:
|
||||
console.print(stats)
|
||||
return
|
||||
else:
|
||||
for section in show_section:
|
||||
console.print(f"Dumping {section} section:")
|
||||
console.print(f'Dumping {section} section:')
|
||||
console.print(stats[section])
|
||||
return
|
||||
|
||||
t = Table(title="APRSD Stats")
|
||||
t.add_column("Key")
|
||||
t.add_column("Value")
|
||||
for key, value in stats["APRSDStats"].items():
|
||||
t = Table(title='APRSD Stats')
|
||||
t.add_column('Key')
|
||||
t.add_column('Value')
|
||||
for key, value in stats['APRSDStats'].items():
|
||||
t.add_row(key, str(value))
|
||||
|
||||
if "All" in show_section or "APRSDStats" in show_section:
|
||||
if 'All' in show_section or 'APRSDStats' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# Show the thread list
|
||||
t = Table(title="Thread List")
|
||||
t.add_column("Name")
|
||||
t.add_column("Class")
|
||||
t.add_column("Alive?")
|
||||
t.add_column("Loop Count")
|
||||
t.add_column("Age")
|
||||
for name, value in stats["APRSDThreadList"].items():
|
||||
t = Table(title='Thread List')
|
||||
t.add_column('Name')
|
||||
t.add_column('Class')
|
||||
t.add_column('Alive?')
|
||||
t.add_column('Loop Count')
|
||||
t.add_column('Age')
|
||||
for name, value in stats['APRSDThreadList'].items():
|
||||
t.add_row(
|
||||
name,
|
||||
value["class"],
|
||||
str(value["alive"]),
|
||||
str(value["loop_count"]),
|
||||
str(value["age"]),
|
||||
value['class'],
|
||||
str(value['alive']),
|
||||
str(value['loop_count']),
|
||||
str(value['age']),
|
||||
)
|
||||
|
||||
if "All" in show_section or "APRSDThreadList" in show_section:
|
||||
if 'All' in show_section or 'APRSDThreadList' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# Show the plugins
|
||||
t = Table(title="Plugin List")
|
||||
t.add_column("Name")
|
||||
t.add_column("Enabled")
|
||||
t.add_column("Version")
|
||||
t.add_column("TX")
|
||||
t.add_column("RX")
|
||||
for name, value in stats["PluginManager"].items():
|
||||
t = Table(title='Plugin List')
|
||||
t.add_column('Name')
|
||||
t.add_column('Enabled')
|
||||
t.add_column('Version')
|
||||
t.add_column('TX')
|
||||
t.add_column('RX')
|
||||
for name, value in stats['PluginManager'].items():
|
||||
t.add_row(
|
||||
name,
|
||||
str(value["enabled"]),
|
||||
value["version"],
|
||||
str(value["tx"]),
|
||||
str(value["rx"]),
|
||||
str(value['enabled']),
|
||||
value['version'],
|
||||
str(value['tx']),
|
||||
str(value['rx']),
|
||||
)
|
||||
|
||||
if "All" in show_section or "PluginManager" in show_section:
|
||||
if 'All' in show_section or 'PluginManager' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# Now show the client stats
|
||||
t = Table(title="Client Stats")
|
||||
t.add_column("Key")
|
||||
t.add_column("Value")
|
||||
for key, value in stats["APRSClientStats"].items():
|
||||
t = Table(title='Client Stats')
|
||||
t.add_column('Key')
|
||||
t.add_column('Value')
|
||||
for key, value in stats['APRSClientStats'].items():
|
||||
t.add_row(key, str(value))
|
||||
|
||||
if "All" in show_section or "APRSClientStats" in show_section:
|
||||
if 'All' in show_section or 'APRSClientStats' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# now show the packet list
|
||||
packet_list = stats.get("PacketList")
|
||||
t = Table(title="Packet List")
|
||||
t.add_column("Key")
|
||||
t.add_column("Value")
|
||||
t.add_row("Total Received", str(packet_list["rx"]))
|
||||
t.add_row("Total Sent", str(packet_list["tx"]))
|
||||
packet_list = stats.get('PacketList')
|
||||
t = Table(title='Packet List')
|
||||
t.add_column('Key')
|
||||
t.add_column('Value')
|
||||
t.add_row('Total Received', str(packet_list['rx']))
|
||||
t.add_row('Total Sent', str(packet_list['tx']))
|
||||
|
||||
if "All" in show_section or "PacketList" in show_section:
|
||||
if 'All' in show_section or 'PacketList' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# now show the seen list
|
||||
seen_list = stats.get("SeenList")
|
||||
seen_list = stats.get('SeenList')
|
||||
sorted_seen_list = sorted(
|
||||
seen_list.items(),
|
||||
)
|
||||
t = Table(title="Seen List")
|
||||
t.add_column("Callsign")
|
||||
t.add_column("Message Count")
|
||||
t.add_column("Last Heard")
|
||||
t = Table(title='Seen List')
|
||||
t.add_column('Callsign')
|
||||
t.add_column('Message Count')
|
||||
t.add_column('Last Heard')
|
||||
for key, value in sorted_seen_list:
|
||||
t.add_row(
|
||||
key,
|
||||
str(value["count"]),
|
||||
str(value["last"]),
|
||||
str(value['count']),
|
||||
str(value['last']),
|
||||
)
|
||||
|
||||
if "All" in show_section or "SeenList" in show_section:
|
||||
if 'All' in show_section or 'SeenList' in show_section:
|
||||
console.print(t)
|
||||
|
||||
# now show the watch list
|
||||
watch_list = stats.get("WatchList")
|
||||
watch_list = stats.get('WatchList')
|
||||
sorted_watch_list = sorted(
|
||||
watch_list.items(),
|
||||
)
|
||||
t = Table(title="Watch List")
|
||||
t.add_column("Callsign")
|
||||
t.add_column("Last Heard")
|
||||
t = Table(title='Watch List')
|
||||
t.add_column('Callsign')
|
||||
t.add_column('Last Heard')
|
||||
for key, value in sorted_watch_list:
|
||||
t.add_row(
|
||||
key,
|
||||
str(value["last"]),
|
||||
str(value['last']),
|
||||
)
|
||||
|
||||
if "All" in show_section or "WatchList" in show_section:
|
||||
if 'All' in show_section or 'WatchList' in show_section:
|
||||
console.print(t)
|
||||
|
@ -13,13 +13,15 @@ from oslo_config import cfg
|
||||
from rich.console import Console
|
||||
|
||||
import aprsd
|
||||
from aprsd import cli_helper
|
||||
from aprsd import conf # noqa
|
||||
from aprsd import ( # noqa: F401
|
||||
cli_helper,
|
||||
conf,
|
||||
)
|
||||
|
||||
# local imports here
|
||||
from aprsd.main import cli
|
||||
from aprsd.threads import stats as stats_threads
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
CONF = cfg.CONF
|
||||
|
@ -4,12 +4,9 @@ import inspect
|
||||
import logging
|
||||
import os
|
||||
import pkgutil
|
||||
import re
|
||||
import sys
|
||||
from traceback import print_tb
|
||||
from urllib.parse import urljoin
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
import click
|
||||
import requests
|
||||
from rich.console import Console
|
||||
@ -20,17 +17,14 @@ from thesmuggler import smuggle
|
||||
from aprsd import cli_helper
|
||||
from aprsd import plugin as aprsd_plugin
|
||||
from aprsd.main import cli
|
||||
from aprsd.plugins import (
|
||||
email, fortune, location, notify, ping, time, version, weather,
|
||||
)
|
||||
from aprsd.plugins import fortune, notify, ping, time, version, weather
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
PYPI_URL = "https://pypi.org/search/"
|
||||
LOG = logging.getLogger('APRSD')
|
||||
PYPI_URL = 'https://pypi.org/search/'
|
||||
|
||||
|
||||
def onerror(name):
|
||||
print(f"Error importing module {name}")
|
||||
print(f'Error importing module {name}')
|
||||
type, value, traceback = sys.exc_info()
|
||||
print_tb(traceback)
|
||||
|
||||
@ -46,19 +40,19 @@ def is_plugin(obj):
|
||||
def plugin_type(obj):
|
||||
for c in inspect.getmro(obj):
|
||||
if issubclass(c, aprsd_plugin.APRSDRegexCommandPluginBase):
|
||||
return "RegexCommand"
|
||||
return 'RegexCommand'
|
||||
if issubclass(c, aprsd_plugin.APRSDWatchListPluginBase):
|
||||
return "WatchList"
|
||||
return 'WatchList'
|
||||
if issubclass(c, aprsd_plugin.APRSDPluginBase):
|
||||
return "APRSDPluginBase"
|
||||
return 'APRSDPluginBase'
|
||||
|
||||
return "Unknown"
|
||||
return 'Unknown'
|
||||
|
||||
|
||||
def walk_package(package):
|
||||
return pkgutil.walk_packages(
|
||||
package.__path__,
|
||||
package.__name__ + ".",
|
||||
package.__name__ + '.',
|
||||
onerror=onerror,
|
||||
)
|
||||
|
||||
@ -68,22 +62,23 @@ def get_module_info(package_name, module_name, module_path):
|
||||
return None
|
||||
|
||||
dir_path = os.path.realpath(module_path)
|
||||
pattern = "*.py"
|
||||
pattern = '*.py'
|
||||
|
||||
obj_list = []
|
||||
|
||||
for path, _subdirs, files in os.walk(dir_path):
|
||||
for name in files:
|
||||
if fnmatch.fnmatch(name, pattern):
|
||||
module = smuggle(f"{path}/{name}")
|
||||
module = smuggle(f'{path}/{name}')
|
||||
for mem_name, obj in inspect.getmembers(module):
|
||||
if inspect.isclass(obj) and is_plugin(obj):
|
||||
obj_list.append(
|
||||
{
|
||||
"package": package_name,
|
||||
"name": mem_name, "obj": obj,
|
||||
"version": obj.version,
|
||||
"path": f"{'.'.join([module_name, obj.__name__])}",
|
||||
'package': package_name,
|
||||
'name': mem_name,
|
||||
'obj': obj,
|
||||
'version': obj.version,
|
||||
'path': f'{".".join([module_name, obj.__name__])}',
|
||||
},
|
||||
)
|
||||
|
||||
@ -94,18 +89,18 @@ def _get_installed_aprsd_items():
|
||||
# installed plugins
|
||||
plugins = {}
|
||||
extensions = {}
|
||||
for finder, name, ispkg in pkgutil.iter_modules():
|
||||
if name.startswith("aprsd_"):
|
||||
print(f"Found aprsd_ module: {name}")
|
||||
if ispkg:
|
||||
module = importlib.import_module(name)
|
||||
pkgs = walk_package(module)
|
||||
for pkg in pkgs:
|
||||
pkg_info = get_module_info(module.__name__, pkg.name, module.__path__[0])
|
||||
if "plugin" in name:
|
||||
plugins[name] = pkg_info
|
||||
elif "extension" in name:
|
||||
extensions[name] = pkg_info
|
||||
for _finder, name, ispkg in pkgutil.iter_modules():
|
||||
if ispkg and name.startswith('aprsd_'):
|
||||
module = importlib.import_module(name)
|
||||
pkgs = walk_package(module)
|
||||
for pkg in pkgs:
|
||||
pkg_info = get_module_info(
|
||||
module.__name__, pkg.name, module.__path__[0]
|
||||
)
|
||||
if 'plugin' in name:
|
||||
plugins[name] = pkg_info
|
||||
elif 'extension' in name:
|
||||
extensions[name] = pkg_info
|
||||
return plugins, extensions
|
||||
|
||||
|
||||
@ -122,7 +117,7 @@ def get_installed_extensions():
|
||||
|
||||
|
||||
def show_built_in_plugins(console):
|
||||
modules = [email, fortune, location, notify, ping, time, version, weather]
|
||||
modules = [fortune, notify, ping, time, version, weather]
|
||||
plugins = []
|
||||
|
||||
for module in modules:
|
||||
@ -131,132 +126,141 @@ def show_built_in_plugins(console):
|
||||
cls = entry[1]
|
||||
if issubclass(cls, aprsd_plugin.APRSDPluginBase):
|
||||
info = {
|
||||
"name": cls.__qualname__,
|
||||
"path": f"{cls.__module__}.{cls.__qualname__}",
|
||||
"version": cls.version,
|
||||
"docstring": cls.__doc__,
|
||||
"short_desc": cls.short_description,
|
||||
'name': cls.__qualname__,
|
||||
'path': f'{cls.__module__}.{cls.__qualname__}',
|
||||
'version': cls.version,
|
||||
'docstring': cls.__doc__,
|
||||
'short_desc': cls.short_description,
|
||||
}
|
||||
|
||||
if issubclass(cls, aprsd_plugin.APRSDRegexCommandPluginBase):
|
||||
info["command_regex"] = cls.command_regex
|
||||
info["type"] = "RegexCommand"
|
||||
info['command_regex'] = cls.command_regex
|
||||
info['type'] = 'RegexCommand'
|
||||
|
||||
if issubclass(cls, aprsd_plugin.APRSDWatchListPluginBase):
|
||||
info["type"] = "WatchList"
|
||||
info['type'] = 'WatchList'
|
||||
|
||||
plugins.append(info)
|
||||
|
||||
plugins = sorted(plugins, key=lambda i: i["name"])
|
||||
plugins = sorted(plugins, key=lambda i: i['name'])
|
||||
|
||||
table = Table(
|
||||
title="[not italic]:snake:[/] [bold][magenta]APRSD Built-in Plugins [not italic]:snake:[/]",
|
||||
title='[not italic]:snake:[/] [bold][magenta]APRSD Built-in Plugins [not italic]:snake:[/]',
|
||||
)
|
||||
table.add_column("Plugin Name", style="cyan", no_wrap=True)
|
||||
table.add_column("Info", style="bold yellow")
|
||||
table.add_column("Type", style="bold green")
|
||||
table.add_column("Plugin Path", style="bold blue")
|
||||
table.add_column('Plugin Name', style='cyan', no_wrap=True)
|
||||
table.add_column('Info', style='bold yellow')
|
||||
table.add_column('Type', style='bold green')
|
||||
table.add_column('Plugin Path', style='bold blue')
|
||||
for entry in plugins:
|
||||
table.add_row(entry["name"], entry["short_desc"], entry["type"], entry["path"])
|
||||
table.add_row(entry['name'], entry['short_desc'], entry['type'], entry['path'])
|
||||
|
||||
console.print(table)
|
||||
|
||||
|
||||
def _get_pypi_packages():
query = "aprsd"
snippets = []
s = requests.Session()
for page in range(1, 3):
params = {"q": query, "page": page}
r = s.get(PYPI_URL, params=params)
soup = BeautifulSoup(r.text, "html.parser")
snippets += soup.select('a[class*="snippet"]')
if not hasattr(s, "start_url"):
s.start_url = r.url.rsplit("&page", maxsplit=1).pop(0)
if simple_r := requests.get(
'https://pypi.org/simple',
headers={'Accept': 'application/vnd.pypi.simple.v1+json'},
):
simple_response = simple_r.json()
else:
simple_response = {}

return snippets
key = 'aprsd'
matches = [
p['name'] for p in simple_response['projects'] if p['name'].startswith(key)
]

packages = []
for pkg in matches:
# Get info for first match
if r := requests.get(
f'https://pypi.org/pypi/{pkg}/json',
headers={'Accept': 'application/json'},
):
packages.append(r.json())

return packages

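`_get_pypi_packages()` now uses PyPI's JSON APIs (the Simple index with the `application/vnd.pypi.simple.v1+json` Accept header, then `https://pypi.org/pypi/<name>/json` per project) instead of scraping the search page with BeautifulSoup. A small standalone sketch of the same two calls, assuming only `requests`:

```python
import requests


def list_aprsd_packages(prefix="aprsd"):
    """Print aprsd-related packages using the same PyPI endpoints as above."""
    index = requests.get(
        "https://pypi.org/simple",
        headers={"Accept": "application/vnd.pypi.simple.v1+json"},
        timeout=30,
    ).json()
    names = [p["name"] for p in index["projects"] if p["name"].startswith(prefix)]

    for name in names:
        info = requests.get(f"https://pypi.org/pypi/{name}/json", timeout=30).json()
        print(name, info["info"]["version"], "-", info["info"]["summary"])


if __name__ == "__main__":
    list_aprsd_packages()
```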
def show_pypi_plugins(installed_plugins, console):
|
||||
snippets = _get_pypi_packages()
|
||||
packages = _get_pypi_packages()
|
||||
|
||||
title = Text.assemble(
|
||||
("Pypi.org APRSD Installable Plugin Packages\n\n", "bold magenta"),
|
||||
("Install any of the following plugins with\n", "bold yellow"),
|
||||
("'pip install ", "bold white"),
|
||||
("<Plugin Package Name>'", "cyan"),
|
||||
('Pypi.org APRSD Installable Plugin Packages\n\n', 'bold magenta'),
|
||||
('Install any of the following plugins with\n', 'bold yellow'),
|
||||
("'pip install ", 'bold white'),
|
||||
("<Plugin Package Name>'", 'cyan'),
|
||||
)
|
||||
|
||||
table = Table(title=title)
|
||||
table.add_column("Plugin Package Name", style="cyan", no_wrap=True)
|
||||
table.add_column("Description", style="yellow")
|
||||
table.add_column("Version", style="yellow", justify="center")
|
||||
table.add_column("Released", style="bold green", justify="center")
|
||||
table.add_column("Installed?", style="red", justify="center")
|
||||
for snippet in snippets:
|
||||
link = urljoin(PYPI_URL, snippet.get("href"))
|
||||
package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
|
||||
version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
|
||||
created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
|
||||
description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
|
||||
emoji = ":open_file_folder:"
|
||||
table.add_column('Plugin Package Name', style='cyan', no_wrap=True)
|
||||
table.add_column('Description', style='yellow')
|
||||
table.add_column('Version', style='yellow', justify='center')
|
||||
table.add_column('Released', style='bold green', justify='center')
|
||||
table.add_column('Installed?', style='red', justify='center')
|
||||
emoji = ':open_file_folder:'
|
||||
for package in packages:
|
||||
link = package['info']['package_url']
|
||||
version = package['info']['version']
|
||||
package_name = package['info']['name']
|
||||
description = package['info']['summary']
|
||||
created = package['releases'][version][0]['upload_time']
|
||||
|
||||
if "aprsd-" not in package or "-plugin" not in package:
|
||||
if 'aprsd-' not in package_name or '-plugin' not in package_name:
|
||||
continue
|
||||
|
||||
under = package.replace("-", "_")
|
||||
if under in installed_plugins:
|
||||
installed = "Yes"
|
||||
else:
|
||||
installed = "No"
|
||||
|
||||
under = package_name.replace('-', '_')
|
||||
installed = 'Yes' if under in installed_plugins else 'No'
|
||||
table.add_row(
|
||||
f"[link={link}]{emoji}[/link] {package}",
|
||||
description, version, created, installed,
|
||||
f'[link={link}]{emoji}[/link] {package_name}',
|
||||
description,
|
||||
version,
|
||||
created,
|
||||
installed,
|
||||
)
|
||||
|
||||
console.print("\n")
|
||||
console.print('\n')
|
||||
console.print(table)
|
||||
|
||||
|
||||
def show_pypi_extensions(installed_extensions, console):
|
||||
snippets = _get_pypi_packages()
|
||||
packages = _get_pypi_packages()
|
||||
|
||||
title = Text.assemble(
|
||||
("Pypi.org APRSD Installable Extension Packages\n\n", "bold magenta"),
|
||||
("Install any of the following extensions by running\n", "bold yellow"),
|
||||
("'pip install ", "bold white"),
|
||||
("<Plugin Package Name>'", "cyan"),
|
||||
('Pypi.org APRSD Installable Extension Packages\n\n', 'bold magenta'),
|
||||
('Install any of the following extensions by running\n', 'bold yellow'),
|
||||
("'pip install ", 'bold white'),
|
||||
("<Plugin Package Name>'", 'cyan'),
|
||||
)
|
||||
table = Table(title=title)
|
||||
table.add_column("Extension Package Name", style="cyan", no_wrap=True)
|
||||
table.add_column("Description", style="yellow")
|
||||
table.add_column("Version", style="yellow", justify="center")
|
||||
table.add_column("Released", style="bold green", justify="center")
|
||||
table.add_column("Installed?", style="red", justify="center")
|
||||
for snippet in snippets:
|
||||
link = urljoin(PYPI_URL, snippet.get("href"))
|
||||
package = re.sub(r"\s+", " ", snippet.select_one('span[class*="name"]').text.strip())
|
||||
version = re.sub(r"\s+", " ", snippet.select_one('span[class*="version"]').text.strip())
|
||||
created = re.sub(r"\s+", " ", snippet.select_one('span[class*="created"]').text.strip())
|
||||
description = re.sub(r"\s+", " ", snippet.select_one('p[class*="description"]').text.strip())
|
||||
emoji = ":open_file_folder:"
|
||||
table.add_column('Extension Package Name', style='cyan', no_wrap=True)
|
||||
table.add_column('Description', style='yellow')
|
||||
table.add_column('Version', style='yellow', justify='center')
|
||||
table.add_column('Released', style='bold green', justify='center')
|
||||
table.add_column('Installed?', style='red', justify='center')
|
||||
emoji = ':open_file_folder:'
|
||||
|
||||
if "aprsd-" not in package or "-extension" not in package:
|
||||
for package in packages:
|
||||
link = package['info']['package_url']
|
||||
version = package['info']['version']
|
||||
package_name = package['info']['name']
|
||||
description = package['info']['summary']
|
||||
created = package['releases'][version][0]['upload_time']
|
||||
if 'aprsd-' not in package_name or '-extension' not in package_name:
|
||||
continue
|
||||
|
||||
under = package.replace("-", "_")
|
||||
if under in installed_extensions:
|
||||
installed = "Yes"
|
||||
else:
|
||||
installed = "No"
|
||||
|
||||
under = package_name.replace('-', '_')
|
||||
installed = 'Yes' if under in installed_extensions else 'No'
|
||||
table.add_row(
|
||||
f"[link={link}]{emoji}[/link] {package}",
|
||||
description, version, created, installed,
|
||||
f'[link={link}]{emoji}[/link] {package_name}',
|
||||
description,
|
||||
version,
|
||||
created,
|
||||
installed,
|
||||
)
|
||||
|
||||
console.print("\n")
|
||||
console.print('\n')
|
||||
console.print(table)
|
||||
|
||||
|
||||
@ -265,24 +269,24 @@ def show_installed_plugins(installed_plugins, console):
|
||||
return
|
||||
|
||||
table = Table(
|
||||
title="[not italic]:snake:[/] [bold][magenta]APRSD Installed 3rd party Plugins [not italic]:snake:[/]",
|
||||
title='[not italic]:snake:[/] [bold][magenta]APRSD Installed 3rd party Plugins [not italic]:snake:[/]',
|
||||
)
|
||||
table.add_column("Package Name", style=" bold white", no_wrap=True)
|
||||
table.add_column("Plugin Name", style="cyan", no_wrap=True)
|
||||
table.add_column("Version", style="yellow", justify="center")
|
||||
table.add_column("Type", style="bold green")
|
||||
table.add_column("Plugin Path", style="bold blue")
|
||||
table.add_column('Package Name', style=' bold white', no_wrap=True)
|
||||
table.add_column('Plugin Name', style='cyan', no_wrap=True)
|
||||
table.add_column('Version', style='yellow', justify='center')
|
||||
table.add_column('Type', style='bold green')
|
||||
table.add_column('Plugin Path', style='bold blue')
|
||||
for name in installed_plugins:
|
||||
for plugin in installed_plugins[name]:
|
||||
table.add_row(
|
||||
name.replace("_", "-"),
|
||||
plugin["name"],
|
||||
plugin["version"],
|
||||
plugin_type(plugin["obj"]),
|
||||
plugin["path"],
|
||||
name.replace('_', '-'),
|
||||
plugin['name'],
|
||||
plugin['version'],
|
||||
plugin_type(plugin['obj']),
|
||||
plugin['path'],
|
||||
)
|
||||
|
||||
console.print("\n")
|
||||
console.print('\n')
|
||||
console.print(table)
|
||||
|
||||
|
||||
@ -294,14 +298,14 @@ def list_plugins(ctx):
|
||||
"""List the built in plugins available to APRSD."""
|
||||
console = Console()
|
||||
|
||||
with console.status("Show Built-in Plugins") as status:
|
||||
with console.status('Show Built-in Plugins') as status:
|
||||
show_built_in_plugins(console)
|
||||
|
||||
status.update("Fetching pypi.org plugins")
|
||||
status.update('Fetching pypi.org plugins')
|
||||
installed_plugins = get_installed_plugins()
|
||||
show_pypi_plugins(installed_plugins, console)
|
||||
|
||||
status.update("Looking for installed APRSD plugins")
|
||||
status.update('Looking for installed APRSD plugins')
|
||||
show_installed_plugins(installed_plugins, console)
|
||||
|
||||
|
||||
@ -313,7 +317,9 @@ def list_extensions(ctx):
|
||||
"""List the built in plugins available to APRSD."""
|
||||
console = Console()
|
||||
|
||||
with console.status("Show APRSD Extensions") as status:
|
||||
status.update("Fetching pypi.org APRSD Extensions")
|
||||
with console.status('Show APRSD Extensions') as status:
|
||||
status.update('Fetching pypi.org APRSD Extensions')
|
||||
|
||||
status.update('Looking for installed APRSD Extensions')
|
||||
installed_extensions = get_installed_extensions()
|
||||
show_pypi_extensions(installed_extensions, console)
|
||||
|
@ -23,11 +23,10 @@ from aprsd.packets import collector as packet_collector
|
||||
from aprsd.packets import log as packet_log
|
||||
from aprsd.packets import seen_list
|
||||
from aprsd.stats import collector
|
||||
from aprsd.threads import keep_alive, rx
|
||||
from aprsd.threads import keepalive, rx
|
||||
from aprsd.threads import stats as stats_thread
|
||||
from aprsd.threads.aprsd import APRSDThread
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -51,8 +50,12 @@ def signal_handler(sig, frame):
|
||||
|
||||
class APRSDListenThread(rx.APRSDRXThread):
|
||||
def __init__(
|
||||
self, packet_queue, packet_filter=None, plugin_manager=None,
|
||||
enabled_plugins=[], log_packets=False,
|
||||
self,
|
||||
packet_queue,
|
||||
packet_filter=None,
|
||||
plugin_manager=None,
|
||||
enabled_plugins=[],
|
||||
log_packets=False,
|
||||
):
|
||||
super().__init__(packet_queue)
|
||||
self.packet_filter = packet_filter
|
||||
@ -110,6 +113,7 @@ class ListenStatsThread(APRSDThread):
stats_json = collector.Collector().collect()
stats = stats_json["PacketList"]
total_rx = stats["rx"]
packet_count = len(stats["packets"])
rx_delta = total_rx - self._last_total_rx
rate = rx_delta / 10

@ -117,7 +121,8 @@ class ListenStatsThread(APRSDThread):
LOGU.opt(colors=True).info(
f"<green>RX Rate: {rate} pps</green> "
f"<yellow>Total RX: {total_rx}</yellow> "
f"<red>RX Last 10 secs: {rx_delta}</red>",
f"<red>RX Last 10 secs: {rx_delta}</red> "
f"<white>Packets in PacketList: {packet_count}</white>",
)
self._last_total_rx = total_rx

@ -265,7 +270,7 @@ def listen(
LOG.debug(f"Filter by '{filter}'")
aprs_client.set_filter(filter)

keepalive = keep_alive.KeepAliveThread()
keepalive_thread = keepalive.KeepAliveThread()

if not CONF.enable_seen_list:
# just deregister the class from the packet collector
@ -309,9 +314,9 @@ def listen(
listen_stats = ListenStatsThread()
listen_stats.start()

keepalive.start()
keepalive_thread.start()
LOG.debug("keepalive Join")
keepalive.join()
keepalive_thread.join()
LOG.debug("listen_thread Join")
listen_thread.join()
stats.join()
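`ListenStatsThread` samples the packet counters on a 10-second cadence, so the displayed rate is simply the RX delta divided by the interval. A tiny sketch of that calculation, assuming the fixed 10-second loop:

```python
class RxRate:
    """Sketch of the rate math above; assumes one sample every 10 seconds."""

    INTERVAL = 10

    def __init__(self):
        self._last_total_rx = 0

    def sample(self, total_rx: int) -> float:
        delta = total_rx - self._last_total_rx
        self._last_total_rx = total_rx
        return delta / self.INTERVAL  # packets per second


rate = RxRate()
print(rate.sample(120))  # 12.0 pps over the first window
```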
@ -6,22 +6,54 @@ import click
from oslo_config import cfg

import aprsd
from aprsd import cli_helper
from aprsd import cli_helper, plugin, threads, utils
from aprsd import main as aprsd_main
from aprsd import plugin, threads, utils
from aprsd.client import client_factory
from aprsd.main import cli
from aprsd.packets import collector as packet_collector
from aprsd.packets import seen_list
from aprsd.threads import keep_alive, log_monitor, registry, rx
from aprsd.threads import aprsd as aprsd_threads
from aprsd.threads import keepalive, registry, rx, tx
from aprsd.threads import stats as stats_thread
from aprsd.threads import tx

from aprsd.utils import singleton

CONF = cfg.CONF
LOG = logging.getLogger("APRSD")


@singleton
class ServerThreads:
"""Registry for threads that the server command runs.

This enables extensions to register a thread to run during
the server command.

"""

def __init__(self):
self.threads: list[aprsd_threads.APRSDThread] = []

def register(self, thread: aprsd_threads.APRSDThread):
if not isinstance(thread, aprsd_threads.APRSDThread):
raise TypeError(f"Thread {thread} is not an APRSDThread")
self.threads.append(thread)

def unregister(self, thread: aprsd_threads.APRSDThread):
if not isinstance(thread, aprsd_threads.APRSDThread):
raise TypeError(f"Thread {thread} is not an APRSDThread")
self.threads.remove(thread)

def start(self):
"""Start all threads in the list."""
for thread in self.threads:
thread.start()

def join(self):
"""Join all the threads in the list"""
for thread in self.threads:
thread.join()

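With the `ServerThreads` registry above, the `server` command (later in this diff) registers threads once and calls `start()`/`join()` on the registry instead of starting and joining each thread by hand, and extensions can hook in the same way. A hedged sketch of an extension doing so; `MyExtensionThread` is hypothetical, and the `APRSDThread` constructor/`loop()` contract is assumed rather than shown here:

```python
from aprsd.threads import aprsd as aprsd_threads


class MyExtensionThread(aprsd_threads.APRSDThread):
    """Hypothetical extension thread doing periodic work."""

    def __init__(self):
        super().__init__("MyExtension")  # assumed: APRSDThread takes a thread name

    def loop(self):
        # assumed contract: return True to keep the thread looping
        return True


def register_with_server(server_threads):
    # server_threads is the ServerThreads() singleton defined above
    server_threads.register(MyExtensionThread())
```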
# main() ###
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@ -41,6 +73,8 @@ def server(ctx, flush):
|
||||
signal.signal(signal.SIGINT, aprsd_main.signal_handler)
|
||||
signal.signal(signal.SIGTERM, aprsd_main.signal_handler)
|
||||
|
||||
server_threads = ServerThreads()
|
||||
|
||||
level, msg = utils._check_version()
|
||||
if level:
|
||||
LOG.warning(msg)
|
||||
@ -110,36 +144,28 @@ def server(ctx, flush):
|
||||
|
||||
# Now start all the main processing threads.
|
||||
|
||||
keepalive = keep_alive.KeepAliveThread()
|
||||
keepalive.start()
|
||||
|
||||
stats_store_thread = stats_thread.APRSDStatsStoreThread()
|
||||
stats_store_thread.start()
|
||||
|
||||
rx_thread = rx.APRSDPluginRXThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
server_threads.register(keepalive.KeepAliveThread())
|
||||
server_threads.register(stats_thread.APRSDStatsStoreThread())
|
||||
server_threads.register(
|
||||
rx.APRSDPluginRXThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
),
|
||||
)
|
||||
process_thread = rx.APRSDPluginProcessPacketThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
server_threads.register(
|
||||
rx.APRSDPluginProcessPacketThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
),
|
||||
)
|
||||
rx_thread.start()
|
||||
process_thread.start()
|
||||
|
||||
if CONF.enable_beacon:
|
||||
LOG.info("Beacon Enabled. Starting Beacon thread.")
|
||||
bcn_thread = tx.BeaconSendThread()
|
||||
bcn_thread.start()
|
||||
server_threads.register(tx.BeaconSendThread())
|
||||
|
||||
if CONF.aprs_registry.enabled:
|
||||
LOG.info("Registry Enabled. Starting Registry thread.")
|
||||
registry_thread = registry.APRSRegistryThread()
|
||||
registry_thread.start()
|
||||
server_threads.register(registry.APRSRegistryThread())
|
||||
|
||||
if CONF.admin.web_enabled:
|
||||
log_monitor_thread = log_monitor.LogMonitorThread()
|
||||
log_monitor_thread.start()
|
||||
|
||||
rx_thread.join()
|
||||
process_thread.join()
|
||||
server_threads.start()
|
||||
server_threads.join()
|
||||
|
||||
return 0
|
||||
|
@ -1,662 +0,0 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
import click
|
||||
import flask
|
||||
from flask import request
|
||||
from flask_httpauth import HTTPBasicAuth
|
||||
from flask_socketio import Namespace, SocketIO
|
||||
from geopy.distance import geodesic
|
||||
from oslo_config import cfg
|
||||
import timeago
|
||||
from werkzeug.security import check_password_hash, generate_password_hash
|
||||
import wrapt
|
||||
|
||||
import aprsd
|
||||
from aprsd import cli_helper, client, packets, plugin_utils, stats, threads
|
||||
from aprsd import utils
|
||||
from aprsd import utils as aprsd_utils
|
||||
from aprsd.client import client_factory, kiss
|
||||
from aprsd.main import cli
|
||||
from aprsd.threads import aprsd as aprsd_threads
|
||||
from aprsd.threads import keep_alive, rx
|
||||
from aprsd.threads import stats as stats_thread
|
||||
from aprsd.threads import tx
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger()
|
||||
auth = HTTPBasicAuth()
|
||||
users = {}
|
||||
socketio = None
|
||||
|
||||
# List of callsigns that we don't want to track/fetch their location
|
||||
callsign_no_track = [
|
||||
"APDW16", "BLN0", "BLN1", "BLN2",
|
||||
"BLN3", "BLN4", "BLN5", "BLN6", "BLN7", "BLN8", "BLN9",
|
||||
]
|
||||
|
||||
# Callsign location information
|
||||
# callsign: {lat: 0.0, long: 0.0, last_update: datetime}
|
||||
callsign_locations = {}
|
||||
|
||||
flask_app = flask.Flask(
|
||||
"aprsd",
|
||||
static_url_path="/static",
|
||||
static_folder="web/chat/static",
|
||||
template_folder="web/chat/templates",
|
||||
)
|
||||
|
||||
|
||||
def signal_handler(sig, frame):
|
||||
|
||||
click.echo("signal_handler: called")
|
||||
LOG.info(
|
||||
f"Ctrl+C, Sending all threads({len(threads.APRSDThreadList())}) exit! "
|
||||
f"Can take up to 10 seconds {datetime.datetime.now()}",
|
||||
)
|
||||
threads.APRSDThreadList().stop_all()
|
||||
if "subprocess" not in str(frame):
|
||||
time.sleep(1.5)
|
||||
stats.stats_collector.collect()
|
||||
LOG.info("Telling flask to bail.")
|
||||
signal.signal(signal.SIGTERM, sys.exit(0))
|
||||
|
||||
|
||||
class SentMessages:
|
||||
|
||||
_instance = None
|
||||
lock = threading.Lock()
|
||||
|
||||
data = {}
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
"""This magic turns this into a singleton."""
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
return cls._instance
|
||||
|
||||
def is_initialized(self):
|
||||
return True
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def add(self, msg):
|
||||
self.data[msg.msgNo] = msg.__dict__
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def __len__(self):
|
||||
return len(self.data.keys())
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def get(self, id):
|
||||
if id in self.data:
|
||||
return self.data[id]
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def get_all(self):
|
||||
return self.data
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def set_status(self, id, status):
|
||||
if id in self.data:
|
||||
self.data[id]["last_update"] = str(datetime.datetime.now())
|
||||
self.data[id]["status"] = status
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def ack(self, id):
|
||||
"""The message got an ack!"""
|
||||
if id in self.data:
|
||||
self.data[id]["last_update"] = str(datetime.datetime.now())
|
||||
self.data[id]["ack"] = True
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def reply(self, id, packet):
|
||||
"""We got a packet back from the sent message."""
|
||||
if id in self.data:
|
||||
self.data[id]["reply"] = packet
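# Illustrative usage sketch (not part of the original module): SentMessages is
# a process wide singleton, so every caller shares the same dict of sent
# packets.  The pkt below is assumed to be an aprsd MessagePacket with a msgNo
# attribute, mirroring how SendMessageNamespace.on_send() uses it later on.
#
#   msgs = SentMessages()
#   msgs.add(pkt)                        # stores pkt.__dict__ keyed by msgNo
#   msgs.set_status(pkt.msgNo, "Sending")
#   assert SentMessages() is msgs        # same instance everywhere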
# HTTPBasicAuth doesn't work on a class method.
|
||||
# This has to be out here. Rely on the APRSDFlask
|
||||
# class to initialize the users from the config
|
||||
@auth.verify_password
|
||||
def verify_password(username, password):
|
||||
global users
|
||||
|
||||
if username in users and check_password_hash(users[username], password):
|
||||
return username
|
||||
|
||||
|
||||
def _build_location_from_repeat(message):
|
||||
# This is a location message Format is
|
||||
# ^ld^callsign:latitude,longitude,altitude,course,speed,timestamp
|
||||
a = message.split(":")
|
||||
LOG.warning(a)
|
||||
if len(a) == 2:
|
||||
callsign = a[0].replace("^ld^", "")
|
||||
b = a[1].split(",")
|
||||
LOG.warning(b)
|
||||
if len(b) == 6:
|
||||
lat = float(b[0])
|
||||
lon = float(b[1])
|
||||
alt = float(b[2])
|
||||
course = float(b[3])
|
||||
speed = float(b[4])
|
||||
time = int(b[5])
|
||||
compass_bearing = aprsd_utils.degrees_to_cardinal(course)
|
||||
data = {
|
||||
"callsign": callsign,
|
||||
"lat": lat,
|
||||
"lon": lon,
|
||||
"altitude": alt,
|
||||
"course": course,
|
||||
"compass_bearing": compass_bearing,
|
||||
"speed": speed,
|
||||
"lasttime": time,
|
||||
"timeago": timeago.format(time),
|
||||
}
|
||||
LOG.debug(f"Location data from REPEAT {data}")
|
||||
return data
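# Hypothetical example of the REPEAT reply parsed above (callsign and values
# are made up): a message of
#   "^ld^N0CALL-9:37.4419,-122.1430,30.0,180.0,25.0,1700000000"
# splits into callsign "N0CALL-9" plus the six comma separated fields, giving
# lat/lon/altitude/course/speed and a timeago string built from the unix
# timestamp.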
def _calculate_location_data(location_data):
|
||||
"""Calculate all of the location data from data from aprs.fi or REPEAT."""
|
||||
lat = location_data["lat"]
|
||||
lon = location_data["lon"]
|
||||
alt = location_data["altitude"]
|
||||
speed = location_data["speed"]
|
||||
lasttime = location_data["lasttime"]
|
||||
timeago_str = location_data.get(
|
||||
"timeago",
|
||||
timeago.format(lasttime),
|
||||
)
|
||||
# now calculate distance from our own location
|
||||
distance = 0
|
||||
if CONF.webchat.latitude and CONF.webchat.longitude:
|
||||
our_lat = float(CONF.webchat.latitude)
|
||||
our_lon = float(CONF.webchat.longitude)
|
||||
distance = geodesic((our_lat, our_lon), (lat, lon)).kilometers
|
||||
bearing = aprsd_utils.calculate_initial_compass_bearing(
|
||||
(our_lat, our_lon),
|
||||
(lat, lon),
|
||||
)
|
||||
compass_bearing = aprsd_utils.degrees_to_cardinal(bearing)
|
||||
return {
|
||||
"callsign": location_data["callsign"],
|
||||
"lat": lat,
|
||||
"lon": lon,
|
||||
"altitude": alt,
|
||||
"course": f"{bearing:0.1f}",
|
||||
"compass_bearing": compass_bearing,
|
||||
"speed": speed,
|
||||
"lasttime": lasttime,
|
||||
"timeago": timeago_str,
|
||||
"distance": f"{distance:0.1f}",
|
||||
}
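# Minimal sketch of the distance/bearing math used above, with made up
# coordinates standing in for CONF.webchat.latitude/longitude and the remote
# station:
#
#   our = (37.0, -122.0)
#   theirs = (37.5, -121.5)
#   km = geodesic(our, theirs).kilometers
#   bearing = aprsd_utils.calculate_initial_compass_bearing(our, theirs)
#   cardinal = aprsd_utils.degrees_to_cardinal(bearing)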
def send_location_data_to_browser(location_data):
|
||||
global socketio
|
||||
callsign = location_data["callsign"]
|
||||
LOG.info(f"Got location for {callsign} {callsign_locations[callsign]}")
|
||||
socketio.emit(
|
||||
"callsign_location", callsign_locations[callsign],
|
||||
namespace="/sendmsg",
|
||||
)
|
||||
|
||||
|
||||
def populate_callsign_location(callsign, data=None):
|
||||
"""Populate the location for the callsign.
|
||||
|
||||
if data is passed in, then we have the location already from
|
||||
an APRS packet. If data is None, then we need to fetch the
|
||||
location from aprs.fi or REPEAT.
|
||||
"""
|
||||
global socketio
|
||||
"""Fetch the location for the callsign."""
|
||||
LOG.debug(f"populate_callsign_location {callsign}")
|
||||
if data:
|
||||
location_data = _calculate_location_data(data)
|
||||
callsign_locations[callsign] = location_data
|
||||
send_location_data_to_browser(location_data)
|
||||
return
|
||||
|
||||
# First we are going to try to get the location from aprs.fi
|
||||
# if there is no internets, then this will fail and we will
|
||||
# fallback to calling REPEAT for the location for the callsign.
|
||||
fallback = False
|
||||
if not CONF.aprs_fi.apiKey:
|
||||
LOG.warning(
|
||||
"Config aprs_fi.apiKey is not set. Can't get location from aprs.fi "
|
||||
" falling back to sending REPEAT to get location.",
|
||||
)
|
||||
fallback = True
|
||||
else:
|
||||
try:
|
||||
aprs_data = plugin_utils.get_aprs_fi(CONF.aprs_fi.apiKey, callsign)
|
||||
if not len(aprs_data["entries"]):
|
||||
LOG.error("Didn't get any entries from aprs.fi")
|
||||
return
|
||||
lat = float(aprs_data["entries"][0]["lat"])
|
||||
lon = float(aprs_data["entries"][0]["lng"])
|
||||
try: # altitude not always provided
|
||||
alt = float(aprs_data["entries"][0]["altitude"])
|
||||
except Exception:
|
||||
alt = 0
|
||||
location_data = {
|
||||
"callsign": callsign,
|
||||
"lat": lat,
|
||||
"lon": lon,
|
||||
"altitude": alt,
|
||||
"lasttime": int(aprs_data["entries"][0]["lasttime"]),
|
||||
"course": float(aprs_data["entries"][0].get("course", 0)),
|
||||
"speed": float(aprs_data["entries"][0].get("speed", 0)),
|
||||
}
|
||||
location_data = _calculate_location_data(location_data)
|
||||
callsign_locations[callsign] = location_data
|
||||
send_location_data_to_browser(location_data)
|
||||
return
|
||||
except Exception as ex:
|
||||
LOG.error(f"Failed to fetch aprs.fi '{ex}'")
|
||||
LOG.error(ex)
|
||||
fallback = True
|
||||
|
||||
if fallback:
|
||||
# We don't have the location data
|
||||
# and we can't get it from aprs.fi
|
||||
# Send a special message to REPEAT to get the location data
|
||||
LOG.info(f"Sending REPEAT to get location for callsign {callsign}.")
|
||||
tx.send(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call="REPEAT",
|
||||
message_text=f"ld {callsign}",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class WebChatProcessPacketThread(rx.APRSDProcessPacketThread):
|
||||
"""Class that handles packets being sent to us."""
|
||||
|
||||
def __init__(self, packet_queue, socketio):
|
||||
self.socketio = socketio
|
||||
self.connected = False
|
||||
super().__init__(packet_queue)
|
||||
|
||||
def process_ack_packet(self, packet: packets.AckPacket):
|
||||
super().process_ack_packet(packet)
|
||||
ack_num = packet.get("msgNo")
|
||||
SentMessages().ack(ack_num)
|
||||
msg = SentMessages().get(ack_num)
|
||||
if msg:
|
||||
self.socketio.emit(
|
||||
"ack", msg,
|
||||
namespace="/sendmsg",
|
||||
)
|
||||
self.got_ack = True
|
||||
|
||||
def process_our_message_packet(self, packet: packets.MessagePacket):
|
||||
global callsign_locations
|
||||
# ok lets see if we have the location for the
|
||||
# person we just sent a message to.
|
||||
from_call = packet.get("from_call").upper()
|
||||
if from_call == "REPEAT":
|
||||
# We got a message from REPEAT. Is this a location message?
|
||||
message = packet.get("message_text")
|
||||
if message.startswith("^ld^"):
|
||||
location_data = _build_location_from_repeat(message)
|
||||
callsign = location_data["callsign"]
|
||||
location_data = _calculate_location_data(location_data)
|
||||
callsign_locations[callsign] = location_data
|
||||
send_location_data_to_browser(location_data)
|
||||
return
|
||||
elif (
|
||||
from_call not in callsign_locations
|
||||
and from_call not in callsign_no_track
|
||||
and client_factory.create().transport() in [client.TRANSPORT_APRSIS, client.TRANSPORT_FAKE]
|
||||
):
|
||||
# We have to ask aprs for the location for the callsign
|
||||
# We send a message packet to wb4bor-11 asking for location.
|
||||
populate_callsign_location(from_call)
|
||||
# Send the packet to the browser.
|
||||
self.socketio.emit(
|
||||
"new", packet.__dict__,
|
||||
namespace="/sendmsg",
|
||||
)
|
||||
|
||||
|
||||
class LocationProcessingThread(aprsd_threads.APRSDThread):
|
||||
"""Class to handle the location processing."""
|
||||
def __init__(self):
|
||||
super().__init__("LocationProcessingThread")
|
||||
|
||||
def loop(self):
|
||||
pass
|
||||
|
||||
|
||||
def set_config():
|
||||
global users
|
||||
|
||||
|
||||
def _get_transport(stats):
|
||||
if CONF.aprs_network.enabled:
|
||||
transport = "aprs-is"
|
||||
aprs_connection = (
|
||||
"APRS-IS Server: <a href='http://status.aprs2.net' >"
|
||||
"{}</a>".format(stats["APRSClientStats"]["server_string"])
|
||||
)
|
||||
elif kiss.KISSClient.is_enabled():
|
||||
transport = kiss.KISSClient.transport()
|
||||
if transport == client.TRANSPORT_TCPKISS:
|
||||
aprs_connection = (
|
||||
"TCPKISS://{}:{}".format(
|
||||
CONF.kiss_tcp.host,
|
||||
CONF.kiss_tcp.port,
|
||||
)
|
||||
)
|
||||
elif transport == client.TRANSPORT_SERIALKISS:
|
||||
# for pep8 violation
|
||||
aprs_connection = (
|
||||
"SerialKISS://{}@{} baud".format(
|
||||
CONF.kiss_serial.device,
|
||||
CONF.kiss_serial.baudrate,
|
||||
),
|
||||
)
|
||||
elif CONF.fake_client.enabled:
|
||||
transport = client.TRANSPORT_FAKE
|
||||
aprs_connection = "Fake Client"
|
||||
|
||||
return transport, aprs_connection
|
||||
|
||||
|
||||
@flask_app.route("/location/<callsign>", methods=["POST"])
|
||||
def location(callsign):
|
||||
LOG.debug(f"Fetch location for callsign {callsign}")
|
||||
if not callsign in callsign_no_track:
|
||||
populate_callsign_location(callsign)
|
||||
|
||||
|
||||
@auth.login_required
|
||||
@flask_app.route("/")
|
||||
def index():
|
||||
stats = _stats()
|
||||
|
||||
# For development
|
||||
html_template = "index.html"
|
||||
LOG.debug(f"Template {html_template}")
|
||||
|
||||
transport, aprs_connection = _get_transport(stats["stats"])
|
||||
LOG.debug(f"transport {transport} aprs_connection {aprs_connection}")
|
||||
|
||||
stats["transport"] = transport
|
||||
stats["aprs_connection"] = aprs_connection
|
||||
LOG.debug(f"initial stats = {stats}")
|
||||
latitude = CONF.webchat.latitude
|
||||
if latitude:
|
||||
latitude = float(CONF.webchat.latitude)
|
||||
|
||||
longitude = CONF.webchat.longitude
|
||||
if longitude:
|
||||
longitude = float(longitude)
|
||||
|
||||
return flask.render_template(
|
||||
html_template,
|
||||
initial_stats=stats,
|
||||
aprs_connection=aprs_connection,
|
||||
callsign=CONF.callsign,
|
||||
version=aprsd.__version__,
|
||||
latitude=latitude,
|
||||
longitude=longitude,
|
||||
)
|
||||
|
||||
|
||||
@auth.login_required
|
||||
@flask_app.route("/send-message-status")
|
||||
def send_message_status():
|
||||
LOG.debug(request)
|
||||
msgs = SentMessages()
|
||||
info = msgs.get_all()
|
||||
return json.dumps(info)
|
||||
|
||||
|
||||
def _stats():
|
||||
now = datetime.datetime.now()
|
||||
|
||||
time_format = "%m-%d-%Y %H:%M:%S"
|
||||
stats_dict = stats.stats_collector.collect(serializable=True)
|
||||
# Webchat doesnt need these
|
||||
if "WatchList" in stats_dict:
|
||||
del stats_dict["WatchList"]
|
||||
if "SeenList" in stats_dict:
|
||||
del stats_dict["SeenList"]
|
||||
if "APRSDThreadList" in stats_dict:
|
||||
del stats_dict["APRSDThreadList"]
|
||||
if "PacketList" in stats_dict:
|
||||
del stats_dict["PacketList"]
|
||||
if "EmailStats" in stats_dict:
|
||||
del stats_dict["EmailStats"]
|
||||
if "PluginManager" in stats_dict:
|
||||
del stats_dict["PluginManager"]
|
||||
|
||||
result = {
|
||||
"time": now.strftime(time_format),
|
||||
"stats": stats_dict,
|
||||
}
|
||||
return result
|
||||
|
||||
|
||||
@flask_app.route("/stats")
|
||||
def get_stats():
|
||||
return json.dumps(_stats())
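# The /stats endpoint returns the _stats() dict above serialized as JSON.
# Rough shape (values illustrative; the remaining keys depend on what the
# stats collector has registered after the del statements above):
#   {"time": "01-01-2025 12:00:00",
#    "stats": {"APRSClientStats": {...}, ...}}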
class SendMessageNamespace(Namespace):
|
||||
"""Class to handle the socketio interactions."""
|
||||
got_ack = False
|
||||
reply_sent = False
|
||||
msg = None
|
||||
request = None
|
||||
|
||||
def __init__(self, namespace=None, config=None):
|
||||
super().__init__(namespace)
|
||||
|
||||
def on_connect(self):
|
||||
global socketio
|
||||
LOG.debug("Web socket connected")
|
||||
socketio.emit(
|
||||
"connected", {"data": "/sendmsg Connected"},
|
||||
namespace="/sendmsg",
|
||||
)
|
||||
|
||||
def on_disconnect(self):
|
||||
LOG.debug("WS Disconnected")
|
||||
|
||||
def on_send(self, data):
|
||||
global socketio
|
||||
LOG.debug(f"WS: on_send {data}")
|
||||
self.request = data
|
||||
data["from"] = CONF.callsign
|
||||
path = data.get("path", None)
|
||||
if not path:
|
||||
path = []
|
||||
elif "," in path:
|
||||
path_opts = path.split(",")
|
||||
path = [x.strip() for x in path_opts]
|
||||
else:
|
||||
path = [path]
|
||||
|
||||
pkt = packets.MessagePacket(
|
||||
from_call=data["from"],
|
||||
to_call=data["to"].upper(),
|
||||
message_text=data["message"],
|
||||
path=path,
|
||||
)
|
||||
pkt.prepare()
|
||||
self.msg = pkt
|
||||
msgs = SentMessages()
|
||||
tx.send(pkt)
|
||||
msgs.add(pkt)
|
||||
msgs.set_status(pkt.msgNo, "Sending")
|
||||
obj = msgs.get(pkt.msgNo)
|
||||
socketio.emit(
|
||||
"sent", obj,
|
||||
namespace="/sendmsg",
|
||||
)
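# Hypothetical examples of the "path" normalisation above: the browser sends
# path as a comma separated string and it is turned into a list before the
# MessagePacket is built.
#   ""                  -> []
#   "WIDE1-1"           -> ["WIDE1-1"]
#   "WIDE1-1, WIDE2-1"  -> ["WIDE1-1", "WIDE2-1"]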
def on_gps(self, data):
|
||||
LOG.debug(f"WS on_GPS: {data}")
|
||||
lat = data["latitude"]
|
||||
long = data["longitude"]
|
||||
LOG.debug(f"Lat {lat}")
|
||||
LOG.debug(f"Long {long}")
|
||||
path = data.get("path", None)
|
||||
if not path:
|
||||
path = []
|
||||
elif "," in path:
|
||||
path_opts = path.split(",")
|
||||
path = [x.strip() for x in path_opts]
|
||||
else:
|
||||
path = [path]
|
||||
|
||||
tx.send(
|
||||
packets.BeaconPacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call="APDW16",
|
||||
latitude=lat,
|
||||
longitude=long,
|
||||
comment="APRSD WebChat Beacon",
|
||||
path=path,
|
||||
),
|
||||
direct=True,
|
||||
)
|
||||
|
||||
def handle_message(self, data):
|
||||
LOG.debug(f"WS Data {data}")
|
||||
|
||||
def handle_json(self, data):
|
||||
LOG.debug(f"WS json {data}")
|
||||
|
||||
def on_get_callsign_location(self, data):
|
||||
LOG.debug(f"on_callsign_location {data}")
|
||||
if data["callsign"] not in callsign_no_track:
|
||||
populate_callsign_location(data["callsign"])
|
||||
|
||||
|
||||
@trace.trace
|
||||
def init_flask(loglevel, quiet):
|
||||
global socketio, flask_app
|
||||
|
||||
socketio = SocketIO(
|
||||
flask_app, logger=False, engineio_logger=False,
|
||||
async_mode="threading",
|
||||
)
|
||||
|
||||
socketio.on_namespace(
|
||||
SendMessageNamespace(
|
||||
"/sendmsg",
|
||||
),
|
||||
)
|
||||
return socketio
|
||||
|
||||
|
||||
# main() ###
|
||||
@cli.command()
|
||||
@cli_helper.add_options(cli_helper.common_options)
|
||||
@click.option(
|
||||
"-f",
|
||||
"--flush",
|
||||
"flush",
|
||||
is_flag=True,
|
||||
show_default=True,
|
||||
default=False,
|
||||
help="Flush out all old aged messages on disk.",
|
||||
)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--port",
|
||||
"port",
|
||||
show_default=True,
|
||||
default=None,
|
||||
help="Port to listen to web requests. This overrides the config.webchat.web_port setting.",
|
||||
)
|
||||
@click.pass_context
|
||||
@cli_helper.process_standard_options
|
||||
def webchat(ctx, flush, port):
|
||||
"""Web based HAM Radio chat program!"""
|
||||
loglevel = ctx.obj["loglevel"]
|
||||
quiet = ctx.obj["quiet"]
|
||||
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
signal.signal(signal.SIGTERM, signal_handler)
|
||||
|
||||
level, msg = utils._check_version()
|
||||
if level:
|
||||
LOG.warning(msg)
|
||||
else:
|
||||
LOG.info(msg)
|
||||
LOG.info(f"APRSD Started version: {aprsd.__version__}")
|
||||
|
||||
CONF.log_opt_values(logging.getLogger(), logging.DEBUG)
|
||||
user = CONF.admin.user
|
||||
users[user] = generate_password_hash(CONF.admin.password)
|
||||
if not port:
|
||||
port = CONF.webchat.web_port
|
||||
|
||||
# Initialize the client factory and create
|
||||
# The correct client object ready for use
|
||||
# Make sure we have 1 client transport enabled
|
||||
if not client_factory.is_client_enabled():
|
||||
LOG.error("No Clients are enabled in config.")
|
||||
sys.exit(-1)
|
||||
|
||||
if not client_factory.is_client_configured():
|
||||
LOG.error("APRS client is not properly configured in config file.")
|
||||
sys.exit(-1)
|
||||
|
||||
# Creates the client object
|
||||
LOG.info("Creating client connection")
|
||||
aprs_client = client_factory.create()
|
||||
LOG.info(aprs_client)
|
||||
if not aprs_client.login_success:
|
||||
# We failed to login, will just quit!
|
||||
msg = f"Login Failure: {aprs_client.login_failure}"
|
||||
LOG.error(msg)
|
||||
print(msg)
|
||||
sys.exit(-1)
|
||||
|
||||
keepalive = keep_alive.KeepAliveThread()
|
||||
LOG.info("Start KeepAliveThread")
|
||||
keepalive.start()
|
||||
|
||||
stats_store_thread = stats_thread.APRSDStatsStoreThread()
|
||||
stats_store_thread.start()
|
||||
|
||||
socketio = init_flask(loglevel, quiet)
|
||||
rx_thread = rx.APRSDPluginRXThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
)
|
||||
rx_thread.start()
|
||||
process_thread = WebChatProcessPacketThread(
|
||||
packet_queue=threads.packet_queue,
|
||||
socketio=socketio,
|
||||
)
|
||||
process_thread.start()
|
||||
|
||||
LOG.info("Start socketio.run()")
|
||||
socketio.run(
|
||||
flask_app,
|
||||
# This is broken for now after removing cryptography
|
||||
# and pyopenssl
|
||||
# ssl_context="adhoc",
|
||||
host=CONF.webchat.web_ip,
|
||||
port=port,
|
||||
allow_unsafe_werkzeug=True,
|
||||
)
|
||||
|
||||
LOG.info("WebChat exiting!!!! Bye.")
|
@ -1,6 +1,6 @@
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd.conf import client, common, log, plugin_common, plugin_email
|
||||
from aprsd.conf import client, common, log, plugin_common
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
@ -11,7 +11,6 @@ client.register_opts(CONF)
|
||||
|
||||
# plugins
|
||||
plugin_common.register_opts(CONF)
|
||||
plugin_email.register_opts(CONF)
|
||||
|
||||
|
||||
def set_lib_defaults():
|
||||
|
@ -4,7 +4,6 @@ The options for log setup
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
DEFAULT_LOGIN = "NOCALL"
|
||||
|
||||
aprs_group = cfg.OptGroup(
|
||||
@ -31,7 +30,7 @@ aprs_opts = [
|
||||
"enabled",
|
||||
default=True,
|
||||
help="Set enabled to False if there is no internet connectivity."
|
||||
"This is useful for a direwolf KISS aprs connection only.",
|
||||
"This is useful for a direwolf KISS aprs connection only.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"login",
|
||||
@ -42,8 +41,8 @@ aprs_opts = [
|
||||
"password",
|
||||
secret=True,
|
||||
help="APRS Password "
|
||||
"Get the passcode for your callsign here: "
|
||||
"https://apps.magicbug.co.uk/passcode",
|
||||
"Get the passcode for your callsign here: "
|
||||
"https://apps.magicbug.co.uk/passcode",
|
||||
),
|
||||
cfg.HostAddressOpt(
|
||||
"host",
|
||||
|
@ -2,30 +2,20 @@ from pathlib import Path
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
home = str(Path.home())
|
||||
DEFAULT_CONFIG_DIR = f"{home}/.config/aprsd/"
|
||||
APRSD_DEFAULT_MAGIC_WORD = "CHANGEME!!!"
|
||||
|
||||
admin_group = cfg.OptGroup(
|
||||
name="admin",
|
||||
title="Admin web interface settings",
|
||||
)
|
||||
watch_list_group = cfg.OptGroup(
|
||||
name="watch_list",
|
||||
title="Watch List settings",
|
||||
)
|
||||
webchat_group = cfg.OptGroup(
|
||||
name="webchat",
|
||||
title="Settings specific to the webchat command",
|
||||
)
|
||||
|
||||
registry_group = cfg.OptGroup(
|
||||
name="aprs_registry",
|
||||
title="APRS Registry settings",
|
||||
)
|
||||
|
||||
|
||||
aprsd_opts = [
|
||||
cfg.StrOpt(
|
||||
"callsign",
|
||||
@ -56,15 +46,15 @@ aprsd_opts = [
|
||||
"ack_rate_limit_period",
|
||||
default=1,
|
||||
help="The wait period in seconds per Ack packet being sent."
|
||||
"1 means 1 ack packet per second allowed."
|
||||
"2 means 1 pack packet every 2 seconds allowed",
|
||||
"1 means 1 ack packet per second allowed."
|
||||
"2 means 1 pack packet every 2 seconds allowed",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"msg_rate_limit_period",
|
||||
default=2,
|
||||
help="Wait period in seconds per non AckPacket being sent."
|
||||
"2 means 1 packet every 2 seconds allowed."
|
||||
"5 means 1 pack packet every 5 seconds allowed",
|
||||
"2 means 1 packet every 2 seconds allowed."
|
||||
"5 means 1 pack packet every 5 seconds allowed",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"packet_dupe_timeout",
|
||||
@ -75,7 +65,7 @@ aprsd_opts = [
|
||||
"enable_beacon",
|
||||
default=False,
|
||||
help="Enable sending of a GPS Beacon packet to locate this service. "
|
||||
"Requires latitude and longitude to be set.",
|
||||
"Requires latitude and longitude to be set.",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"beacon_interval",
|
||||
@ -102,8 +92,8 @@ aprsd_opts = [
|
||||
choices=["compact", "multiline", "both"],
|
||||
default="compact",
|
||||
help="When logging packets 'compact' will use a single line formatted for each packet."
|
||||
"'multiline' will use multiple lines for each packet and is the traditional format."
|
||||
"both will log both compact and multiline.",
|
||||
"'multiline' will use multiple lines for each packet and is the traditional format."
|
||||
"both will log both compact and multiline.",
|
||||
),
|
||||
cfg.IntOpt(
|
||||
"default_packet_send_count",
|
||||
@ -129,7 +119,7 @@ aprsd_opts = [
|
||||
"enable_seen_list",
|
||||
default=True,
|
||||
help="Enable the Callsign seen list tracking feature. This allows aprsd to keep track of "
|
||||
"callsigns that have been seen and when they were last seen.",
|
||||
"callsigns that have been seen and when they were last seen.",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"enable_packet_logging",
|
||||
@ -145,7 +135,7 @@ aprsd_opts = [
|
||||
"enable_sending_ack_packets",
|
||||
default=True,
|
||||
help="Set this to False, to disable sending of ack packets. This will entirely stop"
|
||||
"APRSD from sending ack packets.",
|
||||
"APRSD from sending ack packets.",
|
||||
),
|
||||
]
|
||||
|
||||
@ -154,8 +144,8 @@ watch_list_opts = [
|
||||
"enabled",
|
||||
default=False,
|
||||
help="Enable the watch list feature. Still have to enable "
|
||||
"the correct plugin. Built-in plugin to use is "
|
||||
"aprsd.plugins.notify.NotifyPlugin",
|
||||
"the correct plugin. Built-in plugin to use is "
|
||||
"aprsd.plugins.notify.NotifyPlugin",
|
||||
),
|
||||
cfg.ListOpt(
|
||||
"callsigns",
|
||||
@ -174,36 +164,7 @@ watch_list_opts = [
|
||||
"alert_time_seconds",
|
||||
default=3600,
|
||||
help="Time to wait before alert is sent on new message for "
|
||||
"users in callsigns.",
|
||||
),
|
||||
]
|
||||
|
||||
admin_opts = [
|
||||
cfg.BoolOpt(
|
||||
"web_enabled",
|
||||
default=False,
|
||||
help="Enable the Admin Web Interface",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"web_ip",
|
||||
default="0.0.0.0",
|
||||
help="The ip address to listen on",
|
||||
),
|
||||
cfg.PortOpt(
|
||||
"web_port",
|
||||
default=8001,
|
||||
help="The port to listen on",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"user",
|
||||
default="admin",
|
||||
help="The admin user for the admin web interface",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"password",
|
||||
default="password",
|
||||
secret=True,
|
||||
help="Admin interface password",
|
||||
"users in callsigns.",
|
||||
),
|
||||
]
|
||||
|
||||
@ -212,7 +173,6 @@ enabled_plugins_opts = [
|
||||
cfg.ListOpt(
|
||||
"enabled_plugins",
|
||||
default=[
|
||||
"aprsd.plugins.email.EmailPlugin",
|
||||
"aprsd.plugins.fortune.FortunePlugin",
|
||||
"aprsd.plugins.location.LocationPlugin",
|
||||
"aprsd.plugins.ping.PingPlugin",
|
||||
@ -222,36 +182,8 @@ enabled_plugins_opts = [
|
||||
"aprsd.plugins.notify.NotifySeenPlugin",
|
||||
],
|
||||
help="Comma separated list of enabled plugins for APRSD."
|
||||
"To enable installed external plugins add them here."
|
||||
"The full python path to the class name must be used",
|
||||
),
|
||||
]
|
||||
|
||||
webchat_opts = [
|
||||
cfg.StrOpt(
|
||||
"web_ip",
|
||||
default="0.0.0.0",
|
||||
help="The ip address to listen on",
|
||||
),
|
||||
cfg.PortOpt(
|
||||
"web_port",
|
||||
default=8001,
|
||||
help="The port to listen on",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"latitude",
|
||||
default=None,
|
||||
help="Latitude for the GPS Beacon button. If not set, the button will not be enabled.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"longitude",
|
||||
default=None,
|
||||
help="Longitude for the GPS Beacon button. If not set, the button will not be enabled.",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"disable_url_request_logging",
|
||||
default=False,
|
||||
help="Disable the logging of url requests in the webchat command.",
|
||||
"To enable installed external plugins add them here."
|
||||
"The full python path to the class name must be used",
|
||||
),
|
||||
]
|
||||
|
||||
@ -260,16 +192,16 @@ registry_opts = [
|
||||
"enabled",
|
||||
default=False,
|
||||
help="Enable sending aprs registry information. This will let the "
|
||||
"APRS registry know about your service and it's uptime. "
|
||||
"No personal information is sent, just the callsign, uptime and description. "
|
||||
"The service callsign is the callsign set in [DEFAULT] section.",
|
||||
"APRS registry know about your service and it's uptime. "
|
||||
"No personal information is sent, just the callsign, uptime and description. "
|
||||
"The service callsign is the callsign set in [DEFAULT] section.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"description",
|
||||
default=None,
|
||||
help="Description of the service to send to the APRS registry. "
|
||||
"This is what will show up in the APRS registry."
|
||||
"If not set, the description will be the same as the callsign.",
|
||||
"This is what will show up in the APRS registry."
|
||||
"If not set, the description will be the same as the callsign.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"registry_url",
|
||||
@ -292,12 +224,8 @@ registry_opts = [
|
||||
def register_opts(config):
|
||||
config.register_opts(aprsd_opts)
|
||||
config.register_opts(enabled_plugins_opts)
|
||||
config.register_group(admin_group)
|
||||
config.register_opts(admin_opts, group=admin_group)
|
||||
config.register_group(watch_list_group)
|
||||
config.register_opts(watch_list_opts, group=watch_list_group)
|
||||
config.register_group(webchat_group)
|
||||
config.register_opts(webchat_opts, group=webchat_group)
|
||||
config.register_group(registry_group)
|
||||
config.register_opts(registry_opts, group=registry_group)
|
||||
|
||||
@ -305,8 +233,6 @@ def register_opts(config):
|
||||
def list_opts():
|
||||
return {
|
||||
"DEFAULT": (aprsd_opts + enabled_plugins_opts),
|
||||
admin_group.name: admin_opts,
|
||||
watch_list_group.name: watch_list_opts,
|
||||
webchat_group.name: webchat_opts,
|
||||
registry_group.name: registry_opts,
|
||||
}
|
||||
|
@ -1,11 +1,11 @@
|
||||
"""
|
||||
The options for log setup
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
LOG_LEVELS = {
|
||||
"CRITICAL": logging.CRITICAL,
|
||||
"ERROR": logging.ERROR,
|
||||
@ -59,7 +59,5 @@ def register_opts(config):
|
||||
|
||||
def list_opts():
|
||||
return {
|
||||
logging_group.name: (
|
||||
logging_opts
|
||||
),
|
||||
logging_group.name: (logging_opts),
|
||||
}
|
||||
|
@ -31,7 +31,6 @@ import importlib
|
||||
import os
|
||||
import pkgutil
|
||||
|
||||
|
||||
LIST_OPTS_FUNC_NAME = "list_opts"
|
||||
|
||||
|
||||
@ -64,9 +63,11 @@ def _import_modules(module_names):
|
||||
for modname in module_names:
|
||||
mod = importlib.import_module("aprsd.conf." + modname)
|
||||
if not hasattr(mod, LIST_OPTS_FUNC_NAME):
|
||||
msg = "The module 'aprsd.conf.%s' should have a '%s' "\
|
||||
"function which returns the config options." % \
|
||||
(modname, LIST_OPTS_FUNC_NAME)
|
||||
msg = (
|
||||
"The module 'aprsd.conf.%s' should have a '%s' "
|
||||
"function which returns the config options."
|
||||
% (modname, LIST_OPTS_FUNC_NAME)
|
||||
)
|
||||
raise Exception(msg)
|
||||
else:
|
||||
imported_modules.append(mod)
|
||||
|
@ -1,6 +1,5 @@
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
aprsfi_group = cfg.OptGroup(
|
||||
name="aprs_fi",
|
||||
title="APRS.FI website settings",
|
||||
@ -18,16 +17,10 @@ owm_wx_group = cfg.OptGroup(
|
||||
title="Options for the OWMWeatherPlugin",
|
||||
)
|
||||
|
||||
location_group = cfg.OptGroup(
|
||||
name="location_plugin",
|
||||
title="Options for the LocationPlugin",
|
||||
)
|
||||
|
||||
aprsfi_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="Get the apiKey from your aprs.fi account here:"
|
||||
"http://aprs.fi/account",
|
||||
help="Get the apiKey from your aprs.fi account here:" "http://aprs.fi/account",
|
||||
),
|
||||
]
|
||||
|
||||
@ -35,11 +28,11 @@ owm_wx_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="OWMWeatherPlugin api key to OpenWeatherMap's API."
|
||||
"This plugin uses the openweathermap API to fetch"
|
||||
"location and weather information."
|
||||
"To use this plugin you need to get an openweathermap"
|
||||
"account and apikey."
|
||||
"https://home.openweathermap.org/api_keys",
|
||||
"This plugin uses the openweathermap API to fetch"
|
||||
"location and weather information."
|
||||
"To use this plugin you need to get an openweathermap"
|
||||
"account and apikey."
|
||||
"https://home.openweathermap.org/api_keys",
|
||||
),
|
||||
]
|
||||
|
||||
@ -47,116 +40,16 @@ avwx_opts = [
|
||||
cfg.StrOpt(
|
||||
"apiKey",
|
||||
help="avwx-api is an opensource project that has"
|
||||
"a hosted service here: https://avwx.rest/"
|
||||
"You can launch your own avwx-api in a container"
|
||||
"by cloning the githug repo here:"
|
||||
"https://github.com/avwx-rest/AVWX-API",
|
||||
"a hosted service here: https://avwx.rest/"
|
||||
"You can launch your own avwx-api in a container"
|
||||
"by cloning the githug repo here:"
|
||||
"https://github.com/avwx-rest/AVWX-API",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"base_url",
|
||||
default="https://avwx.rest",
|
||||
help="The base url for the avwx API. If you are hosting your own"
|
||||
"Here is where you change the url to point to yours.",
|
||||
),
|
||||
]
|
||||
|
||||
location_opts = [
|
||||
cfg.StrOpt(
|
||||
"geopy_geocoder",
|
||||
choices=[
|
||||
"ArcGIS", "AzureMaps", "Baidu", "Bing", "GoogleV3", "HERE",
|
||||
"Nominatim", "OpenCage", "TomTom", "USGov", "What3Words", "Woosmap",
|
||||
],
|
||||
default="Nominatim",
|
||||
help="The geopy geocoder to use. Default is Nominatim."
|
||||
"See https://geopy.readthedocs.io/en/stable/#module-geopy.geocoders"
|
||||
"for more information.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"user_agent",
|
||||
default="APRSD",
|
||||
help="The user agent to use for the Nominatim geocoder."
|
||||
"See https://geopy.readthedocs.io/en/stable/#module-geopy.geocoders"
|
||||
"for more information.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"arcgis_username",
|
||||
default=None,
|
||||
help="The username to use for the ArcGIS geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#arcgis"
|
||||
"for more information."
|
||||
"Only used for the ArcGIS geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"arcgis_password",
|
||||
default=None,
|
||||
help="The password to use for the ArcGIS geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#arcgis"
|
||||
"for more information."
|
||||
"Only used for the ArcGIS geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"azuremaps_subscription_key",
|
||||
help="The subscription key to use for the AzureMaps geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#azuremaps"
|
||||
"for more information."
|
||||
"Only used for the AzureMaps geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"baidu_api_key",
|
||||
help="The API key to use for the Baidu geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#baidu"
|
||||
"for more information."
|
||||
"Only used for the Baidu geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"bing_api_key",
|
||||
help="The API key to use for the Bing geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#bing"
|
||||
"for more information."
|
||||
"Only used for the Bing geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"google_api_key",
|
||||
help="The API key to use for the Google geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#googlev3"
|
||||
"for more information."
|
||||
"Only used for the Google geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"here_api_key",
|
||||
help="The API key to use for the HERE geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#here"
|
||||
"for more information."
|
||||
"Only used for the HERE geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"opencage_api_key",
|
||||
help="The API key to use for the OpenCage geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#opencage"
|
||||
"for more information."
|
||||
"Only used for the OpenCage geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"tomtom_api_key",
|
||||
help="The API key to use for the TomTom geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#tomtom"
|
||||
"for more information."
|
||||
"Only used for the TomTom geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"what3words_api_key",
|
||||
help="The API key to use for the What3Words geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#what3words"
|
||||
"for more information."
|
||||
"Only used for the What3Words geocoder.",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"woosmap_api_key",
|
||||
help="The API key to use for the Woosmap geocoder."
|
||||
"See https://geopy.readthedocs.io/en/latest/#woosmap"
|
||||
"for more information."
|
||||
"Only used for the Woosmap geocoder.",
|
||||
"Here is where you change the url to point to yours.",
|
||||
),
|
||||
]
|
||||
|
||||
@ -169,8 +62,6 @@ def register_opts(config):
|
||||
config.register_opts(owm_wx_opts, group=owm_wx_group)
|
||||
config.register_group(avwx_group)
|
||||
config.register_opts(avwx_opts, group=avwx_group)
|
||||
config.register_group(location_group)
|
||||
config.register_opts(location_opts, group=location_group)
|
||||
|
||||
|
||||
def list_opts():
|
||||
@ -178,5 +69,4 @@ def list_opts():
|
||||
aprsfi_group.name: aprsfi_opts,
|
||||
owm_wx_group.name: owm_wx_opts,
|
||||
avwx_group.name: avwx_opts,
|
||||
location_group.name: location_opts,
|
||||
}
|
||||
|
@ -1,105 +0,0 @@
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
email_group = cfg.OptGroup(
|
||||
name="email_plugin",
|
||||
title="Options for the APRSD Email plugin",
|
||||
)
|
||||
|
||||
email_opts = [
|
||||
cfg.StrOpt(
|
||||
"callsign",
|
||||
help="(Required) Callsign to validate for doing email commands."
|
||||
"Only this callsign can check email. This is also where the "
|
||||
"email notifications for new emails will be sent.",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"enabled",
|
||||
default=False,
|
||||
help="Enable the Email plugin?",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"debug",
|
||||
default=False,
|
||||
help="Enable the Email plugin Debugging?",
|
||||
),
|
||||
]
|
||||
|
||||
email_imap_opts = [
|
||||
cfg.StrOpt(
|
||||
"imap_login",
|
||||
help="Login username/email for IMAP server",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"imap_password",
|
||||
secret=True,
|
||||
help="Login password for IMAP server",
|
||||
),
|
||||
cfg.HostnameOpt(
|
||||
"imap_host",
|
||||
help="Hostname/IP of the IMAP server",
|
||||
),
|
||||
cfg.PortOpt(
|
||||
"imap_port",
|
||||
default=993,
|
||||
help="Port to use for IMAP server",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"imap_use_ssl",
|
||||
default=True,
|
||||
help="Use SSL for connection to IMAP Server",
|
||||
),
|
||||
]
|
||||
|
||||
email_smtp_opts = [
|
||||
cfg.StrOpt(
|
||||
"smtp_login",
|
||||
help="Login username/email for SMTP server",
|
||||
),
|
||||
cfg.StrOpt(
|
||||
"smtp_password",
|
||||
secret=True,
|
||||
help="Login password for SMTP server",
|
||||
),
|
||||
cfg.HostnameOpt(
|
||||
"smtp_host",
|
||||
help="Hostname/IP of the SMTP server",
|
||||
),
|
||||
cfg.PortOpt(
|
||||
"smtp_port",
|
||||
default=465,
|
||||
help="Port to use for SMTP server",
|
||||
),
|
||||
cfg.BoolOpt(
|
||||
"smtp_use_ssl",
|
||||
default=True,
|
||||
help="Use SSL for connection to SMTP Server",
|
||||
),
|
||||
]
|
||||
|
||||
email_shortcuts_opts = [
|
||||
cfg.ListOpt(
|
||||
"email_shortcuts",
|
||||
help="List of email shortcuts for checking/sending email "
|
||||
"For Exmaple: wb=walt@walt.com,cl=cl@cl.com\n"
|
||||
"Means use 'wb' to send an email to walt@walt.com",
|
||||
),
|
||||
]
|
||||
|
||||
ALL_OPTS = (
|
||||
email_opts
|
||||
+ email_imap_opts
|
||||
+ email_smtp_opts
|
||||
+ email_shortcuts_opts
|
||||
)
|
||||
|
||||
|
||||
def register_opts(config):
|
||||
config.register_group(email_group)
|
||||
config.register_opts(ALL_OPTS, group=email_group)
|
||||
|
||||
|
||||
def list_opts():
|
||||
return {
|
||||
email_group.name: ALL_OPTS,
|
||||
}
|
@ -1,11 +1,13 @@
|
||||
class MissingConfigOptionException(Exception):
|
||||
"""Missing a config option."""
|
||||
|
||||
def __init__(self, config_option):
|
||||
self.message = f"Option '{config_option}' was not in config file"
|
||||
|
||||
|
||||
class ConfigOptionBogusDefaultException(Exception):
|
||||
"""Missing a config option."""
|
||||
|
||||
def __init__(self, config_option, default_fail):
|
||||
self.message = (
|
||||
f"Config file option '{config_option}' needs to be "
|
||||
|
@ -1,5 +1,4 @@
|
||||
import logging
|
||||
from logging.handlers import QueueHandler
|
||||
import queue
|
||||
import sys
|
||||
|
||||
@ -8,7 +7,6 @@ from oslo_config import cfg
|
||||
|
||||
from aprsd.conf import log as conf_log
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
# LOG = logging.getLogger("APRSD")
|
||||
LOG = logger
|
||||
@ -19,6 +17,7 @@ class QueueLatest(queue.Queue):
|
||||
|
||||
This prevents the queue from blowing up in size.
|
||||
"""
|
||||
|
||||
def put(self, *args, **kwargs):
|
||||
try:
|
||||
super().put(*args, **kwargs)
|
||||
@ -44,7 +43,9 @@ class InterceptHandler(logging.Handler):
|
||||
frame = frame.f_back
|
||||
depth += 1
|
||||
|
||||
logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
|
||||
logger.opt(depth=depth, exception=record.exc_info).log(
|
||||
level, record.getMessage()
|
||||
)
|
||||
|
||||
|
||||
# Set up the log facility
|
||||
@ -60,43 +61,18 @@ def setup_logging(loglevel=None, quiet=False):
|
||||
logging.root.handlers = [InterceptHandler()]
|
||||
logging.root.setLevel(log_level)
|
||||
|
||||
imap_list = [
|
||||
"imapclient.imaplib", "imaplib", "imapclient",
|
||||
"imapclient.util",
|
||||
]
|
||||
aprslib_list = [
|
||||
# We don't really want to see the aprslib parsing debug output.
|
||||
disable_list = [
|
||||
"aprslib",
|
||||
"aprslib.parsing",
|
||||
"aprslib.exceptions",
|
||||
]
|
||||
webserver_list = [
|
||||
"werkzeug",
|
||||
"werkzeug._internal",
|
||||
"socketio",
|
||||
"urllib3.connectionpool",
|
||||
"chardet",
|
||||
"chardet.charsetgroupprober",
|
||||
"chardet.eucjpprober",
|
||||
"chardet.mbcharsetprober",
|
||||
]
|
||||
|
||||
# We don't really want to see the aprslib parsing debug output.
|
||||
disable_list = imap_list + aprslib_list + webserver_list
|
||||
|
||||
# remove every other logger's handlers
|
||||
# and propagate to root logger
|
||||
for name in logging.root.manager.loggerDict.keys():
|
||||
logging.getLogger(name).handlers = []
|
||||
if name in disable_list:
|
||||
logging.getLogger(name).propagate = False
|
||||
else:
|
||||
logging.getLogger(name).propagate = True
|
||||
|
||||
if CONF.webchat.disable_url_request_logging:
|
||||
for name in webserver_list:
|
||||
logging.getLogger(name).handlers = []
|
||||
logging.getLogger(name).propagate = True
|
||||
logging.getLogger(name).setLevel(logging.ERROR)
|
||||
logging.getLogger(name).propagate = name not in disable_list
|
||||
|
||||
handlers = [
|
||||
{
|
||||
@ -118,21 +94,6 @@ def setup_logging(loglevel=None, quiet=False):
|
||||
},
|
||||
)
|
||||
|
||||
if CONF.email_plugin.enabled and CONF.email_plugin.debug:
|
||||
for name in imap_list:
|
||||
logging.getLogger(name).propagate = True
|
||||
|
||||
if CONF.admin.web_enabled:
|
||||
qh = QueueHandler(logging_queue)
|
||||
handlers.append(
|
||||
{
|
||||
"sink": qh, "serialize": False,
|
||||
"format": CONF.logging.logformat,
|
||||
"level": log_level,
|
||||
"colorize": False,
|
||||
},
|
||||
)
|
||||
|
||||
# configure loguru
|
||||
logger.configure(handlers=handlers)
|
||||
logger.level("DEBUG", color="<fg #BABABA>")
|
||||
|
@ -22,11 +22,11 @@
|
||||
# python included libs
|
||||
import datetime
|
||||
import importlib.metadata as imp
|
||||
from importlib.metadata import version as metadata_version
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import time
|
||||
from importlib.metadata import version as metadata_version
|
||||
|
||||
import click
|
||||
from oslo_config import cfg, generator
|
||||
@ -36,7 +36,6 @@ import aprsd
|
||||
from aprsd import cli_helper, packets, threads, utils
|
||||
from aprsd.stats import collector
|
||||
|
||||
|
||||
# setup the global logger
|
||||
# log.basicConfig(level=log.DEBUG) # level=10
|
||||
CONF = cfg.CONF
|
||||
@ -54,8 +53,14 @@ def cli(ctx):
|
||||
|
||||
def load_commands():
|
||||
from .cmds import ( # noqa
|
||||
admin, completion, dev, fetch_stats, healthcheck, list_plugins, listen,
|
||||
send_message, server, webchat,
|
||||
completion,
|
||||
dev,
|
||||
fetch_stats,
|
||||
healthcheck,
|
||||
list_plugins,
|
||||
listen,
|
||||
send_message,
|
||||
server,
|
||||
)
|
||||
|
||||
|
||||
@ -115,6 +120,7 @@ def sample_config(ctx):
|
||||
|
||||
def _get_selected_entry_points():
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
all = imp.entry_points()
|
||||
selected = []
|
||||
|
@ -1,15 +1,25 @@
|
||||
from aprsd.packets import collector
|
||||
from aprsd.packets.core import ( # noqa: F401
|
||||
AckPacket, BeaconPacket, BulletinPacket, GPSPacket, MessagePacket,
|
||||
MicEPacket, ObjectPacket, Packet, RejectPacket, StatusPacket,
|
||||
ThirdPartyPacket, UnknownPacket, WeatherPacket, factory,
|
||||
AckPacket,
|
||||
BeaconPacket,
|
||||
BulletinPacket,
|
||||
GPSPacket,
|
||||
MessagePacket,
|
||||
MicEPacket,
|
||||
ObjectPacket,
|
||||
Packet,
|
||||
RejectPacket,
|
||||
StatusPacket,
|
||||
ThirdPartyPacket,
|
||||
UnknownPacket,
|
||||
WeatherPacket,
|
||||
factory,
|
||||
)
|
||||
from aprsd.packets.packet_list import PacketList # noqa: F401
|
||||
from aprsd.packets.seen_list import SeenList # noqa: F401
|
||||
from aprsd.packets.tracker import PacketTrack # noqa: F401
|
||||
from aprsd.packets.watch_list import WatchList # noqa: F401
|
||||
|
||||
|
||||
# Register all the packet tracking objects.
|
||||
collector.PacketCollector().register(PacketList)
|
||||
collector.PacketCollector().register(SeenList)
|
||||
|
@ -1,19 +1,22 @@
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
|
||||
# Due to a failure in python 3.8
|
||||
from typing import Any, List, Optional, Type, TypeVar, Union
|
||||
|
||||
from aprslib import util as aprslib_util
|
||||
from dataclasses_json import (
|
||||
CatchAll, DataClassJsonMixin, Undefined, dataclass_json,
|
||||
CatchAll,
|
||||
DataClassJsonMixin,
|
||||
Undefined,
|
||||
dataclass_json,
|
||||
)
|
||||
from loguru import logger
|
||||
|
||||
from aprsd.utils import counter, trace
|
||||
|
||||
from aprsd.utils import counter
|
||||
|
||||
# For mypy to be happy
|
||||
A = TypeVar("A", bound="DataClassJsonMixin")
|
||||
@ -51,7 +54,7 @@ def _init_send_time():
|
||||
return NO_DATE
|
||||
|
||||
|
||||
def _init_msgNo(): # noqa: N802
|
||||
def _init_msgNo(): # noqa: N802
|
||||
"""For some reason __post__init doesn't get called.
|
||||
|
||||
So in order to initialize the msgNo field in the packet
|
||||
@ -84,14 +87,16 @@ class Packet:
|
||||
to_call: Optional[str] = field(default=None)
|
||||
addresse: Optional[str] = field(default=None)
|
||||
format: Optional[str] = field(default=None)
|
||||
msgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
msgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
ackMsgNo: Optional[str] = field(default=None) # noqa: N815
|
||||
packet_type: Optional[str] = field(default=None)
|
||||
timestamp: float = field(default_factory=_init_timestamp, compare=False, hash=False)
|
||||
# Holds the raw text string to be sent over the wire
|
||||
# or holds the raw string from input packet
|
||||
raw: Optional[str] = field(default=None, compare=False, hash=False)
|
||||
raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
|
||||
raw_dict: dict = field(
|
||||
repr=False, default_factory=lambda: {}, compare=False, hash=False
|
||||
)
|
||||
# Built by calling prepare(). raw needs this built first.
|
||||
payload: Optional[str] = field(default=None)
|
||||
|
||||
@ -129,7 +134,6 @@ class Packet:
|
||||
msg = self._filter_for_send(self.raw).rstrip("\n")
|
||||
return msg
|
||||
|
||||
@trace.trace
|
||||
def prepare(self, create_msg_number=False) -> None:
|
||||
"""Do stuff here that is needed prior to sending over the air."""
|
||||
# now build the raw message for sending
|
||||
@ -141,12 +145,12 @@ class Packet:
|
||||
def _build_payload(self) -> None:
|
||||
"""The payload is the non headers portion of the packet."""
|
||||
if not self.to_call:
|
||||
raise ValueError("to_call isn't set. Must set to_call before calling prepare()")
|
||||
raise ValueError(
|
||||
"to_call isn't set. Must set to_call before calling prepare()"
|
||||
)
|
||||
|
||||
# The base packet class has no real payload
|
||||
self.payload = (
|
||||
f":{self.to_call.ljust(9)}"
|
||||
)
|
||||
self.payload = f":{self.to_call.ljust(9)}"
|
||||
|
||||
def _build_raw(self) -> None:
|
||||
"""Build the self.raw which is what is sent over the air."""
|
||||
@ -167,8 +171,10 @@ class Packet:
|
||||
message = msg[:67]
|
||||
# We all miss George Carlin
|
||||
return re.sub(
|
||||
"fuck|shit|cunt|piss|cock|bitch", "****",
|
||||
message, flags=re.IGNORECASE,
|
||||
"fuck|shit|cunt|piss|cock|bitch",
|
||||
"****",
|
||||
message,
|
||||
flags=re.IGNORECASE,
|
||||
)
|
||||
|
||||
def __str__(self) -> str:
|
||||
@ -215,10 +221,7 @@ class BulletinPacket(Packet):
|
||||
return f"BLN{self.bid} {self.message_text}"
|
||||
|
||||
def _build_payload(self) -> None:
|
||||
self.payload = (
|
||||
f":BLN{self.bid:<9}"
|
||||
f":{self.message_text}"
|
||||
)
|
||||
self.payload = f":BLN{self.bid:<9}" f":{self.message_text}"
|
||||
|
||||
|
||||
@dataclass_json
|
||||
@ -336,10 +339,7 @@ class GPSPacket(Packet):
|
||||
self.payload = "".join(payload)
|
||||
|
||||
def _build_raw(self):
|
||||
self.raw = (
|
||||
f"{self.from_call}>{self.to_call},WIDE2-1:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>{self.to_call},WIDE2-1:" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def human_info(self) -> str:
|
||||
@ -371,10 +371,7 @@ class BeaconPacket(GPSPacket):
|
||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||
lon = aprslib_util.longitude_to_ddm(self.longitude)
|
||||
|
||||
self.payload = (
|
||||
f"@{time_zulu}z{lat}{self.symbol_table}"
|
||||
f"{lon}"
|
||||
)
|
||||
self.payload = f"@{time_zulu}z{lat}{self.symbol_table}" f"{lon}"
|
||||
|
||||
if self.comment:
|
||||
comment = self._filter_for_send(self.comment)
|
||||
@ -383,10 +380,7 @@ class BeaconPacket(GPSPacket):
|
||||
self.payload = f"{self.payload}{self.symbol}APRSD Beacon"
|
||||
|
||||
def _build_raw(self):
|
||||
self.raw = (
|
||||
f"{self.from_call}>APZ100:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>APZ100:" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
@ -475,10 +469,7 @@ class ObjectPacket(GPSPacket):
|
||||
lat = aprslib_util.latitude_to_ddm(self.latitude)
|
||||
long = aprslib_util.longitude_to_ddm(self.longitude)
|
||||
|
||||
self.payload = (
|
||||
f"*{time_zulu}z{lat}{self.symbol_table}"
|
||||
f"{long}{self.symbol}"
|
||||
)
|
||||
self.payload = f"*{time_zulu}z{lat}{self.symbol_table}" f"{long}{self.symbol}"
|
||||
|
||||
if self.comment:
|
||||
comment = self._filter_for_send(self.comment)
|
||||
@ -495,10 +486,7 @@ class ObjectPacket(GPSPacket):
|
||||
The frequency, uplink_tone, offset is part of the comment
|
||||
"""
|
||||
|
||||
self.raw = (
|
||||
f"{self.from_call}>APZ100:;{self.to_call:9s}"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>APZ100:;{self.to_call:9s}" f"{self.payload}"
|
||||
|
||||
@property
|
||||
def human_info(self) -> str:
|
||||
@ -548,11 +536,13 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
if "speed" in raw:
|
||||
del raw["speed"]
|
||||
# Let's adjust the rain numbers as well, since it's wrong
|
||||
raw["rain_1h"] = round((raw.get("rain_1h", 0) / .254) * .01, 3)
|
||||
raw["rain_1h"] = round((raw.get("rain_1h", 0) / 0.254) * 0.01, 3)
|
||||
raw["weather"]["rain_1h"] = raw["rain_1h"]
|
||||
raw["rain_24h"] = round((raw.get("rain_24h", 0) / .254) * .01, 3)
|
||||
raw["rain_24h"] = round((raw.get("rain_24h", 0) / 0.254) * 0.01, 3)
|
||||
raw["weather"]["rain_24h"] = raw["rain_24h"]
|
||||
raw["rain_since_midnight"] = round((raw.get("rain_since_midnight", 0) / .254) * .01, 3)
|
||||
raw["rain_since_midnight"] = round(
|
||||
(raw.get("rain_since_midnight", 0) / 0.254) * 0.01, 3
|
||||
)
|
||||
raw["weather"]["rain_since_midnight"] = raw["rain_since_midnight"]
if "wind_direction" not in raw:
|
||||
@ -594,26 +584,26 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
def _build_payload(self):
|
||||
"""Build an uncompressed weather packet
|
||||
|
||||
Format =
|
||||
Format =
|
||||
|
||||
_CSE/SPDgXXXtXXXrXXXpXXXPXXXhXXbXXXXX%type NEW FORMAT APRS793 June 97
|
||||
NOT BACKWARD COMPATIBLE
|
||||
_CSE/SPDgXXXtXXXrXXXpXXXPXXXhXXbXXXXX%type NEW FORMAT APRS793 June 97
|
||||
NOT BACKWARD COMPATIBLE
|
||||
|
||||
|
||||
Where: CSE/SPD is wind direction and sustained 1 minute speed
|
||||
t is in degrees F
|
||||
Where: CSE/SPD is wind direction and sustained 1 minute speed
|
||||
t is in degrees F
|
||||
|
||||
r is Rain per last 60 minutes
|
||||
1.04 inches of rain will show as r104
|
||||
p is precipitation per last 24 hours (sliding 24 hour window)
|
||||
P is precip per last 24 hours since midnight
|
||||
b is Baro in tenths of a mb
|
||||
h is humidity in percent. 00=100
|
||||
g is Gust (peak winds in last 5 minutes)
|
||||
# is the raw rain counter for remote WX stations
|
||||
See notes on remotes below
|
||||
% shows software type d=Dos, m=Mac, w=Win, etc
|
||||
type shows type of WX instrument
|
||||
r is Rain per last 60 minutes
|
||||
1.04 inches of rain will show as r104
|
||||
p is precipitation per last 24 hours (sliding 24 hour window)
|
||||
P is precip per last 24 hours since midnight
|
||||
b is Baro in tenths of a mb
|
||||
h is humidity in percent. 00=100
|
||||
g is Gust (peak winds in last 5 minutes)
|
||||
# is the raw rain counter for remote WX stations
|
||||
See notes on remotes below
|
||||
% shows software type d=Dos, m=Mac, w=Win, etc
|
||||
type shows type of WX instrument
|
||||
|
||||
"""
|
||||
time_zulu = self._build_time_zulu()
|
||||
@ -623,7 +613,8 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
f"{self.longitude}{self.symbol}",
|
||||
f"{self.wind_direction:03d}",
|
||||
# Speed = sustained 1 minute wind speed in mph
|
||||
f"{self.symbol_table}", f"{self.wind_speed:03.0f}",
|
||||
f"{self.symbol_table}",
|
||||
f"{self.wind_speed:03.0f}",
|
||||
# wind gust (peak wind speed in mph in the last 5 minutes)
|
||||
f"g{self.wind_gust:03.0f}",
|
||||
# Temperature in degrees F
|
||||
@ -645,11 +636,7 @@ class WeatherPacket(GPSPacket, DataClassJsonMixin):
|
||||
self.payload = "".join(contents)
|
||||
|
||||
def _build_raw(self):
|
||||
|
||||
self.raw = (
|
||||
f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:"
|
||||
f"{self.payload}"
|
||||
)
|
||||
self.raw = f"{self.from_call}>{self.to_call},WIDE1-1,WIDE2-1:" f"{self.payload}"
@dataclass(unsafe_hash=True)
|
||||
@ -693,14 +680,17 @@ class UnknownPacket:
|
||||
|
||||
All of the unknown attributes are stored in the unknown_fields
|
||||
"""
|
||||
|
||||
unknown_fields: CatchAll
|
||||
_type: str = "UnknownPacket"
|
||||
from_call: Optional[str] = field(default=None)
|
||||
to_call: Optional[str] = field(default=None)
|
||||
msgNo: str = field(default_factory=_init_msgNo) # noqa: N815
|
||||
msgNo: str = field(default_factory=_init_msgNo) # noqa: N815
|
||||
format: Optional[str] = field(default=None)
|
||||
raw: Optional[str] = field(default=None)
|
||||
raw_dict: dict = field(repr=False, default_factory=lambda: {}, compare=False, hash=False)
|
||||
raw_dict: dict = field(
|
||||
repr=False, default_factory=lambda: {}, compare=False, hash=False
|
||||
)
|
||||
path: List[str] = field(default_factory=list, compare=False, hash=False)
|
||||
packet_type: Optional[str] = field(default=None)
|
||||
via: Optional[str] = field(default=None, compare=False, hash=False)
|
||||
|
@ -1,14 +1,13 @@
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from geopy.distance import geodesic
|
||||
from haversine import Unit, haversine
|
||||
from loguru import logger
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import utils
|
||||
from aprsd.packets.core import AckPacket, GPSPacket, RejectPacket
|
||||
|
||||
|
||||
LOG = logging.getLogger()
|
||||
LOGU = logger
|
||||
CONF = cfg.CONF
|
||||
@ -22,7 +21,9 @@ DISTANCE_COLOR = "fg #FF5733"
|
||||
DEGREES_COLOR = "fg #FFA900"
|
||||
|
||||
|
||||
def log_multiline(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> None:
|
||||
def log_multiline(
|
||||
packet, tx: Optional[bool] = False, header: Optional[bool] = True
|
||||
) -> None:
|
||||
"""LOG a packet to the logfile."""
|
||||
if not CONF.enable_packet_logging:
|
||||
return
|
||||
@ -121,8 +122,7 @@ def log(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> No
|
||||
via_color = "green"
|
||||
arrow = f"<{via_color}>-></{via_color}>"
|
||||
logit.append(
|
||||
f"<cyan>{name}</cyan>"
|
||||
f":{packet.msgNo}",
|
||||
f"<cyan>{name}</cyan>" f":{packet.msgNo}",
|
||||
)
|
||||
|
||||
tmp = None
|
||||
@ -145,8 +145,8 @@ def log(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> No
|
||||
|
||||
# is there distance information?
|
||||
if isinstance(packet, GPSPacket) and CONF.latitude and CONF.longitude:
|
||||
my_coords = (CONF.latitude, CONF.longitude)
|
||||
packet_coords = (packet.latitude, packet.longitude)
|
||||
my_coords = (float(CONF.latitude), float(CONF.longitude))
|
||||
packet_coords = (float(packet.latitude), float(packet.longitude))
|
||||
try:
|
||||
bearing = utils.calculate_initial_compass_bearing(my_coords, packet_coords)
|
||||
except Exception as e:
|
||||
@ -154,7 +154,7 @@ def log(packet, tx: Optional[bool] = False, header: Optional[bool] = True) -> No
|
||||
bearing = 0
|
||||
logit.append(
|
||||
f" : <{DEGREES_COLOR}>{utils.degrees_to_cardinal(bearing, full_string=True)}</{DEGREES_COLOR}>"
|
||||
f"<{DISTANCE_COLOR}>@{geodesic(my_coords, packet_coords).miles:.2f}miles</{DISTANCE_COLOR}>",
|
||||
f"<{DISTANCE_COLOR}>@{haversine(my_coords, packet_coords, unit=Unit.MILES):.2f}miles</{DISTANCE_COLOR}>",
|
||||
)
|
||||
|
||||
LOGU.opt(colors=True).info(" ".join(logit))
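# Quick sketch of the haversine call introduced above (coordinates made up):
#   from haversine import Unit, haversine
#   miles = haversine((37.0, -122.0), (37.5, -121.5), unit=Unit.MILES)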
@ -1,18 +1,18 @@
from collections import OrderedDict
import logging
from collections import OrderedDict

from oslo_config import cfg

from aprsd.packets import core
from aprsd.utils import objectstore


CONF = cfg.CONF
LOG = logging.getLogger("APRSD")


class PacketList(objectstore.ObjectStoreMixin):
    """Class to keep track of the packets we tx/rx."""

    _instance = None
    _total_rx: int = 0
    _total_tx: int = 0
@ -38,7 +38,8 @@ class PacketList(objectstore.ObjectStoreMixin):
        self._add(packet)
        ptype = packet.__class__.__name__
        type_stats = self.data["types"].setdefault(
            ptype, {"tx": 0, "rx": 0},
            ptype,
            {"tx": 0, "rx": 0},
        )
        type_stats["rx"] += 1

@ -49,7 +50,8 @@ class PacketList(objectstore.ObjectStoreMixin):
        self._add(packet)
        ptype = packet.__class__.__name__
        type_stats = self.data["types"].setdefault(
            ptype, {"tx": 0, "rx": 0},
            ptype,
            {"tx": 0, "rx": 0},
        )
        type_stats["tx"] += 1

@ -86,10 +88,11 @@ class PacketList(objectstore.ObjectStoreMixin):
        with self.lock:
            # Get last N packets directly using list slicing
            packets_list = list(self.data.get("packets", {}).values())
            pkts = packets_list[-CONF.packet_list_stats_maxlen:][::-1]
            pkts = packets_list[-CONF.packet_list_stats_maxlen :][::-1]

            stats = {
                "total_tracked": self._total_rx + self._total_tx,  # Fixed typo: was rx + rx
                "total_tracked": self._total_rx
                + self._total_tx,  # Fixed typo: was rx + rx
                "rx": self._total_rx,
                "tx": self._total_tx,
                "types": self.data.get("types", {}),  # Changed default from [] to {}
@ -8,14 +8,13 @@ import re
|
||||
import textwrap
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import pluggy
|
||||
from oslo_config import cfg
|
||||
|
||||
import aprsd
|
||||
from aprsd import client, packets, threads
|
||||
from aprsd.packets import watch_list
|
||||
|
||||
|
||||
# setup the global logger
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -166,7 +165,8 @@ class APRSDWatchListPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
except Exception as ex:
|
||||
LOG.error(
|
||||
"Plugin {} failed to process packet {}".format(
|
||||
self.__class__, ex,
|
||||
self.__class__,
|
||||
ex,
|
||||
),
|
||||
)
|
||||
if result:
|
||||
@ -214,7 +214,9 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
return result
|
||||
|
||||
if not isinstance(packet, packets.MessagePacket):
|
||||
LOG.warning(f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring")
|
||||
LOG.warning(
|
||||
f"{self.__class__.__name__} Got a {packet.__class__.__name__} ignoring"
|
||||
)
|
||||
return packets.NULL_MESSAGE
|
||||
|
||||
result = None
|
||||
@ -236,7 +238,8 @@ class APRSDRegexCommandPluginBase(APRSDPluginBase, metaclass=abc.ABCMeta):
|
||||
except Exception as ex:
|
||||
LOG.error(
|
||||
"Plugin {} failed to process packet {}".format(
|
||||
self.__class__, ex,
|
||||
self.__class__,
|
||||
ex,
|
||||
),
|
||||
)
|
||||
LOG.exception(ex)
|
||||
@ -286,7 +289,8 @@ class HelpPlugin(APRSDRegexCommandPluginBase):
|
||||
reply = None
|
||||
for p in pm.get_plugins():
|
||||
if (
|
||||
p.enabled and isinstance(p, APRSDRegexCommandPluginBase)
|
||||
p.enabled
|
||||
and isinstance(p, APRSDRegexCommandPluginBase)
|
||||
and p.command_name.lower() == command_name
|
||||
):
|
||||
reply = p.help()
|
||||
@ -345,6 +349,7 @@ class PluginManager:
|
||||
|
||||
def stats(self, serializable=False) -> dict:
|
||||
"""Collect and return stats for all plugins."""
|
||||
|
||||
def full_name_with_qualname(obj):
|
||||
return "{}.{}".format(
|
||||
obj.__class__.__module__,
|
||||
@ -354,7 +359,6 @@ class PluginManager:
|
||||
plugin_stats = {}
|
||||
plugins = self.get_plugins()
|
||||
if plugins:
|
||||
|
||||
for p in plugins:
|
||||
plugin_stats[full_name_with_qualname(p)] = {
|
||||
"enabled": p.enabled,
|
||||
@ -439,7 +443,9 @@ class PluginManager:
|
||||
)
|
||||
self._watchlist_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
elif isinstance(plugin_obj, APRSDRegexCommandPluginBase):
|
||||
if plugin_obj.enabled:
|
||||
LOG.info(
|
||||
@ -451,7 +457,9 @@ class PluginManager:
|
||||
)
|
||||
self._pluggy_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
elif isinstance(plugin_obj, APRSDPluginBase):
|
||||
if plugin_obj.enabled:
|
||||
LOG.info(
|
||||
@ -462,7 +470,9 @@ class PluginManager:
|
||||
)
|
||||
self._pluggy_pm.register(plugin_obj)
|
||||
else:
|
||||
LOG.warning(f"Plugin {plugin_obj.__class__.__name__} is disabled")
|
||||
LOG.warning(
|
||||
f"Plugin {plugin_obj.__class__.__name__} is disabled"
|
||||
)
|
||||
except Exception as ex:
|
||||
LOG.error(f"Couldn't load plugin '{plugin_name}'")
|
||||
LOG.exception(ex)
|
||||
@ -473,7 +483,8 @@ class PluginManager:
|
||||
self.setup_plugins(load_help_plugin=CONF.load_help_plugin)
|
||||
|
||||
def setup_plugins(
|
||||
self, load_help_plugin=True,
|
||||
self,
|
||||
load_help_plugin=True,
|
||||
plugin_list=[],
|
||||
):
|
||||
"""Create the plugin manager and register plugins."""
|
||||
|
@ -1,715 +0,0 @@
|
||||
import datetime
|
||||
import email
|
||||
from email.mime.text import MIMEText
|
||||
import imaplib
|
||||
import logging
|
||||
import re
|
||||
import smtplib
|
||||
import threading
|
||||
import time
|
||||
|
||||
import imapclient
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import packets, plugin, threads, utils
|
||||
from aprsd.stats import collector
|
||||
from aprsd.threads import tx
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
shortcuts_dict = None
|
||||
|
||||
|
||||
class EmailInfo:
|
||||
"""A singleton thread safe mechanism for the global check_email_delay.
|
||||
|
||||
This has to be done because we have 2 separate threads that access
|
||||
the delay value.
|
||||
1) when EmailPlugin runs from a user message and
|
||||
2) when the background EmailThread runs to check email.
|
||||
|
||||
Access the check email delay with
|
||||
EmailInfo().delay
|
||||
|
||||
Set it with
|
||||
EmailInfo().delay = 100
|
||||
or
|
||||
EmailInfo().delay += 10
|
||||
|
||||
"""
|
||||
|
||||
_instance = None
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
"""This magic turns this into a singleton."""
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
cls._instance.lock = threading.Lock()
|
||||
cls._instance._delay = 60
|
||||
return cls._instance
|
||||
|
||||
@property
|
||||
def delay(self):
|
||||
with self.lock:
|
||||
return self._delay
|
||||
|
||||
@delay.setter
|
||||
def delay(self, val):
|
||||
with self.lock:
|
||||
self._delay = val
|
||||
|
||||
|
||||
@utils.singleton
|
||||
class EmailStats:
|
||||
"""Singleton object to store stats related to email."""
|
||||
_instance = None
|
||||
tx = 0
|
||||
rx = 0
|
||||
email_thread_last_time = None
|
||||
|
||||
def stats(self, serializable=False):
|
||||
if CONF.email_plugin.enabled:
|
||||
last_check_time = self.email_thread_last_time
|
||||
if serializable and last_check_time:
|
||||
last_check_time = last_check_time.isoformat()
|
||||
stats = {
|
||||
"tx": self.tx,
|
||||
"rx": self.rx,
|
||||
"last_check_time": last_check_time,
|
||||
}
|
||||
else:
|
||||
stats = {}
|
||||
return stats
|
||||
|
||||
def tx_inc(self):
|
||||
self.tx += 1
|
||||
|
||||
def rx_inc(self):
|
||||
self.rx += 1
|
||||
|
||||
def email_thread_update(self):
|
||||
self.email_thread_last_time = datetime.datetime.now()
|
||||
|
||||
|
||||
class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
|
||||
"""Email Plugin."""
|
||||
|
||||
command_regex = "^-.*"
|
||||
command_name = "email"
|
||||
short_description = "Send and Receive email"
|
||||
|
||||
# message_number:time combos so we don't resend the same email in
|
||||
# five mins {int:int}
|
||||
email_sent_dict = {}
|
||||
enabled = False
|
||||
|
||||
def setup(self):
|
||||
"""Ensure that email is enabled and start the thread."""
|
||||
if CONF.email_plugin.enabled:
|
||||
self.enabled = True
|
||||
|
||||
if not CONF.email_plugin.callsign:
|
||||
self.enabled = False
|
||||
LOG.error("email_plugin.callsign is not set.")
|
||||
return
|
||||
|
||||
if not CONF.email_plugin.imap_login:
|
||||
LOG.error("email_plugin.imap_login not set. Disabling Plugin")
|
||||
self.enabled = False
|
||||
return
|
||||
|
||||
if not CONF.email_plugin.smtp_login:
|
||||
LOG.error("email_plugin.smtp_login not set. Disabling Plugin")
|
||||
self.enabled = False
|
||||
return
|
||||
|
||||
shortcuts = _build_shortcuts_dict()
|
||||
LOG.info(f"Email shortcuts {shortcuts}")
|
||||
|
||||
# Register the EmailStats producer with the stats collector
|
||||
# We do this here to prevent EmailStats from being registered
|
||||
# when email is not enabled in the config file.
|
||||
collector.Collector().register_producer(EmailStats)
|
||||
else:
|
||||
LOG.info("Email services not enabled.")
|
||||
self.enabled = False
|
||||
|
||||
def create_threads(self):
|
||||
if self.enabled:
|
||||
return APRSDEmailThread()
|
||||
|
||||
@trace.trace
|
||||
def process(self, packet: packets.MessagePacket):
|
||||
LOG.info("Email COMMAND")
|
||||
if not self.enabled:
|
||||
# Email has not been enabled
|
||||
# so the plugin will just NOOP
|
||||
return packets.NULL_MESSAGE
|
||||
|
||||
fromcall = packet.from_call
|
||||
message = packet.message_text
|
||||
ack = packet.get("msgNo", "0")
|
||||
|
||||
reply = None
|
||||
if not CONF.email_plugin.enabled:
|
||||
LOG.debug("Email is not enabled in config file ignoring.")
|
||||
return "Email not enabled."
|
||||
|
||||
searchstring = "^" + CONF.email_plugin.callsign + ".*"
|
||||
# only I can do email
|
||||
if re.search(searchstring, fromcall):
|
||||
# digits only, first one is number of emails to resend
|
||||
r = re.search("^-([0-9])[0-9]*$", message)
|
||||
if r is not None:
|
||||
LOG.debug("RESEND EMAIL")
|
||||
resend_email(r.group(1), fromcall)
|
||||
reply = packets.NULL_MESSAGE
|
||||
# -user@address.com body of email
|
||||
elif re.search(r"^-([A-Za-z0-9_\-\.@]+) (.*)", message):
|
||||
# (same search again)
|
||||
a = re.search(r"^-([A-Za-z0-9_\-\.@]+) (.*)", message)
|
||||
if a is not None:
|
||||
to_addr = a.group(1)
|
||||
content = a.group(2)
|
||||
|
||||
email_address = get_email_from_shortcut(to_addr)
|
||||
if not email_address:
|
||||
reply = "Bad email address"
|
||||
return reply
|
||||
|
||||
# send recipient link to aprs.fi map
|
||||
if content == "mapme":
|
||||
content = (
|
||||
"Click for my location: http://aprs.fi/{}" ""
|
||||
).format(
|
||||
CONF.email_plugin.callsign,
|
||||
)
|
||||
too_soon = 0
|
||||
now = time.time()
|
||||
# see if we sent this msg number recently
|
||||
if ack in self.email_sent_dict:
|
||||
# BUG(hemna) - when we get a 2 different email command
|
||||
# with the same ack #, we don't send it.
|
||||
timedelta = now - self.email_sent_dict[ack]
|
||||
if timedelta < 300: # five minutes
|
||||
too_soon = 1
|
||||
if not too_soon or ack == 0:
|
||||
LOG.info(f"Send email '{content}'")
|
||||
send_result = send_email(to_addr, content)
|
||||
reply = packets.NULL_MESSAGE
|
||||
if send_result != 0:
|
||||
reply = f"-{to_addr} failed"
|
||||
else:
|
||||
# clear email sent dictionary if somehow goes
|
||||
# over 100
|
||||
if len(self.email_sent_dict) > 98:
|
||||
LOG.debug(
|
||||
"DEBUG: email_sent_dict is big ("
|
||||
+ str(len(self.email_sent_dict))
|
||||
+ ") clearing out.",
|
||||
)
|
||||
self.email_sent_dict.clear()
|
||||
self.email_sent_dict[ack] = now
|
||||
else:
|
||||
reply = packets.NULL_MESSAGE
|
||||
LOG.info(
|
||||
"Email for message number "
|
||||
+ ack
|
||||
+ " recently sent, not sending again.",
|
||||
)
|
||||
else:
|
||||
reply = "Bad email address"
|
||||
|
||||
return reply
|
||||
|
||||
|
||||
def _imap_connect():
|
||||
imap_port = CONF.email_plugin.imap_port
|
||||
use_ssl = CONF.email_plugin.imap_use_ssl
|
||||
|
||||
try:
|
||||
server = imapclient.IMAPClient(
|
||||
CONF.email_plugin.imap_host,
|
||||
port=imap_port,
|
||||
use_uid=True,
|
||||
ssl=use_ssl,
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
LOG.exception("Failed to connect IMAP server")
|
||||
return
|
||||
|
||||
try:
|
||||
server.login(
|
||||
CONF.email_plugin.imap_login,
|
||||
CONF.email_plugin.imap_password,
|
||||
)
|
||||
except (imaplib.IMAP4.error, Exception) as e:
|
||||
msg = getattr(e, "message", repr(e))
|
||||
LOG.error(f"Failed to login {msg}")
|
||||
return
|
||||
|
||||
server.select_folder("INBOX")
|
||||
|
||||
server.fetch = trace.trace(server.fetch)
|
||||
server.search = trace.trace(server.search)
|
||||
server.remove_flags = trace.trace(server.remove_flags)
|
||||
server.add_flags = trace.trace(server.add_flags)
|
||||
return server
|
||||
|
||||
|
||||
def _smtp_connect():
|
||||
host = CONF.email_plugin.smtp_host
|
||||
smtp_port = CONF.email_plugin.smtp_port
|
||||
use_ssl = CONF.email_plugin.smtp_use_ssl
|
||||
msg = "{}{}:{}".format("SSL " if use_ssl else "", host, smtp_port)
|
||||
LOG.debug(
|
||||
"Connect to SMTP host {} with user '{}'".format(
|
||||
msg,
|
||||
CONF.email_plugin.smtp_login,
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
if use_ssl:
|
||||
server = smtplib.SMTP_SSL(
|
||||
host=host,
|
||||
port=smtp_port,
|
||||
timeout=30,
|
||||
)
|
||||
else:
|
||||
server = smtplib.SMTP(
|
||||
host=host,
|
||||
port=smtp_port,
|
||||
timeout=30,
|
||||
)
|
||||
except Exception:
|
||||
LOG.error("Couldn't connect to SMTP Server")
|
||||
return
|
||||
|
||||
LOG.debug(f"Connected to smtp host {msg}")
|
||||
|
||||
debug = CONF.email_plugin.debug
|
||||
if debug:
|
||||
server.set_debuglevel(5)
|
||||
server.sendmail = trace.trace(server.sendmail)
|
||||
|
||||
try:
|
||||
server.login(
|
||||
CONF.email_plugin.smtp_login,
|
||||
CONF.email_plugin.smtp_password,
|
||||
)
|
||||
except Exception:
|
||||
LOG.error("Couldn't connect to SMTP Server")
|
||||
return
|
||||
|
||||
LOG.debug(f"Logged into SMTP server {msg}")
|
||||
return server
|
||||
|
||||
|
||||
def _build_shortcuts_dict():
|
||||
global shortcuts_dict
|
||||
if not shortcuts_dict:
|
||||
if CONF.email_plugin.email_shortcuts:
|
||||
shortcuts_dict = {}
|
||||
tmp = CONF.email_plugin.email_shortcuts
|
||||
for combo in tmp:
|
||||
entry = combo.split("=")
|
||||
shortcuts_dict[entry[0]] = entry[1]
|
||||
else:
|
||||
shortcuts_dict = {}
|
||||
|
||||
return shortcuts_dict
|
||||
|
||||
|
||||
def get_email_from_shortcut(addr):
|
||||
if CONF.email_plugin.email_shortcuts:
|
||||
shortcuts = _build_shortcuts_dict()
|
||||
LOG.info(f"Shortcut lookup {addr} returns {shortcuts.get(addr, addr)}")
|
||||
return shortcuts.get(addr, addr)
|
||||
else:
|
||||
return addr
|
||||
|
||||
|
||||
def validate_email_config(disable_validation=False):
|
||||
"""function to simply ensure we can connect to email services.
|
||||
|
||||
This helps with failing early during startup.
|
||||
"""
|
||||
LOG.info("Checking IMAP configuration")
|
||||
imap_server = _imap_connect()
|
||||
LOG.info("Checking SMTP configuration")
|
||||
smtp_server = _smtp_connect()
|
||||
|
||||
if imap_server and smtp_server:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
@trace.trace
|
||||
def parse_email(msgid, data, server):
|
||||
envelope = data[b"ENVELOPE"]
|
||||
# email address match
|
||||
# use raw string to avoid invalid escape sequence errors r"string here"
|
||||
f = re.search(r"([\.\w_-]+@[\.\w_-]+)", str(envelope.from_[0]))
|
||||
if f is not None:
|
||||
from_addr = f.group(1)
|
||||
else:
|
||||
from_addr = "noaddr"
|
||||
LOG.debug(f"Got a message from '{from_addr}'")
|
||||
try:
|
||||
m = server.fetch([msgid], ["RFC822"])
|
||||
except Exception:
|
||||
LOG.exception("Couldn't fetch email from server in parse_email")
|
||||
return
|
||||
|
||||
msg = email.message_from_string(m[msgid][b"RFC822"].decode(errors="ignore"))
|
||||
if msg.is_multipart():
|
||||
text = ""
|
||||
html = None
|
||||
# default in case body somehow isn't set below - happened once
|
||||
body = b"* unreadable msg received"
|
||||
# this uses the last text or html part in the email,
|
||||
# phone companies often put content in an attachment
|
||||
for part in msg.get_payload():
|
||||
if part.get_content_charset() is None:
|
||||
# or BREAK when we hit a text or html?
|
||||
# We cannot know the character set,
|
||||
# so return decoded "something"
|
||||
LOG.debug("Email got unknown content type")
|
||||
text = part.get_payload(decode=True)
|
||||
continue
|
||||
|
||||
charset = part.get_content_charset()
|
||||
|
||||
if part.get_content_type() == "text/plain":
|
||||
LOG.debug("Email got text/plain")
|
||||
text = str(
|
||||
part.get_payload(decode=True),
|
||||
str(charset),
|
||||
"ignore",
|
||||
).encode("utf8", "replace")
|
||||
|
||||
if part.get_content_type() == "text/html":
|
||||
LOG.debug("Email got text/html")
|
||||
html = str(
|
||||
part.get_payload(decode=True),
|
||||
str(charset),
|
||||
"ignore",
|
||||
).encode("utf8", "replace")
|
||||
|
||||
if text is not None:
|
||||
# strip removes white space fore and aft of string
|
||||
body = text.strip()
|
||||
else:
|
||||
body = html.strip()
|
||||
else: # message is not multipart
|
||||
# email.uscc.net sends no charset, blows up unicode function below
|
||||
LOG.debug("Email is not multipart")
|
||||
if msg.get_content_charset() is None:
|
||||
text = str(msg.get_payload(decode=True), "US-ASCII", "ignore").encode(
|
||||
"utf8",
|
||||
"replace",
|
||||
)
|
||||
else:
|
||||
text = str(
|
||||
msg.get_payload(decode=True),
|
||||
msg.get_content_charset(),
|
||||
"ignore",
|
||||
).encode("utf8", "replace")
|
||||
body = text.strip()
|
||||
|
||||
# FIXED: UnicodeDecodeError: 'ascii' codec can't decode byte 0xf0
|
||||
# in position 6: ordinal not in range(128)
|
||||
# decode with errors='ignore'. be sure to encode it before we return
|
||||
# it below, also with errors='ignore'
|
||||
try:
|
||||
body = body.decode(errors="ignore")
|
||||
except Exception:
|
||||
LOG.exception("Unicode decode failure")
|
||||
LOG.error(f"Unidoce decode failed: {str(body)}")
|
||||
body = "Unreadable unicode msg"
|
||||
# strip all html tags
|
||||
body = re.sub("<[^<]+?>", "", body)
|
||||
# strip CR/LF, make it one line, .rstrip fails at this
|
||||
body = body.replace("\n", " ").replace("\r", " ")
|
||||
# ascii might be out of range, so encode it, removing any error characters
|
||||
body = body.encode(errors="ignore")
|
||||
return body, from_addr
|
||||
|
||||
|
||||
# end parse_email
|
||||
|
||||
|
||||
@trace.trace
|
||||
def send_email(to_addr, content):
|
||||
shortcuts = _build_shortcuts_dict()
|
||||
email_address = get_email_from_shortcut(to_addr)
|
||||
LOG.info("Sending Email_________________")
|
||||
|
||||
if to_addr in shortcuts:
|
||||
LOG.info(f"To : {to_addr}")
|
||||
to_addr = email_address
|
||||
LOG.info(f" ({to_addr})")
|
||||
subject = CONF.email_plugin.callsign
|
||||
# content = content + "\n\n(NOTE: reply with one line)"
|
||||
LOG.info(f"Subject : {subject}")
|
||||
LOG.info(f"Body : {content}")
|
||||
|
||||
# check email more often since there's activity right now
|
||||
EmailInfo().delay = 60
|
||||
|
||||
msg = MIMEText(content)
|
||||
msg["Subject"] = subject
|
||||
msg["From"] = CONF.email_plugin.smtp_login
|
||||
msg["To"] = to_addr
|
||||
server = _smtp_connect()
|
||||
if server:
|
||||
try:
|
||||
server.sendmail(
|
||||
CONF.email_plugin.smtp_login,
|
||||
[to_addr],
|
||||
msg.as_string(),
|
||||
)
|
||||
EmailStats().tx_inc()
|
||||
except Exception:
|
||||
LOG.exception("Sendmail Error!!!!")
|
||||
server.quit()
|
||||
return -1
|
||||
server.quit()
|
||||
return 0
|
||||
|
||||
|
||||
@trace.trace
|
||||
def resend_email(count, fromcall):
|
||||
date = datetime.datetime.now()
|
||||
month = date.strftime("%B")[:3] # Nov, Mar, Apr
|
||||
day = date.day
|
||||
year = date.year
|
||||
today = f"{day}-{month}-{year}"
|
||||
|
||||
shortcuts = _build_shortcuts_dict()
|
||||
# swap key/value
|
||||
shortcuts_inverted = {v: k for k, v in shortcuts.items()}
|
||||
|
||||
try:
|
||||
server = _imap_connect()
|
||||
except Exception:
|
||||
LOG.exception("Failed to Connect to IMAP. Cannot resend email ")
|
||||
return
|
||||
|
||||
try:
|
||||
messages = server.search(["SINCE", today])
|
||||
except Exception:
|
||||
LOG.exception("Couldn't search for emails in resend_email ")
|
||||
return
|
||||
|
||||
# LOG.debug("%d messages received today" % len(messages))
|
||||
|
||||
msgexists = False
|
||||
|
||||
messages.sort(reverse=True)
|
||||
del messages[int(count) :] # only the latest "count" messages
|
||||
for message in messages:
|
||||
try:
|
||||
parts = server.fetch(message, ["ENVELOPE"]).items()
|
||||
except Exception:
|
||||
LOG.exception("Couldn't fetch email parts in resend_email")
|
||||
continue
|
||||
|
||||
for msgid, data in list(parts):
|
||||
# one at a time, otherwise order is random
|
||||
(body, from_addr) = parse_email(msgid, data, server)
|
||||
# unset seen flag, will stay bold in email client
|
||||
try:
|
||||
server.remove_flags(msgid, [imapclient.SEEN])
|
||||
except Exception:
|
||||
LOG.exception("Failed to remove SEEN flag in resend_email")
|
||||
|
||||
if from_addr in shortcuts_inverted:
|
||||
# reverse lookup of a shortcut
|
||||
from_addr = shortcuts_inverted[from_addr]
|
||||
# asterisk indicates a resend
|
||||
reply = "-" + from_addr + " * " + body.decode(errors="ignore")
|
||||
tx.send(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=fromcall,
|
||||
message_text=reply,
|
||||
),
|
||||
)
|
||||
msgexists = True
|
||||
|
||||
if msgexists is not True:
|
||||
stm = time.localtime()
|
||||
h = stm.tm_hour
|
||||
m = stm.tm_min
|
||||
s = stm.tm_sec
|
||||
# append time as a kind of serial number to prevent FT1XDR from
|
||||
# thinking this is a duplicate message.
|
||||
# The FT1XDR pretty much ignores the aprs message number in this
|
||||
# regard. The FTM400 gets it right.
|
||||
reply = "No new msg {}:{}:{}".format(
|
||||
str(h).zfill(2),
|
||||
str(m).zfill(2),
|
||||
str(s).zfill(2),
|
||||
)
|
||||
tx.send(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=fromcall,
|
||||
message_text=reply,
|
||||
),
|
||||
)
|
||||
|
||||
# check email more often since we're resending one now
|
||||
EmailInfo().delay = 60
|
||||
|
||||
server.logout()
|
||||
# end resend_email()
|
||||
|
||||
|
||||
class APRSDEmailThread(threads.APRSDThread):
|
||||
def __init__(self):
|
||||
super().__init__("EmailThread")
|
||||
self.past = datetime.datetime.now()
|
||||
|
||||
def loop(self):
|
||||
time.sleep(5)
|
||||
EmailStats().email_thread_update()
|
||||
# always sleep for 5 seconds and see if we need to check email
|
||||
# This allows CTRL-C to stop the execution of this loop sooner
|
||||
# than check_email_delay time
|
||||
now = datetime.datetime.now()
|
||||
if now - self.past > datetime.timedelta(seconds=EmailInfo().delay):
|
||||
# It's time to check email
|
||||
|
||||
# slowly increase delay every iteration, max out at 300 seconds
|
||||
# any send/receive/resend activity will reset this to 60 seconds
|
||||
if EmailInfo().delay < 300:
|
||||
EmailInfo().delay += 10
|
||||
LOG.debug(
|
||||
f"check_email_delay is {EmailInfo().delay} seconds ",
|
||||
)
|
||||
|
||||
shortcuts = _build_shortcuts_dict()
|
||||
# swap key/value
|
||||
shortcuts_inverted = {v: k for k, v in shortcuts.items()}
|
||||
|
||||
date = datetime.datetime.now()
|
||||
month = date.strftime("%B")[:3] # Nov, Mar, Apr
|
||||
day = date.day
|
||||
year = date.year
|
||||
today = f"{day}-{month}-{year}"
|
||||
|
||||
try:
|
||||
server = _imap_connect()
|
||||
except Exception:
|
||||
LOG.exception("IMAP Failed to connect")
|
||||
return True
|
||||
|
||||
try:
|
||||
messages = server.search(["SINCE", today])
|
||||
except Exception:
|
||||
LOG.exception("IMAP failed to search for messages since today.")
|
||||
return True
|
||||
LOG.debug(f"{len(messages)} messages received today")
|
||||
|
||||
try:
|
||||
_msgs = server.fetch(messages, ["ENVELOPE"])
|
||||
except Exception:
|
||||
LOG.exception("IMAP failed to fetch/flag messages: ")
|
||||
return True
|
||||
|
||||
for msgid, data in _msgs.items():
|
||||
envelope = data[b"ENVELOPE"]
|
||||
LOG.debug(
|
||||
'ID:%d "%s" (%s)'
|
||||
% (msgid, envelope.subject.decode(), envelope.date),
|
||||
)
|
||||
f = re.search(
|
||||
r"'([[A-a][0-9]_-]+@[[A-a][0-9]_-\.]+)",
|
||||
str(envelope.from_[0]),
|
||||
)
|
||||
if f is not None:
|
||||
from_addr = f.group(1)
|
||||
else:
|
||||
from_addr = "noaddr"
|
||||
|
||||
# LOG.debug("Message flags/tags: " +
|
||||
# str(server.get_flags(msgid)[msgid]))
|
||||
# if "APRS" not in server.get_flags(msgid)[msgid]:
|
||||
# in python3, imap tags are unicode. in py2 they're strings.
|
||||
# so .decode them to handle both
|
||||
try:
|
||||
taglist = [
|
||||
x.decode(errors="ignore")
|
||||
for x in server.get_flags(msgid)[msgid]
|
||||
]
|
||||
except Exception:
|
||||
LOG.error("Failed to get flags.")
|
||||
break
|
||||
|
||||
if "APRS" not in taglist:
|
||||
# if msg not flagged as sent via aprs
|
||||
try:
|
||||
server.fetch([msgid], ["RFC822"])
|
||||
except Exception:
|
||||
LOG.exception("Failed single server fetch for RFC822")
|
||||
break
|
||||
|
||||
(body, from_addr) = parse_email(msgid, data, server)
|
||||
# unset seen flag, will stay bold in email client
|
||||
try:
|
||||
server.remove_flags(msgid, [imapclient.SEEN])
|
||||
except Exception:
|
||||
LOG.exception("Failed to remove flags SEEN")
|
||||
# Not much we can do here, so lets try and
|
||||
# send the aprs message anyway
|
||||
|
||||
if from_addr in shortcuts_inverted:
|
||||
# reverse lookup of a shortcut
|
||||
from_addr = shortcuts_inverted[from_addr]
|
||||
|
||||
reply = "-" + from_addr + " " + body.decode(errors="ignore")
|
||||
# Send the message to the registered user in the
|
||||
# config ham.callsign
|
||||
tx.send(
|
||||
packets.MessagePacket(
|
||||
from_call=CONF.callsign,
|
||||
to_call=CONF.email_plugin.callsign,
|
||||
message_text=reply,
|
||||
),
|
||||
)
|
||||
# flag message as sent via aprs
|
||||
try:
|
||||
server.add_flags(msgid, ["APRS"])
|
||||
# unset seen flag, will stay bold in email client
|
||||
except Exception:
|
||||
LOG.exception("Couldn't add APRS flag to email")
|
||||
|
||||
try:
|
||||
server.remove_flags(msgid, [imapclient.SEEN])
|
||||
except Exception:
|
||||
LOG.exception("Couldn't remove seen flag from email")
|
||||
|
||||
# check email more often since we just received an email
|
||||
EmailInfo().delay = 60
|
||||
|
||||
# reset clock
|
||||
LOG.debug("Done looping over Server.fetch, log out.")
|
||||
self.past = datetime.datetime.now()
|
||||
try:
|
||||
server.logout()
|
||||
except Exception:
|
||||
LOG.exception("IMAP failed to logout: ")
|
||||
return True
|
||||
else:
|
||||
# We haven't hit the email delay yet.
|
||||
# LOG.debug("Delta({}) < {}".format(now - past, check_email_delay))
|
||||
return True
|
||||
|
||||
return True
|
@ -1,181 +0,0 @@
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
|
||||
from geopy.geocoders import (
|
||||
ArcGIS, AzureMaps, Baidu, Bing, GoogleV3, HereV7, Nominatim, OpenCage,
|
||||
TomTom, What3WordsV3, Woosmap,
|
||||
)
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import packets, plugin, plugin_utils
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class UsLocation:
|
||||
raw = {}
|
||||
|
||||
def __init__(self, info):
|
||||
self.info = info
|
||||
|
||||
def __str__(self):
|
||||
return self.info
|
||||
|
||||
|
||||
class USGov:
|
||||
"""US Government geocoder that uses the geopy API.
|
||||
|
||||
This is a dummy class the implements the geopy reverse API,
|
||||
so the factory can return an object that conforms to the API.
|
||||
"""
|
||||
def reverse(self, coordinates):
|
||||
"""Reverse geocode a coordinate."""
|
||||
LOG.info(f"USGov reverse geocode {coordinates}")
|
||||
coords = coordinates.split(",")
|
||||
lat = float(coords[0])
|
||||
lon = float(coords[1])
|
||||
result = plugin_utils.get_weather_gov_for_gps(lat, lon)
|
||||
# LOG.info(f"WEATHER: {result}")
|
||||
# LOG.info(f"area description {result['location']['areaDescription']}")
|
||||
if "location" in result:
|
||||
loc = UsLocation(result["location"]["areaDescription"])
|
||||
else:
|
||||
loc = UsLocation("Unknown Location")
|
||||
|
||||
LOG.info(f"USGov reverse geocode LOC {loc}")
|
||||
return loc
|
||||
|
||||
|
||||
def geopy_factory():
|
||||
"""Factory function for geopy geocoders."""
|
||||
geocoder = CONF.location_plugin.geopy_geocoder
|
||||
LOG.info(f"Using geocoder: {geocoder}")
|
||||
user_agent = CONF.location_plugin.user_agent
|
||||
LOG.info(f"Using user_agent: {user_agent}")
|
||||
|
||||
if geocoder == "Nominatim":
|
||||
return Nominatim(user_agent=user_agent)
|
||||
elif geocoder == "USGov":
|
||||
return USGov()
|
||||
elif geocoder == "ArcGIS":
|
||||
return ArcGIS(
|
||||
username=CONF.location_plugin.arcgis_username,
|
||||
password=CONF.location_plugin.arcgis_password,
|
||||
user_agent=user_agent,
|
||||
)
|
||||
elif geocoder == "AzureMaps":
|
||||
return AzureMaps(
|
||||
user_agent=user_agent,
|
||||
subscription_key=CONF.location_plugin.azuremaps_subscription_key,
|
||||
)
|
||||
elif geocoder == "Baidu":
|
||||
return Baidu(user_agent=user_agent, api_key=CONF.location_plugin.baidu_api_key)
|
||||
elif geocoder == "Bing":
|
||||
return Bing(user_agent=user_agent, api_key=CONF.location_plugin.bing_api_key)
|
||||
elif geocoder == "GoogleV3":
|
||||
return GoogleV3(user_agent=user_agent, api_key=CONF.location_plugin.google_api_key)
|
||||
elif geocoder == "HERE":
|
||||
return HereV7(user_agent=user_agent, api_key=CONF.location_plugin.here_api_key)
|
||||
elif geocoder == "OpenCage":
|
||||
return OpenCage(user_agent=user_agent, api_key=CONF.location_plugin.opencage_api_key)
|
||||
elif geocoder == "TomTom":
|
||||
return TomTom(user_agent=user_agent, api_key=CONF.location_plugin.tomtom_api_key)
|
||||
elif geocoder == "What3Words":
|
||||
return What3WordsV3(user_agent=user_agent, api_key=CONF.location_plugin.what3words_api_key)
|
||||
elif geocoder == "Woosmap":
|
||||
return Woosmap(user_agent=user_agent, api_key=CONF.location_plugin.woosmap_api_key)
|
||||
else:
|
||||
raise ValueError(f"Unknown geocoder: {geocoder}")
|
||||
|
||||
|
||||
class LocationPlugin(plugin.APRSDRegexCommandPluginBase, plugin.APRSFIKEYMixin):
|
||||
"""Location!"""
|
||||
|
||||
command_regex = r"^([l]|[l]\s|location)"
|
||||
command_name = "location"
|
||||
short_description = "Where in the world is a CALLSIGN's last GPS beacon?"
|
||||
|
||||
def setup(self):
|
||||
self.ensure_aprs_fi_key()
|
||||
|
||||
@trace.trace
|
||||
def process(self, packet: packets.MessagePacket):
|
||||
LOG.info("Location Plugin")
|
||||
fromcall = packet.from_call
|
||||
message = packet.get("message_text", None)
|
||||
|
||||
api_key = CONF.aprs_fi.apiKey
|
||||
|
||||
# optional second argument is a callsign to search
|
||||
a = re.search(r"^.*\s+(.*)", message)
|
||||
if a is not None:
|
||||
searchcall = a.group(1)
|
||||
searchcall = searchcall.upper()
|
||||
else:
|
||||
# if no second argument, search for calling station
|
||||
searchcall = fromcall
|
||||
|
||||
try:
|
||||
aprs_data = plugin_utils.get_aprs_fi(api_key, searchcall)
|
||||
except Exception as ex:
|
||||
LOG.error(f"Failed to fetch aprs.fi '{ex}'")
|
||||
return "Failed to fetch aprs.fi location"
|
||||
|
||||
LOG.debug(f"LocationPlugin: aprs_data = {aprs_data}")
|
||||
if not len(aprs_data["entries"]):
|
||||
LOG.error("Didn't get any entries from aprs.fi")
|
||||
return "Failed to fetch aprs.fi location"
|
||||
|
||||
lat = float(aprs_data["entries"][0]["lat"])
|
||||
lon = float(aprs_data["entries"][0]["lng"])
|
||||
|
||||
# Get some information about their location
|
||||
try:
|
||||
tic = time.perf_counter()
|
||||
geolocator = geopy_factory()
|
||||
LOG.info(f"Using GEOLOCATOR: {geolocator}")
|
||||
coordinates = f"{lat:0.6f}, {lon:0.6f}"
|
||||
location = geolocator.reverse(coordinates)
|
||||
address = location.raw.get("address")
|
||||
LOG.debug(f"GEOLOCATOR address: {address}")
|
||||
toc = time.perf_counter()
|
||||
if address:
|
||||
LOG.info(f"Geopy address {address} took {toc - tic:0.4f}")
|
||||
if address.get("country_code") == "us":
|
||||
area_info = f"{address.get('county')}, {address.get('state')}"
|
||||
else:
|
||||
# what to do for address for non US?
|
||||
area_info = f"{address.get('country'), 'Unknown'}"
|
||||
else:
|
||||
area_info = str(location)
|
||||
except Exception as ex:
|
||||
LOG.error(ex)
|
||||
LOG.error(f"Failed to fetch Geopy address {ex}")
|
||||
area_info = "Unknown Location"
|
||||
|
||||
try: # altitude not always provided
|
||||
alt = float(aprs_data["entries"][0]["altitude"])
|
||||
except Exception:
|
||||
alt = 0
|
||||
altfeet = int(alt * 3.28084)
|
||||
aprs_lasttime_seconds = aprs_data["entries"][0]["lasttime"]
|
||||
# aprs_lasttime_seconds = aprs_lasttime_seconds.encode(
|
||||
# "ascii", errors="ignore"
|
||||
# ) # unicode to ascii
|
||||
delta_seconds = time.time() - int(aprs_lasttime_seconds)
|
||||
delta_hours = delta_seconds / 60 / 60
|
||||
|
||||
reply = "{}: {} {}' {},{} {}h ago".format(
|
||||
searchcall,
|
||||
area_info,
|
||||
str(altfeet),
|
||||
f"{lat:0.2f}",
|
||||
f"{lon:0.2f}",
|
||||
str("%.1f" % round(delta_hours, 1)),
|
||||
).rstrip()
|
||||
|
||||
return reply
|
@ -4,7 +4,6 @@ from oslo_config import cfg

from aprsd import packets, plugin


CONF = cfg.CONF
LOG = logging.getLogger("APRSD")

@ -43,9 +42,7 @@ class NotifySeenPlugin(plugin.APRSDWatchListPluginBase):
        pkt = packets.MessagePacket(
            from_call=CONF.callsign,
            to_call=notify_callsign,
            message_text=(
                f"{fromcall} was just seen by type:'{packet_type}'"
            ),
            message_text=(f"{fromcall} was just seen by type:'{packet_type}'"),
            allow_delay=False,
        )
        pkt.allow_delay = False
@ -2,13 +2,12 @@ import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd import plugin, plugin_utils
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
@ -205,8 +204,9 @@ class OWMWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
|
||||
|
||||
def help(self):
|
||||
_help = [
|
||||
"openweathermap: Send {} to get weather "
|
||||
"from your location".format(self.command_regex),
|
||||
"openweathermap: Send {} to get weather " "from your location".format(
|
||||
self.command_regex
|
||||
),
|
||||
"openweathermap: Send {} <callsign> to get "
|
||||
"weather from <callsign>".format(self.command_regex),
|
||||
]
|
||||
@ -327,10 +327,12 @@ class AVWXWeatherPlugin(plugin.APRSDRegexCommandPluginBase):
|
||||
|
||||
def help(self):
|
||||
_help = [
|
||||
"avwxweather: Send {} to get weather "
|
||||
"from your location".format(self.command_regex),
|
||||
"avwxweather: Send {} <callsign> to get "
|
||||
"weather from <callsign>".format(self.command_regex),
|
||||
"avwxweather: Send {} to get weather " "from your location".format(
|
||||
self.command_regex
|
||||
),
|
||||
"avwxweather: Send {} <callsign> to get " "weather from <callsign>".format(
|
||||
self.command_regex
|
||||
),
|
||||
]
|
||||
return _help
|
||||
|
||||
|
@ -3,13 +3,13 @@ from typing import Callable, Protocol, runtime_checkable

from aprsd.utils import singleton

LOG = logging.getLogger("APRSD")


@runtime_checkable
class StatsProducer(Protocol):
    """The StatsProducer protocol is used to define the interface for collecting stats."""

    def stats(self, serializable=False) -> dict:
        """provide stats in a dictionary format."""
        ...
@ -18,6 +18,7 @@ class StatsProducer(Protocol):
@singleton
class Collector:
    """The Collector class is used to collect stats from multiple StatsProducer instances."""

    def __init__(self):
        self.producers: list[Callable] = []

@ -26,7 +27,9 @@
        for name in self.producers:
            cls = name()
            try:
                stats[cls.__class__.__name__] = cls.stats(serializable=serializable).copy()
                stats[cls.__class__.__name__] = cls.stats(
                    serializable=serializable
                ).copy()
            except Exception as e:
                LOG.error(f"Error in producer {name} (stats): {e}")
        return stats
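
For context only (not part of this changeset): a minimal sketch of how a producer plugs into this Collector. It follows the StatsProducer protocol above and the collector.Collector().register_producer(...) call that EmailPlugin.setup() makes elsewhere in this diff; the DemoStats class and the name of the collecting method (collect()) are illustrative assumptions, not code from the PR.

from aprsd.stats import collector


class DemoStats:
    """Hypothetical StatsProducer: returns a static dictionary of counters."""

    def stats(self, serializable=False) -> dict:
        return {"demo_counter": 42}


# Register the producer class; the Collector instantiates it and calls
# stats() on each collection pass, guarding each producer with try/except.
collector.Collector().register_producer(DemoStats)

# Assumed collection entry point (named collect() only in this sketch):
# all_stats = collector.Collector().collect(serializable=True)
# all_stats would then include {"DemoStats": {"demo_counter": 42}, ...}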
|
||||
|
@ -4,8 +4,9 @@ import queue
# aprsd.threads
from .aprsd import APRSDThread, APRSDThreadList  # noqa: F401
from .rx import (  # noqa: F401
    APRSDDupeRXThread, APRSDProcessPacketThread, APRSDRXThread,
    APRSDDupeRXThread,
    APRSDProcessPacketThread,
    APRSDRXThread,
)


packet_queue = queue.Queue(maxsize=20)
@ -7,7 +7,6 @@ from typing import List
|
||||
|
||||
import wrapt
|
||||
|
||||
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
@ -25,7 +24,7 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
|
||||
self._last_loop = datetime.datetime.now()
|
||||
|
||||
def _should_quit(self):
|
||||
""" see if we have a quit message from the global queue."""
|
||||
"""see if we have a quit message from the global queue."""
|
||||
if self.thread_stop:
|
||||
return True
|
||||
|
||||
@ -51,7 +50,9 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
|
||||
"""Add code to subclass to do any cleanup"""
|
||||
|
||||
def __str__(self):
|
||||
out = f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
|
||||
out = (
|
||||
f"Thread <{self.__class__.__name__}({self.name}) Alive? {self.is_alive()}>"
|
||||
)
|
||||
return out
|
||||
|
||||
def loop_age(self):
|
||||
@ -124,7 +125,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Stopping Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.stop()
|
||||
|
||||
@wrapt.synchronized
|
||||
@ -133,7 +134,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Pausing Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.pause()
|
||||
|
||||
@wrapt.synchronized
|
||||
@ -142,7 +143,7 @@ class APRSDThreadList:
|
||||
for th in self.threads_list:
|
||||
LOG.info(f"Resuming Thread {th.name}")
|
||||
if hasattr(th, "packet"):
|
||||
LOG.info(F"{th.name} packet {th.packet}")
|
||||
LOG.info(f"{th.name} packet {th.packet}")
|
||||
th.unpause()
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
@ -153,7 +154,11 @@ class APRSDThreadList:
|
||||
alive = thread.is_alive()
|
||||
age = thread.loop_age()
|
||||
key = thread.__class__.__name__
|
||||
info[key] = {"alive": True if alive else False, "age": age, "name": thread.name}
|
||||
info[key] = {
|
||||
"alive": True if alive else False,
|
||||
"age": age,
|
||||
"name": thread.name,
|
||||
}
|
||||
return info
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
|
@ -5,14 +5,12 @@ import tracemalloc
|
||||
|
||||
from loguru import logger
|
||||
from oslo_config import cfg
|
||||
import timeago
|
||||
|
||||
from aprsd import packets, utils
|
||||
from aprsd.client import client_factory
|
||||
from aprsd.log import log as aprsd_log
|
||||
from aprsd.stats import collector
|
||||
from aprsd.threads import APRSDThread, APRSDThreadList
|
||||
|
||||
from aprsd.utils import keepalive_collector
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -36,18 +34,14 @@ class KeepAliveThread(APRSDThread):
|
||||
thread_list = APRSDThreadList()
|
||||
now = datetime.datetime.now()
|
||||
|
||||
if "EmailStats" in stats_json:
|
||||
email_stats = stats_json["EmailStats"]
|
||||
if email_stats.get("last_check_time"):
|
||||
email_thread_time = utils.strfdelta(now - email_stats["last_check_time"])
|
||||
else:
|
||||
email_thread_time = "N/A"
|
||||
else:
|
||||
email_thread_time = "N/A"
|
||||
|
||||
if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
|
||||
if (
|
||||
"APRSClientStats" in stats_json
|
||||
and stats_json["APRSClientStats"].get("transport") == "aprsis"
|
||||
):
|
||||
if stats_json["APRSClientStats"].get("server_keepalive"):
|
||||
last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
|
||||
last_msg_time = utils.strfdelta(
|
||||
now - stats_json["APRSClientStats"]["server_keepalive"]
|
||||
)
|
||||
else:
|
||||
last_msg_time = "N/A"
|
||||
else:
|
||||
@ -64,7 +58,7 @@ class KeepAliveThread(APRSDThread):
|
||||
|
||||
keepalive = (
|
||||
"{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
|
||||
"Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
|
||||
"Last:{} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
|
||||
).format(
|
||||
stats_json["APRSDStats"]["callsign"],
|
||||
stats_json["APRSDStats"]["uptime"],
|
||||
@ -74,7 +68,6 @@ class KeepAliveThread(APRSDThread):
|
||||
tx_msg,
|
||||
rx_msg,
|
||||
last_msg_time,
|
||||
email_thread_time,
|
||||
stats_json["APRSDStats"]["memory_current_str"],
|
||||
stats_json["APRSDStats"]["memory_peak_str"],
|
||||
len(thread_list),
|
||||
@ -97,35 +90,11 @@ class KeepAliveThread(APRSDThread):
|
||||
LOGU.opt(colors=True).info(thread_msg)
|
||||
# LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
|
||||
|
||||
# check the APRS connection
|
||||
cl = client_factory.create()
|
||||
cl_stats = cl.stats()
|
||||
ka = cl_stats.get("connection_keepalive", None)
|
||||
if ka:
|
||||
keepalive = timeago.format(ka)
|
||||
else:
|
||||
keepalive = "N/A"
|
||||
LOGU.opt(colors=True).info(f"<green>Client keepalive {keepalive}</green>")
|
||||
# Reset the connection if it's dead and this isn't our
|
||||
# First time through the loop.
|
||||
# The first time through the loop can happen at startup where
|
||||
# The keepalive thread starts before the client has a chance
|
||||
# to make its connection the first time.
|
||||
if not cl.is_alive() and self.cntr > 0:
|
||||
LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
|
||||
client_factory.create().reset()
|
||||
# else:
|
||||
# # See if we should reset the aprs-is client
|
||||
# # Due to losing a keepalive from them
|
||||
# delta_dict = utils.parse_delta_str(last_msg_time)
|
||||
# delta = datetime.timedelta(**delta_dict)
|
||||
#
|
||||
# if delta > self.max_delta:
|
||||
# # We haven't gotten a keepalive from aprs-is in a while
|
||||
# # reset the connection.a
|
||||
# if not client.KISSClient.is_enabled():
|
||||
# LOG.warning(f"Resetting connection to APRS-IS {delta}")
|
||||
# client.factory.create().reset()
|
||||
# Go through the registered keepalive collectors
|
||||
# and check them as well as call log.
|
||||
collect = keepalive_collector.KeepAliveCollector()
|
||||
collect.check()
|
||||
collect.log()
|
||||
|
||||
# Check version every day
|
||||
delta = now - self.checker_time
|
@ -1,121 +0,0 @@
|
||||
import datetime
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
import wrapt
|
||||
|
||||
from aprsd import threads
|
||||
from aprsd.log import log
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
def send_log_entries(force=False):
|
||||
"""Send all of the log entries to the web interface."""
|
||||
if CONF.admin.web_enabled:
|
||||
if force or LogEntries().is_purge_ready():
|
||||
entries = LogEntries().get_all_and_purge()
|
||||
if entries:
|
||||
try:
|
||||
requests.post(
|
||||
f"http://{CONF.admin.web_ip}:{CONF.admin.web_port}/log_entries",
|
||||
json=entries,
|
||||
auth=(CONF.admin.user, CONF.admin.password),
|
||||
)
|
||||
except Exception:
|
||||
LOG.warning(f"Failed to send log entries. len={len(entries)}")
|
||||
|
||||
|
||||
class LogEntries:
|
||||
entries = []
|
||||
lock = threading.Lock()
|
||||
_instance = None
|
||||
last_purge = datetime.datetime.now()
|
||||
max_delta = datetime.timedelta(
|
||||
hours=0.0, minutes=0, seconds=2,
|
||||
)
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
if cls._instance is None:
|
||||
cls._instance = super().__new__(cls)
|
||||
return cls._instance
|
||||
|
||||
def stats(self) -> dict:
|
||||
return {
|
||||
"log_entries": self.entries,
|
||||
}
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def add(self, entry):
|
||||
self.entries.append(entry)
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def get_all_and_purge(self):
|
||||
entries = self.entries.copy()
|
||||
self.entries = []
|
||||
self.last_purge = datetime.datetime.now()
|
||||
return entries
|
||||
|
||||
def is_purge_ready(self):
|
||||
now = datetime.datetime.now()
|
||||
if (
|
||||
now - self.last_purge > self.max_delta
|
||||
and len(self.entries) > 1
|
||||
):
|
||||
return True
|
||||
return False
|
||||
|
||||
@wrapt.synchronized(lock)
|
||||
def __len__(self):
|
||||
return len(self.entries)
|
||||
|
||||
|
||||
class LogMonitorThread(threads.APRSDThread):
|
||||
|
||||
def __init__(self):
|
||||
super().__init__("LogMonitorThread")
|
||||
|
||||
def stop(self):
|
||||
send_log_entries(force=True)
|
||||
super().stop()
|
||||
|
||||
def loop(self):
|
||||
try:
|
||||
record = log.logging_queue.get(block=True, timeout=2)
|
||||
if isinstance(record, list):
|
||||
for item in record:
|
||||
entry = self.json_record(item)
|
||||
LogEntries().add(entry)
|
||||
else:
|
||||
entry = self.json_record(record)
|
||||
LogEntries().add(entry)
|
||||
except Exception:
|
||||
# Just ignore this
|
||||
pass
|
||||
|
||||
send_log_entries()
|
||||
return True
|
||||
|
||||
def json_record(self, record):
|
||||
entry = {}
|
||||
entry["filename"] = record.filename
|
||||
entry["funcName"] = record.funcName
|
||||
entry["levelname"] = record.levelname
|
||||
entry["lineno"] = record.lineno
|
||||
entry["module"] = record.module
|
||||
entry["name"] = record.name
|
||||
entry["pathname"] = record.pathname
|
||||
entry["process"] = record.process
|
||||
entry["processName"] = record.processName
|
||||
if hasattr(record, "stack_info"):
|
||||
entry["stack_info"] = record.stack_info
|
||||
else:
|
||||
entry["stack_info"] = None
|
||||
entry["thread"] = record.thread
|
||||
entry["threadName"] = record.threadName
|
||||
entry["message"] = record.getMessage()
|
||||
return entry
|
@ -1,19 +1,19 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
from oslo_config import cfg
|
||||
|
||||
import aprsd
|
||||
from aprsd import threads as aprsd_threads
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class APRSRegistryThread(aprsd_threads.APRSDThread):
|
||||
"""This sends service information to the configured APRS Registry."""
|
||||
|
||||
_loop_cnt: int = 1
|
||||
|
||||
def __init__(self):
|
||||
@ -41,7 +41,7 @@ class APRSRegistryThread(aprsd_threads.APRSDThread):
|
||||
"description": CONF.aprs_registry.description,
|
||||
"service_website": CONF.aprs_registry.service_website,
|
||||
"software": f"APRSD version {aprsd.__version__} "
|
||||
"https://github.com/craigerl/aprsd",
|
||||
"https://github.com/craigerl/aprsd",
|
||||
}
|
||||
try:
|
||||
requests.post(
|
||||
|
@ -13,7 +13,6 @@ from aprsd.packets import log as packet_log
|
||||
from aprsd.threads import APRSDThread, tx
|
||||
from aprsd.utils import trace
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
@ -53,7 +52,9 @@ class APRSDRXThread(APRSDThread):
|
||||
# kwargs. :(
|
||||
# https://github.com/rossengeorgiev/aprs-python/pull/56
|
||||
self._client.consumer(
|
||||
self._process_packet, raw=False, blocking=False,
|
||||
self._process_packet,
|
||||
raw=False,
|
||||
blocking=False,
|
||||
)
|
||||
except (
|
||||
aprslib.exceptions.ConnectionDrop,
|
||||
@ -138,7 +139,9 @@ class APRSDDupeRXThread(APRSDRXThread):
|
||||
elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
|
||||
# If the packet came in within N seconds of the
|
||||
# Last time seeing the packet, then we drop it as a dupe.
|
||||
LOG.warning(f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.")
|
||||
LOG.warning(
|
||||
f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping."
|
||||
)
|
||||
else:
|
||||
LOG.warning(
|
||||
f"Packet {packet.from_call}:{packet.msgNo} already tracked "
|
||||
@ -149,7 +152,7 @@ class APRSDDupeRXThread(APRSDRXThread):
|
||||
|
||||
|
||||
class APRSDPluginRXThread(APRSDDupeRXThread):
|
||||
""""Process received packets.
|
||||
""" "Process received packets.
|
||||
|
||||
For backwards compatibility, we keep the APRSDPluginRXThread.
|
||||
"""
|
||||
@ -249,7 +252,8 @@ class APRSDProcessPacketThread(APRSDThread):
|
||||
self.process_other_packet(packet, for_us=False)
|
||||
else:
|
||||
self.process_other_packet(
|
||||
packet, for_us=(to_call.lower() == our_call),
|
||||
packet,
|
||||
for_us=(to_call.lower() == our_call),
|
||||
)
|
||||
LOG.debug(f"Packet processing complete for pkt '{packet.key}'")
|
||||
return False
|
||||
@ -349,7 +353,6 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
|
||||
# If the message was for us and we didn't have a
|
||||
# response, then we send a usage statement.
|
||||
if to_call == CONF.callsign and not replied:
|
||||
|
||||
# Tailor the messages accordingly
|
||||
if CONF.load_help_plugin:
|
||||
LOG.warning("Sending help!")
|
||||
|
@ -2,20 +2,20 @@ import logging
|
||||
import threading
|
||||
import time
|
||||
|
||||
from oslo_config import cfg
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
|
||||
from aprsd.stats import collector
|
||||
from aprsd.threads import APRSDThread
|
||||
from aprsd.utils import objectstore
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
|
||||
|
||||
class StatsStore(objectstore.ObjectStoreMixin):
|
||||
"""Container to save the stats from the collector."""
|
||||
|
||||
lock = threading.Lock()
|
||||
data = {}
|
||||
|
||||
|
@ -2,20 +2,18 @@ import logging
|
||||
import threading
|
||||
import time
|
||||
|
||||
import wrapt
|
||||
from oslo_config import cfg
|
||||
from rush import quota, throttle
|
||||
from rush.contrib import decorator
|
||||
from rush.limiters import periodic
|
||||
from rush.stores import dictionary
|
||||
import wrapt
|
||||
|
||||
from aprsd import conf # noqa
|
||||
from aprsd import threads as aprsd_threads
|
||||
from aprsd.client import client_factory
|
||||
from aprsd.packets import collector, core
|
||||
from aprsd.packets import collector, core, tracker
|
||||
from aprsd.packets import log as packet_log
|
||||
from aprsd.packets import tracker
|
||||
|
||||
|
||||
CONF = cfg.CONF
|
||||
LOG = logging.getLogger("APRSD")
|
||||
@ -238,6 +236,7 @@ class BeaconSendThread(aprsd_threads.APRSDThread):
|
||||
|
||||
Settings are in the [DEFAULT] section of the config file.
|
||||
"""
|
||||
|
||||
_loop_cnt: int = 1
|
||||
|
||||
def __init__(self):
|
||||
|
@ -13,11 +13,11 @@ import update_checker
|
||||
import aprsd
|
||||
|
||||
from .fuzzyclock import fuzzy # noqa: F401
|
||||
|
||||
# Make these available by anyone importing
|
||||
# aprsd.utils
|
||||
from .ring_buffer import RingBuffer # noqa: F401
|
||||
|
||||
|
||||
if sys.version_info.major == 3 and sys.version_info.minor >= 3:
|
||||
from collections.abc import MutableMapping
|
||||
else:
|
||||
@ -26,11 +26,13 @@ else:
|
||||
|
||||
def singleton(cls):
|
||||
"""Make a class a Singleton class (only one instance)"""
|
||||
|
||||
@functools.wraps(cls)
|
||||
def wrapper_singleton(*args, **kwargs):
|
||||
if wrapper_singleton.instance is None:
|
||||
wrapper_singleton.instance = cls(*args, **kwargs)
|
||||
return wrapper_singleton.instance
|
||||
|
||||
wrapper_singleton.instance = None
|
||||
return wrapper_singleton
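
A quick illustration (not in the diff) of what this decorator provides, using a made-up ConfigCache class: every call to the decorated class returns the same shared instance, which is how Collector and EmailStats are used elsewhere in this changeset.

@singleton
class ConfigCache:
    """Hypothetical example class; any @singleton-decorated class behaves the same."""

    def __init__(self):
        self.values = {}


a = ConfigCache()
b = ConfigCache()
assert a is b  # both names refer to the single shared instance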
|
||||
|
||||
@ -170,7 +172,10 @@ def load_entry_points(group):
|
||||
try:
|
||||
ep.load()
|
||||
except Exception as e:
|
||||
print(f"Extension {ep.name} of group {group} failed to load with {e}", file=sys.stderr)
|
||||
print(
|
||||
f"Extension {ep.name} of group {group} failed to load with {e}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
print(traceback.format_exc(), file=sys.stderr)
|
||||
|
||||
|
||||
@ -200,8 +205,7 @@ def calculate_initial_compass_bearing(point_a, point_b):
|
||||
|
||||
x = math.sin(diff_long) * math.cos(lat2)
|
||||
y = math.cos(lat1) * math.sin(lat2) - (
|
||||
math.sin(lat1)
|
||||
* math.cos(lat2) * math.cos(diff_long)
|
||||
math.sin(lat1) * math.cos(lat2) * math.cos(diff_long)
|
||||
)
|
||||
|
||||
initial_bearing = math.atan2(x, y)
|
||||
@ -218,15 +222,43 @@ def calculate_initial_compass_bearing(point_a, point_b):
|
||||
def degrees_to_cardinal(bearing, full_string=False):
|
||||
if full_string:
|
||||
directions = [
|
||||
"North", "North-Northeast", "Northeast", "East-Northeast", "East", "East-Southeast",
|
||||
"Southeast", "South-Southeast", "South", "South-Southwest", "Southwest", "West-Southwest",
|
||||
"West", "West-Northwest", "Northwest", "North-Northwest", "North",
|
||||
"North",
|
||||
"North-Northeast",
|
||||
"Northeast",
|
||||
"East-Northeast",
|
||||
"East",
|
||||
"East-Southeast",
|
||||
"Southeast",
|
||||
"South-Southeast",
|
||||
"South",
|
||||
"South-Southwest",
|
||||
"Southwest",
|
||||
"West-Southwest",
|
||||
"West",
|
||||
"West-Northwest",
|
||||
"Northwest",
|
||||
"North-Northwest",
|
||||
"North",
|
||||
]
|
||||
else:
|
||||
directions = [
|
||||
"N", "NNE", "NE", "ENE", "E", "ESE",
|
||||
"SE", "SSE", "S", "SSW", "SW", "WSW",
|
||||
"W", "WNW", "NW", "NNW", "N",
|
||||
"N",
|
||||
"NNE",
|
||||
"NE",
|
||||
"ENE",
|
||||
"E",
|
||||
"ESE",
|
||||
"SE",
|
||||
"SSE",
|
||||
"S",
|
||||
"SSW",
|
||||
"SW",
|
||||
"WSW",
|
||||
"W",
|
||||
"WNW",
|
||||
"NW",
|
||||
"NNW",
|
||||
"N",
|
||||
]
|
||||
|
||||
cardinal = directions[round(bearing / 22.5)]
|
||||
|
@ -10,8 +10,13 @@ class EnhancedJSONEncoder(json.JSONEncoder):
|
||||
def default(self, obj):
|
||||
if isinstance(obj, datetime.datetime):
|
||||
args = (
|
||||
"year", "month", "day", "hour", "minute",
|
||||
"second", "microsecond",
|
||||
"year",
|
||||
"month",
|
||||
"day",
|
||||
"hour",
|
||||
"minute",
|
||||
"second",
|
||||
"microsecond",
|
||||
)
|
||||
return {
|
||||
"__type__": "datetime.datetime",
|
||||
@ -63,10 +68,10 @@ class SimpleJSONEncoder(json.JSONEncoder):
|
||||
|
||||
|
||||
class EnhancedJSONDecoder(json.JSONDecoder):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(
|
||||
*args, object_hook=self.object_hook,
|
||||
*args,
|
||||
object_hook=self.object_hook,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
55
aprsd/utils/keepalive_collector.py
Normal file
@ -0,0 +1,55 @@
import logging
from typing import Callable, Protocol, runtime_checkable

from aprsd.utils import singleton

LOG = logging.getLogger("APRSD")


@runtime_checkable
class KeepAliveProducer(Protocol):
    """The KeepAliveProducer protocol is used to define the interface for running Keepalive checks."""

    def keepalive_check(self) -> dict:
        """Check for keepalive."""
        ...

    def keepalive_log(self):
        """Log any keepalive information."""
        ...


@singleton
class KeepAliveCollector:
    """Collect and run keepalive checks from multiple KeepAliveProducer instances."""

    def __init__(self):
        self.producers: list[Callable] = []

    def check(self) -> None:
        """Do any keepalive checks."""
        for name in self.producers:
            cls = name()
            try:
                cls.keepalive_check()
            except Exception as e:
                LOG.error(f"Error in producer {name} (check): {e}")

    def log(self) -> None:
        """Log any relevant information during a KeepAlive check."""
        for name in self.producers:
            cls = name()
            try:
                cls.keepalive_log()
            except Exception as e:
                LOG.error(f"Error in producer {name} (log): {e}")

    def register(self, producer_name: Callable):
        if not isinstance(producer_name, KeepAliveProducer):
            raise TypeError(f"Producer {producer_name} is not a KeepAliveProducer")
        self.producers.append(producer_name)

    def unregister(self, producer_name: Callable):
        if not isinstance(producer_name, KeepAliveProducer):
            raise TypeError(f"Producer {producer_name} is not a KeepAliveProducer")
        self.producers.remove(producer_name)
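
As an aside (illustration only, not part of the new file): a minimal sketch of wiring a producer into KeepAliveCollector. The RadioLinkMonitor class is invented for this example; the collector, the KeepAliveProducer protocol, and the register()/check()/log() calls come from the file above and from the KeepAliveThread changes earlier in this diff.

import logging

from aprsd.utils import keepalive_collector

LOG = logging.getLogger("APRSD")


class RadioLinkMonitor:
    """Hypothetical producer satisfying the KeepAliveProducer protocol."""

    def keepalive_check(self) -> dict:
        # Reconnect or watchdog logic would live here.
        return {"link_ok": True}

    def keepalive_log(self):
        LOG.info("RadioLinkMonitor: link OK")


# Register the producer class; KeepAliveCollector().check() and .log(),
# driven by KeepAliveThread, instantiate it and call the two methods above.
keepalive_collector.KeepAliveCollector().register(RadioLinkMonitor)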
@ -5,7 +5,6 @@ import logging
|
||||
import time
|
||||
import types
|
||||
|
||||
|
||||
VALID_TRACE_FLAGS = {"method", "api"}
|
||||
TRACE_API = False
|
||||
TRACE_METHOD = False
|
||||
@ -27,7 +26,6 @@ def trace(*dec_args, **dec_kwargs):
|
||||
"""
|
||||
|
||||
def _decorator(f):
|
||||
|
||||
func_name = f.__qualname__
|
||||
func_file = "/".join(f.__code__.co_filename.split("/")[-4:])
|
||||
|
||||
|
@ -1,84 +0,0 @@
|
||||
body {
|
||||
background: #eeeeee;
|
||||
margin: 2em;
|
||||
text-align: center;
|
||||
font-family: system-ui, sans-serif;
|
||||
}
|
||||
|
||||
footer {
|
||||
padding: 2em;
|
||||
text-align: center;
|
||||
height: 10vh;
|
||||
}
|
||||
|
||||
.ui.segment {
|
||||
background: #eeeeee;
|
||||
}
|
||||
|
||||
#graphs {
|
||||
display: grid;
|
||||
width: 100%;
|
||||
height: 300px;
|
||||
grid-template-columns: 1fr 1fr;
|
||||
}
|
||||
#graphs_center {
|
||||
display: block;
|
||||
margin-top: 10px;
|
||||
margin-bottom: 10px;
|
||||
width: 100%;
|
||||
height: 300px;
|
||||
}
|
||||
#left {
|
||||
margin-right: 2px;
|
||||
height: 300px;
|
||||
}
|
||||
#right {
|
||||
height: 300px;
|
||||
}
|
||||
#center {
|
||||
height: 300px;
|
||||
}
|
||||
#packetsChart, #messageChart, #emailChart, #memChart {
|
||||
border: 1px solid #ccc;
|
||||
background: #ddd;
|
||||
}
|
||||
#stats {
|
||||
margin: auto;
|
||||
width: 80%;
|
||||
}
|
||||
#jsonstats {
|
||||
display: none;
|
||||
}
|
||||
#title {
|
||||
font-size: 4em;
|
||||
}
|
||||
#version{
|
||||
font-size: .5em;
|
||||
}
|
||||
#uptime, #aprsis {
|
||||
font-size: 1em;
|
||||
}
|
||||
#callsign {
|
||||
font-size: 1.4em;
|
||||
color: #00F;
|
||||
padding-top: 8px;
|
||||
margin:10px;
|
||||
}
|
||||
|
||||
#title_rx {
|
||||
background-color: darkseagreen;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
#title_tx {
|
||||
background-color: lightcoral;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.aprsd_1 {
|
||||
background-image: url(/static/images/aprs-symbols-16-0.png);
|
||||
background-repeat: no-repeat;
|
||||
background-position: -160px -48px;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
/* PrismJS 1.29.0
|
||||
https://prismjs.com/download.html#themes=prism-tomorrow&languages=markup+css+clike+javascript+json+json5+log&plugins=show-language+toolbar */
|
||||
code[class*=language-],pre[class*=language-]{color:#ccc;background:0 0;font-family:Consolas,Monaco,'Andale Mono','Ubuntu Mono',monospace;font-size:1em;text-align:left;white-space:pre;word-spacing:normal;word-break:normal;word-wrap:normal;line-height:1.5;-moz-tab-size:4;-o-tab-size:4;tab-size:4;-webkit-hyphens:none;-moz-hyphens:none;-ms-hyphens:none;hyphens:none}pre[class*=language-]{padding:1em;margin:.5em 0;overflow:auto}:not(pre)>code[class*=language-],pre[class*=language-]{background:#2d2d2d}:not(pre)>code[class*=language-]{padding:.1em;border-radius:.3em;white-space:normal}.token.block-comment,.token.cdata,.token.comment,.token.doctype,.token.prolog{color:#999}.token.punctuation{color:#ccc}.token.attr-name,.token.deleted,.token.namespace,.token.tag{color:#e2777a}.token.function-name{color:#6196cc}.token.boolean,.token.function,.token.number{color:#f08d49}.token.class-name,.token.constant,.token.property,.token.symbol{color:#f8c555}.token.atrule,.token.builtin,.token.important,.token.keyword,.token.selector{color:#cc99cd}.token.attr-value,.token.char,.token.regex,.token.string,.token.variable{color:#7ec699}.token.entity,.token.operator,.token.url{color:#67cdcc}.token.bold,.token.important{font-weight:700}.token.italic{font-style:italic}.token.entity{cursor:help}.token.inserted{color:green}
|
||||
div.code-toolbar{position:relative}div.code-toolbar>.toolbar{position:absolute;z-index:10;top:.3em;right:.2em;transition:opacity .3s ease-in-out;opacity:0}div.code-toolbar:hover>.toolbar{opacity:1}div.code-toolbar:focus-within>.toolbar{opacity:1}div.code-toolbar>.toolbar>.toolbar-item{display:inline-block}div.code-toolbar>.toolbar>.toolbar-item>a{cursor:pointer}div.code-toolbar>.toolbar>.toolbar-item>button{background:0 0;border:0;color:inherit;font:inherit;line-height:normal;overflow:visible;padding:0;-webkit-user-select:none;-moz-user-select:none;-ms-user-select:none}div.code-toolbar>.toolbar>.toolbar-item>a,div.code-toolbar>.toolbar>.toolbar-item>button,div.code-toolbar>.toolbar>.toolbar-item>span{color:#bbb;font-size:.8em;padding:0 .5em;background:#f5f2f0;background:rgba(224,224,224,.2);box-shadow:0 2px 0 0 rgba(0,0,0,.2);border-radius:.5em}div.code-toolbar>.toolbar>.toolbar-item>a:focus,div.code-toolbar>.toolbar>.toolbar-item>a:hover,div.code-toolbar>.toolbar>.toolbar-item>button:focus,div.code-toolbar>.toolbar>.toolbar-item>button:hover,div.code-toolbar>.toolbar>.toolbar-item>span:focus,div.code-toolbar>.toolbar>.toolbar-item>span:hover{color:inherit;text-decoration:none}
|
@ -1,35 +0,0 @@
|
||||
/* Style the tab */
|
||||
.tab {
|
||||
overflow: hidden;
|
||||
border: 1px solid #ccc;
|
||||
background-color: #f1f1f1;
|
||||
}
|
||||
|
||||
/* Style the buttons that are used to open the tab content */
|
||||
.tab button {
|
||||
background-color: inherit;
|
||||
float: left;
|
||||
border: none;
|
||||
outline: none;
|
||||
cursor: pointer;
|
||||
padding: 14px 16px;
|
||||
transition: 0.3s;
|
||||
}
|
||||
|
||||
/* Change background color of buttons on hover */
|
||||
.tab button:hover {
|
||||
background-color: #ddd;
|
||||
}
|
||||
|
||||
/* Create an active/current tablink class */
|
||||
.tab button.active {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
/* Style the tab content */
|
||||
.tabcontent {
|
||||
display: none;
|
||||
padding: 6px 12px;
|
||||
border: 1px solid #ccc;
|
||||
border-top: none;
|
||||
}
|
Before Width: | Height: | Size: 37 KiB |
Before Width: | Height: | Size: 52 KiB |
Before Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 52 KiB |
Before Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 40 KiB |
@ -1,235 +0,0 @@
|
||||
var packet_list = {};
|
||||
|
||||
window.chartColors = {
|
||||
red: 'rgb(255, 99, 132)',
|
||||
orange: 'rgb(255, 159, 64)',
|
||||
yellow: 'rgb(255, 205, 86)',
|
||||
green: 'rgb(26, 181, 77)',
|
||||
blue: 'rgb(54, 162, 235)',
|
||||
purple: 'rgb(153, 102, 255)',
|
||||
grey: 'rgb(201, 203, 207)',
|
||||
black: 'rgb(0, 0, 0)',
|
||||
lightcoral: 'rgb(240,128,128)',
|
||||
darkseagreen: 'rgb(143, 188,143)'
|
||||
|
||||
};
|
||||
|
||||
function size_dict(d){c=0; for (i in d) ++c; return c}
|
||||
|
||||
function start_charts() {
|
||||
Chart.scaleService.updateScaleDefaults('linear', {
|
||||
ticks: {
|
||||
min: 0
|
||||
}
|
||||
});
|
||||
|
||||
packets_chart = new Chart($("#packetsChart"), {
|
||||
label: 'APRS Packets',
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: [],
|
||||
datasets: [{
|
||||
label: 'Packets Sent',
|
||||
borderColor: window.chartColors.lightcoral,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Packets Recieved',
|
||||
borderColor: window.chartColors.darkseagreen,
|
||||
data: [],
|
||||
}]
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'APRS Packets',
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
type: 'timeseries',
|
||||
offset: true,
|
||||
ticks: {
|
||||
major: { enabled: true },
|
||||
fontStyle: context => context.tick.major ? 'bold' : undefined,
|
||||
source: 'data',
|
||||
maxRotation: 0,
|
||||
autoSkip: true,
|
||||
autoSkipPadding: 75,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
message_chart = new Chart($("#messageChart"), {
|
||||
label: 'Messages',
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: [],
|
||||
datasets: [{
|
||||
label: 'Messages Sent',
|
||||
borderColor: window.chartColors.lightcoral,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Messages Recieved',
|
||||
borderColor: window.chartColors.darkseagreen,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Ack Sent',
|
||||
borderColor: window.chartColors.purple,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Ack Recieved',
|
||||
borderColor: window.chartColors.black,
|
||||
data: [],
|
||||
}],
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'APRS Messages',
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
type: 'timeseries',
|
||||
offset: true,
|
||||
ticks: {
|
||||
major: { enabled: true },
|
||||
fontStyle: context => context.tick.major ? 'bold' : undefined,
|
||||
source: 'data',
|
||||
maxRotation: 0,
|
||||
autoSkip: true,
|
||||
autoSkipPadding: 75,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
email_chart = new Chart($("#emailChart"), {
|
||||
label: 'Email Messages',
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: [],
|
||||
datasets: [{
|
||||
label: 'Sent',
|
||||
borderColor: window.chartColors.lightcoral,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Recieved',
|
||||
borderColor: window.chartColors.darkseagreen,
|
||||
data: [],
|
||||
}],
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Email Messages',
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
type: 'timeseries',
|
||||
offset: true,
|
||||
ticks: {
|
||||
major: { enabled: true },
|
||||
fontStyle: context => context.tick.major ? 'bold' : undefined,
|
||||
source: 'data',
|
||||
maxRotation: 0,
|
||||
autoSkip: true,
|
||||
autoSkipPadding: 75,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
memory_chart = new Chart($("#memChart"), {
|
||||
label: 'Memory Usage',
|
||||
type: 'line',
|
||||
data: {
|
||||
labels: [],
|
||||
datasets: [{
|
||||
label: 'Peak Ram usage',
|
||||
borderColor: window.chartColors.red,
|
||||
data: [],
|
||||
},
|
||||
{
|
||||
label: 'Current Ram usage',
|
||||
borderColor: window.chartColors.blue,
|
||||
data: [],
|
||||
}],
|
||||
},
|
||||
options: {
|
||||
responsive: true,
|
||||
maintainAspectRatio: false,
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Memory Usage',
|
||||
},
|
||||
scales: {
|
||||
x: {
|
||||
type: 'timeseries',
|
||||
offset: true,
|
||||
ticks: {
|
||||
major: { enabled: true },
|
||||
fontStyle: context => context.tick.major ? 'bold' : undefined,
|
||||
source: 'data',
|
||||
maxRotation: 0,
|
||||
autoSkip: true,
|
||||
autoSkipPadding: 75,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function addData(chart, label, newdata) {
|
||||
chart.data.labels.push(label);
|
||||
chart.data.datasets.forEach((dataset) => {
|
||||
dataset.data.push(newdata);
|
||||
});
|
||||
chart.update();
|
||||
}
|
||||
|
||||
function updateDualData(chart, label, first, second) {
|
||||
chart.data.labels.push(label);
|
||||
chart.data.datasets[0].data.push(first);
|
||||
chart.data.datasets[1].data.push(second);
|
||||
chart.update();
|
||||
}
|
||||
function updateQuadData(chart, label, first, second, third, fourth) {
|
||||
chart.data.labels.push(label);
|
||||
chart.data.datasets[0].data.push(first);
|
||||
chart.data.datasets[1].data.push(second);
|
||||
chart.data.datasets[2].data.push(third);
|
||||
chart.data.datasets[3].data.push(fourth);
|
||||
chart.update();
|
||||
}
|
||||
|
||||
function update_stats( data ) {
|
||||
our_callsign = data["APRSDStats"]["callsign"];
|
||||
$("#version").text( data["APRSDStats"]["version"] );
|
||||
$("#aprs_connection").html( data["aprs_connection"] );
|
||||
$("#uptime").text( "uptime: " + data["APRSDStats"]["uptime"] );
|
||||
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
|
||||
$("#jsonstats").html(html_pretty);
|
||||
short_time = data["time"].split(/\s(.+)/)[1];
|
||||
packet_list = data["PacketList"]["packets"];
|
||||
updateDualData(packets_chart, short_time, data["PacketList"]["sent"], data["PacketList"]["received"]);
|
||||
updateQuadData(message_chart, short_time, packet_list["MessagePacket"]["tx"], packet_list["MessagePacket"]["rx"],
|
||||
packet_list["AckPacket"]["tx"], packet_list["AckPacket"]["rx"]);
|
||||
updateDualData(email_chart, short_time, data["EmailStats"]["sent"], data["EmailStats"]["recieved"]);
|
||||
updateDualData(memory_chart, short_time, data["APRSDStats"]["memory_peak"], data["APRSDStats"]["memory_current"]);
|
||||
}
|
@ -1,465 +0,0 @@
|
||||
var packet_list = {};
|
||||
|
||||
var tx_data = [];
|
||||
var rx_data = [];
|
||||
|
||||
var packet_types_data = {};
|
||||
|
||||
var mem_current = []
|
||||
var mem_peak = []
|
||||
|
||||
var thread_current = []
|
||||
|
||||
|
||||
function start_charts() {
|
||||
console.log("start_charts() called");
|
||||
// Initialize the echarts instance based on the prepared dom
|
||||
create_packets_chart();
|
||||
create_packets_types_chart();
|
||||
create_messages_chart();
|
||||
create_ack_chart();
|
||||
create_memory_chart();
|
||||
create_thread_chart();
|
||||
}
|
||||
|
||||
|
||||
function create_packets_chart() {
|
||||
// The packets totals TX/RX chart.
|
||||
pkt_c_canvas = document.getElementById('packetsChart');
|
||||
packets_chart = echarts.init(pkt_c_canvas);
|
||||
|
||||
// Specify the configuration items and data for the chart
|
||||
var option = {
|
||||
title: {
|
||||
text: 'APRS Packet totals'
|
||||
},
|
||||
legend: {},
|
||||
tooltip : {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show : true,
|
||||
feature : {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: true},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable : true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
series: [
|
||||
{
|
||||
name: 'tx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
color: 'red',
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'tx' // refer sensor 1 value
|
||||
}
|
||||
},{
|
||||
name: 'rx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'rx'
|
||||
}
|
||||
}]
|
||||
};
|
||||
|
||||
// Display the chart using the configuration items and data just specified.
|
||||
packets_chart.setOption(option);
|
||||
}
|
||||
|
||||
|
||||
function create_packets_types_chart() {
|
||||
// The packets types chart
|
||||
pkt_types_canvas = document.getElementById('packetTypesChart');
|
||||
packet_types_chart = echarts.init(pkt_types_canvas);
|
||||
|
||||
// The series and data are built and updated on the fly
|
||||
// as packets come in.
|
||||
var option = {
|
||||
title: {
|
||||
text: 'Packet Types'
|
||||
},
|
||||
legend: {},
|
||||
tooltip : {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show : true,
|
||||
feature : {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: true},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable : true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
}
|
||||
|
||||
packet_types_chart.setOption(option);
|
||||
}
|
||||
|
||||
|
||||
function create_messages_chart() {
|
||||
msg_c_canvas = document.getElementById('messagesChart');
|
||||
message_chart = echarts.init(msg_c_canvas);
|
||||
|
||||
// Specify the configuration items and data for the chart
|
||||
var option = {
|
||||
title: {
|
||||
text: 'Message Packets'
|
||||
},
|
||||
legend: {},
|
||||
tooltip: {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show: true,
|
||||
feature: {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: true},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable: true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
series: [
|
||||
{
|
||||
name: 'tx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
color: 'red',
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'tx' // refer sensor 1 value
|
||||
}
|
||||
},{
|
||||
name: 'rx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'rx'
|
||||
}
|
||||
}]
|
||||
};
|
||||
|
||||
// Display the chart using the configuration items and data just specified.
|
||||
message_chart.setOption(option);
|
||||
}
|
||||
|
||||
function create_ack_chart() {
|
||||
ack_canvas = document.getElementById('acksChart');
|
||||
ack_chart = echarts.init(ack_canvas);
|
||||
|
||||
// Specify the configuration items and data for the chart
|
||||
var option = {
|
||||
title: {
|
||||
text: 'Ack Packets'
|
||||
},
|
||||
legend: {},
|
||||
tooltip: {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show: true,
|
||||
feature: {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: false},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable: true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
series: [
|
||||
{
|
||||
name: 'tx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
color: 'red',
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'tx' // refer sensor 1 value
|
||||
}
|
||||
},{
|
||||
name: 'rx',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'rx'
|
||||
}
|
||||
}]
|
||||
};
|
||||
|
||||
ack_chart.setOption(option);
|
||||
}
|
||||
|
||||
function create_memory_chart() {
|
||||
ack_canvas = document.getElementById('memChart');
|
||||
memory_chart = echarts.init(ack_canvas);
|
||||
|
||||
// Specify the configuration items and data for the chart
|
||||
var option = {
|
||||
title: {
|
||||
text: 'Memory Usage'
|
||||
},
|
||||
legend: {},
|
||||
tooltip: {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show: true,
|
||||
feature: {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: false},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable: true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
series: [
|
||||
{
|
||||
name: 'current',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
color: 'red',
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'current' // refer sensor 1 value
|
||||
}
|
||||
},{
|
||||
name: 'peak',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'peak'
|
||||
}
|
||||
}]
|
||||
};
|
||||
|
||||
memory_chart.setOption(option);
|
||||
}
|
||||
|
||||
function create_thread_chart() {
|
||||
thread_canvas = document.getElementById('threadChart');
|
||||
thread_chart = echarts.init(thread_canvas);
|
||||
|
||||
// Specify the configuration items and data for the chart
|
||||
var option = {
|
||||
title: {
|
||||
text: 'Active Threads'
|
||||
},
|
||||
legend: {},
|
||||
tooltip: {
|
||||
trigger: 'axis'
|
||||
},
|
||||
toolbox: {
|
||||
show: true,
|
||||
feature: {
|
||||
mark : {show: true},
|
||||
dataView : {show: true, readOnly: false},
|
||||
magicType : {show: true, type: ['line', 'bar']},
|
||||
restore : {show: true},
|
||||
saveAsImage : {show: true}
|
||||
}
|
||||
},
|
||||
calculable: true,
|
||||
xAxis: { type: 'time' },
|
||||
yAxis: { },
|
||||
series: [
|
||||
{
|
||||
name: 'current',
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
color: 'red',
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: 'current' // refer sensor 1 value
|
||||
}
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
thread_chart.setOption(option);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
function updatePacketData(chart, time, first, second) {
|
||||
tx_data.push([time, first]);
|
||||
rx_data.push([time, second]);
|
||||
option = {
|
||||
series: [
|
||||
{
|
||||
name: 'tx',
|
||||
data: tx_data,
|
||||
},
|
||||
{
|
||||
name: 'rx',
|
||||
data: rx_data,
|
||||
}
|
||||
]
|
||||
}
|
||||
chart.setOption(option);
|
||||
}
|
||||
|
||||
function updatePacketTypesData(time, typesdata) {
|
||||
//The options series is created on the fly each time based on
|
||||
//the packet types we have in the data
|
||||
var series = []
|
||||
|
||||
for (const k in typesdata) {
|
||||
tx = [time, typesdata[k]["tx"]]
|
||||
rx = [time, typesdata[k]["rx"]]
|
||||
|
||||
if (packet_types_data.hasOwnProperty(k)) {
|
||||
packet_types_data[k]["tx"].push(tx)
|
||||
packet_types_data[k]["rx"].push(rx)
|
||||
} else {
|
||||
packet_types_data[k] = {'tx': [tx], 'rx': [rx]}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updatePacketTypesChart() {
|
||||
series = []
|
||||
for (const k in packet_types_data) {
|
||||
entry = {
|
||||
name: k+"tx",
|
||||
data: packet_types_data[k]["tx"],
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: k+'tx' // refer sensor 1 value
|
||||
}
|
||||
}
|
||||
series.push(entry)
|
||||
entry = {
|
||||
name: k+"rx",
|
||||
data: packet_types_data[k]["rx"],
|
||||
type: 'line',
|
||||
smooth: true,
|
||||
encode: {
|
||||
x: 'timestamp',
|
||||
y: k+'rx' // refer sensor 1 value
|
||||
}
|
||||
}
|
||||
series.push(entry)
|
||||
}
|
||||
|
||||
option = {
|
||||
series: series
|
||||
}
|
||||
packet_types_chart.setOption(option);
|
||||
}
|
||||
|
||||
function updateTypeChart(chart, key) {
|
||||
//Generic function to update a packet type chart
|
||||
if (! packet_types_data.hasOwnProperty(key)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (! packet_types_data[key].hasOwnProperty('tx')) {
|
||||
return;
|
||||
}
|
||||
var option = {
|
||||
series: [{
|
||||
name: "tx",
|
||||
data: packet_types_data[key]["tx"],
|
||||
},
|
||||
{
|
||||
name: "rx",
|
||||
data: packet_types_data[key]["rx"]
|
||||
}]
|
||||
}
|
||||
|
||||
chart.setOption(option);
|
||||
}
|
||||
|
||||
function updateMemChart(time, current, peak) {
|
||||
mem_current.push([time, current]);
|
||||
mem_peak.push([time, peak]);
|
||||
option = {
|
||||
series: [
|
||||
{
|
||||
name: 'current',
|
||||
data: mem_current,
|
||||
},
|
||||
{
|
||||
name: 'peak',
|
||||
data: mem_peak,
|
||||
}
|
||||
]
|
||||
}
|
||||
memory_chart.setOption(option);
|
||||
}
|
||||
|
||||
function updateThreadChart(time, threads) {
|
||||
keys = Object.keys(threads);
|
||||
thread_count = keys.length;
|
||||
thread_current.push([time, thread_count]);
|
||||
option = {
|
||||
series: [
|
||||
{
|
||||
name: 'current',
|
||||
data: thread_current,
|
||||
}
|
||||
]
|
||||
}
|
||||
thread_chart.setOption(option);
|
||||
}
|
||||
|
||||
function updateMessagesChart() {
|
||||
updateTypeChart(message_chart, "MessagePacket")
|
||||
}
|
||||
|
||||
function updateAcksChart() {
|
||||
updateTypeChart(ack_chart, "AckPacket")
|
||||
}
|
||||
|
||||
function update_stats( data ) {
|
||||
console.log("update_stats() echarts.js called")
|
||||
stats = data["stats"];
|
||||
our_callsign = stats["APRSDStats"]["callsign"];
|
||||
$("#version").text( stats["APRSDStats"]["version"] );
|
||||
$("#aprs_connection").html( stats["aprs_connection"] );
|
||||
$("#uptime").text( "uptime: " + stats["APRSDStats"]["uptime"] );
|
||||
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
|
||||
$("#jsonstats").html(html_pretty);
|
||||
|
||||
t = Date.parse(data["time"]);
|
||||
ts = new Date(t);
|
||||
updatePacketData(packets_chart, ts, stats["PacketList"]["tx"], stats["PacketList"]["rx"]);
|
||||
updatePacketTypesData(ts, stats["PacketList"]["types"]);
|
||||
updatePacketTypesChart();
|
||||
updateMessagesChart();
|
||||
updateAcksChart();
|
||||
updateMemChart(ts, stats["APRSDStats"]["memory_current"], stats["APRSDStats"]["memory_peak"]);
|
||||
updateThreadChart(ts, stats["APRSDThreadList"]);
|
||||
//updateQuadData(message_chart, short_time, data["stats"]["messages"]["sent"], data["stats"]["messages"]["received"], data["stats"]["messages"]["ack_sent"], data["stats"]["messages"]["ack_recieved"]);
|
||||
//updateDualData(email_chart, short_time, data["stats"]["email"]["sent"], data["stats"]["email"]["recieved"]);
|
||||
//updateDualData(memory_chart, short_time, data["stats"]["aprsd"]["memory_peak"], data["stats"]["aprsd"]["memory_current"]);
|
||||
}
|
@ -1,26 +0,0 @@
|
||||
function init_logs() {
|
||||
const socket = io("/logs");
|
||||
socket.on('connect', function () {
|
||||
console.log("Connected to logs socketio");
|
||||
});
|
||||
|
||||
socket.on('connected', function(msg) {
|
||||
console.log("Connected to /logs");
|
||||
console.log(msg);
|
||||
});
|
||||
|
||||
socket.on('log_entry', function(data) {
|
||||
update_logs(data);
|
||||
});
|
||||
|
||||
};
|
||||
|
||||
|
||||
function update_logs(data) {
|
||||
var code_block = $('#logtext')
|
||||
entry = data["message"]
|
||||
const html_pretty = Prism.highlight(entry, Prism.languages.log, 'log');
|
||||
code_block.append(html_pretty + "<br>");
|
||||
var div = document.getElementById('logContainer');
|
||||
div.scrollTop = div.scrollHeight;
|
||||
}
|
@ -1,231 +0,0 @@
|
||||
// watchlist is a dict of ham callsign => symbol, packets
|
||||
var watchlist = {};
|
||||
var our_callsign = "";
|
||||
|
||||
function aprs_img(item, x_offset, y_offset) {
|
||||
var x = x_offset * -16;
|
||||
if (y_offset > 5) {
|
||||
y_offset = 5;
|
||||
}
|
||||
var y = y_offset * -16;
|
||||
var loc = x + 'px '+ y + 'px'
|
||||
item.css('background-position', loc);
|
||||
}
|
||||
|
||||
function show_aprs_icon(item, symbol) {
|
||||
var offset = ord(symbol) - 33;
|
||||
var col = Math.floor(offset / 16);
|
||||
var row = offset % 16;
|
||||
//console.log("'" + symbol+"' off: "+offset+" row: "+ row + " col: " + col)
|
||||
aprs_img(item, row, col);
|
||||
}
|
||||
|
||||
function ord(str){return str.charCodeAt(0);}
|
||||
|
||||
|
||||
function update_watchlist( data ) {
|
||||
// Update the watch list
|
||||
stats = data["stats"];
|
||||
if (stats.hasOwnProperty("WatchList") == false) {
|
||||
return
|
||||
}
|
||||
var watchdiv = $("#watchDiv");
|
||||
var html_str = '<table class="ui celled striped table"><thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th></tr></thead><tbody>'
|
||||
watchdiv.html('')
|
||||
jQuery.each(stats["WatchList"], function(i, val) {
|
||||
html_str += '<tr><td class="collapsing"><img id="callsign_'+i+'" class="aprsd_1"></img>' + i + '</td><td>' + val["last"] + '</td></tr>'
|
||||
});
|
||||
html_str += "</tbody></table>";
|
||||
watchdiv.append(html_str);
|
||||
|
||||
jQuery.each(watchlist, function(i, val) {
|
||||
//update the symbol
|
||||
var call_img = $('#callsign_'+i);
|
||||
show_aprs_icon(call_img, val['symbol'])
|
||||
});
|
||||
}
|
||||
|
||||
function update_watchlist_from_packet(callsign, val) {
|
||||
if (!watchlist.hasOwnProperty(callsign)) {
|
||||
watchlist[callsign] = {
|
||||
"symbol": '[',
|
||||
"packets": {},
|
||||
}
|
||||
} else {
|
||||
if (val.hasOwnProperty('symbol')) {
|
||||
//console.log("Updating symbol for "+callsign + " to "+val["symbol"])
|
||||
watchlist[callsign]["symbol"] = val["symbol"]
|
||||
}
|
||||
}
|
||||
if (watchlist[callsign]["packets"].hasOwnProperty(val['ts']) == false) {
|
||||
watchlist[callsign]["packets"][val['ts']]= val;
|
||||
}
|
||||
//console.log(watchlist)
|
||||
}
|
||||
|
||||
function update_seenlist( data ) {
|
||||
stats = data["stats"];
|
||||
if (stats.hasOwnProperty("SeenList") == false) {
|
||||
return
|
||||
}
|
||||
var seendiv = $("#seenDiv");
|
||||
var html_str = '<table class="ui celled striped table">'
|
||||
html_str += '<thead><tr><th>HAM Callsign</th><th>Age since last seen by APRSD</th>'
|
||||
html_str += '<th>Number of packets RX</th></tr></thead><tbody>'
|
||||
seendiv.html('')
|
||||
var seen_list = stats["SeenList"]
|
||||
var len = Object.keys(seen_list).length
|
||||
$('#seen_count').html(len)
|
||||
jQuery.each(seen_list, function(i, val) {
|
||||
html_str += '<tr><td class="collapsing">'
|
||||
html_str += '<img id="callsign_'+i+'" class="aprsd_1"></img>' + i + '</td>'
|
||||
html_str += '<td>' + val["last"] + '</td>'
|
||||
html_str += '<td>' + val["count"] + '</td></tr>'
|
||||
});
|
||||
html_str += "</tbody></table>";
|
||||
seendiv.append(html_str);
|
||||
}
|
||||
|
||||
function update_plugins( data ) {
|
||||
stats = data["stats"];
|
||||
if (stats.hasOwnProperty("PluginManager") == false) {
|
||||
return
|
||||
}
|
||||
var plugindiv = $("#pluginDiv");
|
||||
var html_str = '<table class="ui celled striped table"><thead><tr>'
|
||||
html_str += '<th>Plugin Name</th><th>Plugin Enabled?</th>'
|
||||
html_str += '<th>Processed Packets</th><th>Sent Packets</th>'
|
||||
html_str += '<th>Version</th>'
|
||||
html_str += '</tr></thead><tbody>'
|
||||
plugindiv.html('')
|
||||
|
||||
var plugins = stats["PluginManager"];
|
||||
var keys = Object.keys(plugins);
|
||||
keys.sort();
|
||||
for (var i=0; i<keys.length; i++) { // now lets iterate in sort order
|
||||
var key = keys[i];
|
||||
var val = plugins[key];
|
||||
html_str += '<tr><td class="collapsing">' + key + '</td>';
|
||||
html_str += '<td>' + val["enabled"] + '</td><td>' + val["rx"] + '</td>';
|
||||
html_str += '<td>' + val["tx"] + '</td><td>' + val["version"] +'</td></tr>';
|
||||
}
|
||||
html_str += "</tbody></table>";
|
||||
plugindiv.append(html_str);
|
||||
}
|
||||
|
||||
function update_threads( data ) {
|
||||
stats = data["stats"];
|
||||
if (stats.hasOwnProperty("APRSDThreadList") == false) {
|
||||
return
|
||||
}
|
||||
var threadsdiv = $("#threadsDiv");
|
||||
var countdiv = $("#thread_count");
|
||||
var html_str = '<table class="ui celled striped table"><thead><tr>'
|
||||
html_str += '<th>Thread Name</th><th>Alive?</th>'
|
||||
html_str += '<th>Age</th><th>Loop Count</th>'
|
||||
html_str += '</tr></thead><tbody>'
|
||||
threadsdiv.html('')
|
||||
|
||||
var threads = stats["APRSDThreadList"];
|
||||
var keys = Object.keys(threads);
|
||||
countdiv.html(keys.length);
|
||||
keys.sort();
|
||||
for (var i=0; i<keys.length; i++) { // now lets iterate in sort order
|
||||
var key = keys[i];
|
||||
var val = threads[key];
|
||||
html_str += '<tr><td class="collapsing">' + key + '</td>';
|
||||
html_str += '<td>' + val["alive"] + '</td><td>' + val["age"] + '</td>';
|
||||
html_str += '<td>' + val["loop_count"] + '</td></tr>';
|
||||
}
|
||||
html_str += "</tbody></table>";
|
||||
threadsdiv.append(html_str);
|
||||
}
|
||||
|
||||
function update_packets( data ) {
|
||||
var packetsdiv = $("#packetsDiv");
|
||||
//nuke the contents first, then add to it.
|
||||
if (size_dict(packet_list) == 0 && size_dict(data) > 0) {
|
||||
packetsdiv.html('')
|
||||
}
|
||||
jQuery.each(data.packets, function(i, val) {
|
||||
pkt = val;
|
||||
|
||||
update_watchlist_from_packet(pkt['from_call'], pkt);
|
||||
if ( packet_list.hasOwnProperty(pkt['timestamp']) == false ) {
|
||||
// Store the packet
|
||||
packet_list[pkt['timestamp']] = pkt;
|
||||
//ts_str = val["timestamp"].toString();
|
||||
//ts = ts_str.split(".")[0]*1000;
|
||||
ts = pkt['timestamp'] * 1000;
|
||||
var d = new Date(ts).toLocaleDateString();
|
||||
var t = new Date(ts).toLocaleTimeString();
|
||||
var from_call = pkt.from_call;
|
||||
if (from_call == our_callsign) {
|
||||
title_id = 'title_tx';
|
||||
} else {
|
||||
title_id = 'title_rx';
|
||||
}
|
||||
var from_to = d + " " + t + " " + from_call + " > "
|
||||
|
||||
if (val.hasOwnProperty('addresse')) {
|
||||
from_to = from_to + pkt['addresse']
|
||||
} else if (pkt.hasOwnProperty('to_call')) {
|
||||
from_to = from_to + pkt['to_call']
|
||||
} else if (pkt.hasOwnProperty('format') && pkt['format'] == 'mic-e') {
|
||||
from_to = from_to + "Mic-E"
|
||||
}
|
||||
|
||||
from_to = from_to + " - " + pkt['raw']
|
||||
|
||||
json_pretty = Prism.highlight(JSON.stringify(pkt, null, '\t'), Prism.languages.json, 'json');
|
||||
pkt_html = '<div class="title" id="' + title_id + '"><i class="dropdown icon"></i>' + from_to + '</div><div class="content"><p class="transition hidden"><pre class="language-json">' + json_pretty + '</p></p></div>'
|
||||
packetsdiv.prepend(pkt_html);
|
||||
}
|
||||
});
|
||||
|
||||
$('.ui.accordion').accordion('refresh');
|
||||
|
||||
// Update the count of messages shown
|
||||
cnt = size_dict(packet_list);
|
||||
//console.log("packets list " + cnt)
|
||||
$('#packets_count').html(cnt);
|
||||
|
||||
const html_pretty = Prism.highlight(JSON.stringify(data, null, '\t'), Prism.languages.json, 'json');
|
||||
$("#packetsjson").html(html_pretty);
|
||||
}
|
||||
|
||||
|
||||
function start_update() {
|
||||
|
||||
(function statsworker() {
|
||||
$.ajax({
|
||||
url: "/stats",
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
success: function(data) {
|
||||
update_stats(data);
|
||||
update_watchlist(data);
|
||||
update_seenlist(data);
|
||||
update_plugins(data);
|
||||
update_threads(data);
|
||||
},
|
||||
complete: function() {
|
||||
setTimeout(statsworker, 10000);
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
(function packetsworker() {
|
||||
$.ajax({
|
||||
url: "/packets",
|
||||
type: 'GET',
|
||||
dataType: 'json',
|
||||
success: function(data) {
|
||||
update_packets(data);
|
||||
},
|
||||
complete: function() {
|
||||
setTimeout(packetsworker, 10000);
|
||||
}
|
||||
});
|
||||
})();
|
||||
}
|
@ -1,114 +0,0 @@
|
||||
var cleared = false;
|
||||
|
||||
function size_dict(d){c=0; for (i in d) ++c; return c}
|
||||
|
||||
function init_messages() {
|
||||
const socket = io("/sendmsg");
|
||||
socket.on('connect', function () {
|
||||
console.log("Connected to socketio");
|
||||
});
|
||||
socket.on('connected', function(msg) {
|
||||
console.log("Connected!");
|
||||
console.log(msg);
|
||||
});
|
||||
|
||||
socket.on("sent", function(msg) {
|
||||
if (cleared == false) {
|
||||
var msgsdiv = $("#msgsDiv");
|
||||
msgsdiv.html('')
|
||||
cleared = true
|
||||
}
|
||||
add_msg(msg);
|
||||
});
|
||||
|
||||
socket.on("ack", function(msg) {
|
||||
update_msg(msg);
|
||||
});
|
||||
socket.on("reply", function(msg) {
|
||||
update_msg(msg);
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
function add_msg(msg) {
|
||||
var msgsdiv = $("#sendMsgsDiv");
|
||||
|
||||
ts_str = msg["ts"].toString();
|
||||
ts = ts_str.split(".")[0]*1000;
|
||||
var d = new Date(ts).toLocaleDateString("en-US")
|
||||
var t = new Date(ts).toLocaleTimeString("en-US")
|
||||
|
||||
from = msg['from']
|
||||
title_id = 'title_tx'
|
||||
var from_to = d + " " + t + " " + from + " > "
|
||||
|
||||
if (msg.hasOwnProperty('to')) {
|
||||
from_to = from_to + msg['to']
|
||||
}
|
||||
from_to = from_to + " - " + msg['message']
|
||||
|
||||
id = ts_str.split('.')[0]
|
||||
pretty_id = "pretty_" + id
|
||||
loader_id = "loader_" + id
|
||||
ack_id = "ack_" + id
|
||||
reply_id = "reply_" + id
|
||||
span_id = "span_" + id
|
||||
json_pretty = Prism.highlight(JSON.stringify(msg, null, '\t'), Prism.languages.json, 'json');
|
||||
msg_html = '<div class="ui title" id="' + title_id + '"><i class="dropdown icon"></i>';
|
||||
msg_html += '<div class="ui active inline loader" id="' + loader_id +'" data-content="Waiting for Ack"></div> ';
|
||||
msg_html += '<i class="thumbs down outline icon" id="' + ack_id + '" data-content="Waiting for ACK"></i> ';
|
||||
msg_html += '<i class="thumbs down outline icon" id="' + reply_id + '" data-content="Waiting for Reply"></i> ';
|
||||
msg_html += '<span id="' + span_id + '">' + from_to +'</span></div>';
|
||||
msg_html += '<div class="content"><p class="transition hidden"><pre id="' + pretty_id + '" class="language-json">' + json_pretty + '</p></p></div>'
|
||||
msgsdiv.prepend(msg_html);
|
||||
$('.ui.accordion').accordion('refresh');
|
||||
}
|
||||
|
||||
function update_msg(msg) {
|
||||
var msgsdiv = $("#sendMsgsDiv");
|
||||
// We have an existing entry
|
||||
ts_str = msg["ts"].toString();
|
||||
id = ts_str.split('.')[0]
|
||||
pretty_id = "pretty_" + id
|
||||
loader_id = "loader_" + id
|
||||
reply_id = "reply_" + id
|
||||
ack_id = "ack_" + id
|
||||
span_id = "span_" + id
|
||||
|
||||
|
||||
|
||||
if (msg['ack'] == true) {
|
||||
var loader_div = $('#' + loader_id);
|
||||
var ack_div = $('#' + ack_id);
|
||||
loader_div.removeClass('ui active inline loader');
|
||||
loader_div.addClass('ui disabled loader');
|
||||
ack_div.removeClass('thumbs up outline icon');
|
||||
ack_div.addClass('thumbs up outline icon');
|
||||
}
|
||||
|
||||
if (msg['reply'] !== null) {
|
||||
var reply_div = $('#' + reply_id);
|
||||
reply_div.removeClass("thumbs down outline icon");
|
||||
reply_div.addClass('reply icon');
|
||||
reply_div.attr('data-content', 'Got Reply');
|
||||
|
||||
var d = new Date(ts).toLocaleDateString("en-US")
|
||||
var t = new Date(ts).toLocaleTimeString("en-US")
|
||||
var from_to = d + " " + t + " " + from + " > "
|
||||
|
||||
if (msg.hasOwnProperty('to')) {
|
||||
from_to = from_to + msg['to']
|
||||
}
|
||||
from_to = from_to + " - " + msg['message']
|
||||
from_to += " ===> " + msg["reply"]["message_text"]
|
||||
|
||||
var span_div = $('#' + span_id);
|
||||
span_div.html(from_to);
|
||||
}
|
||||
|
||||
var pretty_pre = $("#" + pretty_id);
|
||||
pretty_pre.html('');
|
||||
json_pretty = Prism.highlight(JSON.stringify(msg, null, '\t'), Prism.languages.json, 'json');
|
||||
pretty_pre.html(json_pretty);
|
||||
$('.ui.accordion').accordion('refresh');
|
||||
}
|
@ -1,28 +0,0 @@
|
||||
function openTab(evt, tabName) {
|
||||
// Declare all variables
|
||||
var i, tabcontent, tablinks;
|
||||
|
||||
if (typeof tabName == 'undefined') {
|
||||
return
|
||||
}
|
||||
|
||||
// Get all elements with class="tabcontent" and hide them
|
||||
tabcontent = document.getElementsByClassName("tabcontent");
|
||||
for (i = 0; i < tabcontent.length; i++) {
|
||||
tabcontent[i].style.display = "none";
|
||||
}
|
||||
|
||||
// Get all elements with class="tablinks" and remove the class "active"
|
||||
tablinks = document.getElementsByClassName("tablinks");
|
||||
for (i = 0; i < tablinks.length; i++) {
|
||||
tablinks[i].className = tablinks[i].className.replace(" active", "");
|
||||
}
|
||||
|
||||
// Show the current tab, and add an "active" class to the button that opened the tab
|
||||
document.getElementById(tabName).style.display = "block";
|
||||
if (typeof evt.currentTarget == 'undefined') {
|
||||
return
|
||||
} else {
|
||||
evt.currentTarget.className += " active";
|
||||
}
|
||||
}
|
@ -1,196 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jquery/3.6.0/jquery.min.js"></script>
|
||||
<link rel="stylesheet" href="https://ajax.googleapis.com/ajax/libs/jqueryui/1.12.1/themes/smoothness/jquery-ui.css">
|
||||
<script src="https://ajax.googleapis.com/ajax/libs/jqueryui/1.12.1/jquery-ui.min.js"></script>
|
||||
<script src="https://cdn.socket.io/4.7.1/socket.io.min.js" integrity="sha512-+NaO7d6gQ1YPxvc/qHIqZEchjGm207SszoNeMgppoqD/67fEqmc1edS8zrbxPD+4RQI3gDgT/83ihpFW61TG/Q==" crossorigin="anonymous"></script>
|
||||
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js@2.9.4/dist/Chart.bundle.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/echarts@5.4.3/dist/echarts.min.js"></script>
|
||||
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/semantic-ui@2.4.2/dist/semantic.min.css">
|
||||
<script src="https://cdn.jsdelivr.net/npm/semantic-ui@2.4.2/dist/semantic.min.js"></script>
|
||||
|
||||
<link rel="stylesheet" href="/static/css/index.css">
|
||||
<link rel="stylesheet" href="/static/css/tabs.css">
|
||||
<link rel="stylesheet" href="/static/css/prism.css">
|
||||
<script src="/static/js/prism.js"></script>
|
||||
<script src="/static/js/main.js"></script>
|
||||
<script src="/static/js/echarts.js"></script>
|
||||
<script src="/static/js/tabs.js"></script>
|
||||
<script src="/static/js/send-message.js"></script>
|
||||
<script src="/static/js/logs.js"></script>
|
||||
|
||||
|
||||
<script type="text/javascript">
|
||||
var initial_stats = {{ initial_stats|tojson|safe }};
|
||||
|
||||
var memory_chart = null
|
||||
var message_chart = null
|
||||
var color = Chart.helpers.color;
|
||||
|
||||
$(document).ready(function() {
|
||||
start_update();
|
||||
start_charts();
|
||||
init_messages();
|
||||
init_logs();
|
||||
|
||||
$("#toggleStats").click(function() {
|
||||
$("#jsonstats").fadeToggle(1000);
|
||||
});
|
||||
|
||||
// Pretty print the config json so it's readable
|
||||
var cfg_data = $("#configjson").text();
|
||||
var cfg_json = JSON.parse(cfg_data);
|
||||
var cfg_pretty = JSON.stringify(cfg_json, null, '\t');
|
||||
const html_pretty = Prism.highlight( cfg_pretty, Prism.languages.json, 'json');
|
||||
$("#configjson").html(html_pretty);
|
||||
$("#jsonstats").fadeToggle(1000);
|
||||
|
||||
//var log_text_pretty = $('#logtext').text();
|
||||
//const log_pretty = Prism.highlight( log_text_pretty, Prism.languages.log, 'log');
|
||||
//$('#logtext').html(log_pretty);
|
||||
|
||||
$('.ui.accordion').accordion({exclusive: false});
|
||||
$('.menu .item').tab('change tab', 'charts-tab');
|
||||
});
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class='ui text container'>
|
||||
<h1 class='ui dividing header'>APRSD {{ version }}</h1>
|
||||
</div>
|
||||
|
||||
<div class='ui grid text container'>
|
||||
<div class='left floated ten wide column'>
|
||||
<span style='color: green'>{{ callsign }}</span>
|
||||
connected to
|
||||
<span style='color: blue' id='aprs_connection'>{{ aprs_connection|safe }}</span>
|
||||
</div>
|
||||
|
||||
<div class='right floated four wide column'>
|
||||
<span id='uptime'>NONE</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Tab links -->
|
||||
<div class="ui top attached tabular menu">
|
||||
<div class="active item" data-tab="charts-tab">Charts</div>
|
||||
<div class="item" data-tab="msgs-tab">Messages</div>
|
||||
<div class="item" data-tab="seen-tab">Seen List</div>
|
||||
<div class="item" data-tab="watch-tab">Watch List</div>
|
||||
<div class="item" data-tab="plugin-tab">Plugins</div>
|
||||
<div class="item" data-tab="threads-tab">Threads</div>
|
||||
<div class="item" data-tab="config-tab">Config</div>
|
||||
<div class="item" data-tab="log-tab">LogFile</div>
|
||||
<!-- <div class="item" data-tab="oslo-tab">OSLO CONFIG</div> //-->
|
||||
<div class="item" data-tab="raw-tab">Raw JSON</div>
|
||||
</div>
|
||||
|
||||
<!-- Tab content -->
|
||||
<div class="ui bottom attached active tab segment" data-tab="charts-tab">
|
||||
<h3 class="ui dividing header">Charts</h3>
|
||||
<div class="ui equal width relaxed grid">
|
||||
<div class="row">
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="packetsChart"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="messagesChart"></div>
|
||||
</div>
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="acksChart"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="packetTypesChart"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="threadChart"></div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
<div class="column">
|
||||
<div class="ui segment" style="height: 300px" id="memChart"></div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- <div class="row">
|
||||
<div id="stats" class="two column">
|
||||
<button class="ui button" id="toggleStats">Toggle raw json</button>
|
||||
<pre id="jsonstats" class="language-json">{{ stats }}</pre>
|
||||
</div> //-->
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="msgs-tab">
|
||||
<h3 class="ui dividing header">Messages (<span id="packets_count">0</span>)</h3>
|
||||
<div class="ui styled fluid accordion" id="accordion">
|
||||
<div id="packetsDiv" class="ui mini text">Loading</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="seen-tab">
|
||||
<h3 class="ui dividing header">
|
||||
Callsign Seen List (<span id="seen_count">{{ seen_count }}</span>)
|
||||
</h3>
|
||||
<div id="seenDiv" class="ui mini text">Loading</div>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="watch-tab">
|
||||
<h3 class="ui dividing header">
|
||||
Callsign Watch List (<span id="watch_count">{{ watch_count }}</span>)
|
||||
|
||||
Notification age - <span id="watch_age">{{ watch_age }}</span>
|
||||
</h3>
|
||||
<div id="watchDiv" class="ui mini text">Loading</div>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="plugin-tab">
|
||||
<h3 class="ui dividing header">
|
||||
Plugins Loaded (<span id="plugin_count">{{ plugin_count }}</span>)
|
||||
</h3>
|
||||
<div id="pluginDiv" class="ui mini text">Loading</div>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="threads-tab">
|
||||
<h3 class="ui dividing header">
|
||||
Threads Loaded (<span id="thread_count">{{ thread_count }}</span>)
|
||||
</h3>
|
||||
<div id="threadsDiv" class="ui mini text">Loading</div>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="config-tab">
|
||||
<h3 class="ui dividing header">Config</h3>
|
||||
<pre id="configjson" class="language-json">{{ config_json|safe }}</pre>
|
||||
</div>
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="log-tab">
|
||||
<h3 class="ui dividing header">LOGFILE</h3>
|
||||
<pre id="logContainer" style="height: 600px;overflow-y:auto;overflow-x:auto;"><code id="logtext" class="language-log" ></code></pre>
|
||||
</div>
|
||||
|
||||
<!--
|
||||
<div class="ui bottom attached tab segment" data-tab="oslo-tab">
|
||||
<h3 class="ui dividing header">OSLO</h3>
|
||||
<pre id="osloContainer" style="height:600px;overflow-y:auto;" class="language-json">{{ oslo_out|safe }}</pre>
|
||||
</div> //-->
|
||||
|
||||
<div class="ui bottom attached tab segment" data-tab="raw-tab">
|
||||
<h3 class="ui dividing header">Raw JSON</h3>
|
||||
<pre id="jsonstats" class="language-yaml" style="height:600px;overflow-y:auto;">{{ initial_stats|safe }}</pre>
|
||||
</div>
|
||||
|
||||
<div class="ui text container">
|
||||
<a href="https://badge.fury.io/py/aprsd"><img src="https://badge.fury.io/py/aprsd.svg" alt="PyPI version" height="18"></a>
|
||||
<a href="https://github.com/craigerl/aprsd"><img src="https://img.shields.io/badge/Made%20with-Python-1f425f.svg" height="18"></a>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
@ -1,115 +0,0 @@
|
||||
input[type=search]::-webkit-search-cancel-button {
|
||||
-webkit-appearance: searchfield-cancel-button;
|
||||
}
|
||||
|
||||
.speech-wrapper {
|
||||
padding-top: 0px;
|
||||
padding: 5px 30px;
|
||||
background-color: #CCCCCC;
|
||||
}
|
||||
|
||||
.bubble-row {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
|
||||
.bubble-row.alt {
|
||||
justify-content: flex-end;
|
||||
}
|
||||
|
||||
.bubble {
|
||||
/*width: 350px; */
|
||||
height: auto;
|
||||
display: block;
|
||||
background: #f5f5f5;
|
||||
border-radius: 4px;
|
||||
box-shadow: 2px 8px 5px #555;
|
||||
position: relative;
|
||||
margin: 0 0 15px;
|
||||
}
|
||||
|
||||
.bubble.alt {
|
||||
margin: 0 0 15px;
|
||||
}
|
||||
|
||||
.bubble-text {
|
||||
padding: 5px 5px 0px 8px;
|
||||
}
|
||||
|
||||
.bubble-name {
|
||||
width: 280px;
|
||||
font-weight: 600;
|
||||
font-size: 12px;
|
||||
margin: 0 0 0px;
|
||||
color: #3498db;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
.material-symbols-rounded {
|
||||
margin-left: auto;
|
||||
font-weight: normal;
|
||||
color: #808080;
|
||||
}
|
||||
}
|
||||
.bubble-name.alt {
|
||||
color: #2ecc71;
|
||||
}
|
||||
|
||||
.bubble-timestamp {
|
||||
margin-right: auto;
|
||||
font-size: 11px;
|
||||
text-transform: uppercase;
|
||||
color: #bbb
|
||||
}
|
||||
|
||||
.bubble-message {
|
||||
font-size: 16px;
|
||||
margin: 0px;
|
||||
padding: 0px 0px 0px 0px;
|
||||
color: #2b2b2b;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.bubble-arrow {
|
||||
position: absolute;
|
||||
width: 0;
|
||||
bottom:30px;
|
||||
left: -16px;
|
||||
height: 0px;
|
||||
}
|
||||
|
||||
.bubble-arrow.alt {
|
||||
right: -2px;
|
||||
bottom: 30px;
|
||||
left: auto;
|
||||
}
|
||||
|
||||
.bubble-arrow:after {
|
||||
content: "";
|
||||
position: absolute;
|
||||
border: 0 solid transparent;
|
||||
border-top: 9px solid #f5f5f5;
|
||||
border-radius: 0 20px 0;
|
||||
width: 15px;
|
||||
height: 30px;
|
||||
transform: rotate(145deg);
|
||||
}
|
||||
.bubble-arrow.alt:after {
|
||||
transform: rotate(45deg) scaleY(-1);
|
||||
}
|
||||
|
||||
.popover {
|
||||
max-width: 400px;
|
||||
}
|
||||
.popover-header {
|
||||
font-size: 8pt;
|
||||
max-width: 400px;
|
||||
padding: 5px;
|
||||
background-color: #ee;
|
||||
}
|
||||
|
||||
.popover-body {
|
||||
white-space: pre-line;
|
||||
max-width: 400px;
|
||||
padding: 5px;
|
||||
}
|
@ -1,66 +0,0 @@
|
||||
body {
|
||||
background: #eeeeee;
|
||||
/*margin: 1em;*/
|
||||
text-align: center;
|
||||
font-family: system-ui, sans-serif;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#title {
|
||||
font-size: 4em;
|
||||
}
|
||||
#version{
|
||||
font-size: .5em;
|
||||
}
|
||||
|
||||
#uptime, #aprsis {
|
||||
font-size: 1em;
|
||||
}
|
||||
#callsign {
|
||||
font-size: 1.4em;
|
||||
color: #00F;
|
||||
padding-top: 8px;
|
||||
margin:10px;
|
||||
}
|
||||
|
||||
#title_rx {
|
||||
background-color: darkseagreen;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
#title_tx {
|
||||
background-color: lightcoral;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.aprsd_1 {
|
||||
background-image: url(/static/images/aprs-symbols-16-0.png);
|
||||
background-repeat: no-repeat;
|
||||
background-position: -160px -48px;
|
||||
width: 16px;
|
||||
height: 16px;
|
||||
}
|
||||
|
||||
.wc-container {
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
height: 100%;
|
||||
}
|
||||
.wc-container .wc-row {
|
||||
/*border: 1px dotted #0313fc;*/
|
||||
padding: 2px;
|
||||
}
|
||||
.wc-container .wc-row.header {
|
||||
flex: 0 1 auto;
|
||||
}
|
||||
.wc-container .wc-row.content {
|
||||
flex: 1 1 auto;
|
||||
overflow-y: auto;
|
||||
}
|
||||
.wc-container .wc-row.footer {
|
||||
flex: 0 1 0px;
|
||||
}
|
||||
|
||||
.material-symbols-rounded.md-10 {
|
||||
font-size: 18px !important;
|
||||
}
|
@ -1,41 +0,0 @@
|
||||
* {box-sizing: border-box}
|
||||
|
||||
/* Style the tab */
|
||||
.tab {
|
||||
border: 1px solid #ccc;
|
||||
background-color: #f1f1f1;
|
||||
height: 450px;
|
||||
}
|
||||
|
||||
/* Style the buttons inside the tab */
|
||||
.tab div {
|
||||
display: block;
|
||||
background-color: inherit;
|
||||
color: black;
|
||||
padding: 10px;
|
||||
width: 100%;
|
||||
border: none;
|
||||
outline: none;
|
||||
text-align: left;
|
||||
cursor: pointer;
|
||||
transition: 0.3s;
|
||||
font-size: 17px;
|
||||
}
|
||||
|
||||
/* Change background color of buttons on hover */
|
||||
.tab div:hover {
|
||||
background-color: #ddd;
|
||||
}
|
||||
|
||||
/* Create an active/current "tab button" class */
|
||||
.tab div.active {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
/* Style the tab content */
|
||||
.tabcontent {
|
||||
border: 1px solid #ccc;
|
||||
height: 450px;
|
||||
overflow-y: scroll;
|
||||
background-color: #CCCCCC;
|
||||
}
|
@ -1,23 +0,0 @@
|
||||
/* fallback */
|
||||
@font-face {
|
||||
font-family: 'Material Symbols Rounded';
|
||||
font-style: normal;
|
||||
font-weight: 200;
|
||||
src: url(/static/css/upstream/font.woff2) format('woff2');
|
||||
}
|
||||
|
||||
.material-symbols-rounded {
|
||||
font-family: 'Material Symbols Rounded';
|
||||
font-weight: normal;
|
||||
font-style: normal;
|
||||
font-size: 24px;
|
||||
line-height: 1;
|
||||
letter-spacing: normal;
|
||||
text-transform: none;
|
||||
display: inline-block;
|
||||
white-space: nowrap;
|
||||
word-wrap: normal;
|
||||
direction: ltr;
|
||||
-webkit-font-feature-settings: 'liga';
|
||||
-webkit-font-smoothing: antialiased;
|
||||
}
|
1311
aprsd/web/chat/static/css/upstream/jquery-ui.css
vendored
@ -1,28 +0,0 @@
|
||||
/**
|
||||
* jQuery toast plugin created by Kamran Ahmed copyright MIT license 2014
|
||||
*/
|
||||
.jq-toast-wrap { display: block; position: fixed; width: 250px; pointer-events: none !important; margin: 0; padding: 0; letter-spacing: normal; z-index: 9000 !important; }
|
||||
.jq-toast-wrap * { margin: 0; padding: 0; }
|
||||
|
||||
.jq-toast-wrap.bottom-left { bottom: 20px; left: 20px; }
|
||||
.jq-toast-wrap.bottom-right { bottom: 20px; right: 40px; }
|
||||
.jq-toast-wrap.top-left { top: 20px; left: 20px; }
|
||||
.jq-toast-wrap.top-right { top: 20px; right: 40px; }
|
||||
|
||||
.jq-toast-single { display: block; width: 100%; padding: 10px; margin: 0px 0px 5px; border-radius: 4px; font-size: 12px; font-family: arial, sans-serif; line-height: 17px; position: relative; pointer-events: all !important; background-color: #444444; color: white; }
|
||||
|
||||
.jq-toast-single h2 { font-family: arial, sans-serif; font-size: 14px; margin: 0px 0px 7px; background: none; color: inherit; line-height: inherit; letter-spacing: normal; }
|
||||
.jq-toast-single a { color: #eee; text-decoration: none; font-weight: bold; border-bottom: 1px solid white; padding-bottom: 3px; font-size: 12px; }
|
||||
|
||||
.jq-toast-single ul { margin: 0px 0px 0px 15px; background: none; padding:0px; }
|
||||
.jq-toast-single ul li { list-style-type: disc !important; line-height: 17px; background: none; margin: 0; padding: 0; letter-spacing: normal; }
|
||||
|
||||
.close-jq-toast-single { position: absolute; top: 3px; right: 7px; font-size: 14px; cursor: pointer; }
|
||||
|
||||
.jq-toast-loader { display: block; position: absolute; top: -2px; height: 5px; width: 0%; left: 0; border-radius: 5px; background: red; }
|
||||
.jq-toast-loaded { width: 100%; }
|
||||
.jq-has-icon { padding: 10px 10px 10px 50px; background-repeat: no-repeat; background-position: 10px; }
|
||||
.jq-icon-info { background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahAbmm6HtUsgtPC19tFdxXZYBOfkbmFJ1VaHA1VAHjd0pp70oTZzvR+EVrx2Ygfdsq6eu55BHYR8hlcki+n+kERUFG8BrA0BwjeAv2M8WLQBtcy+SD6fNsmnB3AlBLrgTtVW1c2QN4bVWLATaIS60J2Du5y1TiJgjSBvFVZgTmwCU+dAZFoPxGEEs8nyHC9Bwe2GvEJv2WXZb0vjdyFT4Cxk3e/kIqlOGoVLwwPevpYHT+00T+hWwXDf4AJAOUqWcDhbwAAAAASUVORK5CYII='); background-color: #31708f; color: #d9edf7; border-color: #bce8f1; }
|
||||
.jq-icon-warning { background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzijW1RQdsU7F77He8u68koNZTz8Oz5yGa6J3H3lZ0xYgXBK2QymlWWA+RWnYhskLBv2vmE+hBMCtbA7KX5drWyRT/2JsqZ2IvfB9Y4bWDNMFbJRFmC9E74SoS0CqulwjkC0+5bpcV1CZ8NMej4pjy0U+doDQsGyo1hzVJttIjhQ7GnBtRFN1UarUlH8F3xict+HY07rEzoUGPlWcjRFRr4/gChZgc3ZL2d8oAAAAASUVORK5CYII='); background-color: #8a6d3b; color: #fcf8e3; border-color: #faebcc; }
|
||||
.jq-icon-error { background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFiacC0rlOCbhNVz4H9FnAYgDBvU3QIioZlJFLJtsoHYRDfiZoUyIxqCtRpVlANq0EU4dApjrtgezPFad5S19Wgjkc0hNVnuF4HjVA6C7QrSIbylB+oZe3aHgBsqlNqKYH48jXyJKMuAbiyVJ8KzaB3eRc0pg9VwQ4niFryI68qiOi3AbjwdsfnAtk0bCjTLJKr6mrD9g8iq/S/B81hguOMlQTnVyG40wAcjnmgsCNESDrjme7wfftP4P7SP4N3CJZdvzoNyGq2c/HWOXJGsvVg+RA/k2MC/wN6I2YA2Pt8GkAAAAASUVORK5CYII='); background-color: #a94442; color: #f2dede; border-color: #ebccd1; }
|
||||
.jq-icon-success { background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAAABJRU5ErkJggg=='); color: #dff0d8; background-color: #3c763d; border-color: #d6e9c6; }
|
Before Width: | Height: | Size: 37 KiB |
Before Width: | Height: | Size: 52 KiB |
Before Width: | Height: | Size: 48 KiB |
Before Width: | Height: | Size: 52 KiB |