mirror of https://github.com/dh1tw/pyhamtools.git
synced 2026-01-25 09:50:25 +01:00

Compare commits
37 commits
Commits (SHA1):

8c15ab8c2c
8ef752e3ad
9c61f75c28
b51b32e575
a12616ceca
1a79467db1
4bdaf8d335
940c0f072c
ac444fa36b
17117b1c20
1c3536396d
5ec3461d03
84d88faf69
5799c7337b
cea30d761a
71b2a743db
8d765b9346
f2b2f16806
27d61089a7
b5ba291e0d
c447cdc6a8
6a152760c5
e549f416e8
1dcc724e7b
c5893dcc7a
098307308d
0cfea2ec89
c49ac97dd1
b49c549d48
01284a5ae8
19f00eac5b
8d1bb624de
7b40e6c7ed
1b0073fb0c
077b645efe
bfbd0776bb
4e8c90da1d
2 .github/CODEOWNERS vendored Normal file

@@ -0,0 +1,2 @@
# global code owner
@DH1TW
93 .github/workflows/test.yml vendored

@@ -3,16 +3,17 @@ name: Linux
on: [push, pull_request]
jobs:
test_linux:
runs-on: "ubuntu-latest"
name: "Ubuntu latest - Python ${{ matrix.python-version }}"
runs-on: "ubuntu-24.04"
name: "Ubuntu 24.04 - Python ${{ matrix.python-version }}"
env:
USING_COVERAGE: '3.11'
strategy:
matrix:
python-version: ["2.7", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "pypy2.7", "pypy3.7", "pypy3.8", "pypy3.9"]
redis-version: [6]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "pypy3.8", "pypy3.9", "pypy3.10"]
redis-version: [7]
steps:
- uses: "actions/checkout@v3"

@@ -48,9 +49,14 @@ jobs:
# delay the execution randomly by a couple of seconds to reduce the amount
# of concurrent API calls on Clublog and QRZ.com when all CI jobs execute simultaneously
run: |
sleep $[ ( $RANDOM % 10 ) + 1 ]s
pytest --cov=./
if [[ $PYTHON_VERSION == 3.11 ]]; then codecov; fi
sleep $[ ( $RANDOM % 60 ) + 1 ]s
if [[ $PYTHON_VERSION == 3.11 ]]
then
pytest --cov=test/
codecov
else
pytest test/
fi
cd docs && make html
# publish_package:

@@ -64,16 +70,57 @@ jobs:
# user: __token__
# password: ${{ secrets.PYPI_API_TOKEN }}
test_macos:
runs-on: "macos-15"
name: "MacOS 15 - Python ${{ matrix.python-version }}"
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "pypy3.8", "pypy3.9", "pypy3.10"]
redis-version: [7.2]
steps:
- uses: "actions/checkout@v3"
- uses: "actions/setup-python@v4"
with:
python-version: "${{ matrix.python-version }}"
cache: "pip"
cache-dependency-path: |
**/setup.py
**/requirements*.txt
- name: "Install dependencies"
run: |
set -xe
python -VV
python -m pip install --upgrade pip setuptools
python -m pip install -e .
python -m pip install -r requirements-pytest.txt
- name: Start Redis
uses: shogo82148/actions-setup-redis@v1
with:
redis-version: ${{ matrix.redis-version }}
- name: "Run tests for ${{ matrix.python-version }}"
env:
CLUBLOG_APIKEY: ${{ secrets.CLUBLOG_APIKEY }}
QRZ_USERNAME: ${{ secrets.QRZ_USERNAME }}
QRZ_PWD: ${{ secrets.QRZ_PWD }}
PYTHON_VERSION: ${{ matrix.python-version }}
# delay the execution randomly by a couple of seconds to reduce the amount
# of concurrent API calls on Clublog and QRZ.com when all CI jobs execute simultaneously
run: |
sleep $[ ( $RANDOM % 60 ) + 1 ]
pytest ./test
test_windows:
runs-on: "windows-latest"
runs-on: "windows-2022"
name: "Windows latest - Python ${{ matrix.python-version }}"
strategy:
matrix:
# lxml support for windows/python3.11 still missing (Dec 2022)
# https://github.com/lxml/lxml/pull/360
# python-version: ["3.6", "3.7", "3.8", "3.9", "3.10", "3.11"]
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
- uses: "actions/checkout@v3"

@@ -92,24 +139,30 @@ jobs:
python -m pip install -r requirements-pytest.txt
python -m pip install -r requirements-docs.txt
- name: Setup redis
# We have to download and install a non-official redis windows port
# since there is no official redis version for windows.
# 5.0 is good enough for our purposes. After installing the msi,
# redis will startup as a service.
# There are no github-actions supporting redis on windows.
# Github Actions Container services are also not available for windows.
# We have to download and install a non-official redis windows port
# since there is no official redis version for windows.
# Redis is then installed and run as a service
run: |
C:\msys64\usr\bin\wget.exe https://github.com/tporadowski/redis/releases/download/v5.0.14.1/Redis-x64-5.0.14.1.msi
msiexec /quiet /i Redis-x64-5.0.14.1.msi
C:\msys64\usr\bin\wget.exe https://github.com/redis-windows/redis-windows/releases/download/7.0.14/Redis-7.0.14-Windows-x64-msys2-with-Service.zip
C:\msys64\usr\bin\pacman.exe -S --noconfirm unzip
C:\msys64\usr\bin\unzip.exe Redis-7.0.14-Windows-x64-msys2-with-Service.zip
sc.exe create Redis binpath=${{ github.workspace }}\Redis-7.0.14-Windows-x64-msys2-with-Service\RedisService.exe start= auto
echo "Redis service created, now starting it"
net start Redis
echo "Redis service started"
- name: "Run tests for ${{ matrix.python-version }}"
env:
CLUBLOG_APIKEY: ${{ secrets.CLUBLOG_APIKEY }}
QRZ_USERNAME: ${{ secrets.QRZ_USERNAME }}
QRZ_PWD: ${{ secrets.QRZ_PWD }}
PYTHON_VERSION: ${{ matrix.python-version }}
# delay the execution randomly by 1-20sec to reduce the
# give redis service time to startup and
# delay the execution randomly by 5-20sec to reduce the
# amount of concurrent API calls on Clublog and QRZ.com
# when all CI jobs execute simultaneously
run: |
start-sleep -Seconds (1..10 | get-random)
start-sleep -Seconds (5..60 | get-random)
pytest
33 .readthedocs.yaml Normal file

@@ -0,0 +1,33 @@
# Read the Docs configuration file for Sphinx projects
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the OS, Python version and other tools you might need
build:
os: ubuntu-22.04
tools:
python: "3.11"

# Build documentation in the "docs/" directory with Sphinx
sphinx:
configuration: docs/source/conf.py
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
# builder: "dirhtml"
# Fail on all warnings to avoid broken references
# fail_on_warning: true

# Optionally build your docs in additional formats such as PDF and ePub
# formats:
# - pdf
# - epub

# Optional but recommended, declare the Python requirements required
# to build your documentation
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- method: setuptools
path: .
- requirements: readthedocs-pip-requirements.txt
2 LICENSE

@@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2014 Tobias Wellnitz
Copyright (c) 2025 Tobias Wellnitz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
32 README.md

@@ -4,7 +4,7 @@
[](https://codecov.io/gh/dh1tw/pyhamtools)
[](https://badge.fury.io/py/pyhamtools)
Pyhamtools is a set of functions and classes for Amateur Radio purpose.
Pyhamtools is a set of functions and classes for Amateur Radio purposes.
Currently, the core part is the Callsign Lookup which decodes any amateur radio
callsign string and provides the corresponding information (Country, DXCC
entity, CQ Zone...etc). This basic functionality is needed for Logbooks,

@@ -31,22 +31,24 @@ This Library is used in production at the [DXHeat.com DX Cluster](https://dxheat
## Compatibility
Pyhamtools is compatible with Python 2.7 and Python >=3.6.
We check compatibility on OSX, Windows, and Linux with the following Python
versions:
Pyhamtools is compatible with Python >=3.6.
We check compatibility on OSX, Windows, and Linux with the following Python versions:
* Python 2.7 (will be deprecated in 2023)
* Python 3.5 (has been deprecated in 2022)
* Python 3.6 (will be deprecated in 2023)
* Python 3.7
* Python 3.8
* Python 3.9
* Python 3.10
* Python 3.11
* [pypy2](https://pypy.org/) (Python 2)
* [pypy3.7](https://pypy.org/)
* Python 3.12
* Python 3.13
* [pypy3.8](https://pypy.org/)
* [pypy3.9](https://pypy.org/)
* [pypy3.10](https://pypy.org/)
### deprecated: Python 2.7 & Python 3.5
The support for Python 2.7 and 3.5 has been deprecated at the end of 2023. The last version which supports Python 2.7 and Python 3.5 is 0.8.7.
### deprecated: Python 3.6 & Python 3.7
Support for Python 3.6 and Python 3.7 has been deprecated in June 2025. The last version which supports Python 3.6 and Python 3.7 is 0.11.0.
## Documentation

@@ -62,9 +64,7 @@ Open Source Software licenses, including the MIT license at [choosealicense.com]
Starting with version 0.8.0, `libxml2-dev` and `libxslt-dev` are required dependencies.
## Installation
Install the dependencies (e.g. on Debian/Ubuntu):
There is a good chance that the libraries are already installed on your system. If not, you can install them with the package manager of your distro. For example, on Debian / Ubuntu based distros the corresponding command is:
```bash

@@ -72,6 +72,12 @@ $ sudo apt-get install libxml2-dev libxslt-dev
```
You don't need to install these libraries manually on Windows / MacOS.
## Installation
The easiest way to install pyhamtools is through the package manager `pip`:
```bash
@@ -1,6 +1,92 @@
Changelog
---------

PyHamtools 0.12.0
================
09. June 2025
* deprecated support for Python 3.6
* deprecated support for Python 3.7
* added support for higher Microwave bands (tnx @sq6emm)
* added support for 10 characters Maidenhead locators (tnx @sq6emm)
* updated CI pipeline

PyHamtools 0.11.0
================
02. March 2025
* added support for Python 3.13

PyHamtools 0.10.0
================
01. June 2024
* full support for 4, 6, 8 characters Maidenhead locator conversions

PyHamtools 0.9.1
================
17. March 2024
* switched from distutils to setuptools. No impact for end users.

PyHamtools 0.9.0
================
28. December 2023
* Deprecated support for Python 2.7 and Python 3.5
* Added Support for Python 3.12
* Replaced pytz with datetime.timezone
* Added Continuous Integration Jobs for MacOS (now supported by Github Actions)

PyHamtools 0.8.7
================
31. December 2022
* Lookuplib/Countryfiles: corrected Brazil to ADIF country id 108
* Lookuplib/Countryfiles: corrected Dominican Republic to ADIF country id 72
* Changed the remaining Clublog URLs to https://cdn.clublog.org

PyHamtools 0.8.6
================
26. December 2022
* fixed regex regression for detecting two-by-one callsigns

PyHamtools 0.8.5
================
26. December 2022
* refined regex for decoding callsigns. In particular to better recognize callsigns with one or more digits in the suffix (e.g. TI5N5BEK, DP44N44T)

PyHamtools 0.8.4
================
18. December 2022
* raise KeyError when callsigns contain non-latin characters (e.g. cyrillic letters)

PyHamtools 0.8.3
================
06. December 2022
* fixed XML parsing error in QRZ.com session key renewal

PyHamtools 0.8.2
================
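The pytz removal listed under 0.9.0 runs through most of the diffs below; a minimal sketch of what it means for callers that build their own timestamps (standard library only, no new dependency assumed):

```python
from datetime import datetime, timezone

# old style (pyhamtools <= 0.8.x), required the pytz package:
#   import pytz
#   timestamp = datetime.utcnow().replace(tzinfo=pytz.UTC)

# new style, as used throughout the diffs below:
timestamp = datetime.now(timezone.utc)
```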
@@ -12,19 +12,8 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from pyhamtools.version import __version__, __release__
sys.path.insert(0,"/Users/user/projects/pyhamtools/pyhamtools")
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.

@@ -36,6 +25,7 @@ sys.path.insert(0,"/Users/user/projects/pyhamtools/pyhamtools")
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx_rtd_dark_mode',
]
# Add any paths that contain templates here, relative to this directory.

@@ -52,7 +42,7 @@ master_doc = 'index'
# General information about the project.
project = u'pyhamtools'
copyright = u'2019, Tobias Wellnitz, DH1TW'
copyright = u'2024, Tobias Wellnitz, DH1TW'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

@@ -106,7 +96,9 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# html_theme = 'default'
html_theme = 'sphinx_rtd_theme'
# html_theme = 'sphinx_material'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -25,7 +25,7 @@ with some modules and classes which are frequently needed:
PyHamTools is used in production at the DXHeat.com DXCluster_, performing several thousand lookups and
calculations per day.
.. _Clublog.org: https://secure.clublog.org/
.. _Clublog.org: https://clublog.org/
.. _Country-Files.com: http://www.country-files.com/
.. _QRZ.com: http://qrz.com
.. _eQSL: http://eqsl.cc
@@ -1,22 +1,11 @@
import re
import logging
from datetime import datetime
import sys
import pytz
from datetime import datetime, timezone
from pyhamtools.consts import LookupConventions as const
from pyhamtools.callsign_exceptions import callsign_exceptions
UTC = pytz.UTC
if sys.version_info < (2, 7, ):
class NullHandler(logging.Handler):
def emit(self, record):
pass
class Callinfo(object):
"""
The purpose of this class is to return data (country, latitude, longitude, CQ Zone...etc) for an

@@ -37,10 +26,7 @@ class Callinfo(object):
self._logger = logger
else:
self._logger = logging.getLogger(__name__)
if sys.version_info[:2] == (2, 6):
self._logger.addHandler(NullHandler())
else:
self._logger.addHandler(logging.NullHandler())
self._logger.addHandler(logging.NullHandler())
self._lookuplib = lookuplib
self._callsign_info = None

@@ -81,10 +67,10 @@ class Callinfo(object):
"""truncate call until it corresponds to a Prefix in the database"""
prefix = callsign
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if re.search('(VK|AX|VI)9[A-Z]{3}', callsign): #special rule for VK9 calls
if timestamp > datetime(2006,1,1, tzinfo=UTC):
if timestamp > datetime(2006,1,1, tzinfo=timezone.utc):
prefix = callsign[0:3]+callsign[4:5]
while len(prefix) > 0:

@@ -115,7 +101,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Raises:
KeyError: Callsign could not be identified

@@ -124,7 +110,7 @@ class Callinfo(object):
"""
entire_callsign = callsign.upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if re.search('[/A-Z0-9\\-]{3,15}', entire_callsign): # make sure the call has at least 3 characters

@@ -205,7 +191,8 @@ class Callinfo(object):
return self._iterate_prefix(callsign, timestamp)
# regular callsigns, without prefix or appendix
elif re.match('^[\\d]{0,1}[A-Z]{1,2}\\d([A-Z]{1,4}|\\d{3,3}|\\d{1,3}[A-Z])[A-Z]{0,5}$', callsign):
# elif re.match('^[\\d]{0,1}[A-Z]{1,2}\\d{1,2}[A-Z]{1,2}([A-Z]{1,4}|\\d{1,3})[A-Z]{0,5}$', callsign):
elif re.match('^[\\d]{0,1}[A-Z]{1,2}\\d{1,4}([A-Z]{1,4}|[A-Z]{1,2}\\d{0,3})[A-Z]{0,5}$', callsign):
return self._iterate_prefix(callsign, timestamp)
# callsigns with prefixes (xxx/callsign)

@@ -214,7 +201,10 @@ class Callinfo(object):
pfx = re.sub('/', '', pfx.group(0))
#make sure that the remaining part is actually a callsign (avoid: OZ/JO81)
rest = re.search('/[A-Z0-9]+', entire_callsign)
rest = re.sub('/', '', rest.group(0))
if rest is None:
self._logger.warning(u"non latin characters in callsign '{0}'".format(entire_callsign))
raise KeyError
rest = re.sub('/', '', rest.group(0))
if re.match('^[\\d]{0,1}[A-Z]{1,2}\\d([A-Z]{1,4}|\\d{3,3}|\\d{1,3}[A-Z])[A-Z]{0,5}$', rest):
return self._iterate_prefix(pfx)

@@ -226,7 +216,7 @@ class Callinfo(object):
def _lookup_callsign(self, callsign, timestamp=None):
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
# Check if operation is invalid
invalid = False

@@ -274,7 +264,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
dict: Dictionary containing the callsign specific data

@@ -311,7 +301,7 @@ class Callinfo(object):
callsign = callsign.upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
callsign_data = self._lookup_callsign(callsign, timestamp)

@@ -328,7 +318,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
bool: True / False

@@ -344,7 +334,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
try:
if self.get_all(callsign, timestamp):

@@ -357,7 +347,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
dict: Containing Latitude and Longitude

@@ -384,7 +374,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
callsign_data = self.get_all(callsign, timestamp=timestamp)
return {

@@ -397,7 +387,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
int: containing the callsign's CQ Zone

@@ -407,7 +397,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
return self.get_all(callsign, timestamp)[const.CQZ]

@@ -416,7 +406,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
int: containing the callsign's CQ Zone

@@ -429,7 +419,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
return self.get_all(callsign, timestamp)[const.ITUZ]

@@ -438,7 +428,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
str: name of the Country

@@ -456,7 +446,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
return self.get_all(callsign, timestamp)[const.COUNTRY]

@@ -465,7 +455,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
int: containing the country ADIF id

@@ -475,7 +465,7 @@ class Callinfo(object):
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
return self.get_all(callsign, timestamp)[const.ADIF]

@@ -484,7 +474,7 @@ class Callinfo(object):
Args:
callsign (str): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
str: continent identified

@@ -504,6 +494,6 @@ class Callinfo(object):
- AN: Antarctica
"""
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
return self.get_all(callsign, timestamp)[const.CONTINENT]
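A minimal usage sketch of the Callinfo class touched above, adapted from the docstring examples in these diffs; the `countryfile` lookup backend and the exact keys of the returned dict are assumptions that depend on which LookupLib backend you choose:

```python
from datetime import datetime, timezone

from pyhamtools import LookupLib, Callinfo

# any LookupLib backend works; "countryfile" is assumed here because it needs no API key
my_lookuplib = LookupLib(lookuptype="countryfile")
cic = Callinfo(my_lookuplib)

# timestamps are plain timezone-aware datetimes now (no pytz required)
timestamp = datetime(year=2014, month=1, day=1, tzinfo=timezone.utc)

# full record for a callsign: country, adif, cqz, continent, latitude/longitude, ...
print(cic.get_all("DH1TW", timestamp=timestamp))
```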
@@ -143,7 +143,7 @@
"Hungary": 239,
"Sable Island": 211,
"Bosnia-Herzegovina": 501,
"Brazil": 18,
"Brazil": 108,
"Swains Island": 515,
"DPR of Korea": 344,
"Lakshadweep Islands": 142,

@@ -208,14 +208,13 @@
"Vanuatu": 158,
"Malawi": 440,
"Republic of the Congo": 412,
"Dominican Republic": 95,
"Dominican Republic": 72,
"St. Pierre & Miquelon": 277,
"St. Helena": 250,
"St. Peter ": 253,
"Baker & Howland Islands": 20,
"Willis Island": 303,
"Balearic Islands": 21,
"i.name": 150,
"European Turkey": 390,
"Rodriguez Island": 207,
"Guinea": 107,

@@ -362,4 +361,4 @@
"Amsterdam ": 10,
"Corsica": 214,
"Shetland Islands": 279
}
}
@@ -1,37 +1,30 @@
__author__ = 'dh1tw'
from datetime import datetime
from datetime import datetime, timezone
from time import strptime, mktime
import re
import pytz
from pyhamtools.consts import LookupConventions as const
UTC = pytz.UTC
def decode_char_spot(raw_string):
"""Chop Line from DX-Cluster into pieces and return a dict with the spot data"""
data = {}
# Spotter callsign
if re.match('[A-Za-z0-9\/]+[:$]', raw_string[6:15]):
data[const.SPOTTER] = re.sub(':', '', re.match('[A-Za-z0-9\/]+[:$]', raw_string[6:15]).group(0))
if re.match(r'[A-Za-z0-9\/]+[:$]', raw_string[6:15]):
data[const.SPOTTER] = re.sub(':', '', re.match(r'[A-Za-z0-9\/]+[:$]', raw_string[6:15]).group(0))
else:
raise ValueError
if re.search('[0-9\.]{5,12}', raw_string[10:25]):
data[const.FREQUENCY] = float(re.search('[0-9\.]{5,12}', raw_string[10:25]).group(0))
if re.search(r'[0-9\.]{5,12}', raw_string[10:25]):
data[const.FREQUENCY] = float(re.search(r'[0-9\.]{5,12}', raw_string[10:25]).group(0))
else:
raise ValueError
data[const.DX] = re.sub('[^A-Za-z0-9\/]+', '', raw_string[26:38])
data[const.COMMENT] = re.sub('[^\sA-Za-z0-9\.,;\#\+\-!\?\$\(\)@\/]+', ' ', raw_string[39:69]).strip()
data[const.TIME] = datetime.now().replace(tzinfo=UTC)
data[const.DX] = re.sub(r'[^A-Za-z0-9\/]+', '', raw_string[26:38])
data[const.COMMENT] = re.sub(r'[^\sA-Za-z0-9\.,;\#\+\-!\?\$\(\)@\/]+', ' ', raw_string[39:69]).strip()
data[const.TIME] = datetime.now(timezone.utc)
return data
@@ -182,7 +182,7 @@ def freq_to_band(freq):
elif ((freq >= 1200000) and (freq <= 1300000)):
band = 0.23 #23cm
mode = None
elif ((freq >= 2390000) and (freq <= 2450000)):
elif ((freq >= 2300000) and (freq <= 2450000)):
band = 0.13 #13cm
mode = None
elif ((freq >= 3300000) and (freq <= 3500000)):

@@ -200,7 +200,19 @@ def freq_to_band(freq):
elif ((freq >= 47000000) and (freq <= 47200000)):
band = 0.0063 #6,3mm
mode = None
elif ((freq >= 75500000) and (freq <= 81500000)):
band = 0.004 #4mm
mode = None
elif ((freq >= 122250000) and (freq <= 123000000)):
band = 0.0025 #2.5mm
mode = None
elif ((freq >= 134000000) and (freq <= 141000000)):
band = 0.002 #2mm
mode = None
elif ((freq >= 241000000) and (freq <= 250000000)):
band = 0.001 #1mm
mode = None
else:
raise KeyError
return {"band": band, "mode": mode}
return {"band": band, "mode": mode}
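For orientation, a short sketch of how the extended band table above resolves frequencies (given in kHz, as in the surrounding code); the `pyhamtools.frequency` module path is taken from the upstream package layout and is an assumption here:

```python
from pyhamtools.frequency import freq_to_band

# 78 GHz (78_000_000 kHz) falls into the newly added 75.5-81.5 GHz segment -> 4 mm
print(freq_to_band(78_000_000))   # {'band': 0.004, 'mode': None}

# 1296.2 MHz still resolves to 23 cm, unchanged by this diff
print(freq_to_band(1_296_200))    # {'band': 0.23, 'mode': None}

# frequencies outside all known segments raise KeyError
try:
    freq_to_band(999_999_999)
except KeyError:
    print("unknown band")
```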
@@ -1,18 +1,15 @@
from __future__ import division
from math import pi, sin, cos, atan2, sqrt, radians, log, tan, degrees
from datetime import datetime
from datetime import datetime, timezone
import pytz
import ephem
UTC = pytz.UTC
def latlong_to_locator (latitude, longitude):
def latlong_to_locator (latitude, longitude, precision=6):
"""converts WGS84 coordinates into the corresponding Maidenhead Locator
Args:
latitude (float): Latitude
longitude (float): Longitude
precision (int): 4,6,8,10 chars (default 6)
Returns:
string: Maidenhead locator

@@ -36,35 +33,54 @@ def latlong_to_locator (latitude, longitude):
"""
if precision < 4 or precision == 5 or precision == 7 or precision == 9 or precision > 10:
return ValueError
if longitude >= 180 or longitude <= -180:
raise ValueError
if latitude >= 90 or latitude <= -90:
raise ValueError
longitude += 180;
latitude +=90;
longitude +=180
latitude +=90
locator = chr(ord('A') + int(longitude / 20))
locator += chr(ord('A') + int(latitude / 10))
locator += chr(ord('0') + int((longitude % 20) / 2))
locator += chr(ord('0') + int(latitude % 10))
locator += chr(ord('A') + int((longitude - int(longitude / 2) * 2) / (2 / 24)))
locator += chr(ord('A') + int((latitude - int(latitude / 1) * 1 ) / (1 / 24)))
# copied & adapted from github.com/space-physics/maidenhead
A = ord('A')
a = divmod(longitude, 20)
b = divmod(latitude, 10)
locator = chr(A + int(a[0])) + chr(A + int(b[0]))
lon = a[1] / 2.0
lat = b[1]
i = 1
while i < precision/2:
i += 1
a = divmod(lon, 1)
b = divmod(lat, 1)
if not (i % 2):
locator += str(int(a[0])) + str(int(b[0]))
lon = 24 * a[1]
lat = 24 * b[1]
else:
locator += chr(A + int(a[0])) + chr(A + int(b[0]))
lon = 10 * a[1]
lat = 10 * b[1]
return locator
def locator_to_latlong (locator):
def locator_to_latlong (locator, center=True):
"""converts Maidenhead locator in the corresponding WGS84 coordinates
Args:
locator (string): Locator, either 4 or 6 characters
locator (string): Locator, either 4, 6 or 8 characters
center (bool): Center of (sub)square. By default True. If False, the south/western corner will be returned
Returns:
tuple (float, float): Latitude, Longitude
Raises:
ValueError: When called with wrong or invalid input arg
ValueError: When called with wrong or invalid Maidenhead locator string
TypeError: When arg is not a string
Example:

@@ -83,7 +99,7 @@ def locator_to_latlong (locator):
locator = locator.upper()
if len(locator) == 5 or len(locator) < 4:
if len(locator) < 4 or len(locator) == 5 or len(locator) == 7 or len(locator) == 9:
raise ValueError
if ord(locator[0]) > ord('R') or ord(locator[0]) < ord('A'):

@@ -104,23 +120,64 @@ def locator_to_latlong (locator):
if ord (locator[5]) > ord('X') or ord(locator[5]) < ord('A'):
raise ValueError
if len(locator) == 8:
if ord(locator[6]) > ord('9') or ord(locator[6]) < ord('0'):
raise ValueError
if ord (locator[7]) > ord('9') or ord(locator[7]) < ord('0'):
raise ValueError
if len(locator) == 10:
if ord(locator[8]) > ord('X') or ord(locator[8]) < ord('A'):
raise ValueError
if ord (locator[9]) > ord('X') or ord(locator[9]) < ord('A'):
raise ValueError
longitude = (ord(locator[0]) - ord('A')) * 20 - 180
latitude = (ord(locator[1]) - ord('A')) * 10 - 90
longitude += (ord(locator[2]) - ord('0')) * 2
latitude += (ord(locator[3]) - ord('0'))
latitude += (ord(locator[3]) - ord('0')) * 1
if len(locator) == 6:
longitude += ((ord(locator[4])) - ord('A')) * (2 / 24)
latitude += ((ord(locator[5])) - ord('A')) * (1 / 24)
if len(locator) == 4:
# move to center of subsquare
longitude += 1 / 24
latitude += 0.5 / 24
if center:
longitude += 2 / 2
latitude += 1.0 / 2
elif len(locator) == 6:
longitude += (ord(locator[4]) - ord('A')) * 5.0 / 60
latitude += (ord(locator[5]) - ord('A')) * 2.5 / 60
if center:
longitude += 5.0 / 60 / 2
latitude += 2.5 / 60 / 2
elif len(locator) == 8:
longitude += (ord(locator[4]) - ord('A')) * 5.0 / 60
latitude += (ord(locator[5]) - ord('A')) * 2.5 / 60
longitude += int(locator[6]) * 5.0 / 600
latitude += int(locator[7]) * 2.5 / 600
if center:
longitude += 5.0 / 600 / 2
latitude += 2.5 / 600 / 2
elif len(locator) == 10:
longitude += (ord(locator[4]) - ord('A')) * 5.0 / 60
latitude += (ord(locator[5]) - ord('A')) * 2.5 / 60
longitude += int(locator[6]) * 5.0 / 600
latitude += int(locator[7]) * 2.5 / 600
longitude += (ord(locator[8]) - ord('A')) * 1.0 / 2880
latitude += (ord(locator[9]) - ord('A')) * 1.0 / 5760
if center:
longitude += 1.0 / 2880 / 2
latitude += 1.0 / 5760 / 2
else:
# move to center of square
longitude += 1;
latitude += 0.5;
raise ValueError
return latitude, longitude

@@ -129,14 +186,14 @@ def calculate_distance(locator1, locator2):
"""calculates the (shortpath) distance between two Maidenhead locators
Args:
locator1 (string): Locator, either 4 or 6 characters
locator2 (string): Locator, either 4 or 6 characters
locator1 (string): Locator, either 4, 6 or 8 characters
locator2 (string): Locator, either 4, 6 or 8 characters
Returns:
float: Distance in km
Raises:
ValueError: When called with wrong or invalid input arg
ValueError: When called with wrong or invalid maidenhead locator strings
AttributeError: When args are not a string
Example:

@@ -146,6 +203,9 @@ def calculate_distance(locator1, locator2):
>>> calculate_distance("JN48QM", "QF67bf")
16466.413
Note:
Distance is calculated between the centers of the (sub) squares
"""
R = 6371 #earth radius

@@ -164,15 +224,15 @@ def calculate_distance(locator1, locator2):
c = 2 * atan2(sqrt(a), sqrt(1-a))
d = R * c #distance in km
return d;
return d
def calculate_distance_longpath(locator1, locator2):
"""calculates the (longpath) distance between two Maidenhead locators
Args:
locator1 (string): Locator, either 4 or 6 characters
locator2 (string): Locator, either 4 or 6 characters
locator1 (string): Locator, either 4, 6 or 8 characters
locator2 (string): Locator, either 4, 6 or 8 characters
Returns:
float: Distance in km

@@ -188,6 +248,8 @@ def calculate_distance_longpath(locator1, locator2):
>>> calculate_distance_longpath("JN48QM", "QF67bf")
23541.5867
Note:
Distance is calculated between the centers of the (sub) squares
"""
c = 40008 #[km] earth circumference

@@ -200,8 +262,8 @@ def calculate_heading(locator1, locator2):
"""calculates the heading from the first to the second locator
Args:
locator1 (string): Locator, either 4 or 6 characters
locator2 (string): Locator, either 4 or 6 characters
locator1 (string): Locator, either 4, 6 or 8 characters
locator2 (string): Locator, either 4, 6 or 8 characters
Returns:
float: Heading in deg

@@ -217,6 +279,9 @@ def calculate_heading(locator1, locator2):
>>> calculate_heading("JN48QM", "QF67bf")
74.3136
Note:
Heading is calculated between the centers of the (sub) squares
"""
lat1, long1 = locator_to_latlong(locator1)

@@ -240,8 +305,8 @@ def calculate_heading_longpath(locator1, locator2):
"""calculates the heading from the first to the second locator (long path)
Args:
locator1 (string): Locator, either 4 or 6 characters
locator2 (string): Locator, either 4 or 6 characters
locator1 (string): Locator, either 4, 6 or 8 characters
locator2 (string): Locator, either 4, 6 or 8 characters
Returns:
float: Long path heading in deg

@@ -257,6 +322,9 @@ def calculate_heading_longpath(locator1, locator2):
>>> calculate_heading_longpath("JN48QM", "QF67bf")
254.3136
Note:
Distance is calculated between the centers of the (sub) squares
"""
heading = calculate_heading(locator1, locator2)

@@ -269,7 +337,7 @@ def calculate_sunrise_sunset(locator, calc_date=None):
"""calculates the next sunset and sunrise for a Maidenhead locator at a given date & time
Args:
locator1 (string): Maidenhead Locator, either 4 or 6 characters
locator1 (string): Maidenhead Locator, either 4, 6 or 8 characters
calc_date (datetime, optional): Starting datetime for the calculations (UTC)
Returns:

@@ -283,16 +351,14 @@ def calculate_sunrise_sunset(locator, calc_date=None):
The following calculates the next sunrise & sunset for JN48QM on the 1./Jan/2014
>>> from pyhamtools.locator import calculate_sunrise_sunset
>>> from datetime import datetime
>>> import pytz
>>> UTC = pytz.UTC
>>> myDate = datetime(year=2014, month=1, day=1, tzinfo=UTC)
>>> from datetime import datetime, timezone
>>> myDate = datetime(year=2014, month=1, day=1, tzinfo=timezone.utc)
>>> calculate_sunrise_sunset("JN48QM", myDate)
{
'morning_dawn': datetime.datetime(2014, 1, 1, 6, 36, 51, 710524, tzinfo=<UTC>),
'sunset': datetime.datetime(2014, 1, 1, 16, 15, 23, 31016, tzinfo=<UTC>),
'evening_dawn': datetime.datetime(2014, 1, 1, 15, 38, 8, 355315, tzinfo=<UTC>),
'sunrise': datetime.datetime(2014, 1, 1, 7, 14, 6, 162063, tzinfo=<UTC>)
'morning_dawn': datetime.datetime(2014, 1, 1, 6, 36, 51, 710524, tzinfo=datetime.timezone.utc),
'sunset': datetime.datetime(2014, 1, 1, 16, 15, 23, 31016, tzinfo=datetime.timezone.utc),
'evening_dawn': datetime.datetime(2014, 1, 1, 15, 38, 8, 355315, tzinfo=datetime.timezone.utc),
'sunrise': datetime.datetime(2014, 1, 1, 7, 14, 6, 162063, tzinfo=datetime.timezone.utc)
}
"""

@@ -304,7 +370,7 @@ def calculate_sunrise_sunset(locator, calc_date=None):
latitude, longitude = locator_to_latlong(locator)
if calc_date is None:
calc_date = datetime.utcnow()
calc_date = datetime.now(timezone.utc)
if type(calc_date) != datetime:
raise ValueError

@@ -350,11 +416,11 @@ def calculate_sunrise_sunset(locator, calc_date=None):
result['sunset'] = sunset
if morning_dawn:
result['morning_dawn'] = morning_dawn.replace(tzinfo=UTC)
result['morning_dawn'] = morning_dawn.replace(tzinfo=timezone.utc)
if sunrise:
result['sunrise'] = sunrise.replace(tzinfo=UTC)
result['sunrise'] = sunrise.replace(tzinfo=timezone.utc)
if evening_dawn:
result['evening_dawn'] = evening_dawn.replace(tzinfo=UTC)
result['evening_dawn'] = evening_dawn.replace(tzinfo=timezone.utc)
if sunset:
result['sunset'] = sunset.replace(tzinfo=UTC)
result['sunset'] = sunset.replace(tzinfo=timezone.utc)
return result
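A short usage sketch of the extended locator API from the hunks above; the printed values are indicative (the docstring example in this diff gives 16466.413 km for the distance), and the default arguments follow the signatures shown here:

```python
from pyhamtools.locator import (latlong_to_locator, locator_to_latlong,
                                calculate_distance, calculate_heading)

# WGS84 coordinates -> Maidenhead locator, precision may now be 4, 6, 8 or 10 chars
print(latlong_to_locator(48.52, 9.375))               # 'JN48QM'
print(latlong_to_locator(48.52, 9.375, precision=8))  # two extra digits appended

# locator -> coordinates; center=False returns the south-west corner of the (sub)square
lat, lon = locator_to_latlong("JN48QM")
lat_sw, lon_sw = locator_to_latlong("JN48QM", center=False)

# distance and heading are calculated between the centers of the (sub)squares
print(calculate_distance("JN48QM", "QF67bf"))  # ~16466 km
print(calculate_heading("JN48QM", "QF67bf"))   # ~74.3 degrees
```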
@@ -1,38 +1,24 @@
from __future__ import unicode_literals
import os
import logging
import logging.config
import re
import random, string
from datetime import datetime
from datetime import datetime, timezone
import xml.etree.ElementTree as ET
import urllib
import json
import copy
import sys
import unicodedata
import requests
from requests.exceptions import ConnectionError, HTTPError, Timeout
from bs4 import BeautifulSoup
import pytz
from . import version
from .consts import LookupConventions as const
from .exceptions import APIKeyMissingError
UTC = pytz.UTC
REDIS_LUA_DEL_SCRIPT = "local keys = redis.call('keys', ARGV[1]) \n for i=1,#keys,20000 do \n redis.call('del', unpack(keys, i, math.min(i+19999, #keys))) \n end \n return keys"
if sys.version_info < (2, 7,):
class NullHandler(logging.Handler):
def emit(self, record):
pass
if sys.version_info.major == 3:
unicode = str
class LookupLib(object):
"""

@@ -80,10 +66,7 @@ class LookupLib(object):
self._logger = logger
else:
self._logger = logging.getLogger(__name__)
if sys.version_info[:2] == (2, 6):
self._logger.addHandler(NullHandler())
else:
self._logger.addHandler(logging.NullHandler())
self._logger.addHandler(logging.NullHandler())
self._apikey = apikey
self._apiv = apiv

@@ -131,10 +114,7 @@ class LookupLib(object):
"agent" : agent
}
if sys.version_info.major == 3:
encodeurl = url + "?" + urllib.parse.urlencode(params)
else:
encodeurl = url + "?" + urllib.urlencode(params)
encodeurl = url + "?" + urllib.parse.urlencode(params)
response = requests.get(encodeurl, timeout=10)
doc = BeautifulSoup(response.text, "xml")
session_key = None

@@ -334,7 +314,7 @@ class LookupLib(object):
Args:
callsign (string): Amateur radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
dict: Dictionary containing the country specific data of the callsign

@@ -347,10 +327,9 @@ class LookupLib(object):
The following code queries the online Clublog API for the callsign "VK9XO" on a specific date.
>>> from pyhamtools import LookupLib
>>> from datetime import datetime
>>> import pytz
>>> from datetime import datetime, timezone
>>> my_lookuplib = LookupLib(lookuptype="clublogapi", apikey="myapikey")
>>> timestamp = datetime(year=1962, month=7, day=7, tzinfo=pytz.UTC)
>>> timestamp = datetime(year=1962, month=7, day=7, tzinfo=timezone.utc)
>>> print my_lookuplib.lookup_callsign("VK9XO", timestamp)
{
'country': u'CHRISTMAS ISLAND',

@@ -374,7 +353,7 @@ class LookupLib(object):
"""
callsign = callsign.strip().upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if self._lookuptype == "clublogapi":
callsign_data = self._lookup_clublogAPI(callsign=callsign, timestamp=timestamp, apikey=self._apikey)

@@ -499,7 +478,7 @@ class LookupLib(object):
Args:
prefix (string): Prefix of an Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
dict: Dictionary containing the country specific data of the Prefix

@@ -536,7 +515,7 @@ class LookupLib(object):
prefix = prefix.strip().upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if self._lookuptype == "clublogxml" or self._lookuptype == "countryfile":

@@ -556,7 +535,7 @@ class LookupLib(object):
Args:
callsign (string): Amateur Radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
bool: True if a record exists for this callsign (at the given time)

@@ -569,13 +548,12 @@ class LookupLib(object):
The following code checks the Clublog XML database if the operation is valid for two dates.
>>> from pyhamtools import LookupLib
>>> from datetime import datetime
>>> import pytz
>>> from datetime import datetime, timezone
>>> my_lookuplib = LookupLib(lookuptype="clublogxml", apikey="myapikey")
>>> print my_lookuplib.is_invalid_operation("5W1CFN")
True
>>> try:
>>> timestamp = datetime(year=2012, month=1, day=31).replace(tzinfo=pytz.UTC)
>>> timestamp = datetime(year=2012, month=1, day=31, tzinfo=timezone.utc)
>>> my_lookuplib.is_invalid_operation("5W1CFN", timestamp)
>>> except KeyError:
>>> print "Seems to be invalid operation before 31.1.2012"

@@ -591,7 +569,7 @@ class LookupLib(object):
callsign = callsign.strip().upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if self._lookuptype == "clublogxml":

@@ -645,7 +623,7 @@ class LookupLib(object):
Args:
callsign (string): Amateur radio callsign
timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC)
timestamp (datetime, optional): datetime in UTC (tzinfo=timezone.utc)
Returns:
int: Value of the CQ Zone exception which exists for this callsign (at the given time)

@@ -675,7 +653,7 @@ class LookupLib(object):
callsign = callsign.strip().upper()
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if self._lookuptype == "clublogxml":

@@ -689,7 +667,7 @@ class LookupLib(object):
#no matching case
raise KeyError
def _lookup_clublogAPI(self, callsign=None, timestamp=None, url="https://secure.clublog.org/dxcc", apikey=None):
def _lookup_clublogAPI(self, callsign=None, timestamp=None, url="https://cdn.clublog.org/dxcc", apikey=None):
""" Set up the Lookup object for Clublog Online API
"""

@@ -704,12 +682,9 @@ class LookupLib(object):
}
if timestamp is None:
timestamp = datetime.utcnow().replace(tzinfo=UTC)
timestamp = datetime.now(timezone.utc)
if sys.version_info.major == 3:
encodeurl = url + "?" + urllib.parse.urlencode(params)
else:
encodeurl = url + "?" + urllib.urlencode(params)
encodeurl = url + "?" + urllib.parse.urlencode(params)
response = requests.get(encodeurl, timeout=5)
if not self._check_html_response(response):

@@ -740,10 +715,7 @@ class LookupLib(object):
"callsign" : callsign,
}
if sys.version_info.major == 3:
encodeurl = url + "?" + urllib.parse.urlencode(params)
else:
encodeurl = url + "?" + urllib.urlencode(params)
encodeurl = url + "?" + urllib.parse.urlencode(params)
response = requests.get(encodeurl, timeout=5)
return response

@@ -756,10 +728,7 @@ class LookupLib(object):
"dxcc" : str(dxcc_or_callsign),
}
if sys.version_info.major == 3:
encodeurl = url + "?" + urllib.parse.urlencode(params)
else:
encodeurl = url + "?" + urllib.urlencode(params)
encodeurl = url + "?" + urllib.parse.urlencode(params)
response = requests.get(encodeurl, timeout=5)
return response

@@ -783,7 +752,7 @@ class LookupLib(object):
else:
raise AttributeError("Session Key Missing") #most likely session key missing or invalid
if root.dxcc is None:
if root.DXCC is None:
raise ValueError
if root.DXCC.dxcc:

@@ -830,7 +799,7 @@ class LookupLib(object):
raise KeyError(root.Error.text)
#try to get a new session key and try to request again
elif re.search('Session Timeout', root.Error.text, re.I) or re.search('Invalid session key', root.error.text, re.I):
elif re.search('Session Timeout', root.Error.text, re.I) or re.search('Invalid session key', root.Error.text, re.I):
apikey = self._get_qrz_session_key(self._username, self._pwd)
response = self._request_callsign_info_from_qrz(callsign, apikey, apiv)
root = BeautifulSoup(response.text, "xml")

@@ -890,12 +859,12 @@ class LookupLib(object):
lookup[const.LAND] = root.Callsign.land.text
if root.Callsign.efdate:
try:
lookup[const.EFDATE] = datetime.strptime(root.Callsign.efdate.text, '%Y-%m-%d').replace(tzinfo=UTC)
lookup[const.EFDATE] = datetime.strptime(root.Callsign.efdate.text, '%Y-%m-%d').replace(tzinfo=timezone.utc)
except ValueError:
self._logger.debug("[QRZ.com] efdate: Invalid DateTime; " + callsign + " " + root.Callsign.efdate.text)
if root.Callsign.expdate:
try:
lookup[const.EXPDATE] = datetime.strptime(root.Callsign.expdate.text, '%Y-%m-%d').replace(tzinfo=UTC)
lookup[const.EXPDATE] = datetime.strptime(root.Callsign.expdate.text, '%Y-%m-%d').replace(tzinfo=timezone.utc)
except ValueError:
self._logger.debug("[QRZ.com] expdate: Invalid DateTime; " + callsign + " " + root.Callsign.expdate.text)
if root.Callsign.p_call:

@@ -916,7 +885,7 @@ class LookupLib(object):
lookup[const.BIO] = root.Callsign.bio.text
if root.Callsign.biodate:
try:
lookup[const.BIODATE] = datetime.strptime(root.Callsign.biodate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC)
lookup[const.BIODATE] = datetime.strptime(root.Callsign.biodate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
except ValueError:
self._logger.warning("[QRZ.com] biodate: Invalid DateTime; " + callsign)
if root.Callsign.image:

@@ -927,7 +896,7 @@ class LookupLib(object):
lookup[const.SERIAL] = long(root.Callsign.serial.text)
if root.Callsign.moddate:
try:
lookup[const.MODDATE] = datetime.strptime(root.Callsign.moddate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=UTC)
lookup[const.MODDATE] = datetime.strptime(root.Callsign.moddate.text, '%Y-%m-%d %H:%M:%S').replace(tzinfo=timezone.utc)
except ValueError:
self._logger.warning("[QRZ.com] moddate: Invalid DateTime; " + callsign)
if root.Callsign.MSA:

@@ -971,10 +940,6 @@ class LookupLib(object):
if root.Callsign.geoloc:
lookup[const.GEOLOC] = root.Callsign.geoloc.text
# if sys.version_info >= (2,):
# for item in lookup:
# if isinstance(lookup[item], unicode):
# print item, repr(lookup[item])
return lookup
def _load_clublogXML(self,

@@ -1135,7 +1100,7 @@ class LookupLib(object):
if cty_date:
cty_date = cty_date.group(0).replace("date=", "").replace("'", "")
cty_date = datetime.strptime(cty_date[:19], '%Y-%m-%dT%H:%M:%S')
cty_date.replace(tzinfo=UTC)
cty_date.replace(tzinfo=timezone.utc)
cty_header["Date"] = cty_date
cty_ns = re.search("xmlns='.+[']", raw_header)

@@ -1215,10 +1180,10 @@ class LookupLib(object):
entity = {}
for item in cty_entity:
if item.tag == "name":
entity[const.COUNTRY] = unicode(item.text)
self._logger.debug(unicode(item.text))
entity[const.COUNTRY] = str(item.text)
self._logger.debug(str(item.text))
elif item.tag == "prefix":
entity[const.PREFIX] = unicode(item.text)
entity[const.PREFIX] = str(item.text)
elif item.tag == "deleted":
if item.text == "TRUE":
entity[const.DELETED] = True

@@ -1227,17 +1192,17 @@ class LookupLib(object):
elif item.tag == "cqz":
entity[const.CQZ] = int(item.text)
elif item.tag == "cont":
entity[const.CONTINENT] = unicode(item.text)
entity[const.CONTINENT] = str(item.text)
elif item.tag == "long":
entity[const.LONGITUDE] = float(item.text)
elif item.tag == "lat":
entity[const.LATITUDE] = float(item.text)
elif item.tag == "start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
entity[const.START] = dt.replace(tzinfo=UTC)
entity[const.START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
entity[const.END] = dt.replace(tzinfo=UTC)
entity[const.END] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "whitelist":
if item.text == "TRUE":
entity[const.WHITELIST] = True

@@ -1245,10 +1210,10 @@ class LookupLib(object):
entity[const.WHITELIST] = False
elif item.tag == "whitelist_start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
entity[const.WHITELIST_START] = dt.replace(tzinfo=UTC)
entity[const.WHITELIST_START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "whitelist_end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
entity[const.WHITELIST_END] = dt.replace(tzinfo=UTC)
entity[const.WHITELIST_END] = dt.replace(tzinfo=timezone.utc)
except AttributeError:
self._logger.error("Error while processing: ")
entities[int(cty_entity[0].text)] = entity

@@ -1269,23 +1234,23 @@ class LookupLib(object):
else:
call_exceptions_index[call] = [int(cty_exception.attrib["record"])]
elif item.tag == "entity":
call_exception[const.COUNTRY] = unicode(item.text)
call_exception[const.COUNTRY] = str(item.text)
elif item.tag == "adif":
call_exception[const.ADIF] = int(item.text)
elif item.tag == "cqz":
call_exception[const.CQZ] = int(item.text)
elif item.tag == "cont":
call_exception[const.CONTINENT] = unicode(item.text)
call_exception[const.CONTINENT] = str(item.text)
elif item.tag == "long":
call_exception[const.LONGITUDE] = float(item.text)
elif item.tag == "lat":
call_exception[const.LATITUDE] = float(item.text)
elif item.tag == "start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
call_exception[const.START] = dt.replace(tzinfo=UTC)
call_exception[const.START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
call_exception[const.END] = dt.replace(tzinfo=UTC)
call_exception[const.END] = dt.replace(tzinfo=timezone.utc)
call_exceptions[int(cty_exception.attrib["record"])] = call_exception
self._logger.debug(str(len(call_exceptions))+" Exceptions added")

@@ -1310,23 +1275,23 @@ class LookupLib(object):
else:
prefixes_index[call] = [int(cty_prefix.attrib["record"])]
if item.tag == "entity":
prefix[const.COUNTRY] = unicode(item.text)
prefix[const.COUNTRY] = str(item.text)
elif item.tag == "adif":
prefix[const.ADIF] = int(item.text)
elif item.tag == "cqz":
prefix[const.CQZ] = int(item.text)
elif item.tag == "cont":
prefix[const.CONTINENT] = unicode(item.text)
prefix[const.CONTINENT] = str(item.text)
elif item.tag == "long":
prefix[const.LONGITUDE] = float(item.text)
elif item.tag == "lat":
prefix[const.LATITUDE] = float(item.text)
elif item.tag == "start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
prefix[const.START] = dt.replace(tzinfo=UTC)
prefix[const.START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
prefix[const.END] = dt.replace(tzinfo=UTC)
prefix[const.END] = dt.replace(tzinfo=timezone.utc)
prefixes[int(cty_prefix.attrib["record"])] = prefix
self._logger.debug(str(len(prefixes))+" Prefixes added")

@@ -1349,10 +1314,10 @@ class LookupLib(object):
elif item.tag == "start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
invalid_operation[const.START] = dt.replace(tzinfo=UTC)
invalid_operation[const.START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
invalid_operation[const.END] = dt.replace(tzinfo=UTC)
invalid_operation[const.END] = dt.replace(tzinfo=timezone.utc)
invalid_operations[int(cty_inv_operation.attrib["record"])] = invalid_operation
self._logger.debug(str(len(invalid_operations))+" Invalid Operations added")

@@ -1378,10 +1343,10 @@ class LookupLib(object):
zoneException[const.CQZ] = int(item.text)
elif item.tag == "start":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
zoneException[const.START] = dt.replace(tzinfo=UTC)
zoneException[const.START] = dt.replace(tzinfo=timezone.utc)
elif item.tag == "end":
dt = datetime.strptime(item.text[:19], '%Y-%m-%dT%H:%M:%S')
zoneException[const.END] = dt.replace(tzinfo=UTC)
zoneException[const.END] = dt.replace(tzinfo=timezone.utc)
zone_exceptions[int(cty_zone_exception.attrib["record"])] = zoneException
self._logger.debug(str(len(zone_exceptions))+" Zone Exceptions added")

@@ -1437,12 +1402,12 @@ class LookupLib(object):
for item in cty_list:
entry = {}
call = str(item)
entry[const.COUNTRY] = unicode(cty_list[item]["Country"])
entry[const.COUNTRY] = str(cty_list[item]["Country"])
if mapping:
entry[const.ADIF] = int(mapping[cty_list[item]["Country"]])
entry[const.CQZ] = int(cty_list[item]["CQZone"])
entry[const.ITUZ] = int(cty_list[item]["ITUZone"])
entry[const.CONTINENT] = unicode(cty_list[item]["Continent"])
entry[const.CONTINENT] = str(cty_list[item]["Continent"])
entry[const.LATITUDE] = float(cty_list[item]["Latitude"])
entry[const.LONGITUDE] = float(cty_list[item]["Longitude"])*(-1)

@@ -1534,17 +1499,17 @@ class LookupLib(object):
elif item == const.LONGITUDE:
my_dict[item] = float(my_dict[item])
elif item == const.START:
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=UTC)
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=timezone.utc)
elif item == const.END:
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=UTC)
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=timezone.utc)
elif item == const.WHITELIST_START:
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=UTC)
|
||||
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=timezone.utc)
|
||||
elif item == const.WHITELIST_END:
|
||||
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=UTC)
|
||||
my_dict[item] = datetime.strptime(my_dict[item], '%Y-%m-%d%H:%M:%S').replace(tzinfo=timezone.utc)
|
||||
elif item == const.WHITELIST:
|
||||
my_dict[item] = self._str_to_bool(my_dict[item])
|
||||
else:
|
||||
my_dict[item] = unicode(my_dict[item])
|
||||
my_dict[item] = str(my_dict[item])
|
||||
|
||||
return my_dict
|
||||
|
||||
|
|
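A note on the recurring change above: the LookupLib parser now attaches the standard-library timezone.utc instead of pytz's UTC object when it converts Club Log's date strings into aware datetimes. A minimal sketch of the pattern (the timestamp literal is only an example):

    from datetime import datetime, timezone

    # Club Log delivers ISO-like timestamps; only the first 19 characters are parsed.
    raw = "2016-01-20T00:00:00+00:00"
    dt = datetime.strptime(raw[:19], "%Y-%m-%dT%H:%M:%S")
    aware = dt.replace(tzinfo=timezone.utc)   # previously: dt.replace(tzinfo=pytz.UTC)
    assert aware.utcoffset().total_seconds() == 0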
@@ -1,4 +1,3 @@
from future.utils import iteritems
from datetime import datetime
import re

@@ -94,7 +93,7 @@ def get_clublog_users(**kwargs):
'lastupload': datetime.datetime(2013, 5, 8, 15, 0, 6),
'oqrs': True}

.. _CLUBLOG: https://secure.clublog.org
.. _CLUBLOG: https://clublog.org
__ CLUBLOG_

"""

@@ -106,7 +105,7 @@ def get_clublog_users(**kwargs):
try:
url = kwargs['url']
except KeyError:
url = "https://secure.clublog.org/clublog-users.json.zip"
url = "https://cdn.clublog.org/clublog-users.json.zip"

try:
result = requests.get(url)

@@ -125,7 +124,7 @@ def get_clublog_users(**kwargs):

error_count = 0

for call, call_data in iteritems(cl_data):
for call, call_data in cl_data.items():
try:
data = {}
if "firstqso" in call_data:
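The hunk above also drops the last Python 2 compatibility shim in this function: future.utils.iteritems(cl_data) becomes the plain dict method cl_data.items(). Illustration with a stand-in payload (the dictionary content is hypothetical):

    cl_data = {"DH1TW": {"oqrs": True}}       # stand-in for the downloaded Club Log user list
    for call, call_data in cl_data.items():   # Python-3-only replacement for iteritems()
        oqrs = call_data.get("oqrs", False)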
@@ -1,3 +1,3 @@
VERSION = (0, 8, 2)
VERSION = (0, 12, 0)
__release__ = ''.join(['-.'[type(x) == int]+str(x) for x in VERSION])[1:]
__version__ = '.'.join((str(VERSION[0]), str(VERSION[1])))
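For reference, the two derived strings in the version module evaluate as follows for the new version tuple (computed from the expressions shown above):

    VERSION = (0, 12, 0)
    # '-.'[type(x) == int] picks '.' for integer components and '-' for anything else
    # (e.g. a release candidate tag); the leading separator is then stripped.
    __release__ = ''.join(['-.'[type(x) == int] + str(x) for x in VERSION])[1:]   # '0.12.0'
    __version__ = '.'.join((str(VERSION[0]), str(VERSION[1])))                    # '0.12'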
@@ -1,3 +1,5 @@
sphinx>=1.8.5
sphinxcontrib-napoleon>=0.7
beautifulsoup4>=4.7.1
beautifulsoup4>=4.7.1
sphinx_rtd_theme>=0.5.2
sphinx_rtd_dark_mode>=0.1.2
@@ -1,5 +1,9 @@
pytest>=7.0.0; python_version>='3.7'
pytest==4.6.11; python_version<='3.6' and python_version>='2.7'
pytest>=7.0.0
pytest-blockage>=0.2.2
pytest-localserver>=0.5
pytest-cov>=2.12
maidenhead==1.6.0
requests>=2.32.4
beautifulsoup4==4.13.4
redis==5.2.1
ephem==4.2
setup.py (7 changes)
@@ -1,7 +1,6 @@
#!/usr/bin/env python
import sys
import os
from distutils.core import setup
from setuptools import setup

kw = {}

@@ -16,12 +15,10 @@ setup(name='pyhamtools',
package_data={'': ['countryfilemapping.json']},
packages=['pyhamtools'],
install_requires=[
"pytz>=2019.1",
"requests>=2.21.0",
"ephem>=4.1.3",
"beautifulsoup4>=4.7.1",
"lxml>=4.8.0",
"future>=0.18.2",
"lxml>=5.0.0",
"redis>=2.10.6",
],
**kw
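The packaging change above swaps distutils for setuptools and trims the dependency list (pytz and future are gone, lxml is bumped to >=5.0.0). A condensed sketch of the resulting call, omitting metadata fields not shown in this diff:

    from setuptools import setup

    setup(
        name="pyhamtools",
        packages=["pyhamtools"],
        package_data={"": ["countryfilemapping.json"]},
        install_requires=[
            "requests>=2.21.0",
            "ephem>=4.1.3",
            "beautifulsoup4>=4.7.1",
            "lxml>=5.0.0",
            "redis>=2.10.6",
        ],
    )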
@@ -1,7 +1,7 @@
import pytest
import tempfile
import pkgutil
import json
import os
import logging

from pyhamtools import LookupLib
from pyhamtools import Callinfo

@@ -96,3 +96,7 @@ def fix_redis():
@pytest.fixture(scope="module")
def fix_qrz():
return LookupLib(lookuptype="qrz", username=QRZ_USERNAME, pwd=QRZ_PWD)

@pytest.fixture(scope="session")
def fixCountryMapping():
return json.loads(pkgutil.get_data("pyhamtools", "countryfilemapping.json"))
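The new session-scoped fixture reads the bundled country-file mapping straight from the installed package, which works regardless of the current working directory:

    import json
    import pkgutil

    # countryfilemapping.json is shipped via package_data (see setup.py above)
    mapping = json.loads(pkgutil.get_data("pyhamtools", "countryfilemapping.json"))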
test/fixtures/cty.plist (vendored, 47114 changes)
File diff suppressed because it is too large
|
|
@ -1,13 +1,10 @@
|
|||
from datetime import datetime
|
||||
# -*- coding: utf-8 -*-
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import pytest
|
||||
import pytz
|
||||
|
||||
from pyhamtools.consts import LookupConventions as const
|
||||
|
||||
UTC = pytz.UTC
|
||||
|
||||
|
||||
response_prefix_DH_clublog = {
|
||||
'country': 'FEDERAL REPUBLIC OF GERMANY',
|
||||
'adif': 230,
|
||||
|
|
@ -17,6 +14,24 @@ response_prefix_DH_clublog = {
|
|||
'cqz': 14,
|
||||
}
|
||||
|
||||
response_prefix_OE_clublog = {
|
||||
'country': 'AUSTRIA',
|
||||
'adif': 206,
|
||||
'continent': 'EU',
|
||||
'latitude': 47.3,
|
||||
'longitude': 13.3,
|
||||
'cqz': 15,
|
||||
}
|
||||
|
||||
response_prefix_RU_clublog = {
|
||||
'country': 'EUROPEAN RUSSIA',
|
||||
'adif': 54,
|
||||
'continent': 'EU',
|
||||
'latitude': 55.8,
|
||||
'longitude': 37.6,
|
||||
'cqz': 16,
|
||||
}
|
||||
|
||||
response_prefix_DH_countryfile = {
|
||||
'country': 'Fed. Rep. of Germany',
|
||||
'adif': 230,
|
||||
|
|
@ -75,22 +90,49 @@ response_prefix_VK9DWX_clublog = {
|
|||
}
|
||||
|
||||
response_prefix_VK9DLX_clublog = {
|
||||
u'adif': 147,
|
||||
u'adif': 189,
|
||||
u'continent': u'OC',
|
||||
u'country': u'LORD HOWE ISLAND',
|
||||
u'cqz': 30,
|
||||
u'latitude': -31.6,
|
||||
u'longitude': 159.1
|
||||
u'country': u'NORFOLK ISLAND',
|
||||
u'cqz': 32,
|
||||
u'latitude': -29.0,
|
||||
u'longitude': 168.0
|
||||
}
|
||||
|
||||
response_prefix_TA7I_clublog = {
|
||||
u'adif': 390,
|
||||
u'continent': u'AS',
|
||||
u'country': u'TURKEY',
|
||||
u'cqz': 20,
|
||||
u'latitude': 40.0,
|
||||
u'longitude': 33.0
|
||||
}
|
||||
|
||||
response_prefix_W2T_clublog = {
|
||||
u'adif': 291,
|
||||
u'continent': u'NA',
|
||||
u'country': u'UNITED STATES OF AMERICA',
|
||||
u'cqz': 5,
|
||||
u'latitude': 43.0,
|
||||
u'longitude': -87.9
|
||||
}
|
||||
|
||||
response_prefix_V26K_clublog = {
|
||||
u'adif': 94,
|
||||
u'continent': u'NA',
|
||||
u'country': u'ANTIGUA & BARBUDA',
|
||||
u'cqz': 8,
|
||||
u'latitude': 17.1,
|
||||
u'longitude': -61.8
|
||||
}
|
||||
|
||||
response_prefix_VK9DLX_countryfile = {
|
||||
u'adif': 147,
|
||||
u'adif': 189,
|
||||
u'continent': u'OC',
|
||||
u'country': u'Lord Howe Island',
|
||||
u'cqz': 30,
|
||||
u'country': u'Norfolk Island',
|
||||
u'cqz': 32,
|
||||
u'ituz': 60,
|
||||
u'latitude': -31.55,
|
||||
u'longitude': 159.08
|
||||
u'latitude': -29.03,
|
||||
u'longitude': 167.93
|
||||
}
|
||||
|
||||
response_prefix_VK9GMW_clublog = {
|
||||
|
|
@ -102,6 +144,24 @@ response_prefix_VK9GMW_clublog = {
|
|||
u'longitude': 155.8
|
||||
}
|
||||
|
||||
response_prefix_8J1H90T_clublog = {
|
||||
u'adif': 339,
|
||||
u'continent': u'AS',
|
||||
u'country': u'JAPAN',
|
||||
u'cqz': 25,
|
||||
u'latitude': 35.7,
|
||||
u'longitude': 139.8
|
||||
}
|
||||
|
||||
response_prefix_TI5N5BEK_clublog = {
|
||||
u'adif': 308,
|
||||
u'continent': u'NA',
|
||||
u'country': u'COSTA RICA',
|
||||
u'cqz': 7,
|
||||
u'latitude': 9.9,
|
||||
u'longitude': -84.0
|
||||
}
|
||||
|
||||
response_callsign_exceptions_7N1PRD_0_clublog = {
|
||||
u'adif': 339,
|
||||
u'continent': u'AS',
|
||||
|
|
@ -134,8 +194,8 @@ response_Exception_VK9XO_with_start_date = {
|
|||
'adif': 35,
|
||||
'country': 'CHRISTMAS ISLAND',
|
||||
'continent': 'OC',
|
||||
'latitude': -10.50,
|
||||
'longitude': 105.70,
|
||||
'latitude': -10.48,
|
||||
'longitude': 105.62,
|
||||
'cqz': 29
|
||||
}
|
||||
|
||||
|
|
@ -230,12 +290,25 @@ class Test_callinfo_methods:
|
|||
assert fix_callinfo._dismantle_callsign("DH1TW/M") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW/B")[const.BEACON]
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DA2X") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DN1BU") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("OE50SPUTNIK") == response_prefix_OE_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DL60LINDAU") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DP75HILDE") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DL1640Y") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("V26K") == response_prefix_V26K_clublog
|
||||
assert fix_callinfo._dismantle_callsign("W2T") == response_prefix_W2T_clublog
|
||||
assert fix_callinfo._dismantle_callsign("R2AQH") == response_prefix_RU_clublog
|
||||
assert fix_callinfo._dismantle_callsign("TA7I") == response_prefix_TA7I_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DP44N44T") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DL/HC2AO") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("9H5A/C6A") == response_prefix_C6A_clublog
|
||||
assert fix_callinfo._dismantle_callsign("C6A/9H5A") == response_prefix_C6A_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW/UNI") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW/BUX") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW/NOT") == response_prefix_DH_clublog
|
||||
assert fix_callinfo._dismantle_callsign("8J1H90T") == response_prefix_8J1H90T_clublog
|
||||
assert fix_callinfo._dismantle_callsign("TI5N5BEK") == response_prefix_TI5N5BEK_clublog
|
||||
assert fix_callinfo._dismantle_callsign("VK9DLX/NOT") == response_prefix_VK9DLX_clublog
|
||||
assert fix_callinfo._dismantle_callsign("7QAA") == response_callsign_exceptions_7QAA_clublog
|
||||
assert fix_callinfo._dismantle_callsign("7N1PRD/0") == response_callsign_exceptions_7N1PRD_0_clublog
|
||||
|
|
@ -243,6 +316,8 @@ class Test_callinfo_methods:
|
|||
|
||||
with pytest.raises(KeyError):
|
||||
fix_callinfo._dismantle_callsign("OZ/JO85")
|
||||
with pytest.raises(KeyError):
|
||||
fix_callinfo._dismantle_callsign("DL")
|
||||
|
||||
if fix_callinfo._lookuplib._lookuptype == "countryfile":
|
||||
assert fix_callinfo._dismantle_callsign("DH1TW/QRP") == response_prefix_DH_countryfile
|
||||
|
|
@ -265,6 +340,15 @@ class Test_callinfo_methods:
|
|||
fix_callinfo._dismantle_callsign("OZ/JO85")
|
||||
|
||||
|
||||
def test_dismantle_callsign_with_cyrillic_characters(self, fix_callinfo):
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
fix_callinfo._dismantle_callsign(u"RД3MAS") #cyrillic letter 'Д' in call
|
||||
with pytest.raises(KeyError):
|
||||
fix_callinfo._dismantle_callsign(u"RД3/K9MAS") #cyrillic letter 'Д' in prefix
|
||||
with pytest.raises(KeyError):
|
||||
fix_callinfo._dismantle_callsign(u"R2EA/М") #cyrillic letter 'M' in appendix
|
||||
|
||||
def test_dismantle_callsign_with_VK9_special_suffixes(self, fix_callinfo):
|
||||
|
||||
if fix_callinfo._lookuplib._lookuptype == "clublog":
|
||||
|
|
@ -301,7 +385,7 @@ class Test_callinfo_methods:
|
|||
if fix_callinfo._lookuplib._lookuptype == "clublogxml" or fix_callinfo._lookuplib._lookuptype == "clublogapi":
|
||||
assert fix_callinfo.get_all("DH1TW") == response_prefix_DH_clublog
|
||||
assert fix_callinfo.get_all("ci8aw") == response_zone_exception_ci8aw
|
||||
timestamp = datetime(year=2016, month=1, day=20, tzinfo=UTC)
|
||||
timestamp = datetime(year=2016, month=1, day=20, tzinfo=timezone.utc)
|
||||
assert fix_callinfo.get_all("VP8STI", timestamp) == response_Exception_VP8STI_with_start_and_stop_date
|
||||
|
||||
elif fix_callinfo._lookuplib._lookuptype == "countryfile":
|
||||
|
|
|
|||
|
|
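The Callinfo tests above resolve call signs against a LookupLib instance, optionally at a given point in time. A minimal usage sketch (assuming the public country-files.com lookup source, which needs no API key):

    from datetime import datetime, timezone
    from pyhamtools import LookupLib, Callinfo

    ci = Callinfo(LookupLib(lookuptype="countryfile"))
    info = ci.get_all("DH1TW")   # e.g. {'country': ..., 'adif': 230, 'cqz': 14, ...}
    # The Club Log based lookups above additionally accept an aware UTC timestamp, e.g.
    # ci.get_all("VP8STI", datetime(2016, 1, 20, tzinfo=timezone.utc))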
@ -1,15 +1,9 @@
|
|||
import os
|
||||
import sys
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
from future.utils import iteritems
|
||||
|
||||
from pyhamtools.qsl import get_clublog_users
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
unicode = str
|
||||
|
||||
test_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
fix_dir = os.path.join(test_dir, 'fixtures')
|
||||
|
||||
|
|
@ -26,8 +20,8 @@ class Test_clublog_methods:
|
|||
|
||||
data = get_clublog_users()
|
||||
assert isinstance(data, dict)
|
||||
for key, value in iteritems(data):
|
||||
assert isinstance(key, unicode)
|
||||
for key, value in data.items():
|
||||
assert isinstance(key, str)
|
||||
assert isinstance(value, dict)
|
||||
|
||||
def test_with_invalid_url(self):
|
||||
|
|
|
|||
|
|
@@ -1,15 +1,9 @@
import pytest
from datetime import datetime

import pytz

from datetime import datetime, timezone

from pyhamtools.consts import LookupConventions as const
from pyhamtools.dxcluster import decode_char_spot, decode_pc11_message, decode_pc61_message

UTC = pytz.UTC

fix_spot1 = "DX de CT3FW: 21004.8 HC2AO 599 TKS(CW)QSL READ,QRZ.COM 2132Z"
fix_spot1_broken_spotter_call = "DX de $QRM: 21004.8 HC2AO 599 TKS(CW)QSL READ,QRZ.COM 2132Z"

@@ -34,7 +28,7 @@ response_spot1 = {
const.BAND: 15,
const.MODE: "CW",
const.COMMENT: "599 TKS(CW)QSL READ,QRZ.COM",
const.TIME: datetime.utcnow().replace( hour=21, minute=32, second=0, microsecond = 0, tzinfo=UTC)
const.TIME: datetime.now(timezone.utc).replace(hour=21, minute=32, second=0, microsecond = 0)
}
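The fixture change above is part of the same time-handling cleanup: datetime.utcnow() returns a naive datetime and is deprecated since Python 3.12, whereas datetime.now(timezone.utc) returns an aware UTC timestamp directly, so the extra tzinfo argument to replace() is no longer needed:

    from datetime import datetime, timezone

    # Expected spot time, normalised to 21:32:00 UTC of the current day
    spot_time = datetime.now(timezone.utc).replace(hour=21, minute=32, second=0, microsecond=0)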
|
|||
|
|
@ -1,15 +1,10 @@
|
|||
from .execfile import execfile
|
||||
import os
|
||||
import sys
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from pyhamtools.qsl import get_eqsl_users
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
unicode = str
|
||||
|
||||
test_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
fix_dir = os.path.join(test_dir, 'fixtures')
|
||||
class Test_eqsl_methods:
|
||||
|
|
@ -26,7 +21,7 @@ class Test_eqsl_methods:
|
|||
data = get_eqsl_users()
|
||||
assert isinstance(data, list)
|
||||
for el in data:
|
||||
assert isinstance(el, unicode)
|
||||
assert isinstance(el, str)
|
||||
assert len(data) > 1000
|
||||
|
||||
def test_with_invalid_url(self):
|
||||
|
|
|
|||
|
|
@ -14,7 +14,12 @@ class Test_calculate_distance():
|
|||
|
||||
assert abs(calculate_distance("JN48QM", "FN44AB") - 5965) < 1
|
||||
assert abs(calculate_distance("FN44AB", "JN48QM") - 5965) < 1
|
||||
assert abs(calculate_distance("JN48QM", "QF67bf") - 16467) < 1
|
||||
assert abs(calculate_distance("JN48QM", "QF67BF") - 16467) < 1
|
||||
assert abs(calculate_distance("JN48QM84", "QF67BF84") - 16467) < 1
|
||||
assert abs(calculate_distance("JN48QM84", "QF67BF") - 16464) < 1
|
||||
assert abs(calculate_distance("JN48QM84", "QF67") - 16506) < 1
|
||||
assert abs(calculate_distance("JN48QM", "QF67") - 16508) < 1
|
||||
assert abs(calculate_distance("JN48", "QF67") - 16535) < 1
|
||||
|
||||
def test_calculate_distance_invalid_inputs(self):
|
||||
with pytest.raises(AttributeError):
|
||||
|
|
|
|||
|
|
@ -8,10 +8,24 @@ class Test_latlong_to_locator():
|
|||
assert latlong_to_locator(-89.97916, -179.95833) == "AA00AA"
|
||||
assert latlong_to_locator(89.97916, 179.9583) == "RR99XX"
|
||||
|
||||
def test_latlong_to_locator_normal_case(self):
|
||||
def test_latlong_to_locator_4chars_precision(self):
|
||||
|
||||
assert latlong_to_locator(48.52083, 9.3750000, precision=4) == "JN48"
|
||||
assert latlong_to_locator(39.222916, -86.45416, 4) == "EM69"
|
||||
|
||||
def test_latlong_to_locator_6chars_precision(self):
|
||||
|
||||
assert latlong_to_locator(48.52083, 9.3750000) == "JN48QM"
|
||||
assert latlong_to_locator(48.5, 9.0) == "JN48MM" #center of the square
|
||||
assert latlong_to_locator(39.222916, -86.45416, 6) == "EM69SF"
|
||||
|
||||
def test_latlong_to_locator_8chars_precision(self):
|
||||
assert latlong_to_locator(48.51760, 9.40345, precision=8) == "JN48QM84"
|
||||
assert latlong_to_locator(39.222916, -86.45416, 8) == "EM69SF53"
|
||||
|
||||
def test_latlong_to_locator_10chars_precision(self):
|
||||
assert latlong_to_locator(45.835677, 68.525173, precision=10) == "MN45GU30AN"
|
||||
assert latlong_to_locator(51.124913, 16.941840, 10) == "JO81LC39AX"
|
||||
|
||||
def test_latlong_to_locator_invalid_characters(self):
|
||||
|
||||
|
|
|
|||
|
|
@ -1,10 +1,12 @@
|
|||
import pytest
|
||||
import maidenhead
|
||||
from pyhamtools.locator import locator_to_latlong
|
||||
from pyhamtools.consts import LookupConventions as const
|
||||
|
||||
|
||||
class Test_locator_to_latlong():
|
||||
|
||||
def test_locator_to_latlong_edge_cases(self):
|
||||
def test_locator_to_latlong_min_max_cases(self):
|
||||
latitude, longitude = locator_to_latlong("AA00AA")
|
||||
assert abs(latitude + 89.97916) < 0.00001
|
||||
assert abs(longitude +179.95833) < 0.0001
|
||||
|
|
@ -13,23 +15,79 @@ class Test_locator_to_latlong():
|
|||
assert abs(latitude - 89.97916) < 0.00001
|
||||
assert abs(longitude - 179.9583) < 0.0001
|
||||
|
||||
def test_locator_to_latlong_normal_case(self):
|
||||
|
||||
latitude, longitude = locator_to_latlong("JN48QM")
|
||||
assert abs(latitude - 48.52083) < 0.00001
|
||||
assert abs(longitude - 9.3750000) < 0.0001
|
||||
def test_locator_to_latlong_4chars_precision(self):
|
||||
|
||||
latitude, longitude = locator_to_latlong("JN48")
|
||||
assert abs(latitude - 48.5) < 0.001
|
||||
assert abs(longitude - 9.000) < 0.001
|
||||
assert abs(latitude - 48.5) < 0.1
|
||||
assert abs(longitude - 9.0) < 0.1
|
||||
|
||||
def test_locator_to_latlong_mixed_signs(self):
|
||||
latitude, longitude = locator_to_latlong("JN48", center=False)
|
||||
assert abs(latitude - 48) < 0.1
|
||||
assert abs(longitude - 8) < 0.1
|
||||
|
||||
def test_locator_to_latlong_6chars_precision(self):
|
||||
latitude, longitude = locator_to_latlong("JN48QM")
|
||||
assert abs(latitude - 48.52083) < 0.00001
|
||||
assert abs(longitude - 9.37500) < 0.00001
|
||||
|
||||
def test_locator_to_latlong_8chars_precision(self):
|
||||
latitude, longitude = locator_to_latlong("JN48QM84")
|
||||
assert abs(latitude - 48.51875) < 0.00001
|
||||
assert abs(longitude - 9.40416) < 0.00001
|
||||
|
||||
latitude, longitude = locator_to_latlong("EM69SF53")
|
||||
assert abs(latitude - 39.222916) < 0.00001
|
||||
assert abs(longitude + 86.45416) < 0.00001
|
||||
|
||||
def test_locator_to_latlong_10chars_precision(self):
|
||||
latitude, longitude = locator_to_latlong("JO81LC39AX")
|
||||
assert abs(latitude - 51.124913) < 0.000001
|
||||
assert abs(longitude - 16.941840) < 0.000001
|
||||
|
||||
latitude, longitude = locator_to_latlong("MN45GU30AN")
|
||||
assert abs(latitude - 45.835677) < 0.000001
|
||||
assert abs(longitude - 68.525173) < 0.000001
|
||||
|
||||
def test_locator_to_latlong_consistency_checks_6chars_lower_left_corner(self):
|
||||
|
||||
latitude_4, longitude_4 = locator_to_latlong("JN48", center=False)
|
||||
latitude_6, longitude_6 = locator_to_latlong("JN48AA", center=False)
|
||||
|
||||
assert latitude_4 == latitude_6
|
||||
assert longitude_4 == longitude_6
|
||||
|
||||
def test_locator_to_latlong_consistency_checks_8chars_lower_left_corner(self):
|
||||
|
||||
latitude_6, longitude_6 = locator_to_latlong("JN48AA", center=False)
|
||||
latitude_8, longitude_8 = locator_to_latlong("JN48AA00", center=False)
|
||||
|
||||
assert latitude_6 == latitude_8
|
||||
assert longitude_6 == longitude_8
|
||||
|
||||
def test_locator_to_latlong_consistency_checks_against_maidenhead(self):
|
||||
|
||||
locs = ["JN48", "EM69", "JN48QM", "EM69SF", "AA00AA", "RR99XX", "JN48QM84", "EM69SF53"]
|
||||
|
||||
# lower left (south/east) corner
|
||||
for loc in locs:
|
||||
lat, lon = locator_to_latlong(loc, center=False)
|
||||
lat_m, lon_m = maidenhead.to_location(loc)
|
||||
assert abs(lat - lat_m) < 0.00001
|
||||
assert abs(lon - lon_m) < 0.00001
|
||||
|
||||
# center of square
|
||||
for loc in locs:
|
||||
lat, lon = locator_to_latlong(loc) # default: center=True
|
||||
lat_m, lon_m = maidenhead.to_location(loc, center=True)
|
||||
assert abs(lat - lat_m) < 0.1
|
||||
assert abs(lon - lon_m) < 0.1
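The new tests pin down the locator API for 4, 6, 8 and 10 character precision and for the center/corner choice, and cross-check it against the maidenhead package. A short usage sketch with values taken from the assertions above:

    from pyhamtools.locator import latlong_to_locator, locator_to_latlong

    latlong_to_locator(48.51760, 9.40345, precision=8)   # 'JN48QM84'
    locator_to_latlong("JN48QM84")                       # center of the square (default)
    locator_to_latlong("JN48", center=False)             # lower-left corner instead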
|
||||
|
||||
def test_locator_to_latlong_upper_lower_chars(self):
|
||||
|
||||
latitude, longitude = locator_to_latlong("Jn48qM")
|
||||
assert abs(latitude - 48.52083) < 0.00001
|
||||
assert abs(longitude - 9.3750000) < 0.0001
|
||||
|
||||
|
||||
def test_locator_to_latlong_wrong_amount_of_characters(self):
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
|
|
@ -43,12 +101,30 @@ class Test_locator_to_latlong():
|
|||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JN8Q")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JN8QM1")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JN8QM1AA")
|
||||
|
||||
def test_locator_to_latlong_invalid_characters(self):
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("21XM99")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("48")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JNJN")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JN4848")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("JN48QMaa")
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
latitude, longitude = locator_to_latlong("****")
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +1,9 @@
|
|||
from datetime import datetime, timedelta
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
import pytest
|
||||
import pytz
|
||||
|
||||
from pyhamtools.locator import calculate_sunrise_sunset
|
||||
|
||||
UTC = pytz.UTC
|
||||
|
||||
class Test_calculate_sunrise_sunset_normal_case():
|
||||
|
||||
def test_calculate_sunrise_sunset(self):
|
||||
|
|
@ -14,11 +11,11 @@ class Test_calculate_sunrise_sunset_normal_case():
|
|||
time_margin = timedelta(minutes=1)
|
||||
locator = "JN48QM"
|
||||
|
||||
test_time = datetime(year=2014, month=1, day=1, tzinfo=UTC)
|
||||
result_JN48QM_1_1_2014_evening_dawn = datetime(2014, 1, 1, 15, 38, tzinfo=UTC)
|
||||
result_JN48QM_1_1_2014_morning_dawn = datetime(2014, 1, 1, 6, 36, tzinfo=UTC)
|
||||
result_JN48QM_1_1_2014_sunrise = datetime(2014, 1, 1, 7, 14, tzinfo=UTC)
|
||||
result_JN48QM_1_1_2014_sunset = datetime(2014, 1, 1, 16, 15, 23, 31016, tzinfo=UTC)
|
||||
test_time = datetime(year=2014, month=1, day=1, tzinfo=timezone.utc)
|
||||
result_JN48QM_1_1_2014_evening_dawn = datetime(2014, 1, 1, 15, 38, tzinfo=timezone.utc)
|
||||
result_JN48QM_1_1_2014_morning_dawn = datetime(2014, 1, 1, 6, 36, tzinfo=timezone.utc)
|
||||
result_JN48QM_1_1_2014_sunrise = datetime(2014, 1, 1, 7, 14, tzinfo=timezone.utc)
|
||||
result_JN48QM_1_1_2014_sunset = datetime(2014, 1, 1, 16, 15, 23, 31016, tzinfo=timezone.utc)
|
||||
|
||||
assert calculate_sunrise_sunset(locator, test_time)['morning_dawn'] - result_JN48QM_1_1_2014_morning_dawn < time_margin
|
||||
assert calculate_sunrise_sunset(locator, test_time)['evening_dawn'] - result_JN48QM_1_1_2014_evening_dawn < time_margin
|
||||
|
|
@ -33,7 +30,7 @@ class Test_calculate_sunrise_sunset_normal_case():
|
|||
# The sun never rises in winter time close to the north pole (e.g. at Jan Mayen)
|
||||
# Therefore we expect no sunrise or sunset.
|
||||
|
||||
test_time = datetime(year=2021, month=12, day=15, tzinfo=UTC)
|
||||
test_time = datetime(year=2021, month=12, day=15, tzinfo=timezone.utc)
|
||||
|
||||
assert calculate_sunrise_sunset(locator, test_time)['morning_dawn'] == None
|
||||
assert calculate_sunrise_sunset(locator, test_time)['evening_dawn'] == None
|
||||
|
|
@ -48,7 +45,7 @@ class Test_calculate_sunrise_sunset_normal_case():
|
|||
# The sun never sets at the south pole during arctic summer
|
||||
# Therefore we expect no sunrise or sunset.
|
||||
|
||||
test_time = datetime(year=2014, month=1, day=1, tzinfo=UTC)
|
||||
test_time = datetime(year=2014, month=1, day=1, tzinfo=timezone.utc)
|
||||
|
||||
assert calculate_sunrise_sunset(locator, test_time)['morning_dawn'] == None
|
||||
assert calculate_sunrise_sunset(locator, test_time)['evening_dawn'] == None
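calculate_sunrise_sunset() takes a locator plus an aware UTC datetime and returns a dictionary of aware UTC datetimes; near the poles the entries are None when the sun does not rise or set on that day. A minimal sketch using the values from the first test above (the 'sunrise'/'sunset' keys follow the same naming pattern as the 'morning_dawn'/'evening_dawn' keys asserted here):

    from datetime import datetime, timezone
    from pyhamtools.locator import calculate_sunrise_sunset

    times = calculate_sunrise_sunset("JN48QM", datetime(2014, 1, 1, tzinfo=timezone.utc))
    print(times['morning_dawn'], times['evening_dawn'])   # aware UTC datetimes or None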
|
||||
|
|
|
|||
|
|
@ -1,13 +1,8 @@
|
|||
from __future__ import unicode_literals
|
||||
import pytest
|
||||
import sys
|
||||
|
||||
from pyhamtools.lookuplib import LookupLib
|
||||
from pyhamtools.exceptions import APIKeyMissingError
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
unicode = str
|
||||
|
||||
@pytest.fixture(scope="function", params=[5, -5, "", "foo bar", 11.5, {}, [], None, ("foo", "bar")])
|
||||
def fixAnyValue(request):
|
||||
return request.param
|
||||
|
|
@ -41,5 +36,5 @@ class TestlookupLibHelper:
|
|||
with pytest.raises(TypeError):
|
||||
fixClublogApi._generate_random_word()
|
||||
|
||||
assert type(fixClublogApi._generate_random_word(5)) is unicode
|
||||
assert type(fixClublogApi._generate_random_word(5)) is str
|
||||
assert len(fixClublogApi._generate_random_word(5)) == 5
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import pytest
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from pyhamtools.lookuplib import LookupLib
|
||||
|
||||
|
|
@ -84,7 +84,7 @@ class TestclublogApi_Getters:
|
|||
def test_lookup_callsign(self, fixClublogApi):
|
||||
assert fixClublogApi.lookup_callsign("DH1TW") == response_Exception_DH1TW
|
||||
assert fixClublogApi.lookup_callsign("VU9KV") == response_Exception_VU9KV
|
||||
d = datetime.utcnow().replace(year=1971, month=4, day=14)
|
||||
d = datetime.now(timezone.utc).replace(year=1971, month=4, day=14)
|
||||
assert fixClublogApi.lookup_callsign("VU9KV", d) == response_Exception_VU9KV_with_Date
|
||||
assert fixClublogApi.lookup_callsign("DH1TW/MM") == response_Exception_DH1TW_MM
|
||||
assert fixClublogApi.lookup_callsign("DH1TW/AM") == response_Exception_DH1TW_AM
|
||||
|
|
|
|||
|
|
@ -1,13 +1,10 @@
|
|||
import pytest
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
from datetime import datetime, timezone
|
||||
import os
|
||||
|
||||
from pyhamtools.lookuplib import LookupLib
|
||||
from pyhamtools.exceptions import APIKeyMissingError
|
||||
|
||||
UTC = pytz.UTC
|
||||
|
||||
|
||||
#Fixtures
|
||||
#===========================================================
|
||||
|
|
@ -28,8 +25,8 @@ response_Exception_KC6MM_1990 = {
|
|||
'adif': 22,
|
||||
'country': u'PALAU',
|
||||
'continent': u'OC',
|
||||
'latitude': 9.50,
|
||||
'longitude': 138.20,
|
||||
'latitude': 9.52,
|
||||
'longitude': 138.21,
|
||||
'cqz': 27,
|
||||
}
|
||||
|
||||
|
|
@ -37,8 +34,8 @@ response_Exception_KC6MM_1992 = {
|
|||
'adif': 22,
|
||||
'country': u'PALAU',
|
||||
'continent': u'OC',
|
||||
'latitude': 9.50,
|
||||
'longitude': 138.20,
|
||||
'latitude': 9.52,
|
||||
'longitude': 138.21,
|
||||
'cqz': 27,
|
||||
}
|
||||
|
||||
|
|
@ -55,8 +52,8 @@ response_Exception_VK9XO_with_start_date = {
|
|||
'adif': 35,
|
||||
'country': u'CHRISTMAS ISLAND',
|
||||
'continent': u'OC',
|
||||
'latitude': -10.50,
|
||||
'longitude': 105.70,
|
||||
'latitude': -10.48,
|
||||
'longitude': 105.62,
|
||||
'cqz': 29,
|
||||
}
|
||||
|
||||
|
|
@ -142,40 +139,40 @@ class TestclublogXML_Getters:
|
|||
#===============================
|
||||
|
||||
def test_lookup_callsign_same_callsign_different_exceptions(self, fixClublogXML):
|
||||
timestamp = datetime(year=1990, month=10, day=12, tzinfo=UTC)
|
||||
timestamp = datetime(year=1990, month=10, day=12, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_callsign("kc6mm", timestamp) == response_Exception_KC6MM_1990
|
||||
|
||||
timestamp = datetime(year=1992, month=3, day=8, tzinfo=UTC)
|
||||
timestamp = datetime(year=1992, month=3, day=8, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_callsign("kc6mm", timestamp) == response_Exception_KC6MM_1992
|
||||
|
||||
def test_lookup_callsign_exception_only_with_start_date(self, fixClublogXML):
|
||||
#timestamp > startdate
|
||||
timestamp = datetime(year=1962, month=7, day=7, tzinfo=UTC)
|
||||
timestamp = datetime(year=1962, month=7, day=7, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_callsign("vk9xo", timestamp) == response_Exception_VK9XO_with_start_date
|
||||
assert fixClublogXML.lookup_callsign("vk9xo") == response_Exception_VK9XO_with_start_date
|
||||
|
||||
#timestamp < startdate
|
||||
timestamp = datetime(year=1962, month=7, day=5, tzinfo=UTC)
|
||||
timestamp = datetime(year=1962, month=7, day=5, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_callsign("vk9xo", timestamp)
|
||||
|
||||
def test_lookup_callsign_exception_only_with_end_date(self, fixClublogXML):
|
||||
|
||||
#timestamp < enddate
|
||||
timestamp = datetime(year=1975, month=9, day=14, tzinfo=UTC)
|
||||
timestamp = datetime(year=1975, month=9, day=14, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_callsign("vk9xx", timestamp) == response_Exception_VK9XX_with_end_date
|
||||
|
||||
# timestamp > enddate
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_callsign("vk9xx")
|
||||
|
||||
timestamp = datetime(year=1975, month=9, day=16, tzinfo=UTC)
|
||||
timestamp = datetime(year=1975, month=9, day=16, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_callsign("vk9xx", timestamp)
|
||||
|
||||
def test_lookup_callsign_exception_no_start_nor_end_date(self, fixClublogXML):
|
||||
|
||||
timestamp = datetime(year=1975, month=9, day=14, tzinfo=UTC)
|
||||
timestamp = datetime(year=1975, month=9, day=14, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_callsign("ax9nyg", timestamp) == response_Exception_AX9NYG
|
||||
assert fixClublogXML.lookup_callsign("ax9nyg" ) == response_Exception_AX9NYG
|
||||
|
||||
|
|
@ -196,29 +193,29 @@ class TestclublogXML_Getters:
|
|||
|
||||
def test_lookup_prefix_with_changing_entities(self, fixClublogXML):
|
||||
#return old entity (PAPUA TERR)
|
||||
timestamp = datetime(year=1975, month=9, day=14).replace(tzinfo=UTC)
|
||||
timestamp = datetime(year=1975, month=9, day=14, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_prefix("VK9", timestamp) == response_Prefix_VK9_until_1975
|
||||
|
||||
#return empty dict - Prefix was not assigned at that time
|
||||
timestamp = datetime(year=1975, month=9, day=16).replace(tzinfo=UTC)
|
||||
timestamp = datetime(year=1975, month=9, day=16, tzinfo=timezone.utc)
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_prefix("VK9", timestamp)
|
||||
|
||||
#return new entity (Norfolk Island)
|
||||
timestamp = datetime.utcnow().replace(tzinfo=UTC)
|
||||
timestamp = datetime.now(timezone.utc)
|
||||
assert fixClublogXML.lookup_prefix("VK9", timestamp ) == response_Prefix_VK9_starting_1976
|
||||
|
||||
def test_lookup_prefix_with_entities_having_start_and_stop(self, fixClublogXML):
|
||||
|
||||
timestamp_before = datetime(year=1964, month=11, day=1).replace(tzinfo=UTC)
|
||||
timestamp_before = datetime(year=1964, month=11, day=1, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_prefix("ZD5", timestamp_before)
|
||||
|
||||
timestamp_valid = datetime(year=1964, month=12, day=2).replace(tzinfo=UTC)
|
||||
timestamp_valid = datetime(year=1964, month=12, day=2, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_prefix("ZD5", timestamp_valid) == response_Prefix_ZD5_1964_to_1971
|
||||
|
||||
timestamp_after = datetime(year=1971, month=8, day=1).replace(tzinfo=UTC)
|
||||
timestamp_after = datetime(year=1971, month=8, day=1, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_prefix("ZD5", timestamp_after)
|
||||
|
||||
|
|
@ -234,8 +231,8 @@ class TestclublogXML_Getters:
|
|||
fixClublogXML.is_invalid_operation("dh1tw")
|
||||
|
||||
#Invalid Operation with start and end date
|
||||
timestamp_before = datetime(year=1993, month=12, day=30).replace(tzinfo=UTC)
|
||||
timestamp = datetime(year=1994, month=12, day=30).replace(tzinfo=UTC)
|
||||
timestamp_before = datetime(year=1993, month=12, day=30, tzinfo=timezone.utc)
|
||||
timestamp = datetime(year=1994, month=12, day=30, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.is_invalid_operation("vk0mc")
|
||||
|
||||
|
|
@ -246,7 +243,7 @@ class TestclublogXML_Getters:
|
|||
|
||||
#Invalid Operation with start date
|
||||
assert fixClublogXML.is_invalid_operation("5W1CFN")
|
||||
timestamp_before = datetime(year=2012, month=1, day=31).replace(tzinfo=UTC)
|
||||
timestamp_before = datetime(year=2012, month=1, day=31, tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.is_invalid_operation("5W1CFN", timestamp_before)
|
||||
|
||||
|
|
@ -264,9 +261,9 @@ class TestclublogXML_Getters:
|
|||
assert fixClublogXML.lookup_zone_exception("dp0gvn") == 38
|
||||
|
||||
#zone exception with start and end date
|
||||
timestamp = datetime(year=1992, month=10, day=2).replace(tzinfo=UTC)
|
||||
timestamp_before = datetime(year=1992, month=9, day=30).replace(tzinfo=UTC)
|
||||
timestamp_after = datetime(year=1993, month=3, day=1).replace(tzinfo=UTC)
|
||||
timestamp = datetime(year=1992, month=10, day=2, tzinfo=timezone.utc)
|
||||
timestamp_before = datetime(year=1992, month=9, day=30, tzinfo=timezone.utc)
|
||||
timestamp_after = datetime(year=1993, month=3, day=1, tzinfo=timezone.utc)
|
||||
assert fixClublogXML.lookup_zone_exception("dl1kvc/p", timestamp) == 38
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
|
|
@ -276,6 +273,6 @@ class TestclublogXML_Getters:
|
|||
fixClublogXML.lookup_zone_exception("dl1kvc/p", timestamp_after)
|
||||
|
||||
#zone exception with start date
|
||||
timestamp_before = datetime(year=2013, month=12, day=26).replace(tzinfo=UTC)
|
||||
timestamp_before = datetime(year=2013, month=12, day=26,tzinfo=timezone.utc)
|
||||
with pytest.raises(KeyError):
|
||||
fixClublogXML.lookup_zone_exception("dh1hb/p", timestamp_before)
|
||||
|
|
|
|||
|
|
@ -1,11 +1,4 @@
|
|||
from __future__ import unicode_literals
|
||||
import pytest
|
||||
import tempfile
|
||||
import os
|
||||
import sys
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
unicode = str
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
|
|
@ -58,13 +51,13 @@ class Test_Getter_Setter_Api_Types_for_all_sources:
|
|||
count = 0
|
||||
for attr in entity:
|
||||
if attr == "country":
|
||||
assert type(entity[attr] is unicode)
|
||||
assert type(entity[attr] is str)
|
||||
count +=1
|
||||
if attr == "continent":
|
||||
assert type(entity[attr] is unicode)
|
||||
assert type(entity[attr] is str)
|
||||
count +=1
|
||||
if attr == "prefix":
|
||||
assert type(entity[attr] is unicode)
|
||||
assert type(entity[attr] is str)
|
||||
count +=1
|
||||
if attr == "deleted":
|
||||
assert type(entity[attr] is bool)
|
||||
|
|
@ -114,10 +107,10 @@ class Test_Getter_Setter_Api_Types_for_all_sources:
|
|||
assert type(ex[attr]) is float
|
||||
count +=1
|
||||
elif attr == "country":
|
||||
assert type(ex[attr]) is unicode
|
||||
assert type(ex[attr]) is str
|
||||
count +=1
|
||||
elif attr == "continent":
|
||||
assert type(ex[attr]) is unicode
|
||||
assert type(ex[attr]) is str
|
||||
count +=1
|
||||
elif attr == "cqz":
|
||||
assert type(ex[attr]) is int
|
||||
|
|
@ -150,7 +143,7 @@ class Test_Getter_Setter_Api_Types_for_all_sources:
|
|||
count = 0
|
||||
for attr in prefix:
|
||||
if attr == "country":
|
||||
assert type(prefix[attr]) is unicode
|
||||
assert type(prefix[attr]) is str
|
||||
count +=1
|
||||
elif attr == "adif":
|
||||
assert type(prefix[attr]) is int
|
||||
|
|
@ -162,7 +155,7 @@ class Test_Getter_Setter_Api_Types_for_all_sources:
|
|||
assert type(prefix[attr]) is int
|
||||
count +=1
|
||||
elif attr == "continent":
|
||||
assert type(prefix[attr]) is unicode
|
||||
assert type(prefix[attr]) is str
|
||||
count +=1
|
||||
elif attr == "latitude":
|
||||
assert type(prefix[attr]) is float
|
||||
|
|
|
|||
|
|
@ -1,16 +1,12 @@
|
|||
import os
|
||||
|
||||
import pytest
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from pyhamtools.lookuplib import LookupLib
|
||||
from pyhamtools.exceptions import APIKeyMissingError
|
||||
from pyhamtools.consts import LookupConventions as const
|
||||
|
||||
import pytz
|
||||
UTC = pytz.UTC
|
||||
|
||||
|
||||
try:
|
||||
QRZ_USERNAME = str(os.environ['QRZ_USERNAME'])
|
||||
QRZ_PWD = str(os.environ['QRZ_PWD'])
|
||||
|
|
@ -21,10 +17,10 @@ except Exception:
|
|||
#===========================================================
|
||||
|
||||
response_1A1AB = {
|
||||
u'biodate': datetime(2018, 9, 7, 21, 17, 7, tzinfo=UTC),
|
||||
u'biodate': datetime(2018, 9, 7, 21, 17, 7, tzinfo=timezone.utc),
|
||||
u'bio': u'0',
|
||||
u'license_class': u'C',
|
||||
u'moddate': datetime(2008, 11, 2, 15, 0, 38, tzinfo=UTC),
|
||||
u'moddate': datetime(2008, 11, 2, 15, 0, 38, tzinfo=timezone.utc),
|
||||
u'locator': u'JN61fw',
|
||||
u'callsign': u'1A1AB',
|
||||
u'addr2': u'00187 Rome',
|
||||
|
|
|
|||
|
|
@ -1,15 +1,11 @@
|
|||
import pytest
|
||||
import json
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import pytz
|
||||
import redis
|
||||
|
||||
from pyhamtools import LookupLib, Callinfo
|
||||
|
||||
|
||||
UTC = pytz.UTC
|
||||
|
||||
r = redis.Redis()
|
||||
|
||||
|
||||
|
|
@ -44,7 +40,7 @@ class TestStoreDataInRedis:
|
|||
with pytest.raises(KeyError):
|
||||
fix_redis.is_invalid_operation("VK0MC")
|
||||
|
||||
timestamp = datetime(year=1994, month=12, day=30).replace(tzinfo=UTC)
|
||||
timestamp = datetime(year=1994, month=12, day=30, tzinfo=timezone.utc)
|
||||
assert fix_redis.is_invalid_operation("VK0MC", timestamp)
|
||||
|
||||
with pytest.raises(KeyError):
|
||||
|
|
@ -61,7 +57,7 @@ class TestStoreDataInRedis:
|
|||
assert lib.lookup_prefix("DH") == fixCountryFile.lookup_prefix("DH")
|
||||
|
||||
def test_redis_lookup(self, fixClublogXML, fix_redis):
|
||||
timestamp = datetime(year=2016, month=1, day=20, tzinfo=UTC)
|
||||
timestamp = datetime(year=2016, month=1, day=20, tzinfo=timezone.utc)
|
||||
ci = Callinfo(fix_redis)
|
||||
assert ci.get_all("VP8STI", timestamp) == response_Exception_VP8STI_with_start_and_stop_date
|
||||
assert ci.get_all("tu5pct") == response_TU5PCT
|
||||
|
|
|
|||
|
|
@ -1,9 +1,7 @@
|
|||
import os
|
||||
import sys
|
||||
import datetime
|
||||
|
||||
from .execfile import execfile
|
||||
from future.utils import iteritems
|
||||
import pytest
|
||||
|
||||
def execfile(filepath, globals=None, locals=None):
|
||||
|
|
@ -19,9 +17,6 @@ def execfile(filepath, globals=None, locals=None):
|
|||
|
||||
from pyhamtools.qsl import get_lotw_users
|
||||
|
||||
if sys.version_info.major == 3:
|
||||
unicode = str
|
||||
|
||||
test_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
fix_dir = os.path.join(test_dir, 'fixtures')
|
||||
|
||||
|
|
@ -34,12 +29,13 @@ class Test_lotw_methods:
|
|||
execfile(os.path.join(fix_dir,"lotw_fixture.py"), namespace)
|
||||
assert get_lotw_users(url=httpserver.url) == namespace['lotw_fixture']
|
||||
|
||||
@pytest.mark.skip("ARRL has been hacked in May 2024; skipping until LOTW is again up")
|
||||
def test_download_lotw_list_and_check_types(self):
|
||||
|
||||
data = get_lotw_users()
|
||||
assert isinstance(data, dict)
|
||||
for key, value in iteritems(data):
|
||||
assert isinstance(key, unicode)
|
||||
for key, value in data.items():
|
||||
assert isinstance(key, str)
|
||||
assert isinstance(value, datetime.datetime )
|
||||
assert len(data) > 1000
|
||||
|
||||
|
|
|
|||
|
|
@ -65,6 +65,7 @@ class Test_utils_freq_to_band():
|
|||
assert freq_to_band(1200000) == {"band" : 0.23, "mode":None}
|
||||
|
||||
def test_shf_frequencies(self):
|
||||
assert freq_to_band(2320200) == {"band" : 0.13, "mode":None}
|
||||
assert freq_to_band(2390000) == {"band" : 0.13, "mode":None}
|
||||
|
||||
assert freq_to_band(3300000) == {"band" : 0.09, "mode":None}
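freq_to_band() maps a frequency in kHz to an amateur band (expressed in metres) plus a mode where one can be inferred from the band plan, otherwise None. The new SHF assertions above translate to:

    from pyhamtools.utils import freq_to_band

    # 2320200 kHz -> 13 cm band, 3300000 kHz -> 9 cm band (no default mode)
    freq_to_band(2320200)   # {'band': 0.13, 'mode': None}
    freq_to_band(3300000)   # {'band': 0.09, 'mode': None}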