diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
new file mode 100644
index 0000000..7d79401
--- /dev/null
+++ b/.github/workflows/pylint.yml
@@ -0,0 +1,24 @@
+name: Pylint
+
+on: [push]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v3
+ with:
+ python-version: ${{ matrix.python-version }}
+ - name: Install dependencies
+ run: |
+ python -m pip install --upgrade pip
+ pip install pylint
+ if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+ - name: Analysing the code with pylint
+ run: |
+ pylint $(git ls-files '*.py')
\ No newline at end of file
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index 3e5b4a7..42c9161 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -16,7 +16,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python-version: ["3.9", "3.10", "3.11"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4
diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml
deleted file mode 100644
index ad5ac1e..0000000
--- a/.github/workflows/super-linter.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: Lint
-
-on: # yamllint disable-line rule:truthy
- push: null
- pull_request: null
-
-permissions: {}
-
-jobs:
- build:
- name: Lint
- runs-on: ubuntu-latest
-
- permissions:
- contents: read
- packages: read
- # To report GitHub Actions status checks
- statuses: write
-
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- with:
- # super-linter needs the full git history to get the
- # list of files that changed across commits
- fetch-depth: 0
-
- - name: Super-linter
- uses: super-linter/super-linter@v7.1.0 # x-release-please-version
- env:
- VALIDATE_PYTHON_PYLINT: true
- VALIDATE_PYTHON_PYINK: true
- # To report GitHub Actions status checks
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 7fda477..0898eb6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,4 +2,4 @@
/.vscode
/src/qspylib/__pycache__
/dist
-/docs/build
\ No newline at end of file
+/docs/build
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..b1be0f8
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,633 @@
+[MAIN]
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Clear in-memory caches upon conclusion of linting. Useful if running pylint
+# in a server-like mode.
+clear-cache-post-run=no
+
+# Load and enable all available extensions. Use --list-extensions to see a list
+# all available extensions.
+#enable-all-extensions=
+
+# In error mode, messages with a category besides ERROR or FATAL are
+# suppressed, and no reports are done by default. Error mode is compatible with
+# disabling specific errors.
+#errors-only=
+
+# Always return a 0 (non-error) status code, even if lint errors are found.
+# This is primarily useful in continuous integration scripts.
+#exit-zero=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold under which the program will exit with error.
+fail-under=9
+
+# Interpret the stdin as a python script, whose filename needs to be passed as
+# the module_or_package argument.
+#from-stdin=
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS, docs
+
+# Add files or directories matching the regular expressions patterns to the
+# ignore-list. The regex matches against paths and can be in Posix or Windows
+# format. Because '\\' represents the directory delimiter on Windows systems,
+# it can't be used as an escape character.
+ignore-paths=
+
+# Files or directories matching the regular expression patterns are skipped.
+# The regex matches against base names, not paths. The default value ignores
+# Emacs file locks
+ignore-patterns=^\.#
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use, and will cap the count on Windows to
+# avoid hangs.
+jobs=0
+
+# Control the amount of potential inferred values when inferring a single
+# object. This can help the performance when dealing with large functions or
+# complex, nested conditions.
+limit-inference-results=100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Minimum Python version to use for version dependent checks. Will default to
+# the version used to run pylint.
+py-version=3.12
+
+# Discover python modules and packages in the file system subtree.
+recursive=no
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode=yes
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# In verbose mode, extra non-checker-related info will be displayed.
+#verbose=
+
+
+[BASIC]
+
+# Naming style matching correct argument names.
+argument-naming-style=snake_case
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style. If left empty, argument names will be checked with the set
+# naming style.
+#argument-rgx=
+
+# Naming style matching correct attribute names.
+attr-naming-style=snake_case
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style. If left empty, attribute names will be checked with the set naming
+# style.
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names=foo,
+ bar,
+ baz,
+ toto,
+ tutu,
+ tata
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+bad-names-rgxs=
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style=any
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style. If left empty, class attribute names will be checked
+# with the set naming style.
+#class-attribute-rgx=
+
+# Naming style matching correct class constant names.
+class-const-naming-style=UPPER_CASE
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style. If left empty, class constant names will be checked with
+# the set naming style.
+#class-const-rgx=
+
+# Naming style matching correct class names.
+class-naming-style=PascalCase
+
+# Regular expression matching correct class names. Overrides class-naming-
+# style. If left empty, class names will be checked with the set naming style.
+#class-rgx=
+
+# Naming style matching correct constant names.
+const-naming-style=UPPER_CASE
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style. If left empty, constant names will be checked with the set naming
+# style.
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names.
+function-naming-style=snake_case
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style. If left empty, function names will be checked with the set
+# naming style.
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names=i,
+ j,
+ k,
+ ex,
+ Run,
+ _,
+ s
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+good-names-rgxs=
+
+# Include a hint for the correct naming format with invalid-name.
+include-naming-hint=no
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style=any
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style. If left empty, inline iteration names will be checked
+# with the set naming style.
+#inlinevar-rgx=
+
+# Naming style matching correct method names.
+method-naming-style=snake_case
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style. If left empty, method names will be checked with the set naming style.
+#method-rgx=
+
+# Naming style matching correct module names.
+module-naming-style=snake_case
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style. If left empty, module names will be checked with the set naming style.
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+# These decorators are taken in consideration only for invalid-name.
+property-classes=abc.abstractproperty
+
+# Regular expression matching correct type variable names. If left empty, type
+# variable names will be checked with the set naming style.
+#typevar-rgx=
+
+# Naming style matching correct variable names.
+variable-naming-style=snake_case
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style. If left empty, variable names will be checked with the set
+# naming style.
+#variable-rgx=
+
+
+[CLASSES]
+
+# Warn about protected attribute access inside special methods
+check-protected-access-in-special-methods=no
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,
+ __new__,
+ setUp,
+ __post_init__
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,
+ _fields,
+ _replace,
+ _source,
+ _make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[DESIGN]
+
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+exclude-too-few-public-methods=
+
+# List of qualified class names to ignore when counting class parents (see
+# R0901)
+ignored-parents=
+
+# Maximum number of arguments for function / method.
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr=5
+
+# Maximum number of branch for function / method body.
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=yes
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow explicit reexports by alias from a package __init__.
+allow-reexport-from-package=no
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=
+
+# Output a graph (.gv or any supported image format) of external dependencies
+# to the given file (report RP0402 must not be disabled).
+ext-import-graph=
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be
+# disabled).
+import-graph=
+
+# Output a graph (.gv or any supported image format) of internal dependencies
+# to the given file (report RP0402 must not be disabled).
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+# Couples of modules and preferred modules, separated by a comma.
+preferred-modules=
+
+
+[LOGGING]
+
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style=old
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules=logging
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
+# UNDEFINED.
+confidence=HIGH,
+ CONTROL_FLOW,
+ INFERENCE,
+ INFERENCE_FAILURE,
+ UNDEFINED
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then re-enable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable=raw-checker-failed,
+ bad-inline-option,
+ locally-disabled,
+ file-ignored,
+ suppressed-message,
+ useless-suppression,
+ deprecated-pragma,
+ use-symbolic-message-instead
+ # usually, these are because the API we're fronting is just absurd with arguments itself.
+ # that said, there's something to be said about code smell. I'll leave them on, for now,
+ # with a fail-under.
+ #too-many-arguments,
+ #too-many-locals
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=c-extension-no-member
+
+
+[METHOD_ARGS]
+
+# List of qualified names (i.e., library.method) which require a timeout
+# parameter e.g. 'requests.api.get,requests.api.post'
+timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,
+ XXX,
+ TODO,
+ to-do,
+ TO-DO,
+ todo
+
+# Regular expression of note tags to take in consideration.
+notes-rgx=
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=sys.exit,argparse.parse_error
+
+
+[REPORTS]
+
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'fatal', 'error', 'warning', 'refactor',
+# 'convention', and 'info' which contain the number of messages in each
+# category, as well as 'statement' which is the total number of statements
+# analyzed. This score is used by the global evaluation report (RP0004).
+evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+#output-format=
+
+# Tells whether to display a full report or only the messages.
+reports=no
+
+# Activate the evaluation score.
+score=yes
+
+
+[SIMILARITIES]
+
+# Comments are removed from the similarity computation
+ignore-comments=yes
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings=yes
+
+# Imports are removed from the similarity computation
+ignore-imports=yes
+
+# Signatures are removed from the similarity computation
+ignore-signatures=yes
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[SPELLING]
+
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the 'python-enchant' package.
+spelling-dict=
+
+# List of comma separated words that should be considered directives if they
+# appear at the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains the private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[STRING]
+
+# This flag controls whether inconsistent-quotes generates a warning when the
+# character used as a quote delimiter is used inconsistently within a module.
+check-quote-consistency=no
+
+# This flag controls whether the implicit-str-concat should generate a warning
+# on implicit string concatenation in sequences defined over several lines.
+check-str-concat-over-line-jumps=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=requests.codes.ok
+
+# Tells whether to warn about missing members when the owner of the attribute
+# is inferred to be None.
+ignore-none=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of symbolic message names to ignore for Mixin members.
+ignored-checks-for-mixins=no-member,
+ not-async-context-manager,
+ not-context-manager,
+ attribute-defined-outside-init
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+# Regex pattern to define which classes are considered mixins.
+mixin-class-rgx=.*[Mm]ixin
+
+# List of decorators that change the signature of a decorated function.
+signature-mutators=
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of names allowed to shadow builtins
+allowed-redefined-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,
+ _cb
+
+# A regular expression matching the name of dummy variables (i.e. expected to
+# not be used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 3ed2280..e77578a 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -25,4 +25,4 @@ sphinx:
# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- - requirements: docs/requirements.txt
\ No newline at end of file
+ - requirements: docs/requirements.txt
diff --git a/README.md b/README.md
index 40fdb86..5fe1aa9 100644
--- a/README.md
+++ b/README.md
@@ -1,13 +1,35 @@
# QSPyLib

-[![Documentation Status](https://readthedocs.org/projects/qspylib/badge/?version=latest)](https://qspylib.readthedocs.io/en/latest/?badge=latest)
+[![Python Package Build Action Status](https://github.com/JayToTheAy/QSPy/actions/workflows/python-package.yml/badge.svg)](https://github.com/JayToTheAy/QSPy)
+[![Documentation Status](https://readthedocs.org/projects/qspylib/badge/?version=latest)](https://qspylib.readthedocs.io/en/latest/?badge=latest)
+[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/qspylib)](https://pypi.org/project/qspylib/)
+[![PyPI - Version](https://img.shields.io/pypi/v/qspylib)](https://pypi.org/project/qspylib/)
+[![PyPI - License](https://img.shields.io/pypi/l/qspylib)](https://pypi.org/project/qspylib/)
-QSPyLib is a bundle of API wrappers for various amateur radio-related websites, including QRZ, LOTW, eQSL, and ClubLog.
+QSPyLib is a bundle of API wrappers for various amateur radio-related sites, including QRZ, LOTW, eQSL, and ClubLog.
It is currently in development and should be considered unstable version-to-version while the version number is still 0.x.x.
Issues and pull requests are welcome, and should be made on the [GitHub repository](https://github.com/jaytotheay/qspy).
+## How do I install it?
+
+The latest stable* version of QSPyLib is available on PyPI, and can be installed by just running
+
+```bash
+py -m pip install qspylib
+```
+
+This release should match what is on the GitHub repository under a corresponding tagged release.
+
+To build the most recent source code (which isn’t necessarily stable – see the build test status), you can download the source code from GitHub, navigate to the directory, and run:
+
+```bash
+py -m build
+```
+
+This will generate a .whl and tar.gz, which you can then install locally.
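+
+Note that `py -m build` requires the `build` package. For example, installing the build frontend and then the freshly built wheel might look like this (the exact wheel filename will vary with the version you built):
+
+```bash
+# the build frontend is a separate package; install it once if needed
+py -m pip install build
+# after building, install the generated wheel locally (filename varies by version)
+py -m pip install dist/qspylib-1.0.0a1-py3-none-any.whl
+```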
+
## What works right now?
As of v0.0.1:
@@ -21,7 +43,7 @@ As of v0.0.1:
Documentation of all functions and classes, including examples, is available at the ReadTheDocs listing for this project:
-http://qspylib.readthedocs.io/
+<https://qspylib.readthedocs.io/>
A quick example of pulling a Logbook from LOTW:
@@ -30,7 +52,9 @@ from qspylib import lotw
LOTWAccount = lotw.LOTWClient("callsign", "password")
logbook = LOTWAccount.fetch_logbook()
```
-This will give you a `Logbook` object, which contains a list of QSO objects and a parsed, usable adif_io log. The adif_io log property contains all the ADIF info that LOTW outputs (and likewise for other logging sites); the built-in log property of a `Logbook` object contains only some limited information, like callsign, band, mode, date, time, and QSL status from the originating site (which is a little handy as a single-reference for if a QSO is a QSL, since different sites use different, extra ADIF fields to express being QSL'd on their platform.)
+This will give you a `Logbook` object, which contains a list of QSO objects and a parsed, usable adif_io log.
+The adif_io log property contains all the ADIF info that LOTW outputs (and likewise for other logging sites).
+The built-in log property of a `Logbook` object contains only some limited information, like callsign, band, mode, date, time, and QSL status from the originating site (which is handy as a single reference for whether a QSO is a QSL, since different sites use different, extra ADIF fields to express being QSL'd on their platform).
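+
+For instance, with the `logbook` fetched above, working with both views might look something like this (attribute names follow the `QSO` and `Logbook` classes in `qspylib.logbook`):
+
+```python
+# iterate the simplified per-QSO view and check QSL status
+for qso in logbook.log:
+    if qso.qsl_rcvd == 'Y':
+        print(f"Confirmed: {qso.their_call} on {qso.band} {qso.mode}")
+
+# the full parsed ADIF records are also available
+first_record = logbook.adi[0]
+```
+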
Other functions of APIs are generally available, like checking if an eQSL is verified:
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 296c16d..edd0f57 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,4 @@
sphinx-rtd-theme
adif_io
xmltodict
-pytest
\ No newline at end of file
+pytest
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 5363d2c..bbf4fef 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -6,6 +6,8 @@
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+#pylint: disable-all
+
import os
import sys
sys.path.insert(0, os.path.abspath('../../src/'))
diff --git a/docs/source/index.rst b/docs/source/index.rst
index e679d1e..7ab78d4 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -3,13 +3,34 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Introduction
+QSPyLib
============
+
+.. image:: https://github.com/JayToTheAy/QSPy/actions/workflows/python-package.yml/badge.svg
+ :target: https://github.com/JayToTheAy/QSPy
+ :alt: Python Package Build Action Status
+.. image:: https://readthedocs.org/projects/qspylib/badge/?version=latest
+ :target: https://qspylib.readthedocs.io/en/
+ :alt: Documentation Status
+.. image:: https://img.shields.io/pypi/pyversions/qspylib
+ :target: https://pypi.org/project/qspylib/
+ :alt: PyPI - Python Version
+.. image:: https://img.shields.io/pypi/v/qspylib
+ :target: https://pypi.org/project/qspylib/
+ :alt: PyPI - Version
+.. image:: https://img.shields.io/pypi/wheel/qspylib
+ :target: https://pypi.org/project/qspylib/
+ :alt: PyPI - Wheel
+.. image:: https://img.shields.io/pypi/l/qspylib
+ :target: https://pypi.org/project/qspylib/
+ :alt: PyPI - License
+
+
``qspylib`` is a bundle of API wrappers for various amateur radio-related websites, including QRZ, LOTW, eQSL, and ClubLog.
It is currently in development and should be considered unstable version-to-version while the version number is still 0.x.x.
-Issues and pull requests are welcome, and should be made on the GitHub repository.
+Issues and pull requests are welcome, and should be made on the `GitHub repository <https://github.com/JayToTheAy/QSPy>`_.
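+
+A quick, illustrative example of pulling a logbook from LOTW (mirroring the README example; see the API reference below for the full interface):
+
+.. code-block:: python
+
+   from qspylib import lotw
+
+   client = lotw.LOTWClient("callsign", "password")
+   logbook = client.fetch_logbook()
+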
.. toctree::
:maxdepth: 2
diff --git a/pyproject.toml b/pyproject.toml
index be4d44f..0036043 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -19,7 +19,11 @@ description = "A set of API wrappers for different amateur radio websites, inclu
readme = "README.md"
keywords = ["QRZ", "LOTW", "eQSL", "API", "amateur radio"]
classifiers = [
- "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Operating System :: OS Independent",
"Development Status :: 2 - Pre-Alpha"
diff --git a/src/qspylib/__init__.py b/src/qspylib/__init__.py
index caed440..bc6ae9f 100644
--- a/src/qspylib/__init__.py
+++ b/src/qspylib/__init__.py
@@ -1,10 +1,10 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
+# pylint: disable-all
# imports
from . import logbook
from . import lotw
from . import clublog
from . import qrz
-from . import eqsl
\ No newline at end of file
+from . import eqsl
diff --git a/src/qspylib/_version.py b/src/qspylib/_version.py
index ecbf3ac..a5854fd 100644
--- a/src/qspylib/_version.py
+++ b/src/qspylib/_version.py
@@ -1 +1,2 @@
-__version__ = '0.0.1dev0'
\ No newline at end of file
+"""unified version string"""
+__version__ = '1.0.0a1'
diff --git a/src/qspylib/clublog.py b/src/qspylib/clublog.py
index d5549a0..4524e51 100644
--- a/src/qspylib/clublog.py
+++ b/src/qspylib/clublog.py
@@ -6,29 +6,37 @@
import requests
from .logbook import Logbook
+class ClubLogError(Exception):
+ """An error raised when an issue occurs with the ClubLog API."""
+ def __init__(self, message="An error occurred while interfacing with the ClubLog API"):
+ super().__init__(message)
class ClubLogClient:
- """This is a wrapper for the ClubLog API, holding a user's authentication to perform actions on their behalf.
+ """This is a wrapper for the ClubLog API, holding a user's authentication\
+ to perform actions on their behalf.
"""
- def __init__(self, email: str, callsign: str, password: str):
+ def __init__(self, email: str, callsign: str, password: str,
+ timeout: int = 15):
"""Initializes a ClubLogClient object.
Args:
email (str): Email address for the ClubLog account
callsign (str): Callsign for the ClubLog account
password (str): Password for the ClubLog account
-
+ timeout (int, optional): Timeout for requests. Defaults to 15.
"""
self.email = email
self.callsign = callsign
self.password = password
+ self.timeout = timeout
self.base_url = "https://clublog.org/getadif.php"
-
- def fetch_logbook(self):
- """Fetch the user's ClubLog logbook.
+ def fetch_logbook(self) -> Logbook:
+ """Fetch the user's ClubLog logbook.
+ Raises:
+ HTTPError: An error occurred while trying to make a connection.
Returns:
qspylib.logbook.Logbook: A logbook containing the user's QSOs.
"""
@@ -40,8 +48,7 @@ def fetch_logbook(self):
# filter down to only used params
data = {k: v for k, v in data.items() if v is not None}
- response = requests.post(self.base_url, data=data)
- if response.status_code == 200:
+ response = requests.post(self.base_url, data=data, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
return Logbook(self.callsign, response.text)
- else:
- response.raise_for_status()
\ No newline at end of file
+ raise response.raise_for_status()
diff --git a/src/qspylib/eqsl.py b/src/qspylib/eqsl.py
index 85cdfba..a860e73 100644
--- a/src/qspylib/eqsl.py
+++ b/src/qspylib/eqsl.py
@@ -3,69 +3,84 @@
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
"""Functions and classes related to querying the eQSL API.
"""
-from .logbook import Logbook
import requests
+from .logbook import Logbook
from ._version import __version__
-# functions that don't require authentication
+# region Exceptions
+class eQSLError(Exception): #pylint: disable=invalid-name
+ """An error occurred interfacing with eQSL."""
+ def __init__(self, message="An error occurred interfacing with eQSL"):
+ super().__init__(message)
+# endregion
-def verify_eqsl(CallsignFrom: str, CallsignTo: str, QSOBand: str, QSOMode: str = None, QSODate: str = None, timeout: int = 15):
+# region Module Functions
+def verify_eqsl(callsign_from: str, callsign_to: str, qso_band: str, #pylint: disable=R0913
+ qso_mode: str = None, qso_date: str = None, timeout: int = 15):
"""Verify a QSL with eQSL.
Args:
- CallsignFrom (str): Callsign originating QSO (i.e. N5UP)
- CallsignTo (str): Callsign receiving QSO (i.e. TE5T)
- QSOBand (str): Band QSO took place on (i.e. 160m)
- QSOMode (str, optional): Mode QSO took place with (i.e. SSB). Defaults to None.
- QSODate (str, optional): Date QSO took place (i.e. 01/31/2000). Defaults to None.
- timeout (int, optional): Seconds before connection times out. Defaults to 15.
+ callsign_from (str): Callsign originating QSO (e.g. N5UP)
+ callsign_to (str): Callsign receiving QSO (e.g. TE5T)
+ qso_band (str): Band QSO took place on (e.g. 160m)
+ qso_mode (str, optional): Mode QSO took place with (e.g. SSB).\
+ Defaults to None.
+ qso_date (str, optional): Date QSO took place (e.g. 01/31/2000).\
+ Defaults to None.
+ timeout (int, optional): Seconds before connection times out.\
+ Defaults to 15.
Raises:
- Exception: Exception
+ eQSLError: An error occurred interfacing with eQSL.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
- bool, str: bool of whether the QSO was verified and a str of extra information eQSL reports, such as Authenticity Guaranteed status
+ bool, str: bool of whether the QSO was verified and a str of extra\
+ information eQSL reports, such as Authenticity Guaranteed status.
"""
url = "https://www.eqsl.cc/qslcard/VerifyQSO.cfm"
params = {
- 'CallsignFrom': CallsignFrom,
- 'CallsignTo': CallsignTo,
- 'QSOBand': QSOBand,
- 'QSOMode': QSOMode,
- 'QSODate': QSODate,
+ 'CallsignFrom': callsign_from,
+ 'CallsignTo': callsign_to,
+ 'QSOBand': qso_band,
+ 'QSOMode': qso_mode,
+ 'QSODate': qso_date,
}
with requests.Session() as s:
- r = s.get(url, params=params, headers={'user-agent': 'pyQSP/' + __version__}, timeout=timeout)
- if r.status_code == 200:
- raw_result = r.text
+ response = s.get(url, params=params, headers={'user-agent': 'pyQSP/'
+ + __version__}, timeout=timeout)
+ if response.status_code == requests.codes.ok:
+ raw_result = response.text
+ # TO-DO: make this a case statement
if 'Result - QSO on file' in raw_result:
return True, raw_result
- elif 'Parameter missing' not in raw_result:
+ if 'Parameter missing' not in raw_result:
return False, raw_result
- else:
- raise Exception(raw_result)
- else:
- r.raise_for_status()
+ raise eQSLError(raw_result)
+ raise response.raise_for_status()
-def retrieve_graphic(username: str, password: str, CallsignFrom: str, QSOYear: str, QSOMonth: str, QSODay: str, QSOHour: str, QSOMinute: str, QSOBand: str, QSOMode: str, timeout: int = 15):
+def retrieve_graphic(username: str, password: str, callsign_from: str,
+ qso_year: str, qso_month: str, qso_day: str,
+ qso_hour: str, qso_minute: str, qso_band: str,
+ qso_mode: str, timeout: int = 15):
"""Retrieve the graphic image for a QSO from eQSL.
- Note:
+ Note:
Not yet implemented.
Args:
username (str): The callsign of the recipient of the eQSL
password (str): The password of the user's account
- CallsignFrom (str): The callsign of the sender of the eQSL
- QSOYear (str): YYYY OR YY format date of the QSO
- QSOMonth (str): MM format
- QSODay (str): DD format
- QSOHour (str): HH format (24-hour time)
- QSOMinute (str): MM format
- QSOBand (str): 20m, 80M, 70cm, etc. (case insensitive)
- QSOMode (str): Must match exactly and should be an ADIF-compatible mode
+ callsign_from (str): The callsign of the sender of the eQSL
+ qso_year (str): YYYY OR YY format date of the QSO
+ qso_month (str): MM format
+ qso_day (str): DD format
+ qso_hour (str): HH format (24-hour time)
+ qso_minute (str): MM format
+ qso_band (str): 20m, 80M, 70cm, etc. (case insensitive)
+ qso_mode (str): Must match exactly and should be an ADIF-compatible mode
timeout (int, optional): time to connection timeout. Defaults to 15.
Todo:
@@ -73,111 +88,128 @@ def retrieve_graphic(username: str, password: str, CallsignFrom: str, QSOYear: s
Raises:
NotImplementedError: Not yet implemented.
+
"""
raise NotImplementedError
def get_ag_list(timeout: int = 15):
- """Get a list of Authenticity Guaranteed members.
+ """Get a list of Authenticity Guaranteed members.
Args:
timeout (int, optional): Seconds before connection times out. Defaults to 15.
+ Raises:
+ HTTPError: An error occurred while trying to make a connection.
+
Returns:
- tuple, str: tuple contains a list of string callsigns, and a str header with the date the list was generated
+ tuple: a set of string callsigns, and a str header with the\
+ date the list was generated
"""
url = "https://www.eqsl.cc/qslcard/DownloadedFiles/AGMemberList.txt"
with requests.Session() as s:
- r = s.get(url, headers={'user-agent': 'pyQSP/' + __version__}, timeout=timeout)
- if r.status_code == 200:
- result_list = list()
- result_list += r.text.split('\r\n')
+ response = s.get(url, headers={'user-agent': 'pyQSP/' + __version__},
+ timeout=timeout)
+ if response.status_code == requests.codes.ok:
+ result_list = []
+ result_list += response.text.split('\r\n')
return set(result_list[1:-1]), str(result_list[0])
- else:
- r.raise_for_status()
+ raise response.raise_for_status()
def get_ag_list_dated(timeout: int = 15):
- """Get a list of Authenticity Guaranteed eQSL members with the date of their last upload to eQSL.
+ """Get a list of Authenticity Guaranteed eQSL members with the date of\
+ their last upload to eQSL.
Args:
- timeout (int, optional): Seconds before connection times out. Defaults to 15.
+ timeout (int, optional): Seconds before connection times out.\
+ Defaults to 15.
+
+ Raises:
+ HTTPError: An error occurred while trying to make a connection.
Returns:
- tuple: First element is a dict with key: callsign and value: date, and second is a header of when this list was generated.
+ tuple: First element is a dict with key: callsign and value: date, and\
+ second is a header of when this list was generated.
"""
url = "https://www.eqsl.cc/qslcard/DownloadedFiles/AGMemberListDated.txt"
with requests.Session() as s:
- r = s.get(url, headers={'user-agent': 'pyQSP/' + __version__}, timeout=timeout)
- if r.status_code == 200:
- result_list = r.text.split('\r\n')
+ response = s.get(url, headers={'user-agent': 'pyQSP/' + __version__},\
+ timeout=timeout)
+ if response.status_code == requests.codes.ok:
+ result_list = response.text.split('\r\n')
loc, header = result_list[1:-1], str(result_list[0])
- dict_calls = dict()
+ dict_calls = {}
for pair in loc:
call, date = pair.split(', ')
dict_calls[call] = date
return dict_calls, header
- else:
- r.raise_for_status()
+ raise response.raise_for_status()
def get_full_member_list(timeout: int = 15):
"""Get a list of all members of QRZ.
Args:
- timeout (int, optional): Seconds before connection times out. Defaults to 15.
+ timeout (int, optional): Seconds before connection times out.\
+ Defaults to 15.
+
+ Raises:
+ HTTPError: An error occurred while trying to make a connection.
Returns:
- dict: key is the callsign and the value is a tuple of: GridSquare, AG, Last Upload
+ dict: key is the callsign and the value is a tuple of: GridSquare, AG,\
+ Last Upload
"""
-
url = "https://www.eqsl.cc/DownloadedFiles/eQSLMemberList.csv"
with requests.Session() as s:
- r = requests.get(url, timeout=timeout)
- if r.status_code == 200:
- result_list = r.text.split('\r\n')[1:-1]
- dict_calls = dict()
+ response = s.get(url, timeout=timeout)
+ if response.status_code == requests.codes.ok:
+ result_list = response.text.split('\r\n')[1:-1]
+ dict_calls = {}
for row in result_list:
data = row.split(',')
dict_calls[data[0]] = data[1:]
return dict_calls
- else:
- r.raise_for_status()
+ raise response.raise_for_status()
-def get_users_data(callsign: str, timeout: int = 15):
+def get_users_data(callsign: str):
"""Get a specific user's data from the full member list.
Note:
- This is incredibly slow. A better method probably involves doing some vectorization.
+ This is incredibly slow. A better method probably involves doing some\
+ vectorization, but that would require adding a dependency.
Args:
callsign (str): callsign to get data about
- timeout (int, optional): Seconds before connection times out. Defaults to 15.
Returns:
tuple: contains: GridSquare, AG, Last Upload
"""
dict_users: dict = get_full_member_list()
return dict_users.get(callsign)
+# endregion
-
-# things that require authentication
-class eQSLClient:
- """API wrapper for eQSL.cc. This class holds a user's authentication to perform actions on their behalf.
+# region eQSL API Wrapper
+class eQSLClient: #pylint: disable=invalid-name
+ """API wrapper for eQSL.cc. This class holds a user's authentication to\
+ perform actions on their behalf.
"""
- def __init__(self, username: str, password: str, QTHNickname: str = None, timeout: int = 15):
+ def __init__(self, username: str, password: str, qth_nickname: str = None,
+ timeout: int = 15):
"""Create an eQSLClient object.
Args:
username (str): callsign to login with
password (str): password to login with
- QTHNickname (str, optional): QTHNickname. Defaults to None.
- timeout (int, optional): time to timeout for the entire Client. Defaults to 15.
+ qth_nickname (str, optional): QTHNickname. Defaults to None.
+ timeout (int, optional): time to timeout for the entire Client.\
+ Defaults to 15.
"""
- self.callsign = username,
+ self.callsign = username
self.timeout = timeout
self.base_url = "https://www.eqsl.cc/qslcard/"
@@ -186,11 +218,11 @@ def __init__(self, username: str, password: str, QTHNickname: str = None, timeou
session.params = {k: v for k, v in {
'username': username,
'password': password,
- 'QTHNickname': QTHNickname }.items() if v is not None}
+ 'QTHNickname': qth_nickname }.items() if v is not None}
session.headers = {'User-Agent': 'pyQSP/' + __version__}
self.session = session
-
+
def set_timeout(self, timeout: int):
"""Set timeout for the Client to a new value.
@@ -198,99 +230,158 @@ def set_timeout(self, timeout: int):
timeout (int): time to timeout in seconds.
"""
self.timeout = timeout
-
+
# actual GETs
def get_last_upload_date(self):
"""Gets last upload date for the logged in user.
Raises:
- Exception: Exception
+ eQSLError: An error occurred interfacing with eQSL.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
- str: date of last upload for the active user. Date is formatted: DD-MMM-YYYY at HH:mm UTC
+ str: date of last upload for the active user. Date is formatted:\
+ DD-MMM-YYYY at HH:mm UTC
"""
with self.session as s:
- r = s.get(self.base_url + 'DisplayLastUploadDate.cfm', timeout=self.timeout)
- if r.status_code == 200:
+ r = s.get(self.base_url + 'DisplayLastUploadDate.cfm',
+ timeout=self.timeout)
+ if r.status_code == requests.codes.ok:
success_txt = 'Your last ADIF upload was'
if success_txt in r.text:
return r.text[r.text.index('(')+1:r.text.index(')')]
- else:
- raise Exception(r.text)
+ raise eQSLError(r.text)
+ raise r.raise_for_status()
- def fetch_inbox(self, LimitDateLo:str=None, LimitDateHi:str=None, RcvdSince:str=None, ConfirmedOnly:str=None, UnconfirmedOnly:str=None, Archive:str=None, HamOnly:str=None):
+ def fetch_inbox(self, limit_date_lo:str=None, limit_date_hi:str=None, #pylint: disable=R0914,R0913
+ rcvd_since:str=None, confirmed_only:str=None,
+ unconfirmed_only:str=None, archive:str=None,
+ ham_only:str=None) -> Logbook:
"""Fetches INCOMING QSOs, from the user's eQSL Inbox.
Args:
- LimitDateLo (str, optional): Earliest QSO date to download (oddly, in MM/DD/YYYY format with escape code 2F for slashes), optionally append HH:MM otherwise the default is 00:00. Defaults to None.
- LimitDateHi (str, optional): Latest QSO date to download (oddly, in MM/DD/YYYY format with escape code 2F), optionally append HH:MM otherwise the default is 23:59 to include the entire day. Defaults to None.
- RcvdSince (str, optional): (YYYYMMDDHHMM) Everything that was entered into the database on or after this date/time (Valid range 01/01/1900 - 12/31/2078). Defaults to None.
- ConfirmedOnly (str, optional): Set to any value to signify you only want to download Inbox items you HAVE confirmed. Defaults to None.
- UnconfirmedOnly (str, optional): Set to any value to signify you only want to download Inbox items you have NOT confirmed. Defaults to None.
- Archive (str, optional): 1 for Archived records ONLY; 0 for Inbox (non-archived) ONLY; omit this parameter to retrieve ALL records in Inbox and Archive. Defaults to None.
- HamOnly (str, optional): anything, filters out all SWL contacts. Defaults to None.
+ limit_date_lo (str, optional): Earliest QSO date to download\
+ (oddly, in MM/DD/YYYY format with escape code 2F for slashes),\
+ optionally append HH:MM otherwise the default is 00:00.\
+ Defaults to None.
+ limit_date_hi (str, optional): Latest QSO date to download\
+ (oddly, in MM/DD/YYYY format with escape code 2F), optionally\
+ append HH:MM otherwise the default is 23:59 to include the\
+ entire day.\
+ Defaults to None.
+ rcvd_since (str, optional): (YYYYMMDDHHMM) Everything that was\
+ entered into the database on or after this date/time (Valid\
+ range 01/01/1900 - 12/31/2078).\
+ Defaults to None.
+ confirmed_only (str, optional): Set to any value to signify you\
+ only want to download Inbox items you HAVE confirmed.\
+ Defaults to None.
+ unconfirmed_only (str, optional): Set to any value to signify you\
+ only want to download Inbox items you have NOT confirmed.\
+ Defaults to None.
+ archive (str, optional): 1 for Archived records ONLY; 0 for Inbox\
+ (non-archived) ONLY; omit this parameter to retrieve ALL\
+ records in Inbox and Archive.\
+ Defaults to None.
+ ham_only (str, optional): anything, filters out all SWL contacts.\
+ Defaults to None.
Raises:
- Exception: Exception
+ eQSLError: An error occurred interfacing with eQSL.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
qspylib.logbook.Logbook: A logbook containing the user's QSOs.
"""
params = {
- 'LimitDateLo': LimitDateLo,
- 'LimitDateHi': LimitDateHi,
- 'RcvdSince': RcvdSince,
- 'ConfirmedOnly': ConfirmedOnly,
- 'UnconfirmedOnly': UnconfirmedOnly,
- 'Archive': Archive,
- 'HamOnly': HamOnly
+ 'LimitDateLo': limit_date_lo,
+ 'LimitDateHi': limit_date_hi,
+ 'RcvdSince': rcvd_since,
+ 'ConfirmedOnly': confirmed_only,
+ 'UnconfirmedOnly': unconfirmed_only,
+ 'Archive': archive,
+ 'HamOnly': ham_only
}
# filter down to only used params
params = {k: v for k, v in params.items() if v is not None}
with self.session as s:
- r = s.get(self.base_url + "DownloadInBox.cfm", params=params, timeout=self.timeout)
- if r.status_code == 200:
+ r = s.get(self.base_url + "DownloadInBox.cfm", params=params,
+ timeout=self.timeout)
+ if r.status_code == requests.codes.ok:
adif_found_txt = 'Your ADIF log file has been built'
adif_status = r.text.index(adif_found_txt) if adif_found_txt in r.text else -1
if adif_status < 0:
- raise Exception('Failed to generate ADIF.')
+ raise eQSLError('Failed to generate ADIF.')
adif_link_start_idx = r.text.index('
.ADI file')
adif_link = self.base_url + r.text[adif_link_start_idx:adif_link_end_idx]
- adif_response = requests.get(adif_link)
- if adif_response.status_code == 200:
+ adif_response = requests.get(adif_link, timeout=self.timeout)
+ if adif_response.status_code == requests.codes.ok:
return Logbook(self.callsign, adif_response.text)
- else:
- r.raise_for_status()
- else:
- r.raise_for_status()
+ raise r.raise_for_status()
+ raise r.raise_for_status()
+
+ def fetch_inbox_qsls(self, limit_date_lo:str=None, limit_date_hi:str=None, #pylint: disable = R0913
+ rcvd_since:str=None, archive:str=None,
+ ham_only:str=None) -> Logbook:
+ """Fetches INCOMING QSLs, from the user's eQSL Inbox.
+
+ limit_date_lo (str, optional): Earliest QSO date to download\
+ (oddly, in MM/DD/YYYY format with escape code 2F for slashes),\
+ optionally append HH:MM otherwise the default is 00:00.\
+ Defaults to None.
+ limit_date_hi (str, optional): Latest QSO date to download\
+ (oddly, in MM/DD/YYYY format with escape code 2F), optionally\
+ append HH:MM otherwise the default is 23:59 to include the\
+ entire day.\
+ Defaults to None.
+ rcvd_since (str, optional): (YYYYMMDDHHMM) Everything that was\
+ entered into the database on or after this date/time (Valid\
+ range 01/01/1900 - 12/31/2078).\
+ Defaults to None.
+ archive (str, optional): 1 for Archived records ONLY; 0 for Inbox\
+ (non-archived) ONLY; omit this parameter to retrieve ALL\
+ records in Inbox and Archive.\
+ Defaults to None.
+ ham_only (str, optional): anything, filters out all SWL contacts.\
+ Defaults to None.
+
+ Raises:
+ eQSLError: An error occurred interfacing with eQSL.
+ HTTPError: An error occurred while trying to make a connection.
+
+ Returns:
+ qspylib.logbook.Logbook: A logbook containing the user's QSOs.
+ """
+ return self.fetch_inbox(limit_date_lo, limit_date_hi, rcvd_since, 'Y',
+ None, archive, ham_only)
def fetch_outbox(self):
"""Fetches OUTGOING QSOs, from the user's eQSL Outbox.
Raises:
- Exception: Exception
+ eQSLError: An error occurred interfacing with eQSL.
+ HTTPError: An error occurred while trying to make a connection.
+
Returns:
qspylib.logbook.Logbook: A logbook containing the user's QSOs.
"""
with self.session as s:
- r = s.get(self.base_url + "DownloadADIF.cfm", timeout=self.timeout)
- if r.status_code == 200:
+ r = s.get(self.base_url + "DownloadADIF.cfm",
+ timeout=self.timeout)
+ if r.status_code == requests.codes.ok:
adif_found_txt = 'Your ADIF log file has been built'
adif_status = r.text.index(adif_found_txt) if adif_found_txt in r.text else -1
if adif_status < 0:
- raise Exception('Failed to generate ADIF.')
+ raise eQSLError('Failed to generate ADIF.')
adif_link_start_idx = r.text.index('.ADI file')
adif_link = self.base_url + r.text[adif_link_start_idx:adif_link_end_idx]
- adif_response = requests.get(adif_link)
- if adif_response.status_code == 200:
+ adif_response = requests.get(adif_link, timeout=self.timeout)
+ if adif_response.status_code == requests.codes.ok:
return Logbook(self.callsign, adif_response.text)
- else:
- r.raise_for_status()
- else:
- r.raise_for_status()
-
-
+ raise r.raise_for_status()
+ raise r.raise_for_status()
+# endregion
diff --git a/src/qspylib/logbook.py b/src/qspylib/logbook.py
index 9773a93..d01dc40 100644
--- a/src/qspylib/logbook.py
+++ b/src/qspylib/logbook.py
@@ -1,10 +1,12 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-"""Classes to provide the back-bone of qspylib.
+"""Classes to provide the backbone of qspylib's logbook functionality
"""
import adif_io
+# classes
+
class QSO:
"""A hambaseio QSO obj. Contains simple info on a QSO.
@@ -16,7 +18,8 @@ class QSO:
time_on (str): time start of QSO
qsl_rcvd (str): if QSO has been confirmed
"""
- def __init__(self, their_call:str, band:str, mode:str, qso_date:str, time_on:str, qsl_rcvd:str='N'):
+ def __init__(self, their_call:str, band:str, mode:str, qso_date:str,
+ time_on:str, qsl_rcvd:str='N'):
"""Initializes a QSO object.
Args:
@@ -35,21 +38,34 @@ def __init__(self, their_call:str, band:str, mode:str, qso_date:str, time_on:str
self.qsl_rcvd = qsl_rcvd
def __str__(self):
- return f"CALL: {self.their_call} BAND: {self.band} MODE: {self.mode} DATE: {self.qso_date} TIME: {self.time_on} QSL: {self.qsl_rcvd}\n"
- # to-do: make this return as an actual adif formattede string
+ return f"CALL: {self.their_call} BAND: {self.band} MODE: {self.mode} \
+ DATE: {self.qso_date} TIME: {self.time_on} QSL: {self.qsl_rcvd}\n"
+ # to-do: make this return as an actual adif formatted string
+
+ def __eq__(self, other):
+ if isinstance(other, QSO):
+ if self.their_call == other.their_call and self.band == other.band\
+ and self.mode == other.mode and self.qso_date\
+ == other.qso_date and self.time_on == other.time_on:
+ return True
+ return False
class Logbook:
- """A Logbook has both an adi field, holding all fields parsed from an .adi log per QSO, and a simplified log field, holding a simplified set of fields per QSO. A QSO is one of qspylib.logbook.QSO.
-
- Interacting with the log field can provide one field to check for if a QSO is confirmed on one or more of: LoTW, eQSL, QRZ, or ClubLog.
+ """A Logbook has both an adi field, holding all fields parsed from an .adi\
+ log per QSO, and a simplified log field, holding a simplified set of\
+ fields per QSO. A QSO is one of qspylib.logbook.QSO.
+
+ Interacting with the log field can provide one field to check for if a QSO\
+ is confirmed on one or more of: LoTW, eQSL, QRZ, or ClubLog.
A Logbook is built by consuming an .adi formatted input string.
Attributes:
callsign (str): callsign of the logbook owner
- adi (dict): a dict, where each "entry" is itself a dict of fields parsed from an .adi log
- header (str): header of the .adi log
- log (set): simplified set of fields per QSO
+ adi (list[adif_io.QSO]): a list of adif_io.QSO objects, each holding\
+ all the fields parsed from one QSO record of an .adi log.
+ header (adif_io.Headers): header of the .adi log.
+ log (list): a list of qspylib.logbook.QSO objects, each holding a\
+ simplified set of fields per QSO.
"""
def __init__(self, callsign: str, unparsed_log: str):
@@ -61,13 +77,11 @@ def __init__(self, callsign: str, unparsed_log: str):
"""
self.callsign = callsign
self.adi, self.header = adif_io.read_from_string(unparsed_log)
- self.log = set()
+ self.log = []
for contact in self.adi:
- # whether this qsl has been confirmed; lotw & clublog use qsl_rcvd, eqsl uses eqsl_qsl_rcvd, qrz most simply gives a qsl date
- qsl_rcvd, qrz_qsl_dte, eqsl_qsl_rcvd = contact.get('QSL_RCVD'), contact.get('app_qrzlog_qsldate'), contact.get('eqsl_qsl_rcvd')
- qso_confirmed = 'Y' if qsl_rcvd == 'Y' or qrz_qsl_dte or eqsl_qsl_rcvd == 'Y' else 'N'
- # create a QSO for this contact
- self.log.add(QSO(contact['CALL'], contact['BAND'], contact['MODE'], contact['QSO_DATE'], contact['TIME_ON'], qso_confirmed))
+ # whether this qsl has been confirmed; lotw & clublog use qsl_rcvd,
+ # eqsl uses eqsl_qsl_rcvd, qrz most simply gives a qsl date
+ self.log.append(qso_from_adi(contact))
def __str__(self):
log_str = ""
@@ -75,25 +89,52 @@ def __str__(self):
log_str += str(qso)
return log_str
- def write_qso(self, contact: QSO):
- """Append a QSO to the .log portion of a Logbook.
+ def __eq__(self, other):
+ if isinstance(other, Logbook):
+ if self.callsign == other.callsign and self.adi == other.adi and\
+ self.header == other.header and self.log == other.log:
+ return True
+ return False
+
+ # public methods
- Note:
- This does not append to the .adi portion of a Logbook.
+ def write_qso(self, contact: adif_io.QSO):
+ """Append a QSO to both the .log and .adi portions of the Logbook object.
Args:
- contact (QSO): QSO object to be added
+ contact (adif_io.QSO): QSO object to be added, structured as from\
+ an adif.io QSO object
"""
- self.log.add(contact)
+ logified_qso = qso_from_adi(contact)
+ self.log.append(logified_qso)
+ self.adi.append(contact)
- def discard_qso(self, contact: QSO):
- """Removes the corresponding QSO from the .log portion of a Logbook, if one exists.
-
- Note:
- This does not remove from the .adi portion of a Logbook.
+ def discard_qso(self, contact: adif_io.QSO):
+ """Removes the corresponding QSO from the .log portion of a Logbook,\
+ if one exists.
Args:
- contact (QSO): QSO to be deleted, if it exists
+ contact (adif_io.QSO): QSO to be deleted, structured as an\
+ adif_io.QSO object
"""
- self.log.discard(contact)
- # to-do: discrad from adi?
\ No newline at end of file
+ logified_qso = qso_from_adi(contact)
+ self.log.remove(logified_qso)
+ self.adi.remove(contact)
+
+# functions of the module
+
+def qso_from_adi(contact: adif_io.QSO):
+ """Transforms an adif_io.QSO object into a qspylib.logbook.QSO object.
+
+ Args:
+ contact (adif_io.QSO): contact to transform into a .log friendly QSO
+
+ Returns:
+ qspylib.logbook.QSO: a qspylib QSO object
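+
+ Example:
+     A minimal sketch; field values are illustrative::
+
+         record = adif_io.QSO({'CALL': 'W1AW', 'BAND': '20m',
+                               'MODE': 'SSB', 'QSO_DATE': '20240101',
+                               'TIME_ON': '0000', 'QSL_RCVD': 'Y'})
+         qso = qso_from_adi(record)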
+ """
+ qsl_rcvd = contact.get('QSL_RCVD')
+ qrz_qsl_dte = contact.get('app_qrzlog_qsldate')
+ eqsl_qsl_rcvd = contact.get('eqsl_qsl_rcvd')
+ qso_confirmed = 'Y' if qsl_rcvd == 'Y' or qrz_qsl_dte or eqsl_qsl_rcvd == 'Y' else 'N'
+ return QSO(contact['CALL'], contact['BAND'], contact['MODE'],
+ contact['QSO_DATE'], contact['TIME_ON'], qso_confirmed)
diff --git a/src/qspylib/lotw.py b/src/qspylib/lotw.py
index d16bc02..f50f174 100644
--- a/src/qspylib/lotw.py
+++ b/src/qspylib/lotw.py
@@ -10,14 +10,17 @@
# exceptions
class RetrievalFailure(Exception):
- """A failure to retrieve information from LOTW. This can be due to a connection error, or a bad response from the server.
+ """A failure to retrieve information from LOTW. This can be due to a\
+ connection error, or a bad response from the server.
"""
- def __init__(self, message="Failed to retrieve information. Confirm log-in credentials are correct."):
+ def __init__(self, message="Failed to retrieve information. "
+ "Confirm log-in credentials are correct."):
self.message=message
super().__init__(self, message)
-class UploadFailure(Exception):
- """A failure to upload a file to LOTW. This is due to a file being rejected by LOTW. The error message from LOTW is provided in the exception.
+class UploadError(Exception):
+ """A failure to upload a file to LOTW. This is due to a file being\
+ rejected by LOTW. The error message from LOTW is provided in the exception.
"""
def __init__(self, message="Failed to upload file."):
self.message=message
@@ -29,7 +32,11 @@ def get_last_upload(timeout: int = 15):
"""Queries LOTW for a list of callsigns and date they last uploaded.
Args:
- timeout (int, optional): time in seconds to connection timeout. Defaults to 15.
+ timeout (int, optional): time in seconds to connection timeout.\
+ Defaults to 15.
+
+ Raises:
+ HTTPError: An error occurred while trying to make a connection.
Returns:
csv: a csv of callsigns and last upload date
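+
+ Example:
+     A minimal sketch::
+
+         csv_text = get_last_upload()
+         print(csv_text.splitlines()[0])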
@@ -39,10 +46,9 @@ def get_last_upload(timeout: int = 15):
with requests.Session() as s:
response = s.get(url, timeout=timeout)
- if response.status_code == 200:
+ if response.status_code == requests.codes.ok:
return response.text
- else:
- response.raise_for_status()
+ response.raise_for_status()
def upload_logbook(file, timeout:int=120):
"""Given a .tq5 or .tq8, uploads it to LOTW.
@@ -52,10 +58,12 @@ def upload_logbook(file, timeout:int=120):
Args:
file (_type_): file to be uploaded
- timeout (int, optional): time in seconds to connection timeout. Defaults to 120.
+ timeout (int, optional): time in seconds to connection timeout.\
+ Defaults to 120.
Raises:
- UploadFailure: Why the upload failed.
+ UploadError: The upload was rejected by LotW.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
str: Return message from LOTW on file upload.
@@ -67,18 +75,17 @@ def upload_logbook(file, timeout:int=120):
with requests.Session() as s:
response = s.post(upload_url, data, timeout=timeout)
- if response.status_code == 200:
+ if response.status_code == requests.codes.ok:
result = response.text
result_start_idx = result.index('')
upl_result = result[result_start_idx:result_end_idx]
- upl_message = str(result[result.index('')])
+ upl_message = str(result[result.index('')])
if 'rejected' in upl_result:
- raise UploadFailure(upl_message)
- else:
- return upl_message
- else:
- response.raise_for_status()
+ raise UploadError(upl_message)
+ return upl_message
+ response.raise_for_status()
class LOTWClient:
"""Wrapper for LOTW API functionality that requires a logged-in session.
@@ -103,31 +110,60 @@ def __init__(self, username: str, password: str):
self.session = session
- def fetch_logbook(self, qso_query=1, qso_qsl='yes', qso_qslsince=None, qso_qsorxsince=None, qso_owncall=None,
- qso_callsign=None,qso_mode=None,qso_band=None,qso_dxcc=None,qso_startdate=None, qso_starttime=None,
- qso_enddate=None, qso_endtime=None, qso_mydetail=None,qso_qsldetail=None, qsl_withown=None):
+ def fetch_logbook(self, qso_query=1, qso_qsl='yes', qso_qslsince=None,
+ qso_qsorxsince=None, qso_owncall=None, qso_callsign=None,
+ qso_mode=None, qso_band=None, qso_dxcc=None,
+ qso_startdate=None, qso_starttime=None, qso_enddate=None,
+ qso_endtime=None, qso_mydetail=None, qso_qsldetail=None,
+ qsl_withown=None):
"""_summary_
Args:
- qso_query (int, optional): If absent, ADIF file will contain no QSO records. Defaults to 1.
- qso_qsl (str, optional): If "yes", only QSL records are returned (can be 'yes' or 'no'). Defaults to 'yes'.
- qso_qslsince (_type_, optional): QSLs since specified datetime (YYYY-MM-DD HH:MM:SS). Ignored unless qso_qsl="yes". Defaults to None.
- qso_qsorxsince (_type_, optional): QSOs received since specified datetime. Ignored unless qso_qsl="no". Defaults to None.
- qso_owncall (_type_, optional): Returns records where "own" call sign matches. Defaults to None.
- qso_callsign (_type_, optional): Returns records where "worked" call sign matches. Defaults to None.
- qso_mode (_type_, optional): Returns records where mode matches. Defaults to None.
- qso_band (_type_, optional): Returns records where band matches. Defaults to None.
- qso_dxcc (_type_, optional): Returns matching DXCC entities, implies qso_qsl='yes'. Defaults to None.
- qso_startdate (_type_, optional): Returns only records with a QSO date on or after the specified value. Defaults to None.
- qso_starttime (_type_, optional): Returns only records with a QSO time at or after the specified value on the starting date. This value is ignored if qso_startdate is not provided. Defaults to None.
- qso_enddate (_type_, optional): Returns only records with a QSO date on or before the specified value. Defaults to None.
- qso_endtime (_type_, optional): Returns only records with a QSO time at or before the specified value on the ending date. This value is ignored if qso_enddate is not provided. Defaults to None.
- qso_mydetail (_type_, optional): If "yes", returns fields that contain the Logging station's location data, if any. Defaults to None.
- qso_qsldetail (_type_, optional): If "yes", returns fields that contain the QSLing station's location data, if any. Defaults to None.
- qsl_withown (_type_, optional): If "yes", each record contains the STATION_CALLSIGN and APP_LoTW_OWNCALL fields to identify the "own" call sign used for the QSO. Defaults to None.
+ qso_query (int, optional): If absent, ADIF file will contain no\
+ QSO records. Defaults to 1.
+ qso_qsl (str, optional): If "yes", only QSL records are returned \
+ (can be 'yes' or 'no'). Defaults to 'yes'.
+ qso_qslsince (_type_, optional): QSLs since specified datetime \
+ (YYYY-MM-DD HH:MM:SS). Ignored unless qso_qsl="yes". \
+ Defaults to None.
+ qso_qsorxsince (_type_, optional): QSOs received since specified \
+ datetime. Ignored unless qso_qsl="no". Defaults to None.
+ qso_owncall (_type_, optional): Returns records where "own" call \
+ sign matches. Defaults to None.
+ qso_callsign (_type_, optional): Returns records where "worked" \
+ call sign matches. Defaults to None.
+ qso_mode (_type_, optional): Returns records where mode matches. \
+ Defaults to None.
+ qso_band (_type_, optional): Returns records where band matches. \
+ Defaults to None.
+ qso_dxcc (_type_, optional): Returns matching DXCC entities, \
+ implies qso_qsl='yes'. Defaults to None.
+ qso_startdate (_type_, optional): Returns only records with a QSO \
+ date on or after the specified value. Defaults to None.
+ qso_starttime (_type_, optional): Returns only records with a QSO \
+ time at or after the specified value on the starting date. \
+ This value is ignored if qso_startdate is not provided. \
+ Defaults to None.
+ qso_enddate (_type_, optional): Returns only records with a QSO \
+ date on or before the specified value. Defaults to None.
+ qso_endtime (_type_, optional): Returns only records with a QSO \
+ time at or before the specified value on the ending date. \
+ This value is ignored if qso_enddate is not provided. \
+ Defaults to None.
+ qso_mydetail (_type_, optional): If "yes", returns fields that \
+ contain the Logging station's location data, if any. \
+ Defaults to None.
+ qso_qsldetail (_type_, optional): If "yes", returns fields that \
+ contain the QSLing station's location data, if any. \
+ Defaults to None.
+ qsl_withown (_type_, optional): If "yes", each record contains the \
+ STATION_CALLSIGN and APP_LoTW_OWNCALL fields to identify the \
+ "own" call sign used for the QSO. Defaults to None.
Raises:
- RetrievalFailure: A failure to retrieve information from LOTW. Contains the error received from LOTW.
+ RetrievalFailure: A failure to retrieve information from LOTW.\
+ Contains the error received from LOTW.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
qspylib.logbook.Logbook: A logbook containing the user's QSOs.
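+
+ Example:
+     A minimal sketch; the credentials below are placeholders::
+
+         client = LOTWClient("N0CALL", "notarealpassword")
+         logbook = client.fetch_logbook(qso_qslsince="2024-01-01 00:00:00")
+         for qso in logbook.log:
+             print(qso)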
@@ -159,23 +195,28 @@ def fetch_logbook(self, qso_query=1, qso_qsl='yes', qso_qslsince=None, qso_qsorx
response = s.get(self.base_url + log_url, params=params)
if '' not in response.text:
raise RetrievalFailure
- if response.status_code == 200:
+ if response.status_code == requests.codes.ok:
return Logbook(self.username, response.text)
- else:
- response.raise_for_status()
+ response.raise_for_status()
def get_dxcc_credit(self, entity:str=None, ac_acct:str=None):
- """Gets DXCC award account credit, optionally for a specific DXCC Entity Code specified via entity.
+ """Gets DXCC award account credit, optionally for a specific DXCC \
+ Entity Code specified via entity.
Note:
- This only returns *applied for and granted credit*, not 'presumed' credits.
+ This only returns *applied for and granted credit*, not 'presumed' \
+ credits.
Args:
- entity (str, optional): dxcc entity number to check for, if a specific entity is desired. Defaults to None.
- ac_acct (str, optional): award account to check against, if multiple exist for the given account. Defaults to None.
+ entity (str, optional): dxcc entity number to check for, if a \
+ specific entity is desired. Defaults to None.
+ ac_acct (str, optional): award account to check against, if \
+ multiple exist for the given account. Defaults to None.
Raises:
- RetrievalFailure: A failure to retrieve information from LOTW. Contains the error received from LOTW.
+ RetrievalFailure: A failure to retrieve information from LOTW. \
+ Contains the error received from LOTW.
+ HTTPError: An error occurred while trying to make a connection.
Returns:
qspylib.logbook.Logbook: A logbook containing the user's QSOs.
@@ -187,15 +228,13 @@ def get_dxcc_credit(self, entity:str=None, ac_acct:str=None):
}
# filter down to only used params
params = {k: v for k, v in params.items() if v is not None}
-
+
with self.session as s:
response = s.get(self.base_url + dxcc_url, params=params)
- if response.status_code == 200:
- # lotw lies, and claims an will be absent from bad outputs, but it's there, so we'll do something else.
+ if response.status_code == requests.codes.ok:
+ # lotw lies, and claims the end-of-header marker will be absent from
+ # bad outputs, but it's there, so we check the report title instead.
if 'ARRL Logbook of the World DXCC QSL Card Report' not in response.text[:46]:
raise RetrievalFailure(response.text)
- else:
- return Logbook(self.username, response.text)
- else:
- response.raise_for_status()
-
+ return Logbook(self.username, response.text)
+ response.raise_for_status()
diff --git a/src/qspylib/qrz.py b/src/qspylib/qrz.py
index 857ef48..e7dd98a 100644
--- a/src/qspylib/qrz.py
+++ b/src/qspylib/qrz.py
@@ -3,33 +3,49 @@
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
"""Functions and classes related to querying the QRZ APIs.
"""
-import requests
+#region Imports
import html
+from collections import OrderedDict
+from typing import Any
+from urllib.parse import urlparse, parse_qs
+import requests
import xmltodict
+#import adif_io
from .logbook import Logbook
from ._version import __version__
+
+# constants
+MAX_NUM_RETRIES = 1
+
+#endregion
+
+#region Exceptions
class QRZInvalidSession(Exception):
"""Error for when session is invalid.
"""
- def __init__(self, message="Got no session key back. This session is invalid."):
+ def __init__(self, message="Got no session key back. "
+ "This session is invalid."):
self.message=message
super().__init__(self, message)
+#endregion
-
-
+#region Client Classes
class QRZLogbookClient:
- """API wrapper for a QRZ Logbook. At present, only handles fetching QSOs.
+ """API wrapper for accessing QRZ Logbook data.
"""
- def __init__(self, key: str):
+ def __init__(self, key: str, timeout: int = 15):
"""Initializes a QRZLogbookClient object.
Args:
key (str): API key for a QRZ logbook.
+ timeout (int, optional): Time in seconds to wait for a response.\
+ Defaults to 15.
"""
self.key = key
self.base_url = "https://logbook.qrz.com/api"
+ self.timeout = timeout
self.headers = {
'User-Agent': 'pyQSP/' + __version__,
'Accept-Encoding': 'gzip, deflate',
@@ -37,61 +53,183 @@ def __init__(self, key: str):
'Connection': 'keep-alive'
}
- def fetch_logbook(self):
- """Fetches a logbook from QRZ corresponding to the given QRZLogbookClient.
+ def fetch_logbook(self, option:str=None) -> Logbook:
+ """Fetches the logbook corresponding to the Client's API Key.
+
+ Note:
+ If too many records are fetched at once, parsing will fail to\
+ complete and not all response keys will be returned. To prevent\
+ this, you should fetch the logbook in chunks, using the highest\
+ logid to start fetching the next chunk. See fetch_logbook_paged,\
+ unless that hasn't been implemented yet; then use this, and suffer.
+
+ Args:
+ option (str, optional): Optional parameters as specified by QRZ,\
+ like "MODE:SSB,CALL:W1AW". This should be a comma separated string.\
+ Defaults to None.
+
+ Raises:
+ HTTPError: An error occurred trying to make a connection.
Returns:
- qspylib.logbook.Logbook: A logbook containing the user's QSOs.
+ qspylib.logbook.Logbook: A logbook containing the user's QSOs.
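+
+ Example:
+     A minimal sketch; the API key below is a placeholder::
+
+         client = QRZLogbookClient("XXXX-XXXX-XXXX-XXXX")
+         logbook = client.fetch_logbook(option="MODE:SSB")
+         print(len(logbook.log))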
"""
- params = {
+ data = {
'KEY': self.key,
'ACTION': 'FETCH',
- 'OPTION': ''
+ 'OPTION': option
}
# filter down to only used params
- params = {k: v for k, v in params.items() if v is not None}
-
- response = requests.get(self.base_url, params=params, headers=self.headers)
- if response.status_code == 200:
- return QRZLogbookClient.__stringify(self, response.text)
- else:
- response.raise_for_status()
-
- def insert_record(self, adif, option=None):
- raise NotImplementedError
-
- def delete_record(self, list_logids: list):
- raise NotImplementedError
-
- def check_status(self, list_logids: list):
- raise NotImplementedError
-
-
-
+ data = {k: v for k, v in data.items() if v is not None}
+
+ response = requests.post(self.base_url, data=data,
+ headers=self.headers, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
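+ # QRZ replies with an HTML-escaped, &-delimited key=value string;
+ # unescaping it and treating it as the query part of a dummy URL
+ # lets parse_qs split it into a dict of lists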
+ response_dict = parse_qs(urlparse("ws://a.a/?"
+ + html.unescape(response.text))[4],
+ strict_parsing=True)
+ return self.__stringify(response_dict["ADIF"][0])
+
+ # if we didn't get a usable response, raise an error for the bad status
+ response.raise_for_status()
+
+ #def fetch_logbook_paged(self, per_page:int=50, option:str=None):
+ #
+ # data = {
+ # 'KEY': self.key,
+ # 'ACTION': 'FETCH',
+ # 'OPTION': 'MAX:' + str(per_page) + "," + option
+ # }
+ # # filter down to only used params
+ # response = requests.post(self.base_url, data=data, headers=self.headers)
+ #
+ # raise NotImplementedError
+
+ # def insert_record(self, qso:adif_io.QSO, option:str=None):
+ # """Inserts a single QSO into the logbook corresponding to the\
+ # Client's API Key.
+
+ # Args:
+ # qso (adif_io.QSO): _description_
+ # option (str, optional): _description_. Defaults to None.
+
+ # Raises:
+ # NotImplementedError: _description_
+ # """
+ # data = {
+ # 'KEY': self.key,
+ # 'ACTION': 'INSERT',
+ # 'OPTION': option
+ # }
+ # raise NotImplementedError
+
+ def delete_record(self, list_logids:list) -> dict[str, list[str]]:
+ """Deletes log records from the logbook corresponding to the\
+ Client's API Key.
+
+ Note:
+ This is permanent, and cannot be undone.
+
+ Args:
+ list_logids (list): A list of logid values to delete from the\
+ logbook.
+
+ Raises:
+ HTTPError: An error occurred trying to make a connection.
+
+ Returns:
+ dict[str, list[str]]: A dict containing the returned information\
+ from QRZ. This should include the RESULT, COUNT of records\
+ deleted, and LOGIDs not found, if any.
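+
+ Example:
+     A minimal sketch; the API key and logid below are placeholders::
+
+         client = QRZLogbookClient("XXXX-XXXX-XXXX-XXXX")
+         result = client.delete_record(["123456789"])
+         print(result.get("RESULT"))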
+ """
+ data = {
+ 'KEY': self.key,
+ 'ACTION': 'DELETE',
+ 'LOGIDS': ','.join(list_logids)
+ }
+ response = requests.post(self.base_url, data=data,
+ headers=self.headers, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
+ response_dict = parse_qs(urlparse("ws://a.a/?"
+ + html.unescape(response.text))[4],
+ strict_parsing=True)
+ return response_dict
+
+ # if we didn't get a usable response, raise an error for the bad status
+ response.raise_for_status()
+
+ def check_status(self, list_logids:list=None) -> dict[str, list[str]]:
+ """Gets the status of a logbook based on the API Key supplied\
+ to the Client. This status can include information about the logbook\
+ like the owner, logbook name, DXCC count, confirmed QSOs, start and\
+ end date, etc.
+
+ Args:
+ list_logids (list, optional): A list of LOGIDs. Defaults to None.
+
+ Raises:
+ HTTPError: An error occurred trying to make a connection.
+
+ Returns:
+ dict[str, list[str]]: A dict containing the returned status\
+ information from QRZ. Keys correspond to the name given to the\
+ field by QRZ's API, e.g. DXCC count is 'DXCC_COUNT', confirmed\
+ is 'CONFIRMED', etc.
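+
+ Example:
+     A minimal sketch; the API key below is a placeholder::
+
+         client = QRZLogbookClient("XXXX-XXXX-XXXX-XXXX")
+         status = client.check_status()
+         print(status.get("CONFIRMED"))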
+ """
+ data = {
+ 'KEY': self.key,
+ 'ACTION': 'STATUS',
+ 'LOGIDS': ','.join(list_logids) if list_logids else None
+ }
+
+ response = requests.post(self.base_url, data=data,
+ headers=self.headers, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
+ response_dict = parse_qs(urlparse("ws://a.a/?"
+ + html.unescape(response.text))[4],
+ strict_parsing=True)
+ return response_dict
+
+ # if we didn't get a usable response, raise an error for the bad status
+ response.raise_for_status()
+
### Helpers
- def __stringify(self, adi_log):
- qrz_output = html.unescape(adi_log)
- start_of_log, end_of_log = qrz_output.index('ADIF=') + 5, qrz_output.rindex('\n\n') + 4
- log_adi = "" + qrz_output[start_of_log:end_of_log] #adif_io expects a header, so we're giving it an end of header
+ def __stringify(self, adi_log) -> Logbook:
+ #qrz_output = html.unescape(adi_log)
+ #start_of_log, end_of_log = qrz_output.index('ADIF=') + 5,
+ # qrz_output.rindex('\n\n') + 4
+ # adif_io expects a header, so prepend an end-of-header tag
+ log_adi = "<eoh>" + adi_log
return Logbook(self.key, log_adi)
-
+
class QRZXMLClient:
"""A wrapper for the QRZ XML interface.
This functionality requires being logged in and maintaining a session.
"""
- def __init__(self, username:str=None, password:str=None):
+ def __init__(self, username:str=None, password:str=None, agent:str=None,
+ timeout:int=15):
"""Creates a QRZXMLClient object.
+ Todo: Change this to use a session key instead of username/password.
+
Args:
- username (str, optional): username for QRZ user account. Defaults to None.
- password (str, optional): password for QRZ user account. Defaults to None.
+ username (str, optional): username for QRZ user account.\
+ Defaults to None.
+ password (str, optional): password for QRZ user account.\
+ Defaults to None.
+ agent (str, optional): User agent string to use for requests.\
+ This should identify the program responsible for this request,\
+ so QRZ can hunt you down if your program breaks and spams\
+ them. Defaults to None.
+ timeout (int, optional): Time in seconds to wait for a response.\
+ Defaults to 15.
"""
- self.username = username,
- self.password = password,
- self.agent = 'pyQSP/' + __version__
+ self.username = username
+ self.password = password
+ self.agent = agent if agent is not None else 'pyQSP/' + __version__
self.session_key = None
+ self.timeout = timeout
self.base_url = "https://xmldata.qrz.com/xml/1.34/"
self.headers = {
'User-Agent': self.agent,
@@ -100,29 +238,108 @@ def __init__(self, username:str=None, password:str=None):
'Connection': 'keep-alive'
}
- self.__initiate_session()
+ self._initiate_session()
- def __initiate_session(self):
- """Helper -- Grab us a session key so we're not throwing around passwords"""
+ def _initiate_session(self):
+ """Helper -- Grab us a session key so we're not throwing around\
+ passwords"""
params = {'username': self.username,
'password': self.password,
'agent': self.agent}
- response = requests.get(self.base_url, params=params, headers=self.headers)
+ response = requests.get(self.base_url, params=params,
+ headers=self.headers, timeout=self.timeout)
xml_dict = xmltodict.parse(response.text)
key = xml_dict["QRZDatabase"]["Session"].get("Key")
if not key:
raise QRZInvalidSession()
- else:
- self.session_key = key
- def __verify_session(self):
+ self.session_key = key
+
+ def _verify_session(self):
""" Helper -- Verify our session key is still valid."""
params = {'agent': self.agent,
's': self.session_key}
- response = requests.get(self.base_url, params=params, headers=self.headers)
+ response = requests.get(self.base_url, params=params,
+ headers=self.headers, timeout=self.timeout)
if not xmltodict.parse(response.text)["QRZDatabase"]["Session"].get("Key"):
raise QRZInvalidSession()
+ def lookup_callsign(self, callsign:str) -> OrderedDict[str, Any]:
+ """Looks up a callsign in the QRZ database.
+
+ Args:
+ callsign (str): Callsign to lookup.
+
+ Raises:
+ HTTPError: An error occurred trying to make a connection.
+ QRZInvalidSession: An error occurred trying to instantiate a session.
+
+ Returns:
+ OrderedDict[str, Any]: Data on the callsign looked up, organized as
+ a dict with each returned field as a key.
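+
+ Example:
+     A minimal sketch; credentials are placeholders and the key path into
+     the parsed XML assumes QRZ's usual response layout::
+
+         client = QRZXMLClient("N0CALL", "notarealpassword",
+                               agent="myapp/0.0.1")
+         data = client.lookup_callsign("W1AW")
+         print(data["QRZDatabase"]["Callsign"]["call"])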
+ """
+ params = {
+ 'agent': self.agent,
+ 's': self.session_key,
+ 'callsign': callsign
+ }
+ num_retries = 0
+ while num_retries < MAX_NUM_RETRIES:
+ response = requests.get(self.base_url, params=params,
+ headers=self.headers, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
+ parsed_response = xmltodict.parse(response.text)
+ if not parsed_response["QRZDatabase"]["Session"].get("Key"):
+ self._initiate_session()
+ num_retries += 1
+ else:
+ return parsed_response
+ else:
+ response.raise_for_status()
+ #if we didn't manage to return from a logged in session, raise an error
+ error = parsed_response["QRZDatabase"]["Session"].get("Error")
+ raise QRZInvalidSession(**({'message': error} if error else {}))
+
+ def lookup_dxcc(self, dxcc:str) -> OrderedDict[str, Any]:
+ """Looks up a DXCC by prefix or DXCC number.
+
+ Args:
+ dxcc (str): DXCC or prefix to lookup
+
+ Raises:
+ HTTPError: An error occurred trying to make a connection.
+ QRZInvalidSession: An error occurred trying to instantiate a session.
+
+ Returns:
+ OrderedDict[str, Any]: Data on the DXCC entity looked up, organized as\
+ a dict with each returned field as a key. This data includes\
+ DXCC, CC, name, continent, ituzone, cqzone, timezone, lat,\
+ lon, & notes
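+
+ Example:
+     A minimal sketch; credentials are placeholders and 291 is the DXCC
+     entity number for the United States::
+
+         client = QRZXMLClient("N0CALL", "notarealpassword",
+                               agent="myapp/0.0.1")
+         data = client.lookup_dxcc("291")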
+ """
+ #return self.__lookup_dxcc(dxcc, 0)
+ params = {
+ 'agent': self.agent,
+ 's': self.session_key,
+ 'dxcc': dxcc
+ }
+ num_retries = 0
+ while num_retries < MAX_NUM_RETRIES:
+ response = requests.get(self.base_url, params=params,
+ headers=self.headers, timeout=self.timeout)
+ if response.status_code == requests.codes.ok:
+ parsed_response = xmltodict.parse(response.text)
+ if not parsed_response["QRZDatabase"]["Session"].get("Key"):
+ self._initiate_session()
+ num_retries += 1
+ else:
+ return parsed_response
+ else:
+ response.raise_for_status()
+ #if we didn't manage to return from a logged in session, raise an error
+ error = parsed_response["QRZDatabase"]["Session"].get("Error")
+ raise QRZInvalidSession(**({'message': error} if error else {}))
+
+#endregion
diff --git a/src/qspylib/test_pytest.py b/src/qspylib/test_pytest.py
index 6825bcc..5ba4ad5 100644
--- a/src/qspylib/test_pytest.py
+++ b/src/qspylib/test_pytest.py
@@ -1,27 +1,112 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-"""A PyTest module for confirming functionality works.
+"""A PyTest module for confirming functionality works None of these should be
+called; these are automatically ran by PyTest when pushes are made to the repo,
+or when the user runs `pytest` in the root directory of the project.
"""
+import adif_io
import pytest
-import qspylib.eqsl as eqsl
-import qspylib.lotw as lotw
-import qspylib.qrz as qrz
+import qspylib.logbook
+from qspylib import eqsl
+from qspylib import lotw
+#from qspylib import qrz
+
+#################
+# logbook tests #
+#################
+def test_equality_of_qso():
+ """Tests equality of qspylib.logbook.QSOs functions as expected"""
+ adif_qso = adif_io.QSO({'CALL': 'W1AW', 'BAND': '20m', 'MODE': 'SSB',
+ 'QSO_DATE': '20220101', 'TIME_ON': '0000',
+ 'QSL_RCVD': 'N'})
+ qso1 = qspylib.logbook.QSO('W1AW', '20m', 'SSB', '20220101', '0000', 'N')
+ qso2 = qspylib.logbook.qso_from_adi(adif_qso)
+ assert qso1 == qso2
+
+def test_inequality_of_qso():
+ """Tests inequality of qspylib.logbook.QSOs functions as expected"""
+ adif_qso = adif_io.QSO({'CALL': 'W1AW/4', 'BAND': '20m', 'MODE': 'SSB',
+ 'QSO_DATE': '20220101', 'TIME_ON': '0000',
+ 'QSL_RCVD': 'N'})
+ qso1 = qspylib.logbook.QSO('W1AW', '20m', 'SSB', '20220101', '0000', 'N')
+ qso2 = qspylib.logbook.qso_from_adi(adif_qso)
+ assert qso1 != qso2
+
+def test_generating_a_logbook():
+ """Test generating a qspylib.logbook.Logbook"""
+ adif_string = "a header\
+\
+\
+CA7LLSIGN\
+20M\
+14.20000\
+FT8\
+20240101\
+104500\
+Y\
+20240102\
+"
+ log = qspylib.logbook.Logbook("TE5T", adif_string)
+ assert isinstance(log, qspylib.logbook.Logbook)
+
+def test_logbook_attributes_match():
+ """Confirm that the Logbook's stored QSO matches what we read from text"""
+ adif_string = "a header\
+\
+\
+CA7LLSIGN\
+20M\
+14.20000\
+FT8\
+20240101\
+104500\
+Y\
+20240102\
+"
+ log = qspylib.logbook.Logbook("TE5T", adif_string)
+ assert log.log[0] == qspylib.logbook.qso_from_adi(log.adi[0])
+
+def test_adding_and_removing():
+ """Test adding and removing a QSO"""
+ adif_string = "a header\
+\
+\
+CA7LLSIGN\
+20M\
+14.20000\
+FT8\
+20240101\
+104500\
+Y\
+20240102\
+"
+ log = qspylib.logbook.Logbook("TE5T", adif_string)
+ new_adif_qso = adif_io.QSO({'CALL': 'W1AW/5', 'BAND': '20m', 'MODE': 'SSB',
+ 'QSO_DATE': '20220101', 'TIME_ON': '0000',
+ 'QSL_RCVD': 'N'})
+ log.write_qso(new_adif_qso)
+ log.discard_qso(log.adi[0])
+ assert len(log.log) == 1 and len(log.adi) == 1 and \
+ log.adi[0]['CALL'] == 'W1AW/5' and log.log[0].their_call == 'W1AW/5'
##############
# lotw tests #
##############
def test_pull_a_call_from_last_upload():
+ """Test pulling a known call from the last upload"""
last_uploads = lotw.get_last_upload()
assert 'W1AW' in last_uploads
def test_bad_login_fetch():
+ """Test fetching a logbook with a bad login"""
with pytest.raises(lotw.RetrievalFailure):
lotw_obj = lotw.LOTWClient('**notavalidcall**', '**notarealpassword**')
lotw_obj.fetch_logbook()
def test_bad_login_dxcc():
+ """Test fetching DXCC credits with a bad login"""
with pytest.raises(lotw.RetrievalFailure):
lotw_obj = lotw.LOTWClient('**notavalidcall**', '**notarealpassword**')
lotw_obj.get_dxcc_credit()
@@ -31,34 +116,44 @@ def test_bad_login_dxcc():
###############
def test_verify_a_bad_eqsl():
- is_qsl_real, result = eqsl.verify_eqsl('N5UP', 'TEST', '160m', 'SSB', '01/01/2000')
+ """Test verifying a known bad eqsl"""
+ is_qsl_real, result = eqsl.verify_eqsl('N5UP', 'TEST', '160m', 'SSB', \
+ '01/01/2000')
assert 'Error - Result: QSO not on file' in result and is_qsl_real is False
def test_verify_a_good_eqsl():
- is_qsl_real, result = eqsl.verify_eqsl('ai5zk', 'w1tjl', '10m', 'SSB', '01/20/2024')
+ """Test verifying a known good eqsl"""
+ is_qsl_real, result = eqsl.verify_eqsl('ai5zk', 'w1tjl', '10m', 'SSB', \
+ '01/20/2024')
assert 'Result - QSO on file' in result and is_qsl_real is True
def test_pull_a_known_ag_call():
- callsigns, date = eqsl.get_ag_list()
- assert 'W1AW' in callsigns
+ """Test grabbing a call we know should be in the AG list"""
+ callsigns = eqsl.get_ag_list()
+ assert 'W1AW' in callsigns[0]
def test_pull_a_known_nonag_call():
- callsigns, date = eqsl.get_ag_list()
- assert 'WE3BS' not in callsigns
+ """Test grabbing a call we know shouldn't be in the AG list"""
+ callsigns = eqsl.get_ag_list()
+ assert 'WE3BS' not in callsigns[0]
def test_pull_a_call_from_ag_dated():
- callsigns, date = eqsl.get_ag_list_dated()
- assert callsigns.get('W1AW') >= '0000-00-00'
+ """Test grabbing a call from the AG dated list"""
+ callsigns = eqsl.get_ag_list_dated()
+ assert callsigns[0].get('W1AW') >= '0000-00-00'
def test_pull_a_known_call_from_total_members():
+ """Test grabbing a call that should be in the list of total members"""
all_users = eqsl.get_full_member_list()
assert all_users.get('W1AW')
def test_pull_a_missing_call_from_total_members():
+ """Test grabbing a call that should be missing from the list of total members"""
all_users = eqsl.get_full_member_list()
assert not all_users.get('WE3BS')
def test_get_user_data():
+ """Test getting the data of a user, and verify it's what we expect"""
user = eqsl.get_users_data('W1AW')
assert user[0] == 'FN31pr' and user[1] == 'Y' and not user[2]
@@ -69,4 +164,3 @@ def test_get_user_data():
#def test_qrz_xml_with_invalid_key():
# log_obj = qrz.QRZLogbookAPI('aaaaaaaaaaaaa')
# log = log_obj.fetch_logbook()
-