diff --git a/.circleci/config.yml b/.circleci/config.yml index 850851c..5d756cd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,8 +9,7 @@ test: &test python -V which pip pip -V - pip install -r requirements.txt - python ./test_pagerduty.py + ./test.sh jobs: diff --git a/.gitignore b/.gitignore index b73ce99..98f81cc 100644 --- a/.gitignore +++ b/.gitignore @@ -8,5 +8,4 @@ pagerduty.egg-info/ docs/.buildinfo tmp/ .DS_Store -.tool-versions *.swp diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 0000000..3c35347 --- /dev/null +++ b/.tool-versions @@ -0,0 +1 @@ +python 3.13.2 diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 35611df..cba640e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,3 +1,13 @@ +**2025-04-08: Multi-file refactor - Version 2.0.0** + +This release introduces major structural changes to the module and how it is built and tested. These changes were made for long-term maintainability of the codebase. Previously, it was all contained within a monolithic ``.py`` file (with a single Python script for all unit tests); now it is organized into smaller, appropriately-named Python files. + +Some lesser changes are also included: + +* The docstrings for the ``submit`` and ``send_change_event`` methods of ``EventsApiV2Client`` have been updated to reflect how they are expected to always return ``None``; this was causing Airflow build failures. +* The default user agent header has been updated: the prefix has been changed from ``pagerduty`` to ``python-pagerduty``. +* The version number ``pagerduty.__version__`` is now maintained in ``pyproject.toml`` and discovered through package metadata introspection at import time. In Python versions prior to 3.8, the version will be ``2.*.*`` because the new ``importlib`` feature required for it is unavailable. + **2025-01-02: Migrate from PDPYRAS - Version 1.0.0** * **BREAKING CHANGE:** class names have changed from what they were in ``pdpyras``; see: `PDPYRAS Migration Guide `_ diff --git a/LICENSE b/LICENSE index 0036ff9..fc1af07 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2023 PagerDuty +Copyright (c) 2025 PagerDuty, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/Makefile b/Makefile index fe6d32e..065d3bc 100644 --- a/Makefile +++ b/Makefile @@ -1,18 +1,19 @@ -%: dist +%: build -dist: pagerduty.py setup.py - rm -f dist/* && python setup.py sdist bdist_wheel --universal +build: pagerduty/* pyproject.toml + rm -f dist/* && python3 -m build -docs/index.html: pagerduty.py README.rst CHANGELOG.rst sphinx/source/conf.py sphinx/source/*.rst +docs/index.html: pagerduty/* README.rst CHANGELOG.rst sphinx/source/* rm -fr ./docs && cd sphinx && make html && cd .. && mv sphinx/build/html ./docs && touch ./docs/.nojekyll -docs: docs/index.html +docs: docs/index.html pagerduty/__pycache__ -install: dist - python setup.py install +# Require the module be compiled first so metadata can be used: +pagerduty/__pycache__: + pip install . -testpublish: dist +testpublish: build ./publish-test.sh -publish: dist +publish: build twine upload dist/*.tar.gz dist/*.whl diff --git a/docs/_static/basic.css b/docs/_static/basic.css index cfc60b8..4738b2e 100644 --- a/docs/_static/basic.css +++ b/docs/_static/basic.css @@ -1,12 +1,5 @@ /* - * basic.css - * ~~~~~~~~~ - * * Sphinx stylesheet -- basic theme. - * - * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. 
- * :license: BSD, see LICENSE for details. - * */ /* -- main layout ----------------------------------------------------------- */ @@ -115,15 +108,11 @@ img { /* -- search page ----------------------------------------------------------- */ ul.search { - margin: 10px 0 0 20px; - padding: 0; + margin-top: 10px; } ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; + padding: 5px 0; } ul.search li a { @@ -237,6 +226,10 @@ a.headerlink { visibility: hidden; } +a:visited { + color: #551A8B; +} + h1:hover > a.headerlink, h2:hover > a.headerlink, h3:hover > a.headerlink, @@ -748,14 +741,6 @@ abbr, acronym { cursor: help; } -.translated { - background-color: rgba(207, 255, 207, 0.2) -} - -.untranslated { - background-color: rgba(255, 207, 207, 0.2) -} - /* -- code displays --------------------------------------------------------- */ pre { diff --git a/docs/_static/doctools.js b/docs/_static/doctools.js index d06a71d..0398ebb 100644 --- a/docs/_static/doctools.js +++ b/docs/_static/doctools.js @@ -1,12 +1,5 @@ /* - * doctools.js - * ~~~~~~~~~~~ - * * Base JavaScript utilities for all Sphinx HTML documentation. - * - * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * */ "use strict"; diff --git a/docs/_static/documentation_options.js b/docs/_static/documentation_options.js index 995f333..7f51004 100644 --- a/docs/_static/documentation_options.js +++ b/docs/_static/documentation_options.js @@ -1,6 +1,5 @@ -var DOCUMENTATION_OPTIONS = { - URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), - VERSION: '1.0.0', +const DOCUMENTATION_OPTIONS = { + VERSION: '2.0.0', LANGUAGE: 'en', COLLAPSE_INDEX: false, BUILDER: 'html', diff --git a/docs/_static/language_data.js b/docs/_static/language_data.js index 250f566..c7fe6c6 100644 --- a/docs/_static/language_data.js +++ b/docs/_static/language_data.js @@ -1,19 +1,12 @@ /* - * language_data.js - * ~~~~~~~~~~~~~~~~ - * * This script contains the language-specific data used by searchtools.js, * namely the list of stopwords, stemmer, scorer and splitter. - * - * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. 
- * */ var stopwords = ["a", "and", "are", "as", "at", "be", "but", "by", "for", "if", "in", "into", "is", "it", "near", "no", "not", "of", "on", "or", "such", "that", "the", "their", "then", "there", "these", "they", "this", "to", "was", "will", "with"]; -/* Non-minified version is copied as a separate JS file, is available */ +/* Non-minified version is copied as a separate JS file, if available */ /** * Porter Stemmer diff --git a/docs/_static/pygments.css b/docs/_static/pygments.css index 691aeb8..5f2b0a2 100644 --- a/docs/_static/pygments.css +++ b/docs/_static/pygments.css @@ -6,25 +6,26 @@ span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: .highlight .hll { background-color: #ffffcc } .highlight { background: #eeffcc; } .highlight .c { color: #408090; font-style: italic } /* Comment */ -.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .err { border: 1px solid #F00 } /* Error */ .highlight .k { color: #007020; font-weight: bold } /* Keyword */ -.highlight .o { color: #666666 } /* Operator */ +.highlight .o { color: #666 } /* Operator */ .highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ .highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ .highlight .cp { color: #007020 } /* Comment.Preproc */ .highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ .highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ -.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .cs { color: #408090; background-color: #FFF0F0 } /* Comment.Special */ .highlight .gd { color: #A00000 } /* Generic.Deleted */ .highlight .ge { font-style: italic } /* Generic.Emph */ -.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #F00 } /* Generic.Error */ .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ .highlight .gi { color: #00A000 } /* Generic.Inserted */ -.highlight .go { color: #333333 } /* Generic.Output */ -.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .go { color: #333 } /* Generic.Output */ +.highlight .gp { color: #C65D09; font-weight: bold } /* Generic.Prompt */ .highlight .gs { font-weight: bold } /* Generic.Strong */ .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ -.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .gt { color: #04D } /* Generic.Traceback */ .highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ .highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ .highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ @@ -32,43 +33,43 @@ span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: .highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ .highlight .kt { color: #902000 } /* Keyword.Type */ .highlight .m { color: #208050 } /* Literal.Number */ -.highlight .s { color: #4070a0 } /* Literal.String */ -.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .s { color: #4070A0 } /* Literal.String */ +.highlight .na { color: #4070A0 } /* Name.Attribute */ .highlight .nb { color: #007020 } /* Name.Builtin */ -.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ -.highlight .no { color: #60add5 } /* Name.Constant */ -.highlight .nd { color: #555555; font-weight: 
bold } /* Name.Decorator */ -.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .nc { color: #0E84B5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60ADD5 } /* Name.Constant */ +.highlight .nd { color: #555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #D55537; font-weight: bold } /* Name.Entity */ .highlight .ne { color: #007020 } /* Name.Exception */ -.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nf { color: #06287E } /* Name.Function */ .highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ -.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nn { color: #0E84B5; font-weight: bold } /* Name.Namespace */ .highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ -.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .nv { color: #BB60D5 } /* Name.Variable */ .highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ -.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .w { color: #BBB } /* Text.Whitespace */ .highlight .mb { color: #208050 } /* Literal.Number.Bin */ .highlight .mf { color: #208050 } /* Literal.Number.Float */ .highlight .mh { color: #208050 } /* Literal.Number.Hex */ .highlight .mi { color: #208050 } /* Literal.Number.Integer */ .highlight .mo { color: #208050 } /* Literal.Number.Oct */ -.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ -.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ -.highlight .sc { color: #4070a0 } /* Literal.String.Char */ -.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ -.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ -.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ -.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ -.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ -.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ -.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sa { color: #4070A0 } /* Literal.String.Affix */ +.highlight .sb { color: #4070A0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070A0 } /* Literal.String.Char */ +.highlight .dl { color: #4070A0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070A0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070A0 } /* Literal.String.Double */ +.highlight .se { color: #4070A0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070A0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70A0D0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #C65D09 } /* Literal.String.Other */ .highlight .sr { color: #235388 } /* Literal.String.Regex */ -.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .s1 { color: #4070A0 } /* Literal.String.Single */ .highlight .ss { color: #517918 } /* Literal.String.Symbol */ .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ -.highlight .fm { color: #06287e } /* Name.Function.Magic */ -.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ -.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ -.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ -.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .fm { color: #06287E } /* Name.Function.Magic */ +.highlight .vc { color: #BB60D5 } /* Name.Variable.Class */ +.highlight .vg { 
color: #BB60D5 } /* Name.Variable.Global */ +.highlight .vi { color: #BB60D5 } /* Name.Variable.Instance */ +.highlight .vm { color: #BB60D5 } /* Name.Variable.Magic */ .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/docs/_static/searchtools.js b/docs/_static/searchtools.js index 97d56a7..91f4be5 100644 --- a/docs/_static/searchtools.js +++ b/docs/_static/searchtools.js @@ -1,12 +1,5 @@ /* - * searchtools.js - * ~~~~~~~~~~~~~~~~ - * * Sphinx JavaScript utilities for the full-text search. - * - * :copyright: Copyright 2007-2023 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * */ "use strict"; @@ -20,7 +13,7 @@ if (typeof Scorer === "undefined") { // and returns the new score. /* score: result => { - const [docname, title, anchor, descr, score, filename] = result + const [docname, title, anchor, descr, score, filename, kind] = result return score }, */ @@ -47,6 +40,14 @@ if (typeof Scorer === "undefined") { }; } +// Global search result kind enum, used by themes to style search results. +class SearchResultKind { + static get index() { return "index"; } + static get object() { return "object"; } + static get text() { return "text"; } + static get title() { return "title"; } +} + const _removeChildren = (element) => { while (element && element.lastChild) element.removeChild(element.lastChild); }; @@ -57,16 +58,20 @@ const _removeChildren = (element) => { const _escapeRegExp = (string) => string.replace(/[.*+\-?^${}()|[\]\\]/g, "\\$&"); // $& means the whole matched string -const _displayItem = (item, searchTerms) => { +const _displayItem = (item, searchTerms, highlightTerms) => { const docBuilder = DOCUMENTATION_OPTIONS.BUILDER; - const docUrlRoot = DOCUMENTATION_OPTIONS.URL_ROOT; const docFileSuffix = DOCUMENTATION_OPTIONS.FILE_SUFFIX; const docLinkSuffix = DOCUMENTATION_OPTIONS.LINK_SUFFIX; const showSearchSummary = DOCUMENTATION_OPTIONS.SHOW_SEARCH_SUMMARY; + const contentRoot = document.documentElement.dataset.content_root; - const [docName, title, anchor, descr, score, _filename] = item; + const [docName, title, anchor, descr, score, _filename, kind] = item; let listItem = document.createElement("li"); + // Add a class representing the item's type: + // can be used by a theme's CSS selector for styling + // See SearchResultKind for the class names. 
+ listItem.classList.add(`kind-${kind}`); let requestUrl; let linkUrl; if (docBuilder === "dirhtml") { @@ -75,28 +80,35 @@ const _displayItem = (item, searchTerms) => { if (dirname.match(/\/index\/$/)) dirname = dirname.substring(0, dirname.length - 6); else if (dirname === "index/") dirname = ""; - requestUrl = docUrlRoot + dirname; + requestUrl = contentRoot + dirname; linkUrl = requestUrl; } else { // normal html builders - requestUrl = docUrlRoot + docName + docFileSuffix; + requestUrl = contentRoot + docName + docFileSuffix; linkUrl = docName + docLinkSuffix; } let linkEl = listItem.appendChild(document.createElement("a")); linkEl.href = linkUrl + anchor; linkEl.dataset.score = score; linkEl.innerHTML = title; - if (descr) + if (descr) { listItem.appendChild(document.createElement("span")).innerHTML = " (" + descr + ")"; + // highlight search terms in the description + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); + } else if (showSearchSummary) fetch(requestUrl) .then((responseData) => responseData.text()) .then((data) => { if (data) listItem.appendChild( - Search.makeSearchSummary(data, searchTerms) + Search.makeSearchSummary(data, searchTerms, anchor) ); + // highlight search terms in the summary + if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js + highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted")); }); Search.output.appendChild(listItem); }; @@ -108,27 +120,46 @@ const _finishSearch = (resultCount) => { "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories." ); else - Search.status.innerText = _( - `Search finished, found ${resultCount} page(s) matching the search query.` - ); + Search.status.innerText = Documentation.ngettext( + "Search finished, found one page matching the search query.", + "Search finished, found ${resultCount} pages matching the search query.", + resultCount, + ).replace('${resultCount}', resultCount); }; const _displayNextItem = ( results, resultCount, - searchTerms + searchTerms, + highlightTerms, ) => { // results left, load the summary and display it // this is intended to be dynamic (don't sub resultsCount) if (results.length) { - _displayItem(results.pop(), searchTerms); + _displayItem(results.pop(), searchTerms, highlightTerms); setTimeout( - () => _displayNextItem(results, resultCount, searchTerms), + () => _displayNextItem(results, resultCount, searchTerms, highlightTerms), 5 ); } // search finished, update title and status message else _finishSearch(resultCount); }; +// Helper function used by query() to order search results. +// Each input is an array of [docname, title, anchor, descr, score, filename, kind]. +// Order the results by score (in opposite order of appearance, since the +// `_displayNextItem` function uses pop() to retrieve items) and then alphabetically. +const _orderResultsByScoreThenName = (a, b) => { + const leftScore = a[4]; + const rightScore = b[4]; + if (leftScore === rightScore) { + // same score: sort alphabetically + const leftTitle = a[1].toLowerCase(); + const rightTitle = b[1].toLowerCase(); + if (leftTitle === rightTitle) return 0; + return leftTitle > rightTitle ? -1 : 1; // inverted is intentional + } + return leftScore > rightScore ? 1 : -1; +}; /** * Default splitQuery function. 
Can be overridden in ``sphinx.search`` with a @@ -152,13 +183,26 @@ const Search = { _queued_query: null, _pulse_status: -1, - htmlToText: (htmlString) => { + htmlToText: (htmlString, anchor) => { const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html'); - htmlElement.querySelectorAll(".headerlink").forEach((el) => { el.remove() }); + for (const removalQuery of [".headerlink", "script", "style"]) { + htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() }); + } + if (anchor) { + const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`); + if (anchorContent) return anchorContent.textContent; + + console.warn( + `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.` + ); + } + + // if anchor not specified or not found, fall back to main content const docContent = htmlElement.querySelector('[role="main"]'); - if (docContent !== undefined) return docContent.textContent; + if (docContent) return docContent.textContent; + console.warn( - "Content block not found. Sphinx search tries to obtain it via '[role=main]'. Could you check your theme or template." + "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template." ); return ""; }, @@ -211,6 +255,7 @@ const Search = { searchSummary.classList.add("search-summary"); searchSummary.innerText = ""; const searchList = document.createElement("ul"); + searchList.setAttribute("role", "list"); searchList.classList.add("search"); const out = document.getElementById("search-results"); @@ -231,16 +276,7 @@ const Search = { else Search.deferQuery(query); }, - /** - * execute search (requires search index to be loaded) - */ - query: (query) => { - const filenames = Search._index.filenames; - const docNames = Search._index.docnames; - const titles = Search._index.titles; - const allTitles = Search._index.alltitles; - const indexEntries = Search._index.indexentries; - + _parseQuery: (query) => { // stem the search terms and add them to the correct list const stemmer = new Stemmer(); const searchTerms = new Set(); @@ -276,22 +312,40 @@ const Search = { // console.info("required: ", [...searchTerms]); // console.info("excluded: ", [...excludedTerms]); - // array of [docname, title, anchor, descr, score, filename] - let results = []; + return [query, searchTerms, excludedTerms, highlightTerms, objectTerms]; + }, + + /** + * execute search (requires search index to be loaded) + */ + _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => { + const filenames = Search._index.filenames; + const docNames = Search._index.docnames; + const titles = Search._index.titles; + const allTitles = Search._index.alltitles; + const indexEntries = Search._index.indexentries; + + // Collect multiple result groups to be sorted separately and then ordered. + // Each is an array of [docname, title, anchor, descr, score, filename, kind]. 
+ const normalResults = []; + const nonMainIndexResults = []; + _removeChildren(document.getElementById("search-progress")); - const queryLower = query.toLowerCase(); + const queryLower = query.toLowerCase().trim(); for (const [title, foundTitles] of Object.entries(allTitles)) { - if (title.toLowerCase().includes(queryLower) && (queryLower.length >= title.length/2)) { + if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) { for (const [file, id] of foundTitles) { - let score = Math.round(100 * queryLower.length / title.length) - results.push([ + const score = Math.round(Scorer.title * queryLower.length / title.length); + const boost = titles[file] === title ? 1 : 0; // add a boost for document titles + normalResults.push([ docNames[file], titles[file] !== title ? `${titles[file]} > ${title}` : title, id !== null ? "#" + id : "", null, - score, + score + boost, filenames[file], + SearchResultKind.title, ]); } } @@ -300,46 +354,48 @@ const Search = { // search for explicit entries in index directives for (const [entry, foundEntries] of Object.entries(indexEntries)) { if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) { - for (const [file, id] of foundEntries) { - let score = Math.round(100 * queryLower.length / entry.length) - results.push([ + for (const [file, id, isMain] of foundEntries) { + const score = Math.round(100 * queryLower.length / entry.length); + const result = [ docNames[file], titles[file], id ? "#" + id : "", null, score, filenames[file], - ]); + SearchResultKind.index, + ]; + if (isMain) { + normalResults.push(result); + } else { + nonMainIndexResults.push(result); + } } } } // lookup as object objectTerms.forEach((term) => - results.push(...Search.performObjectSearch(term, objectTerms)) + normalResults.push(...Search.performObjectSearch(term, objectTerms)) ); // lookup as search terms in fulltext - results.push(...Search.performTermsSearch(searchTerms, excludedTerms)); + normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms)); // let the scorer override scores with a custom scoring function - if (Scorer.score) results.forEach((item) => (item[4] = Scorer.score(item))); - - // now sort the results by score (in opposite order of appearance, since the - // display function below uses pop() to retrieve items) and then - // alphabetically - results.sort((a, b) => { - const leftScore = a[4]; - const rightScore = b[4]; - if (leftScore === rightScore) { - // same score: sort alphabetically - const leftTitle = a[1].toLowerCase(); - const rightTitle = b[1].toLowerCase(); - if (leftTitle === rightTitle) return 0; - return leftTitle > rightTitle ? -1 : 1; // inverted is intentional - } - return leftScore > rightScore ? 1 : -1; - }); + if (Scorer.score) { + normalResults.forEach((item) => (item[4] = Scorer.score(item))); + nonMainIndexResults.forEach((item) => (item[4] = Scorer.score(item))); + } + + // Sort each group of results by score and then alphabetically by name. + normalResults.sort(_orderResultsByScoreThenName); + nonMainIndexResults.sort(_orderResultsByScoreThenName); + + // Combine the result groups in (reverse) order. + // Non-main index entries are typically arbitrary cross-references, + // so display them after other results. 
+ let results = [...nonMainIndexResults, ...normalResults]; // remove duplicate search results // note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept @@ -353,14 +409,19 @@ const Search = { return acc; }, []); - results = results.reverse(); + return results.reverse(); + }, + + query: (query) => { + const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query); + const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms); // for debugging //Search.lastresults = results.slice(); // a copy // console.info("search results:", Search.lastresults); // print the results - _displayNextItem(results, results.length, searchTerms); + _displayNextItem(results, results.length, searchTerms, highlightTerms); }, /** @@ -424,6 +485,7 @@ const Search = { descr, score, filenames[match[0]], + SearchResultKind.object, ]); }; Object.keys(objects).forEach((prefix) => @@ -451,21 +513,27 @@ const Search = { // perform the search on the required terms searchTerms.forEach((word) => { const files = []; + // find documents, if any, containing the query word in their text/title term indices + // use Object.hasOwnProperty to avoid mismatching against prototype properties const arr = [ - { files: terms[word], score: Scorer.term }, - { files: titleTerms[word], score: Scorer.title }, + { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term }, + { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title }, ]; // add support for partial matches if (word.length > 2) { const escapedWord = _escapeRegExp(word); - Object.keys(terms).forEach((term) => { - if (term.match(escapedWord) && !terms[word]) - arr.push({ files: terms[term], score: Scorer.partialTerm }); - }); - Object.keys(titleTerms).forEach((term) => { - if (term.match(escapedWord) && !titleTerms[word]) - arr.push({ files: titleTerms[word], score: Scorer.partialTitle }); - }); + if (!terms.hasOwnProperty(word)) { + Object.keys(terms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: terms[term], score: Scorer.partialTerm }); + }); + } + if (!titleTerms.hasOwnProperty(word)) { + Object.keys(titleTerms).forEach((term) => { + if (term.match(escapedWord)) + arr.push({ files: titleTerms[term], score: Scorer.partialTitle }); + }); + } } // no match but word was a required one @@ -481,16 +549,16 @@ const Search = { // set score for the word in each file recordFiles.forEach((file) => { - if (!scoreMap.has(file)) scoreMap.set(file, {}); - scoreMap.get(file)[word] = record.score; + if (!scoreMap.has(file)) scoreMap.set(file, new Map()); + const fileScores = scoreMap.get(file); + fileScores.set(word, record.score); }); }); // create the mapping files.forEach((file) => { - if (fileMap.has(file) && fileMap.get(file).indexOf(word) === -1) - fileMap.get(file).push(word); - else fileMap.set(file, [word]); + if (!fileMap.has(file)) fileMap.set(file, [word]); + else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word); }); }); @@ -522,7 +590,7 @@ const Search = { break; // select one (max) score for the file. 
- const score = Math.max(...wordList.map((w) => scoreMap.get(file)[w])); + const score = Math.max(...wordList.map((w) => scoreMap.get(file).get(w))); // add result to the result list results.push([ docNames[file], @@ -531,6 +599,7 @@ const Search = { null, score, filenames[file], + SearchResultKind.text, ]); } return results; @@ -541,8 +610,8 @@ const Search = { * search summary for a given text. keywords is a list * of stemmed words. */ - makeSearchSummary: (htmlText, keywords) => { - const text = Search.htmlToText(htmlText); + makeSearchSummary: (htmlText, keywords, anchor) => { + const text = Search.htmlToText(htmlText, anchor); if (text === "") return null; const textLower = text.toLowerCase(); diff --git a/docs/_static/sphinx_highlight.js b/docs/_static/sphinx_highlight.js index aae669d..8a96c69 100644 --- a/docs/_static/sphinx_highlight.js +++ b/docs/_static/sphinx_highlight.js @@ -29,14 +29,19 @@ const _highlight = (node, addItems, text, className) => { } span.appendChild(document.createTextNode(val.substr(pos, text.length))); + const rest = document.createTextNode(val.substr(pos + text.length)); parent.insertBefore( span, parent.insertBefore( - document.createTextNode(val.substr(pos + text.length)), + rest, node.nextSibling ) ); node.nodeValue = val.substr(0, pos); + /* There may be more occurrences of search term in this node. So call this + * function recursively on the remaining fragment. + */ + _highlight(rest, addItems, text, className); if (isInSVG) { const rect = document.createElementNS( @@ -140,5 +145,10 @@ const SphinxHighlight = { }, }; -_ready(SphinxHighlight.highlightSearchWords); -_ready(SphinxHighlight.initEscapeListener); +_ready(() => { + /* Do not call highlightSearchWords() when we are on the search page. + * It will highlight words from the *previous* search query. + */ + if (typeof Search === "undefined") SphinxHighlight.highlightSearchWords(); + SphinxHighlight.initEscapeListener(); +}); diff --git a/docs/changelog.html b/docs/changelog.html index 46512c2..ec6e2fe 100644 --- a/docs/changelog.html +++ b/docs/changelog.html @@ -1,21 +1,21 @@ - + - + - Changelog — python-pagerduty 1.0.0 documentation - + Changelog — python-pagerduty 2.0.0 documentation + - - - + + + @@ -75,7 +75,15 @@
[docs/changelog.html hunk, text-extracted; the HTML markup did not survive extraction. The rendered changelog page gains the "2025-04-08: Multi-file refactor - Version 2.0.0" entry, with the same text as the CHANGELOG.rst hunk above, ahead of the existing "2025-01-02: Migrate from PDPYRAS - Version 1.0.0" entry.]
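The changelog item above about metadata-based version discovery can be illustrated with a minimal sketch. This is a hypothetical illustration, not the library's actual version module; the helper name and fallback value are assumptions::

    # Hypothetical sketch of version discovery via installed package metadata,
    # as described in the changelog entry. importlib.metadata is stdlib in 3.8+.
    try:
        from importlib.metadata import PackageNotFoundError, version
    except ImportError:              # Python < 3.8: the importlib feature is unavailable
        PackageNotFoundError = Exception
        version = None

    def _discover_version(default: str = "2.0.0") -> str:
        """Return the installed package version, or a static default."""
        if version is None:
            return default
        try:
            # Reads the installed distribution's metadata (built from pyproject.toml):
            return version("pagerduty")
        except PackageNotFoundError:  # e.g. running from a source checkout
            return default

    __version__ = _discover_version()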

[Generated-documentation hunks, text-extracted: docs/contributing.html (page title bumped from "python-pagerduty 1.0.0 documentation" to "python-pagerduty 2.0.0 documentation") and a general index page (likely docs/genindex.html), which gains index entries for the new pagerduty.api_client, pagerduty.common and pagerduty.rest_api_v2_client modules alongside existing entries such as max_network_attempts and parent (pagerduty.ApiClient attributes).]

diff --git a/docs/user_guide.html (generated documentation, text-extracted)
[User guide hunks covering the retry sections ("Default Behavior", "Exponential Cooldown", "Configuring Retry Behavior"): only heading anchors and the footer change here; the footer copyright now reads "© Copyright 2025 PagerDuty, Inc." (previously "© Copyright 2025, PagerDuty Inc.."). The surviving context text documents the retry logic: by default the client retries status 429 an unlimited number of times with increasing wait times, raises an exception on 401 Unauthorized, and otherwise returns the requests.Response object; after each unsuccessful attempt the sleep time grows exponentially as t_n = a (1 + ρ·r_n) t_(n-1), where, per the ApiClient attributes elsewhere in this diff, a is sleep_timer_base, ρ is stagger_cooldown and r_n is a uniform random number in [0, 1); and the dictionary property pagerduty.ApiClient.retry allows per-HTTP-status customization of retry limits, including overriding the 401 and 429 defaults (not recommended). Page footer: Built with]
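A usage sketch of the retry configuration documented above; the class and attribute names come from this diff, while the credential and the per-status values are illustrative assumptions::

    from pagerduty import RestApiV2Client

    client = RestApiV2Client("EXAMPLE_TOKEN")   # placeholder credential
    # Retry a fixed number of times on a specific status, then return the response:
    client.retry[404] = 3
    # Retry without limit on 502 (-1 means unlimited, like the 429 default):
    client.retry[502] = -1
    # Overall cap on retries across all statuses that have a nonzero limit:
    client.max_http_attempts = 6
    # Randomize the exponential cooldown to avoid the "thundering herd" effect;
    # each sleep grows by sleep_timer_base * (1 + stagger_cooldown * random()):
    client.stagger_cooldown = 0.5

    response = client.get("/users")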
    Sphinx using a diff --git a/pagerduty/__init__.py b/pagerduty/__init__.py new file mode 100644 index 0000000..dcdb484 --- /dev/null +++ b/pagerduty/__init__.py @@ -0,0 +1,46 @@ +from . version import __version__ + +from . api_client import ( + ApiClient, + TIMEOUT, + last_4, + normalize_url +) + +from . events_api_v2_client import EventsApiV2Client + +from . rest_api_v2_client import ( + CANONICAL_PATHS, + CURSOR_BASED_PAGINATION_PATHS, + ENTITY_WRAPPER_CONFIG, + ITERATION_LIMIT, + RestApiV2Client, + auto_json, + canonical_path, + endpoint_matches, + entity_wrappers, + infer_entity_wrapper, + is_path_param, + resource_url, + unwrap, + wrapped_entities +) + +from . common import ( + TEXT_LEN_LIMIT, + deprecated_kwarg, + http_error_message, + plural_name, + requires_success, + singular_name, + successful_response, + truncate_text, + try_decoding +) + +from . errors import ( + Error, + HttpError, + ServerHttpError, + UrlError +) diff --git a/pagerduty/api_client.py b/pagerduty/api_client.py new file mode 100644 index 0000000..ae9865f --- /dev/null +++ b/pagerduty/api_client.py @@ -0,0 +1,458 @@ +# Core +import logging +import sys +import time + +from copy import deepcopy +from random import random + +# PyPI +from requests import Response, Session +from requests import __version__ as REQUESTS_VERSION +from requests.exceptions import RequestException +from urllib3.exceptions import PoolError +from urllib3.exceptions import HTTPError as Urllib3HttpError + +# Local +from . version import __version__ +from . errors import ( + Error, + HttpError, + UrlError +) + +######################## +### DEFAULT SETTINGS ### +######################## + +TIMEOUT = 60 +""" +The default timeout in seconds for any given HTTP request. + +Modifying this value will not affect any preexisting API session instances. +Rather, it will only affect new instances. It is recommended to use +:attr:`ApiClient.timeout` to configure the timeout for a given session. +""" + +############### +### HELPERS ### +############### + +def last_4(secret: str) -> str: + """ + Truncate a sensitive value to its last 4 characters + + :param secret: text to truncate + :returns: + The truncated text + """ + return '*'+str(secret)[-4:] + +def normalize_url(base_url: str, url: str) -> str: + """ + Normalize a URL or path to be a complete API URL before query parameters. + + The ``url`` argument may be a path relative to the base URL or a full URL. + + :param url: + The URL or path to normalize to a full URL. + :param base_url: + The base API URL, excluding any trailing slash, i.e. + "https://api.pagerduty.com" + :returns: + The full API URL. + """ + if url.startswith(base_url): + return url + elif not (url.startswith('http://') or url.startswith('https://')): + return base_url.rstrip('/') + "/" + url.lstrip('/') + else: + raise UrlError( + f"URL {url} does not start with the API base URL {base_url}" + ) + +#################### +### CLIENT CLASS ### +#################### + +class ApiClient(Session): + """ + Base class for making HTTP requests to PagerDuty APIs + + This is an opinionated wrapper of `requests.Session`_, with a few additional + features: + + - The client will reattempt the request with auto-increasing cooldown/retry + intervals, with attempt limits configurable through the :attr:`retry` + attribute. + - When making requests, headers specified ad-hoc in calls to HTTP verb + functions will not replace, but will be merged into, default headers. 
+ - The request URL, if it doesn't already start with the REST API base URL, + will be prepended with the default REST API base URL. + - It will only perform requests with methods as given in the + :attr:`permitted_methods` list, and will raise :class:`Error` for + any other HTTP methods. + + :param api_key: + REST API access token to use for HTTP requests + :param debug: + Sets :attr:`print_debug`. Set to True to enable verbose command line + output. + :type token: str + :type debug: bool + """ + + log = None + """ + A ``logging.Logger`` object for logging messages. By default it is + configured without any handlers and so no messages will be emitted. See: + `Logger Objects + `_. + """ + + max_http_attempts = 10 + """ + The number of times that the client will retry after error statuses, for any + that are defined greater than zero in :attr:`retry`. + """ + + max_network_attempts = 3 + """ + The number of times that connecting to the API will be attempted before + treating the failure as non-transient; a :class:`Error` exception + will be raised if this happens. + """ + + parent = None + """The ``super`` object (`requests.Session`_)""" + + permitted_methods = () + """ + A tuple of the methods permitted by the API which the client implements. + + For instance: + + * The REST API accepts GET, POST, PUT and DELETE. + * The Events API and Change Events APIs only accept POST. + """ + + retry = {} + """ + A dict defining the retry behavior for each HTTP response status code. + + Each key in this dictionary is an int representing a HTTP response code. The + behavior is specified by the int value at each key as follows: + + * ``-1`` to retry without limit. + * ``0`` has no effect; the default behavior will take effect. + * ``n``, where ``n > 0``, to retry ``n`` times (or up + to :attr:`max_http_attempts` total for all statuses, whichever is + encountered first), and then return the final response. + + The default behavior is to retry without limit on status 429, raise an + exception on a 401, and return the `requests.Response`_ object in any other case + (assuming a HTTP response was received from the server). + """ + + sleep_timer = 1.5 + """ + Default initial cooldown time factor for rate limiting and network errors. + + Each time that the request makes a followup request, there will be a delay + in seconds equal to this number times :attr:`sleep_timer_base` to the power + of how many attempts have already been made so far, unless + :attr:`stagger_cooldown` is nonzero. + """ + + sleep_timer_base = 2 + """ + After each retry, the time to sleep before reattempting the API connection + and request will increase by a factor of this amount. + """ + + timeout = TIMEOUT + """ + This is the value sent to `Requests`_ as the ``timeout`` parameter that + determines the TCP read timeout. + """ + + url = "" + + def __init__(self, api_key: str, debug=False): + self.parent = super(ApiClient, self) + self.parent.__init__() + self.api_key = api_key + self.log = logging.getLogger(__name__) + self.print_debug = debug + self.retry = {} + + def after_set_api_key(self): + """ + Setter hook for setting or updating the API key. + + Child classes should implement this to perform additional steps. + """ + pass + + @property + def api_key(self) -> str: + """ + Property representing the credential used for accessing the given API. 
+ """ + return self._api_key + + @api_key.setter + def api_key(self, api_key): + if not (isinstance(api_key, str) and api_key): + raise ValueError("API credential must be a non-empty string.") + self._api_key = api_key + self.headers.update(self.auth_header) + self.after_set_api_key() + + @property + def auth_header(self) -> dict: + """ + Generates the header with the API credential used for authentication. + """ + raise NotImplementedError + + def cooldown_factor(self) -> float: + return self.sleep_timer_base*(1+self.stagger_cooldown*random()) + + def normalize_params(self, params) -> dict: + """ + Modify the user-supplied parameters to ease implementation + + Current behavior: + + * If a parameter's value is of type list, and the parameter name does + not already end in "[]", then the square brackets are appended to keep + in line with the requirement that all set filters' parameter names end + in "[]". + + :returns: + The query parameters after modification + """ + updated_params = {} + for param, value in params.items(): + if type(value) is list and not param.endswith('[]'): + updated_params[param+'[]'] = value + else: + updated_params[param] = value + return updated_params + + def normalize_url(self, url) -> str: + """Compose the URL whether it is a path or an already-complete URL""" + return normalize_url(self.url, url) + + def postprocess(self, response): + """ + Perform supplemental actions immediately after receiving a response. + + This method is called once per request not including retries, and can be + extended in child classes. + """ + pass + + def prepare_headers(self, method, user_headers={}) -> dict: + """ + Append special additional per-request headers. + + :param method: + The HTTP method, in upper case. + :param user_headers: + Headers that can be specified to override default values. + :returns: + The final list of headers to use in the request + """ + headers = deepcopy(self.headers) + if user_headers: + headers.update(user_headers) + return headers + + @property + def print_debug(self) -> bool: + """ + Printing debug flag + + If set to True, the logging level of :attr:`log` is set to + ``logging.DEBUG`` and all log messages are emitted to ``sys.stderr``. + If set to False, the logging level of :attr:`log` is set to + ``logging.NOTSET`` and the debugging log handler that prints messages to + ``sys.stderr`` is removed. This value thus can be toggled to enable and + disable verbose command line output. + + It is ``False`` by default and it is recommended to keep it that way in + production settings. + """ + return self._debug + + @print_debug.setter + def print_debug(self, debug: bool): + self._debug = debug + if debug and not hasattr(self, '_debugHandler'): + self.log.setLevel(logging.DEBUG) + self._debugHandler = logging.StreamHandler() + self.log.addHandler(self._debugHandler) + elif not debug and hasattr(self, '_debugHandler'): + self.log.setLevel(logging.NOTSET) + self.log.removeHandler(self._debugHandler) + delattr(self, '_debugHandler') + # else: no-op; only happens if debug is set to the same value twice + + def request(self, method, url, **kwargs) -> Response: + """ + Make a generic PagerDuty API request. + + :param method: + The request method to use. Case-insensitive. May be one of get, put, + post or delete. + :param url: + The path/URL to request. If it does not start with the base URL, the + base URL will be prepended. + :param **kwargs: + Custom keyword arguments to pass to ``requests.Session.request``. 
+ :type method: str + :type url: str + :returns: + The `requests.Response`_ object corresponding to the HTTP response + """ + sleep_timer = self.sleep_timer + network_attempts = 0 + http_attempts = {} + method = method.strip().upper() + if method not in self.permitted_methods: + m_str = ', '.join(self.permitted_methods) + raise Error(f"Method {method} not supported by this API. " \ + f"Permitted methods: {m_str}") + req_kw = deepcopy(kwargs) + full_url = self.normalize_url(url) + endpoint = "%s %s"%(method.upper(), full_url) + + # Add in any headers specified in keyword arguments: + headers = kwargs.get('headers', {}) + req_kw.update({ + 'headers': self.prepare_headers(method, user_headers=headers), + 'stream': False, + 'timeout': self.timeout + }) + + # Special changes to user-supplied parameters, for convenience + if 'params' in kwargs and kwargs['params']: + req_kw['params'] = self.normalize_params(kwargs['params']) + + # Make the request (and repeat w/cooldown if the rate limit is reached): + while True: + try: + response = self.parent.request(method, full_url, **req_kw) + self.postprocess(response) + except (Urllib3HttpError, PoolError, RequestException) as e: + network_attempts += 1 + if network_attempts > self.max_network_attempts: + error_msg = f"{endpoint}: Non-transient network " \ + 'error; exceeded maximum number of attempts ' \ + f"({self.max_network_attempts}) to connect to the API." + raise Error(error_msg) from e + sleep_timer *= self.cooldown_factor() + self.log.warning( + "%s: HTTP or network error: %s. retrying in %g seconds.", + endpoint, e.__class__.__name__, sleep_timer) + time.sleep(sleep_timer) + continue + + status = response.status_code + retry_logic = self.retry.get(status, 0) + if not response.ok and retry_logic != 0: + # Take special action as defined by the retry logic + if retry_logic != -1: + # Retry a specific number of times (-1 implies infinite) + if http_attempts.get(status, 0)>=retry_logic or \ + sum(http_attempts.values())>self.max_http_attempts: + lower_limit = retry_logic + if lower_limit > self.max_http_attempts: + lower_limit = self.max_http_attempts + self.log.error( + f"%s: Non-transient HTTP error: exceeded " \ + 'maximum number of attempts (%d) to make a ' \ + 'successful request. Currently encountering ' \ + 'status %d.', endpoint, lower_limit, status) + return response + http_attempts[status] = 1 + http_attempts.get(status, 0) + sleep_timer *= self.cooldown_factor() + self.log.warning("%s: HTTP error (%d); retrying in %g seconds.", + endpoint, status, sleep_timer) + time.sleep(sleep_timer) + continue + elif status == 429: + sleep_timer *= self.cooldown_factor() + self.log.debug("%s: Hit API rate limit (status 429); " \ + "retrying in %g seconds", endpoint, sleep_timer) + time.sleep(sleep_timer) + continue + elif status == 401: + # Stop. Authentication failed. We shouldn't try doing any more, + # because we'll run into the same problem later anyway. + raise HttpError( + "Received 401 Unauthorized response from the API. The key " + "(...%s) may be invalid or deactivated."%self.trunc_key, + response) + else: + # All went according to plan. + return response + + @property + def stagger_cooldown(self) -> float: + """ + Randomizing factor for wait times between retries during rate limiting. 
+ + If set to number greater than 0, the sleep time for rate limiting will + (for each successive sleep) be adjusted by a factor of one plus a + uniformly-distributed random number between 0 and 1 times this number, + on top of the base sleep timer :attr:`sleep_timer_base`. + + For example: + + * If this is 1, and :attr:`sleep_timer_base` is 2 (default), then after + each status 429 response, the sleep time will change overall by a + random factor between 2 and 4, whereas if it is zero, it will change + by a factor of 2. + * If :attr:`sleep_timer_base` is 1, then the cooldown time will be + adjusted by a random factor between one and one plus this number. + + If the number is set to zero, then this behavior is effectively + disabled, and the cooldown factor (by which the sleep time is adjusted) + will just be :attr:`sleep_timer_base` + + Setting this to a nonzero number helps avoid the "thundering herd" + effect that can potentially be caused by many API clients making + simultaneous concurrent API requests and consequently waiting for the + same amount of time before retrying. It is currently zero by default + for consistent behavior with previous versions. + """ + if hasattr(self, '_stagger_cooldown'): + return self._stagger_cooldown + else: + return 0 + + @stagger_cooldown.setter + def stagger_cooldown(self, val): + if type(val) not in [float, int] or val<0: + raise ValueError("Cooldown randomization factor stagger_cooldown " + "must be a positive real number") + self._stagger_cooldown = val + + @property + def trunc_key(self) -> str: + """Truncated key for secure display/identification purposes.""" + return last_4(self.api_key) + + @property + def user_agent(self) -> str: + return 'python-pagerduty/%s python-requests/%s Python/%d.%d'%( + __version__, + REQUESTS_VERSION, + sys.version_info.major, + sys.version_info.minor + ) + + diff --git a/pagerduty/common.py b/pagerduty/common.py new file mode 100644 index 0000000..e22d46f --- /dev/null +++ b/pagerduty/common.py @@ -0,0 +1,171 @@ +# Core +from typing import Union +from warnings import warn + +# PyPI +from requests import Response + +from . errors import ( + Error, + HttpError, + ServerHttpError, + UrlError +) + +######################## +### DEFAULT SETTINGS ### +######################## + +TEXT_LEN_LIMIT = 100 +""" +The longest permissible length of API content to include in error messages. +""" + +######################## +### HELPER FUNCTIONS ### +######################## + +def deprecated_kwarg(deprecated_name: str, details=None, method=None): + """ + Raises a warning if a deprecated keyword argument is used. + + :param deprecated_name: The name of the deprecated function + :param details: An optional message to append to the deprecation message + :param method: An optional method name + """ + details_msg = '' + method_msg = '' + if method is not None: + method_msg = f" of {method}" + if details is not None: + details_msg = f" {details}" + warn( + f"Keyword argument \"{deprecated_name}\"{method_msg} is deprecated."+details_msg + ) + +def http_error_message(r: Response, context=None) -> str: + """ + Formats a message describing a HTTP error. + + :param r: + The response object. 
+ :param context: + A description of when the error was received, or None to not include it + :returns: + The message to include in the HTTP error + """ + received_http_response = bool(r.status_code) + endpoint = "%s %s"%(r.request.method.upper(), r.request.url) + context_msg = "" + if type(context) is str: + context_msg=f" in {context}" + if received_http_response and not r.ok: + err_type = 'unknown' + if r.status_code / 100 == 4: + err_type = 'client' + elif r.status_code / 100 == 5: + err_type = 'server' + tr_bod = truncate_text(r.text) + return f"{endpoint}: API responded with {err_type} error (status " \ + f"{r.status_code}){context_msg}: {tr_bod}" + elif not received_http_response: + return f"{endpoint}: Network or other unknown error{context_msg}" + else: + return f"{endpoint}: Success (status {r.status_code}) but an " \ + f"expectation still failed{context_msg}" + +def plural_name(obj_type: str) -> str: + """ + Pluralizes a name, i.e. the API name from the ``type`` property + + :param obj_type: + The object type, i.e. ``user`` or ``user_reference`` + :returns: + The name of the resource, i.e. the last part of the URL for the + resource's index URL + """ + if obj_type.endswith('_reference'): + # Strip down to basic type if it's a reference + obj_type = obj_type[:obj_type.index('_reference')] + if obj_type.endswith('y'): + # Because English + return obj_type[:-1]+'ies' + else: + return obj_type+'s' + +def requires_success(method): + """ + Decorator that validates HTTP responses. + """ + doc = method.__doc__ + def call(self, url, **kw): + return successful_response(method(self, url, **kw)) + call.__doc__ = doc + return call + +def singular_name(r_name: str) -> str: + """ + Singularizes a name, i.e. for the entity wrapper in a POST request + + :para r_name: + The "resource" name, i.e. "escalation_policies", a plural noun that + forms the part of the canonical path identifying what kind of resource + lives in the collection there, for an API that follows classic wrapped + entity naming patterns. + :returns: + The singularized name + """ + if r_name.endswith('ies'): + # Because English + return r_name[:-3]+'y' + else: + return r_name.rstrip('s') + +def successful_response(r: Response, context=None) -> Response: + """Validates the response as successful. + + Returns the response if it was successful; otherwise, raises an exception. + + :param r: + Response object corresponding to the response received. + :param context: + A description of when the HTTP request is happening, for error reporting + :returns: + The response object, if it was successful + """ + if r.ok and bool(r.status_code): + return r + elif r.status_code / 100 == 5: + raise ServerHttpError(http_error_message(r, context=context), r) + elif bool(r.status_code): + raise HttpError(http_error_message(r, context=context), r) + else: + raise Error(http_error_message(r, context=context)) + +def truncate_text(text: str) -> str: + """Truncates a string longer than :attr:`pagerduty.common.TEXT_LEN_LIMIT` + + :param text: The string to truncate if longer than the limit. + """ + if len(text) > TEXT_LEN_LIMIT: + return text[:TEXT_LEN_LIMIT-1]+'...' + else: + return text + +def try_decoding(r: Response) -> Union[dict, list, str]: + """ + JSON-decode a response body + + Returns the decoded body if successful; raises :class:`ServerHttpError` + otherwise. 
+ + :param r: + The response object + """ + try: + return r.json() + except ValueError as e: + raise ServerHttpError( + "API responded with invalid JSON: " + truncate_text(r.text), + r, + ) diff --git a/pagerduty/errors.py b/pagerduty/errors.py new file mode 100644 index 0000000..4aec7bb --- /dev/null +++ b/pagerduty/errors.py @@ -0,0 +1,79 @@ +from requests import Response + +################## +### EXCEPTIONS ### +################## + +class UrlError(Exception): + """ + Exception class for unsupported URLs or malformed input. + """ + pass + +class Error(Exception): + """ + General API errors base class. + + Note, the name of this class does not imply it solely includes errors + experienced by the client or HTTP status 4xx responses, but descendants can + include issues with the API backend. + """ + + response = None + """ + The HTTP response object, if a response was successfully received. + + In the case of network errors, this property will be None. + """ + + def __init__(self, message, response=None): + self.msg = message + self.response = response + super(Error, self).__init__(message) + +class HttpError(Error): + """ + Error class representing errors strictly associated with HTTP responses. + + This class was created to make it easier to more cleanly handle errors by + way of a class that is guaranteed to have its ``response`` be a valid + `requests.Response`_ object. + + Whereas, the more generic :class:`Error` could also be used + to denote such things as non-transient network errors wherein no response + was received from the API. + For instance, instead of this: + + :: + + try: + user = session.rget('/users/PABC123') + except pagerduty.Error as e: + if e.response is not None: + print("HTTP error: "+str(e.response.status_code)) + else: + raise e + + one could write this: + + :: + + try: + user = session.rget('/users/PABC123') + except pagerduty.HttpError as e: + print("HTTP error: "+str(e.response.status_code)) + """ + + def __init__(self, message, response: Response): + super(HttpError, self).__init__(message, response=response) + +class ServerHttpError(HttpError): + """ + Error class representing failed expectations made of the server. + + This is raised in cases where the response schema differs from the expected schema + because of an API bug, or because it's an early access endpoint and changes before + GA, or in cases of HTTP status 5xx where a successful response is required. + """ + pass + diff --git a/pagerduty/events_api_v2_client.py b/pagerduty/events_api_v2_client.py new file mode 100644 index 0000000..c91ecd9 --- /dev/null +++ b/pagerduty/events_api_v2_client.py @@ -0,0 +1,277 @@ +# Core +from copy import deepcopy +from datetime import datetime + +# PyPI +from requests import Response + +# Local +from . api_client import ApiClient +from . common import ( + deprecated_kwarg, + successful_response, + try_decoding, + truncate_text +) + +class EventsApiV2Client(ApiClient): + + """ + Session class for submitting events to the PagerDuty v2 Events API. + + Implements methods for submitting events to PagerDuty through the Events API, + including change events, and inherits from :class:`pagerduty.ApiClient`. 
For more + details on usage of this API, refer to the `Events API v2 documentation + `_ + """ + + permitted_methods = ('POST',) + + url = "https://events.pagerduty.com" + + def __init__(self, api_key: str, debug=False): + super(EventsApiV2Client, self).__init__(api_key, debug) + # See: https://developer.pagerduty.com/docs/3d063fd4814a6-events-api-v2-overview#response-codes--retry-logic + self.retry[500] = 2 # internal server error + self.retry[502] = 4 # bad gateway + self.retry[503] = 6 # service unavailable + self.retry[504] = 6 # gateway timeout + + @property + def auth_header(self) -> dict: + return {} + + def acknowledge(self, dedup_key) -> str: + """ + Acknowledge an alert via Events API. + + :param dedup_key: + The deduplication key of the alert to set to the acknowledged state. + :returns: + The deduplication key + """ + return self.send_event('acknowledge', dedup_key=dedup_key) + + @property + def event_timestamp(self) -> str: + return datetime.utcnow().isoformat()+'Z' + + def post(self, *args, **kw) -> Response: + """ + Override of ``requests.Session.post`` + + Adds the ``routing_key`` parameter to the body before sending. + """ + if 'json' in kw and hasattr(kw['json'], 'update'): + kw['json'].update({'routing_key': self.api_key}) + return super(EventsApiV2Client, self).post(*args, **kw) + + def prepare_headers(self, method, user_headers=None) -> dict: + """ + Add user agent and content type headers for Events API requests. + + :param user_headers: User-supplied headers that will override defaults + :returns: + The final list of headers to use in the request + """ + headers = {} + headers.update(self.headers) + headers.update({ + 'Content-Type': 'application/json', + 'User-Agent': self.user_agent, + }) + if user_headers is not None: + headers.update(user_headers) + return headers + + def resolve(self, dedup_key) -> str: + """ + Resolve an alert via Events API. + + :param dedup_key: + The deduplication key of the alert to resolve. + """ + return self.send_event('resolve', dedup_key=dedup_key) + + def send_change_event(self, payload=None, links=None, routing_key=None): + """ + Send a change event to the v2 Change Events API. + + See: https://developer.pagerduty.com/docs/events-api-v2/send-change-events/ + + :param payload: + A dictionary object with keys ``summary``, ``source``, ``timestamp`` and + ``custom_details`` as described in the above documentation. + :param links: + A list of dictionary objects each with keys ``href`` and ``text`` + representing the target and display text of each link + :param routing_key: + (Deprecated) the routing key. The parameter is set automatically to the + :attr:`ApiClient.api_key` property in the final payload and this argument is ignored. + """ + if payload is None: + payload = {} + if links is None: + links = [] + if routing_key is not None: + deprecated_kwarg( + 'routing_key', + method='EventsApiV2Client.send_change_event' + ) + event = {'payload': deepcopy(payload)} + if links: + event['links'] = deepcopy(links) + successful_response( + self.post('/v2/change/enqueue', json=event), + context="submitting change event", + ) + + def send_event(self, action, dedup_key=None, **properties) -> str: + """ + Send an event to the v2 Events API. + + See: https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 + + :param action: + The action to perform through the Events API: trigger, acknowledge + or resolve. + :param dedup_key: + The deduplication key; used for determining event uniqueness and + associating actions with existing incidents. 
+ :param **properties: + Additional properties to set, i.e. if ``action`` is ``trigger`` + this would include ``payload``. + :type action: str + :type dedup_key: str + :returns: + The deduplication key of the incident + """ + + actions = ('trigger', 'acknowledge', 'resolve') + if action not in actions: + raise ValueError("Event action must be one of: "+', '.join(actions)) + + event = {'event_action':action} + + event.update(properties) + if isinstance(dedup_key, str): + event['dedup_key'] = dedup_key + elif not action == 'trigger': + raise ValueError("The dedup_key property is required for" + "event_action=%s events, and it must be a string."%action) + response = successful_response( + self.post('/v2/enqueue', json=event), + context='submitting an event to the events API', + ) + response_body = try_decoding(response) + if type(response_body) is not dict or 'dedup_key' not in response_body: + err_msg = 'Malformed response body from the events API; it is ' \ + 'not a dict that has a key named "dedup_key" after ' \ + 'decoding. Body = '+truncate_text(response.text) + raise ServerHttpError(err_msg, response) + return response_body['dedup_key'] + + def submit(self, summary, source=None, custom_details=None, links=None, + timestamp=None): + """ + Submit a change event. + + See: https://developer.pagerduty.com/docs/send-change-event + + This is a wrapper method for :attr:`send_change_event` that composes an event + payload from keyword arguments and an auto-generated event timestamp. To send an + event with a wholly custom payload, use :attr:`send_change_event` instead. + + :param summary: + Summary / brief description of the change, for ``payload.summary``. + :param source: + A human-readable name identifying the source of the change, for the + ``payload.source`` event property. + :param custom_details: + A dictionary object to use as the ``payload.custom_details`` property. + :param links: + A list of dict objects to use as the ``links`` property of the event. + :param timestamp: + Specifies an event timestamp. Must be an ISO8601-format date/time. + :type summary: str + :type source: str + :type custom_details: dict + :type links: list + :type timestamp: str + """ + local_var = locals()['custom_details'] + if not (local_var is None or isinstance(local_var, dict)): + raise ValueError("custom_details must be a dict") + if timestamp is None: + timestamp = self.event_timestamp + event = { + 'payload': { + 'summary': summary, + 'timestamp': timestamp, + } + } + if isinstance(source, str): + event['payload']['source'] = source + if isinstance(custom_details, dict): + event['payload']['custom_details'] = custom_details + if links: + event['links'] = links + self.send_change_event(**event) + + def trigger(self, summary, source, dedup_key=None, severity='critical', + payload=None, custom_details=None, images=None, links=None) -> str: + """ + Send an alert-triggering event + + :param summary: + Summary / brief description of what is wrong. + :param source: + A human-readable name identifying the system that is affected. + :param dedup_key: + The deduplication key; used for determining event uniqueness and + associating actions with existing incidents. + :param severity: + Alert severity. Sets the ``payload.severity`` property. + :param payload: + Set the payload directly. 
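As a usage sketch for ``submit`` (defined above), with illustrative values: it composes ``payload.summary``, ``payload.source``, ``payload.timestamp`` and ``payload.custom_details`` and hands them to ``send_change_event``, returning ``None``:

.. code-block:: python

    from pagerduty import EventsApiV2Client

    client = EventsApiV2Client("ROUTING_KEY_PLACEHOLDER")  # placeholder key

    # Returns None; the Change Events API response body is not used:
    client.submit(
        "Deployed build 1.2.3",
        source="deploy-pipeline",
        custom_details={"build": "1.2.3"},
        links=[{"href": "https://example.com/builds/123", "text": "Build log"}],
    )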
Can be used in conjunction with other + parameters that also set payload properties; these properties will + be merged into the default payload, and any properties in this + parameter will take precedence except with regard to + ``custom_details``. + :param custom_details: + The ``payload.custom_details`` property of the payload. Will + override the property set in the ``payload`` parameter if given. + :param images: + Set the ``images`` property of the event. + :param links: + Set the ``links`` property of the event. + :type action: str + :type custom_details: dict + :type dedup_key: str + :type images: list + :type links: list + :type payload: dict + :type severity: str + :type source: str + :type summary: str + :returns: + The deduplication key of the incident, if any. + """ + for local in ('payload', 'custom_details'): + local_var = locals()[local] + if not (local_var is None or type(local_var) is dict): + raise ValueError(local+" must be a dict") + event = {'payload': {'summary':summary, 'source':source, + 'severity':severity}} + if type(payload) is dict: + event['payload'].update(payload) + if type(custom_details) is dict: + details = event.setdefault('payload', {}).get('custom_details', {}) + details.update(custom_details) + event['payload']['custom_details'] = details + if images: + event['images'] = images + if links: + event['links'] = links + return self.send_event('trigger', dedup_key=dedup_key, **event) + diff --git a/pagerduty.py b/pagerduty/rest_api_v2_client.py similarity index 60% rename from pagerduty.py rename to pagerduty/rest_api_v2_client.py index 890edbc..29dc777 100644 --- a/pagerduty.py +++ b/pagerduty/rest_api_v2_client.py @@ -1,30 +1,29 @@ - -# Copyright (c) PagerDuty. -# See LICENSE for details. - -# Standard libraries -import logging -import sys -import time +# Core from copy import deepcopy -from datetime import datetime -from random import random from typing import Iterator, Union from warnings import warn -# Upstream components on which this client is based: -from requests import Response, Session -from requests import __version__ as REQUESTS_VERSION - -# HTTP client exceptions: -from urllib3.exceptions import HTTPError, PoolError -from requests.exceptions import RequestException - -__version__ = '1.0.0' +# PyPI +from requests import Response + +# Local +from . api_client import ApiClient, normalize_url +from . common import ( + requires_success, + singular_name, + successful_response, + truncate_text, + try_decoding, +) +from . errors import ( + ServerHttpError, + UrlError +) ####################### ### CLIENT DEFAULTS ### ####################### + ITERATION_LIMIT = 1e4 """ The maximum position of a result in classic pagination. @@ -37,21 +36,7 @@ `_. """ -TIMEOUT = 60 -""" -The default timeout in seconds for any given HTTP request. - -Modifying this value will not affect any preexisting API session instances. -Rather, it will only affect new instances. It is recommended to use -:attr:`ApiClient.timeout` to configure the timeout for a given session. -""" - -TEXT_LEN_LIMIT = 100 -""" -The longest permissible length of API content to include in error messages. -""" - -# List of canonical API paths +# List of canonical REST API paths # # Supporting a new API for entity wrapping will require adding its patterns to # this list. If it doesn't follow standard naming conventions, it will also @@ -430,17 +415,18 @@ based on a matching logic. 
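To make the merge logic in ``trigger`` (above) concrete, here is an illustrative call in which both ``payload`` and ``custom_details`` are given: properties in ``payload`` are merged into the defaults built from ``summary``, ``source`` and ``severity``, while the explicit ``custom_details`` argument takes precedence over any ``custom_details`` nested in ``payload``:

.. code-block:: python

    from pagerduty import EventsApiV2Client

    client = EventsApiV2Client("ROUTING_KEY_PLACEHOLDER")  # placeholder key

    dedup_key = client.trigger(
        "Queue depth is high",
        "worker-fleet",
        severity="warning",
        payload={"component": "queue", "custom_details": {"depth": 1000}},
        custom_details={"depth": 2500},
    )
    # The event is sent with payload.custom_details == {"depth": 2500}.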
If no keys are found that match, it is assumed that the API endpoint follows classic entity wrapping conventions, and the wrapper name can be inferred based on those conventions (see -:attr:`infer_entity_wrapper`). Any new API that does not follow these +:attr:`pagerduty.infer_entity_wrapper`). Any new API that does not follow these conventions should therefore be given an entry in this dictionary in order to properly support it for entity wrapping. -Each of the keys should be a capitalized HTTP method (or ``*`` to match any -method), followed by a space, followed by a canonical path i.e. as returned by -:attr:`canonical_path` and included in :attr:`CANONICAL_PATHS`. Each value -is either a tuple with request and response body wrappers (if they differ), a -string (if they are the same for both cases) or ``None`` (if wrapping is -disabled and the data is to be marshaled or unmarshaled as-is). Values in tuples -can also be None to denote that either the request or response is unwrapped. +Each of the keys should be a capitalized HTTP method (or ``*`` to match any method), +followed by a space, followed by a canonical path i.e. as returned by +:attr:`pagerduty.canonical_path` and included in +:attr:`pagerduty.rest_api_v2_client.CANONICAL_PATHS`. Each value is either a tuple with +request and response body wrappers (if they differ), a string (if they are the same for +both cases) or ``None`` (if wrapping is disabled and the data is to be marshaled or +unmarshaled as-is). Values in tuples can also be None to denote that either the request +or response is unwrapped. An endpoint, under the design logic of this client, is said to have entity wrapping if the body (request or response) has only one property containing @@ -454,9 +440,9 @@ """ -#################### -### URL HANDLING ### -#################### +################################ +### REST API V2 URL HANDLING ### +################################ def canonical_path(base_url: str, url: str) -> str: """ @@ -466,9 +452,9 @@ def canonical_path(base_url: str, url: str) -> str: within REST API v2 it belongs to. Explicitly supported canonical paths are defined in the list - :attr:`CANONICAL_PATHS` and are the path part of any given API's URL. The - path for a given API is what is shown at the top of its reference page, i.e. - ``/users/{id}/contact_methods`` for retrieving a user's contact methods + :attr:`pagerduty.rest_api_v2.CANONICAL_PATHS` and are the path part of any given + API's URL. The path for a given API is what is shown at the top of its reference + page, i.e. ``/users/{id}/contact_methods`` for retrieving a user's contact methods (GET) or creating a new one (POST). :param base_url: The base URL of the API @@ -515,8 +501,9 @@ def endpoint_matches(endpoint_pattern: str, method: str, path: str) -> bool: """ Whether an endpoint (method and canonical path) matches a given pattern - This is the filtering logic used for finding the appropriate entry in - :attr:`ENTITY_WRAPPER_CONFIG` to use for a given method and API path. + This is the filtering logic used for finding the appropriate entry in + :attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG` to use for a given method + and API path. 
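As a purely illustrative sketch of the three value forms described for ``ENTITY_WRAPPER_CONFIG`` (these entries are hypothetical and are not copied from the module's actual configuration):

.. code-block:: python

    EXAMPLE_WRAPPER_CONFIG = {
        # Same wrapper name for both request and response bodies:
        '* /example_resources/{id}/settings': 'settings',
        # Request and response bodies use different wrapper names:
        'POST /example_resources/{id}/subscribers': ('subscribers', 'subscriptions'),
        # Wrapping disabled; the body is marshaled/unmarshaled as-is:
        'GET /example_resources/{id}/raw_data': None,
    }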
:param endpoint_pattern: The endpoint pattern in the form ``METHOD PATH`` where ``METHOD`` is the @@ -544,32 +531,9 @@ def is_path_param(path_node: str) -> bool: """ return path_node.startswith('{') and path_node.endswith('}') -def normalize_url(base_url: str, url: str) -> str: - """ - Normalize a URL or path to be a complete API URL before query parameters. - - The ``url`` argument may be a path relative to the base URL or a full URL. - - :param url: - The URL or path to normalize to a full URL. - :param base_url: - The base API URL, excluding any trailing slash, i.e. - "https://api.pagerduty.com" - :returns: - The full API URL. - """ - if url.startswith(base_url): - return url - elif not (url.startswith('http://') or url.startswith('https://')): - return base_url.rstrip('/') + "/" + url.lstrip('/') - else: - raise UrlError( - f"URL {url} does not start with the API base URL {base_url}" - ) - -####################### -### ENTITY WRAPPING ### -####################### +############################### +### ENTITY WRAPPING HELPERS ### +############################### def entity_wrappers(method: str, path: str) -> tuple: """ @@ -630,10 +594,13 @@ def infer_entity_wrapper(method: str, path: str) -> str: the v2 REST API, where the wrapper name is predictable from the path and method. This is the default logic applied to determine the wrapper name based on the path if there is no explicit entity wrapping defined for the - given path in :attr:`ENTITY_WRAPPER_CONFIG`. + given path in :attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG`. - :param method: The HTTP method - :param path: A canonical API path i.e. from :attr:`CANONICAL_PATHS` + :param method: + The HTTP method + :param path: + A canonical API path i.e. from + :attr:`pagerduty.rest_api_v2_client.CANONICAL_PATHS` """ m = method.upper() path_nodes = path.split('/') @@ -703,16 +670,6 @@ def call(self, url, **kw): call.__doc__ = doc return call -def requires_success(method): - """ - Decorator that validates HTTP responses. - """ - doc = method.__doc__ - def call(self, url, **kw): - return successful_response(method(self, url, **kw)) - call.__doc__ = doc - return call - def resource_url(method): """ API call decorator that allows passing a resource dict as the path/URL @@ -792,796 +749,9 @@ def call(self, url, **kw): call.__doc__ = doc return call - -######################## -### HELPER FUNCTIONS ### -######################## - -def deprecated_kwarg(deprecated_name: str, details=None, method=None): - """ - Raises a warning if a deprecated keyword argument is used. - - :param deprecated_name: The name of the deprecated function - :param details: An optional message to append to the deprecation message - :param method: An optional method name - """ - details_msg = '' - method_msg = '' - if method is not None: - method_msg = f" of {method}" - if details is not None: - details_msg = f" {details}" - warn( - f"Keyword argument \"{deprecated_name}\"{method_msg} is deprecated."+details_msg - ) - -def http_error_message(r: Response, context=None) -> str: - """ - Formats a message describing a HTTP error. - - :param r: - The response object. 
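For a quick illustration of the classic conventions that ``infer_entity_wrapper`` encodes (the expected return values below follow the convention examples given in the contributing guide; treat them as a sketch rather than a formal contract):

.. code-block:: python

    from pagerduty.rest_api_v2_client import infer_entity_wrapper

    # Index endpoints wrap with the plural resource name:
    infer_entity_wrapper('GET', '/services')        # -> 'services'
    # Creating a resource with POST wraps with the singular name:
    infer_entity_wrapper('POST', '/services')       # -> 'service'
    # Individual-resource endpoints wrap with the singular name:
    infer_entity_wrapper('GET', '/services/{id}')   # -> 'service'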
- :param context: - A description of when the error was received, or None to not include it - :returns: - The message to include in the HTTP error - """ - received_http_response = bool(r.status_code) - endpoint = "%s %s"%(r.request.method.upper(), r.request.url) - context_msg = "" - if type(context) is str: - context_msg=f" in {context}" - if received_http_response and not r.ok: - err_type = 'unknown' - if r.status_code / 100 == 4: - err_type = 'client' - elif r.status_code / 100 == 5: - err_type = 'server' - tr_bod = truncate_text(r.text) - return f"{endpoint}: API responded with {err_type} error (status " \ - f"{r.status_code}){context_msg}: {tr_bod}" - elif not received_http_response: - return f"{endpoint}: Network or other unknown error{context_msg}" - else: - return f"{endpoint}: Success (status {r.status_code}) but an " \ - f"expectation still failed{context_msg}" - -def last_4(secret: str) -> str: - """ - Truncate a sensitive value to its last 4 characters - - :param secret: text to truncate - :returns: - The truncated text - """ - return '*'+str(secret)[-4:] - -def plural_name(obj_type: str) -> str: - """ - Pluralizes a name, i.e. the API name from the ``type`` property - - :param obj_type: - The object type, i.e. ``user`` or ``user_reference`` - :returns: - The name of the resource, i.e. the last part of the URL for the - resource's index URL - """ - if obj_type.endswith('_reference'): - # Strip down to basic type if it's a reference - obj_type = obj_type[:obj_type.index('_reference')] - if obj_type.endswith('y'): - # Because English - return obj_type[:-1]+'ies' - else: - return obj_type+'s' - -def singular_name(r_name: str) -> str: - """ - Singularizes a name, i.e. for the entity wrapper in a POST request - - :para r_name: - The "resource" name, i.e. "escalation_policies", a plural noun that - forms the part of the canonical path identifying what kind of resource - lives in the collection there, for an API that follows classic wrapped - entity naming patterns. - :returns: - The singularized name - """ - if r_name.endswith('ies'): - # Because English - return r_name[:-3]+'y' - else: - return r_name.rstrip('s') - -def successful_response(r: Response, context=None) -> Response: - """Validates the response as successful. - - Returns the response if it was successful; otherwise, raises an exception. - - :param r: - Response object corresponding to the response received. - :param context: - A description of when the HTTP request is happening, for error reporting - :returns: - The response object, if it was successful - """ - if r.ok and bool(r.status_code): - return r - elif r.status_code / 100 == 5: - raise ServerHttpError(http_error_message(r, context=context), r) - elif bool(r.status_code): - raise HttpError(http_error_message(r, context=context), r) - else: - raise Error(http_error_message(r, context=context)) - -def truncate_text(text: str) -> str: - """Truncates a string longer than :attr:`TEXT_LEN_LIMIT` - - :param text: The string to truncate if longer than the limit. - """ - if len(text) > TEXT_LEN_LIMIT: - return text[:TEXT_LEN_LIMIT-1]+'...' - else: - return text - -def try_decoding(r: Response) -> Union[dict, list, str]: - """ - JSON-decode a response body - - Returns the decoded body if successful; raises :class:`ServerHttpError` - otherwise. 
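These helper functions are removed from the monolithic module here but remain available at the package level (as exercised by ``tests/common_test.py`` below). A brief sketch of their behavior, based on the implementations shown:

.. code-block:: python

    import pagerduty

    pagerduty.plural_name('escalation_policy')      # -> 'escalation_policies'
    pagerduty.plural_name('user_reference')         # -> 'users'
    pagerduty.singular_name('escalation_policies')  # -> 'escalation_policy'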
- - :param r: - The response object - """ - try: - return r.json() - except ValueError as e: - raise ServerHttpError( - "API responded with invalid JSON: " + truncate_text(r.text), - r, - ) - -############### -### CLASSES ### -############### - -class ApiClient(Session): - """ - Base class for making HTTP requests to PagerDuty APIs - - This is an opinionated wrapper of `requests.Session`_, with a few additional - features: - - - The client will reattempt the request with auto-increasing cooldown/retry - intervals, with attempt limits configurable through the :attr:`retry` - attribute. - - When making requests, headers specified ad-hoc in calls to HTTP verb - functions will not replace, but will be merged into, default headers. - - The request URL, if it doesn't already start with the REST API base URL, - will be prepended with the default REST API base URL. - - It will only perform requests with methods as given in the - :attr:`permitted_methods` list, and will raise :class:`Error` for - any other HTTP methods. - - :param api_key: - REST API access token to use for HTTP requests - :param debug: - Sets :attr:`print_debug`. Set to True to enable verbose command line - output. - :type token: str - :type debug: bool - """ - - log = None - """ - A ``logging.Logger`` object for logging messages. By default it is - configured without any handlers and so no messages will be emitted. See: - `Logger Objects - `_. - """ - - max_http_attempts = 10 - """ - The number of times that the client will retry after error statuses, for any - that are defined greater than zero in :attr:`retry`. - """ - - max_network_attempts = 3 - """ - The number of times that connecting to the API will be attempted before - treating the failure as non-transient; a :class:`Error` exception - will be raised if this happens. - """ - - parent = None - """The ``super`` object (`requests.Session`_)""" - - permitted_methods = () - """ - A tuple of the methods permitted by the API which the client implements. - - For instance: - - * The REST API accepts GET, POST, PUT and DELETE. - * The Events API and Change Events APIs only accept POST. - """ - - retry = {} - """ - A dict defining the retry behavior for each HTTP response status code. - - Each key in this dictionary is an int representing a HTTP response code. The - behavior is specified by the int value at each key as follows: - - * ``-1`` to retry without limit. - * ``0`` has no effect; the default behavior will take effect. - * ``n``, where ``n > 0``, to retry ``n`` times (or up - to :attr:`max_http_attempts` total for all statuses, whichever is - encountered first), and then return the final response. - - The default behavior is to retry without limit on status 429, raise an - exception on a 401, and return the `requests.Response`_ object in any other case - (assuming a HTTP response was received from the server). - """ - - sleep_timer = 1.5 - """ - Default initial cooldown time factor for rate limiting and network errors. - - Each time that the request makes a followup request, there will be a delay - in seconds equal to this number times :attr:`sleep_timer_base` to the power - of how many attempts have already been made so far, unless - :attr:`stagger_cooldown` is nonzero. - """ - - sleep_timer_base = 2 - """ - After each retry, the time to sleep before reattempting the API connection - and request will increase by a factor of this amount. 
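The ``ApiClient`` base class and the attributes documented above now live in ``pagerduty.api_client`` and carry the same documented semantics. A sketch of tuning retry behavior on a client instance (the API token is a placeholder):

.. code-block:: python

    from pagerduty import RestApiV2Client

    client = RestApiV2Client("API_TOKEN_PLACEHOLDER")

    # Retry up to 4 times on status 503, then return the final response:
    client.retry[503] = 4
    # Status 429 is retried without limit by default; an explicit -1 is equivalent:
    client.retry[429] = -1
    # Cap the total number of retried requests across all statuses:
    client.max_http_attempts = 6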
- """ - - timeout = TIMEOUT - """ - This is the value sent to `Requests`_ as the ``timeout`` parameter that - determines the TCP read timeout. - """ - - url = "" - - def __init__(self, api_key: str, debug=False): - self.parent = super(ApiClient, self) - self.parent.__init__() - self.api_key = api_key - self.log = logging.getLogger(__name__) - self.print_debug = debug - self.retry = {} - - def after_set_api_key(self): - """ - Setter hook for setting or updating the API key. - - Child classes should implement this to perform additional steps. - """ - pass - - @property - def api_key(self) -> str: - """ - Property representing the credential used for accessing the given API. - """ - return self._api_key - - @api_key.setter - def api_key(self, api_key): - if not (isinstance(api_key, str) and api_key): - raise ValueError("API credential must be a non-empty string.") - self._api_key = api_key - self.headers.update(self.auth_header) - self.after_set_api_key() - - @property - def auth_header(self) -> dict: - """ - Generates the header with the API credential used for authentication. - """ - raise NotImplementedError - - def cooldown_factor(self) -> float: - return self.sleep_timer_base*(1+self.stagger_cooldown*random()) - - def normalize_params(self, params) -> dict: - """ - Modify the user-supplied parameters to ease implementation - - Current behavior: - - * If a parameter's value is of type list, and the parameter name does - not already end in "[]", then the square brackets are appended to keep - in line with the requirement that all set filters' parameter names end - in "[]". - - :returns: - The query parameters after modification - """ - updated_params = {} - for param, value in params.items(): - if type(value) is list and not param.endswith('[]'): - updated_params[param+'[]'] = value - else: - updated_params[param] = value - return updated_params - - def normalize_url(self, url) -> str: - """Compose the URL whether it is a path or an already-complete URL""" - return normalize_url(self.url, url) - - def postprocess(self, response): - """ - Perform supplemental actions immediately after receiving a response. - - This method is called once per request not including retries, and can be - extended in child classes. - """ - pass - - def prepare_headers(self, method, user_headers={}) -> dict: - """ - Append special additional per-request headers. - - :param method: - The HTTP method, in upper case. - :param user_headers: - Headers that can be specified to override default values. - :returns: - The final list of headers to use in the request - """ - headers = deepcopy(self.headers) - if user_headers: - headers.update(user_headers) - return headers - - @property - def print_debug(self) -> bool: - """ - Printing debug flag - - If set to True, the logging level of :attr:`log` is set to - ``logging.DEBUG`` and all log messages are emitted to ``sys.stderr``. - If set to False, the logging level of :attr:`log` is set to - ``logging.NOTSET`` and the debugging log handler that prints messages to - ``sys.stderr`` is removed. This value thus can be toggled to enable and - disable verbose command line output. - - It is ``False`` by default and it is recommended to keep it that way in - production settings. 
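For reference, the effect of ``normalize_params`` shown above (the token, parameter names and ID values are illustrative):

.. code-block:: python

    from pagerduty import RestApiV2Client

    client = RestApiV2Client("API_TOKEN_PLACEHOLDER")
    params = {'team_ids': ['PABC123', 'PDEF456'], 'query': 'dev-ops'}
    client.normalize_params(params)
    # -> {'team_ids[]': ['PABC123', 'PDEF456'], 'query': 'dev-ops'}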
- """ - return self._debug - - @print_debug.setter - def print_debug(self, debug: bool): - self._debug = debug - if debug and not hasattr(self, '_debugHandler'): - self.log.setLevel(logging.DEBUG) - self._debugHandler = logging.StreamHandler() - self.log.addHandler(self._debugHandler) - elif not debug and hasattr(self, '_debugHandler'): - self.log.setLevel(logging.NOTSET) - self.log.removeHandler(self._debugHandler) - delattr(self, '_debugHandler') - # else: no-op; only happens if debug is set to the same value twice - - def request(self, method, url, **kwargs) -> Response: - """ - Make a generic PagerDuty API request. - - :param method: - The request method to use. Case-insensitive. May be one of get, put, - post or delete. - :param url: - The path/URL to request. If it does not start with the base URL, the - base URL will be prepended. - :param **kwargs: - Custom keyword arguments to pass to ``requests.Session.request``. - :type method: str - :type url: str - :returns: - The `requests.Response`_ object corresponding to the HTTP response - """ - sleep_timer = self.sleep_timer - network_attempts = 0 - http_attempts = {} - method = method.strip().upper() - if method not in self.permitted_methods: - m_str = ', '.join(self.permitted_methods) - raise Error(f"Method {method} not supported by this API. " \ - f"Permitted methods: {m_str}") - req_kw = deepcopy(kwargs) - full_url = self.normalize_url(url) - endpoint = "%s %s"%(method.upper(), full_url) - - # Add in any headers specified in keyword arguments: - headers = kwargs.get('headers', {}) - req_kw.update({ - 'headers': self.prepare_headers(method, user_headers=headers), - 'stream': False, - 'timeout': self.timeout - }) - - # Special changes to user-supplied parameters, for convenience - if 'params' in kwargs and kwargs['params']: - req_kw['params'] = self.normalize_params(kwargs['params']) - - # Make the request (and repeat w/cooldown if the rate limit is reached): - while True: - try: - response = self.parent.request(method, full_url, **req_kw) - self.postprocess(response) - except (HTTPError, PoolError, RequestException) as e: - network_attempts += 1 - if network_attempts > self.max_network_attempts: - error_msg = f"{endpoint}: Non-transient network " \ - 'error; exceeded maximum number of attempts ' \ - f"({self.max_network_attempts}) to connect to the API." - raise Error(error_msg) from e - sleep_timer *= self.cooldown_factor() - self.log.warning( - "%s: HTTP or network error: %s. retrying in %g seconds.", - endpoint, e.__class__.__name__, sleep_timer) - time.sleep(sleep_timer) - continue - - status = response.status_code - retry_logic = self.retry.get(status, 0) - if not response.ok and retry_logic != 0: - # Take special action as defined by the retry logic - if retry_logic != -1: - # Retry a specific number of times (-1 implies infinite) - if http_attempts.get(status, 0)>=retry_logic or \ - sum(http_attempts.values())>self.max_http_attempts: - lower_limit = retry_logic - if lower_limit > self.max_http_attempts: - lower_limit = self.max_http_attempts - self.log.error( - f"%s: Non-transient HTTP error: exceeded " \ - 'maximum number of attempts (%d) to make a ' \ - 'successful request. 
Currently encountering ' \ - 'status %d.', endpoint, lower_limit, status) - return response - http_attempts[status] = 1 + http_attempts.get(status, 0) - sleep_timer *= self.cooldown_factor() - self.log.warning("%s: HTTP error (%d); retrying in %g seconds.", - endpoint, status, sleep_timer) - time.sleep(sleep_timer) - continue - elif status == 429: - sleep_timer *= self.cooldown_factor() - self.log.debug("%s: Hit API rate limit (status 429); " \ - "retrying in %g seconds", endpoint, sleep_timer) - time.sleep(sleep_timer) - continue - elif status == 401: - # Stop. Authentication failed. We shouldn't try doing any more, - # because we'll run into the same problem later anyway. - raise HttpError( - "Received 401 Unauthorized response from the API. The key " - "(...%s) may be invalid or deactivated."%self.trunc_key, - response) - else: - # All went according to plan. - return response - - @property - def stagger_cooldown(self) -> float: - """ - Randomizing factor for wait times between retries during rate limiting. - - If set to number greater than 0, the sleep time for rate limiting will - (for each successive sleep) be adjusted by a factor of one plus a - uniformly-distributed random number between 0 and 1 times this number, - on top of the base sleep timer :attr:`sleep_timer_base`. - - For example: - - * If this is 1, and :attr:`sleep_timer_base` is 2 (default), then after - each status 429 response, the sleep time will change overall by a - random factor between 2 and 4, whereas if it is zero, it will change - by a factor of 2. - * If :attr:`sleep_timer_base` is 1, then the cooldown time will be - adjusted by a random factor between one and one plus this number. - - If the number is set to zero, then this behavior is effectively - disabled, and the cooldown factor (by which the sleep time is adjusted) - will just be :attr:`sleep_timer_base` - - Setting this to a nonzero number helps avoid the "thundering herd" - effect that can potentially be caused by many API clients making - simultaneous concurrent API requests and consequently waiting for the - same amount of time before retrying. It is currently zero by default - for consistent behavior with previous versions. - """ - if hasattr(self, '_stagger_cooldown'): - return self._stagger_cooldown - else: - return 0 - - @stagger_cooldown.setter - def stagger_cooldown(self, val): - if type(val) not in [float, int] or val<0: - raise ValueError("Cooldown randomization factor stagger_cooldown " - "must be a positive real number") - self._stagger_cooldown = val - - @property - def trunc_key(self) -> str: - """Truncated key for secure display/identification purposes.""" - return last_4(self.api_key) - - @property - def user_agent(self) -> str: - return 'pagerduty/%s python-requests/%s Python/%d.%d'%( - __version__, - REQUESTS_VERSION, - sys.version_info.major, - sys.version_info.minor - ) - -class EventsApiV2Client(ApiClient): - - """ - Session class for submitting events to the PagerDuty v2 Events API. - - Implements methods for submitting events to PagerDuty through the Events API, - including change events, and inherits from :class:`pagerduty.ApiClient`. 
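To make the cooldown arithmetic concrete: ``cooldown_factor()`` (shown earlier) returns ``sleep_timer_base * (1 + stagger_cooldown * random())``, so with the default ``sleep_timer_base`` of 2 the values below follow (token is a placeholder):

.. code-block:: python

    from pagerduty import RestApiV2Client

    client = RestApiV2Client("API_TOKEN_PLACEHOLDER")

    client.stagger_cooldown = 0  # default: each retry multiplies the sleep time by exactly 2
    client.stagger_cooldown = 1  # each retry's factor is uniformly distributed in [2, 4)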
For more - details on usage of this API, refer to the `Events API v2 documentation - `_ - """ - - permitted_methods = ('POST',) - - url = "https://events.pagerduty.com" - - def __init__(self, api_key: str, debug=False): - super(EventsApiV2Client, self).__init__(api_key, debug) - # See: https://developer.pagerduty.com/docs/3d063fd4814a6-events-api-v2-overview#response-codes--retry-logic - self.retry[500] = 2 # internal server error - self.retry[502] = 4 # bad gateway - self.retry[503] = 6 # service unavailable - self.retry[504] = 6 # gateway timeout - - @property - def auth_header(self) -> dict: - return {} - - def acknowledge(self, dedup_key) -> str: - """ - Acknowledge an alert via Events API. - - :param dedup_key: - The deduplication key of the alert to set to the acknowledged state. - :returns: - The deduplication key - """ - return self.send_event('acknowledge', dedup_key=dedup_key) - - @property - def event_timestamp(self) -> str: - return datetime.utcnow().isoformat()+'Z' - - def post(self, *args, **kw) -> Response: - """ - Override of ``requests.Session.post`` - - Adds the ``routing_key`` parameter to the body before sending. - """ - if 'json' in kw and hasattr(kw['json'], 'update'): - kw['json'].update({'routing_key': self.api_key}) - return super(EventsApiV2Client, self).post(*args, **kw) - - def prepare_headers(self, method, user_headers={}) -> dict: - """ - Add user agent and content type headers for Events API requests. - - :param user_headers: User-supplied headers that will override defaults - :returns: - The final list of headers to use in the request - """ - headers = {} - headers.update(self.headers) - headers.update({ - 'Content-Type': 'application/json', - 'User-Agent': self.user_agent, - }) - headers.update(user_headers) - return headers - - def resolve(self, dedup_key) -> str: - """ - Resolve an alert via Events API. - - :param dedup_key: - The deduplication key of the alert to resolve. - """ - return self.send_event('resolve', dedup_key=dedup_key) - - def send_change_event(self, payload={}, links=[], routing_key=None): - """ - Send a change event to the v2 Change Events API. - - See: https://developer.pagerduty.com/docs/events-api-v2/send-change-events/ - - :param payload: - A dictionary object with keys ``summary``, ``source``, ``timestamp`` and - ``custom_details`` as described in the above documentation. - :param links: - A list of dictionary objects each with keys ``href`` and ``text`` - representing the target and display text of each link - :param routing_key: - (Deprecated) the routing key. The parameter is set automatically to the - :attr:`ApiClient.api_key` property in the final payload and this argument is ignored. - :returns: - The response ID - """ - if routing_key is not None: - deprecated_kwarg( - 'routing_key', - method='EventsApiV2Client.send_change_event' - ) - event = {'payload': deepcopy(payload)} - if links: - event['links'] = deepcopy(links) - response = self.post('/v2/change/enqueue', json=event) - response_body = try_decoding(successful_response( - response, - context="submitting change event", - )) - - def send_event(self, action, dedup_key=None, **properties) -> str: - """ - Send an event to the v2 Events API. - - See: https://v2.developer.pagerduty.com/docs/send-an-event-events-api-v2 - - :param action: - The action to perform through the Events API: trigger, acknowledge - or resolve. - :param dedup_key: - The deduplication key; used for determining event uniqueness and - associating actions with existing incidents. 
- :param **properties: - Additional properties to set, i.e. if ``action`` is ``trigger`` - this would include ``payload``. - :type action: str - :type dedup_key: str - :returns: - The deduplication key of the incident - """ - - actions = ('trigger', 'acknowledge', 'resolve') - if action not in actions: - raise ValueError("Event action must be one of: "+', '.join(actions)) - - event = {'event_action':action} - - event.update(properties) - if isinstance(dedup_key, str): - event['dedup_key'] = dedup_key - elif not action == 'trigger': - raise ValueError("The dedup_key property is required for" - "event_action=%s events, and it must be a string."%action) - response = successful_response( - self.post('/v2/enqueue', json=event), - context='submitting an event to the events API', - ) - response_body = try_decoding(response) - if type(response_body) is not dict or 'dedup_key' not in response_body: - err_msg = 'Malformed response body from the events API; it is ' \ - 'not a dict that has a key named "dedup_key" after ' \ - 'decoding. Body = '+truncate_text(response.text) - raise ServerHttpError(err_msg, response) - return response_body['dedup_key'] - - def submit(self, summary, source=None, custom_details=None, links=None, - timestamp=None) -> str: - """ - Submit a change event. - - See: https://developer.pagerduty.com/docs/send-change-event - - :param summary: - Summary / brief description of the change, for ``payload.summary``. - :param source: - A human-readable name identifying the source of the change, for the - ``payload.source`` event property. - :param custom_details: - A dictionary object to use as the ``payload.custom_details`` property. - :param links: - A list of dict objects to use as the ``links`` property of the event. - :param timestamp: - Specifies an event timestamp. Must be an ISO8601-format date/time. - :type summary: str - :type source: str - :type custom_details: dict - :type links: list - :type timestamp: str - :returns: - The response ID - """ - local_var = locals()['custom_details'] - if not (local_var is None or isinstance(local_var, dict)): - raise ValueError("custom_details must be a dict") - if timestamp is None: - timestamp = self.event_timestamp - event = { - 'payload': { - 'summary': summary, - 'timestamp': timestamp, - } - } - if isinstance(source, str): - event['payload']['source'] = source - if isinstance(custom_details, dict): - event['payload']['custom_details'] = custom_details - if links: - event['links'] = links - return self.send_change_event(**event) - - def trigger(self, summary, source, dedup_key=None, severity='critical', - payload=None, custom_details=None, images=None, links=None) -> str: - """ - Send an alert-triggering event - - :param summary: - Summary / brief description of what is wrong. - :param source: - A human-readable name identifying the system that is affected. - :param dedup_key: - The deduplication key; used for determining event uniqueness and - associating actions with existing incidents. - :param severity: - Alert severity. Sets the ``payload.severity`` property. - :param payload: - Set the payload directly. Can be used in conjunction with other - parameters that also set payload properties; these properties will - be merged into the default payload, and any properties in this - parameter will take precedence except with regard to - ``custom_details``. - :param custom_details: - The ``payload.custom_details`` property of the payload. Will - override the property set in the ``payload`` parameter if given. 
- :param images: - Set the ``images`` property of the event. - :param links: - Set the ``links`` property of the event. - :type action: str - :type custom_details: dict - :type dedup_key: str - :type images: list - :type links: list - :type payload: dict - :type severity: str - :type source: str - :type summary: str - :returns: - The deduplication key of the incident, if any. - """ - for local in ('payload', 'custom_details'): - local_var = locals()[local] - if not (local_var is None or type(local_var) is dict): - raise ValueError(local+" must be a dict") - event = {'payload': {'summary':summary, 'source':source, - 'severity':severity}} - if type(payload) is dict: - event['payload'].update(payload) - if type(custom_details) is dict: - details = event.setdefault('payload', {}).get('custom_details', {}) - details.update(custom_details) - event['payload']['custom_details'] = details - if images: - event['images'] = images - if links: - event['links'] = links - return self.send_event('trigger', dedup_key=dedup_key, **event) +#################### +### CLIENT CLASS ### +#################### class RestApiV2Client(ApiClient): """ @@ -2210,79 +1380,6 @@ def total_call_time(self) -> float: @property def trunc_token(self) -> str: - """Truncated token for secure display/identification purposes.""" - return last_4(self.api_key) - -class UrlError(Exception): - """ - Exception class for unsupported URLs or malformed input. - """ - pass - -class Error(Exception): - """ - General API errors base class. - - Note, the name of this class does not imply it solely includes errors - experienced by the client or HTTP status 4xx responses, but descendants can - include issues with the API backend. - """ - - response = None - """ - The HTTP response object, if a response was successfully received. - - In the case of network errors, this property will be None. - """ - - def __init__(self, message, response=None): - self.msg = message - self.response = response - super(Error, self).__init__(message) - -class HttpError(Error): - """ - Error class representing errors strictly associated with HTTP responses. - - This class was created to make it easier to more cleanly handle errors by - way of a class that is guaranteed to have its ``response`` be a valid - `requests.Response`_ object. - - Whereas, the more generic :class:`Error` could also be used - to denote such things as non-transient network errors wherein no response - was recevied from the API. - - For instance, instead of this: - - :: - - try: - user = session.rget('/users/PABC123') - except pagerduty.Error as e: - if e.response is not None: - print("HTTP error: "+str(e.response.status_code)) - else: - raise e - - one could write this: - - :: - - try: - user = session.rget('/users/PABC123') - except pagerduty.HttpError as e: - print("HTTP error: "+str(e.response.status_code)) - """ - - def __init__(self, message, response: Response): - super(HttpError, self).__init__(message, response=response) - -class ServerHttpError(HttpError): - """ - Error class representing failed expectations made of the server. - - This is raised in cases where the response schema differs from the expected schema - because of an API bug, or because it's an early access endpoint and changes before - GA, or in cases of HTTP status 5xx where a successful response is required. - """ - pass + """Truncated API key for secure display/identification purposes.""" + warn("Property trunc_token is deprecated. 
Use trunc_key instead.") + return self.trunc_key diff --git a/pagerduty/version.py b/pagerduty/version.py new file mode 100644 index 0000000..3e52213 --- /dev/null +++ b/pagerduty/version.py @@ -0,0 +1,35 @@ +import os +import sys + +UNKNOWN_VERSION = "2.*.*" + +def get_version_via_tomllib(): + import tomllib + base_project_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + with open(os.path.join(base_project_path, 'pyproject.toml'), 'rb') as f: + pkg_meta = tomllib.load(f) + return pkg_meta.get('project', {}).get('version', UNKNOWN_VERSION) + +def get_version(): + if sys.version_info.major == 3 and sys.version_info.minor < 8: + # There is no way to obtain the version from the package metadata because the + # necessary importlib features have not yet been added. At some point we need to + # drop support for these versions. We only care about major version 3 because + # version 2 is already not supported. + return UNKNOWN_VERSION + else: + try: + # Use package metadata introspection to get the version: + from importlib.metadata import version + return version(__package__) + except: + # No package has been built/installed yet, so this is a stopgap to avoid + # errors in local unit tests and documentation builds: + if sys.version_info.minor < 11: + # tomllib was introduced in 3.11 + return UNKNOWN_VERSION + else: + # Use tomllib so the correct version number goes into the doc build: + return get_version_via_tomllib() + +__version__ = get_version() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ffa1232 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,24 @@ +[project] +name = "pagerduty" +version = "2.0.0" +description = "Clients for PagerDuty's Public APIs" +requires-python = ">=3.6" +dependencies = ["certifi", "requests", "urllib3"] +maintainers = [ + {name = "Demitri Morgan", email = "demitri@pagerduty.com"}, + {name = "Mandi Walls", email = "mwalls@pagerduty.com"}, + {name = "PagerDuty Customer Support", email = "support@pagerduty.com"} +] + +[tool.setuptools.packages.find] +where = ["."] +include = ["pagerduty"] + +[project.urls] +Documentation = "https://pagerduty.github.io/python-pagerduty" +Repository = "https://github.com/PagerDuty/python-pagerduty" +PyPI = "https://pypi.org/project/pagerduty" + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" diff --git a/requirements-publish.txt b/requirements-publish.txt index 844044d..b75c409 100644 --- a/requirements-publish.txt +++ b/requirements-publish.txt @@ -1,3 +1,4 @@ +build sphinx sphinxcontrib-websupport sphinx-rtd-theme diff --git a/setup.py b/setup.py deleted file mode 100644 index b3c6834..0000000 --- a/setup.py +++ /dev/null @@ -1,19 +0,0 @@ -from setuptools import setup - -__version__ = '1.0.0' - -if __name__ == '__main__': - setup( - name='pagerduty', - description="python-pagerduty", - long_description="Clients for PagerDuty's APIs", - py_modules=['pagerduty'], - version=__version__, - license='MIT', - url='https://pagerduty.github.io/python-pagerduty', - download_url='https://pypi.org/project/pagerduty/', - install_requires=['certifi', 'requests', 'urllib3'], - author='PagerDuty', - author_email='support@pagerduty.com', - python_requires='>=3.6' - ) diff --git a/sphinx/source/conf.py b/sphinx/source/conf.py index ae5f46a..9fffef7 100644 --- a/sphinx/source/conf.py +++ b/sphinx/source/conf.py @@ -7,12 +7,14 @@ os.path.dirname(__file__), '..', '..' 
))) -from setup import __version__ +from pagerduty import __version__ import pagerduty # -- Project information ----------------------------------------------------- +import datetime +year = datetime.datetime.now(datetime.UTC).year project = u'python-pagerduty' -copyright = u'2025, PagerDuty Inc.' +copyright = f'{year} PagerDuty, Inc' author = u'Demitri Morgan ' # The short X.Y version diff --git a/sphinx/source/contributing.rst b/sphinx/source/contributing.rst index f9e9e38..141a2a6 100644 --- a/sphinx/source/contributing.rst +++ b/sphinx/source/contributing.rst @@ -16,24 +16,32 @@ Initial Setup To be able to rebuild the documentation and release a new version, first make sure you have `make `_ and `pip `_ installed in your shell -environment. +environment, as well as Python version 3.11 or later. + +The recommended way of setting up the Python environment is using `asdf-vm +`_, i.e. run ``asdf install`` in your clone of the +repository. Next, install Python dependencies for building and publishing as well as testing locally: .. code-block:: shell - pip install -r requirements.txt + pip install . pip install -r requirements-publish.txt  -Running Unit Tests ------------------- -Assuming that all dependencies are installed, running ``test_pagerduty.py`` in -the root path of the repository will run the unit test suite: +If asdf-vm was used to install Python locally, run the following after the above: .. code-block:: shell - ./test_pagerduty.py + asdf reshim python + +Finally, run ``test.sh`` in the root path of the repository to run the unit +test suite locally, or run this command by itself: + +.. code-block:: shell + + python -m unittest discover -p '*_test.py' -s tests Maintaining Entity Wrapper Configuration ---------------------------------------- @@ -60,8 +68,8 @@ antipattern-handling configuration. This system requires two global variables that must be manually maintained: -* :attr:`pagerduty.CANONICAL_PATHS`, the list of canonical paths -* :attr:`pagerduty.ENTITY_WRAPPER_CONFIG`, a dictionary of exceptions to entity wrapping and schema conventions +* :attr:`pagerduty.rest_api_v2_client.CANONICAL_PATHS`, the list of canonical paths +* :attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG`, a dictionary of exceptions to entity wrapping and schema conventions Limitations *********** @@ -87,8 +95,8 @@ The first step for adding support for new APIs is to have a copy of the API Reference source code (this is a private GitHub repository owned by the PagerDuty org). The script ``scripts/get_path_list/get_path_list.py`` can then be used to automatically generate definitions of the global variables -:attr:`pagerduty.CANONICAL_PATHS` and -:attr:`pagerduty.CURSOR_BASED_PAGINATION_PATHS` that can be copied into the +:attr:`pagerduty.rest_api_v2_client.CANONICAL_PATHS` and +:attr:`pagerduty.rest_api_v2_client.CURSOR_BASED_PAGINATION_PATHS` that can be copied into the source code to replace the existing definitions. The script takes one argument: a path to the file ``reference/v2/Index.yaml`` within the reference source repository. @@ -99,7 +107,7 @@ The next step is to look at the request and response schemas in the API reference for each new endpoint added to the canonical path list, to see if it follows classic schema conventions for entity wrapping. If any new path does not, adding support for it will also require adding entries to -:attr:`pagerduty.ENTITY_WRAPPER_CONFIG`. "Classic schema conventions" refers to +:attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG`. 
"Classic schema conventions" refers to the logic codified in :attr:`pagerduty.infer_entity_wrapper` and :attr:`pagerduty.unwrap` (where a "node" is a component of the path component of the URL, separated by forward slashes): @@ -121,16 +129,16 @@ the same as the last node of the path. Examples: ``GET /services`` (wrapper = ``services``), ``PUT /incidents`` (wrapper = ``incidents``) If all of the above apply to new endpoints for all request methods, then no new -entries need to be added to :attr:`pagerduty.ENTITY_WRAPPER_CONFIG` to support +entries need to be added to :attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG` to support them; they are supported automatically by virtue of following preexisting already-supported API patterns and having corresponding entries in -:attr:`pagerduty.CANONICAL_PATHS`. +:attr:`pagerduty.rest_api_v2_client.CANONICAL_PATHS`. Adding Support for Non-Conforming Endpoints ******************************************* If the new endpoints do not follow classic schema conventions for entity wrapping, entries for them must be added to -:attr:`pagerduty.ENTITY_WRAPPER_CONFIG` in order to support them. As described +:attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG` in order to support them. As described in the documentation of that attribute, each key is a combination of the request method (or "*" for the configuration entry to apply to all methods) and the canonical path in question, and each value is a string (for the same @@ -139,7 +147,7 @@ is not applicable, and a tuple if the entity wrapping differs between the request and response bodies. Following the same examples as given in the :ref:`user_guide`: the entry in -:attr:`pagerduty.ENTITY_WRAPPER_CONFIG` to handle the "Create Business Service +:attr:`pagerduty.rest_api_v2_client.ENTITY_WRAPPER_CONFIG` to handle the "Create Business Service Subscribers" looks like this: .. code-block:: python @@ -164,15 +172,20 @@ lives. To rebuild the HTML documentation from the source, run: To force a rebuild, run ``touch CHANGELOG.rst`` first. +**NOTE:** Python version 3.13 or later must be used when rebuilding +documentation, or the version number in the documentation will be +``2.?.?-metadata-unavailable``. + Releasing a New Version ----------------------- -You will first need valid user accounts on both ``pypi.org`` and ``test.pypi.org`` +You will need valid user accounts on both ``pypi.org`` and ``test.pypi.org`` that have the "Maintainer" role on the project, as well as the requirements installed (see above). It is strongly recommended that you `use an API token -`_ to upload new releases to PyPI. +`_ to upload new releases to PyPI. The token +must have write access to the project. Perform end-to-end publish and installation testing *************************************************** @@ -183,8 +196,8 @@ project as on ``pypi.org``. Note, once a release is uploaded, it is no longer possible to upload a release with the same version number, even if that release is deleted. For that reason, -it is a good idea to first add a suffix, i.e. ``-dev001``, to ``__version__`` -in ``setup.py`` while testing. +it is a good idea to first add a suffix, i.e. ``-dev001``, to the version in +``pyproject.toml`` while testing. 
To perform end-to-end tests, run the following, entering credentials for ``test.pypi.org`` when prompted: @@ -195,7 +208,7 @@ To perform end-to-end tests, run the following, entering credentials for The make target ``testpublish`` performs the following: -* Build the Python egg in ``dist/`` +* Build the Python package * Upload the new library to ``test.pypi.org`` * Test-install the library from ``test.pypi.org`` into a temporary Python virtualenv that does not already have the library installed, to test @@ -205,8 +218,8 @@ The make target ``testpublish`` performs the following: If any errors are encountered, the script should immediately exit. Errors should be investigated and mitigated before publishing. To test again, -temporarily change ``__version__`` so that it counts as a new release -and gets uploaded, and set it to the desired version before the actual +temporarily change the version in ``pyproject.toml`` so that it counts as a new +release and gets uploaded, and set it to the desired version before the actual release. Merge changes and tag @@ -215,11 +228,8 @@ Merge changes and tag A pull request for releasing a new version should be created, which along with the functional changes should also include at least: -* An update to the changelog, where all items corresponding to community - contributions end with (in parentheses) the GitHub user handle of the - contributor, a slash, and a link to the pull request (see CHANGELOG.rst for - preexisting examples). -* A change in the version number in both setup.py and pagerduty.py, to a new +* An update to ``CHANGELOG.rst`` describing the changes in the new release +* A change in the version number in ``pyproject.toml`` to a new version that follows `Semantic Versioning `_. * Rebuilt HTML documentation diff --git a/sphinx/source/module_reference.rst b/sphinx/source/module_reference.rst index 1ac1c3c..7a2d682 100644 --- a/sphinx/source/module_reference.rst +++ b/sphinx/source/module_reference.rst @@ -33,8 +33,12 @@ Client Defaults These are properties of the module that configure default behavior for the API client. There should be no need for the end user to modify them. -.. automodule:: pagerduty - :members: ITERATION_LIMIT, TEXT_LEN_LIMIT, TIMEOUT, ENTITY_WRAPPER_CONFIG, CANONICAL_PATHS, CURSOR_BASED_PAGINATION_PATHS +.. automodule:: pagerduty.rest_api_v2_client + :members: ITERATION_LIMIT, ENTITY_WRAPPER_CONFIG, CANONICAL_PATHS, CURSOR_BASED_PAGINATION_PATHS +.. automodule:: pagerduty.common + :members: TEXT_LEN_LIMIT +.. automodule:: pagerduty.api_client + :members: TIMEOUT Functions --------- diff --git a/sphinx/source/user_guide.rst b/sphinx/source/user_guide.rst index e761a8e..f8535ff 100644 --- a/sphinx/source/user_guide.rst +++ b/sphinx/source/user_guide.rst @@ -237,7 +237,7 @@ recorded as *log entries* of type ``annotate_log_entry``, so with that in mind: Events API v2 ************* -**Trigger and resolve an alert,** getting its deduplication key from the API, using :class:`EventsApiV2Client`: +**Trigger and resolve an alert,** getting its deduplication key from the API, using :class:`pagerduty.EventsApiV2Client`: .. code-block:: python @@ -254,7 +254,7 @@ Events API v2 # ... events_client.acknowledge('abc123') -**Submit a change event** using a :class:`EventsApiV2Client` instance: +**Submit a change event** using a :class:`pagerduty.EventsApiV2Client` instance: .. 
code-block:: python diff --git a/test.sh b/test.sh new file mode 100755 index 0000000..4a3e87d --- /dev/null +++ b/test.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +py_minor_ver=`python -c 'import sys; print(sys.version_info.minor)'` +py_major_ver=`python -c 'import sys; print(sys.version_info.major)'` + +if [[ $py_major_ver -le 3 ]]; then + if [[ $py_minor_ver -le 6 ]]; then + echo "Using backwards compatibility hack for Python 3.6" + pip install -r requirements.txt + else + echo "pip install ." + pip install . + fi +fi + +/usr/bin/env python3 -m unittest discover -p '*_test.py' -s tests diff --git a/tests/common_test.py b/tests/common_test.py new file mode 100644 index 0000000..05ce306 --- /dev/null +++ b/tests/common_test.py @@ -0,0 +1,74 @@ +import json +import unittest + +from mocks import Response + +import pagerduty + +class SessionTest(unittest.TestCase): + def assertDictContainsSubset(self, d0, d1): + self.assertTrue(set(d0.keys()).issubset(set(d1.keys())), + msg="First dict is not a subset of second dict") + self.assertEqual(d0, dict([(k, d1[k]) for k in d0])) + +class UrlHandlingTest(unittest.TestCase): + + def test_normalize_url(self): + urls_expected = [ + ( + ('https://api.pagerduty.com/', 'users'), + 'https://api.pagerduty.com/users', + ), + ( + ('https://api.pagerduty.com', '/users'), + 'https://api.pagerduty.com/users', + ), + ( + ( + 'https://api.pagerduty.com', + 'https://api.pagerduty.com/users', + ), + 'https://api.pagerduty.com/users', + ) + ] + for (base_url_url, expected_url) in urls_expected: + self.assertEqual( + expected_url, + pagerduty.normalize_url(*base_url_url) + ) + invalid_input = [ # URL does not start with base_url + ( + 'https://api.pagerduty.com/incidents', + 'https://events.pagerduty.com/api/v2/enqueue', + ), + ( + 'https://api.pagerduty.com/services', + 'https://some.shady-site.com/read-auth-headers', + ) + ] + for args in invalid_input: + self.assertRaises(pagerduty.UrlError, pagerduty.normalize_url, *args) + +class HelperFunctionsTest(unittest.TestCase): + + def test_plural_deplural(self): + # forward + for r_name in ('escalation_policies', 'services', 'log_entries'): + self.assertEqual( + r_name, + pagerduty.plural_name(pagerduty.singular_name(r_name)) + ) + # reverse + for o_name in ('escalation_policy', 'service', 'log_entry'): + self.assertEqual( + o_name, + pagerduty.singular_name(pagerduty.plural_name(o_name)) + ) + + def test_successful_response(self): + self.assertRaises(pagerduty.Error, pagerduty.successful_response, + Response(400, json.dumps({}))) + self.assertRaises(pagerduty.ServerHttpError, pagerduty.successful_response, + Response(500, json.dumps({}))) + + diff --git a/tests/events_api_v2_client_test.py b/tests/events_api_v2_client_test.py new file mode 100644 index 0000000..4c22d39 --- /dev/null +++ b/tests/events_api_v2_client_test.py @@ -0,0 +1,183 @@ +from unittest.mock import Mock, MagicMock, patch, call + +from common_test import SessionTest +from mocks import Response + +import pagerduty + +EVENT_TIMESTAMP = '2020-03-25T00:00:00Z' + +class EventsApiV2ClientTest(SessionTest): + + def test_send_event(self): + sess = pagerduty.EventsApiV2Client('routingkey') + parent = MagicMock() + parent.request = MagicMock() + parent.request.side_effect = [ + Response(202, '{"dedup_key":"abc123"}'), + Response(202, '{"dedup_key":"abc123"}'), + Response(202, '{"dedup_key":"abc123"}') + ] + with patch.object(sess, 'parent', new=parent): + ddk = sess.trigger('testing 123', 'triggered.from.pagerduty', + custom_details={"this":"that"}, severity='warning', + 
images=[{'url':'https://http.cat/502.jpg'}]) + self.assertEqual('abc123', ddk) + self.assertEqual( + 'POST', + parent.request.call_args[0][0]) + self.assertEqual( + 'https://events.pagerduty.com/v2/enqueue', + parent.request.call_args[0][1]) + self.assertDictContainsSubset( + {'Content-Type': 'application/json'}, + parent.request.call_args[1]['headers']) + self.assertNotIn( + 'X-Routing-Key', + parent.request.call_args[1]['headers']) + self.assertEqual( + { + 'event_action':'trigger', + 'routing_key':'routingkey', + 'payload':{ + 'summary': 'testing 123', + 'source': 'triggered.from.pagerduty', + 'severity': 'warning', + 'custom_details': {'this':'that'}, + }, + 'images': [{'url':'https://http.cat/502.jpg'}] + }, + parent.request.call_args[1]['json']) + ddk = sess.resolve('abc123') + self.assertEqual( + { + 'event_action':'resolve', + 'dedup_key':'abc123', + 'routing_key':'routingkey', + }, + parent.request.call_args[1]['json']) + + ddk = sess.acknowledge('abc123') + self.assertEqual( + { + 'event_action':'acknowledge', + 'dedup_key':'abc123', + 'routing_key':'routingkey', + }, + parent.request.call_args[1]['json']) + + def test_send_explicit_event(self): + # test sending an event by calling `post` directly as opposed to any of + # the methods written into the client for sending events + sess = pagerduty.EventsApiV2Client('routingkey') + parent = MagicMock() + parent.request = MagicMock() + parent.request.side_effect = [Response(202, '{"dedup_key":"abc123"}')] + with patch.object(sess, 'parent', new=parent): + response = sess.post('/v2/enqueue', json={ + 'payload': { + 'summary': 'testing 123', + 'source': 'pagerduty integration', + 'severity': 'critical' + }, + 'event_action': 'trigger' + }) + json_sent = parent.request.call_args[1]['json'] + self.assertTrue('routing_key' in json_sent) + self.assertEqual(json_sent['routing_key'], 'routingkey') + + @patch('pagerduty.EventsApiV2Client.event_timestamp', EVENT_TIMESTAMP) + def test_submit_change_event(self): + sess = pagerduty.EventsApiV2Client('routingkey') + parent = MagicMock() + parent.request = MagicMock() + # The dedup key for change events is unused so we don't care about the response + # schema, only that it is valid JSON: + parent.request.side_effect = [ Response(202, '{}') ] + with patch.object(sess, 'parent', new=parent): + self.assertEqual( + sess.submit( + 'testing 123', + 'triggered.from.pagerduty', + custom_details={"this":"that"}, + links=[{'href':'https://http.cat/502.jpg'}], + ), + None + ) + self.assertEqual( + 'POST', + parent.request.call_args[0][0]) + self.assertEqual( + 'https://events.pagerduty.com/v2/change/enqueue', + parent.request.call_args[0][1]) + self.assertDictContainsSubset( + {'Content-Type': 'application/json'}, + parent.request.call_args[1]['headers']) + self.assertNotIn( + 'X-Routing-Key', + parent.request.call_args[1]['headers']) + self.assertEqual( + { + 'routing_key':'routingkey', + 'payload':{ + 'summary': 'testing 123', + 'timestamp': EVENT_TIMESTAMP, + 'source': 'triggered.from.pagerduty', + 'custom_details': {'this':'that'}, + }, + 'links': [{'href':'https://http.cat/502.jpg'}] + }, + parent.request.call_args[1]['json']) + # Same as above but with a custom timestamp: + sess = pagerduty.EventsApiV2Client('routingkey') + parent = MagicMock() + parent.request = MagicMock() + parent.request.side_effect = [ Response(202, '{}') ] + with patch.object(sess, 'parent', new=parent): + custom_timestamp = '2023-06-26T00:00:00Z' + self.assertEqual( + sess.submit( + 'testing 123', + 'triggered.from.pagerduty', + 
custom_details={"this":"that"}, + links=[{'href':'https://http.cat/502.jpg'}], + timestamp=custom_timestamp, + ), + None + ) + self.assertEqual( + parent.request.call_args[1]['json']['payload']['timestamp'], + custom_timestamp + ) + + @patch('pagerduty.EventsApiV2Client.event_timestamp', EVENT_TIMESTAMP) + def test_submit_lite_change_event(self): + sess = pagerduty.EventsApiV2Client('routingkey') + parent = MagicMock() + parent.request = MagicMock() + parent.request.side_effect = [ Response(202, '{}') ] + with patch.object(sess, 'parent', new=parent): + sess.submit('testing 123') + self.assertEqual( + 'POST', + parent.request.call_args[0][0]) + self.assertEqual( + 'https://events.pagerduty.com/v2/change/enqueue', + parent.request.call_args[0][1]) + self.assertDictContainsSubset( + {'Content-Type': 'application/json'}, + parent.request.call_args[1]['headers']) + self.assertNotIn( + 'X-Routing-Key', + parent.request.call_args[1]['headers']) + self.assertEqual( + { + 'routing_key':'routingkey', + 'payload':{ + 'summary': 'testing 123', + 'timestamp': EVENT_TIMESTAMP, + } + }, + parent.request.call_args[1]['json']) + + diff --git a/tests/mocks.py b/tests/mocks.py new file mode 100644 index 0000000..4012dd4 --- /dev/null +++ b/tests/mocks.py @@ -0,0 +1,35 @@ +import datetime +import json +from unittest.mock import Mock, MagicMock, patch, call + +class Session(object): + """ + Python reqeusts.Session mockery class + """ + request = None + headers = None + +class Response(object): + """Specialized mock class for emulating requests.Response objects + + Look for existing use of this class for examples on how to use. + """ + def __init__(self, code, text, method='GET', url=None): + super(Response, self).__init__() + self.status_code = code + self.text = text + self.ok = code < 400 + self.headers = MagicMock() + if url: + self.url = url + else: + self.url = 'https://api.pagerduty.com' + self.elapsed = datetime.timedelta(0,1.5) + self.request = Mock(url=self.url) + self.headers = {'date': 'somedate', + 'x-request-id': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'} + self.request.method = method + self.json = MagicMock() + self.json.return_value = json.loads(text) + + diff --git a/test_pagerduty.py b/tests/rest_api_v2_client_test.py old mode 100755 new mode 100644 similarity index 72% rename from test_pagerduty.py rename to tests/rest_api_v2_client_test.py index ce830ed..449a555 --- a/test_pagerduty.py +++ b/tests/rest_api_v2_client_test.py @@ -1,65 +1,18 @@ -#!/usr/bin/env python - -""" -Unit tests for pagerduty - -Python 3, or the backport of unittest.mock for Python 2, is required. - -See: - -https://docs.python.org/3.5/library/unittest.mock.html -https://pypi.org/project/backports.unittest_mock/1.3/ -""" -import argparse import copy -import datetime import json import logging import requests import sys import unittest - from unittest.mock import Mock, MagicMock, patch, call + +from common_test import SessionTest +from mocks import Response, Session + import pagerduty -class SessionTest(unittest.TestCase): - def assertDictContainsSubset(self, d0, d1): - self.assertTrue(set(d0.keys()).issubset(set(d1.keys())), - msg="First dict is not a subset of second dict") - self.assertEqual(d0, dict([(k, d1[k]) for k in d0])) - -class Session(object): - """ - Python reqeusts.Session mockery class - """ - request = None - headers = None - -class Response(object): - """Mock class for emulating requests.Response objects - - Look for existing use of this class for examples on how to use. 
- """ - def __init__(self, code, text, method='GET', url=None): - super(Response, self).__init__() - self.status_code = code - self.text = text - self.ok = code < 400 - self.headers = MagicMock() - if url: - self.url = url - else: - self.url = 'https://api.pagerduty.com' - self.elapsed = datetime.timedelta(0,1.5) - self.request = Mock(url=self.url) - self.headers = {'date': 'somedate', - 'x-request-id': 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx'} - self.request.method = method - self.json = MagicMock() - self.json.return_value = json.loads(text) - -class URLHandlingTest(unittest.TestCase): +class RestApiV2UrlHandlingTest(unittest.TestCase): def test_canonical_path(self): identified_urls = [ @@ -92,42 +45,6 @@ def test_is_path_param(self): self.assertTrue(pagerduty.is_path_param('{id}')) self.assertFalse(pagerduty.is_path_param('services')) - def test_normalize_url(self): - urls_expected = [ - ( - ('https://api.pagerduty.com/', 'users'), - 'https://api.pagerduty.com/users', - ), - ( - ('https://api.pagerduty.com', '/users'), - 'https://api.pagerduty.com/users', - ), - ( - ( - 'https://api.pagerduty.com', - 'https://api.pagerduty.com/users', - ), - 'https://api.pagerduty.com/users', - ) - ] - for (base_url_url, expected_url) in urls_expected: - self.assertEqual( - expected_url, - pagerduty.normalize_url(*base_url_url) - ) - invalid_input = [ # URL does not start with base_url - ( - 'https://api.pagerduty.com/incidents', - 'https://events.pagerduty.com/api/v2/enqueue', - ), - ( - 'https://api.pagerduty.com/services', - 'https://some.shady-site.com/read-auth-headers', - ) - ] - for args in invalid_input: - self.assertRaises(pagerduty.UrlError, pagerduty.normalize_url, *args) - class EntityWrappingTest(unittest.TestCase): def test_entity_wrappers(self): @@ -313,197 +230,6 @@ def reset_mocks(): {'incidents': incidents} ) -class HelperFunctionsTest(unittest.TestCase): - - def test_plural_deplural(self): - # forward - for r_name in ('escalation_policies', 'services', 'log_entries'): - self.assertEqual( - r_name, - pagerduty.plural_name(pagerduty.singular_name(r_name)) - ) - # reverse - for o_name in ('escalation_policy', 'service', 'log_entry'): - self.assertEqual( - o_name, - pagerduty.singular_name(pagerduty.plural_name(o_name)) - ) - - def test_successful_response(self): - self.assertRaises(pagerduty.Error, pagerduty.successful_response, - Response(400, json.dumps({}))) - self.assertRaises(pagerduty.ServerHttpError, pagerduty.successful_response, - Response(500, json.dumps({}))) - -class EventsApiV2ClientTest(SessionTest): - - def test_send_event(self): - sess = pagerduty.EventsApiV2Client('routingkey') - parent = MagicMock() - parent.request = MagicMock() - parent.request.side_effect = [ - Response(202, '{"dedup_key":"abc123"}'), - Response(202, '{"dedup_key":"abc123"}'), - Response(202, '{"dedup_key":"abc123"}') - ] - with patch.object(sess, 'parent', new=parent): - ddk = sess.trigger('testing 123', 'triggered.from.pagerduty', - custom_details={"this":"that"}, severity='warning', - images=[{'url':'https://http.cat/502.jpg'}]) - self.assertEqual('abc123', ddk) - self.assertEqual( - 'POST', - parent.request.call_args[0][0]) - self.assertEqual( - 'https://events.pagerduty.com/v2/enqueue', - parent.request.call_args[0][1]) - self.assertDictContainsSubset( - {'Content-Type': 'application/json'}, - parent.request.call_args[1]['headers']) - self.assertNotIn( - 'X-Routing-Key', - parent.request.call_args[1]['headers']) - self.assertEqual( - { - 'event_action':'trigger', - 'routing_key':'routingkey', - 
'payload':{ - 'summary': 'testing 123', - 'source': 'triggered.from.pagerduty', - 'severity': 'warning', - 'custom_details': {'this':'that'}, - }, - 'images': [{'url':'https://http.cat/502.jpg'}] - }, - parent.request.call_args[1]['json']) - ddk = sess.resolve('abc123') - self.assertEqual( - { - 'event_action':'resolve', - 'dedup_key':'abc123', - 'routing_key':'routingkey', - }, - parent.request.call_args[1]['json']) - - ddk = sess.acknowledge('abc123') - self.assertEqual( - { - 'event_action':'acknowledge', - 'dedup_key':'abc123', - 'routing_key':'routingkey', - }, - parent.request.call_args[1]['json']) - - def test_send_explicit_event(self): - # test sending an event by calling `post` directly as opposed to any of - # the methods written into the client for sending events - sess = pagerduty.EventsApiV2Client('routingkey') - parent = MagicMock() - parent.request = MagicMock() - parent.request.side_effect = [Response(202, '{"dedup_key":"abc123"}')] - with patch.object(sess, 'parent', new=parent): - response = sess.post('/v2/enqueue', json={ - 'payload': { - 'summary': 'testing 123', - 'source': 'pagerduty integration', - 'severity': 'critical' - }, - 'event_action': 'trigger' - }) - json_sent = parent.request.call_args[1]['json'] - self.assertTrue('routing_key' in json_sent) - self.assertEqual(json_sent['routing_key'], 'routingkey') - - @patch('pagerduty.EventsApiV2Client.event_timestamp', - '2020-03-25T00:00:00Z') - def test_submit_change_event(self): - sess = pagerduty.EventsApiV2Client('routingkey') - parent = MagicMock() - parent.request = MagicMock() - # The dedup key for change events is unused so we don't care about the response - # schema, only that it is valid JSON: - parent.request.side_effect = [ Response(202, '{}') ] - with patch.object(sess, 'parent', new=parent): - sess.submit( - 'testing 123', - 'triggered.from.pagerduty', - custom_details={"this":"that"}, - links=[{'href':'https://http.cat/502.jpg'}], - ) - self.assertEqual( - 'POST', - parent.request.call_args[0][0]) - self.assertEqual( - 'https://events.pagerduty.com/v2/change/enqueue', - parent.request.call_args[0][1]) - self.assertDictContainsSubset( - {'Content-Type': 'application/json'}, - parent.request.call_args[1]['headers']) - self.assertNotIn( - 'X-Routing-Key', - parent.request.call_args[1]['headers']) - self.assertEqual( - { - 'routing_key':'routingkey', - 'payload':{ - 'summary': 'testing 123', - 'timestamp': '2020-03-25T00:00:00Z', - 'source': 'triggered.from.pagerduty', - 'custom_details': {'this':'that'}, - }, - 'links': [{'href':'https://http.cat/502.jpg'}] - }, - parent.request.call_args[1]['json']) - # Same as above but with a custom timestamp: - sess = pagerduty.EventsApiV2Client('routingkey') - parent = MagicMock() - parent.request = MagicMock() - parent.request.side_effect = [ Response(202, '{}') ] - with patch.object(sess, 'parent', new=parent): - custom_timestamp = '2023-06-26T00:00:00Z' - sess.submit( - 'testing 123', - 'triggered.from.pagerduty', - custom_details={"this":"that"}, - links=[{'href':'https://http.cat/502.jpg'}], - timestamp=custom_timestamp, - ) - self.assertEqual( - parent.request.call_args[1]['json']['payload']['timestamp'], - custom_timestamp - ) - - @patch('pagerduty.EventsApiV2Client.event_timestamp', - '2020-03-25T00:00:00Z') - def test_submit_lite_change_event(self): - sess = pagerduty.EventsApiV2Client('routingkey') - parent = MagicMock() - parent.request = MagicMock() - parent.request.side_effect = [ Response(202, '{}') ] - with patch.object(sess, 'parent', new=parent): - 
sess.submit('testing 123') - self.assertEqual( - 'POST', - parent.request.call_args[0][0]) - self.assertEqual( - 'https://events.pagerduty.com/v2/change/enqueue', - parent.request.call_args[0][1]) - self.assertDictContainsSubset( - {'Content-Type': 'application/json'}, - parent.request.call_args[1]['headers']) - self.assertNotIn( - 'X-Routing-Key', - parent.request.call_args[1]['headers']) - self.assertEqual( - { - 'routing_key':'routingkey', - 'payload':{ - 'summary': 'testing 123', - 'timestamp': '2020-03-25T00:00:00Z', - } - }, - parent.request.call_args[1]['json']) - class RestApiV2ClientTest(SessionTest): def test_oauth_headers(self): @@ -579,7 +305,9 @@ def test_iter_all(self, get, iter_cursor): # pagination, short-circuit to iter_cursor path = '/audit/records' cpath = pagerduty.canonical_path('https://api.pagerduty.com', path) - self.assertTrue(cpath in pagerduty.CURSOR_BASED_PAGINATION_PATHS) + self.assertTrue( + cpath in pagerduty.rest_api_v2_client.CURSOR_BASED_PAGINATION_PATHS + ) iter_cursor.return_value = [] self.assertEqual([], list(sess.iter_all('/audit/records'))) iter_cursor.assert_called_once_with('/audit/records', params=None) @@ -781,7 +509,7 @@ def test_request(self, postprocess): headers_get = { 'Accept': 'application/vnd.pagerduty+json;version=2', 'Authorization': 'Token token=12345', - 'User-Agent': 'pagerduty/%s python-requests/%s Python/%d.%d'%( + 'User-Agent': 'python-pagerduty/%s python-requests/%s Python/%d.%d'%( pagerduty.__version__, requests.__version__, sys.version_info.major, @@ -874,7 +602,7 @@ def test_request(self, postprocess): Response(429, json.dumps({'error': {'message': 'chill out'}})), Response(200, json.dumps({'user': user})), ] - with patch.object(pagerduty.time, 'sleep') as sleep: + with patch.object(pagerduty.api_client.time, 'sleep') as sleep: r = sess.get('/users') self.assertTrue(r.ok) # should only return after success self.assertEqual(3, request.call_count) @@ -894,10 +622,10 @@ def test_request(self, postprocess): request.reset_mock() # Test retry logic: - with patch.object(pagerduty.time, 'sleep') as sleep: + with patch.object(pagerduty.api_client.time, 'sleep') as sleep: # Test getting a connection error and succeeding the final time. returns = [ - pagerduty.HTTPError("D'oh!") + pagerduty.api_client.Urllib3HttpError("D'oh!") ]*sess.max_network_attempts returns.append(Response(200, json.dumps({'user': user}))) request.side_effect = returns @@ -915,7 +643,7 @@ def test_request(self, postprocess): # Now test handling a non-transient error when the client # library itself hits odd issues that it can't handle, i.e. # network, and that the raised exception includes context: - raises = [pagerduty.RequestException("D'oh!")]*( + raises = [pagerduty.api_client.RequestException("D'oh!")]*( sess.max_network_attempts+1) request.side_effect = raises try: @@ -982,10 +710,3 @@ def test_subdomain(self, rget): def test_truncated_token(self): sess = pagerduty.RestApiV2Client('abcd1234') self.assertEqual('*1234', sess.trunc_token) - -def main(): - ap=argparse.ArgumentParser() - unittest.main() - -if __name__ == '__main__': - main()
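
The assertions above that now reference ``pagerduty.rest_api_v2_client.CURSOR_BASED_PAGINATION_PATHS`` and ``pagerduty.api_client.time`` illustrate how the multi-file refactor shows up in the test suite: public helpers are still importable from the top-level ``pagerduty`` package, while file-scoped internals are reached through the per-file submodules. A minimal sketch of that distinction, using the same path the updated ``test_iter_all`` checks (not part of the diff itself)::

    import pagerduty

    # Public helpers remain attributes of the top-level package:
    cpath = pagerduty.canonical_path('https://api.pagerduty.com', '/audit/records')

    # Internals used by the clients are now namespaced per source file:
    assert cpath in pagerduty.rest_api_v2_client.CURSOR_BASED_PAGINATION_PATHS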
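
The updated ``User-Agent`` assertion in ``test_request`` also pins down the new default header prefix. A quick sketch of the value a given environment would send, assuming the header is built only from the components named in that assertion::

    import sys

    import requests

    import pagerduty

    # Mirrors the format string asserted in tests/rest_api_v2_client_test.py:
    print('python-pagerduty/%s python-requests/%s Python/%d.%d' % (
        pagerduty.__version__,
        requests.__version__,
        sys.version_info.major,
        sys.version_info.minor,
    ))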
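
New test modules added under ``tests/`` are expected to follow the same conventions as the files introduced in this diff: import the shared ``SessionTest`` base and the ``Response`` mock, patch the client's ``parent`` session, and assert on ``parent.request.call_args``; the whole suite runs through ``./test.sh``, which wraps ``python3 -m unittest discover -p '*_test.py' -s tests``. A sketch under those assumptions (the class and test names below are illustrative, not part of the suite)::

    from unittest.mock import MagicMock, patch

    from common_test import SessionTest   # shared assertDictContainsSubset helper
    from mocks import Response            # stand-in for requests.Response objects

    import pagerduty

    class ExampleEventsTest(SessionTest):

        def test_resolve_sends_dedup_key(self):
            sess = pagerduty.EventsApiV2Client('routingkey')
            parent = MagicMock()
            parent.request = MagicMock()
            parent.request.side_effect = [Response(202, '{"dedup_key":"abc123"}')]
            with patch.object(sess, 'parent', new=parent):
                sess.resolve('abc123')
            # Expect the same request body that the resolve branch of
            # test_send_event asserts in tests/events_api_v2_client_test.py:
            self.assertEqual(
                {
                    'event_action': 'resolve',
                    'dedup_key': 'abc123',
                    'routing_key': 'routingkey',
                },
                parent.request.call_args[1]['json'])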