From 1759aa4da7a187f3a8be23dbb0dd1cebba859945 Mon Sep 17 00:00:00 2001 From: nick Date: Wed, 28 Nov 2018 10:55:58 -0600 Subject: [PATCH 01/90] PEP8 Cleanup --- sonarr.py | 116 ++++++++++++++++++++---------------------------------- 1 file changed, 43 insertions(+), 73 deletions(-) diff --git a/sonarr.py b/sonarr.py index 8be5f6c3..f504b74a 100644 --- a/sonarr.py +++ b/sonarr.py @@ -9,8 +9,8 @@ def now_iso(): - now_iso = datetime.now(timezone.utc).astimezone().isoformat() - return now_iso + now = datetime.now(timezone.utc).astimezone().isoformat() + return now def influx_sender(influx_payload): @@ -22,10 +22,7 @@ def influx_sender(influx_payload): def get_all_missing_shows(): # Set the time here so we have one timestamp to work with now = now_iso() - - missing = [] - - influx_payload = [] + missing, influx_payload = [], [] for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: @@ -36,19 +33,18 @@ def get_all_missing_shows(): tv_shows = {d['id']: d for d in get_tv_shows} - for show in tv_shows.keys(): series_title = '{}'.format(tv_shows[show]['series']['title']) - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'],tv_shows[show]['episodeNumber']) + sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber']) missing.append((series_title, sxe, tv_shows[show]['id'], tv_shows[show]['title'])) - for series_title, sxe, id, episode_title in missing: + for series_title, sxe, sonarr_id, episode_title in missing: influx_payload.append( { "measurement": "Sonarr", "tags": { "type": "Missing", - "sonarrId": id, + "sonarrId": sonarr_id, "server": server_id }, "time": now, @@ -68,20 +64,16 @@ def get_all_missing_shows(): def get_missing_shows(days_past): # Set the time here so we have one timestamp to work with now = now_iso() - last_days = str(date.today()+timedelta(days=-days_past)) - today = str(date.today()) - - missing = [] - - influx_payload = [] + missing, influx_payload = [], [] for sonarr_url, 
sonarr_api_key, server_id in configuration.sonarr_server_list: headers = {'X-Api-Key': sonarr_api_key} - get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=1000'.format(sonarr_url, last_days, today), + get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=1000'.format(sonarr_url, last_days, + today), headers=headers).json() tv_shows = {d['id']: d for d in get_tv_shows} @@ -93,13 +85,13 @@ def get_missing_shows(days_past): air_date = (tv_shows[show]['airDate']) missing.append((series_title, sxe, air_date, tv_shows[show]['id'])) - for series_title, sxe, air_date, id in missing: + for series_title, sxe, air_date, sonarr_id in missing: influx_payload.append( { "measurement": "Sonarr", "tags": { "type": "Missing_Days", - "sonarrId": id, + "sonarrId": sonarr_id, "server": server_id }, "time": now, @@ -120,41 +112,39 @@ def get_missing_shows(days_past): def get_upcoming_shows(): # Set the time here so we have one timestamp to work with now = now_iso() - upcoming = [] - influx_payload = [] for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: headers = {'X-Api-Key': sonarr_api_key} - get_upcoming_shows = requests.get('{}/api/calendar/'.format(sonarr_url), - headers=headers).json() + upcoming_shows_request = requests.get('{}/api/calendar/'.format(sonarr_url), headers=headers).json() - upcoming_shows = {d['id']: d for d in get_upcoming_shows} + upcoming_shows = {d['id']: d for d in upcoming_shows_request} for show in upcoming_shows.keys(): series_title = '{}'.format(upcoming_shows[show]['series']['title']) - sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'],upcoming_shows[show]['episodeNumber']) - upcoming.append((series_title, sxe, upcoming_shows[show]['id'], upcoming_shows[show]['title'], upcoming_shows[show]['airDate'])) + sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'], upcoming_shows[show]['episodeNumber']) + upcoming.append((series_title, sxe, upcoming_shows[show]['id'], 
upcoming_shows[show]['title'], + upcoming_shows[show]['airDate'])) - for series_title, sxe, id, episode_title, air_date in upcoming: + for series_title, sxe, sonarr_id, episode_title, air_date in upcoming: influx_payload.append( { "measurement": "Sonarr", "tags": { "type": "Soon", - "sonarrId": id, + "sonarrId": sonarr_id, "server": server_id - }, - "time": now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date - } + }, + "time": now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "airs": air_date + } } ) # Empty upcoming or else things get foo bared @@ -168,13 +158,8 @@ def get_future_shows(future_days): now = now_iso() today = str(date.today()) - future = str(date.today()+timedelta(days=future_days)) - air_days = [] - - downloaded = [] - influx_payload = [] for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: @@ -190,15 +175,16 @@ def get_future_shows(future_days): series_title = '{}'.format(tv_shows[show]['series']['title']) dl_status = int(tv_shows[show]['hasFile']) sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber']) - air_days.append((series_title, dl_status, sxe, tv_shows[show]['title'], tv_shows[show]['airDate'], tv_shows[show]['id'])) + air_days.append((series_title, dl_status, sxe, tv_shows[show]['title'], tv_shows[show]['airDate'], + tv_shows[show]['id'])) - for series_title, dl_status, sxe, episode_title, air_date, id in air_days: + for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: influx_payload.append( { "measurement": "Sonarr", "tags": { "type": "Future", - "sonarrId": id, + "sonarrId": sonarr_id, "server": server_id }, "time": now, @@ -220,11 +206,7 @@ def get_future_shows(future_days): def get_queue_shows(): # Set the time here so we have one timestamp to work with now = now_iso() - queue = [] - - downloaded = [] - influx_payload = [] for sonarr_url, sonarr_api_key, 
server_id in configuration.sonarr_server_list: @@ -239,8 +221,9 @@ def get_queue_shows(): for show in tv_shows.keys(): series_title = '{}'.format(tv_shows[show]['series']['title']) episode_title = '{}'.format(tv_shows[show]['episode']['title']) - protocol = (tv_shows[show]['protocol'].upper()) - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['episode']['seasonNumber'], tv_shows[show]['episode']['episodeNumber']) + protocol = tv_shows[show]['protocol'].upper() + sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['episode']['seasonNumber'], + tv_shows[show]['episode']['episodeNumber']) if protocol == 'USENET': protocol_id = 1 else: @@ -248,13 +231,13 @@ def get_queue_shows(): queue.append((series_title, episode_title, protocol, protocol_id, sxe, tv_shows[show]['id'])) - for series_title, episode_title, protocol, protocol_id, sxe, id in queue: + for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: influx_payload.append( { "measurement": "Sonarr", "tags": { "type": "Queue", - "sonarrId": id, + "sonarrId": sonarr_id, "server": server_id }, @@ -277,41 +260,28 @@ def get_queue_shows(): if __name__ == "__main__": parser = argparse.ArgumentParser(prog='Sonarr stats operations', - description='Script to aid in data gathering from Sonarr', formatter_class=RawTextHelpFormatter) + description='Script to aid in data gathering from Sonarr', + formatter_class=RawTextHelpFormatter) - parser.add_argument("--missing", action='store_true', - help='Get all missing TV shows') - - parser.add_argument("--missing_days", type=int, - help='Get missing TV shows in past X days') - - parser.add_argument("--upcoming", action='store_true', - help='Get upcoming TV shows') - - parser.add_argument("--future", type=int, - help='Get TV shows on X days into the future. Includes today.' - '\ni.e. 
--future 2 is Today and Tomorrow') - - parser.add_argument("--queue", action='store_true', - help='Get TV shows in queue') + parser.add_argument("--missing", action='store_true', help='Get all missing TV shows') + parser.add_argument("--missing_days", type=int, help='Get missing TV shows in past X days') + parser.add_argument("--upcoming", action='store_true', help='Get upcoming TV shows') + parser.add_argument("--future", type=int, help='Get TV shows on X days into the future. Includes today.' + '\ni.e. --future 2 is Today and Tomorrow') + parser.add_argument("--queue", action='store_true', help='Get TV shows in queue') opts = parser.parse_args() if opts.missing: influx_sender(get_all_missing_shows()) - elif opts.missing_days: influx_sender(get_missing_shows(opts.missing_days)) - elif opts.upcoming: influx_sender(get_upcoming_shows()) - elif opts.future: influx_sender(get_future_shows(opts.future)) - elif opts.queue: influx_sender(get_queue_shows()) - elif len(sys.argv) == 1: parser.print_help(sys.stderr) sys.exit(1) From bcb5f5810de1bc98ddc98a81723f356863cff4c9 Mon Sep 17 00:00:00 2001 From: nick Date: Wed, 28 Nov 2018 14:32:39 -0600 Subject: [PATCH 02/90] sonarr.py overhaul --- helpers.py | 42 +++++ sonarr.py | 457 +++++++++++++++++++++++------------------------------ 2 files changed, 238 insertions(+), 261 deletions(-) create mode 100644 helpers.py diff --git a/helpers.py b/helpers.py new file mode 100644 index 00000000..c30773fe --- /dev/null +++ b/helpers.py @@ -0,0 +1,42 @@ +from typing import NamedTuple + + +class TVShow(NamedTuple): + seriesId: int + episodeFileId: int + seasonNumber: int + episodeNumber: int + title: str + airDate: str + airDateUtc: str + overview: str + episodeFile: dict + hasFile: bool + monitored: bool + unverifiedSceneNumbering: bool + absoluteEpisodeNumber: int + series: dict + id: int + + +class Queue(NamedTuple): + series: dict + episode: dict + quality: dict + size: float + title: str + sizeleft: float + timeleft: str + 
estimatedCompletionTime: str + status: str + trackedDownloadStatus: str + statusMessages: list + downloadId: str + protocol: str + id: int + + +class Server(NamedTuple): + url: str + api_key: str + id: int diff --git a/sonarr.py b/sonarr.py index f504b74a..f62c0b43 100644 --- a/sonarr.py +++ b/sonarr.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # Do not edit this script. Edit configuration.py import sys import requests @@ -5,257 +6,183 @@ from influxdb import InfluxDBClient import argparse from argparse import RawTextHelpFormatter -import configuration - - -def now_iso(): - now = datetime.now(timezone.utc).astimezone().isoformat() - return now - - -def influx_sender(influx_payload): - influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username, - configuration.influxdb_password, configuration.sonarr_influxdb_db_name) - influx.write_points(influx_payload) - - -def get_all_missing_shows(): - # Set the time here so we have one timestamp to work with - now = now_iso() - missing, influx_payload = [], [] - - for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: - - headers = {'X-Api-Key': sonarr_api_key} - - get_tv_shows = requests.get('{}/api/wanted/missing/?pageSize=1000'.format(sonarr_url), - headers=headers).json()['records'] - - tv_shows = {d['id']: d for d in get_tv_shows} - - for show in tv_shows.keys(): - series_title = '{}'.format(tv_shows[show]['series']['title']) - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber']) - missing.append((series_title, sxe, tv_shows[show]['id'], tv_shows[show]['title'])) - - for series_title, sxe, sonarr_id, episode_title in missing: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Missing", - "sonarrId": sonarr_id, - "server": server_id - }, - "time": now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe +import configuration as config +from helpers import 
Server, TVShow, Queue + + +class SonarrAPI(object): + TVShow.__new__.__defaults__ = (None,) * len(TVShow._fields) + + def __init__(self): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + self.today = str(date.today()) + self.influx = InfluxDBClient(config.influxdb_url, config.influxdb_port, config.influxdb_username, + config.influxdb_password, config.sonarr_influxdb_db_name) + self.influx_payload = [] + self.servers = self.get_servers() + self.session = requests.Session() + self.session.params = {'pageSize': 1000} + + @staticmethod + def get_servers(): + if not config.sonarr_server_list: + sys.exit("No Sonarr servers defined in config") + + servers = [] + for url, api_key, server_id in config.sonarr_server_list: + servers.append(Server(url=url, api_key=api_key, id=server_id)) + + return servers + + def get_missing(self, days_past): + endpoint = '/api/calendar' + last_days = str(date.today() + timedelta(days=-days_past)) + params = {'start': last_days, 'end': self.today} + + for server in self.servers: + missing = [] + headers = {'X-Api-Key': server.api_key} + + get = self.session.get(server.url + endpoint, params=params, headers=headers).json() + tv_shows = [TVShow(**show) for show in get] + + for show in tv_shows: + if not show.hasFile: + sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) + missing.append((show.series['title'], sxe, show.airDate, show.title, show.id)) + + for series_title, sxe, air_date, episode_title, sonarr_id in missing: + self.influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Missing", + "sonarrId": sonarr_id, + "server": server.id + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "airs": air_date + } } - } - ) - # Empty missing or else things get foo bared - missing = [] - - return influx_payload - - -def get_missing_shows(days_past): - # Set the time here so we have one timestamp to work with - now = now_iso() - last_days = 
str(date.today()+timedelta(days=-days_past)) - today = str(date.today()) - missing, influx_payload = [], [] - - for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: - - headers = {'X-Api-Key': sonarr_api_key} - - get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=1000'.format(sonarr_url, last_days, - today), - headers=headers).json() - - tv_shows = {d['id']: d for d in get_tv_shows} - - for show in tv_shows.keys(): - if not (tv_shows[show]['hasFile']): - series_title = '{}'.format(tv_shows[show]['series']['title']) - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber']) - air_date = (tv_shows[show]['airDate']) - missing.append((series_title, sxe, air_date, tv_shows[show]['id'])) - - for series_title, sxe, air_date, sonarr_id in missing: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Missing_Days", - "sonarrId": sonarr_id, - "server": server_id - }, - "time": now, - "fields": { - "name": series_title, - "sxe": sxe, - "airs": air_date + ) + + def get_upcoming(self): + endpoint = '/api/calendar/' + + for server in self.servers: + upcoming = [] + headers = {'X-Api-Key': server.api_key} + + get = self.session.get(server.url + endpoint, headers=headers).json() + tv_shows = [TVShow(**show) for show in get] + + for show in tv_shows: + sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) + upcoming.append((show.series['title'], sxe, show.id, show.title, show.airDate)) + + for series_title, sxe, sonarr_id, episode_title, air_date in upcoming: + self.influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Soon", + "sonarrId": sonarr_id, + "server": server.id + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "airs": air_date + } } - } - ) - - # Empty missing or else things get foo bared - missing = [] - - return influx_payload - - -def get_upcoming_shows(): - # Set the 
time here so we have one timestamp to work with - now = now_iso() - upcoming = [] - influx_payload = [] - - for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: - - headers = {'X-Api-Key': sonarr_api_key} - - upcoming_shows_request = requests.get('{}/api/calendar/'.format(sonarr_url), headers=headers).json() - - upcoming_shows = {d['id']: d for d in upcoming_shows_request} - - for show in upcoming_shows.keys(): - series_title = '{}'.format(upcoming_shows[show]['series']['title']) - sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'], upcoming_shows[show]['episodeNumber']) - upcoming.append((series_title, sxe, upcoming_shows[show]['id'], upcoming_shows[show]['title'], - upcoming_shows[show]['airDate'])) - - for series_title, sxe, sonarr_id, episode_title, air_date in upcoming: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Soon", - "sonarrId": sonarr_id, - "server": server_id - }, - "time": now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date + ) + + def get_future(self, future_days): + endpoint = '/api/calendar/' + future = str(date.today() + timedelta(days=future_days)) + + for server in self.servers: + air_days = [] + headers = {'X-Api-Key': server.api_key} + params = {'start': self.today, 'end': future} + + get = self.session.get(server.url + endpoint, params=params, headers=headers).json() + tv_shows = [TVShow(**show) for show in get] + + for show in tv_shows: + sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) + air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id)) + + for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: + self.influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Future", + "sonarrId": sonarr_id, + "server": server.id + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + 
"sxe": sxe, + "airs": air_date, + "downloaded": dl_status + } } - } - ) - # Empty upcoming or else things get foo bared - upcoming = [] - - return influx_payload - - -def get_future_shows(future_days): - # Set the time here so we have one timestamp to work with - now = now_iso() - - today = str(date.today()) - future = str(date.today()+timedelta(days=future_days)) - air_days = [] - influx_payload = [] - - for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: - - headers = {'X-Api-Key': sonarr_api_key} - - get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=200'.format(sonarr_url, today, future), - headers=headers).json() - - tv_shows = {d['id']: d for d in get_tv_shows} - - for show in tv_shows.keys(): - series_title = '{}'.format(tv_shows[show]['series']['title']) - dl_status = int(tv_shows[show]['hasFile']) - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber']) - air_days.append((series_title, dl_status, sxe, tv_shows[show]['title'], tv_shows[show]['airDate'], - tv_shows[show]['id'])) - - for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Future", - "sonarrId": sonarr_id, - "server": server_id - }, - "time": now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date, - "downloaded": dl_status + ) + + def get_queue(self): + endpoint = '/api/queue' + + for server in self.servers: + queue = [] + headers = {'X-Api-Key': server.api_key} + + get = self.session.get(server.url + endpoint, headers=headers).json() + download_queue = [Queue(**show) for show in get] + + for show in download_queue: + sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber']) + if show.protocol.upper() == 'USENET': + protocol_id = 1 + else: + protocol_id = 0 + + queue.append((show.series['title'], show.episode['title'], 
show.protocol.upper(), + protocol_id, sxe, show.id)) + + for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: + self.influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Queue", + "sonarrId": sonarr_id, + "server": server.id + + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "protocol": protocol, + "protocol_id": protocol_id + } } - } - ) - # Empty air_days or else things get foo bared - air_days = [] - - return influx_payload - - -def get_queue_shows(): - # Set the time here so we have one timestamp to work with - now = now_iso() - queue = [] - influx_payload = [] - - for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list: - - headers = {'X-Api-Key': sonarr_api_key} - - get_tv_shows = requests.get('{}/api/queue'.format(sonarr_url), - headers=headers).json() - - tv_shows = {d['id']: d for d in get_tv_shows} - - for show in tv_shows.keys(): - series_title = '{}'.format(tv_shows[show]['series']['title']) - episode_title = '{}'.format(tv_shows[show]['episode']['title']) - protocol = tv_shows[show]['protocol'].upper() - sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['episode']['seasonNumber'], - tv_shows[show]['episode']['episodeNumber']) - if protocol == 'USENET': - protocol_id = 1 - else: - protocol_id = 0 - - queue.append((series_title, episode_title, protocol, protocol_id, sxe, tv_shows[show]['id'])) - - for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Queue", - "sonarrId": sonarr_id, - "server": server_id - - }, - "time": now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "protocol": protocol, - "protocol_id": protocol_id - } - } - ) - - # Empty queue or else things get foo bared - queue = [] + ) - return influx_payload + def influx_push(self): + # TODO: error handling for failed connection + 
self.influx.write_points(self.influx_payload) if __name__ == "__main__": @@ -263,25 +190,33 @@ def get_queue_shows(): description='Script to aid in data gathering from Sonarr', formatter_class=RawTextHelpFormatter) - parser.add_argument("--missing", action='store_true', help='Get all missing TV shows') - parser.add_argument("--missing_days", type=int, help='Get missing TV shows in past X days') + parser.add_argument("--missing", metavar='$days', type=int, help='Get missing TV shows in past X days' + '\ni.e. --missing 7 is in the last week') + parser.add_argument("--missing_days", metavar='$days', type=int, help='legacy command. Deprecated in favor of' + ' --missing' + '\nfunctions identically to --missing' + '\nNote: Will be removed in a future release') parser.add_argument("--upcoming", action='store_true', help='Get upcoming TV shows') - parser.add_argument("--future", type=int, help='Get TV shows on X days into the future. Includes today.' - '\ni.e. --future 2 is Today and Tomorrow') + parser.add_argument("--future", metavar='$days', type=int, help='Get TV shows on X days into the future. ' + 'Includes today.' + '\ni.e. 
--future 2 is Today and Tomorrow') parser.add_argument("--queue", action='store_true', help='Get TV shows in queue') opts = parser.parse_args() + sonarr = SonarrAPI() - if opts.missing: - influx_sender(get_all_missing_shows()) - elif opts.missing_days: - influx_sender(get_missing_shows(opts.missing_days)) - elif opts.upcoming: - influx_sender(get_upcoming_shows()) - elif opts.future: - influx_sender(get_future_shows(opts.future)) - elif opts.queue: - influx_sender(get_queue_shows()) - elif len(sys.argv) == 1: + if len(sys.argv) == 1: parser.print_help(sys.stderr) sys.exit(1) + + if any([opts.missing, opts.missing_days]): + days = opts.missing if opts.missing else opts.missing_days + sonarr.get_missing(days) + if opts.upcoming: + sonarr.get_upcoming() + if opts.future: + sonarr.get_future(opts.future) + if opts.queue: + sonarr.get_queue() + + sonarr.influx_push() From 5e878c78a475dcbe5b8de981711b0d29ba3e8a3d Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 18:08:59 -0600 Subject: [PATCH 03/90] formatting, and some notation --- sonarr.py | 36 ++++++++++++++++++++++-------------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/sonarr.py b/sonarr.py index f62c0b43..2655937b 100644 --- a/sonarr.py +++ b/sonarr.py @@ -2,32 +2,37 @@ # Do not edit this script. 
Edit configuration.py import sys import requests -from datetime import datetime, timezone, date, timedelta -from influxdb import InfluxDBClient import argparse -from argparse import RawTextHelpFormatter +from influxdb import InfluxDBClient +from datetime import datetime, timezone, date, timedelta + import configuration as config from helpers import Server, TVShow, Queue class SonarrAPI(object): + # Sets None as default for all TVShow NamedTuples, because sonarr's response json is inconsistent TVShow.__new__.__defaults__ = (None,) * len(TVShow._fields) def __init__(self): + # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.today = str(date.today()) self.influx = InfluxDBClient(config.influxdb_url, config.influxdb_port, config.influxdb_username, config.influxdb_password, config.sonarr_influxdb_db_name) self.influx_payload = [] self.servers = self.get_servers() + # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} @staticmethod def get_servers(): + # Ensure sonarr servers have been defined if not config.sonarr_server_list: sys.exit("No Sonarr servers defined in config") + # Build Server Objects from config servers = [] for url, api_key, server_id in config.sonarr_server_list: servers.append(Server(url=url, api_key=api_key, id=server_id)) @@ -44,8 +49,10 @@ def get_missing(self, days_past): headers = {'X-Api-Key': server.api_key} get = self.session.get(server.url + endpoint, params=params, headers=headers).json() + # Iteratively create a list of TVShow Objects from response json tv_shows = [TVShow(**show) for show in get] + # Add show to missing list if file does not exist for show in tv_shows: if not show.hasFile: sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) @@ -188,18 +195,19 @@ def influx_push(self): if __name__ == "__main__": parser = argparse.ArgumentParser(prog='Sonarr stats 
operations', description='Script to aid in data gathering from Sonarr', - formatter_class=RawTextHelpFormatter) - - parser.add_argument("--missing", metavar='$days', type=int, help='Get missing TV shows in past X days' - '\ni.e. --missing 7 is in the last week') - parser.add_argument("--missing_days", metavar='$days', type=int, help='legacy command. Deprecated in favor of' - ' --missing' - '\nfunctions identically to --missing' - '\nNote: Will be removed in a future release') + formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument("--missing", metavar='$days', type=int, + help='Get missing TV shows in past X days' + '\ni.e. --missing 7 is in the last week') + parser.add_argument("--missing_days", metavar='$days', type=int, + help='legacy command. Deprecated in favor of --missing' + '\nfunctions identically to --missing' + '\nNote: Will be removed in a future release') parser.add_argument("--upcoming", action='store_true', help='Get upcoming TV shows') - parser.add_argument("--future", metavar='$days', type=int, help='Get TV shows on X days into the future. ' - 'Includes today.' - '\ni.e. --future 2 is Today and Tomorrow') + parser.add_argument("--future", metavar='$days', type=int, + help='Get TV shows on X days into the future. Includes today.' + '\ni.e. 
--future 2 is Today and Tomorrow') parser.add_argument("--queue", action='store_true', help='Get TV shows in queue') opts = parser.parse_args() From d8a48bd6ac561c68505c88190671f0e4e4680813 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 18:29:36 -0600 Subject: [PATCH 04/90] formatting, and some notation --- .gitignore | 3 ++- cisco_asa.py => Varken/cisco_asa.py | 2 +- configuration.example.py => Varken/configuration.example.py | 0 helpers.py => Varken/helpers.py | 0 ombi.py => Varken/ombi.py | 2 +- radarr.py => Varken/radarr.py | 2 +- raid_init.py => Varken/raid_init.py | 0 san.py => Varken/san.py | 0 sonarr.py => Varken/sonarr.py | 4 ++-- tautulli.py => Varken/tautulli.py | 2 +- Varken/varken.py | 0 11 files changed, 8 insertions(+), 7 deletions(-) rename cisco_asa.py => Varken/cisco_asa.py (97%) rename configuration.example.py => Varken/configuration.example.py (100%) rename helpers.py => Varken/helpers.py (100%) rename ombi.py => Varken/ombi.py (98%) rename radarr.py => Varken/radarr.py (99%) rename raid_init.py => Varken/raid_init.py (100%) rename san.py => Varken/san.py (100%) rename sonarr.py => Varken/sonarr.py (99%) rename tautulli.py => Varken/tautulli.py (99%) create mode 100644 Varken/varken.py diff --git a/.gitignore b/.gitignore index 4427ab9b..b7dbb729 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,8 @@ .Trashes ehthumbs.db Thumbs.db -configuration.py +Varken/configuration.py __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz +.idea/ diff --git a/cisco_asa.py b/Varken/cisco_asa.py similarity index 97% rename from cisco_asa.py rename to Varken/cisco_asa.py index 26f439a9..bb44c93a 100644 --- a/cisco_asa.py +++ b/Varken/cisco_asa.py @@ -3,7 +3,7 @@ from datetime import datetime, timezone from influxdb import InfluxDBClient -import configuration +from Varken import configuration current_time = datetime.now(timezone.utc).astimezone().isoformat() diff --git a/configuration.example.py b/Varken/configuration.example.py similarity 
index 100% rename from configuration.example.py rename to Varken/configuration.example.py diff --git a/helpers.py b/Varken/helpers.py similarity index 100% rename from helpers.py rename to Varken/helpers.py diff --git a/ombi.py b/Varken/ombi.py similarity index 98% rename from ombi.py rename to Varken/ombi.py index 5aa18124..898b8c8b 100644 --- a/ombi.py +++ b/Varken/ombi.py @@ -5,7 +5,7 @@ from influxdb import InfluxDBClient import argparse from argparse import RawTextHelpFormatter -import configuration +from Varken import configuration headers = {'Apikey': configuration.ombi_api_key} diff --git a/radarr.py b/Varken/radarr.py similarity index 99% rename from radarr.py rename to Varken/radarr.py index 2358a733..25ddff91 100644 --- a/radarr.py +++ b/Varken/radarr.py @@ -5,7 +5,7 @@ from influxdb import InfluxDBClient import argparse from argparse import RawTextHelpFormatter -import configuration +from Varken import configuration def now_iso(): diff --git a/raid_init.py b/Varken/raid_init.py similarity index 100% rename from raid_init.py rename to Varken/raid_init.py diff --git a/san.py b/Varken/san.py similarity index 100% rename from san.py rename to Varken/san.py diff --git a/sonarr.py b/Varken/sonarr.py similarity index 99% rename from sonarr.py rename to Varken/sonarr.py index 2655937b..203a30ee 100644 --- a/sonarr.py +++ b/Varken/sonarr.py @@ -6,8 +6,8 @@ from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta -import configuration as config -from helpers import Server, TVShow, Queue +from Varken import configuration as config +from Varken.helpers import Server, TVShow, Queue class SonarrAPI(object): diff --git a/tautulli.py b/Varken/tautulli.py similarity index 99% rename from tautulli.py rename to Varken/tautulli.py index 96f61eda..70656df8 100644 --- a/tautulli.py +++ b/Varken/tautulli.py @@ -6,7 +6,7 @@ import geoip2.database from influxdb import InfluxDBClient import requests -import configuration +from Varken import 
configuration CURRENT_TIME = datetime.now(timezone.utc).astimezone().isoformat() diff --git a/Varken/varken.py b/Varken/varken.py new file mode 100644 index 00000000..e69de29b From 8e2ef071ee328ad5f8de9ca82f9d404084cf5d51 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 19:26:16 -0600 Subject: [PATCH 05/90] added example config.ini --- .gitignore | 1 + Varken/varken.example.ini | 62 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 Varken/varken.example.ini diff --git a/.gitignore b/.gitignore index b7dbb729..5db3f8d5 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ ehthumbs.db Thumbs.db Varken/configuration.py +Varken/config.ini __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini new file mode 100644 index 00000000..4bfa5bfc --- /dev/null +++ b/Varken/varken.example.ini @@ -0,0 +1,62 @@ +#Notes: +# - Sonarr + Radarr scripts support multiple servers. You can remove the second +# server by putting a # in front of the lines and section name, and removing +# that number from your server_ids list +# - tautulli_failback_ip, This is used when there is no IP listed in tautulli. +# This can happen when you are streaming locally. This is usually your public IP. 
+ +[global] +sonarr_server_ids = 1,2 +radarr_server_ids = 1,2 +ombi = true +tautulli = true +asa = false + +[influxdb] +url = influxdb.domain.tld +port = 8086 +username = root +password = root + +[sonarr-1] +url = sonarr1.domain.tld +apikey = xxxxxxxxxxxxxxxx +#ssl = true +#verify_ssl = false + +[sonarr-2] +url = sonarr2.domain.tld +apikey = yyyyyyyyyyyyyyyy +#ssl = true +#verify_ssl = false + +[radarr-1] +url = radarr1.domain.tld +apikey = xxxxxxxxxxxxxxxx +#ssl = true +#verify_ssl = false + +[radarr-2] +url = radarr2.domain.tld +apikey = yyyyyyyyyyyyyyyy +#ssl = true +#verify_ssl = false + +[ombi] +url = ombi.domain.tld +apikey = xxxxxxxxxxxxxxxx +#ssl = true +#verify_ssl = false + +[tautulli] +url = tautulli.domain.tld +tautulli_failback_ip = 0.0.0.0 +apikey = xxxxxxxxxxxxxxxx +#ssl = true +#verify_ssl = false +influx_db = plex + +[asa] +username = cisco +password = cisco +influx_db = asa From 7d7a161fb93ddc35f2b007c79347a90f5a35e16c Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 19:31:38 -0600 Subject: [PATCH 06/90] minor change in example --- Varken/varken.example.ini | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini index 4bfa5bfc..9d550f49 100644 --- a/Varken/varken.example.ini +++ b/Varken/varken.example.ini @@ -1,4 +1,4 @@ -#Notes: +# Notes: # - Sonarr + Radarr scripts support multiple servers. 
You can remove the second # server by putting a # in front of the lines and section name, and removing # that number from your server_ids list @@ -21,39 +21,39 @@ password = root [sonarr-1] url = sonarr1.domain.tld apikey = xxxxxxxxxxxxxxxx -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true [sonarr-2] url = sonarr2.domain.tld apikey = yyyyyyyyyyyyyyyy -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true [radarr-1] url = radarr1.domain.tld apikey = xxxxxxxxxxxxxxxx -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true [radarr-2] url = radarr2.domain.tld apikey = yyyyyyyyyyyyyyyy -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true [ombi] url = ombi.domain.tld apikey = xxxxxxxxxxxxxxxx -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true [tautulli] url = tautulli.domain.tld tautulli_failback_ip = 0.0.0.0 apikey = xxxxxxxxxxxxxxxx -#ssl = true -#verify_ssl = false +ssl = false +verify_ssl = true influx_db = plex [asa] From e788db273a9c8943895bbe0992c3b8dbb4b51bfc Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 21:20:32 -0600 Subject: [PATCH 07/90] added default values to helper classes --- .gitignore | 1 + Varken/__init__.py | 0 Varken/helpers.py | 79 +++++++++++++++++++++++++++------------------- 3 files changed, 48 insertions(+), 32 deletions(-) create mode 100644 Varken/__init__.py diff --git a/.gitignore b/.gitignore index 5db3f8d5..942e550b 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz .idea/ +Varken/varken.ini diff --git a/Varken/__init__.py b/Varken/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/Varken/helpers.py b/Varken/helpers.py index c30773fe..45157f2e 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -2,41 +2,56 @@ class TVShow(NamedTuple): - seriesId: int - episodeFileId: int - seasonNumber: int - episodeNumber: int - title: str - airDate: str - airDateUtc: str - overview: 
str - episodeFile: dict - hasFile: bool - monitored: bool - unverifiedSceneNumbering: bool - absoluteEpisodeNumber: int - series: dict - id: int + seriesId: int = None + episodeFileId: int = None + seasonNumber: int = None + episodeNumber: int = None + title: str = None + airDate: str = None + airDateUtc: str = None + overview: str = None + episodeFile: dict = None + hasFile: bool = None + monitored: bool = None + unverifiedSceneNumbering: bool = None + absoluteEpisodeNumber: int = None + series: dict = None + id: int = None class Queue(NamedTuple): - series: dict - episode: dict - quality: dict - size: float - title: str - sizeleft: float - timeleft: str - estimatedCompletionTime: str - status: str - trackedDownloadStatus: str - statusMessages: list - downloadId: str - protocol: str - id: int + series: dict = None + episode: dict = None + quality: dict = None + size: float = None + title: str = None + sizeleft: float = None + timeleft: str = None + estimatedCompletionTime: str = None + status: str = None + trackedDownloadStatus: str = None + statusMessages: list = None + downloadId: str = None + protocol: str = None + id: int = None class Server(NamedTuple): - url: str - api_key: str - id: int + id: int = None + url: str = None + api_key: str = None + verify_ssl: bool = False + + +class TautulliServer(NamedTuple): + url: str = None + fallback_ip: str = None + apikey: str = None + verify_ssl: bool = None + influx_db: str = None + +class InfluxServer(NamedTuple): + url: str = 'localhost' + port: int = 8086 + username: str = 'root' + password: str = 'root' \ No newline at end of file From e807a88b13301d8e02e17b08481e25c7ea453592 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 21:20:56 -0600 Subject: [PATCH 08/90] added missing keys to example.ini --- Varken/varken.example.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini index 9d550f49..c037f55c 100644 --- 
a/Varken/varken.example.ini +++ b/Varken/varken.example.ini @@ -50,13 +50,16 @@ verify_ssl = true [tautulli] url = tautulli.domain.tld -tautulli_failback_ip = 0.0.0.0 +fallback_ip = 0.0.0.0 apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true influx_db = plex [asa] +url = firewall.domain.tld username = cisco password = cisco influx_db = asa +ssl = false +verify_ssl = true \ No newline at end of file From 7e3db8af7278116b428312eda62712e81a2e1c98 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 21:21:17 -0600 Subject: [PATCH 09/90] Created INIParser.py to read config file --- Varken/iniparser.py | 101 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100644 Varken/iniparser.py diff --git a/Varken/iniparser.py b/Varken/iniparser.py new file mode 100644 index 00000000..6706c530 --- /dev/null +++ b/Varken/iniparser.py @@ -0,0 +1,101 @@ +import sys +import configparser + +from Varken.helpers import Server, TautulliServer, InfluxServer + + +class INIParser(object): + def __init__(self): + self.config = configparser.ConfigParser() + + self.influx_server = InfluxServer() + self.sonarr_enabled = False + self.sonarr_servers = [] + self.radarr_enabled = False + self.radarr_servers = [] + self.ombi_enabled = False + self.ombi_server = None + self.tautulli_enabled = False + self.tautulli_server = None + self.read_file() + self.parse_opts() + + def read_file(self): + with open('varken.ini') as config_ini: + self.config.read_file(config_ini) + + def parse_opts(self): + # Parse InfluxDB options + url = self.config.get('influxdb', 'url') + port = self.config.getint('influxdb', 'port') + username = self.config.get('influxdb', 'username') + password = self.config.get('influxdb', 'password') + + self.influx_server = InfluxServer(url, port, username, password) + + # Parse Sonarr options + try: + if not self.config.getboolean('global', 'sonarr_server_ids'): + sys.exit('sonarr_server_ids must be either false, or a 
comma-separated list of server ids') + except ValueError: + self.sonarr_enabled = True + sids = self.config.get('global', 'sonarr_server_ids').strip(' ').split(',') + + for server_id in sids: + sonarr_section = 'sonarr-' + server_id + url = self.config.get(sonarr_section, 'url') + apikey = self.config.get(sonarr_section, 'apikey') + scheme = 'https://' if self.config.getboolean(sonarr_section, 'ssl') else 'http://' + verify_ssl = self.config.getboolean(sonarr_section, 'verify_ssl') + + self.sonarr_servers.append(Server(server_id, scheme + url, apikey, verify_ssl)) + + # Parse Radarr options + try: + if not self.config.getboolean('global', 'radarr_server_ids'): + sys.exit('radarr_server_ids must be either false, or a comma-separated list of server ids') + except ValueError: + self.radarr_enabled = True + sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',') + for server_id in sids: + radarr_section = 'sonarr-' + server_id + url = self.config.get(radarr_section, 'url') + apikey = self.config.get(radarr_section, 'apikey') + scheme = 'https://' if self.config.getboolean(radarr_section, 'ssl') else 'http://' + verify_ssl = self.config.getboolean(radarr_section, 'verify_ssl') + + self.radarr_servers.append(Server(server_id, scheme + url, apikey, verify_ssl)) + + # Parse Tautulli options + if self.config.getboolean('global', 'tautulli'): + self.tautulli_enabled = True + url = self.config.get('tautulli', 'url') + fallback_ip = self.config.get('tautulli', 'fallback_ip') + apikey = self.config.get('tautulli', 'apikey') + scheme = 'https://' if self.config.getboolean('tautulli', 'ssl') else 'http://' + verify_ssl = self.config.getboolean('tautulli', 'verify_ssl') + db_name = self.config.get('tautulli', 'influx_db') + + self.tautulli_server = TautulliServer(scheme + url, fallback_ip, apikey, verify_ssl, db_name) + + # Parse Ombi Options + if self.config.getboolean('global', 'ombi'): + self.tautulli_enabled = True + url = self.config.get('ombi', 'url') + 
apikey = self.config.get('ombi', 'apikey') + scheme = 'https://' if self.config.getboolean('ombi', 'ssl') else 'http://' + verify_ssl = self.config.getboolean('ombi', 'verify_ssl') + + self.ombi_server = Server(url=scheme + url, api_key=apikey, verify_ssl=verify_ssl) + + # Parse ASA opts + if self.config.getboolean('global', 'asa'): + self.tautulli_enabled = True + url = self.config.get('asa', 'url') + username = self.config.get('asa', 'username') + password = self.config.get('asa', 'password') + scheme = 'https://' if self.config.getboolean('asa', 'ssl') else 'http://' + verify_ssl = self.config.getboolean('asa', 'verify_ssl') + db_name = self.config.get('asa', 'influx_db') + + self.ombi_server = (scheme + url, username, password, verify_ssl, db_name) From 08fc6fb04d76570afaf1cbad7cc8f6a99c2bd968 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 21:23:33 -0600 Subject: [PATCH 10/90] the copy pasta was too strong --- Varken/iniparser.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 6706c530..915b576a 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -17,6 +17,8 @@ def __init__(self): self.ombi_server = None self.tautulli_enabled = False self.tautulli_server = None + self.asa_enabled = False + self.asa = None self.read_file() self.parse_opts() @@ -80,7 +82,7 @@ def parse_opts(self): # Parse Ombi Options if self.config.getboolean('global', 'ombi'): - self.tautulli_enabled = True + self.ombi_enabled = True url = self.config.get('ombi', 'url') apikey = self.config.get('ombi', 'apikey') scheme = 'https://' if self.config.getboolean('ombi', 'ssl') else 'http://' @@ -90,7 +92,7 @@ def parse_opts(self): # Parse ASA opts if self.config.getboolean('global', 'asa'): - self.tautulli_enabled = True + self.asa_enabled = True url = self.config.get('asa', 'url') username = self.config.get('asa', 'username') password = self.config.get('asa', 'password') @@ -98,4 +100,4 @@ def 
parse_opts(self): verify_ssl = self.config.getboolean('asa', 'verify_ssl') db_name = self.config.get('asa', 'influx_db') - self.ombi_server = (scheme + url, username, password, verify_ssl, db_name) + self.asa = (scheme + url, username, password, verify_ssl, db_name) From c18ed692fc875572a2b838e1eeb096530cccfc2b Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:19:42 -0600 Subject: [PATCH 11/90] split off sonarrserver as a class --- Varken/helpers.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Varken/helpers.py b/Varken/helpers.py index 45157f2e..5d9e4568 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -35,6 +35,14 @@ class Queue(NamedTuple): protocol: str = None id: int = None +class SonarrServer(NamedTuple): + id: int = None + url: str = None + api_key: str = None + verify_ssl: bool = False + missing_days: int = None + future_days: int = None + queue: bool = False class Server(NamedTuple): id: int = None From 2efdb7651bb5c115a634f54f2b0c33c3a6a50fed Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:20:09 -0600 Subject: [PATCH 12/90] var separation and args added to sonarr --- Varken/iniparser.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 915b576a..b4573432 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,25 +1,29 @@ import sys import configparser -from Varken.helpers import Server, TautulliServer, InfluxServer - +from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer class INIParser(object): def __init__(self): self.config = configparser.ConfigParser() self.influx_server = InfluxServer() + self.sonarr_enabled = False self.sonarr_servers = [] + self.radarr_enabled = False self.radarr_servers = [] + self.ombi_enabled = False self.ombi_server = None + self.tautulli_enabled = False self.tautulli_server = None + self.asa_enabled = False self.asa = None - self.read_file() + 
self.parse_opts() def read_file(self): @@ -27,6 +31,7 @@ def read_file(self): self.config.read_file(config_ini) def parse_opts(self): + self.read_file() # Parse InfluxDB options url = self.config.get('influxdb', 'url') port = self.config.getint('influxdb', 'port') @@ -49,8 +54,12 @@ def parse_opts(self): apikey = self.config.get(sonarr_section, 'apikey') scheme = 'https://' if self.config.getboolean(sonarr_section, 'ssl') else 'http://' verify_ssl = self.config.getboolean(sonarr_section, 'verify_ssl') + queue = self.config.getboolean(sonarr_section, 'queue') + missing_days = self.config.getint(sonarr_section, 'missing_days') + future_days = self.config.getint(sonarr_section, 'future_days') - self.sonarr_servers.append(Server(server_id, scheme + url, apikey, verify_ssl)) + self.sonarr_servers.append(SonarrServer(server_id, scheme + url, apikey, verify_ssl, + missing_days, future_days, queue)) # Parse Radarr options try: @@ -60,7 +69,7 @@ def parse_opts(self): self.radarr_enabled = True sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',') for server_id in sids: - radarr_section = 'sonarr-' + server_id + radarr_section = 'radarr-' + server_id url = self.config.get(radarr_section, 'url') apikey = self.config.get(radarr_section, 'apikey') scheme = 'https://' if self.config.getboolean(radarr_section, 'ssl') else 'http://' From 56db26452327d7dbc14440e5d5aca657121956ce Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:20:25 -0600 Subject: [PATCH 13/90] leaving template --- Varken/logger.py | 1 + 1 file changed, 1 insertion(+) create mode 100644 Varken/logger.py diff --git a/Varken/logger.py b/Varken/logger.py new file mode 100644 index 00000000..b1b2b42d --- /dev/null +++ b/Varken/logger.py @@ -0,0 +1 @@ +import functools \ No newline at end of file From dbece1968d9ecaaebb4cf4261835747f1ac310b3 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:20:42 -0600 Subject: [PATCH 14/90] migrated sonarr.py to new 
style --- Varken/sonarr.py | 56 +++--------------------------------------------- 1 file changed, 3 insertions(+), 53 deletions(-) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 203a30ee..22eaeb8c 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -7,38 +7,22 @@ from datetime import datetime, timezone, date, timedelta from Varken import configuration as config -from Varken.helpers import Server, TVShow, Queue +from Varken.helpers import TVShow, Queue class SonarrAPI(object): - # Sets None as default for all TVShow NamedTuples, because sonarr's response json is inconsistent - TVShow.__new__.__defaults__ = (None,) * len(TVShow._fields) - - def __init__(self): + def __init__(self, servers): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.today = str(date.today()) self.influx = InfluxDBClient(config.influxdb_url, config.influxdb_port, config.influxdb_username, config.influxdb_password, config.sonarr_influxdb_db_name) self.influx_payload = [] - self.servers = self.get_servers() + self.servers = servers # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} - @staticmethod - def get_servers(): - # Ensure sonarr servers have been defined - if not config.sonarr_server_list: - sys.exit("No Sonarr servers defined in config") - - # Build Server Objects from config - servers = [] - for url, api_key, server_id in config.sonarr_server_list: - servers.append(Server(url=url, api_key=api_key, id=server_id)) - - return servers - def get_missing(self, days_past): endpoint = '/api/calendar' last_days = str(date.today() + timedelta(days=-days_past)) @@ -77,39 +61,6 @@ def get_missing(self, days_past): } ) - def get_upcoming(self): - endpoint = '/api/calendar/' - - for server in self.servers: - upcoming = [] - headers = {'X-Api-Key': server.api_key} - - get = self.session.get(server.url + endpoint, 
headers=headers).json() - tv_shows = [TVShow(**show) for show in get] - - for show in tv_shows: - sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) - upcoming.append((show.series['title'], sxe, show.id, show.title, show.airDate)) - - for series_title, sxe, sonarr_id, episode_title, air_date in upcoming: - self.influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Soon", - "sonarrId": sonarr_id, - "server": server.id - }, - "time": self.now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date - } - } - ) - def get_future(self, future_days): endpoint = '/api/calendar/' future = str(date.today() + timedelta(days=future_days)) @@ -204,7 +155,6 @@ def influx_push(self): help='legacy command. Deprecated in favor of --missing' '\nfunctions identically to --missing' '\nNote: Will be removed in a future release') - parser.add_argument("--upcoming", action='store_true', help='Get upcoming TV shows') parser.add_argument("--future", metavar='$days', type=int, help='Get TV shows on X days into the future. Includes today.' '\ni.e. 
--future 2 is Today and Tomorrow') From 6c3ec967f80d1c92859c8807216803776b383a0d Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:20:55 -0600 Subject: [PATCH 15/90] added args to ini --- Varken/varken.example.ini | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini index c037f55c..39a60730 100644 --- a/Varken/varken.example.ini +++ b/Varken/varken.example.ini @@ -23,12 +23,18 @@ url = sonarr1.domain.tld apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true +missing_days = 7 +future_days = 1 +queue = true [sonarr-2] url = sonarr2.domain.tld apikey = yyyyyyyyyyyyyyyy ssl = false verify_ssl = true +missing_days = 7 +future_days = 1 +queue = true [radarr-1] url = radarr1.domain.tld From cbbc3c5942e245420705636c793daec964713d6e Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:21:16 -0600 Subject: [PATCH 16/90] initial scheduling test --- Varken/varken.py | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/Varken/varken.py b/Varken/varken.py index e69de29b..4569f953 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -0,0 +1,36 @@ +import schedule +import threading +import functools +from time import sleep + +from Varken.iniparser import INIParser +from Varken.sonarr import SonarrAPI + +def logging(function): + @functools.wraps(function) + def wrapper(*args, **kwargs): + print('LOG: Running job "%s"' % function.__name__) + result = function(*args, **kwargs) + print('LOG: Job "%s" completed' % function.__name__) + return result + + return wrapper + +@logging +def threaded(job): + thread = threading.Thread(target=job) + thread.start() + +if __name__ == "__main__": + CONFIG = INIParser() + + if CONFIG.sonarr_enabled: + SONARR = SonarrAPI(CONFIG.sonarr_servers) + for server in CONFIG.sonarr_servers: + if server.queue: + schedule.every().minute.do(threaded, SONARR.get_queue) + + while True: + schedule.run_pending() + sleep(1) + From 
4a38e2d16220634d454042bbad55863341b240f3 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:26:00 -0600 Subject: [PATCH 17/90] passed influx_seraver to sonarrapi class --- Varken/sonarr.py | 9 ++++----- Varken/varken.py | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 22eaeb8c..0ba4dcfb 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -6,19 +6,18 @@ from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta -from Varken import configuration as config from Varken.helpers import TVShow, Queue class SonarrAPI(object): - def __init__(self, servers): + def __init__(self, sonarr_servers, influx_server): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.today = str(date.today()) - self.influx = InfluxDBClient(config.influxdb_url, config.influxdb_port, config.influxdb_username, - config.influxdb_password, config.sonarr_influxdb_db_name) + self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, + influx_server.password, 'plex') self.influx_payload = [] - self.servers = servers + self.servers = sonarr_servers # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} diff --git a/Varken/varken.py b/Varken/varken.py index 4569f953..1b9dac44 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -25,7 +25,7 @@ def threaded(job): CONFIG = INIParser() if CONFIG.sonarr_enabled: - SONARR = SonarrAPI(CONFIG.sonarr_servers) + SONARR = SonarrAPI(CONFIG.sonarr_servers, CONFIG.influx_server) for server in CONFIG.sonarr_servers: if server.queue: schedule.every().minute.do(threaded, SONARR.get_queue) From 62520eae6d758fccd891e7a0158005bd744ae5ac Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 22:57:10 -0600 Subject: [PATCH 18/90] bunch of tweaks. 
Scheduling is working --- Varken/iniparser.py | 4 +++ Varken/logger.py | 12 ++++++++- Varken/sonarr.py | 64 ++++++++++++++------------------------------- Varken/varken.py | 20 ++++++-------- 4 files changed, 42 insertions(+), 58 deletions(-) diff --git a/Varken/iniparser.py b/Varken/iniparser.py index b4573432..233db540 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -44,8 +44,12 @@ def parse_opts(self): try: if not self.config.getboolean('global', 'sonarr_server_ids'): sys.exit('sonarr_server_ids must be either false, or a comma-separated list of server ids') + elif self.config.getint('global', 'sonarr_server_ids'): + self.sonarr_enabled = True except ValueError: self.sonarr_enabled = True + + if self.sonarr_enabled: sids = self.config.get('global', 'sonarr_server_ids').strip(' ').split(',') for server_id in sids: diff --git a/Varken/logger.py b/Varken/logger.py index b1b2b42d..689dd37e 100644 --- a/Varken/logger.py +++ b/Varken/logger.py @@ -1 +1,11 @@ -import functools \ No newline at end of file +import functools + +def logging(function): + @functools.wraps(function) + def wrapper(*args, **kwargs): + print('LOG: Running job "%s"' % function.__name__) + result = function(*args, **kwargs) + print('LOG: Job "%s" completed' % function.__name__) + return result + + return wrapper \ No newline at end of file diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 0ba4dcfb..21cf87c5 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -1,11 +1,10 @@ #!/usr/bin/env python3 # Do not edit this script. 
Edit configuration.py -import sys import requests -import argparse from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta +from Varken.logger import logging from Varken.helpers import TVShow, Queue @@ -16,16 +15,17 @@ def __init__(self, sonarr_servers, influx_server): self.today = str(date.today()) self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex') - self.influx_payload = [] self.servers = sonarr_servers # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} + @logging def get_missing(self, days_past): endpoint = '/api/calendar' last_days = str(date.today() + timedelta(days=-days_past)) params = {'start': last_days, 'end': self.today} + influx_payload = [] for server in self.servers: missing = [] @@ -42,7 +42,7 @@ def get_missing(self, days_past): missing.append((show.series['title'], sxe, show.airDate, show.title, show.id)) for series_title, sxe, air_date, episode_title, sonarr_id in missing: - self.influx_payload.append( + influx_payload.append( { "measurement": "Sonarr", "tags": { @@ -60,12 +60,17 @@ def get_missing(self, days_past): } ) + self.influx_push(influx_payload) + + @logging def get_future(self, future_days): endpoint = '/api/calendar/' future = str(date.today() + timedelta(days=future_days)) + influx_payload = [] for server in self.servers: air_days = [] + headers = {'X-Api-Key': server.api_key} params = {'start': self.today, 'end': future} @@ -77,7 +82,7 @@ def get_future(self, future_days): air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id)) for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: - self.influx_payload.append( + influx_payload.append( { "measurement": "Sonarr", "tags": { @@ -96,7 +101,11 @@ def get_future(self, future_days): } ) + 
self.influx_push(influx_payload) + + @logging def get_queue(self): + influx_payload = [] endpoint = '/api/queue' for server in self.servers: @@ -117,7 +126,7 @@ def get_queue(self): protocol_id, sxe, show.id)) for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: - self.influx_payload.append( + influx_payload.append( { "measurement": "Sonarr", "tags": { @@ -137,43 +146,8 @@ def get_queue(self): } ) - def influx_push(self): + self.influx_push(influx_payload) + + def influx_push(self, payload): # TODO: error handling for failed connection - self.influx.write_points(self.influx_payload) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog='Sonarr stats operations', - description='Script to aid in data gathering from Sonarr', - formatter_class=argparse.RawTextHelpFormatter) - - parser.add_argument("--missing", metavar='$days', type=int, - help='Get missing TV shows in past X days' - '\ni.e. --missing 7 is in the last week') - parser.add_argument("--missing_days", metavar='$days', type=int, - help='legacy command. Deprecated in favor of --missing' - '\nfunctions identically to --missing' - '\nNote: Will be removed in a future release') - parser.add_argument("--future", metavar='$days', type=int, - help='Get TV shows on X days into the future. Includes today.' - '\ni.e. 
--future 2 is Today and Tomorrow') - parser.add_argument("--queue", action='store_true', help='Get TV shows in queue') - - opts = parser.parse_args() - sonarr = SonarrAPI() - - if len(sys.argv) == 1: - parser.print_help(sys.stderr) - sys.exit(1) - - if any([opts.missing, opts.missing_days]): - days = opts.missing if opts.missing else opts.missing_days - sonarr.get_missing(days) - if opts.upcoming: - sonarr.get_upcoming() - if opts.future: - sonarr.get_future(opts.future) - if opts.queue: - sonarr.get_queue() - - sonarr.influx_push() + self.influx.write_points(payload) diff --git a/Varken/varken.py b/Varken/varken.py index 1b9dac44..0af850dd 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -1,23 +1,13 @@ import schedule import threading -import functools from time import sleep from Varken.iniparser import INIParser from Varken.sonarr import SonarrAPI -def logging(function): - @functools.wraps(function) - def wrapper(*args, **kwargs): - print('LOG: Running job "%s"' % function.__name__) - result = function(*args, **kwargs) - print('LOG: Job "%s" completed' % function.__name__) - return result - return wrapper - -@logging def threaded(job): + print('test') thread = threading.Thread(target=job) thread.start() @@ -26,9 +16,15 @@ def threaded(job): if CONFIG.sonarr_enabled: SONARR = SonarrAPI(CONFIG.sonarr_servers, CONFIG.influx_server) + for server in CONFIG.sonarr_servers: if server.queue: - schedule.every().minute.do(threaded, SONARR.get_queue) + schedule.every(1).minutes.do(threaded, SONARR.get_queue) + if server.missing_days > 0: + schedule.every(30).minutes.do(threaded, SONARR.get_missing, server.missing_days) + if server.future_days > 0: + schedule.every(30).minutes.do(threaded, SONARR.get_future, server.future_days) + while True: schedule.run_pending() From 4dee66f8df44d159d8aa985c9c999c47905a4ba9 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Wed, 28 Nov 2018 23:47:04 -0600 Subject: [PATCH 19/90] added config minutes setting --- Varken/helpers.py | 7 
+++++-- Varken/iniparser.py | 8 ++++++-- Varken/sonarr.py | 10 ++++++---- Varken/varken.example.ini | 3 +++ Varken/varken.py | 14 +++++++------- 5 files changed, 27 insertions(+), 15 deletions(-) diff --git a/Varken/helpers.py b/Varken/helpers.py index 5d9e4568..25519288 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -40,9 +40,12 @@ class SonarrServer(NamedTuple): url: str = None api_key: str = None verify_ssl: bool = False - missing_days: int = None - future_days: int = None + missing_days: int = 0 + missing_days_run_minutes: int = 30 + future_days: int = 0 + future_days_run_minutes: int = 30 queue: bool = False + queue_run_minutes: int = 1 class Server(NamedTuple): id: int = None diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 233db540..0ea80980 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -61,9 +61,13 @@ def parse_opts(self): queue = self.config.getboolean(sonarr_section, 'queue') missing_days = self.config.getint(sonarr_section, 'missing_days') future_days = self.config.getint(sonarr_section, 'future_days') + missing_days_run_minutes = self.config.getint(sonarr_section, 'missing_days_run_minutes') + future_days_run_minutes = self.config.getint(sonarr_section, 'future_days_run_minutes') + queue_run_minutes = self.config.getint(sonarr_section, 'queue_run_minutes') - self.sonarr_servers.append(SonarrServer(server_id, scheme + url, apikey, verify_ssl, - missing_days, future_days, queue)) + self.sonarr_servers.append(SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days, + missing_days_run_minutes, future_days, + future_days_run_minutes, queue, queue_run_minutes)) # Parse Radarr options try: diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 21cf87c5..c93a0673 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -31,7 +31,8 @@ def get_missing(self, days_past): missing = [] headers = {'X-Api-Key': server.api_key} - get = self.session.get(server.url + endpoint, params=params, headers=headers).json() + 
get = self.session.get(server.url + endpoint, params=params, headers=headers, + verify=server.verify_ssl).json() # Iteratively create a list of TVShow Objects from response json tv_shows = [TVShow(**show) for show in get] @@ -74,7 +75,8 @@ def get_future(self, future_days): headers = {'X-Api-Key': server.api_key} params = {'start': self.today, 'end': future} - get = self.session.get(server.url + endpoint, params=params, headers=headers).json() + get = self.session.get(server.url + endpoint, params=params, headers=headers, + verify=server.verify_ssl).json() tv_shows = [TVShow(**show) for show in get] for show in tv_shows: @@ -104,7 +106,7 @@ def get_future(self, future_days): self.influx_push(influx_payload) @logging - def get_queue(self): + def get_queue(self, notimplemented): influx_payload = [] endpoint = '/api/queue' @@ -112,7 +114,7 @@ def get_queue(self): queue = [] headers = {'X-Api-Key': server.api_key} - get = self.session.get(server.url + endpoint, headers=headers).json() + get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() download_queue = [Queue(**show) for show in get] for show in download_queue: diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini index 39a60730..e3022ed1 100644 --- a/Varken/varken.example.ini +++ b/Varken/varken.example.ini @@ -24,8 +24,11 @@ apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true missing_days = 7 +missing_days_run_minutes = 30 future_days = 1 +future_days_run_minutes = 30 queue = true +queue_run_minutes = 1 [sonarr-2] url = sonarr2.domain.tld diff --git a/Varken/varken.py b/Varken/varken.py index 0af850dd..2212e488 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -6,9 +6,8 @@ from Varken.sonarr import SonarrAPI -def threaded(job): - print('test') - thread = threading.Thread(target=job) +def threaded(job, days=None): + thread = threading.Thread(target=job, args=([days])) thread.start() if __name__ == "__main__": @@ -19,12 +18,13 @@ def threaded(job): 
for server in CONFIG.sonarr_servers: if server.queue: - schedule.every(1).minutes.do(threaded, SONARR.get_queue) + schedule.every(server.queue_run_minutes).minutes.do(threaded, SONARR.get_queue) if server.missing_days > 0: - schedule.every(30).minutes.do(threaded, SONARR.get_missing, server.missing_days) + schedule.every(server.missing_days_run_minutes).minutes.do(threaded, SONARR.get_missing, + server.missing_days) if server.future_days > 0: - schedule.every(30).minutes.do(threaded, SONARR.get_future, server.future_days) - + schedule.every(server.future_days_run_minutes).minutes.do(threaded, SONARR.get_future, + server.future_days) while True: schedule.run_pending() From bca833e9f9ceb2c12c2d1b9ebf44d90b705ea56d Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Thu, 29 Nov 2018 00:05:27 -0600 Subject: [PATCH 20/90] created systemd config example --- varken.service | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 varken.service diff --git a/varken.service b/varken.service new file mode 100644 index 00000000..d1db1520 --- /dev/null +++ b/varken.service @@ -0,0 +1,12 @@ +[Unit] +Description=Varken - A data collection and graphing tool +After=network-online.target + +[Service] +Type=simple +WorkingDirectory=/opt/Varken/Varken +ExecStart=/usr/bin/python3 /opt/Varken/Varken/varken.py +Restart=always + +[Install] +WantedBy=multi-user.target From a0456f078bdaa6e81b518dde0a37fa2c83a005e5 Mon Sep 17 00:00:00 2001 From: nick Date: Thu, 29 Nov 2018 12:42:10 -0600 Subject: [PATCH 21/90] changed to seconds instead of minutes --- Varken/iniparser.py | 14 +++++++++----- Varken/varken.example.ini | 12 +++++++----- Varken/varken.py | 7 ++++--- 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 0ea80980..9b24d4c8 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -3,6 +3,7 @@ from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer + class INIParser(object): def 
__init__(self): self.config = configparser.ConfigParser() @@ -61,21 +62,24 @@ def parse_opts(self): queue = self.config.getboolean(sonarr_section, 'queue') missing_days = self.config.getint(sonarr_section, 'missing_days') future_days = self.config.getint(sonarr_section, 'future_days') - missing_days_run_minutes = self.config.getint(sonarr_section, 'missing_days_run_minutes') - future_days_run_minutes = self.config.getint(sonarr_section, 'future_days_run_minutes') - queue_run_minutes = self.config.getint(sonarr_section, 'queue_run_minutes') + missing_days_run_seconds = self.config.getint(sonarr_section, 'missing_days_run_seconds') + future_days_run_seconds = self.config.getint(sonarr_section, 'future_days_run_seconds') + queue_run_seconds = self.config.getint(sonarr_section, 'queue_run_seconds') self.sonarr_servers.append(SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days, - missing_days_run_minutes, future_days, - future_days_run_minutes, queue, queue_run_minutes)) + missing_days_run_seconds, future_days, + future_days_run_seconds, queue, queue_run_seconds)) # Parse Radarr options try: if not self.config.getboolean('global', 'radarr_server_ids'): sys.exit('radarr_server_ids must be either false, or a comma-separated list of server ids') + elif self.config.getint('global', 'radarr_server_ids'): + self.radarr_enabled = True except ValueError: self.radarr_enabled = True sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',') + for server_id in sids: radarr_section = 'radarr-' + server_id url = self.config.get(radarr_section, 'url') diff --git a/Varken/varken.example.ini b/Varken/varken.example.ini index e3022ed1..10b7c703 100644 --- a/Varken/varken.example.ini +++ b/Varken/varken.example.ini @@ -2,7 +2,7 @@ # - Sonarr + Radarr scripts support multiple servers. 
You can remove the second # server by putting a # in front of the lines and section name, and removing # that number from your server_ids list -# - tautulli_failback_ip, This is used when there is no IP listed in tautulli. +# - fallback_ip, This is used when there is no IP listed in tautulli. # This can happen when you are streaming locally. This is usually your public IP. [global] @@ -24,11 +24,11 @@ apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true missing_days = 7 -missing_days_run_minutes = 30 +missing_days_run_seconds = 300 future_days = 1 -future_days_run_minutes = 30 +future_days_run_seconds = 300 queue = true -queue_run_minutes = 1 +queue_run_seconds = 300 [sonarr-2] url = sonarr2.domain.tld @@ -36,8 +36,11 @@ apikey = yyyyyyyyyyyyyyyy ssl = false verify_ssl = true missing_days = 7 +missing_days_run_seconds = 300 future_days = 1 +future_days_run_seconds = 300 queue = true +queue_run_seconds = 300 [radarr-1] url = radarr1.domain.tld @@ -63,7 +66,6 @@ fallback_ip = 0.0.0.0 apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true -influx_db = plex [asa] url = firewall.domain.tld diff --git a/Varken/varken.py b/Varken/varken.py index 2212e488..c6e93499 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -10,6 +10,7 @@ def threaded(job, days=None): thread = threading.Thread(target=job, args=([days])) thread.start() + if __name__ == "__main__": CONFIG = INIParser() @@ -18,12 +19,12 @@ def threaded(job, days=None): for server in CONFIG.sonarr_servers: if server.queue: - schedule.every(server.queue_run_minutes).minutes.do(threaded, SONARR.get_queue) + schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue) if server.missing_days > 0: - schedule.every(server.missing_days_run_minutes).minutes.do(threaded, SONARR.get_missing, + schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing, server.missing_days) if server.future_days > 0: - schedule.every(server.future_days_run_minutes).minutes.do(threaded, 
SONARR.get_future, + schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future, server.future_days) while True: From fa69fdb4f70e946771c42c6de70cb17ad82ff257 Mon Sep 17 00:00:00 2001 From: nick Date: Thu, 29 Nov 2018 12:55:13 -0600 Subject: [PATCH 22/90] folder restructure, dbmanager placeholder, iniparser file fullpath, and updated example --- {Varken => Legacy}/cisco_asa.py | 0 {Varken => Legacy}/configuration.example.py | 0 crontabs => Legacy/crontabs | 0 {Varken => Legacy}/ombi.py | 0 {Varken => Legacy}/radarr.py | 0 {Varken => Legacy}/raid_init.py | 0 {Varken => Legacy}/san.py | 0 {Varken => Legacy}/tautulli.py | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename {Varken => Legacy}/cisco_asa.py (100%) rename {Varken => Legacy}/configuration.example.py (100%) rename crontabs => Legacy/crontabs (100%) rename {Varken => Legacy}/ombi.py (100%) rename {Varken => Legacy}/radarr.py (100%) rename {Varken => Legacy}/raid_init.py (100%) rename {Varken => Legacy}/san.py (100%) rename {Varken => Legacy}/tautulli.py (100%) diff --git a/Varken/cisco_asa.py b/Legacy/cisco_asa.py similarity index 100% rename from Varken/cisco_asa.py rename to Legacy/cisco_asa.py diff --git a/Varken/configuration.example.py b/Legacy/configuration.example.py similarity index 100% rename from Varken/configuration.example.py rename to Legacy/configuration.example.py diff --git a/crontabs b/Legacy/crontabs similarity index 100% rename from crontabs rename to Legacy/crontabs diff --git a/Varken/ombi.py b/Legacy/ombi.py similarity index 100% rename from Varken/ombi.py rename to Legacy/ombi.py diff --git a/Varken/radarr.py b/Legacy/radarr.py similarity index 100% rename from Varken/radarr.py rename to Legacy/radarr.py diff --git a/Varken/raid_init.py b/Legacy/raid_init.py similarity index 100% rename from Varken/raid_init.py rename to Legacy/raid_init.py diff --git a/Varken/san.py b/Legacy/san.py similarity index 100% rename from Varken/san.py rename to 
Legacy/san.py diff --git a/Varken/tautulli.py b/Legacy/tautulli.py similarity index 100% rename from Varken/tautulli.py rename to Legacy/tautulli.py From eaba2fa15621c2244d7e7b4e6dedf6bf45e27269 Mon Sep 17 00:00:00 2001 From: nick Date: Thu, 29 Nov 2018 12:55:28 -0600 Subject: [PATCH 23/90] folder restructure, dbmanager placeholder, iniparser file fullpath, and updated example --- .gitignore | 1 + Varken/dbmanager.py | 0 Varken/iniparser.py | 5 +++-- Varken/varken.example.ini => varken.example.ini | 0 4 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 Varken/dbmanager.py rename Varken/varken.example.ini => varken.example.ini (100%) diff --git a/.gitignore b/.gitignore index 942e550b..c51a719b 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,5 @@ __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz .idea/ +.idea/* Varken/varken.ini diff --git a/Varken/dbmanager.py b/Varken/dbmanager.py new file mode 100644 index 00000000..e69de29b diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 9b24d4c8..6c6875f7 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,6 +1,6 @@ import sys import configparser - +from os.path import abspath, dirname, join from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer @@ -28,7 +28,8 @@ def __init__(self): self.parse_opts() def read_file(self): - with open('varken.ini') as config_ini: + file_path = abspath(join(dirname(__file__), '..', 'varken.ini')) + with open(file_path) as config_ini: self.config.read_file(config_ini) def parse_opts(self): diff --git a/Varken/varken.example.ini b/varken.example.ini similarity index 100% rename from Varken/varken.example.ini rename to varken.example.ini From 1604b11c55f82e7e0320756c88dcfc0a9dcdc31e Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 16:30:41 -0600 Subject: [PATCH 24/90] Migrated tautulli.py and allowed for multiple servers --- .gitignore | 2 +- Legacy/tautulli.py | 179 -------------------------------- 
Varken/helpers.py | 242 +++++++++++++++++++++++++++++++++++++++++++- Varken/iniparser.py | 45 +++++--- Varken/sonarr.py | 4 +- Varken/tautulli.py | 146 ++++++++++++++++++++++++++ Varken/varken.py | 10 ++ varken.example.ini | 19 ++-- 8 files changed, 440 insertions(+), 207 deletions(-) delete mode 100644 Legacy/tautulli.py create mode 100644 Varken/tautulli.py diff --git a/.gitignore b/.gitignore index c51a719b..185ff03f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,4 +12,4 @@ GeoLite2-City.mmdb GeoLite2-City.tar.gz .idea/ .idea/* -Varken/varken.ini +varken.ini diff --git a/Legacy/tautulli.py b/Legacy/tautulli.py deleted file mode 100644 index 70656df8..00000000 --- a/Legacy/tautulli.py +++ /dev/null @@ -1,179 +0,0 @@ -import os -import tarfile -import urllib.request -import time -from datetime import datetime, timezone -import geoip2.database -from influxdb import InfluxDBClient -import requests -from Varken import configuration - -CURRENT_TIME = datetime.now(timezone.utc).astimezone().isoformat() - -PAYLOAD = {'apikey': configuration.tautulli_api_key, 'cmd': 'get_activity'} - -ACTIVITY = requests.get('{}/api/v2'.format(configuration.tautulli_url), - params=PAYLOAD).json()['response']['data'] - -SESSIONS = {d['session_id']: d for d in ACTIVITY['sessions']} - -TAR_DBFILE = '{}/GeoLite2-City.tar.gz'.format(os.path.dirname(os.path.realpath(__file__))) - -DBFILE = '{}/GeoLite2-City.mmdb'.format(os.path.dirname(os.path.realpath(__file__))) - -NOW = time.time() - -DB_AGE = NOW - (86400 * 35) - -#remove the running db file if it is older than 35 days -try: - t = os.stat(DBFILE) - c = t.st_ctime - if c < DB_AGE: - os.remove(DBFILE) -except FileNotFoundError: - pass - - -def geo_lookup(ipaddress): - """Lookup an IP using the local GeoLite2 DB""" - if not os.path.isfile(DBFILE): - urllib.request.urlretrieve( - 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz', - TAR_DBFILE) - - tar = tarfile.open(TAR_DBFILE, "r:gz") - for files in tar.getmembers(): 
- if 'GeoLite2-City.mmdb' in files.name: - files.name = os.path.basename(files.name) - tar.extract(files, '{}/'.format(os.path.dirname(os.path.realpath(__file__)))) - - reader = geoip2.database.Reader(DBFILE) - - return reader.city(ipaddress) - - -INFLUX_PAYLOAD = [ - { - "measurement": "Tautulli", - "tags": { - "type": "stream_count" - }, - "time": CURRENT_TIME, - "fields": { - "current_streams": int(ACTIVITY['stream_count']), - "transcode_streams": int(ACTIVITY['stream_count_transcode']), - "direct_play_streams": int(ACTIVITY['stream_count_direct_play']), - "direct_streams": int(ACTIVITY['stream_count_direct_stream']) - } - } -] - -for session in SESSIONS.keys(): - try: - geodata = geo_lookup(SESSIONS[session]['ip_address_public']) - except (ValueError, geoip2.errors.AddressNotFoundError): - if configuration.tautulli_failback_ip: - geodata = geo_lookup(configuration.tautulli_failback_ip) - else: - geodata = geo_lookup(requests.get('http://ip.42.pl/raw').text) - - latitude = geodata.location.latitude - - if not geodata.location.latitude: - latitude = 37.234332396 - else: - latitude = geodata.location.latitude - - if not geodata.location.longitude: - longitude = -115.80666344 - else: - longitude = geodata.location.longitude - - decision = SESSIONS[session]['transcode_decision'] - - if decision == 'copy': - decision = 'direct stream' - - video_decision = SESSIONS[session]['stream_video_decision'] - - if video_decision == 'copy': - video_decision = 'direct stream' - - elif video_decision == '': - video_decision = 'Music' - - quality = SESSIONS[session]['stream_video_resolution'] - - - # If the video resolution is empty. 
Asssume it's an audio stream - # and use the container for music - if not quality: - quality = SESSIONS[session]['container'].upper() - - elif quality in ('SD', 'sd'): - quality = SESSIONS[session]['stream_video_resolution'].upper() - - elif quality in '4k': - quality = SESSIONS[session]['stream_video_resolution'].upper() - - else: - quality = '{}p'.format(SESSIONS[session]['stream_video_resolution']) - - - # Translate player_state to integers so we can colorize the table - player_state = SESSIONS[session]['state'].lower() - - if player_state == 'playing': - player_state = 0 - - elif player_state == 'paused': - player_state = 1 - - elif player_state == 'buffering': - player_state = 3 - - - INFLUX_PAYLOAD.append( - { - "measurement": "Tautulli", - "tags": { - "type": "Session", - "session_id": SESSIONS[session]['session_id'], - "name": SESSIONS[session]['friendly_name'], - "title": SESSIONS[session]['full_title'], - "platform": SESSIONS[session]['platform'], - "product_version": SESSIONS[session]['product_version'], - "quality": quality, - "video_decision": video_decision.title(), - "transcode_decision": decision.title(), - "media_type": SESSIONS[session]['media_type'].title(), - "audio_codec": SESSIONS[session]['audio_codec'].upper(), - "audio_profile": SESSIONS[session]['audio_profile'].upper(), - "stream_audio_codec": SESSIONS[session]['stream_audio_codec'].upper(), - "quality_profile": SESSIONS[session]['quality_profile'], - "progress_percent": SESSIONS[session]['progress_percent'], - "region_code": geodata.subdivisions.most_specific.iso_code, - "location": geodata.city.name, - "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name, - geodata.city.name), - "latitude": latitude, - "longitude": longitude, - "player_state": player_state, - "device_type": SESSIONS[session]['platform'] - }, - "time": CURRENT_TIME, - "fields": { - "session_id": SESSIONS[session]['session_id'], - "session_key": SESSIONS[session]['session_key'] - } - } - ) - 
-INFLUX_SENDER = InfluxDBClient(configuration.influxdb_url, - configuration.influxdb_port, - configuration.influxdb_username, - configuration.influxdb_password, - configuration.tautulli_influxdb_db_name) - -INFLUX_SENDER.write_points(INFLUX_PAYLOAD) diff --git a/Varken/helpers.py b/Varken/helpers.py index 25519288..fea68b1c 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -1,4 +1,10 @@ +import os +import time +import tarfile +import geoip2.database from typing import NamedTuple +from os.path import abspath, join +from urllib.request import urlretrieve class TVShow(NamedTuple): @@ -35,17 +41,19 @@ class Queue(NamedTuple): protocol: str = None id: int = None + class SonarrServer(NamedTuple): id: int = None url: str = None api_key: str = None verify_ssl: bool = False missing_days: int = 0 - missing_days_run_minutes: int = 30 + missing_days_run_seconds: int = 30 future_days: int = 0 - future_days_run_minutes: int = 30 + future_days_run_seconds: int = 30 queue: bool = False - queue_run_minutes: int = 1 + queue_run_seconds: int = 1 + class Server(NamedTuple): id: int = None @@ -55,14 +63,238 @@ class Server(NamedTuple): class TautulliServer(NamedTuple): + id: int = None url: str = None fallback_ip: str = None apikey: str = None verify_ssl: bool = None - influx_db: str = None + get_activity: bool = False + get_activity_run_seconds: int = 30 + get_sessions: bool = False + get_sessions_run_seconds: int = 30 + class InfluxServer(NamedTuple): url: str = 'localhost' port: int = 8086 username: str = 'root' - password: str = 'root' \ No newline at end of file + password: str = 'root' + + +class TautulliStream(NamedTuple): + rating: str + transcode_width: str + labels: list + stream_bitrate: str + bandwidth: str + optimized_version: int + video_language: str + parent_rating_key: str + rating_key: str + platform_version: str + transcode_hw_decoding: int + thumb: str + title: str + video_codec_level: str + tagline: str + last_viewed_at: str + audio_sample_rate: str + 
user_rating: str + platform: str + collections: list + location: str + transcode_container: str + audio_channel_layout: str + local: str + stream_subtitle_format: str + stream_video_ref_frames: str + transcode_hw_encode_title: str + stream_container_decision: str + audience_rating: str + full_title: str + ip_address: str + subtitles: int + stream_subtitle_language: str + channel_stream: int + video_bitrate: str + is_allow_sync: int + stream_video_bitrate: str + summary: str + stream_audio_decision: str + aspect_ratio: str + audio_bitrate_mode: str + transcode_hw_decode_title: str + stream_audio_channel_layout: str + deleted_user: int + library_name: str + art: str + stream_video_resolution: str + video_profile: str + sort_title: str + stream_video_codec_level: str + stream_video_height: str + year: str + stream_duration: str + stream_audio_channels: str + video_language_code: str + transcode_key: str + transcode_throttled: int + container: str + stream_audio_bitrate: str + user: str + selected: int + product_version: str + subtitle_location: str + transcode_hw_requested: int + video_height: str + state: str + is_restricted: int + email: str + stream_container: str + transcode_speed: str + video_bit_depth: str + stream_audio_sample_rate: str + grandparent_title: str + studio: str + transcode_decision: str + video_width: str + bitrate: str + machine_id: str + originally_available_at: str + video_frame_rate: str + synced_version_profile: str + friendly_name: str + audio_profile: str + optimized_version_title: str + platform_name: str + stream_video_language: str + keep_history: int + stream_audio_codec: str + stream_video_codec: str + grandparent_thumb: str + synced_version: int + transcode_hw_decode: str + user_thumb: str + stream_video_width: str + height: str + stream_subtitle_decision: str + audio_codec: str + parent_title: str + guid: str + audio_language_code: str + transcode_video_codec: str + transcode_audio_codec: str + stream_video_decision: str + user_id: 
int + transcode_height: str + transcode_hw_full_pipeline: int + throttled: str + quality_profile: str + width: str + live: int + stream_subtitle_forced: int + media_type: str + video_resolution: str + stream_subtitle_location: str + do_notify: int + video_ref_frames: str + stream_subtitle_language_code: str + audio_channels: str + stream_audio_language_code: str + optimized_version_profile: str + relay: int + duration: str + rating_image: str + is_home_user: int + is_admin: int + ip_address_public: str + allow_guest: int + transcode_audio_channels: str + stream_audio_channel_layout_: str + media_index: str + stream_video_framerate: str + transcode_hw_encode: str + grandparent_rating_key: str + original_title: str + added_at: str + banner: str + bif_thumb: str + parent_media_index: str + live_uuid: str + audio_language: str + stream_audio_bitrate_mode: str + username: str + subtitle_decision: str + children_count: str + updated_at: str + player: str + subtitle_format: str + file: str + file_size: str + session_key: str + id: str + subtitle_container: str + genres: list + stream_video_language_code: str + indexes: int + video_decision: str + stream_audio_language: str + writers: list + actors: list + progress_percent: str + audio_decision: str + subtitle_forced: int + profile: str + product: str + view_offset: str + type: str + audience_rating_image: str + audio_bitrate: str + section_id: str + stream_subtitle_codec: str + subtitle_codec: str + video_codec: str + device: str + stream_video_bit_depth: str + video_framerate: str + transcode_hw_encoding: int + transcode_protocol: str + shared_libraries: list + stream_aspect_ratio: str + content_rating: str + session_id: str + directors: list + parent_thumb: str + subtitle_language_code: str + transcode_progress: int + subtitle_language: str + stream_subtitle_container: str + +def geoip_download(): + tar_dbfile = abspath(join('..', 'data', 'GeoLite2-City.tar.gz')) + url = 
'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz' + urlretrieve(url, tar_dbfile) + tar = tarfile.open(tar_dbfile, "r:gz") + for files in tar.getmembers(): + if 'GeoLite2-City.mmdb' in files.name: + files.name = os.path.basename(files.name) + tar.extract(files, '{}/'.format(os.path.dirname(os.path.realpath(__file__)))) + os.remove(tar_dbfile) + +def geo_lookup(ipaddress): + + dbfile = abspath(join('..', 'data', 'GeoLite2-City.mmdb')) + now = time.time() + + try: + dbinfo = os.stat(dbfile) + db_age = now - dbinfo.st_ctime + if db_age > (35 * 86400): + os.remove(dbfile) + geoip_download() + except FileNotFoundError: + geoip_download() + + reader = geoip2.database.Reader(dbfile) + + return reader.city(ipaddress) diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 6c6875f7..449d052e 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -20,7 +20,7 @@ def __init__(self): self.ombi_server = None self.tautulli_enabled = False - self.tautulli_server = None + self.tautulli_servers = [] self.asa_enabled = False self.asa = None @@ -67,9 +67,10 @@ def parse_opts(self): future_days_run_seconds = self.config.getint(sonarr_section, 'future_days_run_seconds') queue_run_seconds = self.config.getint(sonarr_section, 'queue_run_seconds') - self.sonarr_servers.append(SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days, - missing_days_run_seconds, future_days, - future_days_run_seconds, queue, queue_run_seconds)) + server = SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days, + missing_days_run_seconds, future_days, future_days_run_seconds, + queue, queue_run_seconds) + self.sonarr_servers.append(server) # Parse Radarr options try: @@ -79,6 +80,8 @@ def parse_opts(self): self.radarr_enabled = True except ValueError: self.radarr_enabled = True + + if self.sonarr_enabled: sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',') for server_id in sids: @@ -91,16 +94,32 @@ def parse_opts(self): 
self.radarr_servers.append(Server(server_id, scheme + url, apikey, verify_ssl)) # Parse Tautulli options - if self.config.getboolean('global', 'tautulli'): + try: + if not self.config.getboolean('global', 'tautulli_server_ids'): + sys.exit('tautulli_server_ids must be either false, or a comma-separated list of server ids') + elif self.config.getint('global', 'tautulli_server_ids'): + self.tautulli_enabled = True + except ValueError: self.tautulli_enabled = True - url = self.config.get('tautulli', 'url') - fallback_ip = self.config.get('tautulli', 'fallback_ip') - apikey = self.config.get('tautulli', 'apikey') - scheme = 'https://' if self.config.getboolean('tautulli', 'ssl') else 'http://' - verify_ssl = self.config.getboolean('tautulli', 'verify_ssl') - db_name = self.config.get('tautulli', 'influx_db') - - self.tautulli_server = TautulliServer(scheme + url, fallback_ip, apikey, verify_ssl, db_name) + + if self.tautulli_enabled: + sids = self.config.get('global', 'tautulli_server_ids').strip(' ').split(',') + + for server_id in sids: + tautulli_section = 'tautulli-' + server_id + url = self.config.get(tautulli_section, 'url') + fallback_ip = self.config.get(tautulli_section, 'fallback_ip') + apikey = self.config.get(tautulli_section, 'apikey') + scheme = 'https://' if self.config.getboolean(tautulli_section, 'ssl') else 'http://' + verify_ssl = self.config.getboolean(tautulli_section, 'verify_ssl') + get_activity = self.config.getboolean(tautulli_section, 'get_activity') + get_activity_run_seconds = self.config.getint(tautulli_section, 'get_activity_run_seconds') + get_sessions = self.config.getboolean(tautulli_section, 'get_sessions') + get_sessions_run_seconds = self.config.getint(tautulli_section, 'get_sessions_run_seconds') + + server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity, + get_activity_run_seconds, get_sessions, get_sessions_run_seconds) + self.tautulli_servers.append(server) # Parse Ombi Options if 
self.config.getboolean('global', 'ombi'): diff --git a/Varken/sonarr.py b/Varken/sonarr.py index c93a0673..65f5df01 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -9,13 +9,13 @@ class SonarrAPI(object): - def __init__(self, sonarr_servers, influx_server): + def __init__(self, servers, influx_server): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.today = str(date.today()) self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex') - self.servers = sonarr_servers + self.servers = servers # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} diff --git a/Varken/tautulli.py b/Varken/tautulli.py new file mode 100644 index 00000000..6912c902 --- /dev/null +++ b/Varken/tautulli.py @@ -0,0 +1,146 @@ +from datetime import datetime, timezone +from geoip2.errors import AddressNotFoundError +from influxdb import InfluxDBClient +import requests +from Varken.helpers import TautulliStream, geo_lookup +from Varken.logger import logging + +class TautulliAPI(object): + def __init__(self, servers, influx_server): + # Set Time of initialization + self.now = datetime.now(timezone.utc).astimezone().isoformat() + self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, + influx_server.password, 'plex') + self.servers = servers + self.session = requests.Session() + self.endpoint = '/api/v2' + + def influx_push(self, payload): + # TODO: error handling for failed connection + self.influx.write_points(payload) + + @logging + def get_activity(self, notimplemented): + params = {'cmd': 'get_activity'} + influx_payload = [] + + for server in self.servers: + params['apikey'] = server.apikey + g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl) + get = g.json()['response']['data'] + + 
influx_payload.append( + { + "measurement": "Tautulli", + "tags": { + "type": "current_stream_stats", + "server": server.id + }, + "time": self.now, + "fields": { + "stream_count": int(get['stream_count']), + "total_bandwidth": int(get['total_bandwidth']), + "wan_bandwidth": int(get['wan_bandwidth']), + "lan_bandwidth": int(get['lan_bandwidth']), + "transcode_streams": int(get['stream_count_transcode']), + "direct_play_streams": int(get['stream_count_direct_play']), + "direct_streams": int(get['stream_count_direct_stream']) + } + } + ) + + self.influx_push(influx_payload) + + @logging + def get_sessions(self, notimplemented): + params = {'cmd': 'get_activity'} + influx_payload = [] + + for server in self.servers: + params['apikey'] = server.apikey + g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl) + get = g.json()['response']['data']['sessions'] + print(get) + sessions = [TautulliStream(**session) for session in get] + + for session in sessions: + try: + geodata = geo_lookup(session.ip_address_public) + except (ValueError, AddressNotFoundError): + if server.fallback_ip: + geodata = geo_lookup(server.fallback_ip) + else: + my_ip = requests.get('http://ip.42.pl/raw').text + geodata = geo_lookup(my_ip) + + if not all([geodata.location.latitude, geodata.location.longitude]): + latitude = 37.234332396 + longitude = -115.80666344 + else: + latitude = geodata.location.latitude + longitude = geodata.location.longitude + + decision = session.transcode_decision + if decision == 'copy': + decision = 'direct stream' + + video_decision = session.stream_video_decision + if video_decision == 'copy': + video_decision = 'direct stream' + elif video_decision == '': + video_decision = 'Music' + + quality = session.stream_video_resolution + if not quality: + quality = session.container.upper() + elif quality in ('SD', 'sd', '4k'): + quality = session.stream_video_resolution.upper() + else: + quality = session.stream_video_resolution + 'p' + + 
player_state = session.state.lower() + if player_state == 'playing': + player_state = 0 + elif player_state == 'paused': + player_state = 1 + elif player_state == 'buffering': + player_state = 3 + + influx_payload.append( + { + "measurement": "Tautulli", + "tags": { + "type": "Session", + "session_id": session.session_id, + "name": session.friendly_name, + "title": session.full_title, + "platform": session.platform, + "product_version": session.product_version, + "quality": quality, + "video_decision": video_decision.title(), + "transcode_decision": decision.title(), + "media_type": session.media_type.title(), + "audio_codec": session.audio_codec.upper(), + "audio_profile": session.audio_profile.upper(), + "stream_audio_codec": session.stream_audio_codec.upper(), + "quality_profile": session.quality_profile, + "progress_percent": session.progress_percent, + "region_code": geodata.subdivisions.most_specific.iso_code, + "location": geodata.city.name, + "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name, + geodata.city.name), + "latitude": latitude, + "longitude": longitude, + "player_state": player_state, + "device_type": session.platform, + "server": server.id + }, + "time": self.now, + "fields": { + "session_id": session.session_id, + "session_key": session.session_key + } + } + ) + + self.influx_push(influx_payload) diff --git a/Varken/varken.py b/Varken/varken.py index c6e93499..8be5b2c8 100644 --- a/Varken/varken.py +++ b/Varken/varken.py @@ -4,6 +4,7 @@ from Varken.iniparser import INIParser from Varken.sonarr import SonarrAPI +from Varken.tautulli import TautulliAPI def threaded(job, days=None): @@ -27,6 +28,15 @@ def threaded(job, days=None): schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future, server.future_days) + if CONFIG.tautulli_enabled: + TAUTULLI = TautulliAPI(CONFIG.tautulli_servers, CONFIG.influx_server) + + for server in CONFIG.tautulli_servers: + if server.get_activity: + 
schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity) + if server.get_sessions: + schedule.every(server.get_sessions_run_seconds).seconds.do(threaded, TAUTULLI.get_sessions) + while True: schedule.run_pending() sleep(1) diff --git a/varken.example.ini b/varken.example.ini index 10b7c703..aaa3bb63 100644 --- a/varken.example.ini +++ b/varken.example.ini @@ -8,8 +8,8 @@ [global] sonarr_server_ids = 1,2 radarr_server_ids = 1,2 +tautulli_server_ids = 1 ombi = true -tautulli = true asa = false [influxdb] @@ -18,6 +18,17 @@ port = 8086 username = root password = root +[tautulli-1] +url = tautulli.domain.tld +fallback_ip = 0.0.0.0 +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = true +get_activity = true +get_activity_run_seconds = 30 +get_sessions = true +get_sessions_run_seconds = 30 + [sonarr-1] url = sonarr1.domain.tld apikey = xxxxxxxxxxxxxxxx @@ -60,12 +71,6 @@ apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true -[tautulli] -url = tautulli.domain.tld -fallback_ip = 0.0.0.0 -apikey = xxxxxxxxxxxxxxxx -ssl = false -verify_ssl = true [asa] url = firewall.domain.tld From 95fbb3319a69bc6d7a1cafdd6727b955d1422b29 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:11:46 -0600 Subject: [PATCH 25/90] fixed time --- Varken/sonarr.py | 3 +++ Varken/tautulli.py | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 65f5df01..79cab8a1 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -24,6 +24,7 @@ def __init__(self, servers, influx_server): def get_missing(self, days_past): endpoint = '/api/calendar' last_days = str(date.today() + timedelta(days=-days_past)) + self.now = datetime.now(timezone.utc).astimezone().isoformat() params = {'start': last_days, 'end': self.today} influx_payload = [] @@ -66,6 +67,7 @@ def get_missing(self, days_past): @logging def get_future(self, future_days): endpoint = '/api/calendar/' + self.now = 
datetime.now(timezone.utc).astimezone().isoformat() future = str(date.today() + timedelta(days=future_days)) influx_payload = [] @@ -109,6 +111,7 @@ def get_future(self, future_days): def get_queue(self, notimplemented): influx_payload = [] endpoint = '/api/queue' + self.now = datetime.now(timezone.utc).astimezone().isoformat() for server in self.servers: queue = [] diff --git a/Varken/tautulli.py b/Varken/tautulli.py index 6912c902..da01739a 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -5,12 +5,13 @@ from Varken.helpers import TautulliStream, geo_lookup from Varken.logger import logging + class TautulliAPI(object): def __init__(self, servers, influx_server): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, - influx_server.password, 'plex') + influx_server.password, 'plex2') self.servers = servers self.session = requests.Session() self.endpoint = '/api/v2' @@ -21,6 +22,7 @@ def influx_push(self, payload): @logging def get_activity(self, notimplemented): + self.now = datetime.now(timezone.utc).astimezone().isoformat() params = {'cmd': 'get_activity'} influx_payload = [] @@ -53,6 +55,7 @@ def get_activity(self, notimplemented): @logging def get_sessions(self, notimplemented): + self.now = datetime.now(timezone.utc).astimezone().isoformat() params = {'cmd': 'get_activity'} influx_payload = [] @@ -60,7 +63,6 @@ def get_sessions(self, notimplemented): params['apikey'] = server.apikey g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl) get = g.json()['response']['data']['sessions'] - print(get) sessions = [TautulliStream(**session) for session in get] for session in sessions: From f46de051317a7c518e56356bbfdfdd1fc3b581c4 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:28:50 -0600 Subject: [PATCH 26/90] deleted extras and fixed downloaded --- .gitignore | 1 + 
Varken/helpers.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 185ff03f..50ca5a6c 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ GeoLite2-City.tar.gz .idea/ .idea/* varken.ini +data/ diff --git a/Varken/helpers.py b/Varken/helpers.py index fea68b1c..d6554748 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -278,7 +278,7 @@ def geoip_download(): for files in tar.getmembers(): if 'GeoLite2-City.mmdb' in files.name: files.name = os.path.basename(files.name) - tar.extract(files, '{}/'.format(os.path.dirname(os.path.realpath(__file__)))) + tar.extract(files, abspath(join('..', 'data'))) os.remove(tar_dbfile) def geo_lookup(ipaddress): From c93a526bc89c2ea52fd0ac9032432c1eef39b054 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:46:56 -0600 Subject: [PATCH 27/90] moved files --- .gitignore | 9 ++-- Legacy/cisco_asa.py | 2 +- Legacy/ombi.py | 2 +- Legacy/radarr.py | 2 +- Varken/iniparser.py | 4 +- varken.example.ini | 81 ----------------------------------- Varken/varken.py => varken.py | 0 7 files changed, 9 insertions(+), 91 deletions(-) delete mode 100644 varken.example.ini rename Varken/varken.py => varken.py (100%) diff --git a/.gitignore b/.gitignore index 50ca5a6c..a7e3e027 100644 --- a/.gitignore +++ b/.gitignore @@ -5,12 +5,11 @@ .Trashes ehthumbs.db Thumbs.db -Varken/configuration.py -Varken/config.ini __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz +data/varken.ini +data/GeoLite2-City.mmdb +data/GeoLite2-City.tar.gz .idea/ -.idea/* -varken.ini -data/ +Legacy/configuration.py diff --git a/Legacy/cisco_asa.py b/Legacy/cisco_asa.py index bb44c93a..33ebe440 100644 --- a/Legacy/cisco_asa.py +++ b/Legacy/cisco_asa.py @@ -3,7 +3,7 @@ from datetime import datetime, timezone from influxdb import InfluxDBClient -from Varken import configuration +from Legacy import configuration current_time = datetime.now(timezone.utc).astimezone().isoformat() diff --git a/Legacy/ombi.py 
b/Legacy/ombi.py index 898b8c8b..bcea45fb 100644 --- a/Legacy/ombi.py +++ b/Legacy/ombi.py @@ -5,7 +5,7 @@ from influxdb import InfluxDBClient import argparse from argparse import RawTextHelpFormatter -from Varken import configuration +from Legacy import configuration headers = {'Apikey': configuration.ombi_api_key} diff --git a/Legacy/radarr.py b/Legacy/radarr.py index 25ddff91..eea34996 100644 --- a/Legacy/radarr.py +++ b/Legacy/radarr.py @@ -5,7 +5,7 @@ from influxdb import InfluxDBClient import argparse from argparse import RawTextHelpFormatter -from Varken import configuration +from Legacy import configuration def now_iso(): diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 449d052e..ec7f82b3 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,6 +1,6 @@ import sys import configparser -from os.path import abspath, dirname, join +from os.path import abspath, join from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer @@ -28,7 +28,7 @@ def __init__(self): self.parse_opts() def read_file(self): - file_path = abspath(join(dirname(__file__), '..', 'varken.ini')) + file_path = abspath(join('..', 'data', 'varken.ini')) with open(file_path) as config_ini: self.config.read_file(config_ini) diff --git a/varken.example.ini b/varken.example.ini deleted file mode 100644 index aaa3bb63..00000000 --- a/varken.example.ini +++ /dev/null @@ -1,81 +0,0 @@ -# Notes: -# - Sonarr + Radarr scripts support multiple servers. You can remove the second -# server by putting a # in front of the lines and section name, and removing -# that number from your server_ids list -# - fallback_ip, This is used when there is no IP listed in tautulli. -# This can happen when you are streaming locally. This is usually your public IP. 
- -[global] -sonarr_server_ids = 1,2 -radarr_server_ids = 1,2 -tautulli_server_ids = 1 -ombi = true -asa = false - -[influxdb] -url = influxdb.domain.tld -port = 8086 -username = root -password = root - -[tautulli-1] -url = tautulli.domain.tld -fallback_ip = 0.0.0.0 -apikey = xxxxxxxxxxxxxxxx -ssl = false -verify_ssl = true -get_activity = true -get_activity_run_seconds = 30 -get_sessions = true -get_sessions_run_seconds = 30 - -[sonarr-1] -url = sonarr1.domain.tld -apikey = xxxxxxxxxxxxxxxx -ssl = false -verify_ssl = true -missing_days = 7 -missing_days_run_seconds = 300 -future_days = 1 -future_days_run_seconds = 300 -queue = true -queue_run_seconds = 300 - -[sonarr-2] -url = sonarr2.domain.tld -apikey = yyyyyyyyyyyyyyyy -ssl = false -verify_ssl = true -missing_days = 7 -missing_days_run_seconds = 300 -future_days = 1 -future_days_run_seconds = 300 -queue = true -queue_run_seconds = 300 - -[radarr-1] -url = radarr1.domain.tld -apikey = xxxxxxxxxxxxxxxx -ssl = false -verify_ssl = true - -[radarr-2] -url = radarr2.domain.tld -apikey = yyyyyyyyyyyyyyyy -ssl = false -verify_ssl = true - -[ombi] -url = ombi.domain.tld -apikey = xxxxxxxxxxxxxxxx -ssl = false -verify_ssl = true - - -[asa] -url = firewall.domain.tld -username = cisco -password = cisco -influx_db = asa -ssl = false -verify_ssl = true \ No newline at end of file diff --git a/Varken/varken.py b/varken.py similarity index 100% rename from Varken/varken.py rename to varken.py From c8ceb52b60b82530355c2f8343fa0b59fd33707c Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:47:31 -0600 Subject: [PATCH 28/90] moved files --- data/varken.example.ini | 81 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 data/varken.example.ini diff --git a/data/varken.example.ini b/data/varken.example.ini new file mode 100644 index 00000000..aaa3bb63 --- /dev/null +++ b/data/varken.example.ini @@ -0,0 +1,81 @@ +# Notes: +# - Sonarr + Radarr scripts support multiple 
servers. You can remove the second +# server by putting a # in front of the lines and section name, and removing +# that number from your server_ids list +# - fallback_ip, This is used when there is no IP listed in tautulli. +# This can happen when you are streaming locally. This is usually your public IP. + +[global] +sonarr_server_ids = 1,2 +radarr_server_ids = 1,2 +tautulli_server_ids = 1 +ombi = true +asa = false + +[influxdb] +url = influxdb.domain.tld +port = 8086 +username = root +password = root + +[tautulli-1] +url = tautulli.domain.tld +fallback_ip = 0.0.0.0 +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = true +get_activity = true +get_activity_run_seconds = 30 +get_sessions = true +get_sessions_run_seconds = 30 + +[sonarr-1] +url = sonarr1.domain.tld +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = true +missing_days = 7 +missing_days_run_seconds = 300 +future_days = 1 +future_days_run_seconds = 300 +queue = true +queue_run_seconds = 300 + +[sonarr-2] +url = sonarr2.domain.tld +apikey = yyyyyyyyyyyyyyyy +ssl = false +verify_ssl = true +missing_days = 7 +missing_days_run_seconds = 300 +future_days = 1 +future_days_run_seconds = 300 +queue = true +queue_run_seconds = 300 + +[radarr-1] +url = radarr1.domain.tld +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = true + +[radarr-2] +url = radarr2.domain.tld +apikey = yyyyyyyyyyyyyyyy +ssl = false +verify_ssl = true + +[ombi] +url = ombi.domain.tld +apikey = xxxxxxxxxxxxxxxx +ssl = false +verify_ssl = true + + +[asa] +url = firewall.domain.tld +username = cisco +password = cisco +influx_db = asa +ssl = false +verify_ssl = true \ No newline at end of file From b4ed9fb7757ad6e67e1fb0f1a739d27451621275 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:47:37 -0600 Subject: [PATCH 29/90] moved files --- Legacy/configuration.py | 49 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 Legacy/configuration.py diff --git 
a/Legacy/configuration.py b/Legacy/configuration.py new file mode 100644 index 00000000..a0df50a2 --- /dev/null +++ b/Legacy/configuration.py @@ -0,0 +1,49 @@ +''' +Notes: + - Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix + + - Sonarr + Radarr scripts support multiple servers. You can remove the second + server by putting a # in front of the line. + + - tautulli_failback_ip, This is used when there is no IP listed in tautulli. + This can happen when you are streaming locally. This is usually your public IP. +''' + +########################### INFLUXDB CONFIG ########################### +influxdb_url = 'influxdb.domain.tld' +influxdb_port = 8086 +influxdb_username = '' +influxdb_password = '' + +############################ SONARR CONFIG ############################ +sonarr_server_list = [ + ('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), + ('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), + #('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') + ] +sonarr_influxdb_db_name = 'plex' + +############################ RADARR CONFIG ############################ +radarr_server_list = [ + ('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), + ('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), + #('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') + ] +radarr_influxdb_db_name = 'plex' + +############################ OMBI CONFIG ############################## +ombi_url = 'https://ombi.domain.tld' +ombi_api_key = 'xxxxxxxxxxxxxxx' +ombi_influxdb_db_name = 'plex' + +########################## TAUTULLI CONFIG ############################ +tautulli_url = 'https://tautulli.domain.tld' +tautulli_api_key = 'xxxxxxxxxxxxxxx' +tautulli_failback_ip = '' +tautulli_influxdb_db_name = 'plex' + +########################## FIREWALL CONFIG ############################ +asa_url = 'https://firewall.domain.tld' +asa_username = 'cisco' +asa_password = 'cisco' +asa_influxdb_db_name = 'asa' From 
c201d1e02fb2ba26617d3e12b81c1c25367eeaba Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:51:28 -0600 Subject: [PATCH 30/90] moved files --- Varken/helpers.py | 2 +- Varken/iniparser.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Varken/helpers.py b/Varken/helpers.py index d6554748..9ffbf5a8 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -278,7 +278,7 @@ def geoip_download(): for files in tar.getmembers(): if 'GeoLite2-City.mmdb' in files.name: files.name = os.path.basename(files.name) - tar.extract(files, abspath(join('..', 'data'))) + tar.extract(files, abspath(join('.', 'data'))) os.remove(tar_dbfile) def geo_lookup(ipaddress): diff --git a/Varken/iniparser.py b/Varken/iniparser.py index ec7f82b3..7793161e 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -28,7 +28,7 @@ def __init__(self): self.parse_opts() def read_file(self): - file_path = abspath(join('..', 'data', 'varken.ini')) + file_path = abspath(join('.', 'data', 'varken.ini')) with open(file_path) as config_ini: self.config.read_file(config_ini) From 6c51399c5bd36ba4fd9ab7652abd7c433c56b784 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:53:03 -0600 Subject: [PATCH 31/90] forgot to delete period --- Varken/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Varken/helpers.py b/Varken/helpers.py index 9ffbf5a8..5939032b 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -271,7 +271,7 @@ class TautulliStream(NamedTuple): stream_subtitle_container: str def geoip_download(): - tar_dbfile = abspath(join('..', 'data', 'GeoLite2-City.tar.gz')) + tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz')) url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz' urlretrieve(url, tar_dbfile) tar = tarfile.open(tar_dbfile, "r:gz") From 48f458c97226286ad7d0712cc21592c6a6da8a0a Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 17:54:47 -0600 
Subject: [PATCH 32/90] forgot to delete period again --- Varken/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Varken/helpers.py b/Varken/helpers.py index 5939032b..f84689b6 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -283,7 +283,7 @@ def geoip_download(): def geo_lookup(ipaddress): - dbfile = abspath(join('..', 'data', 'GeoLite2-City.mmdb')) + dbfile = abspath(join('.', 'data', 'GeoLite2-City.mmdb')) now = time.time() try: From 4c439bb6c2bfa2d974aa7d099467c9940e29f748 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 20:33:33 -0600 Subject: [PATCH 33/90] migrated radarr --- .gitignore | 2 - Legacy/radarr.py | 171 ---------------- Varken/helpers.py | 426 ++++++++++++++++++++++------------------ Varken/iniparser.py | 14 +- Varken/radarr.py | 106 ++++++++++ Varken/sonarr.py | 2 - data/varken.example.ini | 5 + varken.py | 12 ++ 8 files changed, 372 insertions(+), 366 deletions(-) delete mode 100644 Legacy/radarr.py create mode 100644 Varken/radarr.py diff --git a/.gitignore b/.gitignore index a7e3e027..f238ac14 100644 --- a/.gitignore +++ b/.gitignore @@ -9,7 +9,5 @@ __pycache__ GeoLite2-City.mmdb GeoLite2-City.tar.gz data/varken.ini -data/GeoLite2-City.mmdb -data/GeoLite2-City.tar.gz .idea/ Legacy/configuration.py diff --git a/Legacy/radarr.py b/Legacy/radarr.py deleted file mode 100644 index eea34996..00000000 --- a/Legacy/radarr.py +++ /dev/null @@ -1,171 +0,0 @@ -# Do not edit this script. 
Edit configuration.py -import sys -import requests -from datetime import datetime, timezone -from influxdb import InfluxDBClient -import argparse -from argparse import RawTextHelpFormatter -from Legacy import configuration - - -def now_iso(): - now_iso = datetime.now(timezone.utc).astimezone().isoformat() - return now_iso - - -def influx_sender(influx_payload): - influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username, - configuration.influxdb_password, configuration.radarr_influxdb_db_name) - influx.write_points(influx_payload) - - -def get_missing_movies(): - # Set the time here so we have one timestamp to work with - now = now_iso() - missing = [] - influx_payload = [] - - for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list: - headers = {'X-Api-Key': radarr_api_key} - get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json() - movies = {d['tmdbId']: d for d in get_movies} - - for movie in movies.keys(): - if not movies[movie]['downloaded']: - movie_name = ('{} ({})'.format(movies[movie]['title'], movies[movie]['year'])) - missing.append((movie_name, movies[movie]['tmdbId'])) - - for movie, id in missing: - influx_payload.append( - { - "measurement": "Radarr", - "tags": { - "type": "Missing", - "tmdbId": id, - "server": server_id - }, - "time": now, - "fields": { - "name": movie - } - } - ) - # Empty missing or else things get foo bared - missing = [] - - return influx_payload - - -def get_missing_avl(): - # Set the time here so we have one timestamp to work with - now = now_iso() - missing = [] - influx_payload = [] - - for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list: - headers = {'X-Api-Key': radarr_api_key} - get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json() - movies = {d['tmdbId']: d for d in get_movies} - - for movie in movies.keys(): - if not movies[movie]['downloaded']: - if 
movies[movie]['isAvailable'] is True: - movie_name = ('{} ({})'.format(movies[movie]['title'], movies[movie]['year'])) - missing.append((movie_name, movies[movie]['tmdbId'])) - - - for movie, id in missing: - influx_payload.append( - { - "measurement": "Radarr", - "tags": { - "type": "Missing_Available", - "tmdbId": id, - "server": server_id - }, - "time": now, - "fields": { - "name": movie, - } - } - ) - # Empty missing or else things get foo bared - missing = [] - - return influx_payload - - -def get_queue_movies(): - # Set the time here so we have one timestamp to work with - now = now_iso() - influx_payload = [] - queue = [] - - for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list: - headers = {'X-Api-Key': radarr_api_key} - get_movies = requests.get('{}/api/queue'.format(radarr_url), headers=headers).json() - queue_movies = {d['id']: d for d in get_movies} - - for movie in queue_movies.keys(): - name = '{} ({})'.format(queue_movies[movie]['movie']['title'], queue_movies[movie]['movie']['year']) - quality = (queue_movies[movie]['quality']['quality']['name']) - protocol = (queue_movies[movie]['protocol'].upper()) - - if protocol == 'USENET': - protocol_id = 1 - else: - protocol_id = 0 - - queue.append((name, queue_movies[movie]['id'])) - - for movie, id in queue: - influx_payload.append( - { - "measurement": "Radarr", - "tags": { - "type": "Queue", - "tmdbId": id, - "server": server_id - }, - "time": now, - "fields": { - "name": movie, - "quality": quality, - "protocol": protocol, - "protocol_id": protocol_id - } - } - ) - # Empty queue or else things get foo bared - queue = [] - - return influx_payload - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog='Radarr stats operations', - description='Script to aid in data gathering from Radarr', formatter_class=RawTextHelpFormatter) - - parser.add_argument("--missing", action='store_true', - help='Get missing movies') - - parser.add_argument("--missing_avl", 
action='store_true', - help='Get missing yet available movies') - - parser.add_argument("--queue", action='store_true', - help='Get movies in queue') - - opts = parser.parse_args() - - if opts.missing: - influx_sender(get_missing_movies()) - - elif opts.missing_avl: - influx_sender(get_missing_avl()) - - elif opts.queue: - influx_sender(get_queue_movies()) - - elif len(sys.argv) == 1: - parser.print_help(sys.stderr) - sys.exit(1) diff --git a/Varken/helpers.py b/Varken/helpers.py index f84689b6..434f2eeb 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -25,7 +25,48 @@ class TVShow(NamedTuple): id: int = None +class Movie(NamedTuple): + title: str = None + alternativeTitles: list = None + secondaryYearSourceId: int = None + sortTitle: str = None + sizeOnDisk: int = None + status: str = None + overview: str = None + inCinemas: str = None + images: list = None + downloaded: bool = None + year: int = None + secondaryYear: str = None + hasFile: bool = None + youTubeTrailerId: str = None + studio: str = None + path: str = None + profileId: int = None + pathState: str = None + monitored: bool = None + minimumAvailability: str = None + isAvailable: bool = None + folderName: str = None + runtime: int = None + lastInfoSync: str = None + cleanTitle: str = None + imdbId: str = None + tmdbId: int = None + titleSlug: str = None + genres: list = None + tags: list = None + added: str = None + ratings: dict = None + movieFile: dict = None + qualityProfileId: int = None + physicalRelease: str = None + physicalReleaseNote: str = None + website: str = None + id: int = None + class Queue(NamedTuple): + movie: dict = None series: dict = None episode: dict = None quality: dict = None @@ -54,6 +95,15 @@ class SonarrServer(NamedTuple): queue: bool = False queue_run_seconds: int = 1 +class RadarrServer(NamedTuple): + id: int = None + url: str = None + api_key: str = None + verify_ssl: bool = False + queue: bool = False + queue_run_seconds: int = 1 + get_missing: bool = False + 
get_missing_run_seconds: int = 30 class Server(NamedTuple): id: int = None @@ -82,193 +132,195 @@ class InfluxServer(NamedTuple): class TautulliStream(NamedTuple): - rating: str - transcode_width: str - labels: list - stream_bitrate: str - bandwidth: str - optimized_version: int - video_language: str - parent_rating_key: str - rating_key: str - platform_version: str - transcode_hw_decoding: int - thumb: str - title: str - video_codec_level: str - tagline: str - last_viewed_at: str - audio_sample_rate: str - user_rating: str - platform: str - collections: list - location: str - transcode_container: str - audio_channel_layout: str - local: str - stream_subtitle_format: str - stream_video_ref_frames: str - transcode_hw_encode_title: str - stream_container_decision: str - audience_rating: str - full_title: str - ip_address: str - subtitles: int - stream_subtitle_language: str - channel_stream: int - video_bitrate: str - is_allow_sync: int - stream_video_bitrate: str - summary: str - stream_audio_decision: str - aspect_ratio: str - audio_bitrate_mode: str - transcode_hw_decode_title: str - stream_audio_channel_layout: str - deleted_user: int - library_name: str - art: str - stream_video_resolution: str - video_profile: str - sort_title: str - stream_video_codec_level: str - stream_video_height: str - year: str - stream_duration: str - stream_audio_channels: str - video_language_code: str - transcode_key: str - transcode_throttled: int - container: str - stream_audio_bitrate: str - user: str - selected: int - product_version: str - subtitle_location: str - transcode_hw_requested: int - video_height: str - state: str - is_restricted: int - email: str - stream_container: str - transcode_speed: str - video_bit_depth: str - stream_audio_sample_rate: str - grandparent_title: str - studio: str - transcode_decision: str - video_width: str - bitrate: str - machine_id: str - originally_available_at: str - video_frame_rate: str - synced_version_profile: str - friendly_name: str - 
audio_profile: str - optimized_version_title: str - platform_name: str - stream_video_language: str - keep_history: int - stream_audio_codec: str - stream_video_codec: str - grandparent_thumb: str - synced_version: int - transcode_hw_decode: str - user_thumb: str - stream_video_width: str - height: str - stream_subtitle_decision: str - audio_codec: str - parent_title: str - guid: str - audio_language_code: str - transcode_video_codec: str - transcode_audio_codec: str - stream_video_decision: str - user_id: int - transcode_height: str - transcode_hw_full_pipeline: int - throttled: str - quality_profile: str - width: str - live: int - stream_subtitle_forced: int - media_type: str - video_resolution: str - stream_subtitle_location: str - do_notify: int - video_ref_frames: str - stream_subtitle_language_code: str - audio_channels: str - stream_audio_language_code: str - optimized_version_profile: str - relay: int - duration: str - rating_image: str - is_home_user: int - is_admin: int - ip_address_public: str - allow_guest: int - transcode_audio_channels: str - stream_audio_channel_layout_: str - media_index: str - stream_video_framerate: str - transcode_hw_encode: str - grandparent_rating_key: str - original_title: str - added_at: str - banner: str - bif_thumb: str - parent_media_index: str - live_uuid: str - audio_language: str - stream_audio_bitrate_mode: str - username: str - subtitle_decision: str - children_count: str - updated_at: str - player: str - subtitle_format: str - file: str - file_size: str - session_key: str - id: str - subtitle_container: str - genres: list - stream_video_language_code: str - indexes: int - video_decision: str - stream_audio_language: str - writers: list - actors: list - progress_percent: str - audio_decision: str - subtitle_forced: int - profile: str - product: str - view_offset: str - type: str - audience_rating_image: str - audio_bitrate: str - section_id: str - stream_subtitle_codec: str - subtitle_codec: str - video_codec: str - 
device: str - stream_video_bit_depth: str - video_framerate: str - transcode_hw_encoding: int - transcode_protocol: str - shared_libraries: list - stream_aspect_ratio: str - content_rating: str - session_id: str - directors: list - parent_thumb: str - subtitle_language_code: str - transcode_progress: int - subtitle_language: str - stream_subtitle_container: str + rating: str = None + transcode_width: str = None + labels: list = None + stream_bitrate: str = None + bandwidth: str = None + optimized_version: int = None + video_language: str = None + parent_rating_key: str = None + rating_key: str = None + platform_version: str = None + transcode_hw_decoding: int = None + thumb: str = None + title: str = None + video_codec_level: str = None + tagline: str = None + last_viewed_at: str = None + audio_sample_rate: str = None + user_rating: str = None + platform: str = None + collections: list = None + location: str = None + transcode_container: str = None + audio_channel_layout: str = None + local: str = None + stream_subtitle_format: str = None + stream_video_ref_frames: str = None + transcode_hw_encode_title: str = None + stream_container_decision: str = None + audience_rating: str = None + full_title: str = None + ip_address: str = None + subtitles: int = None + stream_subtitle_language: str = None + channel_stream: int = None + video_bitrate: str = None + is_allow_sync: int = None + stream_video_bitrate: str = None + summary: str = None + stream_audio_decision: str = None + aspect_ratio: str = None + audio_bitrate_mode: str = None + transcode_hw_decode_title: str = None + stream_audio_channel_layout: str = None + deleted_user: int = None + library_name: str = None + art: str = None + stream_video_resolution: str = None + video_profile: str = None + sort_title: str = None + stream_video_codec_level: str = None + stream_video_height: str = None + year: str = None + stream_duration: str = None + stream_audio_channels: str = None + video_language_code: str = None + 
transcode_key: str = None + transcode_throttled: int = None + container: str = None + stream_audio_bitrate: str = None + user: str = None + selected: int = None + product_version: str = None + subtitle_location: str = None + transcode_hw_requested: int = None + video_height: str = None + state: str = None + is_restricted: int = None + email: str = None + stream_container: str = None + transcode_speed: str = None + video_bit_depth: str = None + stream_audio_sample_rate: str = None + grandparent_title: str = None + studio: str = None + transcode_decision: str = None + video_width: str = None + bitrate: str = None + machine_id: str = None + originally_available_at: str = None + video_frame_rate: str = None + synced_version_profile: str = None + friendly_name: str = None + audio_profile: str = None + optimized_version_title: str = None + platform_name: str = None + stream_video_language: str = None + keep_history: int = None + stream_audio_codec: str = None + stream_video_codec: str = None + grandparent_thumb: str = None + synced_version: int = None + transcode_hw_decode: str = None + user_thumb: str = None + stream_video_width: str = None + height: str = None + stream_subtitle_decision: str = None + audio_codec: str = None + parent_title: str = None + guid: str = None + audio_language_code: str = None + transcode_video_codec: str = None + transcode_audio_codec: str = None + stream_video_decision: str = None + user_id: int = None + transcode_height: str = None + transcode_hw_full_pipeline: int = None + throttled: str = None + quality_profile: str = None + width: str = None + live: int = None + stream_subtitle_forced: int = None + media_type: str = None + video_resolution: str = None + stream_subtitle_location: str = None + do_notify: int = None + video_ref_frames: str = None + stream_subtitle_language_code: str = None + audio_channels: str = None + stream_audio_language_code: str = None + optimized_version_profile: str = None + relay: int = None + duration: str = None 
+ rating_image: str = None + is_home_user: int = None + is_admin: int = None + ip_address_public: str = None + allow_guest: int = None + transcode_audio_channels: str = None + stream_audio_channel_layout_: str = None + media_index: str = None + stream_video_framerate: str = None + transcode_hw_encode: str = None + grandparent_rating_key: str = None + original_title: str = None + added_at: str = None + banner: str = None + bif_thumb: str = None + parent_media_index: str = None + live_uuid: str = None + audio_language: str = None + stream_audio_bitrate_mode: str = None + username: str = None + subtitle_decision: str = None + children_count: str = None + updated_at: str = None + player: str = None + subtitle_format: str = None + file: str = None + file_size: str = None + session_key: str = None + id: str = None + subtitle_container: str = None + genres: list = None + stream_video_language_code: str = None + indexes: int = None + video_decision: str = None + stream_audio_language: str = None + writers: list = None + actors: list = None + progress_percent: str = None + audio_decision: str = None + subtitle_forced: int = None + profile: str = None + product: str = None + view_offset: str = None + type: str = None + audience_rating_image: str = None + audio_bitrate: str = None + section_id: str = None + stream_subtitle_codec: str = None + subtitle_codec: str = None + video_codec: str = None + device: str = None + stream_video_bit_depth: str = None + video_framerate: str = None + transcode_hw_encoding: int = None + transcode_protocol: str = None + shared_libraries: list = None + stream_aspect_ratio: str = None + content_rating: str = None + session_id: str = None + directors: list = None + parent_thumb: str = None + subtitle_language_code: str = None + transcode_progress: int = None + subtitle_language: str = None + stream_subtitle_container: str = None + _cache_time: int = None + def geoip_download(): tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz')) diff 
--git a/Varken/iniparser.py b/Varken/iniparser.py index 7793161e..89d56fcc 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,7 +1,7 @@ import sys import configparser from os.path import abspath, join -from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer +from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer, RadarrServer class INIParser(object): @@ -81,7 +81,7 @@ def parse_opts(self): except ValueError: self.radarr_enabled = True - if self.sonarr_enabled: + if self.radarr_enabled: sids = self.config.get('global', 'radarr_server_ids').strip(' ').split(',') for server_id in sids: @@ -90,8 +90,14 @@ def parse_opts(self): apikey = self.config.get(radarr_section, 'apikey') scheme = 'https://' if self.config.getboolean(radarr_section, 'ssl') else 'http://' verify_ssl = self.config.getboolean(radarr_section, 'verify_ssl') - - self.radarr_servers.append(Server(server_id, scheme + url, apikey, verify_ssl)) + queue = self.config.getboolean(radarr_section, 'queue') + queue_run_seconds = self.config.getint(radarr_section, 'queue_run_seconds') + get_missing = self.config.getboolean(radarr_section, 'get_missing') + get_missing_run_seconds = self.config.getint(radarr_section, 'get_missing_run_seconds') + + server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds, + get_missing, get_missing_run_seconds) + self.radarr_servers.append(server) # Parse Tautulli options try: diff --git a/Varken/radarr.py b/Varken/radarr.py new file mode 100644 index 00000000..33f7c0b3 --- /dev/null +++ b/Varken/radarr.py @@ -0,0 +1,106 @@ +import requests +from datetime import datetime, timezone +from influxdb import InfluxDBClient + +from Varken.logger import logging +from Varken.helpers import Movie, Queue + + +class RadarrAPI(object): + def __init__(self, servers, influx_server): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + self.influx = InfluxDBClient(influx_server.url, 
influx_server.port, influx_server.username, + influx_server.password, 'plex2') + self.servers = servers + # Create session to reduce server web thread load, and globally define pageSize for all requests + self.session = requests.Session() + + def influx_push(self, payload): + # TODO: error handling for failed connection + self.influx.write_points(payload) + + @logging + def get_missing(self, notimplemented): + endpoint = '/api/movie' + self.now = datetime.now(timezone.utc).astimezone().isoformat() + influx_payload = [] + + for server in self.servers: + missing = [] + headers = {'X-Api-Key': server.api_key} + get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() + movies = [Movie(**movie) for movie in get] + + for movie in movies: + if server.get_missing: + if not movie.downloaded and movie.isAvailable: + ma = True + else: + ma = False + movie_name = '{} ({})'.format(movie.title, movie.year) + missing.append((movie_name, ma, movie.tmdbId)) + + for title, ma, mid in missing: + influx_payload.append( + { + "measurement": "Radarr", + "tags": { + "Missing": True, + "Missing_Available": ma, + "tmdbId": mid, + "server": server.id + }, + "time": self.now, + "fields": { + "name": title + } + } + ) + + self.influx_push(influx_payload) + + @logging + def get_queue(self, notimplemented): + endpoint = '/api/queue' + self.now = datetime.now(timezone.utc).astimezone().isoformat() + influx_payload = [] + + for server in self.servers: + queue = [] + headers = {'X-Api-Key': server.api_key} + get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() + for movie in get: + movie['movie'] = Movie(**movie['movie']) + download_queue = [Queue(**movie) for movie in get] + + for queue_item in download_queue: + name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year) + + if queue_item.protocol.upper() == 'USENET': + protocol_id = 1 + else: + protocol_id = 0 + + queue.append((name, 
queue_item.quality['quality']['name'], queue_item.protocol.upper(), + protocol_id, queue_item.id)) + + for movie, quality, protocol, protocol_id, qid in queue: + influx_payload.append( + { + "measurement": "Radarr", + "tags": { + "type": "Queue", + "tmdbId": qid, + "server": server.id + }, + "time": self.now, + "fields": { + "name": movie, + "quality": quality, + "protocol": protocol, + "protocol_id": protocol_id + } + } + ) + + self.influx_push(influx_payload) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 79cab8a1..9ee89c26 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 -# Do not edit this script. Edit configuration.py import requests from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta diff --git a/data/varken.example.ini b/data/varken.example.ini index aaa3bb63..0ebd5119 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -58,6 +58,11 @@ url = radarr1.domain.tld apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true +queue = true +queue_run_seconds = 300 +get_missing = true +get_missing_available = true +get_missing_run_seconds = 300 [radarr-2] url = radarr2.domain.tld diff --git a/varken.py b/varken.py index 8be5b2c8..482e7883 100644 --- a/varken.py +++ b/varken.py @@ -5,6 +5,7 @@ from Varken.iniparser import INIParser from Varken.sonarr import SonarrAPI from Varken.tautulli import TautulliAPI +from Varken.radarr import RadarrAPI def threaded(job, days=None): @@ -37,6 +38,17 @@ def threaded(job, days=None): if server.get_sessions: schedule.every(server.get_sessions_run_seconds).seconds.do(threaded, TAUTULLI.get_sessions) + if CONFIG.radarr_enabled: + RADARR = RadarrAPI(CONFIG.radarr_servers, CONFIG.influx_server) + + for server in CONFIG.radarr_servers: + if any([server.get_missing, server.get_missing_available]): + schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing) + if server.queue: + 
schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue) + + + while True: schedule.run_pending() sleep(1) From 07ef8a81a86c6c67a347b94531bbd6e5857cc3cc Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 21:31:58 -0600 Subject: [PATCH 34/90] reworked scheduler to pass server to instance to remove duplication --- Varken/helpers.py | 3 +- Varken/radarr.py | 138 +++++++++++++------------ Varken/sonarr.py | 212 +++++++++++++++++++-------------------- Varken/tautulli.py | 216 ++++++++++++++++++++-------------------- data/varken.example.ini | 1 - varken.py | 23 ++--- 6 files changed, 286 insertions(+), 307 deletions(-) diff --git a/Varken/helpers.py b/Varken/helpers.py index 434f2eeb..c3d8d2e3 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -116,7 +116,7 @@ class TautulliServer(NamedTuple): id: int = None url: str = None fallback_ip: str = None - apikey: str = None + api_key: str = None verify_ssl: bool = None get_activity: bool = False get_activity_run_seconds: int = 30 @@ -319,7 +319,6 @@ class TautulliStream(NamedTuple): transcode_progress: int = None subtitle_language: str = None stream_subtitle_container: str = None - _cache_time: int = None def geoip_download(): diff --git a/Varken/radarr.py b/Varken/radarr.py index 33f7c0b3..c31e2e03 100644 --- a/Varken/radarr.py +++ b/Varken/radarr.py @@ -7,11 +7,11 @@ class RadarrAPI(object): - def __init__(self, servers, influx_server): + def __init__(self, server, influx_server): self.now = datetime.now(timezone.utc).astimezone().isoformat() self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex2') - self.servers = servers + self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() @@ -20,87 +20,85 @@ def influx_push(self, payload): self.influx.write_points(payload) @logging - def get_missing(self, notimplemented): + 
def get_missing(self): endpoint = '/api/movie' self.now = datetime.now(timezone.utc).astimezone().isoformat() influx_payload = [] - for server in self.servers: - missing = [] - headers = {'X-Api-Key': server.api_key} - get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() - movies = [Movie(**movie) for movie in get] - - for movie in movies: - if server.get_missing: - if not movie.downloaded and movie.isAvailable: - ma = True - else: - ma = False - movie_name = '{} ({})'.format(movie.title, movie.year) - missing.append((movie_name, ma, movie.tmdbId)) - - for title, ma, mid in missing: - influx_payload.append( - { - "measurement": "Radarr", - "tags": { - "Missing": True, - "Missing_Available": ma, - "tmdbId": mid, - "server": server.id - }, - "time": self.now, - "fields": { - "name": title - } + missing = [] + headers = {'X-Api-Key': self.server.api_key} + get = self.session.get(self.server.url + endpoint, headers=headers, verify=self.server.verify_ssl).json() + movies = [Movie(**movie) for movie in get] + + for movie in movies: + if self.server.get_missing: + if not movie.downloaded and movie.isAvailable: + ma = True + else: + ma = False + movie_name = '{} ({})'.format(movie.title, movie.year) + missing.append((movie_name, ma, movie.tmdbId)) + + for title, ma, mid in missing: + influx_payload.append( + { + "measurement": "Radarr", + "tags": { + "Missing": True, + "Missing_Available": ma, + "tmdbId": mid, + "server": self.server.id + }, + "time": self.now, + "fields": { + "name": title } - ) + } + ) self.influx_push(influx_payload) @logging - def get_queue(self, notimplemented): + def get_queue(self): endpoint = '/api/queue' self.now = datetime.now(timezone.utc).astimezone().isoformat() influx_payload = [] - for server in self.servers: - queue = [] - headers = {'X-Api-Key': server.api_key} - get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() - for movie in get: - movie['movie'] = 
Movie(**movie['movie']) - download_queue = [Queue(**movie) for movie in get] - - for queue_item in download_queue: - name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year) - - if queue_item.protocol.upper() == 'USENET': - protocol_id = 1 - else: - protocol_id = 0 - - queue.append((name, queue_item.quality['quality']['name'], queue_item.protocol.upper(), - protocol_id, queue_item.id)) - - for movie, quality, protocol, protocol_id, qid in queue: - influx_payload.append( - { - "measurement": "Radarr", - "tags": { - "type": "Queue", - "tmdbId": qid, - "server": server.id - }, - "time": self.now, - "fields": { - "name": movie, - "quality": quality, - "protocol": protocol, - "protocol_id": protocol_id - } + queue = [] + headers = {'X-Api-Key': self.server.api_key} + get = self.session.get(self.server.url + endpoint, headers=headers, verify=self.server.verify_ssl).json() + for movie in get: + movie['movie'] = Movie(**movie['movie']) + download_queue = [Queue(**movie) for movie in get] + + for queue_item in download_queue: + name = '{} ({})'.format(queue_item.movie.title, queue_item.movie.year) + + if queue_item.protocol.upper() == 'USENET': + protocol_id = 1 + else: + protocol_id = 0 + + queue.append((name, queue_item.quality['quality']['name'], queue_item.protocol.upper(), + protocol_id, queue_item.id)) + + for movie, quality, protocol, protocol_id, qid in queue: + influx_payload.append( + { + "measurement": "Radarr", + "tags": { + "type": "Queue", + "tmdbId": qid, + "server": self.server.id + }, + "time": self.now, + "fields": { + "name": movie, + "quality": quality, + "protocol": protocol, + "protocol_id": protocol_id } - ) + } + ) self.influx_push(influx_payload) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 9ee89c26..7fb7a041 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -7,147 +7,141 @@ class SonarrAPI(object): - def __init__(self, servers, influx_server): + def __init__(self, server, influx_server): # Set Time of initialization 
self.now = datetime.now(timezone.utc).astimezone().isoformat() self.today = str(date.today()) self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex') - self.servers = servers + self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = requests.Session() self.session.params = {'pageSize': 1000} @logging - def get_missing(self, days_past): + def get_missing(self): endpoint = '/api/calendar' - last_days = str(date.today() + timedelta(days=-days_past)) + last_days = str(date.today() + timedelta(days=-self.server.missing_days)) self.now = datetime.now(timezone.utc).astimezone().isoformat() params = {'start': last_days, 'end': self.today} influx_payload = [] + missing = [] + headers = {'X-Api-Key': self.server.api_key} - for server in self.servers: - missing = [] - headers = {'X-Api-Key': server.api_key} - - get = self.session.get(server.url + endpoint, params=params, headers=headers, - verify=server.verify_ssl).json() - # Iteratively create a list of TVShow Objects from response json - tv_shows = [TVShow(**show) for show in get] - - # Add show to missing list if file does not exist - for show in tv_shows: - if not show.hasFile: - sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) - missing.append((show.series['title'], sxe, show.airDate, show.title, show.id)) - - for series_title, sxe, air_date, episode_title, sonarr_id in missing: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Missing", - "sonarrId": sonarr_id, - "server": server.id - }, - "time": self.now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date - } + get = self.session.get(self.server.url + endpoint, params=params, headers=headers, + verify=self.server.verify_ssl).json() + # Iteratively create a list of TVShow Objects from response json + tv_shows = [TVShow(**show) for 
show in get] + + # Add show to missing list if file does not exist + for show in tv_shows: + if not show.hasFile: + sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) + missing.append((show.series['title'], sxe, show.airDate, show.title, show.id)) + + for series_title, sxe, air_date, episode_title, sonarr_id in missing: + influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Missing", + "sonarrId": sonarr_id, + "server": self.server.id + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "airs": air_date } - ) + } + ) self.influx_push(influx_payload) + @logging - def get_future(self, future_days): + def get_future(self): endpoint = '/api/calendar/' self.now = datetime.now(timezone.utc).astimezone().isoformat() - future = str(date.today() + timedelta(days=future_days)) + future = str(date.today() + timedelta(days=self.server.future_days)) influx_payload = [] - - for server in self.servers: - air_days = [] - - headers = {'X-Api-Key': server.api_key} - params = {'start': self.today, 'end': future} - - get = self.session.get(server.url + endpoint, params=params, headers=headers, - verify=server.verify_ssl).json() - tv_shows = [TVShow(**show) for show in get] - - for show in tv_shows: - sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) - air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id)) - - for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Future", - "sonarrId": sonarr_id, - "server": server.id - }, - "time": self.now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "airs": air_date, - "downloaded": dl_status - } + air_days = [] + headers = {'X-Api-Key': self.server.api_key} + params = {'start': self.today, 'end': future} + + get = self.session.get(self.server.url + 
endpoint, params=params, headers=headers, + verify=self.server.verify_ssl).json() + tv_shows = [TVShow(**show) for show in get] + + for show in tv_shows: + sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber) + air_days.append((show.series['title'], show.hasFile, sxe, show.title, show.airDate, show.id)) + + for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days: + influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Future", + "sonarrId": sonarr_id, + "server": self.server.id + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "airs": air_date, + "downloaded": dl_status } - ) + } + ) self.influx_push(influx_payload) @logging - def get_queue(self, notimplemented): + def get_queue(self): influx_payload = [] endpoint = '/api/queue' self.now = datetime.now(timezone.utc).astimezone().isoformat() - - for server in self.servers: - queue = [] - headers = {'X-Api-Key': server.api_key} - - get = self.session.get(server.url + endpoint, headers=headers, verify=server.verify_ssl).json() - download_queue = [Queue(**show) for show in get] - - for show in download_queue: - sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber']) - if show.protocol.upper() == 'USENET': - protocol_id = 1 - else: - protocol_id = 0 - - queue.append((show.series['title'], show.episode['title'], show.protocol.upper(), - protocol_id, sxe, show.id)) - - for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: - influx_payload.append( - { - "measurement": "Sonarr", - "tags": { - "type": "Queue", - "sonarrId": sonarr_id, - "server": server.id - - }, - "time": self.now, - "fields": { - "name": series_title, - "epname": episode_title, - "sxe": sxe, - "protocol": protocol, - "protocol_id": protocol_id - } + queue = [] + headers = {'X-Api-Key': self.server.api_key} + + get = self.session.get(self.server.url + endpoint, headers=headers, 
verify=self.server.verify_ssl).json() + download_queue = [Queue(**show) for show in get] + + for show in download_queue: + sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber']) + if show.protocol.upper() == 'USENET': + protocol_id = 1 + else: + protocol_id = 0 + + queue.append((show.series['title'], show.episode['title'], show.protocol.upper(), + protocol_id, sxe, show.id)) + + for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue: + influx_payload.append( + { + "measurement": "Sonarr", + "tags": { + "type": "Queue", + "sonarrId": sonarr_id, + "server": self.server.id + + }, + "time": self.now, + "fields": { + "name": series_title, + "epname": episode_title, + "sxe": sxe, + "protocol": protocol, + "protocol_id": protocol_id } - ) + } + ) self.influx_push(influx_payload) diff --git a/Varken/tautulli.py b/Varken/tautulli.py index da01739a..ee23e085 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -7,12 +7,12 @@ class TautulliAPI(object): - def __init__(self, servers, influx_server): + def __init__(self, server, influx_server): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex2') - self.servers = servers + self.server = server self.session = requests.Session() self.endpoint = '/api/v2' @@ -21,128 +21,124 @@ def influx_push(self, payload): self.influx.write_points(payload) @logging - def get_activity(self, notimplemented): + def get_activity(self): self.now = datetime.now(timezone.utc).astimezone().isoformat() params = {'cmd': 'get_activity'} influx_payload = [] + params['apikey'] = self.server.api_key + g = self.session.get(self.server.url + self.endpoint, params=params, verify=self.server.verify_ssl) + get = g.json()['response']['data'] + + influx_payload.append( + { + "measurement": "Tautulli", + "tags": { + "type": 
"current_stream_stats", + "server": self.server.id + }, + "time": self.now, + "fields": { + "stream_count": int(get['stream_count']), + "total_bandwidth": int(get['total_bandwidth']), + "wan_bandwidth": int(get['wan_bandwidth']), + "lan_bandwidth": int(get['lan_bandwidth']), + "transcode_streams": int(get['stream_count_transcode']), + "direct_play_streams": int(get['stream_count_direct_play']), + "direct_streams": int(get['stream_count_direct_stream']) + } + } + ) - for server in self.servers: - params['apikey'] = server.apikey - g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl) - get = g.json()['response']['data'] + self.influx_push(influx_payload) + + @logging + def get_sessions(self): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + params = {'cmd': 'get_activity'} + influx_payload = [] + params['apikey'] = self.server.api_key + g = self.session.get(self.server.url + self.endpoint, params=params, verify=self.server.verify_ssl) + get = g.json()['response']['data']['sessions'] + sessions = [TautulliStream(**session) for session in get] + + for session in sessions: + try: + geodata = geo_lookup(session.ip_address_public) + except (ValueError, AddressNotFoundError): + if self.server.fallback_ip: + geodata = geo_lookup(self.server.fallback_ip) + else: + my_ip = requests.get('http://ip.42.pl/raw').text + geodata = geo_lookup(my_ip) + + if not all([geodata.location.latitude, geodata.location.longitude]): + latitude = 37.234332396 + longitude = -115.80666344 + else: + latitude = geodata.location.latitude + longitude = geodata.location.longitude + + decision = session.transcode_decision + if decision == 'copy': + decision = 'direct stream' + + video_decision = session.stream_video_decision + if video_decision == 'copy': + video_decision = 'direct stream' + elif video_decision == '': + video_decision = 'Music' + + quality = session.stream_video_resolution + if not quality: + quality = session.container.upper() + 
elif quality in ('SD', 'sd', '4k'): + quality = session.stream_video_resolution.upper() + else: + quality = session.stream_video_resolution + 'p' + + player_state = session.state.lower() + if player_state == 'playing': + player_state = 0 + elif player_state == 'paused': + player_state = 1 + elif player_state == 'buffering': + player_state = 3 influx_payload.append( { "measurement": "Tautulli", "tags": { - "type": "current_stream_stats", - "server": server.id + "type": "Session", + "session_id": session.session_id, + "name": session.friendly_name, + "title": session.full_title, + "platform": session.platform, + "product_version": session.product_version, + "quality": quality, + "video_decision": video_decision.title(), + "transcode_decision": decision.title(), + "media_type": session.media_type.title(), + "audio_codec": session.audio_codec.upper(), + "audio_profile": session.audio_profile.upper(), + "stream_audio_codec": session.stream_audio_codec.upper(), + "quality_profile": session.quality_profile, + "progress_percent": session.progress_percent, + "region_code": geodata.subdivisions.most_specific.iso_code, + "location": geodata.city.name, + "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name, + geodata.city.name), + "latitude": latitude, + "longitude": longitude, + "player_state": player_state, + "device_type": session.platform, + "server": self.server.id }, "time": self.now, "fields": { - "stream_count": int(get['stream_count']), - "total_bandwidth": int(get['total_bandwidth']), - "wan_bandwidth": int(get['wan_bandwidth']), - "lan_bandwidth": int(get['lan_bandwidth']), - "transcode_streams": int(get['stream_count_transcode']), - "direct_play_streams": int(get['stream_count_direct_play']), - "direct_streams": int(get['stream_count_direct_stream']) + "session_id": session.session_id, + "session_key": session.session_key } } ) self.influx_push(influx_payload) - - @logging - def get_sessions(self, notimplemented): - self.now = 
datetime.now(timezone.utc).astimezone().isoformat() - params = {'cmd': 'get_activity'} - influx_payload = [] - - for server in self.servers: - params['apikey'] = server.apikey - g = self.session.get(server.url + self.endpoint, params=params, verify=server.verify_ssl) - get = g.json()['response']['data']['sessions'] - sessions = [TautulliStream(**session) for session in get] - - for session in sessions: - try: - geodata = geo_lookup(session.ip_address_public) - except (ValueError, AddressNotFoundError): - if server.fallback_ip: - geodata = geo_lookup(server.fallback_ip) - else: - my_ip = requests.get('http://ip.42.pl/raw').text - geodata = geo_lookup(my_ip) - - if not all([geodata.location.latitude, geodata.location.longitude]): - latitude = 37.234332396 - longitude = -115.80666344 - else: - latitude = geodata.location.latitude - longitude = geodata.location.longitude - - decision = session.transcode_decision - if decision == 'copy': - decision = 'direct stream' - - video_decision = session.stream_video_decision - if video_decision == 'copy': - video_decision = 'direct stream' - elif video_decision == '': - video_decision = 'Music' - - quality = session.stream_video_resolution - if not quality: - quality = session.container.upper() - elif quality in ('SD', 'sd', '4k'): - quality = session.stream_video_resolution.upper() - else: - quality = session.stream_video_resolution + 'p' - - player_state = session.state.lower() - if player_state == 'playing': - player_state = 0 - elif player_state == 'paused': - player_state = 1 - elif player_state == 'buffering': - player_state = 3 - - influx_payload.append( - { - "measurement": "Tautulli", - "tags": { - "type": "Session", - "session_id": session.session_id, - "name": session.friendly_name, - "title": session.full_title, - "platform": session.platform, - "product_version": session.product_version, - "quality": quality, - "video_decision": video_decision.title(), - "transcode_decision": decision.title(), - "media_type": 
session.media_type.title(), - "audio_codec": session.audio_codec.upper(), - "audio_profile": session.audio_profile.upper(), - "stream_audio_codec": session.stream_audio_codec.upper(), - "quality_profile": session.quality_profile, - "progress_percent": session.progress_percent, - "region_code": geodata.subdivisions.most_specific.iso_code, - "location": geodata.city.name, - "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name, - geodata.city.name), - "latitude": latitude, - "longitude": longitude, - "player_state": player_state, - "device_type": session.platform, - "server": server.id - }, - "time": self.now, - "fields": { - "session_id": session.session_id, - "session_key": session.session_key - } - } - ) - - self.influx_push(influx_payload) diff --git a/data/varken.example.ini b/data/varken.example.ini index 0ebd5119..4d280541 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -61,7 +61,6 @@ verify_ssl = true queue = true queue_run_seconds = 300 get_missing = true -get_missing_available = true get_missing_run_seconds = 300 [radarr-2] diff --git a/varken.py b/varken.py index 482e7883..921b8fb8 100644 --- a/varken.py +++ b/varken.py @@ -8,8 +8,8 @@ from Varken.radarr import RadarrAPI -def threaded(job, days=None): - thread = threading.Thread(target=job, args=([days])) +def threaded(job): + thread = threading.Thread(target=job) thread.start() @@ -17,38 +17,31 @@ def threaded(job, days=None): CONFIG = INIParser() if CONFIG.sonarr_enabled: - SONARR = SonarrAPI(CONFIG.sonarr_servers, CONFIG.influx_server) - for server in CONFIG.sonarr_servers: + SONARR = SonarrAPI(server, CONFIG.influx_server) if server.queue: schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue) if server.missing_days > 0: - schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing, - server.missing_days) + schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing) if 
server.future_days > 0: - schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future, - server.future_days) + schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future) if CONFIG.tautulli_enabled: - TAUTULLI = TautulliAPI(CONFIG.tautulli_servers, CONFIG.influx_server) - for server in CONFIG.tautulli_servers: + TAUTULLI = TautulliAPI(server, CONFIG.influx_server) if server.get_activity: schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity) if server.get_sessions: schedule.every(server.get_sessions_run_seconds).seconds.do(threaded, TAUTULLI.get_sessions) if CONFIG.radarr_enabled: - RADARR = RadarrAPI(CONFIG.radarr_servers, CONFIG.influx_server) - for server in CONFIG.radarr_servers: - if any([server.get_missing, server.get_missing_available]): + RADARR = RadarrAPI(server, CONFIG.influx_server) + if server.get_missing: schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing) if server.queue: schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue) - - while True: schedule.run_pending() sleep(1) From 5241cfc92ea6e8000801694f2947fa38071275d8 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 21:36:42 -0600 Subject: [PATCH 35/90] truncate roku product version --- Varken/tautulli.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Varken/tautulli.py b/Varken/tautulli.py index ee23e085..a2789d02 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -104,6 +104,10 @@ def get_sessions(self): elif player_state == 'buffering': player_state = 3 + product_version = session.product_version + if session.platform == 'Roku': + product_version = session.product_version.split('-')[0] + influx_payload.append( { "measurement": "Tautulli", @@ -113,7 +117,7 @@ def get_sessions(self): "name": session.friendly_name, "title": session.full_title, "platform": session.platform, - "product_version": 
session.product_version, + "product_version": product_version, "quality": quality, "video_decision": video_decision.title(), "transcode_decision": decision.title(), From 3c6cb147f6bf7c85438cb3454eddbe704ed851c0 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 22:26:44 -0600 Subject: [PATCH 36/90] ported ombi --- Legacy/configuration.example.py | 49 ------------------- Legacy/configuration.py | 49 ------------------- Legacy/crontabs | 11 ----- Legacy/ombi.py | 87 --------------------------------- Varken/helpers.py | 17 +++++-- Varken/iniparser.py | 38 +++++++++----- Varken/ombi.py | 70 ++++++++++++++++++++++++++ Varken/radarr.py | 5 +- Varken/sonarr.py | 6 +-- Varken/tautulli.py | 7 ++- data/varken.example.ini | 13 +++-- varken.py | 10 +++- 12 files changed, 137 insertions(+), 225 deletions(-) delete mode 100644 Legacy/configuration.example.py delete mode 100644 Legacy/configuration.py delete mode 100644 Legacy/crontabs delete mode 100644 Legacy/ombi.py create mode 100644 Varken/ombi.py diff --git a/Legacy/configuration.example.py b/Legacy/configuration.example.py deleted file mode 100644 index a0df50a2..00000000 --- a/Legacy/configuration.example.py +++ /dev/null @@ -1,49 +0,0 @@ -''' -Notes: - - Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix - - - Sonarr + Radarr scripts support multiple servers. You can remove the second - server by putting a # in front of the line. - - - tautulli_failback_ip, This is used when there is no IP listed in tautulli. - This can happen when you are streaming locally. This is usually your public IP. 
-''' - -########################### INFLUXDB CONFIG ########################### -influxdb_url = 'influxdb.domain.tld' -influxdb_port = 8086 -influxdb_username = '' -influxdb_password = '' - -############################ SONARR CONFIG ############################ -sonarr_server_list = [ - ('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), - ('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), - #('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') - ] -sonarr_influxdb_db_name = 'plex' - -############################ RADARR CONFIG ############################ -radarr_server_list = [ - ('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), - ('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), - #('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') - ] -radarr_influxdb_db_name = 'plex' - -############################ OMBI CONFIG ############################## -ombi_url = 'https://ombi.domain.tld' -ombi_api_key = 'xxxxxxxxxxxxxxx' -ombi_influxdb_db_name = 'plex' - -########################## TAUTULLI CONFIG ############################ -tautulli_url = 'https://tautulli.domain.tld' -tautulli_api_key = 'xxxxxxxxxxxxxxx' -tautulli_failback_ip = '' -tautulli_influxdb_db_name = 'plex' - -########################## FIREWALL CONFIG ############################ -asa_url = 'https://firewall.domain.tld' -asa_username = 'cisco' -asa_password = 'cisco' -asa_influxdb_db_name = 'asa' diff --git a/Legacy/configuration.py b/Legacy/configuration.py deleted file mode 100644 index a0df50a2..00000000 --- a/Legacy/configuration.py +++ /dev/null @@ -1,49 +0,0 @@ -''' -Notes: - - Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix - - - Sonarr + Radarr scripts support multiple servers. You can remove the second - server by putting a # in front of the line. - - - tautulli_failback_ip, This is used when there is no IP listed in tautulli. - This can happen when you are streaming locally. This is usually your public IP. 
-''' - -########################### INFLUXDB CONFIG ########################### -influxdb_url = 'influxdb.domain.tld' -influxdb_port = 8086 -influxdb_username = '' -influxdb_password = '' - -############################ SONARR CONFIG ############################ -sonarr_server_list = [ - ('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), - ('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), - #('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') - ] -sonarr_influxdb_db_name = 'plex' - -############################ RADARR CONFIG ############################ -radarr_server_list = [ - ('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'), - ('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'), - #('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3') - ] -radarr_influxdb_db_name = 'plex' - -############################ OMBI CONFIG ############################## -ombi_url = 'https://ombi.domain.tld' -ombi_api_key = 'xxxxxxxxxxxxxxx' -ombi_influxdb_db_name = 'plex' - -########################## TAUTULLI CONFIG ############################ -tautulli_url = 'https://tautulli.domain.tld' -tautulli_api_key = 'xxxxxxxxxxxxxxx' -tautulli_failback_ip = '' -tautulli_influxdb_db_name = 'plex' - -########################## FIREWALL CONFIG ############################ -asa_url = 'https://firewall.domain.tld' -asa_username = 'cisco' -asa_password = 'cisco' -asa_influxdb_db_name = 'asa' diff --git a/Legacy/crontabs b/Legacy/crontabs deleted file mode 100644 index 413be4ec..00000000 --- a/Legacy/crontabs +++ /dev/null @@ -1,11 +0,0 @@ -### Modify paths as appropriate. python3 is located in different places for different users. (`which python3` will give you the path) -### to edit your crontab entry, do not modify /var/spool/cron/crontabs/ directly, use `crontab -e` -### Crontabs require an empty line at the end or they WILL not run. 
Make sure to have 2 lines to be safe -### -* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py -* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/ombi.py ) -* * * * * /usr/bin/python3 /path-to-grafana-scripts/taurulli.py -* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/tautulli.py ) -*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py -*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py -#*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py diff --git a/Legacy/ombi.py b/Legacy/ombi.py deleted file mode 100644 index bcea45fb..00000000 --- a/Legacy/ombi.py +++ /dev/null @@ -1,87 +0,0 @@ -# Do not edit this script. Edit configuration.py -import sys -import requests -from datetime import datetime, timezone -from influxdb import InfluxDBClient -import argparse -from argparse import RawTextHelpFormatter -from Legacy import configuration - -headers = {'Apikey': configuration.ombi_api_key} - -def now_iso(): - now_iso = datetime.now(timezone.utc).astimezone().isoformat() - return now_iso - -def influx_sender(influx_payload): - influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username, - configuration.influxdb_password, configuration.ombi_influxdb_db_name) - influx.write_points(influx_payload) - -def get_total_requests(): - get_tv_requests = requests.get('{}/api/v1/Request/tv'.format(configuration.ombi_url), headers=headers).json() - get_movie_requests = requests.get('{}/api/v1/Request/movie'.format(configuration.ombi_url), headers=headers).json() - - count_movie_requests = 0 - count_tv_requests = 0 - - for show in get_tv_requests: - count_tv_requests += 1 - - for movie in get_movie_requests: - count_movie_requests += 1 - - influx_payload = [ - { - "measurement": "Ombi", - "tags": { - "type": "Request_Total" - }, - "time": now_iso(), - "fields": { - "total": count_movie_requests + count_tv_requests - } - } - ] - return influx_payload - -def 
get_request_counts(): - get_request_counts = requests.get('{}/api/v1/Request/count'.format(configuration.ombi_url), headers=headers).json() - - influx_payload = [ - { - "measurement": "Ombi", - "tags": { - "type": "Request_Counts" - }, - "time": now_iso(), - "fields": { - "pending": int(get_request_counts['pending']), - "approved": int(get_request_counts['approved']), - "available": int(get_request_counts['available']) - } - } - ] - return influx_payload - -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog='Ombi stats operations', - description='Script to aid in data gathering from Ombi', formatter_class=RawTextHelpFormatter) - - parser.add_argument("--total", action='store_true', - help='Get the total count of all requests') - - parser.add_argument("--counts", action='store_true', - help='Get the count of pending, approved, and available requests') - - opts = parser.parse_args() - - if opts.total: - influx_sender(get_total_requests()) - - elif opts.counts: - influx_sender(get_request_counts()) - - elif len(sys.argv) == 1: - parser.print_help(sys.stderr) - sys.exit(1) diff --git a/Varken/helpers.py b/Varken/helpers.py index c3d8d2e3..e464b895 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -93,7 +93,7 @@ class SonarrServer(NamedTuple): future_days: int = 0 future_days_run_seconds: int = 30 queue: bool = False - queue_run_seconds: int = 1 + queue_run_seconds: int = 30 class RadarrServer(NamedTuple): id: int = None @@ -101,15 +101,20 @@ class RadarrServer(NamedTuple): api_key: str = None verify_ssl: bool = False queue: bool = False - queue_run_seconds: int = 1 + queue_run_seconds: int = 30 get_missing: bool = False get_missing_run_seconds: int = 30 -class Server(NamedTuple): + +class OmbiServer(NamedTuple): id: int = None url: str = None api_key: str = None verify_ssl: bool = False + request_type_counts: bool = False + request_type_run_seconds: int = 30 + request_total_counts: bool = False + request_total_run_seconds: int = 30 class 
TautulliServer(NamedTuple): @@ -131,6 +136,12 @@ class InfluxServer(NamedTuple): password: str = 'root' +class OmbiRequestCounts(NamedTuple): + pending: int = 0 + approved: int = 0 + available: int = 0 + + class TautulliStream(NamedTuple): rating: str = None transcode_width: str = None diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 89d56fcc..9a21e115 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,7 +1,7 @@ import sys import configparser from os.path import abspath, join -from Varken.helpers import Server, TautulliServer, SonarrServer, InfluxServer, RadarrServer +from Varken.helpers import OmbiServer, TautulliServer, SonarrServer, InfluxServer, RadarrServer class INIParser(object): @@ -17,7 +17,7 @@ def __init__(self): self.radarr_servers = [] self.ombi_enabled = False - self.ombi_server = None + self.ombi_servers = [] self.tautulli_enabled = False self.tautulli_servers = [] @@ -45,7 +45,7 @@ def parse_opts(self): # Parse Sonarr options try: if not self.config.getboolean('global', 'sonarr_server_ids'): - sys.exit('sonarr_server_ids must be either false, or a comma-separated list of server ids') + sys.exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'sonarr_server_ids'): self.sonarr_enabled = True except ValueError: @@ -75,7 +75,7 @@ def parse_opts(self): # Parse Radarr options try: if not self.config.getboolean('global', 'radarr_server_ids'): - sys.exit('radarr_server_ids must be either false, or a comma-separated list of server ids') + sys.exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'radarr_server_ids'): self.radarr_enabled = True except ValueError: @@ -102,7 +102,7 @@ def parse_opts(self): # Parse Tautulli options try: if not self.config.getboolean('global', 'tautulli_server_ids'): - sys.exit('tautulli_server_ids must be either false, or a comma-separated list of server ids') + sys.exit('server_ids 
must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'tautulli_server_ids'): self.tautulli_enabled = True except ValueError: @@ -128,14 +128,30 @@ def parse_opts(self): self.tautulli_servers.append(server) # Parse Ombi Options - if self.config.getboolean('global', 'ombi'): + try: + if not self.config.getboolean('global', 'ombi_server_ids'): + sys.exit('server_ids must be either false, or a comma-separated list of server ids') + elif self.config.getint('global', 'ombi_server_ids'): + self.ombi_enabled = True + except ValueError: self.ombi_enabled = True - url = self.config.get('ombi', 'url') - apikey = self.config.get('ombi', 'apikey') - scheme = 'https://' if self.config.getboolean('ombi', 'ssl') else 'http://' - verify_ssl = self.config.getboolean('ombi', 'verify_ssl') - self.ombi_server = Server(url=scheme + url, api_key=apikey, verify_ssl=verify_ssl) + if self.ombi_enabled: + sids = self.config.get('global', 'ombi_server_ids').strip(' ').split(',') + for server_id in sids: + ombi_section = 'ombi-' + server_id + url = self.config.get(ombi_section, 'url') + apikey = self.config.get(ombi_section, 'apikey') + scheme = 'https://' if self.config.getboolean(ombi_section, 'ssl') else 'http://' + verify_ssl = self.config.getboolean(ombi_section, 'verify_ssl') + request_type_counts = self.config.getboolean(ombi_section, 'get_request_type_counts') + request_type_run_seconds = self.config.getint(ombi_section, 'request_type_run_seconds') + request_total_counts = self.config.getboolean(ombi_section, 'get_request_total_counts') + request_total_run_seconds = self.config.getint(ombi_section, 'request_total_run_seconds') + + server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts, + request_type_run_seconds, request_total_counts, request_total_run_seconds) + self.ombi_servers.append(server) # Parse ASA opts if self.config.getboolean('global', 'asa'): diff --git a/Varken/ombi.py b/Varken/ombi.py new file 
mode 100644 index 00000000..47c620c2 --- /dev/null +++ b/Varken/ombi.py @@ -0,0 +1,70 @@ +from requests import Session +from datetime import datetime, timezone +from influxdb import InfluxDBClient + +from Varken.helpers import OmbiRequestCounts +from Varken.logger import logging + +class OmbiAPI(object): + def __init__(self, server, influx_server): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, + influx_server.password, 'plex2') + self.server = server + # Create session to reduce server web thread load, and globally define pageSize for all requests + self.session = Session() + self.session.headers = {'Apikey': self.server.api_key} + + def influx_push(self, payload): + self.influx.write_points(payload) + + @logging + def get_total_requests(self): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + tv_endpoint = '/api/v1/Request/tv' + movie_endpoint = "/api/v1/Request/movie" + get_tv = self.session.get(self.server.url + tv_endpoint, verify=self.server.verify_ssl).json() + get_movie = self.session.get(self.server.url + movie_endpoint, verify=self.server.verify_ssl).json() + + movie_requests = len(get_movie) + tv_requests = len(get_tv) + + influx_payload = [ + { + "measurement": "Ombi", + "tags": { + "type": "Request_Total" + }, + "time": self.now, + "fields": { + "total": movie_requests + tv_requests, + "movies": movie_requests, + "tv_shows": tv_requests + } + } + ] + + self.influx_push(influx_payload) + + @logging + def get_request_counts(self): + self.now = datetime.now(timezone.utc).astimezone().isoformat() + endpoint = '/api/v1/Request/count' + get = self.session.get(self.server.url + endpoint, verify=self.server.verify_ssl).json() + requests = OmbiRequestCounts(**get) + influx_payload = [ + { + "measurement": "Ombi", + "tags": { + "type": "Request_Counts" + }, + "time": self.now, + "fields": { + "pending": requests.pending, + "approved": 
requests.approved, + "available": requests.available + } + } + ] + + self.influx_push(influx_payload) diff --git a/Varken/radarr.py b/Varken/radarr.py index c31e2e03..d385ed75 100644 --- a/Varken/radarr.py +++ b/Varken/radarr.py @@ -1,4 +1,4 @@ -import requests +from requests import Session from datetime import datetime, timezone from influxdb import InfluxDBClient @@ -13,10 +13,9 @@ def __init__(self, server, influx_server): influx_server.password, 'plex2') self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests - self.session = requests.Session() + self.session = Session() def influx_push(self, payload): - # TODO: error handling for failed connection self.influx.write_points(payload) @logging diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 7fb7a041..89675646 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -1,4 +1,4 @@ -import requests +from requests import Session from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta @@ -15,7 +15,7 @@ def __init__(self, server, influx_server): influx_server.password, 'plex') self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests - self.session = requests.Session() + self.session = Session() self.session.params = {'pageSize': 1000} @logging @@ -60,7 +60,6 @@ def get_missing(self): self.influx_push(influx_payload) - @logging def get_future(self): endpoint = '/api/calendar/' @@ -146,5 +145,4 @@ def get_queue(self): self.influx_push(influx_payload) def influx_push(self, payload): - # TODO: error handling for failed connection self.influx.write_points(payload) diff --git a/Varken/tautulli.py b/Varken/tautulli.py index a2789d02..03aaa6d0 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -1,7 +1,7 @@ from datetime import datetime, timezone from geoip2.errors import AddressNotFoundError from influxdb import InfluxDBClient -import requests +from 
requests import Session from Varken.helpers import TautulliStream, geo_lookup from Varken.logger import logging @@ -13,11 +13,10 @@ def __init__(self, server, influx_server): self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, influx_server.password, 'plex2') self.server = server - self.session = requests.Session() + self.session = Session() self.endpoint = '/api/v2' def influx_push(self, payload): - # TODO: error handling for failed connection self.influx.write_points(payload) @logging @@ -68,7 +67,7 @@ def get_sessions(self): if self.server.fallback_ip: geodata = geo_lookup(self.server.fallback_ip) else: - my_ip = requests.get('http://ip.42.pl/raw').text + my_ip = self.session.get('http://ip.42.pl/raw').text geodata = geo_lookup(my_ip) if not all([geodata.location.latitude, geodata.location.longitude]): diff --git a/data/varken.example.ini b/data/varken.example.ini index 4d280541..c51272bc 100644 --- a/data/varken.example.ini +++ b/data/varken.example.ini @@ -9,7 +9,7 @@ sonarr_server_ids = 1,2 radarr_server_ids = 1,2 tautulli_server_ids = 1 -ombi = true +ombi_server_ids = 1 asa = false [influxdb] @@ -68,13 +68,20 @@ url = radarr2.domain.tld apikey = yyyyyyyyyyyyyyyy ssl = false verify_ssl = true +queue = true +queue_run_seconds = 300 +get_missing = true +get_missing_run_seconds = 300 -[ombi] +[ombi-1] url = ombi.domain.tld apikey = xxxxxxxxxxxxxxxx ssl = false verify_ssl = true - +get_request_type_counts = true +request_type_run_seconds = 300 +get_request_total_counts = true +request_total_run_seconds = 300 [asa] url = firewall.domain.tld diff --git a/varken.py b/varken.py index 921b8fb8..dbe72b7f 100644 --- a/varken.py +++ b/varken.py @@ -6,7 +6,7 @@ from Varken.sonarr import SonarrAPI from Varken.tautulli import TautulliAPI from Varken.radarr import RadarrAPI - +from Varken.ombi import OmbiAPI def threaded(job): thread = threading.Thread(target=job) @@ -42,6 +42,14 @@ def threaded(job): if server.queue: 
schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue) + if CONFIG.ombi_enabled: + for server in CONFIG.ombi_servers: + OMBI = OmbiAPI(server, CONFIG.influx_server) + if server.request_type_counts: + schedule.every(server.request_type_run_seconds).seconds.do(threaded, OMBI.get_request_counts) + if server.request_total_counts: + schedule.every(server.request_total_run_seconds).seconds.do(threaded, OMBI.get_total_requests) + while True: schedule.run_pending() sleep(1) From 0ca76bea7ea872b393de890c18c745b2a1134017 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:15:12 -0600 Subject: [PATCH 37/90] created and assigned basic dbmanager --- README.md | 11 +++++++---- Varken/dbmanager.py | 15 +++++++++++++++ Varken/ombi.py | 12 ++++-------- Varken/radarr.py | 12 ++++-------- Varken/sonarr.py | 15 +++++---------- Varken/tautulli.py | 12 ++++-------- requirements.txt | 1 + varken.py | 10 ++++++---- 8 files changed, 46 insertions(+), 42 deletions(-) diff --git a/README.md b/README.md index 94aebb89..c6e7198b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,8 @@ -# Grafana Scripts -Repo for api scripts written (both pushing and pulling) to aggregate data into influxdb for grafana +# Varken +Dutch for PIG. PIG is an Acronym for PlexDB/Influx/Grafana + +Varken is a standalone commmandline utility that will aggregate date +from the plex ecosystem into influxdb to be displayed in grafana Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/) @@ -7,8 +10,8 @@ Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), ## Quick Setup 1. Install requirements `pip3 install -r requirements.txt` -1. Make a copy of `configuration.example.py` to `configuration.py` -2. Make the appropriate changes to `configuration.py` +1. 
Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder +2. Make the appropriate changes to `varken.ini` 1. Create your plex database in influx ```sh user@server: ~$ influx diff --git a/Varken/dbmanager.py b/Varken/dbmanager.py index e69de29b..a7c71099 100644 --- a/Varken/dbmanager.py +++ b/Varken/dbmanager.py @@ -0,0 +1,15 @@ +from influxdb import InfluxDBClient + +class DBManager(object): + def __init__(self, server): + self.server = server + self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password, + 'plex2') + databases = [db['name'] for db in self.influx.get_list_database()] + + if 'varken' not in databases: + self.influx.create_database('varken') + self.influx.create_retention_policy('Varken 30d/1h', '30d', '1', 'varken', False, '1h') + + def write_points(self, data): + self.influx.write_points(data) \ No newline at end of file diff --git a/Varken/ombi.py b/Varken/ombi.py index 47c620c2..e1a632d7 100644 --- a/Varken/ombi.py +++ b/Varken/ombi.py @@ -6,18 +6,14 @@ from Varken.logger import logging class OmbiAPI(object): - def __init__(self, server, influx_server): + def __init__(self, server, dbmanager): self.now = datetime.now(timezone.utc).astimezone().isoformat() - self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, - influx_server.password, 'plex2') + self.dbmanager = dbmanager self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = Session() self.session.headers = {'Apikey': self.server.api_key} - def influx_push(self, payload): - self.influx.write_points(payload) - @logging def get_total_requests(self): self.now = datetime.now(timezone.utc).astimezone().isoformat() @@ -44,7 +40,7 @@ def get_total_requests(self): } ] - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) @logging def get_request_counts(self): @@ -67,4 +63,4 @@ def 
get_request_counts(self): } ] - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) diff --git a/Varken/radarr.py b/Varken/radarr.py index d385ed75..25821971 100644 --- a/Varken/radarr.py +++ b/Varken/radarr.py @@ -7,17 +7,13 @@ class RadarrAPI(object): - def __init__(self, server, influx_server): + def __init__(self, server, dbmanager): self.now = datetime.now(timezone.utc).astimezone().isoformat() - self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, - influx_server.password, 'plex2') + self.dbmanager = dbmanager self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = Session() - def influx_push(self, payload): - self.influx.write_points(payload) - @logging def get_missing(self): endpoint = '/api/movie' @@ -55,7 +51,7 @@ def get_missing(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) @logging def get_queue(self): @@ -100,4 +96,4 @@ def get_queue(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) diff --git a/Varken/sonarr.py b/Varken/sonarr.py index 89675646..ae09c2e0 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -1,5 +1,4 @@ from requests import Session -from influxdb import InfluxDBClient from datetime import datetime, timezone, date, timedelta from Varken.logger import logging @@ -7,12 +6,11 @@ class SonarrAPI(object): - def __init__(self, server, influx_server): + def __init__(self, server, dbmanager): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() + self.dbmanager = dbmanager self.today = str(date.today()) - self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, - influx_server.password, 'plex') self.server = server # Create session to reduce server web thread load, and globally define pageSize for all requests self.session = Session() @@ 
-58,7 +56,7 @@ def get_missing(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) @logging def get_future(self): @@ -98,7 +96,7 @@ def get_future(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) @logging def get_queue(self): @@ -142,7 +140,4 @@ def get_queue(self): } ) - self.influx_push(influx_payload) - - def influx_push(self, payload): - self.influx.write_points(payload) + self.dbmanager.write_points(influx_payload) diff --git a/Varken/tautulli.py b/Varken/tautulli.py index 03aaa6d0..6ec6d7e0 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -7,18 +7,14 @@ class TautulliAPI(object): - def __init__(self, server, influx_server): + def __init__(self, server, dbmanager): # Set Time of initialization self.now = datetime.now(timezone.utc).astimezone().isoformat() - self.influx = InfluxDBClient(influx_server.url, influx_server.port, influx_server.username, - influx_server.password, 'plex2') + self.dbmanager = dbmanager self.server = server self.session = Session() self.endpoint = '/api/v2' - def influx_push(self, payload): - self.influx.write_points(payload) - @logging def get_activity(self): self.now = datetime.now(timezone.utc).astimezone().isoformat() @@ -48,7 +44,7 @@ def get_activity(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) @logging def get_sessions(self): @@ -144,4 +140,4 @@ def get_sessions(self): } ) - self.influx_push(influx_payload) + self.dbmanager.write_points(influx_payload) diff --git a/requirements.txt b/requirements.txt index ceb78c6d..4a246671 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,4 @@ requests geoip2 influxdb +schedule \ No newline at end of file diff --git a/varken.py b/varken.py index dbe72b7f..2d68b866 100644 --- a/varken.py +++ b/varken.py @@ -7,6 +7,7 @@ from Varken.tautulli import TautulliAPI from Varken.radarr import RadarrAPI from Varken.ombi import OmbiAPI +from 
Varken.dbmanager import DBManager def threaded(job): thread = threading.Thread(target=job) @@ -15,10 +16,11 @@ def threaded(job): if __name__ == "__main__": CONFIG = INIParser() + DBMANAGER = DBManager(CONFIG.influx_server) if CONFIG.sonarr_enabled: for server in CONFIG.sonarr_servers: - SONARR = SonarrAPI(server, CONFIG.influx_server) + SONARR = SonarrAPI(server, DBMANAGER) if server.queue: schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue) if server.missing_days > 0: @@ -28,7 +30,7 @@ def threaded(job): if CONFIG.tautulli_enabled: for server in CONFIG.tautulli_servers: - TAUTULLI = TautulliAPI(server, CONFIG.influx_server) + TAUTULLI = TautulliAPI(server, DBMANAGER) if server.get_activity: schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity) if server.get_sessions: @@ -36,7 +38,7 @@ def threaded(job): if CONFIG.radarr_enabled: for server in CONFIG.radarr_servers: - RADARR = RadarrAPI(server, CONFIG.influx_server) + RADARR = RadarrAPI(server, DBMANAGER) if server.get_missing: schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing) if server.queue: @@ -44,7 +46,7 @@ def threaded(job): if CONFIG.ombi_enabled: for server in CONFIG.ombi_servers: - OMBI = OmbiAPI(server, CONFIG.influx_server) + OMBI = OmbiAPI(server, DBMANAGER) if server.request_type_counts: schedule.every(server.request_type_run_seconds).seconds.do(threaded, OMBI.get_request_counts) if server.request_total_counts: From 0eb370271dfbb9c8dc9fdbc9124ea44f0bd84573 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:16:56 -0600 Subject: [PATCH 38/90] cleared extra imports --- Varken/ombi.py | 1 - Varken/radarr.py | 1 - Varken/tautulli.py | 1 - 3 files changed, 3 deletions(-) diff --git a/Varken/ombi.py b/Varken/ombi.py index e1a632d7..3981250c 100644 --- a/Varken/ombi.py +++ b/Varken/ombi.py @@ -1,6 +1,5 @@ from requests import Session from datetime import datetime, timezone -from influxdb 
import InfluxDBClient from Varken.helpers import OmbiRequestCounts from Varken.logger import logging diff --git a/Varken/radarr.py b/Varken/radarr.py index 25821971..091bb771 100644 --- a/Varken/radarr.py +++ b/Varken/radarr.py @@ -1,6 +1,5 @@ from requests import Session from datetime import datetime, timezone -from influxdb import InfluxDBClient from Varken.logger import logging from Varken.helpers import Movie, Queue diff --git a/Varken/tautulli.py b/Varken/tautulli.py index 6ec6d7e0..24ceeaf9 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -1,6 +1,5 @@ from datetime import datetime, timezone from geoip2.errors import AddressNotFoundError -from influxdb import InfluxDBClient from requests import Session from Varken.helpers import TautulliStream, geo_lookup from Varken.logger import logging From a77d5218ba1e176658215f20e0ded0e4f3b722ef Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:19:13 -0600 Subject: [PATCH 39/90] ammended servicefile --- varken.service | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/varken.service b/varken.service index d1db1520..ba56950e 100644 --- a/varken.service +++ b/varken.service @@ -4,8 +4,8 @@ After=network-online.target [Service] Type=simple -WorkingDirectory=/opt/Varken/Varken -ExecStart=/usr/bin/python3 /opt/Varken/Varken/varken.py +WorkingDirectory=/opt/Varken +ExecStart=/usr/bin/python3 /opt/Varken/varken.py Restart=always [Install] From 651bb266b9648ea70cc35a746f292fe327270cee Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:38:02 -0600 Subject: [PATCH 40/90] accept dbchange and add to readme --- README.md | 40 ++++++++++++---------------------------- Varken/dbmanager.py | 2 +- 2 files changed, 13 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index c6e7198b..cb53fa26 100644 --- a/README.md +++ b/README.md @@ -8,19 +8,18 @@ Requirements /w install links: [Grafana](http://docs.grafana.org/installation/),
-## Quick Setup -1. Install requirements `pip3 install -r requirements.txt` -1. Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder -2. Make the appropriate changes to `varken.ini` -1. Create your plex database in influx - ```sh - user@server: ~$ influx - > CREATE DATABASE plex - > quit - ``` -1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database. -1. Install `grafana-cli plugins install grafana-worldmap-panel` -1. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like. +## Quick Setup (Varken Alpha) +1. Clone the repository `git clone https://github.com/DirtyCajunRice/grafana-scripts.git /opt/Varken` +1. Switch to the testing branch `cd /opt/Varken && git checkout refactor-project` +1. Install requirements `/usr/bin/python -m pip install -r requirements.txt` +2. Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder + `cp data/varken.example.ini data/varken.ini` +3. Make the appropriate changes to `varken.ini` + `nano data/varken.ini` +4. Copy the systemd file `cp varken.service /etc/systemd/system/` +5. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database. +6. Install `grafana-cli plugins install grafana-worldmap-panel` +7. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like. @@ -101,18 +100,3 @@ optional arguments: ### `tautulli.py` Gathers data from Tautulli and pushes it to influxdb. On initial run it will download the geoip2 DB and use it for locations. -## Notes -To run the python scripts crontab is currently leveraged. Examples: -```sh -### Modify paths as appropriate. 
python3 is located in different places for different users. (`which python3` will give you the path) -### to edit your crontab entry, do not modify /var/spool/cron/crontabs/ directly, use `crontab -e` -### Crontabs require an empty line at the end or they WILL not run. Make sure to have 2 lines to be safe -### It is bad practice to run any cronjob more than once a minute. For timing help: https://crontab.guru/ -* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py --total -* * * * * /usr/bin/python3 /path-to-grafana-scripts/tautulli.py -* * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py --queue -* * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py --queue -*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py --missing -*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py --missing -*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py -``` diff --git a/Varken/dbmanager.py b/Varken/dbmanager.py index a7c71099..de5dc1d5 100644 --- a/Varken/dbmanager.py +++ b/Varken/dbmanager.py @@ -4,7 +4,7 @@ class DBManager(object): def __init__(self, server): self.server = server self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password, - 'plex2') + 'varken') databases = [db['name'] for db in self.influx.get_list_database()] if 'varken' not in databases: From 7fb3907ac5ef16f76531553259b211f98890ff3c Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:47:59 -0600 Subject: [PATCH 41/90] readme update --- README.md | 86 +++---------------------------------------------------- 1 file changed, 4 insertions(+), 82 deletions(-) diff --git a/README.md b/README.md index cb53fa26..359e01b6 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # Varken -Dutch for PIG. PIG is an Acronym for PlexDB/Influx/Grafana +Dutch for PIG. 
PIG is an Acronym for Plex/InfluxDB/Grafana -Varken is a standalone commmandline utility that will aggregate date +Varken is a standalone commmand-line utility that will aggregate date from the plex ecosystem into influxdb to be displayed in grafana Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/) @@ -17,86 +17,8 @@ Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), 3. Make the appropriate changes to `varken.ini` `nano data/varken.ini` 4. Copy the systemd file `cp varken.service /etc/systemd/system/` +5. start the service and enable it `systemctl start varken && systemctl enable varken` 5. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database. 6. Install `grafana-cli plugins install grafana-worldmap-panel` -7. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like. - - - -### Docker - -Repo is included in [si0972/grafana-scripts](https://github.com/si0972/grafana-scripts-docker) - -
Example -

- -``` -docker create \ - --name=grafana-scripts \ - -v :/Scripts \ - -e plex=true \ - -e PGID= -e PUID= \ - si0972/grafana-scripts:latest -``` -

-
- - - - -## Scripts -### `sonarr.py` -Gathers data from Sonarr and pushes it to influxdb. - -``` -Script to aid in data gathering from Sonarr - -optional arguments: - -h, --help show this help message and exit - --missing Get all missing TV shows - --missing_days MISSING_DAYS - Get missing TV shows in past X days - --upcoming Get upcoming TV shows - --future FUTURE Get TV shows on X days into the future. Includes today. - i.e. --future 2 is Today and Tomorrow - --queue Get TV shows in queue -``` -- Notes: - - You cannot stack the arguments. ie. `sonarr.py --missing --queue` - - One argument must be supplied - -### `radarr.py` -Gathers data from Radarr and pushes it to influxdb - -``` -Script to aid in data gathering from Radarr - -optional arguments: - -h, --help show this help message and exit - --missing Get missing movies - --missing_avl Get missing available movies - --queue Get movies in queue -``` -- Notes: - - You cannot stack the arguments. ie. `radarr.py --missing --queue` - - One argument must be supplied - - `--missing_avl` Refers to how Radarr has determined if the movie should be available to download. The easy way to determine if the movie will appear on this list is if the movie has a RED "Missing" tag associated with that movie. BLUE "Missing" tag refers to a movie that is missing but is not available for download yet. These tags are determined by your "Minimum Availability" settings for that movie. - -### `ombi.py` -Gathers data from Ombi and pushes it to influxdb - -``` -Script to aid in data gathering from Ombi - -optional arguments: - -h, --help show this help message and exit - --total Get the total count of all requests - --counts Get the count of pending, approved, and available requests -``` -- Notes: - - You cannot stack the arguments. ie. `ombi.py --total --counts` - - One argument must be supplied - -### `tautulli.py` -Gathers data from Tautulli and pushes it to influxdb. 
On initial run it will download the geoip2 DB and use it for locations. +7. TODO:: Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like. From e5e2d5b1a1aea677e753fad56ef2ba6ad4d347f7 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sat, 1 Dec 2018 23:49:17 -0600 Subject: [PATCH 42/90] temporary root in systemd file --- varken.service | 1 + 1 file changed, 1 insertion(+) diff --git a/varken.service b/varken.service index ba56950e..0b181a25 100644 --- a/varken.service +++ b/varken.service @@ -4,6 +4,7 @@ After=network-online.target [Service] Type=simple +User=root WorkingDirectory=/opt/Varken ExecStart=/usr/bin/python3 /opt/Varken/varken.py Restart=always From e677d2463bf5af0956856eed0b22b0f208cb531c Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sun, 2 Dec 2018 01:33:06 -0600 Subject: [PATCH 43/90] added min requirements and split friendly name with username --- Varken/tautulli.py | 3 ++- requirements.txt | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Varken/tautulli.py b/Varken/tautulli.py index 24ceeaf9..62f42a13 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -108,7 +108,8 @@ def get_sessions(self): "tags": { "type": "Session", "session_id": session.session_id, - "name": session.friendly_name, + "friendly_name": session.friendly_name, + "username": session.username, "title": session.full_title, "platform": session.platform, "product_version": product_version, diff --git a/requirements.txt b/requirements.txt index 4a246671..760382ed 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # Potential requirements. 
# pip3 install -r requirements.txt #--------------------------------------------------------- -requests -geoip2 -influxdb -schedule \ No newline at end of file +requests>=2.20.1 +geoip2>=2.9.0 +influxdb>=5.2.0 +schedule>=0.5.0 \ No newline at end of file From fa15eca8cd4d71c19ad1beac6233b687c0b5f1b7 Mon Sep 17 00:00:00 2001 From: dirtycajunrice Date: Sun, 2 Dec 2018 13:38:37 -0600 Subject: [PATCH 44/90] added forced package imports --- Varken/dbmanager.py | 5 + Varken/helpers.py | 16 +- Varken/iniparser.py | 18 +- Varken/ombi.py | 4 + Varken/radarr.py | 4 + Varken/sonarr.py | 4 + Varken/tautulli.py | 7 +- lib/DateTime/DateTime.py | 1940 ++++ lib/DateTime/DateTime.txt | 785 ++ lib/DateTime/__init__.py | 17 + lib/DateTime/interfaces.py | 375 + lib/DateTime/pytz.txt | 192 + lib/DateTime/pytz_support.py | 259 + lib/DateTime/tests/__init__.py | 15 + lib/DateTime/tests/julian_testdata.txt | 57 + lib/DateTime/tests/test_datetime.py | 686 ++ lib/backports/configparser/__init__.py | 1390 +++ lib/backports/configparser/helpers.py | 171 + lib/bin/chardetect.exe | Bin 0 -> 93027 bytes lib/bin/easy_install-3.7.exe | Bin 0 -> 93036 bytes lib/bin/easy_install.exe | Bin 0 -> 93036 bytes lib/certifi/__init__.py | 3 + lib/certifi/__main__.py | 2 + lib/certifi/cacert.pem | 4512 +++++++++ lib/certifi/core.py | 20 + lib/chardet/__init__.py | 39 + lib/chardet/big5freq.py | 386 + lib/chardet/big5prober.py | 47 + lib/chardet/chardistribution.py | 233 + lib/chardet/charsetgroupprober.py | 106 + lib/chardet/charsetprober.py | 145 + lib/chardet/cli/__init__.py | 1 + lib/chardet/cli/chardetect.py | 85 + lib/chardet/codingstatemachine.py | 88 + lib/chardet/compat.py | 34 + lib/chardet/cp949prober.py | 49 + lib/chardet/enums.py | 76 + lib/chardet/escprober.py | 101 + lib/chardet/escsm.py | 246 + lib/chardet/eucjpprober.py | 92 + lib/chardet/euckrfreq.py | 195 + lib/chardet/euckrprober.py | 47 + lib/chardet/euctwfreq.py | 387 + lib/chardet/euctwprober.py | 46 + lib/chardet/gb2312freq.py | 283 + 
lib/chardet/gb2312prober.py | 46 + lib/chardet/hebrewprober.py | 292 + lib/chardet/jisfreq.py | 325 + lib/chardet/jpcntx.py | 233 + lib/chardet/langbulgarianmodel.py | 228 + lib/chardet/langcyrillicmodel.py | 333 + lib/chardet/langgreekmodel.py | 225 + lib/chardet/langhebrewmodel.py | 200 + lib/chardet/langhungarianmodel.py | 225 + lib/chardet/langthaimodel.py | 199 + lib/chardet/langturkishmodel.py | 193 + lib/chardet/latin1prober.py | 145 + lib/chardet/mbcharsetprober.py | 91 + lib/chardet/mbcsgroupprober.py | 54 + lib/chardet/mbcssm.py | 572 ++ lib/chardet/sbcharsetprober.py | 132 + lib/chardet/sbcsgroupprober.py | 73 + lib/chardet/sjisprober.py | 92 + lib/chardet/universaldetector.py | 286 + lib/chardet/utf8prober.py | 82 + lib/chardet/version.py | 9 + lib/configparser-3.5.0-py3.7-nspkg.pth | 1 + lib/dateutil/__init__.py | 8 + lib/dateutil/_common.py | 43 + lib/dateutil/_version.py | 4 + lib/dateutil/easter.py | 89 + lib/dateutil/parser/__init__.py | 60 + lib/dateutil/parser/_parser.py | 1578 ++++ lib/dateutil/parser/isoparser.py | 406 + lib/dateutil/relativedelta.py | 590 ++ lib/dateutil/rrule.py | 1672 ++++ lib/dateutil/tz/__init__.py | 17 + lib/dateutil/tz/_common.py | 415 + lib/dateutil/tz/_factories.py | 49 + lib/dateutil/tz/tz.py | 1785 ++++ lib/dateutil/tz/win.py | 331 + lib/dateutil/tzwin.py | 2 + lib/dateutil/utils.py | 71 + lib/dateutil/zoneinfo/__init__.py | 167 + .../zoneinfo/dateutil-zoneinfo.tar.gz | Bin 0 -> 154226 bytes lib/dateutil/zoneinfo/rebuild.py | 53 + lib/easy_install.py | 5 + lib/geoip2/__init__.py | 7 + lib/geoip2/compat.py | 19 + lib/geoip2/database.py | 214 + lib/geoip2/errors.py | 51 + lib/geoip2/mixins.py | 16 + lib/geoip2/models.py | 502 + lib/geoip2/records.py | 675 ++ lib/geoip2/webservice.py | 235 + lib/idna/__init__.py | 2 + lib/idna/codec.py | 118 + lib/idna/compat.py | 12 + lib/idna/core.py | 399 + lib/idna/idnadata.py | 1893 ++++ lib/idna/intranges.py | 53 + lib/idna/package_data.py | 2 + lib/idna/uts46data.py | 8179 
+++++++++++++++++ lib/influxdb/__init__.py | 21 + lib/influxdb/_dataframe_client.py | 452 + lib/influxdb/chunked_json.py | 27 + lib/influxdb/client.py | 980 ++ lib/influxdb/dataframe_client.py | 28 + lib/influxdb/exceptions.py | 35 + lib/influxdb/helper.py | 184 + lib/influxdb/influxdb08/__init__.py | 18 + lib/influxdb/influxdb08/chunked_json.py | 27 + lib/influxdb/influxdb08/client.py | 843 ++ lib/influxdb/influxdb08/dataframe_client.py | 177 + lib/influxdb/influxdb08/helper.py | 153 + lib/influxdb/line_protocol.py | 172 + lib/influxdb/resultset.py | 206 + lib/influxdb/tests/__init__.py | 21 + lib/influxdb/tests/chunked_json_test.py | 51 + lib/influxdb/tests/client_test.py | 1094 +++ lib/influxdb/tests/dataframe_client_test.py | 711 ++ lib/influxdb/tests/helper_test.py | 367 + lib/influxdb/tests/influxdb08/__init__.py | 2 + lib/influxdb/tests/influxdb08/client_test.py | 904 ++ .../tests/influxdb08/dataframe_client_test.py | 331 + lib/influxdb/tests/influxdb08/helper_test.py | 228 + lib/influxdb/tests/misc.py | 50 + lib/influxdb/tests/resultset_test.py | 202 + lib/influxdb/tests/server_tests/__init__.py | 1 + lib/influxdb/tests/server_tests/base.py | 84 + .../server_tests/client_test_with_server.py | 825 ++ .../tests/server_tests/influxdb_instance.py | 198 + lib/influxdb/tests/test_line_protocol.py | 147 + lib/maxminddb/__init__.py | 54 + lib/maxminddb/compat.py | 43 + lib/maxminddb/const.py | 8 + lib/maxminddb/decoder.py | 172 + lib/maxminddb/errors.py | 10 + lib/maxminddb/extension/maxminddb.c | 602 ++ lib/maxminddb/file.py | 65 + lib/maxminddb/reader.py | 309 + lib/pkg_resources/__init__.py | 3171 +++++++ lib/pkg_resources/_vendor/__init__.py | 0 lib/pkg_resources/_vendor/appdirs.py | 608 ++ .../_vendor/packaging/__about__.py | 21 + .../_vendor/packaging/__init__.py | 14 + .../_vendor/packaging/_compat.py | 30 + .../_vendor/packaging/_structures.py | 68 + .../_vendor/packaging/markers.py | 301 + .../_vendor/packaging/requirements.py | 127 + 
.../_vendor/packaging/specifiers.py | 774 ++ lib/pkg_resources/_vendor/packaging/utils.py | 14 + .../_vendor/packaging/version.py | 393 + lib/pkg_resources/_vendor/pyparsing.py | 5742 ++++++++++++ lib/pkg_resources/_vendor/six.py | 868 ++ lib/pkg_resources/extern/__init__.py | 73 + lib/pkg_resources/py31compat.py | 23 + lib/pytz/__init__.py | 1527 +++ lib/pytz/exceptions.py | 48 + lib/pytz/lazy.py | 172 + lib/pytz/reference.py | 140 + lib/pytz/tzfile.py | 134 + lib/pytz/tzinfo.py | 577 ++ lib/pytz/zoneinfo/Africa/Abidjan | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Accra | Bin 0 -> 828 bytes lib/pytz/zoneinfo/Africa/Addis_Ababa | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Algiers | Bin 0 -> 751 bytes lib/pytz/zoneinfo/Africa/Asmara | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Asmera | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Bamako | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Bangui | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Banjul | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Bissau | Bin 0 -> 194 bytes lib/pytz/zoneinfo/Africa/Blantyre | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Brazzaville | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Bujumbura | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Cairo | Bin 0 -> 1963 bytes lib/pytz/zoneinfo/Africa/Casablanca | Bin 0 -> 969 bytes lib/pytz/zoneinfo/Africa/Ceuta | Bin 0 -> 2050 bytes lib/pytz/zoneinfo/Africa/Conakry | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Dakar | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Dar_es_Salaam | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Djibouti | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Douala | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/El_Aaiun | Bin 0 -> 839 bytes lib/pytz/zoneinfo/Africa/Freetown | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Gaborone | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Harare | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Johannesburg | Bin 0 -> 262 bytes lib/pytz/zoneinfo/Africa/Juba | Bin 0 -> 669 bytes lib/pytz/zoneinfo/Africa/Kampala | 
Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Khartoum | Bin 0 -> 699 bytes lib/pytz/zoneinfo/Africa/Kigali | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Kinshasa | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Lagos | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Libreville | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Lome | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Luanda | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Lubumbashi | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Lusaka | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Malabo | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Maputo | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Maseru | Bin 0 -> 262 bytes lib/pytz/zoneinfo/Africa/Mbabane | Bin 0 -> 262 bytes lib/pytz/zoneinfo/Africa/Mogadishu | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Monrovia | Bin 0 -> 224 bytes lib/pytz/zoneinfo/Africa/Nairobi | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Africa/Ndjamena | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Africa/Niamey | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Nouakchott | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Ouagadougou | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Porto-Novo | Bin 0 -> 157 bytes lib/pytz/zoneinfo/Africa/Sao_Tome | Bin 0 -> 225 bytes lib/pytz/zoneinfo/Africa/Timbuktu | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Africa/Tripoli | Bin 0 -> 641 bytes lib/pytz/zoneinfo/Africa/Tunis | Bin 0 -> 701 bytes lib/pytz/zoneinfo/Africa/Windhoek | Bin 0 -> 979 bytes lib/pytz/zoneinfo/America/Adak | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/America/Anchorage | Bin 0 -> 2371 bytes lib/pytz/zoneinfo/America/Anguilla | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Antigua | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Araguaina | Bin 0 -> 896 bytes .../zoneinfo/America/Argentina/Buenos_Aires | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Argentina/Catamarca | Bin 0 -> 1100 bytes .../zoneinfo/America/Argentina/ComodRivadavia | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Argentina/Cordoba | Bin 0 -> 1100 bytes 
lib/pytz/zoneinfo/America/Argentina/Jujuy | Bin 0 -> 1072 bytes lib/pytz/zoneinfo/America/Argentina/La_Rioja | Bin 0 -> 1114 bytes lib/pytz/zoneinfo/America/Argentina/Mendoza | Bin 0 -> 1100 bytes .../zoneinfo/America/Argentina/Rio_Gallegos | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Argentina/Salta | Bin 0 -> 1072 bytes lib/pytz/zoneinfo/America/Argentina/San_Juan | Bin 0 -> 1114 bytes lib/pytz/zoneinfo/America/Argentina/San_Luis | Bin 0 -> 1130 bytes lib/pytz/zoneinfo/America/Argentina/Tucuman | Bin 0 -> 1128 bytes lib/pytz/zoneinfo/America/Argentina/Ushuaia | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Aruba | Bin 0 -> 198 bytes lib/pytz/zoneinfo/America/Asuncion | Bin 0 -> 2068 bytes lib/pytz/zoneinfo/America/Atikokan | Bin 0 -> 336 bytes lib/pytz/zoneinfo/America/Atka | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/America/Bahia | Bin 0 -> 1036 bytes lib/pytz/zoneinfo/America/Bahia_Banderas | Bin 0 -> 1574 bytes lib/pytz/zoneinfo/America/Barbados | Bin 0 -> 330 bytes lib/pytz/zoneinfo/America/Belem | Bin 0 -> 588 bytes lib/pytz/zoneinfo/America/Belize | Bin 0 -> 964 bytes lib/pytz/zoneinfo/America/Blanc-Sablon | Bin 0 -> 298 bytes lib/pytz/zoneinfo/America/Boa_Vista | Bin 0 -> 644 bytes lib/pytz/zoneinfo/America/Bogota | Bin 0 -> 262 bytes lib/pytz/zoneinfo/America/Boise | Bin 0 -> 2394 bytes lib/pytz/zoneinfo/America/Buenos_Aires | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Cambridge_Bay | Bin 0 -> 2084 bytes lib/pytz/zoneinfo/America/Campo_Grande | Bin 0 -> 2002 bytes lib/pytz/zoneinfo/America/Cancun | Bin 0 -> 802 bytes lib/pytz/zoneinfo/America/Caracas | Bin 0 -> 280 bytes lib/pytz/zoneinfo/America/Catamarca | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Cayenne | Bin 0 -> 210 bytes lib/pytz/zoneinfo/America/Cayman | Bin 0 -> 194 bytes lib/pytz/zoneinfo/America/Chicago | Bin 0 -> 3576 bytes lib/pytz/zoneinfo/America/Chihuahua | Bin 0 -> 1508 bytes lib/pytz/zoneinfo/America/Coral_Harbour | Bin 0 -> 336 bytes lib/pytz/zoneinfo/America/Cordoba | Bin 0 -> 
1100 bytes lib/pytz/zoneinfo/America/Costa_Rica | Bin 0 -> 332 bytes lib/pytz/zoneinfo/America/Creston | Bin 0 -> 224 bytes lib/pytz/zoneinfo/America/Cuiaba | Bin 0 -> 1974 bytes lib/pytz/zoneinfo/America/Curacao | Bin 0 -> 198 bytes lib/pytz/zoneinfo/America/Danmarkshavn | Bin 0 -> 698 bytes lib/pytz/zoneinfo/America/Dawson | Bin 0 -> 2084 bytes lib/pytz/zoneinfo/America/Dawson_Creek | Bin 0 -> 1050 bytes lib/pytz/zoneinfo/America/Denver | Bin 0 -> 2444 bytes lib/pytz/zoneinfo/America/Detroit | Bin 0 -> 2174 bytes lib/pytz/zoneinfo/America/Dominica | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Edmonton | Bin 0 -> 2388 bytes lib/pytz/zoneinfo/America/Eirunepe | Bin 0 -> 676 bytes lib/pytz/zoneinfo/America/El_Salvador | Bin 0 -> 236 bytes lib/pytz/zoneinfo/America/Ensenada | Bin 0 -> 2342 bytes lib/pytz/zoneinfo/America/Fort_Nelson | Bin 0 -> 2240 bytes lib/pytz/zoneinfo/America/Fort_Wayne | Bin 0 -> 1666 bytes lib/pytz/zoneinfo/America/Fortaleza | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/Glace_Bay | Bin 0 -> 2192 bytes lib/pytz/zoneinfo/America/Godthab | Bin 0 -> 1878 bytes lib/pytz/zoneinfo/America/Goose_Bay | Bin 0 -> 3210 bytes lib/pytz/zoneinfo/America/Grand_Turk | Bin 0 -> 1872 bytes lib/pytz/zoneinfo/America/Grenada | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Guadeloupe | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Guatemala | Bin 0 -> 292 bytes lib/pytz/zoneinfo/America/Guayaquil | Bin 0 -> 262 bytes lib/pytz/zoneinfo/America/Guyana | Bin 0 -> 252 bytes lib/pytz/zoneinfo/America/Halifax | Bin 0 -> 3424 bytes lib/pytz/zoneinfo/America/Havana | Bin 0 -> 2428 bytes lib/pytz/zoneinfo/America/Hermosillo | Bin 0 -> 440 bytes .../zoneinfo/America/Indiana/Indianapolis | Bin 0 -> 1666 bytes lib/pytz/zoneinfo/America/Indiana/Knox | Bin 0 -> 2428 bytes lib/pytz/zoneinfo/America/Indiana/Marengo | Bin 0 -> 1722 bytes lib/pytz/zoneinfo/America/Indiana/Petersburg | Bin 0 -> 1904 bytes lib/pytz/zoneinfo/America/Indiana/Tell_City | Bin 0 -> 1726 bytes 
lib/pytz/zoneinfo/America/Indiana/Vevay | Bin 0 -> 1414 bytes lib/pytz/zoneinfo/America/Indiana/Vincennes | Bin 0 -> 1694 bytes lib/pytz/zoneinfo/America/Indiana/Winamac | Bin 0 -> 1778 bytes lib/pytz/zoneinfo/America/Indianapolis | Bin 0 -> 1666 bytes lib/pytz/zoneinfo/America/Inuvik | Bin 0 -> 1914 bytes lib/pytz/zoneinfo/America/Iqaluit | Bin 0 -> 2032 bytes lib/pytz/zoneinfo/America/Jamaica | Bin 0 -> 498 bytes lib/pytz/zoneinfo/America/Jujuy | Bin 0 -> 1072 bytes lib/pytz/zoneinfo/America/Juneau | Bin 0 -> 2353 bytes lib/pytz/zoneinfo/America/Kentucky/Louisville | Bin 0 -> 2772 bytes lib/pytz/zoneinfo/America/Kentucky/Monticello | Bin 0 -> 2352 bytes lib/pytz/zoneinfo/America/Knox_IN | Bin 0 -> 2428 bytes lib/pytz/zoneinfo/America/Kralendijk | Bin 0 -> 198 bytes lib/pytz/zoneinfo/America/La_Paz | Bin 0 -> 248 bytes lib/pytz/zoneinfo/America/Lima | Bin 0 -> 422 bytes lib/pytz/zoneinfo/America/Los_Angeles | Bin 0 -> 2836 bytes lib/pytz/zoneinfo/America/Louisville | Bin 0 -> 2772 bytes lib/pytz/zoneinfo/America/Lower_Princes | Bin 0 -> 198 bytes lib/pytz/zoneinfo/America/Maceio | Bin 0 -> 756 bytes lib/pytz/zoneinfo/America/Managua | Bin 0 -> 454 bytes lib/pytz/zoneinfo/America/Manaus | Bin 0 -> 616 bytes lib/pytz/zoneinfo/America/Marigot | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Martinique | Bin 0 -> 248 bytes lib/pytz/zoneinfo/America/Matamoros | Bin 0 -> 1402 bytes lib/pytz/zoneinfo/America/Mazatlan | Bin 0 -> 1550 bytes lib/pytz/zoneinfo/America/Mendoza | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Menominee | Bin 0 -> 2274 bytes lib/pytz/zoneinfo/America/Merida | Bin 0 -> 1442 bytes lib/pytz/zoneinfo/America/Metlakatla | Bin 0 -> 1409 bytes lib/pytz/zoneinfo/America/Mexico_City | Bin 0 -> 1604 bytes lib/pytz/zoneinfo/America/Miquelon | Bin 0 -> 1682 bytes lib/pytz/zoneinfo/America/Moncton | Bin 0 -> 3154 bytes lib/pytz/zoneinfo/America/Monterrey | Bin 0 -> 1402 bytes lib/pytz/zoneinfo/America/Montevideo | Bin 0 -> 1550 bytes 
lib/pytz/zoneinfo/America/Montreal | Bin 0 -> 3494 bytes lib/pytz/zoneinfo/America/Montserrat | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Nassau | Bin 0 -> 2270 bytes lib/pytz/zoneinfo/America/New_York | Bin 0 -> 3536 bytes lib/pytz/zoneinfo/America/Nipigon | Bin 0 -> 2122 bytes lib/pytz/zoneinfo/America/Nome | Bin 0 -> 2367 bytes lib/pytz/zoneinfo/America/Noronha | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/North_Dakota/Beulah | Bin 0 -> 2380 bytes lib/pytz/zoneinfo/America/North_Dakota/Center | Bin 0 -> 2380 bytes .../zoneinfo/America/North_Dakota/New_Salem | Bin 0 -> 2380 bytes lib/pytz/zoneinfo/America/Ojinaga | Bin 0 -> 1508 bytes lib/pytz/zoneinfo/America/Panama | Bin 0 -> 194 bytes lib/pytz/zoneinfo/America/Pangnirtung | Bin 0 -> 2094 bytes lib/pytz/zoneinfo/America/Paramaribo | Bin 0 -> 282 bytes lib/pytz/zoneinfo/America/Phoenix | Bin 0 -> 344 bytes lib/pytz/zoneinfo/America/Port-au-Prince | Bin 0 -> 1446 bytes lib/pytz/zoneinfo/America/Port_of_Spain | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Porto_Acre | Bin 0 -> 648 bytes lib/pytz/zoneinfo/America/Porto_Velho | Bin 0 -> 588 bytes lib/pytz/zoneinfo/America/Puerto_Rico | Bin 0 -> 246 bytes lib/pytz/zoneinfo/America/Punta_Arenas | Bin 0 -> 1902 bytes lib/pytz/zoneinfo/America/Rainy_River | Bin 0 -> 2122 bytes lib/pytz/zoneinfo/America/Rankin_Inlet | Bin 0 -> 1916 bytes lib/pytz/zoneinfo/America/Recife | Bin 0 -> 728 bytes lib/pytz/zoneinfo/America/Regina | Bin 0 -> 980 bytes lib/pytz/zoneinfo/America/Resolute | Bin 0 -> 1916 bytes lib/pytz/zoneinfo/America/Rio_Branco | Bin 0 -> 648 bytes lib/pytz/zoneinfo/America/Rosario | Bin 0 -> 1100 bytes lib/pytz/zoneinfo/America/Santa_Isabel | Bin 0 -> 2342 bytes lib/pytz/zoneinfo/America/Santarem | Bin 0 -> 618 bytes lib/pytz/zoneinfo/America/Santiago | Bin 0 -> 2529 bytes lib/pytz/zoneinfo/America/Santo_Domingo | Bin 0 -> 482 bytes lib/pytz/zoneinfo/America/Sao_Paulo | Bin 0 -> 2002 bytes lib/pytz/zoneinfo/America/Scoresbysund | Bin 0 -> 1916 bytes 
lib/pytz/zoneinfo/America/Shiprock | Bin 0 -> 2444 bytes lib/pytz/zoneinfo/America/Sitka | Bin 0 -> 2329 bytes lib/pytz/zoneinfo/America/St_Barthelemy | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/St_Johns | Bin 0 -> 3655 bytes lib/pytz/zoneinfo/America/St_Kitts | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/St_Lucia | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/St_Thomas | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/St_Vincent | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Swift_Current | Bin 0 -> 560 bytes lib/pytz/zoneinfo/America/Tegucigalpa | Bin 0 -> 264 bytes lib/pytz/zoneinfo/America/Thule | Bin 0 -> 1514 bytes lib/pytz/zoneinfo/America/Thunder_Bay | Bin 0 -> 2202 bytes lib/pytz/zoneinfo/America/Tijuana | Bin 0 -> 2342 bytes lib/pytz/zoneinfo/America/Toronto | Bin 0 -> 3494 bytes lib/pytz/zoneinfo/America/Tortola | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Vancouver | Bin 0 -> 2892 bytes lib/pytz/zoneinfo/America/Virgin | Bin 0 -> 156 bytes lib/pytz/zoneinfo/America/Whitehorse | Bin 0 -> 2084 bytes lib/pytz/zoneinfo/America/Winnipeg | Bin 0 -> 2882 bytes lib/pytz/zoneinfo/America/Yakutat | Bin 0 -> 2305 bytes lib/pytz/zoneinfo/America/Yellowknife | Bin 0 -> 1966 bytes lib/pytz/zoneinfo/Antarctica/Casey | Bin 0 -> 297 bytes lib/pytz/zoneinfo/Antarctica/Davis | Bin 0 -> 297 bytes lib/pytz/zoneinfo/Antarctica/DumontDUrville | Bin 0 -> 202 bytes lib/pytz/zoneinfo/Antarctica/Macquarie | Bin 0 -> 1534 bytes lib/pytz/zoneinfo/Antarctica/Mawson | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Antarctica/McMurdo | Bin 0 -> 2451 bytes lib/pytz/zoneinfo/Antarctica/Palmer | Bin 0 -> 1418 bytes lib/pytz/zoneinfo/Antarctica/Rothera | Bin 0 -> 172 bytes lib/pytz/zoneinfo/Antarctica/South_Pole | Bin 0 -> 2451 bytes lib/pytz/zoneinfo/Antarctica/Syowa | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Antarctica/Troll | Bin 0 -> 1162 bytes lib/pytz/zoneinfo/Antarctica/Vostok | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Arctic/Longyearbyen | Bin 0 -> 2242 bytes lib/pytz/zoneinfo/Asia/Aden | Bin 0 
-> 173 bytes lib/pytz/zoneinfo/Asia/Almaty | Bin 0 -> 1017 bytes lib/pytz/zoneinfo/Asia/Amman | Bin 0 -> 1863 bytes lib/pytz/zoneinfo/Asia/Anadyr | Bin 0 -> 1208 bytes lib/pytz/zoneinfo/Asia/Aqtau | Bin 0 -> 1003 bytes lib/pytz/zoneinfo/Asia/Aqtobe | Bin 0 -> 1033 bytes lib/pytz/zoneinfo/Asia/Ashgabat | Bin 0 -> 637 bytes lib/pytz/zoneinfo/Asia/Ashkhabad | Bin 0 -> 637 bytes lib/pytz/zoneinfo/Asia/Atyrau | Bin 0 -> 1011 bytes lib/pytz/zoneinfo/Asia/Baghdad | Bin 0 -> 995 bytes lib/pytz/zoneinfo/Asia/Bahrain | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Asia/Baku | Bin 0 -> 1255 bytes lib/pytz/zoneinfo/Asia/Bangkok | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Asia/Barnaul | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Beirut | Bin 0 -> 2166 bytes lib/pytz/zoneinfo/Asia/Bishkek | Bin 0 -> 999 bytes lib/pytz/zoneinfo/Asia/Brunei | Bin 0 -> 215 bytes lib/pytz/zoneinfo/Asia/Calcutta | Bin 0 -> 303 bytes lib/pytz/zoneinfo/Asia/Chita | Bin 0 -> 1243 bytes lib/pytz/zoneinfo/Asia/Choibalsan | Bin 0 -> 977 bytes lib/pytz/zoneinfo/Asia/Chongqing | Bin 0 -> 545 bytes lib/pytz/zoneinfo/Asia/Chungking | Bin 0 -> 545 bytes lib/pytz/zoneinfo/Asia/Colombo | Bin 0 -> 404 bytes lib/pytz/zoneinfo/Asia/Dacca | Bin 0 -> 361 bytes lib/pytz/zoneinfo/Asia/Damascus | Bin 0 -> 2306 bytes lib/pytz/zoneinfo/Asia/Dhaka | Bin 0 -> 361 bytes lib/pytz/zoneinfo/Asia/Dili | Bin 0 -> 239 bytes lib/pytz/zoneinfo/Asia/Dubai | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Dushanbe | Bin 0 -> 607 bytes lib/pytz/zoneinfo/Asia/Famagusta | Bin 0 -> 2028 bytes lib/pytz/zoneinfo/Asia/Gaza | Bin 0 -> 2286 bytes lib/pytz/zoneinfo/Asia/Harbin | Bin 0 -> 545 bytes lib/pytz/zoneinfo/Asia/Hebron | Bin 0 -> 2314 bytes lib/pytz/zoneinfo/Asia/Ho_Chi_Minh | Bin 0 -> 375 bytes lib/pytz/zoneinfo/Asia/Hong_Kong | Bin 0 -> 1175 bytes lib/pytz/zoneinfo/Asia/Hovd | Bin 0 -> 907 bytes lib/pytz/zoneinfo/Asia/Irkutsk | Bin 0 -> 1267 bytes lib/pytz/zoneinfo/Asia/Istanbul | Bin 0 -> 2157 bytes lib/pytz/zoneinfo/Asia/Jakarta | Bin 0 -> 383 bytes 
lib/pytz/zoneinfo/Asia/Jayapura | Bin 0 -> 237 bytes lib/pytz/zoneinfo/Asia/Jerusalem | Bin 0 -> 2256 bytes lib/pytz/zoneinfo/Asia/Kabul | Bin 0 -> 220 bytes lib/pytz/zoneinfo/Asia/Kamchatka | Bin 0 -> 1184 bytes lib/pytz/zoneinfo/Asia/Karachi | Bin 0 -> 403 bytes lib/pytz/zoneinfo/Asia/Kashgar | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Kathmandu | Bin 0 -> 224 bytes lib/pytz/zoneinfo/Asia/Katmandu | Bin 0 -> 224 bytes lib/pytz/zoneinfo/Asia/Khandyga | Bin 0 -> 1297 bytes lib/pytz/zoneinfo/Asia/Kolkata | Bin 0 -> 303 bytes lib/pytz/zoneinfo/Asia/Krasnoyarsk | Bin 0 -> 1229 bytes lib/pytz/zoneinfo/Asia/Kuala_Lumpur | Bin 0 -> 415 bytes lib/pytz/zoneinfo/Asia/Kuching | Bin 0 -> 507 bytes lib/pytz/zoneinfo/Asia/Kuwait | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Macao | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Macau | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Magadan | Bin 0 -> 1244 bytes lib/pytz/zoneinfo/Asia/Makassar | Bin 0 -> 274 bytes lib/pytz/zoneinfo/Asia/Manila | Bin 0 -> 350 bytes lib/pytz/zoneinfo/Asia/Muscat | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Nicosia | Bin 0 -> 2002 bytes lib/pytz/zoneinfo/Asia/Novokuznetsk | Bin 0 -> 1183 bytes lib/pytz/zoneinfo/Asia/Novosibirsk | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Omsk | Bin 0 -> 1229 bytes lib/pytz/zoneinfo/Asia/Oral | Bin 0 -> 1025 bytes lib/pytz/zoneinfo/Asia/Phnom_Penh | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Asia/Pontianak | Bin 0 -> 381 bytes lib/pytz/zoneinfo/Asia/Pyongyang | Bin 0 -> 253 bytes lib/pytz/zoneinfo/Asia/Qatar | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Asia/Qyzylorda | Bin 0 -> 1017 bytes lib/pytz/zoneinfo/Asia/Rangoon | Bin 0 -> 288 bytes lib/pytz/zoneinfo/Asia/Riyadh | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Saigon | Bin 0 -> 375 bytes lib/pytz/zoneinfo/Asia/Sakhalin | Bin 0 -> 1220 bytes lib/pytz/zoneinfo/Asia/Samarkand | Bin 0 -> 605 bytes lib/pytz/zoneinfo/Asia/Seoul | Bin 0 -> 517 bytes lib/pytz/zoneinfo/Asia/Shanghai | Bin 0 -> 545 bytes lib/pytz/zoneinfo/Asia/Singapore | Bin 0 
-> 415 bytes lib/pytz/zoneinfo/Asia/Srednekolymsk | Bin 0 -> 1230 bytes lib/pytz/zoneinfo/Asia/Taipei | Bin 0 -> 781 bytes lib/pytz/zoneinfo/Asia/Tashkent | Bin 0 -> 621 bytes lib/pytz/zoneinfo/Asia/Tbilisi | Bin 0 -> 1071 bytes lib/pytz/zoneinfo/Asia/Tehran | Bin 0 -> 1704 bytes lib/pytz/zoneinfo/Asia/Tel_Aviv | Bin 0 -> 2256 bytes lib/pytz/zoneinfo/Asia/Thimbu | Bin 0 -> 215 bytes lib/pytz/zoneinfo/Asia/Thimphu | Bin 0 -> 215 bytes lib/pytz/zoneinfo/Asia/Tokyo | Bin 0 -> 309 bytes lib/pytz/zoneinfo/Asia/Tomsk | Bin 0 -> 1241 bytes lib/pytz/zoneinfo/Asia/Ujung_Pandang | Bin 0 -> 274 bytes lib/pytz/zoneinfo/Asia/Ulaanbaatar | Bin 0 -> 907 bytes lib/pytz/zoneinfo/Asia/Ulan_Bator | Bin 0 -> 907 bytes lib/pytz/zoneinfo/Asia/Urumqi | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Asia/Ust-Nera | Bin 0 -> 1276 bytes lib/pytz/zoneinfo/Asia/Vientiane | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Asia/Vladivostok | Bin 0 -> 1230 bytes lib/pytz/zoneinfo/Asia/Yakutsk | Bin 0 -> 1229 bytes lib/pytz/zoneinfo/Asia/Yangon | Bin 0 -> 288 bytes lib/pytz/zoneinfo/Asia/Yekaterinburg | Bin 0 -> 1267 bytes lib/pytz/zoneinfo/Asia/Yerevan | Bin 0 -> 1199 bytes lib/pytz/zoneinfo/Atlantic/Azores | Bin 0 -> 3484 bytes lib/pytz/zoneinfo/Atlantic/Bermuda | Bin 0 -> 1990 bytes lib/pytz/zoneinfo/Atlantic/Canary | Bin 0 -> 1897 bytes lib/pytz/zoneinfo/Atlantic/Cape_Verde | Bin 0 -> 270 bytes lib/pytz/zoneinfo/Atlantic/Faeroe | Bin 0 -> 1815 bytes lib/pytz/zoneinfo/Atlantic/Faroe | Bin 0 -> 1815 bytes lib/pytz/zoneinfo/Atlantic/Jan_Mayen | Bin 0 -> 2242 bytes lib/pytz/zoneinfo/Atlantic/Madeira | Bin 0 -> 3475 bytes lib/pytz/zoneinfo/Atlantic/Reykjavik | Bin 0 -> 1174 bytes lib/pytz/zoneinfo/Atlantic/South_Georgia | Bin 0 -> 172 bytes lib/pytz/zoneinfo/Atlantic/St_Helena | Bin 0 -> 156 bytes lib/pytz/zoneinfo/Atlantic/Stanley | Bin 0 -> 1242 bytes lib/pytz/zoneinfo/Australia/ACT | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/Adelaide | Bin 0 -> 2233 bytes lib/pytz/zoneinfo/Australia/Brisbane | Bin 0 -> 443 
bytes lib/pytz/zoneinfo/Australia/Broken_Hill | Bin 0 -> 2269 bytes lib/pytz/zoneinfo/Australia/Canberra | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/Currie | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/Darwin | Bin 0 -> 318 bytes lib/pytz/zoneinfo/Australia/Eucla | Bin 0 -> 494 bytes lib/pytz/zoneinfo/Australia/Hobart | Bin 0 -> 2326 bytes lib/pytz/zoneinfo/Australia/LHI | Bin 0 -> 1880 bytes lib/pytz/zoneinfo/Australia/Lindeman | Bin 0 -> 513 bytes lib/pytz/zoneinfo/Australia/Lord_Howe | Bin 0 -> 1880 bytes lib/pytz/zoneinfo/Australia/Melbourne | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/NSW | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/North | Bin 0 -> 318 bytes lib/pytz/zoneinfo/Australia/Perth | Bin 0 -> 470 bytes lib/pytz/zoneinfo/Australia/Queensland | Bin 0 -> 443 bytes lib/pytz/zoneinfo/Australia/South | Bin 0 -> 2233 bytes lib/pytz/zoneinfo/Australia/Sydney | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/Tasmania | Bin 0 -> 2326 bytes lib/pytz/zoneinfo/Australia/Victoria | Bin 0 -> 2214 bytes lib/pytz/zoneinfo/Australia/West | Bin 0 -> 470 bytes lib/pytz/zoneinfo/Australia/Yancowinna | Bin 0 -> 2269 bytes lib/pytz/zoneinfo/Brazil/Acre | Bin 0 -> 648 bytes lib/pytz/zoneinfo/Brazil/DeNoronha | Bin 0 -> 728 bytes lib/pytz/zoneinfo/Brazil/East | Bin 0 -> 2002 bytes lib/pytz/zoneinfo/Brazil/West | Bin 0 -> 616 bytes lib/pytz/zoneinfo/CET | Bin 0 -> 2102 bytes lib/pytz/zoneinfo/CST6CDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Canada/Atlantic | Bin 0 -> 3424 bytes lib/pytz/zoneinfo/Canada/Central | Bin 0 -> 2882 bytes lib/pytz/zoneinfo/Canada/Eastern | Bin 0 -> 3494 bytes lib/pytz/zoneinfo/Canada/Mountain | Bin 0 -> 2388 bytes lib/pytz/zoneinfo/Canada/Newfoundland | Bin 0 -> 3655 bytes lib/pytz/zoneinfo/Canada/Pacific | Bin 0 -> 2892 bytes lib/pytz/zoneinfo/Canada/Saskatchewan | Bin 0 -> 980 bytes lib/pytz/zoneinfo/Canada/Yukon | Bin 0 -> 2084 bytes lib/pytz/zoneinfo/Chile/Continental | Bin 0 -> 2529 bytes lib/pytz/zoneinfo/Chile/EasterIsland 
| Bin 0 -> 2233 bytes lib/pytz/zoneinfo/Cuba | Bin 0 -> 2428 bytes lib/pytz/zoneinfo/EET | Bin 0 -> 1876 bytes lib/pytz/zoneinfo/EST | Bin 0 -> 118 bytes lib/pytz/zoneinfo/EST5EDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Egypt | Bin 0 -> 1963 bytes lib/pytz/zoneinfo/Eire | Bin 0 -> 3522 bytes lib/pytz/zoneinfo/Etc/GMT | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/GMT+0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/GMT+1 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+10 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT+11 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT+12 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT+2 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+3 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+4 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+5 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+6 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+7 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+8 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT+9 | Bin 0 -> 120 bytes lib/pytz/zoneinfo/Etc/GMT-0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/GMT-1 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-10 | Bin 0 -> 122 bytes lib/pytz/zoneinfo/Etc/GMT-11 | Bin 0 -> 122 bytes lib/pytz/zoneinfo/Etc/GMT-12 | Bin 0 -> 122 bytes lib/pytz/zoneinfo/Etc/GMT-13 | Bin 0 -> 122 bytes lib/pytz/zoneinfo/Etc/GMT-14 | Bin 0 -> 122 bytes lib/pytz/zoneinfo/Etc/GMT-2 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-3 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-4 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-5 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-6 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-7 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-8 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT-9 | Bin 0 -> 121 bytes lib/pytz/zoneinfo/Etc/GMT0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/Greenwich | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/UCT | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/UTC | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/Universal | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Etc/Zulu | 
Bin 0 -> 118 bytes lib/pytz/zoneinfo/Europe/Amsterdam | Bin 0 -> 2940 bytes lib/pytz/zoneinfo/Europe/Andorra | Bin 0 -> 1742 bytes lib/pytz/zoneinfo/Europe/Astrakhan | Bin 0 -> 1183 bytes lib/pytz/zoneinfo/Europe/Athens | Bin 0 -> 2262 bytes lib/pytz/zoneinfo/Europe/Belfast | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/Europe/Belgrade | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/Berlin | Bin 0 -> 2326 bytes lib/pytz/zoneinfo/Europe/Bratislava | Bin 0 -> 2329 bytes lib/pytz/zoneinfo/Europe/Brussels | Bin 0 -> 2961 bytes lib/pytz/zoneinfo/Europe/Bucharest | Bin 0 -> 2212 bytes lib/pytz/zoneinfo/Europe/Budapest | Bin 0 -> 2396 bytes lib/pytz/zoneinfo/Europe/Busingen | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Europe/Chisinau | Bin 0 -> 2436 bytes lib/pytz/zoneinfo/Europe/Copenhagen | Bin 0 -> 2151 bytes lib/pytz/zoneinfo/Europe/Dublin | Bin 0 -> 3522 bytes lib/pytz/zoneinfo/Europe/Gibraltar | Bin 0 -> 3052 bytes lib/pytz/zoneinfo/Europe/Guernsey | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/Europe/Helsinki | Bin 0 -> 1900 bytes lib/pytz/zoneinfo/Europe/Isle_of_Man | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/Europe/Istanbul | Bin 0 -> 2157 bytes lib/pytz/zoneinfo/Europe/Jersey | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/Europe/Kaliningrad | Bin 0 -> 1509 bytes lib/pytz/zoneinfo/Europe/Kiev | Bin 0 -> 2088 bytes lib/pytz/zoneinfo/Europe/Kirov | Bin 0 -> 1153 bytes lib/pytz/zoneinfo/Europe/Lisbon | Bin 0 -> 3469 bytes lib/pytz/zoneinfo/Europe/Ljubljana | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/London | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/Europe/Luxembourg | Bin 0 -> 2960 bytes lib/pytz/zoneinfo/Europe/Madrid | Bin 0 -> 2628 bytes lib/pytz/zoneinfo/Europe/Malta | Bin 0 -> 2620 bytes lib/pytz/zoneinfo/Europe/Mariehamn | Bin 0 -> 1900 bytes lib/pytz/zoneinfo/Europe/Minsk | Bin 0 -> 1361 bytes lib/pytz/zoneinfo/Europe/Monaco | Bin 0 -> 2944 bytes lib/pytz/zoneinfo/Europe/Moscow | Bin 0 -> 1535 bytes lib/pytz/zoneinfo/Europe/Nicosia | Bin 0 -> 2002 bytes lib/pytz/zoneinfo/Europe/Oslo | Bin 
0 -> 2242 bytes lib/pytz/zoneinfo/Europe/Paris | Bin 0 -> 2962 bytes lib/pytz/zoneinfo/Europe/Podgorica | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/Prague | Bin 0 -> 2329 bytes lib/pytz/zoneinfo/Europe/Riga | Bin 0 -> 2226 bytes lib/pytz/zoneinfo/Europe/Rome | Bin 0 -> 2683 bytes lib/pytz/zoneinfo/Europe/Samara | Bin 0 -> 1215 bytes lib/pytz/zoneinfo/Europe/San_Marino | Bin 0 -> 2683 bytes lib/pytz/zoneinfo/Europe/Sarajevo | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/Saratov | Bin 0 -> 1183 bytes lib/pytz/zoneinfo/Europe/Simferopol | Bin 0 -> 1481 bytes lib/pytz/zoneinfo/Europe/Skopje | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/Sofia | Bin 0 -> 2121 bytes lib/pytz/zoneinfo/Europe/Stockholm | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Europe/Tallinn | Bin 0 -> 2178 bytes lib/pytz/zoneinfo/Europe/Tirane | Bin 0 -> 2084 bytes lib/pytz/zoneinfo/Europe/Tiraspol | Bin 0 -> 2436 bytes lib/pytz/zoneinfo/Europe/Ulyanovsk | Bin 0 -> 1267 bytes lib/pytz/zoneinfo/Europe/Uzhgorod | Bin 0 -> 2094 bytes lib/pytz/zoneinfo/Europe/Vaduz | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Europe/Vatican | Bin 0 -> 2683 bytes lib/pytz/zoneinfo/Europe/Vienna | Bin 0 -> 2228 bytes lib/pytz/zoneinfo/Europe/Vilnius | Bin 0 -> 2190 bytes lib/pytz/zoneinfo/Europe/Volgograd | Bin 0 -> 1183 bytes lib/pytz/zoneinfo/Europe/Warsaw | Bin 0 -> 2696 bytes lib/pytz/zoneinfo/Europe/Zagreb | Bin 0 -> 1948 bytes lib/pytz/zoneinfo/Europe/Zaporozhye | Bin 0 -> 2106 bytes lib/pytz/zoneinfo/Europe/Zurich | Bin 0 -> 1909 bytes lib/pytz/zoneinfo/Factory | Bin 0 -> 120 bytes lib/pytz/zoneinfo/GB | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/GB-Eire | Bin 0 -> 3678 bytes lib/pytz/zoneinfo/GMT | Bin 0 -> 118 bytes lib/pytz/zoneinfo/GMT+0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/GMT-0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/GMT0 | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Greenwich | Bin 0 -> 118 bytes lib/pytz/zoneinfo/HST | Bin 0 -> 119 bytes lib/pytz/zoneinfo/Hongkong | Bin 0 -> 1175 bytes lib/pytz/zoneinfo/Iceland | Bin 0 -> 1174 
bytes lib/pytz/zoneinfo/Indian/Antananarivo | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Indian/Chagos | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Indian/Christmas | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Indian/Cocos | Bin 0 -> 182 bytes lib/pytz/zoneinfo/Indian/Comoro | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Indian/Kerguelen | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Indian/Mahe | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Indian/Maldives | Bin 0 -> 211 bytes lib/pytz/zoneinfo/Indian/Mauritius | Bin 0 -> 253 bytes lib/pytz/zoneinfo/Indian/Mayotte | Bin 0 -> 271 bytes lib/pytz/zoneinfo/Indian/Reunion | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Iran | Bin 0 -> 1704 bytes lib/pytz/zoneinfo/Israel | Bin 0 -> 2256 bytes lib/pytz/zoneinfo/Jamaica | Bin 0 -> 498 bytes lib/pytz/zoneinfo/Japan | Bin 0 -> 309 bytes lib/pytz/zoneinfo/Kwajalein | Bin 0 -> 250 bytes lib/pytz/zoneinfo/Libya | Bin 0 -> 641 bytes lib/pytz/zoneinfo/MET | Bin 0 -> 2102 bytes lib/pytz/zoneinfo/MST | Bin 0 -> 118 bytes lib/pytz/zoneinfo/MST7MDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Mexico/BajaNorte | Bin 0 -> 2342 bytes lib/pytz/zoneinfo/Mexico/BajaSur | Bin 0 -> 1550 bytes lib/pytz/zoneinfo/Mexico/General | Bin 0 -> 1604 bytes lib/pytz/zoneinfo/NZ | Bin 0 -> 2451 bytes lib/pytz/zoneinfo/NZ-CHAT | Bin 0 -> 2078 bytes lib/pytz/zoneinfo/Navajo | Bin 0 -> 2444 bytes lib/pytz/zoneinfo/PRC | Bin 0 -> 545 bytes lib/pytz/zoneinfo/PST8PDT | Bin 0 -> 2294 bytes lib/pytz/zoneinfo/Pacific/Apia | Bin 0 -> 1125 bytes lib/pytz/zoneinfo/Pacific/Auckland | Bin 0 -> 2451 bytes lib/pytz/zoneinfo/Pacific/Bougainville | Bin 0 -> 286 bytes lib/pytz/zoneinfo/Pacific/Chatham | Bin 0 -> 2078 bytes lib/pytz/zoneinfo/Pacific/Chuuk | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Easter | Bin 0 -> 2233 bytes lib/pytz/zoneinfo/Pacific/Efate | Bin 0 -> 478 bytes lib/pytz/zoneinfo/Pacific/Enderbury | Bin 0 -> 250 bytes lib/pytz/zoneinfo/Pacific/Fakaofo | Bin 0 -> 212 bytes lib/pytz/zoneinfo/Pacific/Fiji | Bin 0 -> 1090 bytes 
lib/pytz/zoneinfo/Pacific/Funafuti | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Galapagos | Bin 0 -> 254 bytes lib/pytz/zoneinfo/Pacific/Gambier | Bin 0 -> 172 bytes lib/pytz/zoneinfo/Pacific/Guadalcanal | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Guam | Bin 0 -> 216 bytes lib/pytz/zoneinfo/Pacific/Honolulu | Bin 0 -> 329 bytes lib/pytz/zoneinfo/Pacific/Johnston | Bin 0 -> 329 bytes lib/pytz/zoneinfo/Pacific/Kiritimati | Bin 0 -> 254 bytes lib/pytz/zoneinfo/Pacific/Kosrae | Bin 0 -> 242 bytes lib/pytz/zoneinfo/Pacific/Kwajalein | Bin 0 -> 250 bytes lib/pytz/zoneinfo/Pacific/Majuro | Bin 0 -> 212 bytes lib/pytz/zoneinfo/Pacific/Marquesas | Bin 0 -> 181 bytes lib/pytz/zoneinfo/Pacific/Midway | Bin 0 -> 187 bytes lib/pytz/zoneinfo/Pacific/Nauru | Bin 0 -> 268 bytes lib/pytz/zoneinfo/Pacific/Niue | Bin 0 -> 257 bytes lib/pytz/zoneinfo/Pacific/Norfolk | Bin 0 -> 314 bytes lib/pytz/zoneinfo/Pacific/Noumea | Bin 0 -> 314 bytes lib/pytz/zoneinfo/Pacific/Pago_Pago | Bin 0 -> 187 bytes lib/pytz/zoneinfo/Pacific/Palau | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Pacific/Pitcairn | Bin 0 -> 214 bytes lib/pytz/zoneinfo/Pacific/Pohnpei | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Ponape | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Port_Moresby | Bin 0 -> 196 bytes lib/pytz/zoneinfo/Pacific/Rarotonga | Bin 0 -> 593 bytes lib/pytz/zoneinfo/Pacific/Saipan | Bin 0 -> 216 bytes lib/pytz/zoneinfo/Pacific/Samoa | Bin 0 -> 187 bytes lib/pytz/zoneinfo/Pacific/Tahiti | Bin 0 -> 173 bytes lib/pytz/zoneinfo/Pacific/Tarawa | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Tongatapu | Bin 0 -> 384 bytes lib/pytz/zoneinfo/Pacific/Truk | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Wake | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Wallis | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Pacific/Yap | Bin 0 -> 174 bytes lib/pytz/zoneinfo/Poland | Bin 0 -> 2696 bytes lib/pytz/zoneinfo/Portugal | Bin 0 -> 3469 bytes lib/pytz/zoneinfo/ROC | Bin 0 -> 781 bytes lib/pytz/zoneinfo/ROK | Bin 0 -> 517 bytes 
lib/pytz/zoneinfo/Singapore | Bin 0 -> 415 bytes lib/pytz/zoneinfo/Turkey | Bin 0 -> 2157 bytes lib/pytz/zoneinfo/UCT | Bin 0 -> 118 bytes lib/pytz/zoneinfo/US/Alaska | Bin 0 -> 2371 bytes lib/pytz/zoneinfo/US/Aleutian | Bin 0 -> 2356 bytes lib/pytz/zoneinfo/US/Arizona | Bin 0 -> 344 bytes lib/pytz/zoneinfo/US/Central | Bin 0 -> 3576 bytes lib/pytz/zoneinfo/US/East-Indiana | Bin 0 -> 1666 bytes lib/pytz/zoneinfo/US/Eastern | Bin 0 -> 3536 bytes lib/pytz/zoneinfo/US/Hawaii | Bin 0 -> 329 bytes lib/pytz/zoneinfo/US/Indiana-Starke | Bin 0 -> 2428 bytes lib/pytz/zoneinfo/US/Michigan | Bin 0 -> 2174 bytes lib/pytz/zoneinfo/US/Mountain | Bin 0 -> 2444 bytes lib/pytz/zoneinfo/US/Pacific | Bin 0 -> 2836 bytes lib/pytz/zoneinfo/US/Samoa | Bin 0 -> 187 bytes lib/pytz/zoneinfo/UTC | Bin 0 -> 118 bytes lib/pytz/zoneinfo/Universal | Bin 0 -> 118 bytes lib/pytz/zoneinfo/W-SU | Bin 0 -> 1535 bytes lib/pytz/zoneinfo/WET | Bin 0 -> 1873 bytes lib/pytz/zoneinfo/Zulu | Bin 0 -> 118 bytes lib/pytz/zoneinfo/iso3166.tab | 274 + lib/pytz/zoneinfo/leapseconds | 66 + lib/pytz/zoneinfo/posixrules | Bin 0 -> 3536 bytes lib/pytz/zoneinfo/tzdata.zi | 4177 +++++++++ lib/pytz/zoneinfo/zone.tab | 448 + lib/pytz/zoneinfo/zone1970.tab | 382 + lib/requests/__init__.py | 131 + lib/requests/__version__.py | 14 + lib/requests/_internal_utils.py | 42 + lib/requests/adapters.py | 533 ++ lib/requests/api.py | 158 + lib/requests/auth.py | 305 + lib/requests/certs.py | 18 + lib/requests/compat.py | 70 + lib/requests/cookies.py | 549 ++ lib/requests/exceptions.py | 126 + lib/requests/help.py | 119 + lib/requests/hooks.py | 34 + lib/requests/models.py | 953 ++ lib/requests/packages.py | 14 + lib/requests/sessions.py | 770 ++ lib/requests/status_codes.py | 120 + lib/requests/structures.py | 103 + lib/requests/utils.py | 977 ++ lib/schedule/__init__.py | 528 ++ lib/setuptools/__init__.py | 195 + lib/setuptools/_deprecation_warning.py | 7 + lib/setuptools/_vendor/__init__.py | 0 
lib/setuptools/_vendor/packaging/__about__.py | 21 + lib/setuptools/_vendor/packaging/__init__.py | 14 + lib/setuptools/_vendor/packaging/_compat.py | 30 + .../_vendor/packaging/_structures.py | 68 + lib/setuptools/_vendor/packaging/markers.py | 301 + .../_vendor/packaging/requirements.py | 127 + .../_vendor/packaging/specifiers.py | 774 ++ lib/setuptools/_vendor/packaging/utils.py | 14 + lib/setuptools/_vendor/packaging/version.py | 393 + lib/setuptools/_vendor/pyparsing.py | 5742 ++++++++++++ lib/setuptools/_vendor/six.py | 868 ++ lib/setuptools/archive_util.py | 173 + lib/setuptools/build_meta.py | 182 + lib/setuptools/cli-32.exe | Bin 0 -> 65536 bytes lib/setuptools/cli-64.exe | Bin 0 -> 74752 bytes lib/setuptools/cli.exe | Bin 0 -> 65536 bytes lib/setuptools/command/__init__.py | 18 + lib/setuptools/command/alias.py | 80 + lib/setuptools/command/bdist_egg.py | 502 + lib/setuptools/command/bdist_rpm.py | 43 + lib/setuptools/command/bdist_wininst.py | 21 + lib/setuptools/command/build_clib.py | 98 + lib/setuptools/command/build_ext.py | 321 + lib/setuptools/command/build_py.py | 270 + lib/setuptools/command/develop.py | 218 + lib/setuptools/command/dist_info.py | 36 + lib/setuptools/command/easy_install.py | 2342 +++++ lib/setuptools/command/egg_info.py | 716 ++ lib/setuptools/command/install.py | 125 + lib/setuptools/command/install_egg_info.py | 62 + lib/setuptools/command/install_lib.py | 121 + lib/setuptools/command/install_scripts.py | 65 + lib/setuptools/command/launcher manifest.xml | 15 + lib/setuptools/command/py36compat.py | 136 + lib/setuptools/command/register.py | 18 + lib/setuptools/command/rotate.py | 66 + lib/setuptools/command/saveopts.py | 22 + lib/setuptools/command/sdist.py | 200 + lib/setuptools/command/setopt.py | 149 + lib/setuptools/command/test.py | 270 + lib/setuptools/command/upload.py | 196 + lib/setuptools/command/upload_docs.py | 206 + lib/setuptools/config.py | 635 ++ lib/setuptools/dep_util.py | 23 + lib/setuptools/depends.py | 
186 + lib/setuptools/dist.py | 1147 +++ lib/setuptools/extension.py | 57 + lib/setuptools/extern/__init__.py | 73 + lib/setuptools/glibc.py | 86 + lib/setuptools/glob.py | 174 + lib/setuptools/gui-32.exe | Bin 0 -> 65536 bytes lib/setuptools/gui-64.exe | Bin 0 -> 75264 bytes lib/setuptools/gui.exe | Bin 0 -> 65536 bytes lib/setuptools/launch.py | 35 + lib/setuptools/lib2to3_ex.py | 62 + lib/setuptools/monkey.py | 179 + lib/setuptools/msvc.py | 1301 +++ lib/setuptools/namespaces.py | 107 + lib/setuptools/package_index.py | 1128 +++ lib/setuptools/pep425tags.py | 319 + lib/setuptools/py27compat.py | 28 + lib/setuptools/py31compat.py | 32 + lib/setuptools/py33compat.py | 55 + lib/setuptools/py36compat.py | 82 + lib/setuptools/sandbox.py | 491 + lib/setuptools/script (dev).tmpl | 6 + lib/setuptools/script.tmpl | 3 + lib/setuptools/site-patch.py | 74 + lib/setuptools/ssl_support.py | 260 + lib/setuptools/unicode_utils.py | 44 + lib/setuptools/version.py | 6 + lib/setuptools/wheel.py | 210 + lib/setuptools/windows_support.py | 29 + lib/six.py | 891 ++ lib/typing.py | 2413 +++++ lib/urllib3/__init__.py | 92 + lib/urllib3/_collections.py | 329 + lib/urllib3/connection.py | 391 + lib/urllib3/connectionpool.py | 896 ++ lib/urllib3/contrib/__init__.py | 0 lib/urllib3/contrib/_appengine_environ.py | 30 + .../contrib/_securetransport/__init__.py | 0 .../contrib/_securetransport/bindings.py | 593 ++ .../contrib/_securetransport/low_level.py | 346 + lib/urllib3/contrib/appengine.py | 289 + lib/urllib3/contrib/ntlmpool.py | 111 + lib/urllib3/contrib/pyopenssl.py | 466 + lib/urllib3/contrib/securetransport.py | 804 ++ lib/urllib3/contrib/socks.py | 192 + lib/urllib3/exceptions.py | 246 + lib/urllib3/fields.py | 178 + lib/urllib3/filepost.py | 98 + lib/urllib3/packages/__init__.py | 5 + lib/urllib3/packages/backports/__init__.py | 0 lib/urllib3/packages/backports/makefile.py | 53 + lib/urllib3/packages/six.py | 868 ++ .../packages/ssl_match_hostname/__init__.py | 19 + 
.../ssl_match_hostname/_implementation.py | 156 + lib/urllib3/poolmanager.py | 450 + lib/urllib3/request.py | 150 + lib/urllib3/response.py | 705 ++ lib/urllib3/util/__init__.py | 54 + lib/urllib3/util/connection.py | 134 + lib/urllib3/util/queue.py | 21 + lib/urllib3/util/request.py | 118 + lib/urllib3/util/response.py | 87 + lib/urllib3/util/retry.py | 411 + lib/urllib3/util/ssl_.py | 381 + lib/urllib3/util/timeout.py | 242 + lib/urllib3/util/url.py | 230 + lib/urllib3/util/wait.py | 150 + lib/zope.interface-4.6.0-py3.7-nspkg.pth | 1 + lib/zope/interface/__init__.py | 90 + lib/zope/interface/_compat.py | 58 + lib/zope/interface/_flatten.py | 35 + .../_zope_interface_coptimizations.c | 1726 ++++ ...pe_interface_coptimizations.cp37-win32.pyd | Bin 0 -> 22528 bytes lib/zope/interface/adapter.py | 712 ++ lib/zope/interface/advice.py | 205 + lib/zope/interface/common/__init__.py | 2 + lib/zope/interface/common/idatetime.py | 606 ++ lib/zope/interface/common/interfaces.py | 212 + lib/zope/interface/common/mapping.py | 150 + lib/zope/interface/common/sequence.py | 165 + lib/zope/interface/common/tests/__init__.py | 2 + .../interface/common/tests/basemapping.py | 107 + .../interface/common/tests/test_idatetime.py | 37 + .../common/tests/test_import_interfaces.py | 20 + lib/zope/interface/declarations.py | 929 ++ lib/zope/interface/document.py | 120 + lib/zope/interface/exceptions.py | 67 + lib/zope/interface/interface.py | 687 ++ lib/zope/interface/interfaces.py | 1282 +++ lib/zope/interface/registry.py | 654 ++ lib/zope/interface/ro.py | 64 + lib/zope/interface/tests/__init__.py | 1 + lib/zope/interface/tests/advisory_testing.py | 42 + lib/zope/interface/tests/dummy.py | 23 + lib/zope/interface/tests/idummy.py | 23 + lib/zope/interface/tests/ifoo.py | 26 + lib/zope/interface/tests/ifoo_other.py | 26 + lib/zope/interface/tests/m1.py | 21 + lib/zope/interface/tests/m2.py | 15 + lib/zope/interface/tests/odd.py | 128 + lib/zope/interface/tests/test_adapter.py | 1419 +++ 
lib/zope/interface/tests/test_advice.py | 355 + lib/zope/interface/tests/test_declarations.py | 1658 ++++ lib/zope/interface/tests/test_document.py | 505 + lib/zope/interface/tests/test_element.py | 31 + lib/zope/interface/tests/test_exceptions.py | 72 + lib/zope/interface/tests/test_interface.py | 2123 +++++ lib/zope/interface/tests/test_interfaces.py | 95 + .../interface/tests/test_odd_declarations.py | 268 + lib/zope/interface/tests/test_registry.py | 2788 ++++++ lib/zope/interface/tests/test_ro.py | 115 + lib/zope/interface/tests/test_sorting.py | 47 + lib/zope/interface/tests/test_verify.py | 582 ++ lib/zope/interface/verify.py | 123 + requirements.txt | 5 +- varken.py | 4 + 943 files changed, 125530 insertions(+), 16 deletions(-) create mode 100644 lib/DateTime/DateTime.py create mode 100644 lib/DateTime/DateTime.txt create mode 100644 lib/DateTime/__init__.py create mode 100644 lib/DateTime/interfaces.py create mode 100644 lib/DateTime/pytz.txt create mode 100644 lib/DateTime/pytz_support.py create mode 100644 lib/DateTime/tests/__init__.py create mode 100644 lib/DateTime/tests/julian_testdata.txt create mode 100644 lib/DateTime/tests/test_datetime.py create mode 100644 lib/backports/configparser/__init__.py create mode 100644 lib/backports/configparser/helpers.py create mode 100644 lib/bin/chardetect.exe create mode 100644 lib/bin/easy_install-3.7.exe create mode 100644 lib/bin/easy_install.exe create mode 100644 lib/certifi/__init__.py create mode 100644 lib/certifi/__main__.py create mode 100644 lib/certifi/cacert.pem create mode 100644 lib/certifi/core.py create mode 100644 lib/chardet/__init__.py create mode 100644 lib/chardet/big5freq.py create mode 100644 lib/chardet/big5prober.py create mode 100644 lib/chardet/chardistribution.py create mode 100644 lib/chardet/charsetgroupprober.py create mode 100644 lib/chardet/charsetprober.py create mode 100644 lib/chardet/cli/__init__.py create mode 100644 lib/chardet/cli/chardetect.py create mode 100644 
lib/chardet/codingstatemachine.py create mode 100644 lib/chardet/compat.py create mode 100644 lib/chardet/cp949prober.py create mode 100644 lib/chardet/enums.py create mode 100644 lib/chardet/escprober.py create mode 100644 lib/chardet/escsm.py create mode 100644 lib/chardet/eucjpprober.py create mode 100644 lib/chardet/euckrfreq.py create mode 100644 lib/chardet/euckrprober.py create mode 100644 lib/chardet/euctwfreq.py create mode 100644 lib/chardet/euctwprober.py create mode 100644 lib/chardet/gb2312freq.py create mode 100644 lib/chardet/gb2312prober.py create mode 100644 lib/chardet/hebrewprober.py create mode 100644 lib/chardet/jisfreq.py create mode 100644 lib/chardet/jpcntx.py create mode 100644 lib/chardet/langbulgarianmodel.py create mode 100644 lib/chardet/langcyrillicmodel.py create mode 100644 lib/chardet/langgreekmodel.py create mode 100644 lib/chardet/langhebrewmodel.py create mode 100644 lib/chardet/langhungarianmodel.py create mode 100644 lib/chardet/langthaimodel.py create mode 100644 lib/chardet/langturkishmodel.py create mode 100644 lib/chardet/latin1prober.py create mode 100644 lib/chardet/mbcharsetprober.py create mode 100644 lib/chardet/mbcsgroupprober.py create mode 100644 lib/chardet/mbcssm.py create mode 100644 lib/chardet/sbcharsetprober.py create mode 100644 lib/chardet/sbcsgroupprober.py create mode 100644 lib/chardet/sjisprober.py create mode 100644 lib/chardet/universaldetector.py create mode 100644 lib/chardet/utf8prober.py create mode 100644 lib/chardet/version.py create mode 100644 lib/configparser-3.5.0-py3.7-nspkg.pth create mode 100644 lib/dateutil/__init__.py create mode 100644 lib/dateutil/_common.py create mode 100644 lib/dateutil/_version.py create mode 100644 lib/dateutil/easter.py create mode 100644 lib/dateutil/parser/__init__.py create mode 100644 lib/dateutil/parser/_parser.py create mode 100644 lib/dateutil/parser/isoparser.py create mode 100644 lib/dateutil/relativedelta.py create mode 100644 lib/dateutil/rrule.py 
create mode 100644 lib/dateutil/tz/__init__.py create mode 100644 lib/dateutil/tz/_common.py create mode 100644 lib/dateutil/tz/_factories.py create mode 100644 lib/dateutil/tz/tz.py create mode 100644 lib/dateutil/tz/win.py create mode 100644 lib/dateutil/tzwin.py create mode 100644 lib/dateutil/utils.py create mode 100644 lib/dateutil/zoneinfo/__init__.py create mode 100644 lib/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz create mode 100644 lib/dateutil/zoneinfo/rebuild.py create mode 100644 lib/easy_install.py create mode 100644 lib/geoip2/__init__.py create mode 100644 lib/geoip2/compat.py create mode 100644 lib/geoip2/database.py create mode 100644 lib/geoip2/errors.py create mode 100644 lib/geoip2/mixins.py create mode 100644 lib/geoip2/models.py create mode 100644 lib/geoip2/records.py create mode 100644 lib/geoip2/webservice.py create mode 100644 lib/idna/__init__.py create mode 100644 lib/idna/codec.py create mode 100644 lib/idna/compat.py create mode 100644 lib/idna/core.py create mode 100644 lib/idna/idnadata.py create mode 100644 lib/idna/intranges.py create mode 100644 lib/idna/package_data.py create mode 100644 lib/idna/uts46data.py create mode 100644 lib/influxdb/__init__.py create mode 100644 lib/influxdb/_dataframe_client.py create mode 100644 lib/influxdb/chunked_json.py create mode 100644 lib/influxdb/client.py create mode 100644 lib/influxdb/dataframe_client.py create mode 100644 lib/influxdb/exceptions.py create mode 100644 lib/influxdb/helper.py create mode 100644 lib/influxdb/influxdb08/__init__.py create mode 100644 lib/influxdb/influxdb08/chunked_json.py create mode 100644 lib/influxdb/influxdb08/client.py create mode 100644 lib/influxdb/influxdb08/dataframe_client.py create mode 100644 lib/influxdb/influxdb08/helper.py create mode 100644 lib/influxdb/line_protocol.py create mode 100644 lib/influxdb/resultset.py create mode 100644 lib/influxdb/tests/__init__.py create mode 100644 lib/influxdb/tests/chunked_json_test.py create mode 100644 
lib/influxdb/tests/client_test.py create mode 100644 lib/influxdb/tests/dataframe_client_test.py create mode 100644 lib/influxdb/tests/helper_test.py create mode 100644 lib/influxdb/tests/influxdb08/__init__.py create mode 100644 lib/influxdb/tests/influxdb08/client_test.py create mode 100644 lib/influxdb/tests/influxdb08/dataframe_client_test.py create mode 100644 lib/influxdb/tests/influxdb08/helper_test.py create mode 100644 lib/influxdb/tests/misc.py create mode 100644 lib/influxdb/tests/resultset_test.py create mode 100644 lib/influxdb/tests/server_tests/__init__.py create mode 100644 lib/influxdb/tests/server_tests/base.py create mode 100644 lib/influxdb/tests/server_tests/client_test_with_server.py create mode 100644 lib/influxdb/tests/server_tests/influxdb_instance.py create mode 100644 lib/influxdb/tests/test_line_protocol.py create mode 100644 lib/maxminddb/__init__.py create mode 100644 lib/maxminddb/compat.py create mode 100644 lib/maxminddb/const.py create mode 100644 lib/maxminddb/decoder.py create mode 100644 lib/maxminddb/errors.py create mode 100644 lib/maxminddb/extension/maxminddb.c create mode 100644 lib/maxminddb/file.py create mode 100644 lib/maxminddb/reader.py create mode 100644 lib/pkg_resources/__init__.py create mode 100644 lib/pkg_resources/_vendor/__init__.py create mode 100644 lib/pkg_resources/_vendor/appdirs.py create mode 100644 lib/pkg_resources/_vendor/packaging/__about__.py create mode 100644 lib/pkg_resources/_vendor/packaging/__init__.py create mode 100644 lib/pkg_resources/_vendor/packaging/_compat.py create mode 100644 lib/pkg_resources/_vendor/packaging/_structures.py create mode 100644 lib/pkg_resources/_vendor/packaging/markers.py create mode 100644 lib/pkg_resources/_vendor/packaging/requirements.py create mode 100644 lib/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 lib/pkg_resources/_vendor/packaging/utils.py create mode 100644 lib/pkg_resources/_vendor/packaging/version.py create mode 100644 
lib/pkg_resources/_vendor/pyparsing.py create mode 100644 lib/pkg_resources/_vendor/six.py create mode 100644 lib/pkg_resources/extern/__init__.py create mode 100644 lib/pkg_resources/py31compat.py create mode 100644 lib/pytz/__init__.py create mode 100644 lib/pytz/exceptions.py create mode 100644 lib/pytz/lazy.py create mode 100644 lib/pytz/reference.py create mode 100644 lib/pytz/tzfile.py create mode 100644 lib/pytz/tzinfo.py create mode 100644 lib/pytz/zoneinfo/Africa/Abidjan create mode 100644 lib/pytz/zoneinfo/Africa/Accra create mode 100644 lib/pytz/zoneinfo/Africa/Addis_Ababa create mode 100644 lib/pytz/zoneinfo/Africa/Algiers create mode 100644 lib/pytz/zoneinfo/Africa/Asmara create mode 100644 lib/pytz/zoneinfo/Africa/Asmera create mode 100644 lib/pytz/zoneinfo/Africa/Bamako create mode 100644 lib/pytz/zoneinfo/Africa/Bangui create mode 100644 lib/pytz/zoneinfo/Africa/Banjul create mode 100644 lib/pytz/zoneinfo/Africa/Bissau create mode 100644 lib/pytz/zoneinfo/Africa/Blantyre create mode 100644 lib/pytz/zoneinfo/Africa/Brazzaville create mode 100644 lib/pytz/zoneinfo/Africa/Bujumbura create mode 100644 lib/pytz/zoneinfo/Africa/Cairo create mode 100644 lib/pytz/zoneinfo/Africa/Casablanca create mode 100644 lib/pytz/zoneinfo/Africa/Ceuta create mode 100644 lib/pytz/zoneinfo/Africa/Conakry create mode 100644 lib/pytz/zoneinfo/Africa/Dakar create mode 100644 lib/pytz/zoneinfo/Africa/Dar_es_Salaam create mode 100644 lib/pytz/zoneinfo/Africa/Djibouti create mode 100644 lib/pytz/zoneinfo/Africa/Douala create mode 100644 lib/pytz/zoneinfo/Africa/El_Aaiun create mode 100644 lib/pytz/zoneinfo/Africa/Freetown create mode 100644 lib/pytz/zoneinfo/Africa/Gaborone create mode 100644 lib/pytz/zoneinfo/Africa/Harare create mode 100644 lib/pytz/zoneinfo/Africa/Johannesburg create mode 100644 lib/pytz/zoneinfo/Africa/Juba create mode 100644 lib/pytz/zoneinfo/Africa/Kampala create mode 100644 lib/pytz/zoneinfo/Africa/Khartoum create mode 100644 
lib/pytz/zoneinfo/Africa/Kigali create mode 100644 lib/pytz/zoneinfo/Africa/Kinshasa create mode 100644 lib/pytz/zoneinfo/Africa/Lagos create mode 100644 lib/pytz/zoneinfo/Africa/Libreville create mode 100644 lib/pytz/zoneinfo/Africa/Lome create mode 100644 lib/pytz/zoneinfo/Africa/Luanda create mode 100644 lib/pytz/zoneinfo/Africa/Lubumbashi create mode 100644 lib/pytz/zoneinfo/Africa/Lusaka create mode 100644 lib/pytz/zoneinfo/Africa/Malabo create mode 100644 lib/pytz/zoneinfo/Africa/Maputo create mode 100644 lib/pytz/zoneinfo/Africa/Maseru create mode 100644 lib/pytz/zoneinfo/Africa/Mbabane create mode 100644 lib/pytz/zoneinfo/Africa/Mogadishu create mode 100644 lib/pytz/zoneinfo/Africa/Monrovia create mode 100644 lib/pytz/zoneinfo/Africa/Nairobi create mode 100644 lib/pytz/zoneinfo/Africa/Ndjamena create mode 100644 lib/pytz/zoneinfo/Africa/Niamey create mode 100644 lib/pytz/zoneinfo/Africa/Nouakchott create mode 100644 lib/pytz/zoneinfo/Africa/Ouagadougou create mode 100644 lib/pytz/zoneinfo/Africa/Porto-Novo create mode 100644 lib/pytz/zoneinfo/Africa/Sao_Tome create mode 100644 lib/pytz/zoneinfo/Africa/Timbuktu create mode 100644 lib/pytz/zoneinfo/Africa/Tripoli create mode 100644 lib/pytz/zoneinfo/Africa/Tunis create mode 100644 lib/pytz/zoneinfo/Africa/Windhoek create mode 100644 lib/pytz/zoneinfo/America/Adak create mode 100644 lib/pytz/zoneinfo/America/Anchorage create mode 100644 lib/pytz/zoneinfo/America/Anguilla create mode 100644 lib/pytz/zoneinfo/America/Antigua create mode 100644 lib/pytz/zoneinfo/America/Araguaina create mode 100644 lib/pytz/zoneinfo/America/Argentina/Buenos_Aires create mode 100644 lib/pytz/zoneinfo/America/Argentina/Catamarca create mode 100644 lib/pytz/zoneinfo/America/Argentina/ComodRivadavia create mode 100644 lib/pytz/zoneinfo/America/Argentina/Cordoba create mode 100644 lib/pytz/zoneinfo/America/Argentina/Jujuy create mode 100644 lib/pytz/zoneinfo/America/Argentina/La_Rioja create mode 100644 
lib/pytz/zoneinfo/America/Argentina/Mendoza create mode 100644 lib/pytz/zoneinfo/America/Argentina/Rio_Gallegos create mode 100644 lib/pytz/zoneinfo/America/Argentina/Salta create mode 100644 lib/pytz/zoneinfo/America/Argentina/San_Juan create mode 100644 lib/pytz/zoneinfo/America/Argentina/San_Luis create mode 100644 lib/pytz/zoneinfo/America/Argentina/Tucuman create mode 100644 lib/pytz/zoneinfo/America/Argentina/Ushuaia create mode 100644 lib/pytz/zoneinfo/America/Aruba create mode 100644 lib/pytz/zoneinfo/America/Asuncion create mode 100644 lib/pytz/zoneinfo/America/Atikokan create mode 100644 lib/pytz/zoneinfo/America/Atka create mode 100644 lib/pytz/zoneinfo/America/Bahia create mode 100644 lib/pytz/zoneinfo/America/Bahia_Banderas create mode 100644 lib/pytz/zoneinfo/America/Barbados create mode 100644 lib/pytz/zoneinfo/America/Belem create mode 100644 lib/pytz/zoneinfo/America/Belize create mode 100644 lib/pytz/zoneinfo/America/Blanc-Sablon create mode 100644 lib/pytz/zoneinfo/America/Boa_Vista create mode 100644 lib/pytz/zoneinfo/America/Bogota create mode 100644 lib/pytz/zoneinfo/America/Boise create mode 100644 lib/pytz/zoneinfo/America/Buenos_Aires create mode 100644 lib/pytz/zoneinfo/America/Cambridge_Bay create mode 100644 lib/pytz/zoneinfo/America/Campo_Grande create mode 100644 lib/pytz/zoneinfo/America/Cancun create mode 100644 lib/pytz/zoneinfo/America/Caracas create mode 100644 lib/pytz/zoneinfo/America/Catamarca create mode 100644 lib/pytz/zoneinfo/America/Cayenne create mode 100644 lib/pytz/zoneinfo/America/Cayman create mode 100644 lib/pytz/zoneinfo/America/Chicago create mode 100644 lib/pytz/zoneinfo/America/Chihuahua create mode 100644 lib/pytz/zoneinfo/America/Coral_Harbour create mode 100644 lib/pytz/zoneinfo/America/Cordoba create mode 100644 lib/pytz/zoneinfo/America/Costa_Rica create mode 100644 lib/pytz/zoneinfo/America/Creston create mode 100644 lib/pytz/zoneinfo/America/Cuiaba create mode 100644 lib/pytz/zoneinfo/America/Curacao 
create mode 100644 lib/pytz/zoneinfo/America/Danmarkshavn create mode 100644 lib/pytz/zoneinfo/America/Dawson create mode 100644 lib/pytz/zoneinfo/America/Dawson_Creek create mode 100644 lib/pytz/zoneinfo/America/Denver create mode 100644 lib/pytz/zoneinfo/America/Detroit create mode 100644 lib/pytz/zoneinfo/America/Dominica create mode 100644 lib/pytz/zoneinfo/America/Edmonton create mode 100644 lib/pytz/zoneinfo/America/Eirunepe create mode 100644 lib/pytz/zoneinfo/America/El_Salvador create mode 100644 lib/pytz/zoneinfo/America/Ensenada create mode 100644 lib/pytz/zoneinfo/America/Fort_Nelson create mode 100644 lib/pytz/zoneinfo/America/Fort_Wayne create mode 100644 lib/pytz/zoneinfo/America/Fortaleza create mode 100644 lib/pytz/zoneinfo/America/Glace_Bay create mode 100644 lib/pytz/zoneinfo/America/Godthab create mode 100644 lib/pytz/zoneinfo/America/Goose_Bay create mode 100644 lib/pytz/zoneinfo/America/Grand_Turk create mode 100644 lib/pytz/zoneinfo/America/Grenada create mode 100644 lib/pytz/zoneinfo/America/Guadeloupe create mode 100644 lib/pytz/zoneinfo/America/Guatemala create mode 100644 lib/pytz/zoneinfo/America/Guayaquil create mode 100644 lib/pytz/zoneinfo/America/Guyana create mode 100644 lib/pytz/zoneinfo/America/Halifax create mode 100644 lib/pytz/zoneinfo/America/Havana create mode 100644 lib/pytz/zoneinfo/America/Hermosillo create mode 100644 lib/pytz/zoneinfo/America/Indiana/Indianapolis create mode 100644 lib/pytz/zoneinfo/America/Indiana/Knox create mode 100644 lib/pytz/zoneinfo/America/Indiana/Marengo create mode 100644 lib/pytz/zoneinfo/America/Indiana/Petersburg create mode 100644 lib/pytz/zoneinfo/America/Indiana/Tell_City create mode 100644 lib/pytz/zoneinfo/America/Indiana/Vevay create mode 100644 lib/pytz/zoneinfo/America/Indiana/Vincennes create mode 100644 lib/pytz/zoneinfo/America/Indiana/Winamac create mode 100644 lib/pytz/zoneinfo/America/Indianapolis create mode 100644 lib/pytz/zoneinfo/America/Inuvik create mode 100644 
lib/pytz/zoneinfo/America/Iqaluit create mode 100644 lib/pytz/zoneinfo/America/Jamaica create mode 100644 lib/pytz/zoneinfo/America/Jujuy create mode 100644 lib/pytz/zoneinfo/America/Juneau create mode 100644 lib/pytz/zoneinfo/America/Kentucky/Louisville create mode 100644 lib/pytz/zoneinfo/America/Kentucky/Monticello create mode 100644 lib/pytz/zoneinfo/America/Knox_IN create mode 100644 lib/pytz/zoneinfo/America/Kralendijk create mode 100644 lib/pytz/zoneinfo/America/La_Paz create mode 100644 lib/pytz/zoneinfo/America/Lima create mode 100644 lib/pytz/zoneinfo/America/Los_Angeles create mode 100644 lib/pytz/zoneinfo/America/Louisville create mode 100644 lib/pytz/zoneinfo/America/Lower_Princes create mode 100644 lib/pytz/zoneinfo/America/Maceio create mode 100644 lib/pytz/zoneinfo/America/Managua create mode 100644 lib/pytz/zoneinfo/America/Manaus create mode 100644 lib/pytz/zoneinfo/America/Marigot create mode 100644 lib/pytz/zoneinfo/America/Martinique create mode 100644 lib/pytz/zoneinfo/America/Matamoros create mode 100644 lib/pytz/zoneinfo/America/Mazatlan create mode 100644 lib/pytz/zoneinfo/America/Mendoza create mode 100644 lib/pytz/zoneinfo/America/Menominee create mode 100644 lib/pytz/zoneinfo/America/Merida create mode 100644 lib/pytz/zoneinfo/America/Metlakatla create mode 100644 lib/pytz/zoneinfo/America/Mexico_City create mode 100644 lib/pytz/zoneinfo/America/Miquelon create mode 100644 lib/pytz/zoneinfo/America/Moncton create mode 100644 lib/pytz/zoneinfo/America/Monterrey create mode 100644 lib/pytz/zoneinfo/America/Montevideo create mode 100644 lib/pytz/zoneinfo/America/Montreal create mode 100644 lib/pytz/zoneinfo/America/Montserrat create mode 100644 lib/pytz/zoneinfo/America/Nassau create mode 100644 lib/pytz/zoneinfo/America/New_York create mode 100644 lib/pytz/zoneinfo/America/Nipigon create mode 100644 lib/pytz/zoneinfo/America/Nome create mode 100644 lib/pytz/zoneinfo/America/Noronha create mode 100644 
lib/pytz/zoneinfo/America/North_Dakota/Beulah create mode 100644 lib/pytz/zoneinfo/America/North_Dakota/Center create mode 100644 lib/pytz/zoneinfo/America/North_Dakota/New_Salem create mode 100644 lib/pytz/zoneinfo/America/Ojinaga create mode 100644 lib/pytz/zoneinfo/America/Panama create mode 100644 lib/pytz/zoneinfo/America/Pangnirtung create mode 100644 lib/pytz/zoneinfo/America/Paramaribo create mode 100644 lib/pytz/zoneinfo/America/Phoenix create mode 100644 lib/pytz/zoneinfo/America/Port-au-Prince create mode 100644 lib/pytz/zoneinfo/America/Port_of_Spain create mode 100644 lib/pytz/zoneinfo/America/Porto_Acre create mode 100644 lib/pytz/zoneinfo/America/Porto_Velho create mode 100644 lib/pytz/zoneinfo/America/Puerto_Rico create mode 100644 lib/pytz/zoneinfo/America/Punta_Arenas create mode 100644 lib/pytz/zoneinfo/America/Rainy_River create mode 100644 lib/pytz/zoneinfo/America/Rankin_Inlet create mode 100644 lib/pytz/zoneinfo/America/Recife create mode 100644 lib/pytz/zoneinfo/America/Regina create mode 100644 lib/pytz/zoneinfo/America/Resolute create mode 100644 lib/pytz/zoneinfo/America/Rio_Branco create mode 100644 lib/pytz/zoneinfo/America/Rosario create mode 100644 lib/pytz/zoneinfo/America/Santa_Isabel create mode 100644 lib/pytz/zoneinfo/America/Santarem create mode 100644 lib/pytz/zoneinfo/America/Santiago create mode 100644 lib/pytz/zoneinfo/America/Santo_Domingo create mode 100644 lib/pytz/zoneinfo/America/Sao_Paulo create mode 100644 lib/pytz/zoneinfo/America/Scoresbysund create mode 100644 lib/pytz/zoneinfo/America/Shiprock create mode 100644 lib/pytz/zoneinfo/America/Sitka create mode 100644 lib/pytz/zoneinfo/America/St_Barthelemy create mode 100644 lib/pytz/zoneinfo/America/St_Johns create mode 100644 lib/pytz/zoneinfo/America/St_Kitts create mode 100644 lib/pytz/zoneinfo/America/St_Lucia create mode 100644 lib/pytz/zoneinfo/America/St_Thomas create mode 100644 lib/pytz/zoneinfo/America/St_Vincent create mode 100644 
lib/pytz/zoneinfo/America/Swift_Current create mode 100644 lib/pytz/zoneinfo/America/Tegucigalpa create mode 100644 lib/pytz/zoneinfo/America/Thule create mode 100644 lib/pytz/zoneinfo/America/Thunder_Bay create mode 100644 lib/pytz/zoneinfo/America/Tijuana create mode 100644 lib/pytz/zoneinfo/America/Toronto create mode 100644 lib/pytz/zoneinfo/America/Tortola create mode 100644 lib/pytz/zoneinfo/America/Vancouver create mode 100644 lib/pytz/zoneinfo/America/Virgin create mode 100644 lib/pytz/zoneinfo/America/Whitehorse create mode 100644 lib/pytz/zoneinfo/America/Winnipeg create mode 100644 lib/pytz/zoneinfo/America/Yakutat create mode 100644 lib/pytz/zoneinfo/America/Yellowknife create mode 100644 lib/pytz/zoneinfo/Antarctica/Casey create mode 100644 lib/pytz/zoneinfo/Antarctica/Davis create mode 100644 lib/pytz/zoneinfo/Antarctica/DumontDUrville create mode 100644 lib/pytz/zoneinfo/Antarctica/Macquarie create mode 100644 lib/pytz/zoneinfo/Antarctica/Mawson create mode 100644 lib/pytz/zoneinfo/Antarctica/McMurdo create mode 100644 lib/pytz/zoneinfo/Antarctica/Palmer create mode 100644 lib/pytz/zoneinfo/Antarctica/Rothera create mode 100644 lib/pytz/zoneinfo/Antarctica/South_Pole create mode 100644 lib/pytz/zoneinfo/Antarctica/Syowa create mode 100644 lib/pytz/zoneinfo/Antarctica/Troll create mode 100644 lib/pytz/zoneinfo/Antarctica/Vostok create mode 100644 lib/pytz/zoneinfo/Arctic/Longyearbyen create mode 100644 lib/pytz/zoneinfo/Asia/Aden create mode 100644 lib/pytz/zoneinfo/Asia/Almaty create mode 100644 lib/pytz/zoneinfo/Asia/Amman create mode 100644 lib/pytz/zoneinfo/Asia/Anadyr create mode 100644 lib/pytz/zoneinfo/Asia/Aqtau create mode 100644 lib/pytz/zoneinfo/Asia/Aqtobe create mode 100644 lib/pytz/zoneinfo/Asia/Ashgabat create mode 100644 lib/pytz/zoneinfo/Asia/Ashkhabad create mode 100644 lib/pytz/zoneinfo/Asia/Atyrau create mode 100644 lib/pytz/zoneinfo/Asia/Baghdad create mode 100644 lib/pytz/zoneinfo/Asia/Bahrain create mode 100644 
lib/pytz/zoneinfo/Asia/Baku create mode 100644 lib/pytz/zoneinfo/Asia/Bangkok create mode 100644 lib/pytz/zoneinfo/Asia/Barnaul create mode 100644 lib/pytz/zoneinfo/Asia/Beirut create mode 100644 lib/pytz/zoneinfo/Asia/Bishkek create mode 100644 lib/pytz/zoneinfo/Asia/Brunei create mode 100644 lib/pytz/zoneinfo/Asia/Calcutta create mode 100644 lib/pytz/zoneinfo/Asia/Chita create mode 100644 lib/pytz/zoneinfo/Asia/Choibalsan create mode 100644 lib/pytz/zoneinfo/Asia/Chongqing create mode 100644 lib/pytz/zoneinfo/Asia/Chungking create mode 100644 lib/pytz/zoneinfo/Asia/Colombo create mode 100644 lib/pytz/zoneinfo/Asia/Dacca create mode 100644 lib/pytz/zoneinfo/Asia/Damascus create mode 100644 lib/pytz/zoneinfo/Asia/Dhaka create mode 100644 lib/pytz/zoneinfo/Asia/Dili create mode 100644 lib/pytz/zoneinfo/Asia/Dubai create mode 100644 lib/pytz/zoneinfo/Asia/Dushanbe create mode 100644 lib/pytz/zoneinfo/Asia/Famagusta create mode 100644 lib/pytz/zoneinfo/Asia/Gaza create mode 100644 lib/pytz/zoneinfo/Asia/Harbin create mode 100644 lib/pytz/zoneinfo/Asia/Hebron create mode 100644 lib/pytz/zoneinfo/Asia/Ho_Chi_Minh create mode 100644 lib/pytz/zoneinfo/Asia/Hong_Kong create mode 100644 lib/pytz/zoneinfo/Asia/Hovd create mode 100644 lib/pytz/zoneinfo/Asia/Irkutsk create mode 100644 lib/pytz/zoneinfo/Asia/Istanbul create mode 100644 lib/pytz/zoneinfo/Asia/Jakarta create mode 100644 lib/pytz/zoneinfo/Asia/Jayapura create mode 100644 lib/pytz/zoneinfo/Asia/Jerusalem create mode 100644 lib/pytz/zoneinfo/Asia/Kabul create mode 100644 lib/pytz/zoneinfo/Asia/Kamchatka create mode 100644 lib/pytz/zoneinfo/Asia/Karachi create mode 100644 lib/pytz/zoneinfo/Asia/Kashgar create mode 100644 lib/pytz/zoneinfo/Asia/Kathmandu create mode 100644 lib/pytz/zoneinfo/Asia/Katmandu create mode 100644 lib/pytz/zoneinfo/Asia/Khandyga create mode 100644 lib/pytz/zoneinfo/Asia/Kolkata create mode 100644 lib/pytz/zoneinfo/Asia/Krasnoyarsk create mode 100644 lib/pytz/zoneinfo/Asia/Kuala_Lumpur create 
mode 100644 lib/pytz/zoneinfo/Asia/Kuching create mode 100644 lib/pytz/zoneinfo/Asia/Kuwait create mode 100644 lib/pytz/zoneinfo/Asia/Macao create mode 100644 lib/pytz/zoneinfo/Asia/Macau create mode 100644 lib/pytz/zoneinfo/Asia/Magadan create mode 100644 lib/pytz/zoneinfo/Asia/Makassar create mode 100644 lib/pytz/zoneinfo/Asia/Manila create mode 100644 lib/pytz/zoneinfo/Asia/Muscat create mode 100644 lib/pytz/zoneinfo/Asia/Nicosia create mode 100644 lib/pytz/zoneinfo/Asia/Novokuznetsk create mode 100644 lib/pytz/zoneinfo/Asia/Novosibirsk create mode 100644 lib/pytz/zoneinfo/Asia/Omsk create mode 100644 lib/pytz/zoneinfo/Asia/Oral create mode 100644 lib/pytz/zoneinfo/Asia/Phnom_Penh create mode 100644 lib/pytz/zoneinfo/Asia/Pontianak create mode 100644 lib/pytz/zoneinfo/Asia/Pyongyang create mode 100644 lib/pytz/zoneinfo/Asia/Qatar create mode 100644 lib/pytz/zoneinfo/Asia/Qyzylorda create mode 100644 lib/pytz/zoneinfo/Asia/Rangoon create mode 100644 lib/pytz/zoneinfo/Asia/Riyadh create mode 100644 lib/pytz/zoneinfo/Asia/Saigon create mode 100644 lib/pytz/zoneinfo/Asia/Sakhalin create mode 100644 lib/pytz/zoneinfo/Asia/Samarkand create mode 100644 lib/pytz/zoneinfo/Asia/Seoul create mode 100644 lib/pytz/zoneinfo/Asia/Shanghai create mode 100644 lib/pytz/zoneinfo/Asia/Singapore create mode 100644 lib/pytz/zoneinfo/Asia/Srednekolymsk create mode 100644 lib/pytz/zoneinfo/Asia/Taipei create mode 100644 lib/pytz/zoneinfo/Asia/Tashkent create mode 100644 lib/pytz/zoneinfo/Asia/Tbilisi create mode 100644 lib/pytz/zoneinfo/Asia/Tehran create mode 100644 lib/pytz/zoneinfo/Asia/Tel_Aviv create mode 100644 lib/pytz/zoneinfo/Asia/Thimbu create mode 100644 lib/pytz/zoneinfo/Asia/Thimphu create mode 100644 lib/pytz/zoneinfo/Asia/Tokyo create mode 100644 lib/pytz/zoneinfo/Asia/Tomsk create mode 100644 lib/pytz/zoneinfo/Asia/Ujung_Pandang create mode 100644 lib/pytz/zoneinfo/Asia/Ulaanbaatar create mode 100644 lib/pytz/zoneinfo/Asia/Ulan_Bator create mode 100644 
lib/pytz/zoneinfo/Asia/Urumqi create mode 100644 lib/pytz/zoneinfo/Asia/Ust-Nera create mode 100644 lib/pytz/zoneinfo/Asia/Vientiane create mode 100644 lib/pytz/zoneinfo/Asia/Vladivostok create mode 100644 lib/pytz/zoneinfo/Asia/Yakutsk create mode 100644 lib/pytz/zoneinfo/Asia/Yangon create mode 100644 lib/pytz/zoneinfo/Asia/Yekaterinburg create mode 100644 lib/pytz/zoneinfo/Asia/Yerevan create mode 100644 lib/pytz/zoneinfo/Atlantic/Azores create mode 100644 lib/pytz/zoneinfo/Atlantic/Bermuda create mode 100644 lib/pytz/zoneinfo/Atlantic/Canary create mode 100644 lib/pytz/zoneinfo/Atlantic/Cape_Verde create mode 100644 lib/pytz/zoneinfo/Atlantic/Faeroe create mode 100644 lib/pytz/zoneinfo/Atlantic/Faroe create mode 100644 lib/pytz/zoneinfo/Atlantic/Jan_Mayen create mode 100644 lib/pytz/zoneinfo/Atlantic/Madeira create mode 100644 lib/pytz/zoneinfo/Atlantic/Reykjavik create mode 100644 lib/pytz/zoneinfo/Atlantic/South_Georgia create mode 100644 lib/pytz/zoneinfo/Atlantic/St_Helena create mode 100644 lib/pytz/zoneinfo/Atlantic/Stanley create mode 100644 lib/pytz/zoneinfo/Australia/ACT create mode 100644 lib/pytz/zoneinfo/Australia/Adelaide create mode 100644 lib/pytz/zoneinfo/Australia/Brisbane create mode 100644 lib/pytz/zoneinfo/Australia/Broken_Hill create mode 100644 lib/pytz/zoneinfo/Australia/Canberra create mode 100644 lib/pytz/zoneinfo/Australia/Currie create mode 100644 lib/pytz/zoneinfo/Australia/Darwin create mode 100644 lib/pytz/zoneinfo/Australia/Eucla create mode 100644 lib/pytz/zoneinfo/Australia/Hobart create mode 100644 lib/pytz/zoneinfo/Australia/LHI create mode 100644 lib/pytz/zoneinfo/Australia/Lindeman create mode 100644 lib/pytz/zoneinfo/Australia/Lord_Howe create mode 100644 lib/pytz/zoneinfo/Australia/Melbourne create mode 100644 lib/pytz/zoneinfo/Australia/NSW create mode 100644 lib/pytz/zoneinfo/Australia/North create mode 100644 lib/pytz/zoneinfo/Australia/Perth create mode 100644 lib/pytz/zoneinfo/Australia/Queensland create mode 100644 
lib/pytz/zoneinfo/Australia/South create mode 100644 lib/pytz/zoneinfo/Australia/Sydney create mode 100644 lib/pytz/zoneinfo/Australia/Tasmania create mode 100644 lib/pytz/zoneinfo/Australia/Victoria create mode 100644 lib/pytz/zoneinfo/Australia/West create mode 100644 lib/pytz/zoneinfo/Australia/Yancowinna create mode 100644 lib/pytz/zoneinfo/Brazil/Acre create mode 100644 lib/pytz/zoneinfo/Brazil/DeNoronha create mode 100644 lib/pytz/zoneinfo/Brazil/East create mode 100644 lib/pytz/zoneinfo/Brazil/West create mode 100644 lib/pytz/zoneinfo/CET create mode 100644 lib/pytz/zoneinfo/CST6CDT create mode 100644 lib/pytz/zoneinfo/Canada/Atlantic create mode 100644 lib/pytz/zoneinfo/Canada/Central create mode 100644 lib/pytz/zoneinfo/Canada/Eastern create mode 100644 lib/pytz/zoneinfo/Canada/Mountain create mode 100644 lib/pytz/zoneinfo/Canada/Newfoundland create mode 100644 lib/pytz/zoneinfo/Canada/Pacific create mode 100644 lib/pytz/zoneinfo/Canada/Saskatchewan create mode 100644 lib/pytz/zoneinfo/Canada/Yukon create mode 100644 lib/pytz/zoneinfo/Chile/Continental create mode 100644 lib/pytz/zoneinfo/Chile/EasterIsland create mode 100644 lib/pytz/zoneinfo/Cuba create mode 100644 lib/pytz/zoneinfo/EET create mode 100644 lib/pytz/zoneinfo/EST create mode 100644 lib/pytz/zoneinfo/EST5EDT create mode 100644 lib/pytz/zoneinfo/Egypt create mode 100644 lib/pytz/zoneinfo/Eire create mode 100644 lib/pytz/zoneinfo/Etc/GMT create mode 100644 lib/pytz/zoneinfo/Etc/GMT+0 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+1 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+10 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+11 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+12 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+2 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+3 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+4 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+5 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+6 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+7 create mode 100644 lib/pytz/zoneinfo/Etc/GMT+8 
create mode 100644 lib/pytz/zoneinfo/Etc/GMT+9 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-0 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-1 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-10 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-11 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-12 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-13 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-14 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-2 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-3 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-4 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-5 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-6 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-7 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-8 create mode 100644 lib/pytz/zoneinfo/Etc/GMT-9 create mode 100644 lib/pytz/zoneinfo/Etc/GMT0 create mode 100644 lib/pytz/zoneinfo/Etc/Greenwich create mode 100644 lib/pytz/zoneinfo/Etc/UCT create mode 100644 lib/pytz/zoneinfo/Etc/UTC create mode 100644 lib/pytz/zoneinfo/Etc/Universal create mode 100644 lib/pytz/zoneinfo/Etc/Zulu create mode 100644 lib/pytz/zoneinfo/Europe/Amsterdam create mode 100644 lib/pytz/zoneinfo/Europe/Andorra create mode 100644 lib/pytz/zoneinfo/Europe/Astrakhan create mode 100644 lib/pytz/zoneinfo/Europe/Athens create mode 100644 lib/pytz/zoneinfo/Europe/Belfast create mode 100644 lib/pytz/zoneinfo/Europe/Belgrade create mode 100644 lib/pytz/zoneinfo/Europe/Berlin create mode 100644 lib/pytz/zoneinfo/Europe/Bratislava create mode 100644 lib/pytz/zoneinfo/Europe/Brussels create mode 100644 lib/pytz/zoneinfo/Europe/Bucharest create mode 100644 lib/pytz/zoneinfo/Europe/Budapest create mode 100644 lib/pytz/zoneinfo/Europe/Busingen create mode 100644 lib/pytz/zoneinfo/Europe/Chisinau create mode 100644 lib/pytz/zoneinfo/Europe/Copenhagen create mode 100644 lib/pytz/zoneinfo/Europe/Dublin create mode 100644 lib/pytz/zoneinfo/Europe/Gibraltar create mode 100644 lib/pytz/zoneinfo/Europe/Guernsey create mode 100644 lib/pytz/zoneinfo/Europe/Helsinki 
create mode 100644 lib/pytz/zoneinfo/Europe/Isle_of_Man create mode 100644 lib/pytz/zoneinfo/Europe/Istanbul create mode 100644 lib/pytz/zoneinfo/Europe/Jersey create mode 100644 lib/pytz/zoneinfo/Europe/Kaliningrad create mode 100644 lib/pytz/zoneinfo/Europe/Kiev create mode 100644 lib/pytz/zoneinfo/Europe/Kirov create mode 100644 lib/pytz/zoneinfo/Europe/Lisbon create mode 100644 lib/pytz/zoneinfo/Europe/Ljubljana create mode 100644 lib/pytz/zoneinfo/Europe/London create mode 100644 lib/pytz/zoneinfo/Europe/Luxembourg create mode 100644 lib/pytz/zoneinfo/Europe/Madrid create mode 100644 lib/pytz/zoneinfo/Europe/Malta create mode 100644 lib/pytz/zoneinfo/Europe/Mariehamn create mode 100644 lib/pytz/zoneinfo/Europe/Minsk create mode 100644 lib/pytz/zoneinfo/Europe/Monaco create mode 100644 lib/pytz/zoneinfo/Europe/Moscow create mode 100644 lib/pytz/zoneinfo/Europe/Nicosia create mode 100644 lib/pytz/zoneinfo/Europe/Oslo create mode 100644 lib/pytz/zoneinfo/Europe/Paris create mode 100644 lib/pytz/zoneinfo/Europe/Podgorica create mode 100644 lib/pytz/zoneinfo/Europe/Prague create mode 100644 lib/pytz/zoneinfo/Europe/Riga create mode 100644 lib/pytz/zoneinfo/Europe/Rome create mode 100644 lib/pytz/zoneinfo/Europe/Samara create mode 100644 lib/pytz/zoneinfo/Europe/San_Marino create mode 100644 lib/pytz/zoneinfo/Europe/Sarajevo create mode 100644 lib/pytz/zoneinfo/Europe/Saratov create mode 100644 lib/pytz/zoneinfo/Europe/Simferopol create mode 100644 lib/pytz/zoneinfo/Europe/Skopje create mode 100644 lib/pytz/zoneinfo/Europe/Sofia create mode 100644 lib/pytz/zoneinfo/Europe/Stockholm create mode 100644 lib/pytz/zoneinfo/Europe/Tallinn create mode 100644 lib/pytz/zoneinfo/Europe/Tirane create mode 100644 lib/pytz/zoneinfo/Europe/Tiraspol create mode 100644 lib/pytz/zoneinfo/Europe/Ulyanovsk create mode 100644 lib/pytz/zoneinfo/Europe/Uzhgorod create mode 100644 lib/pytz/zoneinfo/Europe/Vaduz create mode 100644 lib/pytz/zoneinfo/Europe/Vatican create mode 100644 
lib/pytz/zoneinfo/Europe/Vienna create mode 100644 lib/pytz/zoneinfo/Europe/Vilnius create mode 100644 lib/pytz/zoneinfo/Europe/Volgograd create mode 100644 lib/pytz/zoneinfo/Europe/Warsaw create mode 100644 lib/pytz/zoneinfo/Europe/Zagreb create mode 100644 lib/pytz/zoneinfo/Europe/Zaporozhye create mode 100644 lib/pytz/zoneinfo/Europe/Zurich create mode 100644 lib/pytz/zoneinfo/Factory create mode 100644 lib/pytz/zoneinfo/GB create mode 100644 lib/pytz/zoneinfo/GB-Eire create mode 100644 lib/pytz/zoneinfo/GMT create mode 100644 lib/pytz/zoneinfo/GMT+0 create mode 100644 lib/pytz/zoneinfo/GMT-0 create mode 100644 lib/pytz/zoneinfo/GMT0 create mode 100644 lib/pytz/zoneinfo/Greenwich create mode 100644 lib/pytz/zoneinfo/HST create mode 100644 lib/pytz/zoneinfo/Hongkong create mode 100644 lib/pytz/zoneinfo/Iceland create mode 100644 lib/pytz/zoneinfo/Indian/Antananarivo create mode 100644 lib/pytz/zoneinfo/Indian/Chagos create mode 100644 lib/pytz/zoneinfo/Indian/Christmas create mode 100644 lib/pytz/zoneinfo/Indian/Cocos create mode 100644 lib/pytz/zoneinfo/Indian/Comoro create mode 100644 lib/pytz/zoneinfo/Indian/Kerguelen create mode 100644 lib/pytz/zoneinfo/Indian/Mahe create mode 100644 lib/pytz/zoneinfo/Indian/Maldives create mode 100644 lib/pytz/zoneinfo/Indian/Mauritius create mode 100644 lib/pytz/zoneinfo/Indian/Mayotte create mode 100644 lib/pytz/zoneinfo/Indian/Reunion create mode 100644 lib/pytz/zoneinfo/Iran create mode 100644 lib/pytz/zoneinfo/Israel create mode 100644 lib/pytz/zoneinfo/Jamaica create mode 100644 lib/pytz/zoneinfo/Japan create mode 100644 lib/pytz/zoneinfo/Kwajalein create mode 100644 lib/pytz/zoneinfo/Libya create mode 100644 lib/pytz/zoneinfo/MET create mode 100644 lib/pytz/zoneinfo/MST create mode 100644 lib/pytz/zoneinfo/MST7MDT create mode 100644 lib/pytz/zoneinfo/Mexico/BajaNorte create mode 100644 lib/pytz/zoneinfo/Mexico/BajaSur create mode 100644 lib/pytz/zoneinfo/Mexico/General create mode 100644 lib/pytz/zoneinfo/NZ create 
mode 100644 lib/pytz/zoneinfo/NZ-CHAT create mode 100644 lib/pytz/zoneinfo/Navajo create mode 100644 lib/pytz/zoneinfo/PRC create mode 100644 lib/pytz/zoneinfo/PST8PDT create mode 100644 lib/pytz/zoneinfo/Pacific/Apia create mode 100644 lib/pytz/zoneinfo/Pacific/Auckland create mode 100644 lib/pytz/zoneinfo/Pacific/Bougainville create mode 100644 lib/pytz/zoneinfo/Pacific/Chatham create mode 100644 lib/pytz/zoneinfo/Pacific/Chuuk create mode 100644 lib/pytz/zoneinfo/Pacific/Easter create mode 100644 lib/pytz/zoneinfo/Pacific/Efate create mode 100644 lib/pytz/zoneinfo/Pacific/Enderbury create mode 100644 lib/pytz/zoneinfo/Pacific/Fakaofo create mode 100644 lib/pytz/zoneinfo/Pacific/Fiji create mode 100644 lib/pytz/zoneinfo/Pacific/Funafuti create mode 100644 lib/pytz/zoneinfo/Pacific/Galapagos create mode 100644 lib/pytz/zoneinfo/Pacific/Gambier create mode 100644 lib/pytz/zoneinfo/Pacific/Guadalcanal create mode 100644 lib/pytz/zoneinfo/Pacific/Guam create mode 100644 lib/pytz/zoneinfo/Pacific/Honolulu create mode 100644 lib/pytz/zoneinfo/Pacific/Johnston create mode 100644 lib/pytz/zoneinfo/Pacific/Kiritimati create mode 100644 lib/pytz/zoneinfo/Pacific/Kosrae create mode 100644 lib/pytz/zoneinfo/Pacific/Kwajalein create mode 100644 lib/pytz/zoneinfo/Pacific/Majuro create mode 100644 lib/pytz/zoneinfo/Pacific/Marquesas create mode 100644 lib/pytz/zoneinfo/Pacific/Midway create mode 100644 lib/pytz/zoneinfo/Pacific/Nauru create mode 100644 lib/pytz/zoneinfo/Pacific/Niue create mode 100644 lib/pytz/zoneinfo/Pacific/Norfolk create mode 100644 lib/pytz/zoneinfo/Pacific/Noumea create mode 100644 lib/pytz/zoneinfo/Pacific/Pago_Pago create mode 100644 lib/pytz/zoneinfo/Pacific/Palau create mode 100644 lib/pytz/zoneinfo/Pacific/Pitcairn create mode 100644 lib/pytz/zoneinfo/Pacific/Pohnpei create mode 100644 lib/pytz/zoneinfo/Pacific/Ponape create mode 100644 lib/pytz/zoneinfo/Pacific/Port_Moresby create mode 100644 lib/pytz/zoneinfo/Pacific/Rarotonga create mode 100644 
lib/pytz/zoneinfo/Pacific/Saipan create mode 100644 lib/pytz/zoneinfo/Pacific/Samoa create mode 100644 lib/pytz/zoneinfo/Pacific/Tahiti create mode 100644 lib/pytz/zoneinfo/Pacific/Tarawa create mode 100644 lib/pytz/zoneinfo/Pacific/Tongatapu create mode 100644 lib/pytz/zoneinfo/Pacific/Truk create mode 100644 lib/pytz/zoneinfo/Pacific/Wake create mode 100644 lib/pytz/zoneinfo/Pacific/Wallis create mode 100644 lib/pytz/zoneinfo/Pacific/Yap create mode 100644 lib/pytz/zoneinfo/Poland create mode 100644 lib/pytz/zoneinfo/Portugal create mode 100644 lib/pytz/zoneinfo/ROC create mode 100644 lib/pytz/zoneinfo/ROK create mode 100644 lib/pytz/zoneinfo/Singapore create mode 100644 lib/pytz/zoneinfo/Turkey create mode 100644 lib/pytz/zoneinfo/UCT create mode 100644 lib/pytz/zoneinfo/US/Alaska create mode 100644 lib/pytz/zoneinfo/US/Aleutian create mode 100644 lib/pytz/zoneinfo/US/Arizona create mode 100644 lib/pytz/zoneinfo/US/Central create mode 100644 lib/pytz/zoneinfo/US/East-Indiana create mode 100644 lib/pytz/zoneinfo/US/Eastern create mode 100644 lib/pytz/zoneinfo/US/Hawaii create mode 100644 lib/pytz/zoneinfo/US/Indiana-Starke create mode 100644 lib/pytz/zoneinfo/US/Michigan create mode 100644 lib/pytz/zoneinfo/US/Mountain create mode 100644 lib/pytz/zoneinfo/US/Pacific create mode 100644 lib/pytz/zoneinfo/US/Samoa create mode 100644 lib/pytz/zoneinfo/UTC create mode 100644 lib/pytz/zoneinfo/Universal create mode 100644 lib/pytz/zoneinfo/W-SU create mode 100644 lib/pytz/zoneinfo/WET create mode 100644 lib/pytz/zoneinfo/Zulu create mode 100644 lib/pytz/zoneinfo/iso3166.tab create mode 100644 lib/pytz/zoneinfo/leapseconds create mode 100644 lib/pytz/zoneinfo/posixrules create mode 100644 lib/pytz/zoneinfo/tzdata.zi create mode 100644 lib/pytz/zoneinfo/zone.tab create mode 100644 lib/pytz/zoneinfo/zone1970.tab create mode 100644 lib/requests/__init__.py create mode 100644 lib/requests/__version__.py create mode 100644 lib/requests/_internal_utils.py create mode 100644 
lib/requests/adapters.py create mode 100644 lib/requests/api.py create mode 100644 lib/requests/auth.py create mode 100644 lib/requests/certs.py create mode 100644 lib/requests/compat.py create mode 100644 lib/requests/cookies.py create mode 100644 lib/requests/exceptions.py create mode 100644 lib/requests/help.py create mode 100644 lib/requests/hooks.py create mode 100644 lib/requests/models.py create mode 100644 lib/requests/packages.py create mode 100644 lib/requests/sessions.py create mode 100644 lib/requests/status_codes.py create mode 100644 lib/requests/structures.py create mode 100644 lib/requests/utils.py create mode 100644 lib/schedule/__init__.py create mode 100644 lib/setuptools/__init__.py create mode 100644 lib/setuptools/_deprecation_warning.py create mode 100644 lib/setuptools/_vendor/__init__.py create mode 100644 lib/setuptools/_vendor/packaging/__about__.py create mode 100644 lib/setuptools/_vendor/packaging/__init__.py create mode 100644 lib/setuptools/_vendor/packaging/_compat.py create mode 100644 lib/setuptools/_vendor/packaging/_structures.py create mode 100644 lib/setuptools/_vendor/packaging/markers.py create mode 100644 lib/setuptools/_vendor/packaging/requirements.py create mode 100644 lib/setuptools/_vendor/packaging/specifiers.py create mode 100644 lib/setuptools/_vendor/packaging/utils.py create mode 100644 lib/setuptools/_vendor/packaging/version.py create mode 100644 lib/setuptools/_vendor/pyparsing.py create mode 100644 lib/setuptools/_vendor/six.py create mode 100644 lib/setuptools/archive_util.py create mode 100644 lib/setuptools/build_meta.py create mode 100644 lib/setuptools/cli-32.exe create mode 100644 lib/setuptools/cli-64.exe create mode 100644 lib/setuptools/cli.exe create mode 100644 lib/setuptools/command/__init__.py create mode 100644 lib/setuptools/command/alias.py create mode 100644 lib/setuptools/command/bdist_egg.py create mode 100644 lib/setuptools/command/bdist_rpm.py create mode 100644 
lib/setuptools/command/bdist_wininst.py create mode 100644 lib/setuptools/command/build_clib.py create mode 100644 lib/setuptools/command/build_ext.py create mode 100644 lib/setuptools/command/build_py.py create mode 100644 lib/setuptools/command/develop.py create mode 100644 lib/setuptools/command/dist_info.py create mode 100644 lib/setuptools/command/easy_install.py create mode 100644 lib/setuptools/command/egg_info.py create mode 100644 lib/setuptools/command/install.py create mode 100644 lib/setuptools/command/install_egg_info.py create mode 100644 lib/setuptools/command/install_lib.py create mode 100644 lib/setuptools/command/install_scripts.py create mode 100644 lib/setuptools/command/launcher manifest.xml create mode 100644 lib/setuptools/command/py36compat.py create mode 100644 lib/setuptools/command/register.py create mode 100644 lib/setuptools/command/rotate.py create mode 100644 lib/setuptools/command/saveopts.py create mode 100644 lib/setuptools/command/sdist.py create mode 100644 lib/setuptools/command/setopt.py create mode 100644 lib/setuptools/command/test.py create mode 100644 lib/setuptools/command/upload.py create mode 100644 lib/setuptools/command/upload_docs.py create mode 100644 lib/setuptools/config.py create mode 100644 lib/setuptools/dep_util.py create mode 100644 lib/setuptools/depends.py create mode 100644 lib/setuptools/dist.py create mode 100644 lib/setuptools/extension.py create mode 100644 lib/setuptools/extern/__init__.py create mode 100644 lib/setuptools/glibc.py create mode 100644 lib/setuptools/glob.py create mode 100644 lib/setuptools/gui-32.exe create mode 100644 lib/setuptools/gui-64.exe create mode 100644 lib/setuptools/gui.exe create mode 100644 lib/setuptools/launch.py create mode 100644 lib/setuptools/lib2to3_ex.py create mode 100644 lib/setuptools/monkey.py create mode 100644 lib/setuptools/msvc.py create mode 100644 lib/setuptools/namespaces.py create mode 100644 lib/setuptools/package_index.py create mode 100644 
lib/setuptools/pep425tags.py create mode 100644 lib/setuptools/py27compat.py create mode 100644 lib/setuptools/py31compat.py create mode 100644 lib/setuptools/py33compat.py create mode 100644 lib/setuptools/py36compat.py create mode 100644 lib/setuptools/sandbox.py create mode 100644 lib/setuptools/script (dev).tmpl create mode 100644 lib/setuptools/script.tmpl create mode 100644 lib/setuptools/site-patch.py create mode 100644 lib/setuptools/ssl_support.py create mode 100644 lib/setuptools/unicode_utils.py create mode 100644 lib/setuptools/version.py create mode 100644 lib/setuptools/wheel.py create mode 100644 lib/setuptools/windows_support.py create mode 100644 lib/six.py create mode 100644 lib/typing.py create mode 100644 lib/urllib3/__init__.py create mode 100644 lib/urllib3/_collections.py create mode 100644 lib/urllib3/connection.py create mode 100644 lib/urllib3/connectionpool.py create mode 100644 lib/urllib3/contrib/__init__.py create mode 100644 lib/urllib3/contrib/_appengine_environ.py create mode 100644 lib/urllib3/contrib/_securetransport/__init__.py create mode 100644 lib/urllib3/contrib/_securetransport/bindings.py create mode 100644 lib/urllib3/contrib/_securetransport/low_level.py create mode 100644 lib/urllib3/contrib/appengine.py create mode 100644 lib/urllib3/contrib/ntlmpool.py create mode 100644 lib/urllib3/contrib/pyopenssl.py create mode 100644 lib/urllib3/contrib/securetransport.py create mode 100644 lib/urllib3/contrib/socks.py create mode 100644 lib/urllib3/exceptions.py create mode 100644 lib/urllib3/fields.py create mode 100644 lib/urllib3/filepost.py create mode 100644 lib/urllib3/packages/__init__.py create mode 100644 lib/urllib3/packages/backports/__init__.py create mode 100644 lib/urllib3/packages/backports/makefile.py create mode 100644 lib/urllib3/packages/six.py create mode 100644 lib/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 lib/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 
lib/urllib3/poolmanager.py create mode 100644 lib/urllib3/request.py create mode 100644 lib/urllib3/response.py create mode 100644 lib/urllib3/util/__init__.py create mode 100644 lib/urllib3/util/connection.py create mode 100644 lib/urllib3/util/queue.py create mode 100644 lib/urllib3/util/request.py create mode 100644 lib/urllib3/util/response.py create mode 100644 lib/urllib3/util/retry.py create mode 100644 lib/urllib3/util/ssl_.py create mode 100644 lib/urllib3/util/timeout.py create mode 100644 lib/urllib3/util/url.py create mode 100644 lib/urllib3/util/wait.py create mode 100644 lib/zope.interface-4.6.0-py3.7-nspkg.pth create mode 100644 lib/zope/interface/__init__.py create mode 100644 lib/zope/interface/_compat.py create mode 100644 lib/zope/interface/_flatten.py create mode 100644 lib/zope/interface/_zope_interface_coptimizations.c create mode 100644 lib/zope/interface/_zope_interface_coptimizations.cp37-win32.pyd create mode 100644 lib/zope/interface/adapter.py create mode 100644 lib/zope/interface/advice.py create mode 100644 lib/zope/interface/common/__init__.py create mode 100644 lib/zope/interface/common/idatetime.py create mode 100644 lib/zope/interface/common/interfaces.py create mode 100644 lib/zope/interface/common/mapping.py create mode 100644 lib/zope/interface/common/sequence.py create mode 100644 lib/zope/interface/common/tests/__init__.py create mode 100644 lib/zope/interface/common/tests/basemapping.py create mode 100644 lib/zope/interface/common/tests/test_idatetime.py create mode 100644 lib/zope/interface/common/tests/test_import_interfaces.py create mode 100644 lib/zope/interface/declarations.py create mode 100644 lib/zope/interface/document.py create mode 100644 lib/zope/interface/exceptions.py create mode 100644 lib/zope/interface/interface.py create mode 100644 lib/zope/interface/interfaces.py create mode 100644 lib/zope/interface/registry.py create mode 100644 lib/zope/interface/ro.py create mode 100644 
lib/zope/interface/tests/__init__.py create mode 100644 lib/zope/interface/tests/advisory_testing.py create mode 100644 lib/zope/interface/tests/dummy.py create mode 100644 lib/zope/interface/tests/idummy.py create mode 100644 lib/zope/interface/tests/ifoo.py create mode 100644 lib/zope/interface/tests/ifoo_other.py create mode 100644 lib/zope/interface/tests/m1.py create mode 100644 lib/zope/interface/tests/m2.py create mode 100644 lib/zope/interface/tests/odd.py create mode 100644 lib/zope/interface/tests/test_adapter.py create mode 100644 lib/zope/interface/tests/test_advice.py create mode 100644 lib/zope/interface/tests/test_declarations.py create mode 100644 lib/zope/interface/tests/test_document.py create mode 100644 lib/zope/interface/tests/test_element.py create mode 100644 lib/zope/interface/tests/test_exceptions.py create mode 100644 lib/zope/interface/tests/test_interface.py create mode 100644 lib/zope/interface/tests/test_interfaces.py create mode 100644 lib/zope/interface/tests/test_odd_declarations.py create mode 100644 lib/zope/interface/tests/test_registry.py create mode 100644 lib/zope/interface/tests/test_ro.py create mode 100644 lib/zope/interface/tests/test_sorting.py create mode 100644 lib/zope/interface/tests/test_verify.py create mode 100644 lib/zope/interface/verify.py diff --git a/Varken/dbmanager.py b/Varken/dbmanager.py index de5dc1d5..8f081d63 100644 --- a/Varken/dbmanager.py +++ b/Varken/dbmanager.py @@ -1,5 +1,10 @@ +from sys import path +from os.path import abspath, dirname, join +path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + from influxdb import InfluxDBClient + class DBManager(object): def __init__(self, server): self.server = server diff --git a/Varken/helpers.py b/Varken/helpers.py index e464b895..1560073a 100644 --- a/Varken/helpers.py +++ b/Varken/helpers.py @@ -1,9 +1,12 @@ -import os +from sys import path +from os.path import abspath, basename, join, dirname +path.insert(0, abspath(join(dirname(__file__), 
'..', 'lib'))) + import time import tarfile import geoip2.database +from os import stat, remove from typing import NamedTuple -from os.path import abspath, join from urllib.request import urlretrieve @@ -339,9 +342,10 @@ def geoip_download(): tar = tarfile.open(tar_dbfile, "r:gz") for files in tar.getmembers(): if 'GeoLite2-City.mmdb' in files.name: - files.name = os.path.basename(files.name) + files.name = basename(files.name) tar.extract(files, abspath(join('.', 'data'))) - os.remove(tar_dbfile) + remove(tar_dbfile) + def geo_lookup(ipaddress): @@ -349,10 +353,10 @@ def geo_lookup(ipaddress): now = time.time() try: - dbinfo = os.stat(dbfile) + dbinfo = stat(dbfile) db_age = now - dbinfo.st_ctime if db_age > (35 * 86400): - os.remove(dbfile) + remove(dbfile) geoip_download() except FileNotFoundError: geoip_download() diff --git a/Varken/iniparser.py b/Varken/iniparser.py index 9a21e115..79ec04db 100644 --- a/Varken/iniparser.py +++ b/Varken/iniparser.py @@ -1,12 +1,14 @@ -import sys -import configparser -from os.path import abspath, join +from sys import path, exit +from os.path import abspath, dirname, join +path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + +from configparser import ConfigParser from Varken.helpers import OmbiServer, TautulliServer, SonarrServer, InfluxServer, RadarrServer class INIParser(object): def __init__(self): - self.config = configparser.ConfigParser() + self.config = ConfigParser() self.influx_server = InfluxServer() @@ -45,7 +47,7 @@ def parse_opts(self): # Parse Sonarr options try: if not self.config.getboolean('global', 'sonarr_server_ids'): - sys.exit('server_ids must be either false, or a comma-separated list of server ids') + exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'sonarr_server_ids'): self.sonarr_enabled = True except ValueError: @@ -75,7 +77,7 @@ def parse_opts(self): # Parse Radarr options try: if not self.config.getboolean('global', 
'radarr_server_ids'): - sys.exit('server_ids must be either false, or a comma-separated list of server ids') + exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'radarr_server_ids'): self.radarr_enabled = True except ValueError: @@ -102,7 +104,7 @@ def parse_opts(self): # Parse Tautulli options try: if not self.config.getboolean('global', 'tautulli_server_ids'): - sys.exit('server_ids must be either false, or a comma-separated list of server ids') + exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'tautulli_server_ids'): self.tautulli_enabled = True except ValueError: @@ -130,7 +132,7 @@ def parse_opts(self): # Parse Ombi Options try: if not self.config.getboolean('global', 'ombi_server_ids'): - sys.exit('server_ids must be either false, or a comma-separated list of server ids') + exit('server_ids must be either false, or a comma-separated list of server ids') elif self.config.getint('global', 'ombi_server_ids'): self.ombi_enabled = True except ValueError: diff --git a/Varken/ombi.py b/Varken/ombi.py index 3981250c..3e703bb1 100644 --- a/Varken/ombi.py +++ b/Varken/ombi.py @@ -1,3 +1,7 @@ +from sys import path +from os.path import abspath, dirname, join +path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + from requests import Session from datetime import datetime, timezone diff --git a/Varken/radarr.py b/Varken/radarr.py index 091bb771..e81cfdfd 100644 --- a/Varken/radarr.py +++ b/Varken/radarr.py @@ -1,3 +1,7 @@ +from sys import path +from os.path import abspath, dirname, join +path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + from requests import Session from datetime import datetime, timezone diff --git a/Varken/sonarr.py b/Varken/sonarr.py index ae09c2e0..1e9ae7c6 100644 --- a/Varken/sonarr.py +++ b/Varken/sonarr.py @@ -1,3 +1,7 @@ +from sys import path +from os.path import abspath, dirname, join 
+path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + from requests import Session from datetime import datetime, timezone, date, timedelta diff --git a/Varken/tautulli.py b/Varken/tautulli.py index 62f42a13..b77b2e3d 100644 --- a/Varken/tautulli.py +++ b/Varken/tautulli.py @@ -1,6 +1,11 @@ +from sys import path +from os.path import abspath, dirname, join +path.insert(0, abspath(join(dirname(__file__), '..', 'lib'))) + +from requests import Session from datetime import datetime, timezone from geoip2.errors import AddressNotFoundError -from requests import Session + from Varken.helpers import TautulliStream, geo_lookup from Varken.logger import logging diff --git a/lib/DateTime/DateTime.py b/lib/DateTime/DateTime.py new file mode 100644 index 00000000..cc6ca78b --- /dev/null +++ b/lib/DateTime/DateTime.py @@ -0,0 +1,1940 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +import math +import re +import sys +from time import altzone +from time import daylight +from time import gmtime +from time import localtime +from time import time +from time import timezone +from time import tzname +from datetime import datetime + +from zope.interface import implementer + +from .interfaces import IDateTime +from .interfaces import DateTimeError +from .interfaces import SyntaxError +from .interfaces import DateError +from .interfaces import TimeError +from .pytz_support import PytzCache + +if sys.version_info > (3, ): + import copyreg as copy_reg + basestring = str + long = int + explicit_unicode_type = type(None) +else: + import copy_reg + explicit_unicode_type = unicode + +default_datefmt = None + + +def getDefaultDateFormat(): + global default_datefmt + if default_datefmt is None: + try: + from App.config import getConfiguration + default_datefmt = getConfiguration().datetime_format + return default_datefmt + except Exception: + return 'us' + else: + return default_datefmt + +# To control rounding errors, we round system time to the nearest +# microsecond. Then delicate calculations can rely on that the +# maximum precision that needs to be preserved is known. 
+_system_time = time + + +def time(): + return round(_system_time(), 6) + +# Determine machine epoch +tm = ((0, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334), + (0, 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335)) +yr, mo, dy, hr, mn, sc = gmtime(0)[:6] +i = int(yr - 1) +to_year = int(i * 365 + i // 4 - i // 100 + i // 400 - 693960.0) +to_month = tm[yr % 4 == 0 and (yr % 100 != 0 or yr % 400 == 0)][mo] +EPOCH = ((to_year + to_month + dy + + (hr / 24.0 + mn / 1440.0 + sc / 86400.0)) * 86400) +jd1901 = 2415385 + +_TZINFO = PytzCache() + +INT_PATTERN = re.compile(r'([0-9]+)') +FLT_PATTERN = re.compile(r':([0-9]+\.[0-9]+)') +NAME_PATTERN = re.compile(r'([a-zA-Z]+)', re.I) +SPACE_CHARS = ' \t\n' +DELIMITERS = '-/.:,+' + +_MONTH_LEN = ((0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31), + (0, 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)) +_MONTHS = ('', 'January', 'February', 'March', 'April', 'May', 'June', + 'July', 'August', 'September', 'October', 'November', 'December') +_MONTHS_A = ('', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec') +_MONTHS_P = ('', 'Jan.', 'Feb.', 'Mar.', 'Apr.', 'May', 'June', + 'July', 'Aug.', 'Sep.', 'Oct.', 'Nov.', 'Dec.') +_MONTHMAP = {'january': 1, 'jan': 1, + 'february': 2, 'feb': 2, + 'march': 3, 'mar': 3, + 'april': 4, 'apr': 4, + 'may': 5, + 'june': 6, 'jun': 6, + 'july': 7, 'jul': 7, + 'august': 8, 'aug': 8, + 'september': 9, 'sep': 9, 'sept': 9, + 'october': 10, 'oct': 10, + 'november': 11, 'nov': 11, + 'december': 12, 'dec': 12} +_DAYS = ('Sunday', 'Monday', 'Tuesday', 'Wednesday', + 'Thursday', 'Friday', 'Saturday') +_DAYS_A = ('Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat') +_DAYS_P = ('Sun.', 'Mon.', 'Tue.', 'Wed.', 'Thu.', 'Fri.', 'Sat.') +_DAYMAP = {'sunday': 1, 'sun': 1, + 'monday': 2, 'mon': 2, + 'tuesday': 3, 'tues': 3, 'tue': 3, + 'wednesday': 4, 'wed': 4, + 'thursday': 5, 'thurs': 5, 'thur': 5, 'thu': 5, + 'friday': 6, 'fri': 6, + 'saturday': 7, 'sat': 7} + 
+numericTimeZoneMatch = re.compile(r'[+-][0-9][0-9][0-9][0-9]').match +iso8601Match = re.compile(r''' + (?P\d\d\d\d) # four digits year + (?:-? # one optional dash + (?: # followed by: + (?P\d\d\d # three digits year day + (?!\d)) # when there is no fourth digit + | # or: + W # one W + (?P\d\d) # two digits week + (?:-? # one optional dash + (?P\d) # one digit week day + )? # week day is optional + | # or: + (?P\d\d)? # two digits month + (?:-? # one optional dash + (?P\d\d)? # two digits day + )? # after day is optional + ) # + )? # after year is optional + (?:[T ] # one T or one whitespace + (?P\d\d) # two digits hour + (?::? # one optional colon + (?P\d\d)? # two digits minute + (?::? # one optional colon + (?P\d\d)? # two digits second + (?:[.,] # one dot or one comma + (?P\d+) # n digits fraction + )? # after second is optional + )? # after minute is optional + )? # after hour is optional + (?: # timezone: + (?PZ) # one Z + | # or: + (?P[-+]) # one plus or one minus as signal + (?P\d # one digit for hour offset... + (?:\d(?!\d$) # ...or two, if not the last two digits + )?) # second hour offset digit is optional + (?::? # one optional colon + (?P\d\d) # two digits minute offset + )? # after hour offset is optional + )? # timezone is optional + )? # time is optional + (?P.*) # store the extra garbage +''', re.VERBOSE).match + + +def _findLocalTimeZoneName(isDST): + if not daylight: + # Daylight savings does not occur in this time zone. + isDST = 0 + try: + # Get the name of the current time zone depending + # on DST. + _localzone = PytzCache._zmap[tzname[isDST].lower()] + except: + try: + # Generate a GMT-offset zone name. 
+ if isDST: + localzone = altzone + else: + localzone = timezone + offset = (-localzone / 3600.0) + majorOffset = int(offset) + if majorOffset != 0: + minorOffset = abs(int((offset % majorOffset) * 60.0)) + else: + minorOffset = 0 + m = majorOffset >= 0 and '+' or '' + lz = '%s%0.02d%0.02d' % (m, majorOffset, minorOffset) + _localzone = PytzCache._zmap[('GMT%s' % lz).lower()] + except: + _localzone = '' + return _localzone + +_localzone0 = _findLocalTimeZoneName(0) +_localzone1 = _findLocalTimeZoneName(1) +_multipleZones = (_localzone0 != _localzone1) + +# Some utility functions for calculating dates: + + +def _calcSD(t): + # Returns timezone-independent days since epoch and the fractional + # part of the days. + dd = t + EPOCH - 86400.0 + d = dd / 86400.0 + s = d - math.floor(d) + return s, d + + +def _calcDependentSecond(tz, t): + # Calculates the timezone-dependent second (integer part only) + # from the timezone-independent second. + fset = _tzoffset(tz, t) + return fset + long(math.floor(t)) + long(EPOCH) - 86400 + + +def _calcDependentSecond2(yr, mo, dy, hr, mn, sc): + # Calculates the timezone-dependent second (integer part only) + # from the date given. + ss = int(hr) * 3600 + int(mn) * 60 + int(sc) + x = long(_julianday(yr, mo, dy) - jd1901) * 86400 + ss + return x + + +def _calcIndependentSecondEtc(tz, x, ms): + # Derive the timezone-independent second from the timezone + # dependent second. + fsetAtEpoch = _tzoffset(tz, 0.0) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + # nearTime is now within an hour of being correct. + # Recalculate t according to DST. + fset = long(_tzoffset(tz, nearTime)) + d = (x - fset) / 86400.0 + (ms / 86400.0) + t = x - fset - long(EPOCH) + 86400 + ms + micros = (x + 86400 - fset) * 1000000 + \ + long(round(ms * 1000000.0)) - long(EPOCH * 1000000.0) + s = d - math.floor(d) + return (s, d, t, micros) + + +def _calcHMS(x, ms): + # hours, minutes, seconds from integer and float. 
+ hr = x // 3600 + x = x - hr * 3600 + mn = x // 60 + sc = x - mn * 60 + ms + return (hr, mn, sc) + + +def _calcYMDHMS(x, ms): + # x is a timezone-dependent integer of seconds. + # Produces yr,mo,dy,hr,mn,sc. + yr, mo, dy = _calendarday(x // 86400 + jd1901) + x = int(x - (x // 86400) * 86400) + hr = x // 3600 + x = x - hr * 3600 + mn = x // 60 + sc = x - mn * 60 + ms + return (yr, mo, dy, hr, mn, sc) + + +def _julianday(yr, mo, dy): + y, m, d = long(yr), long(mo), long(dy) + if m > 12: + y = y + m // 12 + m = m % 12 + elif m < 1: + m = -m + y = y - m // 12 - 1 + m = 12 - m % 12 + if y > 0: + yr_correct = 0 + else: + yr_correct = 3 + if m < 3: + y, m = y - 1, m + 12 + if y * 10000 + m * 100 + d > 15821014: + b = 2 - y // 100 + y // 400 + else: + b = 0 + return ((1461 * y - yr_correct) // 4 + + 306001 * (m + 1) // 10000 + d + 1720994 + b) + + +def _calendarday(j): + j = long(j) + if (j < 2299160): + b = j + 1525 + else: + a = (4 * j - 7468861) // 146097 + b = j + 1526 + a - a // 4 + c = (20 * b - 2442) // 7305 + d = 1461 * c // 4 + e = 10000 * (b - d) // 306001 + dy = int(b - d - 306001 * e // 10000) + mo = (e < 14) and int(e - 1) or int(e - 13) + yr = (mo > 2) and (c - 4716) or (c - 4715) + return (int(yr), int(mo), int(dy)) + + +def _tzoffset(tz, t): + """Returns the offset in seconds to GMT from a specific timezone (tz) at + a specific time (t). NB! The _tzoffset result is the same same sign as + the time zone, i.e. GMT+2 has a 7200 second offset. This is the opposite + sign of time.timezone which (confusingly) is -7200 for GMT+2.""" + try: + return _TZINFO[tz].info(t)[0] + except Exception: + if numericTimeZoneMatch(tz) is not None: + return int(tz[0:3]) * 3600 + int(tz[0] + tz[3:5]) * 60 + else: + return 0 # ?? + + +def _correctYear(year): + # Y2K patch. + if year >= 0 and year < 100: + # 00-69 means 2000-2069, 70-99 means 1970-1999. 
+ if year < 70: + year = 2000 + year + else: + year = 1900 + year + return year + + +def safegmtime(t): + '''gmtime with a safety zone.''' + try: + return gmtime(t) + except (ValueError, OverflowError): + raise TimeError('The time %f is beyond the range of this Python ' + 'implementation.' % float(t)) + + +def safelocaltime(t): + '''localtime with a safety zone.''' + try: + return localtime(t) + except (ValueError, OverflowError): + raise TimeError('The time %f is beyond the range of this Python ' + 'implementation.' % float(t)) + + +def _tzoffset2rfc822zone(seconds): + """Takes an offset, such as from _tzoffset(), and returns an rfc822 + compliant zone specification. Please note that the result of + _tzoffset() is the negative of what time.localzone and time.altzone is. + """ + return "%+03d%02d" % divmod((seconds // 60), 60) + + +def _tzoffset2iso8601zone(seconds): + """Takes an offset, such as from _tzoffset(), and returns an ISO 8601 + compliant zone specification. Please note that the result of + _tzoffset() is the negative of what time.localzone and time.altzone is. + """ + return "%+03d:%02d" % divmod((seconds // 60), 60) + + +def Timezones(): + """Return the list of recognized timezone names""" + return sorted(list(PytzCache._zmap.values())) + + +class strftimeFormatter(object): + + def __init__(self, dt, format): + self.dt = dt + self.format = format + + def __call__(self): + return self.dt.strftime(self.format) + + +@implementer(IDateTime) +class DateTime(object): + """DateTime objects represent instants in time and provide + interfaces for controlling its representation without + affecting the absolute value of the object. + + DateTime objects may be created from a wide variety of string + or numeric data, or may be computed from other DateTime objects. + DateTimes support the ability to convert their representations + to many major timezones, as well as the ablility to create a + DateTime object in the context of a given timezone. 
+ + DateTime objects provide partial numerical behavior: + + - Two date-time objects can be subtracted to obtain a time, + in days between the two. + + - A date-time object and a positive or negative number may + be added to obtain a new date-time object that is the given + number of days later than the input date-time object. + + - A positive or negative number and a date-time object may + be added to obtain a new date-time object that is the given + number of days later than the input date-time object. + + - A positive or negative number may be subtracted from a + date-time object to obtain a new date-time object that is + the given number of days earlier than the input date-time + object. + + DateTime objects may be converted to integer, long, or float + numbers of days since January 1, 1901, using the standard int, + long, and float functions (Compatibility Note: int, long and + float return the number of days since 1901 in GMT rather than + local machine timezone). DateTime objects also provide access + to their value in a float format usable with the python time + module, provided that the value of the object falls in the + range of the epoch-based time module, and as a datetime.datetime + object. 
+ + A DateTime object should be considered immutable; all conversion + and numeric operations return a new DateTime object rather than + modify the current object.""" + + # For security machinery: + __roles__ = None + __allow_access_to_unprotected_subobjects__ = 1 + + # Limit the amount of instance attributes + __slots__ = ( + '_timezone_naive', + '_tz', + '_dayoffset', + '_year', + '_month', + '_day', + '_hour', + '_minute', + '_second', + '_nearsec', + '_d', + '_micros', + 'time', + ) + + def __init__(self, *args, **kw): + """Return a new date-time object""" + try: + return self._parse_args(*args, **kw) + except (DateError, TimeError, DateTimeError): + raise + except Exception: + raise SyntaxError('Unable to parse %s, %s' % (args, kw)) + + def __getstate__(self): + # We store a float of _micros, instead of the _micros long, as we most + # often don't have any sub-second resolution and can save those bytes + return (self._micros / 1000000.0, + getattr(self, '_timezone_naive', False), + self._tz) + + def __setstate__(self, value): + if isinstance(value, tuple): + self._parse_args(value[0], value[2]) + self._micros = long(value[0] * 1000000) + self._timezone_naive = value[1] + else: + for k, v in value.items(): + if k in self.__slots__: + setattr(self, k, v) + # BBB: support for very old DateTime pickles + if '_micros' not in value: + self._micros = long(value['_t'] * 1000000) + if '_timezone_naive' not in value: + self._timezone_naive = False + + def _parse_args(self, *args, **kw): + """Return a new date-time object. + + A DateTime object always maintains its value as an absolute + UTC time, and is represented in the context of some timezone + based on the arguments used to create the object. A DateTime + object's methods return values based on the timezone context. + + Note that in all cases the local machine timezone is used for + representation if no timezone is specified. + + DateTimes may be created with from zero to seven arguments. 
+ + - If the function is called with no arguments or with None, + then the current date/time is returned, represented in the + timezone of the local machine. + + - If the function is invoked with a single string argument + which is a recognized timezone name, an object representing + the current time is returned, represented in the specified + timezone. + + - If the function is invoked with a single string argument + representing a valid date/time, an object representing + that date/time will be returned. + + As a general rule, any date-time representation that is + recognized and unambigous to a resident of North America + is acceptable. The reason for this qualification is that + in North America, a date like: 2/1/1994 is interpreted + as February 1, 1994, while in some parts of the world, + it is interpreted as January 2, 1994. + + A date/time string consists of two components, a date + component and an optional time component, separated by one + or more spaces. If the time component is omited, 12:00am is + assumed. Any recognized timezone name specified as the final + element of the date/time string will be used for computing + the date/time value. If you create a DateTime with the + string 'Mar 9, 1997 1:45pm US/Pacific', the value will + essentially be the same as if you had captured time.time() + at the specified date and time on a machine in that timezone: + +
+            e=DateTime('US/Eastern')
+            # returns current date/time, represented in US/Eastern.
+
+            x=DateTime('1997/3/9 1:45pm')
+            # returns specified time, represented in local machine zone.
+
+            y=DateTime('Mar 9, 1997 13:45:00')
+            # y is equal to x
+            
+ + The date component consists of year, month, and day + values. The year value must be a one-, two-, or + four-digit integer. If a one- or two-digit year is + used, the year is assumed to be in the twentieth + century. The month may be an integer, from 1 to 12, a + month name, or a month abreviation, where a period may + optionally follow the abreviation. The day must be an + integer from 1 to the number of days in the month. The + year, month, and day values may be separated by + periods, hyphens, forward, shashes, or spaces. Extra + spaces are permitted around the delimiters. Year, + month, and day values may be given in any order as long + as it is possible to distinguish the components. If all + three components are numbers that are less than 13, + then a a month-day-year ordering is assumed. + + The time component consists of hour, minute, and second + values separated by colons. The hour value must be an + integer between 0 and 23 inclusively. The minute value + must be an integer between 0 and 59 inclusively. The + second value may be an integer value between 0 and + 59.999 inclusively. The second value or both the minute + and second values may be ommitted. The time may be + followed by am or pm in upper or lower case, in which + case a 12-hour clock is assumed. + + New in Zope 2.4: + The DateTime constructor automatically detects and handles + ISO8601 compliant dates (YYYY-MM-DDThh:ss:mmTZD). + + New in Zope 2.9.6: + The existing ISO8601 parser was extended to support almost + the whole ISO8601 specification. New formats includes: + +
+            y=DateTime('1993-045')
+            # returns the 45th day from 1993, which is 14th February
+
+            w=DateTime('1993-W06-7')
+            # returns the 7th day from the 6th week from 1993, which
+            # is also 14th February
+            
+ + See http://en.wikipedia.org/wiki/ISO_8601 for full specs. + + Note that the Zope DateTime parser assumes timezone naive ISO + strings to be in UTC rather than local time as specified. + + - If the DateTime function is invoked with a single Numeric + argument, the number is assumed to be a floating point value + such as that returned by time.time(). + + A DateTime object is returned that represents the GMT value + of the time.time() float represented in the local machine's + timezone. + + - If the DateTime function is invoked with a single argument + that is a DateTime instane, a copy of the passed object will + be created. + + - New in 2.11: + The DateTime function may now be invoked with a single argument + that is a datetime.datetime instance. DateTimes may be converted + back to datetime.datetime objects with asdatetime(). + DateTime instances may be converted to a timezone naive + datetime.datetime in UTC with utcdatetime(). + + - If the function is invoked with two numeric arguments, then + the first is taken to be an integer year and the second + argument is taken to be an offset in days from the beginning + of the year, in the context of the local machine timezone. + + The date-time value returned is the given offset number of + days from the beginning of the given year, represented in + the timezone of the local machine. The offset may be positive + or negative. + + Two-digit years are assumed to be in the twentieth + century. + + - If the function is invoked with two arguments, the first + a float representing a number of seconds past the epoch + in gmt (such as those returned by time.time()) and the + second a string naming a recognized timezone, a DateTime + with a value of that gmt time will be returned, represented + in the given timezone. + +
+            import time
+            t=time.time()
+
+            now_east=DateTime(t,'US/Eastern')
+            # Time t represented as US/Eastern
+
+            now_west=DateTime(t,'US/Pacific')
+            # Time t represented as US/Pacific
+
+            # now_east == now_west
+            # only their representations are different
+            
+ + - If the function is invoked with three or more numeric + arguments, then the first is taken to be an integer + year, the second is taken to be an integer month, and + the third is taken to be an integer day. If the + combination of values is not valid, then a + DateError is raised. Two-digit years are assumed + to be in the twentieth century. The fourth, fifth, and + sixth arguments specify a time in hours, minutes, and + seconds; hours and minutes should be positive integers + and seconds is a positive floating point value, all of + these default to zero if not given. An optional string may + be given as the final argument to indicate timezone (the + effect of this is as if you had taken the value of time.time() + at that time on a machine in the specified timezone). + + New in Zope 2.7: + A new keyword parameter "datefmt" can be passed to the + constructor. If set to "international", the constructor + is forced to treat ambigious dates as "days before month + before year". This useful if you need to parse non-US + dates in a reliable way + + In any case that a floating point number of seconds is given + or derived, it's rounded to the nearest millisecond. + + If a string argument passed to the DateTime constructor cannot be + parsed, it will raise DateTime.SyntaxError. Invalid date components + will raise a DateError, while invalid time or timezone components + will raise a DateTimeError. + + The module function Timezones() will return a list of the (common) + timezones recognized by the DateTime module. Recognition of + timezone names is case-insensitive. 
+ """ + + datefmt = kw.get('datefmt', getDefaultDateFormat()) + d = t = s = None + ac = len(args) + microsecs = None + + if ac == 10: + # Internal format called only by DateTime + yr, mo, dy, hr, mn, sc, tz, t, d, s = args + elif ac == 11: + # Internal format that includes milliseconds (from the epoch) + yr, mo, dy, hr, mn, sc, tz, t, d, s, millisecs = args + microsecs = millisecs * 1000 + + elif ac == 12: + # Internal format that includes microseconds (from the epoch) and a + # flag indicating whether this was constructed in a timezone naive + # manner + yr, mo, dy, hr, mn, sc, tz, t, d, s, microsecs, tznaive = args + if tznaive is not None: # preserve this information + self._timezone_naive = tznaive + + elif not args or (ac and args[0] is None): + # Current time, to be displayed in local timezone + t = time() + lt = safelocaltime(t) + tz = self.localZone(lt) + ms = (t - math.floor(t)) + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc = lt[:6] + sc = sc + ms + self._timezone_naive = False + + elif ac == 1: + arg = args[0] + + if arg == '': + raise SyntaxError(arg) + + if isinstance(arg, DateTime): + """Construct a new DateTime instance from a given + DateTime instance. 
+ """ + t = arg.timeTime() + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc, tz = arg.parts() + + elif isinstance(arg, datetime): + yr, mo, dy, hr, mn, sc, numerictz, tznaive = \ + self._parse_iso8601_preserving_tznaive(arg.isoformat()) + if arg.tzinfo is None: + self._timezone_naive = True + tz = None + else: + self._timezone_naive = False + # if we have a pytz tzinfo, use the `zone` attribute + # as a key + tz = getattr(arg.tzinfo, 'zone', numerictz) + ms = sc - math.floor(sc) + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + + if tz: + try: + zone = _TZINFO[tz] + except DateTimeError: + try: + zone = _TZINFO[numerictz] + except DateTimeError: + raise DateTimeError( + 'Unknown time zone in date: %s' % arg) + tz = zone.tzinfo.zone + else: + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + elif (isinstance(arg, basestring) and + arg.lower() in _TZINFO._zidx): + # Current time, to be displayed in specified timezone + t, tz = time(), _TZINFO._zmap[arg.lower()] + ms = (t - math.floor(t)) + # Use integer arithmetic as much as possible. 
+ s, d = _calcSD(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + + elif isinstance(arg, basestring): + # Date/time string + iso8601 = iso8601Match(arg.strip()) + fields_iso8601 = iso8601 and iso8601.groupdict() or {} + if fields_iso8601 and not fields_iso8601.get('garbage'): + yr, mo, dy, hr, mn, sc, tz, tznaive = \ + self._parse_iso8601_preserving_tznaive(arg) + self._timezone_naive = tznaive + else: + yr, mo, dy, hr, mn, sc, tz = self._parse(arg, datefmt) + + if not self._validDate(yr, mo, dy): + raise DateError('Invalid date: %s' % arg) + if not self._validTime(hr, mn, int(sc)): + raise TimeError('Invalid time: %s' % arg) + ms = sc - math.floor(sc) + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + + if tz: + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError( + 'Unknown time zone in date: %s' % arg) + else: + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + else: + # Seconds from epoch, gmt + t = arg + lt = safelocaltime(t) + tz = self.localZone(lt) + ms = (t - math.floor(t)) + s, d = _calcSD(t) + yr, mo, dy, hr, mn, sc = lt[:6] + sc = sc + ms + + elif ac == 2: + if isinstance(args[1], basestring): + # Seconds from epoch (gmt) and timezone + t, tz = args + ms = (t - math.floor(t)) + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError('Unknown time zone: %s' % tz) + # Use integer arithmetic as much as possible. 
+ s, d = _calcSD(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + else: + # Year, julian expressed in local zone + t = time() + lt = safelocaltime(t) + tz = self.localZone(lt) + yr, jul = args + yr = _correctYear(yr) + d = (_julianday(yr, 1, 0) - jd1901) + jul + x_float = d * 86400.0 + x_floor = math.floor(x_float) + ms = x_float - x_floor + x = long(x_floor) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + else: + # Explicit format + yr, mo, dy = args[:3] + hr, mn, sc, tz = 0, 0, 0, 0 + yr = _correctYear(yr) + if not self._validDate(yr, mo, dy): + raise DateError('Invalid date: %s' % (args, )) + args = args[3:] + if args: + hr, args = args[0], args[1:] + if args: + mn, args = args[0], args[1:] + if args: + sc, args = args[0], args[1:] + if args: + tz, args = args[0], args[1:] + if args: + raise DateTimeError('Too many arguments') + if not self._validTime(hr, mn, sc): + raise TimeError('Invalid time: %s' % repr(args)) + + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + ms = sc - math.floor(sc) + if tz: + try: + tz = _TZINFO._zmap[tz.lower()] + except KeyError: + if numericTimeZoneMatch(tz) is None: + raise DateTimeError('Unknown time zone: %s' % tz) + else: + # Get local time zone name + tz = self._calcTimezoneName(x, ms) + s, d, t, microsecs = _calcIndependentSecondEtc(tz, x, ms) + + self._dayoffset = int((_julianday(yr, mo, dy) + 2) % 7) + # Round to nearest microsecond in platform-independent way. You + # cannot rely on C sprintf (Python '%') formatting to round + # consistently; doing it ourselves ensures that all but truly + # horrid C sprintf implementations will yield the same result + # x-platform, provided the format asks for exactly 6 digits after + # the decimal point. 
+ sc = round(sc, 6) + if sc >= 60.0: # can happen if, e.g., orig sc was 59.9999999 + sc = 59.999999 + self._nearsec = math.floor(sc) + self._year, self._month, self._day = yr, mo, dy + self._hour, self._minute, self._second = hr, mn, sc + self.time, self._d, self._tz = s, d, tz + # self._micros is the time since the epoch + # in long integer microseconds. + if microsecs is None: + microsecs = long(math.floor(t * 1000000.0)) + self._micros = microsecs + + def localZone(self, ltm=None): + '''Returns the time zone on the given date. The time zone + can change according to daylight savings.''' + if not _multipleZones: + return _localzone0 + if ltm is None: + ltm = localtime(time()) + isDST = ltm[8] + lz = isDST and _localzone1 or _localzone0 + return lz + + def _calcTimezoneName(self, x, ms): + # Derive the name of the local time zone at the given + # timezone-dependent second. + if not _multipleZones: + return _localzone0 + fsetAtEpoch = _tzoffset(_localzone0, 0.0) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + # nearTime is within an hour of being correct. + try: + ltm = safelocaltime(nearTime) + except: + # We are beyond the range of Python's date support. + # Hopefully we can assume that daylight savings schedules + # repeat every 28 years. Calculate the name of the + # time zone using a supported range of years. + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, 0) + yr = ((yr - 1970) % 28) + 1970 + x = _calcDependentSecond2(yr, mo, dy, hr, mn, sc) + nearTime = x - fsetAtEpoch - long(EPOCH) + 86400 + ms + + # nearTime might still be negative if we are east of Greenwich. + # But we can asume on 1969/12/31 were no timezone changes. 
+ nearTime = max(0, nearTime) + + ltm = safelocaltime(nearTime) + tz = self.localZone(ltm) + return tz + + def _parse(self, st, datefmt=getDefaultDateFormat()): + # Parse date-time components from a string + month = year = tz = tm = None + ValidZones = _TZINFO._zidx + TimeModifiers = ['am', 'pm'] + + # Find timezone first, since it should always be the last + # element, and may contain a slash, confusing the parser. + st = st.strip() + sp = st.split() + tz = sp[-1] + if tz and (tz.lower() in ValidZones): + self._timezone_naive = False + st = ' '.join(sp[:-1]) + else: + self._timezone_naive = True + tz = None # Decide later, since the default time zone + # could depend on the date. + + ints = [] + i = 0 + l = len(st) + while i < l: + while i < l and st[i] in SPACE_CHARS: + i += 1 + if i < l and st[i] in DELIMITERS: + d = st[i] + i += 1 + else: + d = '' + while i < l and st[i] in SPACE_CHARS: + i += 1 + + # The float pattern needs to look back 1 character, because it + # actually looks for a preceding colon like ':33.33'. This is + # needed to avoid accidentally matching the date part of a + # dot-separated date string such as '1999.12.31'. 
+ if i > 0: + b = i - 1 + else: + b = i + + ts_results = FLT_PATTERN.match(st, b) + if ts_results: + s = ts_results.group(1) + i = i + len(s) + ints.append(float(s)) + continue + + #AJ + ts_results = INT_PATTERN.match(st, i) + if ts_results: + s = ts_results.group(0) + + ls = len(s) + i = i + ls + if (ls == 4 and d and d in '+-' and + (len(ints) + (not not month) >= 3)): + tz = '%s%s' % (d, s) + else: + v = int(s) + ints.append(v) + continue + + ts_results = NAME_PATTERN.match(st, i) + if ts_results: + s = ts_results.group(0).lower() + i = i + len(s) + if i < l and st[i] == '.': + i += 1 + # Check for month name: + _v = _MONTHMAP.get(s) + if _v is not None: + if month is None: + month = _v + else: + raise SyntaxError(st) + continue + # Check for time modifier: + if s in TimeModifiers: + if tm is None: + tm = s + else: + raise SyntaxError(st) + continue + # Check for and skip day of week: + if s in _DAYMAP: + continue + + raise SyntaxError(st) + + day = None + if ints[-1] > 60 and d not in ('.', ':', '/') and len(ints) > 2: + year = ints[-1] + del ints[-1] + if month: + day = ints[0] + del ints[:1] + else: + if datefmt == "us": + month = ints[0] + day = ints[1] + else: + month = ints[1] + day = ints[0] + del ints[:2] + elif month: + if len(ints) > 1: + if ints[0] > 31: + year = ints[0] + day = ints[1] + else: + year = ints[1] + day = ints[0] + del ints[:2] + elif len(ints) > 2: + if ints[0] > 31: + year = ints[0] + if ints[1] > 12: + day = ints[1] + month = ints[2] + else: + day = ints[2] + month = ints[1] + if ints[1] > 31: + year = ints[1] + if ints[0] > 12 and ints[2] <= 12: + day = ints[0] + month = ints[2] + elif ints[2] > 12 and ints[0] <= 12: + day = ints[2] + month = ints[0] + elif ints[2] > 31: + year = ints[2] + if ints[0] > 12: + day = ints[0] + month = ints[1] + else: + if datefmt == "us": + day = ints[1] + month = ints[0] + else: + day = ints[0] + month = ints[1] + + elif ints[0] <= 12: + month = ints[0] + day = ints[1] + year = ints[2] + del ints[:3] + 
+ if day is None: + # Use today's date. + year, month, day = localtime(time())[:3] + + year = _correctYear(year) + if year < 1000: + raise SyntaxError(st) + + leap = year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) + try: + if not day or day > _MONTH_LEN[leap][month]: + raise DateError(st) + except IndexError: + raise DateError(st) + + tod = 0 + if ints: + i = ints[0] + # Modify hour to reflect am/pm + if tm and (tm == 'pm') and i < 12: + i += 12 + if tm and (tm == 'am') and i == 12: + i = 0 + if i > 24: + raise TimeError(st) + tod = tod + int(i) * 3600 + del ints[0] + if ints: + i = ints[0] + if i > 60: + raise TimeError(st) + tod = tod + int(i) * 60 + del ints[0] + if ints: + i = ints[0] + if i > 60: + raise TimeError(st) + tod = tod + i + del ints[0] + if ints: + raise SyntaxError(st) + + tod_int = int(math.floor(tod)) + ms = tod - tod_int + hr, mn, sc = _calcHMS(tod_int, ms) + if not tz: + # Figure out what time zone it is in the local area + # on the given date. + x = _calcDependentSecond2(year, month, day, hr, mn, sc) + tz = self._calcTimezoneName(x, ms) + + return year, month, day, hr, mn, sc, tz + + # Internal methods + def _validDate(self, y, m, d): + if m < 1 or m > 12 or y < 0 or d < 1 or d > 31: + return 0 + return d <= _MONTH_LEN[ + (y % 4 == 0 and (y % 100 != 0 or y % 400 == 0))][m] + + def _validTime(self, h, m, s): + return h >= 0 and h <= 23 and m >= 0 and m <= 59 and s >= 0 and s < 60 + + def __getattr__(self, name): + if '%' in name: + return strftimeFormatter(self, name) + raise AttributeError(name) + + # Conversion and comparison methods + + def timeTime(self): + """Return the date/time as a floating-point number in UTC, + in the format used by the python time module. + + Note that it is possible to create date/time values with + DateTime that have no meaningful value to the time module. 
+ """ + return self._micros / 1000000.0 + + def toZone(self, z): + """Return a DateTime with the value as the current + object, represented in the indicated timezone. + """ + t, tz = self._t, _TZINFO._zmap[z.lower()] + micros = self.micros() + tznaive = False # you're performing a timzone change, can't be naive + + try: + # Try to use time module for speed. + yr, mo, dy, hr, mn, sc = safegmtime(t + _tzoffset(tz, t))[:6] + sc = self._second + return self.__class__(yr, mo, dy, hr, mn, sc, tz, t, + self._d, self.time, micros, tznaive) + except Exception: + # gmtime can't perform the calculation in the given range. + # Calculate the difference between the two time zones. + tzdiff = _tzoffset(tz, t) - _tzoffset(self._tz, t) + if tzdiff == 0: + return self + sc = self._second + ms = sc - math.floor(sc) + x = _calcDependentSecond2(self._year, self._month, self._day, + self._hour, self._minute, sc) + x_new = x + tzdiff + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x_new, ms) + return self.__class__(yr, mo, dy, hr, mn, sc, tz, t, + self._d, self.time, micros, tznaive) + + def isFuture(self): + """Return true if this object represents a date/time + later than the time of the call. + """ + return (self._t > time()) + + def isPast(self): + """Return true if this object represents a date/time + earlier than the time of the call. + """ + return (self._t < time()) + + def isCurrentYear(self): + """Return true if this object represents a date/time + that falls within the current year, in the context + of this object\'s timezone representation. + """ + t = time() + return safegmtime(t + _tzoffset(self._tz, t))[0] == self._year + + def isCurrentMonth(self): + """Return true if this object represents a date/time + that falls within the current month, in the context + of this object\'s timezone representation. 
+ """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return gmt[0] == self._year and gmt[1] == self._month + + def isCurrentDay(self): + """Return true if this object represents a date/time + that falls within the current day, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day) + + def isCurrentHour(self): + """Return true if this object represents a date/time + that falls within the current hour, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day and gmt[3] == self._hour) + + def isCurrentMinute(self): + """Return true if this object represents a date/time + that falls within the current minute, in the context + of this object\'s timezone representation. + """ + t = time() + gmt = safegmtime(t + _tzoffset(self._tz, t)) + return (gmt[0] == self._year and gmt[1] == self._month and + gmt[2] == self._day and gmt[3] == self._hour and + gmt[4] == self._minute) + + def earliestTime(self): + """Return a new DateTime object that represents the earliest + possible time (in whole seconds) that still falls within + the current object\'s day, in the object\'s timezone context. + """ + return self.__class__( + self._year, self._month, self._day, 0, 0, 0, self._tz) + + def latestTime(self): + """Return a new DateTime object that represents the latest + possible time (in whole seconds) that still falls within + the current object\'s day, in the object\'s timezone context. + """ + return self.__class__( + self._year, self._month, self._day, 23, 59, 59, self._tz) + + def greaterThan(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. 
+ + Returns true if the object represents a date/time greater + than the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros > long(t * 1000000) + try: + return self._micros > t._micros + except AttributeError: + return self._micros > t + + __gt__ = greaterThan + + def greaterThanEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time greater + than or equal to the specified DateTime or time module style + time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros >= long(t * 1000000) + try: + return self._micros >= t._micros + except AttributeError: + return self._micros >= t + + __ge__ = greaterThanEqualTo + + def equalTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time equal to + the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros == long(t * 1000000) + try: + return self._micros == t._micros + except AttributeError: + return self._micros == t + + def notEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time not equal + to the specified DateTime or time module style time. 
+ + Revised to give more correct results through comparison of + long integer microseconds. + """ + return not self.equalTo(t) + + def __eq__(self, t): + """Compare this DateTime object to another DateTime object. + Return True if their internal state is the same. Two objects + representing the same time in different timezones are regared as + unequal. Use the equalTo method if you are only interested in them + refering to the same moment in time. + """ + if not isinstance(t, DateTime): + return False + return (self._micros, self._tz) == (t._micros, t._tz) + + def __ne__(self, t): + return not self.__eq__(t) + + def lessThan(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time less than + the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros < long(t * 1000000) + try: + return self._micros < t._micros + except AttributeError: + return self._micros < t + + __lt__ = lessThan + + def lessThanEqualTo(self, t): + """Compare this DateTime object to another DateTime object + OR a floating point number such as that which is returned + by the python time module. + + Returns true if the object represents a date/time less than + or equal to the specified DateTime or time module style time. + + Revised to give more correct results through comparison of + long integer microseconds. + """ + if t is None: + t = 0 + if isinstance(t, float): + return self._micros <= long(t * 1000000) + try: + return self._micros <= t._micros + except AttributeError: + return self._micros <= t + + __le__ = lessThanEqualTo + + def isLeapYear(self): + """Return true if the current year (in the context of the + object\'s timezone) is a leap year. 
+ """ + return (self._year % 4 == 0 and + (self._year % 100 != 0 or self._year % 400 == 0)) + + def dayOfYear(self): + """Return the day of the year, in context of the timezone + representation of the object. + """ + d = int(self._d + (_tzoffset(self._tz, self._t) / 86400.0)) + return int((d + jd1901) - _julianday(self._year, 1, 0)) + + # Component access + def parts(self): + """Return a tuple containing the calendar year, month, + day, hour, minute second and timezone of the object. + """ + return (self._year, self._month, self._day, self._hour, + self._minute, self._second, self._tz) + + def timezone(self): + """Return the timezone in which the object is represented.""" + return self._tz + + def tzoffset(self): + """Return the timezone offset for the objects timezone.""" + return _tzoffset(self._tz, self._t) + + def year(self): + """Return the calendar year of the object.""" + return self._year + + def month(self): + """Return the month of the object as an integer.""" + return self._month + + @property + def _fmon(self): + return _MONTHS[self._month] + + def Month(self): + """Return the full month name.""" + return self._fmon + + @property + def _amon(self): + return _MONTHS_A[self._month] + + def aMonth(self): + """Return the abreviated month name.""" + return self._amon + + def Mon(self): + """Compatibility: see aMonth.""" + return self._amon + + @property + def _pmon(self): + return _MONTHS_P[self._month] + + def pMonth(self): + """Return the abreviated (with period) month name.""" + return self._pmon + + def Mon_(self): + """Compatibility: see pMonth.""" + return self._pmon + + def day(self): + """Return the integer day.""" + return self._day + + @property + def _fday(self): + return _DAYS[self._dayoffset] + + def Day(self): + """Return the full name of the day of the week.""" + return self._fday + + def DayOfWeek(self): + """Compatibility: see Day.""" + return self._fday + + @property + def _aday(self): + return _DAYS_A[self._dayoffset] + + def aDay(self): + 
"""Return the abreviated name of the day of the week.""" + return self._aday + + @property + def _pday(self): + return _DAYS_P[self._dayoffset] + + def pDay(self): + """Return the abreviated (with period) name of the day of the week.""" + return self._pday + + def Day_(self): + """Compatibility: see pDay.""" + return self._pday + + def dow(self): + """Return the integer day of the week, where sunday is 0.""" + return self._dayoffset + + def dow_1(self): + """Return the integer day of the week, where sunday is 1.""" + return self._dayoffset + 1 + + @property + def _pmhour(self): + hr = self._hour + if hr > 12: + return hr - 12 + return hr or 12 + + def h_12(self): + """Return the 12-hour clock representation of the hour.""" + return self._pmhour + + def h_24(self): + """Return the 24-hour clock representation of the hour.""" + return self._hour + + @property + def _pm(self): + hr = self._hour + if hr >= 12: + return 'pm' + return 'am' + + def ampm(self): + """Return the appropriate time modifier (am or pm).""" + return self._pm + + def hour(self): + """Return the 24-hour clock representation of the hour.""" + return self._hour + + def minute(self): + """Return the minute.""" + return self._minute + + def second(self): + """Return the second.""" + return self._second + + def millis(self): + """Return the millisecond since the epoch in GMT.""" + return self._micros // 1000 + + def micros(self): + """Return the microsecond since the epoch in GMT.""" + return self._micros + + def timezoneNaive(self): + """The python datetime module introduces the idea of distinguishing + between timezone aware and timezone naive datetime values. For lossless + conversion to and from datetime.datetime record if we record this + information using True / False. DateTime makes no distinction, when we + don't have any information we return None here. 
+ """ + try: + return self._timezone_naive + except AttributeError: + return None + + def strftime(self, format): + """Format the date/time using the *current timezone representation*.""" + x = _calcDependentSecond2(self._year, self._month, self._day, + self._hour, self._minute, self._second) + ltz = self._calcTimezoneName(x, 0) + tzdiff = _tzoffset(ltz, self._t) - _tzoffset(self._tz, self._t) + zself = self + tzdiff / 86400.0 + microseconds = int((zself._second - zself._nearsec) * 1000000) + unicode_format = False + if isinstance(format, explicit_unicode_type): + format = format.encode('utf-8') + unicode_format = True + ds = datetime(zself._year, zself._month, zself._day, zself._hour, + zself._minute, int(zself._nearsec), + microseconds).strftime(format) + if unicode_format: + return ds.decode('utf-8') + return ds + + # General formats from previous DateTime + def Date(self): + """Return the date string for the object.""" + return "%s/%2.2d/%2.2d" % (self._year, self._month, self._day) + + def Time(self): + """Return the time string for an object to the nearest second.""" + return '%2.2d:%2.2d:%2.2d' % (self._hour, self._minute, self._nearsec) + + def TimeMinutes(self): + """Return the time string for an object not showing seconds.""" + return '%2.2d:%2.2d' % (self._hour, self._minute) + + def AMPM(self): + """Return the time string for an object to the nearest second.""" + return '%2.2d:%2.2d:%2.2d %s' % ( + self._pmhour, self._minute, self._nearsec, self._pm) + + def AMPMMinutes(self): + """Return the time string for an object not showing seconds.""" + return '%2.2d:%2.2d %s' % (self._pmhour, self._minute, self._pm) + + def PreciseTime(self): + """Return the time string for the object.""" + return '%2.2d:%2.2d:%06.3f' % (self._hour, self._minute, self._second) + + def PreciseAMPM(self): + """Return the time string for the object.""" + return '%2.2d:%2.2d:%06.3f %s' % ( + self._pmhour, self._minute, self._second, self._pm) + + def yy(self): + """Return calendar 
year as a 2 digit string.""" + return str(self._year)[-2:] + + def mm(self): + """Return month as a 2 digit string.""" + return '%02d' % self._month + + def dd(self): + """Return day as a 2 digit string.""" + return '%02d' % self._day + + def rfc822(self): + """Return the date in RFC 822 format.""" + tzoffset = _tzoffset2rfc822zone(_tzoffset(self._tz, self._t)) + return '%s, %2.2d %s %d %2.2d:%2.2d:%2.2d %s' % ( + self._aday, self._day, self._amon, self._year, + self._hour, self._minute, self._nearsec, tzoffset) + + # New formats + def fCommon(self): + """Return a string representing the object\'s value + in the format: March 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._fmon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def fCommonZ(self): + """Return a string representing the object\'s value + in the format: March 1, 1997 1:45 pm US/Eastern. + """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._fmon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def aCommon(self): + """Return a string representing the object\'s value + in the format: Mar 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._amon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def aCommonZ(self): + """Return a string representing the object\'s value + in the format: Mar 1, 1997 1:45 pm US/Eastern. + """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._amon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def pCommon(self): + """Return a string representing the object\'s value + in the format: Mar. 1, 1997 1:45 pm. + """ + return '%s %s, %4.4d %s:%2.2d %s' % ( + self._pmon, self._day, self._year, self._pmhour, + self._minute, self._pm) + + def pCommonZ(self): + """Return a string representing the object\'s value + in the format: Mar. 1, 1997 1:45 pm US/Eastern. 
+ """ + return '%s %s, %4.4d %d:%2.2d %s %s' % ( + self._pmon, self._day, self._year, self._pmhour, + self._minute, self._pm, self._tz) + + def ISO(self): + """Return the object in ISO standard format. + + Note: this is *not* ISO 8601-format! See the ISO8601 and + HTML4 methods below for ISO 8601-compliant output. + + Dates are output as: YYYY-MM-DD HH:MM:SS + """ + return "%.4d-%.2d-%.2d %.2d:%.2d:%.2d" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second) + + def ISO8601(self): + """Return the object in ISO 8601-compatible format containing the + date, time with seconds-precision and the time zone identifier. + + See: http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSTZD + T is a literal character. + TZD is Time Zone Designator, format +HH:MM or -HH:MM + + If the instance is timezone naive (it was not specified with a timezone + when it was constructed) then the timezone is ommitted. + + The HTML4 method below offers the same formatting, but converts + to UTC before returning the value and sets the TZD "Z". + """ + if self.timezoneNaive(): + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2d" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second) + tzoffset = _tzoffset2iso8601zone(_tzoffset(self._tz, self._t)) + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2d%s" % ( + self._year, self._month, self._day, + self._hour, self._minute, self._second, tzoffset) + + def HTML4(self): + """Return the object in the format used in the HTML4.0 specification, + one of the standard forms in ISO8601. + + See: http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSZ + T, Z are literal characters. + The time is in UTC. 
+ """ + newdate = self.toZone('UTC') + return "%0.4d-%0.2d-%0.2dT%0.2d:%0.2d:%0.2dZ" % ( + newdate._year, newdate._month, newdate._day, + newdate._hour, newdate._minute, newdate._second) + + def asdatetime(self): + """Return a standard libary datetime.datetime + """ + tznaive = self.timezoneNaive() + if tznaive: + tzinfo = None + else: + tzinfo = _TZINFO[self._tz].tzinfo + second = int(self._second) + microsec = self.micros() % 1000000 + dt = datetime(self._year, self._month, self._day, self._hour, + self._minute, second, microsec, tzinfo) + return dt + + def utcdatetime(self): + """Convert the time to UTC then return a timezone naive datetime object + """ + utc = self.toZone('UTC') + second = int(utc._second) + microsec = utc.micros() % 1000000 + dt = datetime(utc._year, utc._month, utc._day, utc._hour, + utc._minute, second, microsec) + return dt + + def __add__(self, other): + """A DateTime may be added to a number and a number may be + added to a DateTime; two DateTimes cannot be added. + """ + if hasattr(other, '_t'): + raise DateTimeError('Cannot add two DateTimes') + o = float(other) + tz = self._tz + omicros = round(o * 86400000000) + tmicros = self.micros() + omicros + t = tmicros / 1000000.0 + d = (tmicros + long(EPOCH * 1000000)) / 86400000000.0 + s = d - math.floor(d) + ms = t - math.floor(t) + x = _calcDependentSecond(tz, t) + yr, mo, dy, hr, mn, sc = _calcYMDHMS(x, ms) + return self.__class__(yr, mo, dy, hr, mn, sc, self._tz, + t, d, s, None, self.timezoneNaive()) + + __radd__ = __add__ + + def __sub__(self, other): + """Either a DateTime or a number may be subtracted from a + DateTime, however, a DateTime may not be subtracted from + a number. + """ + if hasattr(other, '_d'): + return (self.micros() - other.micros()) / 86400000000.0 + else: + return self.__add__(-(other)) + + def __repr__(self): + """Convert a DateTime to a string that looks like a Python + expression. 
+ """ + return '%s(\'%s\')' % (self.__class__.__name__, str(self)) + + def __str__(self): + """Convert a DateTime to a string.""" + y, m, d = self._year, self._month, self._day + h, mn, s, t = self._hour, self._minute, self._second, self._tz + if s == int(s): + # A whole number of seconds -- suppress milliseconds. + return '%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d %s' % ( + y, m, d, h, mn, s, t) + else: + # s is already rounded to the nearest microsecond, and + # it's not a whole number of seconds. Be sure to print + # 2 digits before the decimal point. + return '%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%06.6f %s' % ( + y, m, d, h, mn, s, t) + + def __hash__(self): + """Compute a hash value for a DateTime.""" + return int(((self._year % 100 * 12 + self._month) * 31 + + self._day + self.time) * 100) + + def __int__(self): + """Convert to an integer number of seconds since the epoch (gmt).""" + return int(self.micros() // 1000000) + + def __long__(self): + """Convert to a long-int number of seconds since the epoch (gmt).""" + return long(self.micros() // 1000000) + + def __float__(self): + """Convert to floating-point number of seconds since the epoch (gmt). + """ + return self.micros() / 1000000.0 + + @property + def _t(self): + return self._micros / 1000000.0 + + def _parse_iso8601(self, s): + # preserve the previously implied contract + # who know where this could be used... + return self._parse_iso8601_preserving_tznaive(s)[:7] + + def _parse_iso8601_preserving_tznaive(self, s): + try: + return self.__parse_iso8601(s) + except IndexError: + raise SyntaxError( + 'Not an ISO 8601 compliant date string: "%s"' % s) + + def __parse_iso8601(self, s): + """Parse an ISO 8601 compliant date. 
+ + See: http://en.wikipedia.org/wiki/ISO_8601 + """ + month = day = week_day = 1 + year = hour = minute = seconds = hour_off = min_off = 0 + tznaive = True + + iso8601 = iso8601Match(s.strip()) + fields = iso8601 and iso8601.groupdict() or {} + if not iso8601 or fields.get('garbage'): + raise IndexError + + if fields['year']: + year = int(fields['year']) + if fields['month']: + month = int(fields['month']) + if fields['day']: + day = int(fields['day']) + + if fields['year_day']: + d = DateTime('%s-01-01' % year) + int(fields['year_day']) - 1 + month = d.month() + day = d.day() + + if fields['week']: + week = int(fields['week']) + if fields['week_day']: + week_day = int(fields['week_day']) + d = DateTime('%s-01-04' % year) + d = d - (d.dow() + 6) % 7 + week * 7 + week_day - 8 + month = d.month() + day = d.day() + + if fields['hour']: + hour = int(fields['hour']) + + if fields['minute']: + minute = int(fields['minute']) + elif fields['fraction']: + minute = 60.0 * float('0.%s' % fields['fraction']) + seconds, minute = math.modf(minute) + minute = int(minute) + seconds = 60.0 * seconds + # Avoid reprocess when handling seconds, bellow + fields['fraction'] = None + + if fields['second']: + seconds = int(fields['second']) + if fields['fraction']: + seconds = seconds + float('0.%s' % fields['fraction']) + elif fields['fraction']: + seconds = 60.0 * float('0.%s' % fields['fraction']) + + if fields['hour_off']: + hour_off = int(fields['hour_off']) + if fields['signal'] == '-': + hour_off *= -1 + + if fields['min_off']: + min_off = int(fields['min_off']) + + if fields['signal'] or fields['Z']: + tznaive = False + else: + tznaive = True + + # Differ from the specification here. To preserve backwards + # compatibility assume a default timezone == UTC. + tz = 'GMT%+03d%02d' % (hour_off, min_off) + + return year, month, day, hour, minute, seconds, tz, tznaive + + def JulianDay(self): + """Return the Julian day. 
+ + See: http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + """ + a = (14 - self._month) // 12 + y = self._year + 4800 - a + m = self._month + (12 * a) - 3 + return (self._day + (153 * m + 2) // 5 + 365 * y + + y // 4 - y // 100 + y // 400 - 32045) + + def week(self): + """Return the week number according to ISO. + + See: http://www.tondering.dk/claus/cal/node6.html + """ + J = self.JulianDay() + d4 = (J + 31741 - (J % 7)) % 146097 % 36524 % 1461 + L = d4 // 1460 + d1 = ((d4 - L) % 365) + L + return d1 // 7 + 1 + + def encode(self, out): + """Encode value for XML-RPC.""" + out.write('') + out.write(self.ISO8601()) + out.write('\n') + + +# Provide the _dt_reconstructor function here, in case something +# accidentally creates a reference to this function + +orig_reconstructor = copy_reg._reconstructor + + +def _dt_reconstructor(cls, base, state): + if cls is DateTime: + return cls(state) + return orig_reconstructor(cls, base, state) diff --git a/lib/DateTime/DateTime.txt b/lib/DateTime/DateTime.txt new file mode 100644 index 00000000..54670474 --- /dev/null +++ b/lib/DateTime/DateTime.txt @@ -0,0 +1,785 @@ +The DateTime package +==================== + +Encapsulation of date/time values. + + +Function Timezones() +-------------------- + +Returns the list of recognized timezone names: + + >>> from DateTime import Timezones + >>> zones = set(Timezones()) + +Almost all of the standard pytz timezones are included, with the exception +of some commonly-used but ambiguous abbreviations, where historical Zope +usage conflicts with the name used by pytz: + + >>> import pytz + >>> [x for x in pytz.all_timezones if x not in zones] + ['CET', 'EET', 'EST', 'MET', 'MST', 'WET'] + +Class DateTime +-------------- + +DateTime objects represent instants in time and provide interfaces for +controlling its representation without affecting the absolute value of +the object. 
+
+DateTime objects may be created from a wide variety of string or
+numeric data, or may be computed from other DateTime objects.
+DateTimes support the ability to convert their representations to many
+major timezones, as well as the ability to create a DateTime object
+in the context of a given timezone.
+
+DateTime objects provide partial numerical behavior:
+
+* Two date-time objects can be subtracted to obtain a time, in days
+  between the two.
+
+* A date-time object and a positive or negative number may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number and a date-time object may be added to
+  obtain a new date-time object that is the given number of days later
+  than the input date-time object.
+
+* A positive or negative number may be subtracted from a date-time
+  object to obtain a new date-time object that is the given number of
+  days earlier than the input date-time object.
+
+DateTime objects may be converted to integer, long, or float numbers
+of days since January 1, 1901, using the standard int, long, and float
+functions (Compatibility Note: int, long and float return the number
+of days since 1901 in GMT rather than local machine timezone).
+DateTime objects also provide access to their value in a float format
+usable with the python time module, provided that the value of the
+object falls in the range of the epoch-based time module.
+
+A DateTime object should be considered immutable; all conversion and numeric
+operations return a new DateTime object rather than modify the current object.
+
+A DateTime object always maintains its value as an absolute UTC time,
+and is represented in the context of some timezone based on the
+arguments used to create the object. A DateTime object's methods
+return values based on the timezone context.
+
+Note that in all cases the local machine timezone is used for
+representation if no timezone is specified. 
+
+Constructor for DateTime
+------------------------
+
+DateTime() returns a new date-time object. DateTimes may be created
+with zero to seven arguments:
+
+* If the function is called with no arguments, then the current date/
+  time is returned, represented in the timezone of the local machine.
+
+* If the function is invoked with a single string argument which is a
+  recognized timezone name, an object representing the current time is
+  returned, represented in the specified timezone.
+
+* If the function is invoked with a single string argument
+  representing a valid date/time, an object representing that date/
+  time will be returned.
+
+  As a general rule, any date-time representation that is recognized
+  and unambiguous to a resident of North America is acceptable. (The
+  reason for this qualification is that in North America, a date like:
+  2/1/1994 is interpreted as February 1, 1994, while in some parts of
+  the world, it is interpreted as January 2, 1994.) A date/time
+  string consists of two components, a date component and an optional
+  time component, separated by one or more spaces. If the time
+  component is omitted, 12:00am is assumed.
+
+  Any recognized timezone name specified as the final element of the
+  date/time string will be used for computing the date/time value.
+  (If you create a DateTime with the string,
+  "Mar 9, 1997 1:45pm US/Pacific", the value will essentially be the
+  same as if you had captured time.time() at the specified date and
+  time on a machine in that timezone). If no timezone is passed, then
+  the timezone configured on the local machine will be used, **except**
+  that if the date format matches ISO 8601 ('YYYY-MM-DD'), the instance
+  will use UTC / GMT+0 as the timezone. 
+
+  o Returns current date/time, represented in US/Eastern:
+
+    >>> from DateTime import DateTime
+    >>> e = DateTime('US/Eastern')
+    >>> e.timezone()
+    'US/Eastern'
+
+  o Returns specified time, represented in local machine zone:
+
+    >>> x = DateTime('1997/3/9 1:45pm')
+    >>> x.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+
+  o Specified time in local machine zone, verbose format:
+
+    >>> y = DateTime('Mar 9, 1997 13:45:00')
+    >>> y.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ...)
+    >>> y == x
+    True
+
+  o Specified time in UTC via ISO 8601 rule:
+
+    >>> z = DateTime('2014-03-24')
+    >>> z.parts() # doctest: +ELLIPSIS
+    (2014, 3, 24, 0, 0, ...)
+    >>> z.timezone()
+    'GMT+0'
+
+  The date component consists of year, month, and day values. The
+  year value must be a one-, two-, or four-digit integer. If a one-
+  or two-digit year is used, the year is assumed to be in the
+  twentieth century. The month may be an integer, from 1 to 12, a month
+  name, or a month abbreviation, where a period may optionally follow
+  the abbreviation. The day must be an integer from 1 to the number of
+  days in the month. The year, month, and day values may be separated
+  by periods, hyphens, forward slashes, or spaces. Extra spaces are
+  permitted around the delimiters. Year, month, and day values may be
+  given in any order as long as it is possible to distinguish the
+  components. If all three components are numbers that are less than
+  13, then a month-day-year ordering is assumed.
+
+  The time component consists of hour, minute, and second values
+  separated by colons. The hour value must be an integer between 0
+  and 23 inclusively. The minute value must be an integer between 0
+  and 59 inclusively. The second value may be an integer value
+  between 0 and 59.999 inclusively. The second value or both the
+  minute and second values may be omitted. The time may be followed
+  by am or pm in upper or lower case, in which case a 12-hour clock is
+  assumed. 
+ +* If the DateTime function is invoked with a single Numeric argument, + the number is assumed to be either a floating point value such as + that returned by time.time() , or a number of days after January 1, + 1901 00:00:00 UTC. + + A DateTime object is returned that represents either the gmt value + of the time.time() float represented in the local machine's + timezone, or that number of days after January 1, 1901. Note that + the number of days after 1901 need to be expressed from the + viewpoint of the local machine's timezone. A negative argument will + yield a date-time value before 1901. + +* If the function is invoked with two numeric arguments, then the + first is taken to be an integer year and the second argument is + taken to be an offset in days from the beginning of the year, in the + context of the local machine timezone. The date-time value returned + is the given offset number of days from the beginning of the given + year, represented in the timezone of the local machine. The offset + may be positive or negative. Two-digit years are assumed to be in + the twentieth century. + +* If the function is invoked with two arguments, the first a float + representing a number of seconds past the epoch in gmt (such as + those returned by time.time()) and the second a string naming a + recognized timezone, a DateTime with a value of that gmt time will + be returned, represented in the given timezone. + + >>> import time + >>> t = time.time() + + Time t represented as US/Eastern: + + >>> now_east = DateTime(t, 'US/Eastern') + + Time t represented as US/Pacific: + + >>> now_west = DateTime(t, 'US/Pacific') + + Only their representations are different: + + >>> now_east.equalTo(now_west) + True + +* If the function is invoked with three or more numeric arguments, + then the first is taken to be an integer year, the second is taken + to be an integer month, and the third is taken to be an integer day. 
+ If the combination of values is not valid, then a DateTimeError is + raised. One- or two-digit years up to 69 are assumed to be in the + 21st century, whereas values 70-99 are assumed to be 20th century. + The fourth, fifth, and sixth arguments are floating point, positive + or negative offsets in units of hours, minutes, and days, and + default to zero if not given. An optional string may be given as + the final argument to indicate timezone (the effect of this is as if + you had taken the value of time.time() at that time on a machine in + the specified timezone). + +If a string argument passed to the DateTime constructor cannot be +parsed, it will raise SyntaxError. Invalid date, time, or +timezone components will raise a DateTimeError. + +The module function Timezones() will return a list of the timezones +recognized by the DateTime module. Recognition of timezone names is +case-insensitive. + +Instance Methods for DateTime (IDateTime interface) +--------------------------------------------------- + +Conversion and comparison methods +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``timeTime()`` returns the date/time as a floating-point number in + UTC, in the format used by the python time module. Note that it is + possible to create date /time values with DateTime that have no + meaningful value to the time module, and in such cases a + DateTimeError is raised. A DateTime object's value must generally + be between Jan 1, 1970 (or your local machine epoch) and Jan 2038 to + produce a valid time.time() style value. 
+ + >>> dt = DateTime('Mar 9, 1997 13:45:00 US/Eastern') + >>> dt.timeTime() + 857933100.0 + + >>> DateTime('2040/01/01 UTC').timeTime() + 2208988800.0 + + >>> DateTime('1900/01/01 UTC').timeTime() + -2208988800.0 + +* ``toZone(z)`` returns a DateTime with the value as the current + object, represented in the indicated timezone: + + >>> dt.toZone('UTC') + DateTime('1997/03/09 18:45:00 UTC') + + >>> dt.toZone('UTC').equalTo(dt) + True + +* ``isFuture()`` returns true if this object represents a date/time + later than the time of the call: + + >>> dt.isFuture() + False + >>> DateTime('Jan 1 3000').isFuture() # not time-machine safe! + True + +* ``isPast()`` returns true if this object represents a date/time + earlier than the time of the call: + + >>> dt.isPast() + True + >>> DateTime('Jan 1 3000').isPast() # not time-machine safe! + False + +* ``isCurrentYear()`` returns true if this object represents a + date/time that falls within the current year, in the context of this + object's timezone representation: + + >>> dt.isCurrentYear() + False + >>> DateTime().isCurrentYear() + True + +* ``isCurrentMonth()`` returns true if this object represents a + date/time that falls within the current month, in the context of + this object's timezone representation: + + >>> dt.isCurrentMonth() + False + >>> DateTime().isCurrentMonth() + True + +* ``isCurrentDay()`` returns true if this object represents a + date/time that falls within the current day, in the context of this + object's timezone representation: + + >>> dt.isCurrentDay() + False + >>> DateTime().isCurrentDay() + True + +* ``isCurrentHour()`` returns true if this object represents a + date/time that falls within the current hour, in the context of this + object's timezone representation: + + >>> dt.isCurrentHour() + False + + >>> DateTime().isCurrentHour() + True + +* ``isCurrentMinute()`` returns true if this object represents a + date/time that falls within the current minute, in the context of + this object's 
timezone representation:
+
+    >>> dt.isCurrentMinute()
+    False
+    >>> DateTime().isCurrentMinute()
+    True
+
+* ``isLeapYear()`` returns true if the current year (in the context of
+  the object's timezone) is a leap year:
+
+    >>> dt.isLeapYear()
+    False
+    >>> DateTime('Mar 8 2004').isLeapYear()
+    True
+
+* ``earliestTime()`` returns a new DateTime object that represents the
+  earliest possible time (in whole seconds) that still falls within
+  the current object's day, in the object's timezone context:
+
+    >>> dt.earliestTime()
+    DateTime('1997/03/09 00:00:00 US/Eastern')
+
+* ``latestTime()`` returns a new DateTime object that represents the
+  latest possible time (in whole seconds) that still falls within the
+  current object's day, in the object's timezone context
+
+    >>> dt.latestTime()
+    DateTime('1997/03/09 23:59:59 US/Eastern')
+
+Component access
+~~~~~~~~~~~~~~~~
+
+* ``parts()`` returns a tuple containing the calendar year, month,
+  day, hour, minute, second and timezone of the object
+
+    >>> dt.parts() # doctest: +ELLIPSIS
+    (1997, 3, 9, 13, 45, ... 'US/Eastern')
+
+* ``timezone()`` returns the timezone in which the object is represented:
+
+    >>> dt.timezone() in Timezones()
+    True
+
+* ``tzoffset()`` returns the timezone offset for the object's timezone:
+
+    >>> dt.tzoffset()
+    -18000
+
+* ``year()`` returns the calendar year of the object:
+
+    >>> dt.year()
+    1997
+
+* ``month()`` returns the month of the object as an integer:
+
+    >>> dt.month()
+    3
+
+* ``Month()`` returns the full month name:
+
+    >>> dt.Month()
+    'March'
+
+* ``aMonth()`` returns the abbreviated month name:
+
+    >>> dt.aMonth()
+    'Mar'
+
+* ``pMonth()`` returns the abbreviated (with period) month name:
+
+    >>> dt.pMonth()
+    'Mar.'
+
+* ``day()`` returns the integer day:
+
+    >>> dt.day()
+    9
+
+* ``Day()`` returns the full name of the day of the week:
+
+    >>> dt.Day()
+    'Sunday'
+
+* ``dayOfYear()`` returns the day of the year, in context of the
+  timezone representation of the object:
+
+    >>> dt.dayOfYear()
+    68
+
+* ``aDay()`` returns the abbreviated name of the day of the week:
+
+    >>> dt.aDay()
+    'Sun'
+
+* ``pDay()`` returns the abbreviated (with period) name of the day of
+  the week:
+
+    >>> dt.pDay()
+    'Sun.'
+
+* ``dow()`` returns the integer day of the week, where Sunday is 0:
+
+    >>> dt.dow()
+    0
+
+* ``dow_1()`` returns the integer day of the week, where Sunday is 1:
+
+    >>> dt.dow_1()
+    1
+
+* ``h_12()`` returns the 12-hour clock representation of the hour:
+
+    >>> dt.h_12()
+    1
+
+* ``h_24()`` returns the 24-hour clock representation of the hour:
+
+    >>> dt.h_24()
+    13
+
+* ``ampm()`` returns the appropriate time modifier (am or pm):
+
+    >>> dt.ampm()
+    'pm'
+
+* ``hour()`` returns the 24-hour clock representation of the hour:
+
+    >>> dt.hour()
+    13
+
+* ``minute()`` returns the minute:
+
+    >>> dt.minute()
+    45
+
+* ``second()`` returns the second:
+
+    >>> dt.second() == 0
+    True
+
+* ``millis()`` returns the milliseconds since the epoch in GMT.
+
+    >>> dt.millis() == 857933100000
+    True
+
+strftime()
+~~~~~~~~~~
+
+See ``tests/test_datetime.py``.
+ +General formats from previous DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* ``Date()`` return the date string for the object: + + >>> dt.Date() + '1997/03/09' + +* ``Time()`` returns the time string for an object to the nearest + second: + + >>> dt.Time() + '13:45:00' + +* ``TimeMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.TimeMinutes() + '13:45' + +* ``AMPM()`` returns the time string for an object to the nearest second: + + >>> dt.AMPM() + '01:45:00 pm' + +* ``AMPMMinutes()`` returns the time string for an object not showing + seconds: + + >>> dt.AMPMMinutes() + '01:45 pm' + +* ``PreciseTime()`` returns the time string for the object: + + >>> dt.PreciseTime() + '13:45:00.000' + +* ``PreciseAMPM()`` returns the time string for the object: + + >>> dt.PreciseAMPM() + '01:45:00.000 pm' + +* ``yy()`` returns the calendar year as a 2 digit string + + >>> dt.yy() + '97' + +* ``mm()`` returns the month as a 2 digit string + + >>> dt.mm() + '03' + +* ``dd()`` returns the day as a 2 digit string: + + >>> dt.dd() + '09' + +* ``rfc822()`` returns the date in RFC 822 format: + + >>> dt.rfc822() + 'Sun, 09 Mar 1997 13:45:00 -0500' + +New formats +~~~~~~~~~~~ + +* ``fCommon()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm: + + >>> dt.fCommon() + 'March 9, 1997 1:45 pm' + +* ``fCommonZ()`` returns a string representing the object's value in + the format: March 9, 1997 1:45 pm US/Eastern: + + >>> dt.fCommonZ() + 'March 9, 1997 1:45 pm US/Eastern' + +* ``aCommon()`` returns a string representing the object's value in + the format: Mar 9, 1997 1:45 pm: + + >>> dt.aCommon() + 'Mar 9, 1997 1:45 pm' + +* ``aCommonZ()`` return a string representing the object's value in + the format: Mar 9, 1997 1:45 pm US/Eastern: + + >>> dt.aCommonZ() + 'Mar 9, 1997 1:45 pm US/Eastern' + +* ``pCommon()`` returns a string representing the object's value in + the format Mar. 
9, 1997 1:45 pm:
+
+    >>> dt.pCommon()
+    'Mar. 9, 1997 1:45 pm'
+
+* ``pCommonZ()`` returns a string representing the object's value in
+  the format: Mar. 9, 1997 1:45 pm US/Eastern:
+
+    >>> dt.pCommonZ()
+    'Mar. 9, 1997 1:45 pm US/Eastern'
+
+* ``ISO()`` returns a string with the date/time in ISO format. Note:
+  this is not ISO 8601-format! See the ISO8601 and HTML4 methods below
+  for ISO 8601-compliant output. Dates are output as: YYYY-MM-DD HH:MM:SS
+
+    >>> dt.ISO()
+    '1997-03-09 13:45:00'
+
+* ``ISO8601()`` returns the object in ISO 8601-compatible format
+  containing the date, time with seconds-precision and the time zone
+  identifier - see http://www.w3.org/TR/NOTE-datetime. Dates are
+  output as: YYYY-MM-DDTHH:MM:SSTZD (T is a literal character, TZD is
+  Time Zone Designator, format +HH:MM or -HH:MM).
+
+  The ``HTML4()`` method below offers the same formatting, but
+  converts to UTC before returning the value and sets the TZD to "Z".
+
+    >>> dt.ISO8601()
+    '1997-03-09T13:45:00-05:00'
+
+
+* ``HTML4()`` returns the object in the format used in the HTML4.0
+  specification, one of the standard forms in ISO8601. See
+  http://www.w3.org/TR/NOTE-datetime. 
Dates are output as: + YYYY-MM-DDTHH:MM:SSZ (T, Z are literal characters, the time is in + UTC.): + + >>> dt.HTML4() + '1997-03-09T18:45:00Z' + +* ``JulianDay()`` returns the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + + >>> dt.JulianDay() + 2450517 + +* ``week()`` returns the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + + >>> dt.week() + 10 + +Deprecated API +~~~~~~~~~~~~~~ + +* DayOfWeek(): see Day() + +* Day_(): see pDay() + +* Mon(): see aMonth() + +* Mon_(): see pMonth + +General Services Provided by DateTime +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +DateTimes can be repr()'ed; the result will be a string indicating how +to make a DateTime object like this: + + >>> repr(dt) + "DateTime('1997/03/09 13:45:00 US/Eastern')" + +When we convert them into a string, we get a nicer string that could +actually be shown to a user: + + >>> str(dt) + '1997/03/09 13:45:00 US/Eastern' + +The hash value of a DateTime is based on the date and time and is +equal for different representations of the DateTime: + + >>> hash(dt) + 3618678 + >>> hash(dt.toZone('UTC')) + 3618678 + +DateTime objects can be compared to other DateTime objects OR floating +point numbers such as the ones which are returned by the python time +module by using the equalTo method. 
Using this API, True is returned if the +object represents a date/time equal to the specified DateTime or time module +style time: + + >>> dt.equalTo(dt) + True + >>> dt.equalTo(dt.toZone('UTC')) + True + >>> dt.equalTo(dt.timeTime()) + True + >>> dt.equalTo(DateTime()) + False + +Same goes for inequalities: + + >>> dt.notEqualTo(dt) + False + >>> dt.notEqualTo(dt.toZone('UTC')) + False + >>> dt.notEqualTo(dt.timeTime()) + False + >>> dt.notEqualTo(DateTime()) + True + +Normal equality operations only work with datetime objects and take the +timezone setting into account: + + >>> dt == dt + True + >>> dt == dt.toZone('UTC') + False + >>> dt == DateTime() + False + + >>> dt != dt + False + >>> dt != dt.toZone('UTC') + True + >>> dt != DateTime() + True + +But the other comparison operations compare the referenced moment in time and +not the representation itself: + + >>> dt > dt + False + >>> DateTime() > dt + True + >>> dt > DateTime().timeTime() + False + >>> DateTime().timeTime() > dt + True + + >>> dt.greaterThan(dt) + False + >>> DateTime().greaterThan(dt) + True + >>> dt.greaterThan(DateTime().timeTime()) + False + + >>> dt >= dt + True + >>> DateTime() >= dt + True + >>> dt >= DateTime().timeTime() + False + >>> DateTime().timeTime() >= dt + True + + >>> dt.greaterThanEqualTo(dt) + True + >>> DateTime().greaterThanEqualTo(dt) + True + >>> dt.greaterThanEqualTo(DateTime().timeTime()) + False + + >>> dt < dt + False + >>> DateTime() < dt + False + >>> dt < DateTime().timeTime() + True + >>> DateTime().timeTime() < dt + False + + >>> dt.lessThan(dt) + False + >>> DateTime().lessThan(dt) + False + >>> dt.lessThan(DateTime().timeTime()) + True + + >>> dt <= dt + True + >>> DateTime() <= dt + False + >>> dt <= DateTime().timeTime() + True + >>> DateTime().timeTime() <= dt + False + + >>> dt.lessThanEqualTo(dt) + True + >>> DateTime().lessThanEqualTo(dt) + False + >>> dt.lessThanEqualTo(DateTime().timeTime()) + True + +Numeric Services Provided by DateTime 
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +A DateTime may be added to a number and a number may be added to a +DateTime: + + >>> dt + 5 + DateTime('1997/03/14 13:45:00 US/Eastern') + >>> 5 + dt + DateTime('1997/03/14 13:45:00 US/Eastern') + +Two DateTimes cannot be added: + + >>> from DateTime.interfaces import DateTimeError + >>> try: + ... dt + dt + ... print('fail') + ... except DateTimeError: + ... print('ok') + ok + +Either a DateTime or a number may be subtracted from a DateTime, +however, a DateTime may not be subtracted from a number: + + >>> DateTime('1997/03/10 13:45 US/Eastern') - dt + 1.0 + >>> dt - 1 + DateTime('1997/03/08 13:45:00 US/Eastern') + >>> 1 - dt + Traceback (most recent call last): + ... + TypeError: unsupported operand type(s) for -: 'int' and 'DateTime' + +DateTimes can also be converted to integers (number of seconds since +the epoch) and floats: + + >>> int(dt) + 857933100 + >>> float(dt) + 857933100.0 diff --git a/lib/DateTime/__init__.py b/lib/DateTime/__init__.py new file mode 100644 index 00000000..b4181ad9 --- /dev/null +++ b/lib/DateTime/__init__.py @@ -0,0 +1,17 @@ +############################################################################## +# +# Copyright (c) 2002 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from .DateTime import DateTime +from .DateTime import Timezones + +__all__ = ('DateTime', 'Timezones') diff --git a/lib/DateTime/interfaces.py b/lib/DateTime/interfaces.py new file mode 100644 index 00000000..5f29cff4 --- /dev/null +++ b/lib/DateTime/interfaces.py @@ -0,0 +1,375 @@ +############################################################################## +# +# Copyright (c) 2005 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## +from zope.interface import Interface + + +class DateTimeError(Exception): + pass + + +class SyntaxError(DateTimeError): + pass + + +class DateError(DateTimeError): + pass + + +class TimeError(DateTimeError): + pass + + +class IDateTime(Interface): + # Conversion and comparison methods + + #TODO determine whether this method really is part of the public API + def localZone(ltm=None): + '''Returns the time zone on the given date. The time zone + can change according to daylight savings.''' + + def timeTime(): + """Return the date/time as a floating-point number in UTC, in + the format used by the python time module. 
Note that it is + possible to create date/time values with DateTime that have no + meaningful value to the time module.""" + + def toZone(z): + """Return a DateTime with the value as the current object, + represented in the indicated timezone.""" + + def isFuture(): + """Return true if this object represents a date/time later + than the time of the call""" + + def isPast(): + """Return true if this object represents a date/time earlier + than the time of the call""" + + def isCurrentYear(): + """Return true if this object represents a date/time that + falls within the current year, in the context of this + object's timezone representation""" + + def isCurrentMonth(): + """Return true if this object represents a date/time that + falls within the current month, in the context of this + object's timezone representation""" + + def isCurrentDay(): + """Return true if this object represents a date/time that + falls within the current day, in the context of this object's + timezone representation""" + + def isCurrentHour(): + """Return true if this object represents a date/time that + falls within the current hour, in the context of this object's + timezone representation""" + + def isCurrentMinute(): + """Return true if this object represents a date/time that + falls within the current minute, in the context of this + object's timezone representation""" + + def isLeapYear(): + """Return true if the current year (in the context of the + object's timezone) is a leap year""" + + def earliestTime(): + """Return a new DateTime object that represents the earliest + possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context""" + + def latestTime(): + """Return a new DateTime object that represents the latest + possible time (in whole seconds) that still falls within the + current object's day, in the object's timezone context""" + + def greaterThan(t): + """Compare this DateTime object to another DateTime object OR + a 
floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time greater than the specified DateTime or time module + style time. Revised to give more correct results through + comparison of long integer milliseconds.""" + + __gt__ = greaterThan + + def greaterThanEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time greater than or equal to the specified DateTime or + time module style time. Revised to give more correct results + through comparison of long integer milliseconds.""" + + __ge__ = greaterThanEqualTo + + def equalTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time equal to the specified DateTime or time module style + time. Revised to give more correct results through comparison + of long integer milliseconds.""" + + __eq__ = equalTo + + def notEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time not equal to the specified DateTime or time module + style time. Revised to give more correct results through + comparison of long integer milliseconds.""" + + __ne__ = notEqualTo + + def lessThan(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time less than the specified DateTime or time module + style time. 
Revised to give more correct results through + comparison of long integer milliseconds.""" + + __lt__ = lessThan + + def lessThanEqualTo(t): + """Compare this DateTime object to another DateTime object OR + a floating point number such as that which is returned by the + python time module. Returns true if the object represents a + date/time less than or equal to the specified DateTime or time + module style time. Revised to give more correct results + through comparison of long integer milliseconds.""" + + __le__ = lessThanEqualTo + + # Component access + + def parts(): + """Return a tuple containing the calendar year, month, day, + hour, minute second and timezone of the object""" + + def timezone(): + """Return the timezone in which the object is represented.""" + + def tzoffset(): + """Return the timezone offset for the objects timezone.""" + + def year(): + """Return the calendar year of the object""" + + def month(): + """Return the month of the object as an integer""" + + def Month(): + """Return the full month name""" + + def aMonth(): + """Return the abreviated month name.""" + + def Mon(): + """Compatibility: see aMonth""" + + def pMonth(): + """Return the abreviated (with period) month name.""" + + def Mon_(): + """Compatibility: see pMonth""" + + def day(): + """Return the integer day""" + + def Day(): + """Return the full name of the day of the week""" + + def DayOfWeek(): + """Compatibility: see Day""" + + def dayOfYear(): + """Return the day of the year, in context of the timezone + representation of the object""" + + def aDay(): + """Return the abreviated name of the day of the week""" + + def pDay(): + """Return the abreviated (with period) name of the day of the + week""" + + def Day_(): + """Compatibility: see pDay""" + + def dow(): + """Return the integer day of the week, where sunday is 0""" + + def dow_1(): + """Return the integer day of the week, where sunday is 1""" + + def h_12(): + """Return the 12-hour clock representation of the hour""" + 
+ def h_24(): + """Return the 24-hour clock representation of the hour""" + + def ampm(): + """Return the appropriate time modifier (am or pm)""" + + def hour(): + """Return the 24-hour clock representation of the hour""" + + def minute(): + """Return the minute""" + + def second(): + """Return the second""" + + def millis(): + """Return the millisecond since the epoch in GMT.""" + + def strftime(format): + """Format the date/time using the *current timezone representation*.""" + + # General formats from previous DateTime + + def Date(): + """Return the date string for the object.""" + + def Time(): + """Return the time string for an object to the nearest second.""" + + def TimeMinutes(): + """Return the time string for an object not showing seconds.""" + + def AMPM(): + """Return the time string for an object to the nearest second.""" + + def AMPMMinutes(): + """Return the time string for an object not showing seconds.""" + + def PreciseTime(): + """Return the time string for the object.""" + + def PreciseAMPM(): + """Return the time string for the object.""" + + def yy(): + """Return calendar year as a 2 digit string""" + + def mm(): + """Return month as a 2 digit string""" + + def dd(): + """Return day as a 2 digit string""" + + def rfc822(): + """Return the date in RFC 822 format""" + + # New formats + + def fCommon(): + """Return a string representing the object's value in the + format: March 1, 1997 1:45 pm""" + + def fCommonZ(): + """Return a string representing the object's value in the + format: March 1, 1997 1:45 pm US/Eastern""" + + def aCommon(): + """Return a string representing the object's value in the + format: Mar 1, 1997 1:45 pm""" + + def aCommonZ(): + """Return a string representing the object's value in the + format: Mar 1, 1997 1:45 pm US/Eastern""" + + def pCommon(): + """Return a string representing the object's value in the + format: Mar. 
1, 1997 1:45 pm""" + + def pCommonZ(): + """Return a string representing the object's value + in the format: Mar. 1, 1997 1:45 pm US/Eastern""" + + def ISO(): + """Return the object in ISO standard format. Note: this is + *not* ISO 8601-format! See the ISO8601 and HTML4 methods below + for ISO 8601-compliant output + + Dates are output as: YYYY-MM-DD HH:MM:SS + """ + + def ISO8601(): + """Return the object in ISO 8601-compatible format containing + the date, time with seconds-precision and the time zone + identifier - see http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSTZD + T is a literal character. + TZD is Time Zone Designator, format +HH:MM or -HH:MM + + The HTML4 method below offers the same formatting, but + converts to UTC before returning the value and sets the TZD"Z" + """ + + def HTML4(): + """Return the object in the format used in the HTML4.0 + specification, one of the standard forms in ISO8601. See + http://www.w3.org/TR/NOTE-datetime + + Dates are output as: YYYY-MM-DDTHH:MM:SSZ + T, Z are literal characters. + The time is in UTC. 
+ """ + + def JulianDay(): + """Return the Julian day according to + http://www.tondering.dk/claus/cal/node3.html#sec-calcjd + """ + + def week(): + """Return the week number according to ISO + see http://www.tondering.dk/claus/cal/node6.html#SECTION00670000000000000000 + """ + + # Python operator and conversion API + + def __add__(other): + """A DateTime may be added to a number and a number may be + added to a DateTime; two DateTimes cannot be added.""" + + __radd__ = __add__ + + def __sub__(other): + """Either a DateTime or a number may be subtracted from a + DateTime, however, a DateTime may not be subtracted from a + number.""" + + def __repr__(): + """Convert a DateTime to a string that looks like a Python + expression.""" + + def __str__(): + """Convert a DateTime to a string.""" + + def __hash__(): + """Compute a hash value for a DateTime""" + + def __int__(): + """Convert to an integer number of seconds since the epoch (gmt)""" + + def __long__(): + """Convert to a long-int number of seconds since the epoch (gmt)""" + + def __float__(): + """Convert to floating-point number of seconds since the epoch (gmt)""" diff --git a/lib/DateTime/pytz.txt b/lib/DateTime/pytz.txt new file mode 100644 index 00000000..33de811b --- /dev/null +++ b/lib/DateTime/pytz.txt @@ -0,0 +1,192 @@ +Pytz Support +============ + +Allows the pytz package to be used for time zone information. The +advantage of using pytz is that it has a more complete and up to date +time zone and daylight savings time database. + +Usage +----- +You don't have to do anything special to make it work. + + >>> from DateTime import DateTime, Timezones + >>> d = DateTime('March 11, 2007 US/Eastern') + +Daylight Savings +---------------- +In 2007 daylight savings time in the US was changed. The Energy Policy +Act of 2005 mandates that DST will start on the second Sunday in March +and end on the first Sunday in November. + +In 2007, the start and stop dates are March 11 and November 4, +respectively. 
These dates are different from previous DST start and
+stop dates. In 2006, the dates were the first Sunday in April (April
+2, 2006) and the last Sunday in October (October 29, 2006).
+
+Let's make sure that DateTime can deal with this, since the primary
+motivation to use pytz for time zone information is the fact that it
+is kept up to date with daylight savings changes.
+
+    >>> DateTime('March 11, 2007 US/Eastern').tzoffset()
+    -18000
+    >>> DateTime('March 12, 2007 US/Eastern').tzoffset()
+    -14400
+    >>> DateTime('November 4, 2007 US/Eastern').tzoffset()
+    -14400
+    >>> DateTime('November 5, 2007 US/Eastern').tzoffset()
+    -18000
+
+Let's compare this to 2006.
+
+    >>> DateTime('April 2, 2006 US/Eastern').tzoffset()
+    -18000
+    >>> DateTime('April 3, 2006 US/Eastern').tzoffset()
+    -14400
+    >>> DateTime('October 29, 2006 US/Eastern').tzoffset()
+    -14400
+    >>> DateTime('October 30, 2006 US/Eastern').tzoffset()
+    -18000
+
+Time Zones
+----------
+DateTime can use pytz's large database of time zones. Here are some
+examples:
+
+    >>> d = DateTime('Pacific/Kwajalein')
+    >>> d = DateTime('America/Shiprock')
+    >>> d = DateTime('Africa/Ouagadougou')
+
+Of course pytz doesn't know about everything.
+
+    >>> from DateTime.interfaces import SyntaxError
+    >>> try:
+    ...     d = DateTime('July 21, 1969 Moon/Eastern')
+    ...     print('fail')
+    ... except SyntaxError:
+    ...     print('ok')
+    ok
+
+You can still use zone names that DateTime defines that aren't part of
+the pytz database.
+
+    >>> d = DateTime('eet')
+    >>> d = DateTime('iceland')
+
+These time zones use DateTime's database. So it's preferable to use the
+official time zone name.
+
+One trickiness is that DateTime supports some zone name
+abbreviations. Some of these map to pytz names, so these abbreviations
+will give you time zone data from pytz. Notable among abbreviations
+that work this way are 'est', 'cst', 'mst', and 'pst'.
+
+Let's verify that 'est' picks up the 2007 daylight savings time changes.
+ + >>> DateTime('March 11, 2007 est').tzoffset() + -18000 + >>> DateTime('March 12, 2007 est').tzoffset() + -14400 + >>> DateTime('November 4, 2007 est').tzoffset() + -14400 + >>> DateTime('November 5, 2007 est').tzoffset() + -18000 + +You can get a list of time zones supported by calling the Timezones() function. + + >>> Timezones() #doctest: +ELLIPSIS + ['Africa/Abidjan', 'Africa/Accra', 'Africa/Addis_Ababa', ...] + +Note that you can mess with this list without hurting things. + + >>> t = Timezones() + >>> t.remove('US/Eastern') + >>> d = DateTime('US/Eastern') + + +Internal Components +------------------- + +The following are tests of internal components. + +Cache +~~~~~ + +The DateTime class uses a new time zone cache. + + >>> from DateTime.DateTime import _TZINFO + >>> _TZINFO #doctest: +ELLIPSIS + + +The cache maps time zone names to time zone instances. + + >>> cache = _TZINFO + >>> tz = cache['GMT+730'] + >>> tz = cache['US/Mountain'] + +The cache also must provide a few attributes for use by the DateTime +class. + +The _zlst attribute is a list of supported time zone names. + + >>> cache._zlst #doctest: +ELLIPSIS + ['Africa/Abidjan'... 'Africa/Accra'... 'IDLE'... 'NZST'... 'NZT'...] + +The _zidx attribute is a list of lower-case and possibly abbreviated +time zone names that can be mapped to offical zone names. + + >>> 'australia/yancowinna' in cache._zidx + True + >>> 'europe/isle_of_man' in cache._zidx + True + >>> 'gmt+0500' in cache._zidx + True + +Note that there are more items in _zidx than in _zlst since there are +multiple names for some time zones. + + >>> len(cache._zidx) > len(cache._zlst) + True + +Each entry in _zlst should also be present in _zidx in lower case form. + + >>> for name in cache._zlst: + ... if not name.lower() in cache._zidx: + ... print("Error %s not in _zidx" % name.lower()) + +The _zmap attribute maps the names in _zidx to official names in _zlst. 
+ + >>> cache._zmap['africa/abidjan'] + 'Africa/Abidjan' + >>> cache._zmap['gmt+1'] + 'GMT+1' + >>> cache._zmap['gmt+0100'] + 'GMT+1' + >>> cache._zmap['utc'] + 'UTC' + +Let's make sure that _zmap and _zidx agree. + + >>> idx = set(cache._zidx) + >>> keys = set(cache._zmap.keys()) + >>> idx == keys + True + +Timezone objects +~~~~~~~~~~~~~~~~ +The timezone instances have only one public method info(). It returns +a tuple of (offset, is_dst, name). The method takes a timestamp, which +is used to determine dst information. + + >>> t1 = DateTime('November 4, 00:00 2007 US/Mountain').timeTime() + >>> t2 = DateTime('November 4, 02:00 2007 US/Mountain').timeTime() + >>> tz.info(t1) + (-21600, 1, 'MDT') + >>> tz.info(t2) + (-25200, 0, 'MST') + +If you don't pass any arguments to info it provides daylight savings +time information as of today. + + >>> tz.info() in ((-21600, 1, 'MDT'), (-25200, 0, 'MST')) + True + diff --git a/lib/DateTime/pytz_support.py b/lib/DateTime/pytz_support.py new file mode 100644 index 00000000..8cfbfc59 --- /dev/null +++ b/lib/DateTime/pytz_support.py @@ -0,0 +1,259 @@ +############################################################################## +# +# Copyright (c) 2007 Zope Foundation and Contributors. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE +# +############################################################################## + +from datetime import datetime, timedelta + +import pytz +import pytz.reference +from pytz.tzinfo import StaticTzInfo, memorized_timedelta + +from .interfaces import DateTimeError + +EPOCH = datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc) + +_numeric_timezone_data = { + 'GMT': ('GMT', 0, 1, [], '', [(0, 0, 0)], 'GMT\000'), + 'GMT+0': ('GMT+0', 0, 1, [], '', [(0, 0, 0)], 'GMT+0000\000'), + 'GMT+1': ('GMT+1', 0, 1, [], '', [(3600, 0, 0)], 'GMT+0100\000'), + 'GMT+2': ('GMT+2', 0, 1, [], '', [(7200, 0, 0)], 'GMT+0200\000'), + 'GMT+3': ('GMT+3', 0, 1, [], '', [(10800, 0, 0)], 'GMT+0300\000'), + 'GMT+4': ('GMT+4', 0, 1, [], '', [(14400, 0, 0)], 'GMT+0400\000'), + 'GMT+5': ('GMT+5', 0, 1, [], '', [(18000, 0, 0)], 'GMT+0500\000'), + 'GMT+6': ('GMT+6', 0, 1, [], '', [(21600, 0, 0)], 'GMT+0600\000'), + 'GMT+7': ('GMT+7', 0, 1, [], '', [(25200, 0, 0)], 'GMT+0700\000'), + 'GMT+8': ('GMT+8', 0, 1, [], '', [(28800, 0, 0)], 'GMT+0800\000'), + 'GMT+9': ('GMT+9', 0, 1, [], '', [(32400, 0, 0)], 'GMT+0900\000'), + 'GMT+10': ('GMT+10', 0, 1, [], '', [(36000, 0, 0)], 'GMT+1000\000'), + 'GMT+11': ('GMT+11', 0, 1, [], '', [(39600, 0, 0)], 'GMT+1100\000'), + 'GMT+12': ('GMT+12', 0, 1, [], '', [(43200, 0, 0)], 'GMT+1200\000'), + 'GMT+13': ('GMT+13', 0, 1, [], '', [(46800, 0, 0)], 'GMT+1300\000'), + + 'GMT-1': ('GMT-1', 0, 1, [], '', [(-3600, 0, 0)], 'GMT-0100\000'), + 'GMT-2': ('GMT-2', 0, 1, [], '', [(-7200, 0, 0)], 'GMT-0200\000'), + 'GMT-3': ('GMT-3', 0, 1, [], '', [(-10800, 0, 0)], 'GMT-0300\000'), + 'GMT-4': ('GMT-4', 0, 1, [], '', [(-14400, 0, 0)], 'GMT-0400\000'), + 'GMT-5': ('GMT-5', 0, 1, [], '', [(-18000, 0, 0)], 'GMT-0500\000'), + 'GMT-6': 
('GMT-6', 0, 1, [], '', [(-21600, 0, 0)], 'GMT-0600\000'), + 'GMT-7': ('GMT-7', 0, 1, [], '', [(-25200, 0, 0)], 'GMT-0700\000'), + 'GMT-8': ('GMT-8', 0, 1, [], '', [(-28800, 0, 0)], 'GMT-0800\000'), + 'GMT-9': ('GMT-9', 0, 1, [], '', [(-32400, 0, 0)], 'GMT-0900\000'), + 'GMT-10': ('GMT-10', 0, 1, [], '', [(-36000, 0, 0)], 'GMT-1000\000'), + 'GMT-11': ('GMT-11', 0, 1, [], '', [(-39600, 0, 0)], 'GMT-1100\000'), + 'GMT-12': ('GMT-12', 0, 1, [], '', [(-43200, 0, 0)], 'GMT-1200\000'), + + 'GMT+0130': ('GMT+0130', 0, 1, [], '', [(5400, 0, 0)], 'GMT+0130\000'), + 'GMT+0230': ('GMT+0230', 0, 1, [], '', [(9000, 0, 0)], 'GMT+0230\000'), + 'GMT+0330': ('GMT+0330', 0, 1, [], '', [(12600, 0, 0)], 'GMT+0330\000'), + 'GMT+0430': ('GMT+0430', 0, 1, [], '', [(16200, 0, 0)], 'GMT+0430\000'), + 'GMT+0530': ('GMT+0530', 0, 1, [], '', [(19800, 0, 0)], 'GMT+0530\000'), + 'GMT+0630': ('GMT+0630', 0, 1, [], '', [(23400, 0, 0)], 'GMT+0630\000'), + 'GMT+0730': ('GMT+0730', 0, 1, [], '', [(27000, 0, 0)], 'GMT+0730\000'), + 'GMT+0830': ('GMT+0830', 0, 1, [], '', [(30600, 0, 0)], 'GMT+0830\000'), + 'GMT+0930': ('GMT+0930', 0, 1, [], '', [(34200, 0, 0)], 'GMT+0930\000'), + 'GMT+1030': ('GMT+1030', 0, 1, [], '', [(37800, 0, 0)], 'GMT+1030\000'), + 'GMT+1130': ('GMT+1130', 0, 1, [], '', [(41400, 0, 0)], 'GMT+1130\000'), + 'GMT+1230': ('GMT+1230', 0, 1, [], '', [(45000, 0, 0)], 'GMT+1230\000'), + + 'GMT-0130': ('GMT-0130', 0, 1, [], '', [(-5400, 0, 0)], 'GMT-0130\000'), + 'GMT-0230': ('GMT-0230', 0, 1, [], '', [(-9000, 0, 0)], 'GMT-0230\000'), + 'GMT-0330': ('GMT-0330', 0, 1, [], '', [(-12600, 0, 0)], 'GMT-0330\000'), + 'GMT-0430': ('GMT-0430', 0, 1, [], '', [(-16200, 0, 0)], 'GMT-0430\000'), + 'GMT-0530': ('GMT-0530', 0, 1, [], '', [(-19800, 0, 0)], 'GMT-0530\000'), + 'GMT-0630': ('GMT-0630', 0, 1, [], '', [(-23400, 0, 0)], 'GMT-0630\000'), + 'GMT-0730': ('GMT-0730', 0, 1, [], '', [(-27000, 0, 0)], 'GMT-0730\000'), + 'GMT-0830': ('GMT-0830', 0, 1, [], '', [(-30600, 0, 0)], 'GMT-0830\000'), + 
'GMT-0930': ('GMT-0930', 0, 1, [], '', [(-34200, 0, 0)], 'GMT-0930\000'), + 'GMT-1030': ('GMT-1030', 0, 1, [], '', [(-37800, 0, 0)], 'GMT-1030\000'), + 'GMT-1130': ('GMT-1130', 0, 1, [], '', [(-41400, 0, 0)], 'GMT-1130\000'), + 'GMT-1230': ('GMT-1230', 0, 1, [], '', [(-45000, 0, 0)], 'GMT-1230\000'), +} + +# These are the timezones not in pytz.common_timezones +_old_zlst = [ + 'AST', 'AT', 'BST', 'BT', 'CCT', + 'CET', 'CST', 'Cuba', 'EADT', 'EAST', + 'EEST', 'EET', 'EST', 'Egypt', 'FST', + 'FWT', 'GB-Eire', 'GMT+0100', 'GMT+0130', 'GMT+0200', + 'GMT+0230', 'GMT+0300', 'GMT+0330', 'GMT+0400', 'GMT+0430', + 'GMT+0500', 'GMT+0530', 'GMT+0600', 'GMT+0630', 'GMT+0700', + 'GMT+0730', 'GMT+0800', 'GMT+0830', 'GMT+0900', 'GMT+0930', + 'GMT+1', 'GMT+1000', 'GMT+1030', 'GMT+1100', 'GMT+1130', + 'GMT+1200', 'GMT+1230', 'GMT+1300', 'GMT-0100', 'GMT-0130', + 'GMT-0200', 'GMT-0300', 'GMT-0400', 'GMT-0500', 'GMT-0600', + 'GMT-0630', 'GMT-0700', 'GMT-0730', 'GMT-0800', 'GMT-0830', + 'GMT-0900', 'GMT-0930', 'GMT-1000', 'GMT-1030', 'GMT-1100', + 'GMT-1130', 'GMT-1200', 'GMT-1230', 'GST', 'Greenwich', + 'Hongkong', 'IDLE', 'IDLW', 'Iceland', 'Iran', + 'Israel', 'JST', 'Jamaica', 'Japan', 'MEST', + 'MET', 'MEWT', 'MST', 'NT', 'NZDT', + 'NZST', 'NZT', 'PST', 'Poland', 'SST', + 'SWT', 'Singapore', 'Turkey', 'UCT', 'UT', + 'Universal', 'WADT', 'WAST', 'WAT', 'WET', + 'ZP4', 'ZP5', 'ZP6', +] + +_old_zmap = { + 'aest': 'GMT+10', 'aedt': 'GMT+11', + 'aus eastern standard time': 'GMT+10', + 'sydney standard time': 'GMT+10', + 'tasmania standard time': 'GMT+10', + 'e. australia standard time': 'GMT+10', + 'aus central standard time': 'GMT+0930', + 'cen. australia standard time': 'GMT+0930', + 'w. 
australia standard time': 'GMT+8', + + 'central europe standard time': 'GMT+1', + 'eastern standard time': 'US/Eastern', + 'us eastern standard time': 'US/Eastern', + 'central standard time': 'US/Central', + 'mountain standard time': 'US/Mountain', + 'pacific standard time': 'US/Pacific', + 'mst': 'US/Mountain', 'pst': 'US/Pacific', + 'cst': 'US/Central', 'est': 'US/Eastern', + + 'gmt+0000': 'GMT+0', 'gmt+0': 'GMT+0', + + 'gmt+0100': 'GMT+1', 'gmt+0200': 'GMT+2', 'gmt+0300': 'GMT+3', + 'gmt+0400': 'GMT+4', 'gmt+0500': 'GMT+5', 'gmt+0600': 'GMT+6', + 'gmt+0700': 'GMT+7', 'gmt+0800': 'GMT+8', 'gmt+0900': 'GMT+9', + 'gmt+1000': 'GMT+10', 'gmt+1100': 'GMT+11', 'gmt+1200': 'GMT+12', + 'gmt+1300': 'GMT+13', + 'gmt-0100': 'GMT-1', 'gmt-0200': 'GMT-2', 'gmt-0300': 'GMT-3', + 'gmt-0400': 'GMT-4', 'gmt-0500': 'GMT-5', 'gmt-0600': 'GMT-6', + 'gmt-0700': 'GMT-7', 'gmt-0800': 'GMT-8', 'gmt-0900': 'GMT-9', + 'gmt-1000': 'GMT-10', 'gmt-1100': 'GMT-11', 'gmt-1200': 'GMT-12', + + 'gmt+1': 'GMT+1', 'gmt+2': 'GMT+2', 'gmt+3': 'GMT+3', + 'gmt+4': 'GMT+4', 'gmt+5': 'GMT+5', 'gmt+6': 'GMT+6', + 'gmt+7': 'GMT+7', 'gmt+8': 'GMT+8', 'gmt+9': 'GMT+9', + 'gmt+10': 'GMT+10', 'gmt+11': 'GMT+11', 'gmt+12': 'GMT+12', + 'gmt+13': 'GMT+13', + 'gmt-1': 'GMT-1', 'gmt-2': 'GMT-2', 'gmt-3': 'GMT-3', + 'gmt-4': 'GMT-4', 'gmt-5': 'GMT-5', 'gmt-6': 'GMT-6', + 'gmt-7': 'GMT-7', 'gmt-8': 'GMT-8', 'gmt-9': 'GMT-9', + 'gmt-10': 'GMT-10', 'gmt-11': 'GMT-11', 'gmt-12': 'GMT-12', + + 'gmt+130': 'GMT+0130', 'gmt+0130': 'GMT+0130', + 'gmt+230': 'GMT+0230', 'gmt+0230': 'GMT+0230', + 'gmt+330': 'GMT+0330', 'gmt+0330': 'GMT+0330', + 'gmt+430': 'GMT+0430', 'gmt+0430': 'GMT+0430', + 'gmt+530': 'GMT+0530', 'gmt+0530': 'GMT+0530', + 'gmt+630': 'GMT+0630', 'gmt+0630': 'GMT+0630', + 'gmt+730': 'GMT+0730', 'gmt+0730': 'GMT+0730', + 'gmt+830': 'GMT+0830', 'gmt+0830': 'GMT+0830', + 'gmt+930': 'GMT+0930', 'gmt+0930': 'GMT+0930', + 'gmt+1030': 'GMT+1030', + 'gmt+1130': 'GMT+1130', + 'gmt+1230': 'GMT+1230', + + 'gmt-130': 
'GMT-0130', 'gmt-0130': 'GMT-0130', + 'gmt-230': 'GMT-0230', 'gmt-0230': 'GMT-0230', + 'gmt-330': 'GMT-0330', 'gmt-0330': 'GMT-0330', + 'gmt-430': 'GMT-0430', 'gmt-0430': 'GMT-0430', + 'gmt-530': 'GMT-0530', 'gmt-0530': 'GMT-0530', + 'gmt-630': 'GMT-0630', 'gmt-0630': 'GMT-0630', + 'gmt-730': 'GMT-0730', 'gmt-0730': 'GMT-0730', + 'gmt-830': 'GMT-0830', 'gmt-0830': 'GMT-0830', + 'gmt-930': 'GMT-0930', 'gmt-0930': 'GMT-0930', + 'gmt-1030': 'GMT-1030', + 'gmt-1130': 'GMT-1130', + 'gmt-1230': 'GMT-1230', + + 'ut': 'Universal', + 'bst': 'GMT+1', 'mest': 'GMT+2', 'sst': 'GMT+2', + 'fst': 'GMT+2', 'wadt': 'GMT+8', 'eadt': 'GMT+11', 'nzdt': 'GMT+13', + 'wet': 'GMT', 'wat': 'GMT-1', 'at': 'GMT-2', 'ast': 'GMT-4', + 'nt': 'GMT-11', 'idlw': 'GMT-12', 'cet': 'GMT+1', 'cest': 'GMT+2', + 'met': 'GMT+1', + 'mewt': 'GMT+1', 'swt': 'GMT+1', 'fwt': 'GMT+1', 'eet': 'GMT+2', + 'eest': 'GMT+3', + 'bt': 'GMT+3', 'zp4': 'GMT+4', 'zp5': 'GMT+5', 'zp6': 'GMT+6', + 'wast': 'GMT+7', 'cct': 'GMT+8', 'jst': 'GMT+9', 'east': 'GMT+10', + 'gst': 'GMT+10', 'nzt': 'GMT+12', 'nzst': 'GMT+12', 'idle': 'GMT+12', + 'ret': 'GMT+4', 'ist': 'GMT+0530', 'edt': 'GMT-4', + +} + + +# some timezone definitions of the "-0400" are not working +# when upgrading +for hour in range(0, 13): + hour = hour + fhour = str(hour) + if len(fhour) == 1: + fhour = '0' + fhour + _old_zmap['-%s00' % fhour] = 'GMT-%i' % hour + _old_zmap['+%s00' % fhour] = 'GMT+%i' % hour + + +def _static_timezone_factory(data): + zone = data[0] + cls = type(zone, (StaticTzInfo,), dict( + zone=zone, + _utcoffset=memorized_timedelta(data[5][0][0]), + _tzname=data[6][:-1])) # strip the trailing null + return cls() + +_numeric_timezones = dict((key, _static_timezone_factory(data)) + for key, data in _numeric_timezone_data.items()) + + +class Timezone: + """ + Timezone information returned by PytzCache.__getitem__ + Adapts datetime.tzinfo object to DateTime._timezone interface + """ + def __init__(self, tzinfo): + self.tzinfo = tzinfo + + def 
info(self, t=None): + if t is None: + dt = datetime.utcnow().replace(tzinfo=pytz.utc) + else: + # can't use utcfromtimestamp past 2038 + dt = EPOCH + timedelta(0, t) + + # need to normalize tzinfo for the datetime to deal with + # daylight savings time. + normalized_dt = self.tzinfo.normalize(dt.astimezone(self.tzinfo)) + normalized_tzinfo = normalized_dt.tzinfo + + offset = normalized_tzinfo.utcoffset(normalized_dt) + secs = offset.days * 24 * 60 * 60 + offset.seconds + dst = normalized_tzinfo.dst(normalized_dt) + if dst == timedelta(0): + is_dst = 0 + else: + is_dst = 1 + return secs, is_dst, normalized_tzinfo.tzname(normalized_dt) + + +class PytzCache: + """ + Reimplementation of the DateTime._cache class that uses for timezone info + """ + + _zlst = pytz.common_timezones + _old_zlst # used by DateTime.TimeZones + _zmap = dict((name.lower(), name) for name in pytz.all_timezones) + _zmap.update(_old_zmap) # These must take priority + _zidx = _zmap.keys() + + def __getitem__(self, key): + name = self._zmap.get(key.lower(), key) # fallback to key + try: + return Timezone(pytz.timezone(name)) + except pytz.UnknownTimeZoneError: + try: + return Timezone(_numeric_timezones[name]) + except KeyError: + raise DateTimeError('Unrecognized timezone: %s' % key) diff --git a/lib/DateTime/tests/__init__.py b/lib/DateTime/tests/__init__.py new file mode 100644 index 00000000..e67bcb67 --- /dev/null +++ b/lib/DateTime/tests/__init__.py @@ -0,0 +1,15 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. 
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +# This file is needed to make this a package. diff --git a/lib/DateTime/tests/julian_testdata.txt b/lib/DateTime/tests/julian_testdata.txt new file mode 100644 index 00000000..386c3dae --- /dev/null +++ b/lib/DateTime/tests/julian_testdata.txt @@ -0,0 +1,57 @@ +1970-01-01 (1970, 1, 4) +1970-01-02 (1970, 1, 5) +1970-01-30 (1970, 5, 5) +1970-01-31 (1970, 5, 6) +1970-02-01 (1970, 5, 7) +1970-02-02 (1970, 6, 1) +1970-02-28 (1970, 9, 6) +1970-03-01 (1970, 9, 7) +1970-03-30 (1970, 14, 1) +1970-03-31 (1970, 14, 2) +1970-04-01 (1970, 14, 3) +1970-09-30 (1970, 40, 3) +1970-10-01 (1970, 40, 4) +1970-10-02 (1970, 40, 5) +1970-10-03 (1970, 40, 6) +1970-10-04 (1970, 40, 7) +1970-10-05 (1970, 41, 1) +1971-01-02 (1970, 53, 6) +1971-01-03 (1970, 53, 7) +1971-01-04 (1971, 1, 1) +1971-01-05 (1971, 1, 2) +1971-12-31 (1971, 52, 5) +1972-01-01 (1971, 52, 6) +1972-01-02 (1971, 52, 7) +1972-01-03 (1972, 1, 1) +1972-01-04 (1972, 1, 2) +1972-12-30 (1972, 52, 6) +1972-12-31 (1972, 52, 7) +1973-01-01 (1973, 1, 1) +1973-01-02 (1973, 1, 2) +1973-12-29 (1973, 52, 6) +1973-12-30 (1973, 52, 7) +1973-12-31 (1974, 1, 1) +1974-01-01 (1974, 1, 2) +1998-12-30 (1998, 53, 3) +1998-12-31 (1998, 53, 4) +1999-01-01 (1998, 53, 5) +1999-01-02 (1998, 53, 6) +1999-01-03 (1998, 53, 7) +1999-01-04 (1999, 1, 1) +1999-01-05 (1999, 1, 2) +1999-12-30 (1999, 52, 4) +1999-12-31 (1999, 52, 5) +2000-01-01 (1999, 52, 6) +2000-01-02 (1999, 52, 7) +2000-01-03 (2000, 1, 1) +2000-01-04 (2000, 1, 2) +2000-01-05 (2000, 1, 3) +2000-01-06 (2000, 1, 4) +2000-01-07 (2000, 1, 5) +2000-01-08 (2000, 1, 6) +2000-01-09 (2000, 1, 7) +2000-01-10 (2000, 2, 1) +2019-12-28 (2019, 52, 6) +2019-12-29 (2019, 52, 7) 
+2019-12-30 (2020, 1, 1) +2019-12-31 (2020, 1, 2) diff --git a/lib/DateTime/tests/test_datetime.py b/lib/DateTime/tests/test_datetime.py new file mode 100644 index 00000000..7172adb5 --- /dev/null +++ b/lib/DateTime/tests/test_datetime.py @@ -0,0 +1,686 @@ +############################################################################## +# +# Copyright (c) 2003 Zope Foundation and Contributors. +# All Rights Reserved. +# +# This software is subject to the provisions of the Zope Public License, +# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. +# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED +# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS +# FOR A PARTICULAR PURPOSE. +# +############################################################################## + +from datetime import date, datetime, tzinfo, timedelta +import math +import platform +import os +import sys +import time +import unittest + +import pytz + +from DateTime.DateTime import _findLocalTimeZoneName +from DateTime import DateTime + +if sys.version_info > (3, ): + import pickle + unicode = str + PY3K = True +else: + import cPickle as pickle + PY3K = False + +try: + __file__ +except NameError: + f = sys.argv[0] +else: + f = __file__ + +IS_PYPY = getattr(platform, 'python_implementation', lambda: None)() == 'PyPy' + +DATADIR = os.path.dirname(os.path.abspath(f)) +del f + +ZERO = timedelta(0) + + +class FixedOffset(tzinfo): + """Fixed offset in minutes east from UTC.""" + + def __init__(self, offset, name): + self.__offset = timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return ZERO + + +class DateTimeTests(unittest.TestCase): + + def _compare(self, dt1, dt2): + '''Compares the internal representation of dt1 with + the representation in dt2. 
Allows sub-millisecond variations. + Primarily for testing.''' + self.assertEqual(round(dt1._t, 3), round(dt2._t, 3)) + self.assertEqual(round(dt1._d, 9), round(dt2._d, 9)) + self.assertEqual(round(dt1.time, 9), round(dt2.time, 9)) + self.assertEqual(dt1.millis(), dt2.millis()) + self.assertEqual(dt1._micros, dt2._micros) + + def testBug1203(self): + # 01:59:60 occurred in old DateTime + dt = DateTime(7200, 'GMT') + self.assertTrue(str(dt).find('60') < 0, dt) + + def testDSTInEffect(self): + # Checks GMT offset for a DST date in the US/Eastern time zone + dt = DateTime(2000, 5, 9, 15, 0, 0, 'US/Eastern') + self.assertEqual(dt.toZone('GMT').hour(), 19, + (dt, dt.toZone('GMT'))) + + def testDSTNotInEffect(self): + # Checks GMT offset for a non-DST date in the US/Eastern time zone + dt = DateTime(2000, 11, 9, 15, 0, 0, 'US/Eastern') + self.assertEqual(dt.toZone('GMT').hour(), 20, + (dt, dt.toZone('GMT'))) + + def testAddPrecision(self): + # Precision of serial additions + dt = DateTime() + self.assertEqual(str(dt + 0.10 + 3.14 + 6.76 - 10), str(dt), + dt) + + def testConstructor3(self): + # Constructor from date/time string + dt = DateTime() + dt1s = '%d/%d/%d %d:%d:%f %s' % ( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second(), + dt.timezone()) + dt1 = DateTime(dt1s) + # Compare representations as it's the + # only way to compare the dates to the same accuracy + self.assertEqual(repr(dt), repr(dt1)) + + def testConstructor4(self): + # Constructor from time float + dt = DateTime() + dt1 = DateTime(float(dt)) + self._compare(dt, dt1) + + def testConstructor5(self): + # Constructor from time float and timezone + dt = DateTime() + dt1 = DateTime(float(dt), dt.timezone()) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) + dt1 = DateTime(float(dt), unicode(dt.timezone())) + self.assertEqual(str(dt), str(dt1), (dt, dt1)) + + def testConstructor6(self): + # Constructor from year and julian date + # This test must normalize the time zone, or it 
*will* break when + # DST changes! + dt1 = DateTime(2000, 5.500000578705) + dt = DateTime('2000/1/5 12:00:00.050 pm %s' % dt1.localZone()) + self._compare(dt, dt1) + + def testConstructor7(self): + # Constructor from parts + dt = DateTime() + dt1 = DateTime( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second(), + dt.timezone()) + # Compare representations as it's the + # only way to compare the dates to the same accuracy + self.assertEqual(repr(dt), repr(dt1)) + + def testDayOfWeek(self): + # Compare to the datetime.date value to make it locale independent + expected = date(2000, 6, 16).strftime('%A') + # strftime() used to always be passed a day of week of 0 + dt = DateTime('2000/6/16') + s = dt.strftime('%A') + self.assertEqual(s, expected, (dt, s)) + + def testOldDate(self): + # Fails when an 1800 date is displayed with negative signs + dt = DateTime('1830/5/6 12:31:46.213 pm') + dt1 = dt.toZone('GMT+6') + self.assertTrue(str(dt1).find('-') < 0, (dt, dt1)) + + def testSubtraction(self): + # Reconstruction of a DateTime from its parts, with subtraction + # this also tests the accuracy of addition and reconstruction + dt = DateTime() + dt1 = dt - 3.141592653 + dt2 = DateTime( + dt.year(), + dt.month(), + dt.day(), + dt.hour(), + dt.minute(), + dt.second()) + dt3 = dt2 - 3.141592653 + self.assertEqual(dt1, dt3, (dt, dt1, dt2, dt3)) + + def testTZ1add(self): + # Time zone manipulation: add to a date + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertTrue((dt + 1.0).equalTo(dt1)) + + def testTZ1sub(self): + # Time zone manipulation: subtract from a date + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertTrue((dt1 - 1.0).equalTo(dt)) + + def testTZ1diff(self): + # Time zone manipulation: diff two dates + dt = DateTime('1997/3/8 1:45am GMT-4') + dt1 = DateTime('1997/3/9 1:45pm GMT+8') + self.assertEqual(dt1 - dt, 1.0, (dt, dt1)) + + def 
test_compare_methods(self): + # Compare two dates using several methods + dt = DateTime('1997/1/1') + dt1 = DateTime('1997/2/2') + self.assertTrue(dt1.greaterThan(dt)) + self.assertTrue(dt1.greaterThanEqualTo(dt)) + self.assertTrue(dt.lessThan(dt1)) + self.assertTrue(dt.lessThanEqualTo(dt1)) + self.assertTrue(dt.notEqualTo(dt1)) + self.assertFalse(dt.equalTo(dt1)) + + def test_compare_methods_none(self): + # Compare a date to None + dt = DateTime('1997/1/1') + self.assertTrue(dt.greaterThan(None)) + self.assertTrue(dt.greaterThanEqualTo(None)) + self.assertFalse(dt.lessThan(None)) + self.assertFalse(dt.lessThanEqualTo(None)) + self.assertTrue(dt.notEqualTo(None)) + self.assertFalse(dt.equalTo(None)) + + def test_pickle(self): + dt = DateTime() + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_tz(self): + dt = DateTime('2002/5/2 8:00am GMT+8') + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_numerical_tz(self): + for dt_str in ('2007/01/02 12:34:56.789 +0300', + '2007/01/02 12:34:56.789 +0430', + '2007/01/02 12:34:56.789 -1234'): + dt = DateTime(dt_str) + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_with_micros(self): + dt = DateTime('2002/5/2 8:00:14.123 GMT+8') + data = pickle.dumps(dt, 1) + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_old(self): + dt = DateTime('2002/5/2 8:00am GMT+0') + data = ('(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05' + '_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq' + '\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU' + 
'\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU\x02amq' + '\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq\x12K\x00U' + '\x07_microsq\x13L1020326400000000L\nU\x02_dq\x14G@\xe2\x12j\xaa' + '\xaa\xaa\xabU\x07_secondq\x15G\x00\x00\x00\x00\x00\x00\x00\x00U' + '\x03_tzq\x16U\x05GMT+0q\x17U\x06_monthq\x18K\x05U' + '\x0f_timezone_naiveq\x19I00\nU\x04_dayq\x1aK\x02U\x05_yearq' + '\x1bM\xd2\x07U\x08_nearsecq\x1cG\x00\x00\x00\x00\x00\x00\x00' + '\x00U\x07_pmhourq\x1dK\x08U\n_dayoffsetq\x1eK\x04U\x04timeq' + '\x1fG?\xd5UUUV\x00\x00ub.') + if PY3K: + data = data.encode('latin-1') + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def test_pickle_old_without_micros(self): + dt = DateTime('2002/5/2 8:00am GMT+0') + data = ('(cDateTime.DateTime\nDateTime\nq\x01Noq\x02}q\x03(U\x05' + '_amonq\x04U\x03Mayq\x05U\x05_adayq\x06U\x03Thuq\x07U\x05_pmonq' + '\x08h\x05U\x05_hourq\tK\x08U\x05_fmonq\nh\x05U\x05_pdayq\x0bU' + '\x04Thu.q\x0cU\x05_fdayq\rU\x08Thursdayq\x0eU\x03_pmq\x0fU' + '\x02amq\x10U\x02_tq\x11GA\xcehy\x00\x00\x00\x00U\x07_minuteq' + '\x12K\x00U\x02_dq\x13G@\xe2\x12j\xaa\xaa\xaa\xabU\x07_secondq' + '\x14G\x00\x00\x00\x00\x00\x00\x00\x00U\x03_tzq\x15U\x05GMT+0q' + '\x16U\x06_monthq\x17K\x05U\x0f_timezone_naiveq\x18I00\nU' + '\x04_dayq\x19K\x02U\x05_yearq\x1aM\xd2\x07U\x08_nearsecq' + '\x1bG\x00\x00\x00\x00\x00\x00\x00\x00U\x07_pmhourq\x1cK\x08U' + '\n_dayoffsetq\x1dK\x04U\x04timeq\x1eG?\xd5UUUV\x00\x00ub.') + if PY3K: + data = data.encode('latin-1') + new = pickle.loads(data) + for key in DateTime.__slots__: + self.assertEqual(getattr(dt, key), getattr(new, key)) + + def testTZ2(self): + # Time zone manipulation test 2 + dt = DateTime() + dt1 = dt.toZone('GMT') + s = dt.second() + s1 = dt1.second() + self.assertEqual(s, s1, (dt, dt1, s, s1)) + + def testTZDiffDaylight(self): + # Diff dates across daylight savings dates + dt = DateTime('2000/6/8 1:45am US/Eastern') + dt1 = 
DateTime('2000/12/8 12:45am US/Eastern') + self.assertEqual(dt1 - dt, 183, (dt, dt1, dt1 - dt)) + + def testY10KDate(self): + # Comparison of a Y10K date and a Y2K date + dt = DateTime('10213/09/21') + dt1 = DateTime(2000, 1, 1) + + dsec = (dt.millis() - dt1.millis()) / 1000.0 + ddays = math.floor((dsec / 86400.0) + 0.5) + + self.assertEqual(ddays, 3000000, ddays) + + def test_tzoffset(self): + # Test time-zone given as an offset + + # GMT + dt = DateTime('Tue, 10 Sep 2001 09:41:03 GMT') + self.assertEqual(dt.tzoffset(), 0) + + # Timezone by name, a timezone that hasn't got daylightsaving. + dt = DateTime('Tue, 2 Mar 2001 09:41:03 GMT+3') + self.assertEqual(dt.tzoffset(), 10800) + + # Timezone by name, has daylightsaving but is not in effect. + dt = DateTime('Tue, 21 Jan 2001 09:41:03 PST') + self.assertEqual(dt.tzoffset(), -28800) + + # Timezone by name, with daylightsaving in effect + dt = DateTime('Tue, 24 Aug 2001 09:41:03 PST') + self.assertEqual(dt.tzoffset(), -25200) + + # A negative numerical timezone + dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0400') + self.assertEqual(dt.tzoffset(), -14400) + + # A positive numerical timzone + dt = DateTime('Tue, 6 Dec 1966 01:41:03 +0200') + self.assertEqual(dt.tzoffset(), 7200) + + # A negative numerical timezone with minutes. + dt = DateTime('Tue, 24 Jul 2001 09:41:03 -0637') + self.assertEqual(dt.tzoffset(), -23820) + + # A positive numerical timezone with minutes. 
+ dt = DateTime('Tue, 24 Jul 2001 09:41:03 +0425') + self.assertEqual(dt.tzoffset(), 15900) + + def testISO8601(self): + # ISO8601 reference dates + ref0 = DateTime('2002/5/2 8:00am GMT') + ref1 = DateTime('2002/5/2 8:00am US/Eastern') + ref2 = DateTime('2006/11/6 10:30 GMT') + ref3 = DateTime('2004/06/14 14:30:15 GMT-3') + ref4 = DateTime('2006/01/01 GMT') + + # Basic tests + # Though this is timezone naive and according to specification should + # be interpreted in the local timezone, to preserve backwards + # compatibility with previously expected behaviour. + isoDt = DateTime('2002-05-02T08:00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00Z') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00+00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08:00:00-04:00') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('2002-05-02 08:00:00-04:00') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 1386: the colon in the timezone offset is optional + isoDt = DateTime('2002-05-02T08:00:00-0400') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 2191: date reduced formats + isoDt = DateTime('2006-01-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('200601-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('20060101') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('2006-01') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('200601') + self.assertTrue(ref4.equalTo(isoDt)) + isoDt = DateTime('2006') + self.assertTrue(ref4.equalTo(isoDt)) + + # Bug 2191: date/time separators are also optional + isoDt = DateTime('20020502T08:00:00') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T080000') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('20020502T080000') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: timezones with only one digit for hour + isoDt = DateTime('20020502T080000+0') + 
self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('20020502 080000-4') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('20020502T080000-400') + self.assertTrue(ref1.equalTo(isoDt)) + isoDt = DateTime('20020502T080000-4:00') + self.assertTrue(ref1.equalTo(isoDt)) + + # Bug 2191: optional seconds/minutes + isoDt = DateTime('2002-05-02T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-05-02T08') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: week format + isoDt = DateTime('2002-W18-4T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002-W184T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002W18-4T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002W184T08') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2004-W25-1T14:30:15-03:00') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-W25T14:30:15-03:00') + self.assertTrue(ref3.equalTo(isoDt)) + + # Bug 2191: day of year format + isoDt = DateTime('2002-122T0800') + self.assertTrue(ref0.equalTo(isoDt)) + isoDt = DateTime('2002122T0800') + self.assertTrue(ref0.equalTo(isoDt)) + + # Bug 2191: hours/minutes fractions + isoDt = DateTime('2006-11-06T10.5') + self.assertTrue(ref2.equalTo(isoDt)) + isoDt = DateTime('2006-11-06T10,5') + self.assertTrue(ref2.equalTo(isoDt)) + isoDt = DateTime('20040614T1430.25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-06-14T1430,25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('2004-06-14T14:30.25-3') + self.assertTrue(ref3.equalTo(isoDt)) + isoDt = DateTime('20040614T14:30,25-3') + self.assertTrue(ref3.equalTo(isoDt)) + + # ISO8601 standard format + iso8601_string = '2002-05-02T08:00:00-04:00' + iso8601DT = DateTime(iso8601_string) + self.assertEqual(iso8601_string, iso8601DT.ISO8601()) + + # ISO format with no timezone + isoDt = DateTime('2006-01-01 00:00:00') + self.assertTrue(ref4.equalTo(isoDt)) + + def testJulianWeek(self): + # 
Check JulianDayWeek function + fn = os.path.join(DATADIR, 'julian_testdata.txt') + with open(fn, 'r') as fd: + lines = fd.readlines() + for line in lines: + d = DateTime(line[:10]) + result_from_mx = tuple(map(int, line[12:-2].split(','))) + self.assertEqual(result_from_mx[1], d.week()) + + def testCopyConstructor(self): + d = DateTime('2004/04/04') + self.assertEqual(DateTime(d), d) + self.assertEqual(str(DateTime(d)), str(d)) + d2 = DateTime('1999/04/12 01:00:00') + self.assertEqual(DateTime(d2), d2) + self.assertEqual(str(DateTime(d2)), str(d2)) + + def testCopyConstructorPreservesTimezone(self): + # test for https://bugs.launchpad.net/zope2/+bug/200007 + # This always worked in the local timezone, so we need at least + # two tests with different zones to be sure at least one of them + # is not local. + d = DateTime('2004/04/04') + self.assertEqual(DateTime(d).timezone(), d.timezone()) + d2 = DateTime('2008/04/25 12:00:00 EST') + self.assertEqual(DateTime(d2).timezone(), d2.timezone()) + self.assertEqual(str(DateTime(d2)), str(d2)) + d3 = DateTime('2008/04/25 12:00:00 PST') + self.assertEqual(DateTime(d3).timezone(), d3.timezone()) + self.assertEqual(str(DateTime(d3)), str(d3)) + + def testRFC822(self): + # rfc822 conversion + dt = DateTime('2002-05-02T08:00:00+00:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0000') + + dt = DateTime('2002-05-02T08:00:00+02:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 +0200') + + dt = DateTime('2002-05-02T08:00:00-02:00') + self.assertEqual(dt.rfc822(), 'Thu, 02 May 2002 08:00:00 -0200') + + # Checking that conversion from local time is working. 
+ dt = DateTime() + dts = dt.rfc822().split(' ') + times = dts[4].split(':') + _isDST = time.localtime(time.time())[8] + if _isDST: + offset = time.altzone + else: + offset = time.timezone + self.assertEqual(dts[0], dt.aDay() + ',') + self.assertEqual(int(dts[1]), dt.day()) + self.assertEqual(dts[2], dt.aMonth()) + self.assertEqual(int(dts[3]), dt.year()) + self.assertEqual(int(times[0]), dt.h_24()) + self.assertEqual(int(times[1]), dt.minute()) + self.assertEqual(int(times[2]), int(dt.second())) + self.assertEqual(dts[5], "%+03d%02d" % divmod((-offset / 60), 60)) + + def testInternationalDateformat(self): + for year in (1990, 2001, 2020): + for month in (1, 12): + for day in (1, 12, 28, 31): + try: + d_us = DateTime("%d/%d/%d" % (year, month, day)) + except Exception: + continue + + d_int = DateTime("%d.%d.%d" % (day, month, year), + datefmt="international") + self.assertEqual(d_us, d_int) + + d_int = DateTime("%d/%d/%d" % (day, month, year), + datefmt="international") + self.assertEqual(d_us, d_int) + + def test_intl_format_hyphen(self): + d_jan = DateTime('2011-01-11 GMT') + d_nov = DateTime('2011-11-01 GMT') + d_us = DateTime('11-01-2011 GMT') + d_int = DateTime('11-01-2011 GMT', datefmt="international") + self.assertNotEqual(d_us, d_int) + self.assertEqual(d_us, d_nov) + self.assertEqual(d_int, d_jan) + + def test_calcTimezoneName(self): + from DateTime.interfaces import TimeError + timezone_dependent_epoch = 2177452800 + try: + DateTime()._calcTimezoneName(timezone_dependent_epoch, 0) + except TimeError: + self.fail('Zope Collector issue #484 (negative time bug): ' + 'TimeError raised') + + def testStrftimeTZhandling(self): + # strftime timezone testing + # This is a test for collector issue #1127 + format = '%Y-%m-%d %H:%M %Z' + dt = DateTime('Wed, 19 Nov 2003 18:32:07 -0215') + dt_string = dt.strftime(format) + dt_local = dt.toZone(_findLocalTimeZoneName(0)) + dt_localstring = dt_local.strftime(format) + self.assertEqual(dt_string, dt_localstring) + + def 
testStrftimeFarDates(self): + # Checks strftime in dates <= 1900 or >= 2038 + dt = DateTime('1900/01/30') + self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/1900') + dt = DateTime('2040/01/30') + self.assertEqual(dt.strftime('%d/%m/%Y'), '30/01/2040') + + def testZoneInFarDates(self): + # Checks time zone in dates <= 1900 or >= 2038 + dt1 = DateTime('2040/01/30 14:33 GMT+1') + dt2 = DateTime('2040/01/30 11:33 GMT-2') + self.assertEqual(dt1.strftime('%d/%m/%Y %H:%M'), + dt2.strftime('%d/%m/%Y %H:%M')) + + def testStrftimeUnicode(self): + if IS_PYPY: + # Using Non-Ascii characters for strftime doesn't work in PyPy + # https://bitbucket.org/pypy/pypy/issues/2161/pypy3-strftime-does-not-accept-unicode + return + dt = DateTime('2002-05-02T08:00:00+00:00') + uchar = b'\xc3\xa0'.decode('utf-8') + ok = dt.strftime('Le %d/%m/%Y a %Hh%M').replace('a', uchar) + ustr = b'Le %d/%m/%Y \xc3\xa0 %Hh%M'.decode('utf-8') + self.assertEqual(dt.strftime(ustr), ok) + + def testTimezoneNaiveHandling(self): + # checks that we assign timezone naivity correctly + dt = DateTime('2007-10-04T08:00:00+00:00') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007-10-04T08:00:00Z') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007-10-04T08:00:00') + self.assertTrue(dt.timezoneNaive(), + 'error with naivity handling in __parse_iso8601') + dt = DateTime('2007/10/04 15:12:33.487618 GMT+1') + self.assertFalse(dt.timezoneNaive(), + 'error with naivity handling in _parse') + dt = DateTime('2007/10/04 15:12:33.487618') + self.assertTrue(dt.timezoneNaive(), + 'error with naivity handling in _parse') + dt = DateTime() + self.assertFalse(dt.timezoneNaive(), + 'error with naivity for current time') + s = '2007-10-04T08:00:00' + dt = DateTime(s) + self.assertEqual(s, dt.ISO8601()) + s = '2007-10-04T08:00:00+00:00' + dt = DateTime(s) + self.assertEqual(s, dt.ISO8601()) + + def 
testConversions(self): + sdt0 = datetime.now() # this is a timezone naive datetime + dt0 = DateTime(sdt0) + self.assertTrue(dt0.timezoneNaive(), (sdt0, dt0)) + sdt1 = datetime(2007, 10, 4, 18, 14, 42, 580, pytz.utc) + dt1 = DateTime(sdt1) + self.assertFalse(dt1.timezoneNaive(), (sdt1, dt1)) + + # convert back + sdt2 = dt0.asdatetime() + self.assertEqual(sdt0, sdt2) + sdt3 = dt1.utcdatetime() # this returns a timezone naive datetime + self.assertEqual(sdt1.hour, sdt3.hour) + + dt4 = DateTime('2007-10-04T10:00:00+05:00') + sdt4 = datetime(2007, 10, 4, 5, 0) + self.assertEqual(dt4.utcdatetime(), sdt4) + self.assertEqual(dt4.asdatetime(), sdt4.replace(tzinfo=pytz.utc)) + + dt5 = DateTime('2007-10-23 10:00:00 US/Eastern') + tz = pytz.timezone('US/Eastern') + sdt5 = datetime(2007, 10, 23, 10, 0, tzinfo=tz) + dt6 = DateTime(sdt5) + self.assertEqual(dt5.asdatetime(), sdt5) + self.assertEqual(dt6.asdatetime(), sdt5) + self.assertEqual(dt5, dt6) + self.assertEqual(dt5.asdatetime().tzinfo, tz) + self.assertEqual(dt6.asdatetime().tzinfo, tz) + + def testBasicTZ(self): + # psycopg2 supplies it's own tzinfo instances, with no `zone` attribute + tz = FixedOffset(60, 'GMT+1') + dt1 = datetime(2008, 8, 5, 12, 0, tzinfo=tz) + DT = DateTime(dt1) + dt2 = DT.asdatetime() + offset1 = dt1.tzinfo.utcoffset(dt1) + offset2 = dt2.tzinfo.utcoffset(dt2) + self.assertEqual(offset1, offset2) + + def testEDTTimezone(self): + # should be able to parse EDT timezones: see lp:599856. 
+ dt = DateTime("Mon, 28 Jun 2010 10:12:25 EDT") + self.assertEqual(dt.Day(), 'Monday') + self.assertEqual(dt.day(), 28) + self.assertEqual(dt.Month(), 'June') + self.assertEqual(dt.timezone(), 'GMT-4') + + def testParseISO8601(self): + parsed = DateTime()._parse_iso8601('2010-10-10') + self.assertEqual(parsed, (2010, 10, 10, 0, 0, 0, 'GMT+0000')) + + def test_interface(self): + from DateTime.interfaces import IDateTime + self.assertTrue(IDateTime.providedBy(DateTime())) + + def test_security(self): + dt = DateTime() + self.assertEqual(dt.__roles__, None) + self.assertEqual(dt.__allow_access_to_unprotected_subobjects__, 1) + + +def test_suite(): + import doctest + return unittest.TestSuite([ + unittest.makeSuite(DateTimeTests), + doctest.DocFileSuite('DateTime.txt', package='DateTime'), + doctest.DocFileSuite('pytz.txt', package='DateTime'), + ]) diff --git a/lib/backports/configparser/__init__.py b/lib/backports/configparser/__init__.py new file mode 100644 index 00000000..06d7a085 --- /dev/null +++ b/lib/backports/configparser/__init__.py @@ -0,0 +1,1390 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +"""Configuration file parser. + +A configuration file consists of sections, lead by a "[section]" header, +and followed by "name: value" entries, with continuations and such in +the style of RFC 822. + +Intrinsic defaults can be specified by passing them into the +ConfigParser constructor as a dictionary. + +class: + +ConfigParser -- responsible for parsing a list of + configuration files, and managing the parsed database. + + methods: + + __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, + delimiters=('=', ':'), comment_prefixes=('#', ';'), + inline_comment_prefixes=None, strict=True, + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=, converters=): + Create the parser. When `defaults' is given, it is initialized into the + dictionary or intrinsic defaults. 
The keys must be strings, the values + must be appropriate for %()s string interpolation. + + When `dict_type' is given, it will be used to create the dictionary + objects for the list of sections, for the options within a section, and + for the default values. + + When `delimiters' is given, it will be used as the set of substrings + that divide keys from values. + + When `comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in empty lines. Comments can be + indented. + + When `inline_comment_prefixes' is given, it will be used as the set of + substrings that prefix comments in non-empty lines. + + When `strict` is True, the parser won't allow for any section or option + duplicates while reading from a single source (file, string or + dictionary). Default is True. + + When `empty_lines_in_values' is False (default: True), each empty line + marks the end of an option. Otherwise, internal empty lines of + a multiline option are kept as part of the value. + + When `allow_no_value' is True (default: False), options without + values are accepted; the value presented for these is None. + + sections() + Return all the configuration section names, sans DEFAULT. + + has_section(section) + Return whether the given section exists. + + has_option(section, option) + Return whether the given option exists in the given section. + + options(section) + Return list of configuration options for the named section. + + read(filenames, encoding=None) + Read and parse the list of named configuration files, given by + name. A single filename is also allowed. Non-existing files + are ignored. Return list of successfully read files. + + read_file(f, filename=None) + Read and parse one configuration file, given as a file object. + The filename defaults to f.name; it is only used in error + messages (if f has no `name' attribute, the string `' is used). + + read_string(string) + Read configuration from a given string. 
+ + read_dict(dictionary) + Read configuration from a dictionary. Keys are section names, + values are dictionaries with keys and values that should be present + in the section. If the used dictionary type preserves order, sections + and their keys will be added in order. Values are automatically + converted to strings. + + get(section, option, raw=False, vars=None, fallback=_UNSET) + Return a string value for the named option. All % interpolations are + expanded in the return values, based on the defaults passed into the + constructor and the DEFAULT section. Additional substitutions may be + provided using the `vars' argument, which must be a dictionary whose + contents override any pre-existing defaults. If `option' is a key in + `vars', the value from `vars' is used. + + getint(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to an integer. + + getfloat(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a float. + + getboolean(section, options, raw=False, vars=None, fallback=_UNSET) + Like get(), but convert value to a boolean (currently case + insensitively defined as 0, false, no, off for False, and 1, true, + yes, on for True). Returns False or True. + + items(section=_UNSET, raw=False, vars=None) + If section is given, return a list of tuples with (name, value) for + each option in the section. Otherwise, return a list of tuples with + (section_name, section_proxy) for each section, including DEFAULTSECT. + + remove_section(section) + Remove the given file section and all its options. + + remove_option(section, option) + Remove the given option from the given section. + + set(section, option, value) + Set the given option. + + write(fp, space_around_delimiters=True) + Write the configuration state in .ini format. If + `space_around_delimiters' is True (the default), delimiters + between keys and values are surrounded by spaces. 
+""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +from collections import MutableMapping +import functools +import io +import itertools +import re +import sys +import warnings + +from backports.configparser.helpers import OrderedDict as _default_dict +from backports.configparser.helpers import ChainMap as _ChainMap +from backports.configparser.helpers import from_none, open, str, PY2 + +__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", + "NoOptionError", "InterpolationError", "InterpolationDepthError", + "InterpolationMissingOptionError", "InterpolationSyntaxError", + "ParsingError", "MissingSectionHeaderError", + "ConfigParser", "SafeConfigParser", "RawConfigParser", + "Interpolation", "BasicInterpolation", "ExtendedInterpolation", + "LegacyInterpolation", "SectionProxy", "ConverterMapping", + "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] + +DEFAULTSECT = "DEFAULT" + +MAX_INTERPOLATION_DEPTH = 10 + + +# exception classes +class Error(Exception): + """Base class for ConfigParser exceptions.""" + + def __init__(self, msg=''): + self.message = msg + Exception.__init__(self, msg) + + def __repr__(self): + return self.message + + __str__ = __repr__ + + +class NoSectionError(Error): + """Raised when no section matches a requested option.""" + + def __init__(self, section): + Error.__init__(self, 'No section: %r' % (section,)) + self.section = section + self.args = (section, ) + + +class DuplicateSectionError(Error): + """Raised when a section is repeated in an input source. + + Possible repetitions that raise this exception are: multiple creation + using the API or in strict parsers when a section is found more than once + in a single input file, string or dictionary. 
+ """ + + def __init__(self, section, source=None, lineno=None): + msg = [repr(section), " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": section ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Section ") + Error.__init__(self, "".join(msg)) + self.section = section + self.source = source + self.lineno = lineno + self.args = (section, source, lineno) + + +class DuplicateOptionError(Error): + """Raised by strict parsers when an option is repeated in an input source. + + Current implementation raises this exception only when an option is found + more than once in a single file, string or dictionary. + """ + + def __init__(self, section, option, source=None, lineno=None): + msg = [repr(option), " in section ", repr(section), + " already exists"] + if source is not None: + message = ["While reading from ", repr(source)] + if lineno is not None: + message.append(" [line {0:2d}]".format(lineno)) + message.append(": option ") + message.extend(msg) + msg = message + else: + msg.insert(0, "Option ") + Error.__init__(self, "".join(msg)) + self.section = section + self.option = option + self.source = source + self.lineno = lineno + self.args = (section, option, source, lineno) + + +class NoOptionError(Error): + """A requested option was not found.""" + + def __init__(self, option, section): + Error.__init__(self, "No option %r in section: %r" % + (option, section)) + self.option = option + self.section = section + self.args = (option, section) + + +class InterpolationError(Error): + """Base class for interpolation-related exceptions.""" + + def __init__(self, option, section, msg): + Error.__init__(self, msg) + self.option = option + self.section = section + self.args = (option, section, msg) + + +class InterpolationMissingOptionError(InterpolationError): + """A string substitution required a setting which was not 
available.""" + + def __init__(self, option, section, rawval, reference): + msg = ("Bad value substitution: option {0!r} in section {1!r} contains " + "an interpolation key {2!r} which is not a valid option name. " + "Raw value: {3!r}".format(option, section, reference, rawval)) + InterpolationError.__init__(self, option, section, msg) + self.reference = reference + self.args = (option, section, rawval, reference) + + +class InterpolationSyntaxError(InterpolationError): + """Raised when the source text contains invalid syntax. + + Current implementation raises this exception when the source text into + which substitutions are made does not conform to the required syntax. + """ + + +class InterpolationDepthError(InterpolationError): + """Raised when substitutions are nested too deeply.""" + + def __init__(self, option, section, rawval): + msg = ("Recursion limit exceeded in value substitution: option {0!r} " + "in section {1!r} contains an interpolation key which " + "cannot be substituted in {2} steps. Raw value: {3!r}" + "".format(option, section, MAX_INTERPOLATION_DEPTH, + rawval)) + InterpolationError.__init__(self, option, section, msg) + self.args = (option, section, rawval) + + +class ParsingError(Error): + """Raised when a configuration file does not follow legal syntax.""" + + def __init__(self, source=None, filename=None): + # Exactly one of `source'/`filename' arguments has to be given. + # `filename' kept for compatibility. + if filename and source: + raise ValueError("Cannot specify both `filename' and `source'. " + "Use `source'.") + elif not filename and not source: + raise ValueError("Required argument `source' not given.") + elif filename: + source = filename + Error.__init__(self, 'Source contains parsing errors: %r' % source) + self.source = source + self.errors = [] + self.args = (source, ) + + @property + def filename(self): + """Deprecated, use `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. 
" + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + return self.source + + @filename.setter + def filename(self, value): + """Deprecated, user `source'.""" + warnings.warn( + "The 'filename' attribute will be removed in future versions. " + "Use 'source' instead.", + DeprecationWarning, stacklevel=2 + ) + self.source = value + + def append(self, lineno, line): + self.errors.append((lineno, line)) + self.message += '\n\t[line %2d]: %s' % (lineno, line) + + +class MissingSectionHeaderError(ParsingError): + """Raised when a key-value pair is found before any section header.""" + + def __init__(self, filename, lineno, line): + Error.__init__( + self, + 'File contains no section headers.\nfile: %r, line: %d\n%r' % + (filename, lineno, line)) + self.source = filename + self.lineno = lineno + self.line = line + self.args = (filename, lineno, line) + + +# Used in parser getters to indicate the default behaviour when a specific +# option is not found it to raise an exception. Created to enable `None' as +# a valid fallback value. +_UNSET = object() + + +class Interpolation(object): + """Dummy interpolation that passes the value through with no changes.""" + + def before_get(self, parser, section, option, value, defaults): + return value + + def before_set(self, parser, section, option, value): + return value + + def before_read(self, parser, section, option, value): + return value + + def before_write(self, parser, section, option, value): + return value + + +class BasicInterpolation(Interpolation): + """Interpolation as implemented in the classic ConfigParser. + + The option values can contain format strings which refer to other values in + the same section, or values in the special default section. + + For example: + + something: %(dir)s/whatever + + would resolve the "%(dir)s" to the value of dir. All reference + expansions are done late, on demand. If a user needs to use a bare % in + a configuration file, she can escape it by writing %%. 
Other % usage + is considered a user error and raises `InterpolationSyntaxError'.""" + + _KEYCRE = re.compile(r"%\(([^)]+)\)s") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('%%', '') # escaped percent signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '%' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('%'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("%") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "%": + accum.append("%") + rest = rest[2:] + elif c == "(": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + var = parser.optionxform(m.group(1)) + rest = rest[m.end():] + try: + v = map[var] + except KeyError: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, var)) + if "%" in v: + self._interpolate_some(parser, option, accum, v, + section, map, depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'%%' must be followed by '%%' or '(', " + "found: %r" % (rest,)) + + +class ExtendedInterpolation(Interpolation): + """Advanced variant of interpolation, supports the syntax used by + `zc.buildout'. 
Enables interpolation between sections.""" + + _KEYCRE = re.compile(r"\$\{([^}]+)\}") + + def before_get(self, parser, section, option, value, defaults): + L = [] + self._interpolate_some(parser, option, L, value, section, defaults, 1) + return ''.join(L) + + def before_set(self, parser, section, option, value): + tmp_value = value.replace('$$', '') # escaped dollar signs + tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax + if '$' in tmp_value: + raise ValueError("invalid interpolation syntax in %r at " + "position %d" % (value, tmp_value.find('$'))) + return value + + def _interpolate_some(self, parser, option, accum, rest, section, map, + depth): + rawval = parser.get(section, option, raw=True, fallback=rest) + if depth > MAX_INTERPOLATION_DEPTH: + raise InterpolationDepthError(option, section, rawval) + while rest: + p = rest.find("$") + if p < 0: + accum.append(rest) + return + if p > 0: + accum.append(rest[:p]) + rest = rest[p:] + # p is no longer used + c = rest[1:2] + if c == "$": + accum.append("$") + rest = rest[2:] + elif c == "{": + m = self._KEYCRE.match(rest) + if m is None: + raise InterpolationSyntaxError(option, section, + "bad interpolation variable reference %r" % rest) + path = m.group(1).split(':') + rest = rest[m.end():] + sect = section + opt = option + try: + if len(path) == 1: + opt = parser.optionxform(path[0]) + v = map[opt] + elif len(path) == 2: + sect = path[0] + opt = parser.optionxform(path[1]) + v = parser.get(sect, opt, raw=True) + else: + raise InterpolationSyntaxError( + option, section, + "More than one ':' found: %r" % (rest,)) + except (KeyError, NoSectionError, NoOptionError): + raise from_none(InterpolationMissingOptionError( + option, section, rawval, ":".join(path))) + if "$" in v: + self._interpolate_some(parser, opt, accum, v, sect, + dict(parser.items(sect, raw=True)), + depth + 1) + else: + accum.append(v) + else: + raise InterpolationSyntaxError( + option, section, + "'$' must be followed by '$' or '{', " + 
"found: %r" % (rest,)) + + +class LegacyInterpolation(Interpolation): + """Deprecated interpolation used in old versions of ConfigParser. + Use BasicInterpolation or ExtendedInterpolation instead.""" + + _KEYCRE = re.compile(r"%\(([^)]*)\)s|.") + + def before_get(self, parser, section, option, value, vars): + rawval = value + depth = MAX_INTERPOLATION_DEPTH + while depth: # Loop through this until it's done + depth -= 1 + if value and "%(" in value: + replace = functools.partial(self._interpolation_replace, + parser=parser) + value = self._KEYCRE.sub(replace, value) + try: + value = value % vars + except KeyError as e: + raise from_none(InterpolationMissingOptionError( + option, section, rawval, e.args[0])) + else: + break + if value and "%(" in value: + raise InterpolationDepthError(option, section, rawval) + return value + + def before_set(self, parser, section, option, value): + return value + + @staticmethod + def _interpolation_replace(match, parser): + s = match.group(1) + if s is None: + return match.group() + else: + return "%%(%s)s" % parser.optionxform(s) + + +class RawConfigParser(MutableMapping): + """ConfigParser that does not do interpolation.""" + + # Regular expressions for parsing section headers and options + _SECT_TMPL = r""" + \[ # [ + (?P
[^]]+) # very permissive! + \] # ] + """ + _OPT_TMPL = r""" + (?P