deploy.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Move files to production.
import argparse
import string
import os
import sys
import doctest
import json
from datetime import date

import httplib2

from FtpWrapper import FtpWrapper
import freeze
from application.recentfeed import RecentFeed
from application import app, bills

current_session = app.session
app.url_root = 'http://extras.denverpost.com/app/bill-tracker/'
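
# This script freezes the application to flat files (via freeze.freezer), FTPs the
# build in application/build/ to the production server, and sends PURGE requests to
# extras.denverpost.com to bust the cache on each uploaded URL. The FTP credentials
# come from the FTP_USER, FTP_HOST and FTP_PORT environment variables; FtpWrapper
# is assumed to handle the connection details.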


def get_news(slug, url, days=7):
    """ Download and cache items from the RSS feeds we track.
        """
    if not os.path.isdir('_input/news'):
        os.mkdir('_input/news')
    rf = RecentFeed()
    rf.get(url)
    rf.parse()
    rf.days = days
    items = []
    for item in rf.recently():
        items.append(dict(published=item['published'],
                          title=item['title'],
                          summary=item['summary'],
                          link=item['link'],
                          links=item['links']))
    # Write the items to a dated JSON cache, _input/news/<slug>_<date>_<days>.json
    today = date.today()
    filename = '_input/news/%s_%s_%d.json' % (slug, today, days)
    with open(filename, 'wb') as fh:
        json.dump(items, fh)
    return True


def main(args):
    """ Turn every URL into a flatfile, ftp it to prod.
        >>> args = build_parser(['--verbose'])
        >>> main(args)
        False
        """
    if args.do_freeze:
        freeze.freezer.freeze()

    if args.get_news:
        get_news('articles', 'http://www.denverpost.com/politics/colorado-legislature/feed/')
        get_news('articles', 'http://www.denverpost.com/politics/colorado-legislature/feed/', 1)

    # Get the list of days we have The Day and The Week reports for.
    days = bills.get_session_days(app.session, True)
    filename = '_input/days_%s.json' % app.session
    with open(filename, 'wb') as fh:
        json.dump(days, fh)

    weeks = bills.get_session_weeks(app.session, True)
    filename = '_input/weeks_%s.json' % app.session
    with open(filename, 'wb') as fh:
        json.dump(weeks, fh)

    if not args.do_ftp:
        return False

    urls_updated = []
    basedir = 'application/build/'
    os.chdir(basedir)
    ftp_path = '/DenverPost/app/bill-tracker/'
    ftp_config = {
        'user': os.environ.get('FTP_USER'),
        'host': os.environ.get('FTP_HOST'),
        'port': os.environ.get('FTP_PORT'),
        'upload_dir': ftp_path
    }
    if args.verbose:
        print ftp_config
    ftp = FtpWrapper(**ftp_config)

    # Always FTP the homepage.
    ftp.send_file('index.html', '.')

    if args.bill:
        session = app.session
        if args.session:
            session = args.session
        #ftp.mkdir(os.path.join(dirname, subdirname))

    for dirname, dirnames, filenames in os.walk('.'):
        # Sometimes we only want to upload files for a particular session.
        # The dirname, dirnames pairs in this loop look like:
        # . ['bills'] <-- on the top level, "." is dirname and the list is the dirnames.
        # ./bills ['2011a', '2012a', '2012b', '2013a', '2014a', '2015a', '2016a'] <-- the next level down
        # ./bills/2011a ['hb_11-1001', 'hb_11-1002'... <-- the next down from that
        if args.session:
            if 'bills/' in dirname:
                # We don't care about the a/b part of the session -- the final character.
                if args.session[:-1] not in dirname:
                    continue
        if args.no_session:
            if '201' in dirname:
                continue
            else:
                print dirname, dirnames

        for subdirname in dirnames:
            if args.verbose:
                print dirname, subdirname
            if args.theweek and 'the-week' not in dirname and 'the-day' not in dirname:
                continue
            if args.committee and 'committee' not in dirname:
                continue
            if args.legislator and 'legislator' not in dirname:
                continue
            # Skip the endless directory creation on previous years.
            if args.session and current_session not in dirname:
                if args.verbose:
                    print "SKIPPING mkdir on %s" % subdirname
                continue
            ftp.mkdir(os.path.join(dirname, subdirname))

        for filename in filenames:
            if 'jpg' in filename:
                continue
            if args.theweek and 'the-week' not in dirname and 'the-day' not in dirname:
                continue
            if args.committee and 'committee' not in dirname:
                continue
            if args.legislator and 'legislator' not in dirname:
                continue
            # Skip atom file upload on previous years.
            if args.session and app.session not in dirname and 'atom' in filename:
                continue
            if args.verbose:
                print os.path.join(dirname, filename)
            try:
                ftp.send_file(os.path.join(dirname, filename), dirname)
            except:
                print "ERROR: Could not upload", os.path.join(dirname, filename)

            # Bust the cache on extras: turn the local path into its public URL
            # and send a PURGE request for it.
            h = httplib2.Http('')
            url = '%s/' % dirname
            if filename != 'index.html':
                url += filename
            url = string.replace(url, '//', '/')
            url = string.replace(url, '.', 'http://extras.denverpost.com/app/bill-tracker', 1)
            if args.verbose:
                print "PURGE:", url
            try:
                response, content = h.request('%s/' % url, 'PURGE', headers={}, body='')
                urls_updated.append(url)
            except:
                print "ERROR: Could not bust cache on %s" % url

    ftp.disconnect()
    print "Updated: %s" % "\n".join(urls_updated)
    return True


def build_parser(args):
    """ Build the argument parser; broken out so we can test the args.
        >>> args = build_parser(['--verbose'])
        >>> print args.verbose
        True
        """
    parser = argparse.ArgumentParser(usage='$ python deploy.py',
                                     description='Deploy billtracker to production.',
                                     epilog='Example use: python deploy.py --ftp --freeze --session 2016a')
    parser.add_argument("-v", "--verbose", dest="verbose", default=False, action="store_true")
    parser.add_argument("--freeze", dest="do_freeze", default=False, action="store_true",
                        help="Take a snapshot of the site before uploading.")
    parser.add_argument("--ftp", dest="do_ftp", default=False, action="store_true",
                        help="FTP the site to the production server.")
    parser.add_argument("--nosession", dest="no_session", default=False, action="store_true",
                        help="Only upload top-level indexes & homepage.")
    parser.add_argument("--theweek", dest="theweek", default=False, action="store_true",
                        help="Only upload the week- & day-in-review section files.")
    parser.add_argument("--committee", dest="committee", default=False, action="store_true",
                        help="Upload the committee files and directories.")
    parser.add_argument("--legislator", dest="legislator", default=False, action="store_true",
                        help="Upload the legislator files and directories.")
    parser.add_argument("--news", dest="get_news", default=False, action="store_true",
                        help="Download and cache the recent legislative news.")
    parser.add_argument("-b", "--bill", dest="bill", default=None,
                        help="Deploy one bill, one bill only. Also pass a session if you need a prior-session bill flushed.")
    parser.add_argument("-s", "--session", dest="session", default=False)
    args = parser.parse_args(args)
    return args


if __name__ == '__main__':
    args = build_parser(sys.argv[1:])
    if args.verbose:
        doctest.testmod(verbose=args.verbose)
    main(args)
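
# Example invocations, based on the argparse epilog and flags above (FTP_USER,
# FTP_HOST and FTP_PORT must be set in the environment before using --ftp):
#   $ python deploy.py --ftp --freeze --session 2016a
#   $ python deploy.py --ftp --theweek --verbose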