This repository has been archived by the owner on Feb 25, 2024. It is now read-only.

Sync buildog work with main buildbot #111

Merged 42 commits on Mar 17, 2019
Commits
39bf232
Initial stage of rewrite
staticfloat Nov 27, 2018
baca1c9
Customize as `buildog`
staticfloat Nov 21, 2017
f6475e8
Expand some of our buildbots
staticfloat Dec 10, 2018
ab9cec2
Import `SKIPPED`
staticfloat Dec 10, 2018
4a2666a
Standardize assert build property name
staticfloat Dec 11, 2018
c1112d4
!remove me debugging!
staticfloat Dec 11, 2018
043ddee
grumble grumble reverse-snake-brain
staticfloat Dec 11, 2018
2bc1982
Clean up old cruft
staticfloat Dec 11, 2018
b01a0ce
Try running `make debug release` again.
staticfloat Dec 11, 2018
45573d0
Expand builders a little bit
staticfloat Dec 12, 2018
02311b1
Let's scale up on packet
staticfloat Dec 12, 2018
fa17b36
More workers!
staticfloat Dec 12, 2018
e25c1fd
Not scaleway, packet!
staticfloat Dec 12, 2018
8e051e7
Move `i686` builders to `nanosoldier3`
staticfloat Dec 12, 2018
500c2e3
Add `nanosoldier1`
staticfloat Dec 12, 2018
3f80163
Cascade assert and non-assert builds to only deploy nonassert builds
staticfloat Dec 18, 2018
c1bc068
Scale back maxrss
staticfloat Dec 18, 2018
d435273
More Fireflies! :)
staticfloat Dec 18, 2018
2c49fea
UI improvements
staticfloat Dec 18, 2018
7d8c681
Even better, just reuse what already exists
staticfloat Dec 18, 2018
7665479
Increase keepalive frequency
staticfloat Dec 18, 2018
24b47ca
Use BinaryBuilder artifacts when we can
staticfloat Dec 27, 2018
dcf6f83
Fix embarrassing typo in `mac` OS definition
staticfloat Dec 30, 2018
51af062
Persistent source cache!
staticfloat Dec 30, 2018
b22af97
Better scheduler naming
staticfloat Dec 30, 2018
57659c0
Fix broken trigger link
staticfloat Dec 30, 2018
dd8302c
Fix `is_branch()` invocation
staticfloat Dec 31, 2018
453e7e5
Upgrade to Python 3
staticfloat Mar 17, 2019
15376b7
We're backing out on our desire to cross-build Windows
staticfloat Mar 17, 2019
cf7f783
Remove comments that were even more incorrect than originally thought
staticfloat Mar 17, 2019
30da308
Let's start sending statuses
staticfloat Mar 17, 2019
08df6fa
Different token
staticfloat Mar 17, 2019
7def6f0
Forgot a line
staticfloat Mar 17, 2019
122d850
Try creating a separate status for each builder
staticfloat Mar 17, 2019
6dbcc32
walk back on that a bit
staticfloat Mar 17, 2019
103b074
Add testing reporting
staticfloat Mar 17, 2019
0a6dce2
I accidentally an `r`
staticfloat Mar 17, 2019
4a1de0c
Fix more typos
staticfloat Mar 17, 2019
a8f2626
Upgrade travis to python3 as well
staticfloat Mar 17, 2019
c54ec21
Merge coverage updates
staticfloat Mar 17, 2019
a971c08
Prepare for merging into `master`
staticfloat Mar 17, 2019
3edba5b
Properly namespace `assert` builds.
staticfloat Mar 17, 2019
Files changed
.travis.yml: 2 changes (1 addition, 1 deletion)
@@ -1,4 +1,4 @@
language: python
language: python3
sudo: required
notifications:
email: false
Dockerfile: 2 changes (1 addition, 1 deletion)
@@ -1,4 +1,4 @@
FROM python:2
FROM python:3

# Install necessary packages
RUN pip install buildbot requests
master/auto_reload.py: 7 changes (6 additions, 1 deletion)
@@ -1,4 +1,9 @@
reload_scheduler = schedulers.AnyBranchScheduler(name="Buildbot Auto reload", change_filter=util.ChangeFilter(project='JuliaCI/julia-buildbot', branch=['master']), builderNames=["auto_reload"], treeStableTimer=1)
reload_scheduler = schedulers.AnyBranchScheduler(
name="Buildbot Auto reload",
change_filter=util.ChangeFilter(project='JuliaCI/julia-buildbot', branch=[BUILDBOT_BRANCH]),
builderNames=["auto_reload"],
treeStableTimer=1
)
c['schedulers'].append(reload_scheduler)


master/builder_utils.py: 96 changes (59 additions, 37 deletions)
@@ -65,6 +65,16 @@ def gen_local_filename(props_obj):
return "julia-{shortcommit}-Linux-arm.{os_pkg_ext}".format(**props)
return "julia-{shortcommit}-Linux-{tar_arch}.{os_pkg_ext}".format(**props)

# Map from property to upload OS name on the JuliaLang S3 bucket
def get_upload_os_name(props):
if is_windows(props):
return "winnt"
elif is_mac(props):
return "mac"
elif is_freebsd(props):
return "freebsd"
else:
return "linux"

def gen_upload_filename(props_obj):
props = props_obj_to_dict(props_obj)
@@ -74,38 +84,53 @@ def gen_upload_filename(props_obj)
props["os_name_file"] = "win"
return "julia-{shortcommit}-{os_name_file}{bits}.{os_pkg_ext}".format(**props)

def gen_upload_path(props_obj, namespace=None):
def gen_upload_path(props_obj, namespace=None, latest=False):
# First, pull information out of props_obj
up_arch = props_obj.getProperty("up_arch")
majmin = props_obj.getProperty("majmin")
upload_filename = props_obj.getProperty("upload_filename")
os = get_os_name(props_obj)
if namespace is None:
return "julialangnightlies/bin/%s/%s/%s/%s"%(os, up_arch, majmin, upload_filename)
else:
return "julialangnightlies/%s/bin/%s/%s/%s/%s"%(namespace, os, up_arch, majmin, upload_filename)
assert_build = props_obj.getProperty("assert_build")

def gen_latest_upload_path(props_obj, namespace=None):
up_arch = props_obj.getProperty("up_arch")
upload_filename = props_obj.getProperty("upload_filename")
if upload_filename[:6] == "julia-":
# If we're asking for the latest information,
if latest and upload_filename[:6] == "julia-":
split_name = upload_filename.split("-")
upload_filename = "julia-latest-%s"%(split_name[2])
os = get_os_name(props_obj)
if namespace is None:
return "julialangnightlies/bin/%s/%s/%s"%(os, up_arch, upload_filename)
else:
return "julialangnightlies/%s/bin/%s/%s/%s"%(namespace, os, up_arch, upload_filename)
os = get_upload_os_name(props_obj)

# Helper function to prepend things to a value that might be `None`.
def none_prepend(p, x):
if x is None:
return p
else:
return x + "_" + p


def gen_download_url(props_obj, namespace=None):
base = 'https://s3.amazonaws.com'
return '%s/%s'%(base, gen_upload_path(props_obj, namespace=namespace))
# If we're running on the buildog or some other branch, prepend all our namespaces:
if BUILDBOT_BRANCH != "master":
namespace = none_prepend(BUILDBOT_BRANCH, namespace)

def gen_latest_download_url(props_obj):
base = 'https://s3.amazonaws.com'
return '%s/%s'%(base, gen_latest_upload_path(props_obj))
# If we're running an assert build, put it into an "assert" bucket:
if assert_build:
namespace = none_prepend("assert", namespace)

# If we have a namespace, add that on to our URL first
url = "julialangnightlies/"
if namespace is not None:
url += namespace + "/"

# Next up, OS and Arch.
url += os + "/" + up_arch + "/"

# If we're asking for latest, don't go into majmin
if not latest:
url += majmin + "/"
url += upload_filename

return url

def gen_download_url(props_obj, namespace=None, latest=False):
base = 'https://s3.amazonaws.com'
return '%s/%s'%(base, gen_upload_path(props_obj, namespace=namespace, latest=latest))
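# Illustrative sketch (not part of this diff): a standalone re-statement of the
# path composition above, showing how branch, assert, namespace, and latest
# interact. The property names and example values here are assumptions.
def sketch_upload_path(os_name, up_arch, majmin, upload_filename,
                       branch="master", assert_build=False,
                       namespace=None, latest=False):
    def none_prepend(p, x):
        # Append `p` to an optional namespace, creating it if absent.
        return p if x is None else x + "_" + p
    if latest and upload_filename.startswith("julia-"):
        upload_filename = "julia-latest-%s" % upload_filename.split("-")[2]
    if branch != "master":
        namespace = none_prepend(branch, namespace)
    if assert_build:
        namespace = none_prepend("assert", namespace)
    url = "julialangnightlies/"
    if namespace is not None:
        url += namespace + "/"
    url += os_name + "/" + up_arch + "/"
    if not latest:
        url += majmin + "/"
    return url + upload_filename

# e.g. sketch_upload_path("linux", "x64", "1.2", "julia-abc1234-linux64.tar.gz",
#                         branch="buildog", assert_build=True)
# -> "julialangnightlies/buildog_assert/linux/x64/1.2/julia-abc1234-linux64.tar.gz"
# and the corresponding download URL is "https://s3.amazonaws.com/" plus that path.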


# This is a weird buildbot hack where we really want to parse the output of our
@@ -141,7 +166,7 @@ def render_promotion_command(props_obj):
@util.renderer
def render_latest_promotion_command(props_obj):
src_path = gen_upload_path(props_obj, namespace="pretesting")
dst_path = gen_latest_upload_path(props_obj)
dst_path = gen_upload_path(props_obj, latest=True)
return ["/bin/sh", "-c", "aws s3 cp --acl public-read s3://%s s3://%s"%(src_path, dst_path)]

@util.renderer
@@ -161,18 +186,10 @@ def render_pretesting_download_url(props_obj):
def render_make_app(props_obj):
props = props_obj_to_dict(props_obj)

new_way = "make {flags} app".format(**props)
old_way = "make {flags} -C contrib/mac/app && mv contrib/mac/app/*.dmg {local_filename}".format(**props)

# We emit a shell command that attempts to run `make app` (which is the nice
# `sf/consistent_distnames` shortcut), and if that fails, it runs the steps
# manually, which boil down to `make -C contrib/mac/app` and moving the
# result to the top-level, where we can find it. We can remove this once
# 0.6 is no longer being built.
return [
"/bin/sh",
"-c",
"~/unlock_keychain.sh && (%s || (%s))"%(new_way, old_way)
"~/unlock_keychain.sh && make {flags} app".format(**props)
]
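# For illustration only (the "flags" value below is hypothetical, not taken from
# this diff): with props = {"flags": "ARCH=x86_64"} the rendered step runs roughly
#   /bin/sh -c '~/unlock_keychain.sh && make ARCH=x86_64 app'
# i.e. the old contrib/mac/app fallback is dropped and only `make app` remains.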

def build_download_julia_cmd(props_obj):
@@ -220,14 +237,19 @@ def download_latest_julia(props_obj):
# Fake `gen_upload_filename()` into giving us something like
# `julia-latest-linux64.tar.gz` instead of a true shortcommit
props_obj.setProperty("shortcommit", "latest", "download_latest_julia")
upload_filename = gen_upload_filename(props_obj)
props_obj.setProperty("upload_filename", upload_filename, "download_latest_julia")

download_url = gen_latest_download_url(props_obj)
props_obj.setProperty("download_url", download_url, "download_latest_julia")
props_obj.setProperty(
"upload_filename",
gen_upload_filename(props_obj),
"download_latest_julia",
)
props_obj.setProperty(
"download_url",
gen_download_url(props_obj, latest=True),
"download_latest_julia",
)
return build_download_julia_cmd(props_obj)

@util.renderer
def render_tester_name(props_obj):
props = props_obj_to_dict(props_obj)
return "Julia %s Testing"%(props['buildername'].replace('package_', ''))
return "Julia CI (%s testing)"%(props['buildername'].replace('package_', ''))
master/coverage.py: 110 changes (71 additions, 39 deletions)
@@ -3,21 +3,68 @@
###############################################################################

run_coverage_cmd = """
using CoverageBase, Compat, Compat.Test
using Pkg
Pkg.activate("CoverageBase")
using CoverageBase
CoverageBase.runtests(CoverageBase.testnames())
"""

analyse_and_submit_cov_cmd = """
using Coverage, CoverageBase, Compat

cd(joinpath(CoverageBase.julia_top()))
results = Coverage.process_folder("base")
if isdefined(CoverageBase.BaseTestRunner, :STDLIBS)
for stdlib in CoverageBase.BaseTestRunner.STDLIBS
append!(results, Coverage.process_folder("site/v$(VERSION.major).$(VERSION.minor)/$stdlib/src"))
using Pkg
Pkg.activate("CoverageBase")
using Coverage, CoverageBase
# Process code-coverage files
results = Coverage.LCOV.readfolder(raw"%(prop:juliadir)s/LCOV")
# remove test/ files
filter!(results) do c
!occursin("test/", c.filename)
end
# turn absolute paths into relative, and add base/ to relative paths
CoverageBase.fixpath!(results)
results = Coverage.merge_coverage_counts(results)
# pretty-print what we have got
sort!(results, by=c->c.filename)
for r in results
cov, tot = get_summary(r)
@info "Got coverage data for $(r.filename): $cov/$tot"
end
# keep only files in stdlib/ and base/
let prefixes = (joinpath("base", ""),
joinpath("stdlib", ""))
filter!(results) do c
any(p -> startswith(c.filename, p), prefixes)
end
end

# try to find these files, remove those that are not present
CoverageBase.readsource!(results)
filter!(results) do c
if isempty(c.source)
@info "File $(c.filename) not found"
false
else
true
end
end
# add in any other files we discover
# todo: extend Glob.jl to support these patterns (base/**/*.jl and stdlib/*/src/**/*.jl (except test/))
# todo: consider also or instead looking at the Base._included_files list
allfiles_base = sort!(split(readchomp(Cmd(`find base -name '*.jl'`, dir=CoverageBase.fixabspath(""))), '\n'))
allfiles_stdlib = sort!(map(x -> "stdlib/" * x[3:end],
split(readchomp(Cmd(`find . -name '*.jl' ! -path '*/test/*' ! -path '*/docs/*'`, dir=CoverageBase.fixabspath("stdlib/"))), '\n')))
allfiles = map(fn -> Coverage.FileCoverage(fn, read(CoverageBase.fixabspath(fn), String), Coverage.FileCoverage[]),
[allfiles_base; allfiles_stdlib])
results = Coverage.merge_coverage_counts(results, allfiles)
length(results) == length(allfiles) || @warn "Got coverage for an unexpected file:" symdiff=symdiff(map(x -> x.filename, allfiles), map(x -> x.filename, results))
# attempt to improve accuracy of the results
foreach(Coverage.amend_coverage_from_src!, results)
# Create git_info for codecov
git_info = Any[
:branch => Base.GIT_VERSION_INFO.branch,
:commit => Base.GIT_VERSION_INFO.commit,
:token => ENV["CODECOV_REPO_TOKEN"],
]
# Submit to codecov
Codecov.submit_generic(results; git_info...)
# Create git_info for Coveralls
git_info = Dict(
"branch" => Base.GIT_VERSION_INFO.branch,
@@ -29,28 +76,15 @@
],
"head" => Dict(
"id" => Base.GIT_VERSION_INFO.commit,
"message" => "%(prop:commitmessage)s",
"committer_name" => "%(prop:commitname)s",
"committer_email" => "%(prop:commitemail)s",
"author_name" => "%(prop:authorname)s",
"author_email" => "%(prop:authoremail)s",
"message" => raw"%(prop:commitmessage)s",
"committer_name" => raw"%(prop:commitname)s",
"committer_email" => raw"%(prop:commitemail)s",
"author_name" => raw"%(prop:authorname)s",
"author_email" => raw"%(prop:authoremail)s",
)
)

# Submit to Coveralls
ENV["REPO_TOKEN"] = ENV["COVERALLS_REPO_TOKEN"]
Coveralls.submit_token(results, git_info)
delete!(ENV, "REPO_TOKEN")

# Create git_info for codecov
git_info = Any[
:branch => Base.GIT_VERSION_INFO.branch,
:commit => Base.GIT_VERSION_INFO.commit,
:token => ENV["CODECOV_REPO_TOKEN"],
]

# Submit to codecov
Codecov.submit_generic(results; git_info...)
Coveralls.submit_local(results, git_info)
"""

# Steps to download a linux tarball, extract it, run coverage on it, and upload coverage stats
@@ -100,28 +134,26 @@
),

# Run Julia, gathering coverage statistics
steps.MakeDirectory(dir=util.Interpolate("build/%(prop:juliadir)s/LCOV")),
steps.ShellCommand(
name="Run inlined tests",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "--sysimage-native-code=no", "--code-coverage=all", "-e", run_coverage_cmd],
name="Run tests",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"),
"--sysimage-native-code=no", util.Interpolate("--code-coverage=%(prop:juliadir)s/LCOV/cov-%%p.info"),
"-e", run_coverage_cmd],
timeout=3600,
),
steps.ShellCommand(
name="Run non-inlined tests",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "--sysimage-native-code=no", "--code-coverage=all", "--inline=no", "-e", run_coverage_cmd],
timeout=7200,
),
#submit the results!
steps.ShellCommand(
name="Gather test results and Submit",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", util.Interpolate(analyse_and_submit_cov_cmd)],
env={'COVERALLS_REPO_TOKEN':COVERALLS_REPO_TOKEN, 'CODECOV_REPO_TOKEN':CODECOV_REPO_TOKEN},
env={'COVERALLS_TOKEN':COVERALLS_REPO_TOKEN, 'CODECOV_REPO_TOKEN':CODECOV_REPO_TOKEN},
logEnviron=False,
),
])
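# Note, as an assumption about buildbot's Interpolate escaping (illustrative,
# not part of the diff): "%(prop:juliadir)s" is substituted with the build
# property, while the doubled "%%p" collapses to a single "%", so Julia sees a
# per-process coverage pattern along the lines of:
#   --code-coverage=/path/to/julia/LCOV/cov-%p.info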


# Add a dependent scheduler for running coverage after we build tarballs
julia_coverage_builders = ["coverage_ubuntu16_04-x64"]
julia_coverage_builders = ["coverage_linux64"]
julia_coverage_scheduler = schedulers.Triggerable(name="Julia Coverage Testing", builderNames=julia_coverage_builders)
c['schedulers'].append(julia_coverage_scheduler)

@@ -151,8 +183,8 @@

# Add coverage builders
c['builders'].append(util.BuilderConfig(
name="coverage_ubuntu16_04-x64",
workernames=["tabularasa_ubuntu16_04-x64"],
name="coverage_linux64",
workernames=["tabularasa_" + x for x in builder_mapping["linux64"]],
tags=["Coverage"],
factory=julia_coverage_factory
))
master/freebsd_ci.py: 3 changes (0 additions, 3 deletions)
@@ -4,9 +4,6 @@

freebsdci_factory = util.BuildFactory()
freebsdci_factory.addSteps([
steps.BSDSysInfo(),
steps.BSDSetMakeVar(['make_jobs'], ['MAKE_JOBS_NUMBER']),

steps.ShellCommand(
name='cleanup stdlib',
command=['rm', '-rvf', 'stdlib']