Skip to content

Commit

Permalink
Add the initial role and an easy way to test it with Vagrant.
Browse files Browse the repository at this point in the history
  • Loading branch information
hectcastro committed Nov 26, 2014
1 parent ec27bda commit 3904e24
Show file tree
Hide file tree
Showing 11 changed files with 154 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
.vagrant

examples/roles/azavea.java
11 changes: 11 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# ansible-spark

An Ansible role for installing [Apache Spark](https://spark.apache.org).

## Role Variables

- `spark_version` - Version of the `spark-core` package to install from the Cloudera APT repository.

## Example Playbook

See the [examples](./examples/) directory.
2 changes: 2 additions & 0 deletions defaults/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
---
# Version string of the Cloudera `spark-core` APT package to install
# (Spark 1.1.0 from CDH 5.2.0, built for Ubuntu trusty). Must match a
# version available in the Cloudera repository configured in tasks/main.yml.
spark_version: "1.1.0+cdh5.2.0+56-1.cdh5.2.0.p0.35~trusty-cdh5.2.0"
24 changes: 24 additions & 0 deletions examples/Vagrantfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :

VAGRANTFILE_API_VERSION = "2"

# Fetch role dependencies from Ansible Galaxy before `vagrant up`/`provision`,
# skipping the download when the azavea.java role is already present locally.
provisioning = %w[up provision].include?(ARGV.first)
role_present = File.directory?("roles/azavea.java") || File.symlink?("roles/azavea.java")

if provisioning && !role_present
  galaxy_ok = system("ansible-galaxy install --force -r roles.txt -p roles")

  unless galaxy_ok
    warn "\nERROR: Please install Ansible 1.4.2+ so that the ansible-galaxy binary"
    warn "is available."
    exit 1
  end
end

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  config.vm.box = "ubuntu/trusty64"

  # Provision the box with the example playbook, running tasks as root.
  config.vm.provision "ansible" do |ansible|
    ansible.playbook = "site.yml"
    ansible.sudo = true
  end
end
1 change: 1 addition & 0 deletions examples/roles.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
azavea.java,0.1.0
1 change: 1 addition & 0 deletions examples/roles/azavea.spark
9 changes: 9 additions & 0 deletions examples/site.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
# Example playbook: install Apache Spark on every host via the azavea.spark role.
- hosts: all

  pre_tasks:
    # Refresh the APT package index so the role sees current package versions.
    - name: Update APT cache
      apt: update_cache=yes

  roles:
    - { role: "azavea.spark" }
59 changes: 59 additions & 0 deletions files/utils.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Gather all spark-submit options into SUBMISSION_OPTS.
#
# Splits the caller's arguments ("$@") into two exported arrays:
# SUBMISSION_OPTS (flags recognized by spark-submit, together with their
# values) and APPLICATION_OPTS (everything else, passed through to the
# user application). The caller must set SUBMIT_USAGE_FUNCTION to the name
# of a function that prints usage; it is invoked (and the script exits 1)
# when a value-taking option is missing its value.
function gatherSparkSubmitOpts() {

if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then
echo "Function for printing usage of $0 is not set." 1>&2
echo "Please set usage function to shell variable 'SUBMIT_USAGE_FUNCTION' in $0" 1>&2
exit 1
fi

# NOTE: If you add or remove spark-submit options,
# modify NOT ONLY this script but also SparkSubmitArguments.scala
SUBMISSION_OPTS=()
APPLICATION_OPTS=()
while (($#)); do
case "$1" in
# Options that take a value: consume both the flag and its argument.
--master | --deploy-mode | --class | --name | --jars | --py-files | --files | \
--conf | --properties-file | --driver-memory | --driver-java-options | \
--driver-library-path | --driver-class-path | --executor-memory | --driver-cores | \
--total-executor-cores | --executor-cores | --queue | --num-executors | --archives)
if [[ $# -lt 2 ]]; then
"$SUBMIT_USAGE_FUNCTION"
exit 1;
fi
SUBMISSION_OPTS+=("$1"); shift
SUBMISSION_OPTS+=("$1"); shift
;;

# Boolean flags: no value to consume.
--verbose | -v | --supervise)
SUBMISSION_OPTS+=("$1"); shift
;;

# Anything unrecognized belongs to the application, not spark-submit.
*)
APPLICATION_OPTS+=("$1"); shift
;;
esac
done

export SUBMISSION_OPTS
export APPLICATION_OPTS
}
16 changes: 16 additions & 0 deletions meta/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
---
# Ansible Galaxy metadata for the azavea.spark role.
galaxy_info:
  author: Hector Castro
  description: An Ansible role for installing Apache Spark.
  company: Azavea Inc.
  license: Apache
  min_ansible_version: 1.2
  platforms:
    - name: Ubuntu
      versions:
        - trusty
        - precise
  categories:
    - system
# Spark requires a JVM, so the Java role is a hard dependency.
# NOTE(review): the pinned java_version targets Ubuntu 14.04 and will need
# bumping as security updates are published — confirm against azavea.java.
dependencies:
  - { role: "azavea.java", java_version: "7u71-2.5.3-0ubuntu0.14.04.1" }
21 changes: 21 additions & 0 deletions tasks/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
---
# Install a pinned Apache Spark package from Cloudera's CDH 5 APT repository.

- name: Configure Cloudera APT key
  # NOTE(review): the signing key is fetched over plain HTTP — consider HTTPS
  # so the key itself cannot be tampered with in transit.
  apt_key: url="http://archive.cloudera.com/cdh5/ubuntu/{{ ansible_distribution_release }}/amd64/cdh/archive.key"
           state=present

- name: Configure the Cloudera APT repositories
  apt_repository: repo="deb [arch=amd64] http://archive.cloudera.com/cdh5/ubuntu/{{ ansible_distribution_release }}/amd64/cdh {{ ansible_distribution_release }}-cdh5 contrib"
                  state=present

# Prefer packages from the -cdh5 suite over the stock release where both
# provide them (see templates/cdh5.j2: priority 600 vs. 100).
- name: Pin Cloudera APT repositories
  template: src=cdh5.j2 dest=/etc/apt/preferences.d/cdh5

- name: Install Spark
  apt: pkg=spark-core={{ spark_version }} state=present

# See: https://groups.google.com/a/cloudera.org/d/msg/cdh-user/eAW-hiUHdzY/hC_y6-ayHXIJ
# NOTE(review): per the thread above, the CDH spark-core package appears to
# ship without bin/utils.sh, which the Spark launch scripts source — confirm
# against the package before removing this task.
- name: Put utils.sh into place
  copy: src=utils.sh
        dest=/usr/lib/spark/bin/utils.sh
        mode=0755
7 changes: 7 additions & 0 deletions templates/cdh5.j2
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
Package: *
Pin: release n={{ ansible_distribution_release }}
Pin-Priority: 100

Package: *
Pin: release n={{ ansible_distribution_release }}-cdh5
Pin-Priority: 600

0 comments on commit 3904e24

Please sign in to comment.