forked from cdapio/hadoop_cookbook
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: Vagrantfile
98 lines (88 loc) · 2.8 KB
/
Vagrantfile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
# -*- mode: ruby -*-
# vi: set ft=ruby :

# Multi-machine Vagrant environment for testing the hadoop cookbook on both
# CentOS 6.5 and Ubuntu 12.04, provisioned with Chef Solo.

# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = '2'

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  # All Vagrant configuration is done here. The most common configuration
  # options are documented and commented below. For a complete reference,
  # please see the online documentation at vagrantup.com.

  # We *need* vagrant-omnibus for these box images (they ship without Chef).
  config.omnibus.chef_version = :latest

  # Enable berkshelf plugin so cookbook dependencies are resolved for us.
  config.berkshelf.enabled = true

  # Run Multi-Machine environment to test both OSs
  # http://docs.vagrantup.com/v2/multi-machine/index.html
  %w(
    centos-6.5
    ubuntu-12.04
  ).each do |platform|
    config.vm.define platform do |c|
      c.vm.box = "opscode-#{platform}"
      # Use HTTPS so box downloads are integrity-protected.
      # NOTE(review): the opscode-vm-bento S3 bucket is legacy — these boxes
      # have since moved to the "bento" org on Vagrant Cloud; verify the URL
      # still resolves before relying on it.
      c.vm.box_url = "https://opscode-vm-bento.s3.amazonaws.com/vagrant/virtualbox/opscode_#{platform}_chef-provisionerless.box"
      # `hostname` replaces the deprecated `host_name` alias.
      c.vm.hostname = "hadoop-#{platform}.local"
    end
  end

  config.vm.provider :virtualbox do |vb|
    # Use VBoxManage to customize the VM. For example to change memory:
    vb.customize ['modifyvm', :id, '--memory', '2048']
  end

  # Ubuntu needs this, but global provisioners run first; `exit 0` keeps the
  # step from failing on CentOS, where apt-get does not exist.
  config.vm.provision :shell, inline: 'test -x /usr/bin/apt-get && sudo apt-get update ; exit 0'

  config.vm.provision :chef_solo do |chef|
    # Node attributes consumed by the java/hadoop (and related) cookbooks.
    chef.json = {
      mysql: {
        server_root_password: 'rootpass',
        server_debian_password: 'debpass',
        server_repl_password: 'replpass'
      },
      java: {
        install_flavor: 'oracle',
        jdk_version: 7,
        oracle: {
          accept_oracle_download_terms: true
        }
      },
      hadoop: {
        container_executor: {
          'banned.users' => 'hdfs,yarn,mapred,bin'
        },
        distribution: 'cdh',
        distribution_version: 5,
        hadoop_env: {
          'hadoop_log_dir' => '/data/logs/hadoop-hdfs'
        },
        hdfs_site: {
          'dfs.datanode.max.transfer.threads' => 4096
        }
      },
      hbase: {
        hbase_site: {
          'hbase.rootdir' => 'hdfs://localhost:8020/hbase',
          'hbase.zookeeper.quorum' => 'localhost',
          'hbase.cluster.distributed' => true
        }
      },
      hive: {
        hive_site: {
          'hive.support.concurrency' => 'true',
          'hive.zookeeper.quorum' => 'localhost'
        }
      },
      spark: {
        spark_env: {
          standalone_spark_master_host: 'localhost',
          spark_master_ip: 'localhost'
        }
      },
      zookeeper: {
        zoocfg: {
          dataLogDir: '/tmp/zookeeper/logs'
        }
      }
    }
    chef.run_list = [
      'recipe[java::default]',
      'recipe[hadoop::default]'
    ]
  end
end