Commit e041ba2 (0 parents) — committed by Venkateswarlu Yadavalli on Dec 7, 2019.
Showing 17 changed files with 487 additions and 0 deletions.
@@ -0,0 +1,125 @@
FROM centos:centos6

RUN yum -y update; yum -y clean all
##### Install JDK 1.8 #####
RUN yum install -y java-1.8.0-openjdk java-1.8.0-openjdk-devel

##### Install basic linux tools #####
RUN yum install -y wget unzip dialog curl sudo lsof vim axel telnet
##### Add Cloudera CDH 5 repository #####
RUN wget http://archive.cloudera.com/cdh5/redhat/6/x86_64/cdh/cloudera-cdh5.repo -O /etc/yum.repos.d/cloudera-cdh5.repo
RUN rpm --import http://archive.cloudera.com/cdh5/redhat/6/x86_64/cdh/RPM-GPG-KEY-cloudera
#####
##### Install HDFS services #####
RUN yum install -y hadoop-hdfs-namenode hadoop-hdfs-secondarynamenode hadoop-hdfs-datanode

##### Install YARN services #####
RUN yum install -y hadoop-yarn-resourcemanager hadoop-yarn-nodemanager hadoop-yarn-proxyserver

##### Install MapReduce services #####
RUN yum install -y hadoop-mapreduce hadoop-mapreduce-historyserver

##### Install Hadoop client & Hadoop conf-pseudo #####
RUN yum install -y hadoop-client hadoop-conf-pseudo

##### Install Zookeeper #####
RUN yum install -y zookeeper zookeeper-server

##### Install HBase #####
RUN yum install -y hbase-master hbase hbase-thrift

##### Install Oozie #####
RUN yum install -y oozie oozie-client

##### Install Spark #####
RUN yum install -y spark-core spark-history-server spark-python

##### Install Hive #####
RUN yum install -y hive hive-metastore hive-hbase

##### Install Pig #####
#RUN yum install -y pig

##### Install Impala #####
RUN yum install -y impala impala-server impala-state-store impala-catalog impala-shell

##### Install Hue #####
RUN yum install -y hue hue-server

##### Install SolR #####
RUN yum -y install solr-server hue-search
##### Install MySQL and connector #####
RUN yum -y install mysql mysql-server mysql-connector-java
RUN ln -s /usr/share/java/mysql-connector-java.jar /usr/lib/hive/lib/mysql-connector-java.jar
##### Note: Will use mysql for Hive metastore
##### Format HDFS #####
USER hdfs
RUN hdfs namenode -format
USER root
#####
##### Initialize HDFS Directories #####
RUN bash -c 'for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do service $x start ; done' ; \
    bash /usr/lib/hadoop/libexec/init-hdfs.sh ; \
    oozie-setup sharelib create -fs hdfs://localhost -locallib /usr/lib/oozie/oozie-sharelib-yarn.tar.gz ; \
    bash -c 'for x in `cd /etc/init.d ; ls hadoop-hdfs-*` ; do service $x stop ; done' ;
##### Note: Keep these commands in a single RUN instruction, as HDFS must be initialized while its services are running
##### Set up Oozie / HUE #####
RUN oozie-setup db create -run
RUN sed -i 's/secret_key=/secret_key=_S@s+D=h;B,s$C%k#H!dMjPmEsSaJR/g' /etc/hue/conf/hue.ini
##### Make this container SSH friendly #####
RUN yum -y install openssh-server openssh-clients
# Start `sshd` to generate host DSA & RSA keys
RUN service sshd start
# Install Apache Kafka
#RUN cd /opt
#RUN wget http://www.mirrorservice.org/sites/ftp.apache.org/kafka/0.8.2.1/kafka_2.11-0.8.2.1.tgz
#RUN tar zxvf kafka_2.11-0.8.2.1.tgz
#RUN cd kafka_2.11-0.8.2.1

### SPARK 2.2.0
ARG SPARK_ARCHIVE=http://d3kbcqa49mib13.cloudfront.net/spark-2.2.0-bin-hadoop2.7.tgz
RUN curl -s $SPARK_ARCHIVE | tar -xz -C /usr/local/

ENV SPARK_HOME /usr/local/spark-2.2.0-bin-hadoop2.7
ENV PATH $PATH:$SPARK_HOME/bin
ADD files/hive-site.xml /usr/lib/hive/conf/

# Add the Hadoop start-up scripts
ADD scripts/* /opt/

ENV LANG en_US.UTF-8

ADD run.sh /usr/bin/

ADD ojdbc6.jar /opt/
#RUN echo "spark.executor.extraClassPath=/opt/ojdbc6.jar | tee -a /etc/spark/conf/spark-defaults.conf" | ||
#RUN echo "spark.driver.extraClassPath=/opt/ojdbc6.jar | tee -a /etc/spark/conf/spark-defaults.conf" | ||
|
||
|
||
CMD ["run.sh"] |
@@ -0,0 +1,9 @@
# juc-conference
#######hi this is for sample....venky########
JUC Conference preparation
EDIT TEST

Docker with CDH 5.4 and Oracle for JUC conference

$ docker build -t landoop/cloudera-5-latest-with-oracle .
$ docker run -t -i landoop/cloudera-5-latest-with-oracle
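The commands above build the image from the repository root and start it interactively. If the CDH web UIs need to be reachable from the host, the usual approach is to publish the corresponding ports; the mapping below is only an illustration, with port numbers assumed from CDH defaults rather than taken from this repo:

$ docker run -t -i -p 8888:8888 -p 50070:50070 -p 8088:8088 landoop/cloudera-5-latest-with-oracle
  # 8888 = Hue, 50070 = HDFS NameNode UI, 8088 = YARN ResourceManager UI (assumed defaults)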
@@ -0,0 +1,53 @@
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://localhost/metastore</value>
  <description>the URL of the MySQL database</description>
</property>

<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
</property>

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>root</value>
</property>

<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>pass</value>
</property>

<property>
  <name>datanucleus.autoCreateSchema</name>
  <value>false</value>
</property>

<property>
  <name>datanucleus.fixedDatastore</name>
  <value>true</value>
</property>

<property>
  <name>datanucleus.autoStartMechanism</name>
  <value>SchemaTable</value>
</property>

<property>
  <name>hive.metastore.uris</name>
  <value>thrift://FULLY-QUALIFIED-DOMAIN-NAME:9083</value>
  <description>IP address (or fully-qualified domain name) and port of the metastore host</description>
</property>

<property>
  <name>hive.metastore.schema.verification</name>
  <value>true</value>
</property>

</configuration>
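Since datanucleus.autoCreateSchema is false and hive.metastore.schema.verification is true, the MySQL metastore schema has to exist before the Hive metastore starts. A minimal sketch of that one-time initialization, assuming the CDH package layout and the root/pass credentials above (these commands are not part of this commit):

# Sketch only: database name, user and password are taken from the hive-site.xml above,
# and the MySQL root password is assumed to have been set to match.
$ service mysqld start
$ mysql -u root -e "CREATE DATABASE IF NOT EXISTS metastore;"
$ /usr/lib/hive/bin/schematool -dbType mysql -initSchema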
@@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
SOLR_PORT=8983
SOLR_ADMIN_PORT=8984
SOLR_LOG=/var/log/solr
SOLR_ZK_ENSEMBLE=localhost:2181/solr
SOLR_HDFS_HOME=hdfs://localhost:8020/solr
SOLR_HDFS_CONFIG=/etc/hadoop/conf
# SOLR_KERBEROS_ENABLED=true
# SOLR_KERBEROS_KEYTAB=/etc/solr/conf/solr.keytab
# SOLR_KERBEROS_PRINCIPAL=solr/localhost@LOCALHOST
SOLR_AUTHENTICATION_TYPE=simple
SOLR_AUTHENTICATION_SIMPLE_ALLOW_ANON=true
# SOLR_AUTHENTICATION_KERBEROS_KEYTAB=/etc/solr/conf/solr.keytab
# SOLR_AUTHENTICATION_KERBEROS_PRINCIPAL=HTTP/localhost@LOCALHOST
# SOLR_AUTHENTICATION_KERBEROS_NAME_RULES=DEFAULT
# SOLR_AUTHENTICATION_JAAS_CONF=/etc/solr/conf/jaas.conf
SOLR_SECURITY_ALLOWED_PROXYUSERS=hue
SOLR_SECURITY_PROXYUSER_hue_HOSTS=*
SOLR_SECURITY_PROXYUSER_hue_GROUPS=*
# SOLR_AUTHORIZATION_SENTRY_SITE=/etc/solr/conf/sentry-site.xml
# SOLR_AUTHORIZATION_SUPERUSER=solr
SOLRD_WATCHDOG_TIMEOUT=30
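With SOLR_ZK_ENSEMBLE and SOLR_HDFS_HOME pointing at the local ZooKeeper and HDFS, the Solr znode and HDFS directory typically have to be bootstrapped once before solr-server can start. A hedged sketch using the Cloudera Search tooling (commands assumed, not part of this commit):

# Assumed one-time bootstrap; paths mirror the settings above
$ sudo -u hdfs hadoop fs -mkdir -p /solr
$ sudo -u hdfs hadoop fs -chown solr /solr
$ solrctl init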
Binary file not shown.
@@ -0,0 +1,17 @@
#!/usr/bin/env bash

bash -x /opt/start_HDFS.sh
bash -x /opt/prepare_HDFS.sh
bash -x /opt/start_yarn.sh
bash -x /opt/start_hive_metastore.sh
bash -x /opt/start_oozie.sh
bash -x /opt/start_zookeeper.sh
bash -x /opt/start_hbase.sh
bash -x /opt/start_spark.sh
bash -x /opt/start_impala.sh
bash -x /opt/start_spark.sh

export TERM=xterm
echo "CDH STARTED"
sleep infinity
#tail -f /var/log/messages
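Because the script ends with sleep infinity, it keeps the container's foreground process alive once the services are up. One way to use the image is therefore to run it detached and attach a shell when needed; the container name below is just an example (image name taken from the README):

$ docker run -d --name cdh landoop/cloudera-5-latest-with-oracle
$ docker exec -it cdh bash   # attach a shell to the running container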
@@ -0,0 +1,8 @@
#!/usr/bin/env bash

function red { echo $2 -e "\e[1;31m$1\e[0m"; }
function cyan { echo $2 -e "\e[1;36m$1\e[0m"; }
function green { echo $2 -e "\e[1;32m$1\e[0m"; }
function purple { echo $2 -e "\e[1;35m$1\e[0m"; }
function blue { echo $2 -e "\e[1;34m$1\e[0m"; }
function yellow { echo $2 -e "\e[1;33m$1\e[0m"; }
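These helpers are sourced by the other start-up scripts; the first argument is the message and the optional second argument is forwarded to echo. For example (the green call mirrors the HDFS preparation script below, the yellow call is hypothetical):

. $(dirname $0)/colors.sh
green "-- Preparing HDFS directories --"
yellow "still working..." -n   # hypothetical: -n is passed through to echo to suppress the newline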
@@ -0,0 +1,15 @@
#!/usr/bin/env bash
. $(dirname $0)/colors.sh

green "-- Preparing HDFS directories --"
hadoop fs -ls /
hadoop fs -ls /user
sudo -n -u hdfs /usr/bin/hadoop fs -chmod -R a+w /
sudo -n -u hdfs /usr/bin/hadoop fs -mkdir -p /user/hadoop /user/hive/warehouse /hbase /tmp /var
sudo -n -u hdfs /usr/bin/hadoop fs -chmod -R a+w /user
sudo -n -u hdfs /usr/bin/hadoop fs -chown hadoop /user/hadoop
sudo -n -u hdfs /usr/bin/hadoop fs -chown hbase /hbase
sudo -n -u hdfs /usr/bin/hadoop fs -chmod a+w /hbase
sudo -n -u hdfs /usr/bin/hadoop fs -chmod -R 777 /tmp
sudo -n -u hdfs /usr/bin/hadoop fs -chmod -R 777 /var
sudo -n -u hdfs hadoop fs -chown oozie:oozie /user/oozie