Skip to content

Commit

Permalink
[#247] [spark] Bump Spark Version to 2.4.7 | Bump Spark Version to 2.…
Browse files Browse the repository at this point in the history
…4.7, along with the rest of the dependency stack, to align with gcp_dataproc_1.5.x
  • Loading branch information
Dee-Pac committed Nov 8, 2020
1 parent 3b25dcb commit 7e5f21e
Show file tree
Hide file tree
Showing 17 changed files with 42 additions and 399 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -40,13 +40,17 @@ under the License.
<!--Druid Libraries-->
<dependency>
<groupId>io.druid</groupId>
<artifactId>tranquility-core_${scala.binary.version}</artifactId>
<artifactId>tranquility-core_2.11</artifactId>
<version>${tranquility.version}</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</exclusion>
<exclusion>
<groupId>org.scala-lang</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
Expand Down
32 changes: 16 additions & 16 deletions gimel-dataapi/gimel-core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -87,44 +87,44 @@ under the License.
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-kafka-2.2</artifactId>
<artifactId>gimel-kafka</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-elasticsearch-6.2</artifactId>
<artifactId>gimel-elasticsearch</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-jdbc</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.paypal.gimel</groupId>-->
<!-- <artifactId>gimel-hbase-1.2</artifactId>-->
<!-- <version>${gimel.version}-SNAPSHOT</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-hbase-1.2</artifactId>
<artifactId>gimel-aerospike</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-aerospike-3.14</artifactId>
<artifactId>gimel-hive</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-hive-1.2</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-cassandra-2.0</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-druid-0.82</artifactId>
<artifactId>gimel-cassandra</artifactId>
<version>${gimel.version}-SNAPSHOT</version>
</dependency>
<!-- <dependency>-->
<!-- <groupId>com.paypal.gimel</groupId>-->
<!-- <artifactId>gimel-druid-0.82</artifactId>-->
<!-- <version>${gimel.version}-SNAPSHOT</version>-->
<!-- </dependency>-->
<dependency>
<groupId>com.paypal.gimel</groupId>
<artifactId>gimel-sftp</artifactId>
Expand Down Expand Up @@ -240,7 +240,7 @@ under the License.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.0.0</version>
<version>${maven.shade.plugin.version}</version>
<configuration>
<relocations>
<relocation>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,8 @@ import com.paypal.gimel.common.utilities.BindToFieldsUtils._
import com.paypal.gimel.common.utilities.DataSetUtils.propStringToMap
import com.paypal.gimel.datasetfactory.GimelDataSet
import com.paypal.gimel.elasticsearch.conf.ElasticSearchConfigs
import com.paypal.gimel.hbase.conf.HbaseConfigs
import com.paypal.gimel.jdbc.conf.JdbcConfigs
import com.paypal.gimel.kafka.conf.{KafkaConfigs, KafkaConstants}
import com.paypal.gimel.kafka2.conf.{KafkaConfigs, KafkaConstants}
import com.paypal.gimel.logger.Logger

class DataSet(val sparkSession: SparkSession) {
Expand Down Expand Up @@ -160,7 +159,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -190,7 +189,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -293,7 +292,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -324,7 +323,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -411,7 +410,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -440,7 +439,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -597,7 +596,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -627,7 +626,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -727,7 +726,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -758,7 +757,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -855,7 +854,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -886,7 +885,7 @@ class DataSet(val sparkSession: SparkSession) {
logger.logApiAccess(sparkSession.sparkContext.getConf.getAppId
, sparkAppName
, this.getClass.getName
, KafkaConstants.gimelAuditRunTypeBatch
, "BATCH"
, clusterName
, user
, appTag.replaceAllLiterally("/", "_")
Expand Down Expand Up @@ -983,10 +982,6 @@ object DataSetUtils {

def getDataSet(sparkSession: SparkSession, sourceType: DataSetType.SystemType): GimelDataSet = {
sourceType match {
case DataSetType.KAFKA =>
new com.paypal.gimel.kafka.DataSet(sparkSession)
case DataSetType.HBASE =>
new com.paypal.gimel.hbase.DataSet(sparkSession)
case DataSetType.HDFS =>
new com.paypal.gimel.hdfs.DataSet(sparkSession)
case DataSetType.ES =>
Expand All @@ -999,12 +994,8 @@ object DataSetUtils {
new com.paypal.gimel.cassandra.DataSet(sparkSession)
case DataSetType.AEROSPIKE =>
new com.paypal.gimel.aerospike.DataSet(sparkSession)
case DataSetType.HDFS =>
new com.paypal.gimel.hdfs.DataSet(sparkSession)
case DataSetType.RESTAPI =>
new com.paypal.gimel.restapi.DataSet(sparkSession)
case DataSetType.DRUID =>
new com.paypal.gimel.druid.DataSet(sparkSession)
case DataSetType.SFTP =>
new com.paypal.gimel.sftp.DataSet(sparkSession)
case DataSetType.KAFKA2 =>
Expand All @@ -1022,8 +1013,8 @@ object DataSetUtils {
def getLatestKafkaDataSetReader(dataSet: DataSet): Option[GimelDataSet] = {
Try {
dataSet.latestDataSetReader.get match {
case kafka: com.paypal.gimel.kafka.DataSet =>
kafka
// case kafka: com.paypal.gimel.kafka.DataSet =>
// kafka
case kafka2: com.paypal.gimel.kafka2.DataSet =>
kafka2
}
Expand Down
Loading

0 comments on commit 7e5f21e

Please sign in to comment.