Skip to content

Commit a2dde61

Browse files
author
Christian Herrera
committed
Add new content
1 parent c00a090 commit a2dde61

File tree

52 files changed

+414
-319
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

52 files changed

+414
-319
lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ jobs:
1717
- uses: actions/setup-java@v4
1818
with:
1919
distribution: 'zulu'
20-
java-version: '21'
20+
java-version: '11'
2121
cache: 'sbt'
2222
- name: 👌 Run "pre-push" tasks (compile and style-check)
2323
run: sbt prep

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,4 @@ project/plugins/project/
88
/docker/spark/data/
99
/docker/volume/
1010
/docker/spark/apps/
11+
/derby.log

docker/docker-compose.yml

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ services:
2424
environment:
2525
- SPARK_LOCAL_IP=172.19.0.10
2626
- SPARK_WORKLOAD=master
27+
- SPARK_DAEMON_MEMORY=3G
2728
networks:
2829
spark-network:
2930
ipv4_address: 172.19.0.10
@@ -37,9 +38,8 @@ services:
3738
- spark-master
3839
environment:
3940
- SPARK_MASTER=spark://spark-master:7077
40-
- SPARK_WORKER_CORES=2
41-
- SPARK_WORKER_MEMORY=2G
42-
- SPARK_DRIVER_MEMORY=2G
41+
- SPARK_WORKER_CORES=3
42+
- SPARK_WORKER_MEMORY=3G
4343
- SPARK_EXECUTOR_MEMORY=1G
4444
- SPARK_WORKLOAD=worker
4545
- SPARK_LOCAL_IP=172.19.0.2
@@ -62,9 +62,8 @@ services:
6262
- spark-master
6363
environment:
6464
- SPARK_MASTER=spark://spark-master:7077
65-
- SPARK_WORKER_CORES=2
66-
- SPARK_WORKER_MEMORY=2G
67-
- SPARK_DRIVER_MEMORY=2G
65+
- SPARK_WORKER_CORES=3
66+
- SPARK_WORKER_MEMORY=3G
6867
- SPARK_EXECUTOR_MEMORY=1G
6968
- SPARK_WORKLOAD=worker
7069
- SPARK_LOCAL_IP=172.19.0.3
@@ -208,7 +207,7 @@ services:
208207
--deploy-mode client
209208
--executor-memory 1G
210209
--driver-memory 1G
211-
--total-executor-cores 1
210+
--total-executor-cores 2
212211
--conf spark.sql.hive.metastore.version=2.3.9
213212
--conf spark.sql.uris=thrift://hive-metastore:9083
214213
--conf spark.hadoop.hive.metastore.uris=thrift://hive-metastore:9083

docker/hive/conf/hive-site.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
</property>
66
<property>
77
<name>javax.jdo.option.ConnectionURL</name>
8-
<value>jdbc:postgresql://172.18.0.8:5432/metastore</value>
8+
<value>jdbc:postgresql://postgres:5432/metastore</value>
99
</property>
1010
<property>
1111
<name>javax.jdo.option.ConnectionUserName</name>

project/Settings.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ object Settings {
66
val settings = Seq(
77
name := "spark-best_practises_and_deploy-course",
88
version := "0.1.0-SNAPSHOT",
9-
scalaVersion := "2.12.12",
9+
scalaVersion := "2.12.19",
1010
organization := "com.codely",
1111
organizationName := "com.codely, Inc.",
1212
organizationHomepage := Some(url("https://com.codely")),
Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
package com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.app
2+
3+
import org.apache.spark.sql.SparkSession
4+
5+
trait SparkApp extends App {
6+
7+
implicit val spark: SparkSession = SparkSession
8+
.builder()
9+
.enableHiveSupport()
10+
.getOrCreate()
11+
12+
}
Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,18 @@
11
package com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.app
22

33
import com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.config.AppContext
4-
import com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.job.AvgSpendingJob
4+
import com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.job.TotalSpendingJob
55
import com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.service.{Reader, Writer}
66

7-
object AvgSpendingApp extends SparkApp {
8-
9-
private val appName = "avg-spending-app"
7+
object TotalSpendingApp extends SparkApp {
108

119
private val context = AppContext.load(args)
1210

13-
spark.conf.set("spark.app.name", appName)
14-
1511
private val reader = Reader()
1612

1713
private val writer = Writer()
1814

19-
private val job = AvgSpendingJob(context, reader, writer)
15+
private val job = TotalSpendingJob(context, reader, writer)
2016

2117
job.run()
2218
}

src/main/com/codely/lesson_02_tests_in_spark/video_01__end_to_end_testing/config/AppContext.scala

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import com.typesafe.config.ConfigFactory
44

55
import java.io.File
66

7-
case class AppConfig(
7+
case class AppContext(
88
spark: SparkConfig,
99
source: SourceConfig,
1010
sink: SinkConfig
@@ -14,8 +14,8 @@ case class SparkConfig(appName: String)
1414
case class SourceConfig(format: String, options: Map[String, String])
1515
case class SinkConfig(format: String, mode: String, path: String)
1616

17-
object AppConfig {
18-
def load(args: Array[String]): AppConfig = {
17+
object AppContext {
18+
def load(args: Array[String]): AppContext = {
1919

2020
val cmdArgs = ArgumentsParser.parse(args).getOrElse(CmdArgs())
2121
val configFile = new File(cmdArgs.configFile.get)
@@ -29,7 +29,7 @@ object AppConfig {
2929
"driver" -> config.getString("source.options.driver")
3030
)
3131

32-
AppConfig(
32+
AppContext(
3333
spark = SparkConfig(
3434
appName = config.getString("spark.appName")
3535
),

src/main/com/codely/lesson_02_tests_in_spark/video_01__end_to_end_testing/config/ArgumentsParser.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@ package com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.config
33
import scopt.OParser
44

55
object ArgumentsParser {
6-
val builder = OParser.builder[CmdArgs]
7-
val argsParser = {
6+
private val builder = OParser.builder[CmdArgs]
7+
private val argsParser = {
88
import builder._
99
OParser.sequence(
1010
programName("Scala Application"),
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
package com.codely.lesson_02_tests_in_spark.video_01__end_to_end_testing.config
22

3-
case class CmdArgs(configFile: Option[String] = None)
3+
case class CmdArgs(configFile: Option[String] = None)

0 commit comments

Comments
 (0)