-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathbuild.gradle
More file actions
86 lines (70 loc) · 2.78 KB
/
build.gradle
File metadata and controls
86 lines (70 loc) · 2.78 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
/*
* This file was generated by the Gradle 'init' task.
*
* This is a general purpose Gradle build.
* Learn how to create Gradle builds at https://guides.gradle.org/creating-new-gradle-builds/
*/
// Project coordinates used for publishing artifacts.
group = 'com.h2ospark'
version = '1.0.4-SNAPSHOT'
// Repositories used to resolve build-script (plugin) dependencies only;
// project dependencies are resolved via the top-level repositories block.
buildscript {
    repositories { mavenCentral() }
}
//apply plugin: 'java'
// Scala plugin compiles src/main/scala and src/test/scala.
apply plugin: 'scala'
// NOTE(review): the 'maven' plugin is deprecated and removed in Gradle 7+;
// migrating to 'maven-publish' would require new publishing config — confirm Gradle version before upgrading.
apply plugin: 'maven'
// Dependency resolution repositories: Maven Central plus the H2O release repo
// (hosts ai.h2o:h2o-genmodel* artifacts for this pinned release).
repositories {
    mavenCentral()
    maven {
        // Fixed: was plain http:// — insecure transport is a MITM risk and is
        // rejected outright by Gradle 7+ unless explicitly allowed.
        url "https://h2o-release.s3.amazonaws.com/h2o/rel-yu/1/maven/repo/"
    }
}
configurations {
    // Custom 'master' configuration; no dependencies are assigned to it in this
    // file — presumably consumed elsewhere (e.g. by a packaging task). TODO confirm.
    master
    // Spread-operator: exclude the legacy servlet-api module from EVERY
    // configuration to avoid classpath clashes with Spark/Hadoop's servlet jars.
    all*.exclude group: 'javax.servlet', module: 'servlet-api'
}
dependencies {
    // Provided at runtime by the Spark cluster — compile against, never bundle.
    compileOnly "org.scala-lang:scala-library:$scala_major.$scala_minor"
    compileOnly "org.scala-lang:scala-reflect:$scala_major.$scala_minor"
    compileOnly "org.apache.spark:spark-sql_$scala_major:$spark_version"
    compileOnly "org.apache.spark:spark-core_$scala_major:$spark_version"
    compileOnly "org.apache.spark:spark-hive_$scala_major:$spark_version"
    compileOnly 'com.esotericsoftware.kryo:kryo:2.21'
    // Bundled into the fat jar (see the jar task, which packs configurations.compile).
    compile "joda-time:joda-time:$joda_version"
    compile "org.joda:joda-convert:1.8"
    compile "com.typesafe:config:$typesafe_version"
    compile "com.github.scopt:scopt_$scala_major:$scopt_version"
    compile "com.databricks:spark-csv_$scala_major:1.5.0"
    compile "org.scala-lang:scala-compiler:$scala_major.$scala_minor"
    compile "ai.h2o:h2o-genmodel:$h2oProjectVersion"
    compile "ai.h2o:h2o-genmodel-ext-xgboost:$h2oProjectVersion"
    // Test-only dependencies.
    // Fixed: spark-testing-base was declared twice; the duplicate is removed.
    testCompile "com.holdenkarau:spark-testing-base_$scala_major:$holden_version"
    testCompile "org.scalatest:scalatest_$scala_major:$scalatest_version"
    testCompile "org.apache.spark:spark-hive_$scala_major:$spark_version"
    testCompile "org.apache.spark:spark-sql_$scala_major:$spark_version"
    testCompile "org.apache.spark:spark-core_$scala_major:$spark_version"
    testCompile "org.apache.hadoop:hadoop-client:$hadoop_version"
}
// Build a fat (uber) jar: unpack every 'compile' dependency (jars via zipTree,
// class dirs as-is) into the project jar. compileOnly deps (Spark, Scala
// library) are intentionally NOT included — the cluster provides them.
jar {
    from {
        configurations.compile.collect { it.isDirectory() ? it : zipTree(it) }
    }
}
// ScalaTest suites are not discovered by Gradle's default 'test' task, so run
// them explicitly through the ScalaTest Runner.
// -R: runpath of compiled test classes; -o: report to standard output.
// NOTE(review): newer Gradle lays classes out under build/classes/scala/test — verify path.
task testScala(dependsOn: ['testClasses'], type: JavaExec) {
    classpath = sourceSets.test.runtimeClasspath
    main = 'org.scalatest.tools.Runner'
    args '-R', 'build/classes/test', '-o'
}
// Run only suites/tests tagged 'UTest' (ScalaTest -n include-tag filter).
task testScalaUnit(dependsOn: ['testClasses'], type: JavaExec) {
    classpath = sourceSets.test.runtimeClasspath
    main = 'org.scalatest.tools.Runner'
    args '-R', 'build/classes/test', '-o', '-n', 'UTest'
}
// Run only suites/tests tagged 'ITest' (ScalaTest -n include-tag filter).
task testScalaIntegration(dependsOn: ['testClasses'], type: JavaExec) {
    classpath = sourceSets.test.runtimeClasspath
    main = 'org.scalatest.tools.Runner'
    args '-R', 'build/classes/test', '-o', '-n', 'ITest'
}