1 Star 0 Fork 0

ericshenjs / snappydata

加入 Gitee
与超过 1200万 开发者一起发现、参与优秀开源项目,私有仓库也完全免费 :)
免费加入
克隆/下载
build.gradle 33.32 KB
一键复制 编辑 原始数据 按行查看 历史
12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
import groovy.json.JsonSlurper
// Gradle wrapper plus top-level distribution packaging for product archives.
apply plugin: 'wrapper'
apply plugin: 'distribution'
// JDK8's javadoc doclint is strict enough to fail the build on pre-existing
// javadoc issues; silence it for every project.
if (JavaVersion.current().isJava8Compatible()) {
allprojects {
tasks.withType(Javadoc) {
options.addStringOption('Xdoclint:none', '-quiet')
}
}
}
// Plugins required by the build script itself: the scalatest runner,
// the scalastyle plugin and build-time tracking.
buildscript {
repositories {
maven { url 'https://plugins.gradle.org/m2' }
mavenCentral()
}
dependencies {
classpath 'io.snappydata:gradle-scalatest:0.13.1'
classpath 'org.github.ngbinh.scalastyle:gradle-scalastyle-plugin_2.11:0.8.2'
classpath "net.rdrei.android.buildtimetracker:gradle-plugin:0.8.+"
}
}
// Common configuration for the root project and every sub-project:
// repositories, base plugins, compiler options, shared version properties
// and the build output layout.
allprojects {
// We want to see all test results. This is equivalent to setting --continue
// on the command line.
gradle.startParameter.continueOnFailure = true
repositories {
mavenCentral()
maven { url "https://repo.spring.io/libs-release" }
maven { url "https://oss.sonatype.org/content/repositories/snapshots" }
// NOTE(review): plain-http repositories allow MITM artifact substitution --
// confirm whether these hosts can be reached over https.
maven { url 'http://repository.snappydata.io:8089/repository/internal' }
maven { url 'http://repository.snappydata.io:8089/repository/snapshots' }
maven { url 'https://app.camunda.com/nexus/content/repositories/public' }
}
apply plugin: 'java'
apply plugin: 'maven'
apply plugin: 'scalaStyle'
apply plugin: 'idea'
apply plugin: 'eclipse'
apply plugin: "build-time-tracker"
group = 'io.snappydata'
version = '0.7'
// apply compiler options
tasks.withType(JavaCompile) {
options.encoding = 'UTF-8'
options.incremental = true
options.compilerArgs << '-Xlint:-serial,-path,-deprecation,-unchecked,-rawtypes'
options.compilerArgs << '-XDignore.symbol.file'
options.fork = true
// fork the JDK's javac (java.home points at the jre/ subdirectory)
options.forkOptions.executable = "${System.properties['java.home']}/../bin/javac"
}
tasks.withType(ScalaCompile) {
// scalaCompileOptions.optimize = true
// scalaCompileOptions.useAnt = false
scalaCompileOptions.fork = false
scalaCompileOptions.deprecation = false
scalaCompileOptions.additionalParameters = [ '-feature' ]
options.encoding = 'UTF-8'
}
javadoc.options.charSet = 'UTF-8'
// every Tar archive produced anywhere in the build is gzip-compressed
gradle.taskGraph.whenReady( { graph ->
tasks.withType(Tar).each { tar ->
tar.compression = Compression.GZIP
tar.extension = 'tar.gz'
}
})
// Shared dependency versions and build metadata visible to every project.
ext {
productName = 'SnappyData'
scalaBinaryVersion = '2.11'
scalaVersion = scalaBinaryVersion + '.8'
sparkVersion = '2.0.0'
snappySparkVersion = '2.0.3-2'
sparkDistName = "spark-${sparkVersion}-bin-hadoop2.7"
log4jVersion = '1.2.17'
slf4jVersion = '1.7.21'
junitVersion = '4.12'
hadoopVersion = '2.7.3'
scalatestVersion = '2.2.6'
jettyVersion = '9.2.16.v20160414'
guavaVersion = '14.0.1'
kryoVersion = '4.0.0'
derbyVersion = '10.12.1.1'
pegdownVersion = '1.6.0'
snappyStoreVersion = '1.5.3'
pulseVersion = '1.5.1'
buildFlags = ''
createdBy = System.getProperty('user.name')
osArch = System.getProperty('os.arch')
osName = org.gradle.internal.os.OperatingSystem.current()
osVersion = System.getProperty('os.version')
buildDate = new Date().format('yyyy-MM-dd HH:mm:ss Z')
buildNumber = new Date().format('MMddyy')
jdkVersion = System.getProperty('java.version')
// git metadata captured at configuration time (used for RELEASE/manifests)
gitCmd = "git --git-dir=${rootDir}/.git --work-tree=${rootDir}"
gitBranch = "${gitCmd} rev-parse --abbrev-ref HEAD".execute().text.trim()
commitId = "${gitCmd} rev-parse HEAD".execute().text.trim()
sourceDate = "${gitCmd} log -n 1 --format=%ai".execute().text.trim()
}
// Redirect build output under an external buildRoot when supplied
// (buildRoot is presumably a gradle property set in gradle.properties --
// it is referenced here without declaration; confirm).
if (!buildRoot.isEmpty()) {
buildDir = new File(buildRoot, 'scala-' + scalaBinaryVersion + '/' + project.path.replace(':', '/'))
} else {
// default output directory like in sbt/maven
buildDir = 'build-artifacts/scala-' + scalaBinaryVersion
}
// Derived output locations; defined after buildDir so they pick up the
// possibly-redirected root build directory.
ext {
testResultsBase = "${rootProject.buildDir}/tests/snappy"
snappyProductDir = "${rootProject.buildDir}/snappy"
sparkDistDir = "${rootProject.projectDir}/dist"
sparkProductDir = "${sparkDistDir}/${sparkDistName}"
}
}
// Return this JVM's process id. The RuntimeMXBean name is conventionally
// "<pid>@<hostname>" on HotSpot; if no '@' is present (non-standard JVMs),
// return the whole name instead of silently slicing with a negative Groovy
// range as the old `name[0..indexOf('@')-1]` did.
def getProcessId() {
String name = java.lang.management.ManagementFactory.getRuntimeMXBean().getName()
int at = name.indexOf('@')
return at > 0 ? name.substring(0, at) : name
}
// Render a throwable's stack trace to a String, first sanitizing it so
// Groovy/Gradle internal frames are stripped out.
def getStackTrace(def t) {
def buffer = new java.io.StringWriter()
def printer = new java.io.PrintWriter(buffer)
def cleaned = org.codehaus.groovy.runtime.StackTraceUtils.sanitize(t)
cleaned.printStackTrace(printer)
return buffer.toString()
}
// Skip snappy-spark, snappy-aqp and spark-jobserver that have their own
// scalaStyle configuration. Skip snappy-store that will not use it.
configure(subprojects.findAll {!(it.name ==~ /snappy-spark.*/ ||
it.name ==~ /snappy-store.*/ ||
it.name ==~ /snappy-aqp.*/ ||
it.name ==~ /spark-jobserver.*/)}) {
// scalastyle runs on main sources only; violations fail the build,
// warnings do not.
scalaStyle {
configLocation = "${rootProject.projectDir}/scalastyle-config.xml"
inputEncoding = 'UTF-8'
outputEncoding = 'UTF-8'
outputFile = "${buildDir}/scalastyle-output.xml"
includeTestSourceDirectory = false
source = 'src/main/scala'
testSource = 'src/test/scala'
failOnViolation = true
failOnWarning = false
}
}
// Delete runtime droppings left in a project's directory by tests:
// hive metastore/warehouse dirs, the store data dictionary, disk-store
// backups and locator state files.
def cleanIntermediateFiles(def projectName) {
def projDir = "${project(projectName).projectDir}"
delete "${projDir}/metastore_db"
delete "${projDir}/warehouse"
delete "${projDir}/datadictionary"
delete fileTree(projDir) {
include 'BACKUPGFXD-DEFAULT-DISKSTORE**', 'locator*.dat'
}
}
// The clean* tasks below each reset one test-results category directory
// (delete then recreate). They use the legacy `task name << { }` doLast
// shorthand, which was removed in Gradle 5 -- would need `doLast { }` on
// a Gradle upgrade.
task cleanScalaTest << {
String workingDir = "${testResultsBase}/scalatest"
delete workingDir
file(workingDir).mkdirs()
}
task cleanJUnit << {
String workingDir = "${testResultsBase}/junit"
delete workingDir
file(workingDir).mkdirs()
}
task cleanDUnit << {
String workingDir = "${testResultsBase}/dunit"
delete workingDir
file(workingDir).mkdirs()
// clean spark cluster directories
delete "${snappyProductDir}/work", "${snappyProductDir}/logs"
delete "${sparkProductDir}/work", "${sparkProductDir}/logs"
}
task cleanAllReports << {
String workingDir = "${testResultsBase}/combined-reports"
delete workingDir
file(workingDir).mkdirs()
}
task cleanQuickstart << {
String workingDir = "${testResultsBase}/quickstart"
delete workingDir
file(workingDir).mkdirs()
}
// Per-sub-project configuration: run/test tasks, manifest, provided
// configuration wiring and common dependencies.
subprojects {
// the run task for a selected sub-project
task run(type:JavaExec) {
if (!project.hasProperty('mainClass')) {
main = 'io.snappydata.app.SparkSQLTest'
} else {
main = mainClass
}
if (project.hasProperty('params')) {
args = params.split(',') as List
}
classpath = sourceSets.main.runtimeClasspath + sourceSets.test.runtimeClasspath
jvmArgs '-Xmx2g', '-XX:MaxPermSize=512m'
}
// Runs scalatest suites via the gradle-scalatest plugin's custom action.
// Suite selection and output redirection are exposed as task extensions
// (suite/suites/testResult/testOutput/testError) used below and by callers.
task scalaTest(type: Test) {
actions = [ new com.github.maiflai.ScalaTestAction() ]
// top-level default is single process run since scalatest does not
// spawn separate JVMs
maxParallelForks = 1
minHeapSize '4g'
maxHeapSize '4g'
jvmArgs '-ea', '-XX:+HeapDumpOnOutOfMemoryError','-XX:+UseConcMarkSweepGC', '-XX:MaxNewSize=1g',
'-XX:+UseParNewGC', '-XX:+CMSClassUnloadingEnabled', '-XX:MaxPermSize=512m'
// for benchmarking
// minHeapSize '12g'
// maxHeapSize '12g'
// jvmArgs '-XX:+HeapDumpOnOutOfMemoryError','-XX:+UseConcMarkSweepGC', '-XX:MaxNewSize=2g',
// '-XX:+UseParNewGC', '-XX:+CMSClassUnloadingEnabled', '-XX:MaxPermSize=512m'
testLogging.exceptionFormat = 'full'
List<String> suites = []
extensions.add(com.github.maiflai.ScalaTestAction.SUITES, suites)
extensions.add('suite', { String name -> suites.add(name) } )
extensions.add('suites', { String... name -> suites.addAll(name) } )
def result = new StringBuilder()
extensions.add(com.github.maiflai.ScalaTestAction.TESTRESULT, result)
extensions.add('testResult', { String name -> result.setLength(0); result.append(name) } )
def output = new StringBuilder()
extensions.add(com.github.maiflai.ScalaTestAction.TESTOUTPUT, output)
extensions.add('testOutput', { String name -> output.setLength(0); output.append(name) } )
def errorOutput = new StringBuilder()
extensions.add(com.github.maiflai.ScalaTestAction.TESTERROR, errorOutput)
extensions.add('testError', { String name -> errorOutput.setLength(0); errorOutput.append(name) } )
// running a single scala suite
if (rootProject.hasProperty('singleSuite')) {
suite singleSuite
}
workingDir = "${testResultsBase}/scalatest"
testResult '/dev/tty'
testOutput "${workingDir}/output.txt"
testError "${workingDir}/error.txt"
binResultsDir = file("${workingDir}/binary/${project.name}")
reports.html.destination = file("${workingDir}/html/${project.name}")
reports.junitXml.destination = file(workingDir)
}
// Plain JUnit tests, run in parallel; DUnit tests and base classes excluded.
test {
maxParallelForks = (2 * Runtime.getRuntime().availableProcessors())
maxHeapSize '2g'
jvmArgs '-ea', '-XX:+HeapDumpOnOutOfMemoryError','-XX:+UseConcMarkSweepGC',
'-XX:+UseParNewGC', '-XX:+CMSClassUnloadingEnabled', '-XX:MaxPermSize=512m'
testLogging.exceptionFormat = 'full'
include '**/*.class'
exclude '**/*DUnitTest.class'
exclude '**/*DUnitSingleTest.class'
exclude '**/*TestBase.class'
workingDir = "${testResultsBase}/junit"
binResultsDir = file("${workingDir}/binary/${project.name}")
reports.html.destination = file("${workingDir}/html/${project.name}")
reports.junitXml.destination = file(workingDir)
// append a start marker to progress.txt before the suite runs
doFirst {
String eol = System.getProperty('line.separator')
String now = new Date().format('yyyy-MM-dd HH:mm:ss.SSS Z')
def progress = new File(workingDir, 'progress.txt')
progress << "${eol}${now} ========== STARTING JUNIT TEST SUITE FOR ${project.name} ==========${eol}${eol}"
}
}
// Distributed-unit tests (*DUnitTest) run serially against the installed
// product. A subset can be picked with -Pdunit.single, or by name patterns
// -Pdunit.from / -Pdunit.to which are resolved to file indexes via getLast.
task dunitTest(type: Test) {
dependsOn ':cleanDUnit'
dependsOn ':product'
maxParallelForks = 1
minHeapSize '1g'
maxHeapSize '1g'
jvmArgs = ['-XX:+HeapDumpOnOutOfMemoryError', '-XX:MaxPermSize=384m',
'-XX:+UseParNewGC', '-XX:+UseConcMarkSweepGC',
'-XX:CMSInitiatingOccupancyFraction=50',
'-XX:+CMSClassUnloadingEnabled', '-ea']
def dunitTests = fileTree(dir: testClassesDir, include: '**/*DUnitTest.class')
FileTree includeTestFiles = dunitTests
def single = rootProject.hasProperty('dunit.single') ?
rootProject.property('dunit.single') : null
// getLast returns -1 when the pattern matches nothing
int dunitFrom = rootProject.hasProperty('dunit.from') ?
getLast(includeTestFiles, rootProject.property('dunit.from')) : 0
int dunitTo = rootProject.hasProperty('dunit.to') ?
getLast(includeTestFiles, rootProject.property('dunit.to')) : includeTestFiles.size()
if (single == null || single.length() == 0) {
int begin = dunitFrom != -1 ? dunitFrom : 0
int end = dunitTo != -1 ? dunitTo : includeTestFiles.size()
// NOTE(review): `end` is an inclusive index when dunit.to matched but the
// collection size otherwise, so take(end-begin+1) over-counts by one in
// the default case -- harmless since take() caps at the remaining size,
// but the mixed semantics deserve confirmation.
def filteredSet = includeTestFiles.drop(begin).take(end-begin+1).collect {f -> "**/" + f.name}
if (begin != 0 || end != includeTestFiles.size()) {
println("Picking tests :")
filteredSet.each { a -> println(a) }
}
include filteredSet
} else {
include single
}
exclude '**/*Suite.class'
workingDir = "${testResultsBase}/dunit"
binResultsDir = file("${workingDir}/binary/${project.name}")
reports.html.destination = file("${workingDir}/html/${project.name}")
reports.junitXml.destination = file(workingDir)
systemProperties 'java.net.preferIPv4Stack': 'true',
'SNAPPY_HOME': snappyProductDir
doFirst {
String eol = System.getProperty('line.separator')
String now = new Date().format('yyyy-MM-dd HH:mm:ss.SSS Z')
def progress = new File(workingDir, 'progress.txt')
progress << "${eol}${now} ========== STARTING DUNIT TEST SUITE FOR ${project.name} ==========${eol}${eol}"
}
}
// Late configuration once the task graph is known: add test classes and
// resources to the packageTests jar, and wire per-test progress/output
// logging plus product environment variables into every Test task.
gradle.taskGraph.whenReady({ graph ->
tasks.withType(Jar).each { pack ->
if (pack.name == 'packageTests') {
pack.from(pack.project.sourceSets.test.output.classesDir, sourceSets.test.resources.srcDirs)
}
}
tasks.withType(Test).each { test ->
test.configure {
// -DlogLevel / -DsecurityLogLevel pass through to the tests
String logLevel = System.getProperty('logLevel')
if (logLevel != null && logLevel.length() > 0) {
systemProperties 'gemfire.log-level' : logLevel,
'logLevel' : logLevel
}
logLevel = System.getProperty('securityLogLevel')
if (logLevel != null && logLevel.length() > 0) {
systemProperties 'gemfire.security-log-level' : logLevel,
'securityLogLevel' : logLevel
}
environment 'SNAPPY_HOME': snappyProductDir,
'APACHE_SPARK_HOME': sparkProductDir,
'SNAPPY_DIST_CLASSPATH': "${sourceSets.test.runtimeClasspath.asPath}"
String eol = System.getProperty('line.separator')
// start/end markers go to progress.txt; full (stderr-tagged) test
// output is appended to output.txt in the task's working directory
beforeTest { desc ->
String now = new Date().format('yyyy-MM-dd HH:mm:ss.SSS Z')
def progress = new File(workingDir, 'progress.txt')
def output = new File(workingDir, 'output.txt')
progress << "${now} Starting test ${desc.className} ${desc.name}${eol}"
output << "${now} STARTING TEST ${desc.className} ${desc.name}${eol}${eol}"
}
onOutput { desc, event ->
def output = new File(workingDir, 'output.txt')
String msg = event.message
if (event.destination.toString() == 'StdErr') {
msg = msg.replace('\n', '\n[error] ')
}
output << msg
}
afterTest { desc, result ->
String now = new Date().format('yyyy-MM-dd HH:mm:ss.SSS Z')
def progress = new File(workingDir, 'progress.txt')
def output = new File(workingDir, 'output.txt')
progress << "${now} Completed test ${desc.className} ${desc.name} with result: ${result.resultType}${eol}"
output << "${eol}${now} COMPLETED TEST ${desc.className} ${desc.name} with result: ${result.resultType}${eol}${eol}"
result.exceptions.each { t ->
progress << " EXCEPTION: ${getStackTrace(t)}${eol}"
output << "${getStackTrace(t)}${eol}"
}
}
}
}
})
// apply default manifest
if (rootProject.hasProperty('enablePublish')) {
createdBy = 'SnappyData Build Team'
}
jar {
manifest {
attributes(
'Manifest-Version' : '1.0',
'Created-By' : createdBy,
'Title' : rootProject.name,
'Version' : version,
'Vendor' : 'SnappyData, Inc.'
)
}
}
configurations {
// compile-time-only dependencies (pre-dates Gradle's built-in compileOnly)
provided {
description 'a dependency that is provided externally at runtime'
visible true
}
testOutput {
extendsFrom testCompile
description 'a dependency that exposes test artifacts'
}
/*
all {
resolutionStrategy {
// fail eagerly on version conflict (includes transitive dependencies)
// e.g. multiple different versions of the same dependency (group and name are equal)
failOnVersionConflict()
}
}
*/
}
// force versions for some dependencies that get pulled multiple times
configurations.all {
resolutionStrategy.force "com.google.guava:guava:${guavaVersion}",
"org.apache.derby:derby:${derbyVersion}",
"org.apache.hadoop:hadoop-annotations:${hadoopVersion}",
"org.apache.hadoop:hadoop-auth:${hadoopVersion}",
"org.apache.hadoop:hadoop-client:${hadoopVersion}",
"org.apache.hadoop:hadoop-common:${hadoopVersion}",
"org.apache.hadoop:hadoop-hdfs:${hadoopVersion}",
"org.apache.hadoop:hadoop-mapreduce-client-app:${hadoopVersion}",
"org.apache.hadoop:hadoop-mapreduce-client-common:${hadoopVersion}",
"org.apache.hadoop:hadoop-mapreduce-client-core:${hadoopVersion}",
"org.apache.hadoop:hadoop-mapreduce-client-jobclient:${hadoopVersion}",
"org.apache.hadoop:hadoop-mapreduce-client-shuffle:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-api:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-client:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-common:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-server-common:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-server-nodemanager:${hadoopVersion}",
"org.apache.hadoop:hadoop-yarn-server-web-proxy:${hadoopVersion}"
}
task packageTests(type: Jar, dependsOn: testClasses) {
description 'Assembles a jar archive of test classes.'
classifier = 'tests'
}
artifacts {
testOutput packageTests
}
// expose the provided configuration to IDE classpaths as well
idea {
module {
scopes.PROVIDED.plus += [ configurations.provided ]
}
}
eclipse {
classpath {
defaultOutputDir = file('build-artifacts/eclipse')
downloadSources = true
plusConfigurations += [ configurations.provided ]
}
}
// provided deps compile everywhere but are excluded from main runtime
sourceSets {
main.compileClasspath += configurations.provided
main.runtimeClasspath -= configurations.provided
test.compileClasspath += configurations.provided
test.runtimeClasspath += configurations.provided
}
javadoc.classpath += configurations.provided
// logging and test dependencies common to every sub-project
dependencies {
compile 'log4j:log4j:' + log4jVersion
compile 'org.slf4j:slf4j-api:' + slf4jVersion
compile 'org.slf4j:slf4j-log4j12:' + slf4jVersion
testCompile "junit:junit:${junitVersion}"
}
}
// maven publish tasks
subprojects {
apply plugin: 'signing'
// sources and javadoc jars as required for Maven Central publishing
task packageSources(type: Jar, dependsOn: classes) {
classifier = 'sources'
from sourceSets.main.allSource
}
task packageDocs(type: Jar, dependsOn: javadoc) {
classifier = 'javadoc'
from javadoc
}
// Signing and OSSRH deployment only when -PenablePublish is given.
// ossrhUsername/ossrhPassword are presumably supplied via
// gradle.properties -- they are referenced here without declaration.
if (rootProject.hasProperty('enablePublish')) {
signing {
sign configurations.archives
}
uploadArchives {
repositories {
mavenDeployer {
beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
repository(url: 'https://oss.sonatype.org/service/local/staging/deploy/maven2/') {
authentication(userName: ossrhUsername, password: ossrhPassword)
}
snapshotRepository(url: 'https://oss.sonatype.org/content/repositories/snapshots/') {
authentication(userName: ossrhUsername, password: ossrhPassword)
}
pom.project {
name 'SnappyData'
packaging 'jar'
// optionally artifactId can be defined here
description 'SnappyData distributed data store and execution engine'
url 'http://www.snappydata.io'
scm {
connection 'scm:git:https://github.com/SnappyDataInc/snappydata.git'
developerConnection 'scm:git:https://github.com/SnappyDataInc/snappydata.git'
url 'https://github.com/SnappyDataInc/snappydata'
}
licenses {
license {
name 'The Apache License, Version 2.0'
url 'http://www.apache.org/licenses/LICENSE-2.0.txt'
}
}
developers {
developer {
id 'smenon'
name 'Sudhir Menon'
email 'smenon@snappydata.io'
}
}
}
}
}
}
}
}
// Install every publishable sub-project into the local maven repository
// (native/prebuild/gemfirexd aggregator projects excluded).
task publishLocal {
dependsOn subprojects.findAll { p -> p.name != 'snappydata-native' &&
p.name != 'gemfirexd-prebuild' &&
p.name != 'gemfirexd' }.collect {
it.getTasksByName('install', false).collect { it.path }
}
}
// Upload every publishable sub-project to the remote repository; also
// excludes snappy-store which is published separately.
task publishMaven {
dependsOn subprojects.findAll { p -> p.name != 'snappydata-native' &&
p.name != 'gemfirexd-prebuild' &&
p.name != 'snappy-store' && p.name != 'gemfirexd' }.collect {
it.getTasksByName('uploadArchives', false).collect { it.path }
}
}
// Entry point for generated sources in the spark and store sub-builds.
task generateSources {
dependsOn ':snappy-spark:generateSources'
dependsOn ':snappy-store:generateSources'
}
// Assemble the full SnappyData product tree under snappyProductDir.
// The Zip portion of the task builds the combined snappydata+spark
// python/lib/pyspark.zip; the doLast block copies jars, scripts,
// quickstarts, conf and docs into place.
task product(type: Zip) {
dependsOn ':snappy-store:gemfirexd-client:shadowJar'
dependsOn ":snappy-core_${scalaBinaryVersion}:shadowJar"
dependsOn ":snappy-cluster_${scalaBinaryVersion}:jar"
dependsOn ":snappy-examples_${scalaBinaryVersion}:jar"
dependsOn ":snappy-spark:snappy-spark-assembly_${scalaBinaryVersion}:product"
def clusterProject = project(":snappy-cluster_${scalaBinaryVersion}")
// copy runtime deps from snappy-aqp when its sources are present,
// otherwise from snappy-cluster
def targetProject = clusterProject
def hasAQPProject = new File(rootDir, 'aqp/build.gradle').exists()
if (hasAQPProject) {
dependsOn ":snappy-aqp_${scalaBinaryVersion}:jar"
targetProject = project(":snappy-aqp_${scalaBinaryVersion}")
}
// create snappydata+spark combined python zip
destinationDir = file("${snappyProductDir}/python/lib")
archiveName = 'pyspark.zip'
from("${project(':snappy-spark').projectDir}/python") {
include 'pyspark/**/*'
}
from("${rootDir}/python") {
include 'pyspark/**/*'
}
doFirst {
// remove the spark pyspark.zip
delete "${snappyProductDir}/python/lib/pyspark.zip"
}
doLast {
def coreProject = project(":snappy-core_${scalaBinaryVersion}")
def examplesProject = project(":snappy-examples_${scalaBinaryVersion}")
String coreName = "snappydata-core_${scalaBinaryVersion}-${version}.jar"
String exampleArchiveName = "quickstart.jar"
// copy all runtime dependencies of snappy-cluster, itself and AQP
def targets = targetProject.configurations.runtime
if (new File(rootDir, 'store/build.gradle').exists()) {
// exclude client jar and dependencies since client shadow jar is copied
targets = targetProject.configurations.runtime -
project(':snappy-store:gemfirexd-client').configurations.runtime
}
copy {
from(targets) {
// exclude antlr4 explicitly (runtime is still included)
// that gets pulled by antlr gradle plugin
exclude '**antlr4-4*.jar'
// exclude scalatest included by spark-tags
exclude '**scalatest*.jar'
if (rootProject.hasProperty('hadoop-provided')) {
exclude 'hadoop-*.jar'
}
}
from targetProject.jar.outputs
into "${snappyProductDir}/jars"
}
// create the RELEASE file
def releaseFile = file("${snappyProductDir}/RELEASE")
// local variable shadows the allprojects ext.buildFlags
String buildFlags = ''
if (rootProject.hasProperty('docker')) {
buildFlags += ' -Pdocker'
}
if (rootProject.hasProperty('ganglia')) {
buildFlags += ' -Pganglia'
}
if (rootProject.hasProperty('hadoop-provided')) {
buildFlags += ' -Phadoop-provided'
}
String gitRevision = "${gitCmd} rev-parse --short HEAD".execute().text.trim()
if (gitRevision.length() > 0) {
gitRevision = " (git revision ${gitRevision})"
}
if (rootProject.hasProperty('hadoop-provided')) {
releaseFile.append("SnappyData ${version}${gitRevision} " +
"built with Hadoop ${hadoopVersion} but hadoop not bundled.\n")
} else {
releaseFile.append("SnappyData ${version}${gitRevision} built for Hadoop ${hadoopVersion}.\n")
}
releaseFile.append("Build flags:${buildFlags}\n")
// copy LICENSE, README.md and doc files
copy {
from projectDir
into snappyProductDir
include 'LICENSE'
include 'NOTICE'
include 'README.md'
}
copy {
from "${projectDir}/docs"
into "${snappyProductDir}/docs"
}
// copy the snappy-core shadow jar into distributions
copy {
from("${coreProject.buildDir}/libs")
into "${rootProject.buildDir}/distributions"
include "${coreProject.shadowJar.archiveName}"
rename { filename -> coreName }
}
// Next the remaining components of full product like examples etc
// Spark portions already copied in the assembly:product dependency
copy {
from("${examplesProject.projectDir}/src/main/python")
into "${snappyProductDir}/quickstart/python"
}
if (new File(rootDir, 'store/build.gradle').exists()) {
// copy snappy-store shared libraries for optimized JNI calls
copy {
from "${project(':snappy-store:gemfirexd-core').projectDir}/lib"
into "${snappyProductDir}/jars"
}
copy {
from "${project(':snappy-store:gemfirexd-core').projectDir}/../quickstart"
into "${snappyProductDir}/quickstart/store"
exclude '.git*'
}
// also copy the JDBC client jar separately into distributions
def clientProject = project(':snappy-store:gemfirexd-client')
copy {
from clientProject.shadowJar.destinationDir
into "${rootProject.buildDir}/distributions"
include clientProject.shadowJar.archiveName
}
// Copy the JDBC client jar to jars directory
// (its constituent jars are specifically excluded)
copy {
from clientProject.shadowJar.destinationDir
into "${snappyProductDir}/jars"
include clientProject.shadowJar.archiveName
}
// drop the plain (non-shadow) client jar picked up with runtime deps
delete "${snappyProductDir}/jars/${clientProject.jar.archiveName}"
}
// copy AQP jar from maven repo if project not present
if (!hasAQPProject) {
copy {
from examplesProject.configurations.testRuntime.filter {
it.getName().contains('snappy-aqp')
}
into "${snappyProductDir}/jars"
}
}
// examples jar is shipped under the well-known name quickstart.jar
copy {
from "${examplesProject.buildDir}/libs"
into "${snappyProductDir}/examples/jars"
include "${examplesProject.jar.archiveName}"
rename { filename -> exampleArchiveName }
}
// bin/sbin/conf scripts come from the cluster project
copy {
from("${clusterProject.projectDir}/bin")
into "${snappyProductDir}/bin"
}
copy {
from("${clusterProject.projectDir}/sbin")
into "${snappyProductDir}/sbin"
}
copy {
from("${clusterProject.projectDir}/conf")
into "${snappyProductDir}/conf"
}
copy {
from("${examplesProject.projectDir}/quickstart")
into "${snappyProductDir}/quickstart"
}
copy {
from("${examplesProject.projectDir}/src")
into "${snappyProductDir}/quickstart/src"
}
copy {
from("${clusterProject.projectDir}/benchmark")
into "${snappyProductDir}/benchmark"
}
}
}
// Optionally mirror the assembled product tree to the directory given
// with -PcopyToDir.
if (rootProject.hasProperty('copyToDir')) {
task copyProduct(type: Copy, dependsOn: product) {
from snappyProductDir
into copyToDir
}
}
// TODO: right now just copying over the product contents.
// Can flip it around and let distribution do all the work.
distributions {
main {
baseName = 'snappydata'
contents {
from { snappyProductDir }
}
}
}
distTar {
dependsOn product
// also package pulse and VSD
dependsOn ':packagePulse', ':packageVSD'
classifier 'bin'
// the second classifier call overrides the 'bin' set just above
if (rootProject.hasProperty('hadoop-provided')) {
classifier 'without-hadoop-bin'
}
}
distZip {
dependsOn product
// also package pulse and VSD
dependsOn ':packagePulse', ':packageVSD'
classifier 'bin'
if (rootProject.hasProperty('hadoop-provided')) {
classifier 'without-hadoop-bin'
}
}
// use the task below to prepare final release bits
task distProduct {
dependsOn product, distTar, distZip
}
// Copy the shared test resources from tests/common into the given build
// directory's resources/test folder (created if absent).
def copyTestsCommonResources(def bdir) {
String outdir = "${bdir}/resources/test"
file(outdir).mkdirs()
copy {
from "${rootDir}/tests/common/src/main/resources"
into outdir
}
}
// Run an external executable and return its captured stdout as a String.
//
// execName: path/name of the executable; workDir: its working directory;
// param: the argument list; env: optional map of extra environment
// variables. The env parameter was previously accepted but silently
// ignored -- it is now applied on top of the PYTHONPATH default, which
// keeps behavior identical for all existing callers that omit it.
def runScript(def execName, def workDir, def param, env = null) {
def stdout = new ByteArrayOutputStream()
exec {
executable "${execName}"
workingDir = workDir
args (param)
standardOutput = stdout
// PYTHONPATH lets quickstart python scripts resolve pyspark/py4j
environment 'PYTHONPATH', "${snappyProductDir}/python/lib/py4j-0.10.3-src.zip:${snappyProductDir}/python"
if (env != null) {
env.each { key, value -> environment key, value }
}
}
return "${stdout}"
}
// Copy shared test resources into the build dirs of the projects that use
// them (legacy `<<` doLast shorthand; AQP only when its sources exist).
task copyResourcesAll << {
copyTestsCommonResources(project(":snappy-core_${scalaBinaryVersion}").buildDir)
copyTestsCommonResources(project(":snappy-cluster_${scalaBinaryVersion}").buildDir)
if (new File(rootDir, 'aqp/build.gradle').exists()) {
copyTestsCommonResources(project(":snappy-aqp_${scalaBinaryVersion}").buildDir)
}
}
// Aggregate clean across every project in the build.
task cleanAll {
dependsOn getTasksByName('clean', true).collect { it.path }
}
// Build all classes and both distribution archives without running checks.
task buildAll {
dependsOn getTasksByName('assemble', true).collect { it.path }
dependsOn getTasksByName('testClasses', true).collect { it.path }
dependsOn distTar, distZip
mustRunAfter cleanAll
}
// Aggregate check target. Store and spark checks are opt-in via -Pstore and
// -Pspark; the AQP check is skipped with -Paqp.skip or when its sources are
// absent.
task checkAll {
if (project.hasProperty('store')) {
dependsOn ':snappy-store:check'
}
dependsOn ":snappy-core_${scalaBinaryVersion}:check"
if (project.hasProperty('spark')) {
dependsOn ':snappy-spark:check'
}
dependsOn ":snappy-cluster_${scalaBinaryVersion}:check"
dependsOn ":snappy-examples_${scalaBinaryVersion}:check"
if (!project.hasProperty('aqp.skip') && new File(rootDir, 'aqp/build.gradle').exists()) {
dependsOn ":snappy-aqp_${scalaBinaryVersion}:check"
}
mustRunAfter buildAll
}
// Merge every sub-project's test results into one combined HTML report.
task allReports(type: TestReport) {
description 'Combines the test reports.'
dependsOn cleanAllReports
mustRunAfter checkAll
destinationDir = file("${testResultsBase}/combined-reports")
}
// reportOn is wired after the graph is ready so that every Test task
// (including those added by whenReady blocks above) exists.
gradle.taskGraph.whenReady({ graph ->
tasks.getByName('allReports').reportOn rootProject.subprojects.collect{ it.tasks.withType(Test) }.flatten()
})
// Persist propsMap as a java.util.Properties file called `name` inside
// `parent`, creating the directory first when needed. `comment` becomes the
// header line of the properties file; all values are stringified.
def writeProperties(def parent, def name, def comment, def propsMap) {
parent.exists() || parent.mkdirs()
new File(parent, name).withWriter { out ->
def entries = new Properties()
propsMap.each { key, value -> entries.setProperty(key, value.toString()) }
entries.store(out, comment.toString())
}
}
// Index of the last file in includeTestFiles whose name contains `pattern`,
// or -1 when no file name matches.
int getLast(includeTestFiles, pattern) {
  return includeTestFiles.findLastIndexOf { File candidate ->
    candidate.name.contains(pattern)
  }
}
// Build the Pulse monitoring console from a sibling checkout ($PULSEDIR,
// defaulting to ../pulse) and install its war as jars/pulse.war.
// Skipped with a message when the pulse directory is not writable.
task packagePulse << {
String pulseWarName = "pulse-${pulseVersion}.war"
String pulseDir = System.env.PULSEDIR
if (pulseDir == null || pulseDir.length() == 0) {
pulseDir = "${projectDir}/../pulse"
}
String pulseDistDir = "${pulseDir}/build-artifacts/linux/dist"
if (file(pulseDir).canWrite()) {
// run pulse's own build script before copying the war
exec {
executable "${pulseDir}/build.sh"
workingDir = pulseDir
args 'clean', 'build-all'
}
delete "${snappyProductDir}/jars/pulse.war"
println ''
println "Copying Pulse war from ${pulseDistDir} to ${snappyProductDir}/jars"
println ''
copy {
from "${pulseDir}/build-artifacts/linux/dist"
into "${snappyProductDir}/jars"
include "${pulseWarName}"
rename { filename -> 'pulse.war' }
}
} else {
println "Skipping Pulse due to unwritable ${pulseDir}"
}
}
// Copy the VSD statistics viewer from a thirdparty checkout
// ($THIRDPARTYDIR/vsd, defaulting to ../thirdparty/vsd) into the product.
// Skipped with a message when the source directory is not writable.
task packageVSD << {
String thirdparty = System.env.THIRDPARTYDIR
String vsdDir = ''
if (thirdparty == null || thirdparty.length() == 0) {
vsdDir = "${projectDir}/../thirdparty/vsd"
} else {
vsdDir = "${thirdparty}/vsd"
}
String vsdDistDir = "${vsdDir}/70/vsd"
if (file(vsdDistDir).canWrite()) {
println ''
println "Copying VSD from ${vsdDistDir} to ${snappyProductDir}/vsd"
println ''
delete "${snappyProductDir}/vsd"
copy {
from vsdDistDir
into "${snappyProductDir}/vsd"
}
} else {
println "Skipping VSD due to unwritable ${vsdDistDir}"
}
}
// Spark package creation is delegated to the snappy-core project.
task sparkPackage {
dependsOn ":snappy-core_${scalaBinaryVersion}:sparkPackage"
}
// ordering constraints (not dependencies) among packaging and clean tasks
packagePulse.mustRunAfter product
packageVSD.mustRunAfter product
distTar.mustRunAfter clean, cleanAll
distZip.mustRunAfter clean, cleanAll
distProduct.mustRunAfter clean, cleanAll
// Remove any previously generated unified docs before regenerating.
task deleteDocsDir(type: Delete) {
delete "${rootProject.buildDir}/docs"
}
// Unified scaladoc across all product sub-projects, excluding snappy-store,
// jobserver and test projects, with internal packages filtered from the
// source set.
task docs(type: ScalaDoc) {
apply plugin: 'scala'
dependsOn deleteDocsDir
Set<String> allSource = []
def docProjects = rootProject.subprojects.collectMany { project ->
if ((project.plugins.hasPlugin('scala') || project.plugins.hasPlugin('java')) &&
// jobserver depends on Apache Spark 1.5.x which causes conflicts
!project.path.contains('snappy-store') &&
!project.name.contains('jobserver') &&
// below three will get filtered with the snappy-store path check itself
// but still keeping it as when we would remove the snappy-store path filter
// still the below three sub projects should not be built.
!project.name.contains('jgroups') &&
!project.name.contains('gemfire-examples') &&
!project.name.contains('trove') &&
!project.name.contains('kafka') &&
// exclude tests
!project.name.contains('tests')) {
allSource.addAll(project.sourceSets.main.allJava.findAll {
!it.getPath().matches('.*/internal/.*') && !it.getPath().contains('com/gemstone/gemfire/cache/operations/')
})
if (project.plugins.hasPlugin('scala')) {
// a couple of spark execution sources break scaladoc; skip them
allSource.addAll(project.sourceSets.main.allScala.findAll {
!it.getPath().matches('.*org/apache/spark/sql/execution/joins/HashedRelation.*') &&
!it.getPath().matches('.*org/apache/spark/sql/execution/debug/package.*')
})
}
[ project ]
} else []
}
source = allSource
// classpath is the union of the selected projects' compile classpaths
classpath = files(docProjects.collect { project ->
project.sourceSets.main.compileClasspath
})
destinationDir = file("${rootProject.buildDir}/docs")
}
// Publish the generated docs to the web site via the repo's shell script.
task publishDocs(type:Exec) {
dependsOn docs
//on linux
commandLine './publish-site.sh'
}
// Full pre-checkin gate: clean, build, check, combined reports, spark
// scalastyle and unified docs.
task precheckin {
dependsOn cleanAll, buildAll, checkAll, allReports
dependsOn ':snappy-spark:scalaStyle', docs
}
// Opt-in per-task build time reporting, enabled with -PtrackBuildTime.
if (project.hasProperty('trackBuildTime') ) {
buildtimetracker {
reporters {
summary {
ordered false
threshold 5000
barstyle "unicode"
}
}
}
}
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
1
https://gitee.com/ericshenjs/snappydata.git
git@gitee.com:ericshenjs/snappydata.git
ericshenjs
snappydata
snappydata
master

搜索帮助

344bd9b3 5694891 D2dac590 5694891