dist.gradle
apply plugin: "maven"
apply plugin: "propdeps-maven"
def customizePom(pom, gradleProject) {
    pom.whenConfigured { generatedPom ->
        // eliminate test-scoped dependencies (no need in maven central poms)
        generatedPom.dependencies.removeAll { dep ->
            dep.scope == 'test' || dep.artifactId == 'elasticsearch-hadoop-mr'
        }

        // for es-hadoop optional is best served as provided/optional vs compile/optional
        generatedPom.dependencies.findAll { it.optional == true }.each {
            it.scope = "provided"
        }

        ext.cascading = generatedPom.dependencies.any { it.groupId == 'cascading' }
        ext.storm = generatedPom.dependencies.any { it.groupId == 'org.apache.storm' }

        if (cascading || storm)
            generatedPom.project {
                repositories {
                    if (cascading)
                        repository {
                            id = 'conjars.org'
                            url = 'http://conjars.org/repo'
                        }
                    if (storm)
                        repository {
                            id = 'clojars.org'
                            url = 'http://clojars.org/repo'
                        }
                }
            }

        // add all items necessary for maven central publication
        generatedPom.project {
            name = gradleProject.description
            description = gradleProject.description
            url = 'http://github.com/elastic/elasticsearch-hadoop'
            organization {
                name = 'Elastic'
                url = 'http://www.elastic.co/'
            }
            licenses {
                license {
                    name = 'The Apache Software License, Version 2.0'
                    url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
                    distribution = 'repo'
                }
            }
            scm {
                url = 'http://github.com/elastic/elasticsearch-hadoop'
                connection = 'scm:git:git://github.com/elastic/elasticsearch-hadoop'
                developerConnection = 'scm:git:git://github.com/elastic/elasticsearch-hadoop'
            }
            developers {
                developer {
                    id = 'costin'
                    name = 'Costin Leau'
                    email = '[email protected]'
                    properties {
                        twitter = 'costinl'
                    }
                }
            }
        }

        groupId = "org.elasticsearch"
        artifactId = project.archivesBaseName
    }
}
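
// For illustration only: with the hook above, an optional dependency is published as
// provided + optional instead of compile + optional, roughly like the fragment below
// (the coordinates are hypothetical):
//
//   <dependency>
//     <groupId>org.apache.hive</groupId>
//     <artifactId>hive-service</artifactId>
//     <scope>provided</scope>
//     <optional>true</optional>
//   </dependency>
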
ext.deployUsername = { project.hasProperty("repoUsername") ? getProperty("repoUsername") : "" }
ext.deployPassword = { project.hasProperty("repoPassword") ? getProperty("repoPassword") : "" }
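
// The Sonatype credentials are resolved lazily so that builds without them still configure;
// they are expected as project properties, e.g. in ~/.gradle/gradle.properties or passed
// on the command line:
//
//   gradle uploadArchives -PrepoUsername=someUser -PrepoPassword=somePassword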
repositories {
    flatDir {
        name "fileRepo"
        dirs "repo"
    }
}
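
// Local flat-file repository (./repo) kept as an alternative deployment target
// (see the commented-out fileRepo line inside uploadArchives below).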
uploadArchives {
    repositories {
        //add project.repositories.fileRepo
        mavenDeployer {
            customizePom(pom, project)

            repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") {
                authentication(userName: deployUsername(), password: deployPassword())
            }
            snapshotRepository(url: "https://oss.sonatype.org/content/repositories/snapshots/") {
                authentication(userName: deployUsername(), password: deployPassword())
            }
        }
    }
}
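
// The mavenDeployer routes the upload by project version: versions ending in -SNAPSHOT go to
// the snapshot repository, all other versions to the Sonatype staging repository configured above.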
install {
    repositories.mavenInstaller {
        customizePom(pom, project)
    }
}
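
// 'install' publishes the same artifacts, with the customized POM, into the local Maven
// repository (~/.m2/repository), e.g. via:
//
//   gradle install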
//
// S3
//
// Task to upload the artifacts attached to the 's3' configuration.
// It needs to be shared across scripts, so it is defined as a task instead of a plain def;
// a lazy dependsOn could work, but is avoided since not all projects use distZip.
task uploadToS3() {
    group = "Distribution"
    description = "Upload ZIPs to S3"
    logging.level = LogLevel.INFO

    // execution phase only
    doLast() {
        // distZip might not create an s3 config so check its existence first
        if (configurations.find({ it.name == 's3' })) {
            uploadArtifactsToS3(project, toDir)
        }
    }
}
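
// uploadArtifactsToS3 expects the calling project to supply 'toDir' and the
// 's3AccessKey'/'s3SecretAccessKey' properties, and to have attached its archives to an
// 's3' configuration. A hypothetical sketch of that wiring in a consuming project:
//
//   configurations { s3 }
//   artifacts { s3 distZip }
//   ext.toDir = "hadoop/dist"   // hypothetical destination folder inside the bucket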
def uploadArtifactsToS3(target, toDir) {
    configurations { antlibs }

    dependencies {
        antlibs "org.springframework.build:org.springframework.build.aws.ant:3.0.6.RELEASE"
        antlibs "net.java.dev.jets3t:jets3t:0.8.1"
    }

    // raise the log level so the Ant tasks' output is visible
    target.logging.level = LogLevel.INFO

    ant {
        taskdef(resource: 'org/springframework/build/aws/ant/antlib.xml', classpath: configurations.antlibs.asPath)

        s3(accessKey: s3AccessKey, secretKey: s3SecretAccessKey) {
            target.configurations["s3"].artifacts.each { artifact ->
                def archive = artifact.archiveTask

                upload(bucketName: 'download.elasticsearch.org', file: archive.archivePath,
                       toFile: toDir + "/${archive.archiveName}",
                       publicRead: 'false') {
                    metadata(name: 'project.name', value: project)
                    metadata(name: 'package.file.name', value: archive.archiveName)
                }

                // checksum
                def checksum = file(archive.archivePath.absolutePath + ".sha1.txt")
                if (checksum.exists()) {
                    upload(bucketName: 'download.elasticsearch.org', file: archive.archivePath.absolutePath + ".sha1.txt",
                           toFile: toDir + "/${archive.archiveName}" + ".sha1.txt",
                           publicRead: 'false')
                }
            }
        }
    }
}