
skip solaris and allow dry run control points

Signed-off-by: Andrew Hsu <andrewhsu@docker.com>

Andrew Hsu authored on 2016/11/22 18:12:40
Showing 1 changed file
@@ -45,6 +45,8 @@ cd /go/src/github.com/docker/docker
 export AWS_DEFAULT_REGION
 : ${AWS_DEFAULT_REGION:=us-west-1}
 
+AWS_CLI=${AWS_CLI:-'aws'}
+
 RELEASE_BUNDLES=(
 	binary
 	cross
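Because the aws binary is now read from $AWS_CLI (and the variable is expanded unquoted in the calls below), the upload path can be exercised without real S3 access. A minimal sketch; the echo stand-in is purely illustrative and not part of this change:

    # Hypothetical dry run: "$AWS_CLI s3 ..." expands to "echo aws s3 ...",
    # so every S3 command is printed instead of executed.
    export AWS_CLI='echo aws'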
@@ -80,11 +82,11 @@ fi
 setup_s3() {
 	echo "Setting up S3"
 	# Try creating the bucket. Ignore errors (it might already exist).
-	aws s3 mb "s3://$BUCKET" 2>/dev/null || true
+	$AWS_CLI s3 mb "s3://$BUCKET" 2>/dev/null || true
 	# Check access to the bucket.
-	aws s3 ls "s3://$BUCKET" >/dev/null
+	$AWS_CLI s3 ls "s3://$BUCKET" >/dev/null
 	# Make the bucket accessible through website endpoints.
-	aws s3 website --index-document index --error-document error "s3://$BUCKET"
+	$AWS_CLI s3 website --index-document index --error-document error "s3://$BUCKET"
 }
 
 # write_to_s3 uploads the contents of standard input to the specified S3 url.
@@ -92,7 +94,7 @@ write_to_s3() {
 	DEST=$1
 	F=`mktemp`
 	cat > "$F"
-	aws s3 cp --acl public-read --content-type 'text/plain' "$F" "$DEST"
+	$AWS_CLI s3 cp --acl public-read --content-type 'text/plain' "$F" "$DEST"
 	rm -f "$F"
 }
 
@@ -147,12 +149,12 @@ upload_release_build() {
 	echo "Uploading $src"
 	echo "  to $dst"
 	echo
-	aws s3 cp --follow-symlinks --acl public-read "$src" "$dst"
+	$AWS_CLI s3 cp --follow-symlinks --acl public-read "$src" "$dst"
 	if [ "$latest" ]; then
 		echo
 		echo "Copying to $latest"
 		echo
-		aws s3 cp --acl public-read "$dst" "$latest"
+		$AWS_CLI s3 cp --acl public-read "$dst" "$latest"
 	fi
 
 	# get hash files too (see hash_files() in hack/make.sh)
@@ -162,12 +164,12 @@ upload_release_build() {
 			echo "Uploading $src.$hashAlgo"
 			echo "  to $dst.$hashAlgo"
 			echo
-			aws s3 cp --follow-symlinks --acl public-read --content-type='text/plain' "$src.$hashAlgo" "$dst.$hashAlgo"
+			$AWS_CLI s3 cp --follow-symlinks --acl public-read --content-type='text/plain' "$src.$hashAlgo" "$dst.$hashAlgo"
 			if [ "$latest" ]; then
 				echo
 				echo "Copying to $latest.$hashAlgo"
 				echo
-				aws s3 cp --acl public-read "$dst.$hashAlgo" "$latest.$hashAlgo"
+				$AWS_CLI s3 cp --acl public-read "$dst.$hashAlgo" "$latest.$hashAlgo"
 			fi
 		fi
 	done
@@ -205,6 +207,10 @@ release_build() {
 		linux)
 			s3Os=Linux
 			;;
+		solaris)
+			echo skipping solaris release
+			return 0
+			;;
 		windows)
 			# this is windows use the .zip and .exe extensions for the files.
 			s3Os=Windows
@@ -281,7 +287,7 @@ EOF
 
 	# Add redirect at /builds/info for URL-backwards-compatibility
 	rm -rf /tmp/emptyfile && touch /tmp/emptyfile
-	aws s3 cp --acl public-read --website-redirect '/builds/' --content-type='text/plain' /tmp/emptyfile "s3://$BUCKET_PATH/builds/info"
+	$AWS_CLI s3 cp --acl public-read --website-redirect '/builds/' --content-type='text/plain' /tmp/emptyfile "s3://$BUCKET_PATH/builds/info"
 
 	if [ -z "$NOLATEST" ]; then
 		echo "Advertising $VERSION on $BUCKET_PATH as most recent version"
@@ -297,7 +303,7 @@ release_index() {
 }
 
 main() {
-	build_all
+	[ "$SKIP_RELEASE_BUILD" -eq 1 ] || build_all
 	setup_s3
 	release_binaries
 	release_index
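Together, the two new control points allow a full dry run: SKIP_RELEASE_BUILD=1 makes main() skip build_all, and AWS_CLI redirects the S3 calls. A sketch of such an invocation; the hack/release.sh path and the echo stand-in are assumptions for illustration, not part of this change:

    # Hypothetical dry run: skip the build step and print the S3 commands
    # that would have been executed.
    SKIP_RELEASE_BUILD=1 AWS_CLI='echo aws' ./hack/release.sh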