Docker-DCO-1.1-Signed-off-by: SvenDowideit <SvenDowideit@home.org.au> (github: SvenDowideit)
@@ -13,10 +13,8 @@ RUN pip install mkdocs
 #RUN easy_install -U setuptools
 #RUN pip install MarkdownTools2
 
-# this week I seem to need the latest dev release of awscli too
-# awscli 1.3.6 does --error-document correctly
-# https://github.com/aws/aws-cli/commit/edc2290e173dfaedc70b48cfa3624d58c533c6c3
-RUN pip install awscli
+# this version works; the current versions fail in different ways
+RUN pip install awscli==1.3.9
 
 # get my sitemap.xml branch of mkdocs and use that for now
 RUN git clone https://github.com/SvenDowideit/mkdocs &&\
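
Note on the pin: if the build should fail fast whenever the pin is loosened or silently ignored, a version assertion could follow the install. A minimal sketch, assuming awscli 1.x prints its version banner (aws-cli/1.3.9 ...) on stderr:

    # fail the build if anything other than the pinned awscli got installed
    RUN pip install awscli==1.3.9 && aws --version 2>&1 | grep -q 'aws-cli/1.3.9'
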
@@ -30,10 +30,10 @@ echo "cfg file: $AWS_CONFIG_FILE ; profile: $AWS_DEFAULT_PROFILE"
 setup_s3() {
     echo "Create $BUCKET"
     # Try creating the bucket. Ignore errors (it might already exist).
-    aws s3 mb s3://$BUCKET 2>/dev/null || true
+    aws s3 mb --profile $BUCKET s3://$BUCKET 2>/dev/null || true
     # Check access to the bucket.
     echo "test $BUCKET exists"
-    aws s3 ls s3://$BUCKET
+    aws s3 --profile $BUCKET ls s3://$BUCKET
     # Make the bucket accessible through website endpoints.
     echo "make $BUCKET accessible as a website"
     #aws s3 website s3://$BUCKET --index-document index.html --error-document jsearch/index.html
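
The --profile $BUCKET additions route every call through a named profile, reusing the bucket name as the profile name; together with the cfg-file/profile echo above, this implies one section per bucket in the file AWS_CONFIG_FILE points at. A minimal sketch of such a section, with a hypothetical bucket name and placeholder credentials:

    [profile docs.example.com]
    aws_access_key_id = AKIAEXAMPLE
    aws_secret_access_key = wJalrEXAMPLEKEY
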
@@ -41,7 +41,7 @@ setup_s3() {
     echo
     echo $s3conf
     echo
-    aws s3api put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
+    aws s3api --profile $BUCKET put-bucket-website --bucket $BUCKET --website-configuration "$s3conf"
 }
 
 build_current_documentation() {
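
put-bucket-website expects the website configuration as a JSON WebsiteConfiguration document. Given the index and error documents named in the commented-out aws s3 website line above, $s3conf presumably carries something shaped like this (a sketch, not the script's exact value):

    {
        "IndexDocument": { "Suffix": "index.html" },
        "ErrorDocument": { "Key": "jsearch/index.html" }
    }
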
@@ -57,7 +57,42 @@ upload_current_documentation() {
     echo " to $dst"
     echo
     #s3cmd --recursive --follow-symlinks --preserve --acl-public sync "$src" "$dst"
-    aws s3 sync --cache-control "max-age=3600" --acl public-read --exclude "*.rej" --exclude "*.rst" --exclude "*.orig" --exclude "*.py" "$src" "$dst"
+    #aws s3 cp --profile $BUCKET --cache-control "max-age=3600" --acl public-read "site/search_content.json" "$dst"
+
+    # a really complicated way to send only the files we want
+    # if there are too many in any one set, aws s3 sync seems to fall over with 2 files to go
+    endings=( json html xml css js gif png JPG )
+    for i in ${endings[@]}; do
+        include=""
+        for j in ${endings[@]}; do
+            if [ "$i" != "$j" ]; then
+                include="$include --exclude *.$j"
+            fi
+        done
+        echo "uploading *.$i"
+        run="aws s3 sync --profile $BUCKET --cache-control \"max-age=3600\" --acl public-read \
+            $include \
+            --exclude *.txt \
+            --exclude *.text* \
+            --exclude *Dockerfile \
+            --exclude *.DS_Store \
+            --exclude *.psd \
+            --exclude *.ai \
+            --exclude *.svg \
+            --exclude *.eot \
+            --exclude *.otf \
+            --exclude *.ttf \
+            --exclude *.woff \
+            --exclude *.rej \
+            --exclude *.rst \
+            --exclude *.orig \
+            --exclude *.py \
+            $src $dst"
+        echo "======================="
+        #echo "$run"
+        #echo "======================="
+        $run
+    done
 }
 
 setup_s3
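
In the new upload loop, the inner pass builds --exclude flags for every extension except the current one, so each sync batch moves a single file type and stays small enough to avoid the hang described in the comment. The same per-type filtering could be expressed with the CLI's ordered --exclude/--include filters, assuming they behave in this awscli release as documented for later ones; a sketch, not a drop-in replacement:

    for i in ${endings[@]}; do
        # exclude everything, then re-include only the current file type
        aws s3 sync --profile $BUCKET --cache-control "max-age=3600" --acl public-read \
            --exclude "*" --include "*.$i" "$src" "$dst"
    done

Quoting the patterns here also stops the shell from glob-expanding *.txt and friends against the current directory, a risk the unquoted patterns inside $run carry when the string is executed.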