diff --git a/e2e/metrics/input/helloworld.nomad b/e2e/metrics/input/helloworld.nomad
index 3870ce2d4..8b7630fd1 100644
--- a/e2e/metrics/input/helloworld.nomad
+++ b/e2e/metrics/input/helloworld.nomad
@@ -19,7 +19,7 @@ job "hello" {
       }
 
       artifact {
-        source      = "https://s3.amazonaws.com/nomad-community-demo/hellov1"
+        source      = "https://nomad-community-demo.s3.amazonaws.com/hellov1"
         destination = "local/hello"
         mode        = "file"
       }
@@ -47,4 +47,4 @@ job "hello" {
       }
     }
   }
-}
\ No newline at end of file
+}
diff --git a/e2e/terraform/shared/scripts/setup.sh b/e2e/terraform/shared/scripts/setup.sh
index 3106d9248..015334a12 100644
--- a/e2e/terraform/shared/scripts/setup.sh
+++ b/e2e/terraform/shared/scripts/setup.sh
@@ -128,7 +128,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
 
 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark
diff --git a/terraform/examples/spark/README.md b/terraform/examples/spark/README.md
index 844c377d0..dca2a3992 100644
--- a/terraform/examples/spark/README.md
+++ b/terraform/examples/spark/README.md
@@ -90,8 +90,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
 
 ### Word count (Java)
@@ -105,8 +105,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
   hdfs://hdfs.service.consul/foo/history.log
 ```
 
@@ -121,8 +121,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
   /etc/sudoers hdfs://hdfs.service.consul/foo
 ```
 
@@ -134,7 +134,7 @@ Start the shell:
 spark-shell \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```
 
 Run a few commands:
@@ -155,7 +155,7 @@ Start the shell:
 spark-sql \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
 ```
 
 Run a few commands:
@@ -178,7 +178,7 @@ Start the shell:
 pyspark \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```
 
 Run a few commands:
diff --git a/terraform/shared/scripts/setup.sh b/terraform/shared/scripts/setup.sh
index 56ca2d85c..56b3a30cb 100644
--- a/terraform/shared/scripts/setup.sh
+++ b/terraform/shared/scripts/setup.sh
@@ -173,7 +173,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
 
 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark
diff --git a/website/source/api/json-jobs.html.md b/website/source/api/json-jobs.html.md
index d22591938..01945b65a 100644
--- a/website/source/api/json-jobs.html.md
+++ b/website/source/api/json-jobs.html.md
@@ -883,7 +883,7 @@ Path based style:
 {
   "Artifacts": [
     {
-      "GetterSource": "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz",
+      "GetterSource": "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz",
     }
   ]
 }
@@ -895,7 +895,7 @@ or to override automatic detection in the URL, use the S3-specific syntax
 {
   "Artifacts": [
     {
-      "GetterSource": "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz",
+      "GetterSource": "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz",
     }
   ]
 }
diff --git a/website/source/docs/commands/job/dispatch.html.md.erb b/website/source/docs/commands/job/dispatch.html.md.erb
index 6aa105536..dcd8410f1 100644
--- a/website/source/docs/commands/job/dispatch.html.md.erb
+++ b/website/source/docs/commands/job/dispatch.html.md.erb
@@ -66,8 +66,8 @@ passing in a configuration payload via stdin:
 ```
 $ cat << EOF | nomad job dispatch video-encode -
 {
-  "s3-input": "https://s3-us-west-1.amazonaws.com/video-bucket/cb31dabb1",
-  "s3-output": "https://s3-us-west-1.amazonaws.com/video-bucket/a149adbe3",
+  "s3-input": "https://video-bucket.s3-us-west-1.amazonaws.com/cb31dabb1",
+  "s3-output": "https://video-bucket.s3-us-west-1.amazonaws.com/a149adbe3",
   "input-codec": "mp4",
   "output-codec": "webm",
   "quality": "1080p"
diff --git a/website/source/docs/job-specification/artifact.html.md b/website/source/docs/job-specification/artifact.html.md
index e99fd4e46..c0b5203fb 100644
--- a/website/source/docs/job-specification/artifact.html.md
+++ b/website/source/docs/job-specification/artifact.html.md
@@ -155,7 +155,7 @@ This example uses path-based notation on a publicly-accessible bucket:
 
 ```hcl
 artifact {
-  source = "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz"
+  source = "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz"
 }
 ```
 
@@ -176,7 +176,7 @@ To force the S3-specific syntax, use the `s3::` prefix:
 
 ```hcl
 artifact {
-  source = "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz"
+  source = "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz"
 }
 ```
 
diff --git a/website/source/guides/spark/monitoring.html.md b/website/source/guides/spark/monitoring.html.md
index 69430664d..8ee3f9215 100644
--- a/website/source/guides/spark/monitoring.html.md
+++ b/website/source/guides/spark/monitoring.html.md
@@ -119,8 +119,8 @@ $ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
 
 ## Logs
diff --git a/website/source/guides/spark/pre.html.md b/website/source/guides/spark/pre.html.md
index 0dfedd6a3..91eec1d71 100644
--- a/website/source/guides/spark/pre.html.md
+++ b/website/source/guides/spark/pre.html.md
@@ -28,7 +28,7 @@ To manually configure provision a cluster, see the Nomad
 basic prerequisites to using the Spark integration once you have a cluster up
 and running:
 
-- Access to a [Spark distribution](https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz)
+- Access to a [Spark distribution](https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz)
   built with Nomad support. This is required for the machine that will submit
   applications as well as the Nomad tasks that will run the Spark executors.
 
@@ -55,7 +55,7 @@ Install Spark:
 
 ```shell
 
-$ wget -O - https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+$ wget -O - https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
     | sudo tar xz -C /usr/local
 $ export PATH=$PATH:/usr/local/spark-2.1.0-bin-nomad/bin
 ```
@@ -83,8 +83,8 @@ $ spark-submit \
   --master nomad \
   --deploy-mode cluster \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
 
 ### Using a Docker Image
diff --git a/website/source/guides/spark/submit.html.md b/website/source/guides/spark/submit.html.md
index 146297711..6110e76b8 100644
--- a/website/source/guides/spark/submit.html.md
+++ b/website/source/guides/spark/submit.html.md
@@ -41,7 +41,7 @@ application:
 ```shell
 $ spark-submit --class org.apache.spark.examples.SparkPi \
     --master nomad \
-    --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+    --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
     lib/spark-examples*.jar \
     10
 ```