Merge pull request #5653 from hashicorp/b-s3-virtualbucket-style

docs: update s3 urls to use virtual bucket style
This commit is contained in:
Mahmood Ali 2019-05-06 11:40:48 -04:00 committed by GitHub
commit 12208a8cd8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 26 additions and 26 deletions

View File

@@ -19,7 +19,7 @@ job "hello" {
 }
 artifact {
-source = "https://s3.amazonaws.com/nomad-community-demo/hellov1"
+source = "https://nomad-community-demo.s3.amazonaws.com/hellov1"
 destination = "local/hello"
 mode = "file"
 }
@@ -47,4 +47,4 @@ job "hello" {
 }
 }
 }
 }

View File

@@ -128,7 +128,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark

View File

@@ -90,8 +90,8 @@ spark-submit \
 --conf spark.nomad.cluster.monitorUntil=complete \
 --conf spark.eventLog.enabled=true \
 --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
-https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
### Word count (Java)
@@ -105,8 +105,8 @@ spark-submit \
 --conf spark.nomad.cluster.monitorUntil=complete \
 --conf spark.eventLog.enabled=true \
 --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
-https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
 hdfs://hdfs.service.consul/foo/history.log
 ```
@@ -121,8 +121,8 @@ spark-submit \
 --conf spark.nomad.cluster.monitorUntil=complete \
 --conf spark.eventLog.enabled=true \
 --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
-https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
 /etc/sudoers hdfs://hdfs.service.consul/foo
 ```
@@ -134,7 +134,7 @@ Start the shell:
 spark-shell \
 --master nomad \
 --conf spark.executor.instances=4 \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```
Run a few commands:
@@ -155,7 +155,7 @@ Start the shell:
 spark-sql \
 --master nomad \
 --conf spark.executor.instances=4 \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
 ```
Run a few commands:
@@ -178,7 +178,7 @@ Start the shell:
 pyspark \
 --master nomad \
 --conf spark.executor.instances=4 \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```
Run a few commands:

View File

@@ -173,7 +173,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark

View File

@@ -883,7 +883,7 @@ Path based style:
 {
 "Artifacts": [
 {
-"GetterSource": "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz",
+"GetterSource": "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz",
 }
 ]
 }
@@ -895,7 +895,7 @@ or to override automatic detection in the URL, use the S3-specific syntax
 {
 "Artifacts": [
 {
-"GetterSource": "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz",
+"GetterSource": "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz",
 }
 ]
 }

View File

@@ -66,8 +66,8 @@ passing in a configuration payload via stdin:
 ```
 $ cat << EOF | nomad job dispatch video-encode -
 {
-"s3-input": "https://s3-us-west-1.amazonaws.com/video-bucket/cb31dabb1",
+"s3-input": "https://video-bucket.s3-us-west-1.amazonaws.com/cb31dabb1",
-"s3-output": "https://s3-us-west-1.amazonaws.com/video-bucket/a149adbe3",
+"s3-output": "https://video-bucket.s3-us-west-1.amazonaws.com/a149adbe3",
 "input-codec": "mp4",
 "output-codec": "webm",
 "quality": "1080p"

View File

@@ -155,7 +155,7 @@ This example uses path-based notation on a publicly-accessible bucket:
 ```hcl
 artifact {
-source = "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz"
+source = "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz"
 }
 ```
@@ -176,7 +176,7 @@ To force the S3-specific syntax, use the `s3::` prefix:
 ```hcl
 artifact {
-source = "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz"
+source = "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz"
 }
 ```

View File

@@ -119,8 +119,8 @@ $ spark-submit \
 --conf spark.nomad.cluster.monitorUntil=complete \
 --conf spark.eventLog.enabled=true \
 --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
-https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
## Logs

View File

@@ -28,7 +28,7 @@ To manually configure provision a cluster, see the Nomad
 basic prerequisites to using the Spark integration once you have a cluster up
 and running:
-- Access to a [Spark distribution](https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz)
+- Access to a [Spark distribution](https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz)
 built with Nomad support. This is required for the machine that will submit
 applications as well as the Nomad tasks that will run the Spark executors.
@@ -55,7 +55,7 @@ Install Spark:
 ```shell
-$ wget -O - https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+$ wget -O - https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
 | sudo tar xz -C /usr/local
 $ export PATH=$PATH:/usr/local/spark-2.1.0-bin-nomad/bin
 ```
@@ -83,8 +83,8 @@ $ spark-submit \
 --master nomad \
 --deploy-mode cluster \
 --conf spark.executor.instances=4 \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
-https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```
### Using a Docker Image

View File

@@ -41,7 +41,7 @@ application:
 ```shell
 $ spark-submit --class org.apache.spark.examples.SparkPi \
 --master nomad \
---conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+--conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
 lib/spark-examples*.jar \
 10
 ```