Merge pull request #5653 from hashicorp/b-s3-virtualbucket-style
docs: update s3 urls to use virtual bucket style
commit 12208a8cd8
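Every substitution in the hunks below applies the same rewrite rule: move the bucket name out of the URL path and into the hostname. As a rough sketch of the two S3 addressing styles (the bucket `my-bucket-example` and key `app.tar.gz` here are hypothetical illustrations, not names from this commit):

```shell
# Path-style addressing: the bucket is the first segment of the path.
#   https://s3.amazonaws.com/<bucket>/<key>
#   https://s3-<region>.amazonaws.com/<bucket>/<key>
#
# Virtual-hosted style: the bucket is part of the hostname.
#   https://<bucket>.s3.amazonaws.com/<key>
#   https://<bucket>.s3-<region>.amazonaws.com/<key>

# The same hypothetical object fetched both ways:
curl -fsSL -o app.tar.gz "https://s3.amazonaws.com/my-bucket-example/app.tar.gz"   # path style
curl -fsSL -o app.tar.gz "https://my-bucket-example.s3.amazonaws.com/app.tar.gz"  # virtual-hosted style
```

AWS has announced the deprecation of path-style addressing, which is presumably the motivation for moving the docs examples to the virtual-hosted form.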
@@ -19,7 +19,7 @@ job "hello" {
       }

       artifact {
-        source      = "https://s3.amazonaws.com/nomad-community-demo/hellov1"
+        source      = "https://nomad-community-demo.s3.amazonaws.com/hellov1"
         destination = "local/hello"
         mode        = "file"
       }

@@ -128,7 +128,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")

 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark

@@ -90,8 +90,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```

 ### Word count (Java)

@@ -105,8 +105,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
   hdfs://hdfs.service.consul/foo/history.log
 ```

@@ -121,8 +121,8 @@ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar \
   /etc/sudoers hdfs://hdfs.service.consul/foo
 ```

@@ -134,7 +134,7 @@ Start the shell:
 spark-shell \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```

 Run a few commands:

@@ -155,7 +155,7 @@ Start the shell:
 spark-sql \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz jars/spark-sql_2.11-2.1.0-SNAPSHOT.jar
 ```

 Run a few commands:

@@ -178,7 +178,7 @@ Start the shell:
 pyspark \
   --master nomad \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz
 ```

 Run a few commands:

@@ -173,7 +173,7 @@ sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")

 # Spark
-sudo wget -P /ops/examples/spark https://s3.amazonaws.com/nomad-spark/spark-2.2.0-bin-nomad-0.7.0.tgz
+sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
 sudo tar -xf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark

@@ -883,7 +883,7 @@ Path based style:
 {
   "Artifacts": [
     {
-      "GetterSource": "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz",
+      "GetterSource": "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz",
     }
   ]
 }

@@ -895,7 +895,7 @@ or to override automatic detection in the URL, use the S3-specific syntax
 {
   "Artifacts": [
     {
-      "GetterSource": "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz",
+      "GetterSource": "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz",
     }
   ]
 }

@@ -66,8 +66,8 @@ passing in a configuration payload via stdin:
 ```
 $ cat << EOF | nomad job dispatch video-encode -
 {
-  "s3-input": "https://s3-us-west-1.amazonaws.com/video-bucket/cb31dabb1",
-  "s3-output": "https://s3-us-west-1.amazonaws.com/video-bucket/a149adbe3",
+  "s3-input": "https://video-bucket.s3-us-west-1.amazonaws.com/cb31dabb1",
+  "s3-output": "https://video-bucket.s3-us-west-1.amazonaws.com/a149adbe3",
   "input-codec": "mp4",
   "output-codec": "webm",
   "quality": "1080p"

@@ -155,7 +155,7 @@ This example uses path-based notation on a publicly-accessible bucket:

 ```hcl
 artifact {
-  source = "https://s3-us-west-2.amazonaws.com/my-bucket-example/my_app.tar.gz"
+  source = "https://my-bucket-example.s3-us-west-2.amazonaws.com/my_app.tar.gz"
 }
 ```

@@ -176,7 +176,7 @@ To force the S3-specific syntax, use the `s3::` prefix:

 ```hcl
 artifact {
-  source = "s3::https://s3-eu-west-1.amazonaws.com/my-bucket-example/my_app.tar.gz"
+  source = "s3::https://my-bucket-example.s3-eu-west-1.amazonaws.com/my_app.tar.gz"
 }
 ```

@@ -119,8 +119,8 @@ $ spark-submit \
   --conf spark.nomad.cluster.monitorUntil=complete \
   --conf spark.eventLog.enabled=true \
   --conf spark.eventLog.dir=hdfs://hdfs.service.consul/spark-events \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```

 ## Logs

@@ -28,7 +28,7 @@ To manually configure provision a cluster, see the Nomad
 basic prerequisites to using the Spark integration once you have a cluster up
 and running:

-- Access to a [Spark distribution](https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz)
+- Access to a [Spark distribution](https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz)
   built with Nomad support. This is required for the machine that will submit
   applications as well as the Nomad tasks that will run the Spark executors.

@@ -55,7 +55,7 @@ Install Spark:


 ```shell
-$ wget -O - https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+$ wget -O - https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
   | sudo tar xz -C /usr/local
 $ export PATH=$PATH:/usr/local/spark-2.1.0-bin-nomad/bin
 ```

@@ -83,8 +83,8 @@ $ spark-submit \
   --master nomad \
   --deploy-mode cluster \
   --conf spark.executor.instances=4 \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
-  https://s3.amazonaws.com/nomad-spark/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
+  https://nomad-spark.s3.amazonaws.com/spark-examples_2.11-2.1.0-SNAPSHOT.jar 100
 ```

 ### Using a Docker Image

@@ -41,7 +41,7 @@ application:
 ```shell
 $ spark-submit --class org.apache.spark.examples.SparkPi \
   --master nomad \
-  --conf spark.nomad.sparkDistribution=https://s3.amazonaws.com/nomad-spark/spark-2.1.0-bin-nomad.tgz \
+  --conf spark.nomad.sparkDistribution=https://nomad-spark.s3.amazonaws.com/spark-2.1.0-bin-nomad.tgz \
   lib/spark-examples*.jar \
   10
 ```