

Commit 32374ed

Update examples/spark for v1beta3
1 parent f9156c2 commit 32374ed

4 files changed: 88 additions, 53 deletions

examples/spark/README.md

Lines changed: 9 additions & 9 deletions
@@ -29,19 +29,19 @@ instructions for your platform.
 The Master service is the master (or head) service for a Spark
 cluster.

-Use the `examples/spark/spark-master.json` file to create a pod running
+Use the `examples/spark/v1beta3/spark-master.json` file to create a pod running
 the Master service.

 ```shell
-$ kubectl create -f examples/spark/spark-master.json
+$ kubectl create -f examples/spark/v1beta3/spark-master.json
 ```

-Then, use the `examples/spark/spark-master-service.json` file to
+Then, use the `examples/spark/v1beta3/spark-master-service.json` file to
 create a logical service endpoint that Spark workers can use to access
 the Master pod.

 ```shell
-$ kubectl create -f examples/spark/spark-master-service.json
+$ kubectl create -f examples/spark/v1beta3/spark-master-service.json
 ```

 Ensure that the Master service is running and functional.
@@ -85,11 +85,11 @@ program.

 The Spark workers need the Master service to be running.

-Use the `examples/spark/spark-worker-controller.json` file to create a
+Use the `examples/spark/v1beta3/spark-worker-controller.json` file to create a
 ReplicationController that manages the worker pods.

 ```shell
-$ kubectl create -f examples/spark/spark-worker-controller.json
+$ kubectl create -f examples/spark/v1beta3/spark-worker-controller.json
 ```

 ### Check to see if the workers are running
@@ -164,10 +164,10 @@ SparkContext available as sc.

 ## tl;dr

-```kubectl create -f spark-master.json```
+```kubectl create -f v1beta3/spark-master.json```

-```kubectl create -f spark-master-service.json```
+```kubectl create -f v1beta3/spark-master-service.json```

 Make sure the Master Pod is running (use: ```kubectl get pods```).

-```kubectl create -f spark-worker-controller.json```
+```kubectl create -f v1beta3/spark-worker-controller.json```
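Taken together, the README changes only point each command at the v1beta3 manifests; the workflow itself is unchanged. A minimal shell sketch of that flow, assuming kubectl is already configured against a running cluster, the repository root is the working directory, and the manifests live at the paths the updated README references:

```shell
# Sketch only: paths follow the updated README; adjust if your checkout differs.
# Create the master pod, its service, and the worker ReplicationController.
kubectl create -f examples/spark/v1beta3/spark-master.json
kubectl create -f examples/spark/v1beta3/spark-master-service.json
kubectl create -f examples/spark/v1beta3/spark-worker-controller.json

# Verify: the master pod should reach Running, the service should expose
# port 7077, and three worker pods should come up under the controller.
kubectl get pods
kubectl get services
kubectl get pods -l name=spark-worker
```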
examples/spark/spark-master-service.json

Lines changed: 19 additions & 7 deletions
@@ -1,9 +1,21 @@
 {
-  "id": "spark-master",
   "kind": "Service",
-  "apiVersion": "v1beta1",
-  "port": 7077,
-  "containerPort": 7077,
-  "selector": { "name": "spark-master" },
-  "labels": { "name": "spark-master" }
-}
+  "apiVersion": "v1beta3",
+  "metadata": {
+    "name": "spark-master",
+    "labels": {
+      "name": "spark-master"
+    }
+  },
+  "spec": {
+    "ports": [
+      {
+        "port": 7077,
+        "targetPort": 7077
+      }
+    ],
+    "selector": {
+      "name": "spark-master"
+    }
+  }
+}
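The Service conversion moves the flat v1beta1 fields under `metadata` and `spec`: `id` becomes `metadata.name`, the `labels` block moves into `metadata`, `port` and `containerPort` become a `spec.ports` entry with `port` and `targetPort`, and the pod `selector` moves under `spec`. A hedged way to confirm the result, assuming the service was created as in the sketch above (not part of this commit), is to inspect it:

```shell
# The describe output should show port 7077 mapped to targetPort 7077
# and a selector of name=spark-master.
kubectl get services
kubectl describe service spark-master
```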

examples/spark/spark-master.json

Lines changed: 23 additions & 15 deletions
@@ -1,20 +1,28 @@
 {
-  "id": "spark-master",
   "kind": "Pod",
-  "apiVersion": "v1beta1",
-  "desiredState": {
-    "manifest": {
-      "version": "v1beta1",
-      "id": "spark-master",
-      "containers": [{
-        "name": "spark-master",
-        "image": "mattf/spark-master",
-        "cpu": 100,
-        "ports": [{ "containerPort": 7077 }]
-      }]
+  "apiVersion": "v1beta3",
+  "metadata": {
+    "name": "spark-master",
+    "labels": {
+      "name": "spark-master"
     }
   },
-  "labels": {
-    "name": "spark-master"
+  "spec": {
+    "containers": [
+      {
+        "name": "spark-master",
+        "image": "mattf/spark-master",
+        "ports": [
+          {
+            "containerPort": 7077
+          }
+        ],
+        "resources": {
+          "limits": {
+            "cpu": "100m"
+          }
+        }
+      }
+    ]
   }
-}
+}
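For the Pod, the `desiredState.manifest` wrapper collapses into `spec`, the `id` and `labels` move into `metadata`, and the bare `"cpu": 100` field is replaced by a `resources.limits` block using the millicore string `"100m"`. Assuming the pod was created from this manifest, a short sanity check (again a sketch, not part of the commit) is:

```shell
# The pod should report container port 7077 and a CPU limit of 100m.
kubectl get pods
kubectl describe pod spark-master
```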
examples/spark/spark-worker-controller.json

Lines changed: 37 additions & 22 deletions
@@ -1,28 +1,43 @@
 {
-  "id": "spark-worker-controller",
   "kind": "ReplicationController",
-  "apiVersion": "v1beta1",
-  "desiredState": {
+  "apiVersion": "v1beta3",
+  "metadata": {
+    "name": "spark-worker-controller",
+    "labels": {
+      "name": "spark-worker"
+    }
+  },
+  "spec": {
     "replicas": 3,
-    "replicaSelector": {"name": "spark-worker"},
-    "podTemplate": {
-      "desiredState": {
-        "manifest": {
-          "version": "v1beta1",
-          "id": "spark-worker-controller",
-          "containers": [{
-            "name": "spark-worker",
-            "image": "mattf/spark-worker",
-            "cpu": 100,
-            "ports": [{"containerPort": 8888, "hostPort": 8888}]
-          }]
-        }
+    "selector": {
+      "name": "spark-worker"
+    },
+    "template": {
+      "metadata": {
+        "labels": {
+          "name": "spark-worker",
+          "uses": "spark-master"
+        }
       },
-      "labels": {
-        "name": "spark-worker",
-        "uses": "spark-master"
+      "spec": {
+        "containers": [
+          {
+            "name": "spark-worker",
+            "image": "mattf/spark-worker",
+            "ports": [
+              {
+                "hostPort": 8888,
+                "containerPort": 8888
+              }
+            ],
+            "resources": {
+              "limits": {
+                "cpu": "100m"
+              }
+            }
+          }
+        ]
       }
     }
-  },
-  "labels": {"name": "spark-worker"}
-}
+  }
+}
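The ReplicationController follows the same pattern: `desiredState` becomes `spec`, `replicaSelector` becomes `spec.selector`, and `podTemplate` becomes `spec.template` with its own `metadata` (labels) and `spec` (containers), again trading `"cpu": 100` for a `resources.limits.cpu` of `"100m"`. A hedged check after creating the controller, not part of the commit, might look like this:

```shell
# Expect three replicas, each worker pod exposing hostPort 8888 and carrying
# the labels name=spark-worker and uses=spark-master.
kubectl get replicationcontrollers
kubectl get pods -l name=spark-worker
```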
