smake (executable file)
#!/bin/bash
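# smake: helper script for a Spark 1.1.1 standalone cluster. It can compile the
# bundled sources with sbt, configure this machine as the master, register worker
# hosts, start/stop the cluster and submit the compiled application with a
# makefile as argument. See usage() below for the available commands.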
SPARK_DIR="spark-1.1.1"
SPARK_ENV="$SPARK_DIR/conf/spark-env.sh"
SPARK_SLAVES="$SPARK_DIR/conf/slaves"
SPARK_START="$SPARK_DIR/sbin/start-all.sh"
SPARK_STOP="$SPARK_DIR/sbin/stop-all.sh"
SPARK_SUBMIT="$SPARK_DIR/bin/spark-submit"
SPARK_APP="$SPARK_DIR/target/scala-2.10/simple-project_2.10-1.0.jar"
SBT_FILE="simple.sbt"
SRC_DIR="src"
IP_ADDR=$(hostname -i)
TOTAL_CORES=20
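# Note: the paths above assume Spark has been unpacked into ./spark-1.1.1
# (see the get-spark.sh check below). TOTAL_CORES is only a default; it can be
# overridden with --cores before --run.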
#Useless funcs
function splash {
    echo "               _       "
    echo " ____ __  __ _| |_____ "
    echo "(_-< '  \/ _\` | / / -_)"
    echo "/__/_|_|_\__,_|_\_\___|"
    echo
}
function usage {
    echo "Usage:"
    echo -e "\t--compile: compile the source code to be executed"
    echo -e "\t--master: configure this machine as the master"
    echo -e "\t--clear-workers: remove all workers from the config file"
    echo -e "\t--worker <hostname>: add a worker to the worker list"
    echo -e "\t--start: start the configured cluster"
    echo -e "\t--cores <n>: specify the number of cores to use; must be given before --run"
    echo -e "\t--run <makefile>: run the compiled source code with the specified makefile"
    echo -e "\t--stop: stop the cluster"
    echo "You can provide several commands in the same smake call. They will be"
    echo "executed in the specified order."
    echo ""
}
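# Example invocation (hostnames and makefile name are illustrative, not part of this repo):
#   ./smake --master --clear-workers --worker node1 --worker node2 --start --cores 8 --run my.makefile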
#Useful funcs
function compile {
    echo "Compiling"
    #Copy the build file and sources into the Spark directory
    cp -r $SBT_FILE $SRC_DIR $SPARK_DIR
    cd $SPARK_DIR
    #Compile and package the application
    ./sbt/sbt package || ERR=YES
    #Clean up the copied files and go back
    rm -rf $SBT_FILE $SRC_DIR
    cd ..
    echo -e "Done!\n"
}
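# A successful build leaves the packaged jar at $SPARK_APP
# (spark-1.1.1/target/scala-2.10/simple-project_2.10-1.0.jar), which is what --run submits.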
function master {
    echo -n "Setting this computer as master.. "
    echo "export SPARK_MASTER_IP=$IP_ADDR" > $SPARK_ENV
    echo "export SPARK_MASTER_PORT=7077" >> $SPARK_ENV
    echo "export MASTER=spark://\${SPARK_MASTER_IP}:\${SPARK_MASTER_PORT}" >> $SPARK_ENV
    echo "export SPARK_WORKER_DIR=/tmp" >> $SPARK_ENV
    echo -e "Done!\n"
}
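# After master(), $SPARK_ENV contains (with an illustrative address for this host):
#   export SPARK_MASTER_IP=192.168.1.10
#   export SPARK_MASTER_PORT=7077
#   export MASTER=spark://${SPARK_MASTER_IP}:${SPARK_MASTER_PORT}
#   export SPARK_WORKER_DIR=/tmp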
function worker {
    echo "Adding worker $WORKER_HOST"
    echo -n "Checking worker state.. "
    if ping -c 1 "$WORKER_HOST" &> /dev/null
    then
        echo "UP!"
        echo -n "Resolving ip.. "
        WORKER_IP=$(host "$WORKER_HOST" | awk '/has address/ { print $4 }')
        if [[ $WORKER_IP ]]
        then
            echo "OK"
            echo -n "Adding to workers' list.. "
            echo "$WORKER_IP" >> $SPARK_SLAVES
            echo "Done!"
        else
            echo "KO"
            echo "Unable to resolve ip, skipping"
        fi
    else
        echo "DOWN!"
        echo "Skipping $WORKER_HOST because it's down"
    fi
    echo ""
}
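# The worker's address is taken from `host`, whose output looks like
# "node1.example.org has address 10.0.0.5" (illustrative values); the awk filter
# keeps the fourth field of the "has address" line, i.e. the IPv4 address.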
function clearworkers {
    echo -n "Clearing workers' list.. "
    echo "#Put here workers' ips" > $SPARK_SLAVES
    echo -e "Done!\n"
}
function start {
    echo "Starting cluster"
    $SPARK_START || ERR=YES
    echo -e "Done!\n"
}
function stop {
    echo "Stopping cluster"
    $SPARK_STOP || ERR=YES
    echo -e "Done!\n"
}
function run {
    echo "Running application"
    $SPARK_SUBMIT --class TestRead --total-executor-cores $TOTAL_CORES $SPARK_APP $MAKEFILE pcserveur.ensimag.fr || ERR=YES
    echo -e "Done!\n"
}
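# With the defaults above, run() expands to (makefile name illustrative):
#   spark-1.1.1/bin/spark-submit --class TestRead --total-executor-cores 20 \
#     spark-1.1.1/target/scala-2.10/simple-project_2.10-1.0.jar my.makefile pcserveur.ensimag.fr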
#Start
splash #Display the ASCII banner
#Check for args
if [ $# -eq 0 ]
then
    echo "Please provide at least one argument"
    usage
    exit 1
fi
#Check spark folder
if [ ! -d "$SPARK_DIR" ]; then
    echo "Spark directory not found, please run get-spark.sh first."
    exit 1
fi
#Parse args
ERR=
while [ "$1" ]
do
    case "$1" in
        "--compile")
            compile
            ;;
        "--master")
            master
            ;;
        "--worker")
            shift
            WORKER_HOST=$1
            worker
            ;;
        "--clear-workers")
            clearworkers
            ;;
        "--start")
            start
            ;;
        "--stop")
            stop
            ;;
        "--cores")
            shift
            TOTAL_CORES=$1
            ;;
        "--run")
            shift
            MAKEFILE=$1
            run
            ;;
        *)
            echo -e "\nWARNING: Unknown argument $1, skipping.\n"
            ;;
    esac
    shift
done
if [[ "$ERR" ]]; then
exit 1
fi
exit 0
# End of file