BIGTOP-1098. now that Spark has had its first incubator release we have to adjust...
[bigtop.git] / bigtop-packages / src / rpm / spark / SPECS / spark.spec
1 # Licensed to the Apache Software Foundation (ASF) under one or more
2 # contributor license agreements. See the NOTICE file distributed with
3 # this work for additional information regarding copyright ownership.
4 # The ASF licenses this file to You under the Apache License, Version 2.0
5 # (the "License"); you may not use this file except in compliance with
6 # the License. You may obtain a copy of the License at
7 #
8 # http://www.apache.org/licenses/LICENSE-2.0
9 #
10 # Unless required by applicable law or agreed to in writing, software
11 # distributed under the License is distributed on an "AS IS" BASIS,
12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 # See the License for the specific language governing permissions and
14 # limitations under the License.
15
# Package name and the filesystem layout derived from it.
%define spark_name spark
# Installation prefix for the Spark jars, scripts and libraries.
%define lib_spark /usr/lib/%{spark_name}
# Mutable state dir; also the spark user's home directory (see the pre scriptlet).
%define var_lib_spark /var/lib/%{spark_name}
# Runtime data (pid files, worker scratch space).
%define var_run_spark /var/run/%{spark_name}
# Daemon log output.
%define var_log_spark /var/log/%{spark_name}
# Launcher scripts shipped inside the installation prefix.
%define bin_spark /usr/lib/%{spark_name}/bin
# Configuration root; the live conf dir is managed via alternatives (see post).
%define etc_spark /etc/%{spark_name}
%define config_spark %{etc_spark}/conf
%define bin /usr/bin
# NOTE(review): man_dir appears unused anywhere in this spec -- confirm before relying on it.
%define man_dir /usr/share/man
# One init script is generated per service listed here (see install and preun).
%define spark_services master worker
27
# SUSE and Red Hat families differ in the doc directory convention and in
# the name of the alternatives tool (update-alternatives vs alternatives).
%if %{?suse_version:1}0
%define doc_spark %{_docdir}/spark
%define alternatives_cmd update-alternatives
%else
%define doc_spark %{_docdir}/spark-%{spark_version}
%define alternatives_cmd alternatives
%endif
35
# disable repacking jars
# Neutralizing the default post-install payload processing keeps rpmbuild's
# brp scripts from trying to strip or recompress the bundled jars.
%define __os_install_post %{nil}

Name: spark
# spark_version, spark_release and spark_base_version are not defined in this
# spec -- presumably injected by the surrounding Bigtop build via rpmbuild
# defines; verify against the build harness.
Version: %{spark_version}
Release: %{spark_release}
Summary: Lightning-Fast Cluster Computing
URL: http://spark.incubator.apache.org/
Group: Development/Libraries
# Payload is jars and scripts only, so the package is architecture independent.
BuildArch: noarch
Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
License: ASL 2.0
Source0: %{name}-%{spark_base_version}.tar.gz
Source1: do-component-build
Source2: install_%{name}.sh
Source3: spark-master.svc
Source4: spark-worker.svc
Requires: bigtop-utils
# The preun scriptlet stops running daemons through /sbin/service.
Requires(preun): /sbin/service
55
# Default SysV init script location; overridden per distro family below.
%global initd_dir %{_sysconfdir}/init.d

%if %{?suse_version:1}0
# Required for init scripts
Requires: insserv
%global initd_dir %{_sysconfdir}/rc.d

%else
# Required for init scripts
Requires: redhat-lsb

%global initd_dir %{_sysconfdir}/rc.d/init.d

%endif
70
71 %description
72 Spark is a MapReduce-like cluster computing framework designed to support
73 low-latency iterative jobs and interactive use from an interpreter. It is
74 written in Scala, a high-level language for the JVM, and exposes a clean
75 language-integrated syntax that makes it easy to write parallel jobs.
76 Spark runs on top of the Apache Mesos cluster manager.
77
%prep
# Unpack Source0; the tarball's top-level directory is name-spark_base_version.
%setup -n %{name}-%{spark_base_version}

%build
# Delegate the actual build to the shared Bigtop build script (Source1).
bash $RPM_SOURCE_DIR/do-component-build
83
%install
# Start from a clean buildroot, then pre-create the directory skeleton that
# the install script and the generated init scripts expect to exist.
%__rm -rf $RPM_BUILD_ROOT
%__install -d -m 0755 $RPM_BUILD_ROOT/%{bin_spark}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/lib/%{name}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/log/%{name}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/run/%{name}/
# work/ is presumably the worker daemon's scratch space -- confirm against
# the svc templates before changing.
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/run/%{name}/work/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{initd_dir}/

# Lay down the payload (jars, scripts, config, docs) via Source2.
sh $RPM_SOURCE_DIR/install_spark.sh \
          --build-dir=`pwd` \
          --source-dir=$RPM_SOURCE_DIR \
          --prefix=$RPM_BUILD_ROOT \
          --doc-dir=%{doc_spark}

# Generate one SysV init script per declared service (master, worker) from
# the shared Bigtop template plus the per-service .svc description.
for service in %{spark_services}
do
    # Install init script
    init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
    bash $RPM_SOURCE_DIR/init.d.tmpl $RPM_SOURCE_DIR/spark-${service}.svc rpm $init_file
done
105
%pre
# Create the spark system group and user on first install. The account is a
# service account: nologin shell, home set to the package's var lib dir.
# Trailing ": " keeps the scriptlet from failing the transaction if useradd
# reports the user already exists.
getent group spark >/dev/null || groupadd -r spark
getent passwd spark >/dev/null || useradd -c "Spark" -s /sbin/nologin -g spark -r -d %{var_lib_spark} spark 2> /dev/null || :
109
%post
# Register the packaged config dir (shipped as conf.dist) as an
# alternatives-managed candidate for the live conf path, priority 30.
%{alternatives_cmd} --install %{config_spark} %{spark_name}-conf %{config_spark}.dist 30
112
%preun
# Only on final removal (first argument is 0) -- not on upgrade -- drop the
# alternatives registration for the packaged configuration.
if [ "$1" = 0 ]; then
  %{alternatives_cmd} --remove %{spark_name}-conf %{config_spark}.dist || :
fi

# Stop any daemon that is currently running, on both erase and upgrade.
for svc in %{spark_services}; do
  if /sbin/service %{name}-${svc} status > /dev/null 2>&1; then
    /sbin/service %{name}-${svc} stop > /dev/null 2>&1
  fi
done
124
#######################
#### FILES SECTION ####
#######################
%files
%defattr(-,root,root,755)
# The shipped conf.dist dir is marked noreplace so local edits survive
# upgrades; the live conf path itself is an alternatives link set up in post.
%config(noreplace) %{config_spark}.dist
%doc %{doc_spark}
%{lib_spark}
%{etc_spark}
# State, run and log directories are owned by the spark service user created
# in the pre scriptlet.
%attr(0755,spark,spark) %{var_lib_spark}
%attr(0755,spark,spark) %{var_run_spark}
%attr(0755,spark,spark) %{var_log_spark}
# Init scripts and launcher dir stay root-owned and executable.
%attr(0755,root,root) %{initd_dir}/spark-master
%attr(0755,root,root) %{initd_dir}/spark-worker
%attr(0755,root,root) %{bin_spark}
%{bin}/spark-shell
%{bin}/spark-executor