BIGTOP-1086. fix miscellaneous failures in package tests
bigtop-packages/src/rpm/spark/SPECS/spark.spec
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

%define spark_name spark
%define lib_spark /usr/lib/%{spark_name}
%define var_lib_spark /var/lib/%{spark_name}
%define var_run_spark /var/run/%{spark_name}
%define var_log_spark /var/log/%{spark_name}
%define bin_spark /usr/lib/%{spark_name}/bin
%define etc_spark /etc/%{spark_name}
%define config_spark %{etc_spark}/conf
%define bin /usr/bin
%define man_dir /usr/share/man
%define spark_services master worker

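# SUSE and RHEL-family distros differ in the doc directory layout and in
# the name of the alternatives command.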
%if %{?suse_version:1}0
%define doc_spark %{_docdir}/spark
%define alternatives_cmd update-alternatives
%else
%define doc_spark %{_docdir}/spark-%{spark_version}
%define alternatives_cmd alternatives
%endif

# Disable post-build processing (e.g. repacking jars), which can corrupt
# the prebuilt Spark jars
%define __os_install_post %{nil}

Name: spark
Version: %{spark_version}
Release: %{spark_release}
Summary: Lightning-Fast Cluster Computing
URL: http://spark.incubator.apache.org/
Group: Development/Libraries
BuildArch: noarch
Buildroot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
License: ASL 2.0
Source0: %{name}-%{spark_base_version}.tar.gz
Source1: do-component-build
Source2: install_%{name}.sh
Source3: spark-master.svc
Source4: spark-worker.svc
Requires: bigtop-utils
Requires(preun): /sbin/service

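# Default init script location; overridden per distro family below.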
%global initd_dir %{_sysconfdir}/init.d

%if %{?suse_version:1}0
# Required for init scripts
Requires: insserv
%define alternatives_cmd update-alternatives
%global initd_dir %{_sysconfdir}/rc.d
%else
# Required for init scripts
Requires: redhat-lsb
%global initd_dir %{_sysconfdir}/rc.d/init.d
%endif

%description
Spark is a MapReduce-like cluster computing framework designed to support
low-latency iterative jobs and interactive use from an interpreter. It is
written in Scala, a high-level language for the JVM, and exposes a clean
language-integrated syntax that makes it easy to write parallel jobs.
Spark runs on top of the Apache Mesos cluster manager.

%prep
# The 0.8 source tarball currently unpacks into spark-branch-0.8 rather
# than %{name}-%{spark_base_version}, so the directory is named explicitly.
%setup -n spark-branch-0.8

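# The actual compilation is delegated to the shared do-component-build
# script shipped as Source1.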
%build
bash $RPM_SOURCE_DIR/do-component-build

%install
%__rm -rf $RPM_BUILD_ROOT
%__install -d -m 0755 $RPM_BUILD_ROOT/%{bin_spark}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/lib/%{name}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/log/%{name}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/run/%{name}/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{_localstatedir}/run/%{name}/work/
%__install -d -m 0755 $RPM_BUILD_ROOT/%{initd_dir}/

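# Lay the Spark distribution out under the buildroot using the install
# script shipped as Source2.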
sh $RPM_SOURCE_DIR/install_spark.sh \
    --build-dir=`pwd` \
    --source-dir=$RPM_SOURCE_DIR \
    --prefix=$RPM_BUILD_ROOT \
    --doc-dir=%{doc_spark}

for service in %{spark_services}
do
    # Generate the init script for this daemon from its .svc template
    init_file=$RPM_BUILD_ROOT/%{initd_dir}/%{name}-${service}
    bash $RPM_SOURCE_DIR/init.d.tmpl $RPM_SOURCE_DIR/spark-${service}.svc rpm $init_file
done

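# Create the spark system group and user on first install; the account is
# homed in %{var_lib_spark} with logins disabled.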
%pre
getent group spark >/dev/null || groupadd -r spark
getent passwd spark >/dev/null || useradd -c "Spark" -s /sbin/nologin -g spark -r -d %{var_lib_spark} spark 2> /dev/null || :

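# Make the packaged defaults the active configuration; this expands to e.g.
# alternatives --install /etc/spark/conf spark-conf /etc/spark/conf.dist 30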
%post
%{alternatives_cmd} --install %{config_spark} %{spark_name}-conf %{config_spark}.dist 30

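# $1 is 0 only on a full erase (not an upgrade): drop the alternatives
# entry then, but stop any running daemons in either case.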
%preun
if [ "$1" = 0 ]; then
    %{alternatives_cmd} --remove %{spark_name}-conf %{config_spark}.dist || :
fi

for service in %{spark_services}; do
    if /sbin/service %{name}-${service} status > /dev/null 2>&1; then
        /sbin/service %{name}-${service} stop > /dev/null 2>&1
    fi
done

#######################
#### FILES SECTION ####
#######################
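# The shipped config lives in %{config_spark}.dist and is activated via the
# alternatives symlink; runtime directories are owned by the spark user.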
%files
%defattr(-,root,root,755)
%config(noreplace) %{config_spark}.dist
%doc %{doc_spark}
%{lib_spark}
%{etc_spark}
%attr(0755,spark,spark) %{var_lib_spark}
%attr(0755,spark,spark) %{var_run_spark}
%attr(0755,spark,spark) %{var_log_spark}
%attr(0755,root,root) %{initd_dir}/spark-master
%attr(0755,root,root) %{initd_dir}/spark-worker
%attr(0755,root,root) %{bin_spark}
%{bin}/spark-shell
%{bin}/spark-executor