AMBARI-22290: HDP + HDF installation fails (jluniya) (#1304)
author jayush <jayush@gmail.com>
Fri, 18 May 2018 05:18:21 +0000 (22:18 -0700)
committer GitHub <noreply@github.com>
Fri, 18 May 2018 05:18:21 +0000 (22:18 -0700)
* AMBARI-22290: HDP + HDF installation fails (jluniya)

Change-Id: I8d30b30aa1c0b0e49f9e45fadac127e6a05ac90b

* AMBARI-22290: HDP + HDF installation fails - fix unit tests (jluniya)

Change-Id: I1f319b5881b0803723f6f4799338de9c3d3e646f

29 files changed:
ambari-common/src/main/python/resource_management/libraries/functions/repository_util.py
ambari-common/src/main/python/resource_management/libraries/script/script.py
ambari-server/src/main/java/org/apache/ambari/annotations/ExperimentalFeature.java
ambari-server/src/main/java/org/apache/ambari/server/agent/CommandRepository.java
ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
ambari-server/src/main/java/org/apache/ambari/server/controller/RepositoryResponse.java
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/RepositoryResourceProvider.java
ambari-server/src/main/java/org/apache/ambari/server/controller/internal/VersionDefinitionResourceProvider.java
ambari-server/src/main/java/org/apache/ambari/server/orm/entities/RepoDefinitionEntity.java
ambari-server/src/main/java/org/apache/ambari/server/stack/RepoUtil.java
ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
ambari-server/src/main/java/org/apache/ambari/server/stack/StackServiceDirectory.java
ambari-server/src/main/java/org/apache/ambari/server/stack/UpdateActiveRepoVersionOnStartup.java
ambari-server/src/main/java/org/apache/ambari/server/state/RepositoryInfo.java
ambari-server/src/main/java/org/apache/ambari/server/state/stack/upgrade/RepositoryVersionHelper.java
ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog270.java
ambari-server/src/main/resources/Ambari-DDL-Derby-CREATE.sql
ambari-server/src/main/resources/Ambari-DDL-MySQL-CREATE.sql
ambari-server/src/main/resources/Ambari-DDL-Oracle-CREATE.sql
ambari-server/src/main/resources/Ambari-DDL-Postgres-CREATE.sql
ambari-server/src/main/resources/Ambari-DDL-SQLAnywhere-CREATE.sql
ambari-server/src/main/resources/Ambari-DDL-SQLServer-CREATE.sql
ambari-server/src/main/resources/custom_actions/scripts/install_packages.py
ambari-server/src/test/java/org/apache/ambari/server/controller/internal/RepositoryResourceProviderTest.java
ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog270Test.java
ambari-web/app/controllers/installer.js
ambari-web/app/mappers/repository_version_mapper.js
ambari-web/app/mappers/stack_mapper.js
ambari-web/app/models/repository.js

index 7faa33f..684e3dc 100644 (file)
@@ -173,7 +173,7 @@ class CommandRepositoryItem(object):
 
     self.ubuntu_components = [self.distribution if self.distribution else self.repo_name] + \
                              [self.components.replace(",", " ") if self.components else UBUNTU_REPO_COMPONENTS_POSTFIX]
-
+    self.applicable_services = _find_value(json_dict, 'applicableServices')
 
 
 
index 1ddc49a..556412a 100644 (file)
@@ -776,9 +776,20 @@ class Script(object):
     if self.available_packages_in_repos:
       return self.available_packages_in_repos
 
+    config = self.get_config()
+
+    service_name = config['serviceName'] if 'serviceName' in config else None
+    repos = CommandRepository(config['repositoryFile'])
+    repo_ids = [repo.repo_id for repo in repos.items]
+    Logger.info("Command repositories: {0}".format(", ".join(repo_ids)))
+    repos.items = [x for x in repos.items if (not x.applicable_services or service_name in x.applicable_services) ]
+    applicable_repo_ids = [repo.repo_id for repo in repos.items]
+    Logger.info("Applicable repositories: {0}".format(", ".join(applicable_repo_ids)))
+
+
     pkg_provider = ManagerFactory.get()
     try:
-      self.available_packages_in_repos = pkg_provider.get_available_packages_in_repos(CommandRepository(self.get_config()['repositoryFile']))
+      self.available_packages_in_repos = pkg_provider.get_available_packages_in_repos(repos)
     except Exception as err:
       Logger.exception("Unable to load available packages")
       self.available_packages_in_repos = []
index d58e94e..55e09e0 100644 (file)
@@ -37,5 +37,10 @@ public enum ExperimentalFeature {
   /**
    * For code that is for multi-service
    */
-  MULTI_SERVICE
+  MULTI_SERVICE,
+
+  /**
+   * Support for service-specific repos for custom services
+   */
+  CUSTOM_SERVICE_REPOS
 }
index 6fc0a59..00c837a 100644 (file)
@@ -27,6 +27,7 @@ import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.orm.entities.RepoDefinitionEntity;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.stack.RepoTag;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.builder.ToStringBuilder;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -265,6 +266,11 @@ public class CommandRepository {
     @JsonProperty("mirrorsList")
     private String m_mirrorsList;
 
+    @SerializedName("applicableServices")
+    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+      comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+    private List<String> m_applicableServices;
+
     @SerializedName("tags")
     private Set<RepoTag> m_tags;
 
@@ -279,6 +285,7 @@ public class CommandRepository {
       m_distribution = info.getDistribution();
       m_components = info.getComponents();
       m_mirrorsList = info.getMirrorsList();
+      m_applicableServices = info.getApplicableServices();
       m_tags = info.getTags();
     }
 
@@ -289,6 +296,7 @@ public class CommandRepository {
       m_distribution = entity.getDistribution();
       m_components = entity.getComponents();
       m_mirrorsList = entity.getMirrors();
+      m_applicableServices = entity.getApplicableServices();
       m_osType = osType;
       m_tags = entity.getTags();
     }
@@ -305,6 +313,10 @@ public class CommandRepository {
       return m_repoName;
     }
 
+    public String getRepoId() {
+      return m_repoId;
+    }
+
     public String getBaseUrl() {
       return m_baseUrl;
     }
@@ -313,6 +325,18 @@ public class CommandRepository {
       return m_ambariManaged;
     }
 
+    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+      comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+    public void setApplicableServices(List<String> applicableServices) {
+      m_applicableServices = applicableServices;
+    }
+
+    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+      comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+    public List<String> getApplicableServices() {
+      return m_applicableServices;
+    }
+
     /**
      * {@inheritDoc}
      */
@@ -325,9 +349,9 @@ public class CommandRepository {
           .append("components", m_components)
           .append("id", m_repoId)
           .append("baseUrl", m_baseUrl)
+          .append("applicableServices", (m_applicableServices != null? StringUtils.join(m_applicableServices, ",") : ""))
           .toString();
     }
-
   }
 
   @Override
index aaf46f9..0c22b42 100644 (file)
@@ -4335,6 +4335,8 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     return response;
   }
 
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   private Set<RepositoryResponse> getRepositories(RepositoryRequest request) throws AmbariException {
 
     String stackName = request.getStackName();
@@ -4364,7 +4366,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
 
               final RepositoryResponse response = new RepositoryResponse(repository.getBaseUrl(), osType, repository.getRepoID(),
                   repository.getRepoName(), repository.getDistribution(), repository.getComponents(), "", "",
-                      repository.getTags());
+                      repository.getTags(), repository.getApplicableServices());
               if (null != versionDefinitionId) {
                 response.setVersionDefinitionId(versionDefinitionId);
               } else {
@@ -4394,7 +4396,7 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
         for (RepositoryXml.Repo repo : os.getRepos()) {
           RepositoryResponse resp = new RepositoryResponse(repo.getBaseUrl(), os.getFamily(),
               repo.getRepoId(), repo.getRepoName(), repo.getDistribution(), repo.getComponents(), repo.getMirrorsList(),
-              repo.getBaseUrl(), repo.getTags());
+              repo.getBaseUrl(), repo.getTags(), Collections.EMPTY_LIST);
 
           resp.setVersionDefinitionId(versionDefinitionId);
           resp.setStackName(stackId.getStackName());
index 6d0f3fb..e5f8f0f 100644 (file)
 
 package org.apache.ambari.server.controller;
 
+import java.util.List;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.state.stack.RepoTag;
 
 public class RepositoryResponse {
@@ -40,9 +43,14 @@ public class RepositoryResponse {
   private boolean unique;
   private Set<RepoTag> tags;
 
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  private List<String> applicableServices;
+
   public RepositoryResponse(String baseUrl, String osType, String repoId,
                             String repoName, String distribution, String components,
-                            String mirrorsList, String defaultBaseUrl, Set<RepoTag> repoTags) {
+                            String mirrorsList, String defaultBaseUrl,
+                            Set<RepoTag> repoTags, List<String> applicableServices) {
     setBaseUrl(baseUrl);
     setOsType(osType);
     setRepoId(repoId);
@@ -52,6 +60,7 @@ public class RepositoryResponse {
     setMirrorsList(mirrorsList);
     setDefaultBaseUrl(defaultBaseUrl);
     setTags(repoTags);
+    setApplicableServices(applicableServices);
   }
 
   public String getStackName() {
@@ -74,32 +83,26 @@ public class RepositoryResponse {
     return baseUrl;
   }
 
-
   public void setBaseUrl(String baseUrl) {
     this.baseUrl = baseUrl;
   }
 
-
   public String getOsType() {
     return osType;
   }
 
-
   public void setOsType(String osType) {
     this.osType = osType;
   }
 
-
   public String getRepoId() {
     return repoId;
   }
 
-
   public void setRepoId(String repoId) {
     this.repoId = repoId;
   }
 
-
   public String getRepoName() {
     return repoName;
   }
@@ -198,4 +201,16 @@ public class RepositoryResponse {
   public void setTags(Set<RepoTag> repoTags) {
     tags = repoTags;
   }
+
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public List<String> getApplicableServices() {
+    return applicableServices;
+  }
+
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public void setApplicableServices(List<String> applicableServices) {
+    this.applicableServices = applicableServices;
+  }
 }
index 60dff69..82f7903 100644 (file)
@@ -26,6 +26,8 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.resources.RepositoryResourceDefinition;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -62,6 +64,9 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
   public static final String REPOSITORY_VERSION_DEFINITION_ID_PROPERTY_ID = PropertyHelper.getPropertyId("Repositories", "version_definition_id");
   public static final String REPOSITORY_UNIQUE_PROPERTY_ID                = PropertyHelper.getPropertyId("Repositories", "unique");
   public static final String REPOSITORY_TAGS_PROPERTY_ID                  = PropertyHelper.getPropertyId("Repositories", "tags");
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public static final String REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID   = PropertyHelper.getPropertyId("Repositories", "applicable_services");
 
   @SuppressWarnings("serial")
   private static Set<String> pkPropertyIds = new HashSet<String>() {
@@ -92,6 +97,7 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
       add(REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID);
       add(REPOSITORY_UNIQUE_PROPERTY_ID);
       add(REPOSITORY_TAGS_PROPERTY_ID);
+      add(REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID);
     }
   };
 
@@ -179,6 +185,7 @@ public class RepositoryResourceProvider extends AbstractControllerResourceProvid
         setResourceProperty(resource, REPOSITORY_DEFAULT_BASE_URL_PROPERTY_ID, response.getDefaultBaseUrl(), requestedIds);
         setResourceProperty(resource, REPOSITORY_UNIQUE_PROPERTY_ID, response.isUnique(), requestedIds);
         setResourceProperty(resource, REPOSITORY_TAGS_PROPERTY_ID, response.getTags(), requestedIds);
+        setResourceProperty(resource, REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID, response.getApplicableServices(), requestedIds);
         if (null != response.getClusterVersionId()) {
           setResourceProperty(resource, REPOSITORY_CLUSTER_STACK_VERSION_PROPERTY_ID, response.getClusterVersionId(), requestedIds);
         }
index 0934704..9882147 100644 (file)
@@ -30,6 +30,8 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.StaticallyInject;
 import org.apache.ambari.server.api.resources.OperatingSystemResourceDefinition;
@@ -595,6 +597,8 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
    *
    * @throws AmbariException if some properties are missing or json has incorrect structure
    */
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   protected void toRepositoryVersionEntity(XmlHolder holder) throws AmbariException {
 
     // !!! TODO validate parsed object graph
@@ -784,6 +788,17 @@ public class VersionDefinitionResourceProvider extends AbstractAuthorizedResourc
         repoElement.put(PropertyHelper.getPropertyName(RepositoryResourceProvider.REPOSITORY_STACK_VERSION_PROPERTY_ID),
             entity.getStackVersion());
 
+        @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+          comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+               ArrayNode applicableServicesNode = factory.arrayNode();
+        if(repo.getApplicableServices() != null) {
+          for (String applicableService : repo.getApplicableServices()) {
+            applicableServicesNode.add(applicableService);
+          }
+        }
+        repoElement.put(PropertyHelper.getPropertyName(
+          RepositoryResourceProvider.REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID), applicableServicesNode);
+
         ArrayNode tagsNode = factory.arrayNode();
         for (RepoTag repoTag : repo.getTags()) {
           tagsNode.add(repoTag.toString());
index dec922a..1e7fccc 100644 (file)
@@ -18,6 +18,8 @@
 package org.apache.ambari.server.orm.entities;
 
 import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Set;
 
 import javax.persistence.CollectionTable;
@@ -35,6 +37,8 @@ import javax.persistence.ManyToOne;
 import javax.persistence.Table;
 import javax.persistence.TableGenerator;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.state.stack.RepoTag;
 
 import com.google.common.base.Objects;
@@ -93,6 +97,15 @@ public class RepoDefinitionEntity {
   @Column(name = "unique_repo", nullable = false)
   private short unique = 0;
 
+  /**
+   * CollectionTable for RepoTag enum
+   */
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  @ElementCollection(targetClass = String.class)
+  @CollectionTable(name = "repo_applicable_services", joinColumns = {@JoinColumn(name = "repo_definition_id")})
+  @Column(name = "service_name")
+  private List<String> applicableServices = new LinkedList<>();
 
   public String getDistribution() {
     return distribution;
@@ -142,6 +155,18 @@ public class RepoDefinitionEntity {
     this.mirrors = mirrors;
   }
 
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public List<String> getApplicableServices() {
+    return applicableServices;
+  }
+
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public void setApplicableServices(List<String> applicableServices) {
+    this.applicableServices = applicableServices;
+  }
+
   public Long getId() {
     return id;
   }
@@ -179,7 +204,7 @@ public class RepoDefinitionEntity {
    */
   @Override
   public int hashCode() {
-    return java.util.Objects.hash(repoTags, repoName, repoID, baseUrl, mirrors, distribution, components, unique);
+    return java.util.Objects.hash(repoTags, repoName, repoID, baseUrl, mirrors, distribution, components, unique, applicableServices);
   }
 
   /**
@@ -207,6 +232,7 @@ public class RepoDefinitionEntity {
         && Objects.equal(baseUrl, that.baseUrl)
         && Objects.equal(mirrors, that.mirrors)
         && Objects.equal(distribution, that.distribution)
-        && Objects.equal(components, that.components);
+        && Objects.equal(components, that.components)
+        && Objects.equal(applicableServices, that.applicableServices);
   }
 }
\ No newline at end of file
index 6a8ece2..6c13a13 100644 (file)
@@ -26,6 +26,8 @@ import java.util.Set;
 
 import javax.annotation.Nullable;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.controller.RepositoryResponse;
 import org.apache.ambari.server.orm.entities.RepoDefinitionEntity;
@@ -46,7 +48,7 @@ import com.google.common.collect.Multimaps;
 import com.google.common.collect.Sets;
 
 /**
- * Utility functions for repository replated tasks.
+ * Utility functions for repository related tasks.
  */
 public class RepoUtil {
 
@@ -120,6 +122,8 @@ public class RepoUtil {
    * @param stackReposByOs - Stack repositories loaded from the disk (including service repositories), grouped by os.
    * @return {@code true} if there were added repositories
    */
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   public static boolean addServiceReposToOperatingSystemEntities(List<RepoOsEntity> operatingSystems,
                                                                  ListMultimap<String, RepositoryInfo> stackReposByOs) {
     Set<String> addedRepos = new HashSet<>();
@@ -145,6 +149,8 @@ public class RepoUtil {
    * @param stackReposByOs the repositories in the stack model (loaded from disks)
    * @return A list of service repositories
    */
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   public static List<RepositoryInfo> getServiceRepos(List<RepositoryInfo> vdfRepos,
                                                    ListMultimap<String, RepositoryInfo> stackReposByOs) {
     Set<String> serviceRepoIds = new HashSet<>();
@@ -187,6 +193,8 @@ public class RepoUtil {
     return responses;
   }
 
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   private static RepoDefinitionEntity toRepositoryEntity(RepositoryInfo repoInfo) {
     RepoDefinitionEntity re = new RepoDefinitionEntity();
     re.setBaseUrl(repoInfo.getBaseUrl());
@@ -195,6 +203,7 @@ public class RepoUtil {
     re.setDistribution(repoInfo.getDistribution());
     re.setComponents(repoInfo.getComponents());
     re.setTags(repoInfo.getTags());
+    re.setApplicableServices(repoInfo.getApplicableServices());
     return re;
   }
 
index b109331..541ee7e 100644 (file)
@@ -31,6 +31,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.BulkCommandDefinition;
 import org.apache.ambari.server.state.ComponentInfo;
@@ -1156,7 +1158,7 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
     }
 
     LOG.debug("Process service custom repositories");
-    Set<RepositoryInfo> serviceRepos = getUniqueServiceRepos(stackRepos);
+    Collection<RepositoryInfo> serviceRepos = getUniqueServiceRepos(stackRepos);
     stackInfo.getRepositories().addAll(serviceRepos);
 
     if (null != rxml && null != rxml.getLatestURI() && stackRepos.size() > 0) {
@@ -1217,12 +1219,13 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
    * @param stackRepos the list of stack repositories
    * @return the service repos with duplicates filtered out.
    */
-  private Set<RepositoryInfo> getUniqueServiceRepos(List<RepositoryInfo> stackRepos) {
+   @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+     comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+   private Collection<RepositoryInfo> getUniqueServiceRepos(List<RepositoryInfo> stackRepos) {
     List<RepositoryInfo> serviceRepos = getAllServiceRepos();
     ImmutableListMultimap<String, RepositoryInfo> serviceReposByOsType = Multimaps.index(serviceRepos, RepositoryInfo.GET_OSTYPE_FUNCTION);
     ImmutableListMultimap<String, RepositoryInfo> stackReposByOsType = Multimaps.index(stackRepos, RepositoryInfo.GET_OSTYPE_FUNCTION);
-
-    Set<RepositoryInfo> uniqueServiceRepos = new HashSet<>();
+     Map<String, RepositoryInfo> uniqueServiceRepos = new HashMap<>();
 
     // Uniqueness is checked for each os type
     for (String osType: serviceReposByOsType.keySet()) {
@@ -1249,11 +1252,16 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
           LOG.warn("Discarding service repository with duplicate name and different content: {}", repo);
         }
         else {
-          uniqueServiceRepos.add(repo);
+          String key = repo.getOsType() + "-" + repo.getRepoName() + "-" + repo.getRepoId();
+          if(uniqueServiceRepos.containsKey(key)) {
+            uniqueServiceRepos.get(key).getApplicableServices().addAll(repo.getApplicableServices());
+          } else {
+            uniqueServiceRepos.put(key, repo);
+          }
         }
       }
     }
-    return uniqueServiceRepos;
+    return uniqueServiceRepos.values();
   }
 
   /**
@@ -1290,7 +1298,11 @@ public class StackModule extends BaseModule<StackModule, StackInfo> implements V
         StackServiceDirectory ssd = (StackServiceDirectory) sd;
         RepositoryXml serviceRepoXml = ssd.getRepoFile();
         if (null != serviceRepoXml) {
-          repos.addAll(serviceRepoXml.getRepositories());
+          List<RepositoryInfo> serviceRepos = serviceRepoXml.getRepositories();
+          for(RepositoryInfo serviceRepo : serviceRepos) {
+            serviceRepo.getApplicableServices().add(sm.getId());
+          }
+          repos.addAll(serviceRepos);
           if (null != serviceRepoXml.getLatestURI()) {
             registerRepoUpdateTask(serviceRepoXml);
           }
index 477ee66..b2195dc 100644 (file)
@@ -24,6 +24,8 @@ import java.util.Collection;
 
 import javax.annotation.Nullable;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.state.stack.RepositoryXml;
 
@@ -52,6 +54,8 @@ public class StackServiceDirectory extends ServiceDirectory {
    * repository directory
    */
   @Nullable
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   private String repoDir;
 
   /**
@@ -70,6 +74,8 @@ public class StackServiceDirectory extends ServiceDirectory {
    * @return the repository xml file if exists or null
    */
   @Nullable
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   public RepositoryXml getRepoFile() {
     return repoFile;
   }
@@ -80,6 +86,8 @@ public class StackServiceDirectory extends ServiceDirectory {
    * @return the repository directory if exists or null
    */
   @Nullable
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
   public String getRepoDir() {
     return repoDir;
   }
index 334136f..23c79d8 100644 (file)
@@ -20,6 +20,8 @@ package org.apache.ambari.server.stack;
 
 import java.util.List;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.orm.dao.ClusterDAO;
@@ -47,6 +49,8 @@ import com.google.inject.persist.Transactional;
  * cluster, the cluster's repository version entity must be updated with the custom repos provided by the
  * management pack. The class takes care of this.
  */
+@Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+  comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
 public class UpdateActiveRepoVersionOnStartup {
 
   private static final Logger LOG = LoggerFactory.getLogger(UpdateActiveRepoVersionOnStartup.class);
index a7bbc1b..ba07105 100644 (file)
 package org.apache.ambari.server.state;
 
 import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Set;
 
+import org.apache.ambari.annotations.Experimental;
+import org.apache.ambari.annotations.ExperimentalFeature;
 import org.apache.ambari.server.controller.RepositoryResponse;
 import org.apache.ambari.server.state.stack.RepoTag;
+import org.apache.commons.lang.StringUtils;
 
 import com.google.common.base.Function;
 import com.google.common.base.Objects;
@@ -40,6 +45,10 @@ public class RepositoryInfo {
   private boolean repoSaved = false;
   private boolean unique = false;
   private boolean ambariManagedRepositories = true;
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  private List<String> applicableServices = new LinkedList<>();
+
   private Set<RepoTag> tags = new HashSet<>();
 
   /**
@@ -170,6 +179,18 @@ public class RepositoryInfo {
     this.unique = unique;
   }
 
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public List<String> getApplicableServices() {
+    return applicableServices;
+  }
+
+  @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
+    comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
+  public void setApplicableServices(List<String> applicableServices) {
+    this.applicableServices = applicableServices;
+  }
+
   @Override
   public String toString() {
     return "[ repoInfo: "
@@ -182,6 +203,7 @@ public class RepositoryInfo {
         + ", mirrorsList=" + mirrorsList
         + ", unique=" + unique
         + ", ambariManagedRepositories=" + ambariManagedRepositories
+        + ", applicableServices=" +  StringUtils.join(applicableServices, ",")
         + " ]";
   }
 
@@ -213,7 +235,7 @@ public class RepositoryInfo {
   {
     return new RepositoryResponse(getBaseUrl(), getOsType(), getRepoId(),
             getRepoName(), getDistribution(), getComponents(), getMirrorsList(), getDefaultBaseUrl(),
-            getTags());
+            getTags(), getApplicableServices());
   }
 
   /**
index d37668c..e9f21dc 100644 (file)
@@ -21,6 +21,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -193,6 +194,14 @@ public class RepositoryVersionHelper {
         if (repositoryJson.getAsJsonObject().get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID) != null) {
           repositoryEntity.setUnique(repositoryJson.getAsJsonObject().get(RepositoryResourceProvider.REPOSITORY_UNIQUE_PROPERTY_ID).getAsBoolean());
         }
+        if (repositoryJson.get(RepositoryResourceProvider.REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID) != null) {
+          List<String> applicableServices = new LinkedList<>();
+          JsonArray jsonArray = repositoryJson.get(RepositoryResourceProvider.REPOSITORY_APPLICABLE_SERVICES_PROPERTY_ID).getAsJsonArray();
+          for (JsonElement je : jsonArray) {
+            applicableServices.add(je.getAsString());
+          }
+          repositoryEntity.setApplicableServices(applicableServices);
+        }
 
         if (null != repositoryJson.get(RepositoryResourceProvider.REPOSITORY_TAGS_PROPERTY_ID)) {
           Set<RepoTag> tags = new HashSet<>();
@@ -232,6 +241,7 @@ public class RepositoryVersionHelper {
         repositoryDefinition.setUnique(repository.isUnique());
 
         repositoryDefinition.setTags(repository.getTags());
+        repositoryDefinition.setApplicableServices(repository.getApplicableServices());
 
         repositoriesList.add(repositoryDefinition);
         operatingSystemEntity.setAmbariManaged(repository.isAmbariManagedRepositories());
index ac9688c..27212a3 100644 (file)
@@ -221,6 +221,11 @@ public class UpgradeCatalog270 extends AbstractUpgradeCatalog {
   protected static final String REPO_TAGS_TAG_COLUMN = "tag";
   protected static final String REPO_TAGS_FOREIGN_KEY = "FK_repo_tag_definition_id";
 
+  protected static final String REPO_APPLICABLE_SERVICES_TABLE = "repo_applicable_services";
+  protected static final String REPO_APPLICABLE_SERVICES_REPO_DEFINITION_ID_COLUMN = "repo_definition_id";
+  protected static final String REPO_APPLICABLE_SERVICES_SERVICE_NAME_COLUMN = "service_name";
+  protected static final String REPO_APPLICABLE_SERVICES_FOREIGN_KEY = "FK_repo_app_service_def_id";
+
   protected static final String REPO_VERSION_TABLE = "repo_version";
   protected static final String REPO_VERSION_REPO_VERSION_ID_COLUMN = "repo_version_id";
   protected static final String REPO_VERSION_REPOSITORIES_COLUMN = "repositories";
@@ -327,6 +332,7 @@ public class UpgradeCatalog270 extends AbstractUpgradeCatalog {
     createRepoOsTable();
     createRepoDefinitionTable();
     createRepoTagsTable();
+    createRepoApplicableServicesTable();
     migrateRepoData();
     updateRepoVersionTable();
   }
@@ -414,6 +420,26 @@ public class UpgradeCatalog270 extends AbstractUpgradeCatalog {
   }
 
   /**
+   * Adds the repo_applicable_services table to the Ambari database.
+   * <pre>
+   *   CREATE TABLE repo_applicable_services (
+   *     repo_definition_id BIGINT NOT NULL,
+   *     service_name VARCHAR(255) NOT NULL,
+   *     CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+   * </pre>
+   *
+   * @throws SQLException
+   */
+  private void createRepoApplicableServicesTable() throws SQLException {
+    List<DBAccessor.DBColumnInfo> columns = new ArrayList<>();
+    columns.add(new DBAccessor.DBColumnInfo(REPO_APPLICABLE_SERVICES_REPO_DEFINITION_ID_COLUMN, Long.class, null, null, false));
+    columns.add(new DBAccessor.DBColumnInfo(REPO_APPLICABLE_SERVICES_SERVICE_NAME_COLUMN, String.class, 255, null, false));
+
+    dbAccessor.createTable(REPO_APPLICABLE_SERVICES_TABLE, columns);
+    dbAccessor.addFKConstraint(REPO_APPLICABLE_SERVICES_TABLE, REPO_APPLICABLE_SERVICES_FOREIGN_KEY, REPO_APPLICABLE_SERVICES_REPO_DEFINITION_ID_COLUMN, REPO_DEFINITION_TABLE, REPO_DEFINITION_ID_COLUMN, false);
+  }
+
+  /**
    * Perform steps to move data from the old repo_version.repositories structure into new tables -
    * repo_os, repo_definition, repo_tags
    *
index 7615a4f..c06bc95 100644 (file)
@@ -197,6 +197,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id BIGINT NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
index 667e771..87ef6be 100644 (file)
@@ -217,6 +217,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id BIGINT NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id BIGINT NOT NULL,
   component_name VARCHAR(100) NOT NULL,
index 537fb0e..b530cb6 100644 (file)
@@ -198,6 +198,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id NUMBER(19) NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id NUMBER(19) NOT NULL,
   component_name VARCHAR2(255) NOT NULL,
index 5c2caea..d6170a1 100644 (file)
@@ -199,6 +199,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id BIGINT NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
index 06ab502..c257ec5 100644 (file)
@@ -196,6 +196,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id NUMERIC(19) NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id NUMERIC(19) NOT NULL,
   component_name VARCHAR(255) NOT NULL,
index 601102c..e61ffa8 100644 (file)
@@ -211,6 +211,11 @@ CREATE TABLE repo_tags (
   tag VARCHAR(255) NOT NULL,
   CONSTRAINT FK_repo_tag_definition_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
 
+CREATE TABLE repo_applicable_services (
+  repo_definition_id BIGINT NOT NULL,
+  service_name VARCHAR(255) NOT NULL,
+  CONSTRAINT FK_repo_app_service_def_id FOREIGN KEY (repo_definition_id) REFERENCES repo_definition (id));
+
 CREATE TABLE servicecomponentdesiredstate (
   id BIGINT NOT NULL,
   component_name VARCHAR(255) NOT NULL,
index 92cfc58..800bb21 100644 (file)
@@ -389,6 +389,7 @@ class InstallPackages(Script):
       # patches installed
       repositories = config['repositoryFile']['repositories']
       command_repos = CommandRepository(config['repositoryFile'])
+      command_repos.items = [x for x in command_repos.items if not x.applicable_services]
       repository_ids = [repository['repoId'] for repository in repositories]
       repos_to_use = {}
       for repo_id in repository_ids:
index 0d7813a..0cdfae6 100644 (file)
@@ -59,7 +59,8 @@ public class RepositoryResourceProviderTest {
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, VAL_DISTRIBUTION, VAL_COMPONENT_NAME, null, null, Collections.<RepoTag>emptySet());
+        VAL_REPO_ID, VAL_REPO_NAME, VAL_DISTRIBUTION, VAL_COMPONENT_NAME, null, null,
+      Collections.<RepoTag>emptySet(), Collections.emptyList());
     rr.setStackName(VAL_STACK_NAME);
     rr.setStackVersion(VAL_STACK_VERSION);
     Set<RepositoryResponse> allResponse = new HashSet<>();
@@ -171,7 +172,8 @@ public class RepositoryResourceProviderTest {
     AmbariManagementController managementController = EasyMock.createMock(AmbariManagementController.class);
 
     RepositoryResponse rr = new RepositoryResponse(VAL_BASE_URL, VAL_OS,
-        VAL_REPO_ID, VAL_REPO_NAME, null, null, null, null , Collections.<RepoTag>emptySet());
+      VAL_REPO_ID, VAL_REPO_NAME, null, null, null, null ,
+      Collections.<RepoTag>emptySet(), Collections.emptyList());
     Set<RepositoryResponse> allResponse = new HashSet<>();
     allResponse.add(rr);
 
index 9c3d108..4ea9c45 100644 (file)
@@ -55,6 +55,9 @@ import static org.apache.ambari.server.upgrade.UpgradeCatalog270.PK_KERBEROS_KEY
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.PK_KKP;
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.PK_KKP_MAPPING_SERVICE;
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.PRINCIPAL_NAME_COLUMN;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_APPLICABLE_SERVICES_FOREIGN_KEY;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_APPLICABLE_SERVICES_REPO_DEFINITION_ID_COLUMN;
+import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_APPLICABLE_SERVICES_TABLE;
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_DEFINITION_BASE_URL_COLUMN;
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_DEFINITION_COMPONENTS_COLUMN;
 import static org.apache.ambari.server.upgrade.UpgradeCatalog270.REPO_DEFINITION_DISTRIBUTION_COLUMN;
@@ -432,6 +435,7 @@ public class UpgradeCatalog270Test {
     Capture<List<DBAccessor.DBColumnInfo>> addRepoOsTableCapturedColumns = newCapture(CaptureType.ALL);
     Capture<List<DBAccessor.DBColumnInfo>> addRepoDefinitionTableCapturedColumns = newCapture(CaptureType.ALL);
     Capture<List<DBAccessor.DBColumnInfo>> addRepoTagsTableCapturedColumns = newCapture(CaptureType.ALL);
+    Capture<List<DBAccessor.DBColumnInfo>> addRepoApplicableServicesTableCapturedColumns = newCapture(CaptureType.ALL);
     Capture<String[]> insertRepoOsTableRowColumns = newCapture(CaptureType.ALL);
     Capture<String[]> insertRepoOsTableRowValues = newCapture(CaptureType.ALL);
     Capture<String[]> insertAmbariSequencesRowColumns = newCapture(CaptureType.ALL);
@@ -445,7 +449,7 @@ public class UpgradeCatalog270Test {
     prepareUpdateAdminPrivilegeRecords(dbAccessor, createAdminPrincipalTableCaptures);
     prepareUpdateUsersTable(dbAccessor, updateUserTableCaptures, alterUserTableCaptures);
     prepareUpdateRepoTables(dbAccessor, addRepoOsTableCapturedColumns, addRepoDefinitionTableCapturedColumns, addRepoTagsTableCapturedColumns,
-        insertRepoOsTableRowColumns, insertRepoOsTableRowValues, insertAmbariSequencesRowColumns, insertAmbariSequencesRowValues);
+      addRepoApplicableServicesTableCapturedColumns, insertRepoOsTableRowColumns, insertRepoOsTableRowValues, insertAmbariSequencesRowColumns, insertAmbariSequencesRowValues);
 
     // upgradeKerberosTables
     Capture<List<DBAccessor.DBColumnInfo>> kerberosKeytabColumnsCapture = newCapture();
@@ -743,6 +747,7 @@ public class UpgradeCatalog270Test {
                                        Capture<List<DBAccessor.DBColumnInfo>> addRepoOsTableCapturedColumns,
                                        Capture<List<DBAccessor.DBColumnInfo>> addRepoDefinitionTableCapturedColumns,
                                        Capture<List<DBAccessor.DBColumnInfo>> addRepoTagsTableCapturedColumns,
+                                       Capture<List<DBAccessor.DBColumnInfo>> addRepoApplicableServicesTableCapturedColumns,
                                        Capture<String[]> insertRepoOsTableRowColumns,
                                        Capture<String[]> insertRepoOsTableRowValues,
                                        Capture<String[]> insertAmbariSequencesRowColumns,
@@ -768,6 +773,11 @@ public class UpgradeCatalog270Test {
     dbAccessor.addFKConstraint(REPO_TAGS_TABLE, REPO_TAGS_FOREIGN_KEY, REPO_TAGS_REPO_DEFINITION_ID_COLUMN, REPO_DEFINITION_TABLE, REPO_DEFINITION_ID_COLUMN, false);
     expectLastCall().once();
 
+    dbAccessor.createTable(eq(REPO_APPLICABLE_SERVICES_TABLE), capture(addRepoApplicableServicesTableCapturedColumns));
+    expectLastCall().once();
+    dbAccessor.addFKConstraint(REPO_APPLICABLE_SERVICES_TABLE, REPO_APPLICABLE_SERVICES_FOREIGN_KEY, REPO_APPLICABLE_SERVICES_REPO_DEFINITION_ID_COLUMN, REPO_DEFINITION_TABLE, REPO_DEFINITION_ID_COLUMN, false);
+    expectLastCall().once();
+
     expect(dbAccessor.tableHasColumn(eq(REPO_VERSION_TABLE), eq(REPO_VERSION_REPOSITORIES_COLUMN))).andReturn(true).once();
     final Map<Long, String> repoMap = new HashMap<>();
     repoMap.put(1L, getSampleRepositoryData());
index d2a2de2..9b5bf37 100644 (file)
@@ -848,7 +848,8 @@ App.InstallerController = App.WizardController.extend(App.Persist, {
                 "repo_name": repository.get('repoName'),
                 "components": repository.get('components'),
                 "tags": repository.get('tags'),
-                "distribution": repository.get('distribution')
+                "distribution": repository.get('distribution'),
+                "applicable_services" : repository.get('applicable_services')
               }
             });
           }
index bf643bd..cfb0a45 100644 (file)
@@ -75,7 +75,8 @@ App.repoVersionMapper = App.QuickDataMapper.create({
     repo_name : 'Repositories.repo_name',
     stack_name : 'Repositories.stack_name',
     stack_version : 'Repositories.stack_version',
-    tags: 'Repositories.tags'
+    tags: 'Repositories.tags',
+    applicable_services: 'Repositories.applicable_services'
   },
 
   map: function (json, loadAll, isCurrentStackOnly) {
index b516b52..8d03d4b 100644 (file)
@@ -89,7 +89,8 @@ App.stackMapper = App.QuickDataMapper.create({
     operating_system_id: 'os_id',
     components: 'components',
     distribution: 'distribution',
-    tags: 'tags'
+    tags: 'tags',
+    applicable_services: 'applicable_services'
   },
   
   map: function(json) {
index 22c7690..5ab8cd7 100644 (file)
@@ -36,6 +36,7 @@ App.Repository = DS.Model.extend({
   components: DS.attr('string'),
   distribution: DS.attr('string'),
   tags: DS.attr('array'),
+  applicable_services: DS.attr('array'),
 
   validation: DS.attr('string', {defaultValue: ''}),
   validationClassName: Em.computed.getByKey('validationClassNameMap', 'validation', ''),