author     Andrew Onishuk <aonishuk@hortonworks.com>  2018-06-04 15:37:35 +0300
committer  aonishuk <aonishuk@hortonworks.com>        2018-06-04 19:53:46 +0300
commit     06089994a73a7b07b266887788546b1153494d10
tree       38186f8a42f04310449747fe72a31be457c7ff5a
parent     f5af2b4946e8104e885799fa978b83975a159bbf
AMBARI-24022. AutoStart is not working for some of the components in the cluster (aonishuk)
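Root cause, as the diff below shows: dfs_type is now published server-side under clusterLevelParams, and every script that still read /commandParams/dfs_type silently fell back to an empty string on auto-start commands, so the fix repoints all readers. A minimal sketch of the lookup semantics, using a hypothetical stand-in for the resource_management default() helper (the real helper reads the command JSON via Script.get_config() rather than taking it as an argument):

def default(path, command_json, fallback=""):
    # Hypothetical stand-in for resource_management's default() helper.
    node = command_json
    for key in path.strip("/").split("/"):
        if not isinstance(node, dict) or key not in node:
            return fallback  # a stale path fails silently -- the shape of this bug
        node = node[key]
    return node

command = {"clusterLevelParams": {"dfs_type": "HCFS"}}
assert default("/commandParams/dfs_type", command) == ""           # old read: empty
assert default("/clusterLevelParams/dfs_type", command) == "HCFS"  # new read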
-rw-r--r--  ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java | 1
-rw-r--r--  ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java | 12
-rw-r--r--  ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py | 2
-rwxr-xr-x  ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py | 2
-rwxr-xr-x  ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py | 2
-rw-r--r--  ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py | 2
32 files changed, 43 insertions(+), 30 deletions(-)
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
index 592c8930df..7d9964cc42 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/agent/ExecutionCommand.java
@@ -558,6 +558,7 @@ public class ExecutionCommand extends AgentCommand {
String AGENT_STACK_RETRY_ON_UNAVAILABILITY = "agent_stack_retry_on_unavailability";
String AGENT_STACK_RETRY_COUNT = "agent_stack_retry_count";
String LOG_OUTPUT = "log_output";
+ String DFS_TYPE = "dfs_type";
/**
* A boolean indicating whether configuration tags should be refreshed
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index da5523b67f..5a738e1048 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -31,6 +31,7 @@ import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.COMMAND_T
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.CUSTOM_FOLDER;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_DRIVER_FILENAME;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DB_NAME;
+import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.DFS_TYPE;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GPL_LICENSE_ACCEPTED;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.GROUP_LIST;
import static org.apache.ambari.server.agent.ExecutionCommand.KeyNames.HOOKS_FOLDER;
@@ -5766,6 +5767,17 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
String notManagedHdfsPathList = gson.toJson(notManagedHdfsPathSet);
clusterLevelParams.put(NOT_MANAGED_HDFS_PATH_LIST, notManagedHdfsPathList);
+ Map<String, ServiceInfo> serviceInfos = ambariMetaInfo.getServices(stackId.getStackName(), stackId.getStackVersion());
+ for (ServiceInfo serviceInfoInstance : serviceInfos.values()) {
+ if (serviceInfoInstance.getServiceType() != null) {
+ LOG.debug("Adding {} to command parameters for {}", serviceInfoInstance.getServiceType(),
+ serviceInfoInstance.getName());
+
+ clusterLevelParams.put(DFS_TYPE, serviceInfoInstance.getServiceType());
+ break;
+ }
+ }
+
return clusterLevelParams;
}
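For reference, the controller loop above takes the first service in the stack that declares a serviceType and publishes it cluster-wide as dfs_type. A Python mirror of that selection logic, with illustrative service data (in HDP stacks the ECS service declares HCFS; most services declare none):

def resolve_dfs_type(service_infos):
    # Same shape as the Java loop above: the first declared service type
    # wins, then break.
    for info in service_infos:
        if info.get("service_type") is not None:
            return info["service_type"]
    return None

services = [
    {"name": "ZOOKEEPER", "service_type": None},
    {"name": "ECS", "service_type": "HCFS"},  # illustrative values
]
assert resolve_dfs_type(services) == "HCFS"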
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 256be1ff6c..d87d9c2b30 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -198,7 +198,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# dfs.namenode.https-address
import functools
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py b/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
index 91fff50c1e..5d3e897bd9 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_INFRA_SOLR/0.1.0/package/scripts/command_commons.py
@@ -109,7 +109,7 @@ if solr_hdfs_path:
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None)
hdfs_user_keytab = params.config['configurations']['hadoop-env']['hdfs_user_keytab']
- dfs_type = default("/commandParams/dfs_type", "")
+ dfs_type = default("/clusterLevelParams/dfs_type", "")
hdfs_site = params.config['configurations']['hdfs-site']
default_fs = params.config['configurations']['core-site']['fs.defaultFS']
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 458f45a7ff..8aee8d4181 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -398,7 +398,7 @@ min_hadoop_sink_version = default("/configurations/ams-env/min_ambari_metrics_ha
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
index 20604cf567..e05b22c360 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
@@ -61,7 +61,7 @@ hbase_conf_dir = "/etc/ams-hbase/conf"
limits_conf_dir = "/etc/security/limits.d"
sudo = AMBARI_SUDO_BINARY
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
hbase_regionserver_shutdown_timeout = expect('/configurations/ams-hbase-env/hbase_regionserver_shutdown_timeout', int,
30)
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
index 8ce94be84c..12d7559e2d 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py
@@ -325,7 +325,7 @@ if stack_supports_atlas_ranger_plugin and enable_ranger_atlas:
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
- dfs_type = default("/commandParams/dfs_type", "")
+ dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
from resource_management.libraries.resources.hdfs_resource import HdfsResource
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
index 5fe262a010..9958123739 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.10.1/package/scripts/params.py
@@ -130,7 +130,7 @@ hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name',
hostname)
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
# Kerberos
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 858e6ab7dd..ce67ebfe0b 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -180,7 +180,7 @@ if falcon_atlas_support:
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
bdb_jar_name = "je-5.0.73.jar"
bdb_resource_name = format("{jdk_location}/{bdb_jar_name}")
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index fb33e68f1e..5e6b314dce 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -74,7 +74,7 @@ if dfs_nameservice is None:
dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
hawq_global_rm_type = default('/configurations/hawq-site/hawq_global_rm_type', None)
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index f60cb5bef3..0552275e29 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -261,7 +261,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 16bfc8efab..1c1987f8b2 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -379,7 +379,7 @@ else:
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index b13fa06867..5ce2d0cb72 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -621,7 +621,7 @@ security_param = "true" if security_enabled else "false"
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
index 722fe7ce19..17c678ac97 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/params.py
@@ -335,7 +335,7 @@ default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_nameno
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
index 21ff2c6515..ee3e8c3ab1 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/params_linux.py
@@ -513,7 +513,7 @@ hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site']['fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 29ca4bac29..bf85ba3df4 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -75,7 +75,7 @@ log4j_props = config['configurations']['mahout-log4j']['content']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 1f7df7f8cf..3b002cf3c3 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -351,7 +351,7 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
########################################################
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index c99c6063c3..80fe145316 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -81,7 +81,7 @@ log4j_props = config['configurations']['pig-log4j']['content']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
index e451aa1ab7..6bb73b7cff 100644
--- a/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PXF/3.0.0/package/scripts/params.py
@@ -78,7 +78,7 @@ hdfs_user_keytab = default('configurations/hadoop-env/hdfs_user_keytab', None)
hdfs_principal_name = default('configurations/hadoop-env/hdfs_principal_name', None)
hbase_user_keytab = default('configurations/hbase-env/hbase_user_keytab', None)
hbase_principal_name = default('configurations/hbase-env/hbase_principal_name', None)
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# HDFSResource partial function
HdfsResource = functools.partial(HdfsResource,
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
index eac71dd85d..a9ddb5db76 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/params.py
@@ -314,7 +314,7 @@ default_fs = default("/configurations/core-site/fs.defaultFS", None)
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
# create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
index 49839bb2f5..35ff7a2160 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params_linux.py
@@ -61,7 +61,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
index fc97f03ef3..24bee8d719 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/params.py
@@ -210,7 +210,7 @@ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
has_ats = len(ats_host) > 0
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# livy related config
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
index 7c929fb35f..2f38a4b294 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/params.py
@@ -195,7 +195,7 @@ hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
ats_host = set(default("/clusterHostInfo/app_timeline_server_hosts", []))
has_ats = len(ats_host) > 0
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# livy related config
diff --git a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
index 260e74d1d0..71ab0371e6 100644
--- a/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/STORM/0.9.1/package/scripts/params_linux.py
@@ -409,7 +409,7 @@ hadoop_bin_dir = stack_select.get_hadoop_dir("bin") if has_namenode else None
hadoop_conf_dir = conf_select.get_hadoop_conf_dir() if has_namenode else None
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 76e6eeb3a7..dd70a9f0ff 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -93,7 +93,7 @@ tez_env_sh_template = config['configurations']['tez-env']['content']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
#create partial functions with common arguments for every HdfsResource call
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 6be8149c89..3fbb1e4a02 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -409,7 +409,7 @@ is_webhdfs_enabled = hdfs_site['dfs.webhdfs.enabled']
# Path to file that contains list of HDFS resources to be skipped during processing
hdfs_resource_ignore_file = "/var/lib/ambari-agent/data/.hdfs_resource_ignore"
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
import functools
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
index ef1ba32730..e0f878c543 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0/package/scripts/params.py
@@ -239,7 +239,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# create partial functions with common arguments for every HdfsResource call
# to create hdfs directory we need to call params.HdfsResource in code
diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
index a960c09e50..529a1f6294 100644
--- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.7.0/package/scripts/params.py
@@ -253,7 +253,7 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
# create partial functions with common arguments for every HdfsResource call
# to create hdfs directory we need to call params.HdfsResource in code
diff --git a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
index 6fc8f1e41a..a30949d57a 100644
--- a/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/after-INSTALL/scripts/params.py
@@ -33,7 +33,7 @@ from string import lower
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
is_parallel_execution_enabled = int(default("/agentConfigParams/agent/parallel_execution", 0)) == 1
host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
index 6f6cd39362..36aae1d3d1 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@ -48,7 +48,7 @@ stack_root = Script.get_stack_root()
architecture = get_architecture()
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
jdk_name = default("/ambariLevelParams/jdk_name", None)
diff --git a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
index 04a56048b6..e3fa4765c2 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-START/scripts/params.py
@@ -58,7 +58,7 @@ stack_version_unformatted = config['clusterLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
major_stack_version = get_major_version(stack_version_formatted)
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
hadoop_conf_dir = "/etc/hadoop/conf"
component_list = default("/localComponents", [])
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
index 6fbec4ae9d..13ce4d45ed 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.3.ECS/services/ECS/package/scripts/params.py
@@ -63,7 +63,7 @@ hdfs_tmp_dir = config['configurations']['hadoop-env']['hdfs_tmp_dir']
hdfs_principal_name = default('/configurations/hadoop-env/hdfs_principal_name', None)
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
-dfs_type = default("/commandParams/dfs_type", "")
+dfs_type = default("/clusterLevelParams/dfs_type", "")
ambari_libs_dir = "/var/lib/ambari-agent/lib"
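Finally, a sketch of how the repointed parameter is consumed downstream, mirroring the HdfsResource partial-function pattern used throughout the scripts above (the HdfsResource stub here is hypothetical; the real provider lives in resource_management.libraries.resources.hdfs_resource and uses dfs_type to special-case non-HDFS filesystems such as HCFS):

import functools

def HdfsResource(path, dfs_type="", **kwargs):
    # Hypothetical stub standing in for the real HdfsResource provider.
    return {"path": path, "dfs_type": dfs_type, **kwargs}

dfs_type = "HCFS"  # as now read via default("/clusterLevelParams/dfs_type", "")
# Each script binds dfs_type once, so every later call inherits it:
HdfsResource = functools.partial(HdfsResource, dfs_type=dfs_type)
print(HdfsResource("/app-logs", type="directory"))
# -> {'path': '/app-logs', 'dfs_type': 'HCFS', 'type': 'directory'}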