path: root/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import functools
import hawq_constants
from resource_management import Script
from resource_management.core.resources.system import File
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions.default import default
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.resources.xml_config import XmlConfig
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources

config = Script.get_config()
config_attrs = config['configurationAttributes']

def __get_component_host(component):
  """
  Returns the first host where the given component is deployed, or None if the component is not deployed
  """
  component_host = None
  if component in config['clusterHostInfo'] and len(config['clusterHostInfo'][component]) > 0:
    component_host = config['clusterHostInfo'][component][0]
  return component_host


hostname = config['agentLevelParams']['hostname']

# Users and Groups
hdfs_superuser = config['configurations']['hadoop-env']['hdfs_user']
user_group = config['configurations']['cluster-env']['user_group']

# Convert hawq_password to unicode for the crypt() function, in case the user enters a numeric password
hawq_password = unicode(config['configurations']['hawq-env']['hawq_password'])


# HAWQ Hostnames
hawqmaster_host = __get_component_host('hawqmaster_hosts')
hawqstandby_host = __get_component_host('hawqstandby_hosts')
hawqsegment_hosts = sorted(default('/clusterHostInfo/hawqsegment_hosts', []))
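# Master and standby hosts combined, dropping whichever role is not deployed in this cluster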
hawq_master_hosts = [host for host in (hawqmaster_host, hawqstandby_host) if host]
hawq_all_hosts = sorted(set(hawq_master_hosts + hawqsegment_hosts))


# HDFS
hdfs_site = config['configurations']['hdfs-site']
default_fs = config['configurations']['core-site']['fs.defaultFS']
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

security_enabled = config['configurations']['cluster-env']['security_enabled']
hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
dfs_nameservice = default('/configurations/hdfs-site/dfs.internal.nameservices', None)
if dfs_nameservice is None:
  dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)

hawq_global_rm_type = default('/configurations/hawq-site/hawq_global_rm_type', None)
dfs_type = default("/commandParams/dfs_type", "")

# HDFSResource partial function
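# Pre-binds the shared HDFS and Kerberos arguments so call sites only need to supply the path-specific ones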
HdfsResource = functools.partial(HdfsResource,
                                 user=hdfs_superuser,
                                 security_enabled=security_enabled,
                                 keytab=hdfs_user_keytab,
                                 kinit_path_local=kinit_path_local,
                                 principal_name=hdfs_principal_name,
                                 hadoop_bin_dir=hadoop_bin_dir,
                                 hadoop_conf_dir=hadoop_conf_dir,
                                 hdfs_site=hdfs_site,
                                 default_fs=default_fs,
                                 immutable_paths=get_not_managed_resources(),
                                 dfs_type=dfs_type)

# File partial function
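# Rebinds File so that File resources in the HAWQ scripts default to hawq user/group ownership and 0644 permissions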
File = functools.partial(File,
                         owner=hawq_constants.hawq_user,
                         group=hawq_constants.hawq_group,
                         mode=0644)

# XMLConfig partial function
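# Rebinds XmlConfig so that HAWQ XML configuration files are written to the HAWQ config dir with hawq ownership and 0644 permissions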
XmlConfig = functools.partial(XmlConfig,
                              conf_dir=hawq_constants.hawq_config_dir,
                              owner=hawq_constants.hawq_user,
                              group=hawq_constants.hawq_group,
                              mode=0644)

# For service check
is_pxf_installed = __get_component_host("pxf_hosts") is not None
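# With NameNode HA the nameservice name is used in PXF LOCATION URIs; otherwise the namenode host plus the PXF port is used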
namenode_path = "{0}:{1}".format(__get_component_host("namenode_hosts"), hawq_constants.PXF_PORT) if dfs_nameservice is None else dfs_nameservice
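# Table definitions used by the service check: a plain HAWQ table plus PXF readable/writable external tables over HDFS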
table_definition = {
  "HAWQ": {
    "name": "ambari_hawq_test",
    "create_type": "",
    "drop_type": "",
    "description": "(col1 int) DISTRIBUTED RANDOMLY"
  },
  "EXTERNAL_HDFS_READABLE": {
    "name": "ambari_hawq_pxf_hdfs_readable_test",
    "create_type": "READABLE EXTERNAL",
    "drop_type": "EXTERNAL",
    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, hawq_constants.pxf_hdfs_test_dir)
  },
  "EXTERNAL_HDFS_WRITABLE": {
    "name": "ambari_hawq_pxf_hdfs_writable_test",
    "create_type": "WRITABLE EXTERNAL",
    "drop_type": "EXTERNAL",
    "description": "(col1 int) LOCATION ('pxf://{0}{1}?PROFILE=HdfsTextSimple') FORMAT 'TEXT'".format(namenode_path, hawq_constants.pxf_hdfs_test_dir)
  }
}


# YARN
# Note: YARN is not mandatory for HAWQ. It is required only when users configure HAWQ to use YARN as its resource manager
rm_host = __get_component_host('resourcemanager_hosts')
is_yarn_ha_enabled = str(default('/configurations/yarn-site/yarn.resourcemanager.ha.enabled', False)).lower() == "true"

# Config files
hawq_check_content = config['configurations']['hawq-check-env']['content']
# database user limits
hawq_limits = config['configurations']['hawq-limits-env']
# sysctl parameters
hawq_sysctl = config['configurations']['hawq-sysctl-env']
# hawq config
hawq_site = config['configurations']['hawq-site']
# hdfs-client for enabling HAWQ to work with HDFS namenode HA
hdfs_client = config['configurations']['hdfs-client']
# yarn-client for enabling HAWQ to work with YARN resource manager HA
yarn_client = config['configurations']['yarn-client']


# Directories and ports
hawq_master_dir = hawq_site.get('hawq_master_directory')
hawq_segment_dir = hawq_site.get('hawq_segment_directory')
hawq_master_temp_dirs = hawq_site.get('hawq_master_temp_directory')
hawq_segment_temp_dirs = hawq_site.get('hawq_segment_temp_directory')
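# Note: the temp directory properties may hold a comma-separated list of directories, hence the plural variable names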
# Extract the HAWQ HDFS data directory from the HDFS URL, e.g. /hawq/hawq_data from host:8080/hawq/hawq_data
hawq_hdfs_data_dir = "/{0}".format(hawq_site.get('hawq_dfs_url').split('/', 1)[1])
hawq_master_address_port = hawq_site.get('hawq_master_address_port')
hawq_segment_address_port = hawq_site.get('hawq_segment_address_port')