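"""Bring up the Dockerized Hadoop HA cluster described in hacluster.xml.

The script starts the Docker daemon, parses hacluster.xml, and starts the
ZooKeeper, HDFS and YARN containers in that order. The layout below is an
illustrative sketch inferred from the parsing code; the root element name and
the concrete host/port/volume values are placeholders, not a documented schema:

    <hacluster>
        <cluster type="zk">
            <node host="zk1" platform="docker">
                <port>2181:2181</port>
                <volume>/data/zk1:/data</volume>
            </node>
        </cluster>
        <cluster type="hdfs">...</cluster>
        <cluster type="yarn">...</cluster>
    </hacluster>
"""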
from xml.etree.ElementTree import ElementTree,Element
import subprocess
def read_xml(in_path):
    """Parse the cluster description XML and return the ElementTree."""
    tree = ElementTree()
    tree.parse(in_path)
    return tree

def if_match(node, kv_map):
    """Return True if every key/value pair in kv_map matches the node's attributes."""
    for key in kv_map:
        if node.get(key) != kv_map.get(key):
            return False
    return True

def find_nodes(tree, path):
    """Find all elements matching the given path."""
    return tree.findall(path)

def get_node_by_keyvalue(nodelist, kv_map):
    """Filter nodelist down to the elements whose attributes match kv_map."""
    result_nodes = []
    for node in nodelist:
        if if_match(node, kv_map):
            result_nodes.append(node)
    return result_nodes

def start_docker():
    """Start the Docker daemon via systemd."""
    start_cmd = ["systemctl", "start", "docker.service"]
    print(start_cmd)
    return subprocess.call(start_cmd)

def rm_container(node):
    """Force-remove the container named after the node's host attribute."""
    rm_cmd = ["docker", "rm", "-f", node.attrib["host"]]
    print(rm_cmd)
    return subprocess.call(rm_cmd)

def run_container(node):
    """Create and start a container for the node on the user-defined 'hadoop' network."""
    run_cmd = ["docker", "run", "-itd", "--net=hadoop"]
    # Each <port> holds a "hostPort:containerPort" mapping; expose the
    # container-side port and publish the full mapping.
    for port in node.findall("port"):
        index = port.text.index(":") + 1
        run_cmd.append("--expose=" + port.text[index:])
        run_cmd.append("-p")
        run_cmd.append(port.text)
    # Each <volume> holds a "hostPath:containerPath" bind mount.
    for volume in node.findall("volume"):
        run_cmd.append("-v")
        run_cmd.append(volume.text)
    # Container name and hostname both come from the node's host attribute.
    run_cmd.append("--name")
    run_cmd.append(node.attrib["host"])
    run_cmd.append("--hostname")
    run_cmd.append(node.attrib["host"])
    run_cmd.append("elbertmalone/hadoophacluster:2.7.2")
    print(run_cmd)
    return subprocess.call(run_cmd)
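
# Illustrative only: for a hypothetical <node host="nn1" platform="docker"> with
# <port>50070:50070</port> and <volume>/data/nn1:/hadoop/dfs</volume>,
# run_container() builds roughly:
#   docker run -itd --net=hadoop --expose=50070 -p 50070:50070 \
#       -v /data/nn1:/hadoop/dfs --name nn1 --hostname nn1 \
#       elbertmalone/hadoophacluster:2.7.2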
def start_container(node):
    """Start an existing (stopped) container."""
    start_cmd = ["docker", "start", node.attrib["host"]]
    print(start_cmd)
    return subprocess.call(start_cmd)

def stop_container(node):
    """Stop a running container."""
    stop_cmd = ["docker", "stop", node.attrib["host"]]
    print(stop_cmd)
    return subprocess.call(stop_cmd)

def docker_run(nodelist):
    """Recreate every container in nodelist: remove it if present, then run it fresh."""
    for node in nodelist:
        rm_container(node)
        run_container(node)

def docker_start(nodelist):
    """Start every (already created) container in nodelist."""
    for node in nodelist:
        start_container(node)

def docker_stop(nodelist):
    """Stop every running container in nodelist."""
    for node in nodelist:
        stop_container(node)

if __name__ == "__main__":
    start_docker()
    cluster = read_xml("hacluster.xml")
    clusterlist = find_nodes(cluster, "cluster")
    # 1> zk cluster
    zkcluster = get_node_by_keyvalue(clusterlist, {"type": "zk"})
    zknodes = zkcluster[0].findall("node")
    zkdockers = get_node_by_keyvalue(zknodes, {"platform": "docker"})
    docker_start(zkdockers)
    # 2> hdfs cluster
    hdfscluster = get_node_by_keyvalue(clusterlist, {"type": "hdfs"})
    hdfsnodes = hdfscluster[0].findall("node")
    hdfsdockers = get_node_by_keyvalue(hdfsnodes, {"platform": "docker"})
    docker_start(hdfsdockers)
    # 3> yarn cluster
    yarncluster = get_node_by_keyvalue(clusterlist, {"type": "yarn"})
    yarnnodes = yarncluster[0].findall("node")
    yarndockers = get_node_by_keyvalue(yarnnodes, {"platform": "docker"})
    docker_start(yarndockers)
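
# docker_run() (force-remove then recreate) and docker_stop() are defined above
# but not called here; a hypothetical teardown could stop the clusters in
# reverse start order, e.g.:
#   docker_stop(yarndockers)
#   docker_stop(hdfsdockers)
#   docker_stop(zkdockers)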