使用Fabric部署Hadoop和HBase

使用Fabric部署Hadoop和HBase


Fabric是一个自动化的通过SSH在多台机器上批量执行程序的框架。利用事先编辑好的项目配置文件,可以实现项目的自动部署和维护。整个操作都在本地的当前目录进行,非常方便。

Fabric的思想可能是借鉴自Capistrano:一个为Rails设计的自动部署框架,目前已经被很多Ruby和非Ruby的项目采用,包括Hypertable这样的Java项目。

这样的工具天然适合集群的管理和部署,因此我把我为公司集群写的Fabfile.py文件贴了上来。由于4节点的Hadoop集群的配置到处都是,因此就不详细介绍了。

from fabric.api import env, roles, run, cd, local, put

# Role definitions mapping logical cluster roles to user@host SSH targets.
# 'tbbt' covers all four nodes as the hadoop user; '*_root' variants log in
# as root for privileged work; 'master' is the NameNode/JobTracker host and
# 'slaves' are the DataNode/TaskTracker hosts.
env.roledefs = {

    'tbbt' : ['hadoop@spock', 'hadoop@rock', 'hadoop@paper', 'hadoop@lizard'],

    'tbbt_root' : ['root@spock', 'root@rock', 'root@paper', 'root@lizard'],

    'master' : ['hadoop@spock'],

    'master_root' : ['root@spock'],

    'slaves' : ['hadoop@rock', 'hadoop@paper', 'hadoop@lizard']

}

# Local directory the tasks run from.
source_dir = "."

# Remote checkout/install directory (relative to the SSH user's home).
install_dir = "hadoop_install_fabric"

# Remote data directory for HDFS storage.
hadoop_data = "/home/hadoop_data"

# Subversion repository holding the install tree and shared configs.
repository = "svn://***/hadoop_install"

# Local paths of the distribution tarballs uploaded by init().
hadoop_targz = "archives/hadoop-0.20.1.tar.gz"

hbase_targz = "archives/hbase-0.20.2.tar.gz"

@roles('tbbt')
def init():
    """Bootstrap every cluster node.

    Checks out the install tree from Subversion, uploads the Hadoop and
    HBase tarballs, unpacks them, and symlinks the shared configuration
    directories from the checkout into each distribution.
    """
    run('svn co %s %s' % (repository, install_dir))
    # BUG FIX: the original did `install_dir + hadoop_targz`, which
    # concatenates without a separator ("hadoop_install_fabricarchives/...").
    put(hadoop_targz, '%s/%s' % (install_dir, hadoop_targz))
    put(hbase_targz, '%s/%s' % (install_dir, hbase_targz))
    with cd(install_dir):
        # BUG FIX: tar expects the archive name right after "xf"; the
        # original "tar xf -C hadoop <file>" made tar treat "-C" as the
        # archive to extract.
        run('tar xf %s -C hadoop' % hadoop_targz)
        run('tar xf %s -C hbase' % hbase_targz)
        # Replace the bundled conf dirs with symlinks to the versioned
        # configs from the svn checkout.
        run('rm -rf hadoop/conf hbase/conf')
        run('ln -sf configs/hadoop hadoop/conf')
        run('ln -sf configs/hbase hbase/conf')

   

@roles('tbbt')
def deploy():
    """Commit local config changes, then refresh every node's checkout."""
    local('svn commit -m "fabric auto commit."')
    with cd(install_dir):
        run('svn update')

@roles('master')
def start_namenode():
    """Start the HDFS NameNode daemon on the master node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh start namenode' % install_dir
    run(cmd)

   

@roles('master')
def stop_namenode():
    """Shut down the HDFS NameNode daemon on the master node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh stop namenode' % install_dir
    run(cmd)

   

@roles('master')
def start_jobtracker():
    """Start the MapReduce JobTracker daemon on the master node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh start jobtracker' % install_dir
    run(cmd)

   

@roles('master')
def stop_jobtracker():
    """Shut down the MapReduce JobTracker daemon on the master node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh stop jobtracker' % install_dir
    run(cmd)

   

@roles('slaves')
def start_datanodes():
    """Start the HDFS DataNode daemon on every slave node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh start datanode' % install_dir
    run(cmd)

   

@roles('slaves')
def stop_datanodes():
    """Shut down the HDFS DataNode daemon on every slave node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh stop datanode' % install_dir
    run(cmd)

@roles('slaves')
def start_tasktrackers():
    """Start the MapReduce TaskTracker daemon on every slave node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh start tasktracker' % install_dir
    run(cmd)

   

@roles('slaves')
def stop_tasktrackers():
    """Shut down the MapReduce TaskTracker daemon on every slave node."""
    cmd = '%s/hadoop/bin/hadoop-daemon.sh stop tasktracker' % install_dir
    run(cmd)

@roles('master')
def start_hbasemaster():
    """Start the HBase Master daemon on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh start master' % install_dir
    run(cmd)

   

@roles('master')
def stop_hbasemaster():
    """Shut down the HBase Master daemon on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh stop master' % install_dir
    run(cmd)

   

@roles('master')
def start_zookeeper():
    """Start the ZooKeeper daemon managed by HBase on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh start zookeeper' % install_dir
    run(cmd)

   

@roles('master')
def stop_zookeeper():
    """Shut down the ZooKeeper daemon managed by HBase on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh stop zookeeper' % install_dir
    run(cmd)

   

@roles('slaves')
def start_hbaseregions():
    """Start the HBase RegionServer daemon on every slave node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh start regionserver' % install_dir
    run(cmd)

   

@roles('slaves')
def stop_hbaseregions():
    """Shut down the HBase RegionServer daemon on every slave node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh stop regionserver' % install_dir
    run(cmd)

   

@roles('master')
def start_hbasethrift():
    """Start the HBase Thrift gateway on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh start thrift' % install_dir
    run(cmd)

   

@roles('master')
def stop_hbasethrift():
    """Shut down the HBase Thrift gateway on the master node."""
    cmd = '%s/hbase/bin/hbase-daemon.sh stop thrift' % install_dir
    run(cmd)

@roles('master')
def start_dfs():
    """Start the whole HDFS layer (NameNode + DataNodes) from the master.

    BUG FIX: Hadoop ships `start-dfs.sh` (hyphen); the original called a
    non-existent `start_dfs.sh`.
    """
    run('%s/hadoop/bin/start-dfs.sh' % install_dir)

@roles('master')
def stop_dfs():
    """Stop the whole HDFS layer (NameNode + DataNodes) from the master.

    BUG FIX: Hadoop ships `stop-dfs.sh` (hyphen); the original called a
    non-existent `stop_dfs.sh`.
    """
    run('%s/hadoop/bin/stop-dfs.sh' % install_dir)

@roles('master')
def start_mapred():
    """Start the MapReduce layer (JobTracker + TaskTrackers) from the master.

    BUG FIX: Hadoop 0.20 ships `start-mapred.sh` (hyphen); the original
    called a non-existent `start_mapred.sh`.
    """
    run('%s/hadoop/bin/start-mapred.sh' % install_dir)

@roles('master')
def stop_mapred():
    """Stop the MapReduce layer (JobTracker + TaskTrackers) from the master.

    BUG FIX: Hadoop 0.20 ships `stop-mapred.sh` (hyphen); the original
    called a non-existent `stop_mapred.sh`.
    """
    run('%s/hadoop/bin/stop-mapred.sh' % install_dir)

@roles('master')
def start_hbase():
    """Start the full HBase cluster from the master.

    BUG FIX: HBase ships `start-hbase.sh` (hyphen); the original called a
    non-existent `start_hbase.sh`.
    """
    run('%s/hbase/bin/start-hbase.sh' % install_dir)

@roles('master')
def stop_hbase():
    """Stop the full HBase cluster from the master.

    BUG FIX: HBase ships `stop-hbase.sh` (hyphen); the original called a
    non-existent `stop_hbase.sh`.
    """
    run('%s/hbase/bin/stop-hbase.sh' % install_dir)

@roles('master')
def start_hadoop():
    """Start all Hadoop daemons (HDFS + MapReduce) from the master.

    BUG FIX: Hadoop ships `start-all.sh` (hyphen); the original called a
    non-existent `start_all.sh`.
    """
    run('%s/hadoop/bin/start-all.sh' % install_dir)

@roles('master')
def stop_hadoop():
    """Stop all Hadoop daemons (HDFS + MapReduce) from the master.

    BUG FIX: Hadoop ships `stop-all.sh` (hyphen); the original called a
    non-existent `stop_all.sh`.
    """
    run('%s/hadoop/bin/stop-all.sh' % install_dir)

@roles('master')
def start():
    """Start the full stack: Hadoop first, then HBase on top of HDFS.

    BUG FIX: uses the real hyphenated script names (`start-all.sh`,
    `start-hbase.sh`); the original's underscored names do not exist.
    """
    run('%s/hadoop/bin/start-all.sh' % install_dir)
    run('%s/hbase/bin/start-hbase.sh' % install_dir)

@roles('tbbt')
def stop():
    """Stop the full stack: HBase first, then Hadoop.

    BUG FIXES: the original looked for `stop_hbase.sh` under hadoop/bin
    (it lives in hbase/bin), used underscored script names that do not
    exist (`stop-hbase.sh`/`stop-all.sh` are the real ones), and stopped
    Hadoop before HBase — HBase must stop first so region servers can
    flush to HDFS while it is still up.
    """
    run('%s/hbase/bin/stop-hbase.sh' % install_dir)
    run('%s/hadoop/bin/stop-all.sh' % install_dir)

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值