Pig installation and usage

[hadoop@node1 ~]$ tar -zxvf pig-0.15.0.tar.gz      --extracts to the pig-0.15.0 directory
Pig's built-in functions all live in these two jars; they are worth browsing:
[hadoop@node1 pig-0.15.0]$ jar -tvf pig-0.15.0-core-h1.jar
[hadoop@node1 pig-0.15.0]$ jar -tvf pig-0.15.0-core-h2.jar
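These built-in functions (UPPER, COUNT, SUM, and so on) can be called directly, with no REGISTER step. A minimal sketch, assuming the emp2.txt test file that is created further down in this post:

grunt> a = load 'emp2.txt' using PigStorage(',') as (id:int, name:chararray, age:int, dept:chararray);
grunt> b = foreach a generate UPPER(name);
grunt> dump b;               --expected output:
(TOM)
(LILY)
(LUCY)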


[hadoop@node1 pig-0.15.0]$ sudo vi /etc/profile
export JAVA_HOME=/home/hadoop/jdk1.7.0_67
export HADOOP_HOME=/home/hadoop/hadoop-2.7.1
export ZK_HOME=/home/hadoop/zookeeper-3.4.6
export HIVE_HOME=/home/hadoop/apache-hive-1.2.1-bin
export HBASE_HOME=/home/hadoop/hbase-1.1.2
export SQOOP_HOME=/home/hadoop/sqoop-1.4.6.bin__hadoop-2.0.4-alpha
export FLUME_HOME=/home/hadoop/apache-flume-1.6.0-bin
export PIG_HOME=/home/hadoop/pig-0.15.0
export PATH=$PATH:${JAVA_HOME}/bin:${HADOOP_HOME}/bin:${HADOOP_HOME}/sbin:${ZK_HOME}/bin:${HIVE_HOME}/bin:${HBASE_HOME}/bin:${SQOOP_HOME}/bin:${FLUME_HOME}/bin:${PIG_HOME}/bin

[hadoop@node1 pig-0.15.0]$ source  /etc/profile

--pig-env.sh (optional)
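If you do create it, conf/pig-env.sh is sourced by bin/pig at startup, so it only needs whatever Pig cannot pick up from the environment. A minimal sketch, assuming the same paths as in /etc/profile above:

# conf/pig-env.sh (optional)
export JAVA_HOME=/home/hadoop/jdk1.7.0_67
# point Pig at the Hadoop client config so it runs against the cluster by default
export PIG_CLASSPATH=${HADOOP_HOME}/etc/hadoop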

--start ZooKeeper and Hadoop
[hadoop@node1 bin]$ zkServer.sh start
[hadoop@node1 bin]$ start-all.sh

[hadoop@node1 conf]$ hadoop fs -ls /             --confirms HDFS is up
drwxr-xr-x   - hadoop supergroup          0 2015-11-24 16:48 /hbase
drwxr-xr-x   - hadoop supergroup          0 2015-11-24 11:27 /sqoop
drwxr-xr-x   - hadoop supergroup          0 2015-11-25 15:55 /test
drwxrwxrwx   - hadoop supergroup          0 2015-11-11 20:44 /tmp
drwxr-xr-x   - hadoop supergroup          0 2015-11-12 21:02 /user
drwxr-xr-x   - hadoop supergroup          0 2015-11-05 14:26 /usr

[hadoop@node1 ~]$ hadoop fs -copyFromLocal /home/hadoop/emp.txt /test
[hadoop@node1 ~]$ hadoop fs -cat /test/emp.txt
1,张三,32,销售部
2,李四,31,销售部
3,王五,33,销售部
4,孙六,34,销售部


--local mode
[hadoop@node1 test]$ pig -x local
grunt> A = load 'passwd' using PigStorage(':');
grunt> B = foreach A generate $0 as id;
grunt> store B into 'id.out';
[hadoop@node1 test]$ ls id.out/
part-m-00000  _SUCCESS



--HDFS (MapReduce) mode
[hadoop@node1 ~]$ hadoop fs -copyFromLocal /home/hadoop/test/passwd /test
[hadoop@node1 ~]$ hadoop fs -cat /test/passwd

[hadoop@node1 test]$ pig
grunt> A = load '/test/passwd' using PigStorage(':');
grunt> B = foreach A generate $0 as id;
grunt> store B into '/test/id.out';
[hadoop@node1 ~]$ hadoop fs -cat /test/id.out/p*

--running a script
pig -x local id.pig          --local mode
pig id.pig                   --MapReduce mode



vi id.pig
A = load 'passwd' using PigStorage(':');
B = foreach A generate $0 as id;
store B into 'id.out';

pig -x local id.pig




--comments
For multi-line comments use /* ... */

For single-line comments use --
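Both styles in one script, using the id.pig example from above:

/* id.pig -- pull the first field out of each line of passwd */
A = load 'passwd' using PigStorage(':');     -- ':' is the field delimiter
B = foreach A generate $0 as id;             -- $0 is the first column
store B into 'id.out';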


A = LOAD '/test/emp.txt' USING PigStorage(',') AS (id,name,age,dept);      --spaces are needed around the equals sign
DUMP A;
B = FOREACH A GENERATE name;      --field names are case-sensitive; the schema declared lowercase name
STORE B INTO '/test/out' USING PigStorage(';');




---------------------------------------------------------------
--Pig syntax tests
---------------------------------------------------------------


--official documentation:
http://pig.apache.org/docs/r0.15.0/basic.html

--test data

cat>emp1.txt<<!
1,张三,32,销售部
2,李四,31,研发部
3,王五,33,售后部
4,孙六,34,后勤部
!

cat>emp2.txt<<!
1,tom,41,研发部
2,lily,26,人事部
3,lucy,27,人事部
!




--running Linux-style commands from the Grunt shell
[hadoop@node1 test]$ pig -x local
grunt> pwd
file:/home/hadoop/test

[hadoop@node1 test]$ pig
grunt> pwd
hdfs://odscluster/user/hadoop
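Besides pwd, Grunt has other file commands (ls, cd, cat, mkdir, rm), plus two escapes: fs runs any hadoop fs subcommand, and sh (Pig 0.8+) runs a command in a local shell. For example:

grunt> ls /test
grunt> fs -ls /test          --any hadoop fs subcommand
grunt> sh date               --runs on the local machine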



--foreach: generate (project) selected columns
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
a1 = foreach a generate $0 as id1,$1 as name1;
store a1 into 'a1.out';
[hadoop@node1 test]$ cat a1.out/*
1       张三
2       李四
3       王五
4       孙六



--filter: WHERE-style conditions
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
a1 = filter a by id>1 and id<4;
store a1 into 'a1.out';

[hadoop@node1 a1.out]$ cat part-m-00000 
2       李四    31      研发部
3       王五    33      售后部


--limit: view the first 3 rows (rownum equivalent)
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
a1 = limit a 3;
store a1 into 'a1.out';

[hadoop@node1 a1.out]$ cat *
1       张三    32      销售部
2       李四    31      研发部
3       王五    33      售后部


--group by
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
a1 = group a by dept;
store a1 into 'a1.out';

[hadoop@node1 test]$ cat a1.out/*
后勤部  {(4,孙六,34,后勤部)}
售后部  {(3,王五,33,售后部)}
销售部  {(1,张三,32,销售部)}
研发部  {(2,李四,31,研发部)}



a = load 'emp2.txt' using PigStorage(',') as (id,name,age,dept);
a1 = group a by dept;
store a1 into 'a1.out';

[hadoop@node1 test]$ cat a1.out/*
人事部  {(3,lucy,27,人事部),(2,lily,26,人事部)}
研发部  {(1,tom,41,研发部)}
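A grouped relation is normally fed into an aggregate. For example, counting employees per department (a sketch building on the emp2.txt grouping above):

a = load 'emp2.txt' using PigStorage(',') as (id,name,age,dept);
a1 = group a by dept;
a2 = foreach a1 generate group as dept, COUNT(a) as cnt;
dump a2;                     --expected (row order may vary):
(人事部,2)
(研发部,1)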



--order by ... desc (sort)
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
a1 = order a by age desc;
store a1 into 'a1.out';

[hadoop@node1 test]$ cat a1.out/*
4       孙六    34      后勤部
3       王五    33      售后部
1       张三    32      销售部
2       李四    31      研发部


--join: two-table join
pig -x local
a = load 'emp1.txt' using PigStorage(',') as (id,name,age,dept);
b = load 'emp2.txt' using PigStorage(',') as (id,name,age,dept);
a_b = join a by id,b by id;
store a_b into 'a_b.out';

[hadoop@node1 a_b.out]$ cat part-r-00000 
1       张三    32      销售部  1       tom     41      研发部
2       李四    31      研发部  2       lily    26      人事部
3       王五    33      售后部  3       lucy    27      人事部
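join is an inner join by default, which is why emp1's id=4 (孙六) is missing here: emp2.txt has no id=4. To keep every row of a, switch to a left outer join, for example:

a_b2 = join a by id left outer, b by id;
store a_b2 into 'a_b2.out';
--id=4 now appears with empty (null) fields on the b side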
