Setting up a Spark Scala development environment in IDEA (with Maven support)

Reposted from: http://blog.csdn.net/liubiaoxin/article/details/50967875

1. Open IDEA and create a new Maven project






Once the Maven project has been created, you will see the following project structure:


2. Modify the pom.xml file

Add the following content:

    <repositories>
        <repository>
            <id>Akka repository</id>
            <url>http://repo.akka.io/releases</url>
        </repository>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
        <repository>
            <id>jboss</id>
            <url>http://repository.jboss.org/nexus/content/groups/public-jboss</url>
        </repository>
        <repository>
            <id>Sonatype snapshots</id>
            <url>http://oss.sonatype.org/content/repositories/snapshots/</url>
        </repository>
    </repositories>

    <build>
        <sourceDirectory>src/</sourceDirectory>
        <testSourceDirectory>src/</testSourceDirectory>

        <plugins>
            <!-- Compile the Scala sources -->
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <scalaVersion>2.10.3</scalaVersion>
                </configuration>
            </plugin>

            <!-- Package a shaded (fat) jar, dropping signature files and merging reference.conf -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.2</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <transformers>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                    <resource>reference.conf</resource>
                                </transformer>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <!-- spark -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.5.0-cdh5.3.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-tools_2.10</artifactId>
            <version>1.1.0-cdh5.2.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-assembly_2.10</artifactId>
            <version>1.2.0-cdh5.3.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-repl_2.10</artifactId>
            <version>1.2.0-cdh5.3.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-catalyst_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-network-common_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>

        <!-- spark on yarn -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-yarn_2.10</artifactId>
            <version>1.2.0-cdh5.3.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-network-yarn_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>

        <!-- spark-sql -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive-thriftserver_2.10</artifactId>
            <version>1.2.0-cdh5.3.3</version>
        </dependency>

        <!-- spark-streaming -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-flume_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-flume-sink_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka_2.10</artifactId>
            <version>1.2.0-cdh5.3.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-assembly_2.10</artifactId>
            <version>1.3.0-cdh5.4.0</version>
        </dependency>
        <dependency>
            <groupId>org.tachyonproject</groupId>
            <artifactId>tachyon</artifactId>
            <version>0.5.0</version>
        </dependency>
    </dependencies>


3. Resolve the Maven dependencies: run Reimport, then Generate Sources and Update Folders



Once the dependencies have finished downloading, you will see the following interface:


4. Install the Scala plugin, add Scala support to the project, and add the Maven dependency libraries








5. Verify that the environment was set up successfully

Write some Spark code: if the IDE can navigate to the Spark sources correctly and the editor reports no errors, the Spark environment has been set up successfully. A small test program is sketched below.
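The following is a minimal sketch of such a test, assuming the Spark 1.x API (SparkConf/SparkContext) pulled in by the pom.xml above and a local master; the object name SparkEnvTest and the sample data are illustrative only.

import org.apache.spark.SparkContext._   // brings in reduceByKey on pair RDDs for Spark 1.2.x
import org.apache.spark.{SparkConf, SparkContext}

// Tiny word-count job, used only to confirm that the IDE resolves the
// Spark classes from the Maven dependencies declared above.
object SparkEnvTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("SparkEnvTest")
      .setMaster("local[2]")   // run inside the IDE, no cluster required
    val sc = new SparkContext(conf)

    val counts = sc.parallelize(Seq("hello spark", "hello scala"))
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)

    counts.collect().foreach(println)
    sc.stop()
  }
}

If this compiles and prints the word counts when run, the Scala SDK, the Maven dependencies, and the Spark classpath are all wired up correctly. To run against a cluster instead, package the shaded jar produced by the maven-shade-plugin configuration above and submit it with spark-submit.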