[Hadoop] A Runnable Spark Code Example

Original article; please credit http://blog.csdn.net/lsttoy/article/details/53335920 when reposting.
This demo is adapted from Spark's bundled examples.
I have tested it myself and verified that it runs.
The code to package into a jar is as follows:

package lekko.spark;

import java.io.File;
import java.util.Arrays;
import java.util.Iterator;
import java.util.regex.Pattern;

import org.apache.commons.io.FileUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;

import scala.Tuple2;

public class SparkDemo {
    private static JavaSparkContext javaSparkContext = null;

    private static final Pattern SPACE = Pattern.compile(" ");

    public static void init(String appName, String master) {
        SparkConf conf = new SparkConf().setAppName(appName).setMaster(master);
        javaSparkContext = new JavaSparkContext(conf);
    }

    @SuppressWarnings("serial")
    private static void wordCount(String filePath, String fileDir) {
        // saveAsTextFile fails if the output directory already exists,
        // so delete it first (this works for local paths only, not HDFS).
        FileUtils.deleteQuietly(new File(fileDir));

        JavaRDD<String> file = javaSparkContext.textFile(filePath);

        // Split each line on spaces to produce individual words.
        JavaRDD<String> words = file.flatMap(new FlatMapFunction<String, String>() {

            @Override
            public Iterator<String> call(String s) {
                return Arrays.asList(SPACE.split(s)).iterator();
            }
        });

        // Map each word to a (word, 1) pair.
        JavaPairRDD<String, Integer> pairs = words.mapToPair(new PairFunction<String, String, Integer>() {
            @Override
            public Tuple2<String, Integer> call(String s) {
                return new Tuple2<String, Integer>(s, 1);
            }
        });

        // Sum the 1s per key to get each word's count.
        JavaPairRDD<String, Integer> counts = pairs.reduceByKey(new Function2<Integer, Integer, Integer>() {
            @Override
            public Integer call(Integer a, Integer b) {
                return a + b;
            }
        });

        // saveAsTextFile is an action: it triggers the whole job and writes one part file per partition.
        counts.saveAsTextFile(fileDir);
    }

    @SuppressWarnings("serial")
    private static void errorCount(String filePath) {
        JavaRDD<String> file = javaSparkContext.textFile(filePath);

        JavaRDD<String> errors = file.filter(new Function<String, Boolean>() {
            @Override
            public Boolean call(String s) {
                return s.contains("ERROR");
            }
        });

        errors.count();

        errors.filter(new Function<String, Boolean>() {
            @Override
            public Boolean call(String s) {
                return s.contains("ORACLE");
            }
        }).count();

        errors.filter(new Function<String, Boolean>() {
            @Override
            public Boolean call(String s) {
                return s.contains("ORACLE");
            }
        }).collect();
    }

    public static void main(String[] args) {
        if (args.length < 2) {
            System.err.println("Please provide the input file path and the output directory as arguments");
            System.exit(1);
        }

        SparkDemo.init("lekko.spark", "local");

        SparkDemo.wordCount(args[0], args[1]);

        SparkDemo.errorCount(args[0]);

        // Stop the context explicitly rather than relying on the shutdown hook.
        javaSparkContext.stop();
    }
}  
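
For reference, Spark 2.x's Java API also accepts Java 8 lambdas, so each anonymous class above collapses to a single line. A minimal sketch of the same word count, assuming the same class fields (javaSparkContext, SPACE) and imports as above; wordCountLambda is a hypothetical drop-in replacement, not part of the original demo:

    // Lambda-based equivalent of wordCount (Spark 2.x, Java 8+).
    private static void wordCountLambda(String filePath, String fileDir) {
        FileUtils.deleteQuietly(new File(fileDir));

        JavaPairRDD<String, Integer> counts = javaSparkContext
                .textFile(filePath)
                .flatMap(s -> Arrays.asList(SPACE.split(s)).iterator())   // line -> words
                .mapToPair(s -> new Tuple2<String, Integer>(s, 1))        // word -> (word, 1)
                .reduceByKey((a, b) -> a + b);                            // sum counts per word

        counts.saveAsTextFile(fileDir);
    }

Since lambdas replace the serializable anonymous classes, the @SuppressWarnings("serial") annotation is no longer needed in this variant.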

After packaging is complete, submit the jar with the following command:

./bin/spark-submit --class lekko.spark.SparkDemo --master local[2] mylib/spark-demo-1.0.jar /home/hadoop/input/lekkoTest.txt /home/hadoop/output
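
One caveat about --master local[2]: properties set on the SparkConf in code take precedence over spark-submit flags, and init hard-codes setMaster("local"), so the flag above is effectively ignored and the job runs single-threaded. A small sketch of an init variant (my addition, not the original code) that only falls back to local mode when spark-submit did not supply a master:

    public static void init(String appName) {
        SparkConf conf = new SparkConf().setAppName(appName);
        // Honor --master when spark-submit provides one; otherwise fall back
        // to local mode (e.g. for runs inside an IDE).
        if (!conf.contains("spark.master")) {
            conf.setMaster("local");
        }
        javaSparkContext = new JavaSparkContext(conf);
    }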

The log output from the completed run:

Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
16/11/25 15:50:52 INFO SparkContext: Running Spark version 2.0.2
16/11/25 15:50:53 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/11/25 15:50:53 INFO SecurityManager: Changing view acls to: root
16/11/25 15:50:53 INFO SecurityManager: Changing modify acls to: root
16/11/25 15:50:53 INFO SecurityManager: Changing view acls groups to: 
16/11/25 15:50:53 INFO SecurityManager: Changing modify acls groups to: 
16/11/25 15:50:53 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(root); groups with view permissions: Set(); users  with modify permissions: Set(root); groups with modify permissions: Set()
16/11/25 15:50:54 INFO Utils: Successfully started service 'sparkDriver' on port 31795.
16/11/25 15:50:54 INFO SparkEnv: Registering MapOutputTracker
16/11/25 15:50:54 INFO SparkEnv: Registering BlockManagerMaster
16/11/25 15:50:54 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-e8b5188c-8e26-4f4b-af78-d43e5665100b
16/11/25 15:50:54 INFO MemoryStore: MemoryStore started with capacity 413.9 MB
16/11/25 15:50:55 INFO SparkEnv: Registering OutputCommitCoordinator
16/11/25 15:50:55 INFO Utils: Successfully started service 'SparkUI' on port 4040.
16/11/25 15:50:55 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://114.215.182.147:4040
16/11/25 15:50:55 INFO SparkContext: Added JAR file:/usr/local/spark-2.0.2-bin-hadoop2.7/mylib/spark-demo-1.0.jar at spark://114.215.182.147:31795/jars/spark-demo-1.0.jar with timestamp 1480060255740
16/11/25 15:50:55 INFO Executor: Starting executor ID driver on host localhost
16/11/25 15:50:56 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 52378.
16/11/25 15:50:56 INFO NettyBlockTransferService: Server created on 114.215.182.147:52378
16/11/25 15:50:56 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 114.215.182.147, 52378)
16/11/25 15:50:56 INFO BlockManagerMasterEndpoint: Registering block manager 114.215.182.147:52378 with 413.9 MB RAM, BlockManagerId(driver, 114.215.182.147, 52378)
16/11/25 15:50:56 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 114.215.182.147, 52378)
16/11/25 15:50:59 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 236.5 KB, free 413.7 MB)
16/11/25 15:50:59 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 22.9 KB, free 413.7 MB)
16/11/25 15:50:59 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on 114.215.182.147:52378 (size: 22.9 KB, free: 413.9 MB)
16/11/25 15:50:59 INFO SparkContext: Created broadcast 0 from textFile at SparkDemo.java:34
16/11/25 15:51:00 INFO FileInputFormat: Total input paths to process : 1
16/11/25 15:51:01 INFO deprecation: mapred.tip.id is deprecated. Instead, use mapreduce.task.id
16/11/25 15:51:01 INFO deprecation: mapred.task.id is deprecated. Instead, use mapreduce.task.attempt.id
16/11/25 15:51:01 INFO deprecation: mapred.task.is.map is deprecated. Instead, use mapreduce.task.ismap
16/11/25 15:51:01 INFO deprecation: mapred.task.partition is deprecated. Instead, use mapreduce.task.partition
16/11/25 15:51:01 INFO deprecation: mapred.job.id is deprecated. Instead, use mapreduce.job.id
16/11/25 15:51:01 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
16/11/25 15:51:02 INFO SparkContext: Starting job: saveAsTextFile at SparkDemo.java:58
16/11/25 15:51:02 INFO DAGScheduler: Registering RDD 3 (mapToPair at SparkDemo.java:44)
16/11/25 15:51:02 INFO DAGScheduler: Got job 0 (saveAsTextFile at SparkDemo.java:58) with 1 output partitions
16/11/25 15:51:02 INFO DAGScheduler: Final stage: ResultStage 1 (saveAsTextFile at SparkDemo.java:58)
16/11/25 15:51:02 INFO DAGScheduler: Parents of final stage: List(ShuffleMapStage 0)
16/11/25 15:51:02 INFO DAGScheduler: Missing parents: List(ShuffleMapStage 0)
16/11/25 15:51:02 INFO DAGScheduler: Submitting ShuffleMapStage 0 (MapPartitionsRDD[3] at mapToPair at SparkDemo.java:44), which has no missing parents
16/11/25 15:51:02 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 4.8 KB, free 413.7 MB)
16/11/25 15:51:02 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 2.7 KB, free 413.7 MB)
16/11/25 15:51:02 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on 114.215.182.147:52378 (size: 2.7 KB, free: 413.9 MB)
16/11/25 15:51:02 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1012
16/11/25 15:51:02 INFO DAGScheduler: Submitting 1 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[3] at mapToPair at SparkDemo.java:44)
16/11/25 15:51:02 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
16/11/25 15:51:03 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, partition 0, PROCESS_LOCAL, 5463 bytes)
16/11/25 15:51:03 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
16/11/25 15:51:03 INFO Executor: Fetching spark://114.215.182.147:31795/jars/spark-demo-1.0.jar with timestamp 1480060255740
16/11/25 15:51:03 INFO TransportClientFactory: Successfully created connection to /114.215.182.147:31795 after 226 ms (0 ms spent in bootstraps)
16/11/25 15:51:03 INFO Utils: Fetching spark://114.215.182.147:31795/jars/spark-demo-1.0.jar to /tmp/spark-eeca4050-8677-47ba-af8e-624991f62eb8/userFiles-202418bc-6c29-4698-86d2-22e1e3da11f2/fetchFileTemp5278779464615861076.tmp
16/11/25 15:51:04 INFO Executor: Adding file:/tmp/spark-eeca4050-8677-47ba-af8e-624991f62eb8/userFiles-202418bc-6c29-4698-86d2-22e1e3da11f2/spark-demo-1.0.jar to class loader
16/11/25 15:51:04 INFO HadoopRDD: Input split: file:/home/hadoop/input/lekkoTest.txt:0+125
16/11/25 15:51:05 INFO Executor: Finished task 0.0 in stage 0.0 (TID 0). 1650 bytes result sent to driver
16/11/25 15:51:05 INFO TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 2615 ms on localhost (1/1)
16/11/25 15:51:05 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool 
16/11/25 15:51:05 INFO DAGScheduler: ShuffleMapStage 0 (mapToPair at SparkDemo.java:44) finished in 2.758 s
16/11/25 15:51:05 INFO DAGScheduler: looking for newly runnable stages
16/11/25 15:51:05 INFO DAGScheduler: running: Set()
16/11/25 15:51:05 INFO DAGScheduler: waiting: Set(ResultStage 1)
16/11/25 15:51:05 INFO DAGScheduler: failed: Set()
16/11/25 15:51:05 INFO DAGScheduler: Submitting ResultStage 1 (MapPartitionsRDD[5] at saveAsTextFile at SparkDemo.java:58), which has no missing parents
16/11/25 15:51:05 INFO MemoryStore: Block broadcast_2 stored as values in memory (estimated size 72.8 KB, free 413.6 MB)
16/11/25 15:51:05 INFO MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 26.3 KB, free 413.6 MB)
16/11/25 15:51:05 INFO BlockManagerInfo: Added broadcast_2_piece0 in memory on 114.215.182.147:52378 (size: 26.3 KB, free: 413.9 MB)
16/11/25 15:51:05 INFO SparkContext: Created broadcast 2 from broadcast at DAGScheduler.scala:1012
16/11/25 15:51:05 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 1 (MapPartitionsRDD[5] at saveAsTextFile at SparkDemo.java:58)
16/11/25 15:51:05 INFO TaskSchedulerImpl: Adding task set 1.0 with 1 tasks
16/11/25 15:51:05 INFO TaskSetManager: Starting task 0.0 in stage 1.0 (TID 1, localhost, partition 0, ANY, 5239 bytes)
16/11/25 15:51:05 INFO Executor: Running task 0.0 in stage 1.0 (TID 1)
16/11/25 15:51:06 INFO ShuffleBlockFetcherIterator: Getting 1 non-empty blocks out of 1 blocks
16/11/25 15:51:06 INFO ShuffleBlockFetcherIterator: Started 0 remote fetches in 59 ms
16/11/25 15:51:06 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
16/11/25 15:51:06 INFO FileOutputCommitter: Saved output of task 'attempt_201611251551_0001_m_000000_1' to file:/home/hadoop/output/_temporary/0/task_201611251551_0001_m_000000
16/11/25 15:51:06 INFO SparkHadoopMapRedUtil: attempt_201611251551_0001_m_000000_1: Committed
16/11/25 15:51:06 INFO Executor: Finished task 0.0 in stage 1.0 (TID 1). 1808 bytes result sent to driver
16/11/25 15:51:06 INFO DAGScheduler: ResultStage 1 (saveAsTextFile at SparkDemo.java:58) finished in 1.004 s
16/11/25 15:51:06 INFO TaskSetManager: Finished task 0.0 in stage 1.0 (TID 1) in 1005 ms on localhost (1/1)
16/11/25 15:51:06 INFO TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool 
16/11/25 15:51:07 INFO DAGScheduler: Job 0 finished: saveAsTextFile at SparkDemo.java:58, took 4.878449 s
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_3 stored as values in memory (estimated size 236.5 KB, free 413.3 MB)
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_3_piece0 stored as bytes in memory (estimated size 22.9 KB, free 413.3 MB)
16/11/25 15:51:07 INFO BlockManagerInfo: Added broadcast_3_piece0 in memory on 114.215.182.147:52378 (size: 22.9 KB, free: 413.9 MB)
16/11/25 15:51:07 INFO SparkContext: Created broadcast 3 from textFile at SparkDemo.java:63
16/11/25 15:51:07 INFO BlockManagerInfo: Removed broadcast_2_piece0 on 114.215.182.147:52378 in memory (size: 26.3 KB, free: 413.9 MB)
16/11/25 15:51:07 INFO FileInputFormat: Total input paths to process : 1
16/11/25 15:51:07 INFO SparkContext: Starting job: count at SparkDemo.java:72
16/11/25 15:51:07 INFO DAGScheduler: Got job 1 (count at SparkDemo.java:72) with 1 output partitions
16/11/25 15:51:07 INFO DAGScheduler: Final stage: ResultStage 2 (count at SparkDemo.java:72)
16/11/25 15:51:07 INFO DAGScheduler: Parents of final stage: List()
16/11/25 15:51:07 INFO DAGScheduler: Missing parents: List()
16/11/25 15:51:07 INFO DAGScheduler: Submitting ResultStage 2 (MapPartitionsRDD[8] at filter at SparkDemo.java:65), which has no missing parents
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_4 stored as values in memory (estimated size 3.2 KB, free 413.4 MB)
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_4_piece0 stored as bytes in memory (estimated size 1946.0 B, free 413.4 MB)
16/11/25 15:51:07 INFO BlockManagerInfo: Added broadcast_4_piece0 in memory on 114.215.182.147:52378 (size: 1946.0 B, free: 413.9 MB)
16/11/25 15:51:07 INFO SparkContext: Created broadcast 4 from broadcast at DAGScheduler.scala:1012
16/11/25 15:51:07 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 2 (MapPartitionsRDD[8] at filter at SparkDemo.java:65)
16/11/25 15:51:07 INFO TaskSchedulerImpl: Adding task set 2.0 with 1 tasks
16/11/25 15:51:07 INFO TaskSetManager: Starting task 0.0 in stage 2.0 (TID 2, localhost, partition 0, PROCESS_LOCAL, 5383 bytes)
16/11/25 15:51:07 INFO Executor: Running task 0.0 in stage 2.0 (TID 2)
16/11/25 15:51:07 INFO HadoopRDD: Input split: file:/home/hadoop/input/lekkoTest.txt:0+125
16/11/25 15:51:07 INFO Executor: Finished task 0.0 in stage 2.0 (TID 2). 1041 bytes result sent to driver
16/11/25 15:51:07 INFO DAGScheduler: ResultStage 2 (count at SparkDemo.java:72) finished in 0.077 s
16/11/25 15:51:07 INFO DAGScheduler: Job 1 finished: count at SparkDemo.java:72, took 0.172454 s
16/11/25 15:51:07 INFO TaskSetManager: Finished task 0.0 in stage 2.0 (TID 2) in 89 ms on localhost (1/1)
16/11/25 15:51:07 INFO TaskSchedulerImpl: Removed TaskSet 2.0, whose tasks have all completed, from pool 
16/11/25 15:51:07 INFO SparkContext: Starting job: count at SparkDemo.java:79
16/11/25 15:51:07 INFO DAGScheduler: Got job 2 (count at SparkDemo.java:79) with 1 output partitions
16/11/25 15:51:07 INFO DAGScheduler: Final stage: ResultStage 3 (count at SparkDemo.java:79)
16/11/25 15:51:07 INFO DAGScheduler: Parents of final stage: List()
16/11/25 15:51:07 INFO DAGScheduler: Missing parents: List()
16/11/25 15:51:07 INFO DAGScheduler: Submitting ResultStage 3 (MapPartitionsRDD[9] at filter at SparkDemo.java:74), which has no missing parents
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_5 stored as values in memory (estimated size 3.3 KB, free 413.4 MB)
16/11/25 15:51:07 INFO BlockManagerInfo: Removed broadcast_4_piece0 on 114.215.182.147:52378 in memory (size: 1946.0 B, free: 413.9 MB)
16/11/25 15:51:07 INFO MemoryStore: Block broadcast_5_piece0 stored as bytes in memory (estimated size 1993.0 B, free 413.4 MB)
16/11/25 15:51:07 INFO BlockManagerInfo: Added broadcast_5_piece0 in memory on 114.215.182.147:52378 (size: 1993.0 B, free: 413.9 MB)
16/11/25 15:51:07 INFO SparkContext: Created broadcast 5 from broadcast at DAGScheduler.scala:1012
16/11/25 15:51:07 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 3 (MapPartitionsRDD[9] at filter at SparkDemo.java:74)
16/11/25 15:51:07 INFO TaskSchedulerImpl: Adding task set 3.0 with 1 tasks
16/11/25 15:51:07 INFO TaskSetManager: Starting task 0.0 in stage 3.0 (TID 3, localhost, partition 0, PROCESS_LOCAL, 5383 bytes)
16/11/25 15:51:07 INFO Executor: Running task 0.0 in stage 3.0 (TID 3)
16/11/25 15:51:07 INFO HadoopRDD: Input split: file:/home/hadoop/input/lekkoTest.txt:0+125
16/11/25 15:51:07 INFO Executor: Finished task 0.0 in stage 3.0 (TID 3). 1041 bytes result sent to driver
16/11/25 15:51:07 INFO DAGScheduler: ResultStage 3 (count at SparkDemo.java:79) finished in 0.058 s
16/11/25 15:51:07 INFO DAGScheduler: Job 2 finished: count at SparkDemo.java:79, took 0.168110 s
16/11/25 15:51:07 INFO TaskSetManager: Finished task 0.0 in stage 3.0 (TID 3) in 72 ms on localhost (1/1)
16/11/25 15:51:08 INFO TaskSchedulerImpl: Removed TaskSet 3.0, whose tasks have all completed, from pool 
16/11/25 15:51:08 INFO SparkContext: Starting job: collect at SparkDemo.java:86
16/11/25 15:51:08 INFO DAGScheduler: Got job 3 (collect at SparkDemo.java:86) with 1 output partitions
16/11/25 15:51:08 INFO DAGScheduler: Final stage: ResultStage 4 (collect at SparkDemo.java:86)
16/11/25 15:51:08 INFO DAGScheduler: Parents of final stage: List()
16/11/25 15:51:08 INFO DAGScheduler: Missing parents: List()
16/11/25 15:51:08 INFO DAGScheduler: Submitting ResultStage 4 (MapPartitionsRDD[10] at filter at SparkDemo.java:81), which has no missing parents
16/11/25 15:51:08 INFO MemoryStore: Block broadcast_6 stored as values in memory (estimated size 3.4 KB, free 413.4 MB)
16/11/25 15:51:08 INFO MemoryStore: Block broadcast_6_piece0 stored as bytes in memory (estimated size 2031.0 B, free 413.4 MB)
16/11/25 15:51:08 INFO BlockManagerInfo: Added broadcast_6_piece0 in memory on 114.215.182.147:52378 (size: 2031.0 B, free: 413.9 MB)
16/11/25 15:51:08 INFO SparkContext: Created broadcast 6 from broadcast at DAGScheduler.scala:1012
16/11/25 15:51:08 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 4 (MapPartitionsRDD[10] at filter at SparkDemo.java:81)
16/11/25 15:51:08 INFO TaskSchedulerImpl: Adding task set 4.0 with 1 tasks
16/11/25 15:51:08 INFO TaskSetManager: Starting task 0.0 in stage 4.0 (TID 4, localhost, partition 0, PROCESS_LOCAL, 5468 bytes)
16/11/25 15:51:08 INFO Executor: Running task 0.0 in stage 4.0 (TID 4)
16/11/25 15:51:08 INFO HadoopRDD: Input split: file:/home/hadoop/input/lekkoTest.txt:0+125
16/11/25 15:51:08 INFO Executor: Finished task 0.0 in stage 4.0 (TID 4). 954 bytes result sent to driver
16/11/25 15:51:08 INFO TaskSetManager: Finished task 0.0 in stage 4.0 (TID 4) in 69 ms on localhost (1/1)
16/11/25 15:51:08 INFO TaskSchedulerImpl: Removed TaskSet 4.0, whose tasks have all completed, from pool 
16/11/25 15:51:08 INFO DAGScheduler: ResultStage 4 (collect at SparkDemo.java:86) finished in 0.055 s
16/11/25 15:51:08 INFO DAGScheduler: Job 3 finished: collect at SparkDemo.java:86, took 0.155183 s
16/11/25 15:51:08 INFO SparkContext: Invoking stop() from shutdown hook
16/11/25 15:51:08 INFO SparkUI: Stopped Spark web UI at http://114.215.182.147:4040
16/11/25 15:51:08 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/11/25 15:51:08 INFO MemoryStore: MemoryStore cleared
16/11/25 15:51:08 INFO BlockManager: BlockManager stopped
16/11/25 15:51:08 INFO BlockManagerMaster: BlockManagerMaster stopped
16/11/25 15:51:08 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/11/25 15:51:08 INFO SparkContext: Successfully stopped SparkContext
16/11/25 15:51:08 INFO ShutdownHookManager: Shutdown hook called
16/11/25 15:51:08 INFO ShutdownHookManager: Deleting directory /tmp/spark-eeca4050-8677-47ba-af8e-624991f62eb8
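
If the run succeeds, /home/hadoop/output contains a _SUCCESS marker plus a Hadoop-style part file (a single part-00000 here, since the job ran with one output partition). saveAsTextFile writes each pair via Tuple2.toString(), so for a hypothetical lekkoTest.txt with repeated words the output lines would look roughly like:

(hello,3)
(spark,2)
(ERROR,1)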