Spark's org.apache.spark.network.util.JavaUtils

Spark's recursive directory deletion tries two approaches.

If the first one, deleteRecursivelyUsingUnixNative, does not succeed, it immediately falls back to the second.

// org.apache.spark.network.util.JavaUtils.java
  /**
   * Delete a file or directory and its contents recursively.
   * Don't follow directories if they are symlinks.
   *
   * @param file Input file / dir to be deleted
   * @throws IOException if deletion is unsuccessful
   */
  public static void deleteRecursively(File file) throws IOException {
    if (file == null) { return; }

    // On Unix systems, use operating system command to run faster
    // If that does not work out, fallback to the Java IO way
    if (SystemUtils.IS_OS_UNIX) {
      try {
        deleteRecursivelyUsingUnixNative(file);
        return;
      } catch (IOException e) {
        logger.warn("Attempt to delete using native Unix OS command failed for path = {}. " +
                        "Falling back to Java IO way", file.getAbsolutePath(), e);
      }
    }

    deleteRecursivelyUsingJavaIO(file);
  }
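For reference, here is a minimal sketch of calling this utility from application code. Only the JavaUtils.deleteRecursively(File) signature is taken from the snippet above; the class name and the temporary path are made up for illustration, and spark-network-common is assumed to be on the classpath.

// Hypothetical caller, not part of Spark
import java.io.File;
import java.io.IOException;

import org.apache.spark.network.util.JavaUtils;

public class CleanupExample {
  public static void main(String[] args) {
    File scratchDir = new File("/tmp/spark-scratch-example");   // made-up path
    try {
      JavaUtils.deleteRecursively(scratchDir);
      System.out.println("Deleted " + scratchDir.getAbsolutePath());
    } catch (IOException e) {
      // thrown when neither the Unix command nor the Java IO fallback succeeds
      System.err.println("Cleanup failed: " + e.getMessage());
    }
  }
}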

deleteRecursivelyUsingUnixNative

Deletes the path with the Unix rm -rf command.

// org.apache.spark.network.util.JavaUtils.java
  private static void deleteRecursivelyUsingUnixNative(File file) throws IOException {
    ProcessBuilder builder = new ProcessBuilder("rm", "-rf", file.getAbsolutePath());
    Process process = null;
    int exitCode = -1;

    try {
      // In order to avoid deadlocks, consume the stdout (and stderr) of the process
      builder.redirectErrorStream(true);
      builder.redirectOutput(new File("/dev/null"));

      process = builder.start();

      exitCode = process.waitFor();
    } catch (Exception e) {
      throw new IOException("Failed to delete: " + file.getAbsolutePath(), e);
    } finally {
      if (process != null) {
        process.destroy();
      }
    }

    if (exitCode != 0 || file.exists()) {
      throw new IOException("Failed to delete: " + file.getAbsolutePath());
    }
  }
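The redirect to /dev/null is not cosmetic: if a child process's output is neither consumed nor redirected, its pipe buffer can fill up and waitFor() may block indefinitely. Below is a standalone sketch of the same run, discard output, check exit code pattern; it is an illustration rather than Spark code, and on Java 9+ ProcessBuilder.Redirect.DISCARD is a portable alternative to writing to /dev/null.

// Illustrative only: run an external command, discard its output, and check the exit code.
import java.io.IOException;

public class RunAndDiscard {
  public static void main(String[] args) throws IOException, InterruptedException {
    // Made-up path purely for illustration
    ProcessBuilder builder = new ProcessBuilder("rm", "-rf", "/tmp/example-to-delete");
    builder.redirectErrorStream(true);                        // merge stderr into stdout
    builder.redirectOutput(ProcessBuilder.Redirect.DISCARD);  // Java 9+; JavaUtils targets /dev/null instead
    Process process = builder.start();
    int exitCode = process.waitFor();                         // safe to wait: nothing left to drain
    if (exitCode != 0) {
      throw new IOException("rm -rf exited with code " + exitCode);
    }
  }
}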
deleteRecursivelyUsingJavaIO

Falls back to plain Java IO: recurse into the directory (unless it is a symlink), delete its children first, then delete the entry itself.

// org.apache.spark.network.util.JavaUtils.java
  private static void deleteRecursivelyUsingJavaIO(File file) throws IOException {
    if (file.isDirectory() && !isSymlink(file)) {
      IOException savedIOException = null;
      for (File child : listFilesSafely(file)) {
        try {
          deleteRecursively(child);
        } catch (IOException e) {
          // In case of multiple exceptions, only last one will be thrown
          savedIOException = e;
        }
      }
      if (savedIOException != null) {
        throw savedIOException;
      }
    }

    boolean deleted = file.delete();
    // Delete can also fail if the file simply did not exist.
    if (!deleted && file.exists()) {
      throw new IOException("Failed to delete: " + file.getAbsolutePath());
    }
  }
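The Java IO path relies on two helpers that are not shown in the snippet: isSymlink and listFilesSafely. A minimal sketch of what they need to provide, written here against the standard java.nio.file API rather than copied from JavaUtils:

// Illustrative helpers; not the exact Spark implementations.
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;

final class JavaIoHelpers {
  // Symlink check; java.nio.file.Files exposes this directly.
  static boolean isSymlink(File file) {
    return Files.isSymbolicLink(file.toPath());
  }

  // File.listFiles() returns null on an I/O error (or for a non-directory);
  // turning that into an IOException keeps the caller's error handling uniform.
  static File[] listFilesSafely(File file) throws IOException {
    if (!file.exists()) {
      return new File[0];
    }
    File[] children = file.listFiles();
    if (children == null) {
      throw new IOException("Failed to list files for dir: " + file);
    }
    return children;
  }
}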