history

   11  touch spark-client-deployment.yaml
   12  ls
   13  chmod 777 *
   14  ls
   15  kubectl apply -f spark-history.yaml 
   16  kubectl delete -f spark-history.yaml 
   17  kubectl apply -f spark-history.yaml 
   18  kubectl get pods -A
   19  ls
   20  kubectl apply -f spark-client.yaml 
   21  pwd
   22  cd spark-3.4.0-bin-hadoop3
   23  ls
   24  pwd
   25  export SPARK_HOME=/home/demo/spark/spark-3.4.0-bin-hadoop3
   26  ls
   27  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=spark-client-service      --conf spark.driver.port=7321     file:///export SPARK_HOME=/home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   28  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=spark-client-service      --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   29  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=spark-client.spark-client-service.default.svc.cluster.local     --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   30  kubectl cp /home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar hadoop/hadoop-hadoop-hdfs-dn-0:/tmp/ -n default
   31  kubectl get pods -A
   32  kubectl cp /home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar hadoop/hadoop-hadoop-hdfs-dn-0:/tmp/ -n default
   33  kubectl cp /home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar default/hadoop-hadoop-hdfs-dn-0:/tmp/ -n default
   34  kubectl exec -it hadoop-hadoop-hdfs-dn-0 -n default -- bash
   35  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode cluster     --name spark-pi-hdfs     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.eventLog.enabled=true     --conf spark.eventLog.dir=hdfs://hadoop-hadoop-hdfs-nn:9000/sparkhistory     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     hdfs://hadoop-hadoop-hdfs-nn:9000/sparkhistory/spark-examples_2.12-3.4.0.jar
   36  cd home/demo/spark/spark-3.4.0-bin-hadoop3
   37  cd /home/demo/spark/spark-3.4.0-bin-hadoop3
   38  ls
   39  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=spark-client.spark-client-service.default.svc.cluster.local     --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   40  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=spark-client-service     --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   41  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=10.10.10.99     --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   42  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   43  ./bin/spark-submit     --master k8s://https://10.10.10.99:6443     --deploy-mode client     --name spark-pi-client     --class org.apache.spark.examples.SparkPi     --conf spark.executor.instances=1     --conf spark.kubernetes.namespace=default     --conf spark.kubernetes.authenticate.driver.serviceAccountName=spark     --conf spark.kubernetes.container.image=zhxl1989/spark:3.4.0-hadoop3     --conf spark.driver.host=10.10.10.99     --conf spark.driver.port=7321     file:///home/demo/spark/spark-3.4.0-bin-hadoop3/examples/jars/spark-examples_2.12-3.4.0.jar
   44  kubectl describe pod mysql-7b8dc5c4ff-cbfkt
   45  kubectl get pods -A
   46  kubectl describe pod postgres-55c85b5466-cmhcp
   47  l
   48  cd /demo
   49  cd /home
   50  ls
   51  cd demo
   52  ls
   53  cd pv
   54  ls
   55  mkdir -p postgres
   56  ls
   57  chmod 777 *
   58  ls
   59  cd ..
   60  ls
   61  kubectl delete -f mysql.yaml 
   62  kubectl apply -f mysql.yaml 
   63  kubectl get pods -A
   64  kubectl describe pod mysql-6b86c4d4c7-2d4r8
   65  mkdir -p /home/demo/pv/mysql
   66  chmod 777 /home/demo/pv/mysql
   67  kubectl describe pod mysql-6b86c4d4c7-2d4r8
   68  kubectl describe pod mysql-6b86c4d4c7-6bllh
   69  ls
   70  kubectl delete -f metabase.yaml 
   71  history | grep build
   72  docker build -t zhxl1989/metabase:latest .
   73  kubectl apply -f metabase.yaml 
   74  kubectl delete -f metabase.yaml 
   75  docker build -t zhxl1989/metabase:latest .
   76  kubectl apply -f metabase.yaml 
   77  docker build -t zhxl1989/metabase:latest .
   78  ls
   79  kubectl delete -f metabase.yaml 
   80  sudo yum install -y yum-utils device-mapper-persistent-data lvm2
   81  sudo yum-config-manager --add-repo https://mirrors.aliyun.com/docker-ce/linux/centos/docker-ce.repo
   82  sudo sed -i 's+download.docker.com+mirrors.aliyun.com/docker-ce+' /etc/yum.repos.d/docker-ce.repo
   83  sudo yum makecache fast
   84  sudo yum -y install docker-ce
   85  systemctl start docker
   86  systemctl enable docker
   87  mkdir -p /opt/data/common/extendlib
   88  chmod +x ./kind
   89  mv kind-linux-amd64 /usr/bin/kind
   90  ls
   91  chmod +x /usr/bin/kind
   92  ls
   93  cd /opt/data/common/extendlib
   94  ls
   95  curl https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.28/mysql-connector-java-8.0.28.jar -o /opt/data/common/extendlib/mysql-connector-java-8.0.28.jar
   96  curl https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.28/mysql-connector-java-8.0.28.jar -o /opt/data/common/extendlib/mysql-connector-java-8.0.28.jar
   97  ls
   98  chmod 777 *
   99  ls
  100  cd /home
  101  ls
  102  cd mode
  103  cd demo
  104  ls
  105  chmod 777 *
  106  ls
  107  rm -rf linkis-master.zip 
  108  ls
  109  mkdir -p hadoop
  110  mkdir -p spark
  111  ls
  112  cd hadoop/
  113  ls
  114  touch Dockerfile
  115  ls
  116  docker build -t zhxl1989/hadoop:3.3.5 .
  117  docker build -t zhxl1989/hadoop:3.3.2 .
  118  helm repo update
  119  docker build -t zhxl1989/hadoop:3.3.2 .
  120  docker images
  121  docker rmi $(docker images | grep "none" | awk '{print $3}')
  122  docker images
  123  cd /home
  124  ls

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值