- Install
sudo apt-get install openssh-server
- Start
ps -e | grep ssh
sudo /etc/init.d/ssh start
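If Hadoop is later run in pseudo-distributed mode, passwordless SSH to localhost is usually required; a minimal sketch, assuming no key pair exists yet:
ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa         # generate a key with an empty passphrase
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
ssh localhost                                    # should log in without asking for a password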
export LD_LIBRARY_PATH=/usr/lib/jvm/jre/lib/amd64:/usr/lib/jvm/jre/lib/amd64/default
export LD_LIBRARY_PATH=/usr/local/hadoop-2.9.2/lib/native:$LD_LIBRARY_PATH   # LD_LIBRARY_PATH takes directories, not the libhdfs.so.0.0.0 file itself
export JAVA_HOME=/usr/lib/jvm/default-java
export LD_LIBRARY_PATH=$JAVA_HOME/lib/server:$LD_LIBRARY_PATH
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export LIBRARY_PATH=$HADOOP_HOME/lib/native:$LIBRARY_PATH
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH
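A quick sanity check that these variables point at a working Hadoop installation (paths as assumed above):
$HADOOP_HOME/bin/hadoop version      # should report Hadoop 2.9.2
ls $HADOOP_HOME/lib/native           # should list libhadoop.so* and libhdfs.so*
echo $LD_LIBRARY_PATH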
vim /etc/hosts
# Add
74.125.31.82 www.googlesource.com
74.125.31.82 android.googlesource.com
203.208.46.172 cache.pack.google.com
59.24.3.173 cache.pack.google.com
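A quick check that the new entries are actually used for name resolution:
getent hosts android.googlesource.com     # should print the IP added above
getent hosts cache.pack.google.com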
That is, change the URL of the repository you want to clone to the format:
https://username:password@github.com/…
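For example, to clone the Euler repository with credentials embedded in the URL (username and password here are placeholders):
git clone https://username:password@github.com/alibaba/euler.git
Note that the password ends up in plain text in the cloned repository's .git/config.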
# Update package lists
sudo apt-get update
# Install Java
sudo apt-get install -y default-jre-headless
# The following is only needed for distributed training and can be skipped otherwise
curl -O https://mirrors.aliyun.com/apache/hadoop/common/hadoop-2.9.2/hadoop-2.9.2.tar.gz
sudo tar xf hadoop-2.9.2.tar.gz -C /usr/local
export JAVA_HOME=/usr/lib/jvm/default-java
export LD_LIBRARY_PATH=$JAVA_HOME/lib/server:$LD_LIBRARY_PATH
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export LIBRARY_PATH=$HADOOP_HOME/lib/native:$LIBRARY_PATH
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH
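These exports only apply to the current shell; to keep them across sessions they can be appended to ~/.bashrc, a sketch using the same values as above (if libjvm.so is still not found, use the corrected path from the "Path issues" section below):
cat >> ~/.bashrc <<'EOF'
export JAVA_HOME=/usr/lib/jvm/default-java
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export LIBRARY_PATH=$HADOOP_HOME/lib/native:$LIBRARY_PATH
export LD_LIBRARY_PATH=$JAVA_HOME/lib/server:$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH
EOF
source ~/.bashrc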
# Install pip
sudo apt-get install -y python-pip
pip install euler-gl
curl -k -O https://raw.githubusercontent.com/alibaba/euler/master/examples/ppi_data.py
pip install networkx==1.11 sklearn
python ppi_data.py
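A quick check that the pinned dependencies are the ones actually being imported (networkx 2.x changed its API, so the 1.11 pin matters):
python -c "import networkx, sklearn; print(networkx.__version__)"   # expect 1.11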
Path issues: with JDK 8, libjvm.so lives under $JAVA_HOME/jre/lib/amd64/server rather than $JAVA_HOME/lib/server, so the exports become:
export JAVA_HOME=/usr/lib/jvm/default-java
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/amd64/server:$LD_LIBRARY_PATH
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export LIBRARY_PATH=$HADOOP_HOME/lib/native:$LIBRARY_PATH
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH
sudo find / -name libhdfs.so.0.0.0   # locate the native HDFS library
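Once libhdfs.so.0.0.0 is located (normally under $HADOOP_HOME/lib/native), ldd shows whether its libjvm.so dependency resolves with the current LD_LIBRARY_PATH; a quick check, assuming that path:
ldd $HADOOP_HOME/lib/native/libhdfs.so.0.0.0 | grep jvm   # libjvm.so must not show "not found"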
Install the JDK
curl -O https://download.oracle.com/otn-pub/java/jdk/8u201-b09/42970487e3af4f5aa5bca3f542482c60/jdk-8u201-linux-x64.tar.gz?AuthParam=1550392785_f41e67a968fb7647f0a0dc675c949383
sudo tar xf jdk-8u201-linux-x64.tar.gz -C /usr/lib/jvm
export JAVA_HOME=/usr/lib/jvm/jdk1.8.0_201
export LD_LIBRARY_PATH=$JAVA_HOME/jre/lib/amd64/server:$LD_LIBRARY_PATH   # JDK 8 keeps libjvm.so under jre/lib/amd64/server
export HADOOP_HOME=/usr/local/hadoop-2.9.2
export LIBRARY_PATH=$HADOOP_HOME/lib/native:$LIBRARY_PATH
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native:$LD_LIBRARY_PATH
export CLASSPATH=$($HADOOP_HOME/bin/hadoop classpath --glob):$CLASSPATH
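To verify the unpacked JDK and the libjvm.so location (the directory that LD_LIBRARY_PATH must contain):
$JAVA_HOME/bin/java -version                     # should report 1.8.0_201
ls $JAVA_HOME/jre/lib/amd64/server/libjvm.so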
Prepare to install faiss
wget https://mirrors.tuna.tsinghua.edu.cn/anaconda/archive/Anaconda2-4.3.0-Linux-x86_64.sh
sudo bash Anaconda2-4.3.0-Linux-x86_64.sh
sudo chown -R nhq /home/nhq/anaconda2
conda install faiss-cpu -c pytorch
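A minimal smoke test that the conda faiss build imports and searches correctly, using random vectors:
python - <<'EOF'
import numpy as np
import faiss

d = 64                                                 # vector dimension
xb = np.random.random((1000, d)).astype('float32')     # database vectors
xq = np.random.random((5, d)).astype('float32')        # query vectors

index = faiss.IndexFlatL2(d)       # exact L2 index
index.add(xb)
D, I = index.search(xq, 4)         # 4 nearest neighbours per query
print(I.shape, D.shape)            # expect (5, 4) (5, 4)
EOF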
curl -k -O https://raw.githubusercontent.com/facebookresearch/faiss/master/python/swigfaiss.py
Notes on installing faiss on Ubuntu 16.04 (Jianshu): https://www.jianshu.com/p/2645510a39de