Importing Images into Taobao TFS with Java

The code comes in two parts: importing the images into TFS, and computing the MD5 digest of each image.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;


public class ComputeImageHash {

    private static Logger logger = LogManager.getLogger(ComputeImageHash.class);

    protected static final int PAGE_SIZE = 1000;

    private String filePathRoot = "";

    private ClothesMapper clothesMapper;

    private ApplicationContext ctx;

    public static void main(String[] args) {
        ComputeImageHash it = new ComputeImageHash();

        if (args != null && args.length > 0) {
            it.filePathRoot = args[0];
            logger.info("filePathRoot = " + it.filePathRoot);
        }

        try {
            it.doImport();
        } catch (Throwable t) {
            logger.error(t.getMessage(), t);
        }
    }

    public ComputeImageHash() {
        ctx = new ClassPathXmlApplicationContext("applicationContext.xml");
        clothesMapper = ctx.getBean(ClothesMapper.class);
    }

    public void doImport() {
        long begin = System.currentTimeMillis();
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("start", 0);
        params.put("size", PAGE_SIZE);
        params.put("image_hash_not_null", "not_null");

        Map<String, Object> paramsUpdate = new HashMap<String, Object>();
        List<Map<String, Object>> dataList = null;

        int totalCount = clothesMapper.getClothTotalCount(params);
        logger.info("totalCount = " + totalCount);

        // Page through the records, re-running the query on each pass;
        // the loop ends once getClothPage returns an empty list.
        do {
            dataList = clothesMapper.getClothPage(params);
            for (Map<String, Object> data : dataList) {
                if (data != null) {
                    String uuid = data.get("uuid").toString();
                    String path = (String) data.get("path");
                    if (StringUtils.isNotBlank(path)) {
                        // System.out.println(uuid + " " + path);
                        String fileMd5 = CommonUtils.getFileMd5(filePathRoot + path);

                        logger.info("uuid = " + uuid + " fileMd5 = " + fileMd5);

                        paramsUpdate.put("uuid", uuid);
                        paramsUpdate.put("image_hash", fileMd5);
                        clothesMapper.updateImageHash(paramsUpdate);
                    }
                }
            }
        } while (CollectionUtils.isNotEmpty(dataList));

        logger.info("total_time = " + ((System.currentTimeMillis() - begin) / 1000.0) + "s");
    }

}
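
CommonUtils.getFileMd5 is a project helper that is not shown in the post. A minimal sketch of what it might look like, using only the JDK's MessageDigest (the class name, signature, and return-null-on-failure behaviour are assumptions inferred from how it is called above):

import java.io.FileInputStream;
import java.io.InputStream;
import java.security.MessageDigest;

public class CommonUtils {

    /**
     * Hex-encoded MD5 digest of the file at filePath, or null if the file
     * cannot be read. (Sketch; the real project helper may differ.)
     */
    public static String getFileMd5(String filePath) {
        InputStream in = null;
        try {
            MessageDigest md = MessageDigest.getInstance("MD5");
            in = new FileInputStream(filePath);
            byte[] buffer = new byte[8192];
            int len;
            while ((len = in.read(buffer)) != -1) {
                md.update(buffer, 0, len);
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b & 0xff));
            }
            return hex.toString();
        } catch (Exception e) {
            return null;
        } finally {
            if (in != null) {
                try { in.close(); } catch (Exception ignored) { }
            }
        }
    }
}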

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import com.taobao.common.tfs.DefaultTfsManager;
import com.taobao.common.tfs.TfsManager;

public class ImportImageToTfs {

    private static Logger logger = LogManager.getLogger(ImportImageToTfs.class);

    protected static final int PAGE_SIZE = 1000;

    private String filePathRoot = "";

    private boolean isThumbnail = false;

    private TfsManager tfsManager;

    private ClothesMapper clothesMapper;

    private ApplicationContext ctx;

    public static void main(String[] args) {
        ImportImageToTfs it = new ImportImageToTfs();

        if (args != null && args.length > 0) {
            it.filePathRoot = args[0];
            logger.info("filePathRoot = " + it.filePathRoot);
        }

        if (args != null && args.length > 1) {
            it.isThumbnail = StringUtils.equalsIgnoreCase("true", args[1]);
        }

        try {
            it.doImport();
        } catch (Throwable t) {
            logger.error(t.getMessage(), t);
        } finally {
            it.destroy();
        }
    }

    public ImportImageToTfs() {
        ctx = new ClassPathXmlApplicationContext("applicationContext.xml");
        tfsManager = (DefaultTfsManager) ctx.getBean("tfsManager");
        clothesMapper = ctx.getBean(ClothesMapper.class);
    }

    public void doImport() {
        long begin = System.currentTimeMillis();
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("start", 0);
        params.put("size", PAGE_SIZE);
        if (isThumbnail) {
            params.put("thumbnail_dfs_not_null", "not_null");
        } else {
            params.put("path_dfs_not_null", "not_null");
        }

        Map<String, Object> paramsUpdate = new HashMap<String, Object>();
        List<Map<String, Object>> dataList = null;

        int totalCount = clothesMapper.getClothTotalCount(params);
        logger.info("totalCount = " + totalCount);

        // Page through the records; the loop ends once getClothPage returns an empty list.
        do {
            dataList = clothesMapper.getClothPage(params);
            for (Map<String, Object> data : dataList) {
                if (data != null) {
                    String uuid = data.get("uuid").toString();
                    paramsUpdate.put("uuid", uuid);
                    if (isThumbnail) {
                        String thumbnail = (String) data.get("thumbnail");
                        if (StringUtils.isNotBlank(thumbnail)) {
                            String tfsname = saveToTfs(filePathRoot + thumbnail);
                            logger.info("uuid = " + uuid + " tfsname = " + tfsname + " isThumbnail = " + isThumbnail);

                            paramsUpdate.put("thumbnail_dfs", tfsname);
                            clothesMapper.updateThumbnailDfs(paramsUpdate);

                        }
                    } else {
                        String path = (String) data.get("path");
                        if (StringUtils.isNotBlank(path)) {
                            String tfsname = saveToTfs(filePathRoot + path);
                            logger.info("uuid = " + uuid + " tfsname = " + tfsname + " isThumbnail = " + isThumbnail);

                            paramsUpdate.put("path_dfs", tfsname);
                            clothesMapper.updatePathDfs(paramsUpdate);
                        }
                    }

                }
            }
        } while (CollectionUtils.isNotEmpty(dataList));

        logger.info("total_time = " + ((System.currentTimeMillis() - begin) / 1000.0) + "s");
    }

    /** Upload a local file to TFS, keeping the file extension as the TFS suffix; returns the TFS file name. */
    public String saveToTfs(String filePath) {
        String ext = FilenameUtils.getExtension(filePath);
        return tfsManager.saveFile(filePath, null, "." + ext.toLowerCase());
    }

    public void destroy() {
        tfsManager.destroy();
    }
}
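
ClothesMapper is the project's MyBatis mapper shared by both jobs above; it is not included in the post. A sketch of the interface implied by the calls in the code (return types and the exact filter semantics of the params map are assumptions; the SQL would live in the corresponding mapper XML):

import java.util.List;
import java.util.Map;

public interface ClothesMapper {

    /** Total number of rows matching the filter params. */
    int getClothTotalCount(Map<String, Object> params);

    /** One page of rows (uuid, path, thumbnail, ...) selected with the "start"/"size" params. */
    List<Map<String, Object>> getClothPage(Map<String, Object> params);

    /** Set image_hash for the row identified by "uuid". */
    int updateImageHash(Map<String, Object> params);

    /** Set path_dfs (TFS file name of the original image) for the row identified by "uuid". */
    int updatePathDfs(Map<String, Object> params);

    /** Set thumbnail_dfs (TFS file name of the thumbnail) for the row identified by "uuid". */
    int updateThumbnailDfs(Map<String, Object> params);
}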

Spring configuration, based on the official TFS client documentation:

<!-- tfs -->
    <bean id="tfsManager" class="com.taobao.common.tfs.DefaultTfsManager"
        init-method="init">
        <!-- Maximum number of requests that may wait in this process at once; depends on how many threads call TFS concurrently -->
        <property name="maxWaitThread">
            <value>100</value>
        </property>
        <!-- Maximum wait time for a single request (ms); the request is abandoned once this timeout is exceeded -->
        <property name="timeout">
            <value>2000</value>
        </property>
        <!-- TFS master nameserver address, in ip:port form -->
        <property name="nsip">
            <value>tfs.xxx.com:8100</value>
        </property>
        <!-- TFS cluster index. This value is only a hint: on initialization the client fetches it from the nameserver and falls back to this local setting only when that fails -->
        <property name="tfsClusterIndex">
            <value>1</value>
        </property>
        <!-- When reading files, TFS caches the data-server IP of each block; this sets the maximum number of cached entries -->
        <property name="maxCacheItemCount">
            <value>10000</value>
        </property>
        <!-- Maximum lifetime (ms) of the cache entries above -->
        <property name="maxCacheTime">
            <value>5000</value>
        </property>
        <!--
        If deduplication is not required, the settings below can be left out:
            Tair dedup store server list
            Tair dedup store groupName
            Tair dedup store namespace
         -->
        <!-- 
        <property name="uniqueServerList">
            <list>
                <value>127.0.0.1:5198</value>
                <value>127.0.0.2:5198</value>
            </list>
        </property>
        <property name="groupName">
            <value>group_tfsunique</value>
        </property>
        <property name="namespace">
            <value>102</value>
        </property>
         -->
    </bean>

Ant run script:

<?xml version="1.0" encoding="UTF-8"?>
<project name="ProjectScheduleAntRuner" basedir=".">
    <path id="runtime_classpath">
        <fileset dir="webapp/WEB-INF/lib">
            <include name="*.jar" />
        </fileset>
        <pathelement location="webapp/WEB-INF/classes" />
    </path>

    <target name="ImportSrcImage" description="将图像导入到 TFS.">
        <java classname="ImportImageToTfs" fork="true" classpathref="runtime_classpath" failonerror="false" newenvironment="no">
            <jvmarg value="-Xms64m" />
            <jvmarg value="-Xmx1024m" />
            <jvmarg value="-XX:MaxPermSize=512m" />
            <jvmarg value="-Dlog_file_suffix=-schedule" />
            <arg value="${image_path_root}" />
            <arg value="${is_thumbnail}" />
        </java>
    </target>

    <target name="ComputeImageHash" description="计算原始图的 MD5 Hash.">
        <java classname="ComputeImageHash" fork="true" classpathref="runtime_classpath" failonerror="false" newenvironment="no">
            <jvmarg value="-Xms64m" />
            <jvmarg value="-Xmx1024m" />
            <jvmarg value="-XX:MaxPermSize=512m" />
            <jvmarg value="-Dlog_file_suffix=-schedule" />
            <arg value="${image_path_root}" />
        </java>
    </target>
</project>
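
With this build file, the two jobs can be launched from the project root by passing the image root directory (and, for the import job, whether to handle thumbnails) as Ant properties. The paths below are placeholders:

ant -Dimage_path_root=/data/images -Dis_thumbnail=false ImportSrcImage
ant -Dimage_path_root=/data/images ComputeImageHash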