Generating a GIF from a Video Clip on Android

Just yesterday, the video-to-GIF cover feature I had worked so hard on was cut due to a change in company requirements.

Painful as that is, I've decided to share the code here, in the hope that anyone who has to walk this road can avoid a few detours.

Optimization suggestions:

A reader just asked in the comments how to make the generation faster and improve the user experience.

I have roughly summarized three points; if you have better suggestions, feel free to leave them in the comments!

1. Show the user a looping (indeterminate) progress indicator while the GIF is being generated.

2. Reduce the number of GIF frames (the fps parameter of the gifExtractor.encoder() call below controls how many frames per second are sampled).

3. Compress the quality and size of each frame (see the sketch after this list).
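As a rough illustration of point 3, a helper like the following could shrink each decoded frame before it reaches the GIF color quantizer. This is only a sketch; shrinkFrame() is a hypothetical helper and not part of the original code:

    // Hypothetical helper (not in the original code): downscale a frame and round-trip it
    // through JPEG to reduce pixel count and detail before GIF encoding.
    private Bitmap shrinkFrame(Bitmap frame, int targetWidth, int targetHeight, int jpegQuality) {
        Bitmap scaled = Bitmap.createScaledBitmap(frame, targetWidth, targetHeight, true);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        scaled.compress(Bitmap.CompressFormat.JPEG, jpegQuality, bos);
        byte[] data = bos.toByteArray();
        return BitmapFactory.decodeByteArray(data, 0, data.length);
    }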

Step 1: define three key variables

    private String Cover_puth = ""; // path of the generated GIF (cover)
    private int TimeStart = 0;      // starting second of the GIF clip
    private int mAngle = 90;        // video / cover rotation angle; 90 is a common default and is overwritten below

Step 2: create a worker thread and call startGIF() on it, since the generation is time-consuming.
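For example, a minimal way to kick this off from the Activity might look like the following (just a sketch; any approach that keeps the work off the main thread will do):

    new Thread(new Runnable() {
        @Override
        public void run() {
            startGIF(); // defined below
        }
    }).start();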

/*
     * Author:  赵星海
     * Date:    2019/4/23 10:06
     * Purpose: start generating the GIF
     */
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    private void startGIF() { // start the GIF conversion
        LogUtils.loge("startGIF() called");
        File sampleDir = new File(Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DCIM), "GifOutput"); // folder that will hold the generated GIFs
        if (!sampleDir.exists()) {
            sampleDir.mkdirs();
        }
        File file = new File(sampleDir, new Date().getTime() + ".gif");
        Cover_puth = file.getAbsolutePath();
        try {
            // read the video metadata
            MediaMetadataRetriever retr = new MediaMetadataRetriever();
            retr.setDataSource(Activity_Video.Video_Puth);
            String rotation = retr.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); // video rotation in degrees
            mAngle = Integer.parseInt(rotation.trim());
            LogUtils.loge("video rotation = " + rotation);
        } catch (Exception e) {
            e.printStackTrace();
        }

        GifExtractor gifExtractor = new GifExtractor(this, Activity_Video.Video_Puth, mAngle);
        // begin/end are in milliseconds; 10 = frames sampled per second, 15 = playback frame rate of the resulting GIF
        gifExtractor.encoder(file.getAbsolutePath(), TimeStart * 1000, (TimeStart + 2) * 1000,
                10, 15);
    }
The GifExtractor class:
public class GifExtractor {
    private static final String VIDEO = "video/";

    private Context context;
    private MediaExtractor videoExtractor;
    private int trackIndex;
    private MediaFormat format = null;
    private long duration = 0;
    int mAngle = 90; // rotation angle

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    public GifExtractor(Context context, String path , int Angle) {
        mAngle  = Angle;
        this.context = context;
        try {
            videoExtractor = new MediaExtractor();
            videoExtractor.setDataSource(path);
            for (int i = 0; i < videoExtractor.getTrackCount(); i++) {
                format = videoExtractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);
                if (mime.startsWith(VIDEO)) {
                    videoExtractor.selectTrack(i);
                    trackIndex = i;
                    duration = format.getLong(MediaFormat.KEY_DURATION) / 1000;
                    break;
                }
            }

        } catch (IOException e) {
            e.printStackTrace();
        }

    }

    public long getDuration() {
        return duration;
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void encoder(String gifPath) {
        encoder(gifPath, 0, duration);
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void encoder(String gifPath, long begin, long end) {
        encoder(gifPath, begin, end, 15, 15);
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void encoder(String gifPath, long begin, long end, int fps, int speed) {
        encoder(gifPath, begin, end, fps, speed, -1, -1);
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void encoder(final String gifPath, final long begin, final long end, final int fps, final int speed, final int gifWidth, final int gifHeight) {

        if (begin > duration) {
            throw new RuntimeException("Start time cannot be greater than the video duration");
        }
        if (end <= begin) {
            throw new RuntimeException("End time must be greater than the start time");
        }
        long endTime = duration;
        if (end < duration) {
            endTime = end;
        }
        long time1 = System.currentTimeMillis();
        videoExtractor.seekTo(begin * 1000, trackIndex);
        FastYUVtoRGB fastYUVtoRGB = new FastYUVtoRGB(context);

        String mime = format.getString(MediaFormat.KEY_MIME);
        MediaCodec videoDecoder = null;
        try {
            videoDecoder = MediaCodec.createDecoderByType(mime);
        } catch (IOException e) {
            e.printStackTrace();
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);

        int width = format.getInteger(MediaFormat.KEY_WIDTH);
        int height = format.getInteger(MediaFormat.KEY_HEIGHT);

        videoDecoder.configure(format, null, null, 0);
        videoDecoder.start();


        GIFEncoder encoder = null;

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int f = fps;
        if (f <= 0) {
            f = 15;
        }
        int s = speed;
        if (s <= 0) {
            s = f;
        }
        long frameTime = 1000 / f;
        long startTime = begin;
        while (true) {
            int run = extractorVideoInputBuffer(videoExtractor, videoDecoder);
            if (run == 1) {
                int outIndex = videoDecoder.dequeueOutputBuffer(info, 500000);
                if (outIndex >= 0) {
                    long time = info.presentationTimeUs / 1000;
                    if (time >= begin && time <= endTime) {
                        if (time >= startTime) {
                            Image image = videoDecoder.getOutputImage(outIndex);
                            Bitmap bitmap = fastYUVtoRGB.convertYUVtoRGB(getDataFromImage(image), width, height);
                            if (gifWidth != -1 && gifHeight != -1) {
                                bitmap = Bitmap.createScaledBitmap(bitmap, gifWidth, gifHeight, true);
                            } else {
                                bitmap = Bitmap.createScaledBitmap(bitmap, width / 2, height / 2, true); // the original default divided by 4
                            }

                            bitmap = rotateBitmap(bitmap, mAngle);

                            if (encoder == null) {
                                encoder = new GIFEncoder();
                                encoder.setFrameRate(s);
                                // encoder.init(bitmap); // method used by the previous encoder implementation
                                encoder.setRepeat(0); // 0 = loop forever
                                encoder.start(gifPath);
                            }
                            encoder.addFrame(bitmap); // add every sampled frame, including the first one

                            int p = (int) ((startTime - begin) * 100 / (endTime - begin));
                            Log.d("====================", "p = " + p); // progress in percent
                            startTime += frameTime;
                        }

                    }
                    videoDecoder.releaseOutputBuffer(outIndex, false); // no output Surface was configured, so there is nothing to render
                    if (time >= endTime) {
                        break;
                    }
                }
            } else if (run == -1) {
                break;
            }
        }
        if (encoder != null) {
            encoder.finish();
        }
        Log.d("================", "encoder->time = " + (System.currentTimeMillis() - time1));
        Log.d("================", "over");
        videoDecoder.stop();
        videoDecoder.release();


    }

    /**
     * Rotates a bitmap.
     *
     * @param origin the source bitmap
     * @param alpha  rotation angle in degrees, positive or negative
     * @return the rotated bitmap
     */
    private Bitmap rotateBitmap(Bitmap origin, float alpha) {
        if (origin == null) {
            return null;
        }
        int width = origin.getWidth();
        int height = origin.getHeight();
        Matrix matrix = new Matrix();
        matrix.setRotate(alpha);
        // rotate around the origin of the source bitmap
        Bitmap newBM = Bitmap.createBitmap(origin, 0, 0, width, height, matrix, false);
        if (newBM.equals(origin)) {
            return newBM;
        }
        origin.recycle();
        return newBM;
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    public void release() {
        videoExtractor.release();
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private int extractorVideoInputBuffer(MediaExtractor mediaExtractor, MediaCodec mediaCodec) {
        int inputIndex = mediaCodec.dequeueInputBuffer(500000);
        if (inputIndex >= 0) {
            ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputIndex);
            long sampleTime = mediaExtractor.getSampleTime();
            int sampleSize = mediaExtractor.readSampleData(inputBuffer, 0);
            if (mediaExtractor.advance()) {
                mediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, sampleTime, 0);
                return 1;
            } else {
                if (sampleSize > 0) {
                    mediaCodec.queueInputBuffer(inputIndex, 0, sampleSize, sampleTime, 0);
                    return 1;
                } else {
                    return -1;
                }

            }
        }
        return 0;
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    private byte[] getDataFromImage(Image image) {
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        int channelOffset = 0;
        int outputStride = 1;
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0:
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1:
                    channelOffset = width * height + 1;
                    outputStride = 2;
                    break;
                case 2:
                    channelOffset = width * height;
                    outputStride = 2;
                    break;
            }
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();

            int shift = (i == 0) ? 0 : 1;
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    length = w;
                    buffer.get(data, channelOffset, length);
                    channelOffset += length;
                } else {
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
        }
        return data;
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private Bitmap compressToJpeg(Image image) {
        ByteArrayOutputStream outStream = new ByteArrayOutputStream();

        Rect rect = image.getCropRect();
        YuvImage yuvImage = new YuvImage(getDataFromImage(image), ImageFormat.NV21, rect.width(), rect.height(), null);
        yuvImage.compressToJpeg(rect, 100, outStream);
        return BitmapFactory.decodeByteArray(outStream.toByteArray(), 0, outStream.size());
    }
}

One line in the middle of the class is commented out; that is a change I made later. After the change the GIF is guaranteed to loop indefinitely, whereas before it played through once and then stopped.
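For reference, the encoder can also be driven on its own. A minimal sketch (frame1 and frame2 are assumed Bitmaps, and the output path is only an example):

    GIFEncoder encoder = new GIFEncoder();
    encoder.setFrameRate(15);            // playback speed in frames per second
    encoder.setRepeat(0);                // 0 = loop forever (writes the NETSCAPE2.0 extension)
    encoder.start("/sdcard/DCIM/demo.gif");
    encoder.addFrame(frame1);
    encoder.addFrame(frame2);
    encoder.finish();                    // writes the GIF trailer and closes the file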

Next are the four utility classes that are used:

public class FastYUVtoRGB {
    private RenderScript rs;
    private ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
    private Type.Builder yuvType, rgbaType;
    private Allocation in, out;
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public FastYUVtoRGB(Context context){
        rs = RenderScript.create(context);
        yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
    }
    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
    public Bitmap convertYUVtoRGB(byte[] yuvData, int width, int height){
        if (yuvType == null){ // allocations are created lazily on the first frame; all frames are assumed to share the same size
            yuvType = new Type.Builder(rs, Element.U8(rs)).setX(yuvData.length);
            in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
            rgbaType = new Type.Builder(rs, Element.RGBA_8888(rs)).setX(width).setY(height);
            out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
        }
        in.copyFrom(yuvData);
        yuvToRgbIntrinsic.setInput(in);
        yuvToRgbIntrinsic.forEach(out);
        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        out.copyTo(bitmap);
        return bitmap;
    }
}
public class GIFEncoder {
    protected int width; // image size
    protected int height;
    protected Color transparent = null; // transparent color if given
    protected int transIndex; // transparent index in color table
    protected int repeat = -1; // no repeat
    protected int delay = 0; // frame delay (hundredths)
    protected boolean started = false; // ready to output frames
    protected OutputStream out;
    protected Bitmap image; // current frame
    protected byte[] pixels; // BGR byte array from frame
    protected byte[] indexedPixels; // converted frame indexed to palette
    protected int colorDepth; // number of bit planes
    protected byte[] colorTab; // RGB palette
    protected boolean[] usedEntry = new boolean[256]; // active palette entries
    protected int palSize = 7; // color table size (bits-1)
    protected int dispose = -1; // disposal code (-1 = use default)
    protected boolean closeStream = false; // close stream when finished
    protected boolean firstFrame = true;
    protected boolean sizeSet = false; // if false, get size from first frame
    protected int sample = 10; // default sample interval for quantizer

    /**
     * Sets the delay time between each frame, or changes it
     * for subsequent frames (applies to last frame added).
     *
     * @param ms int delay time in milliseconds
     */
    public void setDelay(int ms) {
        delay = Math.round(ms / 10.0f);
    }

    /**
     * Sets the GIF frame disposal code for the last added frame
     * and any subsequent frames.  Default is 0 if no transparent
     * color has been set, otherwise 2.
     * @param code int disposal code.
     */
    public void setDispose(int code) {
        if (code >= 0) {
            dispose = code;
        }
    }

    /**
     * Sets the number of times the set of GIF frames
     * should be played.  Default is 1; 0 means play
     * indefinitely.  Must be invoked before the first
     * image is added.
     *
     * @param iter int number of iterations.
     * @return
     */
    public void setRepeat(int iter) {
        if (iter >= 0) {
            repeat = iter;
        }
    }

    /**
     * Sets the transparent color for the last added frame
     * and any subsequent frames.
     * Since all colors are subject to modification
     * in the quantization process, the color in the final
     * palette for each frame closest to the given color
     * becomes the transparent color for that frame.
     * May be set to null to indicate no transparent color.
     *
     * @param c Color to be treated as transparent on display.
     */
    public void setTransparent(Color c) {
        transparent = c;
    }

    /**
     * Adds next GIF frame.  The frame is not written immediately, but is
     * actually deferred until the next frame is received so that timing
     * data can be inserted.  Invoking <code>finish()</code> flushes all
     * frames.  If <code>setSize</code> was not invoked, the size of the
     * first image is used for all subsequent frames.
     *
     * @param im BufferedImage containing frame to write.
     * @return true if successful.
     */
    public boolean addFrame(Bitmap im) {
        if ((im == null) || !started) {
            return false;
        }
        boolean ok = true;
        try {
            if (!sizeSet) {
                // use first frame's size
                setSize(im.getWidth(), im.getHeight());
            }
            image = im;
            getImagePixels(); // convert to correct format if necessary
            analyzePixels(); // build color table & map pixels
            if (firstFrame) {
                writeLSD(); // logical screen descriptor
                writePalette(); // global color table
                if (repeat >= 0) {
                    // use NS app extension to indicate reps
                    writeNetscapeExt();
                }
            }
            writeGraphicCtrlExt(); // write graphic control extension
            writeImageDesc(); // image descriptor
            if (!firstFrame) {
                writePalette(); // local color table
            }
            writePixels(); // encode and write pixel data
            firstFrame = false;
        } catch (IOException e) {
            ok = false;
        }

        return ok;
    }

    //added by alvaro
    public boolean outFlush() {
        boolean ok = true;
        try {
            out.flush();
            return ok;
        } catch (IOException e) {
            ok = false;
        }

        return ok;
    }

    public byte[] getFrameByteArray() {
        return ((ByteArrayOutputStream) out).toByteArray();
    }

    /**
     * Flushes any pending data and closes output file.
     * If writing to an OutputStream, the stream is not
     * closed.
     */
    public boolean finish() {
        if (!started) return false;
        boolean ok = true;
        started = false;
        try {
            out.write(0x3b); // gif trailer
            out.flush();
            if (closeStream) {
                out.close();
            }
        } catch (IOException e) {
            ok = false;
        }

        return ok;
    }

    public void reset() {
        // reset for subsequent use
        transIndex = 0;
        out = null;
        image = null;
        pixels = null;
        indexedPixels = null;
        colorTab = null;
        closeStream = false;
        firstFrame = true;
    }

    /**
     * Sets frame rate in frames per second.  Equivalent to
     * <code>setDelay(1000/fps)</code>.
     *
     * @param fps float frame rate (frames per second)
     */
    public void setFrameRate(float fps) {
        if (fps != 0f) {
            delay = Math.round(100f / fps);
        }
    }

    /**
     * Sets quality of color quantization (conversion of images
     * to the maximum 256 colors allowed by the GIF specification).
     * Lower values (minimum = 1) produce better colors, but slow
     * processing significantly.  10 is the default, and produces
     * good color mapping at reasonable speeds.  Values greater
     * than 20 do not yield significant improvements in speed.
     *
     * @param quality int greater than 0.
     * @return
     */
    public void setQuality(int quality) {
        if (quality < 1) quality = 1;
        sample = quality;
    }

    /**
     * Sets the GIF frame size.  The default size is the
     * size of the first frame added if this method is
     * not invoked.
     *
     * @param w int frame width.
     * @param h int frame height.
     */
    public void setSize(int w, int h) {
        if (started && !firstFrame) return;
        width = w;
        height = h;
        if (width < 1) width = 320;
        if (height < 1) height = 240;
        sizeSet = true;
    }

    /**
     * Initiates GIF file creation on the given stream.  The stream
     * is not closed automatically.
     *
     * @param os OutputStream on which GIF images are written.
     * @return false if initial write failed.
     */
    public boolean start(OutputStream os) {
        if (os == null) return false;
        boolean ok = true;
        closeStream = false;
        out = os;
        try {
            writeString("GIF89a"); // header
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }

    /**
     * Initiates writing of a GIF file with the specified name.
     *
     * @param file String containing output file name.
     * @return false if open or initial write failed.
     */
    public boolean start(String file) {
        boolean ok = true;
        try {
            out = new BufferedOutputStream(new FileOutputStream(file));
            ok = start(out);
            closeStream = true;
        } catch (IOException e) {
            ok = false;
        }
        return started = ok;
    }

    /**
     * Analyzes image colors and creates color map.
     */
    protected void analyzePixels() {
        int len = pixels.length;
        int nPix = len / 3;
        indexedPixels = new byte[nPix];
        NeuQuant nq = new NeuQuant(pixels, len, sample);
        // initialize quantizer
        colorTab = nq.process(); // create reduced palette
        // convert map from BGR to RGB
        for (int i = 0; i < colorTab.length; i += 3) {
            byte temp = colorTab[i];
            colorTab[i] = colorTab[i + 2];
            colorTab[i + 2] = temp;
            usedEntry[i / 3] = false;
        }
        // map image pixels to new palette
        int k = 0;
        for (int i = 0; i < nPix; i++) {
            int index =
                    nq.map(pixels[k++] & 0xff,
                            pixels[k++] & 0xff,
                            pixels[k++] & 0xff);
            usedEntry[index] = true;
            indexedPixels[i] = (byte) index;
        }
        pixels = null;
        colorDepth = 8;
        palSize = 7;
        // get closest match to transparent color if specified
        if (transparent != null) {
            transIndex = findClosest(transparent);
        }
    }

    /**
     * Returns index of palette color closest to c
     *
     */
    protected int findClosest(Color c) {
        if (colorTab == null) return -1;
        int r = 0;
        int g = 0;
        int b = 0;
        int minpos = 0;
        int dmin = 256 * 256 * 256;
        int len = colorTab.length;
        for (int i = 0; i < len;) {
            int dr = r - (colorTab[i++] & 0xff);
            int dg = g - (colorTab[i++] & 0xff);
            int db = b - (colorTab[i] & 0xff);
            int d = dr * dr + dg * dg + db * db;
            int index = i / 3;
            if (usedEntry[index] && (d < dmin)) {
                dmin = d;
                minpos = index;
            }
            i++;
        }
        return minpos;
    }

    /**
     * Convert img to bytes, and remove the alpha channel.
     *
     * @param img array of packed ints from an android bitmap, with channels (alpha,red,green,blue)
     * @return array of raw bytes, with channels (blue,green,red)
     */
    private byte[] getPixelBytes(int[] img) {
        byte[] bytes = new byte[img.length * 3];

        int byteIdx = 0;
        for(int i=0; i < img.length; i++) {
            int thisPixel = img[i];
            byte[] theseBytes = ByteBuffer.allocate(4).putInt(thisPixel).array();
            // RGB --> BGR
            bytes[byteIdx] = theseBytes[3];
            bytes[byteIdx+1] = theseBytes[2];
            bytes[byteIdx+2] = theseBytes[1];
            byteIdx += 3;
        }
        return bytes;
    }

    /**
     * Extracts image pixels into byte array "pixels"
     */
    protected void getImagePixels() {
        int w = image.getWidth();
        int h = image.getHeight();

        int[] pixelInts = new int[w * h]; // one packed ARGB int per pixel (was w*h*3, which padded the quantizer input with zeros)
        image.getPixels(pixelInts, 0, w, 0, 0, w, h);
        pixels = getPixelBytes(pixelInts);
    }

    /**
     * Writes Graphic Control Extension
     */
    protected void writeGraphicCtrlExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xf9); // GCE label
        out.write(4); // data block size
        int transp, disp;
        if (transparent == null) {
            transp = 0;
            disp = 0; // dispose = no action
        } else {
            transp = 1;
            disp = 2; // force clear if using transparent color
        }
        if (dispose >= 0) {
            disp = dispose & 7; // user override
        }
        disp <<= 2;

        // packed fields
        out.write(0 | // 1:3 reserved
                disp | // 4:6 disposal
                0 | // 7   user input - 0 = none
                transp); // 8   transparency flag

        writeShort(delay); // delay x 1/100 sec
        out.write(transIndex); // transparent color index
        out.write(0); // block terminator
    }

    /**
     * Writes Image Descriptor
     */
    protected void writeImageDesc() throws IOException {
        out.write(0x2c); // image separator
        writeShort(0); // image position x,y = 0,0
        writeShort(0);
        writeShort(width); // image size
        writeShort(height);
        // packed fields
        if (firstFrame) {
            // no LCT  - GCT is used for first (or only) frame
            out.write(0);
        } else {
            // specify normal LCT
            out.write(0x80 | // 1 local color table  1=yes
                    0 | // 2 interlace - 0=no
                    0 | // 3 sorted - 0=no
                    0 | // 4-5 reserved
                    palSize); // 6-8 size of color table
        }
    }

    /**
     * Writes Logical Screen Descriptor
     */
    protected void writeLSD() throws IOException {
        // logical screen size
        writeShort(width);
        writeShort(height);
        // packed fields
        out.write((0x80 | // 1   : global color table flag = 1 (gct used)
                0x70 | // 2-4 : color resolution = 7
                0x00 | // 5   : gct sort flag = 0
                palSize)); // 6-8 : gct size

        out.write(0); // background color index
        out.write(0); // pixel aspect ratio - assume 1:1
    }

    /**
     * Writes Netscape application extension to define
     * repeat count.
     */
    protected void writeNetscapeExt() throws IOException {
        out.write(0x21); // extension introducer
        out.write(0xff); // app extension label
        out.write(11); // block size
        writeString("NETSCAPE" + "2.0"); // app id + auth code
        out.write(3); // sub-block size
        out.write(1); // loop sub-block id
        writeShort(repeat); // loop count (extra iterations, 0=repeat forever)
        out.write(0); // block terminator
    }

    /**
     * Writes color table
     */
    protected void writePalette() throws IOException {
        out.write(colorTab, 0, colorTab.length);
        int n = (3 * 256) - colorTab.length;
        for (int i = 0; i < n; i++) {
            out.write(0);
        }
    }

    /**
     * Encodes and writes pixel data
     */
    protected void writePixels() throws IOException {
        LZWEncoder encoder =
                new LZWEncoder(width, height, indexedPixels, colorDepth);
        encoder.encode(out);
    }

    /**
     *    Write 16-bit value to output stream, LSB first
     */
    protected void writeShort(int value) throws IOException {
        out.write(value & 0xff);
        out.write((value >> 8) & 0xff);
    }

    /**
     * Writes string to output stream
     */
    protected void writeString(String s) throws IOException {
        for (int i = 0; i < s.length(); i++) {
            out.write((byte) s.charAt(i));
        }
    }

}
public class LZWEncoder {
    private static final int EOF = -1;

    private static final int BITS = 12;

    private static final int HSIZE = 5003; // 80% occupancy

    private int imgW, imgH;
    private int initCodeSize;
    private int remaining;
    private int curPixel;

    private int n_bits; // number of bits/code
    private int maxbits = BITS; // user settable max # bits/code
    private int maxcode; // maximum code, given n_bits
    private int maxmaxcode = 1 << BITS; // should NEVER generate this code

    private int hsize = HSIZE; // for dynamic table sizing
    private int free_ent = 0; // first unused entry
    private int g_init_bits;
    private int ClearCode;
    private int EOFCode;

    private int cur_accum = 0;
    private int cur_bits = 0;
    private int a_count;

    private int[] htab = new int[HSIZE];
    private int[] codetab = new int[HSIZE];

    private int masks[] = {
            0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F, 0x007F,
            0x00FF, 0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF};

    private byte[] pixAry;
    private byte[] accum = new byte[256];
    private boolean clear_flg = false;

    LZWEncoder(int width, int height, byte[] pixels, int color_depth) {
        imgW = width;
        imgH = height;
        pixAry = pixels;
        initCodeSize = Math.max(2, color_depth);
    }

    // Add a character to the end of the current packet, and if it is 254
    // characters, flush the packet to disk.
    void char_out(byte c, OutputStream outs) throws IOException {
        accum[a_count++] = c;
        if (a_count >= 254)
            flush_char(outs);
    }

    // table clear for block compress
    void cl_block(OutputStream outs) throws IOException {
        cl_hash(hsize);
        free_ent = ClearCode + 2;
        clear_flg = true;
        output(ClearCode, outs);
    }

    // reset code table
    void cl_hash(int hsize) {
        for (int i = 0; i < hsize; ++i)
            htab[i] = -1;
    }

    void compress(int init_bits, OutputStream outs) throws IOException {
        int fcode;
        int i /* = 0 */;
        int c;
        int ent;
        int disp;
        int hsize_reg;
        int hshift;

        // Set up the globals: g_init_bits - initial number of bits
        g_init_bits = init_bits;

        // Set up the necessary values
        clear_flg = false;
        n_bits = g_init_bits;
        maxcode = MAXCODE(n_bits);

        ClearCode = 1 << (init_bits - 1);
        EOFCode = ClearCode + 1;
        free_ent = ClearCode + 2;

        a_count = 0; // clear packet

        ent = nextPixel();

        hshift = 0;
        for (fcode = hsize; fcode < 65536; fcode *= 2)
            ++hshift;
        hshift = 8 - hshift; // set hash code range bound

        hsize_reg = hsize;
        cl_hash(hsize_reg); // clear hash table

        output(ClearCode, outs);

        outer_loop:
        while ((c = nextPixel()) != EOF) {
            fcode = (c << maxbits) + ent;
            i = (c << hshift) ^ ent; // xor hashing

            if (htab[i] == fcode) {
                ent = codetab[i];
                continue;
            } else if (htab[i] >= 0) // non-empty slot
            {
                disp = hsize_reg - i; // secondary hash (after G. Knott)
                if (i == 0)
                    disp = 1;
                do {
                    if ((i -= disp) < 0)
                        i += hsize_reg;

                    if (htab[i] == fcode) {
                        ent = codetab[i];
                        continue outer_loop;
                    }
                } while (htab[i] >= 0);
            }
            output(ent, outs);
            ent = c;
            if (free_ent < maxmaxcode) {
                codetab[i] = free_ent++; // code -> hashtable
                htab[i] = fcode;
            } else
                cl_block(outs);
        }
        // Put out the final code.
        output(ent, outs);
        output(EOFCode, outs);
    }

    void encode(OutputStream os) throws IOException {
        os.write(initCodeSize); // write "initial code size" byte

        remaining = imgW * imgH; // reset navigation variables
        curPixel = 0;

        compress(initCodeSize + 1, os); // compress and write the pixel data

        os.write(0); // write block terminator
    }

    // Flush the packet to disk, and reset the accumulator
    void flush_char(OutputStream outs) throws IOException {
        if (a_count > 0) {
            outs.write(a_count);
            outs.write(accum, 0, a_count);
            a_count = 0;
        }
    }

    final int MAXCODE(int n_bits) {
        return (1 << n_bits) - 1;
    }

    private int nextPixel() {
        if (remaining == 0)
            return EOF;

        --remaining;

        byte pix = pixAry[curPixel++];

        return pix & 0xff;
    }

    void output(int code, OutputStream outs) throws IOException {
        cur_accum &= masks[cur_bits];

        if (cur_bits > 0)
            cur_accum |= (code << cur_bits);
        else
            cur_accum = code;

        cur_bits += n_bits;

        while (cur_bits >= 8) {
            char_out((byte) (cur_accum & 0xff), outs);
            cur_accum >>= 8;
            cur_bits -= 8;
        }

        // If the next entry is going to be too big for the code size,
        // then increase it, if possible.
        if (free_ent > maxcode || clear_flg) {
            if (clear_flg) {
                maxcode = MAXCODE(n_bits = g_init_bits);
                clear_flg = false;
            } else {
                ++n_bits;
                if (n_bits == maxbits)
                    maxcode = maxmaxcode;
                else
                    maxcode = MAXCODE(n_bits);
            }
        }

        if (code == EOFCode) {
            // At EOF, write the rest of the buffer.
            while (cur_bits > 0) {
                char_out((byte) (cur_accum & 0xff), outs);
                cur_accum >>= 8;
                cur_bits -= 8;
            }

            flush_char(outs);
        }
    }
}
public class NeuQuant {
    private static final int netsize = 256; /* number of colours used */

    /* four primes near 500 - assume no image has a length so large */
    /* that it is divisible by all four primes */
    private static final int prime1 = 499;
    private static final int prime2 = 491;
    private static final int prime3 = 487;
    private static final int prime4 = 503;

    private static final int minpicturebytes = (3 * prime4); /* minimum size for input image */

    private static final int maxnetpos = (netsize - 1);
    private static final int netbiasshift = 4; /* bias for colour values */
    private static final int ncycles = 100; /* no. of learning cycles */

    /* defs for freq and bias */
    private static final int intbiasshift = 16; /* bias for fractions */
    private static final int intbias = (1 << intbiasshift);
    private static final int gammashift = 10; /* gamma = 1024 */
    private static final int gamma = (1 << gammashift);
    private static final int betashift = 10;
    private static final int beta = (intbias >> betashift); /* beta = 1/1024 */
    private static final int betagamma = (intbias << (gammashift - betashift));

    /* defs for decreasing radius factor */
    private static final int initrad = (netsize >> 3); /* for 256 cols, radius starts */
    private static final int radiusbiasshift = 6; /* at 32.0 biased by 6 bits */
    private static final int radiusbias = (1 << radiusbiasshift);
    private static final int initradius = (initrad * radiusbias); /* and decreases by a */
    private static final int radiusdec = 30; /* factor of 1/30 each cycle */

    /* defs for decreasing alpha factor */
    private static final int alphabiasshift = 10; /* alpha starts at 1.0 */
    private static final int initalpha = (1 << alphabiasshift);

    private int alphadec; /* biased by 10 bits */

    /* radbias and alpharadbias used for radpower calculation */
    private static final int radbiasshift = 8;
    private static final int radbias = (1 << radbiasshift);
    private static final int alpharadbshift = (alphabiasshift + radbiasshift);
    private static final int alpharadbias = (1 << alpharadbshift);

    private byte[] thepicture; /* the input image itself */
    private int lengthcount; /* lengthcount = H*W*3 */

    private int samplefac; /* sampling factor 1..30 */

    //   typedef int pixel[4];                /* BGRc */
    private int[][] network; /* the network itself - [netsize][4] */

    private int[] netindex = new int[256]; /* for network lookup - really 256 */

    private int[] bias = new int[netsize]; /* bias and freq arrays for learning */
    private int[] freq = new int[netsize];
    private int[] radpower = new int[initrad]; /* radpower for precomputation */

    /* Initialise network in range (0,0,0) to (255,255,255) and set parameters
       ----------------------------------------------------------------------- */
    public NeuQuant(byte[] thepic, int len, int sample) {

        int i;
        int[] p;

        thepicture = thepic;
        lengthcount = len;
        samplefac = sample;

        network = new int[netsize][];
        for (i = 0; i < netsize; i++) {
            network[i] = new int[4];
            p = network[i];
            p[0] = p[1] = p[2] = (i << (netbiasshift + 8)) / netsize;
            freq[i] = intbias / netsize; /* 1/netsize */
            bias[i] = 0;
        }
    }

    public byte[] colorMap() {
        byte[] map = new byte[3 * netsize];
        int[] index = new int[netsize];
        for (int i = 0; i < netsize; i++)
            index[network[i][3]] = i;
        int k = 0;
        for (int i = 0; i < netsize; i++) {
            int j = index[i];
            map[k++] = (byte) (network[j][0]);
            map[k++] = (byte) (network[j][1]);
            map[k++] = (byte) (network[j][2]);
        }
        return map;
    }

    /* Insertion sort of network and building of netindex[0..255] (to do after unbias)
       ------------------------------------------------------------------------------- */
    public void inxbuild() {

        int i, j, smallpos, smallval;
        int[] p;
        int[] q;
        int previouscol, startpos;

        previouscol = 0;
        startpos = 0;
        for (i = 0; i < netsize; i++) {
            p = network[i];
            smallpos = i;
            smallval = p[1]; /* index on g */
            /* find smallest in i..netsize-1 */
            for (j = i + 1; j < netsize; j++) {
                q = network[j];
                if (q[1] < smallval) { /* index on g */
                    smallpos = j;
                    smallval = q[1]; /* index on g */
                }
            }
            q = network[smallpos];
            /* swap p (i) and q (smallpos) entries */
            if (i != smallpos) {
                j = q[0];
                q[0] = p[0];
                p[0] = j;
                j = q[1];
                q[1] = p[1];
                p[1] = j;
                j = q[2];
                q[2] = p[2];
                p[2] = j;
                j = q[3];
                q[3] = p[3];
                p[3] = j;
            }
            /* smallval entry is now in position i */
            if (smallval != previouscol) {
                netindex[previouscol] = (startpos + i) >> 1;
                for (j = previouscol + 1; j < smallval; j++)
                    netindex[j] = i;
                previouscol = smallval;
                startpos = i;
            }
        }
        netindex[previouscol] = (startpos + maxnetpos) >> 1;
        for (j = previouscol + 1; j < 256; j++)
            netindex[j] = maxnetpos; /* really 256 */
    }

    /* Main Learning Loop
       ------------------ */
    public void learn() {

        int i, j, b, g, r;
        int radius, rad, alpha, step, delta, samplepixels;
        byte[] p;
        int pix, lim;

        if (lengthcount < minpicturebytes)
            samplefac = 1;
        alphadec = 30 + ((samplefac - 1) / 3);
        p = thepicture;
        pix = 0;
        lim = lengthcount;
        samplepixels = lengthcount / (3 * samplefac);
        delta = samplepixels / ncycles;
        alpha = initalpha;
        radius = initradius;

        rad = radius >> radiusbiasshift;
        if (rad <= 1)
            rad = 0;
        for (i = 0; i < rad; i++)
            radpower[i] =
                    alpha * (((rad * rad - i * i) * radbias) / (rad * rad));

        //fprintf(stderr,"beginning 1D learning: initial radius=%d\n", rad);

        if (lengthcount < minpicturebytes)
            step = 3;
        else if ((lengthcount % prime1) != 0)
            step = 3 * prime1;
        else {
            if ((lengthcount % prime2) != 0)
                step = 3 * prime2;
            else {
                if ((lengthcount % prime3) != 0)
                    step = 3 * prime3;
                else
                    step = 3 * prime4;
            }
        }

        i = 0;
        while (i < samplepixels) {
            b = (p[pix + 0] & 0xff) << netbiasshift;
            g = (p[pix + 1] & 0xff) << netbiasshift;
            r = (p[pix + 2] & 0xff) << netbiasshift;
            j = contest(b, g, r);

            altersingle(alpha, j, b, g, r);
            if (rad != 0)
                alterneigh(rad, j, b, g, r); /* alter neighbours */

            pix += step;
            if (pix >= lim)
                pix -= lengthcount;

            i++;
            if (delta == 0)
                delta = 1;
            if (i % delta == 0) {
                alpha -= alpha / alphadec;
                radius -= radius / radiusdec;
                rad = radius >> radiusbiasshift;
                if (rad <= 1)
                    rad = 0;
                for (j = 0; j < rad; j++)
                    radpower[j] =
                            alpha * (((rad * rad - j * j) * radbias) / (rad * rad));
            }
        }
        //fprintf(stderr,"finished 1D learning: final alpha=%f !\n",((float)alpha)/initalpha);
    }

    /* Search for BGR values 0..255 (after net is unbiased) and return colour index
       ---------------------------------------------------------------------------- */
    public int map(int b, int g, int r) {

        int i, j, dist, a, bestd;
        int[] p;
        int best;

        bestd = 1000; /* biggest possible dist is 256*3 */
        best = -1;
        i = netindex[g]; /* index on g */
        j = i - 1; /* start at netindex[g] and work outwards */

        while ((i < netsize) || (j >= 0)) {
            if (i < netsize) {
                p = network[i];
                dist = p[1] - g; /* inx key */
                if (dist >= bestd)
                    i = netsize; /* stop iter */
                else {
                    i++;
                    if (dist < 0)
                        dist = -dist;
                    a = p[0] - b;
                    if (a < 0)
                        a = -a;
                    dist += a;
                    if (dist < bestd) {
                        a = p[2] - r;
                        if (a < 0)
                            a = -a;
                        dist += a;
                        if (dist < bestd) {
                            bestd = dist;
                            best = p[3];
                        }
                    }
                }
            }
            if (j >= 0) {
                p = network[j];
                dist = g - p[1]; /* inx key - reverse dif */
                if (dist >= bestd)
                    j = -1; /* stop iter */
                else {
                    j--;
                    if (dist < 0)
                        dist = -dist;
                    a = p[0] - b;
                    if (a < 0)
                        a = -a;
                    dist += a;
                    if (dist < bestd) {
                        a = p[2] - r;
                        if (a < 0)
                            a = -a;
                        dist += a;
                        if (dist < bestd) {
                            bestd = dist;
                            best = p[3];
                        }
                    }
                }
            }
        }
        return (best);
    }

    public byte[] process() {
        learn();
        unbiasnet();
        inxbuild();
        return colorMap();
    }

    /* Unbias network to give byte values 0..255 and record position i to prepare for sort
       ----------------------------------------------------------------------------------- */
    public void unbiasnet() {

        @SuppressWarnings("unused")
        int i, j;

        for (i = 0; i < netsize; i++) {
            network[i][0] >>= netbiasshift;
            network[i][1] >>= netbiasshift;
            network[i][2] >>= netbiasshift;
            network[i][3] = i; /* record colour no */
        }
    }

    /* Move adjacent neurons by precomputed alpha*(1-((i-j)^2/[r]^2)) in radpower[|i-j|]
       --------------------------------------------------------------------------------- */
    private void alterneigh(int rad, int i, int b, int g, int r) {

        int j, k, lo, hi, a, m;
        int[] p;

        lo = i - rad;
        if (lo < -1)
            lo = -1;
        hi = i + rad;
        if (hi > netsize)
            hi = netsize;

        j = i + 1;
        k = i - 1;
        m = 1;
        while ((j < hi) || (k > lo)) {
            a = radpower[m++];
            if (j < hi) {
                p = network[j++];
                try {
                    p[0] -= (a * (p[0] - b)) / alpharadbias;
                    p[1] -= (a * (p[1] - g)) / alpharadbias;
                    p[2] -= (a * (p[2] - r)) / alpharadbias;
                } catch (Exception e) {
                } // prevents 1.3 miscompilation
            }
            if (k > lo) {
                p = network[k--];
                try {
                    p[0] -= (a * (p[0] - b)) / alpharadbias;
                    p[1] -= (a * (p[1] - g)) / alpharadbias;
                    p[2] -= (a * (p[2] - r)) / alpharadbias;
                } catch (Exception e) {
                }
            }
        }
    }

    /* Move neuron i towards biased (b,g,r) by factor alpha
       ---------------------------------------------------- */
    private void altersingle(int alpha, int i, int b, int g, int r) {

        /* alter hit neuron */
        int[] n = network[i];
        n[0] -= (alpha * (n[0] - b)) / initalpha;
        n[1] -= (alpha * (n[1] - g)) / initalpha;
        n[2] -= (alpha * (n[2] - r)) / initalpha;
    }

    /* Search for biased BGR values
       ---------------------------- */
    private int contest(int b, int g, int r) {

        /* finds closest neuron (min dist) and updates freq */
        /* finds best neuron (min dist-bias) and returns position */
        /* for frequently chosen neurons, freq[i] is high and bias[i] is negative */
        /* bias[i] = gamma*((1/netsize)-freq[i]) */

        int i, dist, a, biasdist, betafreq;
        int bestpos, bestbiaspos, bestd, bestbiasd;
        int[] n;

        bestd = ~(1 << 31);
        bestbiasd = bestd;
        bestpos = -1;
        bestbiaspos = bestpos;

        for (i = 0; i < netsize; i++) {
            n = network[i];
            dist = n[0] - b;
            if (dist < 0)
                dist = -dist;
            a = n[1] - g;
            if (a < 0)
                a = -a;
            dist += a;
            a = n[2] - r;
            if (a < 0)
                a = -a;
            dist += a;
            if (dist < bestd) {
                bestd = dist;
                bestpos = i;
            }
            biasdist = dist - ((bias[i]) >> (intbiasshift - netbiasshift));
            if (biasdist < bestbiasd) {
                bestbiasd = biasdist;
                bestbiaspos = i;
            }
            betafreq = (freq[i] >> betashift);
            freq[i] -= betafreq;
            bias[i] += (betafreq << gammashift);
        }
        freq[bestpos] += beta;
        bias[bestpos] -= betagamma;
        return (bestbiaspos);
    }
}

Finally, if you run into any problems, feel free to ask in the comments; I'll share everything I know.
