Create animated GIF from MP4

This exercise shows how to retrieve image frames from an MP4 using MediaMetadataRetriever (refer to the last post), and how to create an animated GIF using android-gif-encoder, an animated GIF encoder for Android, with no native code required.
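
In outline: pick a handful of timestamps spread across the clip, grab a Bitmap at each with MediaMetadataRetriever.getFrameAtTime(), and feed the bitmaps to an AnimatedGifEncoder that writes into a ByteArrayOutputStream. Here is a minimal sketch of that flow, reusing the AnimatedGifEncoder class listed further below; 'retriever' and 'durationUs' stand for an already-prepared retriever and the clip length in microseconds, and error handling is omitted:

// Minimal sketch: sample 11 frames evenly across the clip and encode them as a GIF.
// 'retriever' is an already-prepared MediaMetadataRetriever,
// 'durationUs' is the clip length in microseconds.
byte[] encodeGif(MediaMetadataRetriever retriever, long durationUs) {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    AnimatedGifEncoder encoder = new AnimatedGifEncoder();
    encoder.setDelay(1000);                               // 1 second per GIF frame
    encoder.start(bos);                                   // writes the GIF89a header
    for (int i = 0; i <= 100; i += 10) {
        Bitmap frame = retriever.getFrameAtTime(durationUs * i / 100); // nearest sync frame
        if (frame != null) {
            encoder.addFrame(frame);
        }
    }
    encoder.finish();                                     // writes the GIF trailer
    return bos.toByteArray();
}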


test.GIF generated
MainActivity.java
package com.example.androidmediametadataretriever;

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.support.v7.app.ActionBarActivity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore.Images;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Toast;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;

public class MainActivity extends ActionBarActivity {

ImageView capturedImageView;
Button btnOpen, btnSave;
TextView textInfo, textMaxDur, textCurDur;
SeekBar timeFrameBar;

long maxDur;

MediaMetadataRetriever mediaMetadataRetriever = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);

btnOpen = (Button)findViewById(R.id.open);
btnSave = (Button)findViewById(R.id.save);
textInfo = (TextView)findViewById(R.id.info);
textMaxDur = (TextView)findViewById(R.id.maxdur);
textCurDur = (TextView)findViewById(R.id.curdur);
timeFrameBar = (SeekBar)findViewById(R.id.timeframe);

capturedImageView = (ImageView) findViewById(R.id.capturedimage);

mediaMetadataRetriever = new MediaMetadataRetriever();

btnOpen.setOnClickListener(new OnClickListener(){

@Override
public void onClick(View v) {

Intent intent = new Intent(Intent.ACTION_GET_CONTENT,
Images.Media.EXTERNAL_CONTENT_URI);
intent.setType("video/mp4");
startActivityForResult(intent, 0);
}});

btnSave.setOnClickListener(new OnClickListener(){

@Override
public void onClick(View v) {
if(mediaMetadataRetriever != null){
TaskSaveGIF myTaskSaveGIF = new TaskSaveGIF(timeFrameBar);
myTaskSaveGIF.execute();
}
}});

timeFrameBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener(){

@Override
public void onProgressChanged(SeekBar seekBar, int progress,
boolean fromUser) {}

@Override
public void onStartTrackingTouch(SeekBar seekBar) {}

@Override
public void onStopTrackingTouch(SeekBar seekBar) {
updateFrame();
}});
}

private void updateFrame(){
int frameProgress = timeFrameBar.getProgress();

long frameTime = maxDur * frameProgress/100;

textCurDur.setText(String.valueOf(frameTime) + " us");
Bitmap bmFrame = mediaMetadataRetriever.getFrameAtTime(frameTime);
capturedImageView.setImageBitmap(bmFrame);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);

if(resultCode == RESULT_OK){
Uri uri = data.getData();
textInfo.setText(uri.toString());

MediaMetadataRetriever tRetriever = new MediaMetadataRetriever();

try{
tRetriever.setDataSource(getBaseContext(), uri);

mediaMetadataRetriever = tRetriever;
//extract duration in milliseconds, as a String
String DURATION = mediaMetadataRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_DURATION);
textMaxDur.setText(DURATION + " ms");
//convert to microseconds (us), as long
maxDur = (long)(1000*Double.parseDouble(DURATION));

timeFrameBar.setProgress(0);
updateFrame();
}catch(RuntimeException e){
e.printStackTrace();
Toast.makeText(MainActivity.this,
"Something Wrong!",
Toast.LENGTH_LONG).show();
}

}
}

public class TaskSaveGIF extends AsyncTask<Void, Integer, String> {

SeekBar bar;

public TaskSaveGIF(SeekBar sb){
bar = sb;
Toast.makeText(MainActivity.this,
"Generate GIF animation",
Toast.LENGTH_LONG).show();
}

@Override
protected String doInBackground(Void... params) {
String extStorageDirectory = Environment.getExternalStorageDirectory().toString();
File outFile = new File(extStorageDirectory, "test.GIF");
try {
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(outFile));
bos.write(genGIF());
bos.flush();
bos.close();

return(outFile.getAbsolutePath() + " Saved");
} catch (FileNotFoundException e) {
e.printStackTrace();
return e.getMessage();
} catch (IOException e) {
e.printStackTrace();
return e.getMessage();
}
}

@Override
protected void onPostExecute(String result) {
Toast.makeText(MainActivity.this,
result,
Toast.LENGTH_LONG).show();
}

@Override
protected void onProgressUpdate(Integer... values) {
bar.setProgress(values[0]);
updateFrame();
}

private byte[] genGIF(){
ByteArrayOutputStream bos = new ByteArrayOutputStream();

AnimatedGifEncoder animatedGifEncoder = new AnimatedGifEncoder();
animatedGifEncoder.setDelay(1000);

Bitmap bmFrame;
animatedGifEncoder.start(bos);
for(int i=0; i<100; i+=10){
long frameTime = maxDur * i/100;
bmFrame = mediaMetadataRetriever.getFrameAtTime(frameTime);
animatedGifEncoder.addFrame(bmFrame);
publishProgress(i);
}

//last frame at the end
bmFrame = mediaMetadataRetriever.getFrameAtTime(maxDur);
animatedGifEncoder.addFrame(bmFrame);
publishProgress(100);

animatedGifEncoder.finish();
return bos.toByteArray();
}
}

}

AnimatedGifEncoder.java, copied from https://github.com/nbadal/android-gif-encoder/blob/master/GifEncoder.java
//source: https://github.com/nbadal/android-gif-encoder/blob/master/GifEncoder.java
package com.example.androidmediametadataretriever;

import java.io.IOException;
import java.io.OutputStream;

import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Paint;

public class AnimatedGifEncoder {

protected int width; // image size

protected int height;

protected int x = 0;

protected int y = 0;

protected int transparent = -1; // transparent color if given

protected int transIndex; // transparent index in color table

protected int repeat = -1; // no repeat

protected int delay = 0; // frame delay (hundredths)

protected boolean started = false; // ready to output frames

protected OutputStream out;

protected Bitmap image; // current frame

protected byte[] pixels; // BGR byte array from frame

protected byte[] indexedPixels; // converted frame indexed to palette

protected int colorDepth; // number of bit planes

protected byte[] colorTab; // RGB palette

protected boolean[] usedEntry = new boolean[256]; // active palette entries

protected int palSize = 7; // color table size (bits-1)

protected int dispose = -1; // disposal code (-1 = use default)

protected boolean closeStream = false; // close stream when finished

protected boolean firstFrame = true;

protected boolean sizeSet = false; // if false, get size from first frame

protected int sample = 10; // default sample interval for quantizer

/**
* Sets the delay time between each frame, or changes it for subsequent
* frames (applies to last frame added).
*
* @param ms
* int delay time in milliseconds
*/
public void setDelay(int ms) {
delay = ms / 10;
}

/**
* Sets the GIF frame disposal code for the last added frame and any
* subsequent frames. Default is 0 if no transparent color has been set,
* otherwise 2.
*
* @param code
* int disposal code.
*/
public void setDispose(int code) {
if (code >= 0) {
dispose = code;
}
}

/**
* Sets the number of times the set of GIF frames should be played. Default
* is 1; 0 means play indefinitely. Must be invoked before the first image
* is added.
*
* @param iter
* int number of iterations.
* @return
*/
public void setRepeat(int iter) {
if (iter >= 0) {
repeat = iter;
}
}

/**
* Sets the transparent color for the last added frame and any subsequent
* frames. Since all colors are subject to modification in the quantization
* process, the color in the final palette for each frame closest to the
* given color becomes the transparent color for that frame. May be set to
* null to indicate no transparent color.
*
* @param c
* Color to be treated as transparent on display.
*/
public void setTransparent(int c) {
transparent = c;
}

/**
* Adds next GIF frame. The frame is not written immediately, but is
* actually deferred until the next frame is received so that timing data
* can be inserted. Invoking <code>finish()</code> flushes all frames. If
* <code>setSize</code> was not invoked, the size of the first image is used
* for all subsequent frames.
*
* @param im
* BufferedImage containing frame to write.
* @return true if successful.
*/
public boolean addFrame(Bitmap im) {
if ((im == null) || !started) {
return false;
}
boolean ok = true;
try {
if (!sizeSet) {
// use first frame's size
setSize(im.getWidth(), im.getHeight());
}
image = im;
getImagePixels(); // convert to correct format if necessary
analyzePixels(); // build color table & map pixels
if (firstFrame) {
writeLSD(); // logical screen descriptor
writePalette(); // global color table
if (repeat >= 0) {
// use NS app extension to indicate reps
writeNetscapeExt();
}
}
writeGraphicCtrlExt(); // write graphic control extension
writeImageDesc(); // image descriptor
if (!firstFrame) {
writePalette(); // local color table
}
writePixels(); // encode and write pixel data
firstFrame = false;
} catch (IOException e) {
ok = false;
}

return ok;
}

/**
* Flushes any pending data and closes output file. If writing to an
* OutputStream, the stream is not closed.
*/
public boolean finish() {
if (!started)
return false;
boolean ok = true;
started = false;
try {
out.write(0x3b); // gif trailer
out.flush();
if (closeStream) {
out.close();
}
} catch (IOException e) {
ok = false;
}

// reset for subsequent use
transIndex = 0;
out = null;
image = null;
pixels = null;
indexedPixels = null;
colorTab = null;
closeStream = false;
firstFrame = true;

return ok;
}

/**
* Sets frame rate in frames per second. Equivalent to
* <code>setDelay(1000/fps)</code>.
*
* @param fps
* float frame rate (frames per second)
*/
public void setFrameRate(float fps) {
if (fps != 0f) {
delay = (int) (100 / fps);
}
}

/**
* Sets quality of color quantization (conversion of images to the maximum
* 256 colors allowed by the GIF specification). Lower values (minimum = 1)
* produce better colors, but slow processing significantly. 10 is the
* default, and produces good color mapping at reasonable speeds. Values
* greater than 20 do not yield significant improvements in speed.
*
* @param quality
* int greater than 0.
* @return
*/
public void setQuality(int quality) {
if (quality < 1)
quality = 1;
sample = quality;
}

/**
* Sets the GIF frame size. The default size is the size of the first frame
* added if this method is not invoked.
*
* @param w
* int frame width.
* @param h
* int frame height.
*/
public void setSize(int w, int h) {
width = w;
height = h;
if (width < 1)
width = 320;
if (height < 1)
height = 240;
sizeSet = true;
}

/**
* Sets the GIF frame position. The position is 0,0 by default. Useful for
* only updating a section of the image
*
* @param x
* int frame x position.
* @param y
* int frame y position.
*/
public void setPosition(int x, int y) {
this.x = x;
this.y = y;
}

/**
* Initiates GIF file creation on the given stream. The stream is not closed
* automatically.
*
* @param os
* OutputStream on which GIF images are written.
* @return false if initial write failed.
*/
public boolean start(OutputStream os) {
if (os == null)
return false;
boolean ok = true;
closeStream = false;
out = os;
try {
writeString("GIF89a"); // header
} catch (IOException e) {
ok = false;
}
return started = ok;
}

/**
* Analyzes image colors and creates color map.
*/
protected void analyzePixels() {
int len = pixels.length;
int nPix = len / 3;
indexedPixels = new byte[nPix];
NeuQuant nq = new NeuQuant(pixels, len, sample);
// initialize quantizer
colorTab = nq.process(); // create reduced palette
// convert map from BGR to RGB
for (int i = 0; i < colorTab.length; i += 3) {
byte temp = colorTab[i];
colorTab[i] = colorTab[i + 2];
colorTab[i + 2] = temp;
usedEntry[i / 3] = false;
}
// map image pixels to new palette
int k = 0;
for (int i = 0; i < nPix; i++) {
int index = nq.map(pixels[k++] & 0xff, pixels[k++] & 0xff,
pixels[k++] & 0xff);
usedEntry[index] = true;
indexedPixels[i] = (byte) index;
}
pixels = null;
colorDepth = 8;
palSize = 7;
// get closest match to transparent color if specified
if (transparent != -1) {
transIndex = findClosest(transparent);
}
}

/**
* Returns index of palette color closest to c
*
*/
protected int findClosest(int c) {
if (colorTab == null)
return -1;
int r = (c >> 16) & 0xff;
int g = (c >> 8) & 0xff;
int b = (c >> 0) & 0xff;
int minpos = 0;
int dmin = 256 * 256 * 256;
int len = colorTab.length;
for (int i = 0; i < len;) {
int dr = r - (colorTab[i++] & 0xff);
int dg = g - (colorTab[i++] & 0xff);
int db = b - (colorTab[i] & 0xff);
int d = dr * dr + dg * dg + db * db;
int index = i / 3;
if (usedEntry[index] && (d < dmin)) {
dmin = d;
minpos = index;
}
i++;
}
return minpos;
}

/**
* Extracts image pixels into byte array "pixels"
*/
protected void getImagePixels() {
int w = image.getWidth();
int h = image.getHeight();
if ((w != width) || (h != height)) {
// create new image with right size/format
Bitmap temp = Bitmap.createBitmap(width, height, Config.RGB_565);
Canvas g = new Canvas(temp);
g.drawBitmap(image, 0, 0, new Paint());
image = temp;
}
int[] data = getImageData(image);
pixels = new byte[data.length * 3];
for (int i = 0; i < data.length; i++) {
int td = data[i];
int tind = i * 3;
pixels[tind++] = (byte) ((td >> 0) & 0xFF);
pixels[tind++] = (byte) ((td >> 8) & 0xFF);
pixels[tind] = (byte) ((td >> 16) & 0xFF);
}
}

protected int[] getImageData(Bitmap img) {
int w = img.getWidth();
int h = img.getHeight();

int[] data = new int[w * h];
img.getPixels(data, 0, w, 0, 0, w, h);
return data;
}

/**
* Writes Graphic Control Extension
*/
protected void writeGraphicCtrlExt() throws IOException {
out.write(0x21); // extension introducer
out.write(0xf9); // GCE label
out.write(4); // data block size
int transp, disp;
if (transparent == -1) {
transp = 0;
disp = 0; // dispose = no action
} else {
transp = 1;
disp = 2; // force clear if using transparent color
}
if (dispose >= 0) {
disp = dispose & 7; // user override
}
disp <<= 2;

// packed fields
out.write(0 | // 1:3 reserved
disp | // 4:6 disposal
0 | // 7 user input - 0 = none
transp); // 8 transparency flag

writeShort(delay); // delay x 1/100 sec
out.write(transIndex); // transparent color index
out.write(0); // block terminator
}

/**
* Writes Image Descriptor
*/
protected void writeImageDesc() throws IOException {
out.write(0x2c); // image separator
writeShort(x); // image position x,y = 0,0
writeShort(y);
writeShort(width); // image size
writeShort(height);
// packed fields
if (firstFrame) {
// no LCT - GCT is used for first (or only) frame
out.write(0);
} else {
// specify normal LCT
out.write(0x80 | // 1 local color table 1=yes
0 | // 2 interlace - 0=no
0 | // 3 sorted - 0=no
0 | // 4-5 reserved
palSize); // 6-8 size of color table
}
}

/**
* Writes Logical Screen Descriptor
*/
protected void writeLSD() throws IOException {
// logical screen size
writeShort(width);
writeShort(height);
// packed fields
out.write((0x80 | // 1 : global color table flag = 1 (gct used)
0x70 | // 2-4 : color resolution = 7
0x00 | // 5 : gct sort flag = 0
palSize)); // 6-8 : gct size

out.write(0); // background color index
out.write(0); // pixel aspect ratio - assume 1:1
}

/**
* Writes Netscape application extension to define repeat count.
*/
protected void writeNetscapeExt() throws IOException {
out.write(0x21); // extension introducer
out.write(0xff); // app extension label
out.write(11); // block size
writeString("NETSCAPE" + "2.0"); // app id + auth code
out.write(3); // sub-block size
out.write(1); // loop sub-block id
writeShort(repeat); // loop count (extra iterations, 0=repeat forever)
out.write(0); // block terminator
}

/**
* Writes color table
*/
protected void writePalette() throws IOException {
out.write(colorTab, 0, colorTab.length);
int n = (3 * 256) - colorTab.length;
for (int i = 0; i < n; i++) {
out.write(0);
}
}

/**
* Encodes and writes pixel data
*/
protected void writePixels() throws IOException {
LZWEncoder encoder = new LZWEncoder(width, height, indexedPixels,
colorDepth);
encoder.encode(out);
}

/**
* Write 16-bit value to output stream, LSB first
*/
protected void writeShort(int value) throws IOException {
out.write(value & 0xff);
out.write((value >> 8) & 0xff);
}

/**
* Writes string to output stream
*/
protected void writeString(String s) throws IOException {
for (int i = 0; i < s.length(); i++) {
out.write((byte) s.charAt(i));
}
}
}

/*
* NeuQuant Neural-Net Quantization Algorithm
* ------------------------------------------
*
* Copyright (c) 1994 Anthony Dekker
*
* NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994. See
* "Kohonen neural networks for optimal colour quantization" in "Network:
* Computation in Neural Systems" Vol. 5 (1994) pp 351-367. for a discussion of
* the algorithm.
*
* Any party obtaining a copy of these files from the author, directly or
* indirectly, is granted, free of charge, a full and unrestricted irrevocable,
* world-wide, paid up, royalty-free, nonexclusive right and license to deal in
* this software and documentation files (the "Software"), including without
* limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons who
* receive copies from any such party to do so, with the only requirement being
* that this copyright notice remain intact.
*/

// Ported to Java 12/00 K Weiner
class NeuQuant {

protected static final int netsize = 256; /* number of colours used */

/* four primes near 500 - assume no image has a length so large */
/* that it is divisible by all four primes */
protected static final int prime1 = 499;

protected static final int prime2 = 491;

protected static final int prime3 = 487;

protected static final int prime4 = 503;

protected static final int minpicturebytes = (3 * prime4);

/* minimum size for input image */

/*
* Program Skeleton ---------------- [select samplefac in range 1..30] [read
* image from input file] pic = (unsigned char*) malloc(3*width*height);
* initnet(pic,3*width*height,samplefac); learn(); unbiasnet(); [write
* output image header, using writecolourmap(f)] inxbuild(); write output
* image using inxsearch(b,g,r)
*/

/*
* Network Definitions -------------------
*/

protected static final int maxnetpos = (netsize - 1);

protected static final int netbiasshift = 4; /* bias for colour values */

protected static final int ncycles = 100; /* no. of learning cycles */

/* defs for freq and bias */
protected static final int intbiasshift = 16; /* bias for fractions */

protected static final int intbias = (((int) 1) << intbiasshift);

protected static final int gammashift = 10; /* gamma = 1024 */

protected static final int gamma = (((int) 1) << gammashift);

protected static final int betashift = 10;

protected static final int beta = (intbias >> betashift); /* beta = 1/1024 */

protected static final int betagamma = (intbias << (gammashift - betashift));

/* defs for decreasing radius factor */
protected static final int initrad = (netsize >> 3); /*
* for 256 cols, radius
* starts
*/

protected static final int radiusbiasshift = 6; /* at 32.0 biased by 6 bits */

protected static final int radiusbias = (((int) 1) << radiusbiasshift);

protected static final int initradius = (initrad * radiusbias); /*
* and
* decreases
* by a
*/

protected static final int radiusdec = 30; /* factor of 1/30 each cycle */

/* defs for decreasing alpha factor */
protected static final int alphabiasshift = 10; /* alpha starts at 1.0 */

protected static final int initalpha = (((int) 1) << alphabiasshift);

protected int alphadec; /* biased by 10 bits */

/* radbias and alpharadbias used for radpower calculation */
protected static final int radbiasshift = 8;

protected static final int radbias = (((int) 1) << radbiasshift);

protected static final int alpharadbshift = (alphabiasshift + radbiasshift);

protected static final int alpharadbias = (((int) 1) << alpharadbshift);

/*
* Types and Global Variables --------------------------
*/

protected byte[] thepicture; /* the input image itself */

protected int lengthcount; /* lengthcount = H*W*3 */

protected int samplefac; /* sampling factor 1..30 */

// typedef int pixel[4]; /* BGRc */
protected int[][] network; /* the network itself - [netsize][4] */

protected int[] netindex = new int[256];

/* for network lookup - really 256 */

protected int[] bias = new int[netsize];

/* bias and freq arrays for learning */
protected int[] freq = new int[netsize];

protected int[] radpower = new int[initrad];

/* radpower for precomputation */

/*
* Initialise network in range (0,0,0) to (255,255,255) and set parameters
* -----------------------------------------------------------------------
*/
public NeuQuant(byte[] thepic, int len, int sample) {

int i;
int[] p;

thepicture = thepic;
lengthcount = len;
samplefac = sample;

network = new int[netsize][];
for (i = 0; i < netsize; i++) {
network[i] = new int[4];
p = network[i];
p[0] = p[1] = p[2] = (i << (netbiasshift + 8)) / netsize;
freq[i] = intbias / netsize; /* 1/netsize */
bias[i] = 0;
}
}

public byte[] colorMap() {
byte[] map = new byte[3 * netsize];
int[] index = new int[netsize];
for (int i = 0; i < netsize; i++)
index[network[i][3]] = i;
int k = 0;
for (int i = 0; i < netsize; i++) {
int j = index[i];
map[k++] = (byte) (network[j][0]);
map[k++] = (byte) (network[j][1]);
map[k++] = (byte) (network[j][2]);
}
return map;
}

/*
* Insertion sort of network and building of netindex[0..255] (to do after
* unbias)
* ------------------------------------------------------------------
* -------------
*/
public void inxbuild() {

int i, j, smallpos, smallval;
int[] p;
int[] q;
int previouscol, startpos;

previouscol = 0;
startpos = 0;
for (i = 0; i < netsize; i++) {
p = network[i];
smallpos = i;
smallval = p[1]; /* index on g */
/* find smallest in i..netsize-1 */
for (j = i + 1; j < netsize; j++) {
q = network[j];
if (q[1] < smallval) { /* index on g */
smallpos = j;
smallval = q[1]; /* index on g */
}
}
q = network[smallpos];
/* swap p (i) and q (smallpos) entries */
if (i != smallpos) {
j = q[0];
q[0] = p[0];
p[0] = j;
j = q[1];
q[1] = p[1];
p[1] = j;
j = q[2];
q[2] = p[2];
p[2] = j;
j = q[3];
q[3] = p[3];
p[3] = j;
}
/* smallval entry is now in position i */
if (smallval != previouscol) {
netindex[previouscol] = (startpos + i) >> 1;
for (j = previouscol + 1; j < smallval; j++)
netindex[j] = i;
previouscol = smallval;
startpos = i;
}
}
netindex[previouscol] = (startpos + maxnetpos) >> 1;
for (j = previouscol + 1; j < 256; j++)
netindex[j] = maxnetpos; /* really 256 */
}

/*
* Main Learning Loop ------------------
*/
public void learn() {

int i, j, b, g, r;
int radius, rad, alpha, step, delta, samplepixels;
byte[] p;
int pix, lim;

if (lengthcount < minpicturebytes)
samplefac = 1;
alphadec = 30 + ((samplefac - 1) / 3);
p = thepicture;
pix = 0;
lim = lengthcount;
samplepixels = lengthcount / (3 * samplefac);
delta = samplepixels / ncycles;
alpha = initalpha;
radius = initradius;

rad = radius >> radiusbiasshift;
if (rad <= 1)
rad = 0;
for (i = 0; i < rad; i++)
radpower[i] = alpha
* (((rad * rad - i * i) * radbias) / (rad * rad));

// fprintf(stderr,"beginning 1D learning: initial radius=%d\n", rad);

if (lengthcount < minpicturebytes)
step = 3;
else if ((lengthcount % prime1) != 0)
step = 3 * prime1;
else {
if ((lengthcount % prime2) != 0)
step = 3 * prime2;
else {
if ((lengthcount % prime3) != 0)
step = 3 * prime3;
else
step = 3 * prime4;
}
}

i = 0;
while (i < samplepixels) {
b = (p[pix + 0] & 0xff) << netbiasshift;
g = (p[pix + 1] & 0xff) << netbiasshift;
r = (p[pix + 2] & 0xff) << netbiasshift;
j = contest(b, g, r);

altersingle(alpha, j, b, g, r);
if (rad != 0)
alterneigh(rad, j, b, g, r); /* alter neighbours */

pix += step;
if (pix >= lim)
pix -= lengthcount;

i++;
if (delta == 0)
delta = 1;
if (i % delta == 0) {
alpha -= alpha / alphadec;
radius -= radius / radiusdec;
rad = radius >> radiusbiasshift;
if (rad <= 1)
rad = 0;
for (j = 0; j < rad; j++)
radpower[j] = alpha
* (((rad * rad - j * j) * radbias) / (rad * rad));
}
}
// fprintf(stderr,"finished 1D learning: final alpha=%f
// !\n",((float)alpha)/initalpha);
}

/*
* Search for BGR values 0..255 (after net is unbiased) and return colour
* index
* --------------------------------------------------------------------
* --------
*/
public int map(int b, int g, int r) {

int i, j, dist, a, bestd;
int[] p;
int best;

bestd = 1000; /* biggest possible dist is 256*3 */
best = -1;
i = netindex[g]; /* index on g */
j = i - 1; /* start at netindex[g] and work outwards */

while ((i < netsize) || (j >= 0)) {
if (i < netsize) {
p = network[i];
dist = p[1] - g; /* inx key */
if (dist >= bestd)
i = netsize; /* stop iter */
else {
i++;
if (dist < 0)
dist = -dist;
a = p[0] - b;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
a = p[2] - r;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
bestd = dist;
best = p[3];
}
}
}
}
if (j >= 0) {
p = network[j];
dist = g - p[1]; /* inx key - reverse dif */
if (dist >= bestd)
j = -1; /* stop iter */
else {
j--;
if (dist < 0)
dist = -dist;
a = p[0] - b;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
a = p[2] - r;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
bestd = dist;
best = p[3];
}
}
}
}
}
return (best);
}

public byte[] process() {
learn();
unbiasnet();
inxbuild();
return colorMap();
}

/*
* Unbias network to give byte values 0..255 and record position i to
* prepare for sort
* ----------------------------------------------------------
* -------------------------
*/
public void unbiasnet() {

int i;

for (i = 0; i < netsize; i++) {
network[i][0] >>= netbiasshift;
network[i][1] >>= netbiasshift;
network[i][2] >>= netbiasshift;
network[i][3] = i; /* record colour no */
}
}

/*
* Move adjacent neurons by precomputed alpha*(1-((i-j)^2/[r]^2)) in
* radpower[|i-j|]
* ----------------------------------------------------------
* -----------------------
*/
protected void alterneigh(int rad, int i, int b, int g, int r) {

int j, k, lo, hi, a, m;
int[] p;

lo = i - rad;
if (lo < -1)
lo = -1;
hi = i + rad;
if (hi > netsize)
hi = netsize;

j = i + 1;
k = i - 1;
m = 1;
while ((j < hi) || (k > lo)) {
a = radpower[m++];
if (j < hi) {
p = network[j++];
try {
p[0] -= (a * (p[0] - b)) / alpharadbias;
p[1] -= (a * (p[1] - g)) / alpharadbias;
p[2] -= (a * (p[2] - r)) / alpharadbias;
} catch (Exception e) {
} // prevents 1.3 miscompilation
}
if (k > lo) {
p = network[k--];
try {
p[0] -= (a * (p[0] - b)) / alpharadbias;
p[1] -= (a * (p[1] - g)) / alpharadbias;
p[2] -= (a * (p[2] - r)) / alpharadbias;
} catch (Exception e) {
}
}
}
}

/*
* Move neuron i towards biased (b,g,r) by factor alpha
* ----------------------------------------------------
*/
protected void altersingle(int alpha, int i, int b, int g, int r) {

/* alter hit neuron */
int[] n = network[i];
n[0] -= (alpha * (n[0] - b)) / initalpha;
n[1] -= (alpha * (n[1] - g)) / initalpha;
n[2] -= (alpha * (n[2] - r)) / initalpha;
}

/*
* Search for biased BGR values ----------------------------
*/
protected int contest(int b, int g, int r) {

/* finds closest neuron (min dist) and updates freq */
/* finds best neuron (min dist-bias) and returns position */
/*
* for frequently chosen neurons, freq[i] is high and bias[i] is
* negative
*/
/* bias[i] = gamma*((1/netsize)-freq[i]) */

int i, dist, a, biasdist, betafreq;
int bestpos, bestbiaspos, bestd, bestbiasd;
int[] n;

bestd = ~(((int) 1) << 31);
bestbiasd = bestd;
bestpos = -1;
bestbiaspos = bestpos;

for (i = 0; i < netsize; i++) {
n = network[i];
dist = n[0] - b;
if (dist < 0)
dist = -dist;
a = n[1] - g;
if (a < 0)
a = -a;
dist += a;
a = n[2] - r;
if (a < 0)
a = -a;
dist += a;
if (dist < bestd) {
bestd = dist;
bestpos = i;
}
biasdist = dist - ((bias[i]) >> (intbiasshift - netbiasshift));
if (biasdist < bestbiasd) {
bestbiasd = biasdist;
bestbiaspos = i;
}
betafreq = (freq[i] >> betashift);
freq[i] -= betafreq;
bias[i] += (betafreq << gammashift);
}
freq[bestpos] += beta;
bias[bestpos] -= betagamma;
return (bestbiaspos);
}
}

// ==============================================================================
// Adapted from Jef Poskanzer's Java port by way of J. M. G. Elliott.
// K Weiner 12/00

class LZWEncoder {

private static final int EOF = -1;

private int imgW, imgH;

private byte[] pixAry;

private int initCodeSize;

private int remaining;

private int curPixel;

// GIFCOMPR.C - GIF Image compression routines
//
// Lempel-Ziv compression based on 'compress'. GIF modifications by
// David Rowley (mgardi@watdcsu.waterloo.edu)

// General DEFINEs

static final int BITS = 12;

static final int HSIZE = 5003; // 80% occupancy

// GIF Image compression - modified 'compress'
//
// Based on: compress.c - File compression ala IEEE Computer, June 1984.
//
// By Authors: Spencer W. Thomas (decvax!harpo!utah-cs!utah-gr!thomas)
// Jim McKie (decvax!mcvax!jim)
// Steve Davies (decvax!vax135!petsd!peora!srd)
// Ken Turkowski (decvax!decwrl!turtlevax!ken)
// James A. Woods (decvax!ihnp4!ames!jaw)
// Joe Orost (decvax!vax135!petsd!joe)

int n_bits; // number of bits/code

int maxbits = BITS; // user settable max # bits/code

int maxcode; // maximum code, given n_bits

int maxmaxcode = 1 << BITS; // should NEVER generate this code

int[] htab = new int[HSIZE];

int[] codetab = new int[HSIZE];

int hsize = HSIZE; // for dynamic table sizing

int free_ent = 0; // first unused entry

// block compression parameters -- after all codes are used up,
// and compression rate changes, start over.
boolean clear_flg = false;

// Algorithm: use open addressing double hashing (no chaining) on the
// prefix code / next character combination. We do a variant of Knuth's
// algorithm D (vol. 3, sec. 6.4) along with G. Knott's relatively-prime
// secondary probe. Here, the modular division first probe gives way
// to a faster exclusive-or manipulation. Also do block compression with
// an adaptive reset, whereby the code table is cleared when the compression
// ratio decreases, but after the table fills. The variable-length output
// codes are re-sized at this point, and a special CLEAR code is generated
// for the decompressor. Late addition: construct the table according to
// file size for noticeable speed improvement on small files. Please direct
// questions about this implementation to ames!jaw.

int g_init_bits;

int ClearCode;

int EOFCode;

// output
//
// Output the given code.
// Inputs:
// code: A n_bits-bit integer. If == -1, then EOF. This assumes
// that n_bits =< wordsize - 1.
// Outputs:
// Outputs code to the file.
// Assumptions:
// Chars are 8 bits long.
// Algorithm:
// Maintain a BITS character long buffer (so that 8 codes will
// fit in it exactly). Use the VAX insv instruction to insert each
// code in turn. When the buffer fills up empty it and start over.

int cur_accum = 0;

int cur_bits = 0;

int masks[] = { 0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F, 0x003F,
0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF, 0x0FFF, 0x1FFF, 0x3FFF,
0x7FFF, 0xFFFF };

// Number of characters so far in this 'packet'
int a_count;

// Define the storage for the packet accumulator
byte[] accum = new byte[256];

// ----------------------------------------------------------------------------
LZWEncoder(int width, int height, byte[] pixels, int color_depth) {
imgW = width;
imgH = height;
pixAry = pixels;
initCodeSize = Math.max(2, color_depth);
}

// Add a character to the end of the current packet, and if it is 254
// characters, flush the packet to disk.
void char_out(byte c, OutputStream outs) throws IOException {
accum[a_count++] = c;
if (a_count >= 254)
flush_char(outs);
}

// Clear out the hash table

// table clear for block compress
void cl_block(OutputStream outs) throws IOException {
cl_hash(hsize);
free_ent = ClearCode + 2;
clear_flg = true;

output(ClearCode, outs);
}

// reset code table
void cl_hash(int hsize) {
for (int i = 0; i < hsize; ++i)
htab[i] = -1;
}

void compress(int init_bits, OutputStream outs) throws IOException {
int fcode;
int i /* = 0 */;
int c;
int ent;
int disp;
int hsize_reg;
int hshift;

// Set up the globals: g_init_bits - initial number of bits
g_init_bits = init_bits;

// Set up the necessary values
clear_flg = false;
n_bits = g_init_bits;
maxcode = MAXCODE(n_bits);

ClearCode = 1 << (init_bits - 1);
EOFCode = ClearCode + 1;
free_ent = ClearCode + 2;

a_count = 0; // clear packet

ent = nextPixel();

hshift = 0;
for (fcode = hsize; fcode < 65536; fcode *= 2)
++hshift;
hshift = 8 - hshift; // set hash code range bound

hsize_reg = hsize;
cl_hash(hsize_reg); // clear hash table

output(ClearCode, outs);

outer_loop: while ((c = nextPixel()) != EOF) {
fcode = (c << maxbits) + ent;
i = (c << hshift) ^ ent; // xor hashing

if (htab[i] == fcode) {
ent = codetab[i];
continue;
} else if (htab[i] >= 0) // non-empty slot
{
disp = hsize_reg - i; // secondary hash (after G. Knott)
if (i == 0)
disp = 1;
do {
if ((i -= disp) < 0)
i += hsize_reg;

if (htab[i] == fcode) {
ent = codetab[i];
continue outer_loop;
}
} while (htab[i] >= 0);
}
output(ent, outs);
ent = c;
if (free_ent < maxmaxcode) {
codetab[i] = free_ent++; // code -> hashtable
htab[i] = fcode;
} else
cl_block(outs);
}
// Put out the final code.
output(ent, outs);
output(EOFCode, outs);
}

// ----------------------------------------------------------------------------
void encode(OutputStream os) throws IOException {
os.write(initCodeSize); // write "initial code size" byte

remaining = imgW * imgH; // reset navigation variables
curPixel = 0;

compress(initCodeSize + 1, os); // compress and write the pixel data

os.write(0); // write block terminator
}

// Flush the packet to disk, and reset the accumulator
void flush_char(OutputStream outs) throws IOException {
if (a_count > 0) {
outs.write(a_count);
outs.write(accum, 0, a_count);
a_count = 0;
}
}

final int MAXCODE(int n_bits) {
return (1 << n_bits) - 1;
}

// ----------------------------------------------------------------------------
// Return the next pixel from the image
// ----------------------------------------------------------------------------
private int nextPixel() {
if (remaining == 0)
return EOF;

--remaining;

byte pix = pixAry[curPixel++];

return pix & 0xff;
}

void output(int code, OutputStream outs) throws IOException {
cur_accum &= masks[cur_bits];

if (cur_bits > 0)
cur_accum |= (code << cur_bits);
else
cur_accum = code;

cur_bits += n_bits;

while (cur_bits >= 8) {
char_out((byte) (cur_accum & 0xff), outs);
cur_accum >>= 8;
cur_bits -= 8;
}

// If the next entry is going to be too big for the code size,
// then increase it, if possible.
if (free_ent > maxcode || clear_flg) {
if (clear_flg) {
maxcode = MAXCODE(n_bits = g_init_bits);
clear_flg = false;
} else {
++n_bits;
if (n_bits == maxbits)
maxcode = maxmaxcode;
else
maxcode = MAXCODE(n_bits);
}
}

if (code == EOFCode) {
// At EOF, write the rest of the buffer.
while (cur_bits > 0) {
char_out((byte) (cur_accum & 0xff), outs);
cur_accum >>= 8;
cur_bits -= 8;
}

flush_char(outs);
}
}
}
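
For reference, the encoder's public API as used by genGIF() above is small. Below is a hedged usage sketch; bitmap1 and bitmap2 stand for any android.graphics.Bitmap frames, and setRepeat() and setQuality() are optional calls that the activity above does not use:

// Hedged usage sketch of the AnimatedGifEncoder API listed above.
ByteArrayOutputStream bos = new ByteArrayOutputStream();
AnimatedGifEncoder encoder = new AnimatedGifEncoder();
encoder.start(bos);        // writes the GIF89a header
encoder.setDelay(500);     // 500 ms per frame
encoder.setRepeat(0);      // 0 = loop forever; must be set before the first frame is added
encoder.setQuality(10);    // quantizer sample interval; lower = better colors, slower
encoder.addFrame(bitmap1); // each frame is an android.graphics.Bitmap
encoder.addFrame(bitmap2);
encoder.finish();          // writes the trailer; bos now holds a complete GIF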

activity_main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context="com.example.androidmediametadataretriever.MainActivity" >

<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:autoLink="web"
android:text="http://android-er.blogspot.com/"
android:textStyle="bold" />

<Button
android:id="@+id/open"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Open"/>
<TextView
android:id="@+id/info"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/maxdur"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/curdur"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<SeekBar
android:id="@+id/timeframe"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:max="100"
android:progress="0"/>
<Button
android:id="@+id/save"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Save GIF"/>
<ImageView
android:id="@+id/capturedimage"
android:layout_width="match_parent"
android:layout_height="match_parent" />

</LinearLayout>

Permission of "android.permission.READ_EXTERNAL_STORAGE" and "android.permission.WRITE_EXTERNAL_STORAGE" are needed in AndroidManifest.xml, minSdkVersion have to be 10 or higher.

Download the files.

Use MediaMetadataRetriever to retrieve frame from mp4 interactively

The MediaMetadataRetriever class, added in API Level 10, provides a unified interface for retrieving frames and metadata from an input media file. This example shows how to retrieve a frame from an MP4 file interactively.
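
The pattern is: bind the retriever to a content Uri, read METADATA_KEY_DURATION (milliseconds, returned as a String), and ask for frames at offsets given in microseconds. A minimal sketch of those calls, where context and videoUri stand for the activity context and the Uri returned by the picker (error handling omitted):

// Minimal sketch of the MediaMetadataRetriever calls used in the activity below.
Bitmap middleFrame(Context context, Uri videoUri) {
    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    retriever.setDataSource(context, videoUri);            // bind to the selected mp4

    String durationMs = retriever.extractMetadata(
            MediaMetadataRetriever.METADATA_KEY_DURATION); // duration in ms, as a String
    long durationUs = (long) (1000 * Double.parseDouble(durationMs));

    // getFrameAtTime() takes an offset in microseconds and returns a frame close to it.
    return retriever.getFrameAtTime(durationUs / 2);
}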


MainActivity.java
package com.example.androidmediametadataretriever;

import android.support.v7.app.ActionBarActivity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Images;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Toast;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;

public class MainActivity extends ActionBarActivity {

ImageView capturedImageView;
Button btnOpen;
TextView textInfo, textMaxDur, textCurDur;
SeekBar timeFrameBar;

long maxDur;

MediaMetadataRetriever mediaMetadataRetriever = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);

btnOpen = (Button)findViewById(R.id.open);
textInfo = (TextView)findViewById(R.id.info);
textMaxDur = (TextView)findViewById(R.id.maxdur);
textCurDur = (TextView)findViewById(R.id.curdur);
timeFrameBar = (SeekBar)findViewById(R.id.timeframe);

capturedImageView = (ImageView) findViewById(R.id.capturedimage);

mediaMetadataRetriever = new MediaMetadataRetriever();

btnOpen.setOnClickListener(new OnClickListener(){

@Override
public void onClick(View v) {

Intent intent = new Intent(Intent.ACTION_GET_CONTENT,
Images.Media.EXTERNAL_CONTENT_URI);
intent.setType("video/mp4");
startActivityForResult(intent, 0);
}});

timeFrameBar.setOnSeekBarChangeListener(new OnSeekBarChangeListener(){

@Override
public void onProgressChanged(SeekBar seekBar, int progress,
boolean fromUser) {}

@Override
public void onStartTrackingTouch(SeekBar seekBar) {}

@Override
public void onStopTrackingTouch(SeekBar seekBar) {
updateFrame();
}});
}

private void updateFrame(){
int frameProgress = timeFrameBar.getProgress();

long frameTime = maxDur * frameProgress/100;

textCurDur.setText(String.valueOf(frameTime) + " us");
Bitmap bmFrame = mediaMetadataRetriever.getFrameAtTime(frameTime);
capturedImageView.setImageBitmap(bmFrame);
}


@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);

if(resultCode == RESULT_OK){
Uri uri = data.getData();
textInfo.setText(uri.toString());

MediaMetadataRetriever tRetriever = new MediaMetadataRetriever();

try{
tRetriever.setDataSource(getBaseContext(), uri);

mediaMetadataRetriever = tRetriever;
//extract duration in milliseconds, as a String
String DURATION = mediaMetadataRetriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_DURATION);
textMaxDur.setText(DURATION + " ms");
//convert to microseconds (us), as long
maxDur = (long)(1000*Double.parseDouble(DURATION));

timeFrameBar.setProgress(0);
updateFrame();
}catch(RuntimeException e){
e.printStackTrace();
Toast.makeText(MainActivity.this,
"Something Wrong!",
Toast.LENGTH_LONG).show();
}

}
}

}

activity_main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context="com.example.androidmediametadataretriever.MainActivity" >

<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:autoLink="web"
android:text="http://android-er.blogspot.com/"
android:textStyle="bold" />

<Button
android:id="@+id/open"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Open"/>
<TextView
android:id="@+id/info"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/maxdur"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/curdur"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<SeekBar
android:id="@+id/timeframe"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:max="100"
android:progress="0"/>

<ImageView
android:id="@+id/capturedimage"
android:layout_width="match_parent"
android:layout_height="match_parent" />

</LinearLayout>

Permission of "android.permission.READ_EXTERNAL_STORAGE" is needed in AndroidManifest.xml. And android:minSdkVersion have to be 10 or higher.


Download the files.

Next:
Create animated GIF from MP4

MediaMetadataRetriever: IllegalArgumentException thrown from setDataSource()

Here is a follow-up to my old post "Get image frame in video using MediaMetadataRetriever", again related to the "android.permission.READ_EXTERNAL_STORAGE" permission.


It's an old exercise that retrieves an image frame from an MP4 video. At the time I wrote the post, it worked as expected without the "android.permission.READ_EXTERNAL_STORAGE" permission. But now you have to add it in AndroidManifest.xml; otherwise, the call mediaMetadataRetriever.setDataSource(uri) will throw java.lang.IllegalArgumentException because the path cannot be opened and is treated as invalid.
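
If you prefer to surface the failure instead of crashing, the call can be guarded. A hedged sketch, not from the original post, that only logs the problem:

// Hedged sketch, not in the original post: catch the IllegalArgumentException
// that setDataSource() throws when the file cannot be opened.
void showFrameAt5s(String path, ImageView target) {
    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    try {
        retriever.setDataSource(path);                     // throws if the path can't be opened
        Bitmap frame = retriever.getFrameAtTime(5000000);  // 5 s, in microseconds
        target.setImageBitmap(frame);
    } catch (IllegalArgumentException e) {
        // Usually means the file could not be read - check the path and that
        // READ_EXTERNAL_STORAGE is declared in AndroidManifest.xml.
        e.printStackTrace();
    }
}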


MainActivity.java

package com.example.androidmediametadataretriever;

import android.support.v7.app.ActionBarActivity;
import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;
import android.os.Bundle;
import android.widget.ImageView;

public class MainActivity extends ActionBarActivity {

String uri = "/storage/emulated/0/DCIM/LEDrose.mp4";
ImageView capturedImageView;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);

capturedImageView = (ImageView) findViewById(R.id.capturedimage);

MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();

mediaMetadataRetriever.setDataSource(uri);
Bitmap bmFrame = mediaMetadataRetriever.getFrameAtTime(5000000);
capturedImageView.setImageBitmap(bmFrame);

}

}

activity_main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context="com.example.androidmediametadataretriever.MainActivity" >

<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:autoLink="web"
android:text="http://android-er.blogspot.com/"
android:textStyle="bold" />

<ImageView
android:id="@+id/capturedimage"
android:layout_width="match_parent"
android:layout_height="match_parent" />

</LinearLayout>

Permission of "android.permission.READ_EXTERNAL_STORAGE" is needed in AndroidManifest.xml. And minSdkVersion have to be 10 or higher to use MediaMetadataRetriever in your code.
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.androidmediametadataretriever"
android:versionCode="1"
android:versionName="1.0" >

<uses-sdk
android:minSdkVersion="10"
android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>

<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<activity
android:name=".MainActivity"
android:label="@string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />

<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>

</manifest>

Bluetooth communication between Android devices

This exercise implements bi-directional Bluetooth communication between Android devices. After the connection is made as in the last post, "Make BlueTooth connection between Android devices", another thread, ThreadConnected, is started to handle read and write once the BluetoothSocket is established. If the connection is lost for any reason, the program will not re-connect, so you have to quit and restart it.


Please note that this is just an exercise for studying Bluetooth, not a complete example.
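
The core of ThreadConnected is nothing more than the socket's two streams: a blocking read loop for incoming bytes and a plain write for outgoing bytes. A stripped-down sketch of that pattern, where socket is an already-connected BluetoothSocket and pumpSocket is just an illustrative name (the full classes follow):

// Stripped-down read/write pattern used by ThreadConnected below.
void pumpSocket(BluetoothSocket socket) throws IOException {
    InputStream in = socket.getInputStream();
    OutputStream out = socket.getOutputStream();

    out.write("hello".getBytes());                 // sending is a plain stream write
    out.flush();

    byte[] buffer = new byte[1024];
    int bytes;
    while ((bytes = in.read(buffer)) != -1) {      // blocks until data arrives; -1 = remote closed
        String received = new String(buffer, 0, bytes);
        // ...post 'received' to the UI thread, e.g. via runOnUiThread()...
    }
}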


AndroidBlueToothBeConnected:

MainActivity.java
package com.example.androidbluetoothbeconnected;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.UUID;

import android.support.v7.app.ActionBarActivity;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothServerSocket;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;

public class MainActivity extends ActionBarActivity {

private static final int REQUEST_ENABLE_BT = 1;

BluetoothAdapter bluetoothAdapter;

private UUID myUUID;
private String myName;

LinearLayout inputPane;
EditText inputField;
Button btnSend;

TextView textInfo, textStatus;

ThreadBeConnected myThreadBeConnected;
ThreadConnected myThreadConnected;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textInfo = (TextView)findViewById(R.id.info);
textStatus = (TextView)findViewById(R.id.status);

inputPane = (LinearLayout)findViewById(R.id.inputpane);
inputField = (EditText)findViewById(R.id.input);
btnSend = (Button)findViewById(R.id.send);
btnSend.setOnClickListener(new OnClickListener(){

@Override
public void onClick(View v) {
if(myThreadConnected!=null){
byte[] bytesToSend = inputField.getText().toString().getBytes();
myThreadConnected.write(bytesToSend);
}
}});

if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH)){
Toast.makeText(this,
"FEATURE_BLUETOOTH NOT support",
Toast.LENGTH_LONG).show();
finish();
return;
}

//generate UUID on web: http://www.famkruithof.net/uuid/uuidgen
//has to match the UUID on the other device of the BT connection
myUUID = UUID.fromString("ec79da00-853f-11e4-b4a9-0800200c9a66");
myName = myUUID.toString();

bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (bluetoothAdapter == null) {
Toast.makeText(this,
"Bluetooth is not supported on this hardware platform",
Toast.LENGTH_LONG).show();
finish();
return;
}

String stInfo = bluetoothAdapter.getName() + "\n" +
bluetoothAdapter.getAddress();
textInfo.setText(stInfo);
}

@Override
protected void onStart() {
super.onStart();

//Turn ON BlueTooth if it is OFF
if (!bluetoothAdapter.isEnabled()) {
Intent enableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
}

setup();
}

private void setup() {
textStatus.setText("setup()");
myThreadBeConnected = new ThreadBeConnected();
myThreadBeConnected.start();
}

@Override
protected void onDestroy() {
super.onDestroy();

if(myThreadBeConnected!=null){
myThreadBeConnected.cancel();
}
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {

if(requestCode==REQUEST_ENABLE_BT){
if(resultCode == Activity.RESULT_OK){
setup();
}else{
Toast.makeText(this,
"BlueTooth NOT enabled",
Toast.LENGTH_SHORT).show();
finish();
}
}
}


private class ThreadBeConnected extends Thread {

private BluetoothServerSocket bluetoothServerSocket = null;

public ThreadBeConnected() {
try {
bluetoothServerSocket =
bluetoothAdapter.listenUsingRfcommWithServiceRecord(myName, myUUID);

textStatus.setText("Waiting\n"
+ "bluetoothServerSocket :\n"
+ bluetoothServerSocket);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

@Override
public void run() {
BluetoothSocket bluetoothSocket = null;

if(bluetoothServerSocket!=null){
try {
bluetoothSocket = bluetoothServerSocket.accept();

BluetoothDevice remoteDevice = bluetoothSocket.getRemoteDevice();

final String strConnected = "Connected:\n" +
remoteDevice.getName() + "\n" +
remoteDevice.getAddress();

//connected
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(strConnected);
inputPane.setVisibility(View.VISIBLE);
}});

startThreadConnected(bluetoothSocket);

} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();

final String eMessage = e.getMessage();
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText("something wrong: \n" + eMessage);
}});
}
}else{
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText("bluetoothServerSocket == null");
}});
}
}

public void cancel() {

Toast.makeText(getApplicationContext(),
"close bluetoothServerSocket",
Toast.LENGTH_LONG).show();

try {
bluetoothServerSocket.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}

private void startThreadConnected(BluetoothSocket socket){

myThreadConnected = new ThreadConnected(socket);
myThreadConnected.start();
}

private class ThreadConnected extends Thread {
private final BluetoothSocket connectedBluetoothSocket;
private final InputStream connectedInputStream;
private final OutputStream connectedOutputStream;

public ThreadConnected(BluetoothSocket socket) {
connectedBluetoothSocket = socket;
InputStream in = null;
OutputStream out = null;

try {
in = socket.getInputStream();
out = socket.getOutputStream();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}

connectedInputStream = in;
connectedOutputStream = out;
}

@Override
public void run() {
byte[] buffer = new byte[1024];
int bytes;

while (true) {
try {
bytes = connectedInputStream.read(buffer);

String strReceived = new String(buffer, 0, bytes);
final String msgReceived = String.valueOf(bytes) +
" bytes received:\n"
+ strReceived;

runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(msgReceived);
}});

} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();

final String msgConnectionLost = "Connection lost:\n"
+ e.getMessage();
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(msgConnectionLost);
}});
}
}
}

public void write(byte[] buffer) {
try {
connectedOutputStream.write(buffer);
connectedOutputStream.flush();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

public void cancel() {
try {
connectedBluetoothSocket.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

}
}

activity_main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
android:orientation="vertical"
tools:context="com.example.androidbluetoothbeconnected.MainActivity" >

<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:autoLink="web"
android:text="http://android-er.blogspot.com/"
android:textStyle="bold" />

<TextView
android:id="@+id/info"
android:textStyle="bold|italic"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/status"
android:textSize="28sp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>

<LinearLayout
android:id="@+id/inputpane"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:visibility="gone">

<EditText
android:id="@+id/input"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<Button
android:id="@+id/send"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Sent"/>

</LinearLayout>

</LinearLayout>

Download the files.

AndroidBlueTooth:

MainActivity.java
package com.example.androidbluetooth;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Set;
import java.util.UUID;

import android.support.v7.app.ActionBarActivity;
import android.app.Activity;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

public class MainActivity extends ActionBarActivity {

private static final int REQUEST_ENABLE_BT = 1;

BluetoothAdapter bluetoothAdapter;

ArrayList<BluetoothDevice> pairedDeviceArrayList;

TextView textInfo, textStatus;
ListView listViewPairedDevice;
ArrayAdapter<BluetoothDevice> pairedDeviceAdapter;
private UUID myUUID;

LinearLayout inputPane;
EditText inputField;
Button btnSend;

ThreadConnectBTdevice myThreadConnectBTdevice;
ThreadConnected myThreadConnected;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textInfo = (TextView)findViewById(R.id.info);
textStatus = (TextView)findViewById(R.id.status);
listViewPairedDevice = (ListView)findViewById(R.id.pairedlist);

inputPane = (LinearLayout)findViewById(R.id.inputpane);
inputField = (EditText)findViewById(R.id.input);
btnSend = (Button)findViewById(R.id.send);
btnSend.setOnClickListener(new OnClickListener(){

@Override
public void onClick(View v) {
if(myThreadConnected!=null){
byte[] bytesToSend = inputField.getText().toString().getBytes();
myThreadConnected.write(bytesToSend);
}
}});

if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH)){
Toast.makeText(this,
"FEATURE_BLUETOOTH NOT support",
Toast.LENGTH_LONG).show();
finish();
return;
}

//generate UUID on web: http://www.famkruithof.net/uuid/uuidgen
//has to match the UUID on the other device of the BT connection
myUUID = UUID.fromString("ec79da00-853f-11e4-b4a9-0800200c9a66");

bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (bluetoothAdapter == null) {
Toast.makeText(this,
"Bluetooth is not supported on this hardware platform",
Toast.LENGTH_LONG).show();
finish();
return;
}

String stInfo = bluetoothAdapter.getName() + "\n" +
bluetoothAdapter.getAddress();
textInfo.setText(stInfo);
}

@Override
protected void onStart() {
super.onStart();

//Turn ON BlueTooth if it is OFF
if (!bluetoothAdapter.isEnabled()) {
Intent enableIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(enableIntent, REQUEST_ENABLE_BT);
}

setup();
}

private void setup() {
Set<BluetoothDevice> pairedDevices = bluetoothAdapter.getBondedDevices();
if (pairedDevices.size() > 0) {
pairedDeviceArrayList = new ArrayList<BluetoothDevice>();

for (BluetoothDevice device : pairedDevices) {
pairedDeviceArrayList.add(device);
}

pairedDeviceAdapter = new ArrayAdapter<BluetoothDevice>(this,
android.R.layout.simple_list_item_1, pairedDeviceArrayList);
listViewPairedDevice.setAdapter(pairedDeviceAdapter);

listViewPairedDevice.setOnItemClickListener(new OnItemClickListener(){

@Override
public void onItemClick(AdapterView<?> parent, View view,
int position, long id) {
BluetoothDevice device =
(BluetoothDevice)parent.getItemAtPosition(position);
Toast.makeText(MainActivity.this,
"Name: " + device.getName() + "\n"
+ "Address: " + device.getAddress() + "\n"
+ "BondState: " + device.getBondState() + "\n"
+ "BluetoothClass: " + device.getBluetoothClass() + "\n"
+ "Class: " + device.getClass(),
Toast.LENGTH_LONG).show();

textStatus.setText("start ThreadConnectBTdevice");
myThreadConnectBTdevice = new ThreadConnectBTdevice(device);
myThreadConnectBTdevice.start();
}});
}
}

@Override
protected void onDestroy() {
super.onDestroy();

if(myThreadConnectBTdevice!=null){
myThreadConnectBTdevice.cancel();
}
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {

if(requestCode==REQUEST_ENABLE_BT){
if(resultCode == Activity.RESULT_OK){
setup();
}else{
Toast.makeText(this,
"BlueTooth NOT enabled",
Toast.LENGTH_SHORT).show();
finish();
}
}
}

private class ThreadConnectBTdevice extends Thread {
private BluetoothSocket bluetoothSocket = null;
private final BluetoothDevice bluetoothDevice;


public ThreadConnectBTdevice(BluetoothDevice device) {
bluetoothDevice = device;

try {
bluetoothSocket = device.createRfcommSocketToServiceRecord(myUUID);
textStatus.setText("bluetoothSocket: \n" + bluetoothSocket);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

@Override
public void run() {
boolean success = false;
try {
bluetoothSocket.connect();
success = true;
} catch (IOException e) {
e.printStackTrace();

final String eMessage = e.getMessage();
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText("something wrong bluetoothSocket.connect(): \n" + eMessage);
}});

try {
bluetoothSocket.close();
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
}

if(success){
//connection successful
final String msgconnected = "connected successfully:\n"
+ "BluetoothSocket: " + bluetoothSocket + "\n"
+ "BluetoothDevice: " + bluetoothDevice;

runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(msgconnected);

listViewPairedDevice.setVisibility(View.GONE);
inputPane.setVisibility(View.VISIBLE);
}});

startThreadConnected(bluetoothSocket);
}else{
//fail
}
}

public void cancel() {

Toast.makeText(getApplicationContext(),
"close bluetoothSocket",
Toast.LENGTH_LONG).show();

try {
bluetoothSocket.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}

}
}

private void startThreadConnected(BluetoothSocket socket){

myThreadConnected = new ThreadConnected(socket);
myThreadConnected.start();
}

private class ThreadConnected extends Thread {
private final BluetoothSocket connectedBluetoothSocket;
private final InputStream connectedInputStream;
private final OutputStream connectedOutputStream;

public ThreadConnected(BluetoothSocket socket) {
connectedBluetoothSocket = socket;
InputStream in = null;
OutputStream out = null;

try {
in = socket.getInputStream();
out = socket.getOutputStream();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}

connectedInputStream = in;
connectedOutputStream = out;
}

@Override
public void run() {
byte[] buffer = new byte[1024];
int bytes;

while (true) {
try {
bytes = connectedInputStream.read(buffer);
String strReceived = new String(buffer, 0, bytes);
final String msgReceived = String.valueOf(bytes) +
" bytes received:\n"
+ strReceived;

runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(msgReceived);
}});

} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();

final String msgConnectionLost = "Connection lost:\n"
+ e.getMessage();
runOnUiThread(new Runnable(){

@Override
public void run() {
textStatus.setText(msgConnectionLost);
}});
}
}
}

public void write(byte[] buffer) {
try {
connectedOutputStream.write(buffer);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

public void cancel() {
try {
connectedBluetoothSocket.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

}

}

activity_main.xml
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
android:orientation="vertical"
tools:context="com.example.androidbluetooth.MainActivity" >

<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:autoLink="web"
android:text="http://android-er.blogspot.com/"
android:textStyle="bold" />

<TextView
android:id="@+id/info"
android:textStyle="bold|italic"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
<TextView
android:id="@+id/status"
android:textSize="28sp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>

<ListView
android:id="@+id/pairedlist"
android:layout_width="match_parent"
android:layout_height="match_parent"/>

<LinearLayout
android:id="@+id/inputpane"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:visibility="gone">

<EditText
android:id="@+id/input"
android:layout_width="match_parent"
android:layout_height="wrap_content"/>
<Button
android:id="@+id/send"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:text="Sent"/>

</LinearLayout>

</LinearLayout>


Download the files.


Remark: both sides have to modify AndroidManifest.xml to add the "android.permission.BLUETOOTH" permission, for example:
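
A minimal sketch of the manifest entry, assuming the package name com.example.androidbluetooth used in this example; the rest of AndroidManifest.xml stays as generated by the IDE:

AndroidManifest.xml (excerpt)
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.androidbluetooth" >

    <!-- required on both devices to open RFCOMM Bluetooth sockets -->
    <uses-permission android:name="android.permission.BLUETOOTH" />

    ...

</manifest>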


Related:
- Android example to communicate with Bluetooth device, HC-06 Bluetooth Module

Update "android.permission.READ_EXTERNAL_STORAGE" for KitKat

Re-visiting my old post "Convert between Uri and file path, and load Bitmap from", the example loads an image from a Uri in the form "content://media/external/images/media/xxxxx", from a real path in the form "/storage/emulated/0/Picture/...", and from a Uri in the form "file:///storage/emulated/0/Picture/...".

When run on a HTC One X running Android 4.2.2, it can read the image from all three forms: "content://media/external/images/media/xxxxx", "/storage/sdcard0/Pictures/Screenshots/..." and "file:///storage/sdcard0/Pictures/Screenshots/...".

When run on a Nexus 7 running Android 4.4.4 KitKat, only the first form, "content://media/external/images/media/xxxxx", is displayed; nothing is displayed for the other two, "/storage/emulated/0/Pictures/Screenshots/..." and "file:///storage/emulated/0/Pictures/Screenshots/...".

To fix it, add the "android.permission.READ_EXTERNAL_STORAGE" permission in AndroidManifest.xml; refer to the video below and the manifest sketch that follows.
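
A minimal sketch of the manifest entry; the package name is a placeholder for whatever your project uses, and only the <uses-permission> line matters here (READ_EXTERNAL_STORAGE is enforced starting from KitKat, API 19):

AndroidManifest.xml (excerpt)
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="your.package.name" > <!-- placeholder package name -->

    <!-- lets the app read images from external storage paths and file:// Uris on KitKat -->
    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

    ...

</manifest>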


The method getContentResolver() is undefined for the type...

Normally we call getContentResolver() in our Activity or ActionBarActivity to get a ContentResolver instance for the application's package. If you try to call it from a separate class, you will get the error "The method getContentResolver() is undefined for the type...". You have to pass the Context of your Activity to the other class and call getContentResolver() on it, as shown in the video.


This example also shows how the returned ContentResolver differs depending on which Context is passed.



MainActivity.java
package com.example.androidhello;

import android.support.v7.app.ActionBarActivity;
import android.content.ContentResolver;
import android.os.Bundle;
import android.widget.TextView;

public class MainActivity extends ActionBarActivity {

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

TextView tv = new TextView(this);
setContentView(tv);

ContentResolver contentResolverA = getContentResolver();

//same ContentResolver returned
ContentResolver contentResolverB = AnotherClass.tryGetContentResolver(this);
//ContentResolver contentResolverB = AnotherClass.tryGetContentResolver(MainActivity.this);
//ContentResolver contentResolverB = AnotherClass.tryGetContentResolver(tv.getContext());
//ContentResolver contentResolverB = AnotherClass.tryGetContentResolver(getBaseContext());

//not same
//ContentResolver contentResolverB = AnotherClass.tryGetContentResolver(getApplicationContext());

String msg = this +"\n"
+ MainActivity.this + "\n"
+ tv.getContext() + "\n"
+ getBaseContext() + "\n"
+ getApplicationContext() + "\n\n";

if(contentResolverA == contentResolverB){
msg += "contentResolverA == contentResolverB\n"
+ contentResolverB;
}else{
msg += "contentResolverA != contentResolverB\n"
+ contentResolverA + "\n"
+ contentResolverB;
}

tv.setText(msg);
}

}

AnotherClass.java
package com.example.androidhello;

import android.content.ContentResolver;
import android.content.Context;

public class AnotherClass {

static public ContentResolver tryGetContentResolver(Context c){
ContentResolver contentResolver = c.getContentResolver();
return contentResolver;
}

}

WonderFox Christmas gift pack

Hi Readers,

WonderFox Soft wishes you all the happiness of the holiday season! :-)


It's Christmas now! 

To show our appreciation to WonderFox supporters, we have prepared a special Christmas gift pack for all TechForever readers.

Three useful software tools (normally costing $65) are provided to warm up the holiday time in the cold weather.

A total solution to deal with HD-Camcorder video files, protect video copyright, and rip DVDs for family sharing.

Christmas event page FYI: http://www.videoconverterfactory.com/promotion.html

This Christmas gift pack is available to download before January 1st, 2015.

No complicated rules, just free gifts. Although the gifts are not pricey, I believe they will be useful for you readers during the holiday. We hope you will share this news with your friends so they can take our gifts too.

Hope to hear from you.

Merry Christmas :)