Share Coding

Tutorials, Problems, and Other Stuff …

Tag Archives: ffmpeg

Android: Get a video thumbnail from a local or network video

In build.gradle

compile 'com.github.wseemann:FFmpegMediaMetadataRetriever:1.0.14'

1. From Local Video

// Extract a representative frame from a local video file.
MediaMetadataRetriever retriever = new MediaMetadataRetriever();
try {
    Uri uri = Uri.parse(img_url);
    String scheme = uri.getScheme();
    Log.i("uri",""+uri+" "+scheme);
    // NOTE(review): the original snippet never called setDataSource —
    // getFrameAtTime throws IllegalStateException without it. Restored here.
    retriever.setDataSource(img_url);
    // timeUs = -1 with OPTION_CLOSEST returns a representative frame.
    bitmap = retriever.getFrameAtTime(-1, MediaMetadataRetriever.OPTION_CLOSEST);
    Log.d("bitmappp", " "+bitmap.getWidth()+" "+bitmap.getHeight());
} catch (IllegalArgumentException ex) {
    // Assume this is a corrupt video file
} catch (RuntimeException ex) {
    // Assume this is a corrupt video file.
} finally {
    try {
        // Always release the retriever's native resources.
        retriever.release();
    } catch (RuntimeException ex) {
        // Ignore failures while cleaning up.
    }
}
To create a thumbnail, many examples suggest using ThumbnailUtils to get a small photo.
I suggest determining a sample size and resizing the bitmap yourself instead.

bitmap = AlbumInputUI.scaleBitmapInSample(act, bitmap); Runnable() {
public void run() {
public static Bitmap scaleBitmapInSample(Activity act, Bitmap bmp){
		float width, height;
		int w = bmp.getWidth();
		int h = bmp.getHeight();
		//Log.d("before","w:"+w+" h:"+h);
			width = SizeConverter.convertDpToPixel((float)250, act);
			height = SizeConverter.convertDpToPixel((float)180, act);
		}else if(h>w){
			width = SizeConverter.convertDpToPixel((float)180, act);
			height = SizeConverter.convertDpToPixel((float)250, act);
			width = SizeConverter.convertDpToPixel((float)180, act);
			height = SizeConverter.convertDpToPixel((float)180, act);
		Log.d("after","w:"+width+" h:"+height);
		return SmartUriDecoder.scaleBitmap(bmp, width, height);

public static Bitmap scaleBitmap(Bitmap origBitmap, float width, float height) {
		float w = (float)origBitmap.getWidth();
		float h = (float)origBitmap.getHeight();
		Log.d("scaleBitmap","w:"+w+" h:"+h+" width:"+width+" height:"+height);
		//w = SizeConverter.convertDpToPixel(w, MyApp.getCurrent_activity());
		//h = SizeConverter.convertDpToPixel(w, MyApp.getCurrent_activity());

		if(w>=width && h>= height){
			float scale = Math.min(
					((float)width) / ((float)origBitmap.getWidth()),
					((float)height) / ((float)origBitmap.getHeight())

			Bitmap result = Bitmap.createScaledBitmap(origBitmap,
					(int)(((float)origBitmap.getWidth()) * scale),
					(int)(((float)origBitmap.getHeight()) * scale),
			//origBitmap = null;
			return result;
			return origBitmap;

2. From Network Video

FFmpegMediaMetadataRetriever mmr = new FFmpegMediaMetadataRetriever();
Bitmap b = mmr.getFrameAtTime(2000000, FFmpegMediaMetadataRetriever.OPTION_CLOSEST); // frame at 2 seconds
byte [] artwork = mmr.getEmbeddedPicture();


3. You can use uri.getScheme() to check whether the scheme is "file" or "http", i.e. whether the video is a local file or a network stream.

Uri uri = Uri.parse(video_url);
String scheme = uri.getScheme();

Building FFmpeg with Xcode 4 (iOS 5.0) for a real iPhone only (armv7)

Using iFrameExtractor open source project for an example.

1. Install MacPorts from https://www.macports.org/

2. Open terminal type:

sudo port install pkgconfig

3. Terminal type: 

git clone https://github.com/lajos/iFrameExtractor.git

cd iFrameExtractor/ffmpeg


4. If the configure command reports any missing packages, install them using MacPorts.

# Output directories: armv7/ for the built static libs, lib/ for the copies
# the Xcode project links against.
mkdir armv7

mkdir lib

# Configure FFmpeg for an armv7 cross-compile against the iOS 5.0 SDK.
./configure --disable-doc --disable-ffmpeg --disable-ffplay --disable-ffserver --enable-cross-compile --arch=arm --target-os=darwin --cc=/Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/gcc --as='gas-preprocessor/ /Developer/Platforms/iPhoneOS.platform/Developer/usr/bin/gcc' --sysroot=/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS5.0.sdk --cpu=cortex-a8 --extra-cflags='-arch armv7' --extra-ldflags='-arch armv7 -isysroot /Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS5.0.sdk' --enable-pic

# NOTE(review): the original post jumped straight from configure to moving
# the .a files — the build step itself was missing. Run make first so the
# static libraries actually exist.
make

# Collect the built static libraries.
mv libavcodec/libavcodec.a armv7/

mv libavdevice/libavdevice.a armv7/

mv libavformat/libavformat.a armv7/

mv libavutil/libavutil.a armv7/

mv libswscale/libswscale.a armv7/

cp armv7/*.a lib/

5. Open iFrameExtractor.xcodeproj and run with a REAL iPhone