Notes on Android development: taking screenshots
view.setDrawingCacheEnabled(true); // enable the view's drawing cache
view.buildDrawingCache(); // build the drawing cache (if it is enabled)
Bitmap bitmap = view.getDrawingCache(); // returns a Bitmap once the cache has been built
view.destroyDrawingCache(); // release the resources held by the cache
Tips:
If you capture the same screen repeatedly, clear the drawing cache before every capture;
If the bitmap does not fit your needs, post-process it with Bitmap.createBitmap() (over-compressing the image can leave it unable to display);
File f = new File(" "); // target path left blank in the original
FileOutputStream fOut = null;
try {
    f.createNewFile();
    fOut = new FileOutputStream(f);
    mBitmap.compress(Bitmap.CompressFormat.PNG, 100, fOut);
    fOut.flush();
} catch (IOException e) {
    e.printStackTrace();
} finally {
    if (fOut != null) {
        try {
            fOut.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
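Putting the drawing-cache calls and the save code together, a helper along these lines can capture a view into a standalone Bitmap (a sketch; the method name captureView is mine, not from the original):

public static Bitmap captureView(View view) {
    view.setDrawingCacheEnabled(true);
    view.buildDrawingCache();
    Bitmap cache = view.getDrawingCache();
    // Copy the cached bitmap before destroying the cache; the bitmap returned
    // by getDrawingCache() becomes invalid once the cache is released.
    Bitmap result = (cache == null) ? null : Bitmap.createBitmap(cache);
    view.destroyDrawingCache();
    return result;
}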
1. Call "adb shell screencap -p filepath" from the APK (a screenshot method based on the Android SDK)
This command reads the system framebuffer and therefore needs system permission:
(1) Add the required declaration to AndroidManifest.xml
(2) Give the APK system permission: build it inside the platform source tree and modify its Android.mk with
LOCAL_CERTIFICATE := platform
public void takeScreenShot() {
    String mSavedPath = Environment.getExternalStorageDirectory() + File.separator + "screenshot.png";
    try {
        Runtime.getRuntime().exec("screencap -p " + mSavedPath);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
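Note that exec() returns as soon as the command is started, so reading the file immediately can yield an empty or partial PNG. A minimal sketch that waits for screencap to finish (the method name takeScreenShotBlocking and the throws clause are mine):

public void takeScreenShotBlocking() throws IOException, InterruptedException {
    String savedPath = Environment.getExternalStorageDirectory()
            + File.separator + "screenshot.png";
    // exec() only starts the command; waitFor() blocks until screencap
    // has finished writing the PNG, so it is safe to read afterwards.
    Process p = Runtime.getRuntime().exec("screencap -p " + savedPath);
    p.waitFor();
}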
2. Use the system API to take the screenshot. The relevant code is hidden (@hide) in the system, so it must be built inside the platform source tree (a screenshot method based on the Android SDK)
(1) Add the required declaration to AndroidManifest.xml
(2) Give the APK system permission: build it inside the platform source tree and modify its Android.mk with
LOCAL_CERTIFICATE := platform
public boolean takeScreenShot(String imagePath) {
    if (imagePath.equals("")) {
        imagePath = Environment.getExternalStorageDirectory() + File.separator + "Screenshot.png";
    }
    Bitmap mScreenBitmap;
    WindowManager mWindowManager;
    DisplayMetrics mDisplayMetrics;
    Display mDisplay;
    // mContext: the Context that owns this helper (field not shown in the original)
    mWindowManager = (WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE);
    mDisplay = mWindowManager.getDefaultDisplay();
    mDisplayMetrics = new DisplayMetrics();
    mDisplay.getRealMetrics(mDisplayMetrics);
    float[] dims = { mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels };
    mScreenBitmap = Surface.screenshot((int) dims[0], (int) dims[1]); // hidden API, needs a platform build
    if (mScreenBitmap == null) {
        return false;
    }
    try {
        FileOutputStream out = new FileOutputStream(imagePath);
        mScreenBitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        out.flush();
        out.close();
    } catch (Exception e) {
        return false;
    }
    return true;
}
Taking screenshots with Android ddmlib
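The ddmlib example below is shown without its import section; roughly these imports are needed (standard ddmlib and Java SE classes, listed here for completeness):

import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;

import javax.imageio.ImageIO;

import com.android.ddmlib.AdbCommandRejectedException;
import com.android.ddmlib.AndroidDebugBridge;
import com.android.ddmlib.IDevice;
import com.android.ddmlib.RawImage;
import com.android.ddmlib.TimeoutException;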
public class ScreenShot {
private BufferedImage image = null;
/**
* @param args
*/
public static void main(String[] args) {
AndroidDebugBridge.init(false);
ScreenShot screenshot = new ScreenShot();
IDevice device = screenshot.getDevice();
for (int i = 0; i < 10; i++) {
Date date=new Date();
SimpleDateFormat df=new SimpleDateFormat("MM-dd-HH-mm-ss");
String nowTime = df.format(date);
screenshot.getScreenShot(device, "Robotium" + nowTime);
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
public void getScreenShot(IDevice device,String filename) {
RawImage rawScreen = null;
try {
rawScreen = device.getScreenshot();
} catch (TimeoutException e) {
    e.printStackTrace();
} catch (AdbCommandRejectedException e) {
    e.printStackTrace();
} catch (IOException e) {
    e.printStackTrace();
}
if (rawScreen != null) {
boolean landscape = false; // set to true if the image should be rotated into landscape
int width2 = landscape ? rawScreen.height : rawScreen.width;
int height2 = landscape ? rawScreen.width : rawScreen.height;
if (image == null) {
image = new BufferedImage(width2, height2,
BufferedImage.TYPE_INT_RGB);
} else {
if (image.getHeight() != height2 || image.getWidth() != width2) {
image = new BufferedImage(width2, height2,
BufferedImage.TYPE_INT_RGB);
}
}
int index = 0;
int indexInc = rawScreen.bpp >> 3;
for (int y = 0; y < rawScreen.height; y++) {
for (int x = 0; x < rawScreen.width; x++, index += indexInc) {
int value = rawScreen.getARGB(index);
if (landscape)
image.setRGB(y, rawScreen.width - x - 1, value);
else
image.setRGB(x, y, value);
}
}
try {
ImageIO.write((RenderedImage) image, "PNG", new File("D:/"
        + filename + ".png")); // the data is PNG, so use a .png extension
} catch (IOException e) {
    e.printStackTrace();
}
}
}
/**
 * Get the IDevice object.
 * @return the first device in the ADB device list
 */
private IDevice getDevice(){
IDevice device;
AndroidDebugBridge bridge = AndroidDebugBridge
        .createBridge("adb", true); // if this fails, check the ddmlib API docs and try different argument values
waitDevicesList(bridge);
IDevice devices[] = bridge.getDevices();
device = devices[0];
return device;
}
/**
 * Wait until the ADB device list has been populated.
 * @param bridge
 */
private void waitDevicesList(AndroidDebugBridge bridge) {
int count = 0;
while (bridge.hasInitialDeviceList() == false) {
try {
Thread.sleep(500);
count++;
} catch (InterruptedException e) {
}
if (count > 240) {
System.err.print("Timed out waiting for devices");
break;
}
}
}
Reading the framebuffer with Android native programming
(1) Command line. The framework's screenshot feature is implemented on top of the framebuffer, so let's introduce the framebuffer first.
About the framebuffer
The framebuffer is an interface Linux provides for display devices: it abstracts the video memory into a device and lets user-space programs read and write the display buffer directly while in graphics mode. The access is abstract and uniform; the user does not need to care where the physical video memory lives, how paging works, or any other detail, because the framebuffer device driver handles all of that.
In essence, the Linux framebuffer is only a hardware abstraction of the graphics device. From a developer's point of view it is a block of display memory: writing data in the right format into that memory means putting content on the screen, so the framebuffer is effectively a blank canvas. For a framebuffer initialized for 16-bit color, for example, every two bytes represent one point on the screen; going top to bottom and left to right, screen positions map to memory addresses in a simple linear order.
The frame buffer sits at an address in memory. We keep writing data into it, and the display controller automatically fetches that data and shows it on screen. All graphics share the same frame buffer in memory.
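As a concrete illustration of that linear mapping, a short sketch (screenWidth, x and y are assumed variables, not from the original):

int bytesPerPixel = 2;                              // RGB565: 2 bytes per pixel
int offset = (y * screenWidth + x) * bytesPerPixel; // byte offset of pixel (x, y)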
How screenshots can be implemented on Android
Android is based on the Linux kernel, so the framebuffer device exists there too. To take a screenshot we only need to grab the framebuffer data and convert it into an image. On Android the framebuffer data lives in the /dev/graphics/fb0 file, so reading that file gives us the current contents of the screen.
Our test code drives the application under test through a remote controller (RC), so the PC needs to access the framebuffer of the emulator or device, which can be done with Android's adb command.
Implementation
public class ScreenShot {
/**
* @param args
* @throws InterruptedException
*/
public static void main(String[] args) throws InterruptedException {
try {
//screen resolution; this could later be obtained programmatically instead of hard-coded
int xResolution = 320;
int yResolution = 480;
//run the adb command to dump the framebuffer into the local file fb1
Runtime.getRuntime().exec("adb pull /dev/graphics/fb0 C:/fb1");
//wait a few seconds so the framebuffer dump completes; reading it too early causes an IO exception
Thread.sleep(15000);
//read the dumped data
InputStream in = new FileInputStream("C:/fb1");
DataInput frameBuffer = new LittleEndianDataInputStream(in);
BufferedImage screenImage = new BufferedImage(
xResolution, yResolution, BufferedImage.TYPE_INT_ARGB);
int[] oneLine = new int[xResolution];
for (int y = 0; y < yResolution; y++) {
//compute the RGB values for one row from the frameBuffer data
convertToRgba32(frameBuffer, oneLine);
//write the RGB values for this row into the image
screenImage.setRGB(0, y, xResolution, 1, oneLine, 0, xResolution);
}
Closeables.closeQuietly(in);
ByteArrayOutputStream rawPngStream = new ByteArrayOutputStream();
try {
if (!ImageIO.write(screenImage, "png", rawPngStream)) {
throw new RuntimeException(
"This Java environment does not support converting to PNG.");
}
} catch (IOException exception) {
// This should never happen because rawPngStream is an in-memory stream.
System.out.println("IOException=" + exception);
}
byte[] rawPngBytes = rawPngStream.toByteArray();
String base64Png = new Base64Encoder().encode(rawPngBytes);
File screenshot = OutputType.FILE.convertFromBase64Png(base64Png);
System.out.println("screenshot==" + screenshot.toString());
screenshot.renameTo(new File("C:\\screenshottemp.png"));
} catch (IOException e) {
e.printStackTrace();
System.out.println(e);
}
}
public static void convertToRgba32(DataInput frameBuffer, int[] into) {
try {
for (int x = 0; x < into.length; x++) {
try{
int rgb = frameBuffer.readShort() & 0xffff;
int red = rgb >> 11;
red = (red << 3) | (red >> 2);
int green = (rgb >> 5) & 63;
green = (green << 2) | (green >> 4);
int blue = rgb & 31;
blue = (blue << 3) | (blue >> 2);
into[x] = 0xff000000 | (red << 16) | (green << 8) | blue;
}catch (EOFException e){
System.out.println("EOFException=" + e);
}
}
} catch (IOException exception) {
System.out.println("convertToRgba32Exception=" + exception);
}
}
}
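The resolution is hard-coded to 320x480 above. On devices that provide the wm command (roughly Android 4.3 and later) it can be queried over adb instead; the helper below is my own sketch, not part of the original code:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ResolutionHelper {
    // Runs "adb shell wm size" (prints e.g. "Physical size: 480x800") and
    // parses the two dimensions; falls back to 320x480 if parsing fails.
    public static int[] queryResolution() throws IOException, InterruptedException {
        Process p = Runtime.getRuntime().exec("adb shell wm size");
        p.waitFor();
        BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream()));
        String line;
        while ((line = reader.readLine()) != null) {
            Matcher m = Pattern.compile("(\\d+)x(\\d+)").matcher(line);
            if (m.find()) {
                return new int[] { Integer.parseInt(m.group(1)), Integer.parseInt(m.group(2)) };
            }
        }
        return new int[] { 320, 480 };
    }
}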
(2) In-process, using the hidden Surface.screenshot API:
package org.winplus.ss;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import android.app.Activity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.WindowManager;
import android.os.SystemProperties;
public class SimpleScreenshotActivity extends Activity {
private Display mDisplay;
private WindowManager mWindowManager;
private DisplayMetrics mDisplayMetrics;
private Bitmap mScreenBitmap;
private Matrix mDisplayMatrix;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
new Thread(new Runnable() {
@Override
public void run() {
takeScreenshot();
}
}).start();
}
private float getDegreesForRotation(int value) {
switch (value) {
case Surface.ROTATION_90:
return 360f - 90f;
case Surface.ROTATION_180:
return 360f - 180f;
case Surface.ROTATION_270:
return 360f - 270f;
}
return 0f;
}
private void takeScreenshot() {
mWindowManager = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
mDisplay = mWindowManager.getDefaultDisplay();
mDisplayMetrics = new DisplayMetrics();
mDisplay.getRealMetrics(mDisplayMetrics);
mDisplayMatrix = new Matrix();
float[] dims = { mDisplayMetrics.widthPixels,
mDisplayMetrics.heightPixels };
int value = mDisplay.getRotation();
String hwRotation = SystemProperties.get("ro.sf.hwrotation", "0");
if (hwRotation.equals("270") || hwRotation.equals("90")) {
value = (value + 3) % 4;
}
float degrees = getDegreesForRotation(value);
boolean requiresRotation = (degrees > 0);
if (requiresRotation) {
// Get the dimensions of the device in its native orientation
mDisplayMatrix.reset();
mDisplayMatrix.preRotate(-degrees);
mDisplayMatrix.mapPoints(dims);
dims[0] = Math.abs(dims[0]);
dims[1] = Math.abs(dims[1]);
}
mScreenBitmap = Surface.screenshot((int) dims[0], (int) dims[1]);
if (requiresRotation) {
// Rotate the screenshot to the current orientation
Bitmap ss = Bitmap.createBitmap(mDisplayMetrics.widthPixels,
mDisplayMetrics.heightPixels, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(ss);
c.translate(ss.getWidth() / 2, ss.getHeight() / 2);
c.rotate(degrees);
c.translate(-dims[0] / 2, -dims[1] / 2);
c.drawBitmap(mScreenBitmap, 0, 0, null);
c.setBitmap(null);
mScreenBitmap = ss;
}
// If we couldn't take the screenshot, notify the user
if (mScreenBitmap == null) {
return;
}
// Optimizations
mScreenBitmap.setHasAlpha(false);
mScreenBitmap.prepareToDraw();
try {
saveBitmap(mScreenBitmap);
} catch (IOException e) {
System.out.println(e.getMessage());
}
}
public void saveBitmap(Bitmap bitmap) throws IOException {
String imageDate = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss")
.format(new Date(System.currentTimeMillis()));
File file = new File("/mnt/sdcard/Pictures/"+imageDate+".png");
if(!file.exists()){
file.createNewFile();
}
FileOutputStream out;
try {
out = new FileOutputStream(file);
if (bitmap.compress(Bitmap.CompressFormat.PNG, 70, out)) {
out.flush();
out.close();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
PS: 1. You need to add android:sharedUserId="android.uid.system" to AndroidManifest.xml.
2. Because hidden (@hide) APIs are called, build the code with the platform makefile, or make it compile in Eclipse by adding the corresponding jar to the project.
3. This code has only been used on Android 4.0; it has not been tested on 2.3.
Taking screenshots with TakeScreenShotService
Android phones generally ship with a built-in screen capture feature: on any screen (with the phone awake), press a key combination, the screen flashes, a shutter sound plays, and the screenshot is saved into the phone's gallery. A genuinely useful feature!
On my test phone the combination is power + volume-down; an iPhone uses power + home, Xiaomi phones use menu + volume-down, and HTC phones usually hold power and press the 'home' key at the lower left. So how does the Android source implement screen capture from a key combination? I looked into it a while ago for work, and now that things are quieter I have pulled the relevant pieces together. The flow breaks down into the following two parts:
Capturing the key combination in the Android source.
The key handling lives in PhoneWindowManager.java (alps\frameworks\base\policy\src\com\android\internal\policy\impl). This class processes all keyboard input events, and its interceptKeyBeforeQueueing() method gives special treatment to the commonly used keys. Taking my test phone as the example (power + volume-down triggers the screenshot), the method contains these two pieces of code:
.......
case KeyEvent.KEYCODE_VOLUME_DOWN:
case KeyEvent.KEYCODE_VOLUME_UP:
case KeyEvent.KEYCODE_VOLUME_MUTE: {
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
if (down) {
if (isScreenOn && !mVolumeDownKeyTriggered
&& (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) {
mVolumeDownKeyTriggered = true;
mVolumeDownKeyTime = event.getDownTime();
mVolumeDownKeyConsumedByScreenshotChord = false;
cancelPendingPowerKeyAction();
interceptScreenshotChord();
}
} else {
mVolumeDownKeyTriggered = false;
cancelPendingScreenshotChordAction();
}
......
case KeyEvent.KEYCODE_POWER: {
result &= ~ACTION_PASS_TO_USER;
if (down) {
if (isScreenOn && !mPowerKeyTriggered
&& (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) {
mPowerKeyTriggered = true;
mPowerKeyTime = event.getDownTime();
interceptScreenshotChord();
}
......
You can see that this is where (in the handling of the down events) the volume-down and power presses are detected, and both code paths call interceptScreenshotChord(). Let's see what that function does:
private void interceptScreenshotChord() {
if (mVolumeDownKeyTriggered && mPowerKeyTriggered && !mVolumeUpKeyTriggered) {
final long now = SystemClock.uptimeMillis();
if (now <= mVolumeDownKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS
&& now <= mPowerKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS) {
mVolumeDownKeyConsumedByScreenshotChord = true;
cancelPendingPowerKeyAction();
mHandler.postDelayed(mScreenshotChordLongPress,
ViewConfiguration.getGlobalActionKeyTimeout());
}
}
}
This function uses the two boolean flags to check that volume-down and power are both pressed, then checks that the down events of the two keys arrived within 150 milliseconds of each other. Only then are the keys considered pressed simultaneously and the screenshot chord truly detected.
Note: PhoneWindowManager.java is the class that intercepts keyboard events; it also contains the handling for many other keys such as home and back.
How the Android source invokes the screenshot interface
Once the chord is detected, let's see how the Android source calls the screenshot interface. In interceptScreenshotChord() above, the handler posts a delayed runnable; after the chord has been held for the global action key timeout (about 500 milliseconds), the following runnable runs:
private final Runnable mScreenshotChordLongPress = new Runnable() {
public void run() {
takeScreenshot();
}
};
This posts a task that performs the screenshot; next, look at takeScreenshot():
private void takeScreenshot() {
synchronized (mScreenshotLock) {
if (mScreenshotConnection != null) {
return;
}
ComponentName cn = new ComponentName("com.android.systemui",
"com.android.systemui.screenshot.TakeScreenshotService");
Intent intent = new Intent();
intent.setComponent(cn);
ServiceConnection conn = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
synchronized (mScreenshotLock) {
if (mScreenshotConnection != this) {
return;
}
Messenger messenger = new Messenger(service);
Message msg = Message.obtain(null, 1);
final ServiceConnection myConn = this;
Handler h = new Handler(mHandler.getLooper()) {
@Override
public void handleMessage(Message msg) {
synchronized (mScreenshotLock) {
if (mScreenshotConnection == myConn) {
mContext.unbindService(mScreenshotConnection);
mScreenshotConnection = null;
mHandler.removeCallbacks(mScreenshotTimeout);
}
}
}
};
msg.replyTo = new Messenger(h);
msg.arg1 = msg.arg2 = 0;
if (mStatusBar != null && mStatusBar.isVisibleLw())
msg.arg1 = 1;
if (mNavigationBar != null && mNavigationBar.isVisibleLw())
msg.arg2 = 1;
try {
messenger.send(msg);
} catch (RemoteException e) {
}
}
}
@Override
public void onServiceDisconnected(ComponentName name) {}
};
if (mContext.bindService(intent, conn, Context.BIND_AUTO_CREATE)) {
mScreenshotConnection = conn;
mHandler.postDelayed(mScreenshotTimeout, 10000);
}
}
}
This function binds (via a Messenger) to the service "com.android.systemui.screenshot.TakeScreenshotService"; note how msg.arg1 and msg.arg2 are set once the service connection is established, and how mScreenshotTimeout handles a service timeout. Next we find TakeScreenshotService, the class that implements the service, and look at how it works:
public class TakeScreenshotService extends Service {
private static final String TAG = "TakeScreenshotService";
private static GlobalScreenshot mScreenshot;
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 1:
final Messenger callback = msg.replyTo;
if (mScreenshot == null) {
mScreenshot = new GlobalScreenshot(TakeScreenshotService.this);
}
mScreenshot.takeScreenshot(new Runnable() {
@Override public void run() {
Message reply = Message.obtain(null, 1);
try {
callback.send(reply);
} catch (RemoteException e) {
}
}
}, msg.arg1 > 0, msg.arg2 > 0);
}
}
};
@Override
public IBinder onBind(Intent intent) {
return new Messenger(mHandler).getBinder();
}
}
In this class the call that matters is mScreenshot.takeScreenshot(), which takes three arguments: a Runnable plus the msg.arg1 and msg.arg2 values carried by the Message. Finally, look at takeScreenshot() itself, found in GlobalScreenshot.java (same method name as before, but a different file):
/**
* Takes a screenshot of the current display and shows an animation.
*/
void takeScreenshot(Runnable finisher, boolean statusBarVisible, boolean navBarVisible) {
// We need to orient the screenshot correctly (and the Surface api seems to take screenshots
// only in the natural orientation of the device :!)
mDisplay.getRealMetrics(mDisplayMetrics);
float[] dims = {mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels};
float degrees = getDegreesForRotation(mDisplay.getRotation());
boolean requiresRotation = (degrees > 0);
if (requiresRotation) {
// Get the dimensions of the device in its native orientation
mDisplayMatrix.reset();
mDisplayMatrix.preRotate(-degrees);
mDisplayMatrix.mapPoints(dims);
dims[0] = Math.abs(dims[0]);
dims[1] = Math.abs(dims[1]);
}
// Take the screenshot
mScreenBitmap = Surface.screenshot((int) dims[0], (int) dims[1]);
if (mScreenBitmap == null) {
notifyScreenshotError(mContext, mNotificationManager);
finisher.run();
return;
}
if (requiresRotation) {
// Rotate the screenshot to the current orientation
Bitmap ss = Bitmap.createBitmap(mDisplayMetrics.widthPixels,
mDisplayMetrics.heightPixels, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(ss);
c.translate(ss.getWidth() / 2, ss.getHeight() / 2);
c.rotate(degrees);
c.translate(-dims[0] / 2, -dims[1] / 2);
c.drawBitmap(mScreenBitmap, 0, 0, null);
c.setBitmap(null);
mScreenBitmap = ss;
}
// Optimizations
mScreenBitmap.setHasAlpha(false);
mScreenBitmap.prepareToDraw();
// Start the post-screenshot animation
startAnimation(finisher, mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels,
statusBarVisible, navBarVisible);
}
The comments in this code are fairly detailed, and this is where the real screenshot work happens: it obtains the screen size and rotation, calls Surface.screenshot() to capture the screen into a bitmap, rotates the bitmap back to the current orientation on a canvas if necessary, and finally starts the post-screenshot animation. Digging further into the screenshot method, we find it is already a native method:
/**
* Like {@link #screenshot(int, int, int, int)} but includes all
* Surfaces in the screenshot.
*
* @hide
*/
public static native Bitmap screenshot(int width, int height);
It calls down into native code through JNI. Going one level further, the JNI function is registered in android_view_Surface.cpp, which is C++ territory; the common underlying function is:
static jobject doScreenshot(JNIEnv* env, jobject clazz, jint width, jint height,
jint minLayer, jint maxLayer, bool allLayers)
{
ScreenshotPixelRef* pixels = new ScreenshotPixelRef(NULL);
if (pixels->update(width, height, minLayer, maxLayer, allLayers) != NO_ERROR) {
delete pixels;
return 0;
}
uint32_t w = pixels->getWidth();
uint32_t h = pixels->getHeight();
uint32_t s = pixels->getStride();
uint32_t f = pixels->getFormat();
ssize_t bpr = s * android::bytesPerPixel(f);
SkBitmap* bitmap = new SkBitmap();
bitmap->setConfig(convertPixelFormat(f), w, h, bpr);
if (f == PIXEL_FORMAT_RGBX_8888) {
bitmap->setIsOpaque(true);
}
if (w > 0 && h > 0) {
bitmap->setPixelRef(pixels)->unref();
bitmap->lockPixels();
} else {
// be safe with an empty bitmap.
delete pixels;
bitmap->setPixels(NULL);
}
return GraphicsJNI::createBitmap(env, bitmap, false, NULL);
}
To be covered later:
Canvas;
Bitmap.createBitmap;
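As a small preview of those two topics, here is a sketch of using Bitmap.createBitmap and Canvas to scale a captured screenshot to half size (the method name and scale factor are mine; it assumes the android.graphics Bitmap, Canvas, Paint and Rect imports):

public static Bitmap scaleToHalf(Bitmap src) {
    int w = src.getWidth() / 2;
    int h = src.getHeight() / 2;
    // createBitmap allocates the target bitmap; Canvas draws the source
    // into it, scaled down by the src/dst rectangle mapping.
    Bitmap dst = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(dst);
    Rect srcRect = new Rect(0, 0, src.getWidth(), src.getHeight());
    Rect dstRect = new Rect(0, 0, w, h);
    canvas.drawBitmap(src, srcRect, dstRect, new Paint(Paint.FILTER_BITMAP_FLAG));
    return dst;
}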