1. Taking a screenshot with the Power + Volume Down key chord

PhoneWindowManager.java

private void interceptScreenshotChord() {
if (mScreenshotChordEnabled
&& mVolumeDownKeyTriggered && mPowerKeyTriggered && !mVolumeUpKeyTriggered) {
final long now = SystemClock.uptimeMillis();
if (now <= mVolumeDownKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS
&& now <= mPowerKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS) {
mVolumeDownKeyConsumedByScreenshotChord = true;
cancelPendingPowerKeyAction();
mHandler.postDelayed(mScreenshotRunnable, getScreenshotChordLongPressDelay());②
}
}
}
private final Runnable mScreenshotRunnable = new Runnable() {
@Override
public void run() {
takeScreenshot();③
}
};
@Override
public int interceptKeyBeforeQueueing(KeyEvent event, int policyFlags, boolean isScreenOn) {
...
final boolean down = event.getAction() == KeyEvent.ACTION_DOWN;
...
// Handle special keys.
switch (keyCode) {
case KeyEvent.KEYCODE_VOLUME_DOWN:
case KeyEvent.KEYCODE_VOLUME_UP:
case KeyEvent.KEYCODE_VOLUME_MUTE: {
if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) {
if (down) {
if (isScreenOn && !mVolumeDownKeyTriggered
&& (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) {
mVolumeDownKeyTriggered = true;
mVolumeDownKeyTime = event.getDownTime();
mVolumeDownKeyConsumedByScreenshotChord = false;
cancelPendingPowerKeyAction();
interceptScreenshotChord();①
}
} else {
mVolumeDownKeyTriggered = false;
cancelPendingScreenshotChordAction();
}
...
case KeyEvent.KEYCODE_POWER: {
result &= ~ACTION_PASS_TO_USER;
if (down) {
if (isScreenOn && !mPowerKeyTriggered
&& (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) {
mPowerKeyTriggered = true;
mPowerKeyTime = event.getDownTime();
interceptScreenshotChord();①
}
...
// Assume this is called from the Handler thread.
private void takeScreenshot() {
synchronized (mScreenshotLock) {
if (mScreenshotConnection != null) {
return;
}
ComponentName cn = new ComponentName("com.android.systemui",
"com.android.systemui.screenshot.TakeScreenshotService");④
Intent intent = new Intent();
intent.setComponent(cn);
ServiceConnection conn = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName name, IBinder service) {
synchronized (mScreenshotLock) {
if (mScreenshotConnection != this) {
return;
}
Messenger messenger = new Messenger(service);
Message msg = Message.obtain(null, 1);
final ServiceConnection myConn = this;
Handler h = new Handler(mHandler.getLooper()) {
@Override
public void handleMessage(Message msg) {
synchronized (mScreenshotLock) {
if (mScreenshotConnection == myConn) {
mContext.unbindService(mScreenshotConnection);
mScreenshotConnection = null;
mHandler.removeCallbacks(mScreenshotTimeout);
}
}
}
};
msg.replyTo = new Messenger(h);
msg.arg1 = msg.arg2 = 0;
if (mStatusBar != null && mStatusBar.isVisibleLw())
msg.arg1 = 1;
if (mNavigationBar != null && mNavigationBar.isVisibleLw())
msg.arg2 = 1;
try {
messenger.send(msg);
} catch (RemoteException e) {
}
}
}
@Override
public void onServiceDisconnected(ComponentName name) {}
};
if (mContext.bindServiceAsUser(
intent, conn, Context.BIND_AUTO_CREATE, UserHandle.CURRENT)) {
mScreenshotConnection = conn;
mHandler.postDelayed(mScreenshotTimeout, 10000);
}
}
}

TakeScreenshotService.java

public class TakeScreenshotService extends Service {
private static final String TAG = "TakeScreenshotService";
private static GlobalScreenshot mScreenshot;
private Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 1:
final Messenger callback = msg.replyTo;
if (mScreenshot == null) {
mScreenshot = new GlobalScreenshot(TakeScreenshotService.this);
}
mScreenshot.takeScreenshot(new Runnable() {
@Override public void run() {
Message reply = Message.obtain(null, 1);
try {
callback.send(reply);
} catch (RemoteException e) {
}
}
}, msg.arg1 > 0, msg.arg2 > 0);
}
}
};
@Override
public IBinder onBind(Intent intent) {
return new Messenger(mHandler).getBinder();
}
}
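
For reference, below is a minimal, hedged sketch of the same Messenger-based binding that PhoneWindowManager performs against TakeScreenshotService. The client class name and the reply handling are illustrative assumptions, not framework source; note too that TakeScreenshotService is not exported, so an ordinary application cannot actually bind to it — the sketch only shows the IPC pattern.

// Hedged sketch (assumed client code): bind to a Messenger service and send
// message what == 1, mirroring what PhoneWindowManager.takeScreenshot() does.
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.Messenger;
import android.os.RemoteException;

public class MessengerClientSketch {
    private Messenger mService;

    private final ServiceConnection mConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            mService = new Messenger(service);            // wrap the remote binder
            Message msg = Message.obtain(null, 1);        // msg.what == 1, as in the framework code
            msg.replyTo = new Messenger(new Handler());   // runs on the main looper; receives the completion reply
            try {
                mService.send(msg);
            } catch (RemoteException e) {
                // remote process died; nothing to do in this sketch
            }
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            mService = null;
        }
    };

    public void bind(Context context, ComponentName serviceComponent) {
        Intent intent = new Intent().setComponent(serviceComponent);
        context.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
    }
}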

GlobalScreenshot.java

SaveImageInBackgroundTask(Context context, SaveImageInBackgroundData data,
NotificationManager nManager, int nId) {
Resources r = context.getResources();
// Prepare all the output metadata
mImageTime = System.currentTimeMillis();
String imageDate = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(new Date(mImageTime));
mImageFileName = String.format(SCREENSHOT_FILE_NAME_TEMPLATE, imageDate);
mScreenshotDir = new File(Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES), SCREENSHOTS_DIR_NAME);
mImageFilePath = new File(mScreenshotDir, mImageFileName).getAbsolutePath();④
// Create the large notification icon
mImageWidth = data.image.getWidth();
mImageHeight = data.image.getHeight();
int iconSize = data.iconSize;
final int shortSide = mImageWidth < mImageHeight ? mImageWidth : mImageHeight;
Bitmap preview = Bitmap.createBitmap(shortSide, shortSide, data.image.getConfig());
Canvas c = new Canvas(preview);
Paint paint = new Paint();
ColorMatrix desat = new ColorMatrix();
desat.setSaturation(0.25f);
paint.setColorFilter(new ColorMatrixColorFilter(desat));
Matrix matrix = new Matrix();
matrix.postTranslate((shortSide - mImageWidth) / 2,
(shortSide - mImageHeight) / 2);
c.drawBitmap(data.image, matrix, paint);
c.drawColor(0x40FFFFFF);
c.setBitmap(null);
Bitmap croppedIcon = Bitmap.createScaledBitmap(preview, iconSize, iconSize, true);
// Show the intermediate notification
mTickerAddSpace = !mTickerAddSpace;
mNotificationId = nId;
mNotificationManager = nManager;
mNotificationBuilder = new Notification.Builder(context)
.setTicker(r.getString(R.string.screenshot_saving_ticker)
+ (mTickerAddSpace ? " " : ""))
.setContentTitle(r.getString(R.string.screenshot_saving_title))
.setContentText(r.getString(R.string.screenshot_saving_text))
.setSmallIcon(R.drawable.stat_notify_image)
.setWhen(System.currentTimeMillis());
mNotificationStyle = new Notification.BigPictureStyle()
.bigPicture(preview);
mNotificationBuilder.setStyle(mNotificationStyle);
Notification n = mNotificationBuilder.build();
n.flags |= Notification.FLAG_NO_CLEAR;
mNotificationManager.notify(nId, n);
// On the tablet, the large icon makes the notification appear as if it is clickable (and
// on small devices, the large icon is not shown) so defer showing the large icon until
// we compose the final post-save notification below.
mNotificationBuilder.setLargeIcon(croppedIcon);
// But we still don't set it for the expanded view, allowing the smallIcon to show here.
mNotificationStyle.bigLargeIcon(null);
}
/**
* Creates a new worker thread and saves the screenshot to the media store.
*/
private void saveScreenshotInWorkerThread(Runnable finisher) {
SaveImageInBackgroundData data = new SaveImageInBackgroundData();
data.context = mContext;
data.image = mScreenBitmap;
data.iconSize = mNotificationIconSize;
data.finisher = finisher;
if (mSaveInBgTask != null) {
mSaveInBgTask.cancel(false);
}
mSaveInBgTask = new SaveImageInBackgroundTask(mContext, data, mNotificationManager,
SCREENSHOT_NOTIFICATION_ID).execute(data);③
}
/**
* Takes a screenshot of the current display and shows an animation.
*/
void takeScreenshot(Runnable finisher, boolean statusBarVisible, boolean navBarVisible) {
// We need to orient the screenshot correctly (and the Surface api seems to take screenshots
// only in the natural orientation of the device :!)
mDisplay.getRealMetrics(mDisplayMetrics);
float[] dims = {mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels};
float degrees = getDegreesForRotation(mDisplay.getRotation());
boolean requiresRotation = (degrees > 0);
if (requiresRotation) {
// Get the dimensions of the device in its native orientation
mDisplayMatrix.reset();
mDisplayMatrix.preRotate(-degrees);
mDisplayMatrix.mapPoints(dims);
dims[0] = Math.abs(dims[0]);
dims[1] = Math.abs(dims[1]);
}
// Take the screenshot
mScreenBitmap = SurfaceControl.screenshot((int) dims[0], (int) dims[1]);①
if (mScreenBitmap == null) {
notifyScreenshotError(mContext, mNotificationManager);
finisher.run();
return;
}
if (requiresRotation) {
// Rotate the screenshot to the current orientation
Bitmap ss = Bitmap.createBitmap(mDisplayMetrics.widthPixels,
mDisplayMetrics.heightPixels, Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(ss);
c.translate(ss.getWidth() / 2, ss.getHeight() / 2);
c.rotate(degrees);
c.translate(-dims[0] / 2, -dims[1] / 2);
c.drawBitmap(mScreenBitmap, 0, 0, null);
c.setBitmap(null);
// Recycle the previous bitmap
mScreenBitmap.recycle();
mScreenBitmap = ss;
}
// Optimizations
mScreenBitmap.setHasAlpha(false);
mScreenBitmap.prepareToDraw();
// Start the post-screenshot animation
startAnimation(finisher, mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels,
statusBarVisible, navBarVisible);
}
/**
* Starts the animation after taking the screenshot
*/
private void startAnimation(final Runnable finisher, int w, int h, boolean statusBarVisible,
boolean navBarVisible) {
// Add the view for the animation
mScreenshotView.setImageBitmap(mScreenBitmap);
mScreenshotLayout.requestFocus();
// Setup the animation with the screenshot just taken
if (mScreenshotAnimation != null) {
mScreenshotAnimation.end();
mScreenshotAnimation.removeAllListeners();
}
mWindowManager.addView(mScreenshotLayout, mWindowLayoutParams);
ValueAnimator screenshotDropInAnim = createScreenshotDropInAnimation();
ValueAnimator screenshotFadeOutAnim = createScreenshotDropOutAnimation(w, h,
statusBarVisible, navBarVisible);
mScreenshotAnimation = new AnimatorSet();
mScreenshotAnimation.playSequentially(screenshotDropInAnim, screenshotFadeOutAnim);
mScreenshotAnimation.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
// Save the screenshot once we have a bit of time now
saveScreenshotInWorkerThread(finisher);②
mWindowManager.removeView(mScreenshotLayout);
// Clear any references to the bitmap
mScreenBitmap = null;
mScreenshotView.setImageBitmap(null);
}
});
mScreenshotLayout.post(new Runnable() {
@Override
public void run() {
// Play the shutter sound to notify that we've taken a screenshot
mCameraSound.play(MediaActionSound.SHUTTER_CLICK);
mScreenshotView.setLayerType(View.LAYER_TYPE_HARDWARE, null);
mScreenshotView.buildLayer();
mScreenshotAnimation.start();
}
});
}

SurfaceControl.java

/**
* Like {@link SurfaceControl#screenshot(int, int, int, int)} but includes all
* Surfaces in the screenshot.
*
* @param width The desired width of the returned bitmap; the raw
* screen will be scaled down to this size.
* @param height The desired height of the returned bitmap; the raw
* screen will be scaled down to this size.
* @return Returns a Bitmap containing the screen contents, or null
* if an error occurs. Make sure to call Bitmap.recycle() as soon as
* possible, once its content is not needed anymore.
*/
public static Bitmap screenshot(int width, int height) {
// TODO: should take the display as a parameter
IBinder displayToken = SurfaceControl.getBuiltInDisplay(
SurfaceControl.BUILT_IN_DISPLAY_ID_MAIN);
return nativeScreenshot(displayToken, width, height, 0, 0, true);
}
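
SurfaceControl.screenshot() is a hidden (@hide) API, and on these 4.x sources SurfaceFlinger additionally rejects callers that do not hold a privileged permission (READ_FRAME_BUFFER or equivalent), so only platform/system-signed code and shell can use it. For a system-privileged app, a hedged reflection sketch looks like the following; the class and method names match the hidden API quoted above, but this is an assumption rather than a supported interface.

// Hedged sketch: invoking the hidden SurfaceControl.screenshot(int, int) via reflection.
// Only succeeds for callers SurfaceFlinger trusts (platform-signed apps, shell); for
// ordinary apps the call typically fails (returns null or throws a SecurityException).
import android.graphics.Bitmap;
import java.lang.reflect.Method;

public final class HiddenScreenshot {
    public static Bitmap capture(int width, int height) throws Exception {
        Class<?> surfaceControl = Class.forName("android.view.SurfaceControl");
        Method screenshot = surfaceControl.getMethod("screenshot", int.class, int.class);
        return (Bitmap) screenshot.invoke(null, width, height);
    }
}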

android_view_SurfaceControl.cpp

static void nativeScreenshot(JNIEnv* env, jclass clazz,
jobject displayTokenObj, jobject surfaceObj,
jint width, jint height, jint minLayer, jint maxLayer, bool allLayers) {
sp<IBinder> displayToken = ibinderForJavaObject(env, displayTokenObj);
if (displayToken != NULL) {
sp<Surface> consumer = android_view_Surface_getSurface(env, surfaceObj);
if (consumer != NULL) {
if (allLayers) {
minLayer = 0;
maxLayer = -1;
}
ScreenshotClient::capture(
displayToken, consumer->getIGraphicBufferProducer(),
width, height, uint32_t(minLayer), uint32_t(maxLayer));
}
}
}
static JNINativeMethod sSurfaceControlMethods[] = {
...
{"nativeScreenshot", "(Landroid/os/IBinder;IIIIZ)Landroid/graphics/Bitmap;",
(void*)nativeScreenshotBitmap },
{"nativeScreenshot", "(Landroid/os/IBinder;Landroid/view/Surface;IIIIZ)V",
(void*)nativeScreenshot },
...

SurfaceComposerClient.cpp

status_t ScreenshotClient::capture(
const sp<IBinder>& display,
const sp<IGraphicBufferProducer>& producer,
uint32_t reqWidth, uint32_t reqHeight,
uint32_t minLayerZ, uint32_t maxLayerZ) {
sp<ISurfaceComposer> s(ComposerService::getComposerService());
if (s == NULL) return NO_INIT;
return s->captureScreen(display, producer,
reqWidth, reqHeight, minLayerZ, maxLayerZ,
false);
}

SurfaceFlinger.cpp

status_t SurfaceFlinger::captureScreen(const sp<IBinder>& display,
const sp<IGraphicBufferProducer>& producer,
uint32_t reqWidth, uint32_t reqHeight,
uint32_t minLayerZ, uint32_t maxLayerZ,
bool isCpuConsumer) {
if (CC_UNLIKELY(display == 0))
return BAD_VALUE;
if (CC_UNLIKELY(producer == 0))
return BAD_VALUE;
class MessageCaptureScreen : public MessageBase {
SurfaceFlinger* flinger;
sp<IBinder> display;
sp<IGraphicBufferProducer> producer;
uint32_t reqWidth, reqHeight;
uint32_t minLayerZ,maxLayerZ;
bool useReadPixels;
status_t result;
public:
MessageCaptureScreen(SurfaceFlinger* flinger,
const sp<IBinder>& display,
const sp<IGraphicBufferProducer>& producer,
uint32_t reqWidth, uint32_t reqHeight,
uint32_t minLayerZ, uint32_t maxLayerZ, bool useReadPixels)
: flinger(flinger), display(display), producer(producer),
reqWidth(reqWidth), reqHeight(reqHeight),
minLayerZ(minLayerZ), maxLayerZ(maxLayerZ),
useReadPixels(useReadPixels),
result(PERMISSION_DENIED)
{
}
status_t getResult() const {
return result;
}
virtual bool handler() {
Mutex::Autolock _l(flinger->mStateLock);
sp<const DisplayDevice> hw(flinger->getDisplayDevice(display));
if (!useReadPixels) {
result = flinger->captureScreenImplLocked(hw,
producer, reqWidth, reqHeight, minLayerZ, maxLayerZ);
} else {
result = flinger->captureScreenImplCpuConsumerLocked(hw,
producer, reqWidth, reqHeight, minLayerZ, maxLayerZ);
}
static_cast<GraphicProducerWrapper*>(producer->asBinder().get())->exit(result);
return true;
}
};
// make sure to process transactions before screenshots -- a transaction
// might already be pending but scheduled for VSYNC; this guarantees we
// will handle it before the screenshot. When VSYNC finally arrives
// the scheduled transaction will be a no-op. If no transactions are
// scheduled at this time, this will end-up being a no-op as well.
mEventQueue.invalidateTransactionNow();
bool useReadPixels = false;
if (isCpuConsumer) {
bool formatSupportedBytBitmap =
(mEGLNativeVisualId == HAL_PIXEL_FORMAT_RGBA_8888) ||
(mEGLNativeVisualId == HAL_PIXEL_FORMAT_RGBX_8888);
if (formatSupportedBytBitmap == false) {
// the pixel format we have is not compatible with
// Bitmap.java, which is the likely client of this API,
// so we just revert to glReadPixels() in that case.
useReadPixels = true;
}
if (mGpuToCpuSupported == false) {
// When we know the GL->CPU path works, we can call
// captureScreenImplLocked() directly, instead of using the
// glReadPixels() workaround.
useReadPixels = true;
}
}
// this creates a "fake" BBinder which will serve as a "fake" remote
// binder to receive the marshaled calls and forward them to the
// real remote (a BpGraphicBufferProducer)
sp<GraphicProducerWrapper> wrapper = new GraphicProducerWrapper(producer);
// the asInterface() call below creates our "fake" BpGraphicBufferProducer
// which does the marshaling work forwards to our "fake remote" above.
sp<MessageBase> msg = new MessageCaptureScreen(this,
display, IGraphicBufferProducer::asInterface( wrapper ),
reqWidth, reqHeight, minLayerZ, maxLayerZ,
useReadPixels);
status_t res = postMessageAsync(msg);
if (res == NO_ERROR) {
res = wrapper->waitForResponse();
}
return res;
}
status_t SurfaceFlinger::captureScreenImplLocked(
const sp<const DisplayDevice>& hw,
const sp<IGraphicBufferProducer>& producer,
uint32_t reqWidth, uint32_t reqHeight,
uint32_t minLayerZ, uint32_t maxLayerZ)
{
ATRACE_CALL();
// get screen geometry
const uint32_t hw_w = hw->getWidth();
const uint32_t hw_h = hw->getHeight();
// if we have secure windows on this display, never allow the screen capture
if (hw->getSecureLayerVisible()) {
ALOGW("FB is protected: PERMISSION_DENIED");
return PERMISSION_DENIED;
}
if ((reqWidth > hw_w) || (reqHeight > hw_h)) {
ALOGE("size mismatch (%d, %d) > (%d, %d)",
reqWidth, reqHeight, hw_w, hw_h);
return BAD_VALUE;
}
reqWidth = (!reqWidth) ? hw_w : reqWidth;
reqHeight = (!reqHeight) ? hw_h : reqHeight;
// Create a surface to render into
sp<Surface> surface = new Surface(producer);
ANativeWindow* const window = surface.get();
// set the buffer size to what the user requested
native_window_set_buffers_user_dimensions(window, reqWidth, reqHeight);
// and create the corresponding EGLSurface
EGLSurface eglSurface = eglCreateWindowSurface(
mEGLDisplay, mEGLConfig, window, NULL);
if (eglSurface == EGL_NO_SURFACE) {
ALOGE("captureScreenImplLocked: eglCreateWindowSurface() failed 0x%4x",
eglGetError());
return BAD_VALUE;
}
if (!eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) {
ALOGE("captureScreenImplLocked: eglMakeCurrent() failed 0x%4x",
eglGetError());
eglDestroySurface(mEGLDisplay, eglSurface);
return BAD_VALUE;
}
renderScreenImplLocked(hw, reqWidth, reqHeight, minLayerZ, maxLayerZ, false);
// and finishing things up...
if (eglSwapBuffers(mEGLDisplay, eglSurface) != EGL_TRUE) {
ALOGE("captureScreenImplLocked: eglSwapBuffers() failed 0x%4x",
eglGetError());
eglDestroySurface(mEGLDisplay, eglSurface);
return BAD_VALUE;
}
eglDestroySurface(mEGLDisplay, eglSurface);
return NO_ERROR;
}
status_t SurfaceFlinger::captureScreenImplCpuConsumerLocked(
const sp<const DisplayDevice>& hw,
const sp<IGraphicBufferProducer>& producer,
uint32_t reqWidth, uint32_t reqHeight,
uint32_t minLayerZ, uint32_t maxLayerZ)
{
ATRACE_CALL();
if (!GLExtensions::getInstance().haveFramebufferObject()) {
return INVALID_OPERATION;
}
// get screen geometry
const uint32_t hw_w = hw->getWidth();
const uint32_t hw_h = hw->getHeight();
// if we have secure windows on this display, never allow the screen capture
if (hw->getSecureLayerVisible()) {
ALOGW("FB is protected: PERMISSION_DENIED");
return PERMISSION_DENIED;
}
if ((reqWidth > hw_w) || (reqHeight > hw_h)) {
ALOGE("size mismatch (%d, %d) > (%d, %d)",
reqWidth, reqHeight, hw_w, hw_h);
return BAD_VALUE;
}
reqWidth = (!reqWidth) ? hw_w : reqWidth;
reqHeight = (!reqHeight) ? hw_h : reqHeight;
GLuint tname;
glGenRenderbuffersOES(1, &tname);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, tname);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, reqWidth, reqHeight);
// create a FBO
GLuint name;
glGenFramebuffersOES(1, &name);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, name);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES,
GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, tname);
GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES);
status_t result = NO_ERROR;
if (status == GL_FRAMEBUFFER_COMPLETE_OES) {
renderScreenImplLocked(hw, reqWidth, reqHeight, minLayerZ, maxLayerZ, true);
// Below we render the screenshot into the
// CpuConsumer using glReadPixels from our FBO.
// Some older drivers don't support the GL->CPU path so we
// have to wrap it with a CPU->CPU path, which is what
// glReadPixels essentially is.
sp<Surface> sur = new Surface(producer);
ANativeWindow* window = sur.get();
if (native_window_api_connect(window, NATIVE_WINDOW_API_CPU) == NO_ERROR) {
int err = 0;
err = native_window_set_buffers_dimensions(window, reqWidth, reqHeight);
err |= native_window_set_buffers_format(window, HAL_PIXEL_FORMAT_RGBA_8888);
err |= native_window_set_usage(window,
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
if (err == NO_ERROR) {
ANativeWindowBuffer* buffer;
if (native_window_dequeue_buffer_and_wait(window, &buffer) == NO_ERROR) {
sp<GraphicBuffer> buf = static_cast<GraphicBuffer*>(buffer);
void* vaddr;
if (buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, &vaddr) == NO_ERROR) {
glReadPixels(0, 0, buffer->stride, reqHeight,
GL_RGBA, GL_UNSIGNED_BYTE, vaddr);
buf->unlock();
}
window->queueBuffer(window, buffer, -1);
}
}
native_window_api_disconnect(window, NATIVE_WINDOW_API_CPU);
}
} else {
ALOGE("got GL_FRAMEBUFFER_COMPLETE_OES while taking screenshot");
result = INVALID_OPERATION;
}
// back to main framebuffer
glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0);
glDeleteRenderbuffersOES(1, &tname);
glDeleteFramebuffersOES(1, &name);
DisplayDevice::setViewportAndProjection(hw);
return result;
}

2. Taking a screenshot from the framebuffer device /dev/graphics/fb0

a. The built-in screencap tool: screencap.cpp

int main(int argc, char** argv)
{
ProcessState::self()->startThreadPool();
const char* pname = argv[0];
bool png = false;
int32_t displayId = DEFAULT_DISPLAY_ID;
int c;
while ((c = getopt(argc, argv, "phd:")) != -1) {
switch (c) {
case 'p':
png = true;
break;
case 'd':
displayId = atoi(optarg);
break;
case '?':
case 'h':
usage(pname);
return 1;
}
}
argc -= optind;
argv += optind;
int fd = -1;
if (argc == 0) {
fd = dup(STDOUT_FILENO);
} else if (argc == 1) {
const char* fn = argv[0];
fd = open(fn, O_WRONLY | O_CREAT | O_TRUNC, 0664);
if (fd == -1) {
fprintf(stderr, "Error opening file: %s (%s)\n", fn, strerror(errno));
return 1;
}
const int len = strlen(fn);
if (len >= 4 && 0 == strcmp(fn+len-4, ".png")) {
png = true;
}
}
if (fd == -1) {
usage(pname);
return 1;
}
void const* mapbase = MAP_FAILED;
ssize_t mapsize = -1;
void const* base = 0;
uint32_t w, s, h, f;
size_t size = 0;
ScreenshotClient screenshot;
sp<IBinder> display = SurfaceComposerClient::getBuiltInDisplay(displayId);
if (display != NULL && screenshot.update(display) == NO_ERROR) {
base = screenshot.getPixels();
w = screenshot.getWidth();
h = screenshot.getHeight();
s = screenshot.getStride();
f = screenshot.getFormat();
size = screenshot.getSize();
} else {
const char* fbpath = "/dev/graphics/fb0";
int fb = open(fbpath, O_RDONLY);
if (fb >= 0) {
struct fb_var_screeninfo vinfo;
if (ioctl(fb, FBIOGET_VSCREENINFO, &vinfo) == 0) {
uint32_t bytespp;
if (vinfoToPixelFormat(vinfo, &bytespp, &f) == NO_ERROR) {
size_t offset = (vinfo.xoffset + vinfo.yoffset*vinfo.xres) * bytespp;
w = vinfo.xres;
h = vinfo.yres;
s = vinfo.xres;
size = w*h*bytespp;
mapsize = offset + size;
mapbase = mmap(0, mapsize, PROT_READ, MAP_PRIVATE, fb, 0);
if (mapbase != MAP_FAILED) {
base = (void const *)((char const *)mapbase + offset);
}
}
}
close(fb);
}
}
if (base) {
if (png) {
SkBitmap b;
b.setConfig(flinger2skia(f), w, h, s*bytesPerPixel(f));
b.setPixels((void*)base);
SkDynamicMemoryWStream stream;
SkImageEncoder::EncodeStream(&stream, b,
SkImageEncoder::kPNG_Type, SkImageEncoder::kDefaultQuality);
SkData* streamData = stream.copyToData();
write(fd, streamData->data(), streamData->size());
streamData->unref();
} else {
write(fd, &w, 4);
write(fd, &h, 4);
write(fd, &f, 4);
size_t Bpp = bytesPerPixel(f);
for (size_t y=0 ; y<h ; y++) {
write(fd, base, w*Bpp);
base = (void *)((char *)base + s*Bpp);
}
}
}
close(fd);
if (mapbase != MAP_FAILED) {
munmap((void *)mapbase, mapsize);
}
return 0;
}
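
When the output path does not end in .png, screencap writes a raw stream: three 32-bit integers (width, height, pixel format, in the device's native byte order, which is little-endian on Android hardware) followed by the pixel rows. The adb framebuffer service shown later in this section (framebuffer_service.c) consumes exactly this stream. On a host machine the usual invocation is simply `adb shell screencap -p /sdcard/screen.png` followed by `adb pull`. As a hedged illustration of the raw format, a host-side header reader might look like this sketch; the 12-byte little-endian layout is assumed from the 4.x source above.

// Hedged sketch: parse the raw (non-PNG) header that screencap writes to stdout.
// Assumed layout: three 32-bit little-endian integers (width, height, pixel format),
// followed by height rows of width * bytesPerPixel pixel data.
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public final class RawScreencapHeader {
    public final int width, height, format;

    private RawScreencapHeader(int w, int h, int f) {
        width = w; height = h; format = f;
    }

    public static RawScreencapHeader read(InputStream in) throws IOException {
        byte[] header = new byte[12];
        new DataInputStream(in).readFully(header);
        ByteBuffer buf = ByteBuffer.wrap(header).order(ByteOrder.LITTLE_ENDIAN);
        return new RawScreencapHeader(buf.getInt(), buf.getInt(), buf.getInt());
    }
}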

b. DDMS

ScreenShotDialog.java

/**
* Captures a new image from the device, and display it.
*/
private void updateDeviceImage(Shell shell) {
mBusyLabel.setText("Capturing..."); // no effect shell.setCursor(shell.getDisplay().getSystemCursor(SWT.CURSOR_WAIT)); mRawImage = getDeviceImage();① updateImageDisplay(shell);
} /**
* Updates the display with {@link #mRawImage}.
* @param shell
*/
private void updateImageDisplay(Shell shell) {
Image image;
if (mRawImage == null) {
Display display = shell.getDisplay();
image = ImageLoader.createPlaceHolderArt(
display, 320, 240, display.getSystemColor(SWT.COLOR_BLUE));
mSave.setEnabled(false);
mBusyLabel.setText("Screen not available");
} else {
// convert raw data to an Image.
PaletteData palette = new PaletteData(
mRawImage.getRedMask(),
mRawImage.getGreenMask(),
mRawImage.getBlueMask());
ImageData imageData = new ImageData(mRawImage.width, mRawImage.height,
mRawImage.bpp, palette, 1, mRawImage.data);
image = new Image(getParent().getDisplay(), imageData);
mSave.setEnabled(true);
mBusyLabel.setText("Captured image:");
}
mImageLabel.setImage(image);
mImageLabel.pack();
shell.pack();
// there's no way to restore old cursor; assume it's ARROW
shell.setCursor(shell.getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
}
/**
* Grabs an image from an ADB-connected device and returns it as a {@link RawImage}.
*/
private RawImage getDeviceImage() {
try {
return mDevice.getScreenshot();②
}
catch (IOException ioe) {
Log.w("ddms", "Unable to get frame buffer: " + ioe.getMessage());
return null;
} catch (TimeoutException e) {
Log.w("ddms", "Unable to get frame buffer: timeout ");
return null;
} catch (AdbCommandRejectedException e) {
Log.w("ddms", "Unable to get frame buffer: " + e.getMessage());
return null;
}
}

Device.java

@Override
public RawImage getScreenshot()
throws TimeoutException, AdbCommandRejectedException, IOException {
return AdbHelper.getFrameBuffer(AndroidDebugBridge.getSocketAddress(), this);③
}

AdbHelper.java

/**
* Retrieve the frame buffer from the device.
* @throws TimeoutException in case of timeout on the connection.
* @throws AdbCommandRejectedException if adb rejects the command
* @throws IOException in case of I/O error on the connection.
*/
static RawImage getFrameBuffer(InetSocketAddress adbSockAddr, Device device)
throws TimeoutException, AdbCommandRejectedException, IOException {
RawImage imageParams = new RawImage();
byte[] request = formAdbRequest("framebuffer:"); //$NON-NLS-1$ ④
byte[] nudge = {
0
};
byte[] reply;
SocketChannel adbChan = null;
try {
adbChan = SocketChannel.open(adbSockAddr);
adbChan.configureBlocking(false);
// if the device is not -1, then we first tell adb we're looking to talk
// to a specific device
setDevice(adbChan, device);
write(adbChan, request);
AdbResponse resp = readAdbResponse(adbChan, false /* readDiagString */);
if (!resp.okay) {
throw new AdbCommandRejectedException(resp.message);
}
// first the protocol version.
reply = new byte[4];
read(adbChan, reply);
ByteBuffer buf = ByteBuffer.wrap(reply);
buf.order(ByteOrder.LITTLE_ENDIAN);
int version = buf.getInt();
// get the header size (this is a count of int)
int headerSize = RawImage.getHeaderSize(version);
// read the header
reply = new byte[headerSize * 4];
read(adbChan, reply);
buf = ByteBuffer.wrap(reply);
buf.order(ByteOrder.LITTLE_ENDIAN);
// fill the RawImage with the header
if (!imageParams.readHeader(version, buf)) {
Log.e("Screenshot", "Unsupported protocol: " + version);
return null;
} Log.d("ddms", "image params: bpp=" + imageParams.bpp + ", size="
+ imageParams.size + ", width=" + imageParams.width
+ ", height=" + imageParams.height); write(adbChan, nudge); reply = new byte[imageParams.size];
read(adbChan, reply); imageParams.data = reply;
} finally {
if (adbChan != null) {
adbChan.close();
}
}
return imageParams;
}

services.c

static int create_service_thread(void (*func)(int, void *), void *cookie)
{
stinfo *sti;
adb_thread_t t;
int s[2];
if(adb_socketpair(s)) {
printf("cannot create service socket pair\n");
return -1;
}
sti = malloc(sizeof(stinfo));
if(sti == 0) fatal("cannot allocate stinfo");
sti->func = func;
sti->cookie = cookie;
sti->fd = s[1];
if(adb_thread_create( &t, service_bootstrap_func, sti)){
free(sti);
adb_close(s[0]);
adb_close(s[1]);
printf("cannot create service thread\n");
return -1;
} D("service thread started, %d:%d\n",s[0], s[1]);
return s[0];
} int service_to_fd(const char *name)
{
...
else if(!strncmp(name, "framebuffer:", 12)) {
ret = create_service_thread(framebuffer_service, 0);⑤
...

sysdeps.h

static __inline__ int  adb_thread_create( adb_thread_t  *thread, adb_thread_func_t  func, void*  arg)
{
thread->tid = _beginthread( (win_thread_func_t)func, 0, arg );
if (thread->tid == (unsigned)-1L) {
return -1;
}
return 0;
}

framebuffer_service.c

void framebuffer_service(int fd, void *cookie)
{
struct fbinfo fbinfo;
unsigned int i;
char buf[640];
int fd_screencap;
int w, h, f;
int fds[2];
if (pipe(fds) < 0) goto done;
pid_t pid = fork();
if (pid < 0) goto done;
if (pid == 0) {
dup2(fds[1], STDOUT_FILENO);
close(fds[0]);
close(fds[1]);
const char* command = "screencap";⑥
const char *args[2] = {command, NULL};
execvp(command, (char**)args);
exit(1);
}
fd_screencap = fds[0];
/* read w, h & format */
if(readx(fd_screencap, &w, 4)) goto done;
if(readx(fd_screencap, &h, 4)) goto done;
if(readx(fd_screencap, &f, 4)) goto done;
fbinfo.version = DDMS_RAWIMAGE_VERSION;
/* see hardware/hardware.h */
switch (f) {
case 1: /* RGBA_8888 */
fbinfo.bpp = 32;
fbinfo.size = w * h * 4;
fbinfo.width = w;
fbinfo.height = h;
fbinfo.red_offset = 0;
fbinfo.red_length = 8;
fbinfo.green_offset = 8;
fbinfo.green_length = 8;
fbinfo.blue_offset = 16;
fbinfo.blue_length = 8;
fbinfo.alpha_offset = 24;
fbinfo.alpha_length = 8;
break;
case 2: /* RGBX_8888 */
fbinfo.bpp = 32;
fbinfo.size = w * h * 4;
fbinfo.width = w;
fbinfo.height = h;
fbinfo.red_offset = 0;
fbinfo.red_length = 8;
fbinfo.green_offset = 8;
fbinfo.green_length = 8;
fbinfo.blue_offset = 16;
fbinfo.blue_length = 8;
fbinfo.alpha_offset = 24;
fbinfo.alpha_length = 0;
break;
case 3: /* RGB_888 */
fbinfo.bpp = 24;
fbinfo.size = w * h * 3;
fbinfo.width = w;
fbinfo.height = h;
fbinfo.red_offset = 0;
fbinfo.red_length = 8;
fbinfo.green_offset = 8;
fbinfo.green_length = 8;
fbinfo.blue_offset = 16;
fbinfo.blue_length = 8;
fbinfo.alpha_offset = 24;
fbinfo.alpha_length = 0;
break;
case 4: /* RGB_565 */
fbinfo.bpp = 16;
fbinfo.size = w * h * 2;
fbinfo.width = w;
fbinfo.height = h;
fbinfo.red_offset = 11;
fbinfo.red_length = 5;
fbinfo.green_offset = 5;
fbinfo.green_length = 6;
fbinfo.blue_offset = 0;
fbinfo.blue_length = 5;
fbinfo.alpha_offset = 0;
fbinfo.alpha_length = 0;
break;
case 5: /* BGRA_8888 */
fbinfo.bpp = 32;
fbinfo.size = w * h * 4;
fbinfo.width = w;
fbinfo.height = h;
fbinfo.red_offset = 16;
fbinfo.red_length = 8;
fbinfo.green_offset = 8;
fbinfo.green_length = 8;
fbinfo.blue_offset = 0;
fbinfo.blue_length = 8;
fbinfo.alpha_offset = 24;
fbinfo.alpha_length = 8;
break;
default:
goto done;
}
/* write header */
if(writex(fd, &fbinfo, sizeof(fbinfo))) goto done;
/* write data */
for(i = 0; i < fbinfo.size; i += sizeof(buf)) {
if(readx(fd_screencap, buf, sizeof(buf))) goto done;
if(writex(fd, buf, sizeof(buf))) goto done;
}
if(readx(fd_screencap, buf, fbinfo.size % sizeof(buf))) goto done;
if(writex(fd, buf, fbinfo.size % sizeof(buf))) goto done;
done:
TEMP_FAILURE_RETRY(waitpid(pid, NULL, 0));
close(fds[0]);
close(fds[1]);
close(fd);
}

c. screenshot2: Screenshot.java

/*
* Grab an image from an ADB-connected device.
*/
private static void getDeviceImage(IDevice device, String filepath, boolean landscape)
throws IOException {
RawImage rawImage;
try {
rawImage = device.getScreenshot();
} catch (TimeoutException e) {
printAndExit("Unable to get frame buffer: timeout", true /* terminate */);
return;
} catch (Exception ioe) {
printAndExit("Unable to get frame buffer: " + ioe.getMessage(), true /* terminate */);
return;
}
// device/adb not available?
if (rawImage == null)
return;
if (landscape) {
rawImage = rawImage.getRotated();
}
// convert raw data to an Image
BufferedImage image = new BufferedImage(rawImage.width, rawImage.height,
BufferedImage.TYPE_INT_ARGB);
int index = 0;
int IndexInc = rawImage.bpp >> 3;
for (int y = 0 ; y < rawImage.height ; y++) {
for (int x = 0 ; x < rawImage.width ; x++) {
int value = rawImage.getARGB(index);
index += IndexInc;
image.setRGB(x, y, value);
}
}
if (!ImageIO.write(image, "png", new File(filepath))) {
throw new IOException("Failed to find png writer");
}
}

3. Robotium: ScreenshotTaker.java

/**
* Takes a screenshot and saves it in "/sdcard/Robotium-Screenshots/".
* Requires write permission (android.permission.WRITE_EXTERNAL_STORAGE) in AndroidManifest.xml of the application under test.
*
* @param view the view to take screenshot of
* @param name the name to give the screenshot image
* @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality).
*/
public void takeScreenshot(final String name, final int quality) {
View decorView = getScreenshotView();
if(decorView == null)
return;
initScreenShotSaver();
ScreenshotRunnable runnable = new ScreenshotRunnable(decorView, name, quality);
activityUtils.getCurrentActivity(false).runOnUiThread(runnable);
}
/**
* Gets the proper view to use for a screenshot.
*/
private View getScreenshotView() {
View decorView = viewFetcher.getRecentDecorView(viewFetcher.getWindowDecorViews());
final long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout();
while (decorView == null) {
final boolean timedOut = SystemClock.uptimeMillis() > endTime;
if (timedOut){
return null;
}
sleeper.sleepMini();
decorView = viewFetcher.getRecentDecorView(viewFetcher.getWindowDecorViews());
}
wrapAllGLViews(decorView);
return decorView;
}
/**
* Extract and wrap the all OpenGL ES Renderer.
*/
private void wrapAllGLViews(View decorView) {
ArrayList<GLSurfaceView> currentViews = viewFetcher.getCurrentViews(GLSurfaceView.class, decorView);
final CountDownLatch latch = new CountDownLatch(currentViews.size());
for (GLSurfaceView glView : currentViews) {
Object renderContainer = new Reflect(glView).field("mGLThread")
.type(GLSurfaceView.class).out(Object.class);
Renderer renderer = new Reflect(renderContainer).field("mRenderer").out(Renderer.class);
if (renderer == null) {
renderer = new Reflect(glView).field("mRenderer").out(Renderer.class);
renderContainer = glView;
}
if (renderer == null) {
latch.countDown();
continue;
}
if (renderer instanceof GLRenderWrapper) {
GLRenderWrapper wrapper = (GLRenderWrapper) renderer;
wrapper.setTakeScreenshot();
wrapper.setLatch(latch);
} else {
GLRenderWrapper wrapper = new GLRenderWrapper(glView, renderer, latch);
new Reflect(renderContainer).field("mRenderer").in(wrapper);
}
}
try {
latch.await();
} catch (InterruptedException ex) {
ex.printStackTrace();
}
}
/**
* Returns a bitmap of a given WebView.
*
* @param webView the webView to save a bitmap from
* @return a bitmap of the given web view
*
*/
private Bitmap getBitmapOfWebView(final WebView webView){
Picture picture = webView.capturePicture();
Bitmap b = Bitmap.createBitmap( picture.getWidth(), picture.getHeight(), Bitmap.Config.ARGB_8888);
Canvas c = new Canvas(b);
picture.draw(c);
return b;
}
/**
* Returns a bitmap of a given View.
*
* @param view the view to save a bitmap from
* @return a bitmap of the given view
*
*/
private Bitmap getBitmapOfView(final View view){
view.destroyDrawingCache();
view.buildDrawingCache(false);
Bitmap orig = view.getDrawingCache();
Bitmap.Config config = null;
if(orig != null) {
config = orig.getConfig();
}
if(config == null) {
config = Bitmap.Config.ARGB_8888;
}
Bitmap b = orig.copy(config, false);
view.destroyDrawingCache();
return b;
}
/**
* Here we have a Runnable which is responsible for taking the actual screenshot,
* and then posting the bitmap to a Handler which will save it.
*
* This Runnable is run on the UI thread.
*/
private class ScreenshotRunnable implements Runnable {
private View view;
private String name;
private int quality;
public ScreenshotRunnable(final View _view, final String _name, final int _quality) {
view = _view;
name = _name;
quality = _quality;
}
public void run() {
if(view !=null){
Bitmap b;
if(view instanceof WebView){
b = getBitmapOfWebView((WebView) view);
}
else{
b = getBitmapOfView(view);
}
if(b != null)
screenShotSaver.saveBitmap(b, name, quality);
else
Log.d(LOG_TAG, "NULL BITMAP!!");
}
}
}
/**
* This class is a Handler which deals with saving the screenshots on a separate thread.
*
* The screenshot logic by necessity has to run on the ui thread. However, in practice
* it seems that saving a screenshot (with quality 100) takes approx twice as long
* as taking it in the first place.
*
* Saving the screenshots in a separate thread like this will thus make the screenshot
* process approx 3x faster as far as the main thread is concerned.
*
*/
private class ScreenShotSaver extends Handler {
public ScreenShotSaver(HandlerThread thread) {
super(thread.getLooper());
}
/**
* This method posts a Bitmap with meta-data to the Handler queue.
*
* @param bitmap the bitmap to save
* @param name the name of the file
* @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality).
*/
public void saveBitmap(Bitmap bitmap, String name, int quality) {
Message message = this.obtainMessage();
message.arg1 = quality;
message.obj = bitmap;
message.getData().putString("name", name);
this.sendMessage(message);
}
/**
* Here we process the Handler queue and save the bitmaps.
*
* @param message A Message containing the bitmap to save, and some metadata.
*/
public void handleMessage(Message message) {
String name = message.getData().getString("name");
int quality = message.arg1;
Bitmap b = (Bitmap)message.obj;
if(b != null) {
saveFile(name, b, quality);
b.recycle();
}
else {
Log.d(LOG_TAG, "NULL BITMAP!!");
}
}
/**
* Saves a file.
*
* @param name the name of the file
* @param b the bitmap to save
* @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality).
*
*/
private void saveFile(String name, Bitmap b, int quality){
FileOutputStream fos = null;
String fileName = getFileName(name);
File directory = new File(Environment.getExternalStorageDirectory() + "/Robotium-Screenshots/");
directory.mkdir();
File fileToSave = new File(directory,fileName);
try {
fos = new FileOutputStream(fileToSave);
if (b.compress(Bitmap.CompressFormat.JPEG, quality, fos) == false)
Log.d(LOG_TAG, "Compress/Write failed");
fos.flush();
fos.close();
} catch (Exception e) {
Log.d(LOG_TAG, "Can't save the screenshot! Requires write permission (android.permission.WRITE_EXTERNAL_STORAGE) in AndroidManifest.xml of the application under test.");
e.printStackTrace();
}
}
}
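
Note that getBitmapOfView above relies on the view drawing cache, which is slow and was deprecated in later Android releases. As a hedged alternative (not part of Robotium), drawing the view into an offscreen canvas produces the same bitmap without the cache:

// Hedged sketch: render a View into a bitmap without the (deprecated) drawing cache.
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.view.View;

public final class ViewBitmaps {
    public static Bitmap render(View view) {
        Bitmap bitmap = Bitmap.createBitmap(
                view.getWidth(), view.getHeight(), Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        view.draw(canvas);   // must run on the UI thread, after the view has been laid out
        return bitmap;
    }
}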

GLRenderWrapper.java

WebViewClassic.java

@Override
public Picture capturePicture() {
if (mNativeClass == 0) return null;
Picture result = new Picture();
nativeCopyBaseContentToPicture(result);
return result;
}

WebView.cpp

static void nativeCopyBaseContentToPicture(JNIEnv *env, jobject obj, jobject pict)
{
SkPicture* picture = GraphicsJNI::getNativePicture(env, pict);
GET_NATIVE_VIEW(env, obj)->copyBaseContentToPicture(picture);
}
void copyBaseContentToPicture(SkPicture* picture)
{
if (!m_baseLayer || !m_baseLayer->content())
return;
LayerContent* content = m_baseLayer->content();
SkCanvas* canvas = picture->beginRecording(content->width(), content->height(),
SkPicture::kUsePathBoundsForClip_RecordingFlag);
// clear the BaseLayerAndroid's previous matrix (set at each draw)
SkMatrix baseMatrix;
baseMatrix.reset();
m_baseLayer->setMatrix(baseMatrix);
m_baseLayer->draw(canvas, 0);
picture->endRecording();
}

Graphics.cpp

SkPicture* GraphicsJNI::getNativePicture(JNIEnv* env, jobject picture)
{
SkASSERT(env);
SkASSERT(picture);
SkASSERT(env->IsInstanceOf(picture, gPicture_class));
SkPicture* p = (SkPicture*)env->GetIntField(picture, gPicture_nativeInstanceID);
SkASSERT(p);
return p;
}

CafeSnapshotHelper.java uses the same call sequence, minus the GL wrapper.

There are likewise variants that crop the status bar out of the Android device screenshot.

4. UiDevice.takeScreenshot(File storePath) / takeScreenshot(File storePath, float scale, int quality)

UiDevice.java

/**
* Take a screenshot of current window and store it as PNG
*
* Default scale of 1.0f (original size) and 90% quality is used
* The screenshot is adjusted per screen rotation
*
* @param storePath where the PNG should be written to
* @return true if screen shot is created successfully, false otherwise
* @since API Level 17
*/
public boolean takeScreenshot(File storePath) {
Tracer.trace(storePath);
return takeScreenshot(storePath, 1.0f, 90);①
}
/**
* Take a screenshot of current window and store it as PNG
*
* The screenshot is adjusted per screen rotation
*
* @param storePath where the PNG should be written to
* @param scale scale the screenshot down if needed; 1.0f for original size
* @param quality quality of the PNG compression; range: 0-100
* @return true if screen shot is created successfully, false otherwise
* @since API Level 17
*/
public boolean takeScreenshot(File storePath, float scale, int quality) {
Tracer.trace(storePath, scale, quality);
return getAutomatorBridge().takeScreenshot(storePath, quality);②
}
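
A hedged usage sketch for the old (pre-AndroidX) uiautomator API shown here, run as an on-device uiautomator test; the test class name is illustrative. Note that in the source above the scale parameter is accepted but dropped at ② before reaching UiAutomatorBridge, so only quality actually has an effect.

// Hedged sketch: saving a screenshot from an old-style UiAutomatorTestCase (API 17+).
import java.io.File;
import com.android.uiautomator.testrunner.UiAutomatorTestCase;

public class ScreenshotTest extends UiAutomatorTestCase {
    public void testTakeScreenshot() {
        File out = new File("/sdcard/uiautomator-screenshot.png");
        boolean ok = getUiDevice().takeScreenshot(out, 1.0f, 90);
        assertTrue("screenshot was not written", ok);
    }
}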

UiAutomatorBridge.java

public boolean takeScreenshot(File storePath, int quality) {
Bitmap screenshot = mUiAutomation.takeScreenshot();③
if (screenshot == null) {
return false;
}
BufferedOutputStream bos = null;
try {
bos = new BufferedOutputStream(new FileOutputStream(storePath));
if (bos != null) {
screenshot.compress(Bitmap.CompressFormat.PNG, quality, bos);
bos.flush();
}
} catch (IOException ioe) {
Log.e(LOG_TAG, "failed to save screen shot to file", ioe);
return false;
} finally {
if (bos != null) {
try {
bos.close();
} catch (IOException ioe) {
/* ignore */
}
}
screenshot.recycle();
}
return true;
}

UiAutomation.java

/**
* Takes a screenshot.
*
* @return The screenshot bitmap on success, null otherwise.
*/
public Bitmap takeScreenshot() {
synchronized (mLock) {
throwIfNotConnectedLocked();
}
Display display = DisplayManagerGlobal.getInstance()
.getRealDisplay(Display.DEFAULT_DISPLAY);
Point displaySize = new Point();
display.getRealSize(displaySize);
final int displayWidth = displaySize.x;
final int displayHeight = displaySize.y;
final float screenshotWidth;
final float screenshotHeight;
final int rotation = display.getRotation();
switch (rotation) {
case ROTATION_FREEZE_0: {
screenshotWidth = displayWidth;
screenshotHeight = displayHeight;
} break;
case ROTATION_FREEZE_90: {
screenshotWidth = displayHeight;
screenshotHeight = displayWidth;
} break;
case ROTATION_FREEZE_180: {
screenshotWidth = displayWidth;
screenshotHeight = displayHeight;
} break;
case ROTATION_FREEZE_270: {
screenshotWidth = displayHeight;
screenshotHeight = displayWidth;
} break;
default: {
throw new IllegalArgumentException("Invalid rotation: "
+ rotation);
}
}
// Take the screenshot
Bitmap screenShot = null;
try {
// Calling out without a lock held.
screenShot = mUiAutomationConnection.takeScreenshot((int) screenshotWidth,
(int) screenshotHeight);④
if (screenShot == null) {
return null;
}
} catch (RemoteException re) {
Log.e(LOG_TAG, "Error while taking screnshot!", re);
return null;
}
// Rotate the screenshot to the current orientation
if (rotation != ROTATION_FREEZE_0) {
Bitmap unrotatedScreenShot = Bitmap.createBitmap(displayWidth, displayHeight,
Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(unrotatedScreenShot);
canvas.translate(unrotatedScreenShot.getWidth() / 2,
unrotatedScreenShot.getHeight() / 2);
canvas.rotate(getDegreesForRotation(rotation));
canvas.translate(- screenshotWidth / 2, - screenshotHeight / 2);
canvas.drawBitmap(screenShot, 0, 0, null);
canvas.setBitmap(null);
screenShot = unrotatedScreenShot;
}
// Optimization
screenShot.setHasAlpha(false);
return screenShot;
}

UiAutomationConnection.java

@Override
public Bitmap takeScreenshot(int width, int height) {
synchronized (mLock) {
throwIfCalledByNotTrustedUidLocked();
throwIfShutdownLocked();
throwIfNotConnectedLocked();
}
final long identity = Binder.clearCallingIdentity();
try {
return SurfaceControl.screenshot(width, height);⑤
} finally {
Binder.restoreCallingIdentity(identity);
}
}

As you can see, after all the indirection this path ends up back at method 1 (SurfaceControl.screenshot).
