2014-11-14 21:29:58 yangtb2010 阅读数 3862
// Detects whether the displayed image is a GIF and, if so, overlays a "GIF"
// badge in the bottom-right corner of photoIV.
// NOTE(review): fragment from a larger method — assumes _item.permalink,
// aImage, photoIV and _imgGIF are in scope.
CGImageSourceRef source0 = CGImageSourceCreateWithURL((CFURLRef)[NSURL URLWithString:_item.permalink], NULL);

CFDataRef dataRef = CGDataProviderCopyData(CGImageGetDataProvider(aImage.CGImage));
CGImageSourceRef source = CGImageSourceCreateWithData(dataRef, NULL);
NSLog(@"source = %@ + %@",source,source0);

// CGImageSourceCopyProperties follows the Create/Copy rule: the caller owns
// the returned dictionary and must release it.
NSDictionary* properties = (NSDictionary*)CGImageSourceCopyProperties(source, NULL);
NSDictionary* gifProperties = [properties objectForKey:(NSString*)kCGImagePropertyGIFDictionary];
NSLog(@"properties: %@,gifProperties: %@, source = %@",properties,gifProperties,CGImageSourceGetType(source));
if (gifProperties != nil && [@"com.compuserve.gif" isEqualToString:[NSString stringWithFormat:@"%@",CGImageSourceGetType(source)]]) {
    
    _imgGIF = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"GIF.png"]];
    [_imgGIF setFrame:CGRectMake(photoIV.frame.size.width - 5.0f - _imgGIF.width, photoIV.frame.size.height - 5.0f - _imgGIF.height, _imgGIF.width, _imgGIF.height)];
    [photoIV addSubview:_imgGIF];
} else {
    _imgGIF = nil;
}

// Fix: release everything obtained through Create/Copy functions — the
// original leaked source0, dataRef, source and properties.
if (properties) CFRelease((CFDictionaryRef)properties);
if (source) CFRelease(source);
if (dataRef) CFRelease(dataRef);
if (source0) CFRelease(source0);

2016-04-03 00:18:05 u012576807 阅读数 643


1、使用ImageView显示gif, 拖到项目,添加-fno-objc-arc


使用方法:

NSString* filePath = [[NSBundle mainBundle] pathForResource:@"a1.gif" ofType:nil];
    SGGIFImageView* gifImageView = [[SGGIFImageView alloc] initWithGIFFile:filePath] ;
    
    
    
    gifImageView.frame = CGRectMake(0, 0, gifImageView.image.size.width, gifImageView.image.size.height);
//    gifImageView.center = self.view.center;
    [self.view addSubview:gifImageView];


类文件如下:

//
//  SGGIFImageView.h
//  NiHongGo
//
//  Created by beyond on 16/4/3.
//  Copyright (c) 2016年 beyond. All rights reserved.
//  拖到项目,添加-fno-objc-arc

#import <UIKit/UIKit.h>

// One decoded GIF frame: the reconstructed standalone single-frame GIF
// payload plus the timing/compositing metadata read from its Graphic
// Control Extension.
@interface AnimatedGifFrame : NSObject
{
    NSData *data;        // complete single-frame GIF file data for this frame
    NSData *header;      // 8-byte Graphic Control Extension block (21 F9 04 ...)
    double delay;        // frame delay, in 1/100ths of a second
    int disposalMethod;  // GIF disposal method (0-3) applied after drawing
    CGRect area;         // frame's sub-rectangle within the logical screen
}

@property (nonatomic, copy) NSData *header;
@property (nonatomic, copy) NSData *data;
@property (nonatomic) double delay;
@property (nonatomic) int disposalMethod;
@property (nonatomic) CGRect area;

@end

// A UIImageView subclass that parses animated GIF data by hand (block by
// block) and plays the frames via the standard animationImages machinery.
// NOTE: this class uses manual retain/release — compile with -fno-objc-arc.
@interface SGGIFImageView : UIImageView {
    NSData *GIF_pointer;        // source data being decoded (assigned, not retained — valid only during decode)
    NSMutableData *GIF_buffer;  // scratch buffer holding the bytes last read
    NSMutableData *GIF_screen;  // saved Logical Screen Descriptor (7 bytes)
    NSMutableData *GIF_global;  // saved global color table
    NSMutableArray *GIF_frames; // parsed AnimatedGifFrame objects
    
    int GIF_sorted;             // global color table "sorted" flag
    int GIF_colorS;             // global color table entry count
    int GIF_colorC;             // global color table size exponent
    int GIF_colorF;             // 1 when a global color table is present
    int animatedGifDelay;
    
    int dataPointer;            // read cursor into GIF_pointer
}
@property (nonatomic, retain) NSMutableArray *GIF_frames;

// Decode a GIF from a file path or from raw data.
- (id)initWithGIFFile:(NSString*)gifFilePath;
- (id)initWithGIFData:(NSData*)gifImageData;

// Rebuilds animationImages from GIF_frames and starts animating.
- (void)loadImageData;

// One-shot helpers that decode without keeping the view around.
+ (NSMutableArray*)getGifFrames:(NSData*)gifImageData;
+ (BOOL)isGifImage:(NSData*)imageData;

// Internal decoding machinery (exposed in the header by the original author).
- (void) decodeGIF:(NSData *)GIFData;
- (void) GIFReadExtensions;
- (void) GIFReadDescriptor;
- (bool) GIFGetBytes:(int)length;
- (bool) GIFSkipBytes: (int) length;
- (NSData*) getFrameAsDataAtIndex:(int)index;
- (UIImage*) getFrameAsImageAtIndex:(int)index;

@end


//
//  SGGIFImageView.m
//  NiHongGo
//
//  Created by beyond on 16/4/3.
//  Copyright (c) 2016年 beyond. All rights reserved.
//

#import "SGGIFImageView.h"

@implementation AnimatedGifFrame

// Explicit @synthesize maps each property onto the same-named ivar declared
// in the header (no leading underscore).
@synthesize data, delay, disposalMethod, area, header;

// MRC: release the owned object ivars; the scalar ivars need no cleanup.
- (void) dealloc
{
    [data release];
    [header release];
    [super dealloc];
}

@end

@implementation SGGIFImageView
@synthesize GIF_frames;

// Returns YES when the data begins with the GIF signature "GIF8"
// (covers both GIF87a and GIF89a).
+ (BOOL)isGifImage:(NSData*)imageData {
    // Fix: guard against nil/short data — the original read 4 bytes
    // unconditionally and crashed on data shorter than the signature.
    if ([imageData length] < 4) {
        return NO;
    }
    const char* buf = (const char*)[imageData bytes];
    return (buf[0] == 0x47 && buf[1] == 0x49 && buf[2] == 0x46 && buf[3] == 0x38);
}

// Decodes gifImageData and hands back the parsed frame array (autoreleased),
// or nil when the data could not be decoded at all.
+ (NSMutableArray*)getGifFrames:(NSData*)gifImageData {
    SGGIFImageView* decoder = [[SGGIFImageView alloc] initWithGIFData:gifImageData];
    if (decoder == nil) {
        return nil;
    }
    
    // MRC: keep the frame array alive past the decoder's release.
    NSMutableArray* frames = [[decoder.GIF_frames retain] autorelease];
    [decoder release];
    return frames;
}

// Convenience initializer: loads the file at gifFilePath and defers to
// initWithGIFData: (a missing file yields nil data and therefore nil).
- (id)initWithGIFFile:(NSString*)gifFilePath {
    return [self initWithGIFData:[NSData dataWithContentsOfFile:gifFilePath]];
}

// Initializes from raw data: decodes animated GIFs into frames, and falls
// back to a plain static UIImageView for anything that is not a decodable GIF.
- (id)initWithGIFData:(NSData*)gifImageData {
    if (gifImageData.length < 4) {
        // Fix: under MRC the alloc'ed receiver must be released before
        // returning nil from init, otherwise it leaks.
        [self release];
        return nil;
    }
    
    if (![SGGIFImageView isGifImage:gifImageData]) {
        // Not a GIF — show it as a static image.
        UIImage* image = [UIImage imageWithData:gifImageData];
        return [super initWithImage:image];
    }
    
    [self decodeGIF:gifImageData];
    
    if (GIF_frames.count <= 0) {
        // Decoding produced no frames — fall back to static rendering.
        UIImage* image = [UIImage imageWithData:gifImageData];
        return [super initWithImage:image];
    }
    
    self = [super init];
    if (self) {
        [self loadImageData];
    }
    
    return self;
}

// Custom MRC setter: takes ownership of the new frame array and immediately
// rebuilds the animation from it.
- (void)setGIF_frames:(NSMutableArray *)gifFrames {
    if (gifFrames != GIF_frames) {
        [GIF_frames release];
        GIF_frames = [gifFrames retain];
    }
    
    [self loadImageData];
}

// Composites the decoded GIF frames into full-canvas UIImages (honoring each
// frame's disposal method) and starts the UIImageView animation.
- (void)loadImageData {
    // Fix: bail out when no frames were decoded — the original crashed on
    // [array objectAtIndex:0] with an empty frame list.
    if ([GIF_frames count] == 0) {
        return;
    }
    
    // Add all subframes to the animation
    NSMutableArray *array = [[NSMutableArray alloc] init];
    for (NSUInteger i = 0; i < [GIF_frames count]; i++)
    {
        // NOTE(review): getFrameAsImageAtIndex: may return nil for corrupt
        // frame data, which would raise in addObject: — confirm upstream.
        [array addObject: [self getFrameAsImageAtIndex:i]];
    }
    
    NSMutableArray *overlayArray = [[NSMutableArray alloc] init];
    UIImage *firstImage = [array objectAtIndex:0];
    CGSize size = firstImage.size;
    CGRect rect = CGRectZero;
    rect.size = size;
    
    UIGraphicsBeginImageContext(size);
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    
    int i = 0;
    AnimatedGifFrame *lastFrame = nil;
    for (UIImage *image in array)
    {
        // Get Frame
        AnimatedGifFrame *frame = [GIF_frames objectAtIndex:i];
        
        // Initialize Flag
        UIImage *previousCanvas = nil;
        
        // Save Context
        CGContextSaveGState(ctx);
        // Change CTM (CoreGraphics y-axis is flipped relative to UIKit)
        CGContextScaleCTM(ctx, 1.0, -1.0);
        CGContextTranslateCTM(ctx, 0.0, -size.height);
        
        // Check if lastFrame exists
        CGRect clipRect;
        
        // Disposal Method (Operations before draw frame)
        switch (frame.disposalMethod)
        {
            case 1: // Do not dispose (draw over context)
                // Create Rect (y inverted) to clipping
                clipRect = CGRectMake(frame.area.origin.x, size.height - frame.area.size.height - frame.area.origin.y, frame.area.size.width, frame.area.size.height);
                // Clip Context
                CGContextClipToRect(ctx, clipRect);
                break;
            case 2: // Restore to background the rect where the actual frame will be drawn
                // Create Rect (y inverted) to clipping
                clipRect = CGRectMake(frame.area.origin.x, size.height - frame.area.size.height - frame.area.origin.y, frame.area.size.width, frame.area.size.height);
                // Clip Context
                CGContextClipToRect(ctx, clipRect);
                break;
            case 3: // Restore to Previous
                // Snapshot the canvas before drawing so it can be restored after
                previousCanvas = UIGraphicsGetImageFromCurrentImageContext();
                
                // Create Rect (y inverted) to clipping
                clipRect = CGRectMake(frame.area.origin.x, size.height - frame.area.size.height - frame.area.origin.y, frame.area.size.width, frame.area.size.height);
                // Clip Context
                CGContextClipToRect(ctx, clipRect);
                break;
        }
        
        // Draw Actual Frame
        CGContextDrawImage(ctx, rect, image.CGImage);
        // Restore State
        CGContextRestoreGState(ctx);
        
        // Delay must be larger than 0; 10 (1/100ths of a second) matches the
        // minimum delay Firefox applies to zero-delay GIFs.
        if (frame.delay <= 0) {
            frame.delay = 10;
        }
        [overlayArray addObject:UIGraphicsGetImageFromCurrentImageContext()];
        
        // Set Last Frame
        lastFrame = frame;
        
        // Disposal Method (Operations after draw frame)
        switch (frame.disposalMethod)
        {
            case 2: // Restore to background color the zone of the actual frame
                // Save Context
                CGContextSaveGState(ctx);
                // Change CTM
                CGContextScaleCTM(ctx, 1.0, -1.0);
                CGContextTranslateCTM(ctx, 0.0, -size.height);
                // Clear Context
                CGContextClearRect(ctx, clipRect);
                // Restore Context
                CGContextRestoreGState(ctx);
                break;
            case 3: // Restore to Previous Canvas
                // Save Context
                CGContextSaveGState(ctx);
                // Change CTM
                CGContextScaleCTM(ctx, 1.0, -1.0);
                CGContextTranslateCTM(ctx, 0.0, -size.height);
                // Clear Context
                CGContextClearRect(ctx, lastFrame.area);
                // Draw previous frame
                CGContextDrawImage(ctx, rect, previousCanvas.CGImage);
                // Restore State
                CGContextRestoreGState(ctx);
                break;
        }
        
        // Increment counter
        i++;
    }
    UIGraphicsEndImageContext();
    
    [self setImage:[overlayArray objectAtIndex:0]];
    [self setAnimationImages:overlayArray];
    
    // MRC: the view retains both collections via the setters above.
    [overlayArray release];
    [array release];
    
    // Count up the total delay, since Cocoa doesn't do per frame delays.
    double total = 0;
    for (AnimatedGifFrame *frame in GIF_frames) {
        total += frame.delay;
    }
    
    // GIFs store the delays as 1/100th of a second,
    // UIImageViews want it in seconds.
    [self setAnimationDuration:total/100];
    
    // Repeat infinite
    [self setAnimationRepeatCount:0];
    
    [self startAnimating];
}

// Release all decoder state. Messaging nil is a no-op in Objective-C, so the
// buffers can be released unconditionally — no nil checks needed.
- (void)dealloc {
    [GIF_buffer release];
    [GIF_screen release];
    [GIF_global release];
    [GIF_frames release];
    
    [super dealloc];
}

// Parses GIFData block-by-block, filling GIF_frames with one AnimatedGifFrame
// per image frame found in the stream.
// NOTE(review): GIF_pointer is assigned without a retain — it is only safe to
// use for the duration of this call; confirm no later access.
- (void) decodeGIF:(NSData *)GIFData {
    GIF_pointer = GIFData;
    
    // Drop any state left over from a previous decode (MRC).
    if (GIF_buffer != nil)
    {
        [GIF_buffer release];
    }
    
    if (GIF_global != nil)
    {
        [GIF_global release];
    }
    
    if (GIF_screen != nil)
    {
        [GIF_screen release];
    }
    
    [GIF_frames release];
    
    GIF_buffer = [[NSMutableData alloc] init];
    GIF_global = [[NSMutableData alloc] init];
    GIF_screen = [[NSMutableData alloc] init];
    GIF_frames = [[NSMutableArray alloc] init];
    
    // Reset file counters to 0
    dataPointer = 0;
    
    [self GIFSkipBytes: 6]; // "GIF89a"/"GIF87a" signature, throw away
    [self GIFGetBytes: 7]; // Logical Screen Descriptor
    
    // Deep copy — GIF_screen keeps the descriptor for rebuilding frame files
    [GIF_screen setData: GIF_buffer];
    
    // Copy the read bytes into a local buffer on the stack
    // For easy byte access in the following lines.
    int length = [GIF_buffer length];
    unsigned char aBuffer[length];
    [GIF_buffer getBytes:aBuffer length:length];
    
    // Packed field (byte 4): bit 7 = global color table flag,
    // bit 3 = sort flag, bits 0-2 = color table size exponent.
    if (aBuffer[4] & 0x80) GIF_colorF = 1; else GIF_colorF = 0;
    if (aBuffer[4] & 0x08) GIF_sorted = 1; else GIF_sorted = 0;
    GIF_colorC = (aBuffer[4] & 0x07);
    GIF_colorS = 2 << GIF_colorC;  // entry count = 2^(exponent+1)
    
    if (GIF_colorF == 1)
    {
        // Read and keep the global color table (3 bytes per entry) for
        // frames that have no local table.
        [self GIFGetBytes: (3 * GIF_colorS)];
        
        // Deep copy
        [GIF_global setData:GIF_buffer];
    }
    
    // Walk the block stream until the 0x3B trailer or end of data.
    unsigned char bBuffer[1];
    while ([self GIFGetBytes:1] == YES)
    {
        [GIF_buffer getBytes:bBuffer length:1];
        
        if (bBuffer[0] == 0x3B)
        { // This is the end
            break;
        }
        
        switch (bBuffer[0])
        {
            case 0x21:
                // Graphic Control Extension (#n of n)
                [self GIFReadExtensions];
                break;
            case 0x2C:
                // Image Descriptor (#n of n)
                [self GIFReadDescriptor];
                break;
        }
    }
    
    // clean up stuff
    [GIF_buffer release];
    GIF_buffer = nil;
    
    [GIF_screen release];
    GIF_screen = nil;
    
    [GIF_global release];
    GIF_global = nil;
}

// Scans an extension block (introduced by 0x21) for the Graphic Control
// Extension signature F9 04; when found, records a new AnimatedGifFrame
// carrying the frame's disposal method, delay and raw 8-byte header.
- (void) GIFReadExtensions {
    // 21! But we still could have an Application Extension,
    // so we want to check for the full signature.
    unsigned char cur[1], prev[1];
    // Fix: prev[0] was compared before ever being written on the first loop
    // iteration (uninitialized stack byte — undefined behavior). Zero it.
    prev[0] = 0x00;
    [self GIFGetBytes:1];
    [GIF_buffer getBytes:cur length:1];
    
    while (cur[0] != 0x00)
    {
        
        // TODO: Known bug, the sequence F9 04 could occur in the Application Extension, we
        //       should check whether this combo follows directly after the 21.
        if (cur[0] == 0x04 && prev[0] == 0xF9)
        {
            // Graphic Control Extension payload: packed flags, 2-byte delay,
            // transparent color index, block terminator.
            [self GIFGetBytes:5];
            
            AnimatedGifFrame *frame = [[AnimatedGifFrame alloc] init];
            
            unsigned char buffer[5];
            [GIF_buffer getBytes:buffer length:5];
            frame.disposalMethod = (buffer[0] & 0x1c) >> 2;
            
            // We save the delays for easy access (little-endian uint16,
            // in 1/100ths of a second).
            frame.delay = (buffer[1] | buffer[2] << 8);
            
            // Reconstruct the full 8-byte extension block (21 F9 04 + payload)
            // so GIFReadDescriptor can write it into a standalone frame file.
            unsigned char board[8];
            board[0] = 0x21;
            board[1] = 0xF9;
            board[2] = 0x04;
            
            for(int i = 3, a = 0; a < 5; i++, a++)
            {
                board[i] = buffer[a];
            }
            
            frame.header = [NSData dataWithBytes:board length:8];
            
            [GIF_frames addObject:frame];
            [frame release];  // MRC: the array now owns the frame
            break;
        }
        
        prev[0] = cur[0];
        [self GIFGetBytes:1];
        [GIF_buffer getBytes:cur length:1];
    }
}

// Parses one Image Descriptor (introduced by 0x2C) and rebuilds a complete,
// standalone single-frame GIF file into the current frame's `data`, so each
// frame can later be decoded independently by UIImage.
- (void) GIFReadDescriptor {
    [self GIFGetBytes:9];
    
    // Deep copy of the raw descriptor, needed again at the end
    NSMutableData *GIF_screenTmp = [NSMutableData dataWithData:GIF_buffer];
    
    unsigned char aBuffer[9];
    [GIF_buffer getBytes:aBuffer length:9];
    
    // Frame rectangle: x, y, width, height — each a little-endian uint16.
    CGRect rect;
    rect.origin.x = ((int)aBuffer[1] << 8) | aBuffer[0];
    rect.origin.y = ((int)aBuffer[3] << 8) | aBuffer[2];
    rect.size.width = ((int)aBuffer[5] << 8) | aBuffer[4];
    rect.size.height = ((int)aBuffer[7] << 8) | aBuffer[6];
    
    // NOTE(review): assumes a Graphic Control Extension was parsed first so
    // lastObject is the frame under construction; if none was, frame is nil
    // and appendData:frame.header below would raise — confirm input ordering.
    AnimatedGifFrame *frame = [GIF_frames lastObject];
    frame.area = rect;
    
    // Packed field bit 7: local color table present?
    if (aBuffer[8] & 0x80) GIF_colorF = 1; else GIF_colorF = 0;
    
    // Default to the global table's size/sort settings
    unsigned char GIF_code = GIF_colorC, GIF_sort = GIF_sorted;
    
    if (GIF_colorF == 1)
    {
        GIF_code = (aBuffer[8] & 0x07);  // local color table size exponent
        
        if (aBuffer[8] & 0x20)           // local color table sort flag
        {
            GIF_sort = 1;
        }
        else
        {
            GIF_sort = 0;
        }
    }
    
    int GIF_size = (2 << GIF_code);      // number of color table entries
    
    // Patch the saved Logical Screen Descriptor so the rebuilt file
    // advertises a global color table matching this frame's table.
    size_t blength = [GIF_screen length];
    unsigned char bBuffer[blength];
    [GIF_screen getBytes:bBuffer length:blength];
    
    bBuffer[4] = (bBuffer[4] & 0x70);    // keep color resolution bits
    bBuffer[4] = (bBuffer[4] | 0x80);    // set global color table flag
    bBuffer[4] = (bBuffer[4] | GIF_code);// set table size exponent
    
    if (GIF_sort)
    {
        bBuffer[4] |= 0x08;              // set sort flag
    }
    
    // Assemble the standalone GIF: signature + patched screen descriptor...
    NSMutableData *GIF_string = [NSMutableData dataWithData:[[NSString stringWithString:@"GIF89a"] dataUsingEncoding: NSUTF8StringEncoding]];
    [GIF_screen setData:[NSData dataWithBytes:bBuffer length:blength]];
    [GIF_string appendData: GIF_screen];
    
    // ...followed by the color table (local if present, else the global one)
    if (GIF_colorF == 1)
    {
        [self GIFGetBytes:(3 * GIF_size)];
        [GIF_string appendData:GIF_buffer];
    }
    else
    {
        [GIF_string appendData:GIF_global];
    }
    
    // Add Graphic Control Extension Frame (for transparancy)
    [GIF_string appendData:frame.header];
    
    // Image Descriptor introducer
    char endC = 0x2c;
    [GIF_string appendBytes:&endC length:sizeof(endC)];
    
    size_t clength = [GIF_screenTmp length];
    unsigned char cBuffer[clength];
    [GIF_screenTmp getBytes:cBuffer length:clength];
    
    // Clear the local-color-table bits (table was promoted to global above),
    // keeping only the interlace flag.
    cBuffer[8] &= 0x40;
    
    [GIF_screenTmp setData:[NSData dataWithBytes:cBuffer length:clength]];
    
    [GIF_string appendData: GIF_screenTmp];
    // LZW minimum code size byte
    [self GIFGetBytes:1];
    [GIF_string appendData: GIF_buffer];
    
    // Copy the image's data sub-blocks verbatim until the 0x00 terminator.
    while (true)
    {
        [self GIFGetBytes:1];
        [GIF_string appendData: GIF_buffer];
        
        unsigned char dBuffer[1];
        [GIF_buffer getBytes:dBuffer length:1];
        
        long u = (long) dBuffer[0];
        
        if (u != 0x00)
        {
            [self GIFGetBytes:u];
            [GIF_string appendData: GIF_buffer];
        }
        else
        {
            break;
        }
        
    }
    
    // Trailer byte terminates the standalone file.
    endC = 0x3b;
    [GIF_string appendBytes:&endC length:sizeof(endC)];
    
    // save the frame into the array of frames
    frame.data = GIF_string;
}

// Reads `length` bytes from GIF_pointer at the current cursor into a fresh
// GIF_buffer and advances the cursor. Returns NO (leaving GIF_buffer nil)
// when fewer than `length` bytes remain.
- (bool) GIFGetBytes:(int)length {
    // Drop the previous buffer (messaging nil is harmless).
    [GIF_buffer release];
    GIF_buffer = nil;
    
    // Refuse to read past the end of the data.
    if (dataPointer + length > (NSInteger)[GIF_pointer length])
    {
        return NO;
    }
    
    GIF_buffer = [[GIF_pointer subdataWithRange:NSMakeRange(dataPointer, length)] retain];
    dataPointer += length;
    return YES;
}

// Advances the read cursor by `length` bytes without copying anything.
// Returns NO when that would run past the end of the data.
- (bool) GIFSkipBytes: (int) length {
    if (dataPointer + length > (NSInteger)[GIF_pointer length])
    {
        return NO;
    }
    
    dataPointer += length;
    return YES;
}

// Returns the reconstructed single-frame GIF data for the frame at `index`,
// or nil when the index is out of range.
- (NSData*) getFrameAsDataAtIndex:(int)index {
    // Fix: also reject negative indices — the original only checked the
    // upper bound, so a negative index crashed in objectAtIndex:.
    if (index < 0 || index >= (NSInteger)[GIF_frames count])
    {
        return nil;
    }
    return ((AnimatedGifFrame *)[GIF_frames objectAtIndex:index]).data;
}

// Decodes the frame at `index` into a UIImage, or nil when out of range.
- (UIImage*) getFrameAsImageAtIndex:(int)index {
    NSData *frameData = [self getFrameAsDataAtIndex: index];
    return (frameData != nil) ? [UIImage imageWithData:frameData] : nil;
}


@end


2、使用webView显示.gif图片


// Loads the (encrypted) bundled GIF resource, renders it in _webView, and
// attaches a pinch gesture for zooming.
- (void)initGif
{
    DLog(@"gif is:%@",self.gifName);
    DLog(@"png is:%@",self.pngName);
    // Read the gif image data
    
    
    // Decrypt the bundled data
    NSData *gifData = [SGTools decodeDataWithNameInBundleWithoutPrefix:_gifName];
    
    
    
    
    
    
    // Build the view
    UIWebView *webView = _webView;
    webView.userInteractionEnabled = YES;// enable user interaction — required for the pinch gesture below (the original comment claimed the opposite)
    [webView loadData:gifData MIMEType:@"image/gif" textEncodingName:nil baseURL:nil];
    
    
    // Pinch (zoom) gesture
    UIPinchGestureRecognizer *pinchGestureRecognizer = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(pinchView:)];
    [_webView addGestureRecognizer:pinchGestureRecognizer];
    _webView.layer.anchorPoint = CGPointMake(0.5, 0.15);
    
    
    
    
    // Initial scale-up to fit the screen width
    // NOTE(review): the gif data is decoded a second time here — gifData
    // from above could be reused.
    UIImage *img = [UIImage imageWithData:[SGTools decodeDataWithNameInBundleWithoutPrefix:_gifName]];
    CGFloat scacle = ScreenWidth/img.size.width -1;
    DLog(@"sg____init___%f",scacle);
    _webView.transform = CGAffineTransformScale(_webView.transform, scacle, scacle);
    
}
// Handle the pinch (zoom) gesture
// Handles the pinch gesture: applies the incremental scale to the pinched
// view, then resets the recognizer's scale so each callback delivers a delta.
- (void) pinchView:(UIPinchGestureRecognizer *)pinchGestureRecognizer
{
    UIView *view = pinchGestureRecognizer.view;
    if (pinchGestureRecognizer.state == UIGestureRecognizerStateBegan || pinchGestureRecognizer.state == UIGestureRecognizerStateChanged) {
        
        // Fix: scale is a CGFloat — logging it with %d is undefined behavior;
        // use %f (floats are promoted to double in varargs).
        DLog(@"sg__%f",pinchGestureRecognizer.scale);
        
        view.transform = CGAffineTransformScale(view.transform, pinchGestureRecognizer.scale, pinchGestureRecognizer.scale);
        
        // Reset so the next callback applies only the new increment.
        pinchGestureRecognizer.scale = 1;
    }
}


2019-04-12 18:12:53 u014380815 阅读数 187

判断当前图片资源是不是gif

 

iOS9以上
PHAsset *asset;
[[asset valueForKey:@"filename"] containsString:@"GIF"]

 

这些类型只能通过 Asset中的 获取资源图片的详细资源信息 ios 4-9

ALAssetRepresentation *representation = [asset defaultRepresentation];
[representation filename];

通过图片data获取对应的图片类型

// Sniffs the MIME type of image data from its first byte
// (JPEG/PNG/GIF/TIFF/WebP); returns nil for unknown or empty data.
+ (NSString *)sd_contentTypeForImageData:(NSData *)data {
    // Fix: guard against nil/empty data — the original switched on an
    // uninitialized byte when data had length 0.
    if ([data length] < 1) {
        return nil;
    }
    uint8_t c;
    [data getBytes:&c length:1];
    switch (c) {
        case 0xFF:
            return @"image/jpeg";
        case 0x89:
            return @"image/png";
        case 0x47:
            return @"image/gif";
        case 0x49:
        case 0x4D:
            return @"image/tiff";
        case 0x52:
            // WebP needs the 12-byte "RIFF....WEBP" container header.
            if ([data length] < 12) {
                return nil;
            }

            NSString *testString = [[NSString alloc] initWithData:[data subdataWithRange:NSMakeRange(0, 12)] encoding:NSASCIIStringEncoding];
            if ([testString hasPrefix:@"RIFF"] && [testString hasSuffix:@"WEBP"]) {
                return @"image/webp";
            }

            return nil;
    }
    return nil;
}

 自己写的测试例子 https://github.com/qq50032660/CustomAlbumLibraryTest

 

2013-11-06 13:14:57 snowbueaty 阅读数 7169

最近在做的东西有下载zip,只是服务器发送过来的是二进制数据,需要根据二进制内容判断是什么类型的文件,从而进行保存操作。起初很不理解,后来发现可以通过二进制数据前2个字节的数值(ASCII码)来进行判断。如下:

     // 255216 jpg;
        // 7173 gif;
        // 6677 bmp,
        // 13780 png;
        // 6787 swf
        // 7790 exe dll,
        // 8297 rar
        // 8075 zip
        // 55122 7z
        // 6063 xml
        // 6033 html
        // 239187 aspx
        // 117115 cs
        // 119105 js
        // 102100 txt
        // 255254 sql 

在ios中用OC代码进行如下操作:

// Reads the bundled "READ.zip" and builds its two-leading-byte signature
// string (e.g. "8075" for zip) for file-type detection.
// Fix: the pasted snippet had spaces stripped out ("NSBundlemainBundle",
// "@"READ"ofType:", "NSDatadataWithContentsOfFile", "NSStringstringWithFormat")
// and declared the variable as `d` while using `data` below.
NSString *path = [[NSBundle mainBundle] pathForResource:@"READ" ofType:@"zip"];

NSData *data = [NSData dataWithContentsOfFile:path];

if (data.length < 2) {

    return  @"NOT FILE";

}

// Must be zero-initialized: getBytes below writes only one byte of each int.
int char1 = 0 ,char2 =0 ;

[data getBytes:&char1 range:NSMakeRange(0, 1)];

[data getBytes:&char2 range:NSMakeRange(1, 1)];

NSLog(@"%d%d",char1,char2);

NSString *numStr = [NSString stringWithFormat:@"%i%i",char1,char2];


这样子就可以看到输出是:8075。

跟上面的数字对照下刚好是zip。


参考:http://hi.baidu.com/personname/item/35e96d3004d51ac51a969618

http://blog.csdn.net/my_yang/article/details/8069745  


iOS中的图片格式

阅读数 687

没有更多推荐了,返回首页