Browse Source

version 0.9.0

ibireme 9 years ago
parent
commit
42e84e379b
100 changed files with 10952 additions and 2 deletions
  1. 485 0
      Demo/YYImageDemo.xcodeproj/project.pbxproj
  2. 7 0
      Demo/YYImageDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  3. 17 0
      Demo/YYImageDemo/AppDelegate.h
  4. 45 0
      Demo/YYImageDemo/AppDelegate.m
  5. 38 0
      Demo/YYImageDemo/Assets.xcassets/AppIcon.appiconset/Contents.json
  6. 27 0
      Demo/YYImageDemo/Base.lproj/LaunchScreen.storyboard
  7. 28 0
      Demo/YYImageDemo/Base.lproj/Main.storyboard
  8. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_aini@2x.png
  9. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_aini@3x.png
  10. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_baibai@2x.png
  11. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_baibai@3x.png
  12. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_beishang@2x.png
  13. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_beishang@3x.png
  14. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bishi@2x.png
  15. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bishi@3x.png
  16. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bizui@2x.png
  17. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bizui@3x.png
  18. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chanzui@2x.png
  19. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chanzui@3x.png
  20. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chijing@2x.png
  21. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chijing@3x.png
  22. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dahaqi@2x.png
  23. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dahaqi@3x.png
  24. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dalian@2x.png
  25. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dalian@3x.png
  26. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_ganmao@2x.png
  27. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_ganmao@3x.png
  28. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_guzhang@2x.png
  29. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_guzhang@3x.png
  30. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haha@2x.png
  31. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haha@3x.png
  32. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haixiu@2x.png
  33. BIN
      Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haixiu@3x.png
  34. 40 0
      Demo/YYImageDemo/Info.plist
  35. BIN
      Demo/YYImageDemo/ResourceTwitter.bundle/fav02l-sheet.png
  36. BIN
      Demo/YYImageDemo/ResourceTwitter.bundle/fav02l-sheet@2x.png
  37. 74 0
      Demo/YYImageDemo/UIControl+YYAdd.h
  38. 108 0
      Demo/YYImageDemo/UIControl+YYAdd.m
  39. 43 0
      Demo/YYImageDemo/UIGestureRecognizer+YYAdd.h
  40. 77 0
      Demo/YYImageDemo/UIGestureRecognizer+YYAdd.m
  41. 51 0
      Demo/YYImageDemo/UIView+YYAdd.h
  42. 139 0
      Demo/YYImageDemo/UIView+YYAdd.m
  43. 15 0
      Demo/YYImageDemo/ViewController.h
  44. 25 0
      Demo/YYImageDemo/ViewController.m
  45. 42 0
      Demo/YYImageDemo/YYBPGCoder.h
  46. 272 0
      Demo/YYImageDemo/YYBPGCoder.m
  47. 13 0
      Demo/YYImageDemo/YYImageBenchmark.h
  48. 785 0
      Demo/YYImageDemo/YYImageBenchmark.m
  49. 13 0
      Demo/YYImageDemo/YYImageDisplayExample.h
  50. 143 0
      Demo/YYImageDemo/YYImageDisplayExample.m
  51. 13 0
      Demo/YYImageDemo/YYImageExample.h
  52. 65 0
      Demo/YYImageDemo/YYImageExample.m
  53. 21 0
      Demo/YYImageDemo/YYImageExampleHelper.h
  54. 71 0
      Demo/YYImageDemo/YYImageExampleHelper.m
  55. 13 0
      Demo/YYImageDemo/YYImageProgressiveExample.h
  56. 117 0
      Demo/YYImageDemo/YYImageProgressiveExample.m
  57. 13 0
      Demo/YYImageDemo/YYWebImageExample.h
  58. 232 0
      Demo/YYImageDemo/YYWebImageExample.m
  59. BIN
      Demo/YYImageDemo/cube@2x.png
  60. BIN
      Demo/YYImageDemo/google@2x.webp
  61. 16 0
      Demo/YYImageDemo/main.m
  62. BIN
      Demo/YYImageDemo/mew_baseline.gif
  63. BIN
      Demo/YYImageDemo/mew_baseline.jpg
  64. BIN
      Demo/YYImageDemo/mew_baseline.png
  65. BIN
      Demo/YYImageDemo/mew_interlaced.gif
  66. BIN
      Demo/YYImageDemo/mew_interlaced.png
  67. BIN
      Demo/YYImageDemo/mew_progressive.jpg
  68. BIN
      Demo/YYImageDemo/niconiconi@2x.gif
  69. BIN
      Demo/YYImageDemo/nyancat@2x.webp
  70. BIN
      Demo/YYImageDemo/pia@2x.png
  71. BIN
      Demo/YYImageDemo/wall-e@2x.webp
  72. 26 0
      Framework/Info.plist
  73. 210 0
      Framework/YYImage-Static.xcodeproj/project.pbxproj
  74. 7 0
      Framework/YYImage-Static.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  75. 362 0
      Framework/YYImage.xcodeproj/project.pbxproj
  76. 7 0
      Framework/YYImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata
  77. 80 0
      Framework/YYImage.xcodeproj/xcshareddata/xcschemes/YYImage.xcscheme
  78. 1 1
      LICENSE
  79. 154 1
      README.md
  80. 142 0
      Vendor/WebP.framework/Headers/config.h
  81. 503 0
      Vendor/WebP.framework/Headers/decode.h
  82. 224 0
      Vendor/WebP.framework/Headers/demux.h
  83. 520 0
      Vendor/WebP.framework/Headers/encode.h
  84. 88 0
      Vendor/WebP.framework/Headers/format_constants.h
  85. 399 0
      Vendor/WebP.framework/Headers/mux.h
  86. 97 0
      Vendor/WebP.framework/Headers/mux_types.h
  87. 48 0
      Vendor/WebP.framework/Headers/types.h
  88. BIN
      Vendor/WebP.framework/WebP
  89. 135 0
      Vendor/WebP.sh
  90. 21 0
      YYImage.podspec
  91. 121 0
      YYImage/YYAnimatedImageView.h
  92. 653 0
      YYImage/YYAnimatedImageView.m
  93. 95 0
      YYImage/YYFrameImage.h
  94. 150 0
      YYImage/YYFrameImage.m
  95. 85 0
      YYImage/YYImage.h
  96. 255 0
      YYImage/YYImage.m
  97. 502 0
      YYImage/YYImageCoder.h
  98. 2841 0
      YYImage/YYImageCoder.m
  99. 98 0
      YYImage/YYSpriteSheetImage.h
  100. 80 0
      YYImage/YYSpriteSheetImage.m

+ 485 - 0
Demo/YYImageDemo.xcodeproj/project.pbxproj

@@ -0,0 +1,485 @@
+// !$*UTF8*$!
+{
+	archiveVersion = 1;
+	classes = {
+	};
+	objectVersion = 46;
+	objects = {
+
+/* Begin PBXBuildFile section */
+		D93B2D521BDB944B009B66B0 /* UIView+YYAdd.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D511BDB944B009B66B0 /* UIView+YYAdd.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D611BDB96DE009B66B0 /* YYImageExample.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D541BDB96DE009B66B0 /* YYImageExample.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D621BDB96DE009B66B0 /* YYImageDisplayExample.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D561BDB96DE009B66B0 /* YYImageDisplayExample.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D631BDB96DE009B66B0 /* YYImageProgressiveExample.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D581BDB96DE009B66B0 /* YYImageProgressiveExample.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D661BDB96DE009B66B0 /* YYImageExampleHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D5E1BDB96DE009B66B0 /* YYImageExampleHelper.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D6D1BDB99BA009B66B0 /* UIControl+YYAdd.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D6A1BDB99BA009B66B0 /* UIControl+YYAdd.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D6E1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.m in Sources */ = {isa = PBXBuildFile; fileRef = D93B2D6C1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.m */; settings = {ASSET_TAGS = (); }; };
+		D93B2D701BDBA586009B66B0 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = D93B2D6F1BDBA586009B66B0 /* libz.tbd */; };
+		D93B2D801BDBBE66009B66B0 /* niconiconi@2x.gif in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D741BDBBE66009B66B0 /* niconiconi@2x.gif */; settings = {ASSET_TAGS = (); }; };
+		D93B2D811BDBBE66009B66B0 /* google@2x.webp in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D751BDBBE66009B66B0 /* google@2x.webp */; settings = {ASSET_TAGS = (); }; };
+		D93B2D821BDBBE66009B66B0 /* nyancat@2x.webp in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D761BDBBE66009B66B0 /* nyancat@2x.webp */; settings = {ASSET_TAGS = (); }; };
+		D93B2D831BDBBE66009B66B0 /* pia@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D771BDBBE66009B66B0 /* pia@2x.png */; settings = {ASSET_TAGS = (); }; };
+		D93B2D841BDBBE66009B66B0 /* cube@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D781BDBBE66009B66B0 /* cube@2x.png */; settings = {ASSET_TAGS = (); }; };
+		D93B2D851BDBBE66009B66B0 /* wall-e@2x.webp in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D791BDBBE66009B66B0 /* wall-e@2x.webp */; settings = {ASSET_TAGS = (); }; };
+		D93B2D861BDBBE66009B66B0 /* mew_baseline.jpg in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7A1BDBBE66009B66B0 /* mew_baseline.jpg */; settings = {ASSET_TAGS = (); }; };
+		D93B2D871BDBBE66009B66B0 /* mew_progressive.jpg in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7B1BDBBE66009B66B0 /* mew_progressive.jpg */; settings = {ASSET_TAGS = (); }; };
+		D93B2D881BDBBE66009B66B0 /* mew_baseline.png in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7C1BDBBE66009B66B0 /* mew_baseline.png */; settings = {ASSET_TAGS = (); }; };
+		D93B2D891BDBBE66009B66B0 /* mew_interlaced.png in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7D1BDBBE66009B66B0 /* mew_interlaced.png */; settings = {ASSET_TAGS = (); }; };
+		D93B2D8A1BDBBE66009B66B0 /* mew_baseline.gif in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7E1BDBBE66009B66B0 /* mew_baseline.gif */; settings = {ASSET_TAGS = (); }; };
+		D93B2D8B1BDBBE66009B66B0 /* mew_interlaced.gif in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D7F1BDBBE66009B66B0 /* mew_interlaced.gif */; settings = {ASSET_TAGS = (); }; };
+		D93B2D8E1BDBBE74009B66B0 /* ResourceTwitter.bundle in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D8C1BDBBE74009B66B0 /* ResourceTwitter.bundle */; settings = {ASSET_TAGS = (); }; };
+		D93B2D8F1BDBBE74009B66B0 /* EmoticonWeibo.bundle in Resources */ = {isa = PBXBuildFile; fileRef = D93B2D8D1BDBBE74009B66B0 /* EmoticonWeibo.bundle */; settings = {ASSET_TAGS = (); }; };
+		D946DE071BD13D75006B77F7 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE061BD13D75006B77F7 /* main.m */; };
+		D946DE0A1BD13D75006B77F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE091BD13D75006B77F7 /* AppDelegate.m */; };
+		D946DE0D1BD13D75006B77F7 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE0C1BD13D75006B77F7 /* ViewController.m */; };
+		D946DE101BD13D75006B77F7 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D946DE0E1BD13D75006B77F7 /* Main.storyboard */; };
+		D946DE121BD13D75006B77F7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D946DE111BD13D75006B77F7 /* Assets.xcassets */; };
+		D946DE151BD13D75006B77F7 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D946DE131BD13D75006B77F7 /* LaunchScreen.storyboard */; };
+		D946DE391BD13E28006B77F7 /* YYAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE301BD13E28006B77F7 /* YYAnimatedImageView.m */; settings = {ASSET_TAGS = (); }; };
+		D946DE3A1BD13E28006B77F7 /* YYFrameImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE321BD13E28006B77F7 /* YYFrameImage.m */; settings = {ASSET_TAGS = (); }; };
+		D946DE3B1BD13E28006B77F7 /* YYImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE341BD13E28006B77F7 /* YYImage.m */; settings = {ASSET_TAGS = (); }; };
+		D946DE3C1BD13E28006B77F7 /* YYImageCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE361BD13E28006B77F7 /* YYImageCoder.m */; settings = {ASSET_TAGS = (); }; };
+		D946DE3D1BD13E28006B77F7 /* YYSpriteSheetImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D946DE381BD13E28006B77F7 /* YYSpriteSheetImage.m */; settings = {ASSET_TAGS = (); }; };
+		D946DE3F1BD13E32006B77F7 /* WebP.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D946DE3E1BD13E32006B77F7 /* WebP.framework */; settings = {ASSET_TAGS = (); }; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+		D93B2D501BDB944B009B66B0 /* UIView+YYAdd.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIView+YYAdd.h"; sourceTree = "<group>"; };
+		D93B2D511BDB944B009B66B0 /* UIView+YYAdd.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIView+YYAdd.m"; sourceTree = "<group>"; };
+		D93B2D531BDB96DE009B66B0 /* YYImageExample.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageExample.h; sourceTree = "<group>"; };
+		D93B2D541BDB96DE009B66B0 /* YYImageExample.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageExample.m; sourceTree = "<group>"; };
+		D93B2D551BDB96DE009B66B0 /* YYImageDisplayExample.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageDisplayExample.h; sourceTree = "<group>"; };
+		D93B2D561BDB96DE009B66B0 /* YYImageDisplayExample.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageDisplayExample.m; sourceTree = "<group>"; };
+		D93B2D571BDB96DE009B66B0 /* YYImageProgressiveExample.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageProgressiveExample.h; sourceTree = "<group>"; };
+		D93B2D581BDB96DE009B66B0 /* YYImageProgressiveExample.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageProgressiveExample.m; sourceTree = "<group>"; };
+		D93B2D591BDB96DE009B66B0 /* YYWebImageExample.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYWebImageExample.h; sourceTree = "<group>"; };
+		D93B2D5A1BDB96DE009B66B0 /* YYWebImageExample.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYWebImageExample.m; sourceTree = "<group>"; };
+		D93B2D5B1BDB96DE009B66B0 /* YYImageBenchmark.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageBenchmark.h; sourceTree = "<group>"; };
+		D93B2D5C1BDB96DE009B66B0 /* YYImageBenchmark.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageBenchmark.m; sourceTree = "<group>"; };
+		D93B2D5D1BDB96DE009B66B0 /* YYImageExampleHelper.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageExampleHelper.h; sourceTree = "<group>"; };
+		D93B2D5E1BDB96DE009B66B0 /* YYImageExampleHelper.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageExampleHelper.m; sourceTree = "<group>"; };
+		D93B2D5F1BDB96DE009B66B0 /* YYBPGCoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYBPGCoder.h; sourceTree = "<group>"; };
+		D93B2D601BDB96DE009B66B0 /* YYBPGCoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYBPGCoder.m; sourceTree = "<group>"; };
+		D93B2D691BDB99BA009B66B0 /* UIControl+YYAdd.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIControl+YYAdd.h"; sourceTree = "<group>"; };
+		D93B2D6A1BDB99BA009B66B0 /* UIControl+YYAdd.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIControl+YYAdd.m"; sourceTree = "<group>"; };
+		D93B2D6B1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "UIGestureRecognizer+YYAdd.h"; sourceTree = "<group>"; };
+		D93B2D6C1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "UIGestureRecognizer+YYAdd.m"; sourceTree = "<group>"; };
+		D93B2D6F1BDBA586009B66B0 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
+		D93B2D741BDBBE66009B66B0 /* niconiconi@2x.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = "niconiconi@2x.gif"; sourceTree = "<group>"; };
+		D93B2D751BDBBE66009B66B0 /* google@2x.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = "google@2x.webp"; sourceTree = "<group>"; };
+		D93B2D761BDBBE66009B66B0 /* nyancat@2x.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = "nyancat@2x.webp"; sourceTree = "<group>"; };
+		D93B2D771BDBBE66009B66B0 /* pia@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "pia@2x.png"; sourceTree = "<group>"; };
+		D93B2D781BDBBE66009B66B0 /* cube@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "cube@2x.png"; sourceTree = "<group>"; };
+		D93B2D791BDBBE66009B66B0 /* wall-e@2x.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = "wall-e@2x.webp"; sourceTree = "<group>"; };
+		D93B2D7A1BDBBE66009B66B0 /* mew_baseline.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = mew_baseline.jpg; sourceTree = "<group>"; };
+		D93B2D7B1BDBBE66009B66B0 /* mew_progressive.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = mew_progressive.jpg; sourceTree = "<group>"; };
+		D93B2D7C1BDBBE66009B66B0 /* mew_baseline.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = mew_baseline.png; sourceTree = "<group>"; };
+		D93B2D7D1BDBBE66009B66B0 /* mew_interlaced.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = mew_interlaced.png; sourceTree = "<group>"; };
+		D93B2D7E1BDBBE66009B66B0 /* mew_baseline.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = mew_baseline.gif; sourceTree = "<group>"; };
+		D93B2D7F1BDBBE66009B66B0 /* mew_interlaced.gif */ = {isa = PBXFileReference; lastKnownFileType = image.gif; path = mew_interlaced.gif; sourceTree = "<group>"; };
+		D93B2D8C1BDBBE74009B66B0 /* ResourceTwitter.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; path = ResourceTwitter.bundle; sourceTree = "<group>"; };
+		D93B2D8D1BDBBE74009B66B0 /* EmoticonWeibo.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; path = EmoticonWeibo.bundle; sourceTree = "<group>"; };
+		D946DE021BD13D75006B77F7 /* YYImageDemo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = YYImageDemo.app; sourceTree = BUILT_PRODUCTS_DIR; };
+		D946DE061BD13D75006B77F7 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = "<group>"; };
+		D946DE081BD13D75006B77F7 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
+		D946DE091BD13D75006B77F7 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
+		D946DE0B1BD13D75006B77F7 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = "<group>"; };
+		D946DE0C1BD13D75006B77F7 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = "<group>"; };
+		D946DE0F1BD13D75006B77F7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
+		D946DE111BD13D75006B77F7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
+		D946DE141BD13D75006B77F7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
+		D946DE161BD13D75006B77F7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+		D946DE2F1BD13E28006B77F7 /* YYAnimatedImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYAnimatedImageView.h; sourceTree = "<group>"; };
+		D946DE301BD13E28006B77F7 /* YYAnimatedImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYAnimatedImageView.m; sourceTree = "<group>"; };
+		D946DE311BD13E28006B77F7 /* YYFrameImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYFrameImage.h; sourceTree = "<group>"; };
+		D946DE321BD13E28006B77F7 /* YYFrameImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYFrameImage.m; sourceTree = "<group>"; };
+		D946DE331BD13E28006B77F7 /* YYImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImage.h; sourceTree = "<group>"; };
+		D946DE341BD13E28006B77F7 /* YYImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImage.m; sourceTree = "<group>"; };
+		D946DE351BD13E28006B77F7 /* YYImageCoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageCoder.h; sourceTree = "<group>"; };
+		D946DE361BD13E28006B77F7 /* YYImageCoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageCoder.m; sourceTree = "<group>"; };
+		D946DE371BD13E28006B77F7 /* YYSpriteSheetImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYSpriteSheetImage.h; sourceTree = "<group>"; };
+		D946DE381BD13E28006B77F7 /* YYSpriteSheetImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYSpriteSheetImage.m; sourceTree = "<group>"; };
+		D946DE3E1BD13E32006B77F7 /* WebP.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = WebP.framework; path = ../Vendor/WebP.framework; sourceTree = "<group>"; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+		D946DDFF1BD13D75006B77F7 /* Frameworks */ = {
+			isa = PBXFrameworksBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D93B2D701BDBA586009B66B0 /* libz.tbd in Frameworks */,
+				D946DE3F1BD13E32006B77F7 /* WebP.framework in Frameworks */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+		D93B2D681BDB97D8009B66B0 /* Not available */ = {
+			isa = PBXGroup;
+			children = (
+				D93B2D591BDB96DE009B66B0 /* YYWebImageExample.h */,
+				D93B2D5A1BDB96DE009B66B0 /* YYWebImageExample.m */,
+				D93B2D5B1BDB96DE009B66B0 /* YYImageBenchmark.h */,
+				D93B2D5C1BDB96DE009B66B0 /* YYImageBenchmark.m */,
+				D93B2D5F1BDB96DE009B66B0 /* YYBPGCoder.h */,
+				D93B2D601BDB96DE009B66B0 /* YYBPGCoder.m */,
+			);
+			name = "Not available";
+			sourceTree = "<group>";
+		};
+		D93B2D731BDBBDFC009B66B0 /* Resources */ = {
+			isa = PBXGroup;
+			children = (
+				D93B2D8C1BDBBE74009B66B0 /* ResourceTwitter.bundle */,
+				D93B2D8D1BDBBE74009B66B0 /* EmoticonWeibo.bundle */,
+				D93B2D741BDBBE66009B66B0 /* niconiconi@2x.gif */,
+				D93B2D751BDBBE66009B66B0 /* google@2x.webp */,
+				D93B2D761BDBBE66009B66B0 /* nyancat@2x.webp */,
+				D93B2D771BDBBE66009B66B0 /* pia@2x.png */,
+				D93B2D781BDBBE66009B66B0 /* cube@2x.png */,
+				D93B2D791BDBBE66009B66B0 /* wall-e@2x.webp */,
+				D93B2D7A1BDBBE66009B66B0 /* mew_baseline.jpg */,
+				D93B2D7B1BDBBE66009B66B0 /* mew_progressive.jpg */,
+				D93B2D7C1BDBBE66009B66B0 /* mew_baseline.png */,
+				D93B2D7D1BDBBE66009B66B0 /* mew_interlaced.png */,
+				D93B2D7E1BDBBE66009B66B0 /* mew_baseline.gif */,
+				D93B2D7F1BDBBE66009B66B0 /* mew_interlaced.gif */,
+			);
+			name = Resources;
+			sourceTree = "<group>";
+		};
+		D946DDF91BD13D75006B77F7 = {
+			isa = PBXGroup;
+			children = (
+				D93B2D6F1BDBA586009B66B0 /* libz.tbd */,
+				D946DE3E1BD13E32006B77F7 /* WebP.framework */,
+				D946DE2E1BD13E28006B77F7 /* YYImage */,
+				D946DE041BD13D75006B77F7 /* YYImageDemo */,
+				D946DE031BD13D75006B77F7 /* Products */,
+			);
+			sourceTree = "<group>";
+		};
+		D946DE031BD13D75006B77F7 /* Products */ = {
+			isa = PBXGroup;
+			children = (
+				D946DE021BD13D75006B77F7 /* YYImageDemo.app */,
+			);
+			name = Products;
+			sourceTree = "<group>";
+		};
+		D946DE041BD13D75006B77F7 /* YYImageDemo */ = {
+			isa = PBXGroup;
+			children = (
+				D93B2D531BDB96DE009B66B0 /* YYImageExample.h */,
+				D93B2D541BDB96DE009B66B0 /* YYImageExample.m */,
+				D93B2D551BDB96DE009B66B0 /* YYImageDisplayExample.h */,
+				D93B2D561BDB96DE009B66B0 /* YYImageDisplayExample.m */,
+				D93B2D571BDB96DE009B66B0 /* YYImageProgressiveExample.h */,
+				D93B2D581BDB96DE009B66B0 /* YYImageProgressiveExample.m */,
+				D93B2D5D1BDB96DE009B66B0 /* YYImageExampleHelper.h */,
+				D93B2D5E1BDB96DE009B66B0 /* YYImageExampleHelper.m */,
+				D93B2D731BDBBDFC009B66B0 /* Resources */,
+				D93B2D681BDB97D8009B66B0 /* Not available */,
+				D946DE051BD13D75006B77F7 /* Supporting Files */,
+			);
+			path = YYImageDemo;
+			sourceTree = "<group>";
+		};
+		D946DE051BD13D75006B77F7 /* Supporting Files */ = {
+			isa = PBXGroup;
+			children = (
+				D946DE081BD13D75006B77F7 /* AppDelegate.h */,
+				D946DE091BD13D75006B77F7 /* AppDelegate.m */,
+				D946DE0B1BD13D75006B77F7 /* ViewController.h */,
+				D946DE0C1BD13D75006B77F7 /* ViewController.m */,
+				D93B2D501BDB944B009B66B0 /* UIView+YYAdd.h */,
+				D93B2D511BDB944B009B66B0 /* UIView+YYAdd.m */,
+				D93B2D691BDB99BA009B66B0 /* UIControl+YYAdd.h */,
+				D93B2D6A1BDB99BA009B66B0 /* UIControl+YYAdd.m */,
+				D93B2D6B1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.h */,
+				D93B2D6C1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.m */,
+				D946DE0E1BD13D75006B77F7 /* Main.storyboard */,
+				D946DE111BD13D75006B77F7 /* Assets.xcassets */,
+				D946DE131BD13D75006B77F7 /* LaunchScreen.storyboard */,
+				D946DE161BD13D75006B77F7 /* Info.plist */,
+				D946DE061BD13D75006B77F7 /* main.m */,
+			);
+			name = "Supporting Files";
+			sourceTree = "<group>";
+		};
+		D946DE2E1BD13E28006B77F7 /* YYImage */ = {
+			isa = PBXGroup;
+			children = (
+				D946DE331BD13E28006B77F7 /* YYImage.h */,
+				D946DE341BD13E28006B77F7 /* YYImage.m */,
+				D946DE311BD13E28006B77F7 /* YYFrameImage.h */,
+				D946DE321BD13E28006B77F7 /* YYFrameImage.m */,
+				D946DE371BD13E28006B77F7 /* YYSpriteSheetImage.h */,
+				D946DE381BD13E28006B77F7 /* YYSpriteSheetImage.m */,
+				D946DE351BD13E28006B77F7 /* YYImageCoder.h */,
+				D946DE361BD13E28006B77F7 /* YYImageCoder.m */,
+				D946DE2F1BD13E28006B77F7 /* YYAnimatedImageView.h */,
+				D946DE301BD13E28006B77F7 /* YYAnimatedImageView.m */,
+			);
+			name = YYImage;
+			path = ../YYImage;
+			sourceTree = "<group>";
+		};
+/* End PBXGroup section */
+
+/* Begin PBXNativeTarget section */
+		D946DE011BD13D75006B77F7 /* YYImageDemo */ = {
+			isa = PBXNativeTarget;
+			buildConfigurationList = D946DE191BD13D75006B77F7 /* Build configuration list for PBXNativeTarget "YYImageDemo" */;
+			buildPhases = (
+				D946DDFE1BD13D75006B77F7 /* Sources */,
+				D946DDFF1BD13D75006B77F7 /* Frameworks */,
+				D946DE001BD13D75006B77F7 /* Resources */,
+			);
+			buildRules = (
+			);
+			dependencies = (
+			);
+			name = YYImageDemo;
+			productName = YYImageDemo;
+			productReference = D946DE021BD13D75006B77F7 /* YYImageDemo.app */;
+			productType = "com.apple.product-type.application";
+		};
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+		D946DDFA1BD13D75006B77F7 /* Project object */ = {
+			isa = PBXProject;
+			attributes = {
+				LastUpgradeCheck = 0700;
+				ORGANIZATIONNAME = ibireme;
+				TargetAttributes = {
+					D946DE011BD13D75006B77F7 = {
+						CreatedOnToolsVersion = 7.0.1;
+					};
+				};
+			};
+			buildConfigurationList = D946DDFD1BD13D75006B77F7 /* Build configuration list for PBXProject "YYImageDemo" */;
+			compatibilityVersion = "Xcode 3.2";
+			developmentRegion = English;
+			hasScannedForEncodings = 0;
+			knownRegions = (
+				en,
+				Base,
+			);
+			mainGroup = D946DDF91BD13D75006B77F7;
+			productRefGroup = D946DE031BD13D75006B77F7 /* Products */;
+			projectDirPath = "";
+			projectRoot = "";
+			targets = (
+				D946DE011BD13D75006B77F7 /* YYImageDemo */,
+			);
+		};
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+		D946DE001BD13D75006B77F7 /* Resources */ = {
+			isa = PBXResourcesBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D93B2D801BDBBE66009B66B0 /* niconiconi@2x.gif in Resources */,
+				D93B2D831BDBBE66009B66B0 /* pia@2x.png in Resources */,
+				D93B2D881BDBBE66009B66B0 /* mew_baseline.png in Resources */,
+				D946DE151BD13D75006B77F7 /* LaunchScreen.storyboard in Resources */,
+				D93B2D871BDBBE66009B66B0 /* mew_progressive.jpg in Resources */,
+				D946DE121BD13D75006B77F7 /* Assets.xcassets in Resources */,
+				D93B2D8F1BDBBE74009B66B0 /* EmoticonWeibo.bundle in Resources */,
+				D93B2D891BDBBE66009B66B0 /* mew_interlaced.png in Resources */,
+				D93B2D811BDBBE66009B66B0 /* google@2x.webp in Resources */,
+				D946DE101BD13D75006B77F7 /* Main.storyboard in Resources */,
+				D93B2D8E1BDBBE74009B66B0 /* ResourceTwitter.bundle in Resources */,
+				D93B2D821BDBBE66009B66B0 /* nyancat@2x.webp in Resources */,
+				D93B2D851BDBBE66009B66B0 /* wall-e@2x.webp in Resources */,
+				D93B2D8A1BDBBE66009B66B0 /* mew_baseline.gif in Resources */,
+				D93B2D8B1BDBBE66009B66B0 /* mew_interlaced.gif in Resources */,
+				D93B2D861BDBBE66009B66B0 /* mew_baseline.jpg in Resources */,
+				D93B2D841BDBBE66009B66B0 /* cube@2x.png in Resources */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+		D946DDFE1BD13D75006B77F7 /* Sources */ = {
+			isa = PBXSourcesBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D93B2D631BDB96DE009B66B0 /* YYImageProgressiveExample.m in Sources */,
+				D946DE0D1BD13D75006B77F7 /* ViewController.m in Sources */,
+				D93B2D621BDB96DE009B66B0 /* YYImageDisplayExample.m in Sources */,
+				D93B2D6D1BDB99BA009B66B0 /* UIControl+YYAdd.m in Sources */,
+				D946DE3A1BD13E28006B77F7 /* YYFrameImage.m in Sources */,
+				D946DE0A1BD13D75006B77F7 /* AppDelegate.m in Sources */,
+				D946DE071BD13D75006B77F7 /* main.m in Sources */,
+				D946DE3D1BD13E28006B77F7 /* YYSpriteSheetImage.m in Sources */,
+				D93B2D6E1BDB99BA009B66B0 /* UIGestureRecognizer+YYAdd.m in Sources */,
+				D93B2D521BDB944B009B66B0 /* UIView+YYAdd.m in Sources */,
+				D946DE391BD13E28006B77F7 /* YYAnimatedImageView.m in Sources */,
+				D946DE3C1BD13E28006B77F7 /* YYImageCoder.m in Sources */,
+				D93B2D611BDB96DE009B66B0 /* YYImageExample.m in Sources */,
+				D93B2D661BDB96DE009B66B0 /* YYImageExampleHelper.m in Sources */,
+				D946DE3B1BD13E28006B77F7 /* YYImage.m in Sources */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXVariantGroup section */
+		D946DE0E1BD13D75006B77F7 /* Main.storyboard */ = {
+			isa = PBXVariantGroup;
+			children = (
+				D946DE0F1BD13D75006B77F7 /* Base */,
+			);
+			name = Main.storyboard;
+			sourceTree = "<group>";
+		};
+		D946DE131BD13D75006B77F7 /* LaunchScreen.storyboard */ = {
+			isa = PBXVariantGroup;
+			children = (
+				D946DE141BD13D75006B77F7 /* Base */,
+			);
+			name = LaunchScreen.storyboard;
+			sourceTree = "<group>";
+		};
+/* End PBXVariantGroup section */
+
+/* Begin XCBuildConfiguration section */
+		D946DE171BD13D75006B77F7 /* Debug */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ALWAYS_SEARCH_USER_PATHS = NO;
+				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+				CLANG_CXX_LIBRARY = "libc++";
+				CLANG_ENABLE_MODULES = YES;
+				CLANG_ENABLE_OBJC_ARC = YES;
+				CLANG_WARN_BOOL_CONVERSION = YES;
+				CLANG_WARN_CONSTANT_CONVERSION = YES;
+				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+				CLANG_WARN_EMPTY_BODY = YES;
+				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INT_CONVERSION = YES;
+				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_UNREACHABLE_CODE = YES;
+				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+				COPY_PHASE_STRIP = NO;
+				DEBUG_INFORMATION_FORMAT = dwarf;
+				ENABLE_STRICT_OBJC_MSGSEND = YES;
+				ENABLE_TESTABILITY = YES;
+				GCC_C_LANGUAGE_STANDARD = gnu99;
+				GCC_DYNAMIC_NO_PIC = NO;
+				GCC_NO_COMMON_BLOCKS = YES;
+				GCC_OPTIMIZATION_LEVEL = 0;
+				GCC_PREPROCESSOR_DEFINITIONS = (
+					"DEBUG=1",
+					"$(inherited)",
+				);
+				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+				GCC_WARN_UNDECLARED_SELECTOR = YES;
+				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+				GCC_WARN_UNUSED_FUNCTION = YES;
+				GCC_WARN_UNUSED_VARIABLE = YES;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+				MTL_ENABLE_DEBUG_INFO = YES;
+				ONLY_ACTIVE_ARCH = YES;
+				SDKROOT = iphoneos;
+			};
+			name = Debug;
+		};
+		D946DE181BD13D75006B77F7 /* Release */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ALWAYS_SEARCH_USER_PATHS = NO;
+				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+				CLANG_CXX_LIBRARY = "libc++";
+				CLANG_ENABLE_MODULES = YES;
+				CLANG_ENABLE_OBJC_ARC = YES;
+				CLANG_WARN_BOOL_CONVERSION = YES;
+				CLANG_WARN_CONSTANT_CONVERSION = YES;
+				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+				CLANG_WARN_EMPTY_BODY = YES;
+				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INT_CONVERSION = YES;
+				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_UNREACHABLE_CODE = YES;
+				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+				COPY_PHASE_STRIP = NO;
+				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+				ENABLE_NS_ASSERTIONS = NO;
+				ENABLE_STRICT_OBJC_MSGSEND = YES;
+				GCC_C_LANGUAGE_STANDARD = gnu99;
+				GCC_NO_COMMON_BLOCKS = YES;
+				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+				GCC_WARN_UNDECLARED_SELECTOR = YES;
+				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+				GCC_WARN_UNUSED_FUNCTION = YES;
+				GCC_WARN_UNUSED_VARIABLE = YES;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+				MTL_ENABLE_DEBUG_INFO = NO;
+				SDKROOT = iphoneos;
+				VALIDATE_PRODUCT = YES;
+			};
+			name = Release;
+		};
+		D946DE1A1BD13D75006B77F7 /* Debug */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+				FRAMEWORK_SEARCH_PATHS = "\"$(SRCROOT)/../Vendor\"";
+				INFOPLIST_FILE = YYImageDemo/Info.plist;
+				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+				LIBRARY_SEARCH_PATHS = "";
+				PRODUCT_BUNDLE_IDENTIFIER = com.ibireme.YYImageDemo;
+				PRODUCT_NAME = "$(TARGET_NAME)";
+			};
+			name = Debug;
+		};
+		D946DE1B1BD13D75006B77F7 /* Release */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
+				FRAMEWORK_SEARCH_PATHS = "\"$(SRCROOT)/../Vendor\"";
+				INFOPLIST_FILE = YYImageDemo/Info.plist;
+				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
+				LIBRARY_SEARCH_PATHS = "";
+				PRODUCT_BUNDLE_IDENTIFIER = com.ibireme.YYImageDemo;
+				PRODUCT_NAME = "$(TARGET_NAME)";
+			};
+			name = Release;
+		};
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+		D946DDFD1BD13D75006B77F7 /* Build configuration list for PBXProject "YYImageDemo" */ = {
+			isa = XCConfigurationList;
+			buildConfigurations = (
+				D946DE171BD13D75006B77F7 /* Debug */,
+				D946DE181BD13D75006B77F7 /* Release */,
+			);
+			defaultConfigurationIsVisible = 0;
+			defaultConfigurationName = Release;
+		};
+		D946DE191BD13D75006B77F7 /* Build configuration list for PBXNativeTarget "YYImageDemo" */ = {
+			isa = XCConfigurationList;
+			buildConfigurations = (
+				D946DE1A1BD13D75006B77F7 /* Debug */,
+				D946DE1B1BD13D75006B77F7 /* Release */,
+			);
+			defaultConfigurationIsVisible = 0;
+			defaultConfigurationName = Release;
+		};
+/* End XCConfigurationList section */
+	};
+	rootObject = D946DDFA1BD13D75006B77F7 /* Project object */;
+}

+ 7 - 0
Demo/YYImageDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+   version = "1.0">
+   <FileRef
+      location = "self:YYImageDemo.xcodeproj">
+   </FileRef>
+</Workspace>

+ 17 - 0
Demo/YYImageDemo/AppDelegate.h

@@ -0,0 +1,17 @@
+//
+//  AppDelegate.h
+//  YYImageDemo
+//
+//  Created by ibireme on 15/10/16.
+//  Copyright © 2015年 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface AppDelegate : UIResponder <UIApplicationDelegate>
+
+@property (strong, nonatomic) UIWindow *window;
+
+
+@end
+

+ 45 - 0
Demo/YYImageDemo/AppDelegate.m

@@ -0,0 +1,45 @@
+//
+//  AppDelegate.m
+//  YYImageDemo
+//
+//  Created by ibireme on 15/10/16.
+//  Copyright © 2015年 ibireme. All rights reserved.
+//
+
+#import "AppDelegate.h"
+
+@interface AppDelegate ()
+
+@end
+
+@implementation AppDelegate
+
+
+- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
+    // Override point for customization after application launch.
+    return YES;
+}
+
+- (void)applicationWillResignActive:(UIApplication *)application {
+    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
+    // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
+}
+
+- (void)applicationDidEnterBackground:(UIApplication *)application {
+    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
+    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
+}
+
+- (void)applicationWillEnterForeground:(UIApplication *)application {
+    // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
+}
+
+- (void)applicationDidBecomeActive:(UIApplication *)application {
+    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
+}
+
+- (void)applicationWillTerminate:(UIApplication *)application {
+    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
+}
+
+@end

+ 38 - 0
Demo/YYImageDemo/Assets.xcassets/AppIcon.appiconset/Contents.json

@@ -0,0 +1,38 @@
+{
+  "images" : [
+    {
+      "idiom" : "iphone",
+      "size" : "29x29",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "iphone",
+      "size" : "29x29",
+      "scale" : "3x"
+    },
+    {
+      "idiom" : "iphone",
+      "size" : "40x40",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "iphone",
+      "size" : "40x40",
+      "scale" : "3x"
+    },
+    {
+      "idiom" : "iphone",
+      "size" : "60x60",
+      "scale" : "2x"
+    },
+    {
+      "idiom" : "iphone",
+      "size" : "60x60",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "version" : 1,
+    "author" : "xcode"
+  }
+}

+ 27 - 0
Demo/YYImageDemo/Base.lproj/LaunchScreen.storyboard

@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="8150" systemVersion="15A204g" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" initialViewController="01J-lp-oVM">
+    <dependencies>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="8122"/>
+    </dependencies>
+    <scenes>
+        <!--View Controller-->
+        <scene sceneID="EHf-IW-A2E">
+            <objects>
+                <viewController id="01J-lp-oVM" sceneMemberID="viewController">
+                    <layoutGuides>
+                        <viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
+                        <viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
+                    </layoutGuides>
+                    <view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
+                        <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+                        <animations/>
+                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+                    </view>
+                </viewController>
+                <placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="53" y="375"/>
+        </scene>
+    </scenes>
+</document>

+ 28 - 0
Demo/YYImageDemo/Base.lproj/Main.storyboard

@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="8191" systemVersion="15A284" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
+    <dependencies>
+        <deployment identifier="iOS"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="8154"/>
+    </dependencies>
+    <scenes>
+        <!--View Controller-->
+        <scene sceneID="tne-QT-ifu">
+            <objects>
+                <viewController id="BYZ-38-t0r" customClass="ViewController" sceneMemberID="viewController">
+                    <layoutGuides>
+                        <viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
+                        <viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
+                    </layoutGuides>
+                    <view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
+                        <rect key="frame" x="0.0" y="0.0" width="600" height="600"/>
+                        <autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
+                        <animations/>
+                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+                    </view>
+                </viewController>
+                <placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
+            </objects>
+            <point key="canvasLocation" x="-160" y="879"/>
+        </scene>
+    </scenes>
+</document>

BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_aini@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_aini@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_baibai@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_baibai@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_beishang@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_beishang@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bishi@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bishi@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bizui@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_bizui@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chanzui@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chanzui@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chijing@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_chijing@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dahaqi@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dahaqi@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dalian@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_dalian@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_ganmao@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_ganmao@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_guzhang@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_guzhang@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haha@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haha@3x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haixiu@2x.png


BIN
Demo/YYImageDemo/EmoticonWeibo.bundle/com.sina.default/d_haixiu@3x.png


+ 40 - 0
Demo/YYImageDemo/Info.plist

@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>en</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>APPL</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>1</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>UILaunchStoryboardName</key>
+	<string>LaunchScreen</string>
+	<key>UIMainStoryboardFile</key>
+	<string>Main</string>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+</dict>
+</plist>

BIN
Demo/YYImageDemo/ResourceTwitter.bundle/fav02l-sheet.png


BIN
Demo/YYImageDemo/ResourceTwitter.bundle/fav02l-sheet@2x.png


+ 74 - 0
Demo/YYImageDemo/UIControl+YYAdd.h

@@ -0,0 +1,74 @@
+//
+//  UIControl+YYAdd.h
+//  YYKit <https://github.com/ibireme/YYKit>
+//
+//  Created by ibireme on 13/4/5.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+/**
+ Provides extensions for `UIControl`.
+ */
+@interface UIControl (YYAdd)
+
+/**
+ Removes all targets and actions for a particular event (or events)
+ from an internal dispatch table.
+ */
+- (void)removeAllTargets;
+
+/**
+ Adds or replaces a target and action for a particular event (or events)
+ to an internal dispatch table.
+ 
+ @param target         The target object—that is, the object to which the
+                       action message is sent. If this is nil, the responder
+                       chain is searched for an object willing to respond to the
+                       action message.
+ 
+ @param action         A selector identifying an action message. It cannot be NULL.
+ 
+ @param controlEvents  A bitmask specifying the control events for which the
+                       action message is sent.
+ */
+- (void)setTarget:(id)target action:(SEL)action forControlEvents:(UIControlEvents)controlEvents;
+
+/**
+ Adds a block for a particular event (or events) to an internal dispatch table.
+ It will cause a strong reference to @a block.
+ 
+ @param block          The block which is invoked then the action message is
+                       sent  (cannot be nil). The block is retained.
+ 
+ @param controlEvents  A bitmask specifying the control events for which the
+                       action message is sent.
+ */
+- (void)addBlockForControlEvents:(UIControlEvents)controlEvents block:(void (^)(id sender))block;
+
+/**
+ Adds or replaces a block for a particular event (or events) to an internal
+ dispatch table. It will cause a strong reference to @a block.
+ 
+ @param block          The block which is invoked then the action message is
+                       sent (cannot be nil). The block is retained.
+ 
+ @param controlEvents  A bitmask specifying the control events for which the
+                       action message is sent.
+ */
+- (void)setBlockForControlEvents:(UIControlEvents)controlEvents block:(void (^)(id sender))block;
+
+/**
+ Removes all blocks for a particular event (or events) from an internal
+ dispatch table.
+ 
+ @param controlEvents  A bitmask specifying the control events for which the
+                       action message is sent.
+ */
+- (void)removeAllBlocksForControlEvents:(UIControlEvents)controlEvents;
+
+@end

+ 108 - 0
Demo/YYImageDemo/UIControl+YYAdd.m

@@ -0,0 +1,108 @@
+//
+//  UIControl+YYAdd.m
+//  YYKit <https://github.com/ibireme/YYKit>
+//
+//  Created by ibireme on 13/4/5.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "UIControl+YYAdd.h"
+#import <objc/runtime.h>
+
+
+static const int block_key;
+
+@interface _YYUIControlBlockTarget : NSObject
+
+@property (nonatomic, copy) void (^block)(id sender);
+@property (nonatomic, assign) UIControlEvents events;
+
+- (id)initWithBlock:(void (^)(id sender))block events:(UIControlEvents)events;
+- (void)invoke:(id)sender;
+
+@end
+
+@implementation _YYUIControlBlockTarget
+
+- (id)initWithBlock:(void (^)(id sender))block events:(UIControlEvents)events {
+    self = [super init];
+    if (self) {
+        _block = [block copy];
+        _events = events;
+    }
+    return self;
+}
+
+- (void)invoke:(id)sender {
+    if (_block) _block(sender);
+}
+
+@end
+
+
+
+@implementation UIControl (YYAdd)
+
+- (void)removeAllTargets {
+    [[self allTargets] enumerateObjectsUsingBlock: ^(id object, BOOL *stop) {
+        [self   removeTarget:object
+                      action:NULL
+            forControlEvents:UIControlEventAllEvents];
+    }];
+}
+
+- (void)setTarget:(id)target action:(SEL)action forControlEvents:(UIControlEvents)controlEvents {
+    NSSet *targets = [self allTargets];
+    for (id currentTarget in targets) {
+        NSArray *actions = [self actionsForTarget:currentTarget forControlEvent:controlEvents];
+        for (NSString *currentAction in actions) {
+            [self   removeTarget:currentTarget action:NSSelectorFromString(currentAction)
+                forControlEvents:controlEvents];
+        }
+    }
+    [self addTarget:target action:action forControlEvents:controlEvents];
+}
+
+- (void)addBlockForControlEvents:(UIControlEvents)controlEvents
+                           block:(void (^)(id sender))block {
+    _YYUIControlBlockTarget *target = [[_YYUIControlBlockTarget alloc]
+                                       initWithBlock:block events:controlEvents];
+    [self addTarget:target action:@selector(invoke:) forControlEvents:controlEvents];
+    NSMutableArray *targets = [self _yy_allUIControlBlockTargets];
+    [targets addObject:target];
+}
+
+- (void)setBlockForControlEvents:(UIControlEvents)controlEvents
+                           block:(void (^)(id sender))block {
+    [self removeAllBlocksForControlEvents:controlEvents];
+    [self addBlockForControlEvents:controlEvents block:block];
+}
+
+- (void)removeAllBlocksForControlEvents:(UIControlEvents)controlEvents {
+    NSMutableArray *targets = [self _yy_allUIControlBlockTargets];
+    NSMutableArray *removes = [NSMutableArray array];
+    [targets enumerateObjectsUsingBlock: ^(id obj, NSUInteger idx, BOOL *stop) {
+        _YYUIControlBlockTarget *target = (_YYUIControlBlockTarget *)obj;
+        if (target.events == controlEvents) {
+            [removes addObject:target];
+            [self   removeTarget:target
+                          action:@selector(invoke:)
+                forControlEvents:controlEvents];
+        }
+    }];
+    [targets removeObjectsInArray:removes];
+}
+
+- (NSMutableArray *)_yy_allUIControlBlockTargets {
+    NSMutableArray *targets = objc_getAssociatedObject(self, &block_key);
+    if (!targets) {
+        targets = [NSMutableArray array];
+        objc_setAssociatedObject(self, &block_key, targets, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return targets;
+}
+
+@end

+ 43 - 0
Demo/YYImageDemo/UIGestureRecognizer+YYAdd.h

@@ -0,0 +1,43 @@
+//
+//  UIGestureRecognizer+YYAdd.h
+//  YYKit <https://github.com/ibireme/YYKit>
+//
+//  Created by ibireme on 13/10/13.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+/**
+ Provides extensions for `UIGestureRecognizer`.
+ */
+@interface UIGestureRecognizer (YYAdd)
+
+/**
+ Initializes an allocated gesture-recognizer object with a action block.
+ 
+ @param block  An action block that to handle the gesture recognized by the 
+               receiver. nil is invalid. It is retained by the gesture.
+ 
+ @return An initialized instance of a concrete UIGestureRecognizer subclass or 
+         nil if an error occurred in the attempt to initialize the object.
+ */
+- (instancetype)initWithActionBlock:(void (^)(id sender))block;
+
+/**
+ Adds an action block to a gesture-recognizer object. It is retained by the 
+ gesture.
+ 
+ @param block A block invoked by the action message. nil is not a valid value.
+ */
+- (void)addActionBlock:(void (^)(id sender))block;
+
+/**
+ Remove all action blocks.
+ */
+- (void)removeAllActionBlocks;
+
+@end

+ 77 - 0
Demo/YYImageDemo/UIGestureRecognizer+YYAdd.m

@@ -0,0 +1,77 @@
+//
+//  UIGestureRecognizer+YYAdd.m
+//  YYKit <https://github.com/ibireme/YYKit>
+//
+//  Created by ibireme on 13/10/13.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "UIGestureRecognizer+YYAdd.h"
+#import <objc/runtime.h>
+
+static const int block_key;
+
+@interface _YYUIGestureRecognizerBlockTarget : NSObject
+
+@property (nonatomic, copy) void (^block)(id sender);
+
+- (id)initWithBlock:(void (^)(id sender))block;
+- (void)invoke:(id)sender;
+
+@end
+
+@implementation _YYUIGestureRecognizerBlockTarget
+
+- (id)initWithBlock:(void (^)(id sender))block{
+    self = [super init];
+    if (self) {
+        _block = [block copy];
+    }
+    return self;
+}
+
+- (void)invoke:(id)sender {
+    if (_block) _block(sender);
+}
+
+@end
+
+
+
+
+@implementation UIGestureRecognizer (YYAdd)
+
+- (instancetype)initWithActionBlock:(void (^)(id sender))block {
+    self = [self init];
+    [self addActionBlock:block];
+    return self;
+}
+
+- (void)addActionBlock:(void (^)(id sender))block {
+    _YYUIGestureRecognizerBlockTarget *target = [[_YYUIGestureRecognizerBlockTarget alloc] initWithBlock:block];
+    [self addTarget:target action:@selector(invoke:)];
+    NSMutableArray *targets = [self _yy_allUIGestureRecognizerBlockTargets];
+    [targets addObject:target];
+}
+
+- (void)removeAllActionBlocks{
+    NSMutableArray *targets = [self _yy_allUIGestureRecognizerBlockTargets];
+    [targets enumerateObjectsUsingBlock:^(id target, NSUInteger idx, BOOL *stop) {
+        [self removeTarget:target action:@selector(invoke:)];
+    }];
+    [targets removeAllObjects];
+}
+
+- (NSMutableArray *)_yy_allUIGestureRecognizerBlockTargets {
+    NSMutableArray *targets = objc_getAssociatedObject(self, &block_key);
+    if (!targets) {
+        targets = [NSMutableArray array];
+        objc_setAssociatedObject(self, &block_key, targets, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return targets;
+}
+
+@end

+ 51 - 0
Demo/YYImageDemo/UIView+YYAdd.h

@@ -0,0 +1,51 @@
+//
+//  UIView+YYAdd.h
+//  YYCategories <https://github.com/ibireme/YYCategories>
+//
+//  Created by ibireme on 13/4/3.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+/**
+ Provides extensions for `UIView`.
+ */
+@interface UIView (YYAdd)
+
+/**
+ Shortcut to set the view.layer's shadow
+ 
+ @param color  Shadow Color
+ @param offset Shadow offset
+ @param radius Shadow radius
+ */
+- (void)setLayerShadow:(UIColor*)color offset:(CGSize)offset radius:(CGFloat)radius;
+
+/**
+ Remove all subviews.
+ 
+ @warning Never call this method inside your view's drawRect: method.
+ */
+- (void)removeAllSubviews;
+
+/**
+ Returns the view's view controller (may be nil).
+ */
+@property (nonatomic, readonly) UIViewController *viewController;
+
+@property (nonatomic) CGFloat left;        ///< Shortcut for frame.origin.x.
+@property (nonatomic) CGFloat top;         ///< Shortcut for frame.origin.y
+@property (nonatomic) CGFloat right;       ///< Shortcut for frame.origin.x + frame.size.width
+@property (nonatomic) CGFloat bottom;      ///< Shortcut for frame.origin.y + frame.size.height
+@property (nonatomic) CGFloat width;       ///< Shortcut for frame.size.width.
+@property (nonatomic) CGFloat height;      ///< Shortcut for frame.size.height.
+@property (nonatomic) CGFloat centerX;     ///< Shortcut for center.x
+@property (nonatomic) CGFloat centerY;     ///< Shortcut for center.y
+@property (nonatomic) CGPoint origin;      ///< Shortcut for frame.origin.
+@property (nonatomic) CGSize  size;        ///< Shortcut for frame.size.
+
+@end

+ 139 - 0
Demo/YYImageDemo/UIView+YYAdd.m

@@ -0,0 +1,139 @@
+//
+//  UIView+YYAdd.m
+//  YYCategories <https://github.com/ibireme/YYCategories>
+//
+//  Created by ibireme on 13/4/3.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "UIView+YYAdd.h"
+#import <QuartzCore/QuartzCore.h>
+
+@implementation UIView (YYAdd)
+
+- (void)setLayerShadow:(UIColor*)color offset:(CGSize)offset radius:(CGFloat)radius {
+    self.layer.shadowColor = color.CGColor;
+    self.layer.shadowOffset = offset;
+    self.layer.shadowRadius = radius;
+    self.layer.shadowOpacity = 1;
+    self.layer.shouldRasterize = YES;
+    self.layer.rasterizationScale = [UIScreen mainScreen].scale;
+}
+
+- (void)removeAllSubviews {
+    //[self.subviews makeObjectsPerformSelector:@selector(removeFromSuperview)];
+    while (self.subviews.count) {
+        [self.subviews.lastObject removeFromSuperview];
+    }
+}
+
+- (UIViewController *)viewController {
+    for (UIView *view = self; view; view = view.superview) {
+        UIResponder *nextResponder = [view nextResponder];
+        if ([nextResponder isKindOfClass:[UIViewController class]]) {
+            return (UIViewController *)nextResponder;
+        }
+    }
+    return nil;
+}
+
+- (CGFloat)left {
+    return self.frame.origin.x;
+}
+
+- (void)setLeft:(CGFloat)x {
+    CGRect frame = self.frame;
+    frame.origin.x = x;
+    self.frame = frame;
+}
+
+- (CGFloat)top {
+    return self.frame.origin.y;
+}
+
+- (void)setTop:(CGFloat)y {
+    CGRect frame = self.frame;
+    frame.origin.y = y;
+    self.frame = frame;
+}
+
+- (CGFloat)right {
+    return self.frame.origin.x + self.frame.size.width;
+}
+
+- (void)setRight:(CGFloat)right {
+    CGRect frame = self.frame;
+    frame.origin.x = right - frame.size.width;
+    self.frame = frame;
+}
+
+- (CGFloat)bottom {
+    return self.frame.origin.y + self.frame.size.height;
+}
+
+- (void)setBottom:(CGFloat)bottom {
+    CGRect frame = self.frame;
+    frame.origin.y = bottom - frame.size.height;
+    self.frame = frame;
+}
+
+- (CGFloat)width {
+    return self.frame.size.width;
+}
+
+- (void)setWidth:(CGFloat)width {
+    CGRect frame = self.frame;
+    frame.size.width = width;
+    self.frame = frame;
+}
+
+- (CGFloat)height {
+    return self.frame.size.height;
+}
+
+- (void)setHeight:(CGFloat)height {
+    CGRect frame = self.frame;
+    frame.size.height = height;
+    self.frame = frame;
+}
+
+- (CGFloat)centerX {
+    return self.center.x;
+}
+
+- (void)setCenterX:(CGFloat)centerX {
+    self.center = CGPointMake(centerX, self.center.y);
+}
+
+- (CGFloat)centerY {
+    return self.center.y;
+}
+
+- (void)setCenterY:(CGFloat)centerY {
+    self.center = CGPointMake(self.center.x, centerY);
+}
+
+- (CGPoint)origin {
+    return self.frame.origin;
+}
+
+- (void)setOrigin:(CGPoint)origin {
+    CGRect frame = self.frame;
+    frame.origin = origin;
+    self.frame = frame;
+}
+
+- (CGSize)size {
+    return self.frame.size;
+}
+
+- (void)setSize:(CGSize)size {
+    CGRect frame = self.frame;
+    frame.size = size;
+    self.frame = frame;
+}
+
+@end

+ 15 - 0
Demo/YYImageDemo/ViewController.h

@@ -0,0 +1,15 @@
+//
+//  ViewController.h
+//  YYImageDemo
+//
+//  Created by ibireme on 15/10/16.
+//  Copyright © 2015年 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface ViewController : UINavigationController
+
+
+@end
+

+ 25 - 0
Demo/YYImageDemo/ViewController.m

@@ -0,0 +1,25 @@
+//
+//  ViewController.m
+//  YYImageDemo
+//
+//  Created by ibireme on 15/10/16.
+//  Copyright © 2015年 ibireme. All rights reserved.
+//
+
+#import "ViewController.h"
+#import "YYImageExample.h"
+
+@interface ViewController ()
+
+@end
+
+@implementation ViewController
+
+- (void)viewDidLoad {
+    [super viewDidLoad];
+    YYImageExample *vc = [YYImageExample new];
+    [self pushViewController:vc animated:NO];
+}
+
+
+@end

+ 42 - 0
Demo/YYImageDemo/YYBPGCoder.h

@@ -0,0 +1,42 @@
+//
+//  YYBPGCoder.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/13.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+#import "YYImage.h"
+
+/*
+ BPG image format:
+ http://bellard.org/bpg/
+ */
+
+/**
+ Decode BPG data
+ @param bpgData  BPG image data.
+ @param decodeForDisplay  YES: returns a premultiply BRGA format image, NO: returns an ARGB format image.
+ @return A new image, or NULL if an error occurs.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateWithBPGData(CFDataRef bpgData, BOOL decodeForDisplay);
+
+/**
+ Decode a frame from BPG image data, returns NULL if an error occurs.
+ @warning This method should only be used for benchmark.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateFrameWithBPGData(CFDataRef bpgData, NSUInteger frameIndex, BOOL decodeForDisplay);
+
+/**
+ Decode all frames in BPG image data, returns NULL if an error occurs.
+ @warning This method should only be used for benchmark.
+ */
+CG_EXTERN void YYCGImageDecodeAllFrameInBPGData(CFDataRef bpgData, BOOL decodeForDisplay);
+
+/**
+ Whether data is bpg.
+ */
+CG_EXTERN BOOL YYImageIsBPGData(CFDataRef data);
+
+

+ 272 - 0
Demo/YYImageDemo/YYBPGCoder.m

@@ -0,0 +1,272 @@
+//
+//  YYBPGCoder.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/13.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYBPGCoder.h"
+#import <ImageIO/ImageIO.h>
+#import <Accelerate/Accelerate.h>
+#import <bpg/libbpg.h>
+
+// Packs four byte values into a uint32 with c1 in the least significant byte,
+// i.e. the value a little-endian load of the byte sequence c1,c2,c3,c4 yields.
+#define YY_FOUR_CC(c1,c2,c3,c4) ((uint32_t)(((c4) << 24) | ((c3) << 16) | ((c2) << 8) | (c1)))
+
+/// Returns `size` rounded up to the nearest multiple of `alignment`.
+/// `alignment` must be non-zero.
+static inline size_t _YYImageByteAlign(size_t size, size_t alignment) {
+    return ((size + (alignment - 1)) / alignment) * alignment;
+}
+
+/**
+ A callback used in CGDataProviderCreateWithData() to release data.
+ The `info` pointer is expected to be the malloc'd buffer itself.
+ 
+ Example:
+ 
+ void *data = malloc(size);
+ CGDataProviderRef provider = CGDataProviderCreateWithData(data, data, size, _YYCGDataProviderReleaseDataCallback);
+ */
+static void _YYCGDataProviderReleaseDataCallback(void *info, const void *data, size_t size) {
+    free(info);
+}
+
+/// Decodes the first image in `bpgData` into a newly created CGImage.
+/// Pipeline: libbpg decode -> per-row RGBA copy into a 32-byte-row-aligned
+/// buffer -> optional premultiply + RGBA->BGRA channel permute (vImage) when
+/// decoding for display. Returns NULL on any failure; on success the caller
+/// owns (and must release) the returned image. The pixel buffer's lifetime is
+/// tied to the data provider via _YYCGDataProviderReleaseDataCallback.
+CGImageRef YYCGImageCreateWithBPGData(CFDataRef bpgData, BOOL decodeForDisplay) {
+    BPGDecoderContext *decoderContext = NULL;
+    BPGImageInfo imageInfo = {0};
+    size_t width, height, lineSize, stride, size;
+    uint8_t *rgbaLine = NULL, *rgbaBuffer = NULL;
+    CGDataProviderRef dataProvider = NULL;
+    CGImageRef cgImage = NULL;
+    CGBitmapInfo bitmapInfo;
+    
+    if (!bpgData || CFDataGetLength(bpgData) == 0) return NULL;
+    decoderContext = bpg_decoder_open();
+    if (!decoderContext) return NULL;
+    if (bpg_decoder_decode(decoderContext, CFDataGetBytePtr(bpgData), (int)CFDataGetLength(bpgData)) < 0) goto fail;
+    if (bpg_decoder_get_info(decoderContext, &imageInfo) < 0) goto fail;
+    
+    width = imageInfo.width;
+    height = imageInfo.height;
+    lineSize = 4 * width; // 4 bytes per pixel (RGBA8888)
+    stride = _YYImageByteAlign(lineSize, 32); // 32-byte row alignment
+    size = stride * height;
+    
+    if (width == 0 || height == 0) goto fail;
+    rgbaLine = malloc(lineSize);
+    if (!rgbaLine) goto fail;
+    rgbaBuffer = malloc(size);
+    if (!rgbaBuffer) goto fail;
+    if (bpg_decoder_start(decoderContext, BPG_OUTPUT_FORMAT_RGBA32) < 0) goto fail;
+    
+    // libbpg emits one row at a time; copy each row into its aligned slot.
+    for (int y = 0; y < height; y++) {
+        if (bpg_decoder_get_line(decoderContext, rgbaLine) < 0) goto fail;
+        memcpy(rgbaBuffer + (y * stride), rgbaLine, lineSize);
+    }
+    free(rgbaLine);
+    rgbaLine = NULL;
+    bpg_decoder_close(decoderContext);
+    decoderContext = NULL;
+    
+    if (decodeForDisplay) {
+        // In-place vImage conversion: premultiply alpha, then swap R and B.
+        vImage_Buffer src;
+        src.data = rgbaBuffer;
+        src.width = width;
+        src.height = height;
+        src.rowBytes = stride;
+        vImage_Error error;
+        
+        // premultiply RGBA
+        error = vImagePremultiplyData_RGBA8888(&src, &src, kvImageNoFlags);
+        if (error != kvImageNoError) goto fail;
+        
+        // convert to BGRA
+        uint8_t map[4] = {2,1,0,3};
+        error = vImagePermuteChannels_ARGB8888(&src, &src, map, kvImageNoFlags);
+        if (error != kvImageNoError) goto fail;
+        bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host;
+    } else {
+        bitmapInfo = kCGImageAlphaLast | kCGBitmapByteOrderDefault; // RGBA as decoded
+    }
+    
+    dataProvider = CGDataProviderCreateWithData(rgbaBuffer, rgbaBuffer, size, _YYCGDataProviderReleaseDataCallback);
+    if (!dataProvider) goto fail;
+    rgbaBuffer = NULL; // hold by provider
+    cgImage = CGImageCreate(width, height, 8, 32, stride, YYCGColorSpaceGetDeviceRGB(),
+                            bitmapInfo, dataProvider, NULL, NO,
+                            kCGRenderingIntentDefault);
+    
+    CGDataProviderRelease(dataProvider);
+    return cgImage;
+    
+fail:
+    // Single cleanup point for every error path above.
+    if (decoderContext) bpg_decoder_close(decoderContext);
+    if (rgbaLine) free(rgbaLine);
+    if (rgbaBuffer) free(rgbaBuffer);
+    return NULL;
+}
+
+
+/// Decodes frame `frameIndex` (zero-based) of an animated BPG into a new
+/// CGImage, using the same pipeline as YYCGImageCreateWithBPGData.
+/// Returns NULL on any failure; the caller owns the returned image.
+/// NOTE(review): frame advance relies on calling bpg_decoder_start once per
+/// frame up to frameIndex; no bound check against the animation's frame
+/// count is performed — confirm against the libbpg animation API.
+CGImageRef YYCGImageCreateFrameWithBPGData(CFDataRef bpgData, NSUInteger frameIndex, BOOL decodeForDisplay) {
+    BPGDecoderContext *decoderContext = NULL;
+    BPGImageInfo imageInfo = {0};
+    size_t width, height, lineSize, stride, size;
+    uint8_t *rgbaLine = NULL, *rgbaBuffer = NULL;
+    CGDataProviderRef dataProvider = NULL;
+    CGImageRef cgImage = NULL;
+    CGBitmapInfo bitmapInfo;
+    
+    if (!bpgData || CFDataGetLength(bpgData) == 0) return NULL;
+    decoderContext = bpg_decoder_open();
+    if (!decoderContext) return NULL;
+    if (bpg_decoder_decode(decoderContext, CFDataGetBytePtr(bpgData), (int)CFDataGetLength(bpgData)) < 0) goto fail;
+    if (bpg_decoder_get_info(decoderContext, &imageInfo) < 0) goto fail;
+    
+    width = imageInfo.width;
+    height = imageInfo.height;
+    lineSize = 4 * width; // 4 bytes per pixel (RGBA8888)
+    stride = _YYImageByteAlign(lineSize, 32); // 32-byte row alignment
+    size = stride * height;
+    
+    if (width == 0 || height == 0) goto fail;
+    rgbaLine = malloc(lineSize);
+    if (!rgbaLine) goto fail;
+    rgbaBuffer = malloc(size);
+    if (!rgbaBuffer) goto fail;
+    
+    // Advance the decoder to the requested frame (one start call per frame).
+    for (NSUInteger i = 0; i <= frameIndex; i++) {
+        if (bpg_decoder_start(decoderContext, BPG_OUTPUT_FORMAT_RGBA32) < 0) goto fail;
+    }
+    
+    for (int y = 0; y < height; y++) {
+        if (bpg_decoder_get_line(decoderContext, rgbaLine) < 0) goto fail;
+        memcpy(rgbaBuffer + (y * stride), rgbaLine, lineSize);
+    }
+    free(rgbaLine);
+    rgbaLine = NULL;
+    bpg_decoder_close(decoderContext);
+    decoderContext = NULL;
+    
+    if (decodeForDisplay) {
+        vImage_Buffer src;
+        src.data = rgbaBuffer;
+        src.width = width;
+        src.height = height;
+        src.rowBytes = stride;
+        vImage_Error error;
+        
+        // premultiply RGBA
+        error = vImagePremultiplyData_RGBA8888(&src, &src, kvImageNoFlags);
+        if (error != kvImageNoError) goto fail;
+        
+        // convert to BGRA
+        uint8_t map[4] = {2,1,0,3};
+        error = vImagePermuteChannels_ARGB8888(&src, &src, map, kvImageNoFlags);
+        if (error != kvImageNoError) goto fail;
+        bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host;
+    } else {
+        bitmapInfo = kCGImageAlphaLast | kCGBitmapByteOrderDefault;
+    }
+    
+    dataProvider = CGDataProviderCreateWithData(rgbaBuffer, rgbaBuffer, size, _YYCGDataProviderReleaseDataCallback);
+    if (!dataProvider) goto fail;
+    rgbaBuffer = NULL; // hold by provider
+    cgImage = CGImageCreate(width, height, 8, 32, stride, YYCGColorSpaceGetDeviceRGB(),
+                            bitmapInfo, dataProvider, NULL, NO,
+                            kCGRenderingIntentDefault);
+    
+    CGDataProviderRelease(dataProvider);
+    return cgImage;
+    
+fail:
+    if (decoderContext) bpg_decoder_close(decoderContext);
+    if (rgbaLine) free(rgbaLine);
+    if (rgbaBuffer) free(rgbaBuffer);
+    return NULL;
+}
+
+
+/// Benchmark helper: decodes every frame of the BPG data in sequence and
+/// immediately releases each resulting CGImage. The infinite loop exits via
+/// `goto end` when bpg_decoder_start fails (no more frames, or an error), so
+/// the `return;` after the loop is unreachable but kept for clarity.
+void YYCGImageDecodeAllFrameInBPGData(CFDataRef bpgData, BOOL decodeForDisplay) {
+    BPGDecoderContext *decoderContext = NULL;
+    BPGImageInfo imageInfo = {0};
+    size_t width, height, lineSize, stride, size;
+    uint8_t *rgbaLine = NULL, *rgbaBuffer = NULL;
+    CGDataProviderRef dataProvider = NULL;
+    CGImageRef cgImage = NULL;
+    CGBitmapInfo bitmapInfo;
+    
+    if (!bpgData || CFDataGetLength(bpgData) == 0) return;
+    decoderContext = bpg_decoder_open();
+    if (!decoderContext) return;
+    if (bpg_decoder_decode(decoderContext, CFDataGetBytePtr(bpgData), (int)CFDataGetLength(bpgData)) < 0) goto end;
+    if (bpg_decoder_get_info(decoderContext, &imageInfo) < 0) goto end;
+    
+    width = imageInfo.width;
+    height = imageInfo.height;
+    lineSize = 4 * width; // 4 bytes per pixel (RGBA8888)
+    stride = _YYImageByteAlign(lineSize, 32);
+    size = stride * height;
+    
+    
+    // One iteration per frame; fresh buffers each time (the previous
+    // iteration's buffer is owned and freed by its data provider/image).
+    for (;;) {
+        if (bpg_decoder_start(decoderContext, BPG_OUTPUT_FORMAT_RGBA32) < 0) goto end;
+        
+        if (width == 0 || height == 0) goto end;
+        rgbaLine = malloc(lineSize);
+        if (!rgbaLine) goto end;
+        rgbaBuffer = malloc(size);
+        if (!rgbaBuffer) goto end;
+        
+        for (int y = 0; y < height; y++) {
+            if (bpg_decoder_get_line(decoderContext, rgbaLine) < 0) goto end;
+            memcpy(rgbaBuffer + (y * stride), rgbaLine, lineSize);
+        }
+        free(rgbaLine);
+        rgbaLine = NULL;
+        
+        if (decodeForDisplay) {
+            vImage_Buffer src;
+            src.data = rgbaBuffer;
+            src.width = width;
+            src.height = height;
+            src.rowBytes = stride;
+            vImage_Error error;
+            
+            // premultiply RGBA
+            error = vImagePremultiplyData_RGBA8888(&src, &src, kvImageNoFlags);
+            if (error != kvImageNoError) goto end;
+            
+            // convert to BGRA
+            uint8_t map[4] = {2,1,0,3};
+            error = vImagePermuteChannels_ARGB8888(&src, &src, map, kvImageNoFlags);
+            if (error != kvImageNoError) goto end;
+            bitmapInfo = kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host;
+        } else {
+            bitmapInfo = kCGImageAlphaLast | kCGBitmapByteOrderDefault;
+        }
+        
+        dataProvider = CGDataProviderCreateWithData(rgbaBuffer, rgbaBuffer, size, _YYCGDataProviderReleaseDataCallback);
+        if (!dataProvider) goto end;
+        rgbaBuffer = NULL; // hold by provider
+        cgImage = CGImageCreate(width, height, 8, 32, stride, YYCGColorSpaceGetDeviceRGB(),
+                                bitmapInfo, dataProvider, NULL, NO,
+                                kCGRenderingIntentDefault);
+        
+        CGDataProviderRelease(dataProvider);
+        if (cgImage) CFRelease(cgImage); // frame discarded immediately
+    }
+    return; // unreachable: the for(;;) only exits via goto end
+    
+end:
+    if (decoderContext) bpg_decoder_close(decoderContext);
+    if (rgbaLine) free(rgbaLine);
+    if (rgbaBuffer) free(rgbaBuffer);
+    return;
+}
+
+/**
+ Whether data is in BPG format.
+ Compares the first four bytes against the BPG magic number 'B','P','G',0xFB.
+ Bytes are compared individually instead of via a `*(uint32_t *)` load: the
+ original type-punned read is a strict-aliasing violation, may be unaligned,
+ and is host-endian dependent. Behavior (magic match, NO for short data) is
+ unchanged on the little-endian hosts the original targeted.
+ */
+BOOL YYImageIsBPGData(CFDataRef data) {
+    if (!data || CFDataGetLength(data) < 8) return NO;
+    const uint8_t *bytes = CFDataGetBytePtr(data);
+    return bytes[0] == 'B' && bytes[1] == 'P' && bytes[2] == 'G' && bytes[3] == 0xFB;
+}

+ 13 - 0
Demo/YYImageDemo/YYImageBenchmark.h

@@ -0,0 +1,13 @@
+//
+//  YYImageBenchmark.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/10.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+/// A table of image codec benchmarks (ImageIO decode/encode, WebP, BPG,
+/// animated images). Results are printed to the Xcode console via printf.
+@interface YYImageBenchmark : UITableViewController
+
+@end

+ 785 - 0
Demo/YYImageDemo/YYImageBenchmark.m

@@ -0,0 +1,785 @@
+//
+//  YYImageProfileExample.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/10.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYImageBenchmark.h"
+#import "YYImage.h"
+#import "YYBPGCoder.h"
+#import "UIView+YYAdd.h"
+#import <ImageIO/ImageIO.h>
+#import <MobileCoreServices/MobileCoreServices.h>
+#import <QuartzCore/QuartzCore.h>
+
+
+/**
+ Profile time cost.
+ @param block     code to benchmark
+ @param complete  called synchronously with the measured wall-clock time (milliseconds)
+ 
+ Usage:
+ YYBenchmark(^{
+ // code
+ }, ^(double ms) {
+ NSLog(@"time cost: %.2f ms", ms);
+ });
+ 
+ */
+static inline void YYBenchmark(void (^block)(void), void (^complete)(double ms)) {
+     double begin, end, ms;
+     begin = CACurrentMediaTime();
+     block();
+     end = CACurrentMediaTime();
+     ms = (end - begin) * 1000.0;
+     complete(ms);
+}
+
+
+/*
+ Enable this value and run in simulator, the image will write to desktop.
+ Then you can view this image with preview.
+ */
+#define ENABLE_OUTPUT 0
+#define IMAGE_OUTPUT_DIR @"/Users/ibireme/Desktop/image_out/"
+
+
+
+
+// NOTE(review): category method on a framework class without a prefix
+// (e.g. yy_dataNamed:) risks silent collisions with other categories.
+@interface NSData(YYAdd)
+
+@end
+
+@implementation NSData(YYAdd)
+
+/// Loads a main-bundle resource by its full file name (extension included).
+/// Returns nil if the resource is missing.
++ (NSData *)dataNamed:(NSString *)name {
+    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:@""];
+    if (!path) return nil;
+    NSData *data = [NSData dataWithContentsOfFile:path];
+    return data;
+}
+
+@end
+
+
+@implementation YYImageBenchmark {
+    UIActivityIndicatorView *_indicator; // spinner shown while a benchmark runs
+    UIView *_hud;                        // dark rounded overlay hosting the spinner
+    NSMutableArray *_titles;             // row titles, parallel to _blocks
+    NSMutableArray *_blocks;             // row tap actions, parallel to _titles
+}
+
+// Builds the benchmark menu: each row title is paired with a block that
+// performs the matching benchmark selector on a background queue.
+- (void)viewDidLoad {
+    [super viewDidLoad];
+    [self initHUD];
+    _titles = [NSMutableArray new];
+    _blocks = [NSMutableArray new];
+    self.title = @"Benchmark (See Logs in Xcode)";
+    
+    [self addCell:@"ImageIO Image Decode" selector:@selector(runImageDecodeBenchmark)];
+    [self addCell:@"ImageIO Image Encode" selector:@selector(runImageEncodeBenchmark)];
+    [self addCell:@"WebP Encode and Decode (Slow)" selector:@selector(runWebPBenchmark)];
+    [self addCell:@"BPG Decode" selector:@selector(runBPGBenchmark)];
+    [self addCell:@"Animated Image Decode" selector:@selector(runAnimatedImageBenchmark)];
+    
+    [self.tableView reloadData];
+}
+
+// Registers one benchmark row. Tapping the row shows the HUD, performs `sel`
+// on a global queue, then hides the HUD on the main queue. `self` is captured
+// weakly so a stored block cannot retain this controller; if the controller
+// is gone, messaging the nil weak ref is a harmless no-op.
+- (void)addCell:(NSString *)title selector:(SEL)sel {
+    __weak typeof(self) _self = self;
+    void (^block)(void) = ^() {
+        if (![_self respondsToSelector:sel]) return;
+        
+        [_self startHUD];
+        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
+            [_self performSelector:sel];
+#pragma clang diagnostic pop
+            dispatch_async(dispatch_get_main_queue(), ^{
+                [_self stopHUD];
+            });
+        });
+    };
+    [_titles addObject:title];
+    [_blocks addObject:block];
+}
+
+// Non-memory cleanup only (ARC): detach the HUD in case it is still attached
+// to a window when this controller goes away.
+- (void)dealloc {
+    [_hud removeFromSuperview];
+}
+
+// Builds the reusable HUD (dark rounded panel with a spinner and a caption)
+// that is shown while a benchmark runs. Layout helpers (size/centerX/...)
+// come from the UIView+YYAdd category.
+- (void)initHUD {
+    _hud = [UIView new];
+    _hud.size = CGSizeMake(130, 80);
+    _hud.backgroundColor = [UIColor colorWithWhite:0.000 alpha:0.7];
+    _hud.clipsToBounds = YES;
+    _hud.layer.cornerRadius = 5;
+    
+    _indicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
+    _indicator.size = CGSizeMake(50, 50);
+    _indicator.centerX = _hud.width / 2;
+    _indicator.centerY = _hud.height / 2 - 9;
+    [_hud addSubview:_indicator];
+    
+    UILabel *label = [UILabel new];
+    label.textAlignment = NSTextAlignmentCenter;
+    label.size = CGSizeMake(_hud.width, 20);
+    label.text = @"See logs in Xcode";
+    label.font = [UIFont systemFontOfSize:12];
+    label.textColor = [UIColor whiteColor];
+    label.centerX = _hud.width / 2;
+    label.bottom = _hud.height - 8;
+    [_hud addSubview:label];
+}
+
+// Shows the HUD centered on the first application window and disables user
+// interaction while a benchmark runs on the background queue.
+- (void)startHUD {
+    UIWindow *window = [[UIApplication sharedApplication].windows firstObject];
+    _hud.center = CGPointMake(window.width / 2, window.height / 2);
+    [_indicator startAnimating];
+    
+    [window addSubview:_hud];
+    self.navigationController.view.userInteractionEnabled = NO;
+}
+
+/// Hides the benchmark HUD and re-enables user interaction.
+- (void)stopHUD {
+    self.navigationController.view.userInteractionEnabled = YES;
+    [_hud removeFromSuperview];
+    [_indicator stopAnimating];
+}
+
+// Runs the benchmark block stored for the tapped row.
+- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
+    [tableView deselectRowAtIndexPath:indexPath animated:YES];
+    ((void (^)(void))_blocks[indexPath.row])();
+}
+
+// One row per registered benchmark.
+- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+    return _titles.count;
+}
+
+/// Returns a plain cell showing the benchmark title for this row.
+- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
+    static NSString * const reuseID = @"YY";
+    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:reuseID];
+    if (cell == nil) {
+        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
+                                      reuseIdentifier:reuseID];
+    }
+    cell.textLabel.text = _titles[indexPath.row];
+    return cell;
+}
+
+#pragma mark - Benchmark
+
+/// Base names of the bundled test images.
+- (NSArray *)imageNames {
+    return @[ @"dribbble", @"lena" ];
+}
+
+/// Pixel sizes of the bundled test images (part of the file names).
+- (NSArray *)imageSizes {
+    return @[  @64, @128, @256, @512 ];
+}
+
+/// Encoder/source variants each test image was produced with
+/// (also part of the bundled file names).
+- (NSArray *)imageSources {
+    return @[ @"imageio", @"photoshop", @"imageoptim", @"pngcrush", @"tinypng", @"twitter", @"weibo", @"facebook" ];
+}
+
+/// UTIs of the formats exercised by the ImageIO encode benchmark.
+- (NSArray *)imageTypes {
+    return @[ (id)kUTTypeJPEG, (id)kUTTypeJPEG2000, (id)kUTTypeTIFF, (id)kUTTypeGIF, (id)kUTTypePNG, (id)kUTTypeBMP ];
+}
+
+/// Maps a format UTI to its conventional file extension, or nil for a nil or
+/// unknown type. The lookup table is built once via dispatch_once.
+- (NSString *)imageTypeGetExt:(id)type {
+    static NSDictionary *map;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        map = @{(id)kUTTypeJPEG : @"jpg",
+                (id)kUTTypeJPEG2000 : @"jp2",
+                (id)kUTTypeTIFF : @"tif",
+                (id)kUTTypeGIF : @"gif",
+                (id)kUTTypePNG : @"png",
+                (id)kUTTypeBMP : @"bmp"};
+    });
+    return type ? map[type] : nil;
+}
+
+/// Quality levels to sweep for a given format UTI: a full 1.0 -> 0 range for
+/// lossy formats (JPEG, JPEG 2000, WebP), a single 1.0 entry otherwise.
+- (NSArray *)imageTypeGetQuality:(NSString *)type {
+    BOOL hasQuality = [type isEqualToString:(id)kUTTypeJPEG] || [type isEqualToString:(id)kUTTypeJPEG2000] || [type isEqualToString:@"webp"];
+    return hasQuality ? @[@1.0, @0.95, @0.9, @0.85, @0.8, @0.75, @0.7, @0.6, @0.5, @0.4, @0.3, @0.2, @0.1, @0] : @[@1.0];
+}
+
+// Decode benchmark: for every bundled image variant (name x size x source x
+// png/jpg), decode 100 times via ImageIO + YYCGImageCreateDecodedCopy and
+// print the average per-decode time. Missing resources are skipped.
+// NOTE(review): CGImageSourceCreateWithData may return NULL for corrupt
+// data; the subsequent ImageIO calls assume it succeeded — confirm inputs.
+- (void)runImageDecodeBenchmark {
+    printf("==========================================\n");
+    printf("ImageIO Decode Benchmark\n");
+    printf("name    size type quality length decode_time\n");
+    
+    for (NSString *imageName in self.imageNames) {
+        for (NSNumber *imageSize in self.imageSizes) {
+            for (NSString *imageSource in self.imageSources) {
+                for (NSString *imageType in @[@"png", @"jpg"]) {
+                    @autoreleasepool {
+                        NSString *fileName = [NSString stringWithFormat:@"%@%@_%@",imageName, imageSize, imageSource];
+                        NSString *filePath = [[NSBundle mainBundle] pathForResource:fileName ofType:imageType];
+                        NSData *data = filePath ? [NSData dataWithContentsOfFile:filePath] : nil;
+                        if (!data) continue;
+                        int count = 100; // iterations averaged per sample
+                        YYBenchmark(^{
+                            for (int i = 0; i < count; i++) {
+                                CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFTypeRef)data, NULL);
+                                CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+                                CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+                                CFRelease(decoded);
+                                CFRelease(image);
+                                CFRelease(source);
+                            }
+                        }, ^(double ms) {
+                            printf("%8s %3d %3s %10s %6d %2.3f\n", imageName.UTF8String, imageSize.intValue, imageType.UTF8String, imageSource.UTF8String, (int)data.length, ms / count);
+                        });
+                        
+#if ENABLE_OUTPUT
+                        if ([UIDevice currentDevice].isSimulator) {
+                            NSString *outFilePath = [NSString stringWithFormat:@"%@%@.%@", IMAGE_OUTPUT_DIR, fileName, imageType];
+                            [data writeToFile:outFilePath atomically:YES];
+                        }
+#endif
+                    }
+                }
+            }
+        }
+    }
+    
+    printf("------------------------------------------\n\n");
+}
+
+// Encode benchmark: sweeps the ImageIO encoders (JPEG/JPEG2000/TIFF/GIF/PNG/
+// BMP) over each test image and quality level, printing encoded size plus
+// average encode and decode times. Encoding repeats until ~200ms has
+// accumulated so the per-iteration average is stable.
+// Fix: skip missing bundle resources (`if (!data) continue;`) — previously a
+// nil NSData was bridged into CGImageSourceCreateWithData and the resulting
+// NULLs fed into the rest of the ImageIO pipeline. This matches the guard
+// already present in runImageDecodeBenchmark.
+- (void)runImageEncodeBenchmark {
+    printf("==========================================\n");
+    printf("ImageIO Encode Benchmark\n");
+    printf("name    size type quality length encode decode\n");
+    
+    for (NSString *imageName in self.imageNames) {
+        for (NSNumber *imageSize in self.imageSizes) {
+            NSString *fileName = [NSString stringWithFormat:@"%@%@_imageio",imageName, imageSize];
+            NSString *filePath = [[NSBundle mainBundle] pathForResource:fileName ofType:@"png"];
+            NSData *data = filePath ? [NSData dataWithContentsOfFile:filePath] : nil;
+            if (!data) continue; // missing bundle resource
+            CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFTypeRef)data, NULL);
+            CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+            CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+            
+            for (NSString *uti in [self imageTypes]) {
+                for (NSNumber *quality in [self imageTypeGetQuality:uti]) {
+                    __block int encodeCount = 0;
+                    __block double encodeTime = 0;
+                    __block long length = 0;
+                    __block CFMutableDataRef outData = NULL;
+                    __block int decodeCount = 0;
+                    __block double decodeTime = 0;
+                    
+                    while (encodeTime < 200) { //200ms
+                        YYBenchmark(^{
+                            if (outData) CFRelease(outData);
+                            outData = CFDataCreateMutable(CFAllocatorGetDefault(), 0);
+                            CGImageDestinationRef dest = CGImageDestinationCreateWithData(outData, (CFStringRef)uti, 1, NULL);
+                            NSDictionary *options = @{(id)kCGImageDestinationLossyCompressionQuality : quality };
+                            CGImageDestinationAddImage(dest, decoded, (CFDictionaryRef)options);
+                            CGImageDestinationFinalize(dest);
+                            length = CFDataGetLength(outData);
+                            CFRelease(dest);
+                        }, ^(double ms) {
+                            encodeTime += ms;
+                            encodeCount += 1;
+                        });
+                    }
+                    
+#if ENABLE_OUTPUT
+                    if ([UIDevice currentDevice].isSimulator) {
+                        NSString *outFilePath = [NSString stringWithFormat:@"%@%@%@_%.2f.%@", IMAGE_OUTPUT_DIR, imageName, imageSize, quality.floatValue, [self imageTypeGetExt:uti]];
+                        [((__bridge NSData *)outData) writeToFile:outFilePath atomically:YES];
+                    }
+#endif
+                    
+                    // Decode the freshly encoded data back, averaged over 100 runs.
+                    decodeCount = 100;
+                    YYBenchmark(^{
+                        for (int i = 0; i < decodeCount; i++) {
+                            CGImageSourceRef source = CGImageSourceCreateWithData(outData, NULL);
+                            CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+                            CGImageRef decoded = YYCGImageCreateDecodedCopy(image, NO);
+                            CFRelease(decoded);
+                            CFRelease(image);
+                            CFRelease(source);
+                        }
+                    }, ^(double ms) {
+                        decodeTime = ms;
+                    });
+                    CFRelease(outData);
+                    
+                    printf("%8s %3d %3s  %.2f  %7d  %7.3f %7.3f\n",imageName.UTF8String, imageSize.intValue, [self imageTypeGetExt:uti].UTF8String, quality.floatValue, (int)length, encodeTime / encodeCount, decodeTime / decodeCount);
+                    
+                }
+            }
+            
+            CFRelease(decoded);
+            CFRelease(image);
+            CFRelease(source);
+        }
+    }
+    
+    printf("------------------------------------------\n\n");
+}
+
+// WebP benchmark: encodes each test image lossless/lossy at every quality and
+// method level, then decodes each result 100 times for the 8 combinations of
+// useThreads/bypassFiltering/noFancyUpsampling, printing per-decode averages.
+// Fix: skip missing bundle resources (`if (!data) continue;`) — previously a
+// nil NSData was bridged into CGImageSourceCreateWithData, feeding NULLs into
+// the rest of the pipeline. Matches the guard in runImageDecodeBenchmark.
+- (void)runWebPBenchmark {
+    printf("==========================================\n");
+    printf("WebP Benchmark\n");
+    printf("name size  type  quality method length encode   decode\n");
+
+    for (NSString *imageName in self.imageNames) {
+        for (NSNumber *imageSize in self.imageSizes) {
+            NSString *fileName = [NSString stringWithFormat:@"%@%@_imageio", imageName, imageSize];
+            NSString *filePath = [[NSBundle mainBundle] pathForResource:fileName ofType:@"png"];
+            NSData *data = filePath ? [NSData dataWithContentsOfFile:filePath] : nil;
+            if (!data) continue; // missing bundle resource
+            CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFTypeRef)data, NULL);
+            CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache : @(NO) });
+            CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+            
+            for (NSNumber *lossless in @[ @YES, @NO ]) {
+                for (NSNumber *q in [self imageTypeGetQuality:@"webp"]) {
+                    for (NSNumber *m in @[ @0, @1, @2, @3, @4, @5, @6 ]) {
+                        @autoreleasepool {
+                            __block int encodeCount = 0;
+                            __block double encodeTime = 0;
+                            __block long length = 0;
+                            __block CFDataRef webpData = NULL;
+                            int decodeCount = 100;
+                            double decodeTime[8] = {0};  // useThreads,bypassFiltering,noFancyUpsampling 0,0,0; 0,0,1; 0,1,0; 0,1,1; 1,0,0; 1,0,1; 1,1,0; 1,1,1
+                            
+                            while (encodeTime < 200) {  // 200ms
+                                YYBenchmark( ^{
+                                      if (webpData) CFRelease(webpData);
+                                      webpData = YYCGImageCreateEncodedWebPData(decoded, lossless.boolValue, q.floatValue, m.intValue, YYImagePresetDefault);
+                                      length = CFDataGetLength(webpData);
+                                    }, ^(double ms) {
+                                      encodeTime += ms;
+                                      encodeCount += 1;
+                                    });
+                            }
+#if ENABLE_OUTPUT
+                            if ([UIDevice currentDevice].isSimulator) {
+                                NSString *outFilePath = [NSString
+                                    stringWithFormat:@"%@%@%@_%@_q%.2f_m%d.webp", IMAGE_OUTPUT_DIR, imageName, imageSize,
+                                                     lossless.boolValue ? @"lossless" : @"lossy", q.floatValue, m.intValue];
+                                [((__bridge NSData *)webpData)writeToFile:outFilePath atomically:YES];
+                                
+                                CGImageRef image = YYCGImageCreateWithWebPData(webpData, NO, NO, NO, NO);
+                                NSData *pngData = UIImagePNGRepresentation([UIImage imageWithCGImage:image]);
+                                NSString *pngOutFilePath = [NSString
+                                                         stringWithFormat:@"%@%@%@_%@_q%.2f_m%d.webp.png", IMAGE_OUTPUT_DIR, imageName, imageSize,
+                                                         lossless.boolValue ? @"lossless" : @"lossy", q.floatValue, m.intValue];
+                                [pngData writeToFile:pngOutFilePath atomically:YES];
+                                CFRelease(image);
+                            }
+#endif
+
+                            // 8 decode-option combinations, indexed by bit pattern.
+                            for (NSNumber *useThreads in @[ @NO, @YES ]) {
+                                for (NSNumber *bypassFiltering in @[ @NO, @YES ]) {
+                                    for (NSNumber *noFancyUpsampling in @[ @NO, @YES ]) {
+                                        __block double time = 0;
+                                        YYBenchmark(^{
+                                              for (int i = 0; i < decodeCount; i++) {
+                                                  CGImageRef image = YYCGImageCreateWithWebPData(webpData, YES, useThreads.boolValue, bypassFiltering.boolValue,noFancyUpsampling.boolValue);
+                                                  CFRelease(image);
+                                              }
+                                            }, ^(double ms) {
+                                              time = ms;
+                                            });
+                                        decodeTime[useThreads.intValue << 2 | bypassFiltering.intValue << 1 |
+                                                   noFancyUpsampling.intValue] = time;
+                                    }
+                                }
+                            }
+                            if (webpData) CFRelease(webpData);
+                            
+                            printf("%8s %3d %.8s %.2f  %1d %7d %9.3f  %7.3f %7.3f %7.3f %7.3f %7.3f %7.3f %7.3f %7.3f\n",
+                                   imageName.UTF8String, imageSize.intValue, lossless.boolValue ? "lossless" : "lossy",
+                                   q.floatValue, m.intValue, (int)length, encodeTime / encodeCount, decodeTime[0] / decodeCount,
+                                   decodeTime[1] / decodeCount, decodeTime[2] / decodeCount, decodeTime[3] / decodeCount,
+                                   decodeTime[4] / decodeCount, decodeTime[5] / decodeCount, decodeTime[6] / decodeCount,
+                                   decodeTime[7] / decodeCount);
+                        }
+                    }
+                }
+            }
+
+            CFRelease(decoded);
+            CFRelease(image);
+            CFRelease(source);
+        }
+    }
+
+    printf("------------------------------------------\n\n");
+}
+
+// BPG decode benchmark: decodes each bundled .bpg variant 100 times via
+// YYCGImageCreateWithBPGData and prints the average per-decode time.
+// Fix: YYCGImageCreateWithBPGData returns NULL on any decode failure, and
+// CFRelease(NULL) crashes — guard the releases so a bad/undecodable file
+// does not take down the whole benchmark run.
+- (void)runBPGBenchmark {
+    printf("==========================================\n");
+    printf("BPG Decode Benchmark\n");
+    printf("name    size  quality length decode_time\n");
+    
+    for (NSString *imageName in self.imageNames) {
+        for (NSNumber *imageSize in self.imageSizes) {
+            for (NSString *quality in @[ @"lossless",@"q0",@"q5",@"q10",@"q15",@"q20",@"q25",@"q30",@"q35",@"q40",@"q45",@"q50"]) {
+                @autoreleasepool {
+                    NSString *fileName = [NSString stringWithFormat:@"%@%@_%@",imageName, imageSize, quality];
+                    NSString *filePath = [[NSBundle mainBundle] pathForResource:fileName ofType:@"bpg"];
+                    NSData *data = filePath ? [NSData dataWithContentsOfFile:filePath] : nil;
+                    if (!data) continue;
+                    int count = 100;
+                    YYBenchmark(^{
+                        for (int i = 0; i < count; i++) {
+                            CGImageRef image = YYCGImageCreateWithBPGData((__bridge CFDataRef)data, YES);
+                            if (image) CFRelease(image); // NULL on decode failure
+                        }
+                    }, ^(double ms) {
+                        printf("%8s %3d %8s %6d %2.3f\n", imageName.UTF8String, imageSize.intValue, quality.UTF8String, (int)data.length, ms / count);
+                    });
+                    
+                    
+#if ENABLE_OUTPUT
+                    if ([UIDevice currentDevice].isSimulator) {
+                        NSString *outFilePath = [NSString stringWithFormat:@"%@%@.bpg", IMAGE_OUTPUT_DIR,fileName];
+                        [data writeToFile:outFilePath atomically:YES];
+                        
+                        CGImageRef image = YYCGImageCreateWithBPGData((__bridge CFDataRef)data, YES);
+                        NSData *pngData = UIImagePNGRepresentation([UIImage imageWithCGImage:image]);
+                        if (image) CFRelease(image);
+                        NSString *pngOutFilePath = [NSString stringWithFormat:@"%@%@.bpg.png", IMAGE_OUTPUT_DIR,fileName];
+                        [pngData writeToFile:pngOutFilePath atomically:YES];
+                    }
+#endif
+                    
+                }
+            }
+        }
+    }
+    
+    printf("------------------------------------------\n\n");
+}
+
+/// Decodes the "ermilio" animation in every supported format (GIF / APNG /
+/// WebP / BPG) and prints file sizes plus per-frame decode timings to the
+/// console. ImageIO's APNG decoding requires iOS 8, so older systems bail out.
+- (void)runAnimatedImageBenchmark {
+    printf("==========================================\n");
+    printf("Animated Image Decode Benchmark\n");
+    if ([UIDevice currentDevice].systemVersion.floatValue < 8) {
+        printf("ImageIO's APNG requires iOS8 or later\n");
+        return;
+    }
+    
+    // The same animation encoded at different formats/qualities.
+    NSData *gif = [NSData dataNamed:@"ermilio.gif"];
+    NSData *apng = [NSData dataNamed:@"ermilio.png"];
+    
+    NSData *webp_q85 = [NSData dataNamed:@"ermilio_q85.webp"];
+    NSData *webp_q90 = [NSData dataNamed:@"ermilio_q90.webp"];
+    NSData *webp_lossless = [NSData dataNamed:@"ermilio_lossless.webp"];
+    
+    NSData *bpg_q15 = [NSData dataNamed:@"ermilio_q15.bpg"];
+    NSData *bpg_q20 = [NSData dataNamed:@"ermilio_q20.bpg"];
+    NSData *bpg_lossless = [NSData dataNamed:@"ermilio_lossless.bpg"];
+    
+    NSArray *datas = @[gif, apng, webp_q85, webp_q90, webp_lossless, bpg_q20, bpg_q15, bpg_lossless];
+    NSArray *names = @[@"gif", @"apng", @"webp_85", @"webp_90", @"webp_ll", @"bpg_20", @"bpg_15", @"bpg_ll"];
+    
+#if ENABLE_OUTPUT
+    if ([UIDevice currentDevice].isSimulator) {
+        // Write each source file plus a PNG snapshot of its cover frame to
+        // IMAGE_OUTPUT_DIR for manual inspection.
+        void (^dump)(NSData *, NSString *, NSString *, UIImage *) = ^(NSData *raw, NSString *rawName, NSString *pngName, UIImage *cover) {
+            NSData *pngData = UIImagePNGRepresentation(cover);
+            [pngData writeToFile:[NSString stringWithFormat:@"%@%@", IMAGE_OUTPUT_DIR, pngName] atomically:YES];
+            [raw writeToFile:[NSString stringWithFormat:@"%@%@", IMAGE_OUTPUT_DIR, rawName] atomically:YES];
+        };
+        @autoreleasepool {
+            dump(gif, @"ermilio.gif", @"ermilio.gif.png", [UIImage imageWithData:gif]);
+        }
+        @autoreleasepool {
+            dump(apng, @"ermilio.png", @"ermilio.apng.png", [UIImage imageWithData:apng]);
+        }
+        @autoreleasepool {
+            dump(webp_q85, @"ermilio_q85.webp", @"ermilio_q85.webp.png", [YYImageDecoder decodeImage:webp_q85 scale:1]);
+        }
+        @autoreleasepool {
+            dump(webp_q90, @"ermilio_q90.webp", @"ermilio_q90.webp.png", [YYImageDecoder decodeImage:webp_q90 scale:1]);
+        }
+        @autoreleasepool {
+            dump(webp_lossless, @"ermilio_lossless.webp", @"ermilio_lossless.webp.png", [YYImageDecoder decodeImage:webp_lossless scale:1]);
+        }
+        @autoreleasepool {
+            CGImageRef imageRef = YYCGImageCreateWithBPGData((__bridge CFDataRef)bpg_q15, NO);
+            dump(bpg_q15, @"ermilio_q15.bpg", @"ermilio_q15.bpg.png", [UIImage imageWithCGImage:imageRef]);
+            CFRelease(imageRef);
+        }
+        @autoreleasepool {
+            CGImageRef imageRef = YYCGImageCreateWithBPGData((__bridge CFDataRef)bpg_q20, NO);
+            dump(bpg_q20, @"ermilio_q20.bpg", @"ermilio_q20.bpg.png", [UIImage imageWithCGImage:imageRef]);
+            CFRelease(imageRef);
+        }
+        @autoreleasepool {
+            CGImageRef imageRef = YYCGImageCreateWithBPGData((__bridge CFDataRef)bpg_lossless, NO);
+            dump(bpg_lossless, @"ermilio_lossless.bpg", @"ermilio_lossless.bpg.png", [UIImage imageWithCGImage:imageRef]);
+            CFRelease(imageRef);
+        }
+    }
+#endif
+    
+    printf("------------------------------------------\n");
+    printf("image   length\n");
+    for (NSUInteger i = 0; i < names.count; i++) {
+        NSString *name = names[i];
+        NSData *data = datas[i];
+        printf("%7s %6d\n",name.UTF8String, (int)data.length);
+    }
+    printf("\n\n");
+    
+    int count = 20;     // repetitions per timing sample
+    int frame_num = 28; // frame count of the "ermilio" animation
+    
+    typedef void (^CoverDecodeBlock)(id src);
+    typedef void (^SingleFrameDecodeBlock)(id src, NSUInteger index);
+    typedef void (^AllFrameDecodeBlock)(id src, BOOL reverseOrder);
+    
+    /// Cover: gif/apng via ImageIO
+    CoverDecodeBlock imageioCoverDecoder = ^(NSData *data){
+        CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
+        CGImageRef image = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+        CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+        CFRelease(decoded);
+        CFRelease(image);
+        CFRelease(source);
+    };
+    
+    /// Cover: gif/apng/webp via YYImageDecoder
+    CoverDecodeBlock yyCoverDecoder = ^(NSData *data) {
+        @autoreleasepool {
+            YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:1];
+            [decoder frameAtIndex:0 decodeForDisplay:YES];
+        }
+    };
+    
+    /// Cover: webp
+    CoverDecodeBlock webpCoverDecoder = ^(NSData *data) {
+        CGImageRef image = YYCGImageCreateWithWebPData((__bridge CFDataRef)data, YES, NO, NO, NO);
+        CFRelease(image);
+    };
+    
+    /// Cover: bpg
+    CoverDecodeBlock bpgCoverDecoder = ^(NSData *data) {
+        CGImageRef image = YYCGImageCreateWithBPGData((__bridge CFDataRef)data, YES);
+        CFRelease(image);
+    };
+    
+    // Triplets of (label, data, decoder block).
+    NSArray *coverSrcs = @[@"gif       imageio", gif, imageioCoverDecoder,
+                           @"gif     yydecoder", gif, yyCoverDecoder,
+                           @"apng      imageio", apng, imageioCoverDecoder,
+                           @"apng    yydecoder", apng, yyCoverDecoder,
+                           @"webp_85   yyimage", webp_q85, webpCoverDecoder,
+                           @"webp_85 yydecoder", webp_q85, yyCoverDecoder,
+                           @"webp_90   yyimage", webp_q90, webpCoverDecoder,
+                           @"webp_90 yydecoder", webp_q90, yyCoverDecoder,
+                           @"webp_ll   yyimage", webp_lossless, webpCoverDecoder,
+                           @"webp_ll yydecoder", webp_lossless, yyCoverDecoder,
+                           @"bpg_20    yyimage", bpg_q20, bpgCoverDecoder,
+                           @"bpg_15    yyimage", bpg_q15, bpgCoverDecoder, // was bpg_q20 (copy-paste)
+                           @"bpg_ll    yyimage", bpg_lossless, bpgCoverDecoder,
+                           ];
+    
+    printf("------------------------------------------\n");
+    printf("First frame (cover) decode\n");
+    count = 20;
+    for (NSUInteger i = 0; i < coverSrcs.count / 3; i++) {
+        NSString *name = coverSrcs[i * 3];
+        id src = coverSrcs[i * 3 + 1];
+        CoverDecodeBlock block = coverSrcs[i * 3 + 2];
+        YYBenchmark(^{
+            for (int r = 0; r < count; r++) {
+                block(src);
+            }
+        }, ^(double ms) {
+            printf("%s %8.3f\n",name.UTF8String, ms / count);
+        });
+    }
+    printf("\n\n");
+    
+    /// Single frame: gif/apng via ImageIO (src is a CGImageSourceRef)
+    SingleFrameDecodeBlock imageioSingleFrameDecoder = ^(id src, NSUInteger index) {
+        CGImageSourceRef source = (__bridge CGImageSourceRef)src;
+        CGImageRef image = CGImageSourceCreateImageAtIndex(source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+        CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+        CFRelease(decoded);
+        CFRelease(image);
+    };
+    
+    /// Single frame: gif/apng/webp via YYImageDecoder
+    SingleFrameDecodeBlock yySingleFrameDecoder = ^(YYImageDecoder *decoder, NSUInteger index) {
+        @autoreleasepool {
+            UIImage *img = [decoder frameAtIndex:index decodeForDisplay:YES].image;
+            [img scale]; // touch the result so the decode is not thrown away
+        }
+    };
+    
+    // Triplets of (label, source key, decoder block). The source object is
+    // rebuilt inside the timed loop, so each sample includes decoder creation.
+    NSArray *singleSrcs = @[@"gif       imageio", @"gif src", imageioSingleFrameDecoder,
+                            @"gif     yydecoder", @"gif", yySingleFrameDecoder,
+                            @"apng      imageio", @"apng src", imageioSingleFrameDecoder,
+                            @"apng    yydecoder", @"apng", yySingleFrameDecoder,
+                            @"webp_85 yydecoder", @"webp85", yySingleFrameDecoder,
+                            @"webp_90 yydecoder", @"webp90", yySingleFrameDecoder,
+                            @"webp_ll yydecoder", @"webpll", yySingleFrameDecoder,
+                            ];
+    
+    printf("------------------------------------------\n");
+    printf("Single frame decode\n");
+    count = 5;
+    for (NSUInteger i = 0; i < singleSrcs.count / 3; i++) {
+        NSString *name = singleSrcs[i * 3];
+        NSString *srcStr = singleSrcs[i * 3 + 1];
+        SingleFrameDecodeBlock block = singleSrcs[i * 3 + 2];
+        
+        printf("%s ",name.UTF8String);
+        for (int f = 0; f < frame_num; f++) {
+            YYBenchmark(^{
+                for (int r = 0; r < count; r++) {
+                    id src = NULL;
+                    if ([srcStr isEqual:@"gif src"]) {
+                        src = CFBridgingRelease(CGImageSourceCreateWithData((__bridge CFDataRef)gif, NULL));
+                    } else if ([srcStr isEqual:@"gif"]) {
+                        src = [YYImageDecoder decoderWithData:gif scale:1];
+                    } else if ([srcStr isEqual:@"apng src"]) {
+                        src = CFBridgingRelease(CGImageSourceCreateWithData((__bridge CFDataRef)apng, NULL));
+                    } else if ([srcStr isEqual:@"apng"]) {
+                        src = [YYImageDecoder decoderWithData:apng scale:1];
+                    } else if ([srcStr isEqual:@"webp85"]) {
+                        src = [YYImageDecoder decoderWithData:webp_q85 scale:1];
+                    } else if ([srcStr isEqual:@"webp90"]) {
+                        src = [YYImageDecoder decoderWithData:webp_q90 scale:1];
+                    } else if ([srcStr isEqual:@"webpll"]) {
+                        src = [YYImageDecoder decoderWithData:webp_lossless scale:1];
+                    }
+                    block(src, f);
+                }
+            }, ^(double ms) {
+                printf("%8.3f ",ms / count);
+            });
+        }
+        printf("\n");
+    }
+    printf("\n\n");
+    
+    /// All frames: gif/apng via ImageIO.
+    AllFrameDecodeBlock imageioAllFrameDecoder = ^(NSData *data, BOOL reverseOrder){
+        // was: always created the source from `gif` regardless of the data
+        // argument, so the apng rows actually measured GIF decoding.
+        CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL);
+        if (reverseOrder) {
+            for (int i = frame_num - 1; i >= 0; i--) {
+                CGImageRef image = CGImageSourceCreateImageAtIndex(source, i, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+                CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+                CFRelease(decoded);
+                CFRelease(image);
+            }
+        } else {
+            for (int i = 0; i < frame_num; i++) {
+                CGImageRef image = CGImageSourceCreateImageAtIndex(source, i, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(NO)});
+                CGImageRef decoded = YYCGImageCreateDecodedCopy(image, YES);
+                CFRelease(decoded);
+                CFRelease(image);
+            }
+        }
+        CFRelease(source);
+    };
+    
+    /// All frames: gif/apng/webp via YYImageDecoder.
+    /// NOTE(review): both directions skip frame 0 (loops start/stop at 1),
+    /// unlike the ImageIO path above which decodes every frame — confirm intended.
+    AllFrameDecodeBlock yyAllFrameDecoder = ^(NSData *data, BOOL reverseOrder){
+        @autoreleasepool {
+            YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:1];
+            if (reverseOrder) {
+                for (int i = frame_num - 1; i > 0; i--) {
+                    [decoder frameAtIndex:i decodeForDisplay:YES];
+                }
+            } else {
+                for (int i = 1; i < frame_num; i++) {
+                    [decoder frameAtIndex:i decodeForDisplay:YES];
+                }
+            }
+        }
+    };
+    
+    /// All frames: bpg (the benchmark loop below skips reverse order for bpg).
+    AllFrameDecodeBlock bpgAllFrameDecoder = ^(NSData *data, BOOL reverseOrder){
+        @autoreleasepool {
+            YYCGImageDecodeAllFrameInBPGData((__bridge CFDataRef)data, YES);
+        }
+    };
+    
+    NSArray *allSrcs = @[@"gif       imageio", gif, imageioAllFrameDecoder,
+                         @"gif     yydecoder", gif, yyAllFrameDecoder,
+                         @"apng      imageio", apng, imageioAllFrameDecoder,
+                         @"apng    yydecoder", apng, yyAllFrameDecoder,
+                         @"webp_85 yydecoder", webp_q85, yyAllFrameDecoder,
+                         @"webp_90 yydecoder", webp_q90, yyAllFrameDecoder,
+                         @"webp_ll yydecoder", webp_lossless, yyAllFrameDecoder,
+                         @"bpg_20    yyimage", bpg_q20, bpgAllFrameDecoder,
+                         @"bpg_15    yyimage", bpg_q15, bpgAllFrameDecoder, // was bpg_q20 (copy-paste)
+                         @"bpg_ll    yyimage", bpg_lossless, bpgAllFrameDecoder,
+                         ];
+    
+    printf("------------------------------------------\n");
+    printf("All frame decode\n");
+    printf("type      decoder      asc     desc\n");
+    count = 5;
+    for (NSUInteger i = 0; i < allSrcs.count / 3; i++) {
+        NSString *name = allSrcs[i * 3];
+        id src = allSrcs[i * 3 + 1];
+        AllFrameDecodeBlock block = allSrcs[i * 3 + 2];
+        
+        printf("%s ",name.UTF8String);
+        for (NSNumber *rev in @[@NO, @YES]) {
+            if ([name hasPrefix:@"bpg"] && rev.boolValue) continue; // BPG: forward only
+            YYBenchmark(^{
+                for (int r = 0; r < count; r++) {
+                    block(src, rev.boolValue);
+                }
+            }, ^(double ms) {
+                printf("%8.3f ",ms / count);
+            });
+        }
+        printf("\n");
+    }
+    printf("\n\n");
+}
+
+@end

+ 13 - 0
Demo/YYImageDemo/YYImageDisplayExample.h

@@ -0,0 +1,13 @@
+//
+//  YYImageDisplayExample.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/9.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface YYImageDisplayExample : UIViewController
+
+@end

+ 143 - 0
Demo/YYImageDemo/YYImageDisplayExample.m

@@ -0,0 +1,143 @@
+//
+//  YYImageDisplayExample.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/9.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYImageDisplayExample.h"
+#import "YYImage.h"
+#import "UIView+YYAdd.h"
+#import "YYImageExampleHelper.h"
+#import <sys/sysctl.h>
+
+/// Private class extension: the controller is the gesture-recognizer delegate
+/// so the image views' tap/pan gestures can run alongside the scroll view's pan.
+@interface YYImageDisplayExample()<UIGestureRecognizerDelegate>
+
+@end
+@implementation YYImageDisplayExample {
+    UIScrollView *_scrollView;  // vertical container for all demo image views
+}
+
+/// Builds the demo page: an instruction label followed by one animated image
+/// per supported format (GIF / WebP / APNG / frame animation / sprite sheet).
+- (void)viewDidLoad {
+    [super viewDidLoad];
+    self.view.backgroundColor = [UIColor colorWithWhite:0.863 alpha:1.000];
+    
+    _scrollView = [UIScrollView new];
+    _scrollView.frame = self.view.bounds;
+    [self.view addSubview:_scrollView];
+    
+    UILabel *label = [UILabel new];
+    label.backgroundColor = [UIColor clearColor];
+    label.size = CGSizeMake(self.view.width, 60);
+    label.top = 20;
+    label.textAlignment = NSTextAlignmentCenter;
+    label.numberOfLines = 0;
+    label.text = @"Tap the image to pause/play\n Slide on the image to forward/rewind";
+    
+    // Warn simulator users and make room for the extra text.
+    if ([self isSimulator]) {
+        label.text = [@"Please run this app in device\nto get better performance.\n\n" stringByAppendingString:label.text];
+        label.height = 120;
+    }
+    
+    [_scrollView addSubview:label];
+    
+    [self addImageWithName:@"niconiconi" text:@"Animated GIF"];
+    [self addImageWithName:@"wall-e" text:@"Animated WebP"];
+    [self addImageWithName:@"pia" text:@"Animated PNG (APNG)"];
+    [self addFrameImageWithText:@"Frame Animation"];
+    [self addSpriteSheetImageWithText:@"Sprite Sheet Animation"];
+    
+    _scrollView.panGestureRecognizer.cancelsTouchesInView = YES;
+}
+
+/// Loads a YYImage by bundle name and appends it (with caption) to the page.
+- (void)addImageWithName:(NSString *)name text:(NSString *)text {
+    YYImage *image = [YYImage imageNamed:name];
+    [self addImage:image size:CGSizeZero text:text];
+}
+
+/// Builds a YYFrameImage from 8 emoticon PNG files (0.1s per frame,
+/// loopCount 0) and appends it to the page.
+- (void)addFrameImageWithText:(NSString *)text {
+    
+    NSString *basePath = [[NSBundle mainBundle].bundlePath stringByAppendingPathComponent:@"EmoticonWeibo.bundle/com.sina.default"];
+    NSMutableArray *paths = [NSMutableArray new];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_aini@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_baibai@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_chanzui@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_chijing@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_dahaqi@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_guzhang@3x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_haha@2x.png"]];
+    [paths addObject:[basePath stringByAppendingPathComponent:@"d_haixiu@3x.png"]];
+    
+    UIImage *image = [[YYFrameImage alloc] initWithImagePaths:paths oneFrameDuration:0.1 loopCount:0];
+    [self addImage:image size:CGSizeZero text:text];
+}
+
+/// Slices a Twitter sprite-sheet image into 8x12 frames (1/60s each,
+/// loopCount 0) and appends the resulting YYSpriteSheetImage to the page.
+- (void)addSpriteSheetImageWithText:(NSString *)text {
+    NSString *path = [[NSBundle mainBundle].bundlePath stringByAppendingPathComponent:@"ResourceTwitter.bundle/fav02l-sheet@2x.png"];
+    UIImage *sheet = [[UIImage alloc] initWithData:[NSData dataWithContentsOfFile:path] scale:2];
+    NSMutableArray *contentRects = [NSMutableArray new];
+    NSMutableArray *durations = [NSMutableArray new];
+    
+    
+    // 8 * 12 sprites in a single sheet image
+    CGSize size = CGSizeMake(sheet.size.width / 8, sheet.size.height / 12);
+    for (int j = 0; j < 12; j++) {
+        for (int i = 0; i < 8; i++) {
+            CGRect rect;
+            rect.size = size;
+            rect.origin.x = sheet.size.width / 8 * i;
+            rect.origin.y = sheet.size.height / 12 * j;
+            [contentRects addObject:[NSValue valueWithCGRect:rect]];
+            [durations addObject:@(1 / 60.0)];
+        }
+    }
+    YYSpriteSheetImage *sprite;
+    sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:sheet
+                                                     contentRects:contentRects
+                                                   frameDurations:durations
+                                                        loopCount:0];
+    [self addImage:sprite size:size text:text];
+}
+
+/// Appends an animated image view (with tap + pan controls) and a caption
+/// label below the previous subview, then grows the scroll view's contentSize.
+- (void)addImage:(UIImage *)image size:(CGSize)size text:(NSString *)text {
+    YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+    
+    if (size.width > 0 && size.height > 0) imageView.size = size;
+    imageView.centerX = self.view.width / 2;
+    imageView.top = [(UIView *)[_scrollView.subviews lastObject] bottom] + 30;
+    [_scrollView addSubview:imageView];
+    [YYImageExampleHelper addTapControlToAnimatedImageView:imageView];
+    [YYImageExampleHelper addPanControlToAnimatedImageView:imageView];
+    for (UIGestureRecognizer *g in imageView.gestureRecognizers) {
+        g.delegate = self;
+    }
+    
+    UILabel *imageLabel = [UILabel new];
+    imageLabel.backgroundColor = [UIColor clearColor];
+    imageLabel.frame = CGRectMake(0, 0, self.view.width, 20);
+    imageLabel.top = imageView.bottom + 10;
+    imageLabel.textAlignment = NSTextAlignmentCenter;
+    imageLabel.text = text;
+    [_scrollView addSubview:imageLabel];
+    
+    _scrollView.contentSize = CGSizeMake(self.view.width, imageLabel.bottom + 20);
+}
+
+/// Let image-view gestures recognize together with the scroll view's pan.
+- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer{
+    return YES;
+}
+
+
+
+/// YES when the "hw.machine" sysctl reports an x86 machine (i.e. the
+/// iOS simulator on Intel hosts — TODO confirm behavior on newer simulators).
+- (BOOL)isSimulator {
+    size_t size;
+    sysctlbyname("hw.machine", NULL, &size, NULL, 0);
+    char *machine = malloc(size);
+    sysctlbyname("hw.machine", machine, &size, NULL, 0);
+    NSString *model = [NSString stringWithUTF8String:machine];
+    free(machine);
+    return [model isEqualToString:@"x86_64"] || [model isEqualToString:@"i386"];
+}
+
+@end

+ 13 - 0
Demo/YYImageDemo/YYImageExample.h

@@ -0,0 +1,13 @@
+//
+//  YYImageExample.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/18.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface YYImageExample : UITableViewController
+
+@end

+ 65 - 0
Demo/YYImageDemo/YYImageExample.m

@@ -0,0 +1,65 @@
+//
+//  YYImageExample.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/18.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYImageExample.h"
+#import "YYImage.h"
+#import "UIView+YYAdd.h"
+#import <ImageIO/ImageIO.h>
+#import <WebP/demux.h>
+
+/// Private storage: parallel arrays of row titles and the class names of the
+/// view controllers they push.
+@interface YYImageExample()
+@property (nonatomic, strong) NSMutableArray *titles;
+@property (nonatomic, strong) NSMutableArray *classNames;
+@end
+
+@implementation YYImageExample
+
+/// Registers each demo page (title + view-controller class name) and
+/// reloads the table.
+- (void)viewDidLoad {
+    self.title = @"YYImage Demo";
+    [super viewDidLoad];
+    self.titles = [NSMutableArray array];
+    self.classNames = [NSMutableArray array];
+    [self addCell:@"Animated Image" class:@"YYImageDisplayExample"];
+    [self addCell:@"Progressive Image" class:@"YYImageProgressiveExample"];
+    //[self addCell:@"Web Image" class:@"YYWebImageExample"];
+    //[self addCell:@"Benchmark" class:@"YYImageBenchmark"];
+    [self.tableView reloadData];
+}
+
+/// Records one table row: its title plus the class name to instantiate.
+- (void)addCell:(NSString *)title class:(NSString *)className {
+    [self.titles addObject:title];
+    [self.classNames addObject:className];
+}
+
+#pragma mark - Table view data source
+
+- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+    return self.titles.count;
+}
+
+- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
+    static NSString * const kReuseID = @"YY";
+    UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:kReuseID];
+    if (cell == nil) {
+        cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:kReuseID];
+    }
+    cell.textLabel.text = self.titles[indexPath.row];
+    return cell;
+}
+
+/// Pushes the view controller registered for the tapped row, if its class
+/// can be resolved at runtime.
+- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
+    Class pageClass = NSClassFromString(self.classNames[indexPath.row]);
+    if (pageClass) {
+        UIViewController *page = [pageClass new];
+        page.title = self.titles[indexPath.row];
+        [self.navigationController pushViewController:page animated:YES];
+    }
+    [self.tableView deselectRowAtIndexPath:indexPath animated:YES];
+}
+
+@end

+ 21 - 0
Demo/YYImageDemo/YYImageExampleHelper.h

@@ -0,0 +1,21 @@
+//
+//  YYImageExampleUtils.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/20.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+#import "YYImage.h"
+
+@interface YYImageExampleHelper : NSObject
+
+/// Tap to play/pause
++ (void)addTapControlToAnimatedImageView:(YYAnimatedImageView *)view;
+
+/// Slide to forward/rewind
++ (void)addPanControlToAnimatedImageView:(YYAnimatedImageView *)view;
+
+@end
+

+ 71 - 0
Demo/YYImageDemo/YYImageExampleHelper.m

@@ -0,0 +1,71 @@
+//
+//  YYImageExampleUtils.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/20.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYImageExampleHelper.h"
+#import "YYImage.h"
+#import "UIView+YYAdd.h"
+#import "UIGestureRecognizer+YYAdd.h"
+#import <ImageIO/ImageIO.h>
+#import <Accelerate/Accelerate.h>
+//#import <bpg/libbpg.h>
+
+@implementation YYImageExampleHelper
+
+/// Adds a tap recognizer that toggles play/pause on the animated image view,
+/// with a small three-step "bounce" scale animation as feedback.
++ (void)addTapControlToAnimatedImageView:(YYAnimatedImageView *)view {
+    if (!view) return;
+    view.userInteractionEnabled = YES;
+    // Weak capture: the view owns the recognizer which owns this block,
+    // so a strong capture would create a retain cycle.
+    __weak typeof(view) weakView = view;
+    
+    UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithActionBlock:^(id sender) {
+        if ([weakView isAnimating]) [weakView stopAnimating];
+        else [weakView startAnimating];
+        
+        // add a "bounce" animation
+        UIViewAnimationOptions op = UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionAllowAnimatedContent | UIViewAnimationOptionBeginFromCurrentState;
+        [UIView animateWithDuration:0.1 delay:0 options:op animations:^{
+            [weakView.layer setValue:@(0.97) forKeyPath:@"transform.scale"];
+        } completion:^(BOOL finished) {
+            [UIView animateWithDuration:0.1 delay:0 options:op animations:^{
+                [weakView.layer setValue:@(1.008) forKeyPath:@"transform.scale"];
+            } completion:^(BOOL finished) {
+                [UIView animateWithDuration:0.1 delay:0 options:op animations:^{
+                    [weakView.layer setValue:@(1) forKeyPath:@"transform.scale"];
+                } completion:NULL];
+            }];
+        }];
+    }];
+    [view addGestureRecognizer:tap];
+}
+
+/// Adds a pan recognizer that scrubs through the animation: the horizontal
+/// touch position maps to a frame index, and playback resumes on release
+/// if the view was animating when the pan began.
++ (void)addPanControlToAnimatedImageView:(YYAnimatedImageView *)view {
+    if (!view) return;
+    view.userInteractionEnabled = YES;
+    __weak typeof(view) weakView = view;
+    // was uninitialized: __block locals are not zero-filled, so give it a
+    // defined value in case Ended/Cancelled ever fires without Began.
+    __block BOOL previousIsPlaying = NO;
+    
+    UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithActionBlock:^(id sender) {
+        UIImage<YYAnimatedImage> *image = (id)weakView.image;
+        if (![image conformsToProtocol:@protocol(YYAnimatedImage)]) return;
+        UIPanGestureRecognizer *gesture = sender;
+        CGPoint p = [gesture locationInView:gesture.view];
+        CGFloat progress = p.x / gesture.view.width;
+        if (gesture.state == UIGestureRecognizerStateBegan) {
+            previousIsPlaying = [weakView isAnimating];
+            [weakView stopAnimating];
+            weakView.currentAnimatedImageIndex = image.animatedImageFrameCount * progress;
+        } else if (gesture.state == UIGestureRecognizerStateEnded ||
+                   gesture.state == UIGestureRecognizerStateCancelled) {
+            if (previousIsPlaying) [weakView startAnimating];
+        } else {
+            weakView.currentAnimatedImageIndex = image.animatedImageFrameCount * progress;
+        }
+    }];
+    [view addGestureRecognizer:pan];
+}
+
+@end

+ 13 - 0
Demo/YYImageDemo/YYImageProgressiveExample.h

@@ -0,0 +1,13 @@
+//
+//  YYImageProgressiveExample.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/24.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface YYImageProgressiveExample : UIViewController
+
+@end

+ 117 - 0
Demo/YYImageDemo/YYImageProgressiveExample.m

@@ -0,0 +1,117 @@
+//
+//  YYImageProgressiveExample.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/8/24.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYImageProgressiveExample.h"
+#import "YYImage.h"
+#import "UIView+YYAdd.h"
+#import "UIControl+YYAdd.h"
+
+@interface NSData(YYAdd)
+@end
+@implementation NSData(YYAdd)
+/// Loads a main-bundle resource by its full file name (e.g. @"foo.gif").
+/// Returns nil when the resource is missing.
++ (NSData *)dataNamed:(NSString *)name {
+    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:@""];
+    return path ? [NSData dataWithContentsOfFile:path] : nil;
+}
+@end
+
+
+/// Private storage: the preview image view, the two option selectors
+/// (scan mode, file format) and the "bytes received" slider.
+@interface YYImageProgressiveExample () {
+    UIImageView *_imageView;
+    UISegmentedControl *_seg0;   // baseline vs progressive/interlaced
+    UISegmentedControl *_seg1;   // JPEG / PNG / GIF
+    UISlider *_slider0;          // fraction of the file fed to the decoder
+}
+
+@end
+
+@implementation YYImageProgressiveExample
+
+/// Lays out the preview image, the two segmented controls and the slider,
+/// and wires all three controls to re-run the progressive decode.
+- (void)viewDidLoad {
+    [super viewDidLoad];
+    self.view.backgroundColor = [UIColor whiteColor];
+    
+    _imageView = [UIImageView new];
+    _imageView.size = CGSizeMake(300, 300);
+    _imageView.backgroundColor = [UIColor colorWithWhite:0.790 alpha:1.000];
+    _imageView.centerX = self.view.width / 2;
+    
+    _seg0 = [[UISegmentedControl alloc] initWithItems:@[@"baseline",@"progressive/interlaced"]];
+    _seg0.selectedSegmentIndex = 0;
+    _seg0.size = CGSizeMake(_imageView.width, 30);
+    _seg0.centerX = self.view.width / 2;
+    
+    _seg1 = [[UISegmentedControl alloc] initWithItems:@[@"JPEG", @"PNG", @"GIF"]];
+    _seg1.frame = _seg0.frame;
+    _seg1.selectedSegmentIndex = 0;
+    
+    _slider0 = [UISlider new];
+    _slider0.width = _seg0.width;
+    [_slider0 sizeToFit];
+    _slider0.minimumValue = 0;
+    // Max is slightly above 1 so the top of the slider travel clamps to the
+    // full file (see the clamp in -changed).
+    _slider0.maximumValue = 1.05;
+    _slider0.value = 0;
+    _slider0.centerX = self.view.width / 2;
+    
+    // Stack the controls vertically below the image.
+    _imageView.top = 64 + 10;
+    _seg0.top = _imageView.bottom + 10;
+    _seg1.top = _seg0.bottom + 10;
+    _slider0.top = _seg1.bottom + 10;
+    
+    [self.view addSubview:_imageView];
+    [self.view addSubview:_seg0];
+    [self.view addSubview:_seg1];
+    [self.view addSubview:_slider0];
+    
+    // Weak capture: self owns the controls which own these blocks.
+    __weak typeof(self) _self = self;
+    [_seg0 addBlockForControlEvents:UIControlEventValueChanged block:^(id sender) {
+        [_self changed];
+    }];
+    [_seg1 addBlockForControlEvents:UIControlEventValueChanged block:^(id sender) {
+        [_self changed];
+    }];
+    [_slider0 addBlockForControlEvents:UIControlEventValueChanged block:^(id sender) {
+        [_self changed];
+    }];
+}
+
+/// Picks the fixture matching the current segment selections, truncates its
+/// data to the slider's progress, feeds that prefix to a progressive
+/// YYImageDecoder (final:NO), and shows whatever frame 0 decodes to so far.
+- (void)changed {
+    NSString *name = nil;
+    if (_seg0.selectedSegmentIndex == 0) {
+        if (_seg1.selectedSegmentIndex == 0) {
+            name = @"mew_baseline.jpg";
+        } else if (_seg1.selectedSegmentIndex == 1) {
+            name = @"mew_baseline.png";
+        } else {
+            name = @"mew_baseline.gif";
+        }
+    } else {
+        if (_seg1.selectedSegmentIndex == 0) {
+            name = @"mew_progressive.jpg";
+        } else if (_seg1.selectedSegmentIndex == 1) {
+            name = @"mew_interlaced.png";
+        } else {
+            name = @"mew_interlaced.gif";
+        }
+    }
+    
+    NSData *data = [NSData dataNamed:name];
+    float progress = _slider0.value;
+    if (progress > 1) progress = 1;  // slider max is 1.05; clamp to full file
+    NSData *subData = [data subdataWithRange:NSMakeRange(0, data.length * progress)];
+    
+    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:[UIScreen mainScreen].scale];
+    [decoder updateData:subData final:NO];
+    YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
+    
+    _imageView.image = frame.image;
+}
+
+@end

+ 13 - 0
Demo/YYImageDemo/YYWebImageExample.h

@@ -0,0 +1,13 @@
+//
+//  YYWebImageExample.h
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/19.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+
+@interface YYWebImageExample : UITableViewController
+
+@end

+ 232 - 0
Demo/YYImageDemo/YYWebImageExample.m

@@ -0,0 +1,232 @@
+//
+//  YYWebImageExample.m
+//  YYKitExample
+//
+//  Created by ibireme on 15/7/19.
+//  Copyright (c) 2015 ibireme. All rights reserved.
+//
+
+#import "YYWebImageExample.h"
+#import <YYKit/YYKit.h>
+
+#define kCellHeight ceil((kScreenWidth) * 3.0 / 4.0)
+
+@interface YYWebImageExampleCell : UITableViewCell
+@property (nonatomic, strong) YYAnimatedImageView *webImageView;
+@property (nonatomic, strong) UIActivityIndicatorView *indicator;
+@property (nonatomic, strong) CAShapeLayer *progressLayer;
+@property (nonatomic, strong) UILabel *label;
+@end
+
+@implementation YYWebImageExampleCell
+- (instancetype)initWithStyle:(UITableViewCellStyle)style reuseIdentifier:(NSString *)reuseIdentifier {
+    self = [super initWithStyle:style reuseIdentifier:reuseIdentifier];
+    self.backgroundColor = [UIColor clearColor];
+    self.contentView.backgroundColor = [UIColor clearColor];
+    self.size = CGSizeMake(kScreenWidth, kCellHeight);
+    self.contentView.size = self.size;
+    _webImageView = [YYAnimatedImageView new];
+    _webImageView.size = self.size;
+    _webImageView.clipsToBounds = YES;
+    _webImageView.contentMode = UIViewContentModeScaleAspectFill;
+    _webImageView.backgroundColor = [UIColor whiteColor];
+    [self.contentView addSubview:_webImageView];
+    
+    _indicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];
+    _indicator.center = CGPointMake(self.width / 2, self.height / 2);
+    _indicator.hidden = YES;
+    //[self.contentView addSubview:_indicator]; //use progress bar instead..
+    
+    _label = [UILabel new];
+    _label.size = self.size;
+    _label.textAlignment = NSTextAlignmentCenter;
+    _label.text = @"Load fail, tap to reload.";
+    _label.textColor = [UIColor colorWithWhite:0.7 alpha:1.0];
+    _label.hidden = YES;
+    _label.userInteractionEnabled = YES;
+    [self.contentView addSubview:_label];
+    
+    CGFloat lineHeight = 4;
+    _progressLayer = [CAShapeLayer layer];
+    _progressLayer.size = CGSizeMake(_webImageView.width, lineHeight);
+    UIBezierPath *path = [UIBezierPath bezierPath];
+    [path moveToPoint:CGPointMake(0, _progressLayer.height / 2)];
+    [path addLineToPoint:CGPointMake(_webImageView.width, _progressLayer.height / 2)];
+    _progressLayer.lineWidth = lineHeight;
+    _progressLayer.path = path.CGPath;
+    _progressLayer.strokeColor = [UIColor colorWithRed:0.000 green:0.640 blue:1.000 alpha:0.720].CGColor;
+    _progressLayer.lineCap = kCALineCapButt;
+    _progressLayer.strokeStart = 0;
+    _progressLayer.strokeEnd = 0;
+    [_webImageView.layer addSublayer:_progressLayer];
+    
+    __weak typeof(self) _self = self;
+    UITapGestureRecognizer *g = [[UITapGestureRecognizer alloc] initWithActionBlock:^(id sender) {
+        [_self setImageURL:_self.webImageView.imageURL];
+    }];
+    [_label addGestureRecognizer:g];
+    
+    return self;
+}
+
+- (void)setImageURL:(NSURL *)url {
+    _label.hidden = YES;
+    _indicator.hidden = NO;
+    [_indicator startAnimating];
+    __weak typeof(self) _self = self;
+    
+    [CATransaction begin];
+    [CATransaction setDisableActions: YES];
+    self.progressLayer.hidden = YES;
+    self.progressLayer.strokeEnd = 0;
+    [CATransaction commit];
+    
+    [_webImageView setImageWithURL:url
+                       placeholder:nil
+                           options:YYWebImageOptionProgressiveBlur | YYWebImageOptionShowNetworkActivity | YYWebImageOptionSetImageWithFadeAnimation
+                          progress:^(NSInteger receivedSize, NSInteger expectedSize) {
+                              if (expectedSize > 0 && receivedSize > 0) {
+                                  CGFloat progress = (CGFloat)receivedSize / expectedSize;
+                                  progress = progress < 0 ? 0 : progress > 1 ? 1 : progress;
+                                  if (_self.progressLayer.hidden) _self.progressLayer.hidden = NO;
+                                  _self.progressLayer.strokeEnd = progress;
+                              }
+                       } transform:nil
+                        completion:^(UIImage *image, NSURL *url, YYWebImageFromType from, YYWebImageStage stage, NSError *error) {
+                           if (stage == YYWebImageStageFinished) {
+                               _self.progressLayer.hidden = YES;
+                               [_self.indicator stopAnimating];
+                               _self.indicator.hidden = YES;
+                               if (!image) _self.label.hidden = NO;
+                           }
+                       }];
+}
+
+- (void)prepareForReuse {
+    //nothing
+}
+
+@end
+
+
+@implementation YYWebImageExample {
+    NSArray *_imageLinks;
+}
+
+- (void)viewDidLoad {
+    [super viewDidLoad];
+    self.tableView.separatorStyle = UITableViewCellSeparatorStyleNone;
+    self.view.backgroundColor = [UIColor whiteColor];
+    
+    UIBarButtonItem *button = [[UIBarButtonItem alloc] initWithTitle:@"Reload" style:UIBarButtonItemStylePlain target:self action:@selector(reload)];
+    self.navigationItem.rightBarButtonItem = button;
+    self.view.backgroundColor = [UIColor colorWithWhite:0.217 alpha:1.000];
+    
+    NSArray *links = @[
+        /*
+         You can add your image url here.
+         */
+        
+        // progressive jpeg
+        @"https://s-media-cache-ak0.pinimg.com/1200x/2e/0c/c5/2e0cc5d86e7b7cd42af225c29f21c37f.jpg",
+        
+        // animated gif: http://cinemagraphs.com/
+        @"http://i.imgur.com/uoBwCLj.gif",
+        @"http://i.imgur.com/8KHKhxI.gif",
+        @"http://i.imgur.com/WXJaqof.gif",
+        
+        // animated gif: https://dribbble.com/markpear
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1780193/dots18.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1809343/dots17.1.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1845612/dots22.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1820014/big-hero-6.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1819006/dots11.0.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/345826/screenshots/1799885/dots21.gif",
+        
+        // animated gif: https://dribbble.com/jonadinges
+        @"https://d13yacurqjgara.cloudfront.net/users/288987/screenshots/2025999/batman-beyond-the-rain.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/288987/screenshots/1855350/r_nin.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/288987/screenshots/1963497/way-back-home.gif",
+        @"https://d13yacurqjgara.cloudfront.net/users/288987/screenshots/1913272/depressed-slurp-cycle.gif",
+        
+        // jpg: https://dribbble.com/snootyfox
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/2047158/beerhenge.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/2016158/avalanche.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1839353/pilsner.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1833469/porter.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1521183/farmers.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1391053/tents.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1399501/imperial_beer.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1488711/fishin.jpg",
+        @"https://d13yacurqjgara.cloudfront.net/users/26059/screenshots/1466318/getaway.jpg",
+        
+        // animated webp and apng: http://littlesvr.ca/apng/gif_apng_webp.html
+        @"http://littlesvr.ca/apng/images/BladeRunner.png",
+        @"http://littlesvr.ca/apng/images/Contact.webp",
+    ];
+    
+    _imageLinks = links;
+    [self.tableView reloadData];
+    [self scrollViewDidScroll:self.tableView];
+}
+
+- (void)viewDidAppear:(BOOL)animated {
+    [super viewDidAppear:animated];
+    if (kiOS7Later) {
+        self.navigationController.navigationBar.barStyle = UIBarStyleBlack;
+        self.navigationController.navigationBar.tintColor = [UIColor whiteColor];
+    }
+    [UIApplication sharedApplication].statusBarStyle = UIStatusBarStyleLightContent;
+}
+
+- (void)viewWillDisappear:(BOOL)animated {
+    [super viewWillDisappear:animated];
+    if (kiOS7Later) {
+        self.navigationController.navigationBar.barStyle = UIBarStyleDefault;
+        self.navigationController.navigationBar.tintColor = nil;
+    }
+    [UIApplication sharedApplication].statusBarStyle = UIStatusBarStyleDefault;
+}
+
+- (void)reload {
+    [[YYImageCache sharedCache].memoryCache removeAllObjects];
+    [[YYImageCache sharedCache].diskCache removeAllObjectsWithBlock:nil];
+    [self.tableView performSelector:@selector(reloadData) afterDelay:0.1];
+}
+
+- (BOOL)tableView:(UITableView *)tableView shouldHighlightRowAtIndexPath:(NSIndexPath *)indexPath {
+    return NO;
+}
+
+- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
+    return _imageLinks.count * 4;
+}
+
+- (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath {
+    return kCellHeight;
+}
+
+- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
+    YYWebImageExampleCell *cell = [tableView dequeueReusableCellWithIdentifier:@"cell"];
+    if (!cell) cell = [[YYWebImageExampleCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:@"cell"];
+    [cell setImageURL:[NSURL URLWithString:_imageLinks[indexPath.row % _imageLinks.count]]];
+    return cell;
+}
+
+- (void)scrollViewDidScroll:(UIScrollView *)scrollView {
+    CGFloat viewHeight = scrollView.height + scrollView.contentInset.top;
+    for (YYWebImageExampleCell *cell in [self.tableView visibleCells]) {
+        CGFloat y = cell.centerY - scrollView.contentOffset.y;
+        CGFloat p = y - viewHeight / 2;
+        CGFloat scale = cos(p / viewHeight * 0.8) * 0.95;
+        if (kiOS8Later) {
+            [UIView animateWithDuration:0.15 delay:0 options:UIViewAnimationOptionCurveEaseInOut | UIViewAnimationOptionAllowUserInteraction | UIViewAnimationOptionBeginFromCurrentState animations:^{
+                cell.webImageView.transform = CGAffineTransformMakeScale(scale, scale);
+            } completion:NULL];
+        } else {
+            cell.webImageView.transform = CGAffineTransformMakeScale(scale, scale);
+        }
+    }
+}
+
+@end

BIN
Demo/YYImageDemo/cube@2x.png


BIN
Demo/YYImageDemo/google@2x.webp


+ 16 - 0
Demo/YYImageDemo/main.m

@@ -0,0 +1,16 @@
+//
+//  main.m
+//  YYImageDemo
+//
+//  Created by ibireme on 15/10/16.
+//  Copyright © 2015年 ibireme. All rights reserved.
+//
+
+#import <UIKit/UIKit.h>
+#import "AppDelegate.h"
+
+int main(int argc, char * argv[]) {
+    @autoreleasepool {
+        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
+    }
+}

BIN
Demo/YYImageDemo/mew_baseline.gif


BIN
Demo/YYImageDemo/mew_baseline.jpg


BIN
Demo/YYImageDemo/mew_baseline.png


BIN
Demo/YYImageDemo/mew_interlaced.gif


BIN
Demo/YYImageDemo/mew_interlaced.png


BIN
Demo/YYImageDemo/mew_progressive.jpg


BIN
Demo/YYImageDemo/niconiconi@2x.gif


BIN
Demo/YYImageDemo/nyancat@2x.webp


BIN
Demo/YYImageDemo/pia@2x.png


BIN
Demo/YYImageDemo/wall-e@2x.webp


+ 26 - 0
Framework/Info.plist

@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>en</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>FMWK</string>
+	<key>CFBundleShortVersionString</key>
+	<string>0.9.0</string>
+	<key>CFBundleSignature</key>
+	<string>????</string>
+	<key>CFBundleVersion</key>
+	<string>$(CURRENT_PROJECT_VERSION)</string>
+	<key>NSPrincipalClass</key>
+	<string></string>
+</dict>
+</plist>

File diff suppressed because it is too large
+ 210 - 0
Framework/YYImage-Static.xcodeproj/project.pbxproj


+ 7 - 0
Framework/YYImage-Static.xcodeproj/project.xcworkspace/contents.xcworkspacedata

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+   version = "1.0">
+   <FileRef
+      location = "self:/Users/ibireme/Dev/Project/YYKitNew/Projects/YYImage/Framework/Fake/YYImage/YYImage-Static.xcodeproj">
+   </FileRef>
+</Workspace>

+ 362 - 0
Framework/YYImage.xcodeproj/project.pbxproj

@@ -0,0 +1,362 @@
+// !$*UTF8*$!
+{
+	archiveVersion = 1;
+	classes = {
+	};
+	objectVersion = 46;
+	objects = {
+
+/* Begin PBXBuildFile section */
+		D9D41AE91BD1033700CD8EBF /* YYAnimatedImageView.h in Headers */ = {isa = PBXBuildFile; fileRef = D9D41ACF1BD1033700CD8EBF /* YYAnimatedImageView.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		D9D41AEA1BD1033700CD8EBF /* YYAnimatedImageView.m in Sources */ = {isa = PBXBuildFile; fileRef = D9D41AD01BD1033700CD8EBF /* YYAnimatedImageView.m */; settings = {ASSET_TAGS = (); }; };
+		D9D41AEB1BD1033700CD8EBF /* YYFrameImage.h in Headers */ = {isa = PBXBuildFile; fileRef = D9D41AD11BD1033700CD8EBF /* YYFrameImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		D9D41AEC1BD1033700CD8EBF /* YYFrameImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D9D41AD21BD1033700CD8EBF /* YYFrameImage.m */; settings = {ASSET_TAGS = (); }; };
+		D9D41AED1BD1033700CD8EBF /* YYImage.h in Headers */ = {isa = PBXBuildFile; fileRef = D9D41AD31BD1033700CD8EBF /* YYImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		D9D41AEE1BD1033700CD8EBF /* YYImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D9D41AD41BD1033700CD8EBF /* YYImage.m */; settings = {ASSET_TAGS = (); }; };
+		D9D41AF11BD1033700CD8EBF /* YYImageCoder.h in Headers */ = {isa = PBXBuildFile; fileRef = D9D41AD71BD1033700CD8EBF /* YYImageCoder.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		D9D41AF21BD1033700CD8EBF /* YYImageCoder.m in Sources */ = {isa = PBXBuildFile; fileRef = D9D41AD81BD1033700CD8EBF /* YYImageCoder.m */; settings = {ASSET_TAGS = (); }; };
+		D9D41AF31BD1033700CD8EBF /* YYSpriteSheetImage.h in Headers */ = {isa = PBXBuildFile; fileRef = D9D41AD91BD1033700CD8EBF /* YYSpriteSheetImage.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		D9D41AF41BD1033700CD8EBF /* YYSpriteSheetImage.m in Sources */ = {isa = PBXBuildFile; fileRef = D9D41ADA1BD1033700CD8EBF /* YYSpriteSheetImage.m */; settings = {ASSET_TAGS = (); }; };
+		D9D41AFB1BD10CE700CD8EBF /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41AFA1BD10CE700CD8EBF /* UIKit.framework */; };
+		D9D41AFD1BD10CEC00CD8EBF /* CoreFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41AFC1BD10CEC00CD8EBF /* CoreFoundation.framework */; };
+		D9D41AFF1BD10CF200CD8EBF /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41AFE1BD10CF200CD8EBF /* QuartzCore.framework */; };
+		D9D41B011BD10CF600CD8EBF /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41B001BD10CF600CD8EBF /* Accelerate.framework */; };
+		D9D41B031BD10CFA00CD8EBF /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41B021BD10CFA00CD8EBF /* libz.tbd */; };
+		D9D41B051BD10D0700CD8EBF /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41B041BD10D0700CD8EBF /* ImageIO.framework */; };
+		D9D41B071BD10D0D00CD8EBF /* MobileCoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41B061BD10D0D00CD8EBF /* MobileCoreServices.framework */; };
+		D9D41B091BD10D1E00CD8EBF /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D9D41B081BD10D1E00CD8EBF /* AssetsLibrary.framework */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXFileReference section */
+		D9D41AB61BD102F300CD8EBF /* YYImage.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = YYImage.framework; sourceTree = BUILT_PRODUCTS_DIR; };
+		D9D41AC11BD1030300CD8EBF /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
+		D9D41ACF1BD1033700CD8EBF /* YYAnimatedImageView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYAnimatedImageView.h; sourceTree = "<group>"; };
+		D9D41AD01BD1033700CD8EBF /* YYAnimatedImageView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYAnimatedImageView.m; sourceTree = "<group>"; };
+		D9D41AD11BD1033700CD8EBF /* YYFrameImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYFrameImage.h; sourceTree = "<group>"; };
+		D9D41AD21BD1033700CD8EBF /* YYFrameImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYFrameImage.m; sourceTree = "<group>"; };
+		D9D41AD31BD1033700CD8EBF /* YYImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImage.h; sourceTree = "<group>"; };
+		D9D41AD41BD1033700CD8EBF /* YYImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImage.m; sourceTree = "<group>"; };
+		D9D41AD71BD1033700CD8EBF /* YYImageCoder.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYImageCoder.h; sourceTree = "<group>"; };
+		D9D41AD81BD1033700CD8EBF /* YYImageCoder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYImageCoder.m; sourceTree = "<group>"; };
+		D9D41AD91BD1033700CD8EBF /* YYSpriteSheetImage.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = YYSpriteSheetImage.h; sourceTree = "<group>"; };
+		D9D41ADA1BD1033700CD8EBF /* YYSpriteSheetImage.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = YYSpriteSheetImage.m; sourceTree = "<group>"; };
+		D9D41AFA1BD10CE700CD8EBF /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
+		D9D41AFC1BD10CEC00CD8EBF /* CoreFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreFoundation.framework; path = System/Library/Frameworks/CoreFoundation.framework; sourceTree = SDKROOT; };
+		D9D41AFE1BD10CF200CD8EBF /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
+		D9D41B001BD10CF600CD8EBF /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
+		D9D41B021BD10CFA00CD8EBF /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
+		D9D41B041BD10D0700CD8EBF /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = System/Library/Frameworks/ImageIO.framework; sourceTree = SDKROOT; };
+		D9D41B061BD10D0D00CD8EBF /* MobileCoreServices.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = MobileCoreServices.framework; path = System/Library/Frameworks/MobileCoreServices.framework; sourceTree = SDKROOT; };
+		D9D41B081BD10D1E00CD8EBF /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = System/Library/Frameworks/AssetsLibrary.framework; sourceTree = SDKROOT; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+		D9D41AB21BD102F300CD8EBF /* Frameworks */ = {
+			isa = PBXFrameworksBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D9D41B031BD10CFA00CD8EBF /* libz.tbd in Frameworks */,
+				D9D41B091BD10D1E00CD8EBF /* AssetsLibrary.framework in Frameworks */,
+				D9D41B071BD10D0D00CD8EBF /* MobileCoreServices.framework in Frameworks */,
+				D9D41B051BD10D0700CD8EBF /* ImageIO.framework in Frameworks */,
+				D9D41B011BD10CF600CD8EBF /* Accelerate.framework in Frameworks */,
+				D9D41AFF1BD10CF200CD8EBF /* QuartzCore.framework in Frameworks */,
+				D9D41AFD1BD10CEC00CD8EBF /* CoreFoundation.framework in Frameworks */,
+				D9D41AFB1BD10CE700CD8EBF /* UIKit.framework in Frameworks */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+		D9D41AAC1BD102F300CD8EBF = {
+			isa = PBXGroup;
+			children = (
+				D9D41AC31BD1033700CD8EBF /* YYImage */,
+				D9D41AF91BD1033C00CD8EBF /* Supporting Files */,
+				D9D41AB71BD102F300CD8EBF /* Products */,
+			);
+			sourceTree = "<group>";
+		};
+		D9D41AB71BD102F300CD8EBF /* Products */ = {
+			isa = PBXGroup;
+			children = (
+				D9D41AB61BD102F300CD8EBF /* YYImage.framework */,
+			);
+			name = Products;
+			sourceTree = "<group>";
+		};
+		D9D41AC31BD1033700CD8EBF /* YYImage */ = {
+			isa = PBXGroup;
+			children = (
+				D9D41AD31BD1033700CD8EBF /* YYImage.h */,
+				D9D41AD41BD1033700CD8EBF /* YYImage.m */,
+				D9D41AD11BD1033700CD8EBF /* YYFrameImage.h */,
+				D9D41AD21BD1033700CD8EBF /* YYFrameImage.m */,
+				D9D41AD91BD1033700CD8EBF /* YYSpriteSheetImage.h */,
+				D9D41ADA1BD1033700CD8EBF /* YYSpriteSheetImage.m */,
+				D9D41AD71BD1033700CD8EBF /* YYImageCoder.h */,
+				D9D41AD81BD1033700CD8EBF /* YYImageCoder.m */,
+				D9D41ACF1BD1033700CD8EBF /* YYAnimatedImageView.h */,
+				D9D41AD01BD1033700CD8EBF /* YYAnimatedImageView.m */,
+			);
+			name = YYImage;
+			path = ../YYImage;
+			sourceTree = "<group>";
+		};
+		D9D41AF91BD1033C00CD8EBF /* Supporting Files */ = {
+			isa = PBXGroup;
+			children = (
+				D9D41AFA1BD10CE700CD8EBF /* UIKit.framework */,
+				D9D41AFC1BD10CEC00CD8EBF /* CoreFoundation.framework */,
+				D9D41AFE1BD10CF200CD8EBF /* QuartzCore.framework */,
+				D9D41B041BD10D0700CD8EBF /* ImageIO.framework */,
+				D9D41B001BD10CF600CD8EBF /* Accelerate.framework */,
+				D9D41B081BD10D1E00CD8EBF /* AssetsLibrary.framework */,
+				D9D41B061BD10D0D00CD8EBF /* MobileCoreServices.framework */,
+				D9D41B021BD10CFA00CD8EBF /* libz.tbd */,
+				D9D41AC11BD1030300CD8EBF /* Info.plist */,
+			);
+			name = "Supporting Files";
+			sourceTree = "<group>";
+		};
+/* End PBXGroup section */
+
+/* Begin PBXHeadersBuildPhase section */
+		D9D41AB31BD102F300CD8EBF /* Headers */ = {
+			isa = PBXHeadersBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D9D41AEB1BD1033700CD8EBF /* YYFrameImage.h in Headers */,
+				D9D41AE91BD1033700CD8EBF /* YYAnimatedImageView.h in Headers */,
+				D9D41AF11BD1033700CD8EBF /* YYImageCoder.h in Headers */,
+				D9D41AED1BD1033700CD8EBF /* YYImage.h in Headers */,
+				D9D41AF31BD1033700CD8EBF /* YYSpriteSheetImage.h in Headers */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXHeadersBuildPhase section */
+
+/* Begin PBXNativeTarget section */
+		D9D41AB51BD102F300CD8EBF /* YYImage */ = {
+			isa = PBXNativeTarget;
+			buildConfigurationList = D9D41ABE1BD102F300CD8EBF /* Build configuration list for PBXNativeTarget "YYImage" */;
+			buildPhases = (
+				D9D41AB11BD102F300CD8EBF /* Sources */,
+				D9D41AB21BD102F300CD8EBF /* Frameworks */,
+				D9D41AB31BD102F300CD8EBF /* Headers */,
+				D9D41AB41BD102F300CD8EBF /* Resources */,
+			);
+			buildRules = (
+			);
+			dependencies = (
+			);
+			name = YYImage;
+			productName = YYImage;
+			productReference = D9D41AB61BD102F300CD8EBF /* YYImage.framework */;
+			productType = "com.apple.product-type.framework";
+		};
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+		D9D41AAD1BD102F300CD8EBF /* Project object */ = {
+			isa = PBXProject;
+			attributes = {
+				LastUpgradeCheck = 0700;
+				ORGANIZATIONNAME = ibireme;
+				TargetAttributes = {
+					D9D41AB51BD102F300CD8EBF = {
+						CreatedOnToolsVersion = 7.0;
+					};
+				};
+			};
+			buildConfigurationList = D9D41AB01BD102F300CD8EBF /* Build configuration list for PBXProject "YYImage" */;
+			compatibilityVersion = "Xcode 3.2";
+			developmentRegion = English;
+			hasScannedForEncodings = 0;
+			knownRegions = (
+				en,
+			);
+			mainGroup = D9D41AAC1BD102F300CD8EBF;
+			productRefGroup = D9D41AB71BD102F300CD8EBF /* Products */;
+			projectDirPath = "";
+			projectRoot = "";
+			targets = (
+				D9D41AB51BD102F300CD8EBF /* YYImage */,
+			);
+		};
+/* End PBXProject section */
+
+/* Begin PBXResourcesBuildPhase section */
+		D9D41AB41BD102F300CD8EBF /* Resources */ = {
+			isa = PBXResourcesBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXResourcesBuildPhase section */
+
+/* Begin PBXSourcesBuildPhase section */
+		D9D41AB11BD102F300CD8EBF /* Sources */ = {
+			isa = PBXSourcesBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+				D9D41AEE1BD1033700CD8EBF /* YYImage.m in Sources */,
+				D9D41AEA1BD1033700CD8EBF /* YYAnimatedImageView.m in Sources */,
+				D9D41AEC1BD1033700CD8EBF /* YYFrameImage.m in Sources */,
+				D9D41AF41BD1033700CD8EBF /* YYSpriteSheetImage.m in Sources */,
+				D9D41AF21BD1033700CD8EBF /* YYImageCoder.m in Sources */,
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+		};
+/* End PBXSourcesBuildPhase section */
+
+/* Begin XCBuildConfiguration section */
+		D9D41ABC1BD102F300CD8EBF /* Debug */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ALWAYS_SEARCH_USER_PATHS = NO;
+				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+				CLANG_CXX_LIBRARY = "libc++";
+				CLANG_ENABLE_MODULES = YES;
+				CLANG_ENABLE_OBJC_ARC = YES;
+				CLANG_WARN_BOOL_CONVERSION = YES;
+				CLANG_WARN_CONSTANT_CONVERSION = YES;
+				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+				CLANG_WARN_EMPTY_BODY = YES;
+				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INT_CONVERSION = YES;
+				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_UNREACHABLE_CODE = YES;
+				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+				COPY_PHASE_STRIP = NO;
+				CURRENT_PROJECT_VERSION = 1;
+				DEBUG_INFORMATION_FORMAT = dwarf;
+				ENABLE_STRICT_OBJC_MSGSEND = YES;
+				ENABLE_TESTABILITY = YES;
+				GCC_C_LANGUAGE_STANDARD = gnu99;
+				GCC_DYNAMIC_NO_PIC = NO;
+				GCC_NO_COMMON_BLOCKS = YES;
+				GCC_OPTIMIZATION_LEVEL = 0;
+				GCC_PREPROCESSOR_DEFINITIONS = (
+					"DEBUG=1",
+					"$(inherited)",
+				);
+				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+				GCC_WARN_UNDECLARED_SELECTOR = YES;
+				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+				GCC_WARN_UNUSED_FUNCTION = YES;
+				GCC_WARN_UNUSED_VARIABLE = YES;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+				MTL_ENABLE_DEBUG_INFO = YES;
+				ONLY_ACTIVE_ARCH = YES;
+				SDKROOT = iphoneos;
+				TARGETED_DEVICE_FAMILY = "1,2";
+				VERSIONING_SYSTEM = "apple-generic";
+				VERSION_INFO_PREFIX = "";
+			};
+			name = Debug;
+		};
+		D9D41ABD1BD102F300CD8EBF /* Release */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				ALWAYS_SEARCH_USER_PATHS = NO;
+				CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+				CLANG_CXX_LIBRARY = "libc++";
+				CLANG_ENABLE_MODULES = YES;
+				CLANG_ENABLE_OBJC_ARC = YES;
+				CLANG_WARN_BOOL_CONVERSION = YES;
+				CLANG_WARN_CONSTANT_CONVERSION = YES;
+				CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
+				CLANG_WARN_EMPTY_BODY = YES;
+				CLANG_WARN_ENUM_CONVERSION = YES;
+				CLANG_WARN_INT_CONVERSION = YES;
+				CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
+				CLANG_WARN_UNREACHABLE_CODE = YES;
+				CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+				"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
+				COPY_PHASE_STRIP = NO;
+				CURRENT_PROJECT_VERSION = 1;
+				DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+				ENABLE_NS_ASSERTIONS = NO;
+				ENABLE_STRICT_OBJC_MSGSEND = YES;
+				GCC_C_LANGUAGE_STANDARD = gnu99;
+				GCC_NO_COMMON_BLOCKS = YES;
+				GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+				GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
+				GCC_WARN_UNDECLARED_SELECTOR = YES;
+				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
+				GCC_WARN_UNUSED_FUNCTION = YES;
+				GCC_WARN_UNUSED_VARIABLE = YES;
+				IPHONEOS_DEPLOYMENT_TARGET = 9.0;
+				MTL_ENABLE_DEBUG_INFO = NO;
+				SDKROOT = iphoneos;
+				TARGETED_DEVICE_FAMILY = "1,2";
+				VALIDATE_PRODUCT = YES;
+				VERSIONING_SYSTEM = "apple-generic";
+				VERSION_INFO_PREFIX = "";
+			};
+			name = Release;
+		};
+		D9D41ABF1BD102F300CD8EBF /* Debug */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				DEFINES_MODULE = YES;
+				DYLIB_COMPATIBILITY_VERSION = 1;
+				DYLIB_CURRENT_VERSION = 1;
+				DYLIB_INSTALL_NAME_BASE = "@rpath";
+				INFOPLIST_FILE = Info.plist;
+				INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
+				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+				PRODUCT_BUNDLE_IDENTIFIER = com.ibireme.YYImage;
+				PRODUCT_NAME = "$(TARGET_NAME)";
+				SKIP_INSTALL = YES;
+			};
+			name = Debug;
+		};
+		D9D41AC01BD102F300CD8EBF /* Release */ = {
+			isa = XCBuildConfiguration;
+			buildSettings = {
+				DEFINES_MODULE = YES;
+				DYLIB_COMPATIBILITY_VERSION = 1;
+				DYLIB_CURRENT_VERSION = 1;
+				DYLIB_INSTALL_NAME_BASE = "@rpath";
+				INFOPLIST_FILE = Info.plist;
+				INSTALL_PATH = "$(LOCAL_LIBRARY_DIR)/Frameworks";
+				IPHONEOS_DEPLOYMENT_TARGET = 8.0;
+				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks";
+				PRODUCT_BUNDLE_IDENTIFIER = com.ibireme.YYImage;
+				PRODUCT_NAME = "$(TARGET_NAME)";
+				SKIP_INSTALL = YES;
+			};
+			name = Release;
+		};
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+		D9D41AB01BD102F300CD8EBF /* Build configuration list for PBXProject "YYImage" */ = {
+			isa = XCConfigurationList;
+			buildConfigurations = (
+				D9D41ABC1BD102F300CD8EBF /* Debug */,
+				D9D41ABD1BD102F300CD8EBF /* Release */,
+			);
+			defaultConfigurationIsVisible = 0;
+			defaultConfigurationName = Release;
+		};
+		D9D41ABE1BD102F300CD8EBF /* Build configuration list for PBXNativeTarget "YYImage" */ = {
+			isa = XCConfigurationList;
+			buildConfigurations = (
+				D9D41ABF1BD102F300CD8EBF /* Debug */,
+				D9D41AC01BD102F300CD8EBF /* Release */,
+			);
+			defaultConfigurationIsVisible = 0;
+			defaultConfigurationName = Release;
+		};
+/* End XCConfigurationList section */
+	};
+	rootObject = D9D41AAD1BD102F300CD8EBF /* Project object */;
+}

+ 7 - 0
Framework/YYImage.xcodeproj/project.xcworkspace/contents.xcworkspacedata

@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+   version = "1.0">
+   <FileRef
+      location = "self:YYImage.xcodeproj">
+   </FileRef>
+</Workspace>

+ 80 - 0
Framework/YYImage.xcodeproj/xcshareddata/xcschemes/YYImage.xcscheme

@@ -0,0 +1,80 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Scheme
+   LastUpgradeVersion = "0700"
+   version = "1.3">
+   <BuildAction
+      parallelizeBuildables = "YES"
+      buildImplicitDependencies = "YES">
+      <BuildActionEntries>
+         <BuildActionEntry
+            buildForTesting = "YES"
+            buildForRunning = "YES"
+            buildForProfiling = "YES"
+            buildForArchiving = "YES"
+            buildForAnalyzing = "YES">
+            <BuildableReference
+               BuildableIdentifier = "primary"
+               BlueprintIdentifier = "D9D41AB51BD102F300CD8EBF"
+               BuildableName = "YYImage.framework"
+               BlueprintName = "YYImage"
+               ReferencedContainer = "container:YYImage.xcodeproj">
+            </BuildableReference>
+         </BuildActionEntry>
+      </BuildActionEntries>
+   </BuildAction>
+   <TestAction
+      buildConfiguration = "Debug"
+      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+      shouldUseLaunchSchemeArgsEnv = "YES">
+      <Testables>
+      </Testables>
+      <AdditionalOptions>
+      </AdditionalOptions>
+   </TestAction>
+   <LaunchAction
+      buildConfiguration = "Debug"
+      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+      launchStyle = "0"
+      useCustomWorkingDirectory = "NO"
+      ignoresPersistentStateOnLaunch = "NO"
+      debugDocumentVersioning = "YES"
+      debugServiceExtension = "internal"
+      allowLocationSimulation = "YES">
+      <MacroExpansion>
+         <BuildableReference
+            BuildableIdentifier = "primary"
+            BlueprintIdentifier = "D9D41AB51BD102F300CD8EBF"
+            BuildableName = "YYImage.framework"
+            BlueprintName = "YYImage"
+            ReferencedContainer = "container:YYImage.xcodeproj">
+         </BuildableReference>
+      </MacroExpansion>
+      <AdditionalOptions>
+      </AdditionalOptions>
+   </LaunchAction>
+   <ProfileAction
+      buildConfiguration = "Release"
+      shouldUseLaunchSchemeArgsEnv = "YES"
+      savedToolIdentifier = ""
+      useCustomWorkingDirectory = "NO"
+      debugDocumentVersioning = "YES">
+      <MacroExpansion>
+         <BuildableReference
+            BuildableIdentifier = "primary"
+            BlueprintIdentifier = "D9D41AB51BD102F300CD8EBF"
+            BuildableName = "YYImage.framework"
+            BlueprintName = "YYImage"
+            ReferencedContainer = "container:YYImage.xcodeproj">
+         </BuildableReference>
+      </MacroExpansion>
+   </ProfileAction>
+   <AnalyzeAction
+      buildConfiguration = "Debug">
+   </AnalyzeAction>
+   <ArchiveAction
+      buildConfiguration = "Release"
+      revealArchiveInOrganizer = "YES">
+   </ArchiveAction>
+</Scheme>

+ 1 - 1
LICENSE

@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2015 ibireme
+Copyright (c) 2015 ibireme <ibireme@gmail.com>
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

+ 154 - 1
README.md

@@ -1,2 +1,155 @@
-# YYImage
+YYImage
+==============
+[![License MIT](https://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/ibireme/YYImage/master/LICENSE)&nbsp;
+[![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)](https://github.com/Carthage/Carthage)&nbsp;
+[![Cocoapods](http://img.shields.io/cocoapods/v/YYImage.svg?style=flat)](http://cocoapods.org/?q= YYImage)&nbsp;
+[![Cocoapods](http://img.shields.io/cocoapods/p/YYImage.svg?style=flat)](http://cocoapods.org/?q= YYImage)&nbsp;
+[![Support](https://img.shields.io/badge/support-iOS%206%2B%20-blue.svg?style=flat)](https://www.apple.com/nl/ios/)
+
 Image framework for iOS to display/encode/decode animated WebP, APNG, GIF, and more.
+
+Features
+==============
+- Display/encode/decode animated image with these types:<br/> WebP, APNG, GIF.
+- Display/encode/decode still image with these types:<br/> WebP, PNG, GIF, JPEG, JP2, TIFF, BMP, ICO, ICNS.
+- Baseline/progressive/interlaced image decode with these types:<br/>PNG, GIF, JPEG, BMP.
+- Display frame based image animation and sprite sheet animation.
+- Extendable protocol for custom image animation.
+- Dynamic frame buffer for lower memory usage.
+
+Usage
+==============
+
+### Display animated image
+	
+	// File: ani@2x.webp
+	UIImage *image = [YYImage imageNamed:@"ani.webp"];
+	UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+	[self.view addSubview:imageView];
+
+
+### Display frame animation
+	
+	// Files: frame1.png, frame2.png, frame3.png
+	NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
+	NSArray *times = @[@0.1, @0.2, @0.1];
+	UIImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times repeats:YES];
+	UIImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+	[self.view addSubview:imageView];
+
+### Display sprite sheet animation
+
+	// 8 * 12 sprites in a single sheet image
+	UIImage *img = [UIImage imageNamed:@"sprite-sheet"];
+	NSMutableArray *contentRects = [NSMutableArray new];
+	NSMutableArray *durations = [NSMutableArray new];
+	for (int j = 0; j < 12; j++) {
+	   for (int i = 0; i < 8; i++) {
+	       CGRect rect;
+	       rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
+	       rect.origin.x = img.size.width / 8 * i;
+	       rect.origin.y = img.size.height / 12 * j;
+	       [contentRects addObject:[NSValue valueWithCGRect:rect]];
+	       [durations addObject:@(1 / 60.0)];
+	   }
+	}
+	YYSpriteSheetImage *sprite;
+	sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img
+	                                                contentRects:contentRects
+	                                              frameDurations:durations
+	                                                   loopCount:0];
+	YYAnimatedImageView *imageView = [YYAnimatedImageView new];
+	imageView.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
+	imageView.image = sprite;
+	[self.view addSubview:imageView];
+
+### Animation control
+	
+	YYAnimatedImageView *imageView = ...;
+	// pause:
+	[imageView stopAnimating];
+	// play:
+	[imageView startAnimating];
+	// set frame index:
+	imageView.currentAnimatedImageIndex = 12;
+	
+### Image decoder
+		
+	// Decode single frame:
+	NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
+	YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
+	UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+	
+	// Progressive:
+	NSMutableData *data = [NSMutableData new];
+	YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
+	while(newDataArrived) {
+	   [data appendData:newData];
+	   [decoder updateData:data final:NO];
+	   if (decoder.frameCount > 0) {
+	       UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+	       // progressive display...
+	   }
+	}
+	[decoder updateData:data final:YES];
+	UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+	// final display...
+
+### Image encoder
+	
+	// Encode still image:
+	YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
+	jpegEncoder.quality = 0.9;
+	[jpegEncoder addImage:image duration:0];
+	NSData *jpegData = [jpegEncoder encode];
+	 
+	// Encode animated image:
+	YYImageEncoder *webpEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
+	webpEncoder.loopCount = 5;
+	[webpEncoder addImage:image0 duration:0.1];
+	[webpEncoder addImage:image1 duration:0.15];
+	[webpEncoder addImage:image2 duration:0.2];
+	NSData *webpData = [webpEncoder encode];
+
+Installation
+==============
+
+### Cocoapods
+
+1. Update cocoapods to the latest version.
+2. Add `pod "YYImage"` to your Podfile.
+3. Run `pod install` or `pod update`.
+4. Import \<YYImage/YYImage.h\>
+
+
+### Carthage
+
+1. Add `github "ibireme/YYImage"` to your Cartfile.
+2. Run `carthage update` and add the framework to your project.
+3. Import \<YYImage/YYImage.h\>
+4. Notice: carthage framework doesn't include webp component, if you want to support webp, use cocoapods or install manually.
+
+### Manually
+
+1. Download all the files in the YYImage subdirectory.
+2. Add the source files to your Xcode project.
+3. Link with required frameworks:
+	* UIKit.framework
+	* CoreFoundation.framework
+	* QuartzCore.framework
+	* AssetsLibrary.framework
+	* ImageIO.framework
+	* Accelerate.framework
+	* MobileCoreServices.framework
+4. Add `Vendor/WebP.framework`(static library) to your Xcode project if you want to support webp.
+5. Import `YYImage.h`.
+
+
+About
+==============
+This library supports iOS 6.0 and later.
+
+
+License
+==============
+YYImage is provided under the MIT license. See LICENSE file for details.

+ 142 - 0
Vendor/WebP.framework/Headers/config.h

@@ -0,0 +1,142 @@
+/* src/webp/config.h.  Generated from config.h.in by configure.  */
+/* src/webp/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Define if building universal (internal helper macro) */
+/* #undef AC_APPLE_UNIVERSAL_BUILD */
+
+/* Set to 1 if __builtin_bswap16 is available */
+#define HAVE_BUILTIN_BSWAP16 1
+
+/* Set to 1 if __builtin_bswap32 is available */
+#define HAVE_BUILTIN_BSWAP32 1
+
+/* Set to 1 if __builtin_bswap64 is available */
+#define HAVE_BUILTIN_BSWAP64 1
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <GLUT/glut.h> header file. */
+/* #undef HAVE_GLUT_GLUT_H */
+
+/* Define to 1 if you have the <GL/glut.h> header file. */
+/* #undef HAVE_GL_GLUT_H */
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* Define to 1 if you have the <OpenGL/glut.h> header file. */
+/* #undef HAVE_OPENGL_GLUT_H */
+
+/* Have PTHREAD_PRIO_INHERIT. */
+#define HAVE_PTHREAD_PRIO_INHERIT 1
+
+/* Define to 1 if you have the <shlwapi.h> header file. */
+/* #undef HAVE_SHLWAPI_H */
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define to 1 if you have the <wincodec.h> header file. */
+/* #undef HAVE_WINCODEC_H */
+
+/* Define to 1 if you have the <windows.h> header file. */
+/* #undef HAVE_WINDOWS_H */
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "libwebp"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "http://code.google.com/p/webp/issues"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "libwebp"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "libwebp 0.4.3"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "libwebp"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL "http://developers.google.com/speed/webp"
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "0.4.3"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* Version number of package */
+#define VERSION "0.4.3"
+
+/* Enable experimental code */
+/* #undef WEBP_EXPERIMENTAL_FEATURES */
+
+/* Define to 1 to force aligned memory operations */
+/* #undef WEBP_FORCE_ALIGNED */
+
+/* Set to 1 if AVX2 is supported */
+/* #undef WEBP_HAVE_AVX2 */
+
+/* Set to 1 if GIF library is installed */
+/* #undef WEBP_HAVE_GIF */
+
+/* Set to 1 if OpenGL is supported */
+/* #undef WEBP_HAVE_GL */
+
+/* Set to 1 if JPEG library is installed */
+/* #undef WEBP_HAVE_JPEG */
+
+/* Set to 1 if PNG library is installed */
+/* #undef WEBP_HAVE_PNG */
+
+/* Set to 1 if SSE2 is supported */
+/* #undef WEBP_HAVE_SSE2 */
+
+/* Set to 1 if TIFF library is installed */
+/* #undef WEBP_HAVE_TIFF */
+
+/* Undefine this to disable thread support. */
+#define WEBP_USE_THREAD 1
+
+/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
+   significant byte first (like Motorola and SPARC, unlike Intel). */
+#if defined AC_APPLE_UNIVERSAL_BUILD
+# if defined __BIG_ENDIAN__
+#  define WORDS_BIGENDIAN 1
+# endif
+#else
+# ifndef WORDS_BIGENDIAN
+/* #  undef WORDS_BIGENDIAN */
+# endif
+#endif

+ 503 - 0
Vendor/WebP.framework/Headers/decode.h

@@ -0,0 +1,503 @@
+// Copyright 2010 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+//  Main decoding functions for WebP images.
+//
+// Author: Skal (pascal.massimino@gmail.com)
+
+#ifndef WEBP_WEBP_DECODE_H_
+#define WEBP_WEBP_DECODE_H_
+
+#include "./types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBP_DECODER_ABI_VERSION 0x0203    // MAJOR(8b) + MINOR(8b)
+
+// Note: forward declaring enumerations is not allowed in (strict) C and C++,
+// the types are left here for reference.
+// typedef enum VP8StatusCode VP8StatusCode;
+// typedef enum WEBP_CSP_MODE WEBP_CSP_MODE;
+typedef struct WebPRGBABuffer WebPRGBABuffer;
+typedef struct WebPYUVABuffer WebPYUVABuffer;
+typedef struct WebPDecBuffer WebPDecBuffer;
+typedef struct WebPIDecoder WebPIDecoder;
+typedef struct WebPBitstreamFeatures WebPBitstreamFeatures;
+typedef struct WebPDecoderOptions WebPDecoderOptions;
+typedef struct WebPDecoderConfig WebPDecoderConfig;
+
+// Return the decoder's version number, packed in hexadecimal using 8bits for
+// each of major/minor/revision. E.g: v2.5.7 is 0x020507.
+WEBP_EXTERN(int) WebPGetDecoderVersion(void);
+
+// Retrieve basic header information: width, height.
+// This function will also validate the header and return 0 in
+// case of formatting error.
+// Pointers 'width' and 'height' can be passed NULL if deemed irrelevant.
+WEBP_EXTERN(int) WebPGetInfo(const uint8_t* data, size_t data_size,
+                             int* width, int* height);
+
+// Decodes WebP images pointed to by 'data' and returns RGBA samples, along
+// with the dimensions in *width and *height. The ordering of samples in
+// memory is R, G, B, A, R, G, B, A... in scan order (endian-independent).
+// The returned pointer should be deleted calling free().
+// Returns NULL in case of error.
+WEBP_EXTERN(uint8_t*) WebPDecodeRGBA(const uint8_t* data, size_t data_size,
+                                     int* width, int* height);
+
+// Same as WebPDecodeRGBA, but returning A, R, G, B, A, R, G, B... ordered data.
+WEBP_EXTERN(uint8_t*) WebPDecodeARGB(const uint8_t* data, size_t data_size,
+                                     int* width, int* height);
+
+// Same as WebPDecodeRGBA, but returning B, G, R, A, B, G, R, A... ordered data.
+WEBP_EXTERN(uint8_t*) WebPDecodeBGRA(const uint8_t* data, size_t data_size,
+                                     int* width, int* height);
+
+// Same as WebPDecodeRGBA, but returning R, G, B, R, G, B... ordered data.
+// If the bitstream contains transparency, it is ignored.
+WEBP_EXTERN(uint8_t*) WebPDecodeRGB(const uint8_t* data, size_t data_size,
+                                    int* width, int* height);
+
+// Same as WebPDecodeRGB, but returning B, G, R, B, G, R... ordered data.
+WEBP_EXTERN(uint8_t*) WebPDecodeBGR(const uint8_t* data, size_t data_size,
+                                    int* width, int* height);
+
+
+// Decode WebP images pointed to by 'data' to Y'UV format(*). The pointer
+// returned is the Y samples buffer. Upon return, *u and *v will point to
+// the U and V chroma data. These U and V buffers need NOT be free()'d,
+// unlike the returned Y luma one. The dimension of the U and V planes
+// are both (*width + 1) / 2 and (*height + 1)/ 2.
+// Upon return, the Y buffer has a stride returned as '*stride', while U and V
+// have a common stride returned as '*uv_stride'.
+// Return NULL in case of error.
+// (*) Also named Y'CbCr. See: http://en.wikipedia.org/wiki/YCbCr
+WEBP_EXTERN(uint8_t*) WebPDecodeYUV(const uint8_t* data, size_t data_size,
+                                    int* width, int* height,
+                                    uint8_t** u, uint8_t** v,
+                                    int* stride, int* uv_stride);
+
+// These five functions are variants of the above ones, that decode the image
+// directly into a pre-allocated buffer 'output_buffer'. The maximum storage
+// available in this buffer is indicated by 'output_buffer_size'. If this
+// storage is not sufficient (or an error occurred), NULL is returned.
+// Otherwise, output_buffer is returned, for convenience.
+// The parameter 'output_stride' specifies the distance (in bytes)
+// between scanlines. Hence, output_buffer_size is expected to be at least
+// output_stride x picture-height.
+WEBP_EXTERN(uint8_t*) WebPDecodeRGBAInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+WEBP_EXTERN(uint8_t*) WebPDecodeARGBInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+WEBP_EXTERN(uint8_t*) WebPDecodeBGRAInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+
+// RGB and BGR variants. Here too the transparency information, if present,
+// will be dropped and ignored.
+WEBP_EXTERN(uint8_t*) WebPDecodeRGBInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+WEBP_EXTERN(uint8_t*) WebPDecodeBGRInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+
+// WebPDecodeYUVInto() is a variant of WebPDecodeYUV() that operates directly
+// into pre-allocated luma/chroma plane buffers. This function requires the
+// strides to be passed: one for the luma plane and one for each of the
+// chroma ones. The size of each plane buffer is passed as 'luma_size',
+// 'u_size' and 'v_size' respectively.
+// Pointer to the luma plane ('*luma') is returned or NULL if an error occurred
+// during decoding (or because some buffers were found to be too small).
+WEBP_EXTERN(uint8_t*) WebPDecodeYUVInto(
+    const uint8_t* data, size_t data_size,
+    uint8_t* luma, size_t luma_size, int luma_stride,
+    uint8_t* u, size_t u_size, int u_stride,
+    uint8_t* v, size_t v_size, int v_stride);
+
+//------------------------------------------------------------------------------
+// Output colorspaces and buffer
+
+// Colorspaces
+// Note: the naming describes the byte-ordering of packed samples in memory.
+// For instance, MODE_BGRA relates to samples ordered as B,G,R,A,B,G,R,A,...
+// Non-capital names (e.g.:MODE_Argb) relates to pre-multiplied RGB channels.
+// RGBA-4444 and RGB-565 colorspaces are represented by following byte-order:
+// RGBA-4444: [r3 r2 r1 r0 g3 g2 g1 g0], [b3 b2 b1 b0 a3 a2 a1 a0], ...
+// RGB-565: [r4 r3 r2 r1 r0 g5 g4 g3], [g2 g1 g0 b4 b3 b2 b1 b0], ...
+// In the case WEBP_SWAP_16BITS_CSP is defined, the bytes are swapped for
+// these two modes:
+// RGBA-4444: [b3 b2 b1 b0 a3 a2 a1 a0], [r3 r2 r1 r0 g3 g2 g1 g0], ...
+// RGB-565: [g2 g1 g0 b4 b3 b2 b1 b0], [r4 r3 r2 r1 r0 g5 g4 g3], ...
+
+typedef enum WEBP_CSP_MODE {
+  MODE_RGB = 0, MODE_RGBA = 1,
+  MODE_BGR = 2, MODE_BGRA = 3,
+  MODE_ARGB = 4, MODE_RGBA_4444 = 5,
+  MODE_RGB_565 = 6,
+  // RGB-premultiplied transparent modes (alpha value is preserved)
+  MODE_rgbA = 7,
+  MODE_bgrA = 8,
+  MODE_Argb = 9,
+  MODE_rgbA_4444 = 10,
+  // YUV modes must come after RGB ones.
+  MODE_YUV = 11, MODE_YUVA = 12,  // yuv 4:2:0
+  MODE_LAST = 13
+} WEBP_CSP_MODE;
+
+// Some useful macros:
+static WEBP_INLINE int WebPIsPremultipliedMode(WEBP_CSP_MODE mode) {
+  return (mode == MODE_rgbA || mode == MODE_bgrA || mode == MODE_Argb ||
+          mode == MODE_rgbA_4444);
+}
+
+static WEBP_INLINE int WebPIsAlphaMode(WEBP_CSP_MODE mode) {
+  return (mode == MODE_RGBA || mode == MODE_BGRA || mode == MODE_ARGB ||
+          mode == MODE_RGBA_4444 || mode == MODE_YUVA ||
+          WebPIsPremultipliedMode(mode));
+}
+
+static WEBP_INLINE int WebPIsRGBMode(WEBP_CSP_MODE mode) {
+  return (mode < MODE_YUV);
+}
+
+//------------------------------------------------------------------------------
+// WebPDecBuffer: Generic structure for describing the output sample buffer.
+
+struct WebPRGBABuffer {    // view as RGBA
+  uint8_t* rgba;    // pointer to RGBA samples
+  int stride;       // stride in bytes from one scanline to the next.
+  size_t size;      // total size of the *rgba buffer.
+};
+
+struct WebPYUVABuffer {              // view as YUVA
+  uint8_t* y, *u, *v, *a;     // pointer to luma, chroma U/V, alpha samples
+  int y_stride;               // luma stride
+  int u_stride, v_stride;     // chroma strides
+  int a_stride;               // alpha stride
+  size_t y_size;              // luma plane size
+  size_t u_size, v_size;      // chroma planes size
+  size_t a_size;              // alpha-plane size
+};
+
+// Output buffer
+struct WebPDecBuffer {
+  WEBP_CSP_MODE colorspace;  // Colorspace.
+  int width, height;         // Dimensions.
+  int is_external_memory;    // If true, 'internal_memory' pointer is not used.
+  union {
+    WebPRGBABuffer RGBA;
+    WebPYUVABuffer YUVA;
+  } u;                       // Nameless union of buffer parameters.
+  uint32_t       pad[4];     // padding for later use
+
+  uint8_t* private_memory;   // Internally allocated memory (only when
+                             // is_external_memory is false). Should not be used
+                             // externally, but accessed via the buffer union.
+};
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(int) WebPInitDecBufferInternal(WebPDecBuffer*, int);
+
+// Initialize the structure as empty. Must be called before any other use.
+// Returns false in case of version mismatch
+static WEBP_INLINE int WebPInitDecBuffer(WebPDecBuffer* buffer) {
+  return WebPInitDecBufferInternal(buffer, WEBP_DECODER_ABI_VERSION);
+}
+
+// Free any memory associated with the buffer. Must always be called last.
+// Note: doesn't free the 'buffer' structure itself.
+WEBP_EXTERN(void) WebPFreeDecBuffer(WebPDecBuffer* buffer);
+
+//------------------------------------------------------------------------------
+// Enumeration of the status codes
+
+typedef enum VP8StatusCode {
+  VP8_STATUS_OK = 0,
+  VP8_STATUS_OUT_OF_MEMORY,
+  VP8_STATUS_INVALID_PARAM,
+  VP8_STATUS_BITSTREAM_ERROR,
+  VP8_STATUS_UNSUPPORTED_FEATURE,
+  VP8_STATUS_SUSPENDED,
+  VP8_STATUS_USER_ABORT,
+  VP8_STATUS_NOT_ENOUGH_DATA
+} VP8StatusCode;
+
+//------------------------------------------------------------------------------
+// Incremental decoding
+//
+// This API allows streamlined decoding of partial data.
+// Picture can be incrementally decoded as data become available thanks to the
+// WebPIDecoder object. This object can be left in a SUSPENDED state if the
+// picture is only partially decoded, pending additional input.
+// Code example:
+//
+//   WebPInitDecBuffer(&buffer);
+//   buffer.colorspace = mode;
+//   ...
+//   WebPIDecoder* idec = WebPINewDecoder(&buffer);
+//   while (has_more_data) {
+//     // ... (get additional data)
+//     status = WebPIAppend(idec, new_data, new_data_size);
+//     if (status != VP8_STATUS_SUSPENDED ||
+//       break;
+//     }
+//
+//     // The above call decodes the current available buffer.
+//     // Part of the image can now be refreshed by calling to
+//     // WebPIDecGetRGB()/WebPIDecGetYUVA() etc.
+//   }
+//   WebPIDelete(idec);
+
+// Creates a new incremental decoder with the supplied buffer parameter.
+// This output_buffer can be passed NULL, in which case a default output buffer
+// is used (with MODE_RGB). Otherwise, an internal reference to 'output_buffer'
+// is kept, which means that the lifespan of 'output_buffer' must be larger than
+// that of the returned WebPIDecoder object.
+// The supplied 'output_buffer' content MUST NOT be changed between calls to
+// WebPIAppend() or WebPIUpdate() unless 'output_buffer.is_external_memory' is
+// set to 1. In such a case, it is allowed to modify the pointers, size and
+// stride of output_buffer.u.RGBA or output_buffer.u.YUVA, provided they remain
+// within valid bounds.
+// All other fields of WebPDecBuffer MUST remain constant between calls.
+// Returns NULL if the allocation failed.
+WEBP_EXTERN(WebPIDecoder*) WebPINewDecoder(WebPDecBuffer* output_buffer);
+
+// This function allocates and initializes an incremental-decoder object, which
+// will output the RGB/A samples specified by 'csp' into a preallocated
+// buffer 'output_buffer'. The size of this buffer is at least
+// 'output_buffer_size' and the stride (distance in bytes between two scanlines)
+// is specified by 'output_stride'.
+// Additionally, output_buffer can be passed NULL in which case the output
+// buffer will be allocated automatically when the decoding starts. The
+// colorspace 'csp' is taken into account for allocating this buffer. All other
+// parameters are ignored.
+// Returns NULL if the allocation failed, or if some parameters are invalid.
+WEBP_EXTERN(WebPIDecoder*) WebPINewRGB(
+    WEBP_CSP_MODE csp,
+    uint8_t* output_buffer, size_t output_buffer_size, int output_stride);
+
+// This function allocates and initializes an incremental-decoder object, which
+// will output the raw luma/chroma samples into a preallocated planes if
+// supplied. The luma plane is specified by its pointer 'luma', its size
+// 'luma_size' and its stride 'luma_stride'. Similarly, the chroma-u plane
+// is specified by the 'u', 'u_size' and 'u_stride' parameters, and the chroma-v
+// plane by 'v' and 'v_size'. And same for the alpha-plane. The 'a' pointer
+// can be pass NULL in case one is not interested in the transparency plane.
+// Conversely, 'luma' can be passed NULL if no preallocated planes are supplied.
+// In this case, the output buffer will be automatically allocated (using
+// MODE_YUVA) when decoding starts. All parameters are then ignored.
+// Returns NULL if the allocation failed or if a parameter is invalid.
+WEBP_EXTERN(WebPIDecoder*) WebPINewYUVA(
+    uint8_t* luma, size_t luma_size, int luma_stride,
+    uint8_t* u, size_t u_size, int u_stride,
+    uint8_t* v, size_t v_size, int v_stride,
+    uint8_t* a, size_t a_size, int a_stride);
+
+// Deprecated version of the above, without the alpha plane.
+// Kept for backward compatibility.
+WEBP_EXTERN(WebPIDecoder*) WebPINewYUV(
+    uint8_t* luma, size_t luma_size, int luma_stride,
+    uint8_t* u, size_t u_size, int u_stride,
+    uint8_t* v, size_t v_size, int v_stride);
+
+// Deletes the WebPIDecoder object and associated memory. Must always be called
+// if WebPINewDecoder, WebPINewRGB or WebPINewYUV succeeded.
+WEBP_EXTERN(void) WebPIDelete(WebPIDecoder* idec);
+
+// Copies and decodes the next available data. Returns VP8_STATUS_OK when
+// the image is successfully decoded. Returns VP8_STATUS_SUSPENDED when more
+// data is expected. Returns error in other cases.
+WEBP_EXTERN(VP8StatusCode) WebPIAppend(
+    WebPIDecoder* idec, const uint8_t* data, size_t data_size);
+
+// A variant of the above function to be used when data buffer contains
+// partial data from the beginning. In this case data buffer is not copied
+// to the internal memory.
+// Note that the value of the 'data' pointer can change between calls to
+// WebPIUpdate, for instance when the data buffer is resized to fit larger data.
+WEBP_EXTERN(VP8StatusCode) WebPIUpdate(
+    WebPIDecoder* idec, const uint8_t* data, size_t data_size);
+
+// Returns the RGB/A image decoded so far. Returns NULL if output params
+// are not initialized yet. The RGB/A output type corresponds to the colorspace
+// specified during call to WebPINewDecoder() or WebPINewRGB().
+// *last_y is the index of last decoded row in raster scan order. Some pointers
+// (*last_y, *width etc.) can be NULL if corresponding information is not
+// needed.
+WEBP_EXTERN(uint8_t*) WebPIDecGetRGB(
+    const WebPIDecoder* idec, int* last_y,
+    int* width, int* height, int* stride);
+
+// Same as above function to get a YUVA image. Returns pointer to the luma
+// plane or NULL in case of error. If there is no alpha information
+// the alpha pointer '*a' will be returned NULL.
+WEBP_EXTERN(uint8_t*) WebPIDecGetYUVA(
+    const WebPIDecoder* idec, int* last_y,
+    uint8_t** u, uint8_t** v, uint8_t** a,
+    int* width, int* height, int* stride, int* uv_stride, int* a_stride);
+
+// Deprecated alpha-less version of WebPIDecGetYUVA(): it will ignore the
+// alpha information (if present). Kept for backward compatibility.
+static WEBP_INLINE uint8_t* WebPIDecGetYUV(
+    const WebPIDecoder* idec, int* last_y, uint8_t** u, uint8_t** v,
+    int* width, int* height, int* stride, int* uv_stride) {
+  return WebPIDecGetYUVA(idec, last_y, u, v, NULL, width, height,
+                         stride, uv_stride, NULL);
+}
+
+// Generic call to retrieve information about the displayable area.
+// If non NULL, the left/right/width/height pointers are filled with the visible
+// rectangular area so far.
+// Returns NULL in case the incremental decoder object is in an invalid state.
+// Otherwise returns the pointer to the internal representation. This structure
+// is read-only, tied to WebPIDecoder's lifespan and should not be modified.
+WEBP_EXTERN(const WebPDecBuffer*) WebPIDecodedArea(
+    const WebPIDecoder* idec, int* left, int* top, int* width, int* height);
+
+//------------------------------------------------------------------------------
+// Advanced decoding parametrization
+//
+//  Code sample for using the advanced decoding API
+/*
+     // A) Init a configuration object
+     WebPDecoderConfig config;
+     CHECK(WebPInitDecoderConfig(&config));
+
+     // B) optional: retrieve the bitstream's features.
+     CHECK(WebPGetFeatures(data, data_size, &config.input) == VP8_STATUS_OK);
+
+     // C) Adjust 'config', if needed
+     config.no_fancy_upsampling = 1;
+     config.output.colorspace = MODE_BGRA;
+     // etc.
+
+     // Note that you can also make config.output point to an externally
+     // supplied memory buffer, provided it's big enough to store the decoded
+     // picture. Otherwise, config.output will just be used to allocate memory
+     // and store the decoded picture.
+
+     // D) Decode!
+     CHECK(WebPDecode(data, data_size, &config) == VP8_STATUS_OK);
+
+     // E) Decoded image is now in config.output (and config.output.u.RGBA)
+
+     // F) Reclaim memory allocated in config's object. It's safe to call
+     // this function even if the memory is external and wasn't allocated
+     // by WebPDecode().
+     WebPFreeDecBuffer(&config.output);
+*/
+
+// Features gathered from the bitstream
+// (filled in by WebPGetFeatures(), see below).
+struct WebPBitstreamFeatures {
+  int width;          // Width in pixels, as read from the bitstream.
+  int height;         // Height in pixels, as read from the bitstream.
+  int has_alpha;      // True if the bitstream contains an alpha channel.
+  int has_animation;  // True if the bitstream is an animation.
+  int format;         // 0 = undefined (/mixed), 1 = lossy, 2 = lossless
+
+  // Unused for now:
+  int no_incremental_decoding;  // if true, using incremental decoding is not
+                                // recommended.
+  int rotate;                   // TODO(later)
+  int uv_sampling;              // should be 0 for now. TODO(later)
+  uint32_t pad[2];              // padding for later use
+};
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(VP8StatusCode) WebPGetFeaturesInternal(
+    const uint8_t*, size_t, WebPBitstreamFeatures*, int);
+
+// Fills *features with the properties parsed from the bitstream headers.
+// Returns VP8_STATUS_OK on success, VP8_STATUS_NOT_ENOUGH_DATA when more
+// input is required to parse the headers, or another error code otherwise.
+// (ABI-checked wrapper around the internal entry point.)
+static WEBP_INLINE VP8StatusCode WebPGetFeatures(
+    const uint8_t* data, size_t data_size,
+    WebPBitstreamFeatures* features) {
+  return WebPGetFeaturesInternal(
+      data, data_size, features, WEBP_DECODER_ABI_VERSION);
+}
+
+// Decoding options
+struct WebPDecoderOptions {
+  int bypass_filtering;               // if true, skip the in-loop filtering
+  int no_fancy_upsampling;            // if true, use faster pointwise upsampler
+  int use_cropping;                   // if true, cropping is applied _first_
+  int crop_left, crop_top;            // top-left position for cropping.
+                                      // Will be snapped to even values.
+  int crop_width, crop_height;        // dimension of the cropping area
+  int use_scaling;                    // if true, scaling is applied _afterward_
+  int scaled_width, scaled_height;    // final resolution
+  int use_threads;                    // if true, use multi-threaded decoding
+  int dithering_strength;             // dithering strength (0=Off, 100=full)
+  // The next two fields only exist for newer decoder ABI versions; the size
+  // of pad[] below shrinks accordingly, keeping the struct size stable
+  // across ABI versions.
+#if WEBP_DECODER_ABI_VERSION > 0x0203
+  int flip;                           // flip output vertically
+#endif
+#if WEBP_DECODER_ABI_VERSION > 0x0204
+  int alpha_dithering_strength;       // alpha dithering strength in [0..100]
+#endif
+
+  // Unused for now:
+  int force_rotation;                 // forced rotation (to be applied _last_)
+  int no_enhancement;                 // if true, discard enhancement layer
+#if WEBP_DECODER_ABI_VERSION < 0x0203
+  uint32_t pad[5];                    // padding for later use
+#elif WEBP_DECODER_ABI_VERSION < 0x0204
+  uint32_t pad[4];                    // padding for later use
+#else
+  uint32_t pad[3];                    // padding for later use
+#endif
+};
+
+// Main object storing the configuration for advanced decoding.
+// Must be initialized with WebPInitDecoderConfig() before use (see below).
+struct WebPDecoderConfig {
+  WebPBitstreamFeatures input;  // Immutable bitstream features (optional)
+  WebPDecBuffer output;         // Output buffer (can point to external mem)
+  WebPDecoderOptions options;   // Decoding options
+};
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(int) WebPInitDecoderConfigInternal(WebPDecoderConfig*, int);
+
+// Initializes 'config' to an empty state. Unless only WebPGetFeatures() is
+// going to be used, this must be the first call made on the configuration
+// object. Returns false on ABI version mismatch.
+static WEBP_INLINE int WebPInitDecoderConfig(WebPDecoderConfig* config) {
+  return WebPInitDecoderConfigInternal(
+      config, WEBP_DECODER_ABI_VERSION);
+}
+
+// Instantiate a new incremental decoder object with the requested
+// configuration. The bitstream can be passed using 'data' and 'data_size'
+// parameter, in which case the features will be parsed and stored into
+// config->input. Otherwise, 'data' can be NULL and no parsing will occur.
+// Note that 'config' can be NULL too, in which case a default configuration
+// is used.
+// The returned WebPIDecoder object must always be deleted by calling
+// WebPIDelete().
+// Returns NULL in case of error (and config->status will then reflect
+// the error condition).
+WEBP_EXTERN(WebPIDecoder*) WebPIDecode(const uint8_t* data, size_t data_size,
+                                       WebPDecoderConfig* config);
+
+// Non-incremental version. This version decodes the full data at once, taking
+// 'config' into account. Returns decoding status (which should be VP8_STATUS_OK
+// if the decoding was successful).
+WEBP_EXTERN(VP8StatusCode) WebPDecode(const uint8_t* data, size_t data_size,
+                                      WebPDecoderConfig* config);
+
+#ifdef __cplusplus
+}    // extern "C"
+#endif
+
+#endif  /* WEBP_WEBP_DECODE_H_ */

+ 224 - 0
Vendor/WebP.framework/Headers/demux.h

@@ -0,0 +1,224 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+// Demux API.
+// Enables extraction of image and extended format data from WebP files.
+
+// Code Example: Demuxing WebP data to extract all the frames, ICC profile
+// and EXIF/XMP metadata.
+/*
+  WebPDemuxer* demux = WebPDemux(&webp_data);
+
+  uint32_t width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
+  uint32_t height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
+  // ... (Get information about the features present in the WebP file).
+  uint32_t flags = WebPDemuxGetI(demux, WEBP_FF_FORMAT_FLAGS);
+
+  // ... (Iterate over all frames).
+  WebPIterator iter;
+  if (WebPDemuxGetFrame(demux, 1, &iter)) {
+    do {
+      // ... (Consume 'iter'; e.g. Decode 'iter.fragment' with WebPDecode(),
+      // ... and get other frame properties like width, height, offsets etc.
+      // ... see 'struct WebPIterator' below for more info).
+    } while (WebPDemuxNextFrame(&iter));
+    WebPDemuxReleaseIterator(&iter);
+  }
+
+  // ... (Extract metadata).
+  WebPChunkIterator chunk_iter;
+  if (flags & ICCP_FLAG) WebPDemuxGetChunk(demux, "ICCP", 1, &chunk_iter);
+  // ... (Consume the ICC profile in 'chunk_iter.chunk').
+  WebPDemuxReleaseChunkIterator(&chunk_iter);
+  if (flags & EXIF_FLAG) WebPDemuxGetChunk(demux, "EXIF", 1, &chunk_iter);
+  // ... (Consume the EXIF metadata in 'chunk_iter.chunk').
+  WebPDemuxReleaseChunkIterator(&chunk_iter);
+  if (flags & XMP_FLAG) WebPDemuxGetChunk(demux, "XMP ", 1, &chunk_iter);
+  // ... (Consume the XMP metadata in 'chunk_iter.chunk').
+  WebPDemuxReleaseChunkIterator(&chunk_iter);
+  WebPDemuxDelete(demux);
+*/
+
+#ifndef WEBP_WEBP_DEMUX_H_
+#define WEBP_WEBP_DEMUX_H_
+
+#include "./mux_types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBP_DEMUX_ABI_VERSION 0x0101    // MAJOR(8b) + MINOR(8b)
+
+// Note: forward declaring enumerations is not allowed in (strict) C and C++,
+// the types are left here for reference.
+// typedef enum WebPDemuxState WebPDemuxState;
+// typedef enum WebPFormatFeature WebPFormatFeature;
+typedef struct WebPDemuxer WebPDemuxer;
+typedef struct WebPIterator WebPIterator;
+typedef struct WebPChunkIterator WebPChunkIterator;
+
+//------------------------------------------------------------------------------
+
+// Returns the version number of the demux library, packed in hexadecimal using
+// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507.
+WEBP_EXTERN(int) WebPGetDemuxVersion(void);
+
+//------------------------------------------------------------------------------
+// Life of a Demux object
+
+// Parsing state of the demuxer (as reported by WebPDemuxPartial(), below).
+typedef enum WebPDemuxState {
+  WEBP_DEMUX_PARSE_ERROR    = -1,  // An error occurred while parsing.
+  WEBP_DEMUX_PARSING_HEADER =  0,  // Not enough data to parse full header.
+  WEBP_DEMUX_PARSED_HEADER  =  1,  // Header parsing complete,
+                                   // data may be available.
+  WEBP_DEMUX_DONE           =  2   // Entire file has been parsed.
+} WebPDemuxState;
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(WebPDemuxer*) WebPDemuxInternal(
+    const WebPData*, int, WebPDemuxState*, int);
+
+// Parses a complete WebP file held in 'data'.
+// Returns a WebPDemuxer object on successful parse, or NULL otherwise.
+static WEBP_INLINE WebPDemuxer* WebPDemux(const WebPData* data) {
+  return WebPDemuxInternal(data, 0 /* not partial */, NULL /* no state */,
+                           WEBP_DEMUX_ABI_VERSION);
+}
+
+// Parses a WebP file that may still be incomplete.
+// On return, '*state' (when non-NULL) describes the demuxer's parsing status.
+// Returns NULL on error, or when too little data is available to start
+// parsing; otherwise returns a WebPDemuxer object.
+// Note that the demuxer keeps internal pointers into the 'data' memory
+// segment. If that data is volatile, delete the demuxer object (with
+// WebPDemuxDelete()) and call WebPDemuxPartial() again on the new data;
+// this is usually an inexpensive operation.
+static WEBP_INLINE WebPDemuxer* WebPDemuxPartial(
+    const WebPData* data, WebPDemuxState* state) {
+  return WebPDemuxInternal(data, 1 /* partial */, state,
+                           WEBP_DEMUX_ABI_VERSION);
+}
+
+// Frees memory associated with 'dmux'.
+WEBP_EXTERN(void) WebPDemuxDelete(WebPDemuxer* dmux);
+
+//------------------------------------------------------------------------------
+// Data/information extraction.
+
+// Global features of the file, queryable via WebPDemuxGetI() (see below).
+typedef enum WebPFormatFeature {
+  WEBP_FF_FORMAT_FLAGS,  // Extended format flags present in the 'VP8X' chunk.
+  WEBP_FF_CANVAS_WIDTH,  // Canvas width, in pixels.
+  WEBP_FF_CANVAS_HEIGHT,  // Canvas height, in pixels.
+  WEBP_FF_LOOP_COUNT,  // Number of animation loop iterations.
+  WEBP_FF_BACKGROUND_COLOR,  // Background color of the animation canvas.
+  WEBP_FF_FRAME_COUNT    // Number of frames present in the demux object.
+                         // In case of a partial demux, this is the number of
+                         // frames seen so far, with the last frame possibly
+                         // being partial.
+} WebPFormatFeature;
+
+// Get the 'feature' value from the 'dmux'.
+// NOTE: values are only valid if WebPDemux() was used or WebPDemuxPartial()
+// returned a state > WEBP_DEMUX_PARSING_HEADER.
+WEBP_EXTERN(uint32_t) WebPDemuxGetI(
+    const WebPDemuxer* dmux, WebPFormatFeature feature);
+
+//------------------------------------------------------------------------------
+// Frame iteration.
+
+// Iterator over the frames (and fragments) of a demuxed file; obtained via
+// WebPDemuxGetFrame() and advanced with WebPDemuxNextFrame()/PrevFrame().
+struct WebPIterator {
+  int frame_num;           // index of the current frame
+                           // (see WebPDemuxGetFrame()).
+  int num_frames;          // equivalent to WEBP_FF_FRAME_COUNT.
+  int fragment_num;        // index of the current fragment
+                           // (see WebPDemuxSelectFragment()).
+  int num_fragments;       // number of fragments in this frame.
+  int x_offset, y_offset;  // offset relative to the canvas.
+  int width, height;       // dimensions of this frame or fragment.
+  int duration;            // display duration in milliseconds.
+  WebPMuxAnimDispose dispose_method;  // dispose method for the frame.
+  int complete;   // true if 'fragment' contains a full frame. partial images
+                  // may still be decoded with the WebP incremental decoder.
+  WebPData fragment;  // The frame or fragment given by 'frame_num' and
+                      // 'fragment_num'.
+  int has_alpha;      // True if the frame or fragment contains transparency.
+  WebPMuxAnimBlend blend_method;  // Blend operation for the frame.
+
+  uint32_t pad[2];         // padding for later use.
+  void* private_;          // for internal use only.
+};
+
+// Retrieves frame 'frame_number' from 'dmux'.
+// 'iter->fragment' points to the first fragment on return from this function.
+// Individual fragments may be extracted using WebPDemuxSelectFragment().
+// Setting 'frame_number' equal to 0 will return the last frame of the image.
+// Returns false if 'dmux' is NULL or frame 'frame_number' is not present.
+// Call WebPDemuxReleaseIterator() when use of the iterator is complete.
+// NOTE: 'dmux' must persist for the lifetime of 'iter'.
+WEBP_EXTERN(int) WebPDemuxGetFrame(
+    const WebPDemuxer* dmux, int frame_number, WebPIterator* iter);
+
+// Sets 'iter->fragment' to point to the next ('iter->frame_num' + 1) or
+// previous ('iter->frame_num' - 1) frame. These functions do not loop.
+// Returns true on success, false otherwise.
+WEBP_EXTERN(int) WebPDemuxNextFrame(WebPIterator* iter);
+WEBP_EXTERN(int) WebPDemuxPrevFrame(WebPIterator* iter);
+
+// Sets 'iter->fragment' to reflect fragment number 'fragment_num'.
+// Returns true if fragment 'fragment_num' is present, false otherwise.
+WEBP_EXTERN(int) WebPDemuxSelectFragment(WebPIterator* iter, int fragment_num);
+
+// Releases any memory associated with 'iter'.
+// Must be called before any subsequent calls to WebPDemuxGetChunk() on the same
+// iter. Also, must be called before destroying the associated WebPDemuxer with
+// WebPDemuxDelete().
+WEBP_EXTERN(void) WebPDemuxReleaseIterator(WebPIterator* iter);
+
+//------------------------------------------------------------------------------
+// Chunk iteration.
+
+// Iterator over non-image chunks; obtained via WebPDemuxGetChunk() and
+// advanced with WebPDemuxNextChunk()/PrevChunk().
+struct WebPChunkIterator {
+  // The current and total number of chunks with the fourcc given to
+  // WebPDemuxGetChunk().
+  int chunk_num;
+  int num_chunks;
+  WebPData chunk;    // The payload of the chunk.
+
+  uint32_t pad[6];   // padding for later use
+  void* private_;    // for internal use only.
+};
+
+// Retrieves the 'chunk_number' instance of the chunk with id 'fourcc' from
+// 'dmux'.
+// 'fourcc' is a character array containing the fourcc of the chunk to return,
+// e.g., "ICCP", "XMP ", "EXIF", etc.
+// Setting 'chunk_number' equal to 0 will return the last chunk in a set.
+// Returns true if the chunk is found, false otherwise. Image related chunk
+// payloads are accessed through WebPDemuxGetFrame() and related functions.
+// Call WebPDemuxReleaseChunkIterator() when use of the iterator is complete.
+// NOTE: 'dmux' must persist for the lifetime of the iterator.
+WEBP_EXTERN(int) WebPDemuxGetChunk(const WebPDemuxer* dmux,
+                                   const char fourcc[4], int chunk_number,
+                                   WebPChunkIterator* iter);
+
+// Sets 'iter->chunk' to point to the next ('iter->chunk_num' + 1) or previous
+// ('iter->chunk_num' - 1) chunk. These functions do not loop.
+// Returns true on success, false otherwise.
+WEBP_EXTERN(int) WebPDemuxNextChunk(WebPChunkIterator* iter);
+WEBP_EXTERN(int) WebPDemuxPrevChunk(WebPChunkIterator* iter);
+
+// Releases any memory associated with 'iter'.
+// Must be called before destroying the associated WebPDemuxer with
+// WebPDemuxDelete().
+WEBP_EXTERN(void) WebPDemuxReleaseChunkIterator(WebPChunkIterator* iter);
+
+//------------------------------------------------------------------------------
+
+#ifdef __cplusplus
+}    // extern "C"
+#endif
+
+#endif  /* WEBP_WEBP_DEMUX_H_ */

+ 520 - 0
Vendor/WebP.framework/Headers/encode.h

@@ -0,0 +1,520 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+//   WebP encoder: main interface
+//
+// Author: Skal (pascal.massimino@gmail.com)
+
+#ifndef WEBP_WEBP_ENCODE_H_
+#define WEBP_WEBP_ENCODE_H_
+
+#include "./types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBP_ENCODER_ABI_VERSION 0x0202    // MAJOR(8b) + MINOR(8b)
+
+// Note: forward declaring enumerations is not allowed in (strict) C and C++,
+// the types are left here for reference.
+// typedef enum WebPImageHint WebPImageHint;
+// typedef enum WebPEncCSP WebPEncCSP;
+// typedef enum WebPPreset WebPPreset;
+// typedef enum WebPEncodingError WebPEncodingError;
+typedef struct WebPConfig WebPConfig;
+typedef struct WebPPicture WebPPicture;   // main structure for I/O
+typedef struct WebPAuxStats WebPAuxStats;
+typedef struct WebPMemoryWriter WebPMemoryWriter;
+
+// Return the encoder's version number, packed in hexadecimal using 8bits for
+// each of major/minor/revision. E.g: v2.5.7 is 0x020507.
+WEBP_EXTERN(int) WebPGetEncoderVersion(void);
+
+//------------------------------------------------------------------------------
+// One-stop-shop call! No questions asked:
+
+// Returns the size of the compressed data (pointed to by *output), or 0 if
+// an error occurred. The compressed data must be released by the caller
+// using the call 'free(*output)'.
+// These functions compress using the lossy format, and the quality_factor
+// can go from 0 (smaller output, lower quality) to 100 (best quality,
+// larger output).
+WEBP_EXTERN(size_t) WebPEncodeRGB(const uint8_t* rgb,
+                                  int width, int height, int stride,
+                                  float quality_factor, uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeBGR(const uint8_t* bgr,
+                                  int width, int height, int stride,
+                                  float quality_factor, uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeRGBA(const uint8_t* rgba,
+                                   int width, int height, int stride,
+                                   float quality_factor, uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeBGRA(const uint8_t* bgra,
+                                   int width, int height, int stride,
+                                   float quality_factor, uint8_t** output);
+
+// These functions are the equivalent of the above, but compressing in a
+// lossless manner. Files are usually larger than lossy format, but will
+// not suffer any compression loss.
+WEBP_EXTERN(size_t) WebPEncodeLosslessRGB(const uint8_t* rgb,
+                                          int width, int height, int stride,
+                                          uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeLosslessBGR(const uint8_t* bgr,
+                                          int width, int height, int stride,
+                                          uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeLosslessRGBA(const uint8_t* rgba,
+                                           int width, int height, int stride,
+                                           uint8_t** output);
+WEBP_EXTERN(size_t) WebPEncodeLosslessBGRA(const uint8_t* bgra,
+                                           int width, int height, int stride,
+                                           uint8_t** output);
+
+//------------------------------------------------------------------------------
+// Coding parameters
+
+// Image characteristics hint for the underlying encoder.
+typedef enum WebPImageHint {
+  WEBP_HINT_DEFAULT = 0,  // default preset.
+  WEBP_HINT_PICTURE,      // digital picture, like portrait, inner shot
+  WEBP_HINT_PHOTO,        // outdoor photograph, with natural lighting
+  WEBP_HINT_GRAPH,        // Discrete tone image (graph, map-tile etc).
+  WEBP_HINT_LAST          // list terminator. always last.
+} WebPImageHint;
+
+// Compression parameters.
+// Use WebPValidateConfig() (see below) to check that the values are within
+// their valid ranges.
+struct WebPConfig {
+  int lossless;           // Lossless encoding (0=lossy(default), 1=lossless).
+  float quality;          // between 0 (smallest file) and 100 (biggest)
+  int method;             // quality/speed trade-off (0=fast, 6=slower-better)
+
+  WebPImageHint image_hint;  // Hint for image type (lossless only for now).
+
+  // Parameters related to lossy compression only:
+  int target_size;        // if non-zero, set the desired target size in bytes.
+                          // Takes precedence over the 'compression' parameter.
+  float target_PSNR;      // if non-zero, specifies the minimal distortion to
+                          // try to achieve. Takes precedence over target_size.
+  int segments;           // maximum number of segments to use, in [1..4]
+  int sns_strength;       // Spatial Noise Shaping. 0=off, 100=maximum.
+  int filter_strength;    // range: [0 = off .. 100 = strongest]
+  int filter_sharpness;   // range: [0 = off .. 7 = least sharp]
+  int filter_type;        // filtering type: 0 = simple, 1 = strong (only used
+                          // if filter_strength > 0 or autofilter > 0)
+  int autofilter;         // Auto adjust filter's strength [0 = off, 1 = on]
+  int alpha_compression;  // Algorithm for encoding the alpha plane (0 = none,
+                          // 1 = compressed with WebP lossless). Default is 1.
+  int alpha_filtering;    // Predictive filtering method for alpha plane.
+                          //  0: none, 1: fast, 2: best. Default is 1.
+  int alpha_quality;      // Between 0 (smallest size) and 100 (lossless).
+                          // Default is 100.
+  int pass;               // number of entropy-analysis passes (in [1..10]).
+
+  int show_compressed;    // if true, export the compressed picture back.
+                          // In-loop filtering is not applied.
+  int preprocessing;      // preprocessing filter:
+                          // 0=none, 1=segment-smooth, 2=pseudo-random dithering
+  int partitions;         // log2(number of token partitions) in [0..3]. Default
+                          // is set to 0 for easier progressive decoding.
+  int partition_limit;    // quality degradation allowed to fit the 512k limit
+                          // on prediction modes coding (0: no degradation,
+                          // 100: maximum possible degradation).
+  int emulate_jpeg_size;  // If true, compression parameters will be remapped
+                          // to better match the expected output size from
+                          // JPEG compression. Generally, the output size will
+                          // be similar but the degradation will be lower.
+  int thread_level;       // If non-zero, try and use multi-threaded encoding.
+  int low_memory;         // If set, reduce memory usage (but increase CPU use).
+
+  uint32_t pad[5];        // padding for later use
+};
+
+// Enumerate some predefined settings for WebPConfig, depending on the type
+// of source picture. These presets are used when calling WebPConfigPreset()
+// (see below).
+typedef enum WebPPreset {
+  WEBP_PRESET_DEFAULT = 0,  // default preset.
+  WEBP_PRESET_PICTURE,      // digital picture, like portrait, inner shot
+  WEBP_PRESET_PHOTO,        // outdoor photograph, with natural lighting
+  WEBP_PRESET_DRAWING,      // hand or line drawing, with high-contrast details
+  WEBP_PRESET_ICON,         // small-sized colorful images
+  WEBP_PRESET_TEXT          // text-like
+} WebPPreset;
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(int) WebPConfigInitInternal(WebPConfig*, WebPPreset, float, int);
+
+// Must always be called first, to initialize a fresh WebPConfig structure
+// before any modification; the 'config' object may only be used once this
+// has succeeded. Note that the defaults are lossless=0 and quality=75.
+// Returns false in case of version mismatch.
+static WEBP_INLINE int WebPConfigInit(WebPConfig* config) {
+  return WebPConfigInitInternal(config, WEBP_PRESET_DEFAULT,
+                                75.f, WEBP_ENCODER_ABI_VERSION);
+}
+
+// Alternative to WebPConfigInit(): initializes the configuration from one of
+// the predefined parameter sets (referred to by 'preset') together with a
+// quality factor. Returns false in case of error.
+static WEBP_INLINE int WebPConfigPreset(WebPConfig* config,
+                                        WebPPreset preset, float quality) {
+  return WebPConfigInitInternal(
+      config, preset, quality, WEBP_ENCODER_ABI_VERSION);
+}
+
+#if WEBP_ENCODER_ABI_VERSION > 0x0202
+// Activate the lossless compression mode with the desired efficiency level
+// between 0 (fastest, lowest compression) and 9 (slower, best compression).
+// A good default level is '6', providing a fair tradeoff between compression
+// speed and final compressed size.
+// This function will overwrite several fields from config: 'method', 'quality'
+// and 'lossless'. Returns false in case of parameter error.
+WEBP_EXTERN(int) WebPConfigLosslessPreset(WebPConfig* config, int level);
+#endif
+
+// Returns true if 'config' is non-NULL and all configuration parameters are
+// within their valid ranges.
+WEBP_EXTERN(int) WebPValidateConfig(const WebPConfig* config);
+
+//------------------------------------------------------------------------------
+// Input / Output
+// Structure for storing auxiliary statistics (mostly for lossy encoding).
+
+// Filled in by the encoder when WebPPicture::stats is non-NULL (see below).
+struct WebPAuxStats {
+  int coded_size;         // final size
+
+  float PSNR[5];          // peak-signal-to-noise ratio for Y/U/V/All/Alpha
+  int block_count[3];     // number of intra4/intra16/skipped macroblocks
+  int header_bytes[2];    // approximate number of bytes spent for header
+                          // and mode-partition #0
+  int residual_bytes[3][4];  // approximate number of bytes spent for
+                             // DC/AC/uv coefficients for each (0..3) segments.
+  int segment_size[4];    // number of macroblocks in each segments
+  int segment_quant[4];   // quantizer values for each segments
+  int segment_level[4];   // filtering strength for each segments [0..63]
+
+  int alpha_data_size;    // size of the transparency data
+  int layer_data_size;    // size of the enhancement layer data
+
+  // lossless encoder statistics
+  uint32_t lossless_features;  // bit0:predictor bit1:cross-color transform
+                               // bit2:subtract-green bit3:color indexing
+  int histogram_bits;          // number of precision bits of histogram
+  int transform_bits;          // precision bits for transform
+  int cache_bits;              // number of bits for color cache lookup
+  int palette_size;            // number of color in palette, if used
+  int lossless_size;           // final lossless size
+
+  uint32_t pad[4];        // padding for later use
+};
+
+// Signature for output function. Should return true if writing was successful.
+// data/data_size is the segment of data to write, and 'picture' is for
+// reference (and so one can make use of picture->custom_ptr).
+typedef int (*WebPWriterFunction)(const uint8_t* data, size_t data_size,
+                                  const WebPPicture* picture);
+
+// WebPMemoryWrite: a special WebPWriterFunction that writes to memory using
+// the following WebPMemoryWriter object (to be set as a custom_ptr).
+// Must be initialized with WebPMemoryWriterInit() before use (see below).
+struct WebPMemoryWriter {
+  uint8_t* mem;       // final buffer (of size 'max_size', larger than 'size').
+  size_t   size;      // final size
+  size_t   max_size;  // total capacity
+  uint32_t pad[1];    // padding for later use
+};
+
+// The following must be called first before any use.
+WEBP_EXTERN(void) WebPMemoryWriterInit(WebPMemoryWriter* writer);
+
+#if WEBP_ENCODER_ABI_VERSION > 0x0203
+// The following must be called to deallocate writer->mem memory. The 'writer'
+// object itself is not deallocated.
+WEBP_EXTERN(void) WebPMemoryWriterClear(WebPMemoryWriter* writer);
+#endif
+// The custom writer to be used with WebPMemoryWriter as custom_ptr. Upon
+// completion, writer.mem and writer.size will hold the coded data.
+#if WEBP_ENCODER_ABI_VERSION > 0x0203
+// writer.mem must be freed by calling WebPMemoryWriterClear.
+#else
+// writer.mem must be freed by calling 'free(writer.mem)'.
+#endif
+WEBP_EXTERN(int) WebPMemoryWrite(const uint8_t* data, size_t data_size,
+                                 const WebPPicture* picture);
+
+// Progress hook, called from time to time to report progress. It can return
+// false to request an abort of the encoding process, or true otherwise if
+// everything is OK.
+typedef int (*WebPProgressHook)(int percent, const WebPPicture* picture);
+
+// Color spaces.
+typedef enum WebPEncCSP {
+  // chroma sampling
+  WEBP_YUV420  = 0,        // 4:2:0
+  WEBP_YUV420A = 4,        // alpha channel variant (WEBP_YUV420 with
+                           // WEBP_CSP_ALPHA_BIT set)
+  WEBP_CSP_UV_MASK = 3,    // bit-mask to get the UV sampling factors
+  WEBP_CSP_ALPHA_BIT = 4   // bit that is set if alpha is present
+} WebPEncCSP;
+
+// Encoding error conditions.
+// (Reported through the 'error_code' field of WebPPicture, see below.)
+typedef enum WebPEncodingError {
+  VP8_ENC_OK = 0,
+  VP8_ENC_ERROR_OUT_OF_MEMORY,            // memory error allocating objects
+  VP8_ENC_ERROR_BITSTREAM_OUT_OF_MEMORY,  // memory error while flushing bits
+  VP8_ENC_ERROR_NULL_PARAMETER,           // a pointer parameter is NULL
+  VP8_ENC_ERROR_INVALID_CONFIGURATION,    // configuration is invalid
+  VP8_ENC_ERROR_BAD_DIMENSION,            // picture has invalid width/height
+  VP8_ENC_ERROR_PARTITION0_OVERFLOW,      // partition is bigger than 512k
+  VP8_ENC_ERROR_PARTITION_OVERFLOW,       // partition is bigger than 16M
+  VP8_ENC_ERROR_BAD_WRITE,                // error while flushing bytes
+  VP8_ENC_ERROR_FILE_TOO_BIG,             // file is bigger than 4G
+  VP8_ENC_ERROR_USER_ABORT,               // abort request by user
+  VP8_ENC_ERROR_LAST                      // list terminator. always last.
+} WebPEncodingError;
+
+// maximum width/height allowed (inclusive), in pixels
+#define WEBP_MAX_DIMENSION 16383
+
+// Main exchange structure (input samples, output bytes, statistics)
+// Must be initialized with WebPPictureInit() before use (see below).
+struct WebPPicture {
+  //   INPUT
+  //////////////
+  // Main flag for encoder selecting between ARGB or YUV input.
+  // It is recommended to use ARGB input (*argb, argb_stride) for lossless
+  // compression, and YUV input (*y, *u, *v, etc.) for lossy compression
+  // since these are the respective native colorspace for these formats.
+  int use_argb;              // if true, the ARGB fields below are the input;
+                             // otherwise the YUV fields are used.
+
+  // YUV input (mostly used for input to lossy compression)
+  WebPEncCSP colorspace;     // colorspace: should be YUV420 for now (=Y'CbCr).
+  int width, height;         // dimensions (less or equal to WEBP_MAX_DIMENSION)
+  uint8_t *y, *u, *v;        // pointers to luma/chroma planes.
+  int y_stride, uv_stride;   // luma/chroma strides.
+  uint8_t* a;                // pointer to the alpha plane
+  int a_stride;              // stride of the alpha plane
+  uint32_t pad1[2];          // padding for later use
+
+  // ARGB input (mostly used for input to lossless compression)
+  uint32_t* argb;            // Pointer to argb (32 bit) plane.
+  int argb_stride;           // This is stride in pixels units, not bytes.
+  uint32_t pad2[3];          // padding for later use
+
+  //   OUTPUT
+  ///////////////
+  // Byte-emission hook, to store compressed bytes as they are ready.
+  WebPWriterFunction writer;  // can be NULL
+  void* custom_ptr;           // can be used by the writer.
+
+  // map for extra information (only for lossy compression mode)
+  int extra_info_type;    // 1: intra type, 2: segment, 3: quant
+                          // 4: intra-16 prediction mode,
+                          // 5: chroma prediction mode,
+                          // 6: bit cost, 7: distortion
+  uint8_t* extra_info;    // if not NULL, points to an array of size
+                          // ((width + 15) / 16) * ((height + 15) / 16) that
+                          // will be filled with a macroblock map, depending
+                          // on extra_info_type.
+
+  //   STATS AND REPORTS
+  ///////////////////////////
+  // Pointer to side statistics (updated only if not NULL)
+  WebPAuxStats* stats;
+
+  // Error code for the latest error encountered during encoding
+  WebPEncodingError error_code;
+
+  // If not NULL, report progress during encoding.
+  WebPProgressHook progress_hook;
+
+  void* user_data;        // this field is free to be set to any value and
+                          // used during callbacks (like progress-report e.g.).
+
+  uint32_t pad3[3];       // padding for later use
+
+  // Unused for now
+  uint8_t *pad4, *pad5;
+  uint32_t pad6[8];       // padding for later use
+
+  // PRIVATE FIELDS
+  ////////////////////
+  void* memory_;          // row chunk of memory for yuva planes
+  void* memory_argb_;     // and for argb too.
+  void* pad7[2];          // padding for later use
+};
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(int) WebPPictureInitInternal(WebPPicture*, int);
+
+// Should always be called, to initialize the structure. Returns false in case
+// of version mismatch. WebPPictureInit() must have succeeded before using the
+// 'picture' object.
+// Note that, by default, use_argb is false and colorspace is WEBP_YUV420.
+static WEBP_INLINE int WebPPictureInit(WebPPicture* picture) {
+  return WebPPictureInitInternal(picture, WEBP_ENCODER_ABI_VERSION);
+}
+
+//------------------------------------------------------------------------------
+// WebPPicture utils
+
+// Convenience allocation / deallocation based on picture->width/height:
+// Allocate y/u/v buffers as per colorspace/width/height specification.
+// Note! This function will free the previous buffer if needed.
+// Returns false in case of memory error.
+WEBP_EXTERN(int) WebPPictureAlloc(WebPPicture* picture);
+
+// Release the memory allocated by WebPPictureAlloc() or WebPPictureImport*().
+// Note that this function does _not_ free the memory used by the 'picture'
+// object itself.
+// Besides memory (which is reclaimed) all other fields of 'picture' are
+// preserved.
+WEBP_EXTERN(void) WebPPictureFree(WebPPicture* picture);
+
+// Copy the pixels of *src into *dst, using WebPPictureAlloc. Upon return, *dst
+// will fully own the copied pixels (this is not a view). The 'dst' picture need
+// not be initialized as its content is overwritten.
+// Returns false in case of memory allocation error.
+WEBP_EXTERN(int) WebPPictureCopy(const WebPPicture* src, WebPPicture* dst);
+
+// Compute PSNR, SSIM or LSIM distortion metric between two pictures.
+// Result is in dB, stored in result[] in the Y/U/V/Alpha/All order.
+// Returns false in case of error (src and ref don't have same dimension, ...)
+// Warning: this function is rather CPU-intensive.
+WEBP_EXTERN(int) WebPPictureDistortion(
+    const WebPPicture* src, const WebPPicture* ref,
+    int metric_type,           // 0 = PSNR, 1 = SSIM, 2 = LSIM
+    float result[5]);
+
+// self-crops a picture to the rectangle defined by top/left/width/height.
+// Returns false in case of memory allocation error, or if the rectangle is
+// outside of the source picture.
+// The rectangle for the view is defined by the top-left corner pixel
+// coordinates (left, top) as well as its width and height. This rectangle
+// must be fully comprised inside the 'src' source picture. If the source
+// picture uses the YUV420 colorspace, the top and left coordinates will be
+// snapped to even values.
+WEBP_EXTERN(int) WebPPictureCrop(WebPPicture* picture,
+                                 int left, int top, int width, int height);
+
+// Extracts a view from 'src' picture into 'dst'. The rectangle for the view
+// is defined by the top-left corner pixel coordinates (left, top) as well
+// as its width and height. This rectangle must be fully comprised inside
+// the 'src' source picture. If the source picture uses the YUV420 colorspace,
+// the top and left coordinates will be snapped to even values.
+// Picture 'src' must out-live 'dst' picture. Self-extraction of view is allowed
+// ('src' equal to 'dst') as a means of fast-cropping (but note that doing so,
+// the original dimension will be lost). Picture 'dst' need not be initialized
+// with WebPPictureInit() if it is different from 'src', since its content will
+// be overwritten.
+// Returns false in case of memory allocation error or invalid parameters.
+WEBP_EXTERN(int) WebPPictureView(const WebPPicture* src,
+                                 int left, int top, int width, int height,
+                                 WebPPicture* dst);
+
+// Returns true if the 'picture' is actually a view and therefore does
+// not own the memory for pixels.
+WEBP_EXTERN(int) WebPPictureIsView(const WebPPicture* picture);
+
+// Rescale a picture to new dimension width x height.
+// If either 'width' or 'height' (but not both) is 0 the corresponding
+// dimension will be calculated preserving the aspect ratio.
+// No gamma correction is applied.
+// Returns false in case of error (invalid parameter or insufficient memory).
+WEBP_EXTERN(int) WebPPictureRescale(WebPPicture* pic, int width, int height);
+
+// Colorspace conversion function to import RGB samples.
+// Previous buffer will be free'd, if any.
+// *rgb buffer should have a size of at least height * rgb_stride.
+// Returns false in case of memory error.
+WEBP_EXTERN(int) WebPPictureImportRGB(
+    WebPPicture* picture, const uint8_t* rgb, int rgb_stride);
+// Same, but for RGBA buffer.
+WEBP_EXTERN(int) WebPPictureImportRGBA(
+    WebPPicture* picture, const uint8_t* rgba, int rgba_stride);
+// Same, but for RGBA buffer. Imports the RGB direct from the 32-bit format
+// input buffer ignoring the alpha channel. Avoids needing to copy the data
+// to a temporary 24-bit RGB buffer to import the RGB only.
+WEBP_EXTERN(int) WebPPictureImportRGBX(
+    WebPPicture* picture, const uint8_t* rgbx, int rgbx_stride);
+
+// Variants of the above, but taking BGR(A|X) input.
+WEBP_EXTERN(int) WebPPictureImportBGR(
+    WebPPicture* picture, const uint8_t* bgr, int bgr_stride);
+WEBP_EXTERN(int) WebPPictureImportBGRA(
+    WebPPicture* picture, const uint8_t* bgra, int bgra_stride);
+WEBP_EXTERN(int) WebPPictureImportBGRX(
+    WebPPicture* picture, const uint8_t* bgrx, int bgrx_stride);
+
+// Converts picture->argb data to the YUV420A format. The 'colorspace'
+// parameter is deprecated and should be equal to WEBP_YUV420.
+// Upon return, picture->use_argb is set to false. The presence of real
+// non-opaque transparent values is detected, and 'colorspace' will be
+// adjusted accordingly. Note that this method is lossy.
+// Returns false in case of error.
+WEBP_EXTERN(int) WebPPictureARGBToYUVA(WebPPicture* picture,
+                                       WebPEncCSP /*colorspace = WEBP_YUV420*/);
+
+// Same as WebPPictureARGBToYUVA(), but the conversion is done using
+// pseudo-random dithering with a strength 'dithering' between
+// 0.0 (no dithering) and 1.0 (maximum dithering). This is useful
+// for photographic pictures.
+WEBP_EXTERN(int) WebPPictureARGBToYUVADithered(
+    WebPPicture* picture, WebPEncCSP colorspace, float dithering);
+
+#if WEBP_ENCODER_ABI_VERSION > 0x0204
+// Performs 'smart' RGBA->YUVA420 downsampling and colorspace conversion.
+// Downsampling is handled with extra care in case of color clipping. This
+// method is roughly 2x slower than WebPPictureARGBToYUVA() but produces better
+// YUV representation.
+// Returns false in case of error.
+WEBP_EXTERN(int) WebPPictureSmartARGBToYUVA(WebPPicture* picture);
+#endif
+
+// Converts picture->yuv to picture->argb and sets picture->use_argb to true.
+// The input format must be YUV_420 or YUV_420A.
+// Note that the use of this method is discouraged if one has access to the
+// raw ARGB samples, since using YUV420 is comparatively lossy. Also, the
+// conversion from YUV420 to ARGB incurs a small loss too.
+// Returns false in case of error.
+WEBP_EXTERN(int) WebPPictureYUVAToARGB(WebPPicture* picture);
+
+// Helper function: given a width x height plane of RGBA or YUV(A) samples
+// clean-up the YUV or RGB samples under fully transparent area, to help
+// compressibility (no guarantee, though).
+WEBP_EXTERN(void) WebPCleanupTransparentArea(WebPPicture* picture);
+
+// Scan the picture 'picture' for the presence of non fully opaque alpha values.
+// Returns true in such case. Otherwise returns false (indicating that the
+// alpha plane can be ignored altogether e.g.).
+WEBP_EXTERN(int) WebPPictureHasTransparency(const WebPPicture* picture);
+
+// Remove the transparency information (if present) by blending the color with
+// the background color 'background_rgb' (specified as 24bit RGB triplet).
+// After this call, all alpha values are reset to 0xff.
+WEBP_EXTERN(void) WebPBlendAlpha(WebPPicture* pic, uint32_t background_rgb);
+
+//------------------------------------------------------------------------------
+// Main call
+
+// Main encoding call, after config and picture have been initialized.
+// 'picture' must be less than 16384x16384 in dimension (cf WEBP_MAX_DIMENSION),
+// and the 'config' object must be a valid one.
+// Returns false in case of error, true otherwise.
+// In case of error, picture->error_code is updated accordingly.
+// 'picture' can hold the source samples in both YUV(A) or ARGB input, depending
+// on the value of 'picture->use_argb'. It is highly recommended to use
+// the former for lossy encoding, and the latter for lossless encoding
+// (when config.lossless is true). Automatic conversion from one format to
+// another is provided but they both incur some loss.
+WEBP_EXTERN(int) WebPEncode(const WebPConfig* config, WebPPicture* picture);
+
+//------------------------------------------------------------------------------
+
+#ifdef __cplusplus
+}    // extern "C"
+#endif
+
+#endif  /* WEBP_WEBP_ENCODE_H_ */

+ 88 - 0
Vendor/WebP.framework/Headers/format_constants.h

@@ -0,0 +1,88 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+//  Internal header for constants related to WebP file format.
+//
+// Author: Urvang (urvang@google.com)
+
+#ifndef WEBP_WEBP_FORMAT_CONSTANTS_H_
+#define WEBP_WEBP_FORMAT_CONSTANTS_H_
+
+// Create fourcc of the chunk from the chunk tag characters.
+#define MKFOURCC(a, b, c, d) ((uint32_t)(a) | (b) << 8 | (c) << 16 | (d) << 24)
+
+// VP8 related constants.
+#define VP8_SIGNATURE 0x9d012a              // Signature in VP8 data.
+#define VP8_MAX_PARTITION0_SIZE (1 << 19)   // max size of mode partition
+#define VP8_MAX_PARTITION_SIZE  (1 << 24)   // max size for token partition
+#define VP8_FRAME_HEADER_SIZE 10  // Size of the frame header within VP8 data.
+
+// VP8L related constants.
+#define VP8L_SIGNATURE_SIZE          1      // VP8L signature size.
+#define VP8L_MAGIC_BYTE              0x2f   // VP8L signature byte.
+#define VP8L_IMAGE_SIZE_BITS         14     // Number of bits used to store
+                                            // width and height.
+#define VP8L_VERSION_BITS            3      // 3 bits reserved for version.
+#define VP8L_VERSION                 0      // version 0
+#define VP8L_FRAME_HEADER_SIZE       5      // Size of the VP8L frame header.
+
+#define MAX_PALETTE_SIZE             256
+#define MAX_CACHE_BITS               11
+#define HUFFMAN_CODES_PER_META_CODE  5
+#define ARGB_BLACK                   0xff000000
+
+#define DEFAULT_CODE_LENGTH          8
+#define MAX_ALLOWED_CODE_LENGTH      15
+
+#define NUM_LITERAL_CODES            256
+#define NUM_LENGTH_CODES             24
+#define NUM_DISTANCE_CODES           40
+#define CODE_LENGTH_CODES            19
+
+#define MIN_HUFFMAN_BITS             2  // min number of Huffman bits
+#define MAX_HUFFMAN_BITS             9  // max number of Huffman bits
+
+#define TRANSFORM_PRESENT            1  // The bit to be written when next data
+                                        // to be read is a transform.
+#define NUM_TRANSFORMS               4  // Maximum number of allowed transform
+                                        // in a bitstream.
+typedef enum {
+  PREDICTOR_TRANSFORM      = 0,
+  CROSS_COLOR_TRANSFORM    = 1,
+  SUBTRACT_GREEN           = 2,
+  COLOR_INDEXING_TRANSFORM = 3
+} VP8LImageTransformType;
+
+// Alpha related constants.
+#define ALPHA_HEADER_LEN            1
+#define ALPHA_NO_COMPRESSION        0
+#define ALPHA_LOSSLESS_COMPRESSION  1
+#define ALPHA_PREPROCESSED_LEVELS   1
+
+// Mux related constants.
+#define TAG_SIZE           4     // Size of a chunk tag (e.g. "VP8L").
+#define CHUNK_SIZE_BYTES   4     // Size needed to store chunk's size.
+#define CHUNK_HEADER_SIZE  8     // Size of a chunk header.
+#define RIFF_HEADER_SIZE   12    // Size of the RIFF header ("RIFFnnnnWEBP").
+#define ANMF_CHUNK_SIZE    16    // Size of an ANMF chunk.
+#define ANIM_CHUNK_SIZE    6     // Size of an ANIM chunk.
+#define FRGM_CHUNK_SIZE    6     // Size of a FRGM chunk.
+#define VP8X_CHUNK_SIZE    10    // Size of a VP8X chunk.
+
+#define MAX_CANVAS_SIZE     (1 << 24)     // 24-bit max for VP8X width/height.
+#define MAX_IMAGE_AREA      (1ULL << 32)  // 32-bit max for width x height.
+#define MAX_LOOP_COUNT      (1 << 16)     // maximum value for loop-count
+#define MAX_DURATION        (1 << 24)     // maximum duration
+#define MAX_POSITION_OFFSET (1 << 24)     // maximum frame/fragment x/y offset
+
+// Maximum chunk payload is such that adding the header and padding won't
+// overflow a uint32_t.
+#define MAX_CHUNK_PAYLOAD (~0U - CHUNK_HEADER_SIZE - 1)
+
+#endif  /* WEBP_WEBP_FORMAT_CONSTANTS_H_ */

+ 399 - 0
Vendor/WebP.framework/Headers/mux.h

@@ -0,0 +1,399 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+//  RIFF container manipulation for WebP images.
+//
+// Authors: Urvang (urvang@google.com)
+//          Vikas (vikasa@google.com)
+
+// This API allows manipulation of WebP container images containing features
+// like color profile, metadata, animation and fragmented images.
+//
+// Code Example#1: Create a WebPMux object with image data, color profile and
+// XMP metadata.
+/*
+  int copy_data = 0;
+  WebPMux* mux = WebPMuxNew();
+  // ... (Prepare image data).
+  WebPMuxSetImage(mux, &image, copy_data);
+  // ... (Prepare ICCP color profile data).
+  WebPMuxSetChunk(mux, "ICCP", &icc_profile, copy_data);
+  // ... (Prepare XMP metadata).
+  WebPMuxSetChunk(mux, "XMP ", &xmp, copy_data);
+  // Get data from mux in WebP RIFF format.
+  WebPMuxAssemble(mux, &output_data);
+  WebPMuxDelete(mux);
+  // ... (Consume output_data; e.g. write output_data.bytes to file).
+  WebPDataClear(&output_data);
+*/
+
+// Code Example#2: Get image and color profile data from a WebP file.
+/*
+  int copy_data = 0;
+  // ... (Read data from file).
+  WebPMux* mux = WebPMuxCreate(&data, copy_data);
+  WebPMuxGetFrame(mux, 1, &image);
+  // ... (Consume image; e.g. call WebPDecode() to decode the data).
+  WebPMuxGetChunk(mux, "ICCP", &icc_profile);
+  // ... (Consume icc_data).
+  WebPMuxDelete(mux);
+  free(data);
+*/
+
+#ifndef WEBP_WEBP_MUX_H_
+#define WEBP_WEBP_MUX_H_
+
+#include "./mux_types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBP_MUX_ABI_VERSION 0x0101        // MAJOR(8b) + MINOR(8b)
+
+// Note: forward declaring enumerations is not allowed in (strict) C and C++,
+// the types are left here for reference.
+// typedef enum WebPMuxError WebPMuxError;
+// typedef enum WebPChunkId WebPChunkId;
+typedef struct WebPMux WebPMux;   // main opaque object.
+typedef struct WebPMuxFrameInfo WebPMuxFrameInfo;
+typedef struct WebPMuxAnimParams WebPMuxAnimParams;
+
+// Error codes
+typedef enum WebPMuxError {
+  WEBP_MUX_OK                 =  1,
+  WEBP_MUX_NOT_FOUND          =  0,
+  WEBP_MUX_INVALID_ARGUMENT   = -1,
+  WEBP_MUX_BAD_DATA           = -2,
+  WEBP_MUX_MEMORY_ERROR       = -3,
+  WEBP_MUX_NOT_ENOUGH_DATA    = -4
+} WebPMuxError;
+
+// IDs for different types of chunks.
+typedef enum WebPChunkId {
+  WEBP_CHUNK_VP8X,     // VP8X
+  WEBP_CHUNK_ICCP,     // ICCP
+  WEBP_CHUNK_ANIM,     // ANIM
+  WEBP_CHUNK_ANMF,     // ANMF
+  WEBP_CHUNK_FRGM,     // FRGM
+  WEBP_CHUNK_ALPHA,    // ALPH
+  WEBP_CHUNK_IMAGE,    // VP8/VP8L
+  WEBP_CHUNK_EXIF,     // EXIF
+  WEBP_CHUNK_XMP,      // XMP
+  WEBP_CHUNK_UNKNOWN,  // Other chunks.
+  WEBP_CHUNK_NIL
+} WebPChunkId;
+
+//------------------------------------------------------------------------------
+
+// Returns the version number of the mux library, packed in hexadecimal using
+// 8bits for each of major/minor/revision. E.g: v2.5.7 is 0x020507.
+WEBP_EXTERN(int) WebPGetMuxVersion(void);
+
+//------------------------------------------------------------------------------
+// Life of a Mux object
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(WebPMux*) WebPNewInternal(int);
+
+// Creates an empty mux object.
+// Returns:
+//   A pointer to the newly created empty mux object.
+//   Or NULL in case of memory error.
+static WEBP_INLINE WebPMux* WebPMuxNew(void) {
+  return WebPNewInternal(WEBP_MUX_ABI_VERSION);
+}
+
+// Deletes the mux object.
+// Parameters:
+//   mux - (in/out) object to be deleted
+WEBP_EXTERN(void) WebPMuxDelete(WebPMux* mux);
+
+//------------------------------------------------------------------------------
+// Mux creation.
+
+// Internal, version-checked, entry point
+WEBP_EXTERN(WebPMux*) WebPMuxCreateInternal(const WebPData*, int, int);
+
+// Creates a mux object from raw data given in WebP RIFF format.
+// Parameters:
+//   bitstream - (in) the bitstream data in WebP RIFF format
+//   copy_data - (in) value 1 indicates given data WILL be copied to the mux
+//               object and value 0 indicates data will NOT be copied.
+// Returns:
+//   A pointer to the mux object created from given data - on success.
+//   NULL - In case of invalid data or memory error.
+static WEBP_INLINE WebPMux* WebPMuxCreate(const WebPData* bitstream,
+                                          int copy_data) {
+  return WebPMuxCreateInternal(bitstream, copy_data, WEBP_MUX_ABI_VERSION);
+}
+
+//------------------------------------------------------------------------------
+// Non-image chunks.
+
+// Note: Only non-image related chunks should be managed through chunk APIs.
+// (Image related chunks are: "ANMF", "FRGM", "VP8 ", "VP8L" and "ALPH").
+// To add, get and delete images, use WebPMuxSetImage(), WebPMuxPushFrame(),
+// WebPMuxGetFrame() and WebPMuxDeleteFrame().
+
+// Adds a chunk with id 'fourcc' and data 'chunk_data' in the mux object.
+// Any existing chunk(s) with the same id will be removed.
+// Parameters:
+//   mux - (in/out) object to which the chunk is to be added
+//   fourcc - (in) a character array containing the fourcc of the given chunk;
+//                 e.g., "ICCP", "XMP ", "EXIF" etc.
+//   chunk_data - (in) the chunk data to be added
+//   copy_data - (in) value 1 indicates given data WILL be copied to the mux
+//               object and value 0 indicates data will NOT be copied.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL
+//                               or if fourcc corresponds to an image chunk.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxSetChunk(
+    WebPMux* mux, const char fourcc[4], const WebPData* chunk_data,
+    int copy_data);
+
+// Gets a reference to the data of the chunk with id 'fourcc' in the mux object.
+// The caller should NOT free the returned data.
+// Parameters:
+//   mux - (in) object from which the chunk data is to be fetched
+//   fourcc - (in) a character array containing the fourcc of the chunk;
+//                 e.g., "ICCP", "XMP ", "EXIF" etc.
+//   chunk_data - (out) returned chunk data
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux, fourcc or chunk_data is NULL
+//                               or if fourcc corresponds to an image chunk.
+//   WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given id.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxGetChunk(
+    const WebPMux* mux, const char fourcc[4], WebPData* chunk_data);
+
+// Deletes the chunk with the given 'fourcc' from the mux object.
+// Parameters:
+//   mux - (in/out) object from which the chunk is to be deleted
+//   fourcc - (in) a character array containing the fourcc of the chunk;
+//                 e.g., "ICCP", "XMP ", "EXIF" etc.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or fourcc is NULL
+//                               or if fourcc corresponds to an image chunk.
+//   WEBP_MUX_NOT_FOUND - If mux does not contain a chunk with the given fourcc.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxDeleteChunk(
+    WebPMux* mux, const char fourcc[4]);
+
+//------------------------------------------------------------------------------
+// Images.
+
+// Encapsulates data about a single frame/fragment.
+struct WebPMuxFrameInfo {
+  WebPData    bitstream;  // image data: can be a raw VP8/VP8L bitstream
+                          // or a single-image WebP file.
+  int         x_offset;   // x-offset of the frame.
+  int         y_offset;   // y-offset of the frame.
+  int         duration;   // duration of the frame (in milliseconds).
+
+  WebPChunkId id;         // frame type: should be one of WEBP_CHUNK_ANMF,
+                          // WEBP_CHUNK_FRGM or WEBP_CHUNK_IMAGE
+  WebPMuxAnimDispose dispose_method;  // Disposal method for the frame.
+  WebPMuxAnimBlend   blend_method;    // Blend operation for the frame.
+  uint32_t    pad[1];     // padding for later use
+};
+
+// Sets the (non-animated and non-fragmented) image in the mux object.
+// Note: Any existing images (including frames/fragments) will be removed.
+// Parameters:
+//   mux - (in/out) object in which the image is to be set
+//   bitstream - (in) can be a raw VP8/VP8L bitstream or a single-image
+//               WebP file (non-animated and non-fragmented)
+//   copy_data - (in) value 1 indicates given data WILL be copied to the mux
+//               object and value 0 indicates data will NOT be copied.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux is NULL or bitstream is NULL.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxSetImage(
+    WebPMux* mux, const WebPData* bitstream, int copy_data);
+
+// Adds a frame at the end of the mux object.
+// Notes: (1) frame.id should be one of WEBP_CHUNK_ANMF or WEBP_CHUNK_FRGM
+//        (2) For setting a non-animated non-fragmented image, use
+//            WebPMuxSetImage() instead.
+//        (3) Type of frame being pushed must be same as the frames in mux.
+//        (4) As WebP only supports even offsets, any odd offset will be snapped
+//            to an even location using: offset &= ~1
+// Parameters:
+//   mux - (in/out) object to which the frame is to be added
+//   frame - (in) frame data.
+//   copy_data - (in) value 1 indicates given data WILL be copied to the mux
+//               object and value 0 indicates data will NOT be copied.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL
+//                               or if content of 'frame' is invalid.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxPushFrame(
+    WebPMux* mux, const WebPMuxFrameInfo* frame, int copy_data);
+
+// Gets the nth frame from the mux object.
+// The content of 'frame->bitstream' is allocated using malloc(), and NOT
+// owned by the 'mux' object. It MUST be deallocated by the caller by calling
+// WebPDataClear().
+// nth=0 has a special meaning - last position.
+// Parameters:
+//   mux - (in) object from which the info is to be fetched
+//   nth - (in) index of the frame in the mux object
+//   frame - (out) data of the returned frame
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or frame is NULL.
+//   WEBP_MUX_NOT_FOUND - if there are less than nth frames in the mux object.
+//   WEBP_MUX_BAD_DATA - if nth frame chunk in mux is invalid.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxGetFrame(
+    const WebPMux* mux, uint32_t nth, WebPMuxFrameInfo* frame);
+
+// Deletes a frame from the mux object.
+// nth=0 has a special meaning - last position.
+// Parameters:
+//   mux - (in/out) object from which a frame is to be deleted
+//   nth - (in) The position from which the frame is to be deleted
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux is NULL.
+//   WEBP_MUX_NOT_FOUND - If there are less than nth frames in the mux object
+//                        before deletion.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxDeleteFrame(WebPMux* mux, uint32_t nth);
+
+//------------------------------------------------------------------------------
+// Animation.
+
+// Animation parameters.
+struct WebPMuxAnimParams {
+  uint32_t bgcolor;  // Background color of the canvas stored (in MSB order) as:
+                     // Bits 00 to 07: Alpha.
+                     // Bits 08 to 15: Red.
+                     // Bits 16 to 23: Green.
+                     // Bits 24 to 31: Blue.
+  int loop_count;    // Number of times to repeat the animation [0 = infinite].
+};
+
+// Sets the animation parameters in the mux object. Any existing ANIM chunks
+// will be removed.
+// Parameters:
+//   mux - (in/out) object in which ANIM chunk is to be set/added
+//   params - (in) animation parameters.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxSetAnimationParams(
+    WebPMux* mux, const WebPMuxAnimParams* params);
+
+// Gets the animation parameters from the mux object.
+// Parameters:
+//   mux - (in) object from which the animation parameters to be fetched
+//   params - (out) animation parameters extracted from the ANIM chunk
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or params is NULL.
+//   WEBP_MUX_NOT_FOUND - if ANIM chunk is not present in mux object.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxGetAnimationParams(
+    const WebPMux* mux, WebPMuxAnimParams* params);
+
+//------------------------------------------------------------------------------
+// Misc Utilities.
+
+#if WEBP_MUX_ABI_VERSION > 0x0101
+// Sets the canvas size for the mux object. The width and height can be
+// specified explicitly or left as zero (0, 0).
+// * When width and height are specified explicitly, then this frame bound is
+//   enforced during subsequent calls to WebPMuxAssemble() and an error is
+//   reported if any animated frame does not completely fit within the canvas.
+// * When unspecified (0, 0), the constructed canvas will get the frame bounds
+//   from the bounding-box over all frames after calling WebPMuxAssemble().
+// Parameters:
+//   mux - (in) object to which the canvas size is to be set
+//   width - (in) canvas width
+//   height - (in) canvas height
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux is NULL; or
+//                               width or height are invalid or out of bounds
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxSetCanvasSize(WebPMux* mux,
+                                               int width, int height);
+#endif
+
+// Gets the canvas size from the mux object.
+// Note: This method assumes that the VP8X chunk, if present, is up-to-date.
+// That is, the mux object hasn't been modified since the last call to
+// WebPMuxAssemble() or WebPMuxCreate().
+// Parameters:
+//   mux - (in) object from which the canvas size is to be fetched
+//   width - (out) canvas width
+//   height - (out) canvas height
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux, width or height is NULL.
+//   WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxGetCanvasSize(const WebPMux* mux,
+                                               int* width, int* height);
+
+// Gets the feature flags from the mux object.
+// Note: This method assumes that the VP8X chunk, if present, is up-to-date.
+// That is, the mux object hasn't been modified since the last call to
+// WebPMuxAssemble() or WebPMuxCreate().
+// Parameters:
+//   mux - (in) object from which the features are to be fetched
+//   flags - (out) the flags specifying which features are present in the
+//           mux object. This will be an OR of various flag values.
+//           Enum 'WebPFeatureFlags' can be used to test individual flag values.
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or flags is NULL.
+//   WEBP_MUX_BAD_DATA - if VP8X/VP8/VP8L chunk or canvas size is invalid.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxGetFeatures(const WebPMux* mux,
+                                             uint32_t* flags);
+
+// Gets number of chunks with the given 'id' in the mux object.
+// Parameters:
+//   mux - (in) object from which the info is to be fetched
+//   id - (in) chunk id specifying the type of chunk
+//   num_elements - (out) number of chunks with the given chunk id
+// Returns:
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or num_elements is NULL.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxNumChunks(const WebPMux* mux,
+                                           WebPChunkId id, int* num_elements);
+
+// Assembles all chunks in WebP RIFF format and returns in 'assembled_data'.
+// This function also validates the mux object.
+// Note: The content of 'assembled_data' will be ignored and overwritten.
+// Also, the content of 'assembled_data' is allocated using malloc(), and NOT
+// owned by the 'mux' object. It MUST be deallocated by the caller by calling
+// WebPDataClear(). It's always safe to call WebPDataClear() upon return,
+// even in case of error.
+// Parameters:
+//   mux - (in/out) object whose chunks are to be assembled
+//   assembled_data - (out) assembled WebP data
+// Returns:
+//   WEBP_MUX_BAD_DATA - if mux object is invalid.
+//   WEBP_MUX_INVALID_ARGUMENT - if mux or assembled_data is NULL.
+//   WEBP_MUX_MEMORY_ERROR - on memory allocation error.
+//   WEBP_MUX_OK - on success.
+WEBP_EXTERN(WebPMuxError) WebPMuxAssemble(WebPMux* mux,
+                                          WebPData* assembled_data);
+
+//------------------------------------------------------------------------------
+
+#ifdef __cplusplus
+}    // extern "C"
+#endif
+
+#endif  /* WEBP_WEBP_MUX_H_ */

+ 97 - 0
Vendor/WebP.framework/Headers/mux_types.h

@@ -0,0 +1,97 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+// Data-types common to the mux and demux libraries.
+//
+// Author: Urvang (urvang@google.com)
+
+#ifndef WEBP_WEBP_MUX_TYPES_H_
+#define WEBP_WEBP_MUX_TYPES_H_
+
+#include <stdlib.h>  // free()
+#include <string.h>  // memset()
+#include "./types.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Note: forward declaring enumerations is not allowed in (strict) C and C++,
+// the types are left here for reference.
+// typedef enum WebPFeatureFlags WebPFeatureFlags;
+// typedef enum WebPMuxAnimDispose WebPMuxAnimDispose;
+// typedef enum WebPMuxAnimBlend WebPMuxAnimBlend;
+typedef struct WebPData WebPData;
+
+// VP8X Feature Flags.
+typedef enum WebPFeatureFlags {
+  FRAGMENTS_FLAG  = 0x00000001,
+  ANIMATION_FLAG  = 0x00000002,
+  XMP_FLAG        = 0x00000004,
+  EXIF_FLAG       = 0x00000008,
+  ALPHA_FLAG      = 0x00000010,
+  ICCP_FLAG       = 0x00000020
+} WebPFeatureFlags;
+
+// Dispose method (animation only). Indicates how the area used by the current
+// frame is to be treated before rendering the next frame on the canvas.
+typedef enum WebPMuxAnimDispose {
+  WEBP_MUX_DISPOSE_NONE,       // Do not dispose.
+  WEBP_MUX_DISPOSE_BACKGROUND  // Dispose to background color.
+} WebPMuxAnimDispose;
+
+// Blend operation (animation only). Indicates how transparent pixels of the
+// current frame are blended with those of the previous canvas.
+typedef enum WebPMuxAnimBlend {
+  WEBP_MUX_BLEND,              // Blend.
+  WEBP_MUX_NO_BLEND            // Do not blend.
+} WebPMuxAnimBlend;
+
+// Data type used to describe 'raw' data, e.g., chunk data
+// (ICC profile, metadata) and WebP compressed image data.
+struct WebPData {
+  const uint8_t* bytes;
+  size_t size;
+};
+
+// Initializes the contents of the 'webp_data' object with default values.
+// Safe to call with NULL (no-op). Zeroes both 'bytes' and 'size'.
+// (Vendored libwebp header; code intentionally kept identical to upstream.)
+static WEBP_INLINE void WebPDataInit(WebPData* webp_data) {
+  if (webp_data != NULL) {
+    memset(webp_data, 0, sizeof(*webp_data));
+  }
+}
+
+// Clears the contents of the 'webp_data' object by calling free(). Does not
+// deallocate the object itself.
+// Note: 'bytes' must have been allocated with malloc()/calloc() (or be NULL),
+// since it is released with free(). The struct is then reset to all-zero via
+// WebPDataInit(), so calling this twice is safe.
+static WEBP_INLINE void WebPDataClear(WebPData* webp_data) {
+  if (webp_data != NULL) {
+    free((void*)webp_data->bytes);
+    WebPDataInit(webp_data);
+  }
+}
+
+// Allocates necessary storage for 'dst' and copies the contents of 'src'.
+// Returns true on success.
+// 'dst' is zeroed first, so on failure it is left in a valid empty state and
+// WebPDataClear(dst) remains safe to call. An empty 'src' (NULL bytes or
+// size 0) copies as an empty 'dst' and still reports success.
+static WEBP_INLINE int WebPDataCopy(const WebPData* src, WebPData* dst) {
+  if (src == NULL || dst == NULL) return 0;
+  WebPDataInit(dst);
+  if (src->bytes != NULL && src->size != 0) {
+    dst->bytes = (uint8_t*)malloc(src->size);
+    if (dst->bytes == NULL) return 0;
+    memcpy((void*)dst->bytes, src->bytes, src->size);
+    dst->size = src->size;
+  }
+  return 1;
+}
+
+#ifdef __cplusplus
+}    // extern "C"
+#endif
+
+#endif  /* WEBP_WEBP_MUX_TYPES_H_ */

+ 48 - 0
Vendor/WebP.framework/Headers/types.h

@@ -0,0 +1,48 @@
+// Copyright 2010 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+//  Common types
+//
+// Author: Skal (pascal.massimino@gmail.com)
+
+#ifndef WEBP_WEBP_TYPES_H_
+#define WEBP_WEBP_TYPES_H_
+
+#include <stddef.h>  // for size_t
+
+#ifndef _MSC_VER
+#include <inttypes.h>
+#if defined(__cplusplus) || !defined(__STRICT_ANSI__) || \
+    (defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L)
+#define WEBP_INLINE inline
+#else
+#define WEBP_INLINE
+#endif
+#else
+typedef signed   char int8_t;
+typedef unsigned char uint8_t;
+typedef signed   short int16_t;
+typedef unsigned short uint16_t;
+typedef signed   int int32_t;
+typedef unsigned int uint32_t;
+typedef unsigned long long int uint64_t;
+typedef long long int int64_t;
+#define WEBP_INLINE __forceinline
+#endif  /* _MSC_VER */
+
+#ifndef WEBP_EXTERN
+// This explicitly marks library functions and allows for changing the
+// signature for e.g., Windows DLL builds.
+#define WEBP_EXTERN(type) extern type
+#endif  /* WEBP_EXTERN */
+
+// Macro to check ABI compatibility (same major revision number)
+#define WEBP_ABI_IS_INCOMPATIBLE(a, b) (((a) >> 8) != ((b) >> 8))
+
+#endif  /* WEBP_WEBP_TYPES_H_ */

BIN
Vendor/WebP.framework/WebP


+ 135 - 0
Vendor/WebP.sh

@@ -0,0 +1,135 @@
+#!/bin/bash
+#
+# This script generates 'WebP.framework' (static library).
+# An iOS app can decode WebP images by including 'WebP.framework'.
+#
+# 1. Download the latest libwebp source code from
+#    http://downloads.webmproject.org/releases/webp/index.html
+# 2. Use this script instead of the original 'iosbuild.sh' to build the WebP.framework.
+#    It will build all modules, include mux, demux, coder and decoder.
+#
+# Notice: You should use Xcode 7 (or above) to support bitcode.
+
+set -e
+
+# Extract the latest SDK version from the final field of the form: iphoneosX.Y
+readonly SDK=$(xcodebuild -showsdks \
+  | grep iphoneos | sort | tail -n 1 | awk '{print substr($NF, 9)}'
+)
+# Extract Xcode version.
+readonly XCODE=$(xcodebuild -version | grep Xcode | cut -d " " -f2)
+if [[ -z "${XCODE}" ]]; then
+  echo "Xcode not available"
+  exit 1
+fi
+
+readonly OLDPATH=${PATH}
+
+# Add iPhoneOS-V6 to the list of platforms below if you need armv6 support.
+# Note that iPhoneOS-V6 support is not available with the iOS6 SDK.
+PLATFORMS="iPhoneSimulator iPhoneSimulator64"
+PLATFORMS+=" iPhoneOS-V7 iPhoneOS-V7s iPhoneOS-V7-arm64"
+readonly PLATFORMS
+readonly SRCDIR=$(dirname $0)
+readonly TOPDIR=$(pwd)
+readonly BUILDDIR="${TOPDIR}/iosbuild"
+readonly TARGETDIR="${TOPDIR}/WebP.framework"
+readonly DEVELOPER=$(xcode-select --print-path)
+readonly PLATFORMSROOT="${DEVELOPER}/Platforms"
+readonly LIPO=$(xcrun -sdk iphoneos${SDK} -find lipo)
+LIBLIST=''
+
+if [[ -z "${SDK}" ]]; then
+  echo "iOS SDK not available"
+  exit 1
+elif [[ ${SDK%%.*} -lt 6 ]]; then
+  # Compare the SDK *major* version numerically. The previous check,
+  # '[[ ${SDK} < 6.0 ]]', was a lexicographic string comparison and
+  # wrongly rejected SDK 10.0 and later ("10.0" sorts before "6.0").
+  echo "You need iOS SDK version 6.0 or above"
+  exit 1
+else
+  echo "iOS SDK Version ${SDK}"
+fi
+
+rm -rf ${BUILDDIR}
+rm -rf ${TARGETDIR}
+mkdir -p ${BUILDDIR}
+mkdir -p ${TARGETDIR}/Headers/
+
+# Generate the configure script if building from a source checkout.
+if [[ ! -e ${SRCDIR}/configure ]]; then
+  if ! (cd ${SRCDIR} && sh autogen.sh); then
+    cat <<EOT
+Error creating configure script!
+This script requires the autoconf/automake and libtool to build. MacPorts can
+be used to obtain these:
+http://www.macports.org/install.php
+EOT
+    exit 1
+  fi
+fi
+
+# Configure, build and harvest a combined static library for each platform.
+for PLATFORM in ${PLATFORMS}; do
+  ARCH2=""
+  if [[ "${PLATFORM}" == "iPhoneOS-V7-arm64" ]]; then
+    PLATFORM="iPhoneOS"
+    ARCH="aarch64"
+    ARCH2="arm64"
+  elif [[ "${PLATFORM}" == "iPhoneOS-V7s" ]]; then
+    PLATFORM="iPhoneOS"
+    ARCH="armv7s"
+  elif [[ "${PLATFORM}" == "iPhoneOS-V7" ]]; then
+    PLATFORM="iPhoneOS"
+    ARCH="armv7"
+  elif [[ "${PLATFORM}" == "iPhoneOS-V6" ]]; then
+    PLATFORM="iPhoneOS"
+    ARCH="armv6"
+  elif [[ "${PLATFORM}" == "iPhoneSimulator64" ]]; then
+    PLATFORM="iPhoneSimulator"
+    ARCH="x86_64"
+  else
+    ARCH="i386"
+  fi
+
+  ROOTDIR="${BUILDDIR}/${PLATFORM}-${SDK}-${ARCH}"
+  mkdir -p "${ROOTDIR}"
+
+  DEVROOT="${DEVELOPER}/Toolchains/XcodeDefault.xctoolchain"
+  SDKROOT="${PLATFORMSROOT}/"
+  SDKROOT+="${PLATFORM}.platform/Developer/SDKs/${PLATFORM}${SDK}.sdk/"
+  # '-fembed-bitcode' requires Xcode 7+ (see notice at the top of the file).
+  CFLAGS="-arch ${ARCH2:-${ARCH}} -pipe -isysroot ${SDKROOT} -O3 -DNDEBUG"
+  CFLAGS+=" -miphoneos-version-min=6.0 -fembed-bitcode"
+
+  set -x
+  export PATH="${DEVROOT}/usr/bin:${OLDPATH}"
+  ${SRCDIR}/configure --host=${ARCH}-apple-darwin --prefix=${ROOTDIR} \
+    --build=$(${SRCDIR}/config.guess) \
+    --disable-shared --enable-static \
+    --enable-libwebpmux \
+    --enable-libwebpdemux \
+    --enable-libwebpdecoder \
+    --enable-swap-16bit-csp \
+    CFLAGS="${CFLAGS}"
+  set +x
+
+  # run make only in the src/ directory to create libwebpdecoder.a
+  cd src/
+  make V=0
+  make install
+
+  # Merge the per-module archives into a single 'webp.a' for this arch.
+  MAKEPATH=$(pwd)
+  cd ${ROOTDIR}/lib/
+  ar x libwebp.a
+  ar x libwebpmux.a
+  ar x libwebpdemux.a
+  ar x libwebpdecoder.a
+  ar q webp.a *.o
+
+  LIBLIST+=" ${ROOTDIR}/lib/webp.a"
+  cd ${MAKEPATH}
+
+  make clean
+  cd ..
+
+  export PATH=${OLDPATH}
+done
+
+# Assemble the framework: public headers plus a fat (multi-arch) binary.
+cp -a ${SRCDIR}/src/webp/*.h ${TARGETDIR}/Headers/
+${LIPO} -create ${LIBLIST} -output ${TARGETDIR}/WebP

+ 21 - 0
YYImage.podspec

@@ -0,0 +1,21 @@
+Pod::Spec.new do |s|
+  s.name         = 'YYImage'
+  s.summary      = 'Image framework for iOS to display/encode/decode animated WebP, APNG, GIF, and more.'
+  s.version      = '0.8.9'
+  s.license      = { :type => 'MIT', :file => 'LICENSE' }
+  s.authors      = { 'ibireme' => 'ibireme@gmail.com' }
+  s.social_media_url = 'http://blog.ibireme.com'
+  s.homepage     = 'https://github.com/ibireme/YYImage'
+  s.platform     = :ios, '6.0'
+  s.ios.deployment_target = '6.0'
+  s.source       = { :git => 'https://github.com/ibireme/YYImage.git', :tag => s.version.to_s }
+  
+  s.requires_arc = true
+  s.source_files = 'YYImage/*.{h,m}'
+  s.public_header_files = 'YYImage/*.{h}'
+  
+  s.libraries = 'z'
+  s.frameworks = 'UIKit', 'CoreFoundation', 'QuartzCore', 'AssetsLibrary', 'ImageIO', 'Accelerate', 'MobileCoreServices'
+  s.ios.vendored_frameworks = 'Vendor/WebP.framework'
+
+end

+ 121 - 0
YYImage/YYAnimatedImageView.h

@@ -0,0 +1,121 @@
+//
+//  YYAnimatedImageView.h
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/10/19.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+/**
+ An image view for displaying animated image.
+ 
+ @discussion It is a fully compatible `UIImageView` subclass.
+ If the `image` or `highlightedImage` property adopt to the `YYAnimatedImage` protocol,
+ then it can be used to play the multi-frame animation. The animation can also be 
+ controlled with the UIImageView methods `-startAnimating`, `-stopAnimating` and `-isAnimating`.
+ 
+ This view requests the frame data just in time. When the device has enough free memory, 
+ this view may cache some or all future frames in an inner buffer for lower CPU cost.
+ Buffer size is dynamically adjusted based on the current state of the device memory.
+ 
+ Sample Code:
+ 
+     // ani@3x.gif
+     YYImage *image = [YYImage imageNamed:@"ani"];
+     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+     [view addSubview:imageView];
+ */
+@interface YYAnimatedImageView : UIImageView
+
+/**
+ If the image has more than one frame, setting this value to `YES` will automatically 
+ play/stop the animation when the view becomes visible/invisible.
+ 
+ The default value is `YES`.
+ */
+@property (nonatomic, assign) BOOL autoPlayAnimatedImage;
+
+/**
+ Index of the currently displayed frame (index from 0).
+ 
+ Setting a new value to this property will cause the new frame to be displayed immediately.
+ If the new value is invalid, this method has no effect.
+ 
+ You can add an observer to this property to observe the playing status.
+ */
+@property (nonatomic, assign) NSUInteger currentAnimatedImageIndex;
+
+/**
+ Whether the image view is playing animation currently.
+ 
+ You can add an observer to this property to observe the playing status.
+ */
+@property (nonatomic, readonly) BOOL currentIsPlayingAnimation;
+
+/**
+ The animation timer's runloop mode, default is `NSRunLoopCommonModes`.
+ 
+ Set this property to `NSDefaultRunLoopMode` will make the animation pause during
+ UIScrollView scrolling.
+ */
+@property (nonatomic, copy) NSString *runloopMode;
+
+/**
+ The max size (in bytes) for inner frame buffer size, default is 0 (dynamically).
+ 
+ When the device has enough free memory, this view will request and decode some or 
+ all future frame image into an inner buffer. If this property's value is 0, then 
+ the max buffer size will be dynamically adjusted based on the current state of 
+ the device free memory. Otherwise, the buffer size will be limited by this value.
+ 
+ When receive memory warning or app enter background, the buffer will be released 
+ immediately, and may grow back at the right time.
+ */
+@property (nonatomic, assign) NSUInteger maxBufferSize;
+
+@end
+
+
+
+/**
+ The YYAnimatedImage protocol declares the required methods for animated image
+ display with YYAnimatedImageView.
+ 
+ Subclass a UIImage and implement this protocol, so that instances of that class 
+ can be set to YYAnimatedImageView.image or YYAnimatedImageView.highlightedImage
+ to display animation.
+ 
+ See `YYImage` and `YYFrameImage` for example.
+ */
+@protocol YYAnimatedImage <NSObject>
+@required
+/// Total animated frame count.
+/// If the frame count is less than 1, then the methods below will be ignored.
+- (NSUInteger)animatedImageFrameCount;
+
+/// Animation loop count, 0 means infinite looping.
+- (NSUInteger)animatedImageLoopCount;
+
+/// Bytes per frame (in memory). It may be used to optimize memory buffer size.
+- (NSUInteger)animatedImageBytesPerFrame;
+
+/// Returns the frame image from a specified index.
+/// This method may be called on background thread.
+/// @param index  Frame index (zero based).
+- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index;
+
+/// Returns the frame's duration from a specified index.
+/// @param index  Frame index (zero based).
+- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index;
+
+@optional
+/// A rectangle in image coordinates defining the subrectangle of the image that
+/// will be displayed. The rectangle should not be outside the image's bounds.
+/// It may be used to display sprite animation with a single image (sprite sheet).
+- (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index;
+@end

+ 653 - 0
YYImage/YYAnimatedImageView.m

@@ -0,0 +1,653 @@
+//
+//  YYAnimatedImageView.m
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/10/19.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "YYAnimatedImageView.h"
+#import "YYImageCoder.h"
+#import <pthread.h>
+#import <libkern/OSAtomic.h>
+#import <mach/mach.h>
+
+
+#define BUFFER_SIZE (10 * 1024 * 1024) // 10MB (minimum memory buffer size)
+
+// Guard access to the view's frame buffer (_buffer).
+// NOTE(review): OSSpinLock is subject to priority inversion and was later
+// deprecated in favor of os_unfair_lock (iOS 10+); kept here for the iOS 6
+// deployment target this file supports.
+#define LOCK(...) OSSpinLockLock(&self->_lock); \
+__VA_ARGS__; \
+OSSpinLockUnlock(&self->_lock);
+
+// Same as LOCK, but for code that holds the view in a local named 'view'
+// (used by the background fetch operation).
+#define LOCK_VIEW(...) OSSpinLockLock(&view->_lock); \
+__VA_ARGS__; \
+OSSpinLockUnlock(&view->_lock);
+
+
+/// Returns the device's total physical memory in bytes, or -1 on overflow.
+static int64_t _YYDeviceMemoryTotal() {
+    int64_t mem = [[NSProcessInfo processInfo] physicalMemory];
+    // physicalMemory is unsigned; a negative value here can only come from
+    // the signed cast overflowing, so normalize it to the -1 sentinel.
+    if (mem < -1) mem = -1;
+    // Fixed misleading indentation: 'return' was indented as if it belonged
+    // to the brace-less 'if' above, although it always executes.
+    return mem;
+}
+
+/// Returns the bytes of free physical memory (Mach 'free_count' pages),
+/// or -1 if the host statistics cannot be queried.
+static int64_t _YYDeviceMemoryFree() {
+    mach_port_t host_port = mach_host_self();
+    mach_msg_type_number_t host_size = sizeof(vm_statistics_data_t) / sizeof(integer_t);
+    vm_size_t page_size;
+    vm_statistics_data_t vm_stat;
+    kern_return_t kern;
+    
+    // Both calls must succeed before the stats are meaningful.
+    kern = host_page_size(host_port, &page_size);
+    if (kern != KERN_SUCCESS) return -1;
+    kern = host_statistics(host_port, HOST_VM_INFO, (host_info_t)&vm_stat, &host_size);
+    if (kern != KERN_SUCCESS) return -1;
+    // free_count is in pages; convert to bytes.
+    return vm_stat.free_count * page_size;
+}
+
+/**
+ A proxy used to hold a weak object.
+ It can be used to avoid retain cycles, such as the target in NSTimer or CADisplayLink.
+ */
+@interface _YYImageWeakProxy : NSProxy
+@property (nonatomic, weak, readonly) id target;
+- (instancetype)initWithTarget:(id)target;
++ (instancetype)proxyWithTarget:(id)target;
+@end
+
+@implementation _YYImageWeakProxy
+// NSProxy declares no -init, so there is intentionally no [super init] call.
+- (instancetype)initWithTarget:(id)target {
+    _target = target;
+    return self;
+}
++ (instancetype)proxyWithTarget:(id)target {
+    return [[_YYImageWeakProxy alloc] initWithTarget:target];
+}
+// Fast path: forward every message straight to the (weak) target.
+- (id)forwardingTargetForSelector:(SEL)selector {
+    return _target;
+}
+// Slow path, reached only after the target has been deallocated (the fast
+// path returned nil). Zero the return value so the caller gets nil/0.
+// NOTE(review): this writes a pointer-sized zero; assumes no forwarded
+// selector returns a larger struct — confirm for new call sites.
+- (void)forwardInvocation:(NSInvocation *)invocation {
+    void *null = NULL;
+    [invocation setReturnValue:&null];
+}
+// Any valid signature suffices once the target is gone; -init's signature is
+// used as a cheap placeholder so dispatch does not raise.
+- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
+    return [NSObject instanceMethodSignatureForSelector:@selector(init)];
+}
+// The introspection overrides below make the proxy transparent by
+// delegating every query to the target.
+- (BOOL)respondsToSelector:(SEL)aSelector {
+    return [_target respondsToSelector:aSelector];
+}
+- (BOOL)isEqual:(id)object {
+    return [_target isEqual:object];
+}
+- (NSUInteger)hash {
+    return [_target hash];
+}
+- (Class)superclass {
+    return [_target superclass];
+}
+- (Class)class {
+    return [_target class];
+}
+- (BOOL)isKindOfClass:(Class)aClass {
+    return [_target isKindOfClass:aClass];
+}
+- (BOOL)isMemberOfClass:(Class)aClass {
+    return [_target isMemberOfClass:aClass];
+}
+- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
+    return [_target conformsToProtocol:aProtocol];
+}
+- (BOOL)isProxy {
+    return YES;
+}
+- (NSString *)description {
+    return [_target description];
+}
+- (NSString *)debugDescription {
+    return [_target debugDescription];
+}
+@end
+
+
+
+
+// Which UIImageView image source is being set/displayed.
+// NOTE(review): "Animaged" is a typo for "Animated"; the name is private to
+// this file, but renaming it must be done consistently everywhere it appears.
+typedef NS_ENUM(NSUInteger, YYAnimagedImageType) {
+    YYAnimagedImageTypeNone = 0,
+    YYAnimagedImageTypeImage,
+    YYAnimagedImageTypeHighlightedImage,
+    YYAnimagedImageTypeImages,
+    YYAnimagedImageTypeHighlightedImages,
+};
+
+// Private state. The ivars are @package so the fetch operation below can
+// access them directly (under _lock where noted).
+@interface YYAnimatedImageView() {
+    @package
+    UIImage <YYAnimatedImage> *_curAnimatedImage;
+    
+    dispatch_once_t _onceToken;
+    OSSpinLock _lock; ///< lock for _buffer
+    NSOperationQueue *_requestQueue; ///< image request queue, serial
+    
+    CADisplayLink *_link; ///< ticker for change frame
+    NSTimeInterval _time; ///< time after last frame
+    
+    UIImage *_curFrame; ///< current frame to display
+    NSUInteger _curIndex; ///< current frame index (from 0)
+    NSUInteger _totalFrameCount; ///< total frame count
+    
+    BOOL _loopEnd; ///< whether the loop has ended
+    NSUInteger _curLoop; ///< current loop count (from 0)
+    NSUInteger _totalLoop; ///< total loop count, 0 means infinity
+    
+    NSMutableDictionary *_buffer; ///< frame buffer
+    BOOL _bufferMiss; ///< whether miss frame on last opportunity
+    NSUInteger _maxBufferCount; ///< maximum buffer count
+    NSInteger _incrBufferCount; ///< current allowed buffer count (will increase by step)
+    
+    CGRect _curContentsRect;
+    BOOL _curImageHasContentsRect; ///< whether the image implements "animatedImageContentsRectAtIndex:"
+}
+@property (nonatomic, readwrite) BOOL currentIsPlayingAnimation;
+- (void)calcMaxBufferCount;
+@end
+
+/// An operation for image fetch.
+/// Runs on the view's serial request queue: decodes upcoming frames in the
+/// background and stores them into the view's frame buffer so that -step:
+/// can pick them up on the main thread.
+@interface _YYAnimatedImageViewFetchOperation : NSOperation
+@property (nonatomic, weak) YYAnimatedImageView *view;
+@property (nonatomic, assign) NSUInteger nextIndex;
+@property (nonatomic, strong) UIImage <YYAnimatedImage> *curImage;
+@end
+
+@implementation _YYAnimatedImageViewFetchOperation
+- (void)main {
+    __strong YYAnimatedImageView *view = _view;
+    if (!view) return;
+    if ([self isCancelled]) return;
+    // Grow the allowed buffer count one step at a time; a negative value
+    // (set after a memory warning) delays regrowth until it climbs past 0.
+    // NOTE(review): mutated outside the spin lock — presumably safe because
+    // this only runs on the serial request queue; confirm.
+    view->_incrBufferCount++;
+    if (view->_incrBufferCount == 0) [view calcMaxBufferCount];
+    if ((int)view->_incrBufferCount > (int)view->_maxBufferCount) {
+        view->_incrBufferCount = view->_maxBufferCount;
+    }
+    NSUInteger idx = _nextIndex;
+    NSUInteger max = view->_incrBufferCount < 1 ? 1 : view->_incrBufferCount;
+    NSUInteger total = view->_totalFrameCount;
+    // Decode up to 'max' frames ahead, wrapping at the end of the animation.
+    for (int i = 0; i < max; i++, idx++) {
+        @autoreleasepool {
+            if (idx >= total) idx = 0;
+            if ([self isCancelled]) break;
+            LOCK_VIEW(BOOL miss = (view->_buffer[@(idx)] == nil));
+            if (miss) {
+                UIImage *img = [_curImage animatedImageFrameAtIndex:idx];
+                img = [img yy_imageByDecoded];
+                if ([self isCancelled]) break;
+                // NSNull marks a frame that failed to decode so it will not
+                // be re-requested on every tick.
+                LOCK_VIEW(view->_buffer[@(idx)] = img ? img : [NSNull null]);
+            }
+        }
+    }
+}
+@end
+
+@implementation YYAnimatedImageView
+
+- (instancetype)init {
+    self = [super init];
+    // Guard against a nil super result: the direct ivar stores below compile
+    // to self->_ivar and must not run on nil.
+    if (self) {
+        _runloopMode = NSRunLoopCommonModes;
+        _autoPlayAnimatedImage = YES;
+    }
+    return self;
+}
+
+- (instancetype)initWithImage:(UIImage *)image {
+    self = [super init];
+    // Guard against a nil super result before the direct ivar stores.
+    if (self) {
+        _runloopMode = NSRunLoopCommonModes;
+        _autoPlayAnimatedImage = YES;
+        // Size the view to the image, matching UIImageView's convenience init.
+        self.frame = (CGRect) {CGPointZero, image.size };
+        self.image = image;
+    }
+    return self;
+}
+
+- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage {
+    self = [super init];
+    // Guard against a nil super result before the direct ivar stores.
+    if (self) {
+        _runloopMode = NSRunLoopCommonModes;
+        _autoPlayAnimatedImage = YES;
+        // Prefer the normal image's size; fall back to the highlighted one.
+        CGSize size = image ? image.size : highlightedImage.size;
+        self.frame = (CGRect) {CGPointZero, size };
+        self.image = image;
+        self.highlightedImage = highlightedImage;
+    }
+    return self;
+}
+
+// init the animated params.
+// First call lazily creates the buffer, serial request queue, display link
+// (via a weak proxy, to avoid the CADisplayLink retain cycle) and the
+// notification observers; every call then cancels pending fetches and resets
+// playback state to frame 0.
+- (void)resetAnimated {
+    dispatch_once(&_onceToken, ^{
+        _lock = OS_SPINLOCK_INIT;
+        _buffer = [NSMutableDictionary new];
+        _requestQueue = [[NSOperationQueue alloc] init];
+        _requestQueue.maxConcurrentOperationCount = 1;
+        _link = [CADisplayLink displayLinkWithTarget:[_YYImageWeakProxy proxyWithTarget:self] selector:@selector(step:)];
+        if (_runloopMode) {
+            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
+        }
+        _link.paused = YES;
+        
+        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
+        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
+    });
+    
+    [_requestQueue cancelAllOperations];
+    LOCK(
+         if (_buffer.count) {
+             NSMutableDictionary *holder = _buffer;
+             _buffer = [NSMutableDictionary new];
+             dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
+                 // Capture the dictionary to global queue,
+                 // release these images in background to avoid blocking UI thread.
+                 [holder class];
+             });
+         }
+    );
+    _link.paused = YES;
+    _time = 0;
+    // Manual KVO events so observers of currentAnimatedImageIndex see the
+    // reset (the ivar is set directly, bypassing the setter).
+    if (_curIndex != 0) {
+        [self willChangeValueForKey:@"currentAnimatedImageIndex"];
+        _curIndex = 0;
+        [self didChangeValueForKey:@"currentAnimatedImageIndex"];
+    }
+    _curAnimatedImage = nil;
+    _curFrame = nil;
+    _curLoop = 0;
+    _totalLoop = 0;
+    _totalFrameCount = 1;
+    _loopEnd = NO;
+    _bufferMiss = NO;
+    _incrBufferCount = 0;
+}
+
+// Route the new image through the shared setter so animation state is reset.
+- (void)setImage:(UIImage *)image {
+    if (image != self.image) {
+        [self setImage:image withType:YYAnimagedImageTypeImage];
+    }
+}
+
+// Route the new highlighted image through the shared setter.
+- (void)setHighlightedImage:(UIImage *)highlightedImage {
+    if (highlightedImage != self.highlightedImage) {
+        [self setImage:highlightedImage withType:YYAnimagedImageTypeHighlightedImage];
+    }
+}
+
+// Route the new image array through the shared setter.
+- (void)setAnimationImages:(NSArray *)animationImages {
+    if (animationImages != self.animationImages) {
+        [self setImage:animationImages withType:YYAnimagedImageTypeImages];
+    }
+}
+
+// Route the new highlighted image array through the shared setter.
+- (void)setHighlightedAnimationImages:(NSArray *)highlightedAnimationImages {
+    if (highlightedAnimationImages != self.highlightedAnimationImages) {
+        [self setImage:highlightedAnimationImages withType:YYAnimagedImageTypeHighlightedImages];
+    }
+}
+
+// Highlight switching may swap the visible image, so rebuild animation state.
+- (void)setHighlighted:(BOOL)highlighted {
+    [super setHighlighted:highlighted];
+    if (_link != nil) {
+        [self resetAnimated];
+    }
+    [self imageChanged];
+}
+
+// Map an image-type tag to the corresponding UIImageView property value.
+- (id)imageForType:(YYAnimagedImageType)type {
+    switch (type) {
+        case YYAnimagedImageTypeImage:
+            return self.image;
+        case YYAnimagedImageTypeHighlightedImage:
+            return self.highlightedImage;
+        case YYAnimagedImageTypeImages:
+            return self.animationImages;
+        case YYAnimagedImageTypeHighlightedImages:
+            return self.highlightedAnimationImages;
+        case YYAnimagedImageTypeNone:
+        default:
+            return nil;
+    }
+}
+
+// Determine which image source is currently visible, preferring the
+// highlighted variants while the view is highlighted.
+- (YYAnimagedImageType)currentImageType {
+    if (self.highlighted) {
+        if (self.highlightedAnimationImages.count > 0) return YYAnimagedImageTypeHighlightedImages;
+        if (self.highlightedImage != nil) return YYAnimagedImageTypeHighlightedImage;
+    }
+    if (self.animationImages.count > 0) return YYAnimagedImageTypeImages;
+    if (self.image != nil) return YYAnimagedImageTypeImage;
+    return YYAnimagedImageTypeNone;
+}
+
+// Central setter: every image mutation funnels through here so the
+// animation state is torn down before the underlying property changes.
+- (void)setImage:(id)image withType:(YYAnimagedImageType)type {
+    [self stopAnimating];
+    if (_link != nil) {
+        [self resetAnimated];
+    }
+    _curFrame = nil;
+    switch (type) {
+        case YYAnimagedImageTypeNone:
+            break;
+        case YYAnimagedImageTypeImage:
+            super.image = image;
+            break;
+        case YYAnimagedImageTypeHighlightedImage:
+            super.highlightedImage = image;
+            break;
+        case YYAnimagedImageTypeImages:
+            super.animationImages = image;
+            break;
+        case YYAnimagedImageTypeHighlightedImages:
+            super.highlightedAnimationImages = image;
+            break;
+    }
+    [self imageChanged];
+}
+
+// Recompute animation state after any image/highlight change: detect whether
+// the visible image is a multi-frame YYAnimatedImage, reset or apply the
+// layer's contentsRect (sprite-sheet support), and restart playback.
+- (void)imageChanged {
+    YYAnimagedImageType newType = [self currentImageType];
+    id newVisibleImage = [self imageForType:newType];
+    NSUInteger newImageFrameCount = 0;
+    BOOL hasContentsRect = NO;
+    if ([newVisibleImage isKindOfClass:[UIImage class]] &&
+        [newVisibleImage conformsToProtocol:@protocol(YYAnimatedImage)]) {
+        newImageFrameCount = ((UIImage<YYAnimatedImage> *) newVisibleImage).animatedImageFrameCount;
+        if (newImageFrameCount > 1) {
+            hasContentsRect = [((UIImage<YYAnimatedImage> *) newVisibleImage) respondsToSelector:@selector(animatedImageContentsRectAtIndex:)];
+        }
+    }
+    // The previous image used a custom contentsRect but the new one does not:
+    // restore the full-image rect without implicit animation.
+    if (!hasContentsRect && _curImageHasContentsRect) {
+        if (!CGRectEqualToRect(self.layer.contentsRect, CGRectMake(0, 0, 1, 1)) ) {
+            [CATransaction begin];
+            [CATransaction setDisableActions:YES];
+            self.layer.contentsRect = CGRectMake(0, 0, 1, 1);
+            [CATransaction commit];
+        }
+    }
+    _curImageHasContentsRect = hasContentsRect;
+    if (hasContentsRect) {
+        CGRect rect = [((UIImage<YYAnimatedImage> *) newVisibleImage) animatedImageContentsRectAtIndex:0];
+        [self setContentsRect:rect forImage:newVisibleImage];
+    }
+    
+    // Multi-frame image: adopt it as the animation source and size the buffer.
+    if (newImageFrameCount > 1) {
+        [self resetAnimated];
+        _curAnimatedImage = newVisibleImage;
+        _curFrame = newVisibleImage;
+        _totalLoop = _curAnimatedImage.animatedImageLoopCount;
+        _totalFrameCount = _curAnimatedImage.animatedImageFrameCount;
+        [self calcMaxBufferCount];
+    }
+    [self setNeedsDisplay];
+    [self didMoved];
+}
+
+// dynamically adjust buffer size for current memory.
+// Budget = min(20% of total memory, 60% of free memory), floored at
+// BUFFER_SIZE and optionally capped by the user-set maxBufferSize; the frame
+// count is the budget divided by one frame's byte size (at least 1).
+- (void)calcMaxBufferCount {
+    NSUInteger bytes = _curAnimatedImage.animatedImageBytesPerFrame;
+    if (bytes == 0) bytes = 1; // avoid division by zero below
+    
+    int64_t total = _YYDeviceMemoryTotal();
+    int64_t free = _YYDeviceMemoryFree();
+    int64_t max = MIN(total * 0.2, free * 0.6);
+    max = MAX(max, BUFFER_SIZE);
+    if (_maxBufferSize) max = max > _maxBufferSize ? _maxBufferSize : max;
+    _maxBufferCount = (float)max / (float)bytes;
+    if (_maxBufferCount == 0) _maxBufferCount = 1;
+}
+
+// Tear down pending fetches, notification observers and the display link.
+- (void)dealloc {
+    [_requestQueue cancelAllOperations];
+    NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+    [center removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
+    [center removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
+    [_link invalidate];
+}
+
+// Report the playback state tracked by this subclass (covers both the
+// display-link animation and UIImageView's image-array animation).
+- (BOOL)isAnimating {
+    return self.currentIsPlayingAnimation;
+}
+
+// Stop both animation mechanisms: UIImageView's image-array animation
+// (super) and this subclass's display-link driven playback.
+- (void)stopAnimating {
+    [super stopAnimating];
+    _link.paused = YES;
+    self.currentIsPlayingAnimation = NO;
+}
+
+// Begin playback. Image-array animations delegate to UIImageView; animated
+// images (YYAnimatedImage) resume the display link instead.
+- (void)startAnimating {
+    YYAnimagedImageType type = [self currentImageType];
+    BOOL usesImageArray = (type == YYAnimagedImageTypeImages ||
+                           type == YYAnimagedImageTypeHighlightedImages);
+    if (usesImageArray) {
+        NSArray *images = [self imageForType:type];
+        if (images.count > 0) {
+            [super startAnimating];
+            self.currentIsPlayingAnimation = YES;
+        }
+        return;
+    }
+    if (_curAnimatedImage && _link.paused) {
+        _curLoop = 0;
+        _loopEnd = NO;
+        _link.paused = NO;
+        self.currentIsPlayingAnimation = YES;
+    }
+}
+
+// On memory pressure: drop all buffered frames except the next one (so the
+// animation keeps running smoothly) and delay buffer regrowth by pushing
+// _incrBufferCount negative.
+// NOTE(review): the block captures self strongly via the ivar accesses,
+// briefly extending the view's lifetime until the operation runs — presumed
+// intentional; confirm.
+- (void)didReceiveMemoryWarning:(NSNotification *)notification {
+    [_requestQueue cancelAllOperations];
+    [_requestQueue addOperationWithBlock: ^{
+        _incrBufferCount = -60 - (int)(arc4random() % 120); // about 1~3 seconds to grow back..
+        NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
+        LOCK(
+             NSArray * keys = _buffer.allKeys;
+             for (NSNumber * key in keys) {
+                 if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
+                     [_buffer removeObjectForKey:key];
+                 }
+             }
+        )//LOCK
+    }];
+}
+
+// When the app backgrounds: cancel in-flight fetches and drop all buffered
+// frames except the next one (done synchronously, unlike the memory-warning
+// path, since background time is limited).
+- (void)didEnterBackground:(NSNotification *)notification {
+    [_requestQueue cancelAllOperations];
+    NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
+    LOCK(
+         NSArray * keys = _buffer.allKeys;
+         for (NSNumber * key in keys) {
+             if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
+                 [_buffer removeObjectForKey:key];
+             }
+         }
+     )//LOCK
+}
+
+// Display-link tick: accumulate elapsed time, advance to the next frame when
+// its delay has elapsed, swap in the buffered frame (or mark a buffer miss),
+// and schedule a background fetch for upcoming frames.
+- (void)step:(CADisplayLink *)link {
+    UIImage <YYAnimatedImage> *image = _curAnimatedImage;
+    NSMutableDictionary *buffer = _buffer;
+    UIImage *bufferedImage = nil;
+    NSUInteger nextIndex = (_curIndex + 1) % _totalFrameCount;
+    BOOL bufferIsFull = NO;
+    
+    if (!image) return;
+    if (_loopEnd) { // view will keep in last frame
+        [self stopAnimating];
+        return;
+    }
+    
+    NSTimeInterval delay = 0;
+    // On a buffer miss the clock is frozen: retry the same frame without
+    // accumulating time, so playback does not skip frames while decoding.
+    if (!_bufferMiss) {
+        _time += link.duration;
+        delay = [image animatedImageDurationAtIndex:_curIndex];
+        if (_time < delay) return;
+        _time -= delay;
+        if (nextIndex == 0) {
+            _curLoop++;
+            if (_curLoop >= _totalLoop && _totalLoop != 0) {
+                _loopEnd = YES;
+                [self stopAnimating];
+                [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
+                return; // stop at last frame
+            }
+        }
+        delay = [image animatedImageDurationAtIndex:nextIndex];
+        if (_time > delay) _time = delay; // do not jump over frame
+    }
+    LOCK(
+         bufferedImage = buffer[@(nextIndex)];
+         if (bufferedImage) {
+             // Evict the consumed frame unless the whole animation fits in
+             // the allowed buffer (then frames are kept and reused).
+             if ((int)_incrBufferCount < _totalFrameCount) {
+                 [buffer removeObjectForKey:@(nextIndex)];
+             }
+             // Manual KVO for currentAnimatedImageIndex (direct ivar write).
+             [self willChangeValueForKey:@"currentAnimatedImageIndex"];
+             _curIndex = nextIndex;
+             [self didChangeValueForKey:@"currentAnimatedImageIndex"];
+             _curFrame = bufferedImage == (id)[NSNull null] ? nil : bufferedImage;
+             if (_curImageHasContentsRect) {
+                 _curContentsRect = [image animatedImageContentsRectAtIndex:_curIndex];
+                 [self setContentsRect:_curContentsRect forImage:_curFrame];
+             }
+             nextIndex = (_curIndex + 1) % _totalFrameCount;
+             _bufferMiss = NO;
+             if (buffer.count == _totalFrameCount) {
+                 bufferIsFull = YES;
+             }
+         } else {
+             _bufferMiss = YES;
+         }
+    )//LOCK
+    
+    if (!_bufferMiss) {
+        [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
+    }
+    
+    if (!bufferIsFull && _requestQueue.operationCount == 0) { // if some work not finished, wait for next opportunity
+        _YYAnimatedImageViewFetchOperation *operation = [_YYAnimatedImageViewFetchOperation new];
+        operation.view = self;
+        operation.nextIndex = nextIndex;
+        operation.curImage = image;
+        [_requestQueue addOperation:operation];
+    }
+}
+
+// CALayer delegate: pushes the current decoded frame into the layer contents.
+// When no frame is ready, the previous contents are intentionally kept.
+- (void)displayLayer:(CALayer *)layer {
+    if (_curFrame) {
+        layer.contents = (__bridge id)_curFrame.CGImage;
+    }
+}
+
+// Maps `rect` (given in image points) to the layer's unit-coordinate
+// contentsRect, clamped to [0, 1]. Degenerate input (nil image, near-zero
+// size, empty intersection) falls back to the full-image rect.
+- (void)setContentsRect:(CGRect)rect forImage:(UIImage *)image{
+    CGRect normalized = CGRectMake(0, 0, 1, 1);
+    if (image) {
+        CGSize size = image.size;
+        if (size.width > 0.01 && size.height > 0.01) {
+            normalized = CGRectMake(rect.origin.x / size.width,
+                                    rect.origin.y / size.height,
+                                    rect.size.width / size.width,
+                                    rect.size.height / size.height);
+            normalized = CGRectIntersection(normalized, CGRectMake(0, 0, 1, 1));
+            if (CGRectIsNull(normalized) || CGRectIsEmpty(normalized)) {
+                normalized = CGRectMake(0, 0, 1, 1);
+            }
+        }
+    }
+    // Apply without triggering an implicit layer animation.
+    [CATransaction begin];
+    [CATransaction setDisableActions:YES];
+    self.layer.contentsRect = normalized;
+    [CATransaction commit];
+}
+
+// Common handler for view-hierarchy changes: auto-starts the animation when
+// the view is attached to a window, stops it otherwise.
+- (void)didMoved {
+    if (self.autoPlayAnimatedImage) {
+        if(self.superview && self.window) {
+            [self startAnimating];
+        } else {
+            [self stopAnimating];
+        }
+    }
+}
+
+// UIView override: re-evaluate auto-play when attached/detached from a window.
+- (void)didMoveToWindow {
+    [super didMoveToWindow];
+    [self didMoved];
+}
+
+// UIView override: re-evaluate auto-play when the superview changes.
+- (void)didMoveToSuperview {
+    [super didMoveToSuperview];
+    [self didMoved];
+}
+
+// KVO-compliant setter for `currentAnimatedImageIndex`. Jumps the animation
+// to the requested frame: cancels pending fetch operations, clears the frame
+// buffer, decodes the frame synchronously, and resets timing/loop state.
+// Always executes on the main thread.
+- (void)setCurrentAnimatedImageIndex:(NSUInteger)currentAnimatedImageIndex {
+    if (!_curAnimatedImage) return;
+    if (currentAnimatedImageIndex >= _curAnimatedImage.animatedImageFrameCount) return;
+    if (_curIndex == currentAnimatedImageIndex) return;
+    
+    // `(void)`: empty parameter lists in block/function declarators are
+    // deprecated C prototypes and rejected by newer compilers.
+    void (^block)(void) = ^{
+        LOCK(
+             [_requestQueue cancelAllOperations];
+             [_buffer removeAllObjects];
+             [self willChangeValueForKey:@"currentAnimatedImageIndex"];
+             _curIndex = currentAnimatedImageIndex;
+             [self didChangeValueForKey:@"currentAnimatedImageIndex"];
+             _curFrame = [_curAnimatedImage animatedImageFrameAtIndex:_curIndex];
+             if (_curImageHasContentsRect) {
+                 _curContentsRect = [_curAnimatedImage animatedImageContentsRectAtIndex:_curIndex];
+             }
+             _time = 0;
+             _loopEnd = NO;
+             _bufferMiss = NO;
+             [self.layer setNeedsDisplay];
+        )//LOCK
+    };
+    
+    if (pthread_main_np()) {
+        block();
+    } else {
+        dispatch_async(dispatch_get_main_queue(), block);
+    }
+}
+
+// Getter backing the manually-KVO'd `currentAnimatedImageIndex` property.
+- (NSUInteger)currentAnimatedImageIndex {
+    return _curIndex;
+}
+
+// Re-schedules the display link on the main run loop when the mode changes.
+// Setting the same mode again is a no-op; a nil/empty mode removes the link
+// from its current mode without adding it to a new one.
+- (void)setRunloopMode:(NSString *)runloopMode {
+    if ([_runloopMode isEqual:runloopMode]) return;
+    if (_link) {
+        if (_runloopMode) {
+            [_link removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
+        }
+        if (runloopMode.length) {
+            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:runloopMode];
+        }
+    }
+    _runloopMode = runloopMode.copy;
+}
+
+#pragma mark - Override NSObject(NSKeyValueObservingCustomization)
+
+// Manual KVO for `currentAnimatedImageIndex`: the will/didChangeValueForKey:
+// pair is posted explicitly at the points where the index actually changes,
+// so automatic notification is disabled for that key.
++ (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key {
+    if ([key isEqualToString:@"currentAnimatedImageIndex"]) {
+        return NO;
+    }
+    return [super automaticallyNotifiesObserversForKey:key];
+}
+
+#pragma mark - NSCoding
+
+// NSCoding: restores the runloop mode, auto-play flag, and any archived
+// animated images. Animated images live under dedicated keys because
+// UIImageView's own archiving only stores flattened UIImage data.
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+    self = [super initWithCoder:aDecoder];
+    if (!self) return nil; // fix: ivar stores below dereference self; writing through nil self would crash
+    _runloopMode = [aDecoder decodeObjectForKey:@"runloopMode"];
+    if (_runloopMode.length == 0) _runloopMode = NSRunLoopCommonModes;
+    _autoPlayAnimatedImage = [aDecoder decodeBoolForKey:@"autoPlayAnimatedImage"];
+    
+    UIImage *image = [aDecoder decodeObjectForKey:@"YYAnimatedImage"];
+    UIImage *highlightedImage = [aDecoder decodeObjectForKey:@"YYHighlightedAnimatedImage"];
+    if (image) {
+        self.image = image;
+        [self setImage:image withType:YYAnimagedImageTypeImage];
+    }
+    if (highlightedImage) {
+        self.highlightedImage = highlightedImage;
+        [self setImage:highlightedImage withType:YYAnimagedImageTypeHighlightedImage];
+    }
+    return self;
+}
+
+// NSCoding: archives the runloop mode and auto-play flag, plus the image /
+// highlightedImage objects — but only when they are true multi-frame
+// animated images (single-frame ones are handled by UIImageView's archiving).
+- (void)encodeWithCoder:(NSCoder *)aCoder {
+    [super encodeWithCoder:aCoder];
+    [aCoder encodeObject:_runloopMode forKey:@"runloopMode"];
+    [aCoder encodeBool:_autoPlayAnimatedImage forKey:@"autoPlayAnimatedImage"];
+    
+    UIImage *img = self.image;
+    if ([img conformsToProtocol:@protocol(YYAnimatedImage)] &&
+        ((UIImage <YYAnimatedImage> *)img).animatedImageFrameCount > 1) {
+        [aCoder encodeObject:img forKey:@"YYAnimatedImage"];
+    }
+    
+    UIImage *highlighted = self.highlightedImage;
+    if ([highlighted conformsToProtocol:@protocol(YYAnimatedImage)] &&
+        ((UIImage <YYAnimatedImage> *)highlighted).animatedImageFrameCount > 1) {
+        [aCoder encodeObject:highlighted forKey:@"YYHighlightedAnimatedImage"];
+    }
+}
+
+@end

+ 95 - 0
YYImage/YYFrameImage.h

@@ -0,0 +1,95 @@
+//
+//  YYFrameImage.h
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/12/9.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+#if __has_include(<YYImage/YYImage.h>)
+#import <YYImage/YYAnimatedImageView.h>
+#else
+#import "YYAnimatedImageView.h"
+#endif
+
+/**
+ An image to display frame-based animation.
+ 
+ @discussion It is a fully compatible `UIImage` subclass.
+ It only support system image format such as png and jpeg.
+ The animation can be played by YYAnimatedImageView.
+ 
+ Sample Code:
+     
+     NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
+     NSArray *times = @[@0.1, @0.2, @0.1];
+     YYFrameImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times loopCount:0];
+     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+     [view addSubview:imageView];
+ */
+@interface YYFrameImage : UIImage <YYAnimatedImage>
+
+/**
+ Create a frame animated image from files.
+ 
+ @param paths            An array of NSString objects, contains the full or 
+                         partial path to each image file.
+                         e.g. @[@"/ani/1.png",@"/ani/2.png",@"/ani/3.png"]
+ 
+ @param oneFrameDuration The duration (in seconds) per frame.
+ 
+ @param loopCount        The animation loop count, 0 means infinite.
+ 
+ @return An initialized YYFrameImage object, or nil when an error occurs.
+ */
+- (instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount;
+
+/**
+ Create a frame animated image from files.
+ 
+ @param paths          An array of NSString objects, contains the full or
+                       partial path to each image file.
+                       e.g. @[@"/ani/frame1.png",@"/ani/frame2.png",@"/ani/frame3.png"]
+ 
+ @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame.
+                       e.g. @[@0.1, @0.2, @0.3];
+ 
+ @param loopCount      The animation loop count, 0 means infinite.
+ 
+ @return An initialized YYFrameImage object, or nil when an error occurs.
+ */
+- (instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount;
+
+/**
+ Create a frame animated image from an array of data.
+ 
+ @param dataArray        An array of NSData objects.
+ 
+ @param oneFrameDuration The duration (in seconds) per frame.
+ 
+ @param loopCount        The animation loop count, 0 means infinite.
+ 
+ @return An initialized YYFrameImage object, or nil when an error occurs.
+ */
+- (instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount;
+
+/**
+ Create a frame animated image from an array of data.
+ 
+ @param dataArray      An array of NSData objects.
+ 
+ @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame.
+                       e.g. @[@0.1, @0.2, @0.3];
+ 
+ @param loopCount      The animation loop count, 0 means infinite.
+ 
+ @return An initialized YYFrameImage object, or nil when an error occurs.
+ */
+- (instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount;
+
+@end

+ 150 - 0
YYImage/YYFrameImage.m

@@ -0,0 +1,150 @@
+//
+//  YYFrameImage.m
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/12/9.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "YYFrameImage.h"
+#import "YYImageCoder.h"
+
+
+/**
+ Return the path scale.
+ 
+ e.g.
+ <table>
+ <tr><th>Path            </th><th>Scale </th></tr>
+ <tr><td>"icon.png"      </td><td>1     </td></tr>
+ <tr><td>"icon@2x.png"   </td><td>2     </td></tr>
+ <tr><td>"icon@2.5x.png" </td><td>2.5   </td></tr>
+ <tr><td>"icon@2x"       </td><td>1     </td></tr>
+ <tr><td>"icon@2x..png"  </td><td>1     </td></tr>
+ <tr><td>"icon@2x.png/"  </td><td>1     </td></tr>
+ </table>
+ */
+static CGFloat _NSStringPathScale(NSString *string) {
+    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
+    NSString *name = string.stringByDeletingPathExtension;
+    __block CGFloat scale = 1;
+    
+    // Match a trailing "@<number>x" (e.g. "@2x", "@2.5x") on the
+    // extensionless name; `name` is a prefix of `string`, so match indices
+    // are valid in both.
+    NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
+    [pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
+        // NOTE(review): `location >= 3` appears intended to require a
+        // non-trivial base name before the "@...x" suffix — confirm intent.
+        if (result.range.location >= 3) {
+            scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
+        }
+    }];
+    
+    return scale;
+}
+
+
+
+@implementation YYFrameImage {
+    NSUInteger _loopCount;      // animation loop count, 0 = infinite
+    NSUInteger _oneFrameBytes;  // decoded size (bytes) of a single frame
+    NSArray *_imagePaths;       // file-based frame source (exclusive with _imageDatas)
+    NSArray *_imageDatas;       // data-based frame source
+    NSArray *_frameDurations;   // NSNumber seconds, one per frame
+}
+
+// Convenience: same duration for every frame, delegating to the designated
+// path-based initializer.
+- (instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
+    NSMutableArray *durations = [NSMutableArray new];
+    for (int i = 0, max = (int)paths.count; i < max; i++) {
+        [durations addObject:@(oneFrameDuration)];
+    }
+    return [self initWithImagePaths:paths frameDurations:durations loopCount:loopCount];
+}
+
+// Creates a frame animation backed by image files on disk. The first frame
+// is decoded eagerly to serve as the receiver's own image content; returns
+// nil if the inputs are inconsistent or the first frame cannot be decoded.
+- (instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
+    if (paths.count == 0) return nil;
+    if (paths.count != frameDurations.count) return nil;
+    
+    NSString *firstPath = paths[0];
+    NSData *firstData = [NSData dataWithContentsOfFile:firstPath];
+    CGFloat scale = _NSStringPathScale(firstPath);
+    UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
+    if (!firstCG.CGImage) return nil; // fix: avoid passing a NULL CGImage to initWithCGImage:
+    self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
+    if (!self) return nil;
+    long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
+    _oneFrameBytes = (NSUInteger)frameByte;
+    _imagePaths = paths.copy;
+    _frameDurations = frameDurations.copy;
+    _loopCount = loopCount;
+    
+    return self;
+}
+
+// Convenience: same duration for every frame, delegating to the designated
+// data-based initializer.
+- (instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
+    NSMutableArray *durations = [NSMutableArray new];
+    for (int i = 0, max = (int)dataArray.count; i < max; i++) {
+        [durations addObject:@(oneFrameDuration)];
+    }
+    return [self initWithImageDataArray:dataArray frameDurations:durations loopCount:loopCount];
+}
+
+// Creates a frame animation backed by in-memory image data. Frames are
+// assumed to be at the main screen's scale.
+- (instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
+    if (dataArray.count == 0) return nil;
+    if (dataArray.count != frameDurations.count) return nil;
+    
+    NSData *firstData = dataArray[0];
+    CGFloat scale = [UIScreen mainScreen].scale;
+    UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
+    if (!firstCG.CGImage) return nil; // fix: avoid passing a NULL CGImage to initWithCGImage:
+    self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
+    if (!self) return nil;
+    long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
+    _oneFrameBytes = (NSUInteger)frameByte;
+    _imageDatas = dataArray.copy;
+    _frameDurations = frameDurations.copy;
+    _loopCount = loopCount;
+    
+    return self;
+}
+
+#pragma mark - YYAnimatedImage
+
+- (NSUInteger)animatedImageFrameCount {
+    if (_imagePaths) {
+        return _imagePaths.count;
+    } else if (_imageDatas) {
+        return _imageDatas.count;
+    } else {
+        return 1;
+    }
+}
+
+- (NSUInteger)animatedImageLoopCount {
+    return _loopCount;
+}
+
+- (NSUInteger)animatedImageBytesPerFrame {
+    return _oneFrameBytes;
+}
+
+// Decodes the frame at `index` on demand (frames are not cached here; the
+// animated image view maintains its own buffer).
+- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
+    if (_imagePaths) {
+        if (index >= _imagePaths.count) return nil;
+        NSString *path = _imagePaths[index];
+        CGFloat scale = _NSStringPathScale(path);
+        NSData *data = [NSData dataWithContentsOfFile:path];
+        return [[UIImage imageWithData:data scale:scale] yy_imageByDecoded];
+    } else if (_imageDatas) {
+        if (index >= _imageDatas.count) return nil;
+        NSData *data = _imageDatas[index];
+        return [[UIImage imageWithData:data scale:[UIScreen mainScreen].scale] yy_imageByDecoded];
+    } else {
+        return index == 0 ? self : nil;
+    }
+}
+
+- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
+    if (index >= _frameDurations.count) return 0;
+    NSNumber *num = _frameDurations[index];
+    return [num doubleValue];
+}
+
+@end

+ 85 - 0
YYImage/YYImage.h

@@ -0,0 +1,85 @@
+//
+//  YYImage.h
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/10/20.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+#if __has_include(<YYImage/YYImage.h>)
+FOUNDATION_EXPORT double YYImageVersionNumber;
+FOUNDATION_EXPORT const unsigned char YYImageVersionString[];
+#import <YYImage/YYFrameImage.h>
+#import <YYImage/YYSpriteSheetImage.h>
+#import <YYImage/YYImageCoder.h>
+#import <YYImage/YYAnimatedImageView.h>
+#else
+#import "YYFrameImage.h"
+#import "YYSpriteSheetImage.h"
+#import "YYImageCoder.h"
+#import "YYAnimatedImageView.h"
+#endif
+
+
+
+
+/**
+ A YYImage object is a high-level way to display animated image data.
+ 
+ @discussion It is a fully compatible `UIImage` subclass. It extends the UIImage
+ to support animated WebP, APNG and GIF format image data decoding. It also 
+ support NSCoding protocol to archive and unarchive multi-frame image data.
+ 
+ If the image is created from multi-frame image data, and you want to play the 
+ animation, try replace UIImageView with `YYAnimatedImageView`.
+ 
+ Sample Code:
+ 
+     // animation@3x.webp
+     YYImage *image = [YYImage imageNamed:@"animation.webp"];
+     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
+     [view addSubview:imageView];
+    
+ */
+@interface YYImage : UIImage <YYAnimatedImage>
+
++ (YYImage *)imageNamed:(NSString *)name; // no cache!
++ (YYImage *)imageWithContentsOfFile:(NSString *)path;
++ (YYImage *)imageWithData:(NSData *)data;
++ (YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale;
+
+/**
+ If the image is created from data or file, then the value indicates the data type.
+ */
+@property (nonatomic, readonly) YYImageType animatedImageType;
+
+/**
+ If the image is created from animated image data (multi-frame GIF/APNG/WebP),
+ this property stores the original image data.
+ */
+@property (nonatomic, readonly) NSData *animatedImageData;
+
+/**
+ The total memory usage (in bytes) if all frame images was loaded into memory.
+ The value is 0 if the image is not created from a multi-frame image data.
+ */
+@property (nonatomic, readonly) NSUInteger animatedImageMemorySize;
+
+/**
+ Preload all frame image to memory.
+ 
+ @discussion Set this property to `YES` will block the calling thread to decode 
+ all animation frame image to memory, set to `NO` will release the preloaded frames.
+ If the image is shared by lots of image views (such as emoticon), preload all
+ frames will reduce the CPU cost.
+ 
+ See `animatedImageMemorySize` for memory cost.
+ */
+@property (nonatomic, assign) BOOL preloadAllAnimatedImageFrames;
+
+@end

+ 255 - 0
YYImage/YYImage.m

@@ -0,0 +1,255 @@
+//
+//  YYImage.m
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 14/10/20.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "YYImage.h"
+#import <libkern/OSAtomic.h>
+
+/**
+ An array of NSNumber objects, shows the best order for path scale search.
+ e.g. iPhone3GS:@[@1,@2,@3] iPhone5:@[@2,@3,@1]  iPhone6 Plus:@[@3,@2,@1]
+ */
+static NSArray *_NSBundlePreferredScales() {
+    static NSArray *scales;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        // Order candidate scales so the screen's native scale is tried first.
+        CGFloat screenScale = [UIScreen mainScreen].scale;
+        if (screenScale <= 1) {
+            scales = @[@1,@2,@3];
+        } else if (screenScale <= 2) {
+            scales = @[@2,@3,@1];
+        } else {
+            scales = @[@3,@2,@1];
+        }
+    });
+    return scales;
+}
+
+/**
+ Add scale modifier to the file name (without path extension),
+ From @"name" to @"name@2x".
+ 
+ e.g.
+ <table>
+ <tr><th>Before     </th><th>After(scale:2)</th></tr>
+ <tr><td>"icon"     </td><td>"icon@2x"     </td></tr>
+ <tr><td>"icon "    </td><td>"icon @2x"    </td></tr>
+ <tr><td>"icon.top" </td><td>"icon.top@2x" </td></tr>
+ <tr><td>"/p/name"  </td><td>"/p/name@2x"  </td></tr>
+ <tr><td>"/path/"   </td><td>"/path/"      </td></tr>
+ </table>
+ 
+ @param scale Resource scale.
+ @return String by add scale modifier, or just return if it's not end with file name.
+ */
+static NSString *_NSStringByAppendingNameScale(NSString *string, CGFloat scale) {
+    if (!string) return nil;
+    // No-op for scale 1, empty strings, or directory-style paths ending in "/".
+    if (fabs(scale - 1) <= __FLT_EPSILON__ || string.length == 0 || [string hasSuffix:@"/"]) return string.copy;
+    return [string stringByAppendingFormat:@"@%@x", @(scale)];
+}
+
+/**
+ Return the path scale.
+ 
+ e.g.
+ <table>
+ <tr><th>Path            </th><th>Scale </th></tr>
+ <tr><td>"icon.png"      </td><td>1     </td></tr>
+ <tr><td>"icon@2x.png"   </td><td>2     </td></tr>
+ <tr><td>"icon@2.5x.png" </td><td>2.5   </td></tr>
+ <tr><td>"icon@2x"       </td><td>1     </td></tr>
+ <tr><td>"icon@2x..png"  </td><td>1     </td></tr>
+ <tr><td>"icon@2x.png/"  </td><td>1     </td></tr>
+ </table>
+ */
+// NOTE: duplicate of the same static helper in YYFrameImage.m — keep in sync.
+static CGFloat _NSStringPathScale(NSString *string) {
+    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
+    NSString *name = string.stringByDeletingPathExtension;
+    __block CGFloat scale = 1;
+    
+    // Match a trailing "@<number>x" (e.g. "@2x", "@2.5x") on the
+    // extensionless name.
+    NSRegularExpression *pattern = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
+    [pattern enumerateMatchesInString:name options:kNilOptions range:NSMakeRange(0, name.length) usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
+        // NOTE(review): `location >= 3` appears intended to require a
+        // non-trivial base name before the "@...x" suffix — confirm intent.
+        if (result.range.location >= 3) {
+            scale = [string substringWithRange:NSMakeRange(result.range.location + 1, result.range.length - 2)].doubleValue;
+        }
+    }];
+    
+    return scale;
+}
+
+
+@implementation YYImage {
+    YYImageDecoder *_decoder;    // non-nil only when the source has multiple frames
+    NSArray *_preloadedFrames;   // UIImage (or NSNull for failed frames) per index
+    OSSpinLock _preloadedLock;   // guards _preloadedFrames
+    NSUInteger _bytesPerFrame;   // decoded size (bytes) of a single frame
+}
+
+// Loads a bundle image by name (no caching — unlike +[UIImage imageNamed:]).
+// Tries the screen-preferred scales first and, when no extension is given,
+// the system-supported extensions plus webp.
++ (YYImage *)imageNamed:(NSString *)name {
+    if (name.length == 0) return nil;
+    if ([name hasSuffix:@"/"]) return nil;
+    
+    NSString *res = name.stringByDeletingPathExtension;
+    NSString *ext = name.pathExtension;
+    NSString *path = nil;
+    CGFloat scale = 1;
+    
+    NSArray *scales = _NSBundlePreferredScales();
+    for (int s = 0; s < scales.count; s++) {
+        scale = ((NSNumber *)scales[s]).floatValue;
+        NSString *scaledName = _NSStringByAppendingNameScale(res, scale);
+        // If no extension, guess by system supported (same as UIImage).
+        NSArray *exts = ext.length > 0 ? @[ext] : @[@"", @"png", @"jpeg", @"jpg", @"gif", @"webp"];
+        for (NSString *e in exts) {
+            path = [[NSBundle mainBundle] pathForResource:scaledName ofType:e];
+            if (path) break;
+        }
+        if (path) break;
+    }
+    if (path.length == 0) return nil;
+    
+    NSData *data = [NSData dataWithContentsOfFile:path];
+    if (data.length == 0) return nil;
+    
+    return [[self alloc] initWithData:data scale:scale];
+}
+
++ (YYImage *)imageWithContentsOfFile:(NSString *)path {
+    return [[self alloc] initWithContentsOfFile:path];
+}
+
++ (YYImage *)imageWithData:(NSData *)data {
+    return [[self alloc] initWithData:data];
+}
+
++ (YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale {
+    return [[self alloc] initWithData:data scale:scale];
+}
+
+- (instancetype)initWithContentsOfFile:(NSString *)path {
+    NSData *data = [NSData dataWithContentsOfFile:path];
+    return [self initWithData:data scale:_NSStringPathScale(path)];
+}
+
+- (instancetype)initWithData:(NSData *)data {
+    return [self initWithData:data scale:1];
+}
+
+// Designated initializer: decodes the first frame for display and, when the
+// data contains multiple frames, retains the decoder for on-demand decoding.
+- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
+    if (data.length == 0) return nil;
+    if (scale <= 0) scale = 1;
+    _preloadedLock = OS_SPINLOCK_INIT;
+    @autoreleasepool {
+        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
+        YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
+        UIImage *image = frame.image;
+        if (!image) return nil;
+        self = [self initWithCGImage:image.CGImage scale:decoder.scale orientation:image.imageOrientation];
+        if (!self) return nil;
+        _animatedImageType = decoder.type;
+        if (decoder.frameCount > 1) {
+            _decoder = decoder;
+            _bytesPerFrame = CGImageGetBytesPerRow(image.CGImage) * CGImageGetHeight(image.CGImage);
+            _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
+        }
+        self.yy_isDecodedForDisplay = YES;
+    }
+    return self;
+}
+
+- (NSData *)animatedImageData {
+    return _decoder.data;
+}
+
+// Custom setter: decodes every frame into memory when enabled, releases the
+// preloaded frames when disabled.
+- (void)setPreloadAllAnimatedImageFrames:(BOOL)preloadAllAnimatedImageFrames {
+    if (_preloadAllAnimatedImageFrames != preloadAllAnimatedImageFrames) {
+        // fix: record the new state. Without this assignment the ivar stayed
+        // NO forever, so setting the property back to NO compared NO != NO
+        // and the preloaded frames were never released (memory leak).
+        _preloadAllAnimatedImageFrames = preloadAllAnimatedImageFrames;
+        if (preloadAllAnimatedImageFrames && _decoder.frameCount > 0) {
+            NSMutableArray *frames = [NSMutableArray new];
+            for (NSUInteger i = 0, max = _decoder.frameCount; i < max; i++) {
+                UIImage *img = [self animatedImageFrameAtIndex:i];
+                if (img) {
+                    [frames addObject:img];
+                } else {
+                    [frames addObject:[NSNull null]]; // placeholder for undecodable frame
+                }
+            }
+            OSSpinLockLock(&_preloadedLock);
+            _preloadedFrames = frames;
+            OSSpinLockUnlock(&_preloadedLock);
+        } else {
+            OSSpinLockLock(&_preloadedLock);
+            _preloadedFrames = nil;
+            OSSpinLockUnlock(&_preloadedLock);
+        }
+    }
+}
+
+#pragma mark - protocol NSCoding
+
+// Archives the original (possibly animated) image data when available so the
+// multi-frame content survives a round trip; otherwise falls back to UIImage.
+- (instancetype)initWithCoder:(NSCoder *)aDecoder {
+    NSNumber *scale = [aDecoder decodeObjectForKey:@"YYImageScale"];
+    NSData *data = [aDecoder decodeObjectForKey:@"YYImageData"];
+    if (data.length) {
+        self = [self initWithData:data scale:scale.doubleValue];
+    } else {
+        self = [super initWithCoder:aDecoder];
+    }
+    return self;
+}
+
+- (void)encodeWithCoder:(NSCoder *)aCoder {
+    if (_decoder.data.length) {
+        [aCoder encodeObject:@(self.scale) forKey:@"YYImageScale"];
+        [aCoder encodeObject:_decoder.data forKey:@"YYImageData"];
+    } else {
+        [super encodeWithCoder:aCoder]; // Apple use UIImagePNGRepresentation() to encode UIImage.
+    }
+}
+
+#pragma mark - protocol YYAnimatedImage
+
+- (NSUInteger)animatedImageFrameCount {
+    return _decoder.frameCount;
+}
+
+- (NSUInteger)animatedImageLoopCount {
+    return _decoder.loopCount;
+}
+
+- (NSUInteger)animatedImageBytesPerFrame {
+    return _bytesPerFrame;
+}
+
+// Returns a preloaded frame when available (NSNull maps back to nil);
+// otherwise decodes on demand via the retained decoder.
+- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
+    if (index >= _decoder.frameCount) return nil;
+    OSSpinLockLock(&_preloadedLock);
+    UIImage *image = _preloadedFrames[index];
+    OSSpinLockUnlock(&_preloadedLock);
+    if (image) return image == (id)[NSNull null] ? nil : image;
+    return [_decoder frameAtIndex:index decodeForDisplay:YES].image;
+}
+
+- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
+    NSTimeInterval duration = [_decoder frameDurationAtIndex:index];
+    
+    /*
+     http://opensource.apple.com/source/WebCore/WebCore-7600.1.25/platform/graphics/cg/ImageSourceCG.cpp
+     Many annoying ads specify a 0 duration to make an image flash as quickly as 
+     possible. We follow Safari and Firefox's behavior and use a duration of 100 ms 
+     for any frames that specify a duration of <= 10 ms.
+     See <rdar://problem/7689300> and <http://webkit.org/b/36082> for more information.
+     
+     See also: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser.
+     */
+    if (duration < 0.011f) return 0.100f;
+    return duration;
+}
+
+@end

+ 502 - 0
YYImage/YYImageCoder.h

@@ -0,0 +1,502 @@
+//
+//  YYImageCoder.h
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 15/5/13.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+/**
+ Image file type.
+ */
+typedef NS_ENUM(NSUInteger, YYImageType) {
+    YYImageTypeUnknown = 0, ///< unknown
+    YYImageTypeJPEG,        ///< jpeg, jpg
+    YYImageTypeJPEG2000,    ///< jp2
+    YYImageTypeTIFF,        ///< tiff, tif
+    YYImageTypeBMP,         ///< bmp
+    YYImageTypeICO,         ///< ico
+    YYImageTypeICNS,        ///< icns
+    YYImageTypeGIF,         ///< gif
+    YYImageTypePNG,         ///< png
+    YYImageTypeWebP,        ///< webp
+    YYImageTypeOther,       ///< other image format
+};
+
+
+/**
+ Dispose method specifies how the area used by the current frame is to be treated
+ before rendering the next frame on the canvas.
+ */
+typedef NS_ENUM(NSUInteger, YYImageDisposeMethod) {
+    
+    /**
+     No disposal is done on this frame before rendering the next; the contents
+     of the canvas are left as is.
+     */
+    YYImageDisposeNone = 0,
+    
+    /**
+     The frame's region of the canvas is to be cleared to fully transparent black
+     before rendering the next frame.
+     */
+    YYImageDisposeBackground,
+    
+    /**
+     The frame's region of the canvas is to be reverted to the previous contents
+     before rendering the next frame.
+     */
+    YYImageDisposePrevious,
+};
+
+/**
+ Blend operation specifies how transparent pixels of the current frame are
+ blended with those of the previous canvas.
+ */
+typedef NS_ENUM(NSUInteger, YYImageBlendOperation) {
+    
+    /**
+     All color components of the frame, including alpha, overwrite the current
+     contents of the frame's canvas region.
+     */
+    YYImageBlendNone = 0,
+    
+    /**
+     The frame should be composited onto the output buffer based on its alpha.
+     */
+    YYImageBlendOver,
+};
+
+/**
+ An image frame object.
+ */
+@interface YYImageFrame : NSObject <NSCopying>
+@property (nonatomic, assign) NSUInteger index;    ///< Frame index (zero based)
+@property (nonatomic, assign) NSUInteger width;    ///< Frame width
+@property (nonatomic, assign) NSUInteger height;   ///< Frame height
+@property (nonatomic, assign) NSUInteger offsetX;  ///< Frame origin.x in canvas (left-bottom based)
+@property (nonatomic, assign) NSUInteger offsetY;  ///< Frame origin.y in canvas (left-bottom based)
+@property (nonatomic, assign) NSTimeInterval duration;      ///< Frame duration in seconds
+@property (nonatomic, assign) YYImageDisposeMethod dispose; ///< Frame dispose method.
+@property (nonatomic, assign) YYImageBlendOperation blend;  ///< Frame blend operation.
+@property (nonatomic, strong) UIImage *image; ///< The image.
++ (instancetype)frameWithImage:(UIImage *)image;
+@end
+
+
+#pragma mark - Decoder
+
+/**
+ An image decoder to decode image data.
+ 
+ @discussion This class supports decoding animated WebP, APNG, GIF and system
+ image format such as PNG, JPG, JP2, BMP, TIFF, PIC, ICNS and ICO. It can be used 
+ to decode complete image data, or to decode incremental image data during image 
+ download. This class is thread-safe.
+ 
+ Example:
+ 
+    // Decode single image:
+    NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
+    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
+    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+ 
+    // Decode image during download:
+    NSMutableData *data = [NSMutableData new];
+    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
+    while(newDataArrived) {
+        [data appendData:newData];
+        [decoder updateData:data final:NO];
+        if (decoder.frameCount > 0) {
+            UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+            // progressive display...
+        }
+    }
+    [decoder updateData:data final:YES];
+    UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
+    // final display...
+ 
+ */
+@interface YYImageDecoder : NSObject
+
+@property (nonatomic, readonly) NSData *data;       ///< Image data.
+@property (nonatomic, readonly) YYImageType type;   ///< Image data type.
+@property (nonatomic, readonly) CGFloat scale;      ///< Image scale.
+@property (nonatomic, readonly) NSUInteger frameCount;     ///< Image frame count.
+@property (nonatomic, readonly) NSUInteger loopCount;      ///< Image loop count, 0 means infinite.
+@property (nonatomic, readonly) NSUInteger width;          ///< Image canvas width.
+@property (nonatomic, readonly) NSUInteger height;         ///< Image canvas height.
+@property (nonatomic, readonly, getter=isFinalized) BOOL finalized; ///< Whether the data is finalized (no further updates accepted).
+
+/**
+ Creates an image decoder.
+ 
+ @param scale  Image's scale.
+ @return An image decoder.
+ */
+- (instancetype)initWithScale:(CGFloat)scale NS_DESIGNATED_INITIALIZER;
+
+/**
+ Updates the incremental image with new data.
+ 
+ @discussion You can use this method to decode progressive/interlaced/baseline
+ image when you do not have the complete image data. The `data` was retained by
+ decoder, you should not modify the data in other thread during decoding.
+ 
+ @param data  The data to add to the image decoder. Each time you call this 
+ function, the 'data' parameter must contain all of the image file data 
+ accumulated so far.
+ 
+ @param final  A value that specifies whether the data is the final set. 
+ Pass YES if it is, NO otherwise. When the data is already finalized, you can
+ not update the data anymore.
+ 
+ @return Whether succeed.
+ */
+- (BOOL)updateData:(NSData *)data final:(BOOL)final;
+
+/**
+ Convenience method to create a decoder with specified data.
+ @param data  Image data.
+ @param scale Image's scale.
+ @return A new decoder, or nil if an error occurs.
+ */
++ (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale;
+
+/**
+ Decodes and returns a frame from a specified index.
+ @param index  Frame image index (zero-based).
+ @param decodeForDisplay Whether decode the image to memory bitmap for display.
+    If NO, it will try to return the original frame data without blend.
+ @return A new frame with image, or nil if an error occurs.
+ */
+- (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay;
+
+/**
+ Returns the frame duration from a specified index.
+ @param index  Frame image index (zero-based).
+ @return Duration in seconds.
+ */
+- (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index;
+
+/**
+ Returns the frame's properties. See "CGImageProperties.h" in ImageIO.framework
+ for more information.
+ 
+ @param index  Frame image index (zero-based).
+ @return The ImageIO frame property.
+ */
+- (NSDictionary *)framePropertiesAtIndex:(NSUInteger)index;
+
+/**
+ Returns the image's properties. See "CGImageProperties.h" in ImageIO.framework
+ for more information.
+ */
+- (NSDictionary *)imageProperties;
+
+@end
+
+
+
+#pragma mark - Encoder
+
+/**
+ An image encoder to encode image to data.
+ 
+ @discussion It supports encoding single frame image with the type defined in YYImageType.
+ It also supports encoding multi-frame image with GIF, APNG and WebP.
+ 
+ Example:
+    
+    YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
+    jpegEncoder.quality = 0.9;
+    [jpegEncoder addImage:image duration:0];
+    NSData *jpegData = [jpegEncoder encode];
+ 
+    YYImageEncoder *gifEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
+    gifEncoder.loopCount = 5;
+    [gifEncoder addImage:image0 duration:0.1];
+    [gifEncoder addImage:image1 duration:0.15];
+    [gifEncoder addImage:image2 duration:0.2];
+    NSData *gifData = [gifEncoder encode];
+ 
+ @warning It just packs the images together when encoding a multi-frame image. If you
+ want to reduce the image file size, try imagemagick/ffmpeg for GIF and WebP,
+ and apngasm for APNG.
+ */
+@interface YYImageEncoder : NSObject
+
+@property (nonatomic, readonly) YYImageType type;   ///< Image type.
+@property (nonatomic, assign) NSUInteger loopCount; ///< Loop count, 0 means infinite, only available for GIF/APNG/WebP.
+@property (nonatomic, assign) BOOL lossless;        ///< Lossless, only available for WebP.
+@property (nonatomic, assign) CGFloat quality;      ///< Compress quality, 0.0~1.0, only available for JPG/JP2/WebP.
+
+- (instancetype)init UNAVAILABLE_ATTRIBUTE;
++ (instancetype)new UNAVAILABLE_ATTRIBUTE;
+
+/**
+ Create an image encoder with a specified type.
+ @param type Image type.
+ @return A new encoder, or nil if an error occurs.
+ */
+- (instancetype)initWithType:(YYImageType)type NS_DESIGNATED_INITIALIZER;
+
+/**
+ Add an image to encoder.
+ @param image    Image.
+ @param duration Image duration for animation. Pass 0 to ignore this parameter.
+ */
+- (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration;
+
+/**
+ Add an image with image data to encoder.
+ @param data    Image data.
+ @param duration Image duration for animation. Pass 0 to ignore this parameter.
+ */
+- (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration;
+
+/**
+ Add an image from a file path to encoder.
+ @param path     Image file path.
+ @param duration Image duration for animation. Pass 0 to ignore this parameter.
+ */
+- (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration;
+
+/**
+ Encodes the image and returns the image data.
+ @return The image data, or nil if an error occurs.
+ */
+- (NSData *)encode;
+
+/**
+ Encodes the image to a file.
+ @param path The file path (overwrite if exist).
+ @return Whether succeed.
+ */
+- (BOOL)encodeToFile:(NSString *)path;
+
+/**
+ Convenience method to encode single frame image.
+ @param image   The image.
+ @param type    The destination image type.
+ @param quality Image quality, 0.0~1.0.
+ @return The image data, or nil if an error occurs.
+ */
++ (NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality;
+
+/**
+ Convenience method to encode image from a decoder.
+ @param decoder The image decoder.
+ @param type    The destination image type.
+ @param quality Image quality, 0.0~1.0.
+ @return The image data, or nil if an error occurs.
+ */
++ (NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality;
+
+@end
+
+
+#pragma mark - UIImage
+
+@interface UIImage (YYImageCoder)
+
+/**
+ Decompress this image to bitmap, so when the image is displayed on screen, 
+ the main thread won't be blocked by additional decode. If the image has already
+ been decoded or unable to decode, it just returns itself.
+ 
+ @return A decoded image, or itself if decoding is not needed.
+ @see yy_isDecodedForDisplay
+ */
+- (instancetype)yy_imageByDecoded;
+
+/**
+ Whether the image can be displayed on screen without additional decoding.
+ @warning It is just a hint for your code; changing it has no other effect.
+ */
+@property (nonatomic, assign) BOOL yy_isDecodedForDisplay;
+
+/**
+ Saves this image to iOS Photos Album. 
+ 
+ @discussion  This method attempts to save the original data to album if the
+ image is created from an animated GIF/APNG, otherwise, it will save the image 
+ as JPEG or PNG (based on the alpha information).
+ 
+ @param completionBlock The block invoked (in main thread) after the save operation completes.
+    assetURL: An URL that identifies the saved image file. If the image is not saved, assetURL is nil.
+    error: If the image is not saved, an error object that describes the reason for failure, otherwise nil.
+ */
+- (void)yy_saveToAlbumWithCompletionBlock:(void(^)(NSURL *assetURL, NSError *error))completionBlock;
+
+/**
+ Return a 'best' data representation for this image.
+ 
+ @discussion The conversion is based on these rules:
+ 1. If the image is created from an animated GIF/APNG/WebP, it returns the original data.
+ 2. It returns PNG or JPEG(0.9) representation based on the alpha information.
+ 
+ @return Image data, or nil if an error occurs.
+ */
+- (NSData *)yy_imageDataRepresentation;
+
+@end
+
+
+
+#pragma mark - Helper
+
+/// Detect a data's image type by reading the data's header 16 bytes (very fast).
+CG_EXTERN YYImageType YYImageDetectType(CFDataRef data);
+
+/// Convert YYImageType to UTI (such as kUTTypeJPEG).
+CG_EXTERN CFStringRef YYImageTypeToUTType(YYImageType type);
+
+/// Convert UTI (such as kUTTypeJPEG) to YYImageType.
+CG_EXTERN YYImageType YYImageTypeFromUTType(CFStringRef uti);
+
+/// Get image type's file extension (such as @"jpg").
+CG_EXTERN NSString *YYImageTypeGetExtension(YYImageType type);
+
+
+
+/// Returns the shared DeviceRGB color space.
+CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceRGB();
+
+/// Returns the shared DeviceGray color space.
+CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceGray();
+
+/// Returns whether a color space is DeviceRGB.
+CG_EXTERN BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space);
+
+/// Returns whether a color space is DeviceGray.
+CG_EXTERN BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space);
+
+
+
+/// Convert EXIF orientation value to UIImageOrientation.
+CG_EXTERN UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value);
+
+/// Convert UIImageOrientation to EXIF orientation value.
+CG_EXTERN NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation);
+
+
+
+/**
+ Create a decoded image.
+ 
+ @discussion If the source image is created from a compressed image data (such as
+ PNG or JPEG), you can use this method to decode the image. After decoded, you can
+ access the decoded bytes with CGImageGetDataProvider() and CGDataProviderCopyData()
+ without additional decode process. If the image has already decoded, this method
+ just copy the decoded bytes to the new image.
+ 
+ @param imageRef          The source image.
+ @param decodeForDisplay  If YES, this method will decode the image and convert
+          it to BGRA8888 (premultiplied) or BGRX8888 format for CALayer display.
+ 
+ @return A decoded image, or NULL if an error occurs.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay);
+
+/**
+ Create an image copy with an orientation.
+ 
+ @param imageRef       Source image.
+ @param orientation    Image orientation which will be applied to the image.
+ @param destBitmapInfo Destination image bitmap, only support 32bit format (such as ARGB8888).
+ @return A new image, or NULL if an error occurs.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateCopyWithOrientation(CGImageRef imageRef,
+                                              UIImageOrientation orientation,
+                                              CGBitmapInfo destBitmapInfo);
+
+/**
+ Create an image copy with CGAffineTransform.
+ 
+ @param imageRef       Source image.
+ @param transform      Transform applied to image (left-bottom based coordinate system).
+ @param destSize       Destination image size.
+ @param destBitmapInfo Destination image bitmap, only support 32bit format (such as ARGB8888).
+ @return A new image, or NULL if an error occurs.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateAffineTransformCopy(CGImageRef imageRef,
+                                              CGAffineTransform transform,
+                                              CGSize destSize,
+                                              CGBitmapInfo destBitmapInfo);
+
+/**
+ Encode an image to data with CGImageDestination.
+ 
+ @param imageRef  The image.
+ @param type      The image destination data type.
+ @param quality   The quality (0.0~1.0).
+ @return A new image data, or NULL if an error occurs.
+ */
+CG_EXTERN CFDataRef YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality);
+
+
+/**
+ Whether WebP is available in YYImage.
+ */
+CG_EXTERN BOOL YYImageWebPAvailable();
+
+/**
+ Get a webp image frame count;
+ 
+ @param webpData WebP data.
+ @return Image frame count, or 0 if an error occurs.
+ */
+CG_EXTERN NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData);
+
+/**
+ Decode an image from WebP data, returns NULL if an error occurs.
+ 
+ @param webpData          The WebP data.
+ @param decodeForDisplay  If YES, this method will decode the image and convert it
+                            to BGRA8888 (premultiplied) format for CALayer display.
+ @param useThreads        YES to enable multi-thread decode.
+                            (speed up, but cost more CPU)
+ @param bypassFiltering   YES to skip the in-loop filtering.
+                            (speed up, but may lose some smooth)
+ @param noFancyUpsampling YES to use faster pointwise upsampler.
+                            (speed down, and may lose some details).
+ @return The decoded image, or NULL if an error occurs.
+ */
+CG_EXTERN CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
+                                       BOOL decodeForDisplay,
+                                       BOOL useThreads,
+                                       BOOL bypassFiltering,
+                                       BOOL noFancyUpsampling);
+
+
+typedef NS_ENUM(NSUInteger, YYImagePreset) {
+    YYImagePresetDefault = 0,  ///< default preset.
+    YYImagePresetPicture,      ///< digital picture, like portrait, inner shot
+    YYImagePresetPhoto,        ///< outdoor photograph, with natural lighting
+    YYImagePresetDrawing,      ///< hand or line drawing, with high-contrast details
+    YYImagePresetIcon,         ///< small-sized colorful images
+    YYImagePresetText          ///< text-like
+};
+
+/**
+ Encode a CGImage to WebP data.
+ 
+ @param imageRef      image
+ @param lossless      YES=lossless (similar to PNG), NO=lossy (similar to JPEG)
+ @param quality       0.0~1.0 (0=smallest file, 1.0=biggest file)
+                      For lossless image, try the value near 1.0; for lossy, try the value near 0.8.
+ @param compressLevel 0~6 (0=fast, 6=slower-better). Default is 4.
+ @param preset        Preset for different image type, default is YYImagePresetDefault.
+ @return WebP data, or NULL if an error occurs.
+ */
+CG_EXTERN CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef,
+                                         BOOL lossless,
+                                         CGFloat quality,
+                                         int compressLevel,
+                                         YYImagePreset preset);

+ 2841 - 0
YYImage/YYImageCoder.m

@@ -0,0 +1,2841 @@
+//
+//  YYImageCoder.m
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 15/5/13.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "YYImageCoder.h"
+#import "YYImage.h"
+#import <CoreFoundation/CoreFoundation.h>
+#import <ImageIO/ImageIO.h>
+#import <Accelerate/Accelerate.h>
+#import <QuartzCore/QuartzCore.h>
+#import <MobileCoreServices/MobileCoreServices.h>
+#import <AssetsLibrary/AssetsLibrary.h>
+#import <objc/runtime.h>
+#import <pthread.h>
+#import <libkern/OSAtomic.h>
+#import <zlib.h>
+
+
+
+#ifndef YYIMAGE_WEBP_ENABLED
+#if __has_include(<webp/decode.h>) && __has_include(<webp/encode.h>) && \
+    __has_include(<webp/demux.h>)  && __has_include(<webp/mux.h>)
+#define YYIMAGE_WEBP_ENABLED 1
+#import <webp/decode.h>
+#import <webp/encode.h>
+#import <webp/demux.h>
+#import <webp/mux.h>
+#elif __has_include("webp/decode.h") && __has_include("webp/encode.h") && \
+      __has_include("webp/demux.h")  && __has_include("webp/mux.h")
+#define YYIMAGE_WEBP_ENABLED 1
+#import "webp/decode.h"
+#import "webp/encode.h"
+#import "webp/demux.h"
+#import "webp/mux.h"
+#else
+#define YYIMAGE_WEBP_ENABLED 0
+#endif
+#endif
+
+
+
+
+
+////////////////////////////////////////////////////////////////////////////////
+#pragma mark - Utility (for little endian platform)
+
+// Pack 4 (or 2) ASCII characters into an integer code, little-endian byte order.
+#define YY_FOUR_CC(c1,c2,c3,c4) ((uint32_t)(((c4) << 24) | ((c3) << 16) | ((c2) << 8) | (c1)))
+#define YY_TWO_CC(c1,c2) ((uint16_t)(((c2) << 8) | (c1)))
+
+/// Byte-swap a 16-bit value (big endian <-> little endian).
+static inline uint16_t yy_swap_endian_uint16(uint16_t value) {
+    return
+    (uint16_t) ((value & 0x00FF) << 8) |
+    (uint16_t) ((value & 0xFF00) >> 8) ;
+}
+
+/// Byte-swap a 32-bit value (big endian <-> little endian).
+static inline uint32_t yy_swap_endian_uint32(uint32_t value) {
+    return
+    (uint32_t)((value & 0x000000FFU) << 24) |
+    (uint32_t)((value & 0x0000FF00U) <<  8) |
+    (uint32_t)((value & 0x00FF0000U) >>  8) |
+    (uint32_t)((value & 0xFF000000U) >> 24) ;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+#pragma mark - APNG
+
+/*
+ PNG  spec: http://www.libpng.org/pub/png/spec/1.2/PNG-Structure.html
+ APNG spec: https://wiki.mozilla.org/APNG_Specification
+ 
+ ===============================================================================
+ PNG format:
+ header (8): 89 50 4e 47 0d 0a 1a 0a
+ chunk, chunk, chunk, ...
+ 
+ ===============================================================================
+ chunk format:
+ length (4): uint32_t big endian
+ fourcc (4): chunk type code
+ data   (length): data
+ crc32  (4): uint32_t big endian crc32(fourcc + data)
+ 
+ ===============================================================================
+ PNG chunk define:
+ 
+ IHDR (Image Header) required, must appear first, 13 bytes
+ width              (4) pixel count, should not be zero
+ height             (4) pixel count, should not be zero
+ bit depth          (1) expected: 1, 2, 4, 8, 16
+ color type         (1) 1<<0 (palette used), 1<<1 (color used), 1<<2 (alpha channel used)
+ compression method (1) 0 (deflate/inflate)
+ filter method      (1) 0 (adaptive filtering with five basic filter types)
+ interlace method   (1) 0 (no interlace) or 1 (Adam7 interlace)
+ 
+ IDAT (Image Data) required, must appear consecutively if there are multiple 'IDAT' chunks
+ 
+ IEND (End) required, must appear last, 0 bytes
+ 
+ ===============================================================================
+ APNG chunk define:
+ 
+ acTL (Animation Control) required, must appear before 'IDAT', 8 bytes
+ num frames     (4) number of frames
+ num plays      (4) number of times to loop, 0 indicates infinite looping
+ 
+ fcTL (Frame Control) required, must appear before the 'IDAT' or 'fdAT' chunks of the frame to which it applies, 26 bytes
+ sequence number   (4) sequence number of the animation chunk, starting from 0
+ width             (4) width of the following frame
+ height            (4) height of the following frame
+ x offset          (4) x position at which to render the following frame
+ y offset          (4) y position at which to render the following frame
+ delay num         (2) frame delay fraction numerator
+ delay den         (2) frame delay fraction denominator
+ dispose op        (1) type of frame area disposal to be done after rendering this frame (0:none, 1:background 2:previous)
+ blend op          (1) type of frame area rendering for this frame (0:source, 1:over)
+ 
+ fdAT (Frame Data) required
+ sequence number   (4) sequence number of the animation chunk
+ frame data        (x) frame data for this frame (same as 'IDAT')
+ 
+ ===============================================================================
+ `dispose_op` specifies how the output buffer should be changed at the end of the delay 
+ (before rendering the next frame).
+ 
+ * NONE: no disposal is done on this frame before rendering the next; the contents
+    of the output buffer are left as is.
+ * BACKGROUND: the frame's region of the output buffer is to be cleared to fully
+    transparent black before rendering the next frame.
+ * PREVIOUS: the frame's region of the output buffer is to be reverted to the previous
+    contents before rendering the next frame.
+
+ `blend_op` specifies whether the frame is to be alpha blended into the current output buffer
+ content, or whether it should completely replace its region in the output buffer.
+ 
+ * SOURCE: all color components of the frame, including alpha, overwrite the current contents
+    of the frame's output buffer region. 
+ * OVER: the frame should be composited onto the output buffer based on its alpha,
+    using a simple OVER operation as described in the "Alpha Channel Processing" section
+    of the PNG specification
+ */
+
+// PNG IHDR "color type" bit flags.
+// NOTE(review): YY_PNG_ALPHA_TYPE_PALEETE is a misspelling of "PALETTE";
+// renaming would touch every call site, so it is only flagged here.
+typedef enum {
+    YY_PNG_ALPHA_TYPE_PALEETE = 1 << 0,
+    YY_PNG_ALPHA_TYPE_COLOR = 1 << 1,
+    YY_PNG_ALPHA_TYPE_ALPHA = 1 << 2,
+} yy_png_alpha_type;
+
+// APNG fcTL dispose_op: what to do with the frame region before the next frame.
+typedef enum {
+    YY_PNG_DISPOSE_OP_NONE = 0,
+    YY_PNG_DISPOSE_OP_BACKGROUND = 1,
+    YY_PNG_DISPOSE_OP_PREVIOUS = 2,
+} yy_png_dispose_op;
+
+// APNG fcTL blend_op: how the frame is composited onto the output buffer.
+typedef enum {
+    YY_PNG_BLEND_OP_SOURCE = 0,
+    YY_PNG_BLEND_OP_OVER = 1,
+} yy_png_blend_op;
+
+typedef struct {
+    uint32_t width;             ///< pixel count, should not be zero
+    uint32_t height;            ///< pixel count, should not be zero
+    uint8_t bit_depth;          ///< expected: 1, 2, 4, 8, 16
+    uint8_t color_type;         ///< see yy_png_alpha_type
+    uint8_t compression_method; ///< 0 (deflate/inflate)
+    uint8_t filter_method;      ///< 0 (adaptive filtering with five basic filter types)
+    uint8_t interlace_method;   ///< 0 (no interlace) or 1 (Adam7 interlace)
+} yy_png_chunk_IHDR;
+
+typedef struct {
+    uint32_t sequence_number;  ///< sequence number of the animation chunk, starting from 0
+    uint32_t width;            ///< width of the following frame
+    uint32_t height;           ///< height of the following frame
+    uint32_t x_offset;         ///< x position at which to render the following frame
+    uint32_t y_offset;         ///< y position at which to render the following frame
+    uint16_t delay_num;        ///< frame delay fraction numerator
+    uint16_t delay_den;        ///< frame delay fraction denominator
+    uint8_t dispose_op;        ///< see yy_png_dispose_op
+    uint8_t blend_op;          ///< see yy_png_blend_op
+} yy_png_chunk_fcTL;
+
+typedef struct {
+    uint32_t offset; ///< chunk offset in PNG data
+    uint32_t fourcc; ///< chunk fourcc
+    uint32_t length; ///< chunk data length
+    uint32_t crc32;  ///< chunk crc32
+} yy_png_chunk_info;
+
+typedef struct {
+    uint32_t chunk_index; ///< the first `fdAT`/`IDAT` chunk index
+    uint32_t chunk_num;   ///< the `fdAT`/`IDAT` chunk count
+    uint32_t chunk_size;  ///< the `fdAT`/`IDAT` chunk bytes
+    yy_png_chunk_fcTL frame_control;
+} yy_png_frame_info;
+
+typedef struct {
+    yy_png_chunk_IHDR header;   ///< png header
+    yy_png_chunk_info *chunks;      ///< chunks
+    uint32_t chunk_num;          ///< count of chunks
+    
+    yy_png_frame_info *apng_frames; ///< frame info, NULL if not apng
+    uint32_t apng_frame_num;     ///< 0 if not apng
+    uint32_t apng_loop_num;      ///< 0 indicates infinite looping
+    
+    uint32_t *apng_shared_chunk_indexs; ///< shared chunk index
+    uint32_t apng_shared_chunk_num;     ///< shared chunk count
+    uint32_t apng_shared_chunk_size;    ///< shared chunk bytes
+    uint32_t apng_shared_insert_index;  ///< shared chunk insert index
+    bool apng_first_frame_is_cover;     ///< the first frame is same as png (cover)
+} yy_png_info;
+
+/// Parse a 13-byte big-endian IHDR chunk body from `data` into `IHDR`.
+/// NOTE(review): the uint32_t loads/stores assume unaligned pointer
+/// dereference is acceptable — true on the ARM/x86 iOS targets this ships
+/// on, but not portable C; verify before reusing elsewhere.
+static void yy_png_chunk_IHDR_read(yy_png_chunk_IHDR *IHDR, const uint8_t *data) {
+    IHDR->width = yy_swap_endian_uint32(*((uint32_t *)(data)));
+    IHDR->height = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
+    IHDR->bit_depth = data[8];
+    IHDR->color_type = data[9];
+    IHDR->compression_method = data[10];
+    IHDR->filter_method = data[11];
+    IHDR->interlace_method = data[12];
+}
+
+/// Serialize `IHDR` into a 13-byte big-endian chunk body at `data`.
+static void yy_png_chunk_IHDR_write(yy_png_chunk_IHDR *IHDR, uint8_t *data) {
+    *((uint32_t *)(data)) = yy_swap_endian_uint32(IHDR->width);
+    *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(IHDR->height);
+    data[8] = IHDR->bit_depth;
+    data[9] = IHDR->color_type;
+    data[10] = IHDR->compression_method;
+    data[11] = IHDR->filter_method;
+    data[12] = IHDR->interlace_method;
+}
+
+/// Parse a 26-byte big-endian fcTL (APNG frame control) chunk body into `fcTL`.
+/// NOTE(review): same unaligned-dereference assumption as the IHDR helpers.
+static void yy_png_chunk_fcTL_read(yy_png_chunk_fcTL *fcTL, const uint8_t *data) {
+    fcTL->sequence_number = yy_swap_endian_uint32(*((uint32_t *)(data)));
+    fcTL->width = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
+    fcTL->height = yy_swap_endian_uint32(*((uint32_t *)(data + 8)));
+    fcTL->x_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 12)));
+    fcTL->y_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 16)));
+    fcTL->delay_num = yy_swap_endian_uint16(*((uint16_t *)(data + 20)));
+    fcTL->delay_den = yy_swap_endian_uint16(*((uint16_t *)(data + 22)));
+    fcTL->dispose_op = data[24];
+    fcTL->blend_op = data[25];
+}
+
+/// Serialize `fcTL` into a 26-byte big-endian chunk body at `data`.
+static void yy_png_chunk_fcTL_write(yy_png_chunk_fcTL *fcTL, uint8_t *data) {
+    *((uint32_t *)(data)) = yy_swap_endian_uint32(fcTL->sequence_number);
+    *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(fcTL->width);
+    *((uint32_t *)(data + 8)) = yy_swap_endian_uint32(fcTL->height);
+    *((uint32_t *)(data + 12)) = yy_swap_endian_uint32(fcTL->x_offset);
+    *((uint32_t *)(data + 16)) = yy_swap_endian_uint32(fcTL->y_offset);
+    *((uint16_t *)(data + 20)) = yy_swap_endian_uint16(fcTL->delay_num);
+    *((uint16_t *)(data + 22)) = yy_swap_endian_uint16(fcTL->delay_den);
+    data[24] = fcTL->dispose_op;
+    data[25] = fcTL->blend_op;
+}
+
+// Convert a frame delay in seconds to an APNG fcTL fraction (num/den) using a
+// continued-fraction approximation.
+// NOTE(review): results are clamped to 0xFF (255) rather than the full
+// uint16_t range (0xFFFF) — presumably intentional to bound the search, but
+// it limits precision; confirm against the fcTL delay_num/delay_den spec.
+static void yy_png_delay_to_fraction(double duration, uint16_t *num, uint16_t *den) {
+    if (duration >= 0xFF) {
+        *num = 0xFF; // cap very long delays at 255 seconds
+        *den = 1;
+    } else if (duration <= 1.0 / (double)0xFF) {
+        *num = 1;    // floor very short delays at 1/255 second
+        *den = 0xFF;
+    } else {
+        // Use continued fraction to calculate the num and den.
+        long MAX = 10;
+        double eps = (0.5 / (double)0xFF);
+        long p[MAX], q[MAX], a[MAX], i, numl = 0, denl = 0; // p/q: convergents, a: CF terms
+        // The first two convergents are 0/1 and 1/0
+        p[0] = 0; q[0] = 1;
+        p[1] = 1; q[1] = 0;
+        // The rest of the convergents (and continued fraction)
+        for (i = 2; i < MAX; i++) {
+            a[i] = lrint(floor(duration));
+            p[i] = a[i] * p[i - 1] + p[i - 2];
+            q[i] = a[i] * q[i - 1] + q[i - 2];
+            if (p[i] <= 0xFF && q[i] <= 0xFF) { // keep only convergents that fit the clamped range
+                numl = p[i];
+                denl = q[i];
+            } else break;
+            if (fabs(duration - a[i]) < eps) break; // approximation is close enough
+            duration = 1.0 / (duration - a[i]);     // next continued-fraction term
+        }
+        
+        if (numl != 0 && denl != 0) {
+            *num = numl;
+            *den = denl;
+        } else {
+            // No usable convergent found; fall back to 1/100 second.
+            *num = 1;
+            *den = 100;
+        }
+    }
+}
+
+// Convert an APNG fcTL delay fraction (num/den) to seconds.
+static double yy_png_delay_to_seconds(uint16_t num, uint16_t den) {
+    if (den == 0) {
+        return num / 100.0; // APNG spec: a zero denominator means 1/100-second units
+    } else {
+        return (double)num / (double)den;
+    }
+}
+
+static bool yy_png_validate_animation_chunk_order(yy_png_chunk_info *chunks,  /* input */
+                                                  uint32_t chunk_num,         /* input */
+                                                  uint32_t *first_idat_index, /* output */
+                                                  bool *first_frame_is_cover  /* output */) {
+    /*
+     PNG at least contains 3 chunks: IHDR, IDAT, IEND.
+     `IHDR` must appear first.
+     `IDAT` must appear consecutively.
+     `IEND` must appear end.
+     
+     APNG must contain one `acTL` and at least one 'fcTL' and `fdAT`.
+     `fdAT` must appear consecutively.
+     `fcTL` must appear before `IDAT` or `fdAT`.
+     */
+    if (chunk_num <= 2) return false;
+    if (chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R')) return false;
+    if ((chunks + chunk_num - 1)->fourcc != YY_FOUR_CC('I', 'E', 'N', 'D')) return false;
+    
+    uint32_t prev_fourcc = 0;
+    uint32_t IHDR_num = 0;
+    uint32_t IDAT_num = 0;
+    uint32_t acTL_num = 0;
+    uint32_t fcTL_num = 0;
+    uint32_t first_IDAT = 0;
+    bool first_frame_cover = false;
+    for (uint32_t i = 0; i < chunk_num; i++) {
+        yy_png_chunk_info *chunk = chunks + i;
+        switch (chunk->fourcc) {
+            case YY_FOUR_CC('I', 'H', 'D', 'R'): {  // png header
+                if (i != 0) return false;
+                if (IHDR_num > 0) return false;
+                IHDR_num++;
+            } break;
+            case YY_FOUR_CC('I', 'D', 'A', 'T'): {  // png data
+                if (prev_fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
+                    if (IDAT_num == 0)
+                        first_IDAT = i;
+                    else
+                        return false;
+                }
+                IDAT_num++;
+            } break;
+            case YY_FOUR_CC('a', 'c', 'T', 'L'): {  // apng control
+                if (acTL_num > 0) return false;
+                acTL_num++;
+            } break;
+            case YY_FOUR_CC('f', 'c', 'T', 'L'): {  // apng frame control
+                if (i + 1 == chunk_num) return false;
+                if ((chunk + 1)->fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') &&
+                    (chunk + 1)->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
+                    return false;
+                }
+                if (fcTL_num == 0) {
+                    if ((chunk + 1)->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
+                        first_frame_cover = true;
+                    }
+                }
+                fcTL_num++;
+            } break;
+            case YY_FOUR_CC('f', 'd', 'A', 'T'): {  // apng data
+                if (prev_fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') && prev_fourcc != YY_FOUR_CC('f', 'c', 'T', 'L')) {
+                    return false;
+                }
+            } break;
+        }
+        prev_fourcc = chunk->fourcc;
+    }
+    if (IHDR_num != 1) return false;
+    if (IDAT_num == 0) return false;
+    if (acTL_num != 1) return false;
+    if (fcTL_num < acTL_num) return false;
+    *first_idat_index = first_IDAT;
+    *first_frame_is_cover = first_frame_cover;
+    return true;
+}
+
+static void yy_png_info_release(yy_png_info *info) {
+    if (info) {
+        if (info->chunks) free(info->chunks);
+        if (info->apng_frames) free(info->apng_frames);
+        if (info->apng_shared_chunk_indexs) free(info->apng_shared_chunk_indexs);
+        free(info);
+    }
+}
+
/**
 Create a png info from a png file. See struct png_info for more information.
 
 @param data   png/apng file data.
 @param length the data's length in bytes.
 @return A png info object, you may call yy_png_info_release() to release it.
 Returns NULL if an error occurs.
 
 The parse runs in two passes: first every chunk (offset/length/fourcc/crc) is
 collected while the APNG bookkeeping chunks (acTL/fcTL/fdAT) are sanity
 checked; then, if that bookkeeping was consistent, per-frame and shared chunk
 tables are built. A file with malformed APNG chunks is still returned as a
 plain (single image) PNG info.
 
 NOTE(review): 32-bit values are read via pointer casts at arbitrary byte
 offsets; this assumes unaligned loads are permitted on the target CPU.
 */
static yy_png_info *yy_png_info_create(const uint8_t *data, uint32_t length) {
    if (length < 32) return NULL; // too short to hold signature + minimal chunks
    if (*((uint32_t *)data) != YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47)) return NULL;      // PNG signature, bytes 0-3
    if (*((uint32_t *)(data + 4)) != YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A)) return NULL; // PNG signature, bytes 4-7
    
    uint32_t chunk_realloc_num = 16; // grow the chunk array in steps of 16
    yy_png_chunk_info *chunks = malloc(sizeof(yy_png_chunk_info) * chunk_realloc_num);
    if (!chunks) return NULL;
    
    // parse png chunks
    uint32_t offset = 8; // skip the 8-byte PNG signature
    uint32_t chunk_num = 0;
    uint32_t chunk_capacity = chunk_realloc_num;
    uint32_t apng_loop_num = 0;        // loop count declared by acTL
    int32_t apng_sequence_index = -1;  // last fcTL/fdAT sequence number seen
    int32_t apng_frame_index = 0;      // number of valid fcTL chunks seen
    int32_t apng_frame_number = -1;    // frame count declared by acTL (-1 = no acTL yet)
    bool apng_chunk_error = false;     // set when any APNG chunk is malformed
    do {
        if (chunk_num >= chunk_capacity) {
            // grow the chunk array
            yy_png_chunk_info *new_chunks = realloc(chunks, sizeof(yy_png_chunk_info) * (chunk_capacity + chunk_realloc_num));
            if (!new_chunks) {
                free(chunks);
                return NULL;
            }
            chunks = new_chunks;
            chunk_capacity += chunk_realloc_num;
        }
        yy_png_chunk_info *chunk = chunks + chunk_num;
        const uint8_t *chunk_data = data + offset;
        chunk->offset = offset;
        chunk->length = yy_swap_endian_uint32(*((uint32_t *)chunk_data)); // payload length, big-endian in file
        if ((uint64_t)chunk->offset + (uint64_t)chunk->length + 12 > length) {
            // chunk (length + fourcc + payload + crc) would run past the data end
            free(chunks);
            return NULL;
        }
        
        chunk->fourcc = *((uint32_t *)(chunk_data + 4));
        // NOTE(review): this bound is strictly weaker than the one above
        // (offset+length+12 <= length implies offset+length+8 <= length), so
        // the break appears unreachable; kept as a defensive guard.
        if ((uint64_t)chunk->offset + 4 + chunk->length + 4 > (uint64_t)length) break;
        chunk->crc32 = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8 + chunk->length)));
        chunk_num++;
        offset += 12 + chunk->length; // advance past length(4) + fourcc(4) + payload + crc(4)
        
        switch (chunk->fourcc) {
            case YY_FOUR_CC('a', 'c', 'T', 'L') : { // animation control: frame count + loop count
                if (chunk->length == 8) {
                    apng_frame_number = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
                    apng_loop_num = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 12)));
                } else {
                    apng_chunk_error = true; // acTL payload must be exactly 8 bytes
                }
            } break;
            case YY_FOUR_CC('f', 'c', 'T', 'L') :
            case YY_FOUR_CC('f', 'd', 'A', 'T') : {
                if (chunk->fourcc == YY_FOUR_CC('f', 'c', 'T', 'L')) {
                    if (chunk->length != 26) {
                        apng_chunk_error = true; // fcTL payload must be exactly 26 bytes
                    } else {
                        apng_frame_index++;
                    }
                }
                if (chunk->length > 4) {
                    // both fcTL and fdAT begin with a 4-byte sequence number,
                    // which must increase by exactly 1 across the whole file
                    uint32_t sequence = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
                    if (apng_sequence_index + 1 == sequence) {
                        apng_sequence_index++;
                    } else {
                        apng_chunk_error = true;
                    }
                } else {
                    apng_chunk_error = true;
                }
            } break;
            case YY_FOUR_CC('I', 'E', 'N', 'D') : {
                offset = length; // end, break do-while loop
            } break;
        }
    } while (offset + 12 <= length);
    
    if (chunk_num < 3 ||
        chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R') ||
        chunks->length != 13) { // an IHDR payload is always 13 bytes
        free(chunks);
        return NULL;
    }
    
    // png info
    yy_png_info *info = calloc(1, sizeof(yy_png_info));
    if (!info) {
        free(chunks);
        return NULL;
    }
    info->chunks = chunks;
    info->chunk_num = chunk_num;
    yy_png_chunk_IHDR_read(&info->header, data + chunks->offset + 8); // parse the IHDR payload
    
    // apng info — only filled in when the APNG chunks were present and
    // internally consistent (count declared by acTL matches fcTL count)
    if (!apng_chunk_error && apng_frame_number == apng_frame_index && apng_frame_number >= 1) {
        bool first_frame_is_cover = false;
        uint32_t first_IDAT_index = 0;
        if (!yy_png_validate_animation_chunk_order(info->chunks, info->chunk_num, &first_IDAT_index, &first_frame_is_cover)) {
            return info; // ignore apng chunk
        }
        
        info->apng_loop_num = apng_loop_num;
        info->apng_frame_num = apng_frame_number;
        info->apng_first_frame_is_cover = first_frame_is_cover;
        info->apng_shared_insert_index = first_IDAT_index;
        info->apng_frames = calloc(apng_frame_number, sizeof(yy_png_frame_info));
        if (!info->apng_frames) {
            yy_png_info_release(info);
            return NULL;
        }
        info->apng_shared_chunk_indexs = calloc(info->chunk_num, sizeof(uint32_t));
        if (!info->apng_shared_chunk_indexs) {
            yy_png_info_release(info);
            return NULL;
        }
        
        // second pass: assign each chunk either to a frame or to the shared list
        int32_t frame_index = -1; // index of the frame currently being filled
        uint32_t *shared_chunk_index = info->apng_shared_chunk_indexs;
        for (int32_t i = 0; i < info->chunk_num; i++) {
            yy_png_chunk_info *chunk = info->chunks + i;
            switch (chunk->fourcc) {
                case YY_FOUR_CC('I', 'D', 'A', 'T'): {
                    if (info->apng_shared_insert_index == 0) {
                        info->apng_shared_insert_index = i;
                    }
                    if (first_frame_is_cover) {
                        // the cover IDAT chunks double as frame 0's data
                        yy_png_frame_info *frame = info->apng_frames + frame_index;
                        frame->chunk_num++;
                        frame->chunk_size += chunk->length + 12;
                    }
                } break;
                case YY_FOUR_CC('a', 'c', 'T', 'L'): {
                    // intentionally excluded from the shared list
                } break;
                case YY_FOUR_CC('f', 'c', 'T', 'L'): {
                    frame_index++;
                    yy_png_frame_info *frame = info->apng_frames + frame_index;
                    frame->chunk_index = i + 1; // frame data starts right after its fcTL
                    yy_png_chunk_fcTL_read(&frame->frame_control, data + chunk->offset + 8);
                } break;
                case YY_FOUR_CC('f', 'd', 'A', 'T'): {
                    yy_png_frame_info *frame = info->apng_frames + frame_index;
                    frame->chunk_num++;
                    frame->chunk_size += chunk->length + 12;
                } break;
                default: {
                    // every other chunk (IHDR, PLTE, IEND, ...) is shared by all frames
                    *shared_chunk_index = i;
                    shared_chunk_index++;
                    info->apng_shared_chunk_size += chunk->length + 12;
                    info->apng_shared_chunk_num++;
                } break;
            }
        }
    }
    return info;
}
+
/**
 Copy a png frame data from an apng file.
 
 @param data  apng file data
 @param info  png info
 @param index frame index (zero-based)
 @param size  output, the size of the frame data
 @return A frame data (single-frame png file), call free() to release the data.
 Returns NULL if an error occurs.
 
 The frame is remuxed into a standalone PNG: the 8-byte signature, then the
 shared chunks in order (IHDR patched to the frame's width/height), with the
 frame's own data chunks spliced in at the position of the original IDAT run.
 Each fdAT chunk is rewritten as an IDAT chunk: the 4-byte sequence number is
 dropped and the CRC recomputed.
 */
static uint8_t *yy_png_copy_frame_data_at_index(const uint8_t *data,
                                                const yy_png_info *info,
                                                const uint32_t index,
                                                uint32_t *size) {
    if (index >= info->apng_frame_num) return NULL;
    
    yy_png_frame_info *frame_info = info->apng_frames + index;
    uint32_t frame_remux_size = 8 /* PNG Header */ + info->apng_shared_chunk_size + frame_info->chunk_size;
    if (!(info->apng_first_frame_is_cover && index == 0)) {
        frame_remux_size -= frame_info->chunk_num * 4; // remove fdAT sequence number
    }
    uint8_t *frame_data = malloc(frame_remux_size);
    if (!frame_data) return NULL;
    *size = frame_remux_size;
    
    uint32_t data_offset = 0;  // write cursor into frame_data
    bool inserted = false;     // whether the frame's data chunks were spliced in yet
    memcpy(frame_data, data, 8); // PNG File Header
    data_offset += 8;
    for (uint32_t i = 0; i < info->apng_shared_chunk_num; i++) {
        uint32_t shared_chunk_index = info->apng_shared_chunk_indexs[i];
        yy_png_chunk_info *shared_chunk_info = info->chunks + shared_chunk_index;
        
        if (shared_chunk_index >= info->apng_shared_insert_index && !inserted) { // replace IDAT with fdAT
            inserted = true;
            for (uint32_t c = 0; c < frame_info->chunk_num; c++) {
                yy_png_chunk_info *insert_chunk_info = info->chunks + frame_info->chunk_index + c;
                if (insert_chunk_info->fourcc == YY_FOUR_CC('f', 'd', 'A', 'T')) {
                    // rewrite fdAT as IDAT: payload shrinks by the 4-byte sequence number
                    *((uint32_t *)(frame_data + data_offset)) = yy_swap_endian_uint32(insert_chunk_info->length - 4);
                    *((uint32_t *)(frame_data + data_offset + 4)) = YY_FOUR_CC('I', 'D', 'A', 'T');
                    memcpy(frame_data + data_offset + 8, data + insert_chunk_info->offset + 12, insert_chunk_info->length - 4);
                    // the PNG CRC covers the chunk type + payload (not the length field)
                    uint32_t crc = (uint32_t)crc32(0, frame_data + data_offset + 4, insert_chunk_info->length);
                    *((uint32_t *)(frame_data + data_offset + insert_chunk_info->length + 4)) = yy_swap_endian_uint32(crc);
                    data_offset += insert_chunk_info->length + 8;
                } else { // IDAT
                    // cover-frame IDAT chunks are copied verbatim
                    memcpy(frame_data + data_offset, data + insert_chunk_info->offset, insert_chunk_info->length + 12);
                    data_offset += insert_chunk_info->length + 12;
                }
            }
        }
        
        if (shared_chunk_info->fourcc == YY_FOUR_CC('I', 'H', 'D', 'R')) {
            // patch IHDR (25 bytes on disk) with this frame's dimensions and
            // recompute its CRC over the 17 bytes of type + payload
            uint8_t tmp[25] = {0};
            memcpy(tmp, data + shared_chunk_info->offset, 25);
            yy_png_chunk_IHDR IHDR = info->header;
            IHDR.width = frame_info->frame_control.width;
            IHDR.height = frame_info->frame_control.height;
            yy_png_chunk_IHDR_write(&IHDR, tmp + 8);
            *((uint32_t *)(tmp + 21)) = yy_swap_endian_uint32((uint32_t)crc32(0, tmp + 4, 17));
            memcpy(frame_data + data_offset, tmp, 25);
            data_offset += 25;
        } else {
            // all other shared chunks are copied verbatim
            memcpy(frame_data + data_offset, data + shared_chunk_info->offset, shared_chunk_info->length + 12);
            data_offset += shared_chunk_info->length + 12;
        }
    }
    return frame_data;
}
+
+
+
+////////////////////////////////////////////////////////////////////////////////
+#pragma mark - Helper
+
/// Rounds `size` up to the next multiple of `alignment`.
static inline size_t YYImageByteAlign(size_t size, size_t alignment) {
    size_t remainder = size % alignment;
    return (remainder == 0) ? size : size + (alignment - remainder);
}
+
/// Converts an angle expressed in degrees to radians.
static inline CGFloat YYImageDegreesToRadians(CGFloat angleInDegrees) {
    CGFloat angleInRadians = angleInDegrees * M_PI / 180;
    return angleInRadians;
}
+
/// Returns a shared DeviceRGB color space, created once and cached for the
/// process lifetime (the color space is never released).
CGColorSpaceRef YYCGColorSpaceGetDeviceRGB() {
    static CGColorSpaceRef sharedSpace;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        sharedSpace = CGColorSpaceCreateDeviceRGB();
    });
    return sharedSpace;
}
+
/// Returns a shared DeviceGray color space, created once and cached for the
/// process lifetime (the color space is never released).
CGColorSpaceRef YYCGColorSpaceGetDeviceGray() {
    static CGColorSpaceRef sharedSpace;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        sharedSpace = CGColorSpaceCreateDeviceGray();
    });
    return sharedSpace;
}
+
/// Whether `space` equals the shared DeviceRGB color space. NULL returns NO.
BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space) {
    if (!space) return NO;
    return CFEqual(space, YYCGColorSpaceGetDeviceRGB()) ? YES : NO;
}
+
/// Whether `space` equals the shared DeviceGray color space. NULL returns NO.
BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space) {
    if (!space) return NO;
    return CFEqual(space, YYCGColorSpaceGetDeviceGray()) ? YES : NO;
}
+
/**
 A callback used in CGDataProviderCreateWithData() to release data.
 
 Example:
 
 void *data = malloc(size);
 CGDataProviderRef provider = CGDataProviderCreateWithData(data, data, size, YYCGDataProviderReleaseDataCallback);
 */
static void YYCGDataProviderReleaseDataCallback(void *info, const void *data, size_t size) {
    free(info); // free(NULL) is a defined no-op, so no guard is required
}
+
/**
 Decode an image to bitmap buffer with the specified format.
 
 @param srcImage   Source image.
 @param dest       Destination buffer. It should be zero before call this method.
                   If decode succeed, you should release the dest->data using free().
 @param destFormat Destination bitmap format.
 
 @return Whether succeed.
 
 @warning This method support iOS7.0 and later. If call it on iOS6, it just returns NO.
 CG_AVAILABLE_STARTING(__MAC_10_9, __IPHONE_7_0)
 */
static BOOL YYCGImageDecodeToBitmapBufferWithAnyFormat(CGImageRef srcImage, vImage_Buffer *dest, vImage_CGImageFormat *destFormat) {
    // `((long)vImageConvert_AnyToAny) + 1 == 1` tests whether the weak-linked
    // symbol resolved to NULL, i.e. the OS is older than the @warning above allows.
    if (!srcImage || (((long)vImageConvert_AnyToAny) + 1 == 1) || !destFormat || !dest) return NO;
    size_t width = CGImageGetWidth(srcImage);
    size_t height = CGImageGetHeight(srcImage);
    if (width == 0 || height == 0) return NO;
    dest->data = NULL; // so the fail path never frees an uninitialized pointer
    
    vImage_Error error = kvImageNoError;
    CFDataRef srcData = NULL;
    vImageConverterRef convertor = NULL;
    // describe the source image's pixel layout for vImage
    vImage_CGImageFormat srcFormat = {0};
    srcFormat.bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(srcImage);
    srcFormat.bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(srcImage);
    srcFormat.colorSpace = CGImageGetColorSpace(srcImage);
    srcFormat.bitmapInfo = CGImageGetBitmapInfo(srcImage) | CGImageGetAlphaInfo(srcImage);
    
    convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, destFormat, NULL, kvImageNoFlags, NULL);
    if (!convertor) goto fail;
    
    // copying the provider's data forces the image to be decoded
    CGDataProviderRef srcProvider = CGImageGetDataProvider(srcImage);
    srcData = srcProvider ? CGDataProviderCopyData(srcProvider) : NULL; // decode
    size_t srcLength = srcData ? CFDataGetLength(srcData) : 0;
    const void *srcBytes = srcData ? CFDataGetBytePtr(srcData) : NULL;
    if (srcLength == 0 || !srcBytes) goto fail;
    
    // wrap the decoded bytes in a vImage buffer (no copy)
    vImage_Buffer src = {0};
    src.data = (void *)srcBytes;
    src.width = width;
    src.height = height;
    src.rowBytes = CGImageGetBytesPerRow(srcImage);
    
    // allocates dest->data; released with free() on the fail path
    error = vImageBuffer_Init(dest, height, width, 32, kvImageNoFlags);
    if (error != kvImageNoError) goto fail;
    
    error = vImageConvert_AnyToAny(convertor, &src, dest, NULL, kvImageNoFlags); // convert
    if (error != kvImageNoError) goto fail;
    
    CFRelease(convertor);
    CFRelease(srcData);
    return YES;
    
fail:
    if (convertor) CFRelease(convertor);
    if (srcData) CFRelease(srcData);
    if (dest->data) free(dest->data);
    dest->data = NULL;
    return NO;
}
+
/**
 Decode an image to bitmap buffer with the 32bit format (such as ARGB8888).
 
 @param srcImage   Source image.
 @param dest       Destination buffer. It should be zero before call this method.
                   If decode succeed, you should release the dest->data using free().
 @param bitmapInfo Destination bitmap format. Only 32-bit alpha/byte-order
                   combinations are supported; others return NO.
 
 @return Whether succeed.
 */
static BOOL YYCGImageDecodeToBitmapBufferWith32BitFormat(CGImageRef srcImage, vImage_Buffer *dest, CGBitmapInfo bitmapInfo) {
    if (!srcImage || !dest) return NO;
    size_t width = CGImageGetWidth(srcImage);
    size_t height = CGImageGetHeight(srcImage);
    if (width == 0 || height == 0) return NO;
    
    // Decompose the requested format; this drives the fallback path below,
    // since a CGBitmapContext can only render premultiplied (or no) alpha.
    BOOL hasAlpha = NO;
    BOOL alphaFirst = NO;
    BOOL alphaPremultiplied = NO;
    BOOL byteOrderNormal = NO;
    
    switch (bitmapInfo & kCGBitmapAlphaInfoMask) {
        case kCGImageAlphaPremultipliedLast: {
            hasAlpha = YES;
            alphaPremultiplied = YES;
        } break;
        case kCGImageAlphaPremultipliedFirst: {
            hasAlpha = YES;
            alphaPremultiplied = YES;
            alphaFirst = YES;
        } break;
        case kCGImageAlphaLast: {
            hasAlpha = YES;
        } break;
        case kCGImageAlphaFirst: {
            hasAlpha = YES;
            alphaFirst = YES;
        } break;
        case kCGImageAlphaNoneSkipLast: {
        } break;
        case kCGImageAlphaNoneSkipFirst: {
            alphaFirst = YES;
        } break;
        default: {
            return NO; // unsupported alpha layout (e.g. none / alpha-only)
        } break;
    }
    
    switch (bitmapInfo & kCGBitmapByteOrderMask) {
        case kCGBitmapByteOrderDefault: {
            byteOrderNormal = YES;
        } break;
        case kCGBitmapByteOrder32Little: {
        } break;
        case kCGBitmapByteOrder32Big: {
            byteOrderNormal = YES;
        } break;
        default: {
            return NO; // 16-bit byte orders are not valid for a 32-bit format
        } break;
    }
    
    /*
     Try convert with vImageConvert_AnyToAny() (available since iOS 7.0).
     If fail, try decode with CGContextDrawImage().
     CGBitmapContext use a premultiplied alpha format, unpremultiply may lose precision.
     */
    vImage_CGImageFormat destFormat = {0};
    destFormat.bitsPerComponent = 8;
    destFormat.bitsPerPixel = 32;
    destFormat.colorSpace = YYCGColorSpaceGetDeviceRGB();
    destFormat.bitmapInfo = bitmapInfo;
    dest->data = NULL;
    if (YYCGImageDecodeToBitmapBufferWithAnyFormat(srcImage, dest, &destFormat)) return YES;
    
    // Fallback: render through a bitmap context. Request a premultiplied
    // (or no-alpha) variant of the target format, then unpremultiply below
    // if the caller asked for straight alpha.
    CGBitmapInfo contextBitmapInfo = bitmapInfo & kCGBitmapByteOrderMask;
    if (!hasAlpha || alphaPremultiplied) {
        contextBitmapInfo |= (bitmapInfo & kCGBitmapAlphaInfoMask);
    } else {
        contextBitmapInfo |= alphaFirst ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaPremultipliedLast;
    }
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 32, YYCGColorSpaceGetDeviceRGB(), contextBitmapInfo);
    if (!context) goto fail;
    
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), srcImage); // decode and convert
    size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
    size_t length = height * bytesPerRow;
    void *data = CGBitmapContextGetData(context);
    if (length == 0 || !data) goto fail;
    
    dest->data = malloc(length);
    dest->width = width;
    dest->height = height;
    dest->rowBytes = bytesPerRow;
    if (!dest->data) goto fail;
    
    if (hasAlpha && !alphaPremultiplied) {
        // The caller wants straight (non-premultiplied) alpha: unpremultiply
        // the context's pixels directly into the destination buffer.
        vImage_Buffer tmpSrc = {0};
        tmpSrc.data = data;
        tmpSrc.width = width;
        tmpSrc.height = height;
        tmpSrc.rowBytes = bytesPerRow;
        vImage_Error error;
        if (alphaFirst && byteOrderNormal) {
            error = vImageUnpremultiplyData_ARGB8888(&tmpSrc, dest, kvImageNoFlags);
        } else {
            error = vImageUnpremultiplyData_RGBA8888(&tmpSrc, dest, kvImageNoFlags);
        }
        if (error != kvImageNoError) goto fail;
    } else {
        memcpy(dest->data, data, length);
    }
    
    CFRelease(context);
    return YES;
    
fail:
    if (context) CFRelease(context);
    if (dest->data) free(dest->data);
    dest->data = NULL;
    return NO; // (an unreachable duplicate `return NO;` was removed here)
}
+
/**
 Creates a decoded copy of `imageRef` so later display requires no decompression.
 When `decodeForDisplay` is YES the image is redrawn into a BGRA8888/BGRX8888
 bitmap context; otherwise the pixel data is copied out of the image's data
 provider and an image with the original format parameters is rebuilt.
 Returns a new image (caller releases) or NULL on error.
 */
CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) {
    if (!imageRef) return NULL;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || height == 0) return NULL;
    
    if (decodeForDisplay) { //decode with redraw (may lose some precision)
        CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
        BOOL hasAlpha = NO;
        if (alphaInfo == kCGImageAlphaPremultipliedLast ||
            alphaInfo == kCGImageAlphaPremultipliedFirst ||
            alphaInfo == kCGImageAlphaLast ||
            alphaInfo == kCGImageAlphaFirst) {
            hasAlpha = YES;
        }
        // BGRA8888 (premultiplied) or BGRX8888
        // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), bitmapInfo);
        if (!context) return NULL;
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
        CGImageRef newImage = CGBitmapContextCreateImage(context);
        CFRelease(context);
        return newImage;
        
    } else {
        // decode without redraw: keep the original pixel format intact
        CGColorSpaceRef space = CGImageGetColorSpace(imageRef);
        size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
        size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
        size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
        CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
        if (bytesPerRow == 0 || width == 0 || height == 0) return NULL;
        
        // copying the provider's data forces decoding
        CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
        if (!dataProvider) return NULL;
        CFDataRef data = CGDataProviderCopyData(dataProvider); // decode
        if (!data) return NULL;
        
        // rebuild the image over the already-decoded bytes
        CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data);
        CFRelease(data);
        if (!newProvider) return NULL;
        
        CGImageRef newImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, space, bitmapInfo, newProvider, NULL, false, kCGRenderingIntentDefault);
        CFRelease(newProvider);
        return newImage;
    }
}
+
/**
 Creates a new image by applying an affine transform to `imageRef`.
 
 The source is decoded to an ARGB8888 buffer, warped with vImage into a
 `destSize` buffer, and re-wrapped as a CGImage in `destBitmapInfo`; the final
 conversion pass is skipped when the intermediate format already matches.
 
 @param imageRef       Source image.
 @param transform      Affine transform to apply.
 @param destSize       Size, in pixels, of the output image.
 @param destBitmapInfo Desired bitmap layout of the output image.
 @return A new CGImage (caller releases), or NULL on error.
 */
CGImageRef YYCGImageCreateAffineTransformCopy(CGImageRef imageRef, CGAffineTransform transform, CGSize destSize, CGBitmapInfo destBitmapInfo) {
    if (!imageRef) return NULL;
    size_t srcWidth = CGImageGetWidth(imageRef);
    size_t srcHeight = CGImageGetHeight(imageRef);
    size_t destWidth = round(destSize.width);
    size_t destHeight = round(destSize.height);
    if (srcWidth == 0 || srcHeight == 0 || destWidth == 0 || destHeight == 0) return NULL;
    
    CGDataProviderRef tmpProvider = NULL, destProvider = NULL;
    CGImageRef tmpImage = NULL, destImage = NULL; // (fixed stray double semicolon)
    vImage_Buffer src = {0}, tmp = {0}, dest = {0};
    if (!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &src, kCGImageAlphaFirst | kCGBitmapByteOrderDefault)) return NULL;
    
    size_t destBytesPerRow = YYImageByteAlign(destWidth * 4, 32); // 32-byte aligned rows for vImage
    tmp.data = malloc(destHeight * destBytesPerRow);
    if (!tmp.data) goto fail;
    
    tmp.width = destWidth;
    tmp.height = destHeight;
    tmp.rowBytes = destBytesPerRow;
    // NOTE(review): reinterprets CGAffineTransform as vImage_CGAffineTransform;
    // assumes the two structs are layout-compatible — confirm on new SDKs.
    vImage_CGAffineTransform vTransform = *((vImage_CGAffineTransform *)&transform);
    uint8_t backColor[4] = {0}; // transparent black fill outside the warped area
    vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &tmp, NULL, &vTransform, backColor, kvImageBackgroundColorFill);
    if (error != kvImageNoError) goto fail;
    free(src.data);
    src.data = NULL;
    
    tmpProvider = CGDataProviderCreateWithData(tmp.data, tmp.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
    if (!tmpProvider) goto fail;
    tmp.data = NULL; // hold by provider
    tmpImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), kCGImageAlphaFirst | kCGBitmapByteOrderDefault, tmpProvider, NULL, false, kCGRenderingIntentDefault);
    if (!tmpImage) goto fail;
    CFRelease(tmpProvider);
    tmpProvider = NULL;
    
    // already in the requested layout? return the intermediate image directly
    if ((destBitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaFirst &&
        (destBitmapInfo & kCGBitmapByteOrderMask) != kCGBitmapByteOrder32Little) {
        return tmpImage;
    }
    
    // otherwise convert the intermediate image into the requested layout
    if (!YYCGImageDecodeToBitmapBufferWith32BitFormat(tmpImage, &dest, destBitmapInfo)) goto fail;
    CFRelease(tmpImage);
    tmpImage = NULL;
    
    // NOTE(review): assumes the buffer produced above has rowBytes equal to
    // destBytesPerRow — confirm if the decode helper's allocation changes.
    destProvider = CGDataProviderCreateWithData(dest.data, dest.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
    if (!destProvider) goto fail;
    dest.data = NULL; // hold by provider
    destImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), destBitmapInfo, destProvider, NULL, false, kCGRenderingIntentDefault);
    if (!destImage) goto fail;
    CFRelease(destProvider);
    destProvider = NULL;
    
    return destImage;
    
fail:
    // centralized cleanup: release whatever was created before the error
    if (src.data) free(src.data);
    if (tmp.data) free(tmp.data);
    if (dest.data) free(dest.data);
    if (tmpProvider) CFRelease(tmpProvider);
    if (tmpImage) CFRelease(tmpImage);
    if (destProvider) CFRelease(destProvider);
    return NULL;
}
+
/// Maps an EXIF/TIFF orientation value (kCGImagePropertyOrientation*) to the
/// matching UIImageOrientation. Unrecognized values map to "up".
UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value) {
    if (value == kCGImagePropertyOrientationUp) return UIImageOrientationUp;
    if (value == kCGImagePropertyOrientationDown) return UIImageOrientationDown;
    if (value == kCGImagePropertyOrientationLeft) return UIImageOrientationLeft;
    if (value == kCGImagePropertyOrientationRight) return UIImageOrientationRight;
    if (value == kCGImagePropertyOrientationUpMirrored) return UIImageOrientationUpMirrored;
    if (value == kCGImagePropertyOrientationDownMirrored) return UIImageOrientationDownMirrored;
    if (value == kCGImagePropertyOrientationLeftMirrored) return UIImageOrientationLeftMirrored;
    if (value == kCGImagePropertyOrientationRightMirrored) return UIImageOrientationRightMirrored;
    return UIImageOrientationUp; // unknown EXIF value
}
+
/// Maps a UIImageOrientation to the matching EXIF/TIFF orientation value
/// (kCGImagePropertyOrientation*). Unrecognized values map to "up".
NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation) {
    if (orientation == UIImageOrientationUp) return kCGImagePropertyOrientationUp;
    if (orientation == UIImageOrientationDown) return kCGImagePropertyOrientationDown;
    if (orientation == UIImageOrientationLeft) return kCGImagePropertyOrientationLeft;
    if (orientation == UIImageOrientationRight) return kCGImagePropertyOrientationRight;
    if (orientation == UIImageOrientationUpMirrored) return kCGImagePropertyOrientationUpMirrored;
    if (orientation == UIImageOrientationDownMirrored) return kCGImagePropertyOrientationDownMirrored;
    if (orientation == UIImageOrientationLeftMirrored) return kCGImagePropertyOrientationLeftMirrored;
    if (orientation == UIImageOrientationRightMirrored) return kCGImagePropertyOrientationRightMirrored;
    return kCGImagePropertyOrientationUp; // unknown orientation
}
+
/**
 Creates a copy of `imageRef` re-rendered so the result, displayed without any
 orientation flag, looks like the original displayed with `orientation`.
 Returns a retained reference to the original when no transform is required.
 Caller releases the returned image.
 */
CGImageRef YYCGImageCreateCopyWithOrientation(CGImageRef imageRef, UIImageOrientation orientation, CGBitmapInfo destBitmapInfo) {
    if (!imageRef) return NULL;
    if (orientation == UIImageOrientationUp) return (CGImageRef)CFRetain(imageRef);
    
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    
    // Build the affine transform that maps the oriented image upright.
    // Each case rotates/mirrors and then translates back into the frame.
    CGAffineTransform transform = CGAffineTransformIdentity;
    BOOL swapWidthAndHeight = NO; // 90-degree rotations swap the output dimensions
    switch (orientation) {
        case UIImageOrientationDown: {  // rotate 180 degrees
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(180));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
        } break;
        case UIImageOrientationLeft: {  // rotate 90 degrees
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)0, -(CGFloat)height);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationRight: {  // rotate -90 degrees
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, (CGFloat)0);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationUpMirrored: {  // horizontal flip
            transform = CGAffineTransformTranslate(transform, (CGFloat)width, 0);
            transform = CGAffineTransformScale(transform, -1, 1);
        } break;
        case UIImageOrientationDownMirrored: {  // vertical flip
            transform = CGAffineTransformTranslate(transform, 0, (CGFloat)height);
            transform = CGAffineTransformScale(transform, 1, -1);
        } break;
        case UIImageOrientationLeftMirrored: {  // rotate -90 degrees + flip
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
            transform = CGAffineTransformScale(transform, 1, -1);
            transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
            swapWidthAndHeight = YES;
        } break;
        case UIImageOrientationRightMirrored: {  // rotate 90 degrees + flip
            transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
            transform = CGAffineTransformScale(transform, 1, -1);
            swapWidthAndHeight = YES;
        } break;
        default: break;
    }
    if (CGAffineTransformIsIdentity(transform)) return (CGImageRef)CFRetain(imageRef);
    
    CGSize destSize = {width, height};
    if (swapWidthAndHeight) {
        destSize.width = height;
        destSize.height = width;
    }
    
    return YYCGImageCreateAffineTransformCopy(imageRef, transform, destSize, destBitmapInfo);
}
+
/// Detects the image file format from the leading magic bytes of `data`.
/// Returns YYImageTypeUnknown for NULL data, data shorter than 16 bytes, or
/// data matching no known signature.
YYImageType YYImageDetectType(CFDataRef data) {
    if (!data) return YYImageTypeUnknown;
    uint64_t length = CFDataGetLength(data);
    if (length < 16) return YYImageTypeUnknown;
    
    const char *bytes = (char *)CFDataGetBytePtr(data);
    
    // 4-byte signatures first
    uint32_t magic4 = *((uint32_t *)bytes);
    if (magic4 == YY_FOUR_CC(0x4D, 0x4D, 0x00, 0x2A) ||  // big endian TIFF
        magic4 == YY_FOUR_CC(0x49, 0x49, 0x2A, 0x00)) {  // little endian TIFF
        return YYImageTypeTIFF;
    }
    if (magic4 == YY_FOUR_CC(0x00, 0x00, 0x01, 0x00) ||  // ICO
        magic4 == YY_FOUR_CC(0x00, 0x00, 0x02, 0x00)) {  // CUR
        return YYImageTypeICO;
    }
    if (magic4 == YY_FOUR_CC('i', 'c', 'n', 's')) {  // ICNS
        return YYImageTypeICNS;
    }
    if (magic4 == YY_FOUR_CC('G', 'I', 'F', '8')) {  // GIF
        return YYImageTypeGIF;
    }
    if (magic4 == YY_FOUR_CC(0x89, 'P', 'N', 'G')) {  // PNG
        uint32_t next4 = *((uint32_t *)(bytes + 4));
        if (next4 == YY_FOUR_CC('\r', '\n', 0x1A, '\n')) {
            return YYImageTypePNG;
        }
        // bad PNG signature tail: fall through to the weaker checks below
    }
    if (magic4 == YY_FOUR_CC('R', 'I', 'F', 'F')) {  // WebP
        uint32_t riffType = *((uint32_t *)(bytes + 8));
        if (riffType == YY_FOUR_CC('W', 'E', 'B', 'P')) {
            return YYImageTypeWebP;
        }
    }
    /*
    if (magic4 == YY_FOUR_CC('B', 'P', 'G', 0xFB)) {  // BPG
        return YYImageTypeBPG;
    }
    */
    
    // 2-byte signatures next
    uint16_t magic2 = *((uint16_t *)bytes);
    if (magic2 == YY_TWO_CC('B', 'A') ||
        magic2 == YY_TWO_CC('B', 'M') ||
        magic2 == YY_TWO_CC('I', 'C') ||
        magic2 == YY_TWO_CC('P', 'I') ||
        magic2 == YY_TWO_CC('C', 'I') ||
        magic2 == YY_TWO_CC('C', 'P')) {  // BMP
        return YYImageTypeBMP;
    }
    if (magic2 == YY_TWO_CC(0xFF, 0x4F)) {  // JPEG2000
        return YYImageTypeJPEG2000;
    }
    
    // JPG             FF D8 FF
    if (memcmp(bytes,"\377\330\377",3) == 0) return YYImageTypeJPEG;
    
    // JP2
    if (memcmp(bytes + 4, "\152\120\040\040\015", 5) == 0) return YYImageTypeJPEG2000;
    
    return YYImageTypeUnknown;
}
+
/// Map a YYImageType to the matching system UTType identifier.
/// Returns NULL for unknown types and for formats without a UTI (e.g. WebP).
CFStringRef YYImageTypeToUTType(YYImageType type) {
    CFStringRef uti = NULL;
    switch (type) {
        case YYImageTypeJPEG:     uti = kUTTypeJPEG; break;
        case YYImageTypeJPEG2000: uti = kUTTypeJPEG2000; break;
        case YYImageTypeTIFF:     uti = kUTTypeTIFF; break;
        case YYImageTypeBMP:      uti = kUTTypeBMP; break;
        case YYImageTypeICO:      uti = kUTTypeICO; break;
        case YYImageTypeICNS:     uti = kUTTypeAppleICNS; break;
        case YYImageTypeGIF:      uti = kUTTypeGIF; break;
        case YYImageTypePNG:      uti = kUTTypePNG; break;
        default: break;
    }
    return uti;
}
+
/// Map a system UTType identifier back to a YYImageType.
/// @param uti A UTI string such as kUTTypePNG; may be NULL.
/// @return The matching type, or YYImageTypeUnknown for NULL/unrecognized UTIs.
YYImageType YYImageTypeFromUTType(CFStringRef uti) {
    static NSDictionary *dic;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        dic = @{(id)kUTTypeJPEG : @(YYImageTypeJPEG),
                (id)kUTTypeJPEG2000 : @(YYImageTypeJPEG2000),
                (id)kUTTypeTIFF : @(YYImageTypeTIFF),
                (id)kUTTypeBMP : @(YYImageTypeBMP),
                (id)kUTTypeICO : @(YYImageTypeICO),
                (id)kUTTypeAppleICNS : @(YYImageTypeICNS),
                (id)kUTTypeGIF : @(YYImageTypeGIF),
                (id)kUTTypePNG : @(YYImageTypePNG)};
    });
    if (!uti) return YYImageTypeUnknown;
    // Plain __bridge (no ownership transfer) is correct here; the previous
    // `__bridge __strong` mixed an ownership qualifier into a no-transfer cast.
    NSNumber *num = dic[(__bridge id)uti];
    return num.unsignedIntegerValue; // nil lookup -> 0 == YYImageTypeUnknown
}
+
/// Return the conventional file extension (without dot) for an image type,
/// or nil when the type is unknown.
NSString *YYImageTypeGetExtension(YYImageType type) {
    NSString *extension = nil;
    switch (type) {
        case YYImageTypeJPEG:     extension = @"jpg"; break;
        case YYImageTypeJPEG2000: extension = @"jp2"; break;
        case YYImageTypeTIFF:     extension = @"tiff"; break;
        case YYImageTypeBMP:      extension = @"bmp"; break;
        case YYImageTypeICO:      extension = @"ico"; break;
        case YYImageTypeICNS:     extension = @"icns"; break;
        case YYImageTypeGIF:      extension = @"gif"; break;
        case YYImageTypePNG:      extension = @"png"; break;
        case YYImageTypeWebP:     extension = @"webp"; break;
        default: break;
    }
    return extension;
}
+
/// Encode an image to the given format.
/// @param imageRef The image to encode; NULL yields NULL.
/// @param type     Destination format.
/// @param quality  Compression quality, clamped to [0, 1]; ignored by lossless formats.
/// @return A new CFDataRef with the encoded bytes (caller releases), or NULL on failure.
CFDataRef YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality) {
    if (!imageRef) return NULL; // was `nil`; use NULL consistently for CF returns
    quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
    
    if (type == YYImageTypeWebP) {
#if YYIMAGE_WEBP_ENABLED
        // quality == 1 selects lossless WebP; anything lower is lossy.
        if (quality == 1) {
            return YYCGImageCreateEncodedWebPData(imageRef, YES, quality, 4, YYImagePresetDefault);
        } else {
            return YYCGImageCreateEncodedWebPData(imageRef, NO, quality, 4, YYImagePresetDefault);
        }
#else
        return NULL;
#endif
    }
    
    CFStringRef uti = YYImageTypeToUTType(type);
    if (!uti) return NULL;
    
    CFMutableDataRef data = CFDataCreateMutable(CFAllocatorGetDefault(), 0);
    if (!data) return NULL;
    CGImageDestinationRef dest = CGImageDestinationCreateWithData(data, uti, 1, NULL);
    if (!dest) {
        CFRelease(data);
        return NULL;
    }
    NSDictionary *options = @{(id)kCGImageDestinationLossyCompressionQuality : @(quality) };
    CGImageDestinationAddImage(dest, imageRef, (CFDictionaryRef)options);
    if (!CGImageDestinationFinalize(dest)) {
        CFRelease(data);
        CFRelease(dest);
        return NULL;
    }
    CFRelease(dest);
    
    if (CFDataGetLength(data) == 0) { // finalize succeeded but produced nothing
        CFRelease(data);
        return NULL;
    }
    return data;
}
+
+#if YYIMAGE_WEBP_ENABLED
+
/// WebP support was compiled in (YYIMAGE_WEBP_ENABLED), so WebP
/// encoding/decoding is available at runtime.
BOOL YYImageWebPAvailable() {
    return YES;
}
+
/// Encode an image as WebP with libwebp.
/// @param imageRef      The image to encode; NULL yields NULL.
/// @param lossless      YES for lossless encoding.
/// @param quality       Compression quality, clamped to [0, 1].
/// @param compressLevel Encoder effort (libwebp `method`), clamped to [0, 6].
/// @param preset        Source-material hint, clamped to the valid preset range.
/// @return A new CFDataRef with the WebP file data (caller releases), or NULL on failure.
CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
    if (!imageRef) return NULL;
    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    if (width == 0 || width > WEBP_MAX_DIMENSION) return NULL;
    if (height == 0 || height > WEBP_MAX_DIMENSION) return NULL;
    
    // Decode the source image to a plain RGBA bitmap for the importer below.
    vImage_Buffer buffer = {0};
    if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &buffer, kCGImageAlphaLast | kCGBitmapByteOrderDefault)) return NULL;
    
    WebPConfig config = {0};
    WebPPicture picture = {0};
    WebPMemoryWriter writer = {0};
    CFDataRef webpData = NULL;
    BOOL pictureNeedFree = NO;
    BOOL writerNeedFree = NO; // writer.mem must be freed once the writer is initialized
    
    quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
    preset = preset > YYImagePresetText ? YYImagePresetDefault : preset;
    compressLevel = compressLevel < 0 ? 0 : compressLevel > 6 ? 6 : compressLevel;
    if (!WebPConfigPreset(&config, (WebPPreset)preset, quality)) goto fail;
    
    config.quality = round(quality * 100.0); // libwebp quality is 0..100
    config.lossless = lossless;
    config.method = compressLevel;
    switch ((WebPPreset)preset) {
        case WEBP_PRESET_DEFAULT: {
            config.image_hint = WEBP_HINT_DEFAULT;
        } break;
        case WEBP_PRESET_PICTURE: {
            config.image_hint = WEBP_HINT_PICTURE;
        } break;
        case WEBP_PRESET_PHOTO: {
            config.image_hint = WEBP_HINT_PHOTO;
        } break;
        case WEBP_PRESET_DRAWING:
        case WEBP_PRESET_ICON:
        case WEBP_PRESET_TEXT: {
            config.image_hint = WEBP_HINT_GRAPH;
        } break;
    }
    if (!WebPValidateConfig(&config)) goto fail;
    
    if (!WebPPictureInit(&picture)) goto fail;
    pictureNeedFree = YES;
    picture.width = (int)buffer.width;
    picture.height = (int)buffer.height;
    picture.use_argb = lossless; // lossless path encodes from the ARGB plane
    if(!WebPPictureImportRGBA(&picture, buffer.data, (int)buffer.rowBytes)) goto fail;
    
    WebPMemoryWriterInit(&writer);
    writerNeedFree = YES;
    picture.writer = WebPMemoryWrite;
    picture.custom_ptr = &writer;
    if(!WebPEncode(&config, &picture)) goto fail;
    
    webpData = CFDataCreate(CFAllocatorGetDefault(), writer.mem, writer.size);
    free(writer.mem);
    WebPPictureFree(&picture);
    free(buffer.data);
    return webpData;
    
fail:
    // Fix: previously writer.mem leaked when WebPEncode failed after the
    // memory writer had been initialized and had buffered partial output.
    if (writerNeedFree && writer.mem) free(writer.mem);
    if (buffer.data) free(buffer.data);
    if (pictureNeedFree) WebPPictureFree(&picture);
    return NULL;
}
+
/// Return the number of frames in a WebP file, or 0 if the data is
/// empty or cannot be demuxed.
NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
    NSUInteger frameCount = 0;
    if (webpData && CFDataGetLength(webpData) > 0) {
        WebPData data = {CFDataGetBytePtr(webpData), CFDataGetLength(webpData)};
        WebPDemuxer *demuxer = WebPDemux(&data);
        if (demuxer) {
            frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
            WebPDemuxDelete(demuxer);
        }
    }
    return frameCount;
}
+
/// Decode the FIRST frame of a (possibly multi-frame) WebP file into a CGImage.
///
/// Multi-frame files are unpacked with WebPDemuxer first because calling
/// WebPDecode() on them fails with VP8_STATUS_UNSUPPORTED_FEATURE.
///
/// @param webpData          Raw WebP file data.
/// @param decodeForDisplay  YES: premultiplied BGRA in host byte order (display
///                          ready); NO: plain RGBA.
/// @param useThreads        Enable multi-threaded decoding (speed up ~23%).
/// @param bypassFiltering   Skip in-loop filtering (speed up ~11%, may cause banding).
/// @param noFancyUpsampling Disable fancy chroma upsampling (slower here, loses detail).
/// @return A new CGImageRef (caller releases), or NULL on failure.
CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
                                       BOOL decodeForDisplay,
                                       BOOL useThreads,
                                       BOOL bypassFiltering,
                                       BOOL noFancyUpsampling) {
    /*
     Call WebPDecode() on a multi-frame webp data will get an error (VP8_STATUS_UNSUPPORTED_FEATURE).
     Use WebPDemuxer to unpack it first.
     */
    WebPData data = {0};
    WebPDemuxer *demuxer = NULL;
    
    int frameCount = 0, canvasWidth = 0, canvasHeight = 0;
    WebPIterator iter = {0};
    BOOL iterInited = NO;
    const uint8_t *payload = NULL;
    size_t payloadSize = 0;
    WebPDecoderConfig config = {0};
    
    BOOL hasAlpha = NO;
    size_t bitsPerComponent = 0, bitsPerPixel = 0, bytesPerRow = 0, destLength = 0;
    CGBitmapInfo bitmapInfo = 0;
    WEBP_CSP_MODE colorspace = 0;
    void *destBytes = NULL;
    CGDataProviderRef provider = NULL;
    CGImageRef imageRef = NULL;
    
    if (!webpData || CFDataGetLength(webpData) == 0) return NULL;
    data.bytes = CFDataGetBytePtr(webpData);
    data.size = CFDataGetLength(webpData);
    demuxer = WebPDemux(&data);
    if (!demuxer) goto fail;
    
    frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
    if (frameCount == 0) {
        goto fail;
        
    } else if (frameCount == 1) { // single-frame: decode the whole payload
        payload = data.bytes;
        payloadSize = data.size;
        if (!WebPInitDecoderConfig(&config)) goto fail;
        if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
        canvasWidth = config.input.width;
        canvasHeight = config.input.height;
        
    } else { // multi-frame: decode only the first fragment
        canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
        canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
        if (canvasWidth < 1 || canvasHeight < 1) goto fail;
        
        if (!WebPDemuxGetFrame(demuxer, 1, &iter)) goto fail; // one-based index
        iterInited = YES;
        
        if (iter.width > canvasWidth || iter.height > canvasHeight) goto fail;
        payload = iter.fragment.bytes;
        payloadSize = iter.fragment.size;
        
        if (!WebPInitDecoderConfig(&config)) goto fail;
        if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
    }
    if (payload == NULL || payloadSize == 0) goto fail;
    
    hasAlpha = config.input.has_alpha;
    bitsPerComponent = 8;
    bitsPerPixel = 32;
    bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * canvasWidth, 32);
    destLength = bytesPerRow * canvasHeight;
    if (decodeForDisplay) {
        bitmapInfo = kCGBitmapByteOrder32Host;
        bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
        colorspace = MODE_bgrA; // small endian
    } else {
        bitmapInfo = kCGBitmapByteOrderDefault;
        bitmapInfo |= hasAlpha ? kCGImageAlphaLast : kCGImageAlphaNoneSkipLast;
        colorspace = MODE_RGBA;
    }
    destBytes = calloc(1, destLength);
    if (!destBytes) goto fail;
    
    config.options.use_threads = useThreads; //speed up 23%
    config.options.bypass_filtering = bypassFiltering; //speed up 11%, cause some banding
    config.options.no_fancy_upsampling = noFancyUpsampling; //speed down 16%, lose some details
    config.output.colorspace = colorspace;
    config.output.is_external_memory = 1;
    config.output.u.RGBA.rgba = destBytes;
    config.output.u.RGBA.stride = (int)bytesPerRow;
    config.output.u.RGBA.size = destLength;
    
    if (WebPDecode(payload, payloadSize, &config) != VP8_STATUS_OK) goto fail;
    
    if (iter.x_offset != 0 || iter.y_offset != 0) {
        // The frame does not start at the canvas origin: translate the decoded
        // pixels into position on a cleared temporary canvas, then copy back.
        void *tmp = calloc(1, destLength);
        if (tmp) {
            vImage_Buffer src = {destBytes, canvasHeight, canvasWidth, bytesPerRow};
            // Fix: `dest` previously also pointed at destBytes, so the warp ran
            // in place and the memcpy below overwrote the decoded image with
            // the untouched zero-filled tmp buffer. It must target tmp.
            vImage_Buffer dest = {tmp, canvasHeight, canvasWidth, bytesPerRow};
            vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
            uint8_t backColor[4] = {0};
            vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
            memcpy(destBytes, tmp, destLength);
            free(tmp);
        }
    }
    
    provider = CGDataProviderCreateWithData(destBytes, destBytes, destLength, YYCGDataProviderReleaseDataCallback);
    if (!provider) goto fail;
    destBytes = NULL; // hold by provider
    
    imageRef = CGImageCreate(canvasWidth, canvasHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
    
    CFRelease(provider);
    if (iterInited) WebPDemuxReleaseIterator(&iter);
    WebPDemuxDelete(demuxer);
    
    return imageRef;
    
fail:
    if (destBytes) free(destBytes);
    if (provider) CFRelease(provider);
    if (iterInited) WebPDemuxReleaseIterator(&iter);
    if (demuxer) WebPDemuxDelete(demuxer);
    return NULL;
}
+
+#else
+
/// WebP support was not compiled in (YYIMAGE_WEBP_ENABLED is 0).
BOOL YYImageWebPAvailable() {
    return NO;
}
+
/// Stub: WebP support was not compiled in; logs and returns NULL.
CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
    NSLog(@"WebP encoder is disabled"); // fixed: this is the encode path, the old message said "decoder"
    return NULL;
}
+
/// Stub: WebP support was not compiled in; logs and returns 0 frames.
NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
    NSLog(@"WebP decoder is disabled");
    return 0;
}
+
/// Stub: WebP support was not compiled in; logs and returns NULL.
CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
                                       BOOL decodeForDisplay,
                                       BOOL useThreads,
                                       BOOL bypassFiltering,
                                       BOOL noFancyUpsampling) {
    NSLog(@"WebP decoder is disabled");
    return NULL;
}
+
+#endif
+
+
+////////////////////////////////////////////////////////////////////////////////
+#pragma mark - Decoder
+
@implementation YYImageFrame

/// Convenience factory that wraps a single image in a frame object.
+ (instancetype)frameWithImage:(UIImage *)image {
    YYImageFrame *one = [self new];
    one.image = image;
    return one;
}

/// NSCopying: produce a frame carrying the same metadata and a copied image.
- (id)copyWithZone:(NSZone *)zone {
    YYImageFrame *copied = [self.class new];
    copied.image = _image.copy;
    copied.index = _index;
    copied.duration = _duration;
    copied.width = _width;
    copied.height = _height;
    copied.offsetX = _offsetX;
    copied.offsetY = _offsetY;
    copied.dispose = _dispose;
    copied.blend = _blend;
    return copied;
}
@end
+
// Internal frame object: extends the public YYImageFrame with decoder-only
// metadata used when compositing animated frames onto the canvas.
@interface _YYImageDecoderFrame : YYImageFrame
@property (nonatomic, assign) BOOL hasAlpha;                ///< Whether frame has alpha.
@property (nonatomic, assign) BOOL isFullSize;              ///< Whether frame fill the canvas.
@property (nonatomic, assign) NSUInteger blendFromIndex;    ///< Blend from frame index to current frame.
@end
+
@implementation _YYImageDecoderFrame
/// NSCopying: copy the base frame, then carry over the decoder-internal flags.
- (id)copyWithZone:(NSZone *)zone {
    _YYImageDecoderFrame *copied = [super copyWithZone:zone];
    copied.blendFromIndex = _blendFromIndex;
    copied.isFullSize = _isFullSize;
    copied.hasAlpha = _hasAlpha;
    return copied;
}
@end
+
+
@implementation YYImageDecoder {
    pthread_mutex_t _lock; ///< Recursive mutex guarding all mutable decoder state.
    
    BOOL _sourceTypeDetected;        ///< Whether the image type has been detected from the data.
    CGImageSourceRef _source;        ///< ImageIO source (most formats).
    yy_png_info *_apngSource;        ///< Custom APNG source (animated PNG only).
#if YYIMAGE_WEBP_ENABLED
    WebPDemuxer *_webpSource;        ///< WebP demuxer (WebP only).
#endif
    
    UIImageOrientation _orientation; ///< Orientation read from the first frame's properties.
    OSSpinLock _framesLock;          ///< Protects _frames for fast per-frame duration reads.
    NSArray *_frames; ///< Array<_YYImageDecoderFrame>, without image
    BOOL _needBlend;                 ///< Whether frames must be composited onto a canvas.
    NSUInteger _blendFrameIndex;     ///< Index of the frame currently held on _blendCanvas.
    CGContextRef _blendCanvas;       ///< Bitmap context used to blend animated frames.
}
+
/// Release non-ARC resources: decoding sources, the blend canvas, and the
/// recursive mutex created in -initWithScale:.
- (void)dealloc {
    if (_source) CFRelease(_source);
    if (_apngSource) yy_png_info_release(_apngSource);
#if YYIMAGE_WEBP_ENABLED
    if (_webpSource) WebPDemuxDelete(_webpSource);
#endif
    if (_blendCanvas) CFRelease(_blendCanvas);
    pthread_mutex_destroy(&_lock); // was missing: the mutex's resources leaked
}
+
/// Create a decoder for finalized data; returns nil when no frame can be decoded.
+ (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale {
    if (!data) return nil;
    YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:scale];
    [decoder updateData:data final:YES];
    return decoder.frameCount > 0 ? decoder : nil;
}
+
/// Convenience initializer; uses the main screen's scale factor.
- (instancetype)init {
    return [self initWithScale:[UIScreen mainScreen].scale];
}
+
/// Designated initializer.
/// @param scale Image scale factor; values <= 0 are clamped to 1.
- (instancetype)initWithScale:(CGFloat)scale {
    self = [super init];
    if (!self) return nil; // guard against a failed superclass init (was missing)
    if (scale <= 0) scale = 1;
    _scale = scale;
    _framesLock = OS_SPINLOCK_INIT;
    
    // Recursive mutex: public entry points lock it and may nest.
    pthread_mutexattr_t attr;
    pthread_mutexattr_init (&attr);
    pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_RECURSIVE);
    pthread_mutex_init (&_lock, &attr);
    pthread_mutexattr_destroy (&attr);
    
    return self;
}
+
/// Thread-safe wrapper around -_updateData:final:.
- (BOOL)updateData:(NSData *)data final:(BOOL)final {
    pthread_mutex_lock(&_lock);
    BOOL succeeded = [self _updateData:data final:final];
    pthread_mutex_unlock(&_lock);
    return succeeded;
}
+
/// Thread-safe wrapper around -_frameAtIndex:decodeForDisplay:.
- (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
    pthread_mutex_lock(&_lock);
    YYImageFrame *frame = [self _frameAtIndex:index decodeForDisplay:decodeForDisplay];
    pthread_mutex_unlock(&_lock);
    return frame;
}
+
/// Duration of one frame; guarded by the spin lock rather than the mutex
/// for better performance when playing animations.
- (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index {
    NSTimeInterval duration = 0;
    OSSpinLockLock(&_framesLock);
    _YYImageDecoderFrame *frame = index < _frames.count ? _frames[index] : nil;
    duration = frame.duration; // messaging nil yields 0
    OSSpinLockUnlock(&_framesLock);
    return duration;
}
+
/// Thread-safe wrapper around -_framePropertiesAtIndex:.
- (NSDictionary *)framePropertiesAtIndex:(NSUInteger)index {
    pthread_mutex_lock(&_lock);
    NSDictionary *properties = [self _framePropertiesAtIndex:index];
    pthread_mutex_unlock(&_lock);
    return properties;
}
+
/// Thread-safe wrapper around -_imageProperties.
- (NSDictionary *)imageProperties {
    pthread_mutex_lock(&_lock);
    NSDictionary *properties = [self _imageProperties];
    pthread_mutex_unlock(&_lock);
    return properties;
}
+
+#pragma private (wrap)
+
/// Core of -updateData:final:; must be called with `_lock` held.
/// Rejects updates after finalization and shrinking data; detects the image
/// type once enough bytes have arrived, then (re)builds the frame source.
- (BOOL)_updateData:(NSData *)data final:(BOOL)final {
    if (_finalized) return NO;                 // no more updates once finalized
    if (data.length < _data.length) return NO; // data must grow monotonically
    _finalized = final;
    _data = data;
    
    YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
    if (_sourceTypeDetected) {
        if (_type != type) {
            return NO; // the detected type must not change between updates
        } else {
            [self _updateSource];
        }
    } else {
        // NOTE(review): requires strictly more than 16 bytes before detecting,
        // while YYImageDetectType itself accepts exactly 16 — presumably
        // deliberate headroom for partial headers; confirm.
        if (_data.length > 16) {
            _type = type;
            _sourceTypeDetected = YES;
            [self _updateSource];
        }
    }
    return YES;
}
+
/// Build the frame at `index`, compositing onto the blend canvas when needed.
/// Must be called with `_lock` held.
/// @param index            Frame index; out of range returns nil.
/// @param decodeForDisplay YES to return a bitmap predecoded for display.
/// @return A copied frame object with its image set, or nil on failure.
- (YYImageFrame *)_frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
    if (index >= _frames.count) return nil; // was `return 0`; nil is the proper object return
    _YYImageDecoderFrame *frame = [(_YYImageDecoderFrame *)_frames[index] copy];
    BOOL decoded = NO;
    BOOL extendToCanvas = NO;
    if (_type != YYImageTypeICO && decodeForDisplay) { // ICO contains multi-size frame and should not extend to canvas.
        extendToCanvas = YES;
    }
    
    if (!_needBlend) {
        // Fast path: the frame is self-contained and needs no compositing.
        CGImageRef imageRef = [self _newUnblendedImageAtIndex:index extendToCanvas:extendToCanvas decoded:&decoded];
        if (!imageRef) return nil;
        if (decodeForDisplay && !decoded) {
            CGImageRef imageRefDecoded = YYCGImageCreateDecodedCopy(imageRef, YES);
            if (imageRefDecoded) {
                CFRelease(imageRef);
                imageRef = imageRefDecoded;
                decoded = YES;
            }
        }
        UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
        CFRelease(imageRef);
        if (!image) return nil;
        image.yy_isDecodedForDisplay = decoded;
        frame.image = image;
        return frame;
    }
    
    // blend
    if (![self _createBlendContextIfNeeded]) return nil;
    CGImageRef imageRef = NULL;
    
    if (_blendFrameIndex + 1 == frame.index) {
        // The canvas already holds the previous frame; blend on top of it.
        imageRef = [self _newBlendedImageWithFrame:frame];
        _blendFrameIndex = index;
    } else { // should draw canvas from previous frame
        _blendFrameIndex = NSNotFound;
        CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
        
        if (frame.blendFromIndex == frame.index) {
            // The frame is independent: draw it directly onto the cleared canvas.
            CGImageRef unblendedImage = [self _newUnblendedImageAtIndex:index extendToCanvas:NO decoded:NULL];
            if (unblendedImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendedImage);
                CFRelease(unblendedImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
            if (frame.dispose == YYImageDisposeBackground) {
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
            }
            _blendFrameIndex = index;
        } else { // canvas is not ready
            // Rebuild the canvas by replaying every frame from the last
            // self-contained state up to the requested one.
            for (uint32_t i = (uint32_t)frame.blendFromIndex; i <= (uint32_t)frame.index; i++) {
                if (i == frame.index) {
                    if (!imageRef) imageRef = [self _newBlendedImageWithFrame:frame];
                } else {
                    [self _blendImageWithFrame:_frames[i]];
                }
            }
            _blendFrameIndex = index;
        }
    }
    
    if (!imageRef) return nil;
    UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
    CFRelease(imageRef);
    if (!image) return nil;
    
    image.yy_isDecodedForDisplay = YES;
    frame.image = image;
    if (extendToCanvas) {
        // A blended frame always covers the whole canvas.
        frame.width = _width;
        frame.height = _height;
        frame.offsetX = 0;
        frame.offsetY = 0;
        frame.dispose = YYImageDisposeNone;
        frame.blend = YYImageBlendNone;
    }
    return frame;
}
+
/// ImageIO properties for one frame; nil when out of range or no ImageIO source.
- (NSDictionary *)_framePropertiesAtIndex:(NSUInteger)index {
    if (index >= _frames.count || !_source) return nil;
    CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, index, NULL);
    return properties ? CFBridgingRelease(properties) : nil;
}
+
/// Whole-image ImageIO properties; nil when there is no ImageIO source.
- (NSDictionary *)_imageProperties {
    if (!_source) return nil;
    CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
    return properties ? CFBridgingRelease(properties) : nil;
}
+
+#pragma private
+
/// Dispatch to the format-specific source updater based on the detected type.
- (void)_updateSource {
    if (_type == YYImageTypeWebP) {
        [self _updateSourceWebP];
    } else if (_type == YYImageTypePNG) {
        [self _updateSourceAPNG];
    } else {
        [self _updateSourceImageIO];
    }
}
+
/// Rebuild decoder state from `_data` as WebP: demux the file, record canvas
/// size and loop count, and build per-frame metadata (offsets, dispose/blend).
/// No-op when WebP support is compiled out.
- (void)_updateSourceWebP {
#if YYIMAGE_WEBP_ENABLED
    // Reset any previous state before re-parsing.
    _width = 0;
    _height = 0;
    _loopCount = 0;
    if (_webpSource) WebPDemuxDelete(_webpSource);
    _webpSource = NULL;
    OSSpinLockLock(&_framesLock);
    _frames = nil;
    OSSpinLockUnlock(&_framesLock);
    
    /*
     https://developers.google.com/speed/webp/docs/api
     The documentation said we can use WebPIDecoder to decode webp progressively, 
     but currently it can only returns an empty image (not same as progressive jpegs),
     so we don't use progressive decoding.
     
     When using WebPDecode() to decode multi-frame webp, we will get the error
     "VP8_STATUS_UNSUPPORTED_FEATURE", so we first use WebPDemuxer to unpack it.
     */
    
    WebPData webPData = {0};
    webPData.bytes = _data.bytes;
    webPData.size = _data.length;
    WebPDemuxer *demuxer = WebPDemux(&webPData);
    if (!demuxer) return;
    
    uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
    uint32_t webpLoopCount =  WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
    uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
    uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
    if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) {
        WebPDemuxDelete(demuxer);
        return;
    }
    
    NSMutableArray *frames = [NSMutableArray new];
    BOOL needBlend = NO;
    uint32_t iterIndex = 0;
    uint32_t lastBlendIndex = 0;
    WebPIterator iter = {0};
    if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index...
        do {
            _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
            [frames addObject:frame];
            if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
                frame.dispose = YYImageDisposeBackground;
            }
            if (iter.blend_method == WEBP_MUX_BLEND) {
                frame.blend = YYImageBlendOver;
            }
            
            // NOTE(review): these shadow the outer uint32_t canvas variables;
            // the demuxer should return the same values, so behavior is unchanged.
            int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
            int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
            frame.index = iterIndex;
            frame.duration = iter.duration / 1000.0; // WebP stores milliseconds
            frame.width = iter.width;
            frame.height = iter.height;
            frame.hasAlpha = iter.has_alpha;
            // NOTE(review): this overwrites the YYImageBlendOver value assigned
            // above with a BOOL; equivalent only if YYImageBlendOver == 1 — confirm.
            frame.blend = iter.blend_method == WEBP_MUX_BLEND;
            frame.offsetX = iter.x_offset;
            // Flip vertical offset: WebP is top-left origin, CG is bottom-left.
            frame.offsetY = canvasHeight - iter.y_offset - iter.height;
            
            BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
            BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
            frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
            
            // Track the earliest frame this one must be blended from.
            if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
                frame.blendFromIndex = lastBlendIndex = iterIndex;
            } else {
                if (frame.dispose && frame.isFullSize) {
                    frame.blendFromIndex = lastBlendIndex;
                    lastBlendIndex = iterIndex + 1;
                } else {
                    frame.blendFromIndex = lastBlendIndex;
                }
            }
            if (frame.index != frame.blendFromIndex) needBlend = YES;
            iterIndex++;
        } while (WebPDemuxNextFrame(&iter));
        WebPDemuxReleaseIterator(&iter);
    }
    if (frames.count != webpFrameCount) { // iteration did not yield every frame
        WebPDemuxDelete(demuxer);
        return;
    }
    
    // Commit the parsed state.
    _width = canvasWidth;
    _height = canvasHeight;
    _frameCount = frames.count;
    _loopCount = webpLoopCount;
    _needBlend = needBlend;
    _webpSource = demuxer;
    OSSpinLockLock(&_framesLock);
    _frames = frames;
    OSSpinLockUnlock(&_framesLock);
#endif
}
+
/// Rebuild decoder state from `_data` as (A)PNG using the custom APNG parser.
/// Decodes the first frame via ImageIO, then switches to the APNG source only
/// when the data is finalized and real animation is present.
- (void)_updateSourceAPNG {
    /*
     APNG extends PNG format to support animation, it was supported by ImageIO
     since iOS 8.
     
     We use a custom APNG decoder to make APNG available in old system, so we
     ignore the ImageIO's APNG frame info. Typically the custom decoder is a bit
     faster than ImageIO.
     */
    
    yy_png_info_release(_apngSource);
    _apngSource = nil;
    
    [self _updateSourceImageIO]; // decode first frame
    if (_frameCount == 0) return; // png decode failed
    if (!_finalized) return; // ignore multi-frame before finalized
    
    yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length);
    if (!apng) return; // apng decode failed
    if (apng->apng_frame_num == 0 ||
        (apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) {
        yy_png_info_release(apng);
        return; // no animation
    }
    if (_source) { // apng decode succeed, no longer need image source
        CFRelease(_source);
        _source = NULL;
    }
    
    uint32_t canvasWidth = apng->header.width;
    uint32_t canvasHeight = apng->header.height;
    NSMutableArray *frames = [NSMutableArray new];
    BOOL needBlend = NO;
    uint32_t lastBlendIndex = 0;
    for (uint32_t i = 0; i < apng->apng_frame_num; i++) {
        _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
        [frames addObject:frame];
        
        yy_png_frame_info *fi = apng->apng_frames + i;
        frame.index = i;
        frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den);
        frame.hasAlpha = YES;
        frame.width = fi->frame_control.width;
        frame.height = fi->frame_control.height;
        frame.offsetX = fi->frame_control.x_offset;
        // Flip vertical offset: PNG is top-left origin, CG is bottom-left.
        frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height;
        
        BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight);
        BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0);
        frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
        
        // Map APNG dispose/blend ops onto YYImage equivalents.
        switch (fi->frame_control.dispose_op) {
            case YY_PNG_DISPOSE_OP_BACKGROUND: {
                frame.dispose = YYImageDisposeBackground;
            } break;
            case YY_PNG_DISPOSE_OP_PREVIOUS: {
                frame.dispose = YYImageDisposePrevious;
            } break;
            default: {
                frame.dispose = YYImageDisposeNone;
            } break;
        }
        switch (fi->frame_control.blend_op) {
            case YY_PNG_BLEND_OP_OVER: {
                frame.blend = YYImageBlendOver;
            } break;
                
            default: {
                frame.blend = YYImageBlendNone;
            } break;
        }
        
        // Track the earliest frame this one must be blended from.
        if (frame.blend == YYImageBlendNone && frame.isFullSize) {
            frame.blendFromIndex  = i;
            if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i;
        } else {
            if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) {
                frame.blendFromIndex = lastBlendIndex;
                lastBlendIndex = i + 1;
            } else {
                frame.blendFromIndex = lastBlendIndex;
            }
        }
        if (frame.index != frame.blendFromIndex) needBlend = YES;
    }
    
    // Commit the parsed state.
    _width = canvasWidth;
    _height = canvasHeight;
    _frameCount = frames.count;
    _loopCount = apng->apng_loop_num;
    _needBlend = needBlend;
    _apngSource = apng;
    OSSpinLockLock(&_framesLock);
    _frames = frames;
    OSSpinLockUnlock(&_framesLock);
}
+
/// Rebuilds decoder state from `_data` using ImageIO.
/// Used for every format that is not handled by the custom APNG/WebP decoders.
/// Resets cached geometry and frame metadata, creates/updates the (possibly
/// incremental) CGImageSource, then collects per-frame width/height/duration
/// and the image orientation of the first frame.
- (void)_updateSourceImageIO {
    // Reset cached state; it is re-derived from the (possibly grown) _data below.
    _width = 0;
    _height = 0;
    _orientation = UIImageOrientationUp;
    _loopCount = 0;
    OSSpinLockLock(&_framesLock);
    _frames = nil;
    OSSpinLockUnlock(&_framesLock);
    
    if (!_source) {
        if (_finalized) {
            // All bytes available: create a normal image source.
            _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
        } else {
            // Progressive data: create an incremental source and feed what we have.
            _source = CGImageSourceCreateIncremental(NULL);
            if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
        }
    } else {
        // Source already exists: push the latest bytes (final flag once complete).
        CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized);
    }
    if (!_source) return;
    
    _frameCount = CGImageSourceGetCount(_source);
    if (_frameCount == 0) return;
    
    if (!_finalized) { // ignore multi-frame before finalized
        _frameCount = 1;
    } else {
        if (_type == YYImageTypePNG) { // use custom apng decoder and ignore multi-frame
            _frameCount = 1;
        }
        if (_type == YYImageTypeGIF) { // get gif loop count
            CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
            if (properties) {
                CFTypeRef loop = CFDictionaryGetValue(properties, kCGImagePropertyGIFLoopCount);
                if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount);
                CFRelease(properties);
            }
        }
    }
    
    /*
     ICO, GIF, APNG may contains multi-frame.
     */
    NSMutableArray *frames = [NSMutableArray new];
    for (NSUInteger i = 0; i < _frameCount; i++) {
        _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
        frame.index = i;
        frame.blendFromIndex = i; // blendFromIndex == index: no blending needed for this frame
        frame.hasAlpha = YES;
        frame.isFullSize = YES;
        [frames addObject:frame];
        
        CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
        if (properties) {
            NSTimeInterval duration = 0;
            NSInteger orientationValue = 0, width = 0, height = 0;
            CFTypeRef value = NULL;
            
            value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
            if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width);
            value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
            if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height);
            if (_type == YYImageTypeGIF) {
                CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
                if (gif) {
                    // Use the unclamped frame delay if it exists.
                    value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime);
                    if (!value) {
                        // Fall back to the clamped frame delay if the unclamped frame delay does not exist.
                        value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime);
                    }
                    if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration);
                }
            }
            
            frame.width = width;
            frame.height = height;
            frame.duration = duration;
            
            if (i == 0 && _width + _height == 0) { // init first frame
                _width = width;
                _height = height;
                value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
                if (value) {
                    // Map the EXIF orientation value to UIImageOrientation.
                    CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue);
                    _orientation = YYUIImageOrientationFromEXIFValue(orientationValue);
                }
            }
            CFRelease(properties);
        }
    }
    OSSpinLockLock(&_framesLock);
    _frames = frames;
    OSSpinLockUnlock(&_framesLock);
}
+
/// Creates a new image for the frame at `index` WITHOUT compositing it over
/// previous frames. Dispatches on whichever decode backend is active
/// (ImageIO `_source`, custom APNG `_apngSource`, or WebP `_webpSource`).
/// @param index          Frame index (0-based).
/// @param extendToCanvas If YES, the frame is drawn into a full canvas-sized
///                       (_width x _height) bitmap at its frame offset.
/// @param decoded        On output, set to YES if the returned bitmap is
///                       already decompressed for display. May be NULL.
/// @return A retained CGImage, or NULL on failure. Caller must release.
- (CGImageRef)_newUnblendedImageAtIndex:(NSUInteger)index
                         extendToCanvas:(BOOL)extendToCanvas
                                decoded:(BOOL *)decoded CF_RETURNS_RETAINED {
    
    if (!_finalized && index > 0) return NULL; // only frame 0 is safe before all data has arrived
    if (_frames.count <= index) return NULL;
    _YYImageDecoderFrame *frame = _frames[index];
    
    if (_source) {
        CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
        if (imageRef && extendToCanvas) {
            size_t width = CGImageGetWidth(imageRef);
            size_t height = CGImageGetHeight(imageRef);
            if (width == _width && height == _height) {
                // Already canvas-sized: just force-decode a copy.
                CGImageRef imageRefExtended = YYCGImageCreateDecodedCopy(imageRef, YES);
                if (imageRefExtended) {
                    CFRelease(imageRef);
                    imageRef = imageRefExtended;
                    if (decoded) *decoded = YES;
                }
            } else {
                // Smaller than the canvas: redraw at the top-left of a canvas-sized bitmap.
                CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
                if (context) {
                    CGContextDrawImage(context, CGRectMake(0, _height - height, width, height), imageRef);
                    CGImageRef imageRefExtended = CGBitmapContextCreateImage(context);
                    CFRelease(context);
                    if (imageRefExtended) {
                        CFRelease(imageRef);
                        imageRef = imageRefExtended;
                        if (decoded) *decoded = YES;
                    }
                }
            }
        }
        return imageRef;
    }
    
    if (_apngSource) {
        // Rebuild a standalone PNG from the frame's fdAT chunks, then decode it with ImageIO.
        uint32_t size = 0;
        uint8_t *bytes = yy_png_copy_frame_data_at_index(_data.bytes, _apngSource, (uint32_t)index, &size);
        if (!bytes) return NULL;
        CGDataProviderRef provider = CGDataProviderCreateWithData(bytes, bytes, size, YYCGDataProviderReleaseDataCallback);
        if (!provider) {
            free(bytes);
            return NULL;
        }
        bytes = NULL; // hold by provider
        
        CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
        if (!source) {
            CFRelease(provider);
            return NULL;
        }
        CFRelease(provider);
        
        if(CGImageSourceGetCount(source) < 1) {
            CFRelease(source);
            return NULL;
        }
        
        CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
        CFRelease(source);
        if (!imageRef) return NULL;
        if (extendToCanvas) {
            // Place the sub-frame at its offset inside a canvas-sized bitmap.
            CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); //bgrA
            if (context) {
                CGContextDrawImage(context, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), imageRef);
                CFRelease(imageRef);
                imageRef = CGBitmapContextCreateImage(context);
                CFRelease(context);
                if (decoded) *decoded = YES;
            }
        }
        return imageRef;
    }
    
#if YYIMAGE_WEBP_ENABLED
    if (_webpSource) {
        WebPIterator iter;
        if (!WebPDemuxGetFrame(_webpSource, (int)(index + 1), &iter)) return NULL; // demux webp frame data
        // frame numbers are one-based in webp -----------^
        
        int frameWidth = iter.width;
        int frameHeight = iter.height;
        if (frameWidth < 1 || frameHeight < 1) {
            WebPDemuxReleaseIterator(&iter); // fix: iterator was leaked on this early return
            return NULL;
        }
        
        int width = extendToCanvas ? (int)_width : frameWidth;
        int height = extendToCanvas ? (int)_height : frameHeight;
        if (width > _width || height > _height) {
            WebPDemuxReleaseIterator(&iter); // fix: iterator was leaked on this early return
            return NULL;
        }
        
        // Copy everything we need out of the iterator up front; the original
        // code read iter.x_offset/y_offset AFTER WebPDemuxReleaseIterator().
        int frameXOffset = iter.x_offset;
        int frameYOffset = iter.y_offset;
        const uint8_t *payload = iter.fragment.bytes;
        size_t payloadSize = iter.fragment.size;
        
        WebPDecoderConfig config;
        if (!WebPInitDecoderConfig(&config)) {
            WebPDemuxReleaseIterator(&iter);
            return NULL;
        }
        if (WebPGetFeatures(payload, payloadSize, &config.input) != VP8_STATUS_OK) {
            WebPDemuxReleaseIterator(&iter);
            return NULL;
        }
        
        size_t bitsPerComponent = 8;
        size_t bitsPerPixel = 32;
        size_t bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * width, 32); // 32-byte aligned rows
        size_t length = bytesPerRow * height;
        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst; //bgrA
        
        void *pixels = calloc(1, length);
        if (!pixels) {
            WebPDemuxReleaseIterator(&iter);
            return NULL;
        }
        
        // Decode directly into our own buffer in premultiplied BGRA.
        config.output.colorspace = MODE_bgrA;
        config.output.is_external_memory = 1;
        config.output.u.RGBA.rgba = pixels;
        config.output.u.RGBA.stride = (int)bytesPerRow;
        config.output.u.RGBA.size = length;
        if (WebPDecode(payload, payloadSize, &config) != VP8_STATUS_OK) { // decode
            WebPDemuxReleaseIterator(&iter);
            free(pixels);
            return NULL;
        }
        WebPDemuxReleaseIterator(&iter);
        
        if (extendToCanvas && (frameXOffset != 0 || frameYOffset != 0)) {
            // Translate the decoded pixels to the frame's offset within the canvas.
            void *tmp = calloc(1, length);
            if (tmp) {
                vImage_Buffer src = {pixels, height, width, bytesPerRow};
                vImage_Buffer dest = {tmp, height, width, bytesPerRow};
                vImage_CGAffineTransform transform = {1, 0, 0, 1, frameXOffset, -frameYOffset};
                uint8_t backColor[4] = {0};
                vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
                if (error == kvImageNoError) {
                    memcpy(pixels, tmp, length);
                }
                free(tmp);
            }
        }
        
        CGDataProviderRef provider = CGDataProviderCreateWithData(pixels, pixels, length, YYCGDataProviderReleaseDataCallback);
        if (!provider) {
            free(pixels);
            return NULL;
        }
        pixels = NULL; // hold by provider
        
        CGImageRef image = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
        CFRelease(provider);
        if (decoded) *decoded = YES;
        return image;
    }
#endif
    
    return NULL;
}
+
/// Lazily creates the bitmap context used to composite animated frames.
/// @return YES if the blend canvas exists (or was just created) successfully.
- (BOOL)_createBlendContextIfNeeded {
    if (_blendCanvas) return YES;
    _blendFrameIndex = NSNotFound; // fresh canvas: no frame has been blended yet
    _blendCanvas = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
    return _blendCanvas != NULL;
}
+
/// Composites `frame` onto the persistent blend canvas in place.
/// YYImageDisposePrevious needs no action here (the caller restores the canvas);
/// YYImageDisposeBackground only clears the frame's region; otherwise the frame
/// is drawn, replacing the region first unless the blend mode is "over".
- (void)_blendImageWithFrame:(_YYImageDecoderFrame *)frame {
    if (frame.dispose == YYImageDisposePrevious) {
        return; // canvas restoration is handled by the caller
    }
    CGRect frameRect = CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height);
    if (frame.dispose == YYImageDisposeBackground) {
        CGContextClearRect(_blendCanvas, frameRect);
        return;
    }
    // No dispose: "blend over" composites on top of existing pixels,
    // anything else replaces the region (clear, then draw).
    if (frame.blend != YYImageBlendOver) {
        CGContextClearRect(_blendCanvas, frameRect);
    }
    CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
    if (unblendImage) {
        CGContextDrawImage(_blendCanvas, frameRect, unblendImage);
        CFRelease(unblendImage);
    }
}
+
/// Creates a retained, fully-composed image for `frame` by drawing it onto the
/// persistent blend canvas, then applies the frame's dispose operation so the
/// canvas is left ready for the NEXT frame. The branch ordering (snapshot /
/// draw / snapshot result / dispose) is significant in every case.
/// Caller must have set up `_blendCanvas` first (-_createBlendContextIfNeeded).
- (CGImageRef)_newBlendedImageWithFrame:(_YYImageDecoderFrame *)frame CF_RETURNS_RETAINED{
    CGImageRef imageRef = NULL;
    if (frame.dispose == YYImageDisposePrevious) {
        // "Dispose to previous": snapshot the canvas first, produce this frame's
        // image, then restore the snapshot so the next frame sees the old canvas.
        if (frame.blend == YYImageBlendOver) {
            CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
            CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
            if (previousImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
                CFRelease(previousImage);
            }
        } else {
            // Blend "source": clear the frame region before drawing (replace).
            CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
            CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
            if (previousImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
                CFRelease(previousImage);
            }
        }
    } else if (frame.dispose == YYImageDisposeBackground) {
        // "Dispose to background": after snapshotting the result, clear the
        // frame's region so the next frame composites over transparent pixels.
        if (frame.blend == YYImageBlendOver) {
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
            CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
        } else {
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
            CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
        }
    } else { // no dispose
        // Canvas keeps this frame's result for the next frame to build on.
        if (frame.blend == YYImageBlendOver) {
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
        } else {
            CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
            if (unblendImage) {
                CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
                CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
                CFRelease(unblendImage);
            }
            imageRef = CGBitmapContextCreateImage(_blendCanvas);
        }
    }
    return imageRef;
}
+
+@end
+
+
+////////////////////////////////////////////////////////////////////////////////
+#pragma mark - Encoder
+
+@implementation YYImageEncoder {
+    NSMutableArray *_images;
+    NSMutableArray *_durations;
+}
+
/// Plain -init is a programmer error; use -initWithType:.
- (instancetype)init {
    @throw [NSException exceptionWithName:@"YYImageEncoder init error" reason:@"YYImageEncoder must be initialized with a type. Use 'initWithType:' instead." userInfo:nil];
    return [self initWithType:YYImageTypeUnknown]; // unreachable; silences the missing-return warning
}
+
/// Designated initializer.
/// @param type The target encode type. Returns nil for unknown/unsupported
///             types (WebP when the library is built without WebP support).
/// @return An encoder preconfigured with per-format default quality, or nil.
- (instancetype)initWithType:(YYImageType)type {
    if (type == YYImageTypeUnknown || type >= YYImageTypeOther) return nil;
    
#if !YYIMAGE_WEBP_ENABLED
    if (type == YYImageTypeWebP) return nil;
#endif
    
    self = [super init];
    if (!self) return nil;
    _type = type;
    _images = [[NSMutableArray alloc] init];
    _durations = [[NSMutableArray alloc] init];
    
    // Per-format default quality / lossless settings.
    switch (type) {
        case YYImageTypeJPEG:
        case YYImageTypeJPEG2000:
            _quality = 0.9;
            break;
        case YYImageTypeTIFF:
        case YYImageTypeBMP:
        case YYImageTypeGIF:
        case YYImageTypeICO:
        case YYImageTypeICNS:
        case YYImageTypePNG:
            _quality = 1;
            _lossless = YES;
            break;
        case YYImageTypeWebP:
            _quality = 0.8;
            break;
        default:
            break;
    }
    
    return self;
}
+
/// Clamps the encode quality to [0, 1].
- (void)setQuality:(CGFloat)quality {
    if (quality < 0) quality = 0;
    else if (quality > 1) quality = 1;
    _quality = quality;
}
+
/// Appends a UIImage frame. Images without a CGImage backing are ignored.
/// @param duration Frame duration in seconds; negative values are clamped to 0.
- (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration {
    if (!image.CGImage) return;
    [_images addObject:image];
    [_durations addObject:@(duration < 0 ? 0 : duration)];
}
+
/// Appends an encoded-image data frame. Empty data is ignored.
/// @param duration Frame duration in seconds; negative values are clamped to 0.
- (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration {
    if (data.length == 0) return;
    [_images addObject:data];
    [_durations addObject:@(duration < 0 ? 0 : duration)];
}
+
/// Appends an image file frame. Empty paths are ignored.
/// @param path     Image file path.
/// @param duration Frame duration in seconds; negative values are clamped to 0.
- (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration {
    if (path.length == 0) return;
    duration = duration < 0 ? 0 : duration;
    // NOTE(review): -URLWithString: yields a schemeless URL for a plain file
    // path; CGImageSourceCreateWithURL (used later in
    // -_encodeImageWithDestination:imageCount:) generally needs a file:// URL.
    // +fileURLWithPath: looks more correct here, but -_newCGImageFromIndex:
    // reads .absoluteString as a path, so both would have to change together.
    // TODO confirm before changing.
    NSURL *url = [NSURL URLWithString:path];
    if (!url) return;
    [_images addObject:url];
    [_durations addObject:@(duration)];
}
+
/// Whether the current type/frame combination can be encoded by ImageIO.
/// (The misspelled selector name is kept intentionally; callers use it.)
/// @return YES when ImageIO handles this encode; NO routes to a custom encoder.
- (BOOL)_imageIOAvaliable {
    switch (_type) {
        case YYImageTypeJPEG:
        case YYImageTypeJPEG2000:
        case YYImageTypeTIFF:
        case YYImageTypeBMP:
        case YYImageTypeICO:
        case YYImageTypeICNS:
        case YYImageTypeGIF:
            return _images.count > 0;
        case YYImageTypePNG:
            // ImageIO cannot write APNG; only single-frame PNG goes through it.
            return _images.count == 1;
        default:
            // WebP (custom encoder) and unknown types.
            return NO;
    }
}
+
/// Creates an ImageIO destination writing either into an NSMutableData buffer
/// or to a file path (NSString).
/// @return A retained CGImageDestination, or NULL for any other input.
- (CGImageDestinationRef)_newImageDestination:(id)dest imageCount:(NSUInteger)count CF_RETURNS_RETAINED {
    CGImageDestinationRef destination = NULL;
    if ([dest isKindOfClass:[NSMutableData class]]) {
        destination = CGImageDestinationCreateWithData((CFMutableDataRef)dest, YYImageTypeToUTType(_type), count, NULL);
    } else if ([dest isKindOfClass:[NSString class]]) {
        NSURL *url = [NSURL fileURLWithPath:dest];
        if (url) {
            destination = CGImageDestinationCreateWithURL((CFURLRef)url, YYImageTypeToUTType(_type), count, NULL);
        }
    }
    return destination;
}
+
/// Adds `count` frames (and, for GIF, the loop count plus per-frame delays)
/// to the given ImageIO destination. Does not finalize the destination.
- (void)_encodeImageWithDestination:(CGImageDestinationRef)destination imageCount:(NSUInteger)count {
    if (_type == YYImageTypeGIF) {
        NSDictionary *gifProperty = @{(__bridge id)kCGImagePropertyGIFDictionary:
                                        @{(__bridge id)kCGImagePropertyGIFLoopCount: @(_loopCount)}};
        CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)gifProperty);
    }
    
    for (NSUInteger i = 0; i < count; i++) { // fix: loop index was a signed int compared to NSUInteger
        id imageSrc = _images[i];
        NSDictionary *frameProperty = nil; // fix: was NULL (object pointer, use nil)
        if (_type == YYImageTypeGIF && count > 1) {
            frameProperty = @{(NSString *)kCGImagePropertyGIFDictionary : @{(NSString *) kCGImagePropertyGIFDelayTime:_durations[i]}};
        } else {
            frameProperty = @{(id)kCGImageDestinationLossyCompressionQuality : @(_quality)};
        }
        
        if ([imageSrc isKindOfClass:[UIImage class]]) {
            CGImageDestinationAddImage(destination, ((UIImage *)imageSrc).CGImage, (CFDictionaryRef)frameProperty);
        } else if ([imageSrc isKindOfClass:[NSURL class]]) {
            CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)imageSrc, NULL);
            if (source) {
                // Fix: each source wraps a single file, so the image index within
                // it is 0 — passing `i` was out of range for every frame past the
                // first when encoding from multiple files.
                CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
                CFRelease(source);
            }
        } else if ([imageSrc isKindOfClass:[NSData class]]) {
            CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageSrc, NULL);
            if (source) {
                // Fix: same as above — index into this single-item source is 0, not `i`.
                CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
                CFRelease(source);
            }
        }
    }
}
+
/// Creates a retained CGImage for the source at `index` (_images may hold
/// UIImage, NSURL, or NSData entries), baking any EXIF orientation into pixels.
/// @param index   Index into _images.
/// @param decoded If YES, returns an image decompressed for display.
/// @return A retained CGImage, or NULL on failure. Caller must release.
- (CGImageRef)_newCGImageFromIndex:(NSUInteger)index decoded:(BOOL)decoded CF_RETURNS_RETAINED {
    UIImage *image = nil;
    id imageSrc = _images[index];
    if ([imageSrc isKindOfClass:[UIImage class]]) {
        image = imageSrc;
    } else if ([imageSrc isKindOfClass:[NSURL class]]) {
        // Fix: a file URL's absoluteString ("file:///...") is not a filesystem
        // path; use -path for file URLs. Schemeless URLs (current behavior of
        // -addImageWithFile:) still go through absoluteString unchanged.
        NSURL *url = imageSrc;
        NSString *filePath = url.isFileURL ? url.path : url.absoluteString;
        image = [UIImage imageWithContentsOfFile:filePath];
    } else if ([imageSrc isKindOfClass:[NSData class]]) {
        image = [UIImage imageWithData:imageSrc];
    }
    if (!image) return NULL;
    CGImageRef imageRef = image.CGImage;
    if (!imageRef) return NULL;
    if (image.imageOrientation != UIImageOrientationUp) {
        // Normalize orientation by redrawing into an "up"-oriented bitmap.
        return YYCGImageCreateCopyWithOrientation(imageRef, image.imageOrientation, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
    }
    if (decoded) {
        return YYCGImageCreateDecodedCopy(imageRef, YES);
    }
    return (CGImageRef)CFRetain(imageRef);
}
+
/// Encodes via ImageIO into an in-memory buffer.
/// Only GIF carries multiple frames; every other ImageIO type encodes one.
/// @return The encoded data, or nil on failure.
- (NSData *)_encodeWithImageIO {
    NSUInteger count = (_type == YYImageTypeGIF) ? _images.count : 1;
    NSMutableData *data = [NSMutableData data];
    CGImageDestinationRef destination = [self _newImageDestination:data imageCount:count];
    if (!destination) return nil;
    [self _encodeImageWithDestination:destination imageCount:count];
    BOOL finalized = CGImageDestinationFinalize(destination);
    CFRelease(destination);
    return (finalized && data.length > 0) ? data : nil;
}
+
/// Encodes via ImageIO directly to a file path.
/// @return YES on success.
- (BOOL)_encodeWithImageIO:(NSString *)path {
    NSUInteger count = (_type == YYImageTypeGIF) ? _images.count : 1;
    CGImageDestinationRef destination = [self _newImageDestination:path imageCount:count];
    if (!destination) return NO;
    [self _encodeImageWithDestination:destination imageCount:count];
    BOOL finalized = CGImageDestinationFinalize(destination);
    CFRelease(destination);
    return finalized;
}
+
/// Encodes an animated PNG.
/// ImageIO cannot write APNG, so each frame is first encoded as a standalone
/// PNG, then the frames are spliced together by hand: the first frame's chunk
/// stream is copied through while the APNG control chunks (acTL, fcTL, fdAT)
/// are inserted around its IDAT chunks per the APNG specification.
/// @return The APNG data, or nil on any failure.
- (NSData *)_encodeAPNG {
    NSMutableArray *pngDatas = [NSMutableArray new];
    NSMutableArray *pngSizes = [NSMutableArray new];
    NSUInteger canvasWidth = 0, canvasHeight = 0;
    for (NSUInteger i = 0; i < _images.count; i++) {
        CGImageRef decoded = [self _newCGImageFromIndex:i decoded:YES];
        if (!decoded) return nil;
        CGSize size = CGSizeMake(CGImageGetWidth(decoded), CGImageGetHeight(decoded));
        if (size.width < 1 || size.height < 1) { // reject degenerate frames before doing any work
            CFRelease(decoded);
            return nil;
        }
        [pngSizes addObject:[NSValue valueWithCGSize:size]];
        // The canvas must cover every frame.
        if (canvasWidth < size.width) canvasWidth = size.width;
        if (canvasHeight < size.height) canvasHeight = size.height;
        CFDataRef frameData = YYCGImageCreateEncodedData(decoded, YYImageTypePNG, 1);
        CFRelease(decoded);
        if (!frameData) return nil;
        [pngDatas addObject:(__bridge id)(frameData)];
        CFRelease(frameData);
    }
    CGSize firstFrameSize = [(NSValue *)[pngSizes firstObject] CGSizeValue];
    if (firstFrameSize.width < canvasWidth || firstFrameSize.height < canvasHeight) {
        // The first frame defines the IHDR size, so it must cover the whole
        // canvas; redraw it into a canvas-sized bitmap and re-encode.
        CGImageRef decoded = [self _newCGImageFromIndex:0 decoded:YES];
        if (!decoded) return nil;
        CGContextRef context = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8,
                                                     0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
        if (!context) {
            CFRelease(decoded);
            return nil;
        }
        CGContextDrawImage(context, CGRectMake(0, canvasHeight - firstFrameSize.height, firstFrameSize.width, firstFrameSize.height), decoded);
        CFRelease(decoded);
        CGImageRef extendedImage = CGBitmapContextCreateImage(context);
        CFRelease(context);
        if (!extendedImage) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedData(extendedImage, YYImageTypePNG, 1);
        CFRelease(extendedImage); // fix: was leaked on the success path
        if (!frameData) return nil;
        pngDatas[0] = (__bridge id)(frameData);
        CFRelease(frameData);
    }
    
    NSData *firstFrameData = pngDatas[0];
    yy_png_info *info = yy_png_info_create(firstFrameData.bytes, (uint32_t)firstFrameData.length);
    if (!info) return nil;
    NSMutableData *result = [NSMutableData new];
    BOOL insertBefore = NO, insertAfter = NO;
    uint32_t apngSequenceIndex = 0;
    
    // PNG file signature.
    uint32_t png_header[2];
    png_header[0] = YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47);
    png_header[1] = YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A);
    [result appendBytes:png_header length:8];
    
    for (uint32_t c = 0; c < info->chunk_num; c++) {
        yy_png_chunk_info *chunk = info->chunks + c;
        
        if (!insertBefore && chunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
            insertBefore = YES;
            // Insert acTL (animation control: frame count + loop count) before
            // the first IDAT.
            uint32_t acTL[5] = {0};
            acTL[0] = yy_swap_endian_uint32(8); //length
            acTL[1] = YY_FOUR_CC('a', 'c', 'T', 'L'); // fourcc
            acTL[2] = yy_swap_endian_uint32((uint32_t)pngDatas.count); // num frames
            acTL[3] = yy_swap_endian_uint32((uint32_t)_loopCount); // num plays
            acTL[4] = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(acTL + 1), 12)); //crc32
            [result appendBytes:acTL length:20];
            
            // Insert the first frame's fcTL (frame control).
            yy_png_chunk_fcTL chunk_fcTL = {0};
            chunk_fcTL.sequence_number = apngSequenceIndex;
            chunk_fcTL.width = (uint32_t)firstFrameSize.width;
            chunk_fcTL.height = (uint32_t)firstFrameSize.height;
            yy_png_delay_to_fraction([(NSNumber *)_durations[0] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
            chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
            chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
            
            uint8_t fcTL[38] = {0};
            *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
            *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
            yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
            *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
            [result appendBytes:fcTL length:38];
            
            apngSequenceIndex++;
        }
        
        if (!insertAfter && insertBefore && chunk->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
            insertAfter = YES;
            // After the first frame's IDAT run, insert fcTL + fdAT pairs for
            // every remaining frame.
            for (NSUInteger i = 1; i < pngDatas.count; i++) {
                NSData *frameData = pngDatas[i];
                yy_png_info *frame = yy_png_info_create(frameData.bytes, (uint32_t)frameData.length);
                if (!frame) {
                    yy_png_info_release(info);
                    return nil;
                }
                
                yy_png_chunk_fcTL chunk_fcTL = {0};
                chunk_fcTL.sequence_number = apngSequenceIndex;
                chunk_fcTL.width = frame->header.width;
                chunk_fcTL.height = frame->header.height;
                // fix: used _durations[0] for every frame; each frame has its own delay
                yy_png_delay_to_fraction([(NSNumber *)_durations[i] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
                chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
                chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
                
                uint8_t fcTL[38] = {0};
                *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
                *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
                yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
                *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
                [result appendBytes:fcTL length:38];
                
                apngSequenceIndex++;
                
                // Re-wrap this frame's IDAT chunks as fdAT (sequence number prepended).
                for (uint32_t d = 0; d < frame->chunk_num; d++) {
                    yy_png_chunk_info *dchunk = frame->chunks + d;
                    if (dchunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
                        uint32_t length = yy_swap_endian_uint32(dchunk->length + 4);
                        [result appendBytes:&length length:4]; //length
                        uint32_t fourcc = YY_FOUR_CC('f', 'd', 'A', 'T');
                        [result appendBytes:&fourcc length:4]; //fourcc
                        uint32_t sq = yy_swap_endian_uint32(apngSequenceIndex);
                        [result appendBytes:&sq length:4]; //data (sq)
                        [result appendBytes:(((uint8_t *)frameData.bytes) + dchunk->offset + 8) length:dchunk->length]; //data
                        // CRC covers fourcc + sequence number + data (length + 8 bytes).
                        uint8_t *bytes = ((uint8_t *)result.bytes) + result.length - dchunk->length - 8;
                        uint32_t crc = yy_swap_endian_uint32((uint32_t)crc32(0, bytes, dchunk->length + 8));
                        [result appendBytes:&crc length:4]; //crc
                        
                        apngSequenceIndex++;
                    }
                }
                yy_png_info_release(frame);
            }
        }
        
        // Copy the first frame's chunk through (length + fourcc + data + crc = 12 extra bytes).
        [result appendBytes:((uint8_t *)firstFrameData.bytes) + chunk->offset length:chunk->length + 12];
    }
    yy_png_info_release(info);
    return result;
}
+
/// Encodes (possibly animated) WebP with libwebp.
/// Each frame is first encoded as a standalone WebP bitstream; a single frame
/// is returned as-is, multiple frames are assembled into an animation with the
/// WebP mux API. Returns nil when compiled without WebP support.
- (NSData *)_encodeWebP {
#if YYIMAGE_WEBP_ENABLED
    // encode webp
    NSMutableArray *webpDatas = [NSMutableArray new];
    for (NSUInteger i = 0; i < _images.count; i++) {
        CGImageRef image = [self _newCGImageFromIndex:i decoded:NO];
        if (!image) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedWebPData(image, _lossless, _quality, 4, YYImagePresetDefault);
        CFRelease(image);
        if (!frameData) return nil;
        [webpDatas addObject:(__bridge id)frameData];
        CFRelease(frameData);
    }
    if (webpDatas.count == 1) {
        // Single frame: the bare bitstream is already a complete WebP file.
        return webpDatas.firstObject;
    } else {
        // multi-frame webp
        WebPMux *mux = WebPMuxNew();
        if (!mux) return nil;
        for (NSUInteger i = 0; i < _images.count; i++) {
            NSData *data = webpDatas[i];
            NSNumber *duration = _durations[i];
            WebPMuxFrameInfo frame = {0};
            frame.bitstream.bytes = data.bytes;
            frame.bitstream.size = data.length;
            frame.duration = (int)(duration.floatValue * 1000.0); // seconds -> milliseconds
            frame.id = WEBP_CHUNK_ANMF;
            frame.dispose_method = WEBP_MUX_DISPOSE_BACKGROUND;
            frame.blend_method = WEBP_MUX_NO_BLEND;
            // copy_data = 0: mux references our bytes; webpDatas outlives assembly.
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        
        // {background color (BGRA), loop count}
        WebPMuxAnimParams params = {(uint32_t)0, (int)_loopCount};
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
        
        WebPData output_data;
        WebPMuxError error = WebPMuxAssemble(mux, &output_data);
        WebPMuxDelete(mux);
        if (error != WEBP_MUX_OK) {
            return nil;
        }
        // Copy out of libwebp-owned memory before releasing it.
        NSData *result = [NSData dataWithBytes:output_data.bytes length:output_data.size];
        WebPDataClear(&output_data);
        return result.length ? result : nil;
    }
#else
    return nil;
#endif
}
+/// Encodes all added frames and returns the resulting data, or nil on failure.
+/// Prefers the ImageIO path when it supports the target type; otherwise falls
+/// back to the custom APNG/WebP encoders.
+- (NSData *)encode {
+    if (_images.count == 0) return nil;
+    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO];
+    switch (_type) {
+        case YYImageTypePNG:  return [self _encodeAPNG];
+        case YYImageTypeWebP: return [self _encodeWebP];
+        default:              return nil;
+    }
+}
+
+/// Encodes all added frames and writes the result to `path` atomically.
+/// Returns YES on success; NO when there is nothing to encode, the path is
+/// empty, encoding fails, or the write fails.
+- (BOOL)encodeToFile:(NSString *)path {
+    if (path.length == 0 || _images.count == 0) return NO;
+    // ImageIO can stream directly to a file URL when it supports the type.
+    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO:path];
+    NSData *encoded = [self encode];
+    return encoded ? [encoded writeToFile:path atomically:YES] : NO;
+}
+
+/// Convenience: encodes a single still image with the given type and quality.
+/// Returns the encoded data, or nil on failure.
++ (NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality {
+    // Build a one-shot encoder holding a single zero-duration frame.
+    YYImageEncoder *oneShot = [[YYImageEncoder alloc] initWithType:type];
+    oneShot.quality = quality;
+    [oneShot addImage:image duration:0];
+    return [oneShot encode];
+}
+
+/// Re-encodes every frame of `decoder` into a new image of the given type.
+/// Returns the encoded data, or nil when the decoder is nil/empty or
+/// encoding fails.
++ (NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality {
+    // Bug fix: the original guard `!decoder.frameCount == 0` parsed as
+    // `(!frameCount) == 0`, i.e. it returned nil for every decoder that
+    // actually HAD frames and let empty decoders through.
+    if (!decoder || decoder.frameCount == 0) return nil;
+    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type];
+    encoder.quality = quality;
+    for (NSUInteger i = 0; i < decoder.frameCount; i++) {
+        // Feed each decoded frame to the encoder as lossless PNG bytes,
+        // preserving the per-frame duration.
+        UIImage *frame = [decoder frameAtIndex:i decodeForDisplay:YES].image;
+        [encoder addImageWithData:UIImagePNGRepresentation(frame) duration:[decoder frameDurationAtIndex:i]];
+    }
+    // `encode` is a method, not a property — message it explicitly.
+    return [encoder encode];
+}
+
+@end
+
+
+@implementation UIImage (YYImageCoder)
+
+/// Returns a copy of this image whose bitmap has been pre-decoded
+/// (decompressed) for display, or self when decoding is unnecessary or fails.
+- (instancetype)yy_imageByDecoded {
+    if (self.yy_isDecodedForDisplay) return self;
+    CGImageRef imageRef = self.CGImage;
+    if (!imageRef) return self;
+    CGImageRef newImageRef = YYCGImageCreateDecodedCopy(imageRef, YES);
+    if (!newImageRef) return self;
+    UIImage *newImage = [[self.class alloc] initWithCGImage:newImageRef scale:self.scale orientation:self.imageOrientation];
+    CGImageRelease(newImageRef);
+    if (!newImage) newImage = self; // decode failed, return self.
+    newImage.yy_isDecodedForDisplay = YES;
+    return newImage;
+}
+
+/// Whether this image has already been decoded for display. Multi-frame
+/// (animated) images always report YES; otherwise the flag is read from an
+/// associated object set by the setter below.
+- (BOOL)yy_isDecodedForDisplay {
+    if (self.images.count > 1) return YES;
+    NSNumber *num = objc_getAssociatedObject(self, @selector(yy_isDecodedForDisplay));
+    return [num boolValue];
+}
+
+- (void)setYy_isDecodedForDisplay:(BOOL)isDecodedForDisplay {
+    // Categories cannot add ivars to UIImage; store the flag as an associated
+    // object keyed on the getter's selector.
+    objc_setAssociatedObject(self, @selector(yy_isDecodedForDisplay), @(isDecodedForDisplay), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+}
+
+/// Saves this image to the user's photo album on a background queue.
+/// The completion block, if provided, is always invoked on the main thread.
+/// NOTE(review): ALAssetsLibrary is deprecated in favor of the Photos
+/// framework on newer iOS versions — consider migrating.
+- (void)yy_saveToAlbumWithCompletionBlock:(void(^)(NSURL *assetURL, NSError *error))completionBlock {
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
+        NSData *data = [self _yy_dataRepresentationForSystem:YES];
+        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
+        [library writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:^(NSURL *assetURL, NSError *error){
+            if (!completionBlock) return;
+            // Deliver on the main queue; call directly if already on the
+            // main thread to avoid an unnecessary dispatch hop.
+            if (pthread_main_np()) {
+                completionBlock(assetURL, error);
+            } else {
+                dispatch_async(dispatch_get_main_queue(), ^{
+                    completionBlock(assetURL, error);
+                });
+            }
+        }];
+    });
+}
+
+/// Returns an encoded data representation suitable for YYImage consumers
+/// (animated container data is passed through unchanged when present).
+- (NSData *)yy_imageDataRepresentation {
+    return [self _yy_dataRepresentationForSystem:NO];
+}
+
+/// @param forSystem YES: used for system album (PNG/JPEG/GIF), NO: used for YYImage (PNG/JPEG/GIF/WebP)
+- (NSData *)_yy_dataRepresentationForSystem:(BOOL)forSystem {
+    NSData *data = nil;
+    // 1. Animated YYImage: reuse the original container bytes when the
+    //    destination accepts that format.
+    if ([self isKindOfClass:[YYImage class]]) {
+        YYImage *image = (id)self;
+        if (image.animatedImageData) {
+            if (forSystem) { // system only support GIF and PNG
+                if (image.animatedImageType == YYImageTypeGIF ||
+                    image.animatedImageType == YYImageTypePNG) {
+                    data = image.animatedImageData;
+                }
+            } else {
+                data = image.animatedImageData;
+            }
+        }
+    }
+    // 2. Still image: re-encode the bitmap, baking the orientation into the
+    //    pixels first.
+    if (!data) {
+        CGImageRef imageRef = self.CGImage ? (CGImageRef)CFRetain(self.CGImage) : nil;
+        if (imageRef) {
+            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
+            CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
+            BOOL hasAlpha = NO;
+            if (alphaInfo == kCGImageAlphaPremultipliedLast ||
+                alphaInfo == kCGImageAlphaPremultipliedFirst ||
+                alphaInfo == kCGImageAlphaLast ||
+                alphaInfo == kCGImageAlphaFirst) {
+                hasAlpha = YES;
+            }
+            // Physically rotate the pixels so the output displays upright
+            // even without orientation metadata.
+            if (self.imageOrientation != UIImageOrientationUp) {
+                CGImageRef rotated = YYCGImageCreateCopyWithOrientation(imageRef, self.imageOrientation, bitmapInfo | alphaInfo);
+                if (rotated) {
+                    CFRelease(imageRef);
+                    imageRef = rotated;
+                }
+            }
+            @autoreleasepool {
+                UIImage *newImage = [UIImage imageWithCGImage:imageRef];
+                if (newImage) {
+                    // PNG preserves the alpha channel; JPEG is smaller when opaque.
+                    if (hasAlpha) {
+                        data = UIImagePNGRepresentation([UIImage imageWithCGImage:imageRef]);
+                    } else {
+                        data = UIImageJPEGRepresentation([UIImage imageWithCGImage:imageRef], 0.9); // same as Apple's example
+                    }
+                }
+            }
+            CFRelease(imageRef);
+        }
+    }
+    // 3. Last resort: let UIKit serialize whatever it can as PNG.
+    if (!data) {
+        data = UIImagePNGRepresentation(self);
+    }
+    return data;
+}
+
+@end

+ 98 - 0
YYImage/YYSpriteSheetImage.h

@@ -0,0 +1,98 @@
+//
+//  YYSpriteSheetImage.h
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 15/4/21.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import <UIKit/UIKit.h>
+
+#if __has_include(<YYImage/YYImage.h>)
+#import <YYImage/YYAnimatedImageView.h>
+#else
+#import "YYAnimatedImageView.h"
+#endif
+
+/**
+ An image to display sprite sheet animation.
+ 
+ @discussion It is a fully compatible `UIImage` subclass.
+ The animation can be played by YYAnimatedImageView.
+ 
+ Sample Code:
+  
+    // 8 * 12 sprites in a single sheet image
+    UIImage *img = [UIImage imageNamed:@"sprite-sheet"];
+    NSMutableArray *contentRects = [NSMutableArray new];
+    NSMutableArray *durations = [NSMutableArray new];
+    for (int j = 0; j < 12; j++) {
+        for (int i = 0; i < 8; i++) {
+            CGRect rect;
+            rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
+            rect.origin.x = img.size.width / 8 * i;
+            rect.origin.y = img.size.height / 12 * j;
+            [contentRects addObject:[NSValue valueWithCGRect:rect]];
+            [durations addObject:@(1 / 60.0)];
+        }
+    }
+    YYSpriteSheetImage *sprite;
+    sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img
+                                                     contentRects:contentRects
+                                                   frameDurations:durations
+                                                        loopCount:0];
+    YYAnimatedImageView *imgView = [YYAnimatedImageView new];
+    imgView.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
+    imgView.image = sprite;
+ 
+ 
+ 
+ @discussion It can also be used to display single frame in sprite sheet image.
+ Sample Code:
+ 
+    YYSpriteSheetImage *sheet = ...;
+    UIImageView *imageView = ...;
+    imageView.image = sheet;
+    imageView.layer.contentsRect = [sheet contentsRectForCALayerAtIndex:6];
+ 
+ */
+@interface YYSpriteSheetImage : UIImage <YYAnimatedImage>
+
+/**
+ Creates and returns an image object.
+ 
+ @param image          The sprite sheet image (contains all frames).
+ 
+ @param contentRects   The sprite sheet image frame rects in the image coordinates.
+     The rectangle should not be outside the image's bounds. The objects in this
+     array should be created with [NSValue valueWithCGRect:].
+ 
+ @param frameDurations The sprite sheet image frame's durations in seconds. 
+     The objects in this array should be NSNumber.
+ 
+ @param loopCount      Animation loop count, 0 means infinite looping.
+ 
+ @return An image object, or nil if an error occurs.
+ */
+- (instancetype)initWithSpriteSheetImage:(UIImage *)image
+                            contentRects:(NSArray *)contentRects
+                          frameDurations:(NSArray *)frameDurations
+                               loopCount:(NSUInteger)loopCount;
+
+/// Frame rects in image coordinates (NSValue-wrapped CGRect), as passed at init.
+@property (nonatomic, readonly) NSArray *contentRects;
+/// Per-frame durations in seconds (NSNumber), as passed at init.
+@property (nonatomic, readonly) NSArray *frameDurations;
+/// Animation loop count; 0 means infinite looping.
+@property (nonatomic, readonly) NSUInteger loopCount;
+
+/**
+ Get the contents rect for CALayer.
+ See "contentsRect" property in CALayer for more information.
+ 
+ @param index Index of frame.
+ @return Contents Rect.
+ */
+- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index;
+
+@end

+ 80 - 0
YYImage/YYSpriteSheetImage.m

@@ -0,0 +1,80 @@
+//
+//  YYSpriteSheetImage.m
+//  YYImage <https://github.com/ibireme/YYImage>
+//
+//  Created by ibireme on 15/4/21.
+//  Copyright (c) 2015 ibireme.
+//
+//  This source code is licensed under the MIT-style license found in the
+//  LICENSE file in the root directory of this source tree.
+//
+
+#import "YYSpriteSheetImage.h"
+
+@implementation YYSpriteSheetImage
+
+/// Designated initializer: wraps a sheet image plus matching frame rect and
+/// duration arrays. Returns nil when the image has no bitmap or the arrays
+/// are empty or of different lengths.
+- (instancetype)initWithSpriteSheetImage:(UIImage *)image
+                            contentRects:(NSArray *)contentRects
+                          frameDurations:(NSArray *)frameDurations
+                               loopCount:(NSUInteger)loopCount {
+    BOOL countsMatch = (contentRects.count == frameDurations.count);
+    if (!image.CGImage || contentRects.count == 0 || !countsMatch) return nil;
+    
+    self = [super initWithCGImage:image.CGImage scale:image.scale orientation:image.imageOrientation];
+    if (!self) return nil;
+    
+    // Defensive copies: callers may hand us mutable arrays.
+    _contentRects = contentRects.copy;
+    _frameDurations = frameDurations.copy;
+    _loopCount = loopCount;
+    return self;
+}
+
+/// Converts a frame's rect (in image coordinates) into CALayer's unit
+/// `contentsRect` space. Out-of-range indices and degenerate results map to
+/// the full unit rect {0,0,1,1}.
+- (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index {
+    const CGRect unitRect = CGRectMake(0, 0, 1, 1);
+    if (index >= _contentRects.count) return unitRect;
+    
+    CGSize size = self.size;
+    if (size.width <= 0.01 || size.height <= 0.01) return unitRect;
+    
+    // Normalize the frame rect by the sheet's size, then clamp to the unit
+    // square; fall back to the whole sheet if the result is empty.
+    CGRect frame = [self animatedImageContentsRectAtIndex:index];
+    CGRect normalized = CGRectMake(frame.origin.x / size.width,
+                                   frame.origin.y / size.height,
+                                   frame.size.width / size.width,
+                                   frame.size.height / size.height);
+    normalized = CGRectIntersection(normalized, unitRect);
+    if (CGRectIsNull(normalized) || CGRectIsEmpty(normalized)) {
+        normalized = unitRect;
+    }
+    return normalized;
+}
+
+#pragma mark @protocol YYAnimatedImage
+
+- (NSUInteger)animatedImageFrameCount {
+    return _contentRects.count;
+}
+
+- (NSUInteger)animatedImageLoopCount {
+    return _loopCount;
+}
+
+- (NSUInteger)animatedImageBytesPerFrame {
+    // Frames share the sheet's backing store; no extra memory per frame.
+    return 0;
+}
+
+- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
+    // Every frame is the sheet itself; the view crops via the contents rect.
+    return self;
+}
+
+- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
+    NSNumber *duration = (index < _frameDurations.count) ? _frameDurations[index] : nil;
+    return duration.doubleValue; // messaging nil yields 0
+}
+
+- (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index {
+    if (index >= _contentRects.count) return CGRectZero;
+    return [(NSValue *)_contentRects[index] CGRectValue];
+}
+
+@end

Some files were not shown because too many files changed in this diff