YYImageCoder.m 111 KB

  1. //
  2. // YYImageCoder.m
  3. // YYKit <https://github.com/ibireme/YYKit>
  4. //
  5. // Created by ibireme on 15/5/13.
  6. // Copyright (c) 2015 ibireme.
  7. //
  8. // This source code is licensed under the MIT-style license found in the
  9. // LICENSE file in the root directory of this source tree.
  10. //
  11. #import "YYImageCoder.h"
  12. #import <CoreFoundation/CoreFoundation.h>
  13. #import <ImageIO/ImageIO.h>
  14. #import <Accelerate/Accelerate.h>
  15. #import <QuartzCore/QuartzCore.h>
  16. #import <MobileCoreServices/MobileCoreServices.h>
  17. #import <AssetsLibrary/AssetsLibrary.h>
  18. #import <objc/runtime.h>
  19. #import <pthread.h>
  20. #import <zlib.h>
  21. #import "YYImage.h"
  22. #import "YYKitMacro.h"
  23. #ifndef YYIMAGE_WEBP_ENABLED
  24. #if __has_include(<webp/decode.h>) && __has_include(<webp/encode.h>) && \
  25. __has_include(<webp/demux.h>) && __has_include(<webp/mux.h>)
  26. #define YYIMAGE_WEBP_ENABLED 1
  27. #import <webp/decode.h>
  28. #import <webp/encode.h>
  29. #import <webp/demux.h>
  30. #import <webp/mux.h>
  31. #elif __has_include("webp/decode.h") && __has_include("webp/encode.h") && \
  32. __has_include("webp/demux.h") && __has_include("webp/mux.h")
  33. #define YYIMAGE_WEBP_ENABLED 1
  34. #import "webp/decode.h"
  35. #import "webp/encode.h"
  36. #import "webp/demux.h"
  37. #import "webp/mux.h"
  38. #else
  39. #define YYIMAGE_WEBP_ENABLED 0
  40. #endif
  41. #endif
  42. ////////////////////////////////////////////////////////////////////////////////
  43. #pragma mark - Utility (for little endian platform)
  44. #define YY_FOUR_CC(c1,c2,c3,c4) ((uint32_t)(((c4) << 24) | ((c3) << 16) | ((c2) << 8) | (c1)))
  45. #define YY_TWO_CC(c1,c2) ((uint16_t)(((c2) << 8) | (c1)))
  46. static inline uint16_t yy_swap_endian_uint16(uint16_t value) {
  47. return
  48. (uint16_t) ((value & 0x00FF) << 8) |
  49. (uint16_t) ((value & 0xFF00) >> 8) ;
  50. }
  51. static inline uint32_t yy_swap_endian_uint32(uint32_t value) {
  52. return
  53. (uint32_t)((value & 0x000000FFU) << 24) |
  54. (uint32_t)((value & 0x0000FF00U) << 8) |
  55. (uint32_t)((value & 0x00FF0000U) >> 8) |
  56. (uint32_t)((value & 0xFF000000U) >> 24) ;
  57. }
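/*
 Usage sketch (little-endian host, matching how the parser below uses these helpers):
 YY_FOUR_CC packs four bytes so a raw chunk type read from memory can be compared directly:
     uint32_t fourcc = *((uint32_t *)(chunk_data + 4));
     if (fourcc == YY_FOUR_CC('I', 'H', 'D', 'R')) { ... }
 PNG stores chunk lengths big-endian, so a raw 32-bit read must be byte-swapped:
     uint32_t length = yy_swap_endian_uint32(*((uint32_t *)chunk_data));
 */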
  58. ////////////////////////////////////////////////////////////////////////////////
  59. #pragma mark - APNG
  60. /*
  61. PNG spec: http://www.libpng.org/pub/png/spec/1.2/PNG-Structure.html
  62. APNG spec: https://wiki.mozilla.org/APNG_Specification
  63. ===============================================================================
  64. PNG format:
  65. header (8): 89 50 4e 47 0d 0a 1a 0a
  66. chunk, chunk, chunk, ...
  67. ===============================================================================
  68. chunk format:
  69. length (4): uint32_t big endian
  70. fourcc (4): chunk type code
  71. data (length): data
  72. crc32 (4): uint32_t big endian crc32(fourcc + data)
  73. ===============================================================================
  74. PNG chunk define:
  75. IHDR (Image Header) required, must appear first, 13 bytes
  76. width (4) pixel count, should not be zero
  77. height (4) pixel count, should not be zero
  78. bit depth (1) expected: 1, 2, 4, 8, 16
  79. color type (1) 1<<0 (palette used), 1<<1 (color used), 1<<2 (alpha channel used)
  80. compression method (1) 0 (deflate/inflate)
  81. filter method (1) 0 (adaptive filtering with five basic filter types)
  82. interlace method (1) 0 (no interlace) or 1 (Adam7 interlace)
  83. IDAT (Image Data) required, must appear consecutively if there are multiple 'IDAT' chunks
  84. IEND (End) required, must appear last, 0 bytes
  85. ===============================================================================
  86. APNG chunk define:
  87. acTL (Animation Control) required, must appear before 'IDAT', 8 bytes
  88. num frames (4) number of frames
  89. num plays (4) number of times to loop, 0 indicates infinite looping
  90. fcTL (Frame Control) required, must appear before the 'IDAT' or 'fdAT' chunks of the frame to which it applies, 26 bytes
  91. sequence number (4) sequence number of the animation chunk, starting from 0
  92. width (4) width of the following frame
  93. height (4) height of the following frame
  94. x offset (4) x position at which to render the following frame
  95. y offset (4) y position at which to render the following frame
  96. delay num (2) frame delay fraction numerator
  97. delay den (2) frame delay fraction denominator
  98. dispose op (1) type of frame area disposal to be done after rendering this frame (0:none, 1:background 2:previous)
  99. blend op (1) type of frame area rendering for this frame (0:source, 1:over)
  100. fdAT (Frame Data) required
  101. sequence number (4) sequence number of the animation chunk
  102. frame data (x) frame data for this frame (same as 'IDAT')
  103. ===============================================================================
  104. `dispose_op` specifies how the output buffer should be changed at the end of the delay
  105. (before rendering the next frame).
  106. * NONE: no disposal is done on this frame before rendering the next; the contents
  107. of the output buffer are left as is.
  108. * BACKGROUND: the frame's region of the output buffer is to be cleared to fully
  109. transparent black before rendering the next frame.
  110. * PREVIOUS: the frame's region of the output buffer is to be reverted to the previous
  111. contents before rendering the next frame.
  112. `blend_op` specifies whether the frame is to be alpha blended into the current output buffer
  113. content, or whether it should completely replace its region in the output buffer.
  114. * SOURCE: all color components of the frame, including alpha, overwrite the current contents
  115. of the frame's output buffer region.
  116. * OVER: the frame should be composited onto the output buffer based on its alpha,
  117. using a simple OVER operation as described in the "Alpha Channel Processing" section
  118. of the PNG specification
  119. */
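/*
 Chunk layout of a minimal 2-frame APNG, per the spec summary above (illustrative sketch):
     IHDR                    image header
     acTL (num_frames = 2)   animation control, before the first IDAT
     fcTL (sequence = 0)     frame 0 control
     IDAT                    frame 0 data; this first frame doubles as the PNG "cover" image
     fcTL (sequence = 1)     frame 1 control
     fdAT (sequence = 2)     frame 1 data, an IDAT payload prefixed with a sequence number
     IEND
 A decoder that does not understand APNG ignores acTL/fcTL/fdAT and shows only the cover image.
 */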
  120. typedef enum {
  121. YY_PNG_ALPHA_TYPE_PALEETE = 1 << 0,
  122. YY_PNG_ALPHA_TYPE_COLOR = 1 << 1,
  123. YY_PNG_ALPHA_TYPE_ALPHA = 1 << 2,
  124. } yy_png_alpha_type;
  125. typedef enum {
  126. YY_PNG_DISPOSE_OP_NONE = 0,
  127. YY_PNG_DISPOSE_OP_BACKGROUND = 1,
  128. YY_PNG_DISPOSE_OP_PREVIOUS = 2,
  129. } yy_png_dispose_op;
  130. typedef enum {
  131. YY_PNG_BLEND_OP_SOURCE = 0,
  132. YY_PNG_BLEND_OP_OVER = 1,
  133. } yy_png_blend_op;
  134. typedef struct {
  135. uint32_t width; ///< pixel count, should not be zero
  136. uint32_t height; ///< pixel count, should not be zero
  137. uint8_t bit_depth; ///< expected: 1, 2, 4, 8, 16
  138. uint8_t color_type; ///< see yy_png_alpha_type
  139. uint8_t compression_method; ///< 0 (deflate/inflate)
  140. uint8_t filter_method; ///< 0 (adaptive filtering with five basic filter types)
  141. uint8_t interlace_method; ///< 0 (no interlace) or 1 (Adam7 interlace)
  142. } yy_png_chunk_IHDR;
  143. typedef struct {
  144. uint32_t sequence_number; ///< sequence number of the animation chunk, starting from 0
  145. uint32_t width; ///< width of the following frame
  146. uint32_t height; ///< height of the following frame
  147. uint32_t x_offset; ///< x position at which to render the following frame
  148. uint32_t y_offset; ///< y position at which to render the following frame
  149. uint16_t delay_num; ///< frame delay fraction numerator
  150. uint16_t delay_den; ///< frame delay fraction denominator
  151. uint8_t dispose_op; ///< see yy_png_dispose_op
  152. uint8_t blend_op; ///< see yy_png_blend_op
  153. } yy_png_chunk_fcTL;
  154. typedef struct {
  155. uint32_t offset; ///< chunk offset in PNG data
  156. uint32_t fourcc; ///< chunk fourcc
  157. uint32_t length; ///< chunk data length
  158. uint32_t crc32; ///< chunk crc32
  159. } yy_png_chunk_info;
  160. typedef struct {
  161. uint32_t chunk_index; ///< the first `fdAT`/`IDAT` chunk index
  162. uint32_t chunk_num; ///< the `fdAT`/`IDAT` chunk count
  163. uint32_t chunk_size; ///< the `fdAT`/`IDAT` chunk bytes
  164. yy_png_chunk_fcTL frame_control;
  165. } yy_png_frame_info;
  166. typedef struct {
  167. yy_png_chunk_IHDR header; ///< png header
  168. yy_png_chunk_info *chunks; ///< chunks
  169. uint32_t chunk_num; ///< count of chunks
  170. yy_png_frame_info *apng_frames; ///< frame info, NULL if not apng
  171. uint32_t apng_frame_num; ///< 0 if not apng
  172. uint32_t apng_loop_num; ///< 0 indicates infinite looping
  173. uint32_t *apng_shared_chunk_indexs; ///< shared chunk index
  174. uint32_t apng_shared_chunk_num; ///< shared chunk count
  175. uint32_t apng_shared_chunk_size; ///< shared chunk bytes
  176. uint32_t apng_shared_insert_index; ///< shared chunk insert index
  177. bool apng_first_frame_is_cover; ///< the first frame is same as png (cover)
  178. } yy_png_info;
  179. static void yy_png_chunk_IHDR_read(yy_png_chunk_IHDR *IHDR, const uint8_t *data) {
  180. IHDR->width = yy_swap_endian_uint32(*((uint32_t *)(data)));
  181. IHDR->height = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
  182. IHDR->bit_depth = data[8];
  183. IHDR->color_type = data[9];
  184. IHDR->compression_method = data[10];
  185. IHDR->filter_method = data[11];
  186. IHDR->interlace_method = data[12];
  187. }
  188. static void yy_png_chunk_IHDR_write(yy_png_chunk_IHDR *IHDR, uint8_t *data) {
  189. *((uint32_t *)(data)) = yy_swap_endian_uint32(IHDR->width);
  190. *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(IHDR->height);
  191. data[8] = IHDR->bit_depth;
  192. data[9] = IHDR->color_type;
  193. data[10] = IHDR->compression_method;
  194. data[11] = IHDR->filter_method;
  195. data[12] = IHDR->interlace_method;
  196. }
  197. static void yy_png_chunk_fcTL_read(yy_png_chunk_fcTL *fcTL, const uint8_t *data) {
  198. fcTL->sequence_number = yy_swap_endian_uint32(*((uint32_t *)(data)));
  199. fcTL->width = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
  200. fcTL->height = yy_swap_endian_uint32(*((uint32_t *)(data + 8)));
  201. fcTL->x_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 12)));
  202. fcTL->y_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 16)));
  203. fcTL->delay_num = yy_swap_endian_uint16(*((uint16_t *)(data + 20)));
  204. fcTL->delay_den = yy_swap_endian_uint16(*((uint16_t *)(data + 22)));
  205. fcTL->dispose_op = data[24];
  206. fcTL->blend_op = data[25];
  207. }
  208. static void yy_png_chunk_fcTL_write(yy_png_chunk_fcTL *fcTL, uint8_t *data) {
  209. *((uint32_t *)(data)) = yy_swap_endian_uint32(fcTL->sequence_number);
  210. *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(fcTL->width);
  211. *((uint32_t *)(data + 8)) = yy_swap_endian_uint32(fcTL->height);
  212. *((uint32_t *)(data + 12)) = yy_swap_endian_uint32(fcTL->x_offset);
  213. *((uint32_t *)(data + 16)) = yy_swap_endian_uint32(fcTL->y_offset);
  214. *((uint16_t *)(data + 20)) = yy_swap_endian_uint16(fcTL->delay_num);
  215. *((uint16_t *)(data + 22)) = yy_swap_endian_uint16(fcTL->delay_den);
  216. data[24] = fcTL->dispose_op;
  217. data[25] = fcTL->blend_op;
  218. }
  219. // convert double value to fraction
  220. static void yy_png_delay_to_fraction(double duration, uint16_t *num, uint16_t *den) {
  221. if (duration >= 0xFF) {
  222. *num = 0xFF;
  223. *den = 1;
  224. } else if (duration <= 1.0 / (double)0xFF) {
  225. *num = 1;
  226. *den = 0xFF;
  227. } else {
  228. // Use continued fraction to calculate the num and den.
  229. long MAX = 10;
  230. double eps = (0.5 / (double)0xFF);
  231. long p[MAX], q[MAX], a[MAX], i, numl = 0, denl = 0;
  232. // The first two convergents are 0/1 and 1/0
  233. p[0] = 0; q[0] = 1;
  234. p[1] = 1; q[1] = 0;
  235. // The rest of the convergents (and continued fraction)
  236. for (i = 2; i < MAX; i++) {
  237. a[i] = lrint(floor(duration));
  238. p[i] = a[i] * p[i - 1] + p[i - 2];
  239. q[i] = a[i] * q[i - 1] + q[i - 2];
  240. if (p[i] <= 0xFF && q[i] <= 0xFF) { // uint16_t
  241. numl = p[i];
  242. denl = q[i];
  243. } else break;
  244. if (fabs(duration - a[i]) < eps) break;
  245. duration = 1.0 / (duration - a[i]);
  246. }
  247. if (numl != 0 && denl != 0) {
  248. *num = numl;
  249. *den = denl;
  250. } else {
  251. *num = 1;
  252. *den = 100;
  253. }
  254. }
  255. }
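/*
 Example (values traced by hand through the function above):
     uint16_t num, den;
     yy_png_delay_to_fraction(0.1, &num, &den);   // num == 1,   den == 10
     yy_png_delay_to_fraction(300.0, &num, &den); // num == 255, den == 1 (durations >= 255s are clamped)
 */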
  256. // convert fraction to double value
  257. static double yy_png_delay_to_seconds(uint16_t num, uint16_t den) {
  258. if (den == 0) {
  259. return num / 100.0;
  260. } else {
  261. return (double)num / (double)den;
  262. }
  263. }
  264. static bool yy_png_validate_animation_chunk_order(yy_png_chunk_info *chunks, /* input */
  265. uint32_t chunk_num, /* input */
  266. uint32_t *first_idat_index, /* output */
  267. bool *first_frame_is_cover /* output */) {
  268. /*
  269. A PNG contains at least 3 chunks: IHDR, IDAT, IEND.
  270. `IHDR` must appear first.
  271. `IDAT` chunks must appear consecutively.
  272. `IEND` must appear last.
  273. An APNG must contain one `acTL` and at least one `fcTL` and `fdAT`.
  274. `fdAT` must appear consecutively.
  275. `fcTL` must appear before `IDAT` or `fdAT`.
  276. */
  277. if (chunk_num <= 2) return false;
  278. if (chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R')) return false;
  279. if ((chunks + chunk_num - 1)->fourcc != YY_FOUR_CC('I', 'E', 'N', 'D')) return false;
  280. uint32_t prev_fourcc = 0;
  281. uint32_t IHDR_num = 0;
  282. uint32_t IDAT_num = 0;
  283. uint32_t acTL_num = 0;
  284. uint32_t fcTL_num = 0;
  285. uint32_t first_IDAT = 0;
  286. bool first_frame_cover = false;
  287. for (uint32_t i = 0; i < chunk_num; i++) {
  288. yy_png_chunk_info *chunk = chunks + i;
  289. switch (chunk->fourcc) {
  290. case YY_FOUR_CC('I', 'H', 'D', 'R'): { // png header
  291. if (i != 0) return false;
  292. if (IHDR_num > 0) return false;
  293. IHDR_num++;
  294. } break;
  295. case YY_FOUR_CC('I', 'D', 'A', 'T'): { // png data
  296. if (prev_fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  297. if (IDAT_num == 0)
  298. first_IDAT = i;
  299. else
  300. return false;
  301. }
  302. IDAT_num++;
  303. } break;
  304. case YY_FOUR_CC('a', 'c', 'T', 'L'): { // apng control
  305. if (acTL_num > 0) return false;
  306. acTL_num++;
  307. } break;
  308. case YY_FOUR_CC('f', 'c', 'T', 'L'): { // apng frame control
  309. if (i + 1 == chunk_num) return false;
  310. if ((chunk + 1)->fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') &&
  311. (chunk + 1)->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  312. return false;
  313. }
  314. if (fcTL_num == 0) {
  315. if ((chunk + 1)->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  316. first_frame_cover = true;
  317. }
  318. }
  319. fcTL_num++;
  320. } break;
  321. case YY_FOUR_CC('f', 'd', 'A', 'T'): { // apng data
  322. if (prev_fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') && prev_fourcc != YY_FOUR_CC('f', 'c', 'T', 'L')) {
  323. return false;
  324. }
  325. } break;
  326. }
  327. prev_fourcc = chunk->fourcc;
  328. }
  329. if (IHDR_num != 1) return false;
  330. if (IDAT_num == 0) return false;
  331. if (acTL_num != 1) return false;
  332. if (fcTL_num < acTL_num) return false;
  333. *first_idat_index = first_IDAT;
  334. *first_frame_is_cover = first_frame_cover;
  335. return true;
  336. }
  337. static void yy_png_info_release(yy_png_info *info) {
  338. if (info) {
  339. if (info->chunks) free(info->chunks);
  340. if (info->apng_frames) free(info->apng_frames);
  341. if (info->apng_shared_chunk_indexs) free(info->apng_shared_chunk_indexs);
  342. free(info);
  343. }
  344. }
  345. /**
  346. Create a png info from a png/apng file. See the yy_png_info struct for more information.
  347. @param data png/apng file data.
  348. @param length the data's length in bytes.
  349. @return A png info object; call yy_png_info_release() to release it.
  350. Returns NULL if an error occurs.
  351. */
  352. static yy_png_info *yy_png_info_create(const uint8_t *data, uint32_t length) {
  353. if (length < 32) return NULL;
  354. if (*((uint32_t *)data) != YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47)) return NULL;
  355. if (*((uint32_t *)(data + 4)) != YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A)) return NULL;
  356. uint32_t chunk_realloc_num = 16;
  357. yy_png_chunk_info *chunks = malloc(sizeof(yy_png_chunk_info) * chunk_realloc_num);
  358. if (!chunks) return NULL;
  359. // parse png chunks
  360. uint32_t offset = 8;
  361. uint32_t chunk_num = 0;
  362. uint32_t chunk_capacity = chunk_realloc_num;
  363. uint32_t apng_loop_num = 0;
  364. int32_t apng_sequence_index = -1;
  365. int32_t apng_frame_index = 0;
  366. int32_t apng_frame_number = -1;
  367. bool apng_chunk_error = false;
  368. do {
  369. if (chunk_num >= chunk_capacity) {
  370. yy_png_chunk_info *new_chunks = realloc(chunks, sizeof(yy_png_chunk_info) * (chunk_capacity + chunk_realloc_num));
  371. if (!new_chunks) {
  372. free(chunks);
  373. return NULL;
  374. }
  375. chunks = new_chunks;
  376. chunk_capacity += chunk_realloc_num;
  377. }
  378. yy_png_chunk_info *chunk = chunks + chunk_num;
  379. const uint8_t *chunk_data = data + offset;
  380. chunk->offset = offset;
  381. chunk->length = yy_swap_endian_uint32(*((uint32_t *)chunk_data));
  382. if ((uint64_t)chunk->offset + (uint64_t)chunk->length + 12 > length) {
  383. free(chunks);
  384. return NULL;
  385. }
  386. chunk->fourcc = *((uint32_t *)(chunk_data + 4));
  387. if ((uint64_t)chunk->offset + 4 + chunk->length + 4 > (uint64_t)length) break;
  388. chunk->crc32 = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8 + chunk->length)));
  389. chunk_num++;
  390. offset += 12 + chunk->length;
  391. switch (chunk->fourcc) {
  392. case YY_FOUR_CC('a', 'c', 'T', 'L') : {
  393. if (chunk->length == 8) {
  394. apng_frame_number = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
  395. apng_loop_num = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 12)));
  396. } else {
  397. apng_chunk_error = true;
  398. }
  399. } break;
  400. case YY_FOUR_CC('f', 'c', 'T', 'L') :
  401. case YY_FOUR_CC('f', 'd', 'A', 'T') : {
  402. if (chunk->fourcc == YY_FOUR_CC('f', 'c', 'T', 'L')) {
  403. if (chunk->length != 26) {
  404. apng_chunk_error = true;
  405. } else {
  406. apng_frame_index++;
  407. }
  408. }
  409. if (chunk->length > 4) {
  410. uint32_t sequence = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
  411. if (apng_sequence_index + 1 == sequence) {
  412. apng_sequence_index++;
  413. } else {
  414. apng_chunk_error = true;
  415. }
  416. } else {
  417. apng_chunk_error = true;
  418. }
  419. } break;
  420. case YY_FOUR_CC('I', 'E', 'N', 'D') : {
  421. offset = length; // end, break do-while loop
  422. } break;
  423. }
  424. } while (offset + 12 <= length);
  425. if (chunk_num < 3 ||
  426. chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R') ||
  427. chunks->length != 13) {
  428. free(chunks);
  429. return NULL;
  430. }
  431. // png info
  432. yy_png_info *info = calloc(1, sizeof(yy_png_info));
  433. if (!info) {
  434. free(chunks);
  435. return NULL;
  436. }
  437. info->chunks = chunks;
  438. info->chunk_num = chunk_num;
  439. yy_png_chunk_IHDR_read(&info->header, data + chunks->offset + 8);
  440. // apng info
  441. if (!apng_chunk_error && apng_frame_number == apng_frame_index && apng_frame_number >= 1) {
  442. bool first_frame_is_cover = false;
  443. uint32_t first_IDAT_index = 0;
  444. if (!yy_png_validate_animation_chunk_order(info->chunks, info->chunk_num, &first_IDAT_index, &first_frame_is_cover)) {
  445. return info; // ignore apng chunk
  446. }
  447. info->apng_loop_num = apng_loop_num;
  448. info->apng_frame_num = apng_frame_number;
  449. info->apng_first_frame_is_cover = first_frame_is_cover;
  450. info->apng_shared_insert_index = first_IDAT_index;
  451. info->apng_frames = calloc(apng_frame_number, sizeof(yy_png_frame_info));
  452. if (!info->apng_frames) {
  453. yy_png_info_release(info);
  454. return NULL;
  455. }
  456. info->apng_shared_chunk_indexs = calloc(info->chunk_num, sizeof(uint32_t));
  457. if (!info->apng_shared_chunk_indexs) {
  458. yy_png_info_release(info);
  459. return NULL;
  460. }
  461. int32_t frame_index = -1;
  462. uint32_t *shared_chunk_index = info->apng_shared_chunk_indexs;
  463. for (int32_t i = 0; i < info->chunk_num; i++) {
  464. yy_png_chunk_info *chunk = info->chunks + i;
  465. switch (chunk->fourcc) {
  466. case YY_FOUR_CC('I', 'D', 'A', 'T'): {
  467. if (info->apng_shared_insert_index == 0) {
  468. info->apng_shared_insert_index = i;
  469. }
  470. if (first_frame_is_cover) {
  471. yy_png_frame_info *frame = info->apng_frames + frame_index;
  472. frame->chunk_num++;
  473. frame->chunk_size += chunk->length + 12;
  474. }
  475. } break;
  476. case YY_FOUR_CC('a', 'c', 'T', 'L'): {
  477. } break;
  478. case YY_FOUR_CC('f', 'c', 'T', 'L'): {
  479. frame_index++;
  480. yy_png_frame_info *frame = info->apng_frames + frame_index;
  481. frame->chunk_index = i + 1;
  482. yy_png_chunk_fcTL_read(&frame->frame_control, data + chunk->offset + 8);
  483. } break;
  484. case YY_FOUR_CC('f', 'd', 'A', 'T'): {
  485. yy_png_frame_info *frame = info->apng_frames + frame_index;
  486. frame->chunk_num++;
  487. frame->chunk_size += chunk->length + 12;
  488. } break;
  489. default: {
  490. *shared_chunk_index = i;
  491. shared_chunk_index++;
  492. info->apng_shared_chunk_size += chunk->length + 12;
  493. info->apng_shared_chunk_num++;
  494. } break;
  495. }
  496. }
  497. }
  498. return info;
  499. }
  500. /**
  501. Copy a single frame's data from an apng file.
  502. @param data apng file data
  503. @param info png info
  504. @param index frame index (zero-based)
  505. @param size output, the size of the frame data
  506. @return The frame data (a standalone single-frame png file); call free() to release it.
  507. Returns NULL if an error occurs.
  508. */
  509. static uint8_t *yy_png_copy_frame_data_at_index(const uint8_t *data,
  510. const yy_png_info *info,
  511. const uint32_t index,
  512. uint32_t *size) {
  513. if (index >= info->apng_frame_num) return NULL;
  514. yy_png_frame_info *frame_info = info->apng_frames + index;
  515. uint32_t frame_remux_size = 8 /* PNG Header */ + info->apng_shared_chunk_size + frame_info->chunk_size;
  516. if (!(info->apng_first_frame_is_cover && index == 0)) {
  517. frame_remux_size -= frame_info->chunk_num * 4; // remove fdAT sequence number
  518. }
  519. uint8_t *frame_data = malloc(frame_remux_size);
  520. if (!frame_data) return NULL;
  521. *size = frame_remux_size;
  522. uint32_t data_offset = 0;
  523. bool inserted = false;
  524. memcpy(frame_data, data, 8); // PNG File Header
  525. data_offset += 8;
  526. for (uint32_t i = 0; i < info->apng_shared_chunk_num; i++) {
  527. uint32_t shared_chunk_index = info->apng_shared_chunk_indexs[i];
  528. yy_png_chunk_info *shared_chunk_info = info->chunks + shared_chunk_index;
  529. if (shared_chunk_index >= info->apng_shared_insert_index && !inserted) { // replace IDAT with fdAT
  530. inserted = true;
  531. for (uint32_t c = 0; c < frame_info->chunk_num; c++) {
  532. yy_png_chunk_info *insert_chunk_info = info->chunks + frame_info->chunk_index + c;
  533. if (insert_chunk_info->fourcc == YY_FOUR_CC('f', 'd', 'A', 'T')) {
  534. *((uint32_t *)(frame_data + data_offset)) = yy_swap_endian_uint32(insert_chunk_info->length - 4);
  535. *((uint32_t *)(frame_data + data_offset + 4)) = YY_FOUR_CC('I', 'D', 'A', 'T');
  536. memcpy(frame_data + data_offset + 8, data + insert_chunk_info->offset + 12, insert_chunk_info->length - 4);
  537. uint32_t crc = (uint32_t)crc32(0, frame_data + data_offset + 4, insert_chunk_info->length);
  538. *((uint32_t *)(frame_data + data_offset + insert_chunk_info->length + 4)) = yy_swap_endian_uint32(crc);
  539. data_offset += insert_chunk_info->length + 8;
  540. } else { // IDAT
  541. memcpy(frame_data + data_offset, data + insert_chunk_info->offset, insert_chunk_info->length + 12);
  542. data_offset += insert_chunk_info->length + 12;
  543. }
  544. }
  545. }
  546. if (shared_chunk_info->fourcc == YY_FOUR_CC('I', 'H', 'D', 'R')) {
  547. uint8_t tmp[25] = {0};
  548. memcpy(tmp, data + shared_chunk_info->offset, 25);
  549. yy_png_chunk_IHDR IHDR = info->header;
  550. IHDR.width = frame_info->frame_control.width;
  551. IHDR.height = frame_info->frame_control.height;
  552. yy_png_chunk_IHDR_write(&IHDR, tmp + 8);
  553. *((uint32_t *)(tmp + 21)) = yy_swap_endian_uint32((uint32_t)crc32(0, tmp + 4, 17));
  554. memcpy(frame_data + data_offset, tmp, 25);
  555. data_offset += 25;
  556. } else {
  557. memcpy(frame_data + data_offset, data + shared_chunk_info->offset, shared_chunk_info->length + 12);
  558. data_offset += shared_chunk_info->length + 12;
  559. }
  560. }
  561. return frame_data;
  562. }
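/*
 Usage sketch for the two APNG helpers above (error handling omitted; `apngData` is an
 assumed NSData holding a complete APNG file):
     yy_png_info *info = yy_png_info_create((const uint8_t *)apngData.bytes, (uint32_t)apngData.length);
     if (info && info->apng_frame_num > 0) {
         uint32_t size = 0;
         uint8_t *frame = yy_png_copy_frame_data_at_index((const uint8_t *)apngData.bytes, info, 0, &size);
         if (frame) {
             // `frame` is now a standalone single-frame PNG file; it can be handed to ImageIO,
             // e.g. via CFDataCreate() + CGImageSourceCreateWithData().
             free(frame);
         }
     }
     yy_png_info_release(info);
 */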
  563. ////////////////////////////////////////////////////////////////////////////////
  564. #pragma mark - Helper
  565. /// Returns byte-aligned size.
  566. static inline size_t YYImageByteAlign(size_t size, size_t alignment) {
  567. return ((size + (alignment - 1)) / alignment) * alignment;
  568. }
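/*
 Example: YYImageByteAlign(100, 32) == 128 (a 100-byte row padded up to the next 32-byte
 boundary), which is how destination rowBytes values are sized before the vImage calls below.
 */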
  569. /// Convert degree to radians
  570. static inline CGFloat YYImageDegreesToRadians(CGFloat degrees) {
  571. return degrees * M_PI / 180;
  572. }
  573. CGColorSpaceRef YYCGColorSpaceGetDeviceRGB() {
  574. static CGColorSpaceRef space;
  575. static dispatch_once_t onceToken;
  576. dispatch_once(&onceToken, ^{
  577. space = CGColorSpaceCreateDeviceRGB();
  578. });
  579. return space;
  580. }
  581. CGColorSpaceRef YYCGColorSpaceGetDeviceGray() {
  582. static CGColorSpaceRef space;
  583. static dispatch_once_t onceToken;
  584. dispatch_once(&onceToken, ^{
  585. space = CGColorSpaceCreateDeviceGray();
  586. });
  587. return space;
  588. }
  589. BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space) {
  590. return space && CFEqual(space, YYCGColorSpaceGetDeviceRGB());
  591. }
  592. BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space) {
  593. return space && CFEqual(space, YYCGColorSpaceGetDeviceGray());
  594. }
  595. /**
  596. A callback used in CGDataProviderCreateWithData() to release data.
  597. Example:
  598. void *data = malloc(size);
  599. CGDataProviderRef provider = CGDataProviderCreateWithData(data, data, size, YYCGDataProviderReleaseDataCallback);
  600. */
  601. static void YYCGDataProviderReleaseDataCallback(void *info, const void *data, size_t size) {
  602. if (info) free(info);
  603. }
  604. /**
  605. Decode an image to a bitmap buffer with the specified format.
  606. @param srcImage Source image.
  607. @param dest Destination buffer. It should be zeroed before calling this method.
  608. If decoding succeeds, release dest->data with free().
  609. @param destFormat Destination bitmap format.
  610. @return Whether the decode succeeded.
  611. @warning This method supports iOS 7.0 and later. If called on iOS 6, it just returns NO.
  612. CG_AVAILABLE_STARTING(__MAC_10_9, __IPHONE_7_0)
  613. */
  614. static BOOL YYCGImageDecodeToBitmapBufferWithAnyFormat(CGImageRef srcImage, vImage_Buffer *dest, vImage_CGImageFormat *destFormat) {
  615. if (!srcImage || (((long)vImageConvert_AnyToAny) + 1 == 1) || !destFormat || !dest) return NO;
  616. size_t width = CGImageGetWidth(srcImage);
  617. size_t height = CGImageGetHeight(srcImage);
  618. if (width == 0 || height == 0) return NO;
  619. dest->data = NULL;
  620. vImage_Error error = kvImageNoError;
  621. CFDataRef srcData = NULL;
  622. vImageConverterRef convertor = NULL;
  623. vImage_CGImageFormat srcFormat = {0};
  624. srcFormat.bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(srcImage);
  625. srcFormat.bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(srcImage);
  626. srcFormat.colorSpace = CGImageGetColorSpace(srcImage);
  627. srcFormat.bitmapInfo = CGImageGetBitmapInfo(srcImage) | CGImageGetAlphaInfo(srcImage);
  628. convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, destFormat, NULL, kvImageNoFlags, NULL);
  629. if (!convertor) goto fail;
  630. CGDataProviderRef srcProvider = CGImageGetDataProvider(srcImage);
  631. srcData = srcProvider ? CGDataProviderCopyData(srcProvider) : NULL; // decode
  632. size_t srcLength = srcData ? CFDataGetLength(srcData) : 0;
  633. const void *srcBytes = srcData ? CFDataGetBytePtr(srcData) : NULL;
  634. if (srcLength == 0 || !srcBytes) goto fail;
  635. vImage_Buffer src = {0};
  636. src.data = (void *)srcBytes;
  637. src.width = width;
  638. src.height = height;
  639. src.rowBytes = CGImageGetBytesPerRow(srcImage);
  640. error = vImageBuffer_Init(dest, height, width, 32, kvImageNoFlags);
  641. if (error != kvImageNoError) goto fail;
  642. error = vImageConvert_AnyToAny(convertor, &src, dest, NULL, kvImageNoFlags); // convert
  643. if (error != kvImageNoError) goto fail;
  644. CFRelease(convertor);
  645. CFRelease(srcData);
  646. return YES;
  647. fail:
  648. if (convertor) CFRelease(convertor);
  649. if (srcData) CFRelease(srcData);
  650. if (dest->data) free(dest->data);
  651. dest->data = NULL;
  652. return NO;
  653. }
  654. /**
  655. Decode an image to a bitmap buffer in a 32-bit format (such as ARGB8888).
  656. @param srcImage Source image.
  657. @param dest Destination buffer. It should be zeroed before calling this method.
  658. If decoding succeeds, release dest->data with free().
  659. @param bitmapInfo Destination bitmap format.
  660. @return Whether the decode succeeded.
  661. */
  662. static BOOL YYCGImageDecodeToBitmapBufferWith32BitFormat(CGImageRef srcImage, vImage_Buffer *dest, CGBitmapInfo bitmapInfo) {
  663. if (!srcImage || !dest) return NO;
  664. size_t width = CGImageGetWidth(srcImage);
  665. size_t height = CGImageGetHeight(srcImage);
  666. if (width == 0 || height == 0) return NO;
  667. BOOL hasAlpha = NO;
  668. BOOL alphaFirst = NO;
  669. BOOL alphaPremultiplied = NO;
  670. BOOL byteOrderNormal = NO;
  671. switch (bitmapInfo & kCGBitmapAlphaInfoMask) {
  672. case kCGImageAlphaPremultipliedLast: {
  673. hasAlpha = YES;
  674. alphaPremultiplied = YES;
  675. } break;
  676. case kCGImageAlphaPremultipliedFirst: {
  677. hasAlpha = YES;
  678. alphaPremultiplied = YES;
  679. alphaFirst = YES;
  680. } break;
  681. case kCGImageAlphaLast: {
  682. hasAlpha = YES;
  683. } break;
  684. case kCGImageAlphaFirst: {
  685. hasAlpha = YES;
  686. alphaFirst = YES;
  687. } break;
  688. case kCGImageAlphaNoneSkipLast: {
  689. } break;
  690. case kCGImageAlphaNoneSkipFirst: {
  691. alphaFirst = YES;
  692. } break;
  693. default: {
  694. return NO;
  695. } break;
  696. }
  697. switch (bitmapInfo & kCGBitmapByteOrderMask) {
  698. case kCGBitmapByteOrderDefault: {
  699. byteOrderNormal = YES;
  700. } break;
  701. case kCGBitmapByteOrder32Little: {
  702. } break;
  703. case kCGBitmapByteOrder32Big: {
  704. byteOrderNormal = YES;
  705. } break;
  706. default: {
  707. return NO;
  708. } break;
  709. }
  710. /*
  711. Try to convert with vImageConvert_AnyToAny() (available since iOS 7.0).
  712. If that fails, decode with CGContextDrawImage() instead.
  713. CGBitmapContext uses a premultiplied alpha format; unpremultiplying may lose precision.
  714. */
  715. vImage_CGImageFormat destFormat = {0};
  716. destFormat.bitsPerComponent = 8;
  717. destFormat.bitsPerPixel = 32;
  718. destFormat.colorSpace = YYCGColorSpaceGetDeviceRGB();
  719. destFormat.bitmapInfo = bitmapInfo;
  720. dest->data = NULL;
  721. if (YYCGImageDecodeToBitmapBufferWithAnyFormat(srcImage, dest, &destFormat)) return YES;
  722. CGBitmapInfo contextBitmapInfo = bitmapInfo & kCGBitmapByteOrderMask;
  723. if (!hasAlpha || alphaPremultiplied) {
  724. contextBitmapInfo |= (bitmapInfo & kCGBitmapAlphaInfoMask);
  725. } else {
  726. contextBitmapInfo |= alphaFirst ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaPremultipliedLast;
  727. }
  728. CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), contextBitmapInfo);
  729. if (!context) goto fail;
  730. CGContextDrawImage(context, CGRectMake(0, 0, width, height), srcImage); // decode and convert
  731. size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
  732. size_t length = height * bytesPerRow;
  733. void *data = CGBitmapContextGetData(context);
  734. if (length == 0 || !data) goto fail;
  735. dest->data = malloc(length);
  736. dest->width = width;
  737. dest->height = height;
  738. dest->rowBytes = bytesPerRow;
  739. if (!dest->data) goto fail;
  740. if (hasAlpha && !alphaPremultiplied) {
  741. vImage_Buffer tmpSrc = {0};
  742. tmpSrc.data = data;
  743. tmpSrc.width = width;
  744. tmpSrc.height = height;
  745. tmpSrc.rowBytes = bytesPerRow;
  746. vImage_Error error;
  747. if (alphaFirst && byteOrderNormal) {
  748. error = vImageUnpremultiplyData_ARGB8888(&tmpSrc, dest, kvImageNoFlags);
  749. } else {
  750. error = vImageUnpremultiplyData_RGBA8888(&tmpSrc, dest, kvImageNoFlags);
  751. }
  752. if (error != kvImageNoError) goto fail;
  753. } else {
  754. memcpy(dest->data, data, length);
  755. }
  756. CFRelease(context);
  757. return YES;
  758. fail:
  759. if (context) CFRelease(context);
  760. if (dest->data) free(dest->data);
  761. dest->data = NULL;
  762. return NO;
  764. }
  765. CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) {
  766. if (!imageRef) return NULL;
  767. size_t width = CGImageGetWidth(imageRef);
  768. size_t height = CGImageGetHeight(imageRef);
  769. if (width == 0 || height == 0) return NULL;
  770. if (decodeForDisplay) { //decode with redraw (may lose some precision)
  771. CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
  772. BOOL hasAlpha = NO;
  773. if (alphaInfo == kCGImageAlphaPremultipliedLast ||
  774. alphaInfo == kCGImageAlphaPremultipliedFirst ||
  775. alphaInfo == kCGImageAlphaLast ||
  776. alphaInfo == kCGImageAlphaFirst) {
  777. hasAlpha = YES;
  778. }
  779. // BGRA8888 (premultiplied) or BGRX8888
  780. // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
  781. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
  782. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
  783. CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), bitmapInfo);
  784. if (!context) return NULL;
  785. CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
  786. CGImageRef newImage = CGBitmapContextCreateImage(context);
  787. CFRelease(context);
  788. return newImage;
  789. } else {
  790. CGColorSpaceRef space = CGImageGetColorSpace(imageRef);
  791. size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
  792. size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
  793. size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
  794. CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
  795. if (bytesPerRow == 0 || width == 0 || height == 0) return NULL;
  796. CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
  797. if (!dataProvider) return NULL;
  798. CFDataRef data = CGDataProviderCopyData(dataProvider); // decode
  799. if (!data) return NULL;
  800. CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data);
  801. CFRelease(data);
  802. if (!newProvider) return NULL;
  803. CGImageRef newImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, space, bitmapInfo, newProvider, NULL, false, kCGRenderingIntentDefault);
  804. CFRelease(newProvider);
  805. return newImage;
  806. }
  807. }
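/*
 Usage sketch (hypothetical caller; `image` is an assumed UIImage): pre-decode off the main
 thread so the first on-screen render does not pay the decompression cost.
     CGImageRef decoded = YYCGImageCreateDecodedCopy(image.CGImage, YES);
     if (decoded) {
         UIImage *decodedImage = [UIImage imageWithCGImage:decoded
                                                     scale:image.scale
                                               orientation:image.imageOrientation];
         CFRelease(decoded);
     }
 */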
  808. CGImageRef YYCGImageCreateAffineTransformCopy(CGImageRef imageRef, CGAffineTransform transform, CGSize destSize, CGBitmapInfo destBitmapInfo) {
  809. if (!imageRef) return NULL;
  810. size_t srcWidth = CGImageGetWidth(imageRef);
  811. size_t srcHeight = CGImageGetHeight(imageRef);
  812. size_t destWidth = round(destSize.width);
  813. size_t destHeight = round(destSize.height);
  814. if (srcWidth == 0 || srcHeight == 0 || destWidth == 0 || destHeight == 0) return NULL;
  815. CGDataProviderRef tmpProvider = NULL, destProvider = NULL;
  816. CGImageRef tmpImage = NULL, destImage = NULL;
  817. vImage_Buffer src = {0}, tmp = {0}, dest = {0};
  818. if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &src, kCGImageAlphaFirst | kCGBitmapByteOrderDefault)) return NULL;
  819. size_t destBytesPerRow = YYImageByteAlign(destWidth * 4, 32);
  820. tmp.data = malloc(destHeight * destBytesPerRow);
  821. if (!tmp.data) goto fail;
  822. tmp.width = destWidth;
  823. tmp.height = destHeight;
  824. tmp.rowBytes = destBytesPerRow;
  825. vImage_CGAffineTransform vTransform = *((vImage_CGAffineTransform *)&transform);
  826. uint8_t backColor[4] = {0};
  827. vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &tmp, NULL, &vTransform, backColor, kvImageBackgroundColorFill);
  828. if (error != kvImageNoError) goto fail;
  829. free(src.data);
  830. src.data = NULL;
  831. tmpProvider = CGDataProviderCreateWithData(tmp.data, tmp.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
  832. if (!tmpProvider) goto fail;
  833. tmp.data = NULL; // hold by provider
  834. tmpImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), kCGImageAlphaFirst | kCGBitmapByteOrderDefault, tmpProvider, NULL, false, kCGRenderingIntentDefault);
  835. if (!tmpImage) goto fail;
  836. CFRelease(tmpProvider);
  837. tmpProvider = NULL;
  838. if ((destBitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaFirst &&
  839. (destBitmapInfo & kCGBitmapByteOrderMask) != kCGBitmapByteOrder32Little) {
  840. return tmpImage;
  841. }
  842. if (!YYCGImageDecodeToBitmapBufferWith32BitFormat(tmpImage, &dest, destBitmapInfo)) goto fail;
  843. CFRelease(tmpImage);
  844. tmpImage = NULL;
  845. destProvider = CGDataProviderCreateWithData(dest.data, dest.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
  846. if (!destProvider) goto fail;
  847. dest.data = NULL; // hold by provider
  848. destImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), destBitmapInfo, destProvider, NULL, false, kCGRenderingIntentDefault);
  849. if (!destImage) goto fail;
  850. CFRelease(destProvider);
  851. destProvider = NULL;
  852. return destImage;
  853. fail:
  854. if (src.data) free(src.data);
  855. if (tmp.data) free(tmp.data);
  856. if (dest.data) free(dest.data);
  857. if (tmpProvider) CFRelease(tmpProvider);
  858. if (tmpImage) CFRelease(tmpImage);
  859. if (destProvider) CFRelease(destProvider);
  860. return NULL;
  861. }
  862. UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value) {
  863. switch (value) {
  864. case kCGImagePropertyOrientationUp: return UIImageOrientationUp;
  865. case kCGImagePropertyOrientationDown: return UIImageOrientationDown;
  866. case kCGImagePropertyOrientationLeft: return UIImageOrientationLeft;
  867. case kCGImagePropertyOrientationRight: return UIImageOrientationRight;
  868. case kCGImagePropertyOrientationUpMirrored: return UIImageOrientationUpMirrored;
  869. case kCGImagePropertyOrientationDownMirrored: return UIImageOrientationDownMirrored;
  870. case kCGImagePropertyOrientationLeftMirrored: return UIImageOrientationLeftMirrored;
  871. case kCGImagePropertyOrientationRightMirrored: return UIImageOrientationRightMirrored;
  872. default: return UIImageOrientationUp;
  873. }
  874. }
  875. NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation) {
  876. switch (orientation) {
  877. case UIImageOrientationUp: return kCGImagePropertyOrientationUp;
  878. case UIImageOrientationDown: return kCGImagePropertyOrientationDown;
  879. case UIImageOrientationLeft: return kCGImagePropertyOrientationLeft;
  880. case UIImageOrientationRight: return kCGImagePropertyOrientationRight;
  881. case UIImageOrientationUpMirrored: return kCGImagePropertyOrientationUpMirrored;
  882. case UIImageOrientationDownMirrored: return kCGImagePropertyOrientationDownMirrored;
  883. case UIImageOrientationLeftMirrored: return kCGImagePropertyOrientationLeftMirrored;
  884. case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
  885. default: return kCGImagePropertyOrientationUp;
  886. }
  887. }
  888. CGImageRef YYCGImageCreateCopyWithOrientation(CGImageRef imageRef, UIImageOrientation orientation, CGBitmapInfo destBitmapInfo) {
  889. if (!imageRef) return NULL;
  890. if (orientation == UIImageOrientationUp) return (CGImageRef)CFRetain(imageRef);
  891. size_t width = CGImageGetWidth(imageRef);
  892. size_t height = CGImageGetHeight(imageRef);
  893. CGAffineTransform transform = CGAffineTransformIdentity;
  894. BOOL swapWidthAndHeight = NO;
  895. switch (orientation) {
  896. case UIImageOrientationDown: {
  897. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(180));
  898. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
  899. } break;
  900. case UIImageOrientationLeft: {
  901. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
  902. transform = CGAffineTransformTranslate(transform, -(CGFloat)0, -(CGFloat)height);
  903. swapWidthAndHeight = YES;
  904. } break;
  905. case UIImageOrientationRight: {
  906. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
  907. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, (CGFloat)0);
  908. swapWidthAndHeight = YES;
  909. } break;
  910. case UIImageOrientationUpMirrored: {
  911. transform = CGAffineTransformTranslate(transform, (CGFloat)width, 0);
  912. transform = CGAffineTransformScale(transform, -1, 1);
  913. } break;
  914. case UIImageOrientationDownMirrored: {
  915. transform = CGAffineTransformTranslate(transform, 0, (CGFloat)height);
  916. transform = CGAffineTransformScale(transform, 1, -1);
  917. } break;
  918. case UIImageOrientationLeftMirrored: {
  919. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
  920. transform = CGAffineTransformScale(transform, 1, -1);
  921. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
  922. swapWidthAndHeight = YES;
  923. } break;
  924. case UIImageOrientationRightMirrored: {
  925. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
  926. transform = CGAffineTransformScale(transform, 1, -1);
  927. swapWidthAndHeight = YES;
  928. } break;
  929. default: break;
  930. }
  931. if (CGAffineTransformIsIdentity(transform)) return (CGImageRef)CFRetain(imageRef);
  932. CGSize destSize = {width, height};
  933. if (swapWidthAndHeight) {
  934. destSize.width = height;
  935. destSize.height = width;
  936. }
  937. return YYCGImageCreateAffineTransformCopy(imageRef, transform, destSize, destBitmapInfo);
  938. }
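/*
 Usage sketch: bake an EXIF orientation into the pixel data so the result can be treated as
 UIImageOrientationUp (`exifValue` and `imageRef` are assumed inputs):
     UIImageOrientation orientation = YYUIImageOrientationFromEXIFValue(exifValue);
     CGImageRef upright = YYCGImageCreateCopyWithOrientation(imageRef, orientation,
         kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
 */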
  939. YYImageType YYImageDetectType(CFDataRef data) {
  940. if (!data) return YYImageTypeUnknown;
  941. uint64_t length = CFDataGetLength(data);
  942. if (length < 16) return YYImageTypeUnknown;
  943. const char *bytes = (char *)CFDataGetBytePtr(data);
  944. uint32_t magic4 = *((uint32_t *)bytes);
  945. switch (magic4) {
  946. case YY_FOUR_CC(0x4D, 0x4D, 0x00, 0x2A): { // big endian TIFF
  947. return YYImageTypeTIFF;
  948. } break;
  949. case YY_FOUR_CC(0x49, 0x49, 0x2A, 0x00): { // little endian TIFF
  950. return YYImageTypeTIFF;
  951. } break;
  952. case YY_FOUR_CC(0x00, 0x00, 0x01, 0x00): { // ICO
  953. return YYImageTypeICO;
  954. } break;
  955. case YY_FOUR_CC(0x00, 0x00, 0x02, 0x00): { // CUR
  956. return YYImageTypeICO;
  957. } break;
  958. case YY_FOUR_CC('i', 'c', 'n', 's'): { // ICNS
  959. return YYImageTypeICNS;
  960. } break;
  961. case YY_FOUR_CC('G', 'I', 'F', '8'): { // GIF
  962. return YYImageTypeGIF;
  963. } break;
  964. case YY_FOUR_CC(0x89, 'P', 'N', 'G'): { // PNG
  965. uint32_t tmp = *((uint32_t *)(bytes + 4));
  966. if (tmp == YY_FOUR_CC('\r', '\n', 0x1A, '\n')) {
  967. return YYImageTypePNG;
  968. }
  969. } break;
  970. case YY_FOUR_CC('R', 'I', 'F', 'F'): { // WebP
  971. uint32_t tmp = *((uint32_t *)(bytes + 8));
  972. if (tmp == YY_FOUR_CC('W', 'E', 'B', 'P')) {
  973. return YYImageTypeWebP;
  974. }
  975. } break;
  976. /*
  977. case YY_FOUR_CC('B', 'P', 'G', 0xFB): { // BPG
  978. return YYImageTypeBPG;
  979. } break;
  980. */
  981. }
  982. uint16_t magic2 = *((uint16_t *)bytes);
  983. switch (magic2) {
  984. case YY_TWO_CC('B', 'A'):
  985. case YY_TWO_CC('B', 'M'):
  986. case YY_TWO_CC('I', 'C'):
  987. case YY_TWO_CC('P', 'I'):
  988. case YY_TWO_CC('C', 'I'):
  989. case YY_TWO_CC('C', 'P'): { // BMP
  990. return YYImageTypeBMP;
  991. }
  992. case YY_TWO_CC(0xFF, 0x4F): { // JPEG2000
  993. return YYImageTypeJPEG2000;
  994. }
  995. }
  996. // JPG FF D8 FF
  997. if (memcmp(bytes,"\377\330\377",3) == 0) return YYImageTypeJPEG;
  998. // JP2
  999. if (memcmp(bytes + 4, "\152\120\040\040\015", 5) == 0) return YYImageTypeJPEG2000;
  1000. return YYImageTypeUnknown;
  1001. }
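/*
 Example (sketch; `data` is an assumed NSData): sniff the container format from the magic
 bytes without decoding the image.
     YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
     if (type == YYImageTypeGIF) { ... }
 */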
  1002. CFStringRef YYImageTypeToUTType(YYImageType type) {
  1003. switch (type) {
  1004. case YYImageTypeJPEG: return kUTTypeJPEG;
  1005. case YYImageTypeJPEG2000: return kUTTypeJPEG2000;
  1006. case YYImageTypeTIFF: return kUTTypeTIFF;
  1007. case YYImageTypeBMP: return kUTTypeBMP;
  1008. case YYImageTypeICO: return kUTTypeICO;
  1009. case YYImageTypeICNS: return kUTTypeAppleICNS;
  1010. case YYImageTypeGIF: return kUTTypeGIF;
  1011. case YYImageTypePNG: return kUTTypePNG;
  1012. default: return NULL;
  1013. }
  1014. }
  1015. YYImageType YYImageTypeFromUTType(CFStringRef uti) {
  1016. static NSDictionary *dic;
  1017. static dispatch_once_t onceToken;
  1018. dispatch_once(&onceToken, ^{
  1019. dic = @{(id)kUTTypeJPEG : @(YYImageTypeJPEG),
  1020. (id)kUTTypeJPEG2000 : @(YYImageTypeJPEG2000),
  1021. (id)kUTTypeTIFF : @(YYImageTypeTIFF),
  1022. (id)kUTTypeBMP : @(YYImageTypeBMP),
  1023. (id)kUTTypeICO : @(YYImageTypeICO),
  1024. (id)kUTTypeAppleICNS : @(YYImageTypeICNS),
  1025. (id)kUTTypeGIF : @(YYImageTypeGIF),
  1026. (id)kUTTypePNG : @(YYImageTypePNG)};
  1027. });
  1028. if (!uti) return YYImageTypeUnknown;
  1029. NSNumber *num = dic[(__bridge __strong id)(uti)];
  1030. return num.unsignedIntegerValue;
  1031. }
  1032. NSString *YYImageTypeGetExtension(YYImageType type) {
  1033. switch (type) {
  1034. case YYImageTypeJPEG: return @"jpg";
  1035. case YYImageTypeJPEG2000: return @"jp2";
  1036. case YYImageTypeTIFF: return @"tiff";
  1037. case YYImageTypeBMP: return @"bmp";
  1038. case YYImageTypeICO: return @"ico";
  1039. case YYImageTypeICNS: return @"icns";
  1040. case YYImageTypeGIF: return @"gif";
  1041. case YYImageTypePNG: return @"png";
  1042. case YYImageTypeWebP: return @"webp";
  1043. default: return nil;
  1044. }
  1045. }
  1046. CFDataRef YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality) {
  1047. if (!imageRef) return nil;
  1048. quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  1049. if (type == YYImageTypeWebP) {
  1050. #if YYIMAGE_WEBP_ENABLED
  1051. if (quality == 1) {
  1052. return YYCGImageCreateEncodedWebPData(imageRef, YES, quality, 4, YYImagePresetDefault);
  1053. } else {
  1054. return YYCGImageCreateEncodedWebPData(imageRef, NO, quality, 4, YYImagePresetDefault);
  1055. }
  1056. #else
  1057. return NULL;
  1058. #endif
  1059. }
  1060. CFStringRef uti = YYImageTypeToUTType(type);
  1061. if (!uti) return nil;
  1062. CFMutableDataRef data = CFDataCreateMutable(CFAllocatorGetDefault(), 0);
  1063. if (!data) return NULL;
  1064. CGImageDestinationRef dest = CGImageDestinationCreateWithData(data, uti, 1, NULL);
  1065. if (!dest) {
  1066. CFRelease(data);
  1067. return NULL;
  1068. }
  1069. NSDictionary *options = @{(id)kCGImageDestinationLossyCompressionQuality : @(quality) };
  1070. CGImageDestinationAddImage(dest, imageRef, (CFDictionaryRef)options);
  1071. if (!CGImageDestinationFinalize(dest)) {
  1072. CFRelease(data);
  1073. CFRelease(dest);
  1074. return nil;
  1075. }
  1076. CFRelease(dest);
  1077. if (CFDataGetLength(data) == 0) {
  1078. CFRelease(data);
  1079. return NULL;
  1080. }
  1081. return data;
  1082. }
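/*
 Usage sketch (illustrative): encode a CGImage to JPEG data at 90% quality.
 The returned CFDataRef follows the Create rule, so transfer ownership when
 bridging to NSData. `photo` is an assumed UIImage backed by a CGImage.

     CFDataRef cfData = YYCGImageCreateEncodedData(photo.CGImage, YYImageTypeJPEG, 0.9);
     NSData *jpegData = (__bridge_transfer NSData *)cfData; // NULL/nil on failure
 */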
  1083. #if YYIMAGE_WEBP_ENABLED
  1084. BOOL YYImageWebPAvailable() {
  1085. return YES;
  1086. }
  1087. CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
  1088. if (!imageRef) return nil;
  1089. size_t width = CGImageGetWidth(imageRef);
  1090. size_t height = CGImageGetHeight(imageRef);
  1091. if (width == 0 || width > WEBP_MAX_DIMENSION) return nil;
  1092. if (height == 0 || height > WEBP_MAX_DIMENSION) return nil;
  1093. vImage_Buffer buffer = {0};
  1094. if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &buffer, kCGImageAlphaLast | kCGBitmapByteOrderDefault)) return nil;
  1095. WebPConfig config = {0};
  1096. WebPPicture picture = {0};
  1097. WebPMemoryWriter writer = {0};
  1098. CFDataRef webpData = NULL;
  1099. BOOL pictureNeedFree = NO;
  1100. quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  1101. preset = preset > YYImagePresetText ? YYImagePresetDefault : preset;
  1102. compressLevel = compressLevel < 0 ? 0 : compressLevel > 6 ? 6 : compressLevel;
  1103. if (!WebPConfigPreset(&config, (WebPPreset)preset, quality)) goto fail;
  1104. config.quality = round(quality * 100.0);
  1105. config.lossless = lossless;
  1106. config.method = compressLevel;
  1107. switch ((WebPPreset)preset) {
  1108. case WEBP_PRESET_DEFAULT: {
  1109. config.image_hint = WEBP_HINT_DEFAULT;
  1110. } break;
  1111. case WEBP_PRESET_PICTURE: {
  1112. config.image_hint = WEBP_HINT_PICTURE;
  1113. } break;
  1114. case WEBP_PRESET_PHOTO: {
  1115. config.image_hint = WEBP_HINT_PHOTO;
  1116. } break;
  1117. case WEBP_PRESET_DRAWING:
  1118. case WEBP_PRESET_ICON:
  1119. case WEBP_PRESET_TEXT: {
  1120. config.image_hint = WEBP_HINT_GRAPH;
  1121. } break;
  1122. }
  1123. if (!WebPValidateConfig(&config)) goto fail;
  1124. if (!WebPPictureInit(&picture)) goto fail;
  1125. pictureNeedFree = YES;
  1126. picture.width = (int)buffer.width;
  1127. picture.height = (int)buffer.height;
  1128. picture.use_argb = lossless;
  1129. if(!WebPPictureImportRGBA(&picture, buffer.data, (int)buffer.rowBytes)) goto fail;
  1130. WebPMemoryWriterInit(&writer);
  1131. picture.writer = WebPMemoryWrite;
  1132. picture.custom_ptr = &writer;
  1133. if(!WebPEncode(&config, &picture)) goto fail;
  1134. webpData = CFDataCreate(CFAllocatorGetDefault(), writer.mem, writer.size);
  1135. free(writer.mem);
  1136. WebPPictureFree(&picture);
  1137. free(buffer.data);
  1138. return webpData;
  1139. fail:
  1140. if (buffer.data) free(buffer.data);
  1141. if (pictureNeedFree) WebPPictureFree(&picture);
  1142. return nil;
  1143. }
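/*
 Usage sketch (illustrative): produce a lossless WebP at the default preset.
 compressLevel maps to WebPConfig.method (0 = fastest, 6 = slowest / best
 compression). `image` is an assumed UIImage backed by a CGImage.

     CFDataRef webp = YYCGImageCreateEncodedWebPData(image.CGImage, YES, 1, 6, YYImagePresetDefault);
     NSData *webpData = (__bridge_transfer NSData *)webp;
 */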
  1144. NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
  1145. if (!webpData || CFDataGetLength(webpData) == 0) return 0;
  1146. WebPData data = {CFDataGetBytePtr(webpData), CFDataGetLength(webpData)};
  1147. WebPDemuxer *demuxer = WebPDemux(&data);
  1148. if (!demuxer) return 0;
  1149. NSUInteger webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1150. WebPDemuxDelete(demuxer);
  1151. return webpFrameCount;
  1152. }
  1153. CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
  1154. BOOL decodeForDisplay,
  1155. BOOL useThreads,
  1156. BOOL bypassFiltering,
  1157. BOOL noFancyUpsampling) {
/*
 Calling WebPDecode() on multi-frame WebP data returns VP8_STATUS_UNSUPPORTED_FEATURE,
 so use WebPDemuxer to unpack the frames first.
 */
  1162. WebPData data = {0};
  1163. WebPDemuxer *demuxer = NULL;
  1164. int frameCount = 0, canvasWidth = 0, canvasHeight = 0;
  1165. WebPIterator iter = {0};
  1166. BOOL iterInited = NO;
  1167. const uint8_t *payload = NULL;
  1168. size_t payloadSize = 0;
  1169. WebPDecoderConfig config = {0};
  1170. BOOL hasAlpha = NO;
  1171. size_t bitsPerComponent = 0, bitsPerPixel = 0, bytesPerRow = 0, destLength = 0;
  1172. CGBitmapInfo bitmapInfo = 0;
  1173. WEBP_CSP_MODE colorspace = 0;
  1174. void *destBytes = NULL;
  1175. CGDataProviderRef provider = NULL;
  1176. CGImageRef imageRef = NULL;
  1177. if (!webpData || CFDataGetLength(webpData) == 0) return NULL;
  1178. data.bytes = CFDataGetBytePtr(webpData);
  1179. data.size = CFDataGetLength(webpData);
  1180. demuxer = WebPDemux(&data);
  1181. if (!demuxer) goto fail;
  1182. frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1183. if (frameCount == 0) {
  1184. goto fail;
  1185. } else if (frameCount == 1) { // single-frame
  1186. payload = data.bytes;
  1187. payloadSize = data.size;
  1188. if (!WebPInitDecoderConfig(&config)) goto fail;
  1189. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
  1190. canvasWidth = config.input.width;
  1191. canvasHeight = config.input.height;
  1192. } else { // multi-frame
  1193. canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1194. canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1195. if (canvasWidth < 1 || canvasHeight < 1) goto fail;
  1196. if (!WebPDemuxGetFrame(demuxer, 1, &iter)) goto fail;
  1197. iterInited = YES;
  1198. if (iter.width > canvasWidth || iter.height > canvasHeight) goto fail;
  1199. payload = iter.fragment.bytes;
  1200. payloadSize = iter.fragment.size;
  1201. if (!WebPInitDecoderConfig(&config)) goto fail;
  1202. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
  1203. }
  1204. if (payload == NULL || payloadSize == 0) goto fail;
  1205. hasAlpha = config.input.has_alpha;
  1206. bitsPerComponent = 8;
  1207. bitsPerPixel = 32;
  1208. bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * canvasWidth, 32);
  1209. destLength = bytesPerRow * canvasHeight;
  1210. if (decodeForDisplay) {
  1211. bitmapInfo = kCGBitmapByteOrder32Host;
  1212. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
colorspace = MODE_bgrA; // little endian (premultiplied BGRA)
  1214. } else {
  1215. bitmapInfo = kCGBitmapByteOrderDefault;
  1216. bitmapInfo |= hasAlpha ? kCGImageAlphaLast : kCGImageAlphaNoneSkipLast;
  1217. colorspace = MODE_RGBA;
  1218. }
  1219. destBytes = calloc(1, destLength);
  1220. if (!destBytes) goto fail;
config.options.use_threads = useThreads;                 // ~23% faster
config.options.bypass_filtering = bypassFiltering;       // ~11% faster, may introduce some banding
config.options.no_fancy_upsampling = noFancyUpsampling;  // ~16% slower, loses some detail
  1224. config.output.colorspace = colorspace;
  1225. config.output.is_external_memory = 1;
  1226. config.output.u.RGBA.rgba = destBytes;
  1227. config.output.u.RGBA.stride = (int)bytesPerRow;
  1228. config.output.u.RGBA.size = destLength;
  1229. VP8StatusCode result = WebPDecode(payload, payloadSize, &config);
  1230. if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) goto fail;
  1231. if (iter.x_offset != 0 || iter.y_offset != 0) {
  1232. void *tmp = calloc(1, destLength);
  1233. if (tmp) {
  1234. vImage_Buffer src = {destBytes, canvasHeight, canvasWidth, bytesPerRow};
  1235. vImage_Buffer dest = {tmp, canvasHeight, canvasWidth, bytesPerRow};
  1236. vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
  1237. uint8_t backColor[4] = {0};
  1238. vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
  1239. memcpy(destBytes, tmp, destLength);
  1240. free(tmp);
  1241. }
  1242. }
  1243. provider = CGDataProviderCreateWithData(destBytes, destBytes, destLength, YYCGDataProviderReleaseDataCallback);
  1244. if (!provider) goto fail;
  1245. destBytes = NULL; // hold by provider
  1246. imageRef = CGImageCreate(canvasWidth, canvasHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
  1247. CFRelease(provider);
  1248. if (iterInited) WebPDemuxReleaseIterator(&iter);
  1249. WebPDemuxDelete(demuxer);
  1250. return imageRef;
  1251. fail:
  1252. if (destBytes) free(destBytes);
  1253. if (provider) CFRelease(provider);
  1254. if (iterInited) WebPDemuxReleaseIterator(&iter);
  1255. if (demuxer) WebPDemuxDelete(demuxer);
  1256. return NULL;
  1257. }
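/*
 Usage sketch (illustrative): decode the first frame of a WebP blob into a
 premultiplied-BGRA CGImage ready for display. `webpData` is an assumed NSData
 containing a WebP file.

     CGImageRef decoded = YYCGImageCreateWithWebPData((__bridge CFDataRef)webpData, YES, YES, NO, NO);
     if (decoded) {
         UIImage *image = [UIImage imageWithCGImage:decoded];
         CFRelease(decoded);
     }
 */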
  1258. #else
  1259. BOOL YYImageWebPAvailable() {
  1260. return NO;
  1261. }
  1262. CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
NSLog(@"WebP encoder is disabled");
  1264. return NULL;
  1265. }
  1266. NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
  1267. NSLog(@"WebP decoder is disabled");
  1268. return 0;
  1269. }
  1270. CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
  1271. BOOL decodeForDisplay,
  1272. BOOL useThreads,
  1273. BOOL bypassFiltering,
  1274. BOOL noFancyUpsampling) {
  1275. NSLog(@"WebP decoder is disabled");
  1276. return NULL;
  1277. }
  1278. #endif
  1279. ////////////////////////////////////////////////////////////////////////////////
  1280. #pragma mark - Decoder
  1281. @implementation YYImageFrame
  1282. + (instancetype)frameWithImage:(UIImage *)image {
  1283. YYImageFrame *frame = [self new];
  1284. frame.image = image;
  1285. return frame;
  1286. }
  1287. - (id)copyWithZone:(NSZone *)zone {
  1288. YYImageFrame *frame = [self.class new];
  1289. frame.index = _index;
  1290. frame.width = _width;
  1291. frame.height = _height;
  1292. frame.offsetX = _offsetX;
  1293. frame.offsetY = _offsetY;
  1294. frame.duration = _duration;
  1295. frame.dispose = _dispose;
  1296. frame.blend = _blend;
  1297. frame.image = _image.copy;
  1298. return frame;
  1299. }
  1300. @end
  1301. // Internal frame object.
  1302. @interface _YYImageDecoderFrame : YYImageFrame
  1303. @property (nonatomic, assign) BOOL hasAlpha; ///< Whether frame has alpha.
  1304. @property (nonatomic, assign) BOOL isFullSize; ///< Whether frame fill the canvas.
  1305. @property (nonatomic, assign) NSUInteger blendFromIndex; ///< Blend from frame index to current frame.
  1306. @end
  1307. @implementation _YYImageDecoderFrame
  1308. - (id)copyWithZone:(NSZone *)zone {
  1309. _YYImageDecoderFrame *frame = [super copyWithZone:zone];
  1310. frame.hasAlpha = _hasAlpha;
  1311. frame.isFullSize = _isFullSize;
  1312. frame.blendFromIndex = _blendFromIndex;
  1313. return frame;
  1314. }
  1315. @end
  1316. @implementation YYImageDecoder {
  1317. pthread_mutex_t _lock; // recursive lock
  1318. BOOL _sourceTypeDetected;
  1319. CGImageSourceRef _source;
  1320. yy_png_info *_apngSource;
  1321. #if YYIMAGE_WEBP_ENABLED
  1322. WebPDemuxer *_webpSource;
  1323. #endif
  1324. UIImageOrientation _orientation;
  1325. dispatch_semaphore_t _framesLock;
NSArray *_frames; ///< Array<_YYImageDecoderFrame>, without image
  1327. BOOL _needBlend;
  1328. NSUInteger _blendFrameIndex;
  1329. CGContextRef _blendCanvas;
  1330. }
  1331. - (void)dealloc {
  1332. if (_source) CFRelease(_source);
  1333. if (_apngSource) yy_png_info_release(_apngSource);
  1334. #if YYIMAGE_WEBP_ENABLED
  1335. if (_webpSource) WebPDemuxDelete(_webpSource);
  1336. #endif
  1337. if (_blendCanvas) CFRelease(_blendCanvas);
  1338. pthread_mutex_destroy(&_lock);
  1339. }
  1340. + (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale {
  1341. if (!data) return nil;
  1342. YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:scale];
  1343. [decoder updateData:data final:YES];
  1344. if (decoder.frameCount == 0) return nil;
  1345. return decoder;
  1346. }
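/*
 Usage sketch (illustrative): decode every frame of an animated image in one
 pass. `data` is an assumed NSData holding a GIF/APNG/WebP file.

     YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2];
     for (NSUInteger i = 0; i < decoder.frameCount; i++) {
         YYImageFrame *frame = [decoder frameAtIndex:i decodeForDisplay:YES];
         UIImage *image = frame.image; // nil if the frame could not be decoded
     }
 */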
  1347. - (instancetype)init {
  1348. return [self initWithScale:[UIScreen mainScreen].scale];
  1349. }
  1350. - (instancetype)initWithScale:(CGFloat)scale {
  1351. self = [super init];
  1352. if (scale <= 0) scale = 1;
  1353. _scale = scale;
  1354. _framesLock = dispatch_semaphore_create(1);
  1355. pthread_mutex_init_recursive(&_lock, true);
  1356. return self;
  1357. }
  1358. - (BOOL)updateData:(NSData *)data final:(BOOL)final {
  1359. BOOL result = NO;
  1360. pthread_mutex_lock(&_lock);
  1361. result = [self _updateData:data final:final];
  1362. pthread_mutex_unlock(&_lock);
  1363. return result;
  1364. }
  1365. - (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
  1366. YYImageFrame *result = nil;
  1367. pthread_mutex_lock(&_lock);
  1368. result = [self _frameAtIndex:index decodeForDisplay:decodeForDisplay];
  1369. pthread_mutex_unlock(&_lock);
  1370. return result;
  1371. }
  1372. - (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index {
  1373. NSTimeInterval result = 0;
  1374. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1375. if (index < _frames.count) {
  1376. result = ((_YYImageDecoderFrame *)_frames[index]).duration;
  1377. }
  1378. dispatch_semaphore_signal(_framesLock);
  1379. return result;
  1380. }
  1381. - (NSDictionary *)framePropertiesAtIndex:(NSUInteger)index {
  1382. NSDictionary *result = nil;
  1383. pthread_mutex_lock(&_lock);
  1384. result = [self _framePropertiesAtIndex:index];
  1385. pthread_mutex_unlock(&_lock);
  1386. return result;
  1387. }
  1388. - (NSDictionary *)imageProperties {
  1389. NSDictionary *result = nil;
  1390. pthread_mutex_lock(&_lock);
  1391. result = [self _imageProperties];
  1392. pthread_mutex_unlock(&_lock);
  1393. return result;
  1394. }
  1395. #pragma private (wrap)
  1396. - (BOOL)_updateData:(NSData *)data final:(BOOL)final {
  1397. if (_finalized) return NO;
  1398. if (data.length < _data.length) return NO;
  1399. _finalized = final;
  1400. _data = data;
  1401. YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
  1402. if (_sourceTypeDetected) {
  1403. if (_type != type) {
  1404. return NO;
  1405. } else {
  1406. [self _updateSource];
  1407. }
  1408. } else {
  1409. if (_data.length > 16) {
  1410. _type = type;
  1411. _sourceTypeDetected = YES;
  1412. [self _updateSource];
  1413. }
  1414. }
  1415. return YES;
  1416. }
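/*
 Usage sketch (illustrative): incremental decoding while data arrives over the
 network. Each call must pass all bytes received so far (the data may only
 grow), and the last call passes final:YES. `partialData` and `completeData`
 are assumed NSData objects.

     YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:[UIScreen mainScreen].scale];
     [decoder updateData:partialData final:NO];
     YYImageFrame *preview = [decoder frameAtIndex:0 decodeForDisplay:YES]; // may be nil or partial
     [decoder updateData:completeData final:YES];
     YYImageFrame *full = [decoder frameAtIndex:0 decodeForDisplay:YES];
 */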
  1417. - (YYImageFrame *)_frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
if (index >= _frames.count) return nil;
  1419. _YYImageDecoderFrame *frame = [(_YYImageDecoderFrame *)_frames[index] copy];
  1420. BOOL decoded = NO;
  1421. BOOL extendToCanvas = NO;
  1422. if (_type != YYImageTypeICO && decodeForDisplay) { // ICO contains multi-size frame and should not extend to canvas.
  1423. extendToCanvas = YES;
  1424. }
  1425. if (!_needBlend) {
  1426. CGImageRef imageRef = [self _newUnblendedImageAtIndex:index extendToCanvas:extendToCanvas decoded:&decoded];
  1427. if (!imageRef) return nil;
  1428. if (decodeForDisplay && !decoded) {
  1429. CGImageRef imageRefDecoded = YYCGImageCreateDecodedCopy(imageRef, YES);
  1430. if (imageRefDecoded) {
  1431. CFRelease(imageRef);
  1432. imageRef = imageRefDecoded;
  1433. decoded = YES;
  1434. }
  1435. }
  1436. UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
  1437. CFRelease(imageRef);
  1438. if (!image) return nil;
  1439. image.isDecodedForDisplay = decoded;
  1440. frame.image = image;
  1441. return frame;
  1442. }
  1443. // blend
  1444. if (![self _createBlendContextIfNeeded]) return nil;
  1445. CGImageRef imageRef = NULL;
  1446. if (_blendFrameIndex + 1 == frame.index) {
  1447. imageRef = [self _newBlendedImageWithFrame:frame];
  1448. _blendFrameIndex = index;
  1449. } else { // should draw canvas from previous frame
  1450. _blendFrameIndex = NSNotFound;
  1451. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1452. if (frame.blendFromIndex == frame.index) {
  1453. CGImageRef unblendedImage = [self _newUnblendedImageAtIndex:index extendToCanvas:NO decoded:NULL];
  1454. if (unblendedImage) {
  1455. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendedImage);
  1456. CFRelease(unblendedImage);
  1457. }
  1458. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1459. if (frame.dispose == YYImageDisposeBackground) {
  1460. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1461. }
  1462. _blendFrameIndex = index;
  1463. } else { // canvas is not ready
  1464. for (uint32_t i = (uint32_t)frame.blendFromIndex; i <= (uint32_t)frame.index; i++) {
  1465. if (i == frame.index) {
  1466. if (!imageRef) imageRef = [self _newBlendedImageWithFrame:frame];
  1467. } else {
  1468. [self _blendImageWithFrame:_frames[i]];
  1469. }
  1470. }
  1471. _blendFrameIndex = index;
  1472. }
  1473. }
  1474. if (!imageRef) return nil;
  1475. UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
  1476. CFRelease(imageRef);
  1477. if (!image) return nil;
  1478. image.isDecodedForDisplay = YES;
  1479. frame.image = image;
  1480. if (extendToCanvas) {
  1481. frame.width = _width;
  1482. frame.height = _height;
  1483. frame.offsetX = 0;
  1484. frame.offsetY = 0;
  1485. frame.dispose = YYImageDisposeNone;
  1486. frame.blend = YYImageBlendNone;
  1487. }
  1488. return frame;
  1489. }
  1490. - (NSDictionary *)_framePropertiesAtIndex:(NSUInteger)index {
  1491. if (index >= _frames.count) return nil;
  1492. if (!_source) return nil;
  1493. CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, index, NULL);
  1494. if (!properties) return nil;
  1495. return CFBridgingRelease(properties);
  1496. }
  1497. - (NSDictionary *)_imageProperties {
  1498. if (!_source) return nil;
  1499. CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
  1500. if (!properties) return nil;
  1501. return CFBridgingRelease(properties);
  1502. }
  1503. #pragma private
  1504. - (void)_updateSource {
  1505. switch (_type) {
  1506. case YYImageTypeWebP: {
  1507. [self _updateSourceWebP];
  1508. } break;
  1509. case YYImageTypePNG: {
  1510. [self _updateSourceAPNG];
  1511. } break;
  1512. default: {
  1513. [self _updateSourceImageIO];
  1514. } break;
  1515. }
  1516. }
  1517. - (void)_updateSourceWebP {
  1518. #if YYIMAGE_WEBP_ENABLED
  1519. _width = 0;
  1520. _height = 0;
  1521. _loopCount = 0;
  1522. if (_webpSource) WebPDemuxDelete(_webpSource);
  1523. _webpSource = NULL;
  1524. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1525. _frames = nil;
  1526. dispatch_semaphore_signal(_framesLock);
/*
 https://developers.google.com/speed/webp/docs/api
 The documentation says WebPIDecoder can decode WebP progressively, but
 currently it only returns an empty image (not the same as progressive JPEG),
 so progressive decoding is not used here.
 Calling WebPDecode() on multi-frame WebP data returns the error
 "VP8_STATUS_UNSUPPORTED_FEATURE", so WebPDemuxer is used to unpack the frames first.
 */
  1535. WebPData webPData = {0};
  1536. webPData.bytes = _data.bytes;
  1537. webPData.size = _data.length;
  1538. WebPDemuxer *demuxer = WebPDemux(&webPData);
  1539. if (!demuxer) return;
  1540. uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1541. uint32_t webpLoopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
  1542. uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1543. uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1544. if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) {
  1545. WebPDemuxDelete(demuxer);
  1546. return;
  1547. }
  1548. NSMutableArray *frames = [NSMutableArray new];
  1549. BOOL needBlend = NO;
  1550. uint32_t iterIndex = 0;
  1551. uint32_t lastBlendIndex = 0;
  1552. WebPIterator iter = {0};
  1553. if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index...
  1554. do {
  1555. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1556. [frames addObject:frame];
  1557. if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
  1558. frame.dispose = YYImageDisposeBackground;
  1559. }
  1560. if (iter.blend_method == WEBP_MUX_BLEND) {
  1561. frame.blend = YYImageBlendOver;
  1562. }
  1563. int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1564. int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1565. frame.index = iterIndex;
  1566. frame.duration = iter.duration / 1000.0;
  1567. frame.width = iter.width;
  1568. frame.height = iter.height;
  1569. frame.hasAlpha = iter.has_alpha;
  1570. frame.blend = iter.blend_method == WEBP_MUX_BLEND;
  1571. frame.offsetX = iter.x_offset;
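// WebP frame offsets are measured from the top-left of the canvas; flip the y
// offset because CoreGraphics uses a bottom-left origin.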
  1572. frame.offsetY = canvasHeight - iter.y_offset - iter.height;
  1573. BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
  1574. BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
  1575. frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
  1576. if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
  1577. frame.blendFromIndex = lastBlendIndex = iterIndex;
  1578. } else {
  1579. if (frame.dispose && frame.isFullSize) {
  1580. frame.blendFromIndex = lastBlendIndex;
  1581. lastBlendIndex = iterIndex + 1;
  1582. } else {
  1583. frame.blendFromIndex = lastBlendIndex;
  1584. }
  1585. }
  1586. if (frame.index != frame.blendFromIndex) needBlend = YES;
  1587. iterIndex++;
  1588. } while (WebPDemuxNextFrame(&iter));
  1589. WebPDemuxReleaseIterator(&iter);
  1590. }
  1591. if (frames.count != webpFrameCount) {
  1592. WebPDemuxDelete(demuxer);
  1593. return;
  1594. }
  1595. _width = canvasWidth;
  1596. _height = canvasHeight;
  1597. _frameCount = frames.count;
  1598. _loopCount = webpLoopCount;
  1599. _needBlend = needBlend;
  1600. _webpSource = demuxer;
  1601. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1602. _frames = frames;
  1603. dispatch_semaphore_signal(_framesLock);
  1604. #else
  1605. static const char *func = __FUNCTION__;
  1606. static const int line = __LINE__;
  1607. static dispatch_once_t onceToken;
  1608. dispatch_once(&onceToken, ^{
  1609. NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", func, line);
  1610. });
  1611. #endif
  1612. }
  1613. - (void)_updateSourceAPNG {
/*
 APNG extends the PNG format to support animation; ImageIO has supported it
 since iOS 8. We use a custom APNG decoder to keep APNG working on older
 systems, so ImageIO's APNG frame info is ignored. Typically the custom
 decoder is a bit faster than ImageIO.
 */
  1621. yy_png_info_release(_apngSource);
  1622. _apngSource = nil;
  1623. [self _updateSourceImageIO]; // decode first frame
  1624. if (_frameCount == 0) return; // png decode failed
  1625. if (!_finalized) return; // ignore multi-frame before finalized
  1626. yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length);
  1627. if (!apng) return; // apng decode failed
  1628. if (apng->apng_frame_num == 0 ||
  1629. (apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) {
  1630. yy_png_info_release(apng);
  1631. return; // no animation
  1632. }
if (_source) { // APNG decode succeeded; the ImageIO source is no longer needed
  1634. CFRelease(_source);
  1635. _source = NULL;
  1636. }
  1637. uint32_t canvasWidth = apng->header.width;
  1638. uint32_t canvasHeight = apng->header.height;
  1639. NSMutableArray *frames = [NSMutableArray new];
  1640. BOOL needBlend = NO;
  1641. uint32_t lastBlendIndex = 0;
  1642. for (uint32_t i = 0; i < apng->apng_frame_num; i++) {
  1643. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1644. [frames addObject:frame];
  1645. yy_png_frame_info *fi = apng->apng_frames + i;
  1646. frame.index = i;
  1647. frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den);
  1648. frame.hasAlpha = YES;
  1649. frame.width = fi->frame_control.width;
  1650. frame.height = fi->frame_control.height;
  1651. frame.offsetX = fi->frame_control.x_offset;
  1652. frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height;
  1653. BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight);
  1654. BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0);
  1655. frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
  1656. switch (fi->frame_control.dispose_op) {
  1657. case YY_PNG_DISPOSE_OP_BACKGROUND: {
  1658. frame.dispose = YYImageDisposeBackground;
  1659. } break;
  1660. case YY_PNG_DISPOSE_OP_PREVIOUS: {
  1661. frame.dispose = YYImageDisposePrevious;
  1662. } break;
  1663. default: {
  1664. frame.dispose = YYImageDisposeNone;
  1665. } break;
  1666. }
  1667. switch (fi->frame_control.blend_op) {
  1668. case YY_PNG_BLEND_OP_OVER: {
  1669. frame.blend = YYImageBlendOver;
  1670. } break;
  1671. default: {
  1672. frame.blend = YYImageBlendNone;
  1673. } break;
  1674. }
  1675. if (frame.blend == YYImageBlendNone && frame.isFullSize) {
  1676. frame.blendFromIndex = i;
  1677. if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i;
  1678. } else {
  1679. if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) {
  1680. frame.blendFromIndex = lastBlendIndex;
  1681. lastBlendIndex = i + 1;
  1682. } else {
  1683. frame.blendFromIndex = lastBlendIndex;
  1684. }
  1685. }
  1686. if (frame.index != frame.blendFromIndex) needBlend = YES;
  1687. }
  1688. _width = canvasWidth;
  1689. _height = canvasHeight;
  1690. _frameCount = frames.count;
  1691. _loopCount = apng->apng_loop_num;
  1692. _needBlend = needBlend;
  1693. _apngSource = apng;
  1694. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1695. _frames = frames;
  1696. dispatch_semaphore_signal(_framesLock);
  1697. }
  1698. - (void)_updateSourceImageIO {
  1699. _width = 0;
  1700. _height = 0;
  1701. _orientation = UIImageOrientationUp;
  1702. _loopCount = 0;
  1703. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1704. _frames = nil;
  1705. dispatch_semaphore_signal(_framesLock);
  1706. if (!_source) {
  1707. if (_finalized) {
  1708. _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
  1709. } else {
  1710. _source = CGImageSourceCreateIncremental(NULL);
  1711. if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
  1712. }
  1713. } else {
  1714. CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized);
  1715. }
  1716. if (!_source) return;
  1717. _frameCount = CGImageSourceGetCount(_source);
  1718. if (_frameCount == 0) return;
  1719. if (!_finalized) { // ignore multi-frame before finalized
  1720. _frameCount = 1;
  1721. } else {
  1722. if (_type == YYImageTypePNG) { // use custom apng decoder and ignore multi-frame
  1723. _frameCount = 1;
  1724. }
  1725. if (_type == YYImageTypeGIF) { // get gif loop count
  1726. CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
  1727. if (properties) {
  1728. CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
  1729. if (gif) {
  1730. CFTypeRef loop = CFDictionaryGetValue(gif, kCGImagePropertyGIFLoopCount);
  1731. if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount);
  1732. }
  1733. CFRelease(properties);
  1734. }
  1735. }
  1736. }
/*
 ICO, GIF, and APNG may contain multiple frames.
 */
  1740. NSMutableArray *frames = [NSMutableArray new];
  1741. for (NSUInteger i = 0; i < _frameCount; i++) {
  1742. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1743. frame.index = i;
  1744. frame.blendFromIndex = i;
  1745. frame.hasAlpha = YES;
  1746. frame.isFullSize = YES;
  1747. [frames addObject:frame];
  1748. CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
  1749. if (properties) {
  1750. NSTimeInterval duration = 0;
  1751. NSInteger orientationValue = 0, width = 0, height = 0;
  1752. CFTypeRef value = NULL;
  1753. value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
  1754. if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width);
  1755. value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
  1756. if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height);
  1757. if (_type == YYImageTypeGIF) {
  1758. CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
  1759. if (gif) {
  1760. // Use the unclamped frame delay if it exists.
  1761. value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime);
  1762. if (!value) {
  1763. // Fall back to the clamped frame delay if the unclamped frame delay does not exist.
  1764. value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime);
  1765. }
  1766. if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration);
  1767. }
  1768. }
  1769. frame.width = width;
  1770. frame.height = height;
  1771. frame.duration = duration;
  1772. if (i == 0 && _width + _height == 0) { // init first frame
  1773. _width = width;
  1774. _height = height;
  1775. value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
  1776. if (value) {
  1777. CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue);
  1778. _orientation = YYUIImageOrientationFromEXIFValue(orientationValue);
  1779. }
  1780. }
  1781. CFRelease(properties);
  1782. }
  1783. }
  1784. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1785. _frames = frames;
  1786. dispatch_semaphore_signal(_framesLock);
  1787. }
  1788. - (CGImageRef)_newUnblendedImageAtIndex:(NSUInteger)index
  1789. extendToCanvas:(BOOL)extendToCanvas
  1790. decoded:(BOOL *)decoded CF_RETURNS_RETAINED {
  1791. if (!_finalized && index > 0) return NULL;
  1792. if (_frames.count <= index) return NULL;
  1793. _YYImageDecoderFrame *frame = _frames[index];
  1794. if (_source) {
  1795. CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
  1796. if (imageRef && extendToCanvas) {
  1797. size_t width = CGImageGetWidth(imageRef);
  1798. size_t height = CGImageGetHeight(imageRef);
  1799. if (width == _width && height == _height) {
  1800. CGImageRef imageRefExtended = YYCGImageCreateDecodedCopy(imageRef, YES);
  1801. if (imageRefExtended) {
  1802. CFRelease(imageRef);
  1803. imageRef = imageRefExtended;
  1804. if (decoded) *decoded = YES;
  1805. }
  1806. } else {
  1807. CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  1808. if (context) {
  1809. CGContextDrawImage(context, CGRectMake(0, _height - height, width, height), imageRef);
  1810. CGImageRef imageRefExtended = CGBitmapContextCreateImage(context);
  1811. CFRelease(context);
  1812. if (imageRefExtended) {
  1813. CFRelease(imageRef);
  1814. imageRef = imageRefExtended;
  1815. if (decoded) *decoded = YES;
  1816. }
  1817. }
  1818. }
  1819. }
  1820. return imageRef;
  1821. }
  1822. if (_apngSource) {
  1823. uint32_t size = 0;
  1824. uint8_t *bytes = yy_png_copy_frame_data_at_index(_data.bytes, _apngSource, (uint32_t)index, &size);
  1825. if (!bytes) return NULL;
  1826. CGDataProviderRef provider = CGDataProviderCreateWithData(bytes, bytes, size, YYCGDataProviderReleaseDataCallback);
  1827. if (!provider) {
  1828. free(bytes);
  1829. return NULL;
  1830. }
  1831. bytes = NULL; // hold by provider
  1832. CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
  1833. if (!source) {
  1834. CFRelease(provider);
  1835. return NULL;
  1836. }
  1837. CFRelease(provider);
  1838. if(CGImageSourceGetCount(source) < 1) {
  1839. CFRelease(source);
  1840. return NULL;
  1841. }
  1842. CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
  1843. CFRelease(source);
  1844. if (!imageRef) return NULL;
  1845. if (extendToCanvas) {
  1846. CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); //bgrA
  1847. if (context) {
  1848. CGContextDrawImage(context, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), imageRef);
  1849. CFRelease(imageRef);
  1850. imageRef = CGBitmapContextCreateImage(context);
  1851. CFRelease(context);
  1852. if (decoded) *decoded = YES;
  1853. }
  1854. }
  1855. return imageRef;
  1856. }
  1857. #if YYIMAGE_WEBP_ENABLED
  1858. if (_webpSource) {
  1859. WebPIterator iter;
if (!WebPDemuxGetFrame(_webpSource, (int)(index + 1), &iter)) return NULL; // demux the frame data; WebP frame numbers are one-based
  1862. int frameWidth = iter.width;
  1863. int frameHeight = iter.height;
  1864. if (frameWidth < 1 || frameHeight < 1) return NULL;
  1865. int width = extendToCanvas ? (int)_width : frameWidth;
  1866. int height = extendToCanvas ? (int)_height : frameHeight;
  1867. if (width > _width || height > _height) return NULL;
  1868. const uint8_t *payload = iter.fragment.bytes;
  1869. size_t payloadSize = iter.fragment.size;
  1870. WebPDecoderConfig config;
  1871. if (!WebPInitDecoderConfig(&config)) {
  1872. WebPDemuxReleaseIterator(&iter);
  1873. return NULL;
  1874. }
  1875. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) {
  1876. WebPDemuxReleaseIterator(&iter);
  1877. return NULL;
  1878. }
  1879. size_t bitsPerComponent = 8;
  1880. size_t bitsPerPixel = 32;
  1881. size_t bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * width, 32);
  1882. size_t length = bytesPerRow * height;
  1883. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst; //bgrA
  1884. void *pixels = calloc(1, length);
  1885. if (!pixels) {
  1886. WebPDemuxReleaseIterator(&iter);
  1887. return NULL;
  1888. }
  1889. config.output.colorspace = MODE_bgrA;
  1890. config.output.is_external_memory = 1;
  1891. config.output.u.RGBA.rgba = pixels;
  1892. config.output.u.RGBA.stride = (int)bytesPerRow;
  1893. config.output.u.RGBA.size = length;
  1894. VP8StatusCode result = WebPDecode(payload, payloadSize, &config); // decode
  1895. if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) {
  1896. WebPDemuxReleaseIterator(&iter);
  1897. free(pixels);
  1898. return NULL;
  1899. }
  1900. WebPDemuxReleaseIterator(&iter);
  1901. if (extendToCanvas && (iter.x_offset != 0 || iter.y_offset != 0)) {
  1902. void *tmp = calloc(1, length);
  1903. if (tmp) {
  1904. vImage_Buffer src = {pixels, height, width, bytesPerRow};
  1905. vImage_Buffer dest = {tmp, height, width, bytesPerRow};
  1906. vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
  1907. uint8_t backColor[4] = {0};
  1908. vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
  1909. if (error == kvImageNoError) {
  1910. memcpy(pixels, tmp, length);
  1911. }
  1912. free(tmp);
  1913. }
  1914. }
  1915. CGDataProviderRef provider = CGDataProviderCreateWithData(pixels, pixels, length, YYCGDataProviderReleaseDataCallback);
  1916. if (!provider) {
  1917. free(pixels);
  1918. return NULL;
  1919. }
  1920. pixels = NULL; // hold by provider
  1921. CGImageRef image = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
  1922. CFRelease(provider);
  1923. if (decoded) *decoded = YES;
  1924. return image;
  1925. }
  1926. #endif
  1927. return NULL;
  1928. }
  1929. - (BOOL)_createBlendContextIfNeeded {
  1930. if (!_blendCanvas) {
  1931. _blendFrameIndex = NSNotFound;
  1932. _blendCanvas = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  1933. }
  1934. BOOL suc = _blendCanvas != NULL;
  1935. return suc;
  1936. }
  1937. - (void)_blendImageWithFrame:(_YYImageDecoderFrame *)frame {
  1938. if (frame.dispose == YYImageDisposePrevious) {
  1939. // nothing
  1940. } else if (frame.dispose == YYImageDisposeBackground) {
  1941. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1942. } else { // no dispose
  1943. if (frame.blend == YYImageBlendOver) {
  1944. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1945. if (unblendImage) {
  1946. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1947. CFRelease(unblendImage);
  1948. }
  1949. } else {
  1950. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1951. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1952. if (unblendImage) {
  1953. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1954. CFRelease(unblendImage);
  1955. }
  1956. }
  1957. }
  1958. }
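/*
 Note on dispose/blend semantics (shared by APNG and WebP animation):
 - dispose none:       leave the canvas untouched after the frame is shown.
 - dispose background: clear the frame's rectangle to transparent black.
 - dispose previous:   restore the canvas to its state before this frame (only produced by the APNG parser here).
 - blend over:         alpha-composite the frame onto the canvas.
 - blend none/source:  overwrite the frame's rectangle, replacing existing pixels.
 _newBlendedImageWithFrame: below snapshots the canvas according to these rules
 so that frames which depend on earlier frames can be reconstructed.
 */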
  1959. - (CGImageRef)_newBlendedImageWithFrame:(_YYImageDecoderFrame *)frame CF_RETURNS_RETAINED{
  1960. CGImageRef imageRef = NULL;
  1961. if (frame.dispose == YYImageDisposePrevious) {
  1962. if (frame.blend == YYImageBlendOver) {
  1963. CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
  1964. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1965. if (unblendImage) {
  1966. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1967. CFRelease(unblendImage);
  1968. }
  1969. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1970. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1971. if (previousImage) {
  1972. CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
  1973. CFRelease(previousImage);
  1974. }
  1975. } else {
  1976. CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
  1977. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1978. if (unblendImage) {
  1979. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1980. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1981. CFRelease(unblendImage);
  1982. }
  1983. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1984. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1985. if (previousImage) {
  1986. CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
  1987. CFRelease(previousImage);
  1988. }
  1989. }
  1990. } else if (frame.dispose == YYImageDisposeBackground) {
  1991. if (frame.blend == YYImageBlendOver) {
  1992. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1993. if (unblendImage) {
  1994. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1995. CFRelease(unblendImage);
  1996. }
  1997. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1998. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1999. } else {
  2000. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2001. if (unblendImage) {
  2002. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2003. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2004. CFRelease(unblendImage);
  2005. }
  2006. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2007. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2008. }
  2009. } else { // no dispose
  2010. if (frame.blend == YYImageBlendOver) {
  2011. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2012. if (unblendImage) {
  2013. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2014. CFRelease(unblendImage);
  2015. }
  2016. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2017. } else {
  2018. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2019. if (unblendImage) {
  2020. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2021. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2022. CFRelease(unblendImage);
  2023. }
  2024. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2025. }
  2026. }
  2027. return imageRef;
  2028. }
  2029. @end
  2030. ////////////////////////////////////////////////////////////////////////////////
  2031. #pragma mark - Encoder
  2032. @implementation YYImageEncoder {
  2033. NSMutableArray *_images;
  2034. NSMutableArray *_durations;
  2035. }
  2036. - (instancetype)init {
  2037. @throw [NSException exceptionWithName:@"YYImageEncoder init error" reason:@"YYImageEncoder must be initialized with a type. Use 'initWithType:' instead." userInfo:nil];
  2038. return [self initWithType:YYImageTypeUnknown];
  2039. }
  2040. - (instancetype)initWithType:(YYImageType)type {
  2041. if (type == YYImageTypeUnknown || type >= YYImageTypeOther) {
  2042. NSLog(@"[%s: %d] Unsupported image type:%d",__FUNCTION__, __LINE__, (int)type);
  2043. return nil;
  2044. }
  2045. #if !YYIMAGE_WEBP_ENABLED
  2046. if (type == YYImageTypeWebP) {
  2047. NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", __FUNCTION__, __LINE__);
  2048. return nil;
  2049. }
  2050. #endif
  2051. self = [super init];
  2052. if (!self) return nil;
  2053. _type = type;
  2054. _images = [NSMutableArray new];
  2055. _durations = [NSMutableArray new];
  2056. switch (type) {
  2057. case YYImageTypeJPEG:
  2058. case YYImageTypeJPEG2000: {
  2059. _quality = 0.9;
  2060. } break;
  2061. case YYImageTypeTIFF:
  2062. case YYImageTypeBMP:
  2063. case YYImageTypeGIF:
  2064. case YYImageTypeICO:
  2065. case YYImageTypeICNS:
  2066. case YYImageTypePNG: {
  2067. _quality = 1;
  2068. _lossless = YES;
  2069. } break;
  2070. case YYImageTypeWebP: {
  2071. _quality = 0.8;
  2072. } break;
  2073. default:
  2074. break;
  2075. }
  2076. return self;
  2077. }
  2078. - (void)setQuality:(CGFloat)quality {
  2079. _quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  2080. }
  2081. - (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration {
  2082. if (!image.CGImage) return;
  2083. duration = duration < 0 ? 0 : duration;
  2084. [_images addObject:image];
  2085. [_durations addObject:@(duration)];
  2086. }
  2087. - (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration {
  2088. if (data.length == 0) return;
  2089. duration = duration < 0 ? 0 : duration;
  2090. [_images addObject:data];
  2091. [_durations addObject:@(duration)];
  2092. }
  2093. - (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration {
  2094. if (path.length == 0) return;
  2095. duration = duration < 0 ? 0 : duration;
  2096. NSURL *url = [NSURL URLWithString:path];
  2097. if (!url) return;
  2098. [_images addObject:url];
  2099. [_durations addObject:@(duration)];
  2100. }
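/*
 Usage sketch (illustrative, assuming the loopCount property and -encode method
 declared in YYImageCoder.h): build a looping animated GIF from a few UIImages.
 `frame0` and `frame1` are assumed UIImages.

     YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
     encoder.loopCount = 0; // 0 = loop forever
     [encoder addImage:frame0 duration:0.1];
     [encoder addImage:frame1 duration:0.1];
     NSData *gifData = [encoder encode];
 */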
  2101. - (BOOL)_imageIOAvaliable {
  2102. switch (_type) {
  2103. case YYImageTypeJPEG:
  2104. case YYImageTypeJPEG2000:
  2105. case YYImageTypeTIFF:
  2106. case YYImageTypeBMP:
  2107. case YYImageTypeICO:
  2108. case YYImageTypeICNS:
  2109. case YYImageTypeGIF: {
  2110. return _images.count > 0;
  2111. } break;
  2112. case YYImageTypePNG: {
  2113. return _images.count == 1;
  2114. } break;
  2115. case YYImageTypeWebP: {
  2116. return NO;
  2117. } break;
  2118. default: return NO;
  2119. }
  2120. }
  2121. - (CGImageDestinationRef)_newImageDestination:(id)dest imageCount:(NSUInteger)count CF_RETURNS_RETAINED {
  2122. if (!dest) return nil;
  2123. CGImageDestinationRef destination = NULL;
  2124. if ([dest isKindOfClass:[NSString class]]) {
  2125. NSURL *url = [[NSURL alloc] initFileURLWithPath:dest];
  2126. if (url) {
  2127. destination = CGImageDestinationCreateWithURL((CFURLRef)url, YYImageTypeToUTType(_type), count, NULL);
  2128. }
  2129. } else if ([dest isKindOfClass:[NSMutableData class]]) {
  2130. destination = CGImageDestinationCreateWithData((CFMutableDataRef)dest, YYImageTypeToUTType(_type), count, NULL);
  2131. }
  2132. return destination;
  2133. }
  2134. - (void)_encodeImageWithDestination:(CGImageDestinationRef)destination imageCount:(NSUInteger)count {
  2135. if (_type == YYImageTypeGIF) {
  2136. NSDictionary *gifProperty = @{(__bridge id)kCGImagePropertyGIFDictionary:
  2137. @{(__bridge id)kCGImagePropertyGIFLoopCount: @(_loopCount)}};
  2138. CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)gifProperty);
  2139. }
  2140. for (int i = 0; i < count; i++) {
  2141. @autoreleasepool {
  2142. id imageSrc = _images[i];
  2143. NSDictionary *frameProperty = NULL;
  2144. if (_type == YYImageTypeGIF && count > 1) {
  2145. frameProperty = @{(NSString *)kCGImagePropertyGIFDictionary : @{(NSString *) kCGImagePropertyGIFDelayTime:_durations[i]}};
  2146. } else {
  2147. frameProperty = @{(id)kCGImageDestinationLossyCompressionQuality : @(_quality)};
  2148. }
  2149. if ([imageSrc isKindOfClass:[UIImage class]]) {
  2150. UIImage *image = imageSrc;
  2151. if (image.imageOrientation != UIImageOrientationUp && image.CGImage) {
  2152. CGBitmapInfo info = CGImageGetBitmapInfo(image.CGImage) | CGImageGetAlphaInfo(image.CGImage);
  2153. CGImageRef rotated = YYCGImageCreateCopyWithOrientation(image.CGImage, image.imageOrientation, info);
  2154. if (rotated) {
  2155. image = [UIImage imageWithCGImage:rotated];
  2156. CFRelease(rotated);
  2157. }
  2158. }
  2159. if (image.CGImage) CGImageDestinationAddImage(destination, ((UIImage *)imageSrc).CGImage, (CFDictionaryRef)frameProperty);
  2160. } else if ([imageSrc isKindOfClass:[NSURL class]]) {
  2161. CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)imageSrc, NULL);
  2162. if (source) {
  2163. CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
  2164. CFRelease(source);
  2165. }
  2166. } else if ([imageSrc isKindOfClass:[NSData class]]) {
  2167. CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageSrc, NULL);
  2168. if (source) {
  2169. CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
  2170. CFRelease(source);
  2171. }
  2172. }
  2173. }
  2174. }
  2175. }
  2176. - (CGImageRef)_newCGImageFromIndex:(NSUInteger)index decoded:(BOOL)decoded CF_RETURNS_RETAINED {
  2177. UIImage *image = nil;
id imageSrc = _images[index];
  2179. if ([imageSrc isKindOfClass:[UIImage class]]) {
  2180. image = imageSrc;
  2181. } else if ([imageSrc isKindOfClass:[NSURL class]]) {
  2182. image = [UIImage imageWithContentsOfFile:((NSURL *)imageSrc).absoluteString];
  2183. } else if ([imageSrc isKindOfClass:[NSData class]]) {
  2184. image = [UIImage imageWithData:imageSrc];
  2185. }
  2186. if (!image) return NULL;
  2187. CGImageRef imageRef = image.CGImage;
  2188. if (!imageRef) return NULL;
  2189. if (image.imageOrientation != UIImageOrientationUp) {
  2190. return YYCGImageCreateCopyWithOrientation(imageRef, image.imageOrientation, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  2191. }
  2192. if (decoded) {
  2193. return YYCGImageCreateDecodedCopy(imageRef, YES);
  2194. }
  2195. return (CGImageRef)CFRetain(imageRef);
  2196. }
  2197. - (NSData *)_encodeWithImageIO {
  2198. NSMutableData *data = [NSMutableData new];
  2199. NSUInteger count = _type == YYImageTypeGIF ? _images.count : 1;
  2200. CGImageDestinationRef destination = [self _newImageDestination:data imageCount:count];
  2201. BOOL suc = NO;
  2202. if (destination) {
  2203. [self _encodeImageWithDestination:destination imageCount:count];
  2204. suc = CGImageDestinationFinalize(destination);
  2205. CFRelease(destination);
  2206. }
  2207. if (suc && data.length > 0) {
  2208. return data;
  2209. } else {
  2210. return nil;
  2211. }
  2212. }
  2213. - (BOOL)_encodeWithImageIO:(NSString *)path {
  2214. NSUInteger count = _type == YYImageTypeGIF ? _images.count : 1;
  2215. CGImageDestinationRef destination = [self _newImageDestination:path imageCount:count];
  2216. BOOL suc = NO;
  2217. if (destination) {
  2218. [self _encodeImageWithDestination:destination imageCount:count];
  2219. suc = CGImageDestinationFinalize(destination);
  2220. CFRelease(destination);
  2221. }
  2222. return suc;
  2223. }
  2224. - (NSData *)_encodeAPNG {
  2225. // encode APNG (ImageIO doesn't support APNG encoding, so we use a custom encoder)
  2226. NSMutableArray *pngDatas = [NSMutableArray new];
  2227. NSMutableArray *pngSizes = [NSMutableArray new];
  2228. NSUInteger canvasWidth = 0, canvasHeight = 0;
  2229. for (int i = 0; i < _images.count; i++) {
  2230. CGImageRef decoded = [self _newCGImageFromIndex:i decoded:YES];
  2231. if (!decoded) return nil;
  2232. CGSize size = CGSizeMake(CGImageGetWidth(decoded), CGImageGetHeight(decoded));
  2233. [pngSizes addObject:[NSValue valueWithCGSize:size]];
  2234. if (canvasWidth < size.width) canvasWidth = size.width;
  2235. if (canvasHeight < size.height) canvasHeight = size.height;
  2236. CFDataRef frameData = YYCGImageCreateEncodedData(decoded, YYImageTypePNG, 1);
  2237. CFRelease(decoded);
  2238. if (!frameData) return nil;
  2239. [pngDatas addObject:(__bridge id)(frameData)];
  2240. CFRelease(frameData);
  2241. if (size.width < 1 || size.height < 1) return nil;
  2242. }
  2243. CGSize firstFrameSize = [(NSValue *)[pngSizes firstObject] CGSizeValue];
  2244. if (firstFrameSize.width < canvasWidth || firstFrameSize.height < canvasHeight) {
  2245. CGImageRef decoded = [self _newCGImageFromIndex:0 decoded:YES];
  2246. if (!decoded) return nil;
  2247. CGContextRef context = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8,
  2248. 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  2249. if (!context) {
  2250. CFRelease(decoded);
  2251. return nil;
  2252. }
  2253. CGContextDrawImage(context, CGRectMake(0, canvasHeight - firstFrameSize.height, firstFrameSize.width, firstFrameSize.height), decoded);
  2254. CFRelease(decoded);
  2255. CGImageRef extendedImage = CGBitmapContextCreateImage(context);
  2256. CFRelease(context);
  2257. if (!extendedImage) return nil;
  2258. CFDataRef frameData = YYCGImageCreateEncodedData(extendedImage, YYImageTypePNG, 1);
  2259. if (!frameData) {
  2260. CFRelease(extendedImage);
  2261. return nil;
  2262. }
  2263. pngDatas[0] = (__bridge id)(frameData);
  2264. CFRelease(frameData);
  2265. }
  2266. NSData *firstFrameData = pngDatas[0];
  2267. yy_png_info *info = yy_png_info_create(firstFrameData.bytes, (uint32_t)firstFrameData.length);
  2268. if (!info) return nil;
  2269. NSMutableData *result = [NSMutableData new];
  2270. BOOL insertBefore = NO, insertAfter = NO;
  2271. uint32_t apngSequenceIndex = 0;
  2272. uint32_t png_header[2];
  2273. png_header[0] = YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47);
  2274. png_header[1] = YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A);
  2275. [result appendBytes:png_header length:8];
  2276. for (int i = 0; i < info->chunk_num; i++) {
  2277. yy_png_chunk_info *chunk = info->chunks + i;
  2278. if (!insertBefore && chunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2279. insertBefore = YES;
  2280. // insert acTL (APNG Control)
  2281. uint32_t acTL[5] = {0};
  2282. acTL[0] = yy_swap_endian_uint32(8); //length
  2283. acTL[1] = YY_FOUR_CC('a', 'c', 'T', 'L'); // fourcc
  2284. acTL[2] = yy_swap_endian_uint32((uint32_t)pngDatas.count); // num frames
  2285. acTL[3] = yy_swap_endian_uint32((uint32_t)_loopCount); // num plays
  2286. acTL[4] = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(acTL + 1), 12)); //crc32
  2287. [result appendBytes:acTL length:20];
  2288. // insert fcTL (first frame control)
  2289. yy_png_chunk_fcTL chunk_fcTL = {0};
  2290. chunk_fcTL.sequence_number = apngSequenceIndex;
  2291. chunk_fcTL.width = (uint32_t)firstFrameSize.width;
  2292. chunk_fcTL.height = (uint32_t)firstFrameSize.height;
  2293. yy_png_delay_to_fraction([(NSNumber *)_durations[0] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
  2296. chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
  2297. chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
  2298. uint8_t fcTL[38] = {0};
  2299. *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
  2300. *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
  2301. yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
  2302. *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
  2303. [result appendBytes:fcTL length:38];
  2304. apngSequenceIndex++;
  2305. }
  2306. if (!insertAfter && insertBefore && chunk->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2307. insertAfter = YES;
  2308. // insert fcTL and fdAT (APNG frame control and data)
  2309. for (int i = 1; i < pngDatas.count; i++) {
  2310. NSData *frameData = pngDatas[i];
  2311. yy_png_info *frame = yy_png_info_create(frameData.bytes, (uint32_t)frameData.length);
  2312. if (!frame) {
  2313. yy_png_info_release(info);
  2314. return nil;
  2315. }
// insert fcTL (frame control)
  2317. yy_png_chunk_fcTL chunk_fcTL = {0};
  2318. chunk_fcTL.sequence_number = apngSequenceIndex;
  2319. chunk_fcTL.width = frame->header.width;
  2320. chunk_fcTL.height = frame->header.height;
  2321. yy_png_delay_to_fraction([(NSNumber *)_durations[i] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
  2324. chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
  2325. chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
  2326. uint8_t fcTL[38] = {0};
  2327. *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
  2328. *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
  2329. yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
  2330. *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
  2331. [result appendBytes:fcTL length:38];
  2332. apngSequenceIndex++;
  2333. // insert fdAT (frame data)
  2334. for (int d = 0; d < frame->chunk_num; d++) {
  2335. yy_png_chunk_info *dchunk = frame->chunks + d;
  2336. if (dchunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2337. uint32_t length = yy_swap_endian_uint32(dchunk->length + 4);
  2338. [result appendBytes:&length length:4]; //length
  2339. uint32_t fourcc = YY_FOUR_CC('f', 'd', 'A', 'T');
  2340. [result appendBytes:&fourcc length:4]; //fourcc
  2341. uint32_t sq = yy_swap_endian_uint32(apngSequenceIndex);
  2342. [result appendBytes:&sq length:4]; //data (sq)
  2343. [result appendBytes:(((uint8_t *)frameData.bytes) + dchunk->offset + 8) length:dchunk->length]; //data
  2344. uint8_t *bytes = ((uint8_t *)result.bytes) + result.length - dchunk->length - 8;
  2345. uint32_t crc = yy_swap_endian_uint32((uint32_t)crc32(0, bytes, dchunk->length + 8));
  2346. [result appendBytes:&crc length:4]; //crc
  2347. apngSequenceIndex++;
  2348. }
  2349. }
  2350. yy_png_info_release(frame);
  2351. }
  2352. }
  2353. [result appendBytes:((uint8_t *)firstFrameData.bytes) + chunk->offset length:chunk->length + 12];
  2354. }
  2355. yy_png_info_release(info);
  2356. return result;
  2357. }
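
/// Encodes the frames as WebP (only when YYIMAGE_WEBP_ENABLED). Each frame is first
/// encoded to a standalone WebP bitstream; a single frame is returned as-is, while
/// multiple frames are pushed into a WebPMux container as ANMF chunks along with the
/// animation parameters (loop count, per-frame duration in milliseconds).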
- (NSData *)_encodeWebP {
#if YYIMAGE_WEBP_ENABLED
    // encode webp
    NSMutableArray *webpDatas = [NSMutableArray new];
    for (NSUInteger i = 0; i < _images.count; i++) {
        CGImageRef image = [self _newCGImageFromIndex:i decoded:NO];
        if (!image) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedWebPData(image, _lossless, _quality, 4, YYImagePresetDefault);
        CFRelease(image);
        if (!frameData) return nil;
        [webpDatas addObject:(__bridge id)frameData];
        CFRelease(frameData);
    }
    if (webpDatas.count == 1) {
        return webpDatas.firstObject;
    } else {
        // multi-frame webp
        WebPMux *mux = WebPMuxNew();
        if (!mux) return nil;
        for (NSUInteger i = 0; i < _images.count; i++) {
            NSData *data = webpDatas[i];
            NSNumber *duration = _durations[i];
            WebPMuxFrameInfo frame = {0};
            frame.bitstream.bytes = data.bytes;
            frame.bitstream.size = data.length;
            frame.duration = (int)(duration.floatValue * 1000.0);
            frame.id = WEBP_CHUNK_ANMF;
            frame.dispose_method = WEBP_MUX_DISPOSE_BACKGROUND;
            frame.blend_method = WEBP_MUX_NO_BLEND;
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        
        WebPMuxAnimParams params = {(uint32_t)0, (int)_loopCount};
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
        
        WebPData output_data;
        WebPMuxError error = WebPMuxAssemble(mux, &output_data);
        WebPMuxDelete(mux);
        if (error != WEBP_MUX_OK) {
            return nil;
        }
        
        NSData *result = [NSData dataWithBytes:output_data.bytes length:output_data.size];
        WebPDataClear(&output_data);
        return result.length ? result : nil;
    }
#else
    return nil;
#endif
}
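
/// Produces the encoded image data. ImageIO is preferred whenever it can encode the
/// target type; animated PNG (APNG) and WebP fall back to the manual encoders above.
/// Returns nil if there are no frames or encoding fails.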
- (NSData *)encode {
    if (_images.count == 0) return nil;
    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO];
    if (_type == YYImageTypePNG) return [self _encodeAPNG];
    if (_type == YYImageTypeWebP) return [self _encodeWebP];
    return nil;
}

- (BOOL)encodeToFile:(NSString *)path {
    if (_images.count == 0 || path.length == 0) return NO;
    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO:path];
    NSData *data = [self encode];
    if (!data) return NO;
    return [data writeToFile:path atomically:YES];
}
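
/// One-shot convenience for encoding a single still image.
/// Illustrative usage (the `photo` variable is hypothetical, not part of this file):
///   NSData *jpeg = [YYImageEncoder encodeImage:photo type:YYImageTypeJPEG quality:0.9];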
+ (NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality {
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type];
    encoder.quality = quality;
    [encoder addImage:image duration:0];
    return [encoder encode];
}
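
/// Re-encodes every frame of a decoder into the given type, keeping each frame's
/// duration. Note that frames are round-tripped through UIImagePNGRepresentation
/// before being handed to the encoder, so source metadata beyond the frame duration
/// is unlikely to survive.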
+ (NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality {
    if (!decoder || decoder.frameCount == 0) return nil;
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type];
    encoder.quality = quality;
    for (int i = 0; i < decoder.frameCount; i++) {
        UIImage *frame = [decoder frameAtIndex:i decodeForDisplay:YES].image;
        [encoder addImageWithData:UIImagePNGRepresentation(frame) duration:[decoder frameDurationAtIndex:i]];
    }
    return [encoder encode];
}

@end


@implementation UIImage (YYImageCoder)
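
/// Returns a copy of the receiver whose pixel data has already been decompressed
/// for display; returns self if it is already decoded or if decoding fails.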
- (instancetype)imageByDecoded {
    if (self.isDecodedForDisplay) return self;
    CGImageRef imageRef = self.CGImage;
    if (!imageRef) return self;
    CGImageRef newImageRef = YYCGImageCreateDecodedCopy(imageRef, YES);
    if (!newImageRef) return self;
    UIImage *newImage = [[self.class alloc] initWithCGImage:newImageRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(newImageRef);
    if (!newImage) newImage = self; // decode failed, return self.
    newImage.isDecodedForDisplay = YES;
    return newImage;
}
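
/// The "decoded for display" flag is stored as an associated object keyed on the
/// getter's selector; animated images (images.count > 1) always report YES.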
- (BOOL)isDecodedForDisplay {
    if (self.images.count > 1) return YES;
    NSNumber *num = objc_getAssociatedObject(self, @selector(isDecodedForDisplay));
    return [num boolValue];
}

- (void)setIsDecodedForDisplay:(BOOL)isDecodedForDisplay {
    objc_setAssociatedObject(self, @selector(isDecodedForDisplay), @(isDecodedForDisplay), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
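
/// Saves the image data to the system photo album on a background queue via
/// ALAssetsLibrary, then calls the completion block on the main thread.
/// Editor's note: ALAssetsLibrary has been deprecated since iOS 9; PHPhotoLibrary
/// from the Photos framework is the modern replacement, but the original API is
/// kept here unchanged.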
- (void)saveToAlbumWithCompletionBlock:(void(^)(NSURL *assetURL, NSError *error))completionBlock {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *data = [self _imageDataRepresentationForSystem:YES];
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        [library writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
            if (!completionBlock) return;
            if (pthread_main_np()) {
                completionBlock(assetURL, error);
            } else {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completionBlock(assetURL, error);
                });
            }
        }];
    });
}
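
/// Returns a data representation of the receiver: animated YYImage data when
/// available, otherwise PNG (if the bitmap has alpha) or JPEG at 0.9 quality.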
- (NSData *)imageDataRepresentation {
    return [self _imageDataRepresentationForSystem:NO];
}

/// @param forSystem YES: used for system album (PNG/JPEG/GIF), NO: used for YYImage (PNG/JPEG/GIF/WebP)
- (NSData *)_imageDataRepresentationForSystem:(BOOL)forSystem {
    NSData *data = nil;
    if ([self isKindOfClass:[YYImage class]]) {
        YYImage *image = (id)self;
        if (image.animatedImageData) {
            if (forSystem) { // system only supports GIF and PNG
                if (image.animatedImageType == YYImageTypeGIF ||
                    image.animatedImageType == YYImageTypePNG) {
                    data = image.animatedImageData;
                }
            } else {
                data = image.animatedImageData;
            }
        }
    }
    if (!data) {
        CGImageRef imageRef = self.CGImage ? (CGImageRef)CFRetain(self.CGImage) : nil;
        if (imageRef) {
            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
            CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
            BOOL hasAlpha = NO;
            if (alphaInfo == kCGImageAlphaPremultipliedLast ||
                alphaInfo == kCGImageAlphaPremultipliedFirst ||
                alphaInfo == kCGImageAlphaLast ||
                alphaInfo == kCGImageAlphaFirst) {
                hasAlpha = YES;
            }
            if (self.imageOrientation != UIImageOrientationUp) {
                CGImageRef rotated = YYCGImageCreateCopyWithOrientation(imageRef, self.imageOrientation, bitmapInfo | alphaInfo);
                if (rotated) {
                    CFRelease(imageRef);
                    imageRef = rotated;
                }
            }
            @autoreleasepool {
                UIImage *newImage = [UIImage imageWithCGImage:imageRef];
                if (newImage) {
                    if (hasAlpha) {
                        data = UIImagePNGRepresentation(newImage);
                    } else {
                        data = UIImageJPEGRepresentation(newImage, 0.9); // same as Apple's example
                    }
                }
            }
            CFRelease(imageRef);
        }
    }
    if (!data) {
        data = UIImagePNGRepresentation(self);
    }
    return data;
}

@end