YYImageCoder.m

  1. //
  2. // YYImageCoder.m
  3. // YYImage <https://github.com/ibireme/YYImage>
  4. //
  5. // Created by ibireme on 15/5/13.
  6. // Copyright (c) 2015 ibireme.
  7. //
  8. // This source code is licensed under the MIT-style license found in the
  9. // LICENSE file in the root directory of this source tree.
  10. //
  11. #import "YYImageCoder.h"
  12. #import "YYImage.h"
  13. #import <CoreFoundation/CoreFoundation.h>
  14. #import <ImageIO/ImageIO.h>
  15. #import <Accelerate/Accelerate.h>
  16. #import <QuartzCore/QuartzCore.h>
  17. #import <MobileCoreServices/MobileCoreServices.h>
  18. #import <AssetsLibrary/AssetsLibrary.h>
  19. #import <objc/runtime.h>
  20. #import <pthread.h>
  21. #import <zlib.h>
  22. #ifndef YYIMAGE_WEBP_ENABLED
  23. #if __has_include(<webp/decode.h>) && __has_include(<webp/encode.h>) && \
  24. __has_include(<webp/demux.h>) && __has_include(<webp/mux.h>)
  25. #define YYIMAGE_WEBP_ENABLED 1
  26. #import <webp/decode.h>
  27. #import <webp/encode.h>
  28. #import <webp/demux.h>
  29. #import <webp/mux.h>
  30. #elif __has_include("webp/decode.h") && __has_include("webp/encode.h") && \
  31. __has_include("webp/demux.h") && __has_include("webp/mux.h")
  32. #define YYIMAGE_WEBP_ENABLED 1
  33. #import "webp/decode.h"
  34. #import "webp/encode.h"
  35. #import "webp/demux.h"
  36. #import "webp/mux.h"
  37. #else
  38. #define YYIMAGE_WEBP_ENABLED 0
  39. #endif
  40. #endif
  41. ////////////////////////////////////////////////////////////////////////////////
  42. #pragma mark - Utility (for little endian platform)
  43. #define YY_FOUR_CC(c1,c2,c3,c4) ((uint32_t)(((c4) << 24) | ((c3) << 16) | ((c2) << 8) | (c1)))
  44. #define YY_TWO_CC(c1,c2) ((uint16_t)(((c2) << 8) | (c1)))
  45. static inline uint16_t yy_swap_endian_uint16(uint16_t value) {
  46. return
  47. (uint16_t) ((value & 0x00FF) << 8) |
  48. (uint16_t) ((value & 0xFF00) >> 8) ;
  49. }
  50. static inline uint32_t yy_swap_endian_uint32(uint32_t value) {
  51. return
  52. (uint32_t)((value & 0x000000FFU) << 24) |
  53. (uint32_t)((value & 0x0000FF00U) << 8) |
  54. (uint32_t)((value & 0x00FF0000U) >> 8) |
  55. (uint32_t)((value & 0xFF000000U) >> 24) ;
  56. }
  57. ////////////////////////////////////////////////////////////////////////////////
  58. #pragma mark - APNG
  59. /*
  60. PNG spec: http://www.libpng.org/pub/png/spec/1.2/PNG-Structure.html
  61. APNG spec: https://wiki.mozilla.org/APNG_Specification
  62. ===============================================================================
  63. PNG format:
  64. header (8): 89 50 4e 47 0d 0a 1a 0a
  65. chunk, chunk, chunk, ...
  66. ===============================================================================
  67. chunk format:
  68. length (4): uint32_t big endian
  69. fourcc (4): chunk type code
  70. data (length): data
  71. crc32 (4): uint32_t big endian crc32(fourcc + data)
  72. ===============================================================================
  73. PNG chunk define:
  74. IHDR (Image Header) required, must appear first, 13 bytes
  75. width (4) pixel count, should not be zero
  76. height (4) pixel count, should not be zero
  77. bit depth (1) expected: 1, 2, 4, 8, 16
  78. color type (1) 1<<0 (palette used), 1<<1 (color used), 1<<2 (alpha channel used)
  79. compression method (1) 0 (deflate/inflate)
  80. filter method (1) 0 (adaptive filtering with five basic filter types)
  81. interlace method (1) 0 (no interlace) or 1 (Adam7 interlace)
  82. IDAT (Image Data) required, must appear consecutively if there are multiple 'IDAT' chunks
  83. IEND (End) required, must appear last, 0 bytes
  84. ===============================================================================
  85. APNG chunk define:
  86. acTL (Animation Control) required, must appear before 'IDAT', 8 bytes
  87. num frames (4) number of frames
  88. num plays (4) number of times to loop, 0 indicates infinite looping
  89. fcTL (Frame Control) required, must appear before the 'IDAT' or 'fdAT' chunks of the frame to which it applies, 26 bytes
  90. sequence number (4) sequence number of the animation chunk, starting from 0
  91. width (4) width of the following frame
  92. height (4) height of the following frame
  93. x offset (4) x position at which to render the following frame
  94. y offset (4) y position at which to render the following frame
  95. delay num (2) frame delay fraction numerator
  96. delay den (2) frame delay fraction denominator
  97. dispose op (1) type of frame area disposal to be done after rendering this frame (0:none, 1:background 2:previous)
  98. blend op (1) type of frame area rendering for this frame (0:source, 1:over)
  99. fdAT (Frame Data) required
  100. sequence number (4) sequence number of the animation chunk
  101. frame data (x) frame data for this frame (same as 'IDAT')
  102. ===============================================================================
  103. `dispose_op` specifies how the output buffer should be changed at the end of the delay
  104. (before rendering the next frame).
  105. * NONE: no disposal is done on this frame before rendering the next; the contents
  106. of the output buffer are left as is.
  107. * BACKGROUND: the frame's region of the output buffer is to be cleared to fully
  108. transparent black before rendering the next frame.
  109. * PREVIOUS: the frame's region of the output buffer is to be reverted to the previous
  110. contents before rendering the next frame.
  111. `blend_op` specifies whether the frame is to be alpha blended into the current output buffer
  112. content, or whether it should completely replace its region in the output buffer.
  113. * SOURCE: all color components of the frame, including alpha, overwrite the current contents
  114. of the frame's output buffer region.
  115. * OVER: the frame should be composited onto the output buffer based on its alpha,
  116. using a simple OVER operation as described in the "Alpha Channel Processing" section
  117. of the PNG specification
  118. */
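/*
 Illustrative sketch (not part of the original source): reading one chunk header from the
 big-endian PNG byte stream laid out above, assuming `data`/`offset` point at the start of a chunk.

     const uint8_t *p = data + offset;
     uint32_t length = yy_swap_endian_uint32(*(uint32_t *)p);              // data length
     uint32_t fourcc = *(uint32_t *)(p + 4);                               // chunk type code
     const uint8_t *payload = p + 8;                                       // `length` bytes of data
     uint32_t crc = yy_swap_endian_uint32(*(uint32_t *)(p + 8 + length));  // crc32(fourcc + data)
*/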
  119. typedef enum {
  120. YY_PNG_ALPHA_TYPE_PALEETE = 1 << 0,
  121. YY_PNG_ALPHA_TYPE_COLOR = 1 << 1,
  122. YY_PNG_ALPHA_TYPE_ALPHA = 1 << 2,
  123. } yy_png_alpha_type;
  124. typedef enum {
  125. YY_PNG_DISPOSE_OP_NONE = 0,
  126. YY_PNG_DISPOSE_OP_BACKGROUND = 1,
  127. YY_PNG_DISPOSE_OP_PREVIOUS = 2,
  128. } yy_png_dispose_op;
  129. typedef enum {
  130. YY_PNG_BLEND_OP_SOURCE = 0,
  131. YY_PNG_BLEND_OP_OVER = 1,
  132. } yy_png_blend_op;
  133. typedef struct {
  134. uint32_t width; ///< pixel count, should not be zero
  135. uint32_t height; ///< pixel count, should not be zero
  136. uint8_t bit_depth; ///< expected: 1, 2, 4, 8, 16
  137. uint8_t color_type; ///< see yy_png_alpha_type
  138. uint8_t compression_method; ///< 0 (deflate/inflate)
  139. uint8_t filter_method; ///< 0 (adaptive filtering with five basic filter types)
  140. uint8_t interlace_method; ///< 0 (no interlace) or 1 (Adam7 interlace)
  141. } yy_png_chunk_IHDR;
  142. typedef struct {
  143. uint32_t sequence_number; ///< sequence number of the animation chunk, starting from 0
  144. uint32_t width; ///< width of the following frame
  145. uint32_t height; ///< height of the following frame
  146. uint32_t x_offset; ///< x position at which to render the following frame
  147. uint32_t y_offset; ///< y position at which to render the following frame
  148. uint16_t delay_num; ///< frame delay fraction numerator
  149. uint16_t delay_den; ///< frame delay fraction denominator
  150. uint8_t dispose_op; ///< see yy_png_dispose_op
  151. uint8_t blend_op; ///< see yy_png_blend_op
  152. } yy_png_chunk_fcTL;
  153. typedef struct {
  154. uint32_t offset; ///< chunk offset in PNG data
  155. uint32_t fourcc; ///< chunk fourcc
  156. uint32_t length; ///< chunk data length
  157. uint32_t crc32; ///< chunk crc32
  158. } yy_png_chunk_info;
  159. typedef struct {
  160. uint32_t chunk_index; ///< the first `fdAT`/`IDAT` chunk index
  161. uint32_t chunk_num; ///< the `fdAT`/`IDAT` chunk count
  162. uint32_t chunk_size; ///< the `fdAT`/`IDAT` chunk bytes
  163. yy_png_chunk_fcTL frame_control;
  164. } yy_png_frame_info;
  165. typedef struct {
  166. yy_png_chunk_IHDR header; ///< png header
  167. yy_png_chunk_info *chunks; ///< chunks
  168. uint32_t chunk_num; ///< count of chunks
  169. yy_png_frame_info *apng_frames; ///< frame info, NULL if not apng
  170. uint32_t apng_frame_num; ///< 0 if not apng
  171. uint32_t apng_loop_num; ///< 0 indicates infinite looping
  172. uint32_t *apng_shared_chunk_indexs; ///< shared chunk index
  173. uint32_t apng_shared_chunk_num; ///< shared chunk count
  174. uint32_t apng_shared_chunk_size; ///< shared chunk bytes
  175. uint32_t apng_shared_insert_index; ///< shared chunk insert index
  176. bool apng_first_frame_is_cover; ///< the first frame is same as png (cover)
  177. } yy_png_info;
  178. static void yy_png_chunk_IHDR_read(yy_png_chunk_IHDR *IHDR, const uint8_t *data) {
  179. IHDR->width = yy_swap_endian_uint32(*((uint32_t *)(data)));
  180. IHDR->height = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
  181. IHDR->bit_depth = data[8];
  182. IHDR->color_type = data[9];
  183. IHDR->compression_method = data[10];
  184. IHDR->filter_method = data[11];
  185. IHDR->interlace_method = data[12];
  186. }
  187. static void yy_png_chunk_IHDR_write(yy_png_chunk_IHDR *IHDR, uint8_t *data) {
  188. *((uint32_t *)(data)) = yy_swap_endian_uint32(IHDR->width);
  189. *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(IHDR->height);
  190. data[8] = IHDR->bit_depth;
  191. data[9] = IHDR->color_type;
  192. data[10] = IHDR->compression_method;
  193. data[11] = IHDR->filter_method;
  194. data[12] = IHDR->interlace_method;
  195. }
  196. static void yy_png_chunk_fcTL_read(yy_png_chunk_fcTL *fcTL, const uint8_t *data) {
  197. fcTL->sequence_number = yy_swap_endian_uint32(*((uint32_t *)(data)));
  198. fcTL->width = yy_swap_endian_uint32(*((uint32_t *)(data + 4)));
  199. fcTL->height = yy_swap_endian_uint32(*((uint32_t *)(data + 8)));
  200. fcTL->x_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 12)));
  201. fcTL->y_offset = yy_swap_endian_uint32(*((uint32_t *)(data + 16)));
  202. fcTL->delay_num = yy_swap_endian_uint16(*((uint16_t *)(data + 20)));
  203. fcTL->delay_den = yy_swap_endian_uint16(*((uint16_t *)(data + 22)));
  204. fcTL->dispose_op = data[24];
  205. fcTL->blend_op = data[25];
  206. }
  207. static void yy_png_chunk_fcTL_write(yy_png_chunk_fcTL *fcTL, uint8_t *data) {
  208. *((uint32_t *)(data)) = yy_swap_endian_uint32(fcTL->sequence_number);
  209. *((uint32_t *)(data + 4)) = yy_swap_endian_uint32(fcTL->width);
  210. *((uint32_t *)(data + 8)) = yy_swap_endian_uint32(fcTL->height);
  211. *((uint32_t *)(data + 12)) = yy_swap_endian_uint32(fcTL->x_offset);
  212. *((uint32_t *)(data + 16)) = yy_swap_endian_uint32(fcTL->y_offset);
  213. *((uint16_t *)(data + 20)) = yy_swap_endian_uint16(fcTL->delay_num);
  214. *((uint16_t *)(data + 22)) = yy_swap_endian_uint16(fcTL->delay_den);
  215. data[24] = fcTL->dispose_op;
  216. data[25] = fcTL->blend_op;
  217. }
  218. // convert double value to fraction
  219. static void yy_png_delay_to_fraction(double duration, uint16_t *num, uint16_t *den) {
  220. if (duration >= 0xFF) {
  221. *num = 0xFF;
  222. *den = 1;
  223. } else if (duration <= 1.0 / (double)0xFF) {
  224. *num = 1;
  225. *den = 0xFF;
  226. } else {
  227. // Use continued fraction to calculate the num and den.
  228. long MAX = 10;
  229. double eps = (0.5 / (double)0xFF);
  230. long p[MAX], q[MAX], a[MAX], i, numl = 0, denl = 0;
  231. // The first two convergents are 0/1 and 1/0
  232. p[0] = 0; q[0] = 1;
  233. p[1] = 1; q[1] = 0;
  234. // The rest of the convergents (and continued fraction)
  235. for (i = 2; i < MAX; i++) {
  236. a[i] = lrint(floor(duration));
  237. p[i] = a[i] * p[i - 1] + p[i - 2];
  238. q[i] = a[i] * q[i - 1] + q[i - 2];
  239. if (p[i] <= 0xFF && q[i] <= 0xFF) { // uint16_t
  240. numl = p[i];
  241. denl = q[i];
  242. } else break;
  243. if (fabs(duration - a[i]) < eps) break;
  244. duration = 1.0 / (duration - a[i]);
  245. }
  246. if (numl != 0 && denl != 0) {
  247. *num = numl;
  248. *den = denl;
  249. } else {
  250. *num = 1;
  251. *den = 100;
  252. }
  253. }
  254. }
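/*
 Illustrative check (not part of the original source): a duration of 0.04 s makes the
 continued-fraction loop above converge in two steps to num = 1, den = 25 (exactly 1/25 s);
 durations outside the range [1/255 s, 255 s] are clamped by the first two branches.
*/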
  255. // convert fraction to double value
  256. static double yy_png_delay_to_seconds(uint16_t num, uint16_t den) {
  257. if (den == 0) {
  258. return num / 100.0;
  259. } else {
  260. return (double)num / (double)den;
  261. }
  262. }
  263. static bool yy_png_validate_animation_chunk_order(yy_png_chunk_info *chunks, /* input */
  264. uint32_t chunk_num, /* input */
  265. uint32_t *first_idat_index, /* output */
  266. bool *first_frame_is_cover /* output */) {
  267. /*
  268. PNG at least contains 3 chunks: IHDR, IDAT, IEND.
  269. `IHDR` must appear first.
  270. `IDAT` must appear consecutively.
  271. `IEND` must appear last.
  272. APNG must contain one `acTL` and at least one `fcTL` and `fdAT`.
  273. `fdAT` must appear consecutively.
  274. `fcTL` must appear before `IDAT` or `fdAT`.
  275. */
  276. if (chunk_num <= 2) return false;
  277. if (chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R')) return false;
  278. if ((chunks + chunk_num - 1)->fourcc != YY_FOUR_CC('I', 'E', 'N', 'D')) return false;
  279. uint32_t prev_fourcc = 0;
  280. uint32_t IHDR_num = 0;
  281. uint32_t IDAT_num = 0;
  282. uint32_t acTL_num = 0;
  283. uint32_t fcTL_num = 0;
  284. uint32_t first_IDAT = 0;
  285. bool first_frame_cover = false;
  286. for (uint32_t i = 0; i < chunk_num; i++) {
  287. yy_png_chunk_info *chunk = chunks + i;
  288. switch (chunk->fourcc) {
  289. case YY_FOUR_CC('I', 'H', 'D', 'R'): { // png header
  290. if (i != 0) return false;
  291. if (IHDR_num > 0) return false;
  292. IHDR_num++;
  293. } break;
  294. case YY_FOUR_CC('I', 'D', 'A', 'T'): { // png data
  295. if (prev_fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  296. if (IDAT_num == 0)
  297. first_IDAT = i;
  298. else
  299. return false;
  300. }
  301. IDAT_num++;
  302. } break;
  303. case YY_FOUR_CC('a', 'c', 'T', 'L'): { // apng control
  304. if (acTL_num > 0) return false;
  305. acTL_num++;
  306. } break;
  307. case YY_FOUR_CC('f', 'c', 'T', 'L'): { // apng frame control
  308. if (i + 1 == chunk_num) return false;
  309. if ((chunk + 1)->fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') &&
  310. (chunk + 1)->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  311. return false;
  312. }
  313. if (fcTL_num == 0) {
  314. if ((chunk + 1)->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  315. first_frame_cover = true;
  316. }
  317. }
  318. fcTL_num++;
  319. } break;
  320. case YY_FOUR_CC('f', 'd', 'A', 'T'): { // apng data
  321. if (prev_fourcc != YY_FOUR_CC('f', 'd', 'A', 'T') && prev_fourcc != YY_FOUR_CC('f', 'c', 'T', 'L')) {
  322. return false;
  323. }
  324. } break;
  325. }
  326. prev_fourcc = chunk->fourcc;
  327. }
  328. if (IHDR_num != 1) return false;
  329. if (IDAT_num == 0) return false;
  330. if (acTL_num != 1) return false;
  331. if (fcTL_num < acTL_num) return false;
  332. *first_idat_index = first_IDAT;
  333. *first_frame_is_cover = first_frame_cover;
  334. return true;
  335. }
  336. static void yy_png_info_release(yy_png_info *info) {
  337. if (info) {
  338. if (info->chunks) free(info->chunks);
  339. if (info->apng_frames) free(info->apng_frames);
  340. if (info->apng_shared_chunk_indexs) free(info->apng_shared_chunk_indexs);
  341. free(info);
  342. }
  343. }
  344. /**
  345. Create a png info from a png file. See the yy_png_info struct for more information.
  346. @param data png/apng file data.
  347. @param length the data's length in bytes.
  348. @return A png info object; call yy_png_info_release() to release it.
  349. Returns NULL if an error occurs.
  350. */
  351. static yy_png_info *yy_png_info_create(const uint8_t *data, uint32_t length) {
  352. if (length < 32) return NULL;
  353. if (*((uint32_t *)data) != YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47)) return NULL;
  354. if (*((uint32_t *)(data + 4)) != YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A)) return NULL;
  355. uint32_t chunk_realloc_num = 16;
  356. yy_png_chunk_info *chunks = malloc(sizeof(yy_png_chunk_info) * chunk_realloc_num);
  357. if (!chunks) return NULL;
  358. // parse png chunks
  359. uint32_t offset = 8;
  360. uint32_t chunk_num = 0;
  361. uint32_t chunk_capacity = chunk_realloc_num;
  362. uint32_t apng_loop_num = 0;
  363. int32_t apng_sequence_index = -1;
  364. int32_t apng_frame_index = 0;
  365. int32_t apng_frame_number = -1;
  366. bool apng_chunk_error = false;
  367. do {
  368. if (chunk_num >= chunk_capacity) {
  369. yy_png_chunk_info *new_chunks = realloc(chunks, sizeof(yy_png_chunk_info) * (chunk_capacity + chunk_realloc_num));
  370. if (!new_chunks) {
  371. free(chunks);
  372. return NULL;
  373. }
  374. chunks = new_chunks;
  375. chunk_capacity += chunk_realloc_num;
  376. }
  377. yy_png_chunk_info *chunk = chunks + chunk_num;
  378. const uint8_t *chunk_data = data + offset;
  379. chunk->offset = offset;
  380. chunk->length = yy_swap_endian_uint32(*((uint32_t *)chunk_data));
  381. if ((uint64_t)chunk->offset + (uint64_t)chunk->length + 12 > length) {
  382. free(chunks);
  383. return NULL;
  384. }
  385. chunk->fourcc = *((uint32_t *)(chunk_data + 4));
  386. if ((uint64_t)chunk->offset + 4 + chunk->length + 4 > (uint64_t)length) break;
  387. chunk->crc32 = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8 + chunk->length)));
  388. chunk_num++;
  389. offset += 12 + chunk->length;
  390. switch (chunk->fourcc) {
  391. case YY_FOUR_CC('a', 'c', 'T', 'L') : {
  392. if (chunk->length == 8) {
  393. apng_frame_number = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
  394. apng_loop_num = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 12)));
  395. } else {
  396. apng_chunk_error = true;
  397. }
  398. } break;
  399. case YY_FOUR_CC('f', 'c', 'T', 'L') :
  400. case YY_FOUR_CC('f', 'd', 'A', 'T') : {
  401. if (chunk->fourcc == YY_FOUR_CC('f', 'c', 'T', 'L')) {
  402. if (chunk->length != 26) {
  403. apng_chunk_error = true;
  404. } else {
  405. apng_frame_index++;
  406. }
  407. }
  408. if (chunk->length > 4) {
  409. uint32_t sequence = yy_swap_endian_uint32(*((uint32_t *)(chunk_data + 8)));
  410. if (apng_sequence_index + 1 == sequence) {
  411. apng_sequence_index++;
  412. } else {
  413. apng_chunk_error = true;
  414. }
  415. } else {
  416. apng_chunk_error = true;
  417. }
  418. } break;
  419. case YY_FOUR_CC('I', 'E', 'N', 'D') : {
  420. offset = length; // end, break do-while loop
  421. } break;
  422. }
  423. } while (offset + 12 <= length);
  424. if (chunk_num < 3 ||
  425. chunks->fourcc != YY_FOUR_CC('I', 'H', 'D', 'R') ||
  426. chunks->length != 13) {
  427. free(chunks);
  428. return NULL;
  429. }
  430. // png info
  431. yy_png_info *info = calloc(1, sizeof(yy_png_info));
  432. if (!info) {
  433. free(chunks);
  434. return NULL;
  435. }
  436. info->chunks = chunks;
  437. info->chunk_num = chunk_num;
  438. yy_png_chunk_IHDR_read(&info->header, data + chunks->offset + 8);
  439. // apng info
  440. if (!apng_chunk_error && apng_frame_number == apng_frame_index && apng_frame_number >= 1) {
  441. bool first_frame_is_cover = false;
  442. uint32_t first_IDAT_index = 0;
  443. if (!yy_png_validate_animation_chunk_order(info->chunks, info->chunk_num, &first_IDAT_index, &first_frame_is_cover)) {
  444. return info; // ignore apng chunk
  445. }
  446. info->apng_loop_num = apng_loop_num;
  447. info->apng_frame_num = apng_frame_number;
  448. info->apng_first_frame_is_cover = first_frame_is_cover;
  449. info->apng_shared_insert_index = first_IDAT_index;
  450. info->apng_frames = calloc(apng_frame_number, sizeof(yy_png_frame_info));
  451. if (!info->apng_frames) {
  452. yy_png_info_release(info);
  453. return NULL;
  454. }
  455. info->apng_shared_chunk_indexs = calloc(info->chunk_num, sizeof(uint32_t));
  456. if (!info->apng_shared_chunk_indexs) {
  457. yy_png_info_release(info);
  458. return NULL;
  459. }
  460. int32_t frame_index = -1;
  461. uint32_t *shared_chunk_index = info->apng_shared_chunk_indexs;
  462. for (int32_t i = 0; i < info->chunk_num; i++) {
  463. yy_png_chunk_info *chunk = info->chunks + i;
  464. switch (chunk->fourcc) {
  465. case YY_FOUR_CC('I', 'D', 'A', 'T'): {
  466. if (info->apng_shared_insert_index == 0) {
  467. info->apng_shared_insert_index = i;
  468. }
  469. if (first_frame_is_cover) {
  470. yy_png_frame_info *frame = info->apng_frames + frame_index;
  471. frame->chunk_num++;
  472. frame->chunk_size += chunk->length + 12;
  473. }
  474. } break;
  475. case YY_FOUR_CC('a', 'c', 'T', 'L'): {
  476. } break;
  477. case YY_FOUR_CC('f', 'c', 'T', 'L'): {
  478. frame_index++;
  479. yy_png_frame_info *frame = info->apng_frames + frame_index;
  480. frame->chunk_index = i + 1;
  481. yy_png_chunk_fcTL_read(&frame->frame_control, data + chunk->offset + 8);
  482. } break;
  483. case YY_FOUR_CC('f', 'd', 'A', 'T'): {
  484. yy_png_frame_info *frame = info->apng_frames + frame_index;
  485. frame->chunk_num++;
  486. frame->chunk_size += chunk->length + 12;
  487. } break;
  488. default: {
  489. *shared_chunk_index = i;
  490. shared_chunk_index++;
  491. info->apng_shared_chunk_size += chunk->length + 12;
  492. info->apng_shared_chunk_num++;
  493. } break;
  494. }
  495. }
  496. }
  497. return info;
  498. }
  499. /**
  500. Copy png frame data from an apng file.
  501. @param data apng file data
  502. @param info png info
  503. @param index frame index (zero-based)
  504. @param size output, the size of the frame data in bytes
  505. @return Frame data (a single-frame png file); call free() to release it.
  506. Returns NULL if an error occurs.
  507. */
  508. static uint8_t *yy_png_copy_frame_data_at_index(const uint8_t *data,
  509. const yy_png_info *info,
  510. const uint32_t index,
  511. uint32_t *size) {
  512. if (index >= info->apng_frame_num) return NULL;
  513. yy_png_frame_info *frame_info = info->apng_frames + index;
  514. uint32_t frame_remux_size = 8 /* PNG Header */ + info->apng_shared_chunk_size + frame_info->chunk_size;
  515. if (!(info->apng_first_frame_is_cover && index == 0)) {
  516. frame_remux_size -= frame_info->chunk_num * 4; // remove fdAT sequence number
  517. }
  518. uint8_t *frame_data = malloc(frame_remux_size);
  519. if (!frame_data) return NULL;
  520. *size = frame_remux_size;
  521. uint32_t data_offset = 0;
  522. bool inserted = false;
  523. memcpy(frame_data, data, 8); // PNG File Header
  524. data_offset += 8;
  525. for (uint32_t i = 0; i < info->apng_shared_chunk_num; i++) {
  526. uint32_t shared_chunk_index = info->apng_shared_chunk_indexs[i];
  527. yy_png_chunk_info *shared_chunk_info = info->chunks + shared_chunk_index;
  528. if (shared_chunk_index >= info->apng_shared_insert_index && !inserted) { // replace IDAT with fdAT
  529. inserted = true;
  530. for (uint32_t c = 0; c < frame_info->chunk_num; c++) {
  531. yy_png_chunk_info *insert_chunk_info = info->chunks + frame_info->chunk_index + c;
  532. if (insert_chunk_info->fourcc == YY_FOUR_CC('f', 'd', 'A', 'T')) {
  533. *((uint32_t *)(frame_data + data_offset)) = yy_swap_endian_uint32(insert_chunk_info->length - 4);
  534. *((uint32_t *)(frame_data + data_offset + 4)) = YY_FOUR_CC('I', 'D', 'A', 'T');
  535. memcpy(frame_data + data_offset + 8, data + insert_chunk_info->offset + 12, insert_chunk_info->length - 4);
  536. uint32_t crc = (uint32_t)crc32(0, frame_data + data_offset + 4, insert_chunk_info->length);
  537. *((uint32_t *)(frame_data + data_offset + insert_chunk_info->length + 4)) = yy_swap_endian_uint32(crc);
  538. data_offset += insert_chunk_info->length + 8;
  539. } else { // IDAT
  540. memcpy(frame_data + data_offset, data + insert_chunk_info->offset, insert_chunk_info->length + 12);
  541. data_offset += insert_chunk_info->length + 12;
  542. }
  543. }
  544. }
  545. if (shared_chunk_info->fourcc == YY_FOUR_CC('I', 'H', 'D', 'R')) {
  546. uint8_t tmp[25] = {0};
  547. memcpy(tmp, data + shared_chunk_info->offset, 25);
  548. yy_png_chunk_IHDR IHDR = info->header;
  549. IHDR.width = frame_info->frame_control.width;
  550. IHDR.height = frame_info->frame_control.height;
  551. yy_png_chunk_IHDR_write(&IHDR, tmp + 8);
  552. *((uint32_t *)(tmp + 21)) = yy_swap_endian_uint32((uint32_t)crc32(0, tmp + 4, 17));
  553. memcpy(frame_data + data_offset, tmp, 25);
  554. data_offset += 25;
  555. } else {
  556. memcpy(frame_data + data_offset, data + shared_chunk_info->offset, shared_chunk_info->length + 12);
  557. data_offset += shared_chunk_info->length + 12;
  558. }
  559. }
  560. return frame_data;
  561. }
  562. ////////////////////////////////////////////////////////////////////////////////
  563. #pragma mark - Helper
  564. /// Returns byte-aligned size.
  565. static inline size_t YYImageByteAlign(size_t size, size_t alignment) {
  566. return ((size + (alignment - 1)) / alignment) * alignment;
  567. }
  568. /// Convert degrees to radians.
  569. static inline CGFloat YYImageDegreesToRadians(CGFloat degrees) {
  570. return degrees * M_PI / 180;
  571. }
  572. CGColorSpaceRef YYCGColorSpaceGetDeviceRGB() {
  573. static CGColorSpaceRef space;
  574. static dispatch_once_t onceToken;
  575. dispatch_once(&onceToken, ^{
  576. space = CGColorSpaceCreateDeviceRGB();
  577. });
  578. return space;
  579. }
  580. CGColorSpaceRef YYCGColorSpaceGetDeviceGray() {
  581. static CGColorSpaceRef space;
  582. static dispatch_once_t onceToken;
  583. dispatch_once(&onceToken, ^{
  584. space = CGColorSpaceCreateDeviceGray();
  585. });
  586. return space;
  587. }
  588. BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space) {
  589. return space && CFEqual(space, YYCGColorSpaceGetDeviceRGB());
  590. }
  591. BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space) {
  592. return space && CFEqual(space, YYCGColorSpaceGetDeviceGray());
  593. }
  594. /**
  595. A callback used in CGDataProviderCreateWithData() to release data.
  596. Example:
  597. void *data = malloc(size);
  598. CGDataProviderRef provider = CGDataProviderCreateWithData(data, data, size, YYCGDataProviderReleaseDataCallback);
  599. */
  600. static void YYCGDataProviderReleaseDataCallback(void *info, const void *data, size_t size) {
  601. if (info) free(info);
  602. }
  603. /**
  604. Decode an image to bitmap buffer with the specified format.
  605. @param srcImage Source image.
  606. @param dest Destination buffer. It should be zeroed before calling this method.
  607. If decoding succeeds, you should release dest->data using free().
  608. @param destFormat Destination bitmap format.
  609. @return Whether the decode succeeded.
  610. @warning This method supports iOS 7.0 and later. If called on iOS 6, it just returns NO.
  611. CG_AVAILABLE_STARTING(__MAC_10_9, __IPHONE_7_0)
  612. */
  613. static BOOL YYCGImageDecodeToBitmapBufferWithAnyFormat(CGImageRef srcImage, vImage_Buffer *dest, vImage_CGImageFormat *destFormat) {
  614. if (!srcImage || (((long)vImageConvert_AnyToAny) + 1 == 1) || !destFormat || !dest) return NO;
  615. size_t width = CGImageGetWidth(srcImage);
  616. size_t height = CGImageGetHeight(srcImage);
  617. if (width == 0 || height == 0) return NO;
  618. dest->data = NULL;
  619. vImage_Error error = kvImageNoError;
  620. CFDataRef srcData = NULL;
  621. vImageConverterRef convertor = NULL;
  622. vImage_CGImageFormat srcFormat = {0};
  623. srcFormat.bitsPerComponent = (uint32_t)CGImageGetBitsPerComponent(srcImage);
  624. srcFormat.bitsPerPixel = (uint32_t)CGImageGetBitsPerPixel(srcImage);
  625. srcFormat.colorSpace = CGImageGetColorSpace(srcImage);
  626. srcFormat.bitmapInfo = CGImageGetBitmapInfo(srcImage) | CGImageGetAlphaInfo(srcImage);
  627. convertor = vImageConverter_CreateWithCGImageFormat(&srcFormat, destFormat, NULL, kvImageNoFlags, NULL);
  628. if (!convertor) goto fail;
  629. CGDataProviderRef srcProvider = CGImageGetDataProvider(srcImage);
  630. srcData = srcProvider ? CGDataProviderCopyData(srcProvider) : NULL; // decode
  631. size_t srcLength = srcData ? CFDataGetLength(srcData) : 0;
  632. const void *srcBytes = srcData ? CFDataGetBytePtr(srcData) : NULL;
  633. if (srcLength == 0 || !srcBytes) goto fail;
  634. vImage_Buffer src = {0};
  635. src.data = (void *)srcBytes;
  636. src.width = width;
  637. src.height = height;
  638. src.rowBytes = CGImageGetBytesPerRow(srcImage);
  639. error = vImageBuffer_Init(dest, height, width, 32, kvImageNoFlags);
  640. if (error != kvImageNoError) goto fail;
  641. error = vImageConvert_AnyToAny(convertor, &src, dest, NULL, kvImageNoFlags); // convert
  642. if (error != kvImageNoError) goto fail;
  643. CFRelease(convertor);
  644. CFRelease(srcData);
  645. return YES;
  646. fail:
  647. if (convertor) CFRelease(convertor);
  648. if (srcData) CFRelease(srcData);
  649. if (dest->data) free(dest->data);
  650. dest->data = NULL;
  651. return NO;
  652. }
  653. /**
  654. Decode an image to bitmap buffer with the 32bit format (such as ARGB8888).
  655. @param srcImage Source image.
  656. @param dest Destination buffer. It should be zeroed before calling this method.
  657. If decoding succeeds, you should release dest->data using free().
  658. @param bitmapInfo Destination bitmap format.
  659. @return Whether the decode succeeded.
  660. */
  661. static BOOL YYCGImageDecodeToBitmapBufferWith32BitFormat(CGImageRef srcImage, vImage_Buffer *dest, CGBitmapInfo bitmapInfo) {
  662. if (!srcImage || !dest) return NO;
  663. size_t width = CGImageGetWidth(srcImage);
  664. size_t height = CGImageGetHeight(srcImage);
  665. if (width == 0 || height == 0) return NO;
  666. BOOL hasAlpha = NO;
  667. BOOL alphaFirst = NO;
  668. BOOL alphaPremultiplied = NO;
  669. BOOL byteOrderNormal = NO;
  670. switch (bitmapInfo & kCGBitmapAlphaInfoMask) {
  671. case kCGImageAlphaPremultipliedLast: {
  672. hasAlpha = YES;
  673. alphaPremultiplied = YES;
  674. } break;
  675. case kCGImageAlphaPremultipliedFirst: {
  676. hasAlpha = YES;
  677. alphaPremultiplied = YES;
  678. alphaFirst = YES;
  679. } break;
  680. case kCGImageAlphaLast: {
  681. hasAlpha = YES;
  682. } break;
  683. case kCGImageAlphaFirst: {
  684. hasAlpha = YES;
  685. alphaFirst = YES;
  686. } break;
  687. case kCGImageAlphaNoneSkipLast: {
  688. } break;
  689. case kCGImageAlphaNoneSkipFirst: {
  690. alphaFirst = YES;
  691. } break;
  692. default: {
  693. return NO;
  694. } break;
  695. }
  696. switch (bitmapInfo & kCGBitmapByteOrderMask) {
  697. case kCGBitmapByteOrderDefault: {
  698. byteOrderNormal = YES;
  699. } break;
  700. case kCGBitmapByteOrder32Little: {
  701. } break;
  702. case kCGBitmapByteOrder32Big: {
  703. byteOrderNormal = YES;
  704. } break;
  705. default: {
  706. return NO;
  707. } break;
  708. }
  709. /*
  710. Try converting with vImageConvert_AnyToAny() (available since iOS 7.0).
  711. If that fails, try decoding with CGContextDrawImage().
  712. CGBitmapContext uses a premultiplied alpha format; unpremultiplying may lose precision.
  713. */
  714. vImage_CGImageFormat destFormat = {0};
  715. destFormat.bitsPerComponent = 8;
  716. destFormat.bitsPerPixel = 32;
  717. destFormat.colorSpace = YYCGColorSpaceGetDeviceRGB();
  718. destFormat.bitmapInfo = bitmapInfo;
  719. dest->data = NULL;
  720. if (YYCGImageDecodeToBitmapBufferWithAnyFormat(srcImage, dest, &destFormat)) return YES;
  721. CGBitmapInfo contextBitmapInfo = bitmapInfo & kCGBitmapByteOrderMask;
  722. if (!hasAlpha || alphaPremultiplied) {
  723. contextBitmapInfo |= (bitmapInfo & kCGBitmapAlphaInfoMask);
  724. } else {
  725. contextBitmapInfo |= alphaFirst ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaPremultipliedLast;
  726. }
  727. CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), contextBitmapInfo);
  728. if (!context) goto fail;
  729. CGContextDrawImage(context, CGRectMake(0, 0, width, height), srcImage); // decode and convert
  730. size_t bytesPerRow = CGBitmapContextGetBytesPerRow(context);
  731. size_t length = height * bytesPerRow;
  732. void *data = CGBitmapContextGetData(context);
  733. if (length == 0 || !data) goto fail;
  734. dest->data = malloc(length);
  735. dest->width = width;
  736. dest->height = height;
  737. dest->rowBytes = bytesPerRow;
  738. if (!dest->data) goto fail;
  739. if (hasAlpha && !alphaPremultiplied) {
  740. vImage_Buffer tmpSrc = {0};
  741. tmpSrc.data = data;
  742. tmpSrc.width = width;
  743. tmpSrc.height = height;
  744. tmpSrc.rowBytes = bytesPerRow;
  745. vImage_Error error;
  746. if (alphaFirst && byteOrderNormal) {
  747. error = vImageUnpremultiplyData_ARGB8888(&tmpSrc, dest, kvImageNoFlags);
  748. } else {
  749. error = vImageUnpremultiplyData_RGBA8888(&tmpSrc, dest, kvImageNoFlags);
  750. }
  751. if (error != kvImageNoError) goto fail;
  752. } else {
  753. memcpy(dest->data, data, length);
  754. }
  755. CFRelease(context);
  756. return YES;
  757. fail:
  758. if (context) CFRelease(context);
  759. if (dest->data) free(dest->data);
  760. dest->data = NULL;
  761. return NO;
  763. }
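/*
 Usage sketch (assumption, not part of the original source): decode a CGImage into a
 premultiplied BGRA8888 buffer and free the pixel data when finished.

     vImage_Buffer buffer = {0};
     if (YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &buffer,
             kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host)) {
         // ... read buffer.data row by row using buffer.rowBytes ...
         free(buffer.data);
     }
*/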
  764. CGImageRef YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay) {
  765. if (!imageRef) return NULL;
  766. size_t width = CGImageGetWidth(imageRef);
  767. size_t height = CGImageGetHeight(imageRef);
  768. if (width == 0 || height == 0) return NULL;
  769. if (decodeForDisplay) { //decode with redraw (may lose some precision)
  770. CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
  771. BOOL hasAlpha = NO;
  772. if (alphaInfo == kCGImageAlphaPremultipliedLast ||
  773. alphaInfo == kCGImageAlphaPremultipliedFirst ||
  774. alphaInfo == kCGImageAlphaLast ||
  775. alphaInfo == kCGImageAlphaFirst) {
  776. hasAlpha = YES;
  777. }
  778. // BGRA8888 (premultiplied) or BGRX8888
  779. // same as UIGraphicsBeginImageContext() and -[UIView drawRect:]
  780. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
  781. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
  782. CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, YYCGColorSpaceGetDeviceRGB(), bitmapInfo);
  783. if (!context) return NULL;
  784. CGContextDrawImage(context, CGRectMake(0, 0, width, height), imageRef); // decode
  785. CGImageRef newImage = CGBitmapContextCreateImage(context);
  786. CFRelease(context);
  787. return newImage;
  788. } else {
  789. CGColorSpaceRef space = CGImageGetColorSpace(imageRef);
  790. size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
  791. size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
  792. size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
  793. CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
  794. if (bytesPerRow == 0 || width == 0 || height == 0) return NULL;
  795. CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
  796. if (!dataProvider) return NULL;
  797. CFDataRef data = CGDataProviderCopyData(dataProvider); // decode
  798. if (!data) return NULL;
  799. CGDataProviderRef newProvider = CGDataProviderCreateWithCFData(data);
  800. CFRelease(data);
  801. if (!newProvider) return NULL;
  802. CGImageRef newImage = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, space, bitmapInfo, newProvider, NULL, false, kCGRenderingIntentDefault);
  803. CFRelease(newProvider);
  804. return newImage;
  805. }
  806. }
  807. CGImageRef YYCGImageCreateAffineTransformCopy(CGImageRef imageRef, CGAffineTransform transform, CGSize destSize, CGBitmapInfo destBitmapInfo) {
  808. if (!imageRef) return NULL;
  809. size_t srcWidth = CGImageGetWidth(imageRef);
  810. size_t srcHeight = CGImageGetHeight(imageRef);
  811. size_t destWidth = round(destSize.width);
  812. size_t destHeight = round(destSize.height);
  813. if (srcWidth == 0 || srcHeight == 0 || destWidth == 0 || destHeight == 0) return NULL;
  814. CGDataProviderRef tmpProvider = NULL, destProvider = NULL;
  815. CGImageRef tmpImage = NULL, destImage = NULL;
  816. vImage_Buffer src = {0}, tmp = {0}, dest = {0};
  817. if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &src, kCGImageAlphaFirst | kCGBitmapByteOrderDefault)) return NULL;
  818. size_t destBytesPerRow = YYImageByteAlign(destWidth * 4, 32);
  819. tmp.data = malloc(destHeight * destBytesPerRow);
  820. if (!tmp.data) goto fail;
  821. tmp.width = destWidth;
  822. tmp.height = destHeight;
  823. tmp.rowBytes = destBytesPerRow;
  824. vImage_CGAffineTransform vTransform = *((vImage_CGAffineTransform *)&transform);
  825. uint8_t backColor[4] = {0};
  826. vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &tmp, NULL, &vTransform, backColor, kvImageBackgroundColorFill);
  827. if (error != kvImageNoError) goto fail;
  828. free(src.data);
  829. src.data = NULL;
  830. tmpProvider = CGDataProviderCreateWithData(tmp.data, tmp.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
  831. if (!tmpProvider) goto fail;
  832. tmp.data = NULL; // hold by provider
  833. tmpImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), kCGImageAlphaFirst | kCGBitmapByteOrderDefault, tmpProvider, NULL, false, kCGRenderingIntentDefault);
  834. if (!tmpImage) goto fail;
  835. CFRelease(tmpProvider);
  836. tmpProvider = NULL;
  837. if ((destBitmapInfo & kCGBitmapAlphaInfoMask) == kCGImageAlphaFirst &&
  838. (destBitmapInfo & kCGBitmapByteOrderMask) != kCGBitmapByteOrder32Little) {
  839. return tmpImage;
  840. }
  841. if (!YYCGImageDecodeToBitmapBufferWith32BitFormat(tmpImage, &dest, destBitmapInfo)) goto fail;
  842. CFRelease(tmpImage);
  843. tmpImage = NULL;
  844. destProvider = CGDataProviderCreateWithData(dest.data, dest.data, destHeight * destBytesPerRow, YYCGDataProviderReleaseDataCallback);
  845. if (!destProvider) goto fail;
  846. dest.data = NULL; // hold by provider
  847. destImage = CGImageCreate(destWidth, destHeight, 8, 32, destBytesPerRow, YYCGColorSpaceGetDeviceRGB(), destBitmapInfo, destProvider, NULL, false, kCGRenderingIntentDefault);
  848. if (!destImage) goto fail;
  849. CFRelease(destProvider);
  850. destProvider = NULL;
  851. return destImage;
  852. fail:
  853. if (src.data) free(src.data);
  854. if (tmp.data) free(tmp.data);
  855. if (dest.data) free(dest.data);
  856. if (tmpProvider) CFRelease(tmpProvider);
  857. if (tmpImage) CFRelease(tmpImage);
  858. if (destProvider) CFRelease(destProvider);
  859. return NULL;
  860. }
  861. UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value) {
  862. switch (value) {
  863. case kCGImagePropertyOrientationUp: return UIImageOrientationUp;
  864. case kCGImagePropertyOrientationDown: return UIImageOrientationDown;
  865. case kCGImagePropertyOrientationLeft: return UIImageOrientationLeft;
  866. case kCGImagePropertyOrientationRight: return UIImageOrientationRight;
  867. case kCGImagePropertyOrientationUpMirrored: return UIImageOrientationUpMirrored;
  868. case kCGImagePropertyOrientationDownMirrored: return UIImageOrientationDownMirrored;
  869. case kCGImagePropertyOrientationLeftMirrored: return UIImageOrientationLeftMirrored;
  870. case kCGImagePropertyOrientationRightMirrored: return UIImageOrientationRightMirrored;
  871. default: return UIImageOrientationUp;
  872. }
  873. }
  874. NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation) {
  875. switch (orientation) {
  876. case UIImageOrientationUp: return kCGImagePropertyOrientationUp;
  877. case UIImageOrientationDown: return kCGImagePropertyOrientationDown;
  878. case UIImageOrientationLeft: return kCGImagePropertyOrientationLeft;
  879. case UIImageOrientationRight: return kCGImagePropertyOrientationRight;
  880. case UIImageOrientationUpMirrored: return kCGImagePropertyOrientationUpMirrored;
  881. case UIImageOrientationDownMirrored: return kCGImagePropertyOrientationDownMirrored;
  882. case UIImageOrientationLeftMirrored: return kCGImagePropertyOrientationLeftMirrored;
  883. case UIImageOrientationRightMirrored: return kCGImagePropertyOrientationRightMirrored;
  884. default: return kCGImagePropertyOrientationUp;
  885. }
  886. }
  887. CGImageRef YYCGImageCreateCopyWithOrientation(CGImageRef imageRef, UIImageOrientation orientation, CGBitmapInfo destBitmapInfo) {
  888. if (!imageRef) return NULL;
  889. if (orientation == UIImageOrientationUp) return (CGImageRef)CFRetain(imageRef);
  890. size_t width = CGImageGetWidth(imageRef);
  891. size_t height = CGImageGetHeight(imageRef);
  892. CGAffineTransform transform = CGAffineTransformIdentity;
  893. BOOL swapWidthAndHeight = NO;
  894. switch (orientation) {
  895. case UIImageOrientationDown: {
  896. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(180));
  897. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
  898. } break;
  899. case UIImageOrientationLeft: {
  900. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
  901. transform = CGAffineTransformTranslate(transform, -(CGFloat)0, -(CGFloat)height);
  902. swapWidthAndHeight = YES;
  903. } break;
  904. case UIImageOrientationRight: {
  905. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
  906. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, (CGFloat)0);
  907. swapWidthAndHeight = YES;
  908. } break;
  909. case UIImageOrientationUpMirrored: {
  910. transform = CGAffineTransformTranslate(transform, (CGFloat)width, 0);
  911. transform = CGAffineTransformScale(transform, -1, 1);
  912. } break;
  913. case UIImageOrientationDownMirrored: {
  914. transform = CGAffineTransformTranslate(transform, 0, (CGFloat)height);
  915. transform = CGAffineTransformScale(transform, 1, -1);
  916. } break;
  917. case UIImageOrientationLeftMirrored: {
  918. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(-90));
  919. transform = CGAffineTransformScale(transform, 1, -1);
  920. transform = CGAffineTransformTranslate(transform, -(CGFloat)width, -(CGFloat)height);
  921. swapWidthAndHeight = YES;
  922. } break;
  923. case UIImageOrientationRightMirrored: {
  924. transform = CGAffineTransformMakeRotation(YYImageDegreesToRadians(90));
  925. transform = CGAffineTransformScale(transform, 1, -1);
  926. swapWidthAndHeight = YES;
  927. } break;
  928. default: break;
  929. }
  930. if (CGAffineTransformIsIdentity(transform)) return (CGImageRef)CFRetain(imageRef);
  931. CGSize destSize = {width, height};
  932. if (swapWidthAndHeight) {
  933. destSize.width = height;
  934. destSize.height = width;
  935. }
  936. return YYCGImageCreateAffineTransformCopy(imageRef, transform, destSize, destBitmapInfo);
  937. }
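/*
 Usage sketch (assumption, not part of the original source; `image` is a hypothetical UIImage):
 bake an EXIF orientation into the pixel data so the result can be treated as
 UIImageOrientationUp.

     CGImageRef upright = YYCGImageCreateCopyWithOrientation(image.CGImage,
             image.imageOrientation,
             kCGImageAlphaPremultipliedFirst | kCGBitmapByteOrder32Host);
     // ... use `upright`, then CGImageRelease(upright);
*/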
  938. YYImageType YYImageDetectType(CFDataRef data) {
  939. if (!data) return YYImageTypeUnknown;
  940. uint64_t length = CFDataGetLength(data);
  941. if (length < 16) return YYImageTypeUnknown;
  942. const char *bytes = (char *)CFDataGetBytePtr(data);
  943. uint32_t magic4 = *((uint32_t *)bytes);
  944. switch (magic4) {
  945. case YY_FOUR_CC(0x4D, 0x4D, 0x00, 0x2A): { // big endian TIFF
  946. return YYImageTypeTIFF;
  947. } break;
  948. case YY_FOUR_CC(0x49, 0x49, 0x2A, 0x00): { // little endian TIFF
  949. return YYImageTypeTIFF;
  950. } break;
  951. case YY_FOUR_CC(0x00, 0x00, 0x01, 0x00): { // ICO
  952. return YYImageTypeICO;
  953. } break;
  954. case YY_FOUR_CC(0x00, 0x00, 0x02, 0x00): { // CUR
  955. return YYImageTypeICO;
  956. } break;
  957. case YY_FOUR_CC('i', 'c', 'n', 's'): { // ICNS
  958. return YYImageTypeICNS;
  959. } break;
  960. case YY_FOUR_CC('G', 'I', 'F', '8'): { // GIF
  961. return YYImageTypeGIF;
  962. } break;
  963. case YY_FOUR_CC(0x89, 'P', 'N', 'G'): { // PNG
  964. uint32_t tmp = *((uint32_t *)(bytes + 4));
  965. if (tmp == YY_FOUR_CC('\r', '\n', 0x1A, '\n')) {
  966. return YYImageTypePNG;
  967. }
  968. } break;
  969. case YY_FOUR_CC('R', 'I', 'F', 'F'): { // WebP
  970. uint32_t tmp = *((uint32_t *)(bytes + 8));
  971. if (tmp == YY_FOUR_CC('W', 'E', 'B', 'P')) {
  972. return YYImageTypeWebP;
  973. }
  974. } break;
  975. /*
  976. case YY_FOUR_CC('B', 'P', 'G', 0xFB): { // BPG
  977. return YYImageTypeBPG;
  978. } break;
  979. */
  980. }
  981. uint16_t magic2 = *((uint16_t *)bytes);
  982. switch (magic2) {
  983. case YY_TWO_CC('B', 'A'):
  984. case YY_TWO_CC('B', 'M'):
  985. case YY_TWO_CC('I', 'C'):
  986. case YY_TWO_CC('P', 'I'):
  987. case YY_TWO_CC('C', 'I'):
  988. case YY_TWO_CC('C', 'P'): { // BMP
  989. return YYImageTypeBMP;
  990. }
  991. case YY_TWO_CC(0xFF, 0x4F): { // JPEG2000
  992. return YYImageTypeJPEG2000;
  993. }
  994. }
  995. // JPG FF D8 FF
  996. if (memcmp(bytes,"\377\330\377",3) == 0) return YYImageTypeJPEG;
  997. // JP2
  998. if (memcmp(bytes + 4, "\152\120\040\040\015", 5) == 0) return YYImageTypeJPEG2000;
  999. return YYImageTypeUnknown;
  1000. }
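/*
 Usage sketch (assumption, not part of the original source; `path` is a hypothetical file path):
 sniff the type of an image file from its magic bytes before handing it to a decoder.

     NSData *fileData = [NSData dataWithContentsOfFile:path];
     YYImageType type = YYImageDetectType((__bridge CFDataRef)fileData);
     if (type == YYImageTypeWebP) {
         // route to the WebP decoder
     }
*/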
  1001. CFStringRef YYImageTypeToUTType(YYImageType type) {
  1002. switch (type) {
  1003. case YYImageTypeJPEG: return kUTTypeJPEG;
  1004. case YYImageTypeJPEG2000: return kUTTypeJPEG2000;
  1005. case YYImageTypeTIFF: return kUTTypeTIFF;
  1006. case YYImageTypeBMP: return kUTTypeBMP;
  1007. case YYImageTypeICO: return kUTTypeICO;
  1008. case YYImageTypeICNS: return kUTTypeAppleICNS;
  1009. case YYImageTypeGIF: return kUTTypeGIF;
  1010. case YYImageTypePNG: return kUTTypePNG;
  1011. default: return NULL;
  1012. }
  1013. }
  1014. YYImageType YYImageTypeFromUTType(CFStringRef uti) {
  1015. static NSDictionary *dic;
  1016. static dispatch_once_t onceToken;
  1017. dispatch_once(&onceToken, ^{
  1018. dic = @{(id)kUTTypeJPEG : @(YYImageTypeJPEG),
  1019. (id)kUTTypeJPEG2000 : @(YYImageTypeJPEG2000),
  1020. (id)kUTTypeTIFF : @(YYImageTypeTIFF),
  1021. (id)kUTTypeBMP : @(YYImageTypeBMP),
  1022. (id)kUTTypeICO : @(YYImageTypeICO),
  1023. (id)kUTTypeAppleICNS : @(YYImageTypeICNS),
  1024. (id)kUTTypeGIF : @(YYImageTypeGIF),
  1025. (id)kUTTypePNG : @(YYImageTypePNG)};
  1026. });
  1027. if (!uti) return YYImageTypeUnknown;
  1028. NSNumber *num = dic[(__bridge __strong id)(uti)];
  1029. return num.unsignedIntegerValue;
  1030. }
  1031. NSString *YYImageTypeGetExtension(YYImageType type) {
  1032. switch (type) {
  1033. case YYImageTypeJPEG: return @"jpg";
  1034. case YYImageTypeJPEG2000: return @"jp2";
  1035. case YYImageTypeTIFF: return @"tiff";
  1036. case YYImageTypeBMP: return @"bmp";
  1037. case YYImageTypeICO: return @"ico";
  1038. case YYImageTypeICNS: return @"icns";
  1039. case YYImageTypeGIF: return @"gif";
  1040. case YYImageTypePNG: return @"png";
  1041. case YYImageTypeWebP: return @"webp";
  1042. default: return nil;
  1043. }
  1044. }
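// Usage sketch (illustrative, not in the original source): the helpers above
// round-trip between YYImageType, ImageIO UTType strings and file extensions.
static void _YYExampleTypeMapping(void) {
    CFStringRef uti = YYImageTypeToUTType(YYImageTypePNG);   // kUTTypePNG
    YYImageType type = YYImageTypeFromUTType(uti);           // YYImageTypePNG
    NSLog(@"uti: %@ ext: %@", (__bridge NSString *)uti, YYImageTypeGetExtension(type));
}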
  1045. CFDataRef YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality) {
  1046. if (!imageRef) return nil;
  1047. quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  1048. if (type == YYImageTypeWebP) {
  1049. #if YYIMAGE_WEBP_ENABLED
  1050. if (quality == 1) {
  1051. return YYCGImageCreateEncodedWebPData(imageRef, YES, quality, 4, YYImagePresetDefault);
  1052. } else {
  1053. return YYCGImageCreateEncodedWebPData(imageRef, NO, quality, 4, YYImagePresetDefault);
  1054. }
  1055. #else
  1056. return NULL;
  1057. #endif
  1058. }
  1059. CFStringRef uti = YYImageTypeToUTType(type);
  1060. if (!uti) return nil;
  1061. CFMutableDataRef data = CFDataCreateMutable(CFAllocatorGetDefault(), 0);
  1062. if (!data) return NULL;
  1063. CGImageDestinationRef dest = CGImageDestinationCreateWithData(data, uti, 1, NULL);
  1064. if (!dest) {
  1065. CFRelease(data);
  1066. return NULL;
  1067. }
  1068. NSDictionary *options = @{(id)kCGImageDestinationLossyCompressionQuality : @(quality) };
  1069. CGImageDestinationAddImage(dest, imageRef, (CFDictionaryRef)options);
  1070. if (!CGImageDestinationFinalize(dest)) {
  1071. CFRelease(data);
  1072. CFRelease(dest);
  1073. return nil;
  1074. }
  1075. CFRelease(dest);
  1076. if (CFDataGetLength(data) == 0) {
  1077. CFRelease(data);
  1078. return NULL;
  1079. }
  1080. return data;
  1081. }
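// Usage sketch (illustrative): encoding a CGImage with the helper above. The
// function follows the Core Foundation "Create" rule, so ownership of the
// returned CFDataRef is handed to ARC via CFBridgingRelease.
static NSData *_YYExampleEncodeJPEG(UIImage *image) {
    if (!image.CGImage) return nil;
    CFDataRef data = YYCGImageCreateEncodedData(image.CGImage, YYImageTypeJPEG, 0.9);
    return data ? (NSData *)CFBridgingRelease(data) : nil;
}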
  1082. #if YYIMAGE_WEBP_ENABLED
  1083. BOOL YYImageWebPAvailable() {
  1084. return YES;
  1085. }
  1086. CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
  1087. if (!imageRef) return nil;
  1088. size_t width = CGImageGetWidth(imageRef);
  1089. size_t height = CGImageGetHeight(imageRef);
  1090. if (width == 0 || width > WEBP_MAX_DIMENSION) return nil;
  1091. if (height == 0 || height > WEBP_MAX_DIMENSION) return nil;
  1092. vImage_Buffer buffer = {0};
  1093. if(!YYCGImageDecodeToBitmapBufferWith32BitFormat(imageRef, &buffer, kCGImageAlphaLast | kCGBitmapByteOrderDefault)) return nil;
  1094. WebPConfig config = {0};
  1095. WebPPicture picture = {0};
  1096. WebPMemoryWriter writer = {0};
  1097. CFDataRef webpData = NULL;
  1098. BOOL pictureNeedFree = NO;
  1099. quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  1100. preset = preset > YYImagePresetText ? YYImagePresetDefault : preset;
  1101. compressLevel = compressLevel < 0 ? 0 : compressLevel > 6 ? 6 : compressLevel;
  1102. if (!WebPConfigPreset(&config, (WebPPreset)preset, quality)) goto fail;
  1103. config.quality = round(quality * 100.0);
  1104. config.lossless = lossless;
  1105. config.method = compressLevel;
  1106. switch ((WebPPreset)preset) {
  1107. case WEBP_PRESET_DEFAULT: {
  1108. config.image_hint = WEBP_HINT_DEFAULT;
  1109. } break;
  1110. case WEBP_PRESET_PICTURE: {
  1111. config.image_hint = WEBP_HINT_PICTURE;
  1112. } break;
  1113. case WEBP_PRESET_PHOTO: {
  1114. config.image_hint = WEBP_HINT_PHOTO;
  1115. } break;
  1116. case WEBP_PRESET_DRAWING:
  1117. case WEBP_PRESET_ICON:
  1118. case WEBP_PRESET_TEXT: {
  1119. config.image_hint = WEBP_HINT_GRAPH;
  1120. } break;
  1121. }
  1122. if (!WebPValidateConfig(&config)) goto fail;
  1123. if (!WebPPictureInit(&picture)) goto fail;
  1124. pictureNeedFree = YES;
  1125. picture.width = (int)buffer.width;
  1126. picture.height = (int)buffer.height;
  1127. picture.use_argb = lossless;
  1128. if(!WebPPictureImportRGBA(&picture, buffer.data, (int)buffer.rowBytes)) goto fail;
  1129. WebPMemoryWriterInit(&writer);
  1130. picture.writer = WebPMemoryWrite;
  1131. picture.custom_ptr = &writer;
  1132. if(!WebPEncode(&config, &picture)) goto fail;
  1133. webpData = CFDataCreate(CFAllocatorGetDefault(), writer.mem, writer.size);
  1134. free(writer.mem);
  1135. WebPPictureFree(&picture);
  1136. free(buffer.data);
  1137. return webpData;
  1138. fail:
  1139. if (buffer.data) free(buffer.data);
  1140. if (pictureNeedFree) WebPPictureFree(&picture);
  1141. return nil;
  1142. }
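// Usage sketch (illustrative): a lossy WebP encode through the function above.
// compressLevel 4 and YYImagePresetPhoto are example values, not recommendations.
static NSData *_YYExampleEncodeWebP(CGImageRef imageRef) {
    CFDataRef data = YYCGImageCreateEncodedWebPData(imageRef, NO, 0.75, 4, YYImagePresetPhoto);
    return data ? (NSData *)CFBridgingRelease(data) : nil;
}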
  1143. NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
  1144. if (!webpData || CFDataGetLength(webpData) == 0) return 0;
  1145. WebPData data = {CFDataGetBytePtr(webpData), CFDataGetLength(webpData)};
  1146. WebPDemuxer *demuxer = WebPDemux(&data);
  1147. if (!demuxer) return 0;
  1148. NSUInteger webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1149. WebPDemuxDelete(demuxer);
  1150. return webpFrameCount;
  1151. }
  1152. CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
  1153. BOOL decodeForDisplay,
  1154. BOOL useThreads,
  1155. BOOL bypassFiltering,
  1156. BOOL noFancyUpsampling) {
/*
 Calling WebPDecode() on multi-frame WebP data returns an error
 (VP8_STATUS_UNSUPPORTED_FEATURE), so use WebPDemuxer to unpack the frames first.
 */
  1161. WebPData data = {0};
  1162. WebPDemuxer *demuxer = NULL;
  1163. int frameCount = 0, canvasWidth = 0, canvasHeight = 0;
  1164. WebPIterator iter = {0};
  1165. BOOL iterInited = NO;
  1166. const uint8_t *payload = NULL;
  1167. size_t payloadSize = 0;
  1168. WebPDecoderConfig config = {0};
  1169. BOOL hasAlpha = NO;
  1170. size_t bitsPerComponent = 0, bitsPerPixel = 0, bytesPerRow = 0, destLength = 0;
  1171. CGBitmapInfo bitmapInfo = 0;
  1172. WEBP_CSP_MODE colorspace = 0;
  1173. void *destBytes = NULL;
  1174. CGDataProviderRef provider = NULL;
  1175. CGImageRef imageRef = NULL;
  1176. if (!webpData || CFDataGetLength(webpData) == 0) return NULL;
  1177. data.bytes = CFDataGetBytePtr(webpData);
  1178. data.size = CFDataGetLength(webpData);
  1179. demuxer = WebPDemux(&data);
  1180. if (!demuxer) goto fail;
  1181. frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1182. if (frameCount == 0) {
  1183. goto fail;
  1184. } else if (frameCount == 1) { // single-frame
  1185. payload = data.bytes;
  1186. payloadSize = data.size;
  1187. if (!WebPInitDecoderConfig(&config)) goto fail;
  1188. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
  1189. canvasWidth = config.input.width;
  1190. canvasHeight = config.input.height;
  1191. } else { // multi-frame
  1192. canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1193. canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1194. if (canvasWidth < 1 || canvasHeight < 1) goto fail;
  1195. if (!WebPDemuxGetFrame(demuxer, 1, &iter)) goto fail;
  1196. iterInited = YES;
  1197. if (iter.width > canvasWidth || iter.height > canvasHeight) goto fail;
  1198. payload = iter.fragment.bytes;
  1199. payloadSize = iter.fragment.size;
  1200. if (!WebPInitDecoderConfig(&config)) goto fail;
  1201. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) goto fail;
  1202. }
  1203. if (payload == NULL || payloadSize == 0) goto fail;
  1204. hasAlpha = config.input.has_alpha;
  1205. bitsPerComponent = 8;
  1206. bitsPerPixel = 32;
  1207. bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * canvasWidth, 32);
  1208. destLength = bytesPerRow * canvasHeight;
  1209. if (decodeForDisplay) {
  1210. bitmapInfo = kCGBitmapByteOrder32Host;
  1211. bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
colorspace = MODE_bgrA; // little endian
  1213. } else {
  1214. bitmapInfo = kCGBitmapByteOrderDefault;
  1215. bitmapInfo |= hasAlpha ? kCGImageAlphaLast : kCGImageAlphaNoneSkipLast;
  1216. colorspace = MODE_RGBA;
  1217. }
  1218. destBytes = calloc(1, destLength);
  1219. if (!destBytes) goto fail;
  1220. config.options.use_threads = useThreads; //speed up 23%
  1221. config.options.bypass_filtering = bypassFiltering; //speed up 11%, cause some banding
  1222. config.options.no_fancy_upsampling = noFancyUpsampling; //speed down 16%, lose some details
  1223. config.output.colorspace = colorspace;
  1224. config.output.is_external_memory = 1;
  1225. config.output.u.RGBA.rgba = destBytes;
  1226. config.output.u.RGBA.stride = (int)bytesPerRow;
  1227. config.output.u.RGBA.size = destLength;
  1228. VP8StatusCode result = WebPDecode(payload, payloadSize, &config);
  1229. if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) goto fail;
  1230. if (iter.x_offset != 0 || iter.y_offset != 0) {
  1231. void *tmp = calloc(1, destLength);
  1232. if (tmp) {
  1233. vImage_Buffer src = {destBytes, canvasHeight, canvasWidth, bytesPerRow};
  1234. vImage_Buffer dest = {tmp, canvasHeight, canvasWidth, bytesPerRow};
  1235. vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
  1236. uint8_t backColor[4] = {0};
  1237. vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
  1238. memcpy(destBytes, tmp, destLength);
  1239. free(tmp);
  1240. }
  1241. }
  1242. provider = CGDataProviderCreateWithData(destBytes, destBytes, destLength, YYCGDataProviderReleaseDataCallback);
  1243. if (!provider) goto fail;
  1244. destBytes = NULL; // hold by provider
  1245. imageRef = CGImageCreate(canvasWidth, canvasHeight, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
  1246. CFRelease(provider);
  1247. if (iterInited) WebPDemuxReleaseIterator(&iter);
  1248. WebPDemuxDelete(demuxer);
  1249. return imageRef;
  1250. fail:
  1251. if (destBytes) free(destBytes);
  1252. if (provider) CFRelease(provider);
  1253. if (iterInited) WebPDemuxReleaseIterator(&iter);
  1254. if (demuxer) WebPDemuxDelete(demuxer);
  1255. return NULL;
  1256. }
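// Usage sketch (illustrative): decoding WebP data (first frame only) into a
// UIImage with the function above. decodeForDisplay:YES produces a premultiplied
// BGRA bitmap that can be rendered without further decoding.
static UIImage *_YYExampleDecodeWebP(NSData *webpData) {
    CGImageRef imageRef = YYCGImageCreateWithWebPData((__bridge CFDataRef)webpData, YES, YES, NO, NO);
    if (!imageRef) return nil;
    UIImage *image = [UIImage imageWithCGImage:imageRef];
    CFRelease(imageRef);
    return image;
}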
  1257. #else
  1258. BOOL YYImageWebPAvailable() {
  1259. return NO;
  1260. }
  1261. CFDataRef YYCGImageCreateEncodedWebPData(CGImageRef imageRef, BOOL lossless, CGFloat quality, int compressLevel, YYImagePreset preset) {
NSLog(@"WebP encoder is disabled");
  1263. return NULL;
  1264. }
  1265. NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData) {
  1266. NSLog(@"WebP decoder is disabled");
  1267. return 0;
  1268. }
  1269. CGImageRef YYCGImageCreateWithWebPData(CFDataRef webpData,
  1270. BOOL decodeForDisplay,
  1271. BOOL useThreads,
  1272. BOOL bypassFiltering,
  1273. BOOL noFancyUpsampling) {
  1274. NSLog(@"WebP decoder is disabled");
  1275. return NULL;
  1276. }
  1277. #endif
  1278. ////////////////////////////////////////////////////////////////////////////////
  1279. #pragma mark - Decoder
  1280. @implementation YYImageFrame
  1281. + (instancetype)frameWithImage:(UIImage *)image {
  1282. YYImageFrame *frame = [self new];
  1283. frame.image = image;
  1284. return frame;
  1285. }
  1286. - (id)copyWithZone:(NSZone *)zone {
  1287. YYImageFrame *frame = [self.class new];
  1288. frame.index = _index;
  1289. frame.width = _width;
  1290. frame.height = _height;
  1291. frame.offsetX = _offsetX;
  1292. frame.offsetY = _offsetY;
  1293. frame.duration = _duration;
  1294. frame.dispose = _dispose;
  1295. frame.blend = _blend;
  1296. frame.image = _image.copy;
  1297. return frame;
  1298. }
  1299. @end
  1300. // Internal frame object.
  1301. @interface _YYImageDecoderFrame : YYImageFrame
  1302. @property (nonatomic, assign) BOOL hasAlpha; ///< Whether frame has alpha.
  1303. @property (nonatomic, assign) BOOL isFullSize; ///< Whether frame fill the canvas.
  1304. @property (nonatomic, assign) NSUInteger blendFromIndex; ///< Blend from frame index to current frame.
  1305. @end
  1306. @implementation _YYImageDecoderFrame
  1307. - (id)copyWithZone:(NSZone *)zone {
  1308. _YYImageDecoderFrame *frame = [super copyWithZone:zone];
  1309. frame.hasAlpha = _hasAlpha;
  1310. frame.isFullSize = _isFullSize;
  1311. frame.blendFromIndex = _blendFromIndex;
  1312. return frame;
  1313. }
  1314. @end
  1315. @implementation YYImageDecoder {
  1316. pthread_mutex_t _lock; // recursive lock
  1317. BOOL _sourceTypeDetected;
  1318. CGImageSourceRef _source;
  1319. yy_png_info *_apngSource;
  1320. #if YYIMAGE_WEBP_ENABLED
  1321. WebPDemuxer *_webpSource;
  1322. #endif
  1323. UIImageOrientation _orientation;
  1324. dispatch_semaphore_t _framesLock;
NSArray *_frames; ///< Array<_YYImageDecoderFrame>, without image
  1326. BOOL _needBlend;
  1327. NSUInteger _blendFrameIndex;
  1328. CGContextRef _blendCanvas;
  1329. }
  1330. - (void)dealloc {
  1331. if (_source) CFRelease(_source);
  1332. if (_apngSource) yy_png_info_release(_apngSource);
  1333. #if YYIMAGE_WEBP_ENABLED
  1334. if (_webpSource) WebPDemuxDelete(_webpSource);
  1335. #endif
  1336. if (_blendCanvas) CFRelease(_blendCanvas);
  1337. pthread_mutex_destroy(&_lock);
  1338. }
  1339. + (instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale {
  1340. if (!data) return nil;
  1341. YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:scale];
  1342. [decoder updateData:data final:YES];
  1343. if (decoder.frameCount == 0) return nil;
  1344. return decoder;
  1345. }
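// Usage sketch (illustrative): decoding every frame of an animated image
// (GIF/APNG/WebP) through the public decoder API declared in YYImageCoder.h.
static NSArray *_YYExampleDecodeAllFrames(NSData *data, CGFloat scale) {
    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
    if (!decoder) return nil;
    NSMutableArray *images = [NSMutableArray new];
    for (NSUInteger i = 0; i < decoder.frameCount; i++) {
        YYImageFrame *frame = [decoder frameAtIndex:i decodeForDisplay:YES];
        if (frame.image) [images addObject:frame.image];
    }
    return images;
}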
  1346. - (instancetype)init {
  1347. return [self initWithScale:[UIScreen mainScreen].scale];
  1348. }
  1349. - (instancetype)initWithScale:(CGFloat)scale {
  1350. self = [super init];
  1351. if (scale <= 0) scale = 1;
  1352. _scale = scale;
  1353. _framesLock = dispatch_semaphore_create(1);
  1354. pthread_mutexattr_t attr;
  1355. pthread_mutexattr_init (&attr);
  1356. pthread_mutexattr_settype (&attr, PTHREAD_MUTEX_RECURSIVE);
  1357. pthread_mutex_init (&_lock, &attr);
  1358. pthread_mutexattr_destroy (&attr);
  1359. return self;
  1360. }
  1361. - (BOOL)updateData:(NSData *)data final:(BOOL)final {
  1362. BOOL result = NO;
  1363. pthread_mutex_lock(&_lock);
  1364. result = [self _updateData:data final:final];
  1365. pthread_mutex_unlock(&_lock);
  1366. return result;
  1367. }
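// Usage sketch (illustrative): progressive decoding while data is still being
// downloaded. `receivedData` and `finished` are placeholders for whatever your
// networking layer provides; the decoder simply receives growing data blobs.
static UIImage *_YYExampleProgressiveDecode(YYImageDecoder *decoder,
                                            NSData *receivedData,
                                            BOOL finished) {
    [decoder updateData:receivedData final:finished];
    YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
    return frame.image; // may be nil until enough data has arrived
}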
  1368. - (YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
  1369. YYImageFrame *result = nil;
  1370. pthread_mutex_lock(&_lock);
  1371. result = [self _frameAtIndex:index decodeForDisplay:decodeForDisplay];
  1372. pthread_mutex_unlock(&_lock);
  1373. return result;
  1374. }
  1375. - (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index {
  1376. NSTimeInterval result = 0;
  1377. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1378. if (index < _frames.count) {
  1379. result = ((_YYImageDecoderFrame *)_frames[index]).duration;
  1380. }
  1381. dispatch_semaphore_signal(_framesLock);
  1382. return result;
  1383. }
  1384. - (NSDictionary *)framePropertiesAtIndex:(NSUInteger)index {
  1385. NSDictionary *result = nil;
  1386. pthread_mutex_lock(&_lock);
  1387. result = [self _framePropertiesAtIndex:index];
  1388. pthread_mutex_unlock(&_lock);
  1389. return result;
  1390. }
  1391. - (NSDictionary *)imageProperties {
  1392. NSDictionary *result = nil;
  1393. pthread_mutex_lock(&_lock);
  1394. result = [self _imageProperties];
  1395. pthread_mutex_unlock(&_lock);
  1396. return result;
  1397. }
#pragma mark - Private (wrap)
  1399. - (BOOL)_updateData:(NSData *)data final:(BOOL)final {
  1400. if (_finalized) return NO;
  1401. if (data.length < _data.length) return NO;
  1402. _finalized = final;
  1403. _data = data;
  1404. YYImageType type = YYImageDetectType((__bridge CFDataRef)data);
  1405. if (_sourceTypeDetected) {
  1406. if (_type != type) {
  1407. return NO;
  1408. } else {
  1409. [self _updateSource];
  1410. }
  1411. } else {
  1412. if (_data.length > 16) {
  1413. _type = type;
  1414. _sourceTypeDetected = YES;
  1415. [self _updateSource];
  1416. }
  1417. }
  1418. return YES;
  1419. }
  1420. - (YYImageFrame *)_frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay {
if (index >= _frames.count) return nil;
  1422. _YYImageDecoderFrame *frame = [(_YYImageDecoderFrame *)_frames[index] copy];
  1423. BOOL decoded = NO;
  1424. BOOL extendToCanvas = NO;
  1425. if (_type != YYImageTypeICO && decodeForDisplay) { // ICO contains multi-size frame and should not extend to canvas.
  1426. extendToCanvas = YES;
  1427. }
  1428. if (!_needBlend) {
  1429. CGImageRef imageRef = [self _newUnblendedImageAtIndex:index extendToCanvas:extendToCanvas decoded:&decoded];
  1430. if (!imageRef) return nil;
  1431. if (decodeForDisplay && !decoded) {
  1432. CGImageRef imageRefDecoded = YYCGImageCreateDecodedCopy(imageRef, YES);
  1433. if (imageRefDecoded) {
  1434. CFRelease(imageRef);
  1435. imageRef = imageRefDecoded;
  1436. decoded = YES;
  1437. }
  1438. }
  1439. UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
  1440. CFRelease(imageRef);
  1441. if (!image) return nil;
  1442. image.yy_isDecodedForDisplay = decoded;
  1443. frame.image = image;
  1444. return frame;
  1445. }
  1446. // blend
  1447. if (![self _createBlendContextIfNeeded]) return nil;
  1448. CGImageRef imageRef = NULL;
  1449. if (_blendFrameIndex + 1 == frame.index) {
  1450. imageRef = [self _newBlendedImageWithFrame:frame];
  1451. _blendFrameIndex = index;
  1452. } else { // should draw canvas from previous frame
  1453. _blendFrameIndex = NSNotFound;
  1454. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1455. if (frame.blendFromIndex == frame.index) {
  1456. CGImageRef unblendedImage = [self _newUnblendedImageAtIndex:index extendToCanvas:NO decoded:NULL];
  1457. if (unblendedImage) {
  1458. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendedImage);
  1459. CFRelease(unblendedImage);
  1460. }
  1461. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1462. if (frame.dispose == YYImageDisposeBackground) {
  1463. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1464. }
  1465. _blendFrameIndex = index;
  1466. } else { // canvas is not ready
  1467. for (uint32_t i = (uint32_t)frame.blendFromIndex; i <= (uint32_t)frame.index; i++) {
  1468. if (i == frame.index) {
  1469. if (!imageRef) imageRef = [self _newBlendedImageWithFrame:frame];
  1470. } else {
  1471. [self _blendImageWithFrame:_frames[i]];
  1472. }
  1473. }
  1474. _blendFrameIndex = index;
  1475. }
  1476. }
  1477. if (!imageRef) return nil;
  1478. UIImage *image = [UIImage imageWithCGImage:imageRef scale:_scale orientation:_orientation];
  1479. CFRelease(imageRef);
  1480. if (!image) return nil;
  1481. image.yy_isDecodedForDisplay = YES;
  1482. frame.image = image;
  1483. if (extendToCanvas) {
  1484. frame.width = _width;
  1485. frame.height = _height;
  1486. frame.offsetX = 0;
  1487. frame.offsetY = 0;
  1488. frame.dispose = YYImageDisposeNone;
  1489. frame.blend = YYImageBlendNone;
  1490. }
  1491. return frame;
  1492. }
  1493. - (NSDictionary *)_framePropertiesAtIndex:(NSUInteger)index {
  1494. if (index >= _frames.count) return nil;
  1495. if (!_source) return nil;
  1496. CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, index, NULL);
  1497. if (!properties) return nil;
  1498. return CFBridgingRelease(properties);
  1499. }
  1500. - (NSDictionary *)_imageProperties {
  1501. if (!_source) return nil;
  1502. CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
  1503. if (!properties) return nil;
  1504. return CFBridgingRelease(properties);
  1505. }
#pragma mark - Private
  1507. - (void)_updateSource {
  1508. switch (_type) {
  1509. case YYImageTypeWebP: {
  1510. [self _updateSourceWebP];
  1511. } break;
  1512. case YYImageTypePNG: {
  1513. [self _updateSourceAPNG];
  1514. } break;
  1515. default: {
  1516. [self _updateSourceImageIO];
  1517. } break;
  1518. }
  1519. }
  1520. - (void)_updateSourceWebP {
  1521. #if YYIMAGE_WEBP_ENABLED
  1522. _width = 0;
  1523. _height = 0;
  1524. _loopCount = 0;
  1525. if (_webpSource) WebPDemuxDelete(_webpSource);
  1526. _webpSource = NULL;
  1527. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1528. _frames = nil;
  1529. dispatch_semaphore_signal(_framesLock);
/*
 https://developers.google.com/speed/webp/docs/api
 The documentation says WebPIDecoder can decode WebP progressively, but at the
 moment it only returns an empty image (unlike progressive JPEG), so we do not
 use progressive decoding.
 Calling WebPDecode() on multi-frame WebP data returns the error
 "VP8_STATUS_UNSUPPORTED_FEATURE", so we first use WebPDemuxer to unpack it.
 */
  1538. WebPData webPData = {0};
  1539. webPData.bytes = _data.bytes;
  1540. webPData.size = _data.length;
  1541. WebPDemuxer *demuxer = WebPDemux(&webPData);
  1542. if (!demuxer) return;
  1543. uint32_t webpFrameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
  1544. uint32_t webpLoopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
  1545. uint32_t canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1546. uint32_t canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1547. if (webpFrameCount == 0 || canvasWidth < 1 || canvasHeight < 1) {
  1548. WebPDemuxDelete(demuxer);
  1549. return;
  1550. }
  1551. NSMutableArray *frames = [NSMutableArray new];
  1552. BOOL needBlend = NO;
  1553. uint32_t iterIndex = 0;
  1554. uint32_t lastBlendIndex = 0;
  1555. WebPIterator iter = {0};
  1556. if (WebPDemuxGetFrame(demuxer, 1, &iter)) { // one-based index...
  1557. do {
  1558. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1559. [frames addObject:frame];
  1560. if (iter.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
  1561. frame.dispose = YYImageDisposeBackground;
  1562. }
  1563. if (iter.blend_method == WEBP_MUX_BLEND) {
  1564. frame.blend = YYImageBlendOver;
  1565. }
  1566. int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
  1567. int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
  1568. frame.index = iterIndex;
  1569. frame.duration = iter.duration / 1000.0;
  1570. frame.width = iter.width;
  1571. frame.height = iter.height;
  1572. frame.hasAlpha = iter.has_alpha;
  1573. frame.blend = iter.blend_method == WEBP_MUX_BLEND;
  1574. frame.offsetX = iter.x_offset;
  1575. frame.offsetY = canvasHeight - iter.y_offset - iter.height;
  1576. BOOL sizeEqualsToCanvas = (iter.width == canvasWidth && iter.height == canvasHeight);
  1577. BOOL offsetIsZero = (iter.x_offset == 0 && iter.y_offset == 0);
  1578. frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
  1579. if ((!frame.blend || !frame.hasAlpha) && frame.isFullSize) {
  1580. frame.blendFromIndex = lastBlendIndex = iterIndex;
  1581. } else {
  1582. if (frame.dispose && frame.isFullSize) {
  1583. frame.blendFromIndex = lastBlendIndex;
  1584. lastBlendIndex = iterIndex + 1;
  1585. } else {
  1586. frame.blendFromIndex = lastBlendIndex;
  1587. }
  1588. }
  1589. if (frame.index != frame.blendFromIndex) needBlend = YES;
  1590. iterIndex++;
  1591. } while (WebPDemuxNextFrame(&iter));
  1592. WebPDemuxReleaseIterator(&iter);
  1593. }
  1594. if (frames.count != webpFrameCount) {
  1595. WebPDemuxDelete(demuxer);
  1596. return;
  1597. }
  1598. _width = canvasWidth;
  1599. _height = canvasHeight;
  1600. _frameCount = frames.count;
  1601. _loopCount = webpLoopCount;
  1602. _needBlend = needBlend;
  1603. _webpSource = demuxer;
  1604. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1605. _frames = frames;
  1606. dispatch_semaphore_signal(_framesLock);
  1607. #else
  1608. static const char *func = __FUNCTION__;
  1609. static const int line = __LINE__;
  1610. static dispatch_once_t onceToken;
  1611. dispatch_once(&onceToken, ^{
  1612. NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", func, line);
  1613. });
  1614. #endif
  1615. }
  1616. - (void)_updateSourceAPNG {
/*
 APNG extends the PNG format to support animation; ImageIO has supported it
 since iOS 8. We use a custom APNG decoder so that APNG also works on older
 systems, and therefore ignore ImageIO's APNG frame info. The custom decoder
 is typically a bit faster than ImageIO.
 */
  1624. yy_png_info_release(_apngSource);
  1625. _apngSource = nil;
  1626. [self _updateSourceImageIO]; // decode first frame
  1627. if (_frameCount == 0) return; // png decode failed
  1628. if (!_finalized) return; // ignore multi-frame before finalized
  1629. yy_png_info *apng = yy_png_info_create(_data.bytes, (uint32_t)_data.length);
  1630. if (!apng) return; // apng decode failed
  1631. if (apng->apng_frame_num == 0 ||
  1632. (apng->apng_frame_num == 1 && apng->apng_first_frame_is_cover)) {
  1633. yy_png_info_release(apng);
  1634. return; // no animation
  1635. }
if (_source) { // APNG decode succeeded, the ImageIO source is no longer needed
  1637. CFRelease(_source);
  1638. _source = NULL;
  1639. }
  1640. uint32_t canvasWidth = apng->header.width;
  1641. uint32_t canvasHeight = apng->header.height;
  1642. NSMutableArray *frames = [NSMutableArray new];
  1643. BOOL needBlend = NO;
  1644. uint32_t lastBlendIndex = 0;
  1645. for (uint32_t i = 0; i < apng->apng_frame_num; i++) {
  1646. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1647. [frames addObject:frame];
  1648. yy_png_frame_info *fi = apng->apng_frames + i;
  1649. frame.index = i;
  1650. frame.duration = yy_png_delay_to_seconds(fi->frame_control.delay_num, fi->frame_control.delay_den);
  1651. frame.hasAlpha = YES;
  1652. frame.width = fi->frame_control.width;
  1653. frame.height = fi->frame_control.height;
  1654. frame.offsetX = fi->frame_control.x_offset;
  1655. frame.offsetY = canvasHeight - fi->frame_control.y_offset - fi->frame_control.height;
  1656. BOOL sizeEqualsToCanvas = (frame.width == canvasWidth && frame.height == canvasHeight);
  1657. BOOL offsetIsZero = (fi->frame_control.x_offset == 0 && fi->frame_control.y_offset == 0);
  1658. frame.isFullSize = (sizeEqualsToCanvas && offsetIsZero);
  1659. switch (fi->frame_control.dispose_op) {
  1660. case YY_PNG_DISPOSE_OP_BACKGROUND: {
  1661. frame.dispose = YYImageDisposeBackground;
  1662. } break;
  1663. case YY_PNG_DISPOSE_OP_PREVIOUS: {
  1664. frame.dispose = YYImageDisposePrevious;
  1665. } break;
  1666. default: {
  1667. frame.dispose = YYImageDisposeNone;
  1668. } break;
  1669. }
  1670. switch (fi->frame_control.blend_op) {
  1671. case YY_PNG_BLEND_OP_OVER: {
  1672. frame.blend = YYImageBlendOver;
  1673. } break;
  1674. default: {
  1675. frame.blend = YYImageBlendNone;
  1676. } break;
  1677. }
  1678. if (frame.blend == YYImageBlendNone && frame.isFullSize) {
  1679. frame.blendFromIndex = i;
  1680. if (frame.dispose != YYImageDisposePrevious) lastBlendIndex = i;
  1681. } else {
  1682. if (frame.dispose == YYImageDisposeBackground && frame.isFullSize) {
  1683. frame.blendFromIndex = lastBlendIndex;
  1684. lastBlendIndex = i + 1;
  1685. } else {
  1686. frame.blendFromIndex = lastBlendIndex;
  1687. }
  1688. }
  1689. if (frame.index != frame.blendFromIndex) needBlend = YES;
  1690. }
  1691. _width = canvasWidth;
  1692. _height = canvasHeight;
  1693. _frameCount = frames.count;
  1694. _loopCount = apng->apng_loop_num;
  1695. _needBlend = needBlend;
  1696. _apngSource = apng;
  1697. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1698. _frames = frames;
  1699. dispatch_semaphore_signal(_framesLock);
  1700. }
  1701. - (void)_updateSourceImageIO {
  1702. _width = 0;
  1703. _height = 0;
  1704. _orientation = UIImageOrientationUp;
  1705. _loopCount = 0;
  1706. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1707. _frames = nil;
  1708. dispatch_semaphore_signal(_framesLock);
  1709. if (!_source) {
  1710. if (_finalized) {
  1711. _source = CGImageSourceCreateWithData((__bridge CFDataRef)_data, NULL);
  1712. } else {
  1713. _source = CGImageSourceCreateIncremental(NULL);
  1714. if (_source) CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, false);
  1715. }
  1716. } else {
  1717. CGImageSourceUpdateData(_source, (__bridge CFDataRef)_data, _finalized);
  1718. }
  1719. if (!_source) return;
  1720. _frameCount = CGImageSourceGetCount(_source);
  1721. if (_frameCount == 0) return;
  1722. if (!_finalized) { // ignore multi-frame before finalized
  1723. _frameCount = 1;
  1724. } else {
  1725. if (_type == YYImageTypePNG) { // use custom apng decoder and ignore multi-frame
  1726. _frameCount = 1;
  1727. }
  1728. if (_type == YYImageTypeGIF) { // get gif loop count
  1729. CFDictionaryRef properties = CGImageSourceCopyProperties(_source, NULL);
  1730. if (properties) {
  1731. CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
  1732. if (gif) {
  1733. CFTypeRef loop = CFDictionaryGetValue(gif, kCGImagePropertyGIFLoopCount);
  1734. if (loop) CFNumberGetValue(loop, kCFNumberNSIntegerType, &_loopCount);
  1735. }
  1736. CFRelease(properties);
  1737. }
  1738. }
  1739. }
/*
 ICO, GIF and APNG may contain multiple frames.
 */
  1743. NSMutableArray *frames = [NSMutableArray new];
  1744. for (NSUInteger i = 0; i < _frameCount; i++) {
  1745. _YYImageDecoderFrame *frame = [_YYImageDecoderFrame new];
  1746. frame.index = i;
  1747. frame.blendFromIndex = i;
  1748. frame.hasAlpha = YES;
  1749. frame.isFullSize = YES;
  1750. [frames addObject:frame];
  1751. CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(_source, i, NULL);
  1752. if (properties) {
  1753. NSTimeInterval duration = 0;
  1754. NSInteger orientationValue = 0, width = 0, height = 0;
  1755. CFTypeRef value = NULL;
  1756. value = CFDictionaryGetValue(properties, kCGImagePropertyPixelWidth);
  1757. if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &width);
  1758. value = CFDictionaryGetValue(properties, kCGImagePropertyPixelHeight);
  1759. if (value) CFNumberGetValue(value, kCFNumberNSIntegerType, &height);
  1760. if (_type == YYImageTypeGIF) {
  1761. CFDictionaryRef gif = CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary);
  1762. if (gif) {
  1763. // Use the unclamped frame delay if it exists.
  1764. value = CFDictionaryGetValue(gif, kCGImagePropertyGIFUnclampedDelayTime);
  1765. if (!value) {
  1766. // Fall back to the clamped frame delay if the unclamped frame delay does not exist.
  1767. value = CFDictionaryGetValue(gif, kCGImagePropertyGIFDelayTime);
  1768. }
  1769. if (value) CFNumberGetValue(value, kCFNumberDoubleType, &duration);
  1770. }
  1771. }
  1772. frame.width = width;
  1773. frame.height = height;
  1774. frame.duration = duration;
  1775. if (i == 0 && _width + _height == 0) { // init first frame
  1776. _width = width;
  1777. _height = height;
  1778. value = CFDictionaryGetValue(properties, kCGImagePropertyOrientation);
  1779. if (value) {
  1780. CFNumberGetValue(value, kCFNumberNSIntegerType, &orientationValue);
  1781. _orientation = YYUIImageOrientationFromEXIFValue(orientationValue);
  1782. }
  1783. }
  1784. CFRelease(properties);
  1785. }
  1786. }
  1787. dispatch_semaphore_wait(_framesLock, DISPATCH_TIME_FOREVER);
  1788. _frames = frames;
  1789. dispatch_semaphore_signal(_framesLock);
  1790. }
  1791. - (CGImageRef)_newUnblendedImageAtIndex:(NSUInteger)index
  1792. extendToCanvas:(BOOL)extendToCanvas
  1793. decoded:(BOOL *)decoded CF_RETURNS_RETAINED {
  1794. if (!_finalized && index > 0) return NULL;
  1795. if (_frames.count <= index) return NULL;
  1796. _YYImageDecoderFrame *frame = _frames[index];
  1797. if (_source) {
  1798. CGImageRef imageRef = CGImageSourceCreateImageAtIndex(_source, index, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
  1799. if (imageRef && extendToCanvas) {
  1800. size_t width = CGImageGetWidth(imageRef);
  1801. size_t height = CGImageGetHeight(imageRef);
  1802. if (width == _width && height == _height) {
  1803. CGImageRef imageRefExtended = YYCGImageCreateDecodedCopy(imageRef, YES);
  1804. if (imageRefExtended) {
  1805. CFRelease(imageRef);
  1806. imageRef = imageRefExtended;
  1807. if (decoded) *decoded = YES;
  1808. }
  1809. } else {
  1810. CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  1811. if (context) {
  1812. CGContextDrawImage(context, CGRectMake(0, _height - height, width, height), imageRef);
  1813. CGImageRef imageRefExtended = CGBitmapContextCreateImage(context);
  1814. CFRelease(context);
  1815. if (imageRefExtended) {
  1816. CFRelease(imageRef);
  1817. imageRef = imageRefExtended;
  1818. if (decoded) *decoded = YES;
  1819. }
  1820. }
  1821. }
  1822. }
  1823. return imageRef;
  1824. }
  1825. if (_apngSource) {
  1826. uint32_t size = 0;
  1827. uint8_t *bytes = yy_png_copy_frame_data_at_index(_data.bytes, _apngSource, (uint32_t)index, &size);
  1828. if (!bytes) return NULL;
  1829. CGDataProviderRef provider = CGDataProviderCreateWithData(bytes, bytes, size, YYCGDataProviderReleaseDataCallback);
  1830. if (!provider) {
  1831. free(bytes);
  1832. return NULL;
  1833. }
  1834. bytes = NULL; // hold by provider
  1835. CGImageSourceRef source = CGImageSourceCreateWithDataProvider(provider, NULL);
  1836. if (!source) {
  1837. CFRelease(provider);
  1838. return NULL;
  1839. }
  1840. CFRelease(provider);
  1841. if(CGImageSourceGetCount(source) < 1) {
  1842. CFRelease(source);
  1843. return NULL;
  1844. }
  1845. CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, (CFDictionaryRef)@{(id)kCGImageSourceShouldCache:@(YES)});
  1846. CFRelease(source);
  1847. if (!imageRef) return NULL;
  1848. if (extendToCanvas) {
  1849. CGContextRef context = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst); //bgrA
  1850. if (context) {
  1851. CGContextDrawImage(context, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), imageRef);
  1852. CFRelease(imageRef);
  1853. imageRef = CGBitmapContextCreateImage(context);
  1854. CFRelease(context);
  1855. if (decoded) *decoded = YES;
  1856. }
  1857. }
  1858. return imageRef;
  1859. }
  1860. #if YYIMAGE_WEBP_ENABLED
  1861. if (_webpSource) {
  1862. WebPIterator iter;
  1863. if (!WebPDemuxGetFrame(_webpSource, (int)(index + 1), &iter)) return NULL; // demux webp frame data
  1864. // frame numbers are one-based in webp -----------^
int frameWidth = iter.width;
int frameHeight = iter.height;
if (frameWidth < 1 || frameHeight < 1) {
    WebPDemuxReleaseIterator(&iter); // release the demux iterator on early return
    return NULL;
}
int width = extendToCanvas ? (int)_width : frameWidth;
int height = extendToCanvas ? (int)_height : frameHeight;
if (width > _width || height > _height) {
    WebPDemuxReleaseIterator(&iter); // release the demux iterator on early return
    return NULL;
}
  1871. const uint8_t *payload = iter.fragment.bytes;
  1872. size_t payloadSize = iter.fragment.size;
  1873. WebPDecoderConfig config;
  1874. if (!WebPInitDecoderConfig(&config)) {
  1875. WebPDemuxReleaseIterator(&iter);
  1876. return NULL;
  1877. }
  1878. if (WebPGetFeatures(payload , payloadSize, &config.input) != VP8_STATUS_OK) {
  1879. WebPDemuxReleaseIterator(&iter);
  1880. return NULL;
  1881. }
  1882. size_t bitsPerComponent = 8;
  1883. size_t bitsPerPixel = 32;
  1884. size_t bytesPerRow = YYImageByteAlign(bitsPerPixel / 8 * width, 32);
  1885. size_t length = bytesPerRow * height;
  1886. CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst; //bgrA
  1887. void *pixels = calloc(1, length);
  1888. if (!pixels) {
  1889. WebPDemuxReleaseIterator(&iter);
  1890. return NULL;
  1891. }
  1892. config.output.colorspace = MODE_bgrA;
  1893. config.output.is_external_memory = 1;
  1894. config.output.u.RGBA.rgba = pixels;
  1895. config.output.u.RGBA.stride = (int)bytesPerRow;
  1896. config.output.u.RGBA.size = length;
  1897. VP8StatusCode result = WebPDecode(payload, payloadSize, &config); // decode
  1898. if ((result != VP8_STATUS_OK) && (result != VP8_STATUS_NOT_ENOUGH_DATA)) {
  1899. WebPDemuxReleaseIterator(&iter);
  1900. free(pixels);
  1901. return NULL;
  1902. }
  1903. WebPDemuxReleaseIterator(&iter);
  1904. if (extendToCanvas && (iter.x_offset != 0 || iter.y_offset != 0)) {
  1905. void *tmp = calloc(1, length);
  1906. if (tmp) {
  1907. vImage_Buffer src = {pixels, height, width, bytesPerRow};
  1908. vImage_Buffer dest = {tmp, height, width, bytesPerRow};
  1909. vImage_CGAffineTransform transform = {1, 0, 0, 1, iter.x_offset, -iter.y_offset};
  1910. uint8_t backColor[4] = {0};
  1911. vImage_Error error = vImageAffineWarpCG_ARGB8888(&src, &dest, NULL, &transform, backColor, kvImageBackgroundColorFill);
  1912. if (error == kvImageNoError) {
  1913. memcpy(pixels, tmp, length);
  1914. }
  1915. free(tmp);
  1916. }
  1917. }
  1918. CGDataProviderRef provider = CGDataProviderCreateWithData(pixels, pixels, length, YYCGDataProviderReleaseDataCallback);
  1919. if (!provider) {
  1920. free(pixels);
  1921. return NULL;
  1922. }
  1923. pixels = NULL; // hold by provider
  1924. CGImageRef image = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, YYCGColorSpaceGetDeviceRGB(), bitmapInfo, provider, NULL, false, kCGRenderingIntentDefault);
  1925. CFRelease(provider);
  1926. if (decoded) *decoded = YES;
  1927. return image;
  1928. }
  1929. #endif
  1930. return NULL;
  1931. }
  1932. - (BOOL)_createBlendContextIfNeeded {
  1933. if (!_blendCanvas) {
  1934. _blendFrameIndex = NSNotFound;
  1935. _blendCanvas = CGBitmapContextCreate(NULL, _width, _height, 8, 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  1936. }
  1937. BOOL suc = _blendCanvas != NULL;
  1938. return suc;
  1939. }
  1940. - (void)_blendImageWithFrame:(_YYImageDecoderFrame *)frame {
  1941. if (frame.dispose == YYImageDisposePrevious) {
  1942. // nothing
  1943. } else if (frame.dispose == YYImageDisposeBackground) {
  1944. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1945. } else { // no dispose
  1946. if (frame.blend == YYImageBlendOver) {
  1947. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1948. if (unblendImage) {
  1949. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1950. CFRelease(unblendImage);
  1951. }
  1952. } else {
  1953. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1954. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1955. if (unblendImage) {
  1956. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1957. CFRelease(unblendImage);
  1958. }
  1959. }
  1960. }
  1961. }
  1962. - (CGImageRef)_newBlendedImageWithFrame:(_YYImageDecoderFrame *)frame CF_RETURNS_RETAINED{
  1963. CGImageRef imageRef = NULL;
  1964. if (frame.dispose == YYImageDisposePrevious) {
  1965. if (frame.blend == YYImageBlendOver) {
  1966. CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
  1967. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1968. if (unblendImage) {
  1969. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1970. CFRelease(unblendImage);
  1971. }
  1972. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1973. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1974. if (previousImage) {
  1975. CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
  1976. CFRelease(previousImage);
  1977. }
  1978. } else {
  1979. CGImageRef previousImage = CGBitmapContextCreateImage(_blendCanvas);
  1980. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1981. if (unblendImage) {
  1982. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  1983. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1984. CFRelease(unblendImage);
  1985. }
  1986. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  1987. CGContextClearRect(_blendCanvas, CGRectMake(0, 0, _width, _height));
  1988. if (previousImage) {
  1989. CGContextDrawImage(_blendCanvas, CGRectMake(0, 0, _width, _height), previousImage);
  1990. CFRelease(previousImage);
  1991. }
  1992. }
  1993. } else if (frame.dispose == YYImageDisposeBackground) {
  1994. if (frame.blend == YYImageBlendOver) {
  1995. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  1996. if (unblendImage) {
  1997. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  1998. CFRelease(unblendImage);
  1999. }
  2000. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2001. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2002. } else {
  2003. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2004. if (unblendImage) {
  2005. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2006. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2007. CFRelease(unblendImage);
  2008. }
  2009. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2010. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2011. }
  2012. } else { // no dispose
  2013. if (frame.blend == YYImageBlendOver) {
  2014. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2015. if (unblendImage) {
  2016. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2017. CFRelease(unblendImage);
  2018. }
  2019. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2020. } else {
  2021. CGImageRef unblendImage = [self _newUnblendedImageAtIndex:frame.index extendToCanvas:NO decoded:NULL];
  2022. if (unblendImage) {
  2023. CGContextClearRect(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height));
  2024. CGContextDrawImage(_blendCanvas, CGRectMake(frame.offsetX, frame.offsetY, frame.width, frame.height), unblendImage);
  2025. CFRelease(unblendImage);
  2026. }
  2027. imageRef = CGBitmapContextCreateImage(_blendCanvas);
  2028. }
  2029. }
  2030. return imageRef;
  2031. }
  2032. @end
  2033. ////////////////////////////////////////////////////////////////////////////////
  2034. #pragma mark - Encoder
  2035. @implementation YYImageEncoder {
  2036. NSMutableArray *_images;
  2037. NSMutableArray *_durations;
  2038. }
  2039. - (instancetype)init {
  2040. @throw [NSException exceptionWithName:@"YYImageEncoder init error" reason:@"YYImageEncoder must be initialized with a type. Use 'initWithType:' instead." userInfo:nil];
  2041. return [self initWithType:YYImageTypeUnknown];
  2042. }
  2043. - (instancetype)initWithType:(YYImageType)type {
  2044. if (type == YYImageTypeUnknown || type >= YYImageTypeOther) {
  2045. NSLog(@"[%s: %d] Unsupported image type:%d",__FUNCTION__, __LINE__, (int)type);
  2046. return nil;
  2047. }
  2048. #if !YYIMAGE_WEBP_ENABLED
  2049. if (type == YYImageTypeWebP) {
  2050. NSLog(@"[%s: %d] WebP is not available, check the documentation to see how to install WebP component: https://github.com/ibireme/YYImage#installation", __FUNCTION__, __LINE__);
  2051. return nil;
  2052. }
  2053. #endif
  2054. self = [super init];
  2055. if (!self) return nil;
  2056. _type = type;
  2057. _images = [NSMutableArray new];
  2058. _durations = [NSMutableArray new];
  2059. switch (type) {
  2060. case YYImageTypeJPEG:
  2061. case YYImageTypeJPEG2000: {
  2062. _quality = 0.9;
  2063. } break;
  2064. case YYImageTypeTIFF:
  2065. case YYImageTypeBMP:
  2066. case YYImageTypeGIF:
  2067. case YYImageTypeICO:
  2068. case YYImageTypeICNS:
  2069. case YYImageTypePNG: {
  2070. _quality = 1;
  2071. _lossless = YES;
  2072. } break;
  2073. case YYImageTypeWebP: {
  2074. _quality = 0.8;
  2075. } break;
  2076. default:
  2077. break;
  2078. }
  2079. return self;
  2080. }
  2081. - (void)setQuality:(CGFloat)quality {
  2082. _quality = quality < 0 ? 0 : quality > 1 ? 1 : quality;
  2083. }
  2084. - (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration {
  2085. if (!image.CGImage) return;
  2086. duration = duration < 0 ? 0 : duration;
  2087. [_images addObject:image];
  2088. [_durations addObject:@(duration)];
  2089. }
  2090. - (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration {
  2091. if (data.length == 0) return;
  2092. duration = duration < 0 ? 0 : duration;
  2093. [_images addObject:data];
  2094. [_durations addObject:@(duration)];
  2095. }
  2096. - (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration {
  2097. if (path.length == 0) return;
  2098. duration = duration < 0 ? 0 : duration;
  2099. NSURL *url = [NSURL URLWithString:path];
  2100. if (!url) return;
  2101. [_images addObject:url];
  2102. [_durations addObject:@(duration)];
  2103. }
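// Usage sketch (illustrative): assembling an animated GIF. It assumes the public
// -encode method and the loopCount property declared in YYImageCoder.h.
static NSData *_YYExampleEncodeGIF(NSArray *frames) {
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
    encoder.loopCount = 0; // 0 = loop forever
    for (UIImage *image in frames) {
        [encoder addImage:image duration:0.1];
    }
    return [encoder encode];
}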
  2104. - (BOOL)_imageIOAvaliable {
  2105. switch (_type) {
  2106. case YYImageTypeJPEG:
  2107. case YYImageTypeJPEG2000:
  2108. case YYImageTypeTIFF:
  2109. case YYImageTypeBMP:
  2110. case YYImageTypeICO:
  2111. case YYImageTypeICNS:
  2112. case YYImageTypeGIF: {
  2113. return _images.count > 0;
  2114. } break;
  2115. case YYImageTypePNG: {
  2116. return _images.count == 1;
  2117. } break;
  2118. case YYImageTypeWebP: {
  2119. return NO;
  2120. } break;
  2121. default: return NO;
  2122. }
  2123. }
  2124. - (CGImageDestinationRef)_newImageDestination:(id)dest imageCount:(NSUInteger)count CF_RETURNS_RETAINED {
  2125. if (!dest) return nil;
  2126. CGImageDestinationRef destination = NULL;
  2127. if ([dest isKindOfClass:[NSString class]]) {
  2128. NSURL *url = [[NSURL alloc] initFileURLWithPath:dest];
  2129. if (url) {
  2130. destination = CGImageDestinationCreateWithURL((CFURLRef)url, YYImageTypeToUTType(_type), count, NULL);
  2131. }
  2132. } else if ([dest isKindOfClass:[NSMutableData class]]) {
  2133. destination = CGImageDestinationCreateWithData((CFMutableDataRef)dest, YYImageTypeToUTType(_type), count, NULL);
  2134. }
  2135. return destination;
  2136. }
  2137. - (void)_encodeImageWithDestination:(CGImageDestinationRef)destination imageCount:(NSUInteger)count {
  2138. if (_type == YYImageTypeGIF) {
  2139. NSDictionary *gifProperty = @{(__bridge id)kCGImagePropertyGIFDictionary:
  2140. @{(__bridge id)kCGImagePropertyGIFLoopCount: @(_loopCount)}};
  2141. CGImageDestinationSetProperties(destination, (__bridge CFDictionaryRef)gifProperty);
  2142. }
  2143. for (int i = 0; i < count; i++) {
  2144. @autoreleasepool {
  2145. id imageSrc = _images[i];
  2146. NSDictionary *frameProperty = NULL;
  2147. if (_type == YYImageTypeGIF && count > 1) {
  2148. frameProperty = @{(NSString *)kCGImagePropertyGIFDictionary : @{(NSString *) kCGImagePropertyGIFDelayTime:_durations[i]}};
  2149. } else {
  2150. frameProperty = @{(id)kCGImageDestinationLossyCompressionQuality : @(_quality)};
  2151. }
  2152. if ([imageSrc isKindOfClass:[UIImage class]]) {
  2153. UIImage *image = imageSrc;
  2154. if (image.imageOrientation != UIImageOrientationUp && image.CGImage) {
  2155. CGBitmapInfo info = CGImageGetBitmapInfo(image.CGImage) | CGImageGetAlphaInfo(image.CGImage);
  2156. CGImageRef rotated = YYCGImageCreateCopyWithOrientation(image.CGImage, image.imageOrientation, info);
  2157. if (rotated) {
  2158. image = [UIImage imageWithCGImage:rotated];
  2159. CFRelease(rotated);
  2160. }
  2161. }
  2162. if (image.CGImage) CGImageDestinationAddImage(destination, ((UIImage *)imageSrc).CGImage, (CFDictionaryRef)frameProperty);
  2163. } else if ([imageSrc isKindOfClass:[NSURL class]]) {
  2164. CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)imageSrc, NULL);
  2165. if (source) {
  2166. CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
  2167. CFRelease(source);
  2168. }
  2169. } else if ([imageSrc isKindOfClass:[NSData class]]) {
  2170. CGImageSourceRef source = CGImageSourceCreateWithData((CFDataRef)imageSrc, NULL);
  2171. if (source) {
  2172. CGImageDestinationAddImageFromSource(destination, source, 0, (CFDictionaryRef)frameProperty);
  2173. CFRelease(source);
  2174. }
  2175. }
  2176. }
  2177. }
  2178. }
  2179. - (CGImageRef)_newCGImageFromIndex:(NSUInteger)index decoded:(BOOL)decoded CF_RETURNS_RETAINED {
  2180. UIImage *image = nil;
  2181. id imageSrc= _images[index];
  2182. if ([imageSrc isKindOfClass:[UIImage class]]) {
  2183. image = imageSrc;
  2184. } else if ([imageSrc isKindOfClass:[NSURL class]]) {
  2185. image = [UIImage imageWithContentsOfFile:((NSURL *)imageSrc).absoluteString];
  2186. } else if ([imageSrc isKindOfClass:[NSData class]]) {
  2187. image = [UIImage imageWithData:imageSrc];
  2188. }
  2189. if (!image) return NULL;
  2190. CGImageRef imageRef = image.CGImage;
  2191. if (!imageRef) return NULL;
  2192. if (image.imageOrientation != UIImageOrientationUp) {
  2193. return YYCGImageCreateCopyWithOrientation(imageRef, image.imageOrientation, kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  2194. }
  2195. if (decoded) {
  2196. return YYCGImageCreateDecodedCopy(imageRef, YES);
  2197. }
  2198. return (CGImageRef)CFRetain(imageRef);
  2199. }
  2200. - (NSData *)_encodeWithImageIO {
  2201. NSMutableData *data = [NSMutableData new];
  2202. NSUInteger count = _type == YYImageTypeGIF ? _images.count : 1;
  2203. CGImageDestinationRef destination = [self _newImageDestination:data imageCount:count];
  2204. BOOL suc = NO;
  2205. if (destination) {
  2206. [self _encodeImageWithDestination:destination imageCount:count];
  2207. suc = CGImageDestinationFinalize(destination);
  2208. CFRelease(destination);
  2209. }
  2210. if (suc && data.length > 0) {
  2211. return data;
  2212. } else {
  2213. return nil;
  2214. }
  2215. }
  2216. - (BOOL)_encodeWithImageIO:(NSString *)path {
  2217. NSUInteger count = _type == YYImageTypeGIF ? _images.count : 1;
  2218. CGImageDestinationRef destination = [self _newImageDestination:path imageCount:count];
  2219. BOOL suc = NO;
  2220. if (destination) {
  2221. [self _encodeImageWithDestination:destination imageCount:count];
  2222. suc = CGImageDestinationFinalize(destination);
  2223. CFRelease(destination);
  2224. }
  2225. return suc;
  2226. }
  2227. - (NSData *)_encodeAPNG {
  2228. // encode APNG (ImageIO doesn't support APNG encoding, so we use a custom encoder)
  2229. NSMutableArray *pngDatas = [NSMutableArray new];
  2230. NSMutableArray *pngSizes = [NSMutableArray new];
  2231. NSUInteger canvasWidth = 0, canvasHeight = 0;
  2232. for (int i = 0; i < _images.count; i++) {
  2233. CGImageRef decoded = [self _newCGImageFromIndex:i decoded:YES];
  2234. if (!decoded) return nil;
  2235. CGSize size = CGSizeMake(CGImageGetWidth(decoded), CGImageGetHeight(decoded));
  2236. [pngSizes addObject:[NSValue valueWithCGSize:size]];
  2237. if (canvasWidth < size.width) canvasWidth = size.width;
  2238. if (canvasHeight < size.height) canvasHeight = size.height;
  2239. CFDataRef frameData = YYCGImageCreateEncodedData(decoded, YYImageTypePNG, 1);
  2240. CFRelease(decoded);
  2241. if (!frameData) return nil;
  2242. [pngDatas addObject:(__bridge id)(frameData)];
  2243. CFRelease(frameData);
  2244. if (size.width < 1 || size.height < 1) return nil;
  2245. }
  2246. CGSize firstFrameSize = [(NSValue *)[pngSizes firstObject] CGSizeValue];
  2247. if (firstFrameSize.width < canvasWidth || firstFrameSize.height < canvasHeight) {
  2248. CGImageRef decoded = [self _newCGImageFromIndex:0 decoded:YES];
  2249. if (!decoded) return nil;
  2250. CGContextRef context = CGBitmapContextCreate(NULL, canvasWidth, canvasHeight, 8,
  2251. 0, YYCGColorSpaceGetDeviceRGB(), kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst);
  2252. if (!context) {
  2253. CFRelease(decoded);
  2254. return nil;
  2255. }
  2256. CGContextDrawImage(context, CGRectMake(0, canvasHeight - firstFrameSize.height, firstFrameSize.width, firstFrameSize.height), decoded);
  2257. CFRelease(decoded);
  2258. CGImageRef extendedImage = CGBitmapContextCreateImage(context);
  2259. CFRelease(context);
  2260. if (!extendedImage) return nil;
  2261. CFDataRef frameData = YYCGImageCreateEncodedData(extendedImage, YYImageTypePNG, 1);
  2262. if (!frameData) {
  2263. CFRelease(extendedImage);
  2264. return nil;
  2265. }
  2266. pngDatas[0] = (__bridge id)(frameData);
  2267. CFRelease(frameData);
  2268. }
  2269. NSData *firstFrameData = pngDatas[0];
  2270. yy_png_info *info = yy_png_info_create(firstFrameData.bytes, (uint32_t)firstFrameData.length);
  2271. if (!info) return nil;
  2272. NSMutableData *result = [NSMutableData new];
  2273. BOOL insertBefore = NO, insertAfter = NO;
  2274. uint32_t apngSequenceIndex = 0;
  2275. uint32_t png_header[2];
  2276. png_header[0] = YY_FOUR_CC(0x89, 0x50, 0x4E, 0x47);
  2277. png_header[1] = YY_FOUR_CC(0x0D, 0x0A, 0x1A, 0x0A);
  2278. [result appendBytes:png_header length:8];
  2279. for (int i = 0; i < info->chunk_num; i++) {
  2280. yy_png_chunk_info *chunk = info->chunks + i;
  2281. if (!insertBefore && chunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2282. insertBefore = YES;
  2283. // insert acTL (APNG Control)
  2284. uint32_t acTL[5] = {0};
  2285. acTL[0] = yy_swap_endian_uint32(8); //length
  2286. acTL[1] = YY_FOUR_CC('a', 'c', 'T', 'L'); // fourcc
  2287. acTL[2] = yy_swap_endian_uint32((uint32_t)pngDatas.count); // num frames
  2288. acTL[3] = yy_swap_endian_uint32((uint32_t)_loopCount); // num plays
  2289. acTL[4] = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(acTL + 1), 12)); //crc32
  2290. [result appendBytes:acTL length:20];
  2291. // insert fcTL (first frame control)
  2292. yy_png_chunk_fcTL chunk_fcTL = {0};
  2293. chunk_fcTL.sequence_number = apngSequenceIndex;
  2294. chunk_fcTL.width = (uint32_t)firstFrameSize.width;
  2295. chunk_fcTL.height = (uint32_t)firstFrameSize.height;
  2296. yy_png_delay_to_fraction([(NSNumber *)_durations[0] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
  2297. chunk_fcTL.delay_num = chunk_fcTL.delay_num;
  2298. chunk_fcTL.delay_den = chunk_fcTL.delay_den;
  2299. chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
  2300. chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
  2301. uint8_t fcTL[38] = {0};
  2302. *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
  2303. *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
  2304. yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
  2305. *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
  2306. [result appendBytes:fcTL length:38];
  2307. apngSequenceIndex++;
  2308. }
  2309. if (!insertAfter && insertBefore && chunk->fourcc != YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2310. insertAfter = YES;
  2311. // insert fcTL and fdAT (APNG frame control and data)
  2312. for (int i = 1; i < pngDatas.count; i++) {
  2313. NSData *frameData = pngDatas[i];
  2314. yy_png_info *frame = yy_png_info_create(frameData.bytes, (uint32_t)frameData.length);
  2315. if (!frame) {
  2316. yy_png_info_release(info);
  2317. return nil;
  2318. }
// insert fcTL (frame control for this frame)
  2320. yy_png_chunk_fcTL chunk_fcTL = {0};
  2321. chunk_fcTL.sequence_number = apngSequenceIndex;
  2322. chunk_fcTL.width = frame->header.width;
  2323. chunk_fcTL.height = frame->header.height;
  2324. yy_png_delay_to_fraction([(NSNumber *)_durations[i] doubleValue], &chunk_fcTL.delay_num, &chunk_fcTL.delay_den);
  2325. chunk_fcTL.delay_num = chunk_fcTL.delay_num;
  2326. chunk_fcTL.delay_den = chunk_fcTL.delay_den;
  2327. chunk_fcTL.dispose_op = YY_PNG_DISPOSE_OP_BACKGROUND;
  2328. chunk_fcTL.blend_op = YY_PNG_BLEND_OP_SOURCE;
  2329. uint8_t fcTL[38] = {0};
  2330. *((uint32_t *)fcTL) = yy_swap_endian_uint32(26); //length
  2331. *((uint32_t *)(fcTL + 4)) = YY_FOUR_CC('f', 'c', 'T', 'L'); // fourcc
  2332. yy_png_chunk_fcTL_write(&chunk_fcTL, fcTL + 8);
  2333. *((uint32_t *)(fcTL + 34)) = yy_swap_endian_uint32((uint32_t)crc32(0, (const Bytef *)(fcTL + 4), 30));
  2334. [result appendBytes:fcTL length:38];
  2335. apngSequenceIndex++;
  2336. // insert fdAT (frame data)
  2337. for (int d = 0; d < frame->chunk_num; d++) {
  2338. yy_png_chunk_info *dchunk = frame->chunks + d;
  2339. if (dchunk->fourcc == YY_FOUR_CC('I', 'D', 'A', 'T')) {
  2340. uint32_t length = yy_swap_endian_uint32(dchunk->length + 4);
  2341. [result appendBytes:&length length:4]; //length
  2342. uint32_t fourcc = YY_FOUR_CC('f', 'd', 'A', 'T');
  2343. [result appendBytes:&fourcc length:4]; //fourcc
  2344. uint32_t sq = yy_swap_endian_uint32(apngSequenceIndex);
  2345. [result appendBytes:&sq length:4]; //data (sq)
  2346. [result appendBytes:(((uint8_t *)frameData.bytes) + dchunk->offset + 8) length:dchunk->length]; //data
  2347. uint8_t *bytes = ((uint8_t *)result.bytes) + result.length - dchunk->length - 8;
  2348. uint32_t crc = yy_swap_endian_uint32((uint32_t)crc32(0, bytes, dchunk->length + 8));
  2349. [result appendBytes:&crc length:4]; //crc
  2350. apngSequenceIndex++;
  2351. }
  2352. }
  2353. yy_png_info_release(frame);
  2354. }
  2355. }
        [result appendBytes:((uint8_t *)firstFrameData.bytes) + chunk->offset length:chunk->length + 12];
    }
    yy_png_info_release(info);
    return result;
}
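// Usage sketch (editor's addition, not part of the original file): building an
// animated APNG through the public YYImageEncoder API exercised by -_encodeAPNG
// above. `loopCount` is assumed to be the public property backing the `_loopCount`
// ivar read into the acTL "num plays" field; the function name and frame values
// are illustrative placeholders.
static __unused NSData *YYExampleEncodeAPNG(NSArray *frames, NSTimeInterval perFrameDuration) {
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypePNG];
    encoder.loopCount = 0; // 0 = repeat forever (APNG acTL "num plays")
    for (UIImage *frame in frames) {
        [encoder addImage:frame duration:perFrameDuration];
    }
    return [encoder encode]; // -encode routes multi-frame PNG to the APNG writer above when ImageIO can't handle it
}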
- (NSData *)_encodeWebP {
#if YYIMAGE_WEBP_ENABLED
    // encode webp
    NSMutableArray *webpDatas = [NSMutableArray new];
    for (NSUInteger i = 0; i < _images.count; i++) {
        CGImageRef image = [self _newCGImageFromIndex:i decoded:NO];
        if (!image) return nil;
        CFDataRef frameData = YYCGImageCreateEncodedWebPData(image, _lossless, _quality, 4, YYImagePresetDefault);
        CFRelease(image);
        if (!frameData) return nil;
        [webpDatas addObject:(__bridge id)frameData];
        CFRelease(frameData);
    }
    if (webpDatas.count == 1) {
        return webpDatas.firstObject;
    } else {
        // multi-frame webp
        WebPMux *mux = WebPMuxNew();
        if (!mux) return nil;
        for (NSUInteger i = 0; i < _images.count; i++) {
            NSData *data = webpDatas[i];
            NSNumber *duration = _durations[i];
            WebPMuxFrameInfo frame = {0};
            frame.bitstream.bytes = data.bytes;
            frame.bitstream.size = data.length;
            frame.duration = (int)(duration.floatValue * 1000.0);
            frame.id = WEBP_CHUNK_ANMF;
            frame.dispose_method = WEBP_MUX_DISPOSE_BACKGROUND;
            frame.blend_method = WEBP_MUX_NO_BLEND;
            if (WebPMuxPushFrame(mux, &frame, 0) != WEBP_MUX_OK) {
                WebPMuxDelete(mux);
                return nil;
            }
        }
        
        WebPMuxAnimParams params = {(uint32_t)0, (int)_loopCount};
        if (WebPMuxSetAnimationParams(mux, &params) != WEBP_MUX_OK) {
            WebPMuxDelete(mux);
            return nil;
        }
        
        WebPData output_data;
        WebPMuxError error = WebPMuxAssemble(mux, &output_data);
        WebPMuxDelete(mux);
        if (error != WEBP_MUX_OK) {
            return nil;
        }
        NSData *result = [NSData dataWithBytes:output_data.bytes length:output_data.size];
        WebPDataClear(&output_data);
        return result.length ? result : nil;
    }
#else
    return nil;
#endif
}
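// Usage sketch (editor's addition): encoding WebP through the same encoder.
// `lossless` and `quality` are assumed to be the public properties backing the
// `_lossless` / `_quality` ivars passed to YYCGImageCreateEncodedWebPData above;
// when YYIMAGE_WEBP_ENABLED is 0 this path simply returns nil. Names and values
// here are placeholders.
static __unused NSData *YYExampleEncodeWebP(UIImage *image) {
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
    encoder.lossless = NO; // lossy bitstream
    encoder.quality = 0.8; // 0.0 ~ 1.0
    [encoder addImage:image duration:0]; // a single frame skips the WebPMux animation path above
    return [encoder encode];
}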
- (NSData *)encode {
    if (_images.count == 0) return nil;
    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO];
    if (_type == YYImageTypePNG) return [self _encodeAPNG];
    if (_type == YYImageTypeWebP) return [self _encodeWebP];
    return nil;
}
- (BOOL)encodeToFile:(NSString *)path {
    if (_images.count == 0 || path.length == 0) return NO;
    if ([self _imageIOAvaliable]) return [self _encodeWithImageIO:path];
    NSData *data = [self encode];
    if (!data) return NO;
    return [data writeToFile:path atomically:YES];
}
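// Usage sketch (editor's addition): writing the encoded result straight to disk.
// As shown above, -encodeToFile: uses -_encodeWithImageIO: when ImageIO can encode
// the type, otherwise it falls back to -encode plus -writeToFile:atomically:.
// The output path and function name are placeholders.
static __unused BOOL YYExampleEncodeToTemporaryFile(YYImageEncoder *encoder) {
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.png"];
    return [encoder encodeToFile:path];
}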
+ (NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality {
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type];
    encoder.quality = quality;
    [encoder addImage:image duration:0];
    return [encoder encode];
}
+ (NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality {
    if (!decoder || decoder.frameCount == 0) return nil;
    YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:type];
    encoder.quality = quality;
    for (int i = 0; i < decoder.frameCount; i++) {
        UIImage *frame = [decoder frameAtIndex:i decodeForDisplay:YES].image;
        [encoder addImageWithData:UIImagePNGRepresentation(frame) duration:[decoder frameDurationAtIndex:i]];
    }
    return [encoder encode];
}

@end
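// Usage sketch (editor's addition): the two convenience constructors above.
// +encodeImage:type:quality: covers the single-image case; the decoder-based
// variant re-encodes every decoded frame (as PNG data, keeping each frame's
// duration). `decoderWithData:scale:` is assumed to be the YYImageDecoder
// factory declared in this library's header; the names below are placeholders.
static __unused void YYExampleConvenienceEncoding(UIImage *photo, NSData *animatedGIFData) {
    // Single image -> JPEG data at 90% quality.
    NSData *jpeg = [YYImageEncoder encodeImage:photo type:YYImageTypeJPEG quality:0.9];
    
    // Animated GIF data -> APNG data, frame by frame.
    YYImageDecoder *decoder = [YYImageDecoder decoderWithData:animatedGIFData scale:1];
    NSData *apng = [YYImageEncoder encodeImageWithDecoder:decoder type:YYImageTypePNG quality:1];
    
    (void)jpeg; (void)apng; // silence unused-variable warnings in this sketch
}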
@implementation UIImage (YYImageCoder)

- (instancetype)yy_imageByDecoded {
    if (self.yy_isDecodedForDisplay) return self;
    CGImageRef imageRef = self.CGImage;
    if (!imageRef) return self;
    CGImageRef newImageRef = YYCGImageCreateDecodedCopy(imageRef, YES);
    if (!newImageRef) return self;
    UIImage *newImage = [[self.class alloc] initWithCGImage:newImageRef scale:self.scale orientation:self.imageOrientation];
    CGImageRelease(newImageRef);
    if (!newImage) newImage = self; // decode failed, return self.
    newImage.yy_isDecodedForDisplay = YES;
    return newImage;
}
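// Usage sketch (editor's addition): pre-decompressing on a background queue so
// the first on-screen draw doesn't pay the decode cost. -yy_imageByDecoded
// returns the receiver itself when decoding fails or is unnecessary, so the
// result is always safe to display. The imageView parameter is a placeholder.
static __unused void YYExamplePredecode(UIImage *image, UIImageView *imageView) {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        UIImage *decoded = [image yy_imageByDecoded];
        dispatch_async(dispatch_get_main_queue(), ^{
            imageView.image = decoded;
        });
    });
}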
- (BOOL)yy_isDecodedForDisplay {
    if (self.images.count > 1 || [self isKindOfClass:[YYSpriteSheetImage class]]) return YES;
    NSNumber *num = objc_getAssociatedObject(self, @selector(yy_isDecodedForDisplay));
    return [num boolValue];
}

- (void)setYy_isDecodedForDisplay:(BOOL)isDecodedForDisplay {
    objc_setAssociatedObject(self, @selector(yy_isDecodedForDisplay), @(isDecodedForDisplay), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (void)yy_saveToAlbumWithCompletionBlock:(void(^)(NSURL *assetURL, NSError *error))completionBlock {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *data = [self _yy_dataRepresentationForSystem:YES];
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        [library writeImageDataToSavedPhotosAlbum:data metadata:nil completionBlock:^(NSURL *assetURL, NSError *error) {
            if (!completionBlock) return;
            if (pthread_main_np()) {
                completionBlock(assetURL, error);
            } else {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completionBlock(assetURL, error);
                });
            }
        }];
    });
}
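// Usage sketch (editor's addition): saving to the photo library. The completion
// block is always delivered on the main thread by the implementation above; the
// logging here is a placeholder for real error handling.
static __unused void YYExampleSaveToAlbum(UIImage *image) {
    [image yy_saveToAlbumWithCompletionBlock:^(NSURL *assetURL, NSError *error) {
        if (error) NSLog(@"save failed: %@", error);
        else NSLog(@"saved to %@", assetURL);
    }];
}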
- (NSData *)yy_imageDataRepresentation {
    return [self _yy_dataRepresentationForSystem:NO];
}
/// @param forSystem YES: used for system album (PNG/JPEG/GIF), NO: used for YYImage (PNG/JPEG/GIF/WebP)
- (NSData *)_yy_dataRepresentationForSystem:(BOOL)forSystem {
    NSData *data = nil;
    if ([self isKindOfClass:[YYImage class]]) {
        YYImage *image = (id)self;
        if (image.animatedImageData) {
            if (forSystem) { // system only supports GIF and PNG
                if (image.animatedImageType == YYImageTypeGIF ||
                    image.animatedImageType == YYImageTypePNG) {
                    data = image.animatedImageData;
                }
            } else {
                data = image.animatedImageData;
            }
        }
    }
    if (!data) {
        CGImageRef imageRef = self.CGImage ? (CGImageRef)CFRetain(self.CGImage) : nil;
        if (imageRef) {
            CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
            CGImageAlphaInfo alphaInfo = CGImageGetAlphaInfo(imageRef) & kCGBitmapAlphaInfoMask;
            BOOL hasAlpha = NO;
            if (alphaInfo == kCGImageAlphaPremultipliedLast ||
                alphaInfo == kCGImageAlphaPremultipliedFirst ||
                alphaInfo == kCGImageAlphaLast ||
                alphaInfo == kCGImageAlphaFirst) {
                hasAlpha = YES;
            }
            if (self.imageOrientation != UIImageOrientationUp) {
                CGImageRef rotated = YYCGImageCreateCopyWithOrientation(imageRef, self.imageOrientation, bitmapInfo | alphaInfo);
                if (rotated) {
                    CFRelease(imageRef);
                    imageRef = rotated;
                }
            }
            @autoreleasepool {
                UIImage *newImage = [UIImage imageWithCGImage:imageRef];
                if (newImage) {
                    if (hasAlpha) {
                        data = UIImagePNGRepresentation(newImage);
                    } else {
                        data = UIImageJPEGRepresentation(newImage, 0.9); // same as Apple's example
                    }
                }
            }
            CFRelease(imageRef);
        }
    }
    if (!data) {
        data = UIImagePNGRepresentation(self);
    }
    return data;
}
@end
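// Usage sketch (editor's addition): archiving an image with
// -yy_imageDataRepresentation. As implemented above, a YYImage keeps its original
// animated data (GIF/APNG/WebP), while a plain UIImage becomes PNG when it has
// alpha and JPEG (quality 0.9) otherwise. The output path is a placeholder.
static __unused BOOL YYExampleArchiveImage(UIImage *image) {
    NSData *data = [image yy_imageDataRepresentation];
    if (!data) return NO;
    NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"image.dat"];
    return [data writeToFile:path atomically:YES];
}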