argb
colorSpace_floatToInt8valuealphaopacityredgreenblue
withValues	withAlphawithOpacitywithRed	withGreenwithBlue_linearizeColorComponentcomputeLuminancelerp
alphaBlendgetAlphaFromOpacity==hashCodetoStringclearsrcdstsrcOverdstOversrcIndstInsrcOutdstOutsrcATopdstATopxorplusmodulatescreenoverlaydarkenlighten
colorDodge	colorBurn	hardLight	softLight
difference	exclusionmultiplyhue
saturationcolor
luminositynonelowmediumhighbuttroundsquaremiterbevelfillstrokehardEdge	antiAliasantiAliasWithSaveLayer_data_kIsAntiAliasIndex_kColorRedIndex_kColorGreenIndex_kColorBlueIndex_kColorAlphaIndex_kColorSpaceIndex_kBlendModeIndex_kStyleIndex_kStrokeWidthIndex_kStrokeCapIndex_kStrokeJoinIndex_kStrokeMiterLimitIndex_kFilterQualityIndex_kMaskFilterIndex_kMaskFilterBlurStyleIndex_kMaskFilterSigmaIndex_kInvertColorIndex_kIsAntiAliasOffset_kColorRedOffset_kColorGreenOffset_kColorBlueOffset_kColorAlphaOffset_kColorSpaceOffset_kBlendModeOffset_kStyleOffset_kStrokeWidthOffset_kStrokeCapOffset_kStrokeJoinOffset_kStrokeMiterLimitOffset_kFilterQualityOffset_kMaskFilterOffset_kMaskFilterBlurStyleOffset_kMaskFilterSigmaOffset_kInvertColorOffset_kDataByteCount_objects_ensureObjectsInitialized_kShaderIndex_kColorFilterIndex_kImageFilterIndex_kObjectCountisAntiAlias_kColorDefault_kBlendModeDefault	blendModestylestrokeWidth	strokeCap
strokeJoin_kStrokeMiterLimitDefaultstrokeMiterLimit
maskFilterfilterQualityshadercolorFilterimageFilterinvertColorssRGBextendedSRGB	displayP3rawRgbarawStraightRgbarawUnmodifiedrawExtendedRgba128pngrgba8888bgra8888rgbaFloat32_imageonCreate	onDispose_debugStackwidthheight	_disposeddisposedebugDisposed
toByteDatadebugGetOpenHandleStackTracesclone	isCloneOf_toByteData_dispose_handlesdurationimage
frameCountrepetitionCountgetNextFrame_cachedFrameCount_frameCount_cachedRepetitionCount_repetitionCount_getNextFramenonZeroevenOdd	intersectunionreverseDifferencefillTypemoveTorelativeMoveTolineTorelativeLineToquadraticBezierTorelativeQuadraticBezierTocubicTorelativeCubicToconicTorelativeConicToarcTo
arcToPointrelativeArcToPointaddRectaddOvaladdArc
addPolygonaddRRectaddPathextendWithPathcloseresetcontainsshift	transform	getBoundscombinecomputeMetrics_constructor_clone_getFillType_setFillType_arcTo_arcToPoint_relativeArcToPoint_addRect_addOval_addArc_addPolygon	_addRRect_addPath_addPathWithMatrix_extendWithPath_extendWithPathAndMatrix	_contains_shift
_transform
_getBounds_oppositionvectorangle	_iteratoriterator_pathMetric_pathMeasurecurrentmoveNextlengthisClosedcontourIndex_measuregetTangentForOffsetextractPath_length
_getPosTan_extractPath	_isClosed_nextContour_nativeNextContourcurrentContourIndexnormalsolidouterinner_style_sigma	_TypeNone	_TypeBlurchildvalues_color
_blendMode_matrix_type
_kTypeMode_kTypeMatrix_kTypeLinearToSrgbGamma_kTypeSrgbToLinearGamma_toNativeImageFilter_toNativeColorFilter_shortDescriptioncreator	_initMode_initMatrix_initLinearToSrgbGamma_initSrgbToLinearGammadatanativeFiltersigmaXsigmaYtileMode_modeStringradiusXradiusYinnerFilterouterFilter	_initBlur_initDilate
_initErode_initColorFilter_initComposed_debugDisposedclamprepeatedmirrordecal_initLinear_initRadial_initConical
_initSweep_validateColorStops_initWithImage
_debugName	fromAsset_shaderRegistry_reinitializeShader_uniformFloatCount_samplerCount_initFromAssetfragmentShader_kEmptyFloat32List_floatssetFloatsetImageSampler_setImageSampler_validateSamplers	trianglestriangleStriptriangleFan_initpointslinespolygonsave	saveLayerrestorerestoreToCountgetSaveCount	translatescalerotateskewgetTransformclipRect	clipRRectclipPathgetLocalClipBoundsgetDestinationClipBounds	drawColordrawLine	drawPaintdrawRect	drawRRect
drawDRRectdrawOval
drawCircledrawArcdrawPath	drawImagedrawImageRectdrawImageNinedrawPicturedrawParagraph
drawPointsdrawRawPointsdrawVertices	drawAtlasdrawRawAtlas
drawShadow	_recorder_sorted_saveLayerWithoutBounds
_saveLayer_scale_getTransform	_clipRect
_clipRRect	_clipPath_getLocalClipBounds_getDestinationClipBounds
_drawColor	_drawLine
_drawPaint	_drawRect
_drawRRect_drawDRRect	_drawOval_drawCircle_drawArc	_drawPath
_drawImage_drawImageRect_drawImageNine_drawPicture_drawPoints_drawVertices
_drawAtlas_drawShadowtoImagetoImageSyncapproximateBytesUsed_toImage_toImageSyncisRecordingendRecording_endRecording_canvas_kBytesPerShadow_kColorOffset	_kXOffset	_kYOffset_kBlurOffsetoffset
blurRadiusconvertRadiusToSigma	blurSigmatoPaintlerpList_encodeShadowsfromUint8ListfromFilePath_initFromFileencodedbytesPerPixelinstantiateCodec_initEncoded_initRaw_width	_getWidth_height
_getHeight_bytesPerPixel_getBytesPerPixel_instantiateCodecmessagestack__rectIsValid_rrectIsValid_offsetIsValid_matrix4IsValid_radiusIsValid_scaleAlphaColor	BlendModeFilterQuality	StrokeCap
StrokeJoinPaintingStyleClipPaint
ColorSpace_colorSpaceToIndex_indexToColorSpaceImageByteFormatPixelFormatImageEventCallbackImage_Image
_wrapImageImageDecoderCallback	FrameInfoCodec_NativeCodecinstantiateImageCodecinstantiateImageCodecFromBufferinstantiateImageCodecWithSize_getDefaultImageSizeTargetImageSizeCallbackTargetImageSizedecodeImageFromList_decodeImageFromListAsyncdecodeImageFromPixelsPathFillTypePathOperationEngineLayer_NativeEngineLayerPath_NativePathTangentPathMetricsPathMetricIterator
PathMetric_PathMeasure	BlurStyle
MaskFilter_ColorTransform_IdentityColorTransform_ClampTransform_MatrixColorTransform_getColorTransformColorFilter_ColorFilterImageFilter_MatrixImageFilter_GaussianBlurImageFilter_DilateImageFilter_ErodeImageFilter_ComposeImageFilter_ImageFilterShaderTileMode_encodeWideColorList_encodeColorList_encodePointList_encodeTwoPointsGradientImageShaderFragmentProgramFragmentShader
VertexModeVertices	PointModeClipOpCanvas_NativeCanvasPictureEventCallbackPicture_NativePicturePictureRecorder_NativePictureRecorderShadowImmutableBufferImageDescriptor_NativeImageDescriptor	_Callback_CallbackWithError_Callbacker_CallbackerWithError	_futurize_futurizeWithErrorPictureRasterizationExceptiondartuiboolRecthasNaNRRectOffsetdxisNaNdyFloat64ListlengtheverydoublevalueisFiniteRadiusxy
withValuesalphaclampDoubleaint
_fromARGBCsRGBfromRGBO
_fromRGBOCfromARGBround
Deprecatedfromredgreenblue
colorSpace	transformmathpowextendedSRGBHSVColormaterialColorstransparentCurveselasticInOutAnimationController_lerpDoublergboverrideObject	identicalruntimeTypehashStringtoStringAsFixedsrcdstclear	saveLayerrestore	blendModesrcOverdstInsrcOutsrcIndstOutdstATopdstOversrcATopplusmultiplyscreenoverlaymodulate	hardLight
colorDodge	colorBurncolor	exclusion
difference
luminosityhue	softLight
saturationblurBackdropFilternonelowmediumhighfilterQualitymatrix	drawImagedrawImageRectdrawImageNine	drawAtlas	strokeCapAsquarestrokeWidthbuttdrawPath
drawPoints
strokeJoinstrokeMiterLimitstyle
drawCirclehardEdge	antiAliasantiAliasWithSaveLayerisAntiAliasbufferasUint32ListsetAll_data_objectstoListpragmaByteDataListfilledgetInt32_kFakeHostEndiansetInt32
getFloat32
setFloat32indexvaluesfillstrokemiterbevel	_TypeNone	_TypeBlur_style_sigmadebugDisposed_validateSamplers	Exception
_debugNamecreator_toNativeColorFilter_toNativeImageFilterfromEnvironmenttoStringStringBufferwrite
toByteDatarawExtendedRgba128	displayP3ArgumentErrorvoidwidgets
StackTracecurrent_handlesaddcallMemoryAllocations	_disposedcontainsremoveisEmptydispose
StateErrorformatrawRgbaFutureUnsupportedErrormaphandle_debugStack__imageNativeFieldWrapperClass1NativeInt32PointerVoidsymbolisLeaf	Uint8Listerrorencodedcallback
asByteDataHandleSetwidthheightgetNextFramecloneDurationimage	CompletersyncdecodeErrorcompleteErrorcompletedurationmillisecondsdurationMillisecondsfuturefromUint8ListtargetWidthtargetHeightallowUpscalinggetTargetSizeintrinsicWidthintrinsicHeightinstantiateCodecthenrawrowBytespixelFormatcodec	frameInfofillTypecombinereverseDifferencexor	intersectSceneBuilderclipPath_clonenonZerozero
fromCircle_opextractPathIterableDoublelefttoprightbottomBoolradiusrotationlargeArc	clockwiseFloat32List_getValue32matrix4fromLTRBforceClosedgetTangentForOffsetcossinatan2computeMetricslineTomoveToIterableBaseIterator
RangeError_nextContourcurrentContourIndexisClosedcloseaddRectstartWithMoveTonextContour
maskFilter
drawShadowcolorFilterfromColorFiltermodelinearToSrgbGammasrgbToLinearGamma_type_listEquals_matrix_color
_blendModehashAll
_kTypeMode_kTypeMatrixfromList_kTypeLinearToSrgbGamma_kTypeSrgbToLinearGammaImageFilteredpushBackdropFilterpushImageFilterclampsigmaXsigmaYtileModeradiusXradiusYMatrix4	diagonal3datainnerFilterinnerouterFilteroutermirrorrepeateddecaldilateerodecomposed_shortDescriptionshaderpaintingGradientLinearGradientRadialGradientBoxDecorationlinearradiali	Int32ListNoSuchMethodError
colorStopsfocalfocalRadiuspi
startAngleendAngle
isNotEmptyUripath
encodeFull	microtask
_fromAssetMap_initFromAsset	debugNamefragmentShader_uniformFloatCount_samplerCountdrawVertices	trianglestriangleStriptriangleFannew
Uint16ListcolorstextureCoordinatesindicesdrawRawPointsclipRectSceneRendererBindingcreateCanvasPictureLayertoImageendRecordingCustomPainterpaintsydoAntiAliasarcTo	ParagraphlayoutParagraphStyleParagraphBuilder	TextAlignParagraphConstraintspointslinespolygonRSTransformscosssintxtyisRecording_canvascullRectlargestminmaxclipOpUint32stack_NativeParagraph_needsLayout_paint
addPicturedrawPicture	onDisposeUint64createPictureRecorder	_recorderonCreate	TextStylenormaloffset
blurRadiusscalelerpshadowIndex_kColorDefaultconvertRadiusToSigma_init_length_initFromFile_initEncodedTttoImageSyncwritelnNativeFieldWrapperClass1CodecEngineLayerPathIterableBaseIterator_ColorTransformImageFilterShaderCanvasPicturePictureRecorderImageDescriptor	Exceptionķ7(Zv}   t T <΀-FzE6Մd`oqr /pΆ݆߆]lnoڇ܇݈e,0`dpՊEMQAmq}ʌn!%37Ў+|̎Ҏ+QxAou-F_3Ւ)Sfw6FkR_ȕɕ)BΖ7]ڗ#)q9NlЙ֚'I[\͚Κ,KLr̛қ (NuҜ'Fdhi2ab'MN˟̠LMˠ̡+1ҡ:Ƣߢ?E0=ekop-Hx|}ƦLܧbͧfڨ%)*{ 156tѪ8<= Dbխ9kЭ֮ag=ְ kU &uʳ 6Kbwóڳ9TεImxζfGҸLsȸθNgy3^gݺ,wļ;DJNOͽ ;?@Loֽ%=Um @1 @{ @ @ @ A< A A B) Bt B B C CW C C DB D D E! Ep E E E F# Fp F G GY G G G H H Hh H H IG I I J6 JQ JU Jc Jg J J J K# K6 K| K} K K L L	 Lf Lo Lp L L M M M MT MZ M M M M N NM N^ Nd N N O O O OL OR O O P P Pb P P P P Q  Q> QD Qm Qs Q Q R& R, R{ R R R R SG S S S S S T: T T T T T UG UP UQ U U V6 V< Vv V| V W W! Wm Ws W W W X) X| X X Y Y Yd Y Y Z Z Zl Zv Zw Z [ [c [i [ [ [ \H \N \ \ \ ]	 ]
 ]Y ]y ] ] ] ^# ^o ^ ^ ^ _ _ _j _u _v _ _ _ `4 `: ` ` a a aV ag am a a a b( b\ bb b b c c c cW c] c c d d$ dr dx d e eh e f f fc fk fl f f g gZ g` g g g h1 h7 h h h h h i= iY i i i i j j j[ j j j k  kO kU k k l l ln l m m mc m m m m m n nU nf n n o o+ oN oO o o o p pl p p p q. q{ q q r r r% r+ rd r r s& s1 s2 s s s s s t tc tm tn t t t u+ u< uB u u u u u v: v v v w w, w- w w w x xN xT x x x y y6 y< y y z# zP zV z z { {$ { { { { { |! |p | | | | } } }X }^ } } } } ~ ~! ~" ~k ~q ~ ~ ~ & 7 = u {    8 F L       P V     , 2    " o    0 6     * 0 y    > D      4      & , u    Z ` p v    + , }     4     " 2 8 n  
   e ~    J P      3 ~       ; ?    <   : >    $ ( u      = v       2      A       > E F    	 V      0 6      ) 6 <      L     " $ % I M [ _    $ d j   % s      9    O t } ~   + 1    G l r        h       U     I O     	  ] n     7 =    B N T d j     ! S Y    ^ j p        + / F      7 }     7 ~          k    $ q   8 @ A m s    C I      t        o u    6      < m    $ * }    P   A G   5      5 O Q R      Y o z {    Q W   = Y    k       c    O T     S T u      E o     : e Î ù   C w ħ    R Œ   P Ǝ   D Ǆ   V Ȩ  4 5 v ɸ ɹ   : U V ʄ      H ˜ ˝     ( o s ̓   4 x | } ͵    % E K [ a Χ    & n σ ϗ  5 Є Н   X o х ї ѭ      E Ҋ   f Ӂ Ӆ ӆ Ӿ    L R Ԕ  6 W ] ժ       c ֗    ` כ ן   > B C ؇ ؍ ص  # ' J o ٭ ٱ ٲ  ; x ~ ڻ    @ b ۨ ۬ ۭ   $ V r     R V W ݘ ݞ  : @ z ހ ޞ ޤ   z ߀   9       - v   ' + P u     , - x   ^     K o u   M S    _ k q     5 U     R V W     . r     D        B    , ^  	 Z ` d e   W ]    P T      ) / e k {    ' o       % \ e x     A K S f p       1 7     " & N        [ a e f       I c u         S t     2 P ~    * U        =      >         = X t     & I Q z     e o ~     +1a"A~B+hdh@rvf n			d		

7
D
F
G
w





 !L^j0467y,tP[\|b'TZou- 4NYak8 q}=DFG7jvwHTU;v$(hl%tQ_cEIW[>"A[_`D   _    !!!L!_!`!!!!""O"U"""##]####$'$S$i$$$$$$$%?%E%%%%%&&&!&&&&&''+'1'v'(((_(x())a)**/*]*a*b***+?+k+q+,,A,,,--*-8-X-f------.G.i.o..///g/z//////0"0q011Z11222f2223.3D3J33333334%4d444455`556
6666>6D6f6677S77778/8_8e888888999'9J9P9k9q9999:::: :e::::;;A;;<"<+<1<P<{<<<===e==>N>>???+?U?W?X?r??@@@^@@@@@AAAeAAABBlBBBCCCC!C%C&CbCDDD,D?DWDcD{DDE*EbEiEEEEEFFFFbFcFFFFFFG/G1G2GLGmGGGGHHHGHKHHHHI8IIJJ@JDJ}JJJJKK2KKKL3LRLsLyLLLLLLLM3MsMMNNgNNNNNO:OVO\OOOOP$P*PwPPPPQ
QQ\QqQsQtQQQQQR6RSRjRRRRRSS5SPSQSySSSTT TaTTTUU)U/UzUUUUV VVNViVnVVVVVVWW<W=WWWWWWX(X)XXXXXY'YYYZ.Z8ZqZZZ[&[2[:[B[[[y[[[[[\.\~\\\\\\]V]X]Y]|]]^^m^^^_4_O__``K````aIaab2b~bccfcccd:dddee#eHe\epeeeef$f0fNfnffffffgg`gggh)hwhhiiVibifiijKjjk7kkl
llXllm.m2mmmmmnnSnnnoo%o:oXo]oooop1pYpcppppq.q5q:q<q=q`qdqqr rOrrrs*s@sDsst+tltuu[uuuv0vzvvvwww4w8wwxxFxsxxxxy:yByyzzEzjzzzzzzz{{5{7{8{{{{{||1|m||||||||}E}}}}}~D~\~~~~mX	YCms	
SW;L59x|aM $nD`t %Bvz ^nBlv*Nt~"Hd#'^rb%&hnOZ\]!U[5;KQ=CFS=r{S5;+1zi+|40BDE*rR]I_9?2by}~$HI~_l	67z@?&'l5>9:	Pu{Y*o$rs!EK]a8}_ lr]1U"<]x./qE`a%f4@F"(LMßíó'-tĨĩ.>\]ţũDoƱƲ7tzCȌȍ#$iɭ)vʕʛ.MNˎ˽Krs̨̮K͖,{dϲfyWѠ06ҭұҲ28ӄjԴ=Պ]֪=׊&vhٜ٢>xz{`ۥ۸۹Z܀܁܍sݒݓ&ބްޱ޽#YZfcno{'Z[gABN89EMmT
+Fa|%)*=>Je!EDi-MMNZvFW[\OP\WXdFGSKX@A j%2tz~/0h!Dnrs-QPQ]	a+/0uFGS 0 4 B    :;D	Mfg=YpqFbhT134|cg	L	j	n	

)
i


$VXY()Dght^.7=W[\h|\`AeiR/mh8>"AXY!oK#=>]^*wHc#$yVx|}P  L h {        !X!!!""{"""###r##$${$$$%%/%0%[%`%%%&#&3&P&V&g&k&l&&&'9'?'''''((%(](^(((()G))))**i***+++++ +m+++,$,7,,,,---h-....l.r....///+/</A/B/\/s/t/00/0e0f0r0000001
1=1>1J11111112:2]2i22223
3)353s33344+4P4T4V4W4444444565M5e555566*6L6c666677777b77888+8@8V8i8q889B99999::):r::;;";s;y;;<<S<n<<<<==-=_=====>><>Z>>>??J?R?q???@@@B@t@|@@@@A#AqAAAB;BHBsBCCUC[CCD'DbDzDDDDEE`EuE{EEEF)F\FFFFGJGGGGHHH/HFHPHZH`HrHxHHHHII#I?I[IeIoIuIJJJMJpJJJJJJK|KKKKLLCLiLsL}LLLLMMMdMtMMMMNNN^NyNNNNOOO,OKOjO}O~OOP'PpPPPPQ+Q,QUQjQQQQQRRERcRzRRRSSRSvSSSTTTTT6TbTvT|TTTUU@UDUEUQUfUUUUVV%V:VQVVVVW(WLW~WWWWWWWWXXDX]XXXYY8YGYYYYYZ.Z2ZZ[[5[p[[[\\#\$\H\\\\]]G][]y]}]~]]^^$^(^)^w^^^^_____`
`;`<```a/aYa[a\aaaaabAbbcc/cLcccddwddde"eheeeefCffg%g)g*gngtghh>hhhhiSiWiXiiij.jSjjjjkMkkkllClclelfllllm#m$mEmmmmmnnn)nmnnnznnnno
o:olopoqo}ooop pdpep|ppppqqqHqIqdq|qqrr0r6r:r;rGrrrrrrssEsYs_sssssstt;t=t>tptttttu.u:uquru~uuuvvvv<vhv|vvvvvvww8w:w;wlwwwwwx(x4xkxlxxxxxyyyy4y`ytyzyyyyyyz/z1z2zezzzzz{{Z{f{{{|||!|w|x|||||}}A}n}r}s}}}}}~~R~~$`
X-12v$TXY;l;?@r<@At+ ?|}!"|!^_"#q2gAG.23C!4D(AE-}*.W[4~BLf]!'OXYfOU!DmB	TZ%~j7q5Tsw@aevxy8VySu(9;<y},}g*0KQP0Nv'TCW"(waSu{h/2MS39k2Px%8ev,"	'-xV§@Ñ2TZĪSŵ}ƃ"EKǘ+=PjȈȰ
1Wɓɦ~ʒIij^_'<Vu͉ͦͧGezΏΩμ 4QR
 :YmІНйк1QѩѱѾJRX\^_ҝ-|Jԙԧԭ:gmճPm։ "2Rאזת6TZ^_k~ؓأاب	مAfڮ,0Z۪4H|ܙܸܾ#$ioݸJ`ޞ,vߣ-H}0FNRS,zABw<|6<@A]|}u134gk8JE#$@A6m6Upz=hN/5\@QW!=ABx~l=AB%`l0?@134CPQh15a(v%t  ]  IdhLnrSEh: OU"p o		?	E		
7

gw}9?-y6=|${$E)8DNVis	6Dx OU>U	ZAmV6!m?P`u   t    !@!x!!"5"""""""# #b#h#l#m#$$)$]$$$$%O%r%%%%%%%%%&B&g&&&&'-'3''''''(((((((())Y)j)))***c*i*+++%+&+`+f+++,
,,,J,P,,,,,- ---/-=-u------...M...//o///0)0r000171;1a1e12 2N22333i3344)4/4x455_5556<6R666677#737977778 8q89	999j9:	:Q:::;<;B;f;l;<<V<<<==h=>>U>>?? ?j?????@@C@l@@@AAA"A(AwABB1B7BEBrBBBC4CkCCCCCD'DmDDDDEEcEEEF.FgFFFFFG-G~GGGGH%H+HyHIIcIJ JKJJK8KSKYKiKoKKL)L?LlLmLLLMMM`MMMNN`NNNNOKOOOOP#PhPPPQQ%Q&QvQQQR>RRRRRSSStSSST1TTTTUUXUUUV)VEVKVVW;WhWWWWWXHXNXXXY2Y[YaYYZZZiZZZZ[/[5[[\\Y\Z\\\]]]b]h]^^+^b^c^__Y_j_p_``i`aaZaabMbibobbcMcccd2dYd_dmddde#eBe|eeeeef0f|fg+g;gAgghhh^hhii&iCiDiij:j@jjk2kkl(l{lllllm*mvmmmnn_nenno$ooooopBp]p|p}pqqqyqrrrcrrssssttTtztttu uMuuuv2vvvwwhwwwxMxxxy4y:yzyzzez{{2{8{|| |x||}}}[}}}}~H~~~*v|6$rZ>+,s_lrIOA+wDJ'-=C<B+u{IJFK/za(y>$}$\bRH;kV>>"K^u}Fd$X6|2u,2@U+agFg'C0^oEy%p$Bq!"flia6<L5 I\s{Dbie'N~`(T]gm|Zm"(WXCy!'4"KeHt&[am´~ÈÎÝåïõ-N}Ĥ;<ŉŏ wyz	)ǆǌZ^_>?ɛ<abnʼ,U~˧/I̅̒̶!%&͈X9TUaOno{[\ѻNvw҃BbӠӦӿBCOnԟԻ$XYeH֖045״#$0k؍ؿXYeٝ,klxږ,01ۇ G܍ܑܒ-.:oݝݡݢ:;G|ޜ޼	
}"OST-Mg<^bcoFJK%&2\|06:;12>xL)ISm&ABNx!	
[}%W%YB[e!8Ps#Ed=uC|^dhi=Qj4NmnzLBf%~IO   q r ~  9=>+Ix	)K,25tu	.dH|	 	&					
:
@
D
E
Q
r


=`5;Yrx,v}~3LezPTU&'3uwx.2fj5N Ajk@aZS:@a-YZ&,zK}:V\#W]s  J T ^ e i j !!!*!W!o!!!!!""R"V"W"""#
##5#E#]#p#z#####$"$f$$$$$$$$%%%"%%%%&&*&+&7&Y&[&\&&&'1'5'['_''(?(()")R)V))*	*J****++%+i++,,,h,,,,--Q----..B.C.......//"/f/l////0E000001#1]1^1x1y1111112 2/2I2O22233F3L333344-444455,5S5z55556-6P6e6f6667E7777888?8E8x8~899T99999::>:?:::;;E;K;;<'<I<]<p<<<<= =.=N=a=u=======>F>>>>?(?r?@	@W@@A8A>AAAABB&B;BJBkBsBBBBBCCBCCCCCCCCD)D/DRDDDDDEE3EnEEEF
F1F7FnFFFFFFFFGGG2GRGtGGGGGGH;HHHHIIII\IIIIJ,JaJbJJJKKGKHKKKKL6L\LbLcL{LLLLLLMMMlMMMMN2NGNNNO,OJONOOOOOPPKPPQQSQQQRR0R\RdRRRRRRS
SGSSSTTTWT_TTTTTU-U.UUUV5VVVVVVWWW,W2WWWWWWXXXXXnXXXY.Y{YZZ4ZGZWZvZZZZZZ[[5[[[[[[\>\V\w\\]]X]r]x]^^Z^^^^___>_a_b__`:`l``````a4aEaFaqawaaaaab4bMbNbbbcc7cIcJcccddRdcdidde(e*e+eeeeef9ffffgLggghhh7hJhphwhhhhiLiPiQijjjpjjkkkhkkkkkkkl(lElFlRlllllmmm+mnmom{mmmmn?nrnnnnnno o;oXooppIpOppppppq
qqqzqqqrfrhrirrrssUsVsssst,t-tntttu/uDuGuuuv vvvv#vWvZvvvvvw)w.w5w8wwxxxhxxyy]yyyyzz2zAzzzzzzzz{{{#{%{&{i{{|,|p|||}}0}=}Q}}}}}}~~~8~<~W~Y~Z~~~KPQ/*0NRT       0ݷΎ/// An immutable 32 bit color value in ARGB format.
///
/// Consider the light teal of the Flutter logo. It is fully opaque, with a red
/// channel value of 0x42 (66), a green channel value of 0xA5 (165), and a blue
/// channel value of 0xF5 (245). In the common "hash syntax" for color values,
/// it would be described as `#42A5F5`.
///
/// Here are some ways it could be constructed:
///
/// ```dart
/// Color c1 = const Color(0xFF42A5F5);
/// Color c2 = const Color.fromARGB(0xFF, 0x42, 0xA5, 0xF5);
/// Color c3 = const Color.fromARGB(255, 66, 165, 245);
/// Color c4 = const Color.fromRGBO(66, 165, 245, 1.0);
/// ```
///
/// If you are having a problem with `Color` wherein it seems your color is just
/// not painting, check to make sure you are specifying the full 8 hexadecimal
/// digits. If you only specify six, then the leading two digits are assumed to
/// be zero, which means fully-transparent:
///
/// ```dart
/// Color c1 = const Color(0xFFFFFF); // fully transparent white (invisible)
/// Color c2 = const Color(0xFFFFFFFF); // fully opaque white (visible)
/// ```
///
/// [Color]'s color components are stored as floating-point values. Care should
/// be taken if one does not want the literal equality provided by `operator==`.
/// To test equality inside of Flutter tests consider using `package:test`'s
/// `isSameColorAs`.
///
/// See also:
///
///  * [Colors](https://api.flutter.dev/flutter/material/Colors-class.html),
///    which defines the colors found in the Material Design specification.
///  * [`isSameColorAs`](https://api.flutter.dev/flutter/flutter_test/isSameColorAs.html),
///    a Matcher to handle floating-point deltas when checking [Color] equality.  5:/// Construct an sRGB color from the lower 32 bits of an [int].
///
/// The bits are interpreted as follows:
///
/// * Bits 24-31 are the alpha value.
/// * Bits 16-23 are the red value.
/// * Bits 8-15 are the green value.
/// * Bits 0-7 are the blue value.
///
/// In other words, if AA is the alpha value in hex, RR the red value in hex,
/// GG the green value in hex, and BB the blue value in hex, a color can be
/// expressed as `const Color(0xAARRGGBB)`.
///
/// For example, to get a fully opaque orange, you would use `const
/// Color(0xFFFF9000)` (`FF` for the alpha, `FF` for the red, `90` for the
/// green, and `00` for the blue).;	@  NRS]qkt~x/// Construct a color with normalized color components.
///
/// Normalized color components allows arbitrary bit depths for color
/// components to be be supported. The values will be normalized relative to
/// the [ColorSpace] argument.      $  /!5  AKL][_pnr(lmu/// Construct an sRGB color from the lower 8 bits of four integers.
///
/// * `a` is the alpha value, with 0 being transparent and 255 being fully
///   opaque.
/// * `r` is [red], from 0 to 255.
/// * `g` is [green], from 0 to 255.
/// * `b` is [blue], from 0 to 255.
///
/// Out of range values are brought into range using modulo 255.
///
/// See also [fromRGBO], which takes the alpha value as a floating point
/// value.v{  }      ŕˀ֕ו 	    	      156@NSZo`mgaiqvb~/// Create an sRGB color from red, green, blue, and opacity, similar to
/// `rgba()` in CSS.
///
/// * `r` is [red], from 0 to 255.
/// * `g` is [green], from 0 to 255.
/// * `b` is [blue], from 0 to 255.
/// * `opacity` is alpha channel of this color as a double, with 0.0 being
///   transparent and 1.0 being fully opaque.
///
/// Out of range values are brought into range using modulo 255.
///
/// See also [fromARGB], which takes the opacity as an integer value.        ĘŘȘ˘Θט쀻   
       (.  CAEXVeZc][_gvtx{y}X/// The alpha channel of this color.
///
/// A value of 0.0 means this color is fully transparent. A value of 1.0 means
/// this color is fully opaque. ^3"/// The red channel of this color. 5$/// The green channel of this color. 4#/// The blue channel of this color. 	@?"/// The color space of this color. ǜ܀/// A 32 bit value representing this color.
///
/// The bits are assigned as follows:
///
/// * Bits 24-31 are the alpha value.
/// * Bits 16-23 are the red value.
/// * Bits 8-15 are the green value.
/// * Bits 0-7 are the blue value.  Мk</// The alpha channel of this color in an 8 bit value.
///
/// A value of 0 means this color is fully transparent. A value of 255 means
/// this color is fully opaque.  &0'd4/// The alpha channel of this color as a double.
///
/// A value of 0.0 means this color is fully transparent. A value of 1.0 means
/// this color is fully opaque.  %Pz4/// The red channel of this color in an 8 bit value.  }(6/// The green channel of this color in an 8 bit value.  O{5/// The blue channel of this color in an 8 bit value.  D   NNY  fn   ΃/// Returns a new color that matches this color with the passed in components
/// changed.
///
/// Changes to color components will be applied before applying changes to the
/// color space.            r	5/// Returns a new color that matches this color with the alpha channel
/// replaced with `a` (which ranges from 0 to 255).
///
/// Out of range values will have unexpected effects. ?D   /// Returns a new color that matches this color with the alpha channel
/// replaced with the given `opacity` (which ranges from 0.0 to 1.0).
///
/// Out of range values will have unexpected effects.   NOYZՀ/// Returns a new color that matches this color with the red channel replaced
/// with `r` (which ranges from 0 to 255).
///
/// Out of range values will have unexpected effects.    	‴/// Returns a new color that matches this color with the green channel
/// replaced with `g` (which ranges from 0 to 255).
///
/// Out of range values will have unexpected effects.    ,/// Returns a new color that matches this color with the blue channel replaced
/// with `b` (which ranges from 0 to 255).
///
/// Out of range values will have unexpected effects.    vŪ     ?`M/// Returns a brightness value between 0 for darkest and 1 for lightest.
///
/// Represents the relative luminance of the color. This value is computationally
/// expensive to calculate.
///
/// See <https://en.wikipedia.org/wiki/Relative_luminance>.   沥/// Linearly interpolate between two colors.
///
/// This is intended to be fast but as a result may be ugly. Consider
/// [HSVColor] or writing custom logic for interpolating colors.
///
/// If either color is null, this function linearly interpolates from a
/// transparent instance of the other color. This is usually preferable to
/// interpolating from [material.Colors.transparent] (`const
/// Color(0x00000000)`), which is specifically transparent _black_.
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]). Each channel
/// will be clamped to the range 0 to 255.
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].        /// Combine the foreground color as a transparent color over top
/// of a background color, and return the resulting combined color.
///
/// This uses standard alpha blending ("SRC over DST") rules to produce a
/// blended color from two colors. This can be used as a performance
/// enhancement when trying to avoid needless alpha blending compositing
/// operations for two things that are solid colors with the same shape, but
/// overlay each other: instead, just paint one with the combined color.      Qw/// Returns an alpha value representative of the provided [opacity] value.
///
/// The [opacity] value may not be null.    BV\  _g  BCž     R J 	/// A description of the style to use when drawing on a [Canvas].
///
/// Most APIs on [Canvas] take a [Paint] object to describe the style
/// to use for that operation.  f  q vY/// Constructs an empty [Paint] object with all fields initialized to
/// their defaults.   } D E I/// Constructs a new [Paint] object with the same fields as [other].
///
/// Any changes made to the object returned will not affect [other], and
/// changes to [other] will not affect the object returned.
///
/// Backends (for example web versus native) may have different performance
/// characteristics. If the code is performance-sensitive, consider profiling
/// and falling back to reusing a single [Paint] object if necessary. J Q   , wL  	 w x ~       '    $ 0  B G& X  l q%    & «  ¿ &    %    ! (  7 <' M  b g% x  Ë Ð' á  õ û-    *    ' +  ? E0 V  s y, Ċ  ģ ĩ( ĺ   >      8 )  L < O T< e  Ō z ŏ Ŕ: ť   Ź  <   
   < #  J 8 M R: c  ƈ w Ƌ Ɛ2 ơ  ƾ Ʊ  >       :   > - A F< W  ~ l ǁ ǆH Ǘ   ǲ  B      < )  P > S XN i  Ȣ ȇ ȥ ȪF Ȼ     >   .  1 x% ɉ  ɛ  3 K    ! ' 8 ( "    '    ' 0  E J" [  k ͷ,    =        3   ' ˟ i/// Whether to apply anti-aliasing to lines and images drawn on the
/// canvas.
///
/// Defaults to true.    u y   ̅
 ̋     Ϗ/// The color to use when stroking or filling a shape.
///
/// Defaults to opaque black.
///
/// See also:
///
///  * [style], which controls whether to stroke or fill (or both).
///  * [colorFilter], which overrides [color].
///  * [shader], which overrides [color] with more elaborate effects.
///
/// This color is not used when compositing. To colorize a layer, use
/// [colorFilter].    遛          /// A blend mode to apply when a shape is drawn or a layer is composited.
///
/// The source colors are from the shape being drawn (e.g. from
/// [Canvas.drawPath]) or layer being composited (the graphics that were drawn
/// between the [Canvas.saveLayer] and [Canvas.restore] calls), after applying
/// the [colorFilter], if any.
///
/// The destination colors are from the background onto which the shape or
/// layer is being composited.
///
/// Defaults to [BlendMode.srcOver].
///
/// See also:
///
///  * [Canvas.saveLayer], which uses its [Paint]'s [blendMode] to composite
///    the layer when [Canvas.restore] is called.
///  * [BlendMode], which discusses the user of [Canvas.saveLayer] with
///    [blendMode].    ס ץ   ׯ ׺    E k/// Whether to paint inside shapes, the edges of shapes, or both.
///
/// Defaults to [PaintingStyle.fill].    ) -   3 B    ٴi Ȁ/// How wide to make edges drawn when [style] is set to
/// [PaintingStyle.stroke]. The width is given in logical pixels measured in
/// the direction orthogonal to the direction of the path.
///
/// Defaults to 0.0, which correspond to a hairline width.      $   0 8    ۯ f/// The kind of finish to place on the end of lines drawn when
/// [style] is set to [PaintingStyle.stroke].
///
/// Defaults to [StrokeCap.butt], i.e. no caps.    ʀ         Y Ƅ+/// The kind of finish to place on the joins between segments.
///
/// This applies to paths drawn when [style] is set to [PaintingStyle.stroke],
/// It does not apply to points drawn as lines with [Canvas.drawPoints].
///
/// Defaults to [StrokeJoin.miter], i.e. sharp corners.
///
/// Some examples of joins:
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/miter_4_join.mp4}
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/round_join.mp4}
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/bevel_join.mp4}
///
/// The centers of the line segments are colored in the diagrams above to
/// highlight the joins, but in normal usage the join is the same color as the
/// line.
///
/// See also:
///
///  * [strokeMiterLimit] to control when miters are replaced by bevels when
///    this is set to [StrokeJoin.miter].
///  * [strokeCap] to control what is drawn at the ends of the stroke.
///  * [StrokeJoin] for the definitive list of stroke joins.    - 1   < H    /r B/// The limit for miters to be drawn on segments when the join is set to
/// [StrokeJoin.miter] and the [style] is set to [PaintingStyle.stroke]. If
/// this limit is exceeded, then a [StrokeJoin.bevel] join will be drawn
/// instead. This may cause some 'popping' of the corners of a path if the
/// angle between line segments is animated, as seen in the diagrams below.
///
/// This limit is expressed as a limit on the length of the miter.
///
/// Defaults to 4.0.  Using zero as a limit will cause a [StrokeJoin.bevel]
/// join to be used all the time.
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/miter_0_join.mp4}
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/miter_4_join.mp4}
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/miter_6_join.mp4}
///
/// The centers of the line segments are colored in the diagrams above to
/// highlight the joins, but in normal usage the join is the same color as the
/// line.
///
/// See also:
///
///  * [strokeJoin] to control the kind of finish to place on the joins
///    between segments.
///  * [strokeCap] to control what is drawn at the ends of the stroke.    褀         YW !/// A mask filter (for example, a blur) to apply to a shape after it has been
/// drawn but before it has been composited into the image.
///
/// See [MaskFilter] for details.    볂         g /// Controls the performance vs quality trade-off to use when sampling bitmaps,
/// as with an [ImageShader], or when drawing images, as with [Canvas.drawImage],
/// [Canvas.drawImageRect], [Canvas.drawImageNine] or [Canvas.drawAtlas].
///
/// Defaults to [FilterQuality.none].    V Z   h w     }k/// The shader to use when stroking or filling a shape.
///
/// When this is null, the [color] is used instead.
///
/// See also:
///
///  * [Gradient], a shader that paints a color gradient.
///  * [ImageShader], a shader that tiles an [Image].
///  * [colorFilter], which overrides [shader].
///  * [color], which is used if [shader] and [colorFilter] are null.             v /// A color filter to apply when a shape is drawn or when a layer is
/// composited.
///
/// See [ColorFilter] for details.
///
/// When a shape is being drawn, [colorFilter] overrides [color] and [shader].    (< ,   8 F    h f/// The [ImageFilter] to use when drawing raster images.
///
/// For example, to blur an image using [Canvas.drawImage], apply an
/// [ImageFilter.blur]:
///
/// ```dart
/// void paint(Canvas canvas, Size size) {
///   canvas.drawImage(
///     _image,
///     ui.Offset.zero,
///     Paint()..imageFilter = ui.ImageFilter.blur(sigmaX: 0.5, sigmaY: 0.5),
///   );
/// }
/// ```
///
/// See also:
///
///  * [MaskFilter], which is used for drawing geometry.             n Á/// Whether the colors of the image are inverted when drawn.
///
/// Inverting the colors of an image applies a new color filter that will
/// be composed with any user provided color filters. This is primarily
/// used for implementing smart invert on iOS.    l #   0
 6    Xr f            h/// Opaque handle to raw decoded image data (pixels).
///
/// To obtain an [Image] object, use the [ImageDescriptor] API.
///
/// To draw an [Image], use one of the methods on the [Canvas] class, such as
/// [Canvas.drawImage].
///
/// A class or method that receives an image object must call [dispose] on the
/// handle when it is no longer needed. To create a shareable reference to the
/// underlying image, call [clone]. The method or object that receives
/// the new instance will then be responsible for disposing it, and the
/// underlying image itself will be disposed when all outstanding handles are
/// disposed.
///
/// If `dart:ui` passes an `Image` object and the recipient wishes to share
/// that handle with other callers, [clone] must be called _before_ [dispose].
/// A handle that has been disposed cannot create new handles anymore.
///
/// See also:
///
///  * [Image](https://api.flutter.dev/flutter/widgets/Image-class.html), the class in the [widgets] library.
///  * [ImageDescriptor], which allows reading information about the image and
///    creating a codec to decode it.
///  * [instantiateImageCodec], a utility method that wraps [ImageDescriptor].    
     / Ā/// A callback that is invoked to report an image creation.
///
/// It's preferred to use [MemoryAllocations] in flutter/foundation.dart
/// than to use [onCreate] directly because [MemoryAllocations]
/// allows multiple callbacks. с /// A callback that is invoked to report the image disposal.
///
/// It's preferred to use [MemoryAllocations] in flutter/foundation.dart
/// than to use [onDispose] directly because [MemoryAllocations]
/// allows multiple callbacks.     !
S!XA/// The number of image pixels along the image's horizontal axis. !bR!?/// The number of image pixels along the image's vertical axis. !! !$%/// Whether this reference to the underlying image is [dispose]d.
///
/// This only returns a valid value if asserts are enabled, and must not be
/// used otherwise.   *d,ˁ/// The color space that is used by the [Image]'s colors.
///
/// This value is a consequence of how the [Image] has been created.  For
/// example, loading a PNG that is in the Display P3 color space will result
/// in a [ColorSpace.extendedSRGB] image.
///
/// On rendering backends that don't support wide gamut colors (anything but
/// iOS impeller), wide gamut images will still report [ColorSpace.sRGB] if
/// rendering wide gamut colors isn't supported.   !Ӄ&#Ӂ/// Release this handle's claim on the underlying Image. This handle is no
/// longer usable after this method is called.
///
/// Once all outstanding handles have been disposed, the underlying image will
/// be disposed as well.
///
/// In debug mode, [debugGetOpenHandleStackTraces] will return a list of
/// [StackTrace] objects from all open handles' creation points. This is
/// useful when trying to determine what parts of the program are keeping an
/// image resident in memory.   &)/// Converts the [Image] object into a byte array.
///
/// The [format] argument specifies the format in which the bytes will be
/// returned.
///
/// Using [ImageByteFormat.rawRgba] on an image in the color space
/// [ColorSpace.extendedSRGB] will result in the gamut being squished to fit
/// into the sRGB gamut, resulting in the loss of wide-gamut colors.
///
/// Returns a future that completes with the binary image data or an error
/// if encoding fails. )0)  )))-.À/// If asserts are enabled, returns the [StackTrace]s of each open handle from
/// [clone], in creation order.
///
/// If asserts are disabled, this method always returns null.   /:ߊ/// Creates a disposable handle to this image.
///
/// Holders of an [Image] must dispose of the image when they no longer need
/// to access it or draw it. However, once the underlying image is disposed,
/// it is no longer possible to use it. If a holder of an image needs to share
/// access to that image with another object or method, [clone] creates a
/// duplicate handle. The underlying image will only be disposed once all
/// outstanding handles are disposed. This allows for safe sharing of image
/// references while still disposing of the underlying resources when all
/// consumers are finished.
///
/// It is safe to pass an [Image] handle to another object or method if the
/// current holder no longer needs it.
///
/// To check whether two [Image] references are referring to the same
/// underlying image memory, use [isCloneOf] rather than the equality operator
/// or [identical].
///
/// The following example demonstrates valid usage.
///
/// ```dart
/// import 'dart:async';
/// import 'dart:typed_data';
/// import 'dart:ui';
///
/// Future<Image> _loadImage(int width, int height) {
///   final Completer<Image> completer = Completer<Image>();
///   decodeImageFromPixels(
///     Uint8List.fromList(List<int>.filled(width * height * 4, 0xFF)),
///     width,
///     height,
///     PixelFormat.rgba8888,
///     // Don't worry about disposing or cloning this image - responsibility
///     // is transferred to the caller, and that is safe since this method
///     // will not touch it again.
///     (Image image) => completer.complete(image),
///   );
///   return completer.future;
/// }
///
/// Future<void> main() async {
///   final Image image = await _loadImage(5, 5);
///   // Make sure to clone the image, because MyHolder might dispose it
///   // and we need to access it again.
///   final MyImageHolder holder = MyImageHolder(image.clone());
///   final MyImageHolder holder2 = MyImageHolder(image.clone());
///   // Now we dispose it because we won't need it again.
///   image.dispose();
///
///   final PictureRecorder recorder = PictureRecorder();
///   final Canvas canvas = Canvas(recorder);
///
///   holder.draw(canvas);
///   holder.dispose();
///
///   canvas.translate(50, 50);
///   holder2.draw(canvas);
///   holder2.dispose();
/// }
///
/// class MyImageHolder {
///   MyImageHolder(this.image);
///
///   final Image image;
///
///   void draw(Canvas canvas) {
///     canvas.drawImage(image, Offset.zero, Paint());
///   }
///
///   void dispose() => image.dispose();
/// }
/// ```
///
/// The returned object behaves identically to this image. Calling
/// [dispose] on it will only dispose the underlying native resources if it
/// is the last remaining handle.   <>R/// Returns true if `other` is a [clone] of this and thus shares the same
/// underlying image memory, even if this or `other` is [dispose]d.
///
/// This method may return false for two images that were decoded from the
/// same underlying asset, if they are not sharing the same memory. For
/// example, if the same file is decoded using [instantiateImageCodec] twice,
/// or the same bytes are decoded using [decodeImageFromPixels] twice, there
/// will be two distinct [Image]s that render the same but do not share
/// underlying memory, and so will not be treated as clones of each other. >>   ?!3?4   ?!?" ?X?}  @'@@@  @@@@@DD D%Fe%Fv FFFFF@fA   @@@@@@@@@@@@@@@@@@@@AhAx   AAAA<A%AA-A;A.A5A:A6A=AcADA>AFA]AWA_FbF   FFFFFFFFFFFFFFFFFAA  A0A  AAAC(C9/// Returns an error message on failure, null on success. C
C  C+D  CdCeCkCCsClC{CC|CCCCCCCCCCD.dD3    EFVk/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).  FFFF*FFFF)FF#F(F$F+FDF2F,F4F9G   FF?X?Y?_?p?`H_N/// Information for a single frame of an animation.
///
/// To obtain an instance of the [FrameInfo] interface, see
/// [Codec.getNextFrame].
///
/// The recipient of an instance of this class is responsible for calling
/// [Image.dispose] on [image]. To share the image with other interested
/// parties, use [Image.clone]. If the [FrameInfo] object itself is passed to
/// another method or object, that method or object must assume it is
/// responsible for disposing the image when done, and the passer must not
/// access the [image] after that point.
///
/// For example, the following code sample is incorrect:
///
/// ```dart
/// /// BAD
/// Future<void> nextFrameRoutine(ui.Codec codec) async {
///   final ui.FrameInfo frameInfo = await codec.getNextFrame();
///   _cacheImage(frameInfo);
///   // ERROR - _cacheImage is now responsible for disposing the image, and
///   // the image may not be available any more for this drawing routine.
///   _drawImage(frameInfo);
///   // ERROR again - the previous methods might or might not have created
///   // handles to the image.
///   frameInfo.image.dispose();
/// }
/// ```
///
/// Correct usage is:
///
/// ```dart
/// /// GOOD
/// Future<void> nextFrameRoutine(ui.Codec codec) async {
///   final ui.FrameInfo frameInfo = await codec.getNextFrame();
///   _cacheImage(frameInfo.image.clone(), frameInfo.duration);
///   _drawImage(frameInfo.image.clone(), frameInfo.duration);
///   // This method is done with its handle, and has passed handles to its
///   // clients already.
///   // The image will live until those clients dispose of their handles, and
///   // this one must not be disposed since it will not be used again.
///   frameInfo.image.dispose();
/// }
/// ``` NOOOÀ/// This class is created by the engine, and should not be instantiated
/// or extended directly.
///
/// To obtain an instance of the [FrameInfo] interface, see
/// [Codec.getNextFrame].OO  OO   OP{/// The duration this frame should be shown.
///
/// A zero duration indicates that the frame should be shown indefinitely. PQj/// The [Image] object for this frame.
///
/// This object must be disposed by the recipient of this frame info.
///
/// To share this image with other interested parties, use [Image.clone].    Qt>Rb/// A handle to an image codec.
///
/// This class is created by the engine, and should not be instantiated
/// or extended directly.
///
/// To obtain an instance of the [Codec] interface, see
/// [instantiateImageCodec].   Rl9R#/// Number of frames in this image.   RS?/// Number of times to repeat the animation.
///
/// * 0 when the animation should be played once.
/// * -1 for infinity repetitions.   SSaT//// Fetches the next animation frame.
///
/// Wraps back to the first frame after returning the last frame.
///
/// The returned future can complete with an error if the decoding has failed.
///
/// The caller of this method is responsible for disposing the
/// [FrameInfo.image] on the returned object.   TU/// Release the resources used by this object. The object is no longer usable
/// after this method is called.
///
/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).    UU  VVVV   VV  WW  VDW   VVW?qW   W?W@WFWdWMWGWUWcWVW]WbW^WeWWlWfWnWWWWSW   WWX+{X   X+X,X2XPX9X3XAXOXBXIXNXJXQXXXXRXZXzXtX|XX   XX[\A9/// Returns an error message on failure, null on success. \O,\t  [[[\[[[\[[\[\\\,\\\\f\   \\\\\\\\\\\\\\\\\\\\j\   \\ |j}/// A specification of the size to which an image should be decoded.
///
/// See also:
///
///  * [TargetImageSizeCallback], a callback that returns instances of this
///    class when consulted by image decoding methods such as
///    [instantiateImageCodecWithSize]. }Ł# ~d~s/// Creates a new instance of this class.
///
/// The `width` and `height` may both be null, but if they're non-null, they
/// must be positive.~u
~{  ~~  ~~~~~~~~~~~~~~~~~~~~~́/// The width into which to load the image.
///
/// If this is non-null, the image will be decoded into the specified width.
/// If this is null and [height] is also null, the image will be decoded into
/// its intrinsic size. If this is null and [height] is non-null, the image
/// will be decoded into a width that maintains its intrinsic aspect ratio
/// while respecting the [height] value.
///
/// If this value is non-null, it must be positive. ց/// The height into which to load the image.
///
/// If this is non-null, the image will be decoded into the specified height.
/// If this is null and [width] is also null, the image will be decoded into
/// its intrinsic size. If this is null and [width] is non-null, the image
/// will be decoded into a height that maintains its intrinsic aspect ratio
/// while respecting the [width] value.
///
/// If this value is non-null, it must be positive.  E    P/// A handle for the framework to hold and retain an engine layer across frames.    &7/// Release the resources used by this object. The object is no longer usable
/// after this method is called.
///
/// EngineLayers indirectly retain platform specific graphics resources. Some
/// of these resources, such as images, may be memory intensive. It is
/// important to dispose of EngineLayer objects that will no longer be used as
/// soon as possible to avoid retaining these resources until the next
/// garbage collection.
///
/// Once this EngineLayer is disposed, it is no longer eligible for use as a
/// retained layer, and must not be passed as an `oldLayer` to any of the
/// [SceneBuilder] methods which accept that parameter.
///
/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).    EIP  }a/// This class is created by the engine, and should not be instantiated
/// or extended directly.     l    !,-3P94AOBINJQpXRZ X/// A complex, one-dimensional subset of a plane.
///
/// A path consists of a number of sub-paths, and a _current point_.
///
/// Sub-paths consist of segments of various types, such as lines,
/// arcs, or beziers. Sub-paths can be open or closed, and can
/// self-intersect.
///
/// Closed sub-paths enclose a (possibly discontiguous) region of the
/// plane based on the current [fillType].
///
/// The _current point_ is initially at the origin. After each
/// operation adding a segment to a sub-path, the current point is
/// updated to the end of that segment.
///
/// Paths can be drawn on canvases using [Canvas.drawPath], and can
/// used to create clip regions using [Canvas.clipPath].     i/// Creates a copy of another [Path].
///
/// This copy is fast and does not require additional memory unless either
/// the `source` path or the path returned by this constructor are modified.    /// Determines how the interior of this path is calculated.
///
/// Defaults to the non-zero winding rule, [PathFillType.nonZero].   &!*  3A   KU2/// Starts a new sub-path at the given coordinate.      rE/// Starts a new sub-path at the given offset from the current point.  	  	   tsO/// Adds a straight line segment from the current point to the given
/// point. z     t/// Adds a straight line segment from the current point to the point
/// at the given offset from the current point. 	'  *	2   9q/// Adds a quadratic bezier segment that curves from the current
/// point to the given point (x2,y2), using the control point
/// (x1,y1).
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_quadratic_to.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_quadratic_to_dark.png#gh-dark-mode-only) 	  	  	  	   '߀/// Adds a quadratic bezier segment that curves from the current
/// point to the point at the offset (x2,y2) from the current point,
/// using the control point at the offset (x1,y1) from the current
/// point. 	  	  	  	"   )r/// Adds a cubic bezier segment that curves from the current point
/// to the given point (x3,y3), using the control points (x1,y1) and
/// (x2,y2).
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_cubic_to.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_cubic_to_dark.png#gh-dark-mode-only) 	  	  	  	  	  	   =/// Adds a cubic bezier segment that curves from the current point
/// to the point at the offset (x3,y3) from the current point, using
/// the control points at the offsets (x1,y1) and (x2,y2) from the
/// current point. 	  		  	  	  "	*  -	5   <b`/// Adds a bezier segment that curves from the current point to the
/// given point (x2,y2), using the control points (x1,y1) and the
/// weight w. If the weight is greater than 1, then the curve is a
/// hyperbola; if the weight equals 1, it's a parabola; and if it is
/// less than 1, it is an ellipse.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_conic_to.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_conic_to_dark.png#gh-dark-mode-only) h	p  s	{  ~	  	     +w/// Adds a bezier segment that curves from the current point to the
/// point at the offset (x2,y2) from the current point, using the
/// control point at the offset (x1,y1) from the current point and
/// the weight w. If the weight is greater than 1, then the curve is
/// a hyperbola; if the weight equals 1, it's a parabola; and if it
/// is less than 1, it is an ellipse. ;	C  F	N  Q	Y  \	d  go   ur/// If the `forceMoveTo` argument is false, adds a straight line
/// segment and an arc segment.
///
/// If the `forceMoveTo` argument is true, starts a new sub-path
/// consisting of an arc segment.
///
/// In either case, the arc segment consists of the arc that follows
/// the edge of the oval bounded by the given rectangle, from
/// startAngle radians around the oval up to startAngle + sweepAngle
/// radians around the oval, with zero radians being the point on
/// the right hand side of the oval that crosses the horizontal line
/// that intersects the center of the rectangle and with positive
/// angles going clockwise around the oval.
///
/// The line segment added if `forceMoveTo` is false starts at the
/// current point and ends at the start of the arc. 	         !yp/// Appends up to four conic curves weighted to describe an oval of `radius`
/// and rotated by `rotation` (measured in degrees and clockwise).
///
/// The first curve begins from the last point in the path and the last ends
/// at `arcEnd`. The curves follow a path in a direction determined by
/// `clockwise` and `largeArc` in such a way that the sweep angle
/// is always less than 360 degrees.
///
/// A simple line is appended if either radii are zero or the last
/// point in the path is `arcEnd`. The radii are scaled to fit the last path
/// point if both are greater than zero but too small to describe an arc.
///           /// Appends up to four conic curves weighted to describe an oval of `radius`
/// and rotated by `rotation` (measured in degrees and clockwise).
///
/// The last path point is described by (px, py).
///
/// The first curve begins from the last point in the path and the last ends
/// at `arcEndDelta.dx + px` and `arcEndDelta.dy + py`. The curves follow a
/// path in a direction determined by `clockwise` and `largeArc`
/// in such a way that the sweep angle is always less than 360 degrees.
///
/// A simple line is appended if either radii are zero, or, both
/// `arcEndDelta.dx` and `arcEndDelta.dy` are zero. The radii are scaled to
/// fit the last path point if both are greater than zero but too small to
/// describe an arc. &.  @H  ai  |    PVWsvY/// Adds a new sub-path that consists of four lines that outline the
/// given rectangle. "	(   1.L/// Adds a new sub-path that consists of a curve that forms the
/// ellipse that fills the given rectangle.
///
/// To add a circle, pass an appropriate rectangle as `oval`. [Rect.fromCircle]
/// can be used to easily describe the circle's center [Offset] and radius. T	Z   c/// Adds a new sub-path with one arc segment that consists of the arc
/// that follows the edge of the oval bounded by the given
/// rectangle, from startAngle radians around the oval up to
/// startAngle + sweepAngle radians around the oval, with zero
/// radians being the point on the right hand side of the oval that
/// crosses the horizontal line that intersects the center of the
/// rectangle and with positive angles going clockwise around the
/// oval.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_add_arc.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_add_arc_dark.png#gh-dark-mode-only)
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_add_arc_ccw.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/path_add_arc_ccw_dark.png#gh-dark-mode-only) 	   %-  8@   OX{/// Adds a new sub-path with a sequence of line segments that connect the given
/// points.
///
/// If `close` is true, a final line segment will be added that connects the
/// last point to the first point.
///
/// The `points` argument is interpreted as offsets from the origin. ĆĔ  ě
ġ   īE/// Adds a new sub-path that consists of the straight lines and
/// curves needed to form the rounded rectangle described by the
/// argument. NU   _Qv/// Adds the sub-paths of `path`, offset by `offset`, to this path.
///
/// If `matrix4` is specified, the path will be transformed by this matrix
/// after the matrix is translated by the given offset. The matrix is a 4x4
/// matrix stored in column major order. ~	Ƅ  ƉƑ  ƙƧ   ƴJ/// Adds the sub-paths of `path`, offset by `offset`, to this path.
/// The current sub-path is extended with the first sub-path
/// of `path`, connecting them with a lineTo if necessary.
///
/// If `matrix4` is specified, the path will be transformed by this matrix
/// after the matrix is translated by the given `offset`.  The matrix is a 4x4
/// matrix stored in column major order. Y	_  dl  tȂ   ȏ/// Closes the last sub-path, as if a straight line had been drawn
/// from the current point to the first point of the sub-path.   &π/// Clears the [Path] object of all sub-paths, returning it to the
/// same state it had when it was created. The _current point_ is
/// reset to the origin.   ہq5F/// Tests to see if the given point is within the path. (That is, whether the
/// point would be in the visible portion of the path if the path was used
/// with [Canvas.clipPath].)
///
/// The `point` argument is interpreted as an offset from the origin.
///
/// Returns true if the point is in the path, and false otherwise. >F   Pj/// Returns a copy of the path with all the segments of every
/// sub-path translated by the given offset.    ݀Rk/// Returns a copy of the path with all the segments of every
/// sub-path transformed by the given matrix. \i   u0/// Computes the bounding rectangle for this path.
///
/// A path containing only axis-aligned points on the same straight line will
/// have no area, and therefore `Rect.isEmpty` will return true for such a
/// path. Consider checking `rect.width + rect.height > 0.0` instead, or
/// using the [computeMetrics] API to check the path length.
///
/// For many more elaborate paths, the bounds may be inaccurate.  For example,
/// when a path contains a circle, the points used to compute the bounds are
/// the circle's implied control points, which form a square around the circle;
/// if the circle has a transformation applied using [transform] then that
/// square is rotated, and the (axis-aligned, non-rotated) bounding box
/// therefore ends up grossly overestimating the actual area covered by the
/// circle.   e0/// Combines the two paths according to the manner specified by the given
/// `operation`.
///
/// The resulting path will be constructed from non-overlapping contours. The
/// curve order is reduced where possible so that cubics may be turned into
/// quadratics, and quadratics maybe turned into lines. m|  ц
ь  ђ
ј   ҴLM/// Creates a [PathMetrics] object for this path, which can describe various
/// properties about the contours of the path.
///
/// A [Path] is made up of zero or more contours. A contour is made up of
/// connected curves and segments, created via methods like [lineTo],
/// [cubicTo], [arcTo], [quadraticBezierTo], their relative counterparts, as
/// well as the add* methods such as [addRect]. Creating a new [Path] starts
/// a new contour once it has any drawing instructions, and another new
/// contour is started for each [moveTo] instruction.
///
/// A [PathMetric] object describes properties of an individual contour,
/// such as its length, whether it is closed, what the tangent vector of a
/// particular offset along the path is. It also provides a method for
/// creating sub-paths: [PathMetric.extractPath].
///
/// Calculating [PathMetric] objects is not trivial. The [PathMetrics] object
/// returned by this method is a lazy [Iterable], meaning it only performs
/// calculations when the iterator is moved to the next [PathMetric]. Callers
/// that wish to memoize this iterable can easily do so by using
/// [Iterable.toList] on the result of this method. In particular, callers
/// looking for information about how many contours are in the path should
/// either store the result of `path.computeMetrics().length`, or should use
/// `path.computeMetrics().toList()` so they can repeatedly check the length,
/// since calling `Iterable.length` causes traversal of the entire iterable.
///
/// In particular, callers should be aware that [PathMetrics.length] is the
/// number of contours, **not the length of the path**. To get the length of
/// a contour in a path, use [PathMetric.length].
///
/// If `forceClosed` is set to true, the contours of the path will be measured
/// as if they had been closed, even if they were not explicitly closed. \b  o {qچ  I %/// Create a new empty [Path] object.  ۲۳۴/// Avoids creating a new native backing for the path for methods that will
/// create it later, such as [Path.from], [shift] and [transform].   ܃Mܠ   ܃܄J    1ۻV   ۻۼjj  qw  A"*@+273:BXICK!p݂   !"(F/)7E8?D@GqNHPkemݕ  
  ݕݖݜݢݝݪݿݫݲݷݳݺޔ  ޛޣ  ޥޭ  ()/\50=[>EJFMU]ނd^f|v~޳3  B	J  M	U  ޳޴޿!\      \]hioߜup}ߛ~߅ߊ߆ߍߕߝߤߞߦ߼߶߾s  	  	   32!$,4a;5=[U]/  A	I  L	T  W	_  b	j  q  -	5  8	@  C	K  N	V  qr}~]  	  		  	  	'  *	2  5	=  ]^ijpvq~D  	   	  	  	  !	)  ,	4  DEPQW]Xefmrnu};  	  	  	  	     ;<GHNTO\]dielt|  	  	  	  	    !f'"/e07<8?GOW_gnhp  	    &.  9?  Ӂd  ks  x
            -,  (.R5/7LFN݁K      /7  JP  ek  $%AZv,            #  ,2  ,-394ABINJQYaiqy@tQ  iq          @A븁KT  ow              B<6>   	&  퐀    #
+  /7  =E  Pa  i	o  PQـV  _g  l
t  x    D%'>8@  	      ^    
    !  (0  ;C  ^_ekfst{|Rtc  n|  
  RSʀ*  6C  J
P  Zrk  t{  Z[u(  2?  ISZ  b	h  mu  }  IJ  $  )	1  4	<  C    	  	    CDJPKXY`eahotpwg'  6	<  AI  Q_      	   #	+  2    	  	    239}?:G|HOTPW^c_fnv~ox   $A*%2@3:?;BfICK`Zbo   z    u      uv|}  $  $  +1  9	A  D	L  Sd  n{  STq  |    D%C&-2.5=E_LFN   2g   239XA:IWJQVRYs`Zb    #0  6;  Iua  qw  IJ(    D /// The geometric description of a tangent: the angle at a point.
///
/// See also:
///  * [PathMetric.getTangentForOffset], which returns the tangent of an offset along a path.   V/// Creates a [Tangent] with the given values.
///
/// The arguments must not be null.#  ,2   =R"/// Creates a [Tangent] based on the angle rather than the vector.
///
/// The [vector] is computed to be the unit vector at the given angle, interpreted
/// as clockwise radians from the x axis.#+  4<   \/// Position of the tangent.
///
/// When used with [PathMetric.getTangentForOffset], this represents the precise
/// position that the given offset along the path corresponds to. ih/// The vector of the curve at [position].
///
/// When used with [PathMetric.getTangentForOffset], this is the vector of the
/// curve that is at the given offset along the path (i.e. the direction of the
/// curve at [position]). s+/// The direction of the curve at [position].
///
/// When used with [PathMetric.getTangentForOffset], this is the angle of the
/// curve that is the given offset along the path (i.e. the direction of the
/// curve at [position]).
///
/// This value is in radians, with 0.0 meaning pointing along the x axis in
/// the positive x-axis direction, positive numbers pointing downward toward
/// the negative y-axis, i.e. in a clockwise direction, and negative numbers
/// pointing upward toward the positive y-axis, i.e. in a counter-clockwise
/// direction.     4#
//// An iterable collection of [PathMetric] objects describing a [Path].
///
/// A [PathMetrics] object is created by using the [Path.computeMetrics] method,
/// and represents the path as it stood at the time of the call. Subsequent
/// modifications of the path do not affect the [PathMetrics] object.
///
/// Each path metric corresponds to a segment, or contour, of a path.
///
/// For example, a path consisting of a [Path.lineTo], a [Path.moveTo], and
/// another [Path.lineTo] will contain two contours and thus be represented by
/// two [PathMetric] objects.
///
/// This iterable does not memoize. Callers who need to traverse the list
/// multiple times, or who need to randomly access elements of the list, should
/// use [toList] on this object. 
k
v
w
x 
y	
  

  














$  ;?     Yh/// Used by [PathMetrics] to track iteration from one segment of a path to the
/// next for measurement. (    +7  FY  j   jk^o   ^_ k/// Utilities for measuring a [Path] and extracting sub-paths.
///
/// Iterate over the object returned by [Path.computeMetrics] to obtain
/// [PathMetric] objects. Callers that want to randomly access elements or
/// iterate multiple times should use `path.computeMetrics().toList()`, since
/// [PathMetrics] does not memoize.
///
/// Once created, the metrics are only valid for the path as it was specified
/// when [Path.computeMetrics] was called. If additional contours are added or
/// any contours are updated, the metrics need to be recomputed. Previously
/// created metrics will still refer to a snapshot of the path at the time they
/// were computed, rather than to the actual metrics for the new mutations to
/// the path.   &  <5>FGMjNVW|s~+
/// Return the total length of the current contour.
///
/// The length may be calculated from an approximation of the geometry
/// originally added. For this reason, it is not recommended to rely on
/// this property for mathematically correct lengths of common shapes. AN!/// Whether the contour is closed.
///
/// Returns true if the contour ends with a call to [Path.close] (which may
/// have been implied when using methods like [Path.addRect]) or if
/// `forceClosed` was specified as true in the call to [Path.computeMetrics].
/// Returns false otherwise. [//// The zero-based index of the contour.
///
/// [Path] objects are made up of zero or more contours. The first contour is
/// created once a drawing command (e.g. [Path.lineTo]) is issued. A
/// [Path.moveTo] command after a drawing command may create a new contour,
/// although it may not if optimizations are applied that determine the move
/// command did not actually result in moving the pen.
///
/// This property is only valid with reference to its original iterator and
/// the contours of the path at the time the path's metrics were computed. If
/// additional contours were added or existing contours updated, this metric
/// will be invalid for the current state of the path. @S   `,!/// Computes the position of the current contour at the given offset, and the
/// angle of the path at that point.
///
/// For example, calling this method with a distance of 1.41 for a line from
/// 0.0,0.0 to 2.0,2.0 would give a point 1.0,1.0 and the angle 45 degrees
/// (but in radians).
///
/// Returns null if the contour has zero [length].
///
/// The distance is clamped to the [length] of the current contour. 5=   j/// Given a start and end distance, return the intervening segment(s).
///
/// `start` and `end` are clamped to legal values (0..[length])
/// Begin the segment with a moveTo if `startWithMoveTo` is true. v~  
    q(      [       &'/// The index of the current contour in the list of contours in the path.
///
/// [nextContour] will increment this to the zero based index.'' &      &'-X3.;W<DKPLSYw`Zb           +0  =E    !o  !z!  !!   ! !!4!!!!3!!!#!!&!-!5!V!<!6!>!_!  !!  !!  !
!  !!  !##  ##  ##  ##  #
#  ##  ####M####L##!#&#"#)#1#8#@#H#N#p#U#O#W#〿#  ##   $%  %%!  $$$$$$$$$$$$$$% $$$$$$%ဉ%    &n}&   &n&o&u&&{&v&&&&&&&&&&&&&& + |,*/// A mask filter to apply to shapes as they are painted. A mask filter is a
/// function that takes a bitmap of color pixels, and returns another bitmap of
/// color pixels.
///
/// Instances of this class are used with [Paint.maskFilter] on [Paint] objects. ,9////// Creates a mask filter that takes the shape being drawn and blurs it.
///
/// This is commonly used to approximate shadows.
///
/// The `style` argument controls the kind of effect to draw; see [BlurStyle].
///
/// The `sigma` argument controls the size of the effect. It is the standard
/// deviation of the Gaussian blur to apply. The value must be greater than
/// zero. The sigma corresponds to very roughly half the radius of the effect
/// in pixels.
///
/// A blur is an expensive operation and should therefore be used sparingly.
///
/// The arguments must not be null.
///
/// See also:
///
///  * [Canvas.drawShadow], which is a more efficient way to draw shadows.//$  ///5   /D/T  /^/k  00 0$010B 0N1 <1   1 10h0  00  0h0i1@Z1S   1@1A 1_1     1:1  11  11    12  2<  2B2Y     2_O2q  2{2  22  2_2` 22  2" 22 2
3   33"   3+(3=  3G3N  3T3`  3+3, 4W4]  4: 44/// Row-major.44   44   4 5   5
5  55#  44 @kBN/// A description of a color filter to apply when drawing a shape or compositing
/// a layer with a particular [Paint]. A color filter is a function that takes
/// two colors, and outputs one color. When applied during compositing, it is
/// independently applied to each pixel of the layer being drawn before the
/// entire layer is merged with the destination.
///
/// Instances of this class are used with [Paint.colorFilter] on [Paint]
/// objects. BuXD:D;D?/// Creates a color filter that applies the blend mode given as the second
/// argument. The source color is the one given as the first argument, and the
/// destination color is the one from the layer being composited.
///
/// The output of this filter is then composited into the background according
/// to the [Paint.blendMode], using the output of this filter as the source
/// and the background as the destination.D@DG  DMDX  DqDjDsDDDDDDDDDDшELLL7/// Construct a color filter from a 4x5 row-major matrix. The matrix is
/// interpreted as a 5x5 matrix, where the fifth row is the identity
/// configuration.
///
/// Every pixel's color value, represented as an `[R, G, B, A]`, is matrix
/// multiplied to create a new color:
///
///     | R' |   | a00 a01 a02 a03 a04 |   | R |
///     | G' |   | a10 a11 a12 a13 a14 |   | G |
///     | B' | = | a20 a21 a22 a23 a24 | * | B |
///     | A' |   | a30 a31 a32 a33 a34 |   | A |
///     | 1  |   |  0   0   0   0   1  |   | 1 |
///
/// The matrix is in row-major order and the translation column is specified
/// in unnormalized, 0...255, space. For example, the identity matrix is:
///
/// ```dart
/// const ColorFilter identity = ColorFilter.matrix(<double>[
///   1, 0, 0, 0, 0,
///   0, 1, 0, 0, 0,
///   0, 0, 1, 0, 0,
///   0, 0, 0, 1, 0,
/// ]);
/// ```
///
/// ## Examples
///
/// An inversion color matrix:
///
/// ```dart
/// const ColorFilter invert = ColorFilter.matrix(<double>[
///   -1,  0,  0, 0, 255,
///    0, -1,  0, 0, 255,
///    0,  0, -1, 0, 255,
///    0,  0,  0, 1,   0,
/// ]);
/// ```
///
/// A sepia-toned color matrix (values based on the [Filter Effects Spec](https://www.w3.org/TR/filter-effects-1/#sepiaEquivalent)):
///
/// ```dart
/// const ColorFilter sepia = ColorFilter.matrix(<double>[
///   0.393, 0.769, 0.189, 0, 0,
///   0.349, 0.686, 0.168, 0, 0,
///   0.272, 0.534, 0.131, 0, 0,
///   0,     0,     0,     1, 0,
/// ]);
/// ```
///
/// A greyscale color filter (values based on the [Filter Effects Spec](https://www.w3.org/TR/filter-effects-1/#grayscaleEquivalent)):
///
/// ```dart
/// const ColorFilter greyscale = ColorFilter.matrix(<double>[
///   0.2126, 0.7152, 0.0722, 0, 0,
///   0.2126, 0.7152, 0.0722, 0, 0,
///   0.2126, 0.7152, 0.0722, 0, 0,
///   0,      0,      0,      1, 0,
/// ]);
/// ```LL  LLLLLLLLLMMM	MMMMW/// Construct a color filter that applies the sRGB gamma curve to the RGB
/// channels. MMMMMMMMMMMMNNNNd/// Creates a color filter that applies the inverse of the sRGB gamma curve
/// to the RGB channels. NNNNNNNNNNNNOO$  O.O?  OMOa  OlOv  OO OO!O PP),P: PTPr,P PUGU[   UGUHUV   UUPVP   PPQ.Q<    T	:T#  T&T.  T	T
W-W   WW Yj[@D/// A [ColorFilter] that is backed by a native DlColorFilter.
///
/// This is a private class, rather than being the implementation of the public
/// ColorFilter, because we want ColorFilter to be const constructible and
/// efficiently comparable, so that widgets can check for ColorFilter equality to
/// avoid repainting. [r[~[[ [[  
[[[[[[[[[[\&\2\3\9 \:\@  
\N\T\~\c\U\\\]\f\q\r\Հ\\\ \\  
]]]C]]]]] ]+],]]]] ]]  
]]]]]]]]]]^+^o/// The original Dart object that created the native wrapper, which retains
/// the values used for the filter.  ^]_   ^^^^^^^^^^^^^^__  _	_  __  ___%_P_+_&_3_O_4_;_@_<_C_J_Q__X_R_Z_y_s_{_`  `&`3  ______________`___`>`   `>`?`E`b`K`F`S`a`T`[```\`c``j`d`l````̀a?   `````````````a-```a'a!a) a\c>/// A filter operation to apply to a raster image.
///
/// See also:
///
///  * [BackdropFilter], a widget that applies [ImageFilter] to its rendering.
///  * [ImageFiltered], a widget that applies [ImageFilter] to its children.
///  * [SceneBuilder.pushBackdropFilter], which is the low-level API for using
///    this class as a backdrop filter.
///  * [SceneBuilder.pushImageFilter], which is the low-level API for using
///    this class as a child layer filter. cccc   c΁ddd"9/// Creates an image filter that applies a Gaussian blur.d%d-  d:dB  dO"dY  d5dJdcdkdldׁ&e}e~e/// Creates an image filter that dilates each input pixel's channel values
/// to the max value within the given radii along the x and y axes.ee  ee  eef'fff/// Create a filter that erodes each input pixel's channel values
/// to the minimum channel value within the given radii along the x and y axes.ff  ff  ffg,*h!h"h(/// Creates an image filter that applies a matrix transformation.
///
/// For example, applying a positive scale matrix (see [Matrix4.diagonal3])
/// when used with [BackdropFilter] would magnify the background image.h)h6  hU2hd  hshhiZjhjijp/// Composes the `inner` filter with `outer`, to combine their effects.
///
/// Creates a single [ImageFilter] that when applied, has the same effect as
/// subsequently applying `inner` and `outer`, i.e.,
/// result = outer(inner(source)).jsj  jj    lElP    k$k     lf`ll  lH ll ll  ll   ll  m !m  mG@m_ mnmzm{mmmmLm   mmosQo   osotm@m   mmnMn2   nn npn  nn  npnq oȄto  pa pp pp,  p3pB  pIpX   pgpt  p~p  pp  p>p ppppqpqKqV    r=SrT   r=r>s<t   ssq@q    qqrTr   rrrs  s	s  rr t>tD  trE trt tt  tt   tt  tt  t@u uuu u&u+u'utIu   utuuv>w   vvu0@uI   u0u1uJu   uuvv)  v,v4  vv w;wA  wnD wnw ww  ww   ww  ww  w?w xxxx x%x!xnHx   xnxoy>z   yyx*@xC   x*x+xIx   xxyy!  y$y,  yy z2z8  zgN zgzz z}z  zz   zz  zz  {B{. {={I{J{R{W{S{s{   {{}uF}   }u}v{\@{u   {\{]|_|*   |||z|  ||  |z|{ }~)/// An [ImageFilter] that is backed by a native DlImageFilter.
///
/// This is a private class, rather than being the implementation of the public
/// ImageFilter, because we want ImageFilter to be efficiently comparable, so that
/// widgets can check for ImageFilter equality to avoid repainting. &nos9/// Creates an image filter that applies a Gaussian blur.t  #/// Creates an image filter that dilates each input pixel's channel values
/// to the max value within the given radii along the x and y axes.  4#܀/// Create a filter that erodes each input pixel's channel values
/// to the minimum channel value within the given radii along the x and y axes.  [IJP/// Creates an image filter that applies a matrix transformation.
///
/// For example, applying a positive scale matrix (see [Matrix4.diagonal3])
/// when used with [BackdropFilter] would magnify the background image.Qe  zr|B//// Converts a color filter to an image filter.  Cz0/// Composes `_innerFilter` with `_outerFilter`.  %o/// The original Dart object that created the native wrapper, which retains
/// the values used for the filter.  ]   "        "#)]/*7\8?D@GOW^e_g؀O  [c  ks  =719       $      $%+W1,9V:AFBIQXz_YaĀ1  BQ  a      abhnivw~ =z/// Base class for objects such as [Gradient] and [ImageShader] which
/// correspond to shaders as used by [Paint.shader]. ia/// This class is created by the engine, and should not be instantiated
/// or extended directly.  y/// Whether [dispose] has been called.
///
/// This must only be used when asserts are enabled. Otherwise, it will throw.   5(/// Release the resources used by this object. The object is no longer usable
/// after this method is called.
///
/// The underlying memory allocated by this object will be retained beyond
/// this call if it is still needed by another object that has not been
/// disposed. For example, a [Picture] that has not been disposed that
/// refers to an [ImageShader] may keep its underlying resources alive.
///
/// Classes that override this method must call `super.dispose()`.    <!@/// A shader (as used by [Paint.shader]) that renders a color gradient.
///
/// There are several types of gradients, represented by the various constructors
/// on this class.
///
/// See also:
///
///  * [Gradient](https://api.flutter.dev/flutter/painting/Gradient-class.html), the class in the [painting] library.
/// /// Creates a linear gradient from `from` to `to`.
///
/// If `colorStops` is provided, `colorStops[i]` is a number from 0.0 to 1.0
/// that specifies where `color[i]` begins in the gradient. If `colorStops` is
/// not provided, then only two stops, at 0.0 and 1.0, are implied (and
/// `color` must therefore only have two entries). Stop values less than 0.0
/// will be rounded up to 0.0 and stop values greater than 1.0 will be rounded
/// down to 1.0. Each stop value must be greater than or equal to the previous
/// stop value. Stop values that do not meet this criteria will be rounded up
/// to the previous stop value.
///
/// The behavior before `from` and after `to` is described by the `tileMode`
/// argument. For details, see the [TileMode] enum.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_linear.png)
///
/// If `from`, `to`, `colors`, or `tileMode` are null, or if `colors` or
/// `colorStops` contain null values, this constructor will throw a
/// [NoSuchMethodError].
///
/// If `matrix4` is provided, the gradient fill will be transformed by the
/// specified 4x4 matrix relative to the local coordinate system. `matrix4` must
/// be a column-major matrix packed into a list of 16 values.  	  '  4C  R"\  z  !fno	 !"#/// Creates a radial gradient centered at `center` that ends at `radius`
/// distance from the center.
///
/// If `colorStops` is provided, `colorStops[i]` is a number from 0.0 to 1.0
/// that specifies where `color[i]` begins in the gradient. If `colorStops` is
/// not provided, then only two stops, at 0.0 and 1.0, are implied (and
/// `color` must therefore only have two entries). Stop values less than 0.0
/// will be rounded up to 0.0 and stop values greater than 1.0 will be rounded
/// down to 1.0. Each stop value must be greater than or equal to the previous
/// stop value. Stop values that do not meet this criteria will be rounded up
/// to the previous stop value.
///
/// The behavior before and after the radius is described by the `tileMode`
/// argument. For details, see the [TileMode] enum.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_radial.png)
///
/// If `center`, `radius`, `colors`, or `tileMode` are null, or if `colors` or
/// `colorStops` contain null values, this constructor will throw a
/// [NoSuchMethodError].
///
/// If `matrix4` is provided, the gradient fill will be transformed by the
/// specified 4x4 matrix relative to the local coordinate system. `matrix4` must
/// be a column-major matrix packed into a list of 16 values.
///
/// If `focal` is provided and not equal to `center` and `focalRadius` is
/// provided and not equal to 0.0, the generated shader will be a two point
/// conical radial gradient, with `focal` being the center of the focal
/// circle and `focalRadius` being the radius of that circle. If `focal` is
/// provided and not equal to `center`, at least one of the two offsets must
/// not be equal to [Offset.zero].  	  )  6E  T"^  |      hpq"
!,1234O56;3/// Creates a sweep gradient centered at `center` that starts at `startAngle`
/// and ends at `endAngle`.
///
/// `startAngle` and `endAngle` should be provided in radians, with zero
/// radians being the horizontal line to the right of the `center` and with
/// positive angles going clockwise around the `center`.
///
/// If `colorStops` is provided, `colorStops[i]` is a number from 0.0 to 1.0
/// that specifies where `color[i]` begins in the gradient. If `colorStops` is
/// not provided, then only two stops, at 0.0 and 1.0, are implied (and
/// `color` must therefore only have two entries). Stop values less than 0.0
/// will be rounded up to 0.0 and stop values greater than 1.0 will be rounded
/// down to 1.0. Each stop value must be greater than or equal to the previous
/// stop value. Stop values that do not meet this criteria will be rounded up
/// to the previous stop value.
///
/// The behavior before `startAngle` and after `endAngle` is described by the
/// `tileMode` argument. For details, see the [TileMode] enum.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_sweep.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_sweep.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_sweep.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_sweep.png)
///
/// If `center`, `colors`, `tileMode`, `startAngle`, or `endAngle` are null,
/// or if `colors` or `colorStops` contain null values, this constructor will
/// throw a [NoSuchMethodError].
///
/// If `matrix4` is provided, the gradient fill will be transformed by the
/// specified 4x4 matrix relative to the local coordinate system. `matrix4` must
/// be a column-major matrix packed into a list of 16 values.AI  Ta  n}  Ȍ"Ȗ  ȴȼ      &ȠȨȩ.&-'8>TJ?L^dɐumepxɇɏɈɚɟɠɡɢ  ZY   +#*$,G3-5l       .  9>  GU  lms˷ytˁ˶˂ˉˎˊˑ˙ˡ˩˰˸˿˹aD  	    -5  BO  \j  {̀  ͏͝  abh̼niv̻w~̃̆̎̖̞̦̮̵̽̾ͩW  
ks  ΀Έ  ΕΝ  ίη          &+  :H  ͩͪͰͶͱ;ͿE$&Td      &3  @N  _d  s{  ЌД  Уб  TU[Ϸa\i϶jqvryρωϑϙϠϨϰϸϿϹм        _lҨ=/// A shader (as used by [Paint.shader]) that tiles an image. ǅ ֋֖/// Creates an image-tiling shader.
///
/// The first argument specifies the image to render. The
/// [decodeImageFromList] function can be used to decode an image from bytes
/// into the form expected here. (In production code, starting from
/// [instantiateImageCodec] may be preferable.)
///
/// The second and third arguments specify the [TileMode] for the x direction
/// and y direction respectively. [TileMode.repeated] can be used for tiling
/// images.
///
/// The fourth argument gives the matrix to apply to the effect. The
/// expression `Matrix4.identity().storage` creates a [Float64List]
/// prepopulated with the identity matrix.
///
/// All the arguments are required and must not be null, except for
/// [filterQuality]. If [filterQuality] is not specified at construction time
/// it will be deduced from the environment where it is used, such as from
/// [Paint.filterQuality].֗֞  ֤֮  ֲּ      opvևw
&+,-.  aEr   abت]   تثرطزؿ٘  ٧ٯ  ٵٺ  پ      ]"\#*/+29>:AHOV^كe_gھk/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).  hioڌup}ڋ~څڊچڍڬڔڎږ ͌e۵/// An instance of [FragmentProgram] creates [Shader] objects (as used by
/// [Paint.shader]).
///
/// For more information, see the website
/// [documentation]( https://docs.flutter.dev/development/ui/advanced/shaders). !  !)    ] =>$<%-C7h CDJ[K2  &+ށ//// Creates a fragment program from the asset with key [assetKey].
///
/// The asset must be a file produced as the output of the `impellerc`
/// compiler. The constructed object should then be reused via the
/// [fragmentShader] method to create [Shader] objects that can be used by
/// [Paint.shader]. ދޓ   DP  dl   a       "I*#2H3:?;BJsQKS櫀1/// Returns a fresh instance of [FragmentShader].    4%/// A [Shader] generated from a [FragmentProgram].
///
/// Instances of this class can be obtained from the
/// [FragmentProgram.fragmentShader] method. The float uniforms list is
/// initialized to the size expected by the shader and is zero-filled. Uniforms
/// of float type can then be set by calling [setFloat]. Sampler uniforms are
/// set by calling [setImageSampler].
///
/// A [FragmentShader] can be re-used, and this is an efficient way to avoid
/// allocating and re-initializing the uniform buffer and samplers. However,
/// if two [FragmentShader] objects with different float uniforms or samplers
/// are required to exist simultaneously, they must be obtained from two
/// different calls to [FragmentProgram.fragmentShader]. GUVW Xi  s|  &4  C<\ q|~}(  밇򨆜/// Sets the float uniform at [index] to [value].
///
/// All uniforms defined in a fragment shader that are not samplers must be
/// set through this method. This includes floats and vec2, vec3, and vec4.
/// The correct index for each uniform is determined by the order of the
/// uniforms as defined in the fragment program, ignoring any samplers. For
/// data types that are composed of multiple floats such as a vec4, more than
/// one call to [setFloat] is required.
///
/// For example, given the following uniforms in a fragment program:
///
/// ```glsl
/// uniform float uScale;
/// uniform sampler2D uTexture;
/// uniform vec2 uMagnitude;
/// uniform vec4 uColor;
/// ```
///
/// Then the corresponding Dart code to correctly initialize these uniforms
/// is:
///
/// ```dart
/// void updateShader(ui.FragmentShader shader, Color color, ui.Image image) {
///   shader.setFloat(0, 23);  // uScale
///   shader.setFloat(1, 114); // uMagnitude x
///   shader.setFloat(2, 83);  // uMagnitude y
///
///   // Convert color to premultiplied opacity.
///   shader.setFloat(3, color.red / 255 * color.opacity);   // uColor r
///   shader.setFloat(4, color.green / 255 * color.opacity); // uColor g
///   shader.setFloat(5, color.blue / 255 * color.opacity);  // uColor b
///   shader.setFloat(6, color.opacity);                     // uColor a
///
///   // initialize sampler uniform.
///   shader.setImageSampler(0, image);
/// }
/// ```
///
/// Note how the indexes used does not count the `sampler2D` uniform. This
/// uniform will be set separately with [setImageSampler], with the index starting
/// over at 0.
///
/// Any float uniforms that are left uninitialized will default to `0`. 	     D</// Sets the sampler uniform at [index] to [image].
///
/// The index provided to setImageSampler is the index of the sampler uniform defined
/// in the fragment program, excluding all non-sampler uniforms.
///
/// All the sampler uniforms that a shader expects must be provided or the
/// results will be undefined. 	     Ds/// Releases the native resources held by the [FragmentShader].
///
/// After this method is called, calling methods on the shader, or attaching
/// it to a [Paint] object will fail with an exception. Calling [dispose]
/// twice will also result in an exception being thrown.  bc̀G  Te  mr    .  #	(  .6   B~   BCIfOJWeX_d`gnhpl%    sn/// A set of vertex data used by [Canvas.drawVertices].
///
/// Vertex data consists of a series of points in the canvas coordinate space.
/// Based on the [VertexMode], these points are interpreted either as
/// independent triangles ([VertexMode.triangles]), as a sliding window of
/// points forming a chain of triangles each sharing one side with the next
/// ([VertexMode.triangleStrip]), or as a fan of triangles with a single shared
/// point ([VertexMode.triangleFan]).
///
/// Each point can be associated with a color. Each triangle is painted as a
/// gradient that blends between the three colors at the three points of that
/// triangle. If no colors are specified, transparent black is assumed for all
/// the points.
///
/// These colors are then blended with the [Paint] specified in the call to
/// [Canvas.drawVertices]. This paint is either a solid color ([Paint.color]),
/// or a bitmap, specified using a shader ([Paint.shader]), typically either a
/// gradient ([Gradient]) or image ([ImageFilter]). The bitmap uses the same
/// coordinate space as the canvas (in the case of an [ImageFilter], this is
/// notably different than the coordinate space of the source image; the source
/// image is tiled according to the filter's configuration, and the image that
/// is sampled when painting the triangles is the infinite one after all the
/// repeating is applied.)
///
/// Each point in the [Vertices] is associated with a specific point on this
/// image. Each triangle is painted by sampling points from this image by
/// interpolating between the three points of the image corresponding to the
/// three points of the triangle.
///
/// The [Vertices.new] constructor configures all this using lists of [Offset]
/// and [Color] objects. The [Vertices.raw] constructor instead uses
/// [Float32List], [Int32List], and [Uint16List] objects, which more closely
/// corresponds to the data format used internally and therefore reduces some of
/// the conversion overhead. The raw constructor is useful if the data is coming
/// from another source (e.g. a file) and can therefore be parsed directly into
/// the underlying representation. , /// Creates a set of vertex data for use with [Canvas.drawVertices].
///
/// The `mode` parameter describes how the points should be interpreted: as
/// independent triangles ([VertexMode.triangles]), as a sliding window of
/// points forming a chain of triangles each sharing one side with the next
/// ([VertexMode.triangleStrip]), or as a fan of triangles with a single
/// shared point ([VertexMode.triangleFan]).
///
/// The `positions` parameter provides the points in the canvas space that
/// will be use to draw the triangles.
///
/// The `colors` parameter, if specified, provides the color for each point in
/// `positions`. Each triangle is painted as a gradient that blends between
/// the three colors at the three points of that triangle. (These colors are
/// then blended with the [Paint] specified in the call to
/// [Canvas.drawVertices].)
///
/// The `textureCoordinates` parameter, if specified, provides the points in
/// the [Paint] image to sample for the corresponding points in `positions`.
///
/// If the `colors` or `textureCoordinates` parameters are specified, they must
/// be the same length as `positions`.
///
/// The `indices` parameter specifies the order in which the points should be
/// painted. If it is omitted (or present but empty), the points are processed
/// in the order they are given in `positions`, as if the `indices` was a list
/// from 0 to n-1, where _n_ is the number of entries in `positions`. The
/// `indices` parameter, if present and non-empty, must have at least three
/// entries, but may be of any length beyond this. Indicies may refer to
/// offsets in the positions array multiple times, or may skip positions
/// entirely.
///
/// If the `indices` parameter is specified, all values in the list must be
/// valid index values for `positions`.
///
/// The `mode` and `positions` parameters must not be null.
///
/// This constructor converts its parameters into [dart:typed_data] lists
/// (e.g. using [Float32List]s for the coordinates) before sending them to the
/// Flutter engine. If the data provided to this constructor is not already in
/// [List] form, consider using the [Vertices.raw] constructor instead to
/// avoid converting the data twice.         ".   яZ[^(/// Creates a set of vertex data for use with [Canvas.drawVertices], using the
/// encoding expected by the Flutter engine.
///
/// The `mode` parameter describes how the points should be interpreted: as
/// independent triangles ([VertexMode.triangles]), as a sliding window of
/// points forming a chain of triangles each sharing one side with the next
/// ([VertexMode.triangleStrip]), or as a fan of triangles with a single
/// shared point ([VertexMode.triangleFan]).
///
/// The `positions` parameter provides the points in the canvas space that
/// will be use to draw the triangles. Each point is represented as two
/// numbers in the list, the first giving the x coordinate and the second
/// giving the y coordinate. (As a result, the list must have an even number
/// of entries.)
///
/// The `colors` parameter, if specified, provides the color for each point in
/// `positions`. Each color is represented as ARGB with 8 bit color channels
/// (like [Color.value]'s internal representation), and the list, if
/// specified, must therefore be half the length of `positions`. Each triangle
/// is painted as a gradient that blends between the three colors at the three
/// points of that triangle. (These colors are then blended with the [Paint]
/// specified in the call to [Canvas.drawVertices].)
///
/// The `textureCoordinates` parameter, if specified, provides the points in
/// the [Paint] image to sample for the corresponding points in `positions`.
/// Each point is represented as two numbers in the list, the first giving the
/// x coordinate and the second giving the y coordinate. This list, if
/// specified, must be the same length as `positions`.
///
/// The `indices` parameter specifies the order in which the points should be
/// painted. If it is omitted (or present but empty), the points are processed
/// in the order they are given in `positions`, as if the `indices` was a list
/// from 0 to n-2, where _n_ is the number of pairs in `positions` (i.e. half
/// the length of `positions`). The `indices` parameter, if present and
/// non-empty, must have at least three entries, but may be of any length
/// beyond this. Indicies may refer to offsets in the positions array multiple
/// times, or may skip positions entirely.
///
/// If the `indices` parameter is specified, all values in the list must be
/// valid index values for pairs in `positions`. For example, if there are 12
/// numbers in `positions` (representing 6 coordinates), the `indicies` must
/// be numbers in the range 0..5 inclusive.
///
/// The `mode` and `positions` parameters must not be null.dp  y         && &&偞'/// Whether this reference to the underlying vertex data is [dispose]d.
///
/// This only returns a valid value if asserts are enabled, and must not be
/// used otherwise.   #o#  ##  $$$  $F$S  $z$  $$  $$  #o#p#v##|#w#############%%yn/// Release the resources used by this object. The object is no longer usable
/// after this method is called.   %&k/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).  &i&j&p&&v&q&~&&&&&&&&&& - 3/// An interface for recording graphical operations.
///
/// [Canvas] objects are used in creating [Picture] objects, which can
/// themselves be used with a [SceneBuilder] to build a [Scene]. In
/// normal usage, however, this is all handled by the framework.
///
/// A canvas has a current transformation matrix which is applied to all
/// operations. Initially, the transformation matrix is the identity transform.
/// It can be modified using the [translate], [scale], [rotate], [skew],
/// and [transform] methods.
///
/// A canvas also has a current clip region which is applied to all operations.
/// Initially, the clip region is infinite. It can be modified using the
/// [clipRect], [clipRRect], and [clipPath] methods.
///
/// The current transform and clip can be saved and restored using the stack
/// managed by the [save], [saveLayer], and [restore] methods.
///
/// ## Use with the Flutter framework
///
/// The Flutter framework's [RendererBinding] provides a hook for creating
/// [Canvas] objects ([RendererBinding.createCanvas]) that allows tests to hook
/// into the scene creation logic. When creating a [Canvas] that will be used
/// with a [PictureLayer] as part of the [Scene] in the context of the Flutter
/// framework, consider calling [RendererBinding.createCanvas] instead of
/// calling the [Canvas.new] constructor directly.
///
/// This does not apply when using a canvas to generate a bitmap for other
/// purposes, e.g. for generating a PNG image using [Picture.toImage]. 3ʂ 6\6bq/// Creates a canvas for recording graphical operations into the
/// given picture recorder.
///
/// Graphical operations that affect pixels entirely outside the given
/// `cullRect` might be discarded by the implementation. However, the
/// implementation might draw outside these bounds if, for example, a command
/// draws partially inside and outside the `cullRect`. To ensure that pixels
/// outside a given region are discarded, consider using a [clipRect]. The
/// `cullRect` is optional; by default, all operations are kept.
///
/// To end the recording, call [PictureRecorder.endRecording] on the
/// given recorder.6c6t  66     %6(7Ɓ/// Saves a copy of the current transform and clip on the save stack.
///
/// Call [restore] to pop the save stack.
///
/// See also:
///
///  * [saveLayer], which does the same thing but additionally also groups the
///    commands done until the matching [restore].   7єLF/// Saves a copy of the current transform and clip on the save stack, and then
/// creates a new group which subsequent calls will become a part of. When the
/// save stack is later popped, the group will be flattened into a layer and
/// have the given `paint`'s [Paint.colorFilter] and [Paint.blendMode]
/// applied.
///
/// This lets you create composite effects, for example making a group of
/// drawing commands semi-transparent. Without using [saveLayer], each part of
/// the group would be painted individually, so where they overlap would be
/// darker than where they do not. By using [saveLayer] to group them
/// together, they can be drawn with an opaque color at first, and then the
/// entire group can be made transparent using the [saveLayer]'s paint.
///
/// Call [restore] to pop the save stack and apply the paint to the group.
///
/// ## Using saveLayer with clips
///
/// When a rectangular clip operation (from [clipRect]) is not axis-aligned
/// with the raster buffer, or when the clip operation is not rectilinear
/// (e.g. because it is a rounded rectangle clip created by [clipRRect] or an
/// arbitrarily complicated path clip created by [clipPath]), the edge of the
/// clip needs to be anti-aliased.
///
/// If two draw calls overlap at the edge of such a clipped region, without
/// using [saveLayer], the first drawing will be anti-aliased with the
/// background first, and then the second will be anti-aliased with the result
/// of blending the first drawing and the background. On the other hand, if
/// [saveLayer] is used immediately after establishing the clip, the second
/// drawing will cover the first in the layer, and thus the second alone will
/// be anti-aliased with the background when the layer is clipped and
/// composited (when [restore] is called).
///
/// For example, this [CustomPainter.paint] method paints a clean white
/// rounded rectangle:
///
/// ```dart
/// void paint(Canvas canvas, Size size) {
///   Rect rect = Offset.zero & size;
///   canvas.save();
///   canvas.clipRRect(RRect.fromRectXY(rect, 100.0, 100.0));
///   canvas.saveLayer(rect, Paint());
///   canvas.drawPaint(Paint()..color = Colors.red);
///   canvas.drawPaint(Paint()..color = Colors.white);
///   canvas.restore();
///   canvas.restore();
/// }
/// ```
///
/// On the other hand, this one renders a red outline, the result of the red
/// paint being anti-aliased with the background at the clip edge, then the
/// white paint being similarly anti-aliased with the background _including
/// the clipped red paint_:
///
/// ```dart
/// void paint(Canvas canvas, Size size) {
///   // (this example renders poorly, prefer the example above)
///   Rect rect = Offset.zero & size;
///   canvas.save();
///   canvas.clipRRect(RRect.fromRectXY(rect, 100.0, 100.0));
///   canvas.drawPaint(Paint()..color = Colors.red);
///   canvas.drawPaint(Paint()..color = Colors.white);
///   canvas.restore();
/// }
/// ```
///
/// This point is moot if the clip only clips one draw operation. For example,
/// the following paint method paints a pair of clean white rounded
/// rectangles, even though the clips are not done on a separate layer:
///
/// ```dart
/// void paint(Canvas canvas, Size size) {
///   canvas.save();
///   canvas.clipRRect(RRect.fromRectXY(Offset.zero & (size / 2.0), 50.0, 50.0));
///   canvas.drawPaint(Paint()..color = Colors.white);
///   canvas.restore();
///   canvas.save();
///   canvas.clipRRect(RRect.fromRectXY(size.center(Offset.zero) & (size / 2.0), 50.0, 50.0));
///   canvas.drawPaint(Paint()..color = Colors.white);
///   canvas.restore();
/// }
/// ```
///
/// (Incidentally, rather than using [clipRRect] and [drawPaint] to draw
/// rounded rectangles like this, prefer the [drawRRect] method. These
/// examples are using [drawPaint] as a proxy for "complicated draw operations
/// that will get clipped", to illustrate the point.)
///
/// ## Performance considerations
///
/// Generally speaking, [saveLayer] is relatively expensive.
///
/// There are a several different hardware architectures for GPUs (graphics
/// processing units, the hardware that handles graphics), but most of them
/// involve batching commands and reordering them for performance. When layers
/// are used, they cause the rendering pipeline to have to switch render
/// target (from one layer to another). Render target switches can flush the
/// GPU's command buffer, which typically means that optimizations that one
/// could get with larger batching are lost. Render target switches also
/// generate a lot of memory churn because the GPU needs to copy out the
/// current frame buffer contents from the part of memory that's optimized for
/// writing, and then needs to copy it back in once the previous render target
/// (layer) is restored.
///
/// See also:
///
///  * [save], which saves the current state, but does not create a new layer
///    for subsequent commands.
///  * [BlendMode], which discusses the use of [Paint.blendMode] with
///    [saveLayer]. LPLW  L^Le   LoFM(/// Pops the current save stack, if there is anything to pop.
/// Otherwise, does nothing.
///
/// Use [save] and [saveLayer] to push state onto the stack.
///
/// If the state was pushed with [saveLayer], then this call will also
/// cause the new layer to be composited into the previous layer.   M%Oā/// Restores the save stack to a previous level as might be obtained from [getSaveCount].
/// If [count] is less than 1, the stack is restored to its initial state.
/// If [count] is greater than the current [getSaveCount] then nothing happens.
///
/// Use [save] and [saveLayer] to push state onto the stack.
///
/// If any of the state stack levels restored by this call were pushed with
/// [saveLayer], then this call will also cause those layers to be composited
/// into their previous layers. O	O   OBQ"/// Returns the number of items on the save stack, including the
/// initial state. This means it returns 1 for a clean canvas, and
/// that each call to [save] and [saveLayer] increments it, and that
/// each matching call to [restore] decrements it.
///
/// This number cannot go below 1.   Q(Q̀/// Add a translation to the current transform, shifting the coordinate space
/// horizontally by the first argument and vertically by the second argument. Q	Q  Q	Q   Q,R/// Add an axis-aligned scale to the current transform, scaling by the first
/// argument in the horizontal direction and the second in the vertical
/// direction.
///
/// If [sy] is unspecified, [sx] will be used for the scale in both
/// directions. S	S  S
S   S!qS{R/// Add a rotation to the current transform. The argument is in radians clockwise. SS   S5T/// Add an axis-aligned skew to the current transform, with the first argument
/// being the horizontal skew in rise over run units clockwise around the
/// origin, and the second argument being the vertical skew in rise over run
/// units clockwise around the origin. T	T  T	T   TπU_/// Multiply the current transform by the specified 4⨉4 transformation matrix
/// specified as a list of values in column-major order. UiUv   UWv/// Returns the current transform including the combined result of all transform
/// methods executed since the creation of this [Canvas] object, and respecting the
/// save/restore history.
///
/// Methods that can change the current transform include [translate], [scale],
/// [rotate], [skew], and [transform]. The [restore] method can also modify
/// the current transform by restoring it to the same value it had before its
/// associated [save] or [saveLayer] call.   WYƂ/// Reduces the clip region to the intersection of the current clip and the
/// given rectangle.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/clip_rect.png)
///
/// If [doAntiAlias] is true, then the clip will be anti-aliased.
///
/// If multiple draw commands intersect with the clip boundary, this can result
/// in incorrect blending at the clip boundary. See [saveLayer] for a
/// discussion of how to address that.
///
/// Use [ClipOp.difference] to subtract the provided rectangle from the
/// current clip. Y	Y  Y Y  YZ  YYYZZ;\&/// Reduces the clip region to the intersection of the current clip and the
/// given rounded rectangle.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/clip_rrect.png)
///
/// If [doAntiAlias] is true, then the clip will be anti-aliased.
///
/// If multiple draw commands intersect with the clip boundary, this can result
/// in incorrect blending at the clip boundary. See [saveLayer] for a
/// discussion of how to address that and some examples of using [clipRRect]. \0\7  \>\D  \Q\\^2/// Reduces the clip region to the intersection of the current clip and the
/// given [Path].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/clip_path.png)
///
/// If [doAntiAlias] is true, then the clip will be anti-aliased.
///
/// If multiple draw commands intersect with the clip boundary, this can result
/// in incorrect blending at the clip boundary. See [saveLayer] for a
/// discussion of how to address that. ^;	^A  ^G^M  ^Z^ei-Z/// Returns the conservative bounds of the combined result of all clip methods
/// executed within the current save stack of this [Canvas] object, as measured
/// in the local coordinate space under which rendering operations are currently
/// performed.
///
/// The combined clip results are rounded out to an integer pixel boundary before
/// they are transformed back into the local coordinate space which accounts for
/// the pixel roundoff in rendering operations, particularly when antialiasing.
/// Because the [Picture] may eventually be rendered into a scene within the
/// context of transforming widgets or layers, the result may thus be overly
/// conservative due to premature rounding. Using the [getDestinationClipBounds]
/// method combined with the external transforms and rounding in the true device
/// coordinate system will produce more accurate results, but this value may
/// provide a more convenient approximation to compare rendering operations to
/// the established clip.
///
/// {@template dart.ui.canvas.conservativeClipBounds}
/// The conservative estimate of the bounds is based on intersecting the bounds
/// of each clip method that was executed with [ClipOp.intersect] and potentially
/// ignoring any clip method that was executed with [ClipOp.difference]. The
/// [ClipOp] argument is only present on the [clipRect] method.
///
/// To understand how the bounds estimate can be conservative, consider the
/// following two clip method calls:
///
/// ```dart
/// void draw(Canvas canvas) {
///   canvas.clipPath(Path()
///     ..addRect(const Rect.fromLTRB(10, 10, 20, 20))
///     ..addRect(const Rect.fromLTRB(80, 80, 100, 100)));
///   canvas.clipPath(Path()
///     ..addRect(const Rect.fromLTRB(80, 10, 100, 20))
///     ..addRect(const Rect.fromLTRB(10, 80, 20, 100)));
///   // ...
/// }
/// ```
///
/// After executing both of those calls there is no area left in which to draw
/// because the two paths have no overlapping regions. But, in this case,
/// [getLocalClipBounds] would return a rectangle from `10, 10` to `100, 100` because it
/// only intersects the bounds of the two path objects to obtain its conservative
/// estimate.
///
/// The clip bounds are not affected by the bounds of any enclosing
/// [saveLayer] call as the engine does not currently guarantee the strict
/// enforcement of those bounds during rendering.
///
/// Methods that can change the current clip include [clipRect], [clipRRect],
/// and [clipPath]. The [restore] method can also modify the current clip by
/// restoring it to the same value it had before its associated [save] or
/// [saveLayer] call.
/// {@endtemplate}   iFlʃb/// Returns the conservative bounds of the combined result of all clip methods
/// executed within the current save stack of this [Canvas] object, as measured
/// in the destination coordinate space in which the [Picture] will be rendered.
///
/// Unlike [getLocalClipBounds], the bounds are not rounded out to an integer
/// pixel boundary as the Destination coordinate space may not represent pixels
/// if the [Picture] being constructed will be further transformed when it is
/// rendered or added to a scene. In order to determine the true pixels being
/// affected, those external transforms should be applied first before rounding
/// out the result to integer pixel boundaries. Most typically, [Picture] objects
/// are rendered in a scene with a scale transform representing the Device Pixel
/// Ratio.
///
/// {@macro dart.ui.canvas.conservativeClipBounds}   lm/// Paints the given [Color] onto the canvas, applying the given
/// [BlendMode], with the given color being the source and the background
/// being the destination. mm  mm   mʁo/// Draws a line between the given points using the given paint. The line is
/// stroked, the value of the [Paint.style] is ignored for this call.
///
/// The `p1` and `p2` arguments are interpreted as offsets from the origin.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_line.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_line_dark.png#gh-dark-mode-only) o	o  o	o  oo   oʀpd/// Fills the canvas with the given [Paint].
///
/// To fill the canvas with a solid color and blend mode, consider
/// [drawColor] instead. pnpu   pq`/// Draws a rectangle with the given [Paint]. Whether the rectangle is filled
/// or stroked (or both) is controlled by [Paint.style].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_rect.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_rect_dark.png#gh-dark-mode-only) q	q  rr
   rsj/// Draws a rounded rectangle with the given [Paint]. Whether the rectangle is
/// filled or stroked (or both) is controlled by [Paint.style].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_rrect.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_rrect_dark.png#gh-dark-mode-only) ss  ss   sHt́/// Draws a shape consisting of the difference between two rounded rectangles
/// with the given [Paint]. Whether this shape is filled or stroked (or both)
/// is controlled by [Paint.style].
///
/// This shape is almost but not quite entirely unlike an annulus. tt  tt  tt   uv/// Draws an axis-aligned oval that fills the given axis-aligned rectangle
/// with the given [Paint]. Whether the oval is filled or stroked (or both) is
/// controlled by [Paint.style].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_oval.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_oval_dark.png#gh-dark-mode-only) v	v  vv   v͂)xŁ/// Draws a circle centered at the point given by the first argument and
/// that has the radius given by the second argument, with the [Paint] given in
/// the third argument. Whether the circle is filled or stroked (or both) is
/// controlled by [Paint.style].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_circle.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_circle_dark.png#gh-dark-mode-only) xx  xx  xx   x|c/// Draw an arc scaled to fit inside the given rectangle.
///
/// It starts from `startAngle` radians around the oval up to
/// `startAngle` + `sweepAngle` radians around the oval, with zero radians
/// being the point on the right hand side of the oval that crosses the
/// horizontal line that intersects the center of the rectangle and with positive
/// angles going clockwise around the oval. If `useCenter` is true, the arc is
/// closed back to the center, forming a circle sector. Otherwise, the arc is
/// not closed, forming a circle segment.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_draw_arc.png#gh-light-mode-only)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/canvas_draw_arc_dark.png#gh-dark-mode-only)
///
/// This method is optimized for drawing arcs and should be faster than [Path.arcTo]. |	|  ||  ||  ||  ||   |ف}׀/// Draws the given [Path] with the given [Paint].
///
/// Whether this shape is filled or stroked (or both) is controlled by
/// [Paint.style]. If the path is filled, then sub-paths within it are
/// implicitly closed (see [Path.close]). }	}  }}   }~/// Draws the given [Image] into the canvas with its top-left corner at the
/// given [Offset]. The image is composited into the canvas using the given [Paint]. ~~  ~~  ~~   ~݁/// Draws the subset of the given image described by the `src` argument into
/// the canvas in the axis-aligned rectangle given by the `dst` argument.
///
/// This might sample from outside the `src` rect by up to half the width of
/// an applied filter.
///
/// Multiple calls to this method with different arguments (from the same
/// image) can be batched into a single call to [drawAtlas] to improve
/// performance.          كQ/// Draws the given [Image] into the canvas using the given [Paint].
///
/// The image is drawn in nine portions described by splitting the image by
/// drawing two horizontal lines and two vertical lines, where the `center`
/// argument describes the rectangle formed by the four points where these
/// four lines intersect each other. (This forms a 3-by-3 grid of regions,
/// the center region being described by the `center` argument.)
///
/// The four regions in the corners are drawn, without scaling, in the four
/// corners of the destination rectangle described by `dst`. The remaining
/// five regions are drawn by stretching them to fit such that they exactly
/// cover the destination rectangle while maintaining their relative
/// positions.        $   .[/// Draw the given picture onto the canvas. To create a picture, see
/// [PictureRecorder].    I˃/// Draws the text in the given [Paragraph] into this canvas at the given
/// [Offset].
///
/// The [Paragraph] object must have had [Paragraph.layout] called on it
/// first.
///
/// To align the text, set the `textAlign` on the [ParagraphStyle] object
/// passed to the [ParagraphBuilder.new] constructor. For more details see
/// [TextAlign] and the discussion at [ParagraphStyle.new].
///
/// If the text is left aligned or justified, the left margin will be at the
/// position specified by the `offset` argument's [Offset.dx] coordinate.
///
/// If the text is right aligned or justified, the right margin will be at the
/// position described by adding the [ParagraphConstraints.width] given to
/// [Paragraph.layout], to the `offset` argument's [Offset.dx] coordinate.
///
/// If the text is centered, the centering axis will be at the position
/// described by adding half of the [ParagraphConstraints.width] given to
/// [Paragraph.layout], to the `offset` argument's [Offset.dx] coordinate.      /// Draws a sequence of points according to the given [PointMode].
///
/// The `points` argument is interpreted as offsets from the origin.
///
/// The `paint` is used for each point ([PointMode.points]) or line
/// ([PointMode.lines] or [PointMode.polygon]), ignoring [Paint.style].
///
/// See also:
///
///  * [drawRawPoints], which takes `points` as a [Float32List] rather than a
///    [List<Offset>].        O/// Draws a sequence of points according to the given [PointMode].
///
/// The `points` argument is interpreted as a list of pairs of floating point
/// numbers, where each pair represents an x and y offset from the origin.
///
/// The `paint` is used for each point ([PointMode.points]) or line
/// ([PointMode.lines] or [PointMode.polygon]), ignoring [Paint.style].
///
/// See also:
///
///  * [drawPoints], which takes `points` as a [List<Offset>] rather than a
///    [List<Float32List>].   '4  ;B   L;E/// Draws a set of [Vertices] onto the canvas as one or more triangles.
///
/// The [Paint.color] property specifies the default color to use for the
/// triangles.
///
/// The [Paint.shader] property, if set, overrides the color entirely,
/// replacing it with the colors from the specified [ImageShader], [Gradient],
/// or other shader.
///
/// The `blendMode` parameter is used to control how the colors in the
/// `vertices` are combined with the colors in the `paint`. If there are no
/// colors specified in `vertices` then the `blendMode` has no effect. If
/// there are colors in the `vertices`, then the color taken from the
/// [Paint.shader] or [Paint.color] in the `paint` is blended with the colors
/// specified in the `vertices` using the `blendMode` parameter. For the
/// purposes of this blending, the colors from the `paint` parameter are
/// considered the source, and the colors from the `vertices` are considered
/// the destination. [BlendMode.dst] ignores the `paint` and uses only the
/// colors of the `vertices`; [BlendMode.src] ignores the colors of the
/// `vertices` and uses only the colors in the `paint`.
///
/// All parameters must not be null.
///
/// See also:
///   * [Vertices.new], which creates a set of vertices to draw on the canvas.
///   * [Vertices.raw], which creates the vertices using typed data lists
///     rather than unencoded lists.
///   * [paint], Image shaders can be used to draw images on a triangular mesh. R\  ep  z   +/// Draws many parts of an image - the [atlas] - onto the canvas.
///
/// This method allows for optimization when you want to draw many parts of an
/// image onto the canvas, such as when using sprites or zooming. It is more efficient
/// than using multiple calls to [drawImageRect] and provides more functionality
/// to individually transform each image part by a separate rotation or scale and
/// blend or modulate those parts with a solid color.
///
/// The method takes a list of [Rect] objects that each define a piece of the
/// [atlas] image to be drawn independently. Each [Rect] is associated with an
/// [RSTransform] entry in the [transforms] list which defines the location,
/// rotation, and (uniform) scale with which to draw that portion of the image.
/// Each [Rect] can also be associated with an optional [Color] which will be
/// composed with the associated image part using the [blendMode] before blending
/// the result onto the canvas. The full operation can be broken down as:
///
/// - Blend each rectangular portion of the image specified by an entry in the
/// [rects] argument with its associated entry in the [colors] list using the
/// [blendMode] argument (if a color is specified). In this part of the operation,
/// the image part will be considered the source of the operation and the associated
/// color will be considered the destination.
/// - Blend the result from the first step onto the canvas using the translation,
/// rotation, and scale properties expressed in the associated entry in the
/// [transforms] list using the properties of the [Paint] object.
///
/// If the first stage of the operation which blends each part of the image with
/// a color is needed, then both the [colors] and [blendMode] arguments must
/// not be null and there must be an entry in the [colors] list for each
/// image part. If that stage is not needed, then the [colors] argument can
/// be either null or an empty list and the [blendMode] argument may also be null.
///
/// The optional [cullRect] argument can provide an estimate of the bounds of the
/// coordinates rendered by all components of the atlas to be compared against
/// the clip to quickly reject the operation if it does not intersect.
///
/// An example usage to render many sprites from a single sprite atlas with no
/// rotations or scales:
///
/// ```dart
/// class Sprite {
///   Sprite(this.index, this.center);
///   int index;
///   Offset center;
/// }
///
/// class MyPainter extends CustomPainter {
///   MyPainter(this.spriteAtlas, this.allSprites);
///
///   // assume spriteAtlas contains N 10x10 sprites side by side in a (N*10)x10 image
///   ui.Image spriteAtlas;
///   List<Sprite> allSprites;
///
///   @override
///   void paint(Canvas canvas, Size size) {
///     Paint paint = Paint();
///     canvas.drawAtlas(spriteAtlas, <RSTransform>[
///       for (final Sprite sprite in allSprites)
///         RSTransform.fromComponents(
///           rotation: 0.0,
///           scale: 1.0,
///           // Center of the sprite relative to its rect
///           anchorX: 5.0,
///           anchorY: 5.0,
///           // Location at which to draw the center of the sprite
///           translateX: sprite.center.dx,
///           translateY: sprite.center.dy,
///         ),
///     ], <Rect>[
///       for (final Sprite sprite in allSprites)
///         Rect.fromLTWH(sprite.index * 10.0, 0.0, 10.0, 10.0),
///     ], null, null, null, paint);
///   }
///
///   // ...
/// }
/// ```
///
/// Another example usage which renders sprites with an optional opacity and rotation:
///
/// ```dart
/// class Sprite {
///   Sprite(this.index, this.center, this.alpha, this.rotation);
///   int index;
///   Offset center;
///   int alpha;
///   double rotation;
/// }
///
/// class MyPainter extends CustomPainter {
///   MyPainter(this.spriteAtlas, this.allSprites);
///
///   // assume spriteAtlas contains N 10x10 sprites side by side in a (N*10)x10 image
///   ui.Image spriteAtlas;
///   List<Sprite> allSprites;
///
///   @override
///   void paint(Canvas canvas, Size size) {
///     Paint paint = Paint();
///     canvas.drawAtlas(spriteAtlas, <RSTransform>[
///       for (final Sprite sprite in allSprites)
///         RSTransform.fromComponents(
///           rotation: sprite.rotation,
///           scale: 1.0,
///           // Center of the sprite relative to its rect
///           anchorX: 5.0,
///           anchorY: 5.0,
///           // Location at which to draw the center of the sprite
///           translateX: sprite.center.dx,
///           translateY: sprite.center.dy,
///         ),
///     ], <Rect>[
///       for (final Sprite sprite in allSprites)
///         Rect.fromLTWH(sprite.index * 10.0, 0.0, 10.0, 10.0),
///     ], <Color>[
///       for (final Sprite sprite in allSprites)
///         Colors.white.withAlpha(sprite.alpha),
///     ], BlendMode.srcIn, null, paint);
///   }
///
///   // ...
/// }
/// ```
///
/// The length of the [transforms] and [rects] lists must be equal and
/// if the [colors] argument is not null then it must either be empty or
/// have the same length as the other two lists.
///
/// See also:
///
///  * [drawRawAtlas], which takes its arguments as typed data lists rather
///    than objects. 5<  Sf             $4/// Draws many parts of an image - the [atlas] - onto the canvas.
///
/// This method allows for optimization when you want to draw many parts of an
/// image onto the canvas, such as when using sprites or zooming. It is more efficient
/// than using multiple calls to [drawImageRect] and provides more functionality
/// to individually transform each image part by a separate rotation or scale and
/// blend or modulate those parts with a solid color. It is also more efficient
/// than [drawAtlas] as the data in the arguments is already packed in a format
/// that can be directly used by the rendering code.
///
/// A full description of how this method uses its arguments to draw onto the
/// canvas can be found in the description of the [drawAtlas] method.
///
/// The [rstTransforms] argument is interpreted as a list of four-tuples, with
/// each tuple being ([RSTransform.scos], [RSTransform.ssin],
/// [RSTransform.tx], [RSTransform.ty]).
///
/// The [rects] argument is interpreted as a list of four-tuples, with each
/// tuple being ([Rect.left], [Rect.top], [Rect.right], [Rect.bottom]).
///
/// The [colors] argument, which can be null, is interpreted as a list of
/// 32-bit colors, with the same packing as [Color.value]. If the [colors]
/// argument is not null then the [blendMode] argument must also not be null.
///
/// An example usage to render many sprites from a single sprite atlas with no rotations
/// or scales:
///
/// ```dart
/// class Sprite {
///   Sprite(this.index, this.center);
///   int index;
///   Offset center;
/// }
///
/// class MyPainter extends CustomPainter {
///   MyPainter(this.spriteAtlas, this.allSprites);
///
///   // assume spriteAtlas contains N 10x10 sprites side by side in a (N*10)x10 image
///   ui.Image spriteAtlas;
///   List<Sprite> allSprites;
///
///   @override
///   void paint(Canvas canvas, Size size) {
///     // For best advantage, these lists should be cached and only specific
///     // entries updated when the sprite information changes. This code is
///     // illustrative of how to set up the data and not a recommendation for
///     // optimal usage.
///     Float32List rectList = Float32List(allSprites.length * 4);
///     Float32List transformList = Float32List(allSprites.length * 4);
///     for (int i = 0; i < allSprites.length; i++) {
///       Sprite sprite = allSprites[i];
///       final double rectX = sprite.index * 10.0;
///       rectList[i * 4 + 0] = rectX;
///       rectList[i * 4 + 1] = 0.0;
///       rectList[i * 4 + 2] = rectX + 10.0;
///       rectList[i * 4 + 3] = 10.0;
///
///       // This example sets the RSTransform values directly for a common case of no
///       // rotations or scales and just a translation to position the atlas entry. For
///       // more complicated transforms one could use the RSTransform class to compute
///       // the necessary values or do the same math directly.
///       transformList[i * 4 + 0] = 1.0;
///       transformList[i * 4 + 1] = 0.0;
///       transformList[i * 4 + 2] = sprite.center.dx - 5.0;
///       transformList[i * 4 + 3] = sprite.center.dy - 5.0;
///     }
///     Paint paint = Paint();
///     canvas.drawRawAtlas(spriteAtlas, transformList, rectList, null, null, null, paint);
///   }
///
///   // ...
/// }
/// ```
///
/// Another example usage which renders sprites with an optional opacity and rotation:
///
/// ```dart
/// class Sprite {
///   Sprite(this.index, this.center, this.alpha, this.rotation);
///   int index;
///   Offset center;
///   int alpha;
///   double rotation;
/// }
///
/// class MyPainter extends CustomPainter {
///   MyPainter(this.spriteAtlas, this.allSprites);
///
///   // assume spriteAtlas contains N 10x10 sprites side by side in a (N*10)x10 image
///   ui.Image spriteAtlas;
///   List<Sprite> allSprites;
///
///   @override
///   void paint(Canvas canvas, Size size) {
///     // For best advantage, these lists should be cached and only specific
///     // entries updated when the sprite information changes. This code is
///     // illustrative of how to set up the data and not a recommendation for
///     // optimal usage.
///     Float32List rectList = Float32List(allSprites.length * 4);
///     Float32List transformList = Float32List(allSprites.length * 4);
///     Int32List colorList = Int32List(allSprites.length);
///     for (int i = 0; i < allSprites.length; i++) {
///       Sprite sprite = allSprites[i];
///       final double rectX = sprite.index * 10.0;
///       rectList[i * 4 + 0] = rectX;
///       rectList[i * 4 + 1] = 0.0;
///       rectList[i * 4 + 2] = rectX + 10.0;
///       rectList[i * 4 + 3] = 10.0;
///
///       // This example uses an RSTransform object to compute the necessary values for
///       // the transform using a factory helper method because the sprites contain
///       // rotation values which are not trivial to work with. But if the math for the
///       // values falls out from other calculations on the sprites then the values could
///       // possibly be generated directly from the sprite update code.
///       final RSTransform transform = RSTransform.fromComponents(
///         rotation: sprite.rotation,
///         scale: 1.0,
///         // Center of the sprite relative to its rect
///         anchorX: 5.0,
///         anchorY: 5.0,
///         // Location at which to draw the center of the sprite
///         translateX: sprite.center.dx,
///         translateY: sprite.center.dy,
///       );
///       transformList[i * 4 + 0] = transform.scos;
///       transformList[i * 4 + 1] = transform.ssin;
///       transformList[i * 4 + 2] = transform.tx;
///       transformList[i * 4 + 3] = transform.ty;
///
///       // This example computes the color value directly, but one could also compute
///       // an actual Color object and use its Color.value getter for the same result.
///       // Since we are using BlendMode.srcIn, only the alpha component matters for
///       // these colors which makes this a simple shift operation.
///       colorList[i] = sprite.alpha << 24;
///     }
///     Paint paint = Paint();
///     canvas.drawRawAtlas(spriteAtlas, transformList, rectList, colorList, BlendMode.srcIn, null, paint);
///   }
///
///   // ...
/// }
/// ```
///
/// See also:
///
///  * [drawAtlas], which takes its arguments as objects rather than typed
///    data lists. AH  bo  đĞ  ĸ    	  -4   >8'/// Draws a shadow for a [Path] representing the given material elevation.
///
/// The `transparentOccluder` argument should be true if the occluding object
/// is not opaque.
///
/// The arguments must not be null. 2	8  =D  JR  \b    z Kƅ  ʁ       >!V   Da      
   (  .6  abhȭnivȬwȆȋȇȎȖȞȦȮȵȯȷdo   depqwʔ}xʅʓʆʍʒʎʕʺʜʖʞʴʮʶׁ  	   -     '  (͘  Ͱ    ()/\50=[>EJFMU]͆d^fh  s{  ΀
Έ  ΌΔ  Κ΢  Ωι    98
"*2:VA;CuI    71+3W  	  WXcdjώpkxύyπυρψϏϾϖϐϘϸϲϺ^    M'!)GAIq  	   	  qr}~ЄбЊЅВаГКПЛТЪвйглH#  )	1  5
>  ^  	  	  ^_eђkfsёt{р|уыѓѹњєќѳѭѵ쀉^  em  $ #%L,&.F@Hy  	  		  yz҅҆ҌҹҒҍҚҸқҢҧңҪҲҺһ!  +8  {!  ,9  E]   EFր4  BO  "
[l  u	{  Ղ Պ  դժ  [\Ւ՘ՙշ7    
      
	    78>׈D?LׇMTYU\dlt|׃׉ײא׊ג׬צ׮&7  AH  OU  &'bƀ'  2?  EK   [vl  u	{  فه  [\ٔՀ<  FS  X^  	*n   no3ۗ  ۫۸  34:_@;H^IPUQX`ۅgaiÀ   ܔ  %  ܔܕܛܡܜܩܿܪܱܶܲܹ0pA  KR  Xc  01ݤ  	$  */  ݤݥݫݱݬݹݺ=N  W	_  b	j  mt  =>ߍ  ߗ	ߟ  ߢ	ߪ  ߭	ߵ  ߸	      _ ^!(-)08@HPX`{gaiZ	    V      VW]c^klsxt{
  	  %      
          b#a$+0,3;CKS[c~jdl   '  -4  㧀  *  0@  MW   ev        efM          MNTZUbcjokrz(9  B	H  MT  ()=    
        &  =>DJERSZ_[bjrz4E  PX  Zb  ip  45\  hp  rz  |      ,+%-J4.6  	      
  !  ́tc  	s{  
            	  ,6  65	!'/7Q>8@DxU  ^	d  ip  DE1  ;H  M]  jt  킁~                      `_#($+273:BJRZa}hbj  +2  8>  BH  LS  x                '/  >F  Vf  y     BA$,4<CcJDL          k-  CK  W_  px              '  :D  TY   klrzsp    pq\  iy  + *$)%,J3-5G      Ѐ                     ,   9 C   M R   \ i   t                t u@        	  @AGMHUV]b^elqmt{!"2  <C  Zm           !  !"
G
X  
e
l  

  

  

    -4  QX  
G
H  	        %  9F  R^  kp    e	d
!)07<8?GOW^fmgo  	        W    	      WX^d_lmtyu|)K<   )* 抸DN/// An object representing a sequence of recorded graphical operations.
///
/// To create a [Picture], use a [PictureRecorder].
///
/// A [Picture] can be placed in a [Scene] using a [SceneBuilder], via
/// the [SceneBuilder.addPicture] method. A [Picture] can also be
/// drawn into a [Canvas], using the [Canvas.drawPicture] method.  P`/// A callback that is invoked to report a picture creation.
///
/// It's preferred to use [MemoryAllocations] in flutter/foundation.dart
/// than to use [onCreate] directly because [MemoryAllocations]
/// allows multiple callbacks. m/// A callback that is invoked to report the picture disposal.
///
/// It's preferred to use [MemoryAllocations] in flutter/foundation.dart
/// than to use [onDispose] directly because [MemoryAllocations]
/// allows multiple callbacks. /// Whether this reference to the underlying picture is [dispose]d.
///
/// This only returns a valid value if asserts are enabled, and must not be
/// used otherwise.   /// Returns the approximate number of bytes allocated for this object.
///
/// The actual size of this picture may be larger, particularly if it contains
/// references to image or other large objects.   D/// Creates an image from this picture.
///
/// The returned image will be `width` pixels wide and `height` pixels high.
/// The picture is rasterized within the 0 (left), 0 (top), `width` (right),
/// `height` (bottom) bounds. Content outside these bounds is clipped. 	  
   փ56/// Synchronously creates a handle to an image of this picture.
///
/// {@template dart.ui.painting.Picture.toImageSync}
/// The returned image will be `width` pixels wide and `height` pixels high.
/// The picture is rasterized within the 0 (left), 0 (top), `width` (right),
/// `height` (bottom) bounds. Content outside these bounds is clipped.
///
/// The image object is created and returned synchronously, but is rasterized
/// asynchronously. If the rasterization fails, an exception will be thrown
/// when the image is drawn to a [Canvas].
///
/// If a GPU context is available, this image will be created as GPU resident
/// and not copied back to the host. This means the image will be more
/// efficient to draw.
///
/// If no GPU context is available, the image will be rasterized on the CPU.
/// {@endtemplate} A	F  L
Q   \n/// Release the resources used by this object. The object is no longer usable
/// after this method is called.      /// This class is created by the engine, and should not be instantiated
/// or extended directly.
///
/// To create a [Picture], use a [PictureRecorder].  $$ $$$   $$%&   %%%%%%%%%%%%%%%%%%%%%%  	  
   l    	    
    !   l m s  { t              ! 5!2  !>	!C  !I
!N  ! !!"Y"  "	"  "
"  ""  "Y"Z"`""f"a"n""o"v"{"w"~"""""""# #   # ##$vk/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).  $$$%$+$H$1$,$9$G$:$A$F$B$I$d$P$J$R&-+&@   &-&. &\U)/// Records a [Picture] containing a sequence of graphical operations.
///
/// To begin recording, construct a [Canvas] to record the commands.
/// To end recording, use the [PictureRecorder.endRecording] method.
///
/// ## Use with the Flutter framework
///
/// The Flutter framework's [RendererBinding] provides a hook for creating
/// [PictureRecorder] objects ([RendererBinding.createPictureRecorder]) that
/// allows tests to hook into the scene creation logic. When creating a
/// [PictureRecorder] and [Canvas] that will be used with a [PictureLayer] as
/// part of the [Scene] in the context of the Flutter framework, consider
/// calling [RendererBinding.createPictureRecorder] instead of calling the
/// [PictureRecorder.new] constructor directly.
///
/// This does not apply when using a canvas to generate a bitmap for other
/// purposes, e.g. for generating a PNG image using [Picture.toImage]. * **ƀ/// Creates a new idle PictureRecorder. To associate it with a
/// [Canvas] and begin recording, pass this [PictureRecorder] to the
/// [Canvas] constructor.   *恙,su/// Whether this object is currently recording commands.
///
/// Specifically, this returns true if a [Canvas] object has been
/// created to record commands and recording has not yet ended via a
/// call to [endRecording], and false if either this
/// [PictureRecorder] has not yet been associated with a [Canvas],
/// or the [endRecording] method has already been called.   ,,-
/// Finishes recording graphical operations.
///
/// Returns a picture containing the graphical operations that have been
/// recorded thus far. After calling this function, both the picture recorder
/// and the canvas objects are invalid and cannot be used further.    --  ., ..+   1`1o  .4.   ...Ea.   .E.F.L.b.R.M.Z.a.[.c..j.d.l..   ..0̀13  1A1Q  000000000000001!1 011{L1   1{1| 1˛2&T/// A single shadow.
///
/// Multiple shadows are stacked together in a [TextStyle]. 21P 33p/// Construct a shadow.
///
/// The default shadow is a black shadow with zero offset and zero blur.
/// Default shadows should be completely covered by the casting element,
/// and not be visible.
///
/// Transparency should be adjusted through the [color] alpha.
///
/// Shadow order matters due to compositing multiple translucent objects not
/// being commutative.3(3  33  44  333334444(444:4L44F4;4I4N	4,4 44&4 45'5 5&5$5)5.#5? 5M5K5P5U#5f 5t5r5w5|&5 55556^/// Color that the shadow will be drawn with.
///
/// The shadows are shapes composited directly over the base canvas, and do not
/// represent optical occlusion. 6h47/// The displacement of the shadow from the casting element.
///
/// Positive x/y offsets will shift the shadow to the right and down, while
/// negative offsets shift the shadow to the left and up. The offsets are
/// relative to the position of the element that is casting it. 7i8 O/// The standard deviation of the Gaussian to convolve with the shadow's shape. 9:n/// The [blurRadius] in sigmas instead of logical pixels.
///
/// See the sigma argument to [MaskFilter.blur].   GGG   GG8|9.h/// Converts a blur radius in pixels to sigmas.
///
/// See the sigma argument to [MaskFilter.blur].
/// 9C9K   :A<Q/// Create the [Paint] object that corresponds to this shadow description.
///
/// The [offset] is not represented in the [Paint] object.
/// To honor this as well, the shape should be translated by [offset] before
/// being filled using this [Paint].
///
/// This class does not provide a way to disable shadows to avoid
/// inconsistencies in shadow blur rendering, primarily as a method of
/// reducing test flakiness. [toPaint] should be overridden in subclasses to
/// provide this functionality.   <Ѐ=7[/// Returns a new shadow with its [offset] and [blurRadius] scaled by the given
/// factor. ===E   =ɅAӃ/// Linearly interpolate between two shadows.
///
/// If either shadow is null, this function linearly interpolates from
/// a shadow that matches the other shadow in color but has a zero
/// offset and a zero blurRadius.
///
/// {@template dart.ui.shadow.lerp}
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
/// {@endtemplate} A	A  A	A  AA   CDi/// Linearly interpolate between two lists of shadows.
///
/// If the lists differ in length, excess items are lerped with null.
///
/// {@macro dart.ui.shadow.lerp} DrD  DD  DD   FF  FF  FFHH  HH   LLL   LL LҎM/// A handle to a read-only byte buffer that is managed by the engine.
///
/// The creator of this object is responsible for calling [dispose] when it is
/// no longer needed. M MMM MM   VV  VV VVNV1/// The length, in bytes, of the underlying data.   WWy/// Whether [dispose] has been called.
///
/// This must only be used when asserts are enabled. Otherwise, it will throw.   MgNh^/// Creates a copy of the data from a [Uint8List] suitable for internal use
/// in the engine. NvN   OQGOr/// Create a buffer from the asset with key [assetKey].
///
/// Throws an [Exception] if the asset does not exist. OO   RS+i/// Create a buffer from the file with [path].
///
/// Throws an [Exception] if the asset does not exist. S8S@   TT  UU  UU#  TTTTTTTTTTTTTTTTU0U  UU  UU  U0U1U7U_U?U8UGU^UHUPUXU`UUgUaUiUۀVH  VVV^  VgVw  UUUV
UUUV	UUVVV3VVVX Z;/// Release the resources used by this object. The object is no longer usable
/// after this method is called.
///
/// The underlying memory allocated by this object will be retained beyond
/// this call if it is still needed by another object that has not been
/// disposed. For example, an [ImageDescriptor] that has not been disposed
/// may still retain a reference to the memory from this buffer even if it
/// has been disposed. Freeing that memory requires disposing all resources
/// that may still hold it.   Zǀ[k/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).  [7[8[>[[[D[?[L[Z[M[T[Y[U[\[[c[][e [\e/// A descriptor of data that can be turned into an [Image] via a [Codec].
///
/// Use this class to determine the height, width, and byte size of image data
/// before decoding it. \y^^^/// Creates an image descriptor from raw image pixels.
///
/// The `pixels` parameter is the pixel data. They are packed in bytes in the
/// order described by `pixelFormat`, then grouped in rows, from left to right,
/// then top to bottom.
///
/// The `rowBytes` parameter is the number of bytes consumed by each row of
/// pixels in the data buffer. If unspecified, it defaults to `width` multiplied
/// by the number of bytes per pixel in the provided `format`.^^  ^^  ^^  _	_  _ _2    `ya>d/// The width, in pixels, of the image.
///
/// On the Web, this is only supported for [raw] images.   aH{ae/// The height, in pixels, of the image.
///
/// On the Web, this is only supported for [raw] images.   aǀb>h/// The number of bytes per pixel in the image.
///
/// On web, this is only supported for [raw] images.   _dc_H/// Creates an image descriptor from encoded data in a supported format. __   bPc>/// Release the resources used by this object. The object is no longer usable
/// after this method is called.
///
/// This can't be a leaf call because the native function calls Dart API
/// (Dart_SetNativeInstanceField).   cLd/// Creates a [Codec] object which is suitable for decoding the data in the
/// buffer to an [Image].
///
/// If only one of targetWidth or  targetHeight are specified, the other
/// dimension will be scaled according to the aspect ratio of the supplied
/// dimension.
///
/// If either targetWidth or targetHeight is less than or equal to zero, it
/// will be treated as if it is null. ee  ee    e+<e6  eeee   eggg/// Creates an image descriptor from raw image pixels.
///
/// The `pixels` parameter is the pixel data. They are packed in bytes in the
/// order described by `pixelFormat`, then grouped in rows, from left to right,
/// then top to bottom.
///
/// The `rowBytes` parameter is the number of bytes consumed by each row of
/// pixels in the data buffer. If unspecified, it defaults to `width` multiplied
/// by the number of bytes per pixel in the provided `format`.gg  h
h  h"h0  h;hA  hN hd   kk  kk  ll  k4k   kklH7l\   lHlIm!Lm5   m!m"	iSi  ii  ii  iSiTiZiibi[ijiikisizii{iiiiiij	j  jj  jj  j	j  j
j  jj  jj  j	j
jjJjjjjIjj'j/j6j=jDjKjnjRjLjTkrkw   kkkk6kkk'k5k(k/k4k0k7kfk>k8k@k`kZkbktl7   kkkkkkkkkkkkkl&kkkl ll"lm	   llllllllllllllllllllmqpm   mqmrm}m~mmmmmmmmmmmmmmmm(m  nn  n#n)  mmqq  qq  qq  qq  qqqqKqqq&qJq'q.q3q/q6q>qEqLqxqSqMqUqӀq   qq ~Zցu/// An exception thrown by [Canvas.drawImage] and related methods when drawing
/// an [Image] created via [Picture.toImageSync] that is in an invalid state.
///
/// This exception may be thrown if the requested image dimensions exceeded the
/// maximum 2D texture size allowed by the GPU, or if no GPU surface or context
/// was available for rasterization at request time. B012 39  B
H   SI2/// A string containing details about the failure. iO/// If available, the stack trace at the time [Picture.toImageSync] was called.  B"      O Jȋ/// Algorithms to use when painting on the canvas.
///
/// When drawing a shape or image onto a canvas, different algorithms can be
/// used to blend the pixels. The different values of [BlendMode] specify
/// different such algorithms.
///
/// Each algorithm has two inputs, the _source_, which is the image being drawn,
/// and the _destination_, which is the image into which the source image is
/// being composited. The destination is often thought of as the _background_.
/// The source and destination both have four color channels, the red, green,
/// blue, and alpha channels. These are typically represented as numbers in the
/// range 0.0 to 1.0. The output of the algorithm also has these same four
/// channels, with values computed from the source and destination.
///
/// The documentation of each value below describes how the algorithm works. In
/// each case, an image shows the output of blending a source image with a
/// destination image. In the images below, the destination is represented by an
/// image with horizontal lines and an opaque landscape photograph, and the
/// source is represented by an image with vertical lines (the same lines but
/// rotated) and a bird clip-art image. The [src] mode shows only the source
/// image, and the [dst] mode shows only the destination image. In the
/// documentation below, the transparency is illustrated by a checkerboard
/// pattern. The [clear] mode drops both the source and destination, resulting
/// in an output that is entirely transparent (illustrated by a solid
/// checkerboard pattern).
///
/// The horizontal and vertical bars in these images show the red, green, and
/// blue channels with varying opacity levels, then all three color channels
/// together with those same varying opacity levels, then all three color
/// channels set to zero with those varying opacity levels, then two bars showing
/// a red/green/blue repeating gradient, the first with full opacity and the
/// second with partial opacity, and finally a bar with the three color channels
/// set to zero but the opacity varying in a repeating gradient.
///
/// ## Application to the [Canvas] API
///
/// When using [Canvas.saveLayer] and [Canvas.restore], the blend mode of the
/// [Paint] given to the [Canvas.saveLayer] will be applied when
/// [Canvas.restore] is called. Each call to [Canvas.saveLayer] introduces a new
/// layer onto which shapes and images are painted; when [Canvas.restore] is
/// called, that layer is then composited onto the parent layer, with the source
/// being the most-recently-drawn shapes and images, and the destination being
/// the parent layer. (For the first [Canvas.saveLayer] call, the parent layer
/// is the canvas itself.)
///
/// See also:
///
///  * [Paint.blendMode], which uses [BlendMode] to define the compositing
///    strategy.   K Lh/// Drop both the source and destination images, leaving nothing.
///
/// This corresponds to the "clear" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_clear.png)  LrH M4/// Drop the destination image, only paint the source image.
///
/// Conceptually, the destination is first cleared, then the source image is
/// painted.
///
/// This corresponds to the "Copy" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_src.png)  MM O	9/// Drop the source image, only paint the destination image.
///
/// Conceptually, the source image is discarded, leaving the destination
/// untouched.
///
/// This corresponds to the "Destination" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_dst.png)  O P/// Composite the source image over the destination image.
///
/// This is the default value. It represents the most intuitive case, where
/// shapes are painted on top of what is below, with transparent areas showing
/// the destination layer.
///
/// This corresponds to the "Source over Destination" Porter-Duff operator,
/// also known as the Painter's Algorithm.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_srcOver.png)  Q R/// Composite the source image under the destination image.
///
/// This is the opposite of [srcOver].
///
/// This corresponds to the "Destination over Source" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_dstOver.png)
///
/// This is useful when the source image should have been painted before the
/// destination image, but could not be.  R UIx/// Show the source image, but only where the two images overlap. The
/// destination image is not rendered, it is treated merely as a mask. The
/// color channels of the destination are ignored, only the opacity has an
/// effect.
///
/// To show the destination image instead, consider [dstIn].
///
/// To reverse the semantic of the mask (only showing the source where the
/// destination is absent, rather than where it is present), consider
/// [srcOut].
///
/// This corresponds to the "Source in Destination" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_srcIn.png)  US W҂f/// Show the destination image, but only where the two images overlap. The
/// source image is not rendered, it is treated merely as a mask. The color
/// channels of the source are ignored, only the opacity has an effect.
///
/// To show the source image instead, consider [srcIn].
///
/// To reverse the semantic of the mask (only showing the source where the
/// destination is present, rather than where it is absent), consider [dstOut].
///
/// This corresponds to the "Destination in Source" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_dstIn.png)  W܂ Zny/// Show the source image, but only where the two images do not overlap. The
/// destination image is not rendered, it is treated merely as a mask. The color
/// channels of the destination are ignored, only the opacity has an effect.
///
/// To show the destination image instead, consider [dstOut].
///
/// To reverse the semantic of the mask (only showing the source where the
/// destination is present, rather than where it is absent), consider [srcIn].
///
/// This corresponds to the "Source out Destination" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_srcOut.png)  Zy ]o/// Show the destination image, but only where the two images do not overlap. The
/// source image is not rendered, it is treated merely as a mask. The color
/// channels of the source are ignored, only the opacity has an effect.
///
/// To show the source image instead, consider [srcOut].
///
/// To reverse the semantic of the mask (only showing the destination where the
/// source is present, rather than where it is absent), consider [dstIn].
///
/// This corresponds to the "Destination out Source" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_dstOut.png)  ]g _lE/// Composite the source image over the destination image, but only where it
/// overlaps the destination.
///
/// This corresponds to the "Source atop Destination" Porter-Duff operator.
///
/// This is essentially the [srcOver] operator, but with the output's opacity
/// channel being set to that of the destination image instead of being a
/// combination of both images' opacity channels.
///
/// For a variant with the destination on top instead of the source, see
/// [dstATop].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_srcATop.png)  _x] a΂;/// Composite the destination image over the source image, but only where it
/// overlaps the source.
///
/// This corresponds to the "Destination atop Source" Porter-Duff operator.
///
/// This is essentially the [dstOver] operator, but with the output's opacity
/// channel being set to that of the source image instead of being a
/// combination of both images' opacity channels.
///
/// For a variant with the source on top instead of the destination, see
/// [srcATop].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_dstATop.png)  aځ; c+/// Apply a bitwise `xor` operator to the source and destination images. This
/// leaves transparency where they would overlap.
///
/// This corresponds to the "Source xor Destination" Porter-Duff operator.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_xor.png)  cO fe,/// Sum the components of the source and destination images.
///
/// Transparency in a pixel of one of the images reduces the contribution of
/// that image to the corresponding output pixel, as if the color of that
/// pixel in that image was darker.
///
/// This corresponds to the "Source plus Destination" Porter-Duff operator.
///
/// This is the right blend mode for cross-fading between two images. Consider
/// two images A and B, and an interpolation time variable _t_ (from 0.0 to
/// 1.0). To cross fade between them, A should be drawn with opacity 1.0 - _t_
/// into a new layer using [BlendMode.srcOver], and B should be drawn on top
/// of it, at opacity _t_, into the same layer, using [BlendMode.plus].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_plus.png)  fnV i'/// Multiply the color components of the source and destination images.
///
/// This can only result in the same or darker colors (multiplying by white,
/// 1.0, results in no change; multiplying by black, 0.0, results in black).
///
/// When compositing two opaque images, this has similar effect to overlapping
/// two transparencies on a projector.
///
/// For a variant that also multiplies the alpha channel, consider [multiply].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_modulate.png)
///
/// See also:
///
///  * [screen], which does a similar computation but inverted.
///  * [overlay], which combines [modulate] and [screen] to favor the
///    destination image.
///  * [hardLight], which combines [modulate] and [screen] to favor the
///    source image.  j! o-/// Multiply the inverse of the components of the source and destination
/// images, and invert the result.
///
/// Inverting the components means that a fully saturated channel (opaque
/// white) is treated as the value 0.0, and values normally treated as 0.0
/// (black, transparent) are treated as 1.0.
///
/// This is essentially the same as [modulate] blend mode, but with the values
/// of the colors inverted before the multiplication and the result being
/// inverted back before rendering.
///
/// This can only result in the same or lighter colors (multiplying by black,
/// 1.0, results in no change; multiplying by white, 0.0, results in white).
/// Similarly, in the alpha channel, it can only result in more opaque colors.
///
/// This has similar effect to two projectors displaying their images on the
/// same screen simultaneously.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_screen.png)
///
/// See also:
///
///  * [modulate], which does a similar computation but without inverting the
///    values.
///  * [overlay], which combines [modulate] and [screen] to favor the
///    destination image.
///  * [hardLight], which combines [modulate] and [screen] to favor the
///    source image.  oQ s(/// Multiply the components of the source and destination images after
/// adjusting them to favor the destination.
///
/// Specifically, if the destination value is smaller, this multiplies it with
/// the source value, whereas if the source value is smaller, it multiplies
/// the inverse of the source value with the inverse of the destination value,
/// then inverts the result.
///
/// Inverting the components means that a fully saturated channel (opaque
/// white) is treated as the value 0.0, and values normally treated as 0.0
/// (black, transparent) are treated as 1.0.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_overlay.png)
///
/// See also:
///
///  * [modulate], which always multiplies the values.
///  * [screen], which always multiplies the inverses of the values.
///  * [hardLight], which is similar to [overlay] but favors the source image
///    instead of the destination image.  s47 te"/// Composite the source and destination image by choosing the lowest value
/// from each color channel.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_darken.png)  tp: u$/// Composite the source and destination image by choosing the highest value
/// from each color channel.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_lighten.png)  u{ w b/// Divide the destination by the inverse of the source.
///
/// Inverting the components means that a fully saturated channel (opaque
/// white) is treated as the value 0.0, and values normally treated as 0.0
/// (black, transparent) are treated as 1.0.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_colorDodge.png)  w/ xy/// Divide the inverse of the destination by the source, and inverse the result.
///
/// Inverting the components means that a fully saturated channel (opaque
/// white) is treated as the value 0.0, and values normally treated as 0.0
/// (black, transparent) are treated as 1.0.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_colorBurn.png)  xŃ |/// Multiply the components of the source and destination images after
/// adjusting them to favor the source.
///
/// Specifically, if the source value is smaller, this multiplies it with the
/// destination value, whereas if the destination value is smaller, it
/// multiplies the inverse of the destination value with the inverse of the
/// source value, then inverts the result.
///
/// Inverting the components means that a fully saturated channel (opaque
/// white) is treated as the value 0.0, and values normally treated as 0.0
/// (black, transparent) are treated as 1.0.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_hardLight.png)
///
/// See also:
///
///  * [modulate], which always multiplies the values.
///  * [screen], which always multiplies the inverses of the values.
///  * [overlay], which is similar to [hardLight] but favors the destination
///    image instead of the source image.  |s ~U/// Use [colorDodge] for source values below 0.5 and [colorBurn] for source
/// values above 0.5.
///
/// This results in a similar but softer effect than [overlay].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_softLight.png)
///
/// See also:
///
///  * [color], which is a more subtle tinting effect.  ~$ ߁/// Subtract the smaller value from the bigger value for each channel.
///
/// Compositing black has no effect; compositing white inverts the colors of
/// the other image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver].
///
/// The effect is similar to [exclusion] but harsher.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_difference.png)   /// Subtract double the product of the two images from the sum of the two
/// images.
///
/// Compositing black has no effect; compositing white inverts the colors of
/// the other image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver].
///
/// The effect is similar to [difference] but softer.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_exclusion.png)  ǂ /// Multiply the components of the source and destination images, including
/// the alpha channel.
///
/// This can only result in the same or darker colors (multiplying by white,
/// 1.0, results in no change; multiplying by black, 0.0, results in black).
///
/// Since the alpha channel is also multiplied, a fully-transparent pixel
/// (opacity 0.0) in one image results in a fully transparent pixel in the
/// output. This is similar to [dstIn], but with the colors combined.
///
/// For a variant that multiplies the colors but does not multiply the alpha
/// channel, consider [modulate].
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_multiply.png)   /// Take the hue of the source image, and the saturation and luminosity of the
/// destination image.
///
/// The effect is to tint the destination image with the source image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver]. Regions that are entirely transparent in the source image take
/// their hue from the destination.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_hue.png)
///
/// See also:
///
///  * [color], which is a similar but stronger effect as it also applies the
///    saturation of the source image.
///  * [HSVColor], which allows colors to be expressed using Hue rather than
///    the red/green/blue channels of [Color].  k D/// Take the saturation of the source image, and the hue and luminosity of the
/// destination image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver]. Regions that are entirely transparent in the source image take
/// their saturation from the destination.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_saturation.png)
///
/// See also:
///
///  * [color], which also applies the hue of the source image.
///  * [luminosity], which applies the luminosity of the source image to the
///    destination.  . /// Take the hue and saturation of the source image, and the luminosity of the
/// destination image.
///
/// The effect is to tint the destination image with the source image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver]. Regions that are entirely transparent in the source image take
/// their hue and saturation from the destination.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_color.png)
///
/// See also:
///
///  * [hue], which is a similar but weaker effect.
///  * [softLight], which is a similar tinting effect but also tints white.
///  * [saturation], which only applies the saturation of the source image.   d/// Take the luminosity of the source image, and the hue and saturation of the
/// destination image.
///
/// The opacity of the output image is computed in the same way as for
/// [srcOver]. Regions that are entirely transparent in the source image take
/// their luminosity from the destination.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/blend_mode_luminosity.png)
///
/// See also:
///
///  * [saturation], which applies the saturation of the source image to the
///    destination.
///  * [ImageFilter.blur], which can be used with [BackdropFilter] for a
///    related effect.     { "t/// Quality levels for image sampling in [ImageFilter] and [Shader] objects that sample
/// images and for [Canvas] operations that render images.
///
/// When scaling up typically the quality is lowest at [none], higher at [low] and [medium],
/// and for very large scale factors (over 10x) the highest at [high].
///
/// When scaling down, [medium] provides the best quality especially when scaling an
/// image to less than half its size or for animating the scale factor between such
/// reductions. Otherwise, [low] and [high] provide similar effects for reductions of
/// between 50% and 100% but the image may lose detail and have dropouts below 50%.
///
/// To get high quality when scaling images up and down, or when the scale is
/// unknown, [medium] is typically a good balanced choice.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/filter_quality.png)
///
/// When building for the web using the `--web-renderer=html` option, filter
/// quality has no effect. All images are rendered using the respective
/// browser's default setting.
///
/// See also:
///
///  * [Paint.filterQuality], which is used to pass [FilterQuality] to the
///    engine while using drawImage calls on a [Canvas].
///  * [ImageShader].
///  * [ImageFilter.matrix].
///  * [Canvas.drawImage].
///  * [Canvas.drawImageRect].
///  * [Canvas.drawImageNine].
///  * [Canvas.drawAtlas].    /// The fastest filtering method, albeit also the lowest quality.
///
/// This value results in a "Nearest Neighbor" algorithm which just
/// repeats or eliminates pixels as an image is scaled up or down.   @/// Better quality than [none], faster than [medium].
///
/// This value results in a "Bilinear" algorithm which smoothly
/// interpolates between pixels in an image.  H Ђk/// The best all around filtering method that is only worse than [high]
/// at extremely large scale factors.
///
/// This value improves upon the "Bilinear" algorithm specified by [low]
/// by utilizing a Mipmap that pre-computes high quality lower resolutions
/// of the image at half (and quarter and eighth, etc.) sizes and then
/// blends between those to prevent loss of detail at small scale sizes.
///
/// {@template dart.ui.filterQuality.seeAlso}
/// See also:
///
///  * [FilterQuality] class-level documentation that goes into detail about
///    relative qualities of the constant values.
/// {@endtemplate}  ۂE &/// Best possible quality when scaling up images by scale factors larger than
/// 5-10x.
///
/// When images are scaled down, this can be worse than [medium] for scales
/// smaller than 0.5x, or when animating the scale factor.
///
/// This option is also the slowest.
///
/// This value results in a standard "Bicubic" algorithm which uses a 3rd order
/// equation to smooth the abrupt transitions between pixels while preserving
/// some of the sense of an edge and avoiding sharp peaks in the result.
///
/// {@macro dart.ui.filterQuality.seeAlso}     % /// Styles to use for line endings.
///
/// See also:
///
///  * [Paint.strokeCap] for how this value is used.
///  * [StrokeJoin] for the different kinds of line segment joins.   & r/// Begin and end contours with a flat edge and no extension.
///
/// ![A butt cap ends line segments with a square end that stops at the end of
/// the line segment.](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/butt_cap.png)
///
/// Compare to the [square] cap, which has the same shape, but extends past
/// the end of the line by half a stroke width.   v/// Begin and end contours with a semi-circle extension.
///
/// ![A round cap adds a rounded end to the line segment that protrudes
/// by one half of the thickness of the line (which is the radius of the cap)
/// past the end of the segment.](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/round_cap.png)
///
/// The cap is colored in the diagram above to highlight it: in normal use it
/// is the same color as the line.  h I/// Begin and end contours with a half square extension. This is
/// similar to extending each contour by half the stroke width (as
/// given by [Paint.strokeWidth]).
///
/// ![A square cap has a square end that effectively extends the line length
/// by half of the stroke width.](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/square_cap.png)
///
/// The cap is colored in the diagram above to highlight it: in normal use it
/// is the same color as the line.
///
/// Compare to the [butt] cap, which has the same shape, but doesn't extend
/// past the end of the line.      g/// Styles to use for line segment joins.
///
/// This only affects line joins for polygons drawn by [Canvas.drawPath] and
/// rectangles, not points drawn as lines with [Canvas.drawPoints].
///
/// See also:
///
/// * [Paint.strokeJoin] and [Paint.strokeMiterLimit] for how this value is
///   used.
/// * [StrokeCap] for the different kinds of line endings.   Y 7/// Joins between line segments form sharp corners.
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/miter_4_join.mp4}
///
/// The center of the line segment is colored in the diagram above to
/// highlight the join, but in normal usage the join is the same color as the
/// line.
///
/// See also:
///
///   * [Paint.strokeJoin], used to set the line segment join style to this
///     value.
///   * [Paint.strokeMiterLimit], used to define when a miter is drawn instead
///     of a bevel when the join is set to this value.   ˁ/// Joins between line segments are semi-circular.
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/round_join.mp4}
///
/// The center of the line segment is colored in the diagram above to
/// highlight the join, but in normal usage the join is the same color as the
/// line.
///
/// See also:
///
///   * [Paint.strokeJoin], used to set the line segment join style to this
///     value.  Ղ /// Joins between line segments connect the corners of the butt ends of the
/// line segments to give a beveled appearance.
///
/// {@animation 300 300 https://flutter.github.io/assets-for-api-docs/assets/dart-ui/bevel_join.mp4}
///
/// The center of the line segment is colored in the diagram above to
/// highlight the join, but in normal usage the join is the same color as the
/// line.
///
/// See also:
///
///   * [Paint.strokeJoin], used to set the line segment join style to this
///     value.      T/// Strategies for painting shapes and paths on a canvas.
///
/// See [Paint.style].    /// Apply the [Paint] to the inside of the shape. For example, when
/// applied to the [Canvas.drawCircle] call, this results in a disc
/// of the given size being painted.   /// Apply the [Paint] to the edge of the shape. For example, when
/// applied to the [Canvas.drawCircle] call, this results in a hoop
/// of the given size being painted. The line drawn on the edge will
/// be the width given by the [Paint.strokeWidth] property.     Ɍ ./// Different ways to clip a widget's content.   8 :/// No clip at all.
///
/// This is the default option for most widgets: if the content does not
/// overflow the widget boundary, don't pay any performance cost for clipping.
///
/// If the content does overflow, please explicitly specify the following
/// [Clip] options:
///  * [hardEdge], which is the fastest clipping, but with lower fidelity.
///  * [antiAlias], which is a little slower than [hardEdge], but with smoothed edges.
///  * [antiAliasWithSaveLayer], which is much slower than [antiAlias], and should
///    rarely be used.  C_ :/// Clip, but do not apply anti-aliasing.
///
/// This mode enables clipping, but curves and non-axis-aligned straight lines will be
/// jagged as no effort is made to anti-alias.
///
/// Faster than other clipping modes, but slower than [none].
///
/// This is a reasonable choice when clipping is needed, if the container is an axis-
/// aligned rectangle or an axis-aligned rounded rectangle with very small corner radii.
///
/// See also:
///
///  * [antiAlias], which is more reasonable when clipping is needed and the shape is not
///    an axis-aligned rectangle.  . ̃ /// Clip with anti-aliasing.
///
/// This mode has anti-aliased clipping edges to achieve a smoother look.
///
/// It's much faster than [antiAliasWithSaveLayer], but slower than [hardEdge].
///
/// This will be the common case when dealing with circles and arcs.
///
/// Different from [hardEdge] and [antiAliasWithSaveLayer], this clipping may have
/// bleeding edge artifacts.
/// (See https://fiddle.skia.org/c/21cb4c2b2515996b537f36e7819288ae for an example.)
///
/// See also:
///
///  * [hardEdge], which is a little faster, but with lower fidelity.
///  * [antiAliasWithSaveLayer], which is much slower, but can avoid the
///    bleeding edges if there's no other way.
///  * [Paint.isAntiAlias], which is the anti-aliasing switch for general draw operations.  ڄs 74/// Clip with anti-aliasing and saveLayer immediately following the clip.
///
/// This mode not only clips with anti-aliasing, but also allocates an offscreen
/// buffer. All subsequent paints are carried out on that buffer before finally
/// being clipped and composited back.
///
/// This is very slow. It has no bleeding edge artifacts (that [antiAlias] has)
/// but it changes the semantics as an offscreen buffer is now introduced.
/// (See https://github.com/flutter/flutter/issues/18057#issuecomment-394197336
/// for a difference between paint without saveLayer and paint with saveLayer.)
///
/// This will be only rarely needed. One case where you might need this is if
/// you have an image overlaid on a very different background color. In these
/// cases, consider whether you can avoid overlaying multiple colors in one
/// spot (e.g. by having the background color only present where the image is
/// absent). If you can, [antiAlias] would be fine and much faster.
///
/// See also:
///
///  * [antiAlias], which is much faster, and has similar clipping results.    ,/// The color space describes the colors that are available to an [Image].
///
/// This value can help decide which [ImageByteFormat] to use with
/// [Image.toByteData]. Images that are in the [extendedSRGB] color space
/// should use something like [ImageByteFormat.rawExtendedRgba128] so that
/// colors outside of the sRGB gamut aren't lost.
///
/// This is also the result of [Image.colorSpace].
///
/// See also: https://en.wikipedia.org/wiki/Color_space  ̀/// The sRGB color space.
///
/// You may know this as the standard color space for the web or the color
/// space of non-wide-gamut Flutter apps.
///
/// See also: https://en.wikipedia.org/wiki/SRGB Ձ/// A color space that is backwards compatible with sRGB but can represent
/// colors outside of that gamut with values outside of [0..1]. In order to
/// see the extended values an [ImageByteFormat] like
/// [ImageByteFormat.rawExtendedRgba128] must be used. Q
99/// The Display P3 color space.
///
/// This is a wide gamut color space that has broad hardware support. It's
/// supported in cases like using Impeller on iOS. When used on a platform
/// that doesn't support Display P3, the colors will be clamped to sRGB.
///
/// See also: https://en.wikipedia.org/wiki/DCI-P3    7Y/// The format in which image bytes should be returned when using
/// [Image.toByteData].  ׀Rt/// Raw RGBA format.
///
/// Unencoded bytes, in RGBA row-primary form with premultiplied alpha, 8 bits per channel. ^x/// Raw straight RGBA format.
///
/// Unencoded bytes, in RGBA row-primary form with straight alpha, 8 bits per channel. /// Raw unmodified format.
///
/// Unencoded bytes, in the image's existing format. For example, a grayscale
/// image may use a single 8-bit channel for each pixel. m/// Raw extended range RGBA format.
///
/// Unencoded bytes, in RGBA row-primary form with straight alpha, 32 bit
/// float (IEEE 754 binary32) per channel.
///
/// Example usage:
///
/// ```dart
/// import 'dart:ui' as ui;
/// import 'dart:typed_data';
///
/// Future<Map<String, double>> getFirstPixel(ui.Image image) async {
///   final ByteData data =
///       (await image.toByteData(format: ui.ImageByteFormat.rawExtendedRgba128))!;
///   final Float32List floats = Float32List.view(data.buffer);
///   return <String, double>{
///     'r': floats[0],
///     'g': floats[1],
///     'b': floats[2],
///     'a': floats[3],
///   };
/// }
/// ``` ?/// PNG format.
///
/// A loss-less compression format for images. This format is well suited for
/// images with hard edges, such as screenshots or sprites, and images with
/// text. Transparency is supported. The PNG format supports images up to
/// 2,147,483,647 pixels in either dimension, though in practice available
/// memory provides a more immediate limitation on maximum image size.
///
/// PNG images normally use the `.png` file extension and the `image/png` MIME
/// type.
///
/// See also:
///
///  * <https://en.wikipedia.org/wiki/Portable_Network_Graphics>, the Wikipedia page on PNG.
///  * <https://tools.ietf.org/rfc/rfc2083.txt>, the PNG standard.    G?>/// The format of pixel data given to [decodeImageFromPixels].  l/// Each pixel is 32 bits, with the highest 8 bits encoding red, the next 8
/// bits encoding green, the next 8 bits encoding blue, and the lowest 8 bits
/// encoding alpha. Premultiplied alpha is used. yJ/// Each pixel is 32 bits, with the highest 8 bits encoding blue, the next 8
/// bits encoding green, the next 8 bits encoding red, and the lowest 8 bits
/// encoding alpha. Premultiplied alpha is used. W,x/// Each pixel is 128 bits, where each color component is a 32 bit float that
/// is normalized across the sRGB gamut.  The first float is the red
/// component, followed by: green, blue and alpha. Premultiplied alpha isn't
/// used, matching [ImageByteFormat.rawExtendedRgba128].    ƃc/// Determines the winding rule that decides how the interior of a [Path] is
/// calculated.
///
/// This enum is used by the [Path.fillType] property.  t/// The interior is defined by a non-zero sum of signed edge crossings.
///
/// For a given point, the point is considered to be on the inside of the path
/// if a line drawn from the point to infinity crosses lines going clockwise
/// around the point a different number of times than it crosses lines going
/// counter-clockwise around that point.
///
/// See: <https://en.wikipedia.org/wiki/Nonzero-rule> (0Q/// The interior is defined by an odd number of edge crossings.
///
/// For a given point, the point is considered to be on the inside of the path
/// if a line drawn from the point to infinity crosses an odd number of lines.
///
/// See: <https://en.wikipedia.org/wiki/Even-odd_rule>    ]L/// Strategies for combining paths.
///
/// See also:
///
/// * [Path.combine], which uses this enum to decide how to combine two paths.  #/// Subtract the second path from the first path.
///
/// For example, if the two paths are overlapping circles of equal diameter
/// but differing centers, the result would be a crescent portion of the
/// first circle that was not overlapped by the second circle.
///
/// See also:
///
///  * [reverseDifference], which is the same but subtracting the first path
///    from the second. ʁHk/// Create a new path that is the intersection of the two paths, leaving the
/// overlapping pieces of the path.
///
/// For example, if the two paths are overlapping circles of equal diameter
/// but differing centers, the result would be only the overlapping portion
/// of the two circles.
///
/// See also:
///  * [xor], which is the inverse of this operation U$t/// Create a new path that is the union (inclusive-or) of the two paths.
///
/// For example, if the two paths are overlapping circles of equal diameter
/// but differing centers, the result would be a figure-eight like shape
/// matching the outer boundaries of both circles. }n/// Create a new path that is the exclusive-or of the two paths, leaving
/// everything but the overlapping pieces of the path.
///
/// For example, if the two paths are overlapping circles of equal diameter
/// but differing centers, the figure-eight like shape less the overlapping parts
///
/// See also:
///  * [intersect], which is the inverse of this operation }/// Subtract the first path from the second path.
///
/// For example, if the two paths are overlapping circles of equal diameter
/// but differing centers, the result would be a crescent portion of the
/// second circle that was not overlapped by the first circle.
///
/// See also:
///
///  * [difference], which is the same but subtracting the second path
///    from the first.    '{(4/// Styles to use for blurs in [MaskFilter] objects.  (`(/// Fuzzy inside and outside. This is useful for painting shadows that are
/// offset from the shape that ostensibly is casting the shadow. ()À/// Solid inside, fuzzy outside. This corresponds to drawing the shape, and
/// additionally drawing the blur. This can make objects appear brighter,
/// maybe even as if they were fluorescent. )̀*/// Nothing inside, fuzzy outside. This is useful for painting shadows for
/// partially transparent shapes, when they are painted separately but without
/// an offset, so that the shadow doesn't paint below the shape. *c+Y/// Fuzzy inside, nothing outside. This can make shapes appear to be lit from
/// within.    ӓ/// Defines what happens at the edge of a gradient or the sampling of a source image
/// in an [ImageFilter].
///
/// A gradient is defined along a finite inner area. In the case of a linear
/// gradient, it's between the parallel lines that are orthogonal to the line
/// drawn between two points. In the case of radial gradients, it's the disc
/// that covers the circle centered on a particular point up to a given radius.
///
/// An image filter reads source samples from a source image and performs operations
/// on those samples to produce a result image. An image defines color samples only
/// for pixels within the bounds of the image but some filter operations, such as a blur
/// filter, read samples over a wide area to compute the output for a given pixel. Such
/// a filter would need to combine samples from inside the image with hypothetical
/// color values from outside the image.
///
/// This enum is used to define how the gradient or image filter should treat the regions
/// outside that defined inner area.
///
/// See also:
///
///  * [painting.Gradient], the superclass for [LinearGradient] and
///    [RadialGradient], as used by [BoxDecoration] et al, which works in
///    relative coordinates and can create a [Shader] representing the gradient
///    for a particular [Rect] on demand.
///  * [dart:ui.Gradient], the low-level class used when dealing with the
///    [Paint.shader] property directly, with its [Gradient.linear] and
///    [Gradient.radial] constructors.
///  * [dart:ui.ImageFilter.blur], an ImageFilter that may sometimes need to
///    read samples from outside an image to combine with the pixels near the
///    edge of the image.  Q/// Samples beyond the edge are clamped to the nearest color in the defined inner area.
///
/// A gradient will paint all the regions outside the inner area with the
/// color at the end of the color stop list closest to that region.
///
/// An image filter will substitute the nearest edge pixel for any samples taken from
/// outside its source image.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_clamp_sweep.png) [5/// Samples beyond the edge are repeated from the far end of the defined area.
///
/// For a gradient, this technique is as if the stop points from 0.0 to 1.0 were then
/// repeated from 1.0 to 2.0, 2.0 to 3.0, and so forth (and for linear gradients, similarly
/// from -1.0 to 0.0, -2.0 to -1.0, etc).
///
/// An image filter will treat its source image as if it were tiled across the enlarged
/// sample space from which it reads, each tile in the same orientation as the base image.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_repeated_sweep.png) |Y/// Samples beyond the edge are mirrored back and forth across the defined area.
///
/// For a gradient, this technique is as if the stop points from 0.0 to 1.0 were then
/// repeated backwards from 2.0 to 1.0, then forwards from 2.0 to 3.0, then backwards
/// again from 4.0 to 3.0, and so forth (and for linear gradients, similarly in the
/// negative direction).
///
/// An image filter will treat its source image as tiled in an alternating forwards and
/// backwards or upwards and downwards direction across the sample space from which
/// it is reading.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_mirror_sweep.png) ΂/// Samples beyond the edge are treated as transparent black.
///
/// A gradient will render transparency over any region that is outside the circle of a
/// radial gradient or outside the parallel lines that define the inner area of a linear
/// gradient.
///
/// An image filter will substitute transparent black for any sample it must read from
/// outside its source image.
///
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_linear.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_radial.png)
/// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/tile_mode_decal_sweep.png)    4w/// Defines how a list of points is interpreted when drawing a set of triangles.
///
/// Used by [Canvas.drawVertices].  QEE/// Draw each sequence of three points as the vertices of a triangle. S[K/// Draw each sliding window of three points as the vertices of a triangle. A)/// Draw the first point and each sliding window of two points as the vertices
/// of a triangle.
///
/// This mode is not natively supported by most backends, and is instead
/// implemented by unrolling the points into the equivalent
/// [VertexMode.triangles], which is generally more efficient.    (-)^/// Defines how a list of points is interpreted when drawing a set of points.
///
/// Used by [Canvas.drawPoints] and [Canvas.drawRawPoints].  )l+/// Draw each point separately.
///
/// If the [Paint.strokeCap] is [StrokeCap.round], then each point is drawn
/// as a circle with the diameter of the [Paint.strokeWidth], filled as
/// described by the [Paint] (ignoring [Paint.style]).
///
/// Otherwise, each point is drawn as an axis-aligned square with sides of
/// length [Paint.strokeWidth], filled as described by the [Paint] (ignoring
/// [Paint.style]). +(,/// Draw each sequence of two points as a line segment.
///
/// If the number of points is odd, then the last point is ignored.
///
/// The lines are stroked as described by the [Paint] (ignoring
/// [Paint.style]). ,,/// Draw the entire sequence of points as one line.
///
/// The lines are stroked as described by the [Paint] (ignoring
/// [Paint.style]).    ,-4x/// Defines how a new clip region should be merged with the existing clip
/// region.
///
/// Used by [Canvas.clipRect].  -?B-w5/// Subtract the new region from the existing region. -B-6/// Intersect the new region from the existing region.       o  	   t     rw     ۆ  %        olu       
G
K  
^
j   !,  ?	D   G2pGR  G]Ge  G2G3G9GJG:]Y?e1/// Instantiates an image [Codec].
///
/// This method is a convenience wrapper around the [ImageDescriptor] API, and
/// using [ImageDescriptor] directly is preferred since it allows the caller to
/// make better determinations about how and whether to use the `targetWidth`
/// and `targetHeight` parameters.
///
/// The `list` parameter is the binary image data (e.g a PNG or GIF binary data).
/// The data can be for either static or animated images. The following image
/// formats are supported:
/// {@template dart.ui.imageFormats}
/// JPEG, PNG, GIF, Animated GIF, WebP, Animated WebP, BMP, and WBMP. Additional
/// formats may be supported by the underlying platform. Flutter will
/// attempt to call platform API to decode unrecognized formats, and if the
/// platform API supports decoding the image Flutter will be able to render it.
/// {@endtemplate}
///
/// The `targetWidth` and `targetHeight` arguments specify the size of the
/// output image, in image pixels. If they are not equal to the intrinsic
/// dimensions of the image, then the image will be scaled after being decoded.
/// If the `allowUpscaling` parameter is not set to true, both dimensions will
/// be capped at the intrinsic dimensions of the image, even if only one of
/// them would have exceeded those intrinsic dimensions. If exactly one of these
/// two arguments is specified, then the aspect ratio will be maintained while
/// forcing the image to match the other given dimension. If neither is
/// specified, then the image maintains its intrinsic size.
///
/// Scaling the image to larger than its intrinsic size should usually be
/// avoided, since it causes the image to use more memory than necessary.
/// Instead, prefer scaling the [Canvas] transform. If the image must be scaled
/// up, the `allowUpscaling` parameter must be set to true.
///
/// The returned future can complete with an error if the image decoding has
/// failed. eJeU  e^ed  erex  ee  efnԈ+/// Instantiates an image [Codec].
///
/// This method is a convenience wrapper around the [ImageDescriptor] API, and
/// using [ImageDescriptor] directly is preferred since it allows the caller to
/// make better determinations about how and whether to use the `targetWidth`
/// and `targetHeight` parameters.
///
/// The [buffer] parameter is the binary image data (e.g a PNG or GIF binary data).
/// The data can be for either static or animated images. The following image
/// formats are supported: {@macro dart.ui.imageFormats}
///
/// The [buffer] will be disposed by this method once the codec has been created,
/// so the caller must relinquish ownership of the [buffer] when they call this
/// method.
///
/// The [targetWidth] and [targetHeight] arguments specify the size of the
/// output image, in image pixels. If they are not equal to the intrinsic
/// dimensions of the image, then the image will be scaled after being decoded.
/// If the `allowUpscaling` parameter is not set to true, both dimensions will
/// be capped at the intrinsic dimensions of the image, even if only one of
/// them would have exceeded those intrinsic dimensions. If exactly one of these
/// two arguments is specified, then the aspect ratio will be maintained while
/// forcing the image to match the other given dimension. If neither is
/// specified, then the image maintains its intrinsic size.
///
/// Scaling the image to larger than its intrinsic size should usually be
/// avoided, since it causes the image to use more memory than necessary.
/// Instead, prefer scaling the [Canvas] transform. If the image must be scaled
/// up, the `allowUpscaling` parameter must be set to true.
///
/// The returned future can complete with an error if the image decoding has
/// failed.
///
/// ## Compatibility note on the web
///
/// When running Flutter on the web, only the CanvasKit renderer supports image
/// resizing capabilities (not the HTML renderer). So if image resizing is
/// critical to your use case, and you're deploying to the web, you should
/// build using the CanvasKit renderer. no  oo  o'o-  o<oB  oRq=xT/// Instantiates an image [Codec].
///
/// This method is a convenience wrapper around the [ImageDescriptor] API.
///
/// The [buffer] parameter is the binary image data (e.g a PNG or GIF binary
/// data). The data can be for either static or animated images. The following
/// image formats are supported: {@macro dart.ui.imageFormats}
///
/// The [buffer] will be disposed by this method once the codec has been
/// created, so the caller must relinquish ownership of the [buffer] when they
/// call this method.
///
/// The [getTargetSize] parameter, when specified, will be invoked and passed
/// the image's intrinsic size to determine the size to decode the image to.
/// The width and the height of the size it returns must be positive values
/// greater than or equal to 1, or null. It is valid to return a
/// [TargetImageSize] that specifies only one of `width` and `height` with the
/// other remaining null, in which case the omitted dimension will be scaled to
/// maintain the aspect ratio of the original dimensions. When both are null or
/// omitted, the image will be decoded at its native resolution (as will be the
/// case if the [getTargetSize] parameter is omitted).
///
/// Scaling the image to larger than its intrinsic size should usually be
/// avoided, since it causes the image to use more memory than necessary.
/// Instead, prefer scaling the [Canvas] transform.
///
/// The returned future can complete with an error if the image decoding has
/// failed.
///
/// ## Compatibility note on the web
///
/// When running Flutter on the web, only the CanvasKit renderer supports image
/// resizing capabilities (not the HTML renderer). So if image resizing is
/// critical to your use case, and you're deploying to the web, you should
/// build using the CanvasKit renderer. xux  x&x   zsz  zz  z{   
@0/// Loads a single image frame from a byte array into an [Image] object.
///
/// This is a convenience wrapper around [instantiateImageCodec]. Prefer using
/// [instantiateImageCodec] which also supports multi frame images and offers
/// better error handling. This function swallows asynchronous errors. T_  dz          I/// Convert an array of pixel values into an [Image] object.
///
/// The `pixels` parameter is the pixel data. They are packed in bytes in the
/// order described by `format`, then grouped in rows, from left to right,
/// then top to bottom.
///
/// The `rowBytes` parameter is the number of bytes consumed by each row of
/// pixels in the data buffer. If unspecified, it defaults to `width` multiplied
/// by the number of bytes per pixel in the provided `format`.
///
/// The `targetWidth` and `targetHeight` arguments specify the size of the
/// output image, in image pixels. If they are not equal to the intrinsic
/// dimensions of the image, then the image will be scaled after being decoded.
/// If the `allowUpscaling` parameter is not set to true, both dimensions will
/// be capped at the intrinsic dimensions of the image, even if only one of
/// them would have exceeded those intrinsic dimensions. If exactly one of these
/// two arguments is specified, then the aspect ratio will be maintained while
/// forcing the image to match the other given dimension. If neither is
/// specified, then the image maintains its intrinsic size.
///
/// Scaling the image to larger than its intrinsic size should usually be
/// avoided, since it causes the image to use more memory than necessary.
/// Instead, prefer scaling the [Canvas] transform. If the image must be scaled
/// up, the `allowUpscaling` parameter must be set to true. 	bm  v	{  
            
  7o7"  757A  7H7T   ؁          y     $"  3;  BJ   xhxr x|x|xx   {&2{sB/// A variant of `_futurize` that can communicate specific errors.{{{"{    d+/// Signature for [Image] lifecycle events.     GmG1/// Callback signature for [decodeImageFromList].  HH
   {8d|9/// Signature for a callback that determines the size to which an image should
/// be decoded given its intrinsic size.
///
/// See also:
///
///  * [instantiateImageCodecWithSize], which used this signature for its
///    `getTargetSize` argument.  |o|t  ||   xl-/// Signature for [Picture] lifecycle events.     ridr4/// Generic callback signature, used by [_futurize].rr rr   rπs=/// Generic callback signature, used by [_futurizeWithError].s(s( s;s>  sEsN   sVs/// Signature for a method that receives a [_Callback].
///
/// Return value should be null on success, and a string error message on
/// failure.ss tt"   t-t\/// Signature for a method that receives a [_CallbackWithError].
/// See also: [_Callbacker]tt tt        <΀-FzE6Մd`oqr /pΆ݆߆]lnoڇ܇݈e,0`dpՊEMQAmq}ʌn!%37Ў+|̎Ҏ+QxAou-F_3Ւ)Sfw6FkR_ȕɕ)BΖ7]ڗ#)q9NlЙ֚'I[\͚Κ,KLr̛қ (NuҜ'Fdhi2ab'MN˟̠LMˠ̡+1ҡ:Ƣߢ?E0=ekop-Hx|}ƦLܧbͧfڨ%)*{ 156tѪ8<= Dbխ9kЭ֮ag=ְ kU &uʳ 6Kbwóڳ9TεImxζfGҸLsȸθNgy3^gݺ,wļ;DJNOͽ ;?@Loֽ%=Um @1 @{ @ @ @ A< A A B) Bt B B C CW C C DB D D E! Ep E E E F# Fp F G GY G G G H H Hh H H IG I I J6 JQ JU Jc Jg J J J K# K6 K| K} K K L L	 Lf Lo Lp L L M M M MT MZ M M M M N NM N^ Nd N N O O O OL OR O O P P Pb P P P P Q  Q> QD Qm Qs Q Q R& R, R{ R R R R SG S S S S S T: T T T T T UG UP UQ U U V6 V< Vv V| V W W! Wm Ws W W W X) X| X X Y Y Yd Y Y Z Z Zl Zv Zw Z [ [c [i [ [ [ \H \N \ \ \ ]	 ]
 ]Y ]y ] ] ] ^# ^o ^ ^ ^ _ _ _j _u _v _ _ _ `4 `: ` ` a a aV ag am a a a b( b\ bb b b c c c cW c] c c d d$ dr dx d e eh e f f fc fk fl f f g gZ g` g g g h1 h7 h h h h h i= iY i i i i j j j[ j j j k  kO kU k k l l ln l m m mc m m m m m n nU nf n n o o+ oN oO o o o p pl p p p q. q{ q q r r r% r+ rd r r s& s1 s2 s s s s s t tc tm tn t t t u+ u< uB u u u u u v: v v v w w, w- w w w x xN xT x x x y y6 y< y y z# zP zV z z { {$ { { { { { |! |p | | | | } } }X }^ } } } } ~ ~! ~" ~k ~q ~ ~ ~ & 7 = u {    8 F L       P V     , 2    " o    0 6     * 0 y    > D      4      & , u    Z ` p v    + , }     4     " 2 8 n  
   e ~    J P      3 ~       ; ?    <   : >    $ ( u      = v       2      A       > E F    	 V      0 6      ) 6 <      L     " $ % I M [ _    $ d j   % s      9    O t } ~   + 1    G l r        h       U     I O     	  ] n     7 =    B N T d j     ! S Y    ^ j p        + / F      7 }     7 ~          k    $ q   8 @ A m s    C I      t        o u    6      < m    $ * }    P   A G   5      5 O Q R      Y o z {    Q W   = Y    k       c    O T     S T u      E o     : e Î ù   C w ħ    R Œ   P Ǝ   D Ǆ   V Ȩ  4 5 v ɸ ɹ   : U V ʄ      H ˜ ˝     ( o s ̓   4 x | } ͵    % E K [ a Χ    & n σ ϗ  5 Є Н   X o х ї ѭ      E Ҋ   f Ӂ Ӆ ӆ Ӿ    L R Ԕ  6 W ] ժ       c ֗    ` כ ן   > B C ؇ ؍ ص  # ' J o ٭ ٱ ٲ  ; x ~ ڻ    @ b ۨ ۬ ۭ   $ V r     R V W ݘ ݞ  : @ z ހ ޞ ޤ   z ߀   9       - v   ' + P u     , - x   ^     K o u   M S    _ k q     5 U     R V W     . r     D        B    , ^  	 Z ` d e   W ]    P T      ) / e k {    ' o       % \ e x     A K S f p       1 7     " & N        [ a e f       I c u         S t     2 P ~    * U        =      >         = X t     & I Q z     e o ~     +1a"A~B+hdh@rvf n			d		

7
D
F
G
w





 !L^j0467y,tP[\|b'TZou- 4NYak8 q}=DFG7jvwHTU;v$(hl%tQ_cEIW[>"A[_`D   _    !!!L!_!`!!!!""O"U"""##]####$'$S$i$$$$$$$%?%E%%%%%&&&!&&&&&''+'1'v'(((_(x())a)**/*]*a*b***+?+k+q+,,A,,,--*-8-X-f------.G.i.o..///g/z//////0"0q011Z11222f2223.3D3J33333334%4d444455`556
6666>6D6f6677S77778/8_8e888888999'9J9P9k9q9999:::: :e::::;;A;;<"<+<1<P<{<<<===e==>N>>???+?U?W?X?r??@@@^@@@@@AAAeAAABBlBBBCCCC!C%C&CbCDDD,D?DWDcD{DDE*EbEiEEEEEFFFFbFcFFFFFFG/G1G2GLGmGGGGHHHGHKHHHHI8IIJJ@JDJ}JJJJKK2KKKL3LRLsLyLLLLLLLM3MsMMNNgNNNNNO:OVO\OOOOP$P*PwPPPPQ
QQ\QqQsQtQQQQQR6RSRjRRRRRSS5SPSQSySSSTT TaTTTUU)U/UzUUUUV VVNViVnVVVVVVWW<W=WWWWWWX(X)XXXXXY'YYYZ.Z8ZqZZZ[&[2[:[B[[[y[[[[[\.\~\\\\\\]V]X]Y]|]]^^m^^^_4_O__``K````aIaab2b~bccfcccd:dddee#eHe\epeeeef$f0fNfnffffffgg`gggh)hwhhiiVibifiijKjjk7kkl
llXllm.m2mmmmmnnSnnnoo%o:oXo]oooop1pYpcppppq.q5q:q<q=q`qdqqr rOrrrs*s@sDsst+tltuu[uuuv0vzvvvwww4w8wwxxFxsxxxxy:yByyzzEzjzzzzzzz{{5{7{8{{{{{||1|m||||||||}E}}}}}~D~\~~~~mX	YCms	
SW;L59x|aM $nD`t %Bvz ^nBlv*Nt~"Hd#'^rb%&hnOZ\]!U[5;KQ=CFS=r{S5;+1zi+|40BDE*rR]I_9?2by}~$HI~_l	67z@?&'l5>9:	Pu{Y*o$rs!EK]a8}_ lr]1U"<]x./qE`a%f4@F"(LMßíó'-tĨĩ.>\]ţũDoƱƲ7tzCȌȍ#$iɭ)vʕʛ.MNˎ˽Krs̨̮K͖,{dϲfyWѠ06ҭұҲ28ӄjԴ=Պ]֪=׊&vhٜ٢>xz{`ۥ۸۹Z܀܁܍sݒݓ&ބްޱ޽#YZfcno{'Z[gABN89EMmT
+Fa|%)*=>Je!EDi-MMNZvFW[\OP\WXdFGSKX@A j%2tz~/0h!Dnrs-QPQ]	a+/0uFGS 0 4 B    :;D	Mfg=YpqFbhT134|cg	L	j	n	

)
i


$VXY()Dght^.7=W[\h|\`AeiR/mh8>"AXY!oK#=>]^*wHc#$yVx|}P  L h {        !X!!!""{"""###r##$${$$$%%/%0%[%`%%%&#&3&P&V&g&k&l&&&'9'?'''''((%(](^(((()G))))**i***+++++ +m+++,$,7,,,,---h-....l.r....///+/</A/B/\/s/t/00/0e0f0r0000001
1=1>1J11111112:2]2i22223
3)353s33344+4P4T4V4W4444444565M5e555566*6L6c666677777b77888+8@8V8i8q889B99999::):r::;;";s;y;;<<S<n<<<<==-=_=====>><>Z>>>??J?R?q???@@@B@t@|@@@@A#AqAAAB;BHBsBCCUC[CCD'DbDzDDDDEE`EuE{EEEF)F\FFFFGJGGGGHHH/HFHPHZH`HrHxHHHHII#I?I[IeIoIuIJJJMJpJJJJJJK|KKKKLLCLiLsL}LLLLMMMdMtMMMMNNN^NyNNNNOOO,OKOjO}O~OOP'PpPPPPQ+Q,QUQjQQQQQRRERcRzRRRSSRSvSSSTTTTT6TbTvT|TTTUU@UDUEUQUfUUUUVV%V:VQVVVVW(WLW~WWWWWWWWXXDX]XXXYY8YGYYYYYZ.Z2ZZ[[5[p[[[\\#\$\H\\\\]]G][]y]}]~]]^^$^(^)^w^^^^_____`
`;`<```a/aYa[a\aaaaabAbbcc/cLcccddwddde"eheeeefCffg%g)g*gngtghh>hhhhiSiWiXiiij.jSjjjjkMkkkllClclelfllllm#m$mEmmmmmnnn)nmnnnznnnno
o:olopoqo}ooop pdpep|ppppqqqHqIqdq|qqrr0r6r:r;rGrrrrrrssEsYs_sssssstt;t=t>tptttttu.u:uquru~uuuvvvv<vhv|vvvvvvww8w:w;wlwwwwwx(x4xkxlxxxxxyyyy4y`ytyzyyyyyyz/z1z2zezzzzz{{Z{f{{{|||!|w|x|||||}}A}n}r}s}}}}}~~R~~$`
X-12v$TXY;l;?@r<@At+ ?|}!"|!^_"#q2gAG.23C!4D(AE-}*.W[4~BLf]!'OXYfOU!DmB	TZ%~j7q5Tsw@aevxy8VySu(9;<y},}g*0KQP0Nv'TCW"(waSu{h/2MS39k2Px%8ev,"	'-xV§@Ñ2TZĪSŵ}ƃ"EKǘ+=PjȈȰ
1Wɓɦ~ʒIij^_'<Vu͉ͦͧGezΏΩμ 4QR
 :YmІНйк1QѩѱѾJRX\^_ҝ-|Jԙԧԭ:gmճPm։ "2Rאזת6TZ^_k~ؓأاب	مAfڮ,0Z۪4H|ܙܸܾ#$ioݸJ`ޞ,vߣ-H}0FNRS,zABw<|6<@A]|}u134gk8JE#$@A6m6Upz=hN/5\@QW!=ABx~l=AB%`l0?@134CPQh15a(v%t  ]  IdhLnrSEh: OU"p o		?	E		
7

gw}9?-y6=|${$E)8DNVis	6Dx OU>U	ZAmV6!m?P`u   t    !@!x!!"5"""""""# #b#h#l#m#$$)$]$$$$%O%r%%%%%%%%%&B&g&&&&'-'3''''''(((((((())Y)j)))***c*i*+++%+&+`+f+++,
,,,J,P,,,,,- ---/-=-u------...M...//o///0)0r000171;1a1e12 2N22333i3344)4/4x455_5556<6R666677#737977778 8q89	999j9:	:Q:::;<;B;f;l;<<V<<<==h=>>U>>?? ?j?????@@C@l@@@AAA"A(AwABB1B7BEBrBBBC4CkCCCCCD'DmDDDDEEcEEEF.FgFFFFFG-G~GGGGH%H+HyHIIcIJ JKJJK8KSKYKiKoKKL)L?LlLmLLLMMM`MMMNN`NNNNOKOOOOP#PhPPPQQ%Q&QvQQQR>RRRRRSSStSSST1TTTTUUXUUUV)VEVKVVW;WhWWWWWXHXNXXXY2Y[YaYYZZZiZZZZ[/[5[[\\Y\Z\\\]]]b]h]^^+^b^c^__Y_j_p_``i`aaZaabMbibobbcMcccd2dYd_dmddde#eBe|eeeeef0f|fg+g;gAgghhh^hhii&iCiDiij:j@jjk2kkl(l{lllllm*mvmmmnn_nenno$ooooopBp]p|p}pqqqyqrrrcrrssssttTtztttu uMuuuv2vvvwwhwwwxMxxxy4y:yzyzzez{{2{8{|| |x||}}}[}}}}~H~~~*v|6$rZ>+,s_lrIOA+wDJ'-=C<B+u{IJFK/za(y>$}$\bRH;kV>>"K^u}Fd$X6|2u,2@U+agFg'C0^oEy%p$Bq!"flia6<L5 I\s{Dbie'N~`(T]gm|Zm"(WXCy!'4"KeHt&[am´~ÈÎÝåïõ-N}Ĥ;<ŉŏ wyz	)ǆǌZ^_>?ɛ<abnʼ,U~˧/I̅̒̶!%&͈X9TUaOno{[\ѻNvw҃BbӠӦӿBCOnԟԻ$XYeH֖045״#$0k؍ؿXYeٝ,klxږ,01ۇ G܍ܑܒ-.:oݝݡݢ:;G|ޜ޼	
}"OST-Mg<^bcoFJK%&2\|06:;12>xL)ISm&ABNx!	
[}%W%YB[e!8Ps#Ed=uC|^dhi=Qj4NmnzLBf%~IO   q r ~  9=>+Ix	)K,25tu	.dH|	 	&					
:
@
D
E
Q
r


=`5;Yrx,v}~3LezPTU&'3uwx.2fj5N Ajk@aZS:@a-YZ&,zK}:V\#W]s  J T ^ e i j !!!*!W!o!!!!!""R"V"W"""#
##5#E#]#p#z#####$"$f$$$$$$$$%%%"%%%%&&*&+&7&Y&[&\&&&'1'5'['_''(?(()")R)V))*	*J****++%+i++,,,h,,,,--Q----..B.C.......//"/f/l////0E000001#1]1^1x1y1111112 2/2I2O22233F3L333344-444455,5S5z55556-6P6e6f6667E7777888?8E8x8~899T99999::>:?:::;;E;K;;<'<I<]<p<<<<= =.=N=a=u=======>F>>>>?(?r?@	@W@@A8A>AAAABB&B;BJBkBsBBBBBCCBCCCCCCCCD)D/DRDDDDDEE3EnEEEF
F1F7FnFFFFFFFFGGG2GRGtGGGGGGH;HHHHIIII\IIIIJ,JaJbJJJKKGKHKKKKL6L\LbLcL{LLLLLLMMMlMMMMN2NGNNNO,OJONOOOOOPPKPPQQSQQQRR0R\RdRRRRRRS
SGSSSTTTWT_TTTTTU-U.UUUV5VVVVVVWWW,W2WWWWWWXXXXXnXXXY.Y{YZZ4ZGZWZvZZZZZZ[[5[[[[[[\>\V\w\\]]X]r]x]^^Z^^^^___>_a_b__`:`l``````a4aEaFaqawaaaaab4bMbNbbbcc7cIcJcccddRdcdidde(e*e+eeeeef9ffffgLggghhh7hJhphwhhhhiLiPiQijjjpjjkkkhkkkkkkkl(lElFlRlllllmmm+mnmom{mmmmn?nrnnnnnno o;oXooppIpOppppppq
qqqzqqqrfrhrirrrssUsVsssst,t-tntttu/uDuGuuuv vvvv#vWvZvvvvvw)w.w5w8wwxxxhxxyy]yyyyzz2zAzzzzzzzz{{{#{%{&{i{{|,|p|||}}0}=}Q}}}}}}~~~8~<~W~Y~Z~~~KPQ/*0NRT   dart.ui W_rectIsValid_rrectIsValid_offsetIsValid_matrix4IsValid_radiusIsValid_scaleAlphaColor	BlendModeFilterQuality	StrokeCap
StrokeJoinPaintingStyleClipPaint
ColorSpace_colorSpaceToIndex_indexToColorSpaceImageByteFormatPixelFormatImage_Image
_wrapImage	FrameInfoCodec_NativeCodecinstantiateImageCodecinstantiateImageCodecFromBufferinstantiateImageCodecWithSize_getDefaultImageSizeTargetImageSizedecodeImageFromList_decodeImageFromListAsyncdecodeImageFromPixelsPathFillTypePathOperationEngineLayer_NativeEngineLayerPath_NativePathTangentPathMetricsPathMetricIterator
PathMetric_PathMeasure	BlurStyle
MaskFilter_ColorTransform_IdentityColorTransform_ClampTransform_MatrixColorTransform_getColorTransformColorFilter_ColorFilterImageFilter_MatrixImageFilter_GaussianBlurImageFilter_DilateImageFilter_ErodeImageFilter_ComposeImageFilter_ImageFilterShaderTileMode_encodeWideColorList_encodeColorList_encodePointList_encodeTwoPointsGradientImageShaderFragmentProgramFragmentShader
VertexModeVertices	PointModeClipOpCanvas_NativeCanvasPicture_NativePicturePictureRecorder_NativePictureRecorderShadowImmutableBufferImageDescriptor_NativeImageDescriptor	_futurize_futurizeWithErrorPictureRasterizationException 