From ec12665fb12306acf480a00b813ec3779d5f412b Mon Sep 17 00:00:00 2001 From: Henry Ruhs Date: Tue, 20 Jun 2023 10:57:07 +0200 Subject: [PATCH] Next (#564) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ffmpeg platform-agnostic hardware-acceleration * clear CUDA cache after swapping on low VRAM + ffmpeg cuda acceleration, clearing cache prevent cuda out-of-memory error * check torch gpu before clearing cache * torch check nvidia only * syntax error * Adjust comment * Normalize ARGS * Remove path normalization * Remove args overrides * Run test on Linux and Windows * Run test on Linux and Windows * Run test on Linux and Windows * Run test on Linux and Windows * Run test on Linux and Windows * Run test on Linux and Windows * Run test on Linux and Windows * Revert to Ubuntu test only as Windows hangs * Simplified the way to maintain aspect ratio of the preview, and maintaining aspect ratio of the miniatures * Change face and target images from contain to fit * Improve status output * Massive utilities and core refactoring * Fix sound * Fix sound part2 * Fix more * Move every UI related thing to ui.py * Refactor UI * Introduce render_video_preview() * Add preview back part1 * Add preview back part2, Introduce --video-quality for CLI * Get the preview working * Couple if minor UI fixes * Add video encoder via CLI * Change default video quality, Integrate recent directories for UI * Move temporary files to temp/{target-name} * Fix fps detection * Rename method * Introduce suggest methods for args defaults, output mode and core/threads count via postfix * Fix max_memory and output memory in progress bar too * Turns out mac has a different memory unit * Add typing to swapper * Fix FileNotFoundError while deleting temp * Updated requirements.txt for macs. 
(cherry picked from commit fd00a187718976b242ee86db2975735933aab952) * Doing bunch of renaming and typing * Just a cosmetic * Doing bunch of renaming again * Introduce execution provider to support DirectML * enhancer update * remove useless code * remove useless code * remove useless code * fix * reslove some errors in code review. * methods rename * del Unused import * recover the conditional installation for darwin! * recover the import module * del try catch in unrelate codes * fix error in argument and potential infinity loop * remove the ROCM check before face-enhancer * fix lint error * add the process for image * conditional process according to --frame-processor * Hotfix usage of --frame-processor face-swapper face-enhancer * Auto download models * typo fixed * Fix framerate and audio sync issues * Limit the video encoders according to -crf support * Limit the video encoders according to -crf support part2 * Migrate to theme based UI using customtkinter * Show full preview according to video frames total * Simplify is_image and is_video, close preview on source/target change, show preview slider on video only, fix start button error * Fix linter * Use float over int for more accurate fps * introduce a pre_check() to enhancer... 
* update * Update utilities.py * move the model_path to the method * Fix model paths * Fix linter * Fix images scaling * Update onnxruntime-silicon and monkey patch ssl for mac * Downgrade onnxruntime-silicon again * Introduce a first version of CONTRIBUTING.md * Update CONTRIBUTING.md * Add libvpx-vp9 to video encoder's choices * Update CONTRIBUTING.md * Migrate gpu flags to execution flags * Fix linter * Encode and decode execution providers for easier usage * Fix comment * Update CLI usage * Fixed wrong frame colors for preview * Introduce dynamic frame procesors * Remove unused imports * Use different structure * modified: roop/core.py , enhancer and swapper * fix link error * modified core.py ui.py frame/core.py * fix get_frame_processors_modules() * fix lint error in core.py * fix face_enhancer.py * fix enhancer.py * fix ui.py * modified: roop/ui.py * fix ui.py * Update ui.py * Fix preview to work with multiple frame processors * Remove multi processing as the performance is equal but memory consumtion better * Extract common methods to processors.frame.core * Add comments and todos * Minor fixes * Minor fixes * Limit enhancer to 1 thread but restore afterwards * Limit enhancer to 1 thread but restore afterwards * Update README and GUI demo * Implementation check for frame processors * Improve path validation * Introduce normalize output path * Fix GUI startup * Introduce pre_start() and move globals check to pre_check() * Flip the hooks and update CLI usage * Introduce bool returns for pre_check() and pre_start(), Scope for terminal output * Cast deprecated args and warn the user * Migrate to ctk.CTkImage * readme update: old keys fixed * Unused import made CI test to fail * Give gfpgan a shot * Update dependencies * Update dependencies * Update dependencies * Update dependencies * Fix ci (#498) * Use different dependencies for CI * Use different dependencies for CI * Use different dependencies for CI * Use different dependencies for CI * Use different 
dependencies for CI * Fix preview (#499) * Minor changes * Fix override of files using restore_audio() * Host the models in our huggingface space * Remove everything source face related from enhancer (#510) * Remove everything source face related from enhancer * Disable upscale for enhancer to double fps * Using futures for multi threading * Introduce predicter (#530) * Hotfix predicter * Fix square brackets in the target path (#532) * fixes the problem with square brackets in the target file path * fixes the problem with square brackets in the target file path * Ok, here is the fix without regexps * unused import * fix for ci * glob.escape fits here * Make multiple processors work with images * Fix output normalize for deprecated args * Make roop more or less type-safe (#541) * Make roop more or less type-safe * Fix ci.yml * Fix urllib type error * Rename globals in ui * Update utilities.py (#542) Updated the extraction process with the ffmpeg command that corrects the colorspace while extracting to png, and corrected the ffmpeg command, adding '-pix_fmt', 'rgb24', '-sws_flags', '+accurate_rnd+full_chroma_int', '-colorspace', '1', '-color_primaries', '1', '-color_trc', '1' '-pix_fmt rgb24', means treat the image as RGB (or RGBA) '-sws_flags +accurate_rnd+full_chroma_int', means use full color and chroma subsampling instead of 4:2:0 '-colorspace 1', '-color_primaries 1', '-color_trc 1' put the metadata color tags to the png * Use GFPGANv1.4 for enhancer * Fixing the colorspace issue when writing the mp4 from the extracted pngs (#550) * Update utilities.py Updated the extraction process with the ffmpeg command that corrects the colorspace while extracting to png, and corrected the ffmpeg command, adding '-pix_fmt', 'rgb24', '-sws_flags', '+accurate_rnd+full_chroma_int', '-colorspace', '1', '-color_primaries', '1', '-color_trc', '1' '-pix_fmt rgb24', means treat the image as RGB (or RGBA) '-sws_flags +accurate_rnd+full_chroma_int', means use full color and chroma 
subsampling instead of 4:2:0 '-colorspace 1', '-color_primaries 1', '-color_trc 1' put the metadata color tags to the png * Fixing color conversion from temp png sequence to mp4 '-sws_flags', 'spline+accurate_rnd+full_chroma_int', ' use full color and chroma subsampling -vf', 'colorspace=bt709:iall=bt601-6-625:fast=1', keep the same rec709 colorspace '-color_range', '1', '-colorspace', '1', '-color_primaries', '1', '-color_trc', '1', put the metadata color tags to the mp4 * Revert "Fixing the colorspace issue when writing the mp4 from the extracted pngs (#550)" This reverts commit cf5f27d36a15baccf505608965b4f3b55c0a983a. * Revert "Update utilities.py (#542)" This reverts commit d57279ceb653331bb04d588579eb7ed1c8e4c431. * Restore colorspace restoring * Add metadata to cli and ui * Introduce Face and Frame typing * Update CLI usage --------- Co-authored-by: Phan Tuấn Anh Co-authored-by: Antoine Buchser <10513467+AntwaneB@users.noreply.github.com> Co-authored-by: Eamonn A. Sweeney Co-authored-by: Moeblack Co-authored-by: Pozitronik Co-authored-by: Pikachu~~~ Co-authored-by: K1llM@n Co-authored-by: NickPittas <107440357+NickPittas@users.noreply.github.com> --- .flake8 | 3 +- .github/examples/{face.jpg => source.jpg} | Bin .github/workflows/ci.yml | 11 +- .gitignore | 3 +- CONTRIBUTING.md | 21 + README.md | 38 +- gui-demo.png | Bin 30096 -> 18142 bytes mypi.ini | 7 + requirements-ci.txt | 14 + requirements.txt | 6 +- roop/__init__.py | 1 - roop/analyser.py | 27 -- roop/capturer.py | 20 + roop/core.py | 442 ++++++++++----------- roop/face_analyser.py | 31 ++ roop/globals.py | 24 +- roop/metadata.py | 2 + roop/predicter.py | 25 ++ roop/processors/__init__.py | 0 roop/processors/frame/__init__.py | 0 roop/processors/frame/core.py | 56 +++ roop/processors/frame/face_enhancer.py | 75 ++++ roop/processors/frame/face_swapper.py | 86 ++++ roop/swapper.py | 96 ----- roop/typing.py | 7 + roop/ui.json | 155 ++++++++ roop/ui.py | 464 +++++++++------------- roop/utilities.py | 141 
+++++++ roop/utils.py | 72 ---- 29 files changed, 1079 insertions(+), 748 deletions(-) rename .github/examples/{face.jpg => source.jpg} (100%) create mode 100644 CONTRIBUTING.md create mode 100644 mypi.ini create mode 100644 requirements-ci.txt delete mode 100644 roop/analyser.py create mode 100644 roop/capturer.py create mode 100644 roop/face_analyser.py create mode 100644 roop/metadata.py create mode 100644 roop/predicter.py create mode 100644 roop/processors/__init__.py create mode 100644 roop/processors/frame/__init__.py create mode 100644 roop/processors/frame/core.py create mode 100644 roop/processors/frame/face_enhancer.py create mode 100644 roop/processors/frame/face_swapper.py delete mode 100644 roop/swapper.py create mode 100644 roop/typing.py create mode 100644 roop/ui.json create mode 100644 roop/utilities.py delete mode 100644 roop/utils.py diff --git a/.flake8 b/.flake8 index cc992968d..43a1b7693 100644 --- a/.flake8 +++ b/.flake8 @@ -1,2 +1,3 @@ [flake8] -select = E3, E4, F \ No newline at end of file +select = E3, E4, F +per-file-ignores = roop/core.py:E402 \ No newline at end of file diff --git a/.github/examples/face.jpg b/.github/examples/source.jpg similarity index 100% rename from .github/examples/face.jpg rename to .github/examples/source.jpg diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ef5fa6c95..0b8f4ce92 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,7 +13,9 @@ jobs: with: python-version: 3.9 - run: pip install flake8 - - run: flake8 run.py core + - run: pip install mypy + - run: flake8 run.py roop + - run: mypy --config-file mypi.ini run.py roop test: runs-on: ubuntu-latest steps: @@ -25,8 +27,7 @@ jobs: uses: actions/setup-python@v2 with: python-version: 3.9 - - run: pip install -r requirements.txt gdown - - run: gdown 13QpWFWJ37EB-nHrEOY64CEtQWY-tz7DZ - - run: ./run.py -f=.github/examples/face.jpg -t=.github/examples/target.mp4 -o=.github/examples/output.mp4 - - run: ffmpeg -i 
.github/examples/snapshot.mp4 -i .github/examples/output.mp4 -filter_complex "psnr" -f null - + - run: pip install -r requirements-ci.txt + - run: python run.py -s=.github/examples/source.jpg -t=.github/examples/target.mp4 -o=.github/examples/output.mp4 + - run: ffmpeg -i .github/examples/snapshot.mp4 -i .github/examples/output.mp4 -filter_complex psnr -f null - diff --git a/.gitignore b/.gitignore index 09916c438..e25e7ce3b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .idea +models +temp __pycache__ -*.onnx \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..7fb9cb146 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,21 @@ +## Pull Requests + +### Do + +- ...consider to fix bugs over adding features +- ...one pull request for one feature or improvement +- ...consult us about implementation details +- ...proper testing before you submit your code +- ...resolve failed CI pipelines + +### Don't + +- ...introduce fundamental changes in terms of software architecture +- ...introduce OOP - we accept functional programming only +- ...ignore given requirements or try to work around them +- ...submit code to a development branch without consulting us +- ...submit massive amount of code changes +- ...submit a proof of concept +- ...submit code that is using undocumented and private APIs +- ...solve third party issues in our project +- ...comment what your code does - use proper naming instead diff --git a/README.md b/README.md index c47fcb018..136a150e2 100644 --- a/README.md +++ b/README.md @@ -36,26 +36,32 @@ Additional command line arguments are given below: ``` options: -h, --help show this help message and exit - -f SOURCE_IMG, --face SOURCE_IMG - use this face + -s SOURCE_PATH, --source SOURCE_PATH + select an source image -t TARGET_PATH, --target TARGET_PATH - replace this face - -o OUTPUT_FILE, --output OUTPUT_FILE - save output to this file - --keep-fps maintain original fps - --keep-frames keep 
frames directory - --all-faces swap all faces in frame + select an target image or video + -o OUTPUT_PATH, --output OUTPUT_PATH + select output file or directory + --frame-processor {face_swapper,face_enhancer} [{face_swapper,face_enhancer} ...] + pipeline of frame processors + --keep-fps keep original fps + --keep-audio keep original audio + --keep-frames keep temporary frames + --many-faces process every face + --video-encoder {libx264,libx265,libvpx-vp9} + adjust output video encoder + --video-quality VIDEO_QUALITY + adjust output video quality --max-memory MAX_MEMORY - maximum amount of RAM in GB to be used - --cpu-cores CPU_CORES - number of CPU cores to use - --gpu-threads GPU_THREADS - number of threads to be use for the GPU - --gpu-vendor {apple,amd,intel,nvidia} - choice your GPU vendor + maximum amount of RAM in GB + --execution-provider {cpu,...} [{cpu,...} ...] + execution provider + --execution-threads EXECUTION_THREADS + number of execution threads + -v, --version show program's version number and exit ``` -Looking for a CLI mode? Using the -f/--face argument will make the program in cli mode. +Looking for a CLI mode? Using the -s/--source argument will make the program in cli mode. 
## Credits - [henryruhs](https://github.com/henryruhs): for being an irreplaceable contributor to the project diff --git a/gui-demo.png b/gui-demo.png index 128eaf625b6f135ec505f68b1ec93fbaff0178a9..1972b59e2a6c902c9d45d9050b8c548ab77abd8d 100644 GIT binary patch literal 18142 zcmeIa2UJtd+b4YAT5=v-NLJ95W{r}f@&-vC_=R0@Zb?;gCuJ!Jnzs+006DpD`hE%K1G{6=P3*m-n7N zZ{2MGw$3h2HbNfO?lv~g9(FFCTU1T*6d{4Xg_PWF-g?@*INvw0cd`K}+uwis_`bTA zGv(*$v-^*wq@PGgKNY{Pq@r)I|Lr~;0JslOQ+{FKo3@Vk3%o;^>)75}kCoxR33JVK zzm{|7h0cX$R>Z3|GY0yihmhk10|&%tJ%w z$aH2pDOi4ja<0^Uu(a6W)HhTy&3eN%2>?8|)}J!oJy`C>_^tT&^n+>z1P#(((J(ny zU&FwI=l}qn*^@Q{LS`5X#PC!$-dBdrvDvCOd*El39Tmm77SL8xK4!~M5>kD3ijn>+ znMNqMoQc@4cn#A%3;ZjSA~TXiykf?ey+re;GMp_a$a}>b86GemV!;-KJvgX*zWqpH z7Ib2I6hQIr?Gev@)3#TYCnx68(lcxc&bxn3IVM|-q|z?7nG{)`hNhkbT5dboTw((N zBuC+tJBB6f*Pp6}ZXvIanIvG18lotw=}xD&BIqu*nU-y_!%Q`ZBE^`z_b?c@ilKj2 z^TkL>icJOA@3@_n0%PO7{#2L_jlsNof=dj|XA8OLBaCEYG!wZoAzQjMckJoZcwg0t zKYFy(`baZHRP)s=TU%prGvo&L?r?*p^C&8Vtw&J?W>>ife%imS}J4_A; z-ugD8JXDpJm-m2O+H=Kr%3IORtz4EPCQv@pQDz*h1OR+!??pRJppiQ}?gK z6z1W{kLp;k*E2T{49PQubqk=p{~(%_;?erlebzMI^>w*9fNQFwsmEcVeU*aY;#IrbF&W!L0O6Y0|E~&&I)GwxKAAN4&h5$uO9^yRNP- zUY>X_+kDc}(i3PWL`{vP`VjpECOM+34dKl$#%E(7-$PlsbsX(9wJ#zpyu^5! 
zk{51~Erb!8g@~V`&Eh?^6s86xb zz1CcM&&UGp)U&W%R8?ib5fkP%V#P)gcQO|SiNtgas}BjYA1&3ZdL=hW^pSk*s)8%u z_om!(py+U>h=oT)#0{_B@P)p99Z7M0czC#hscE+)SVd-x}-!1$*?Yta2z;<7hy) z7#VpAhl{7Dr%juf*S5v!!de)3i)(|91`fhG&%kcmH5=fd~sQ9++GP+S~67|k{A=lHP{ z0b!%2PbNE*b|+=Ithu#@00vxZ zC8H0T_AhKds|&l8p4bo&B9xK!!spyVI}*ZGh+2jIUQ%?K{e?%n%ML?%YLsf0BIc0DZGZS@E=AHUr$9S1 z$e@IheL}$6KqeHvmqXPRTxDp^00{A@8oL~qG4w0utk7mQ5QRe++06y`6r;>wTqdXO zvLzDo76mRKXs*cnCb$B4!t5HOZ4x}u%^SxUgM}X~2@WJtQgLF}3@j2k)oEBEa)+jp7um#mkNhYzO-OjCHwkS>_{4#; z;CQSFy=0bk0CZSc_%7p9ni068hW8q?V0mKo*JpapQnk28_T;_QFsf3fg{z~M7wEsj zhcc5r00zA{U8R@ls53V`zqz0dk!Yja+8-+d-G~x?pU;Azwkn;uIU#yttB-X+Gd>mm z+|i@7A}%_~j6a|D84;g(e(-Of5a{X|3X%aA%YC;0)p9!WY}*vS6C)%|lqwO?th8;& zYmLW|eYcKMj>dE$EPpN-haVdTr>~$Fr=%@Cup;03>rFF7)P`4t*+&|;_`?I_H2u6a zlc;~`bGnQ%b_XPijkx_vmjNZ(*w43qh2<4y_!Eni^2FFw2_9nxtNli8X5}Y=1X*za zZsFME{AA;Cx>!-umFYj9hhQS3NRX%AUm_MBh2~iwsjk?!wrC_cbQF!zO4z?@SbT-_ zPquUpPC64iW22SxD0GLtXCWehji4Aq>wPiL9>rU`P7q;oV(&dg>MCiHkkm z28lvy=>5&Ec-h>oW5)ViNQskxGy%rhHHR_nfSCYDNQ|^xeMWxS|EpRV2XR6TI zk-76y4%+fZd5^TxiWskW)UrRY+lb}%?DC+BhXzum<=Y`&6ZB&$$5S~sL2dVl+Mlq@D6n~|C? 
z-h_Px#V4+Z(3)a;f&DY^q@z%MVT=+?1NoUwUpwg?v-5Bwe&)H-)y@f`AYOd#R=#t9 zWq+S>X$fwY$lHH2ePqs&c1uo)HE2C&_X;L%c;9XVqfnGbFiFvgBG6nFzbia$3%+u_ z0zR^+G>#yz_rmAPu^n@+lAik$wwzvlli41VpJ`bO&QF#Vf(`~kC8OWq89nJ!1(46H zE)+3XwzDW zRX$SlSo$?sm>EPosJq!Qhr&dMW4c||80;0 z3)ZA5=25ulhz;E=`0G&cj0i|EExz8u5-X;OQ6N=cN606&L~;$#Rkj49J%8)ZTLcMjUQc3{@gk1!js#r(AkFNS$YHV5ApG^n#3k~qI^z&a z?$5Yz$jr%Uxz^t=$j`4E6jbfw#d)wWUgxzYpoUQsK)G%kU@wvHQ0>Dn09@1f`1p!; zWGmiucXvbB)gl)jH(9i={k{N@?4w|x>1p$Xc!Tq2t`QLt38|?)lEk-cj7Ks(kAGit z0o)X~iSRu50IrjnUI4K0PoWbsGf@>4Pup5s4UQBq#dPHU{HaKRJx`)1W4Oj?6nIW= zIV#_yU}JmxVoGj{u99~c7_z^7`O?BU@ooy;-P2=eWMq^Jdiyk*Me4*PNA>h30KnGd zIF&fIW=KJ26u59t)wkVEInIral>%GlPN0VYgslq{MCo|A@npY@^6##&aanm1EdcO) z{MtBp{VfHxJ~lUOZU>7~5|EjzYD1c6%rC2`s8nBKj`i>eHcw%Yz^$pDr$OI_y*>o-d!tYj;R(>KfGycRfTlWq2didWr zZ*guu0C?Sh87(f%m8#g zHL8G-*v{dfau@$4&hiU%;r|2Cw^=GqY%?R4Woo8Gvn|0rq(G}yk5j?BI^*sg5#Th{ zMc4Ja(jpf~U5uL-ejAs?kJlEqTMj9mnsxByG$`L&4Is@*_3UjfJ)Iyy5Bl!ly$|l~ zG)QTn*8kv|X{ARz2Kb5O1w{)f2bB8SZ>9@u4pEL^_^w~+N4x@&z1f?s%xZFNpEW?h zyZK=h^nt!#pI!CFfawP1S(#``jR-!_OJ?0MVA{+E6vehnh~Qu?uol~h5yktLrrdrE zmLyB7yd!Sd-0K!h$WwFzpT~)VjN5Tl$Wl%mh}keB#eu*^aJ zLMjlOLeo8i&Td7W_RITG$&bGFq9<%;z2F*;rka5URA(23m15_)O)R%YKc`PjS{Tgv zrBx=E)~((k9l4b|rzV$%{DrO~B{tVt&+_`)RV$hc8;#-J3&>}Cp__r$g-_S{yzFix zt^BIzXCuTg4!C-v#!&E3T)*JZXrP7@IlWZ1RLU@w!z3rajghIMr)p(Gdoi+3ak=zZ z*$2fJ>!eZ)%JS-gU5rG8cT~dv`bt-%8U+Dg_Y5#H_)Y5Jmbx|4)GK|uIgv2A5qoD) z5fdY?rW>I!W~kw@xak|tahI9b!Q^U;SZl|p&k@hWW2ku))@qlN?7;T$Ade$If6|=8 z-soWvOT6cA)GQ8N*{3mHETJ%)D;R?uDiLk06$L^4F*@%;Q?H@T9gq8@Wt_v__u-58 zo^}a~P^SAPx@oKZ*KZ*$5(;|#IF8oc^P!bm5#cEaP{d8~!TW1td0Pee3-UKpVDJ|GPs2j}!bv&6>Car9?&X?R@!#q#?}^Es zx4I+NKInT5CnIt4eGjH=`5%Ez*>vl@*mXa4Fs^Sao9SJvAOGNwD_YLi8O3NMm>JKN zx4s0*2WM8$^~-^62#FjsQ=FcC@Hr*rmKSC0Vcom?kkK8vy0h>C$i}yQ0<(Q^ACuhn z_?YPeva0Oa(H{)#!#|9~Q3f`vTa^qTYhxfdM#+S&(tb$)Pci$xta3CM)l+S0*Ng3Pw`; z@IYC+aY=4}HdoQDqQPY!qpgv!8 zIRmSRL}{eO2*dfwC!uA0x#2F&)aqxNDZVx#lTtpf^#OxxiQM(C{n}z76TjG9rOO}U z!v%mt{l`6v3Hhz17Gjb-Rb3Q2Gwv^3C%I7-1w=W6cRT`c>0`3?_NH%r1`-M-46AJT 
zv-L%y*3Wu^60(}&ZgSAGi0_xNQ?J8rC2*(bUBl4o)L%*pXv5*+A zQG`wdh~~MeNhkOj5H4-3dw-=mxCGaV5E%$)flk(wRC~?ziz=bsxEQg&C{3gn7GFX& zXS6+E>guNEpKXXRg8mfibakaqyZd(lDhNE%pX0blWY{&dX3Wl(h#G23wY8sz{akp2 zjm8{74mR-!?x>{2SOf6fe6jD`H$DL1Wm)`md39Zz%)74a_jv!co!jmtT+BK@RhaSx zz~h$&5v3{a!#8Qulzj_R2rYG4a{~1b8Nc(P-eSDnS}q}2Lt}+ z1JR3r5+*4w(2?kFqCdE*(@zQEc8@JDE0ccK>n&p_iZLfbYZP_9o1wztlD6A}NXz#xLv| z>Rd8=-nQdJR>nTmuA#m-5}D3Bi|$_p&Ycvr#~fX!W!p7OxQc!wE3;^E9Nn-gV{`md z?(Tg9`C`}H2G*HaiDweipPI{m28LewdL&>_>Pr8^ccyX2DL@cqt|AqhUng}k3?MQ*Xcx_33T_a59Bobh`iTAU-hTM{OG5g z4xotZSHK4;LA5UFo6d|Po7b6>@|aDlAvj+1Pd9)-E&NNZ!I9K3xKkvfuT=f(WXs09 zV-8$ldTIKzc%spAT<_|F0SH+pKbcv^YM2ORADcH;50^IIyL6Djt-1$ zd(4BAk^-W_oRplWk|mGIrQ|B^E_VTRuRTl9;I&CPmyc*hkl%v)%XR^A@t3jAVU97X z^7W-x=m4ZD&53>pah% zO9ogME`@lgI^3KhzI%^HZb#cm0M(8^Qa3DfN%G#9DYSB_rgpdlO5GLR#+r%V$7N@N zGqdbD3nQ*v{@Kj$6Y(J2c}#;x3qz-^l1|Sie(hH_nu;1SiPDG5ZD~RYV1ha68y1Mz z&SK?D(qcOZe?;aOt^ozdkKvRS&g4z@Z(EbP+pkC`x4ddtxeoxYnn$q@gSK(stLRXy z!SI)sWBLp@lP-qhns_bZ5Hfup9yb^rU3W$}Uumwiro-2#Wca6oPkxJOvZM*vdbR5S zh0}dk?YxfmgDkvO`80V(KRk95$XOt`d#&eM71o=c(;*)@c`itGiKQH{Y4(qCo_p3= zg$Tdv4m6BuST)@rH8V}@ONJJiQJ_D&2v+^n;i&`r9j|#8Pv;2Yxj*}$&`7zG9<3$q zyw2`NAL6#X=ve^hKezzUs=4AWe}A>l(fD1NBv?v^!)$-O=d@h&l^a%K_wqWKkc8s1 z6yg9Xf^29zW|~j3ras9mUA+nM4)wsoRlU)pLdIDnDOnp8{QNgNKq2kAPQ7h_Q4X1& znAJZsYr3JHb_~B1Y(izgHwr^vIp)TRoWSp57e#M^blOnM~F1;-> zwo$k>7=bY_U`UD{`d*B(^LwA`8$|n843}Z)N@ogkO-m;!K84dz@`qkv(=)j7qhGtY z7d%r6IDH>*u7(I!uPGR%W2?Vl_e(6V{jbI`fwQJzPh2NLY_;VTElb*bFgAKU z|4A@QA$nFehgG=IAeZyZ6xn{JCXr##(!|9!b`=enC;pbOZo}w6XwNOAt#2XR=+g9& z+EOZC?KP@hl{OicOKR#ea(7dN#!_97dPcR#p*@1Xhfxao1`#=ae@sOkmYz$fWEN=Q z>Z%7tob4u9_213w%SJnXBROf;HVpC;N6Pc(Ex?TVE+eCYbPoX4x4C+BD_1Hgun*yc zQmaVuhFe{uS^~;|9GFFhmrnC{CfX*Y&Sy>I$21$I4WI*EIHxi#A859Z!bHx)DMXs# z)=CQiRpsg>qhb3}XmDx^1Gd;qR;l5^E#_|~ z>I-WK&=$Y#u7vN{$R(+!XF(_!Fwq3a5Ba*q-18kdqvf~)7>!|m8 zEhhmu;n&@FN@z8a5ivW`)Qi7K%;R_m-O}D7mP>Tl?Tt6Bq>Q46uo=iPaG)w;|F+Zf zsG}X5DLpaoe$MwverxY%D&hWc`yYWeZAELL)?ULrP@H@-PiBAd_ku(wRm0OiY}dYq 
z&z{ExN!{1UX}nh(WH~+e>Xkv^S6xrOlA0`P$Bv+CCUJ(5Nt~PLa1= z=3SfDSP_s~V5=v8@)ktxipealH1qSGWA@#`MgCv9-fwFgO59o1Ho)27Yo)JmCJ zL>q$RTGsr7%yS6Q&qPl~s1DX3UeTWqB1rZ6!@CPsLO}LB2jR5>$uK21yzsEtr=Duh z?az=iqYL|Gm#fY`Np&~;X2~Csw1;*ZH-V2KJ4UkDi#G3>M(pORJeqJC2eOY27JSyq zX9GkDt33Y4+S=+1E~9%sTc8rdob~iI(etQP)76PPhtg_0n3(_mte&A6G z2h_oO#fACiJ1FS2YPwIymL~||E7jQ6ZTw7CyQ!k!O3Yo#6Nd`>gryR^KoV1dlVzS^ zPlItPV!<09K{8l#rk$a=6qGEh{15`THEJQ!Sd#Wjmf&QGAlXy*^l6P11_xX6Y6reV zfh#Pf0)hvKoKJD&f$n?<(cM9l6yGsR(vH|fcmG5l?W|2vXcM(@-Aj~$q!I>yq}u4` zberD=k%-;TdS6;Pm(9#Jb|^Ub!Ak5;EIsMR^Yi{RxD_*Um;$E@Tz25~sU@$aWW?=M zjqH7AeMfO-x8Ki{o*Z|qMSb%r#?6rd5Mg*&${zN4TOUz=!PEZ73XP|wk5AN+oaLFG zoY#UJ*=6)e(6I3mtLU5TphUhfWh#M@9qP^Dkd%g_Y>T|!^K_&kE?mqkBS&y!rb_;# z{7Tka;6zr#);Y51X98TukjwXh0SqI^Y-R`GU2Y`shbjgs&T@riQz50B(qo zpJRDvY~n@~cMmU^ku`9#F8R?jVbgs=^%6vAS}oq8d3mF6JGNCZTtoc=U);Mz4M{~fH2 zv-}TOYp~>FM<*_xl_6Sk)Qyn4ca7y~6@E^tdC>pMXYu}l%^?VVb|U;c2JngEv=75q zyo>NN{6O7PYOev?Ef=J>sMPJrV#C*oL*el1L_O;BOpeCNI3sy+LlI~k^e{OGND#G9 zm}9c=b=Ocy*ckNTlrOZMu|S&3bb9s8r2HYLho>Jj&IkDDR<=IcaSpW9*3LvYCGZ*o zwUi#6Yhn%}jP}$-R*?wrm6qf4{6zZc(xQH>(a#T9$nEXMjPyU+QmEvT)c%d^(XY-4 z`KZCPq|DJZVCB1de}|vHuDJ(R8Spo8WXGi@+3&755o5KdS;YL(`qmPQepa6kEzqCp zJ6vD?9i!2UJ6}?(BZX|LyH5!8;^8JSTDU1zIowGqfaNpg8TeIuHQ&m=!2$2v>Kl24 zQrZZk(;XQKC9j4wQ+LE=>8nzw0yhmtY%PJ<*)GV0;EVlspS_hhkE+IB*`pxY+*P^v ztL@axe6tkts{gFK((MgiuO1qN*I_5B$!`tYQN2*|^=u0SL$p1Q;!t?GoH?yNI3v4Z zF0K?Dsj(?HW?@W=sv1i`3fq6PJY@@h5#QH7$Ynj4acG~3W5v$cx+ioTNO$oXR|I|3oe#J0x<>^r}}bU z%VKqV^FY$^!F8|wMK@`#{ofmsW^2E&D&&rI(2B$Y7}Pg$n=~?VVN&|k$U>Z`)$0x} zaAU5PFB`mauYRqy$Ln}CU~gV1$-Xr|SM)s*XqqwcL%Zrih@J6n!Xm!>RYZ9Iz<{%G z7f!{Ne&4<;1PTBCxOq8ToH!K}cP75-9aX>T+^d1rwGIS+*YH^2I8Gk7SgdyFJ?3y#E=KIT|E6X=jo(e0E_ATpL( zIW%IcPFBaKYNv(g!KU!VG38^ z5g>)|HIL13_si8EDYn*Vt8WlcpDjX~q$P*6Bi{CcV1e~j@7$uV(5)4ZbF_H+pFI?s zHhEwLPXj(BwFP4bz%ktY&d>vB;-t4Tu#j8Eqj>3b+(Jp)<&lP-XXaZLoibw@iO@1t zU?;^>naIl9i9}K|Fdhu)5$+j8dri@?6~B@H`Q*ogK>%?n@CxSPGz=ct)xi000KYox zRcqAi`^xOI=5WgalO0jXBi{SZ1<4d1AphB&(@jELke~NzbgZpMq*dHSOJ<~bSD(K3 
zricsDe%S`tq!xZl5h~!c%+HJZNmXVwLi$-=w+h2TGJnJRmknO)YeeMI31yi^C5UP;t5O6vFL;(C!YB{gN`#L#?bZ z(Ey?^zP|A}*p?Z8xRaS+{Aud?ehoK&z~(NwWqO&QF8P94GtgMSbSevxX<=CTk?OO9 z>uv(Kj6*Wh`rX1V+Y+39@GP%c(>N`C8J@*5GD&9FcfykQ4)fmmtr?~Yi znNAXo3MV<7(6#KkWLR>$(hS)ms*_s2|N@7jn}x}cR-->{OJ zO%^Z0a{{#1zzpI`4rOFVcEX@!sodva6ZMs6=AWiU9B<#)uQA^s+rR2(3U^b__{nC& zoY=Frkj5Yirgg(ohPFJxsX6@m8N>7IT%b~tx$YivDLA1d;t?1*GsR@5=5P0j#=9va zYPb6S)(!mQx+cAcLMEG+yd=v%-4djfjCK7j&6`tdk9fakG4*1%A#!|qP~$#sv4tFX zx%-!3(cmS=*HDMCi!zvDpin+=6ApPoh?q31f))b_QwhoJ;eUentbE=LI(x%@nv(|;u%+5 zGEhA55Z$SW5ZkNDfZSV7v{6G_l%AwJ?2(4f?fb$q?VXIG!0#^uyuMJD0Qmk*wA;?1 zkgYzv4EcwS_g``lm#-fHe#a=OuK;b2gWOwRh6Ow>;jm*M)!FGUjC+D5&9TX zAELl^OH6Zw-`)G&j)Q>}xf_=s;M?)T8+TY{kuCqd%%WK&r1!B=54Iw0j^i@Xd|pS& zy5*pWWy+AN%{!EQb7|xH8H3~J@|4!FfU8B!Pu#2Q@1X;1>meoMr8=BH>n)lUDCrYZ zZN;LD91F|vVw8mKe^UCW)a`Ok#hX~3GfN9f1)@7^+lfgV z0u}Ze76C0fc#rL%sGxwtZ~5D_c`gcQGf#nkoqw z`ZvF%YYY{Cx^1xOxeX0kMYruMmM+FcQhxX*?N;R|$K>i849~C|~kMdVc8+*Ha^F`|l*H zOs2RLYc>s&I>ibh!(19wRj*-OFHWaQhPN^-it!w3eRqnVh1Et=XEfTApDw&hAAMjn zs)hNV5n4nsNzMxVmi=<_+Q8lX?|JVbbK7=^rj6cY*FAB977i&v(w|D$6(b)(E2c<8 zr`BP_0%+ylJX~l}x+LyOyAv*WmtHVnUkwc1CWEbzAIl%!^7N0nl(1=5D42QjOQ_e= z;)B!r+!khaIh!4)@IG~kS>3(R&Q8QkV|E(h&U5O{+xPwgW>^(%V>hNEG=i@-0WRoy z2F(_Z?RE6N z_4Wu4P8!-zrY*y2a>&Wg_gQoky?jA^_i3fYx45Vi~D8STJf7x#lejj{d3r&Gc1PzM2j0i z&{VCuu*Pq?T<$@ZQfc8dZP7-@l3|XS`QLr*-CCR1HuMNg9QE5R_l@#WTGc~UZ1faM zU{W4G#x>TNr|P*Pwa>@IlZM0{M;thjgFYY8sT4nOiJNNf95~F* zF-siACtKvzpmp6R+sHeqH}Ilmjj*Q4oK5JI-7LJnDhljyKal)(pG|ICDY;vtC*IQA zP8$(DLl@#`Jt#0TWO(ca%|t`$+*9lJ>~nu^Ejz>={B{a;XT_eH#xWK5$h0BMcHd9d z*@t3*>!qyHZ4i%R?v3e_gzJ(Rst1(raQQzIcZPT-q<55Z(uD#b^Px{tcISEPe6*7r zqPg*+1U8;awfJt#$M&w|X(K9qn(Fsv*$sUUH~J@b&NuKDk>)n{er(%%n`w9#Kj*&; z&ry-TA;ih@b>Qv~<@*9~V)o}@`TB(W<+h?`VqVgGyr*yw#aMR*qTJm~d#=F_|vje%^jHt}B zcS^GQ<&eD}gXPqrOMpFr8oT0fV-)tK9rEWozdSjh(0$4mz*ueHAKwMp61 zwmP)rK4D-8lW4r79SBh|KhIbwKY_KBljoF}`1d0H**Mw1p0;d=ykTxZb{KCYBeyfn z65G0YI;`4ka?6)M$GInOH^A1E1)%Mj9+7AV=IGW+L`+*^EVO)&sIfrIw=Q?XGi;$yFT0)P9J!pZAcekj 
z7P2d_$n0~*d@X8APBOnWCS<>@fu0xp&I+kE-8&$)g?wzTu{+v1>;s`9+gLNh6`Vw@ zP6$c!F1bDPKnam3o~^-FO|}D8gZRaB#RZnI30tJKwZje>|o#y+3z&L_c%yQ}woFnXR_$&N69Rd84Fe zdq2VLVE*!;Wn|lC68Oup(MI(4!-ALuT1HEMM<3ZgKbq!C`-2370)|7ra8X-rC?OUk zQdya++ZSeITw=}~grk>(&fV9ajV~22Svx8zD?_l zo}E>-G~5HT5sp+*hQ8C5tqcYSu5_0A&ImBQFJ@v`ZhcoH=CYfL_G8%Y$=65mBg>bEv(Hd#o4O&{cdkj7* zt-i{UVonRasTw(egr$z181Nm94Cs?l-e|GkBfnvM&()lM|2#Zv1TMgprskQOW}c8X z4*?eU#{ud@RZJ^=)7mWE1-}~Z>ws@*6qx@?i!!^;%{vae^VG`|0J!q-@Am@mN7qga zr-|(wjA5(X#KP`@JQbNc7;%@(p0+Xg=%!V+*ZH_yU4YO%D_!|ZI2s7QhJHsxVaGT0 zv$%-53JtjdmiU^K+(@8wcKt#)Bt$56Y_zqtc>ym^qgC}4lsiu0h+(mAViJEV#&j?y zTr!j9(ru^_2+v*EP>7$=QDAEuZBl+R*?Vw&7vvG`rox+d7i6#+^QHOw4a^d<$$l;3 ziy5=9xo;3gYbRcxEu4%+GMq-_h`YWVf#BsI%%nV=@1%KBew59(Pw#X5E)pTtQj>EV z7KPC7{PX8e=@(zghv7!4=bqnitr>jivj^cDG`Zqsd^t^D?&6tAgR#z;_mF$M=Rx;C zja`(46*0hqn~oEjH&mhHmNIg=AgpDhd&X;G!WIaX#v2#dlHod;jOH-e#lBxx0W5 zak&*8-On5qX#rWs1)?UFZE*x^25P{76k#UCd)hgH!m#A+NCoqlx=hkY06z5DZz8)l zi4m)AMrW5Ob#Fk*a6>G=3qI2#KFoU&;A&OvP-?&R7J;E`E?bF2(~btJRpEH400Z|n zQXIN@be~bGFt6nl9k9RbE5(*LTk#Dsr_V^@?T9!xGyqUo^bl_CGL!|w#nC(u74}9_-_BmNt{NaXK<9wj<9TN zDrB0u3bRv^%1hBMdr~HOEEqqSz<1wdl~m|}s#)Y5``l_5I_d@H>-=hM!$LD!LO zH)oj`k9VE`o{R8znEK5^3XqUdr-J1I^IJmwB`U)UJAx)2RSD-%oTKyHoI4+2Ai~sw p$w@wgf86x)^y}>Xc8_zah^ukiaq|vSl;v1}nu?Zksp6ZE{{!ICCcyvz literal 30096 zcmeFZbx>VF)F+6$6Wk@ZyAvFO1}C@&clY4#Zo!k_?(X_8_EvSh>UXhjT;i;z;1ik`wsxA05?=i@`>zN9&$mML3 zs8V2vr;(D|2CBGIoWBPx=0InAFlgcz+(Q-UWv(*2&{a)N_Tfsl9H0842&JylKhE~l^(j# z&0dtvtdx|L&I| zqJQr|=??4+1425@{@gKxX(;$<{BUZJT#Np3r>yRBvwt%`(Ho0ah)$p%d5B(!SlavF z)RH{pw|9yI7oL#ps^l^1JT%w@Q{K?O__w_tOt*bIX`IPEM%+a0gWgf0sJ+KT%68DB zVaSm}3IkUA91=bFZYzI?e$2y$aR){TASFeC?;=f)_EZ^56d z^UtzKw!o3RzKn@$`?f`5#}nGb^ODF)#jFQ#dH5e()&~JuRKK8r*h9cZ2*LgPzhh}L zLg9Na^Zn*bO-+RfR_a+i_;R!e!<=XmU;u$2G_5hoRzg-SzhT$?+Zpyl6}_#Jqmh+g+DN<9A)mxucmE1 z@5rKTE_JTECF1>x3GH{K&MYkw!T%@79#fyJ#^JuOq)CdBUp@Rg*toiu_=G#mXJBZ^ zTVd{s1R{J~&0c`m%1=J(z3WIz2AWJ(|2aK8o8oZuhhim1%#7o|5!ihpZV9@KB@9eV z6h3t}3VT<8bF1auB=d)Hu>1C10bD3?P@Fz>OTQF&w%r_7B4T;S#rGsv?^l7228i2^ 
zt?k~Wksl+#xU{T`K!o>u!SY@Isgv_7bK z2vn)4HVu%qOOAqxu)pM2J_JvNplL>rwvIagKiU!TobmBy4GFQ*knz~WxrYCt*x%| z&+a4ZI*W%M*0bv)I|)IuuZO@$HLnfEtfQzVVPAT$ySlp1EYFVtsrDaUf-YB{TlAGI zlmL|M6mh@l&_(j|SKFlHQq_6ZEcC4(b813wurm%>E*6I9URWAu8Lb)(u(Axr;kf1D4@*Du%B&eDX6*VKA3z6 zZJKuAd_DS4-NI`Fm@K`gQWkHhwWP`!7gsTLrwMTfTFLtQCY%xO6GQh!I&NHP=y&mB7CL^g=1<8|3=xm(N4HPjwLLyHC|p!lm>bGiKI|o1a|Nr@ zXxJ+eHa2oMB^;XrQ+1DY|ic&CNcKeHeAf z+Mpp_uzq$J2hC?Ni^jvE0^te@fx=0>3%hnNqIA1Y|dvX%9i7%~I7Mm*QfTzPz3u@z(2w$JL{+9|b!*yORpv z%MM=mNEWvZ1m9tfOUq%N&y#rBR&^j9hX|>r|4H}51Epy7*0Y;`H>2!)1CHKn-i=OT zzTD4YF{34D-{Z~ZBhm9!#gmr>J=Z&|1Hc3||5P;9w6xzN=BC#(s)45)`htFcKAP#! zUfs@57oA2&G_OC111-z~**0|atCQKRofFwI-w{GtL&U~HJuc7hr-~m3^RF7iqa(*f zvl&@+2MY;~sl9)dH^wEs3VQB?Sg$p8hDVgfFcarCY3I~vix1iKDy*(YW7NUr6&${^ za_m_i_#(CfJ5=1v&x{aCOGiq}G1q*K0?!__7zj14$Epx@b#)n>mS_$gSJH;tr)OuP znq;JCkfdIFnC=(rEBC91H*>3133MFpN~O%*5uBX4T;~C)epgyfhnCeAR!UF?Ws%0( zT<3nFv7mX6aAqwy8~e^+!75!be|HY2Z}vH*k^RRMJoLetH|b6nvs_vC=LN-Xjswua zEv;J4!?|QTJFu;0)RMbS{A`Nw7r#AcODBy-vr<0^$gg<)LVK2?LMgPY+Uw&?a%xUj z{p3fG|F-NkF)mBKy3vW2&Koyx<-+GOsai*PUwfmi8}f^ua@Ijgfh(PQC=?G`n7zP7Ov zyLx!x{^@F%jFy2R>c0E!6@p2>Ev4H3w%kuRvXe5jAC-vbayuCPt~2c7R@waBr6!1m zzx?z~!fFH5+WxRE{tLp)jWX4aP{w#!PQdrb-*>^*qqxHpA{Zp8gQ3l-1w{HsLxzZNWa5y+P55w#z24*i$ zJ{Y@Y`-$sqxuP$Z2BcdCUp4%&V^Q7a8{OaH1T6JO&q3MBIl!Wa(YJk>_-+YvdD^Zo z5QZ|y+@F2CY~tfv8Ik3p$Bj#iBDC(f{@rgAAXc>AQt2g}_gOEGIru-w7>LY%(M`_} z?2c|4`Wn^bP{!oN&UlR_o0<9b^`J84pC9@eNX(Tl7Y1Eq3f&*-XnT@Q!oqfJeG&}+)!t7n!je`;kBedH zxvrP_iOgW#QbGb7VgU>h-tJow5j!-4jrE!~2w=2Xnb{;kL){i0MeK}T9S~4(Z}OPt zGtwJ!q$!7Ya(p`&HQ}SHuAu!?2YPl#Y(jSQ|`q`1pX#n z9Q+x4t>yU=Dwx2jW~2|CWVpk_683xe-Qa(oCaTUq>6pjiKXxe*9^_sE2zEOJ;S z8@>KZ=iK&fm`GiEc|qkoTj$$lfu&!H4lE!anwkqLYO;=8el2_t)O&ZS9WGUa?tu-2AU{ENoC zg6Ulh9}1HI`dPtmM&B?4e>9N5muOh&35DzWXP<|SIm^UDHgfge3t{0yE@>m~bMwO) za(uMXeE_wq7HTE#$4$sB z6RH-G&xzykFx{I8p33H^?W@o6ih2ncU~=N}-{#*n z|DHR#ID9!1rezoD68(dYhxRIlEd%JpC@XyV`EprEG08k)`4&|8E)h11x{kC|2q{0r zgrsURN>5=|8;=RdxZ22$yHQNmkvb{sEO3Y1C?O#!Vh(~)T}ESrO+u!E=C5^I&9y08 
zvZ^cx`I)*jQvheIorIJE!oq?o-cL`?umKoaws|7#Og%Dix^7=|nA;W^DFSQO>;cY{ zIj9Tz0s3yP#9LxV<&atlClVTN7Hz*%xEHiCg!@a78qHNqoQX_9k!IFoB?v2*_HF!R zbSsz*EgU*vy11*%8UAV95G-zG^{29;v^4A))V-w%r?NCHu-{KH%}Z`hEzMayVpjL! z?<--!g112ei`%suJ8<*ZCPPL9XWv|3HpAt}ufG9!74hxglQ`}vv?s?f zzWgdWz9p3oiw~z{2@;7Ik&KRtvoHXzt2NQG8;5x9bkfUV$_V7+iPkwHBy8jB$VDE#+fakB0OUw(N=H>HNGidQZQv2IIg%ZM zi^JAphaoC3L=1l&s?s8!53ili&cU$@*a#sH!59JX#YgBYPDOk7-C z0CtaIqEi8N++4R?T2G!Is*%I~Th=B5gI{b`%&sTtT5OqM0i}uEc-tB7&YWd^a;am) zr#h-YQkuzuH7e_2YJ_S1yNE)3&4w>YRt{f)LSS}QjvKN(b*L2hL?~rOne&&T)(WO5 zs$CpcwDPyP^6++Yb+kbHgPat1E)_zgVsG*mE-KhnsW7GZA)*<8j2<2@e20I(;XwxS z*?MkyJjf2}U6MQNW;PUf_iL+9;2!?ePv@+Z=C@;)I}vDSOON5sNC*qgzkcUU|Kk#J znfle!9!r8*O|olp*9T>SQnMPTqQ1HH02lYkX{0$P^a2GpFL6xGd;ycy#EyE>@>?D> z!Z-Izg!9BzeHk7XOQ{=iJKc~?F}!-*iStePE%vB!Dm$sY0ltMPdgA!a1ip3t_t5@5 z7zndDTAnAUYbh%Xdxf^&oAIH~ruF8Kb&!4W)fCu-+-2(HhEwv_<(kmC!5%+$WfSIo zZ2?}sFZG9A&lk|!z;LoAeGI`4+FD-fgM;GLi&>7;Ggbjvb8`*PEArG-4PrV0d;=y9 zSXg@hSH-v|=9rd}igvzuS<^Q?{==(04Y`bv-)n7=ozPv5y3B%JUb@I>K(nV%ty&>= zaS7{z+M~bG8K)dIZD?1kX(JR^{rv=o)f9~4?H3b5*=~%b>bUH~NRAq%`cdPG!+tC0{iC#A9i=38=hcZpBwPZ|aUt zetiCPV!soP6#l+-0X-W)cic<~YNrkMg(3&_;?2QbKKoLCrjaCm;J{H=S5HVv61TOr zeW3m&NZfTjR{r2BYQ_J@n>FQ}T)i>)uqg$de{;#2xoBNDzy5$aQOJ@4wpM?BiFfV* z85kvJx|o^ncv_jx#yiww?U^kK21~`QtQv@FlrZmAX-m3`(;(wn7;`PnEzd@7T_&Wc zjjWzJ0ACsBoMxo1<^uK#auuPWOx{B4V!uEz^5+kFp?yCgw6X_L?nTjBg1y$Sf#rNu zn~-3RyysohGus$^b}6$QlzwtSL7VB%hk7h{k%(9C%=~+O%l|7NK`|8_{}MpBy|m!w z2k_}PuI{5_#joi;oOX#7W!np_gCwo85jw^jBPCu_#tJ9sI|Nn7tl76u;-cky?nH_K zYnCL?g+q+-o7S#3oQ6qW0B*7n&1nU$b+r13gD<4+BTFWg%Q}mYt9%$a~YC^yS%Z!2x*xh8MltRIZaz z8)75+UiuFa8=V|?{nnptj}y}85H&i-@|*V@&ll3KB4N|Rm4~jEE)QP2_x}`0REkLs zjn^@ajWLcyVqk>}t_!6SA76jlM@R9>;2+=cyA5^OHED6}k*fF6Pfq!g^6nGmi|6#8 zG#;UeBiTmwC8J+hl%W{Sl*iuVpK~AZ0JidF`_`*{&wOuRb#jaMtu;#j5wd14{j$Dr zg1OF_s7moE@mRae?;yL=fQcEq@)pl?4&13J8ty5%nxfdSF{-nbJ2=*kw(t{*_*HFJ zV!ydi zGwSd$21}y5axXc%zPTH0K2Yj13WVF$NSVEijg6J1t=6>xOx8-7RkSVti)O8(@9@<2Ke_Mj>wh-v1#1a8Yx?l( 
z=6NMB145JuGA~Z4;H-o$mBFst`M_kUJs5$7_^{4}#i`G^A{JzaSO%tUzvqV27Vj>1 zl1;{72JYfmbiepM9hZ5%7x;;p%6l|=egEjGDuGh-l4bFoFjS)D8sq-J&uxt|_|7bp zW$@3C%$SL_&u(tahWTl775ufkzPb_={%$$FjI^@@UF}XkY%j<$@WCMyLCpY{)^&-^ zc78o_jN)1y;fQyPfJ1~E3~{fyh?Uc`IsMDr$-$htQSFF$f*uF?RUn-v0 zOZOd!FcWHOXsR!1%q1Y@l`kyHAlm;tW0kah6sM!u3lSTH0WTo1vbo9rxyr>;sOb1R z=aC4t@L7=&xb8cI!@p@Qq7U#h@#@sp0oLd(ukz2AA29Klu~nHIhvff(#m)Z;qI>GEcyT{a896GgOLvSxUkZ8b0gmqvgJRUWH9@Q9v5~*!XPD z;DpuIpdryJVyH4{^Qghw=%(#aojj=DSGXsu&F4l z`ES!rdl5XpbKhopM2Om#PyA9Lz6%|z(;?_RY?02=$vDYFS!YaBqXt96Cp=eV?YviM z03<`#6Sn$RkpLXn&*5GT1;V~34~lnB%2^nK=#__mC4l?MKax&o|KVQe?x#EHq=>2M z5;&+gPFoGBCZzN))Vs8j?{kZ^x^eo@Qe{SsIMI_wf67TAZH?rrb+bys5X?Tk z!R0IhT3u8j)VWVR>Jj4Xd3P=0R}o+E-N1?DwET=O1#L5?>MH=Ng(yMa z-zQvI1B3>u;3I%Jm00)50L*R0?=i_Y0*|J-1Iist9C-Ayyb78SI*=N~Fn#!ASG_YC z)%@r-Y8-d_87EoFMRdZ?>#`^9SqJpJH@E(vo`VP1;dk(CDJ59tAA@qkgY8?5q6hBM zq;@a2)yl*Lzk>FhX79Lp_JfoTY%FeIvOJz&7|8M;7k|Czz~TySYDs*Jy`hx{0{bqX z&8p6NmT)rc47;R`$ltZNt9aAwM|~*`!@vkzm{T$*VU)q%F)y;i%^Zk;&1>4c2$5_- z%L!w#6goLTGFb2i~)!Hd4ivG~JSUJLZUjWO1*(Vj(JIexh!OjMDlxLX7FH>HV3jKJe- z=#ItppS<=wr_Q)_i#nq&Zi=qA-MehVI&Z&EjOpMq>E%6@f5PW8hG);_c6@u{iC4$1 z7fqR1oe(!`y9^<4s6zi5><_-vlDRiIbw z+rFlOE$G~y$hsVH&9V{h;z)|KOomOn|i!~KrP7pxbsf| zS?sw;%|>^8$A&C~yqc++*GV7y7Iy*o;N5jOg!+8*28 zQs$y-frYB*<5g1}cHz$!9CLQ_X!0E;mOJ3EU(j7`wK>22;USaP>HS5dZ}GLi|18)1 z?@-{9=&CpVSC5OEIwty&<}Kf!GCrqe*W}NY=vOyZmY0!dl^y;KK^PcyOl*aGqEu0a zowu|X4yPM$k;`Q>i`inDcaOe2gYr@T(A0hz}o%MI}#bMMPJUcC>{`OsZN_OXj z)BfPNertGx(-Lc`Ej<6?fl8|b%-dz?-Qjq&H!I~)^ST4LI%y@_d-ke5ok!>!6%*F`z@A87c15A5W8UvaRTwVrODSAr?S_t3l zcRY9J2q~W)Wd!Kv6|%Y2)p5^8Z&8OP*d9l$-Rq?7D9V{*a(pPqZGHnr(_$y-oXGuZ zGt1}g*RvlChk@n^gz5I}^MHuhR?;x69U~XT`ybbkKhJMdzQau&QJ01;90Cul1>kWN z2Sj3+gaTC)>_P*H(6-I)d z_iko{8B?JzTT;mVwD*W@rm|3D{j|ix%csV4xRYH|Vnqp+MtGGFO6csyw<|lKm*^OV z|1%e$Zc-HUnQPU%{3tVjO6fagg@YcGs2wD-v+kqXBW4_&bX@`YTD;Kc zp^t%ANBzS@A>)Gz&}{&QYgG-Oo)fQ#y<~*}7kp_BEe@E|~ljpFTX7Nz$QH&@lP3e+us^GgEzV zSVAZ;iiTdCHRU*t2D9dcB$zdn1IvPici#ewglShs_igO8NrkY#0n-ACaau`Y1nR`A 
z>jBjuHm7T70!hJD7@xYO1HWNa)4Z)?01~DxZcT#sU85)7$gb1)b!_W1)H2Hlo;tnK zn{xwb`#OV&14$hum; zI)hAcC;km2qosr?zIzFej{0E9y-!9h*Db;UA_QUib|vjAmXHP3cMYLEnWKfvy#XiR zoH8ePm{xWF@(G0181e0nIuV2jjCq##=P99Rv6Sz?J8)8#hk1M#$Dd-tm04Cs;dPpF z`sc0aDV(uKTMR^GbQ>&Y@2|ORTq5ZnwxPVUYPhvnsB7kiiHqr7<4S5Z?nQlZJ|@(R zJJ_*I)YkvB)G_JN1kPjJ^iA?%Ae0-`p1jxe|A{)zvLjrpgyKS&ujk9XrR#`WPU)(~ zSBN#rL%wHIM&Vl+k%NKx9S4)&jZaolZ(w75tN+)7&NL# z8;ef~{A{L~RviIPwmVk4p;1M_-j33JNbc_UvR+uDL=VcF3MTqMq%>^`rZbAqKXkP5 z2Ue5l4ah1D|Ciy4RMg3bsH*c!_}1lvEajK$g}ZOgdfjaW z+8-xKzY6q#Q(jt}u^pr8MlLHRlk?ggCm4e!RO)Pz`CueMOzuo&tSnki|Z>Q{Zz zG@GEPJ|&FUofQliIoPDm>M$IM2JidmY= zC8;zK#B*%DF%&>nIa`ScH%ZRKB$1C< z80&g^71C}%sn;262tk(;L z!DkL2YG(R$*ejhqLF*N59XSat67wmAw9mzA7};MGv=E7`PmX>icP_v$13i)m()+XK zb#ii3q}vt-O6!&$1ALllTwoL<_lZnii~fce;G31O>}SE?wWGa;g182ploo}mWb?%e z%A@@jbRQN49j?>5J*4`V4u6PdM|h|b*mdfhpZyjX5Mzv5Zy}!gI#tkvU07_7C%W<~ zgx>sb8*xqBxB8a`xc2)Er1?v6b3~1>^oAq#DcO-?HX6e6@~HH2gI%I`MoeMy zTStzq>p;aEHKuQ#C~fF+=+AT+-4(b!L>qoyOVbbU8}&r`!o$rmFN3@dCsxC9(z_D? zwrz$Fd-RCa=vixppQR5Y#PhwaV#zZl^6-0#1$Er{Jl1>8QxM8To{hqHD(h7#$sw8d zR4cPFm&A)JBW?dxAGDdM2n8Bc)%P@*IuwpM&C7y+$9fq%Ax7NoYkkUGIGCB9ACQ3B z`$}so!fRF%R2@Z&*3~xzgyV^hP{8=;_&&nmkAx2xAtFcRe+WPL-xA>eKcX7{KUW`c z&i@z!^gpGmT@==YtJX8Y?7nD8B^|&PXbZBTLiPQ%kJ@64fAT(&qXtt(@NmPI9JjEf*ZtufOx#DonxC0xqb(13& zRkNWveCN^BEdqOuT8Y~wj2z2CI(?0%1fPMN4$K*^_@1bc_?$jZP_rA;yaV~ksdumj zerJk5p%3`XTQ!}A@B$RvH_WW1rT@$-Dz50oOyR+?IY4aU zqLpNjAbq~3yG4S^X>^b$Zykf^22@kxg4PN*c-3&*2U%Ld!WRT$ddn>)X4`a1-%)k4dq~&A63LVFL^3-*JO!|+4J@G z6b@H}SaYgQ__+^6=rc9kEM6 zd7?fd)(vy{p2Xn!rvAZuvm&!0>=@5C@a)A)Qc4i9F>Sg9gcxP`LZnDwXmT!7HLU*z zksa{V%@yYHIsSo@X#Y7)t}Kwx9QN3@0#3D;Jw8RpN&UhBF`vFJqn_>|9mF9_+vMXY zMfW8_TgXa&L_t^KsnBKRKD=7(yKvOa(NK#L)rBpR{VT553Oh8McOVr__yhlnMTx+HzFF$F3NcYzy2OD_8|l(D0`TxgjN6vDwo>BqH&B=R1-^N9d3tI4&*mr8Cwo_khdU z;l;wE-?nkOofeGp1B4B(&H6ff;OG^k@;mbqPvS8em6x{b{RMjIV$ z4pjVN@RoqCC+A>xtnjy9|IYx^*r`a-X@!rbA*_ zlAF(T`QXQ?dBrU&hkQ*t zZbpPaESl?&bd1&Qi35-u;BE3IBgOj>-7Q>(yF0MhUEL*~Lm@pv#wfMIcpZszq;18i 
zD+aH}XHsm0i~Z$O-6x_0Y-V*iA3u*>Q%`J1(l^EUt%NmVyUv(KuoTVD)nAD0W-f7Jy6O#u$!29Jy` zt7&CGVq_uD4=(8?QiYgW2hNb{2h_gd8rK`F+?-XlfbbtyQ7QqN+;-fR+H=W$G%>fF z$$umvuTn56Rc-`6D|ad#iz1D5{bNDW`n0XUm)}>u^zA-;_=p;*ti9|r0@uvUvTTUx zF)4+>s*f_5B#eE?jc^mZ50`T(WJR1=Z1|*jP;Ef$F197dm1Y;$WH9vojS^eQZn|b@ zmNaImrP&Je=m&`}m^=_TIFrnJY)w)KW`fc`|Dyec_|%f-WA&vUIayX-!OkC!@{54I z*>#%Y%QgdK3m`ZP+yMXP#+XAoVHtl@u5U_+23Lq!}RL*bI$dQ&aDvI zG3{pl6K?|3*JuTvMzPnSRnnN@Vu$+Rsj#Pur|4WcTifEN=)UX>d?3Od{`*u{{r5*v z$6oK-H%#!QcZN>!MBb*KzpN!J$vFl2mz%DoBNWIuuf4qXtW5)OEzzj}G#A~4guk51A^bcp` z`M$MiwVQUjGgocmR|!1m7xX@HpwNkK7d;J%dAJ$LA@~#ST#-*eeD61oSQN;ok(sH` z?go9hc_)0J{~%I#$V|&lo8MBwDJ!xo{l4_@!`2t}B29O_%ANSM-AKs1P4vE^YG+6? zKl6+`bYwQ1Dd^?V<$U`iX<1EfbDpm4d^#*A6c5=eCrX zl`F5l!(~#ID|L_ssDG$)c?P)DWLZ_)oPZZIow)0@SZEj+;Iw>6o#+u?8PV_<$~p_4 zwIdQ;Sga6V1&t+fl6+qAM&q)#4R4)nT^epV7T*^ry!QpF8C77jOJRtoA0`UCD*Q0w znCuC>*KV=qpP4aG@W5i{tTx_+7ghetd(AWSlu7a-I%i%r9>+=yU7<58IVpiuQdMC{{jx*iYep_{;p8G8|>9VgN#w|B!gIMoP zRMPJ#+Kh%avEgY;r^$O81!L7C(BsMA(Q5e#hS}0T#Z!xh;M8AgNFWeHKU;KoJl-MB zx3FGAV$cS_X8Pn45ha`t>mfAuug=8Rf}wV${tCwZTU$w3&VDrQ1<{OW6aT6=fz4}l zO#14tHRC6I%@5;loS+3Dkz3c^{cqbOMd>J-DRmTzEW z4E=N3+DgHqT*I~;O9rwPRzT4wn38a7xGE>xQ<#b-KXp5wOwD}kGG5ZM;o##M1UL2B z?=MiFt3@Rw{A}J{nAiRYAN1#Dgb4}|&RGvHsYq^ISpH^lAg-}!e@{16Pi@-$I?|Jz zAm{3gODaQD`Mgwr4+N=s4=9d}El9fA?k*oxVpcukO!cI;$91%3;%nkGI*7=rE$_b! 
z*H6wYj%%L=G|-&gYbhc2`)^tYD>)OnzW5==xn&QRpF)aHuKo5ci%8&wRknpvDwFzE zk`b00U<9D*E;W5(Hna5lWlC$F&9esb=2!%r@qyZxYP!42AcHTG6`;Wvr}~Qv>b_(( z(wF#&GsGLb|8-^oz%5qX)luLhwzI9yZ?`EqZyZrC-P)Rq7I7dT<0as=GaD_Wt5_AS zJ1!HYo!!Tx9X|#?+hD+fDzI^~(h3eEh8!_$EeTueY;tqSewzK+i<{qb}W-Y-F=BDy6n z`}Cj5{Yt)rMt9ln=?7x(ak2edDfhqbh^ZzDkL)x>K4W$iwzb5|Yc2%*KauT|)qDa&hhY_nGV9)ArWiFH7rpji~cEmkGv3eCv{*i#e$%atV+%T~<=7 zoj@C+JC7W1fsY*%t#{=w!Z|>1+$7tz0LKC9#N6(jvlsK60HEl@)l$HiP2(Mjvzfs_de@E4Vln@*?rNs& zRELX~15l?T>)x*m96iaUaQBRXnfMXA_MX`M?T@BxJFRMOx!3AOW6jeG!r}p6M%$vW z=GzXhRFyk^9>+qVL(k`M?v1j1oI%e6Y8M;Kf8oTz9FtfedIUCurJbgGfN>P`Wr6i9 zonhm+CBQMhY-PXNj9^3m3@YJqfy0h;z(!jNH7(4k$JIY7<=RIT;h^7=v#Ok<%z9&E z`}DSc#)nt^6xcUiGK1D-6~$?Rz#{LZXrY^RMq5UPvaWtg1~=UCrHwIWya{rKXL0-X zeblB`GqnKa>cl~uH*X@F*_>_0-Bfu0@uKXwSyIm2)TMwJ-Ki;x`|E)HCgJ?u?l&D# zDIaAiNweZl7_?$5GgLq+j$%68ocbIyZ1MMVdi_fywLATvWDtHzAT_A^ooO5X7V`Ea zTQl^DEev>O&o|7C@LAl~699elZxn>!%c%9{p38xQk@tFO_i)+r&$n~4eIdHfrm^@z z4Kn+=dH-9RtILbk@cHbOD{XffsZP8{A&%8wxmBf(tM@`$rPrS0F9Z%-Eld9-QimE$hSt^ zb5h)98v&kFLB&ahtY}q22z?6RbxiO$$TSn^xjqMm;(dizfc{w`^tR4oKV}$4>%m=q zO1L*XaV!BUux#)Y~tTO%=fpX!~_A}bP zI|q1WJX~Ay&vBP;|-LFI&AYzsTdc zOA;CDQuZrNvIB&I*H`w?lzMEL&cCchM969t1bPNa-hFb9c0@2r;s+)F%V1t<+}iJWMnbP+~0w6%Cv*9%MgR@L_d`R zNstd~Pk#2s#3>%m=f8@cxdZ@9?$hhbGp7bh8+X03nMz-&@zJh8#w!(vUi-oOXTQ0q z@A-euv2^BGNvQE(kF7oWgJXrBtl-5nuP~BRl%_5b71|$6N{b0j+$VOA16Ztuz|#xK zFF7BUmcG`zShbyx zE;P0FSU|0EO~5L=n2f2cKdxQfJLzsA>@3P!^&548&io=-nbUDXSV7q%IWt@SuTHze zHh)54(&*Ija98-7j&-z$ipoKB&UC|X5rtg#L`mo-dsIVa)-9qos7HsJWo(I-=de+; zVRVZVw|n;e9XH=QFtG*TF~K|(^?ZiEqiE{jr4|wY)O4Ka-UU!!Zlx{p4T!dQxZe-* zA0*Hb=D(^V=%_u)w-dQzb-yNg*)7Aqx?rwV$78bGjCS!fU4-HZGprsONX7?2F@eXT zN_`kk8UOr-?O?&e!Y1j=rKFsFr6VUb+F&MzdRSsIG7jRjTG?r=U`AnUiYE%_rJXP& z!3;ZDthY$cKR#wrFyV+~p|dvCK1R7X<7~N|;iQbVy3Xi=%k!M|zAa?h9wN#YuMRTh z1OGc#Ap8_x>yISr8S51av>E|Sp$7~DntVrLx4Kd|hs-gvIYQ*N2>Y%$SuH2u3=UY| zGA2)E{TAmTN#SD1l(Ud8EB8D3z1~j=KH%k zp1Gi=g-2-1yeuZ^4|8&-G`xR->ml!Co-*RBF8u;+ZmTLPB5&#iez}E72WBy 
zh%CIy>Zl*o9N2qqb-~y#(NCQZSOOQ=zklsPHS4)4U1=HvtNs1R9fFF!{@f1Q=Hk*! z0&F|d+d=xnajYGgiCYbBfFCN(XTg;=cY=I?Pj+i`)6B%Aq&Sqiz3zlAfXU)+JUGL7 z5cOs2I57DmeQq>?h zORk_}HTUSH(PYU2XlaS|bz0t2*j%UPa6Y@_P2L1CmsJQsWRk95Ar27!j@Oe@7ebtY zfMj40aK8Sq!smA@Xx}4+Q#_N7cxYRnG9&3?(-{JumBB~>*Vr$6%aSJ2B!fo}{1fF7 zIrSvA%nRJO$SpfN5q1_6izGx@7O(S*4eZ1`!s#i*8e07Tr-g+7RV8??=c){yET8tQfw>P8K&k?$*E+N-kaSErj@$lSo&N9Dl_oeHf8~VY# zRe#27vqqfmLuL!RLiQ{=?>6hhPpMcu7Td7&9}dEelO$wDsu_-BGt@-nHPNUEsjh9*Dkd2=w{Sl2KfiH|gB)fJsICIa$YW-QpCERsYRhb@m`KvNG@h zTkY88&TbtTkcPMQmUWI3R@^1Saa+fjnRX1N+jm&~lHORkU-z4;?-=gJ%j>sGRwHn}I$z&=V|)Yq zGHN*SEAZV5>Sdh{E?cK{U*VRY^e?`Dt7RhzYR9-IRV0*5ROoXZ-$-|=mP1!iK!pvn zFfoIOX4ije?>nR6{=R;7(OdLRwCF@U3cAe*S#;E=fz$3`)batb>@3!pMCZ@`}5g*AG!3z60=SvciA5E zBeouMY#?9Wjb=rt7r~XH$LtF;Q5#}~B=H#Lp3LNN3>Ssy{JYPK*WMR^^uifK#=LsL zWdzS2;)2D`Yi3iwJ9MYy!%dnupQLU_bvn!H0hwRot&R}v>jrzMuy*g(`i^{oqA8p?U$W<5=kaSjPO{egSb%tKyMSMc{TIH8& z8=max(4mdH?4iHHby7jyo`rSt?xvr6pnHuGYbArLNIoh$8IgoSURDDxTo;oI+@pn@ zuIA>Iy7}79`cJg`VL6dhNp^z}%wug$c6LcZ!-BN_6plJ~vo1rTMMUn!5BRvs7lnh8WW~?39@7b}P)~dqR=@8+q!TZBG02s7D{~_`GSff$QN6tr<<9w*T!1 z(Z(|s=4CA8K_xEJ(c;1dd3)2?L~0n}@`F?jo_5ebV#C`h06pen($hK9JW9W8^`_ z?U6tQ9)$XOf5H9eO7$dhN5bNu=g0m^ZkYhLnFlp(t{2WtkbmjrwPz@5Nxolz{B!>w z0v^wfPR|cav-!vgoaTvFRnGhY7NKp7cUiDQ1wc>Q6F{Zy^;VW#Caq#z=0<*dVHI}Y z%VS-`>fFhM^}}r<_nPV_N-y{02Bpu4wdthZBUwhz2Z(0QPlC&ZEx!|%A6ML*?ycw}V;DAOQBA2?(Z6is{8@&!1djhSaEIPTs4c4q#hsXb&|axx5I z0RU%c#sI(Y;Fr2O(k$5y$d|Xxp>=mVb`@c6fDd45DCO7N#C4XWdHsD$^=~F9b=7b*(m5wy1BSP9M=RnxTlC?DWcz|5o2WIp+DJnhCzyVK%1@y)XpS>7BPu%wVei!sfAn{X%)jlj z^!v}^1+2PACkvHE58FRK_SuGI?}RD<)%X>_dd}iTj(0DM*?s!K4QDZkI0DyCwU`1` z$GyY0&rWrdvP5#!+yXMdzEa6%e`P*M?|K#I90`IzxwsEMONJlVo540I=>ML*!=;@M zm4P8(!;~6Zu-?9OkzkiFmXzl^}*NZyCocrtPaQH`2Ig`qWI7CK*P*EV_O8%xv>cR@)w!KnKA9QJ87Bc)GMLcOO zi_s3UeL6kU>fU1%3oaTl8=xMc6B};KFKLyc1c#A~+^tI5?YeLqH;75^HJZNI$yGWi z``vuBtXKEOAB8k$-JYpNUTg|3ri0{xLtI=XBmwplLtKk!~U=^}v8S#rQ1z-YnwM((JH-?|E zG-+#Sl=k+cv-WU%wpavF37OsPA}nxLc5I3}86hN}Z+QS}tI+XZm(n>}$a*^wklMF7 
z31(M&2@D>zTBnvA*IsUh{a!6e7nhw0uZ$Ho56z-oUwgF*=5&TX5^C7CDrVVfSLzY6 z9N%_E?_M$J1IBV&XL8WN$Zr=78W!uBqvaZTXkqirK(oKs$a>_r6p^|Q;QUrFRFIco z$P_kQc?bJ8Lc9`Zs`DsdX)6bCD3NEvAkW$G?WAL7n@j#`rzc~cTrXbpV#)nXFZUQf zG}KGS%B#e4c1bG0SBs|`hf4L$zP@B@fE4t4fKnsdDWgpF`r=e|xH{`u+RetupscTe zA?e9Sv}>x9s`mP#|394C2L1n~Q|s_xw`efr&=D``cg)t7o0zxv3khe(O%CW%_-lBl^Rd;^I_@mYgaO#^g#-fFf zfDthp8cESdvBLa*gGC07pO_+rIqz^c$P$xs1Ut2$-;y}S zaq9y4hyczHSV#n@2U@Ml$vjSgsE;51WEXxr$vK*+G;8ajQ2q}0qJc8X4$S8E#Zu5G zDre<(LJ_Wcffd+KAL;gz#lwH*;Pl z$+E?B%p)nJMeIK6e%kSK0?Em+yAj@V!4?`Tk0fB7J~Hff5;o^K{u=k>0obu=6c$fZ_(F zP>v^4REjvAPrvO4*$b@J7B8WZyDgb@*D^OxQ_)eCpr~E)(lVFzLJCbE1BtXGyv{e~ zROnD$Gz3hBU1!CZoe&%j_NfH)6IqAC3^)1SGeb7t!&*FA3HwrBk zUNFi;vEDV(F67S-9~vjAQuYV|yct!{L2<4TvQ4 z(Pf}M(-?f4SOvZg_l(QT37$VQZr-z`p^-u+I?q(O(w=6xnDs@`=WJOy9+eQsY&o=e zfC>R2Tb8YMhAPsr+lsMFQcB@pBflo)($7)sg))V$C^h3<&)p5UvxY`bF6ZWSCrCAZ zR}W(Id2WhM=`?CuqMj1U=nB(rN^T1Cmv@Y_&h*tI^|n~}`;2i~S6?4g#8bf{vsy@( zrrHYRju`xzbjlKs#?ngAV`B*aYBtcLyK>_77BD8$>+Sc4U5g>#V*<&Rp9E!6`u8nFJ-QYC5z_BlcbKU!&l^ zA8MMRkStgi*)udUX^e6?udvBZdkVd08()!_ZVW9a41ldo;u9L)H*g1eGG5F;f~gJc zUvezu4t(O1X+GMv>yQ(Do0%%@cpiF+4y#p9pGL(^ zsy*wfu(*&F7JS^b!f`Z_jgI~lm$3ul#Z`%Rau=pZcnI+1o`r<& zSegZ1CFXcfu!#jdN*!~idtK2Q$IZVwG3+6Ny?O1u&Fc%Y9`HJzGc>#X}_As+2L#9H~T6a(W`p;TZ zKEIL_@lwU_CqMk1_uPuLnJJpf7u&~@>m=PMFPUK<#*?hO4S?m~Srf%P2VoZLHay;& zOWfPvkY6X}&@5Z|yDLL_{|!}Qo!<5HAEA8*h7vD-of869S!)74w`P7{3cpra6)6n& zr>mjXJioH86hFMn)ww$#H9Pp0DbR$la4Ew#ZH-J14NyqX$!X6_1zS!Oxad)R^Sp54 zT}&Ohv`sia{ub~vVRLZDp#{%wAgtl&a~GN-l*tikRCM&ztqmgj0SJjPw8+&or#hnn zKm^tj@>Vy_TsFqsbNkP~w9rXEj^0kek_^6P0z;(}DzRQ~g&lf;)ZQqx&CX9nZ~#Is zWCHVoiND3He*B!J^o!Ps-?z$j{Bf(9NVe3X>F}OpF3Re=*@eztqa*W%*^^W%FrCU9 zy@$8e!H0l+nC9KJ@QigULfMOarj0^8ym~j|Cekmjx63Q z((0wj8#K9H$d69JN7J+v`2V7mJ<%!C|6}uyt=nqbkIa#q#Vpo;YEZoWgwD)GckF!n zPyJdICUm;n|8M?hNJBmumxF@@aEV5`lbN1#&!c_VM;utC`1fUlRXF}F8zAam*#Of2 z`Awc`CA7B8))eQJSp2X)rBvLB7pSG945Gs$JM=*MliBC4ESJ8Eiu;LHyWL`Rg2UE| zfTnrbD%`oO;ZBfMv(EJKc0ucKb%{vZM*hUXCc+^2%009U_G9w${6MCt(88jpPU-x8 
zR4KnpRIEDw{r_zI)i_yecE-bs3qTbHoI?j%a*}sA${x3!=3>67L>-*+q>T@w$Z6kH z`UWU?Cq%SbM&lbB!tp4QlhA6MEabX&9XK_C?M6F&C!5ryCP~cE6s0(vw4RQMlIvC$ zAiCSIwg>K;#VGx&4?3D6S*W~lf)e9H&k#jamQ2T=NxBY=Hf#7wj$ z-gpZ3t+LD8;8}Qm-j8J|_ba2{Jt@+BNoPW}^0z9C^-om_iO{h#$wGon=|obY+dDVc z`}_lg)}{ao+g&xE!C8lchA&h2-PYmdP171x&THMl|~Nqdfytu(>fJ9 z`0YfakL!QCr!jbD-LU(TY-XWXCMxOH9BZe76g2_v|LsGdk{EBfY;At|#8bn(k`HZU zNher0);g#Zn37#lQ@BRwCK7k|^CY+P`^+MAxN*2;Ki!tCjgic-DkOo+`&asp;=o?5 zUlS!wnbO6(b8H-A)VoKyJRD=68?O8>5JQ0B7HHlHb?EJ_I+yUx(9p4X%k`qntC#(= z;MN8lGBN(5;o(X|z+7+C_BbADNPr&7`6}`_>`AUm@b_K7-Saqr5 z$|n?ChnKlDJ3a%qW8&w^pJQG2vY3y(= zol*i$SF13PewYjcqfK0)sX7Z&mjapELCp}rJm6cJB6QbUE82E6dot%D6TGNE(?cD? z;apqc3(m()xE7eFEGQV%+ldN~F8`tY{m(P;i;sC~fppV@y7jT@e#Pk8GV>ZHEdlmv!RcW=n50!dr^1>MPY)0!4KLaWqPUg|ow3*KISPHcH2MR??&T;kW z`zgIQ)Yp^q)2eQtyu}PcX)5A)ohzs6?o}GSxg=NclP}Gf_S1X2QcK;`CZz)o4`!Sc z`KK;3@18RI{nhKsO%R-FYa9P2Eh?)&LWg(ivy2tYxZQB42&hXl)$qkNsvEL;D}-l{ zuNJ#wc$M)Q-k=ednGcoM9U5q29FQ%nb&bujK)-C1ye9UYE9St|=acJz0EMum96Vsb zexXs`O zGJv*rV+;+g6f5F#zS&-x-s#3BMUP)vC24?u@j^m*%ic>(1EFOJIaf`vfo%MEkAp33W0d@yH7T~95XO!r{$8H2% zC8mB01nw~tL^DF5L-=#jj!@=a+pyOUciV-uyN(Ik)QXZ{o4O;Ww*4e-szo@zVh~xl zced)|oVclb!_t(m!CHRK19c9&H=G>dSYO}S-&_LiKpmDZ_?lC}^LqHB=IV*<#7N9< z&8QchNv^=hkFcVLSF}CZ(r>hue?&rsTtlJrCXDrOpg;POh*(HiN+V`)bzLx7^;L%b zAH1utb4i$5P~^bkWz(p)U5cGpl9SfO4Ksus|#>JOFd;>YJ0Hs;9N~;;XGmDPWu#p(GKiAgqa4w{Y~#_m-8#u46l$)@t8~F<4=CE(v4QF(byCy}u);2tKVmH$NrmRsndTEnWn-_=%xI z_;RS@gZg_L+KGvFkQMT8Ii#Ys%Df?u&%;jAP6g``9d9|vHX2?}RB)y=2(B)0kUQPe zS_1!MveTtmUnQo?eXtPgIEAkx&>=*0RnmiqMpTQcl_>c>>7?WR5mDPzwZ8#d=dSbH zKN4(TXvHeC2AYHodMdE>wT=0}OA@kJ?G$ufXLP3xB3kVEaP*dtfDB7da_53yglRNZ z_OwdIo!vgi;uBM6V0EeGe_WTP4YB`j%)Imc@lV(^&T5-TQIT?(;uBGC7sfDe@VZYU zUHnjpWxVUqQpfWgoB{w+T-;M+z4}u(b6jmWqZlPSCxy5q#Z|V1F4nHOuiD9e!o@4n z&gffGBay@&(<};4xQ`{Krvu(`0u~i6PZ8|yvB?7gOe15$)G>B$f>djWGptVL8@>&Q z0Y**2xdIHaMZ7a)vUAvclu+B1XxZ|17oMW+X6?tc4sZ2vTI@c>QICxfZ4u$rTiPmu z7g4kq>qxHmU`Hx{A_?SfTg4gNv_Yo7(s~arrlU(Ifqi}EY89#Yhdu%h*~S`r0J`uF#+6>Qv?R9tLmOUzTO=jt;6x?vf@6D 
zpz=C(9p@v+5bcBa90E6P-E4TEVJfYd56O$Nt04{K#av1+p9K3oT2D-bg9i%YZpe-N ztqFYJJh^@+ZxNJxCQbqdpE1Lo-sKn}Pf~B8P?n>6uB?+X7yTRI}uaUA;2fvaQ)ZH;# z*6qlt7io>7D)Es9n=`P@^mF3COHI%dY)K6M0fwF0AIzp>eXcHFmyoTxzZkE;yO1hw zBVT>e^_;w=xQLn>;Kfw;#^#IgAD6gMFAHew+&eha^Y){n8jIha={mr_PE#QB#HSjcM*cnxdNr)?{wdqS>lZ7pi=IWk#6 z6k`uVorlw?b8kVM8t=UzfM5oJM>SS%zD^v9~UmrGg zS>Z9#rQj4lekjr+@)#2ZYYvw7!#I8sQMj9^5T=|Gg*j#sH$=p^v+uM^H*6RY@|dhn z>SqJX!rO$Qmu)ZOK|gKOXJ^luV$M2_eIsg7ZY-xmAYD@;)z9!<8gI?T;k!4?DT(y! z1Qlm3lfxn{B@9hf4Bl4{)Du88f$d|0S0Fw_z@Q@pf_sS0Trq7M)i8yX!+Af#Ay=^bpi6>x@;^n|Mu)nM1_d>bycK}+6ssq~y zj582=Y*R$3cvjE3t;5psqm$(-CDr3&mS23NPK@)8nhO{Gqj(%NL+zx9 zFMQ;+a?-&WE^Sk=en^d3lam}4@JlhXtL@)Jdxq)rGm>@^r)wo1yiU}~R2^i&Bn)-4 zD@Km6TBH}$uYE^3eS1GihpKMQ>Tfq?EZcsx3nfRD4}`cTAnW@H{Vlm9J(<=Y=s@rV z>Iz(8Kj|`LiM^e$$iulztGxYSNhYNPKK5qfnBsd%yAWcPdI)mX$={jeG*`<*1-EN08>7IPF*lIQV;q5KP!Svl8R(iQZ*o z2rTF8a&v&sux|UhVE3Genp$H2`B>Rpu^yx_KcuzEw&ULXSNDS)JbOw%FWwiwzR#r8 z*$Hp(kzf`Cb+ z&|!78b4!EG_sKabo1bwMh$nEOILV14cd(b@L0>#YL?+A`H5k+|$==DaybFa>GvsT1 zhbNGrl zPCem%%kg^JELROl=4Sxm3Ug@qxWZir1kRtKuk+GQ5BfH_-CVtN{i?g;^b{uTC%*)ydOFjm`GEs`pE+Ew+BT zbv-C>wELv%!-J;i=g+XrBt_uXw0)gz)G(3P<6r9Q4^kPKNqFp)v4GeRE~GYj)Ft}$ zNxuUhty+c;I^}A@dq`=%%TbN%5S>V%mSbsep78l|LyQ!Z< z{s8zlHCIt=_0jceM)yvGQU9zU zS{N4W!fo|J_Q%!~`lTtoBDIP39ab&UC9k8Z_qiHP5_D8%WaT}vh1=I#x%Eag7&Z7m zTwj4~HTkty+tAJT!3OJC%#9MY7?S9>qaBq+$2rysCSayNs`CB1XOQ@*(ZcaSeZ2LU ztz;_8X(6N{hqlPhru_ql@qgDNWwi69fa|nW>jSse6TLwcl+aquFP-7<>ok3Tc;$#G z;J!l1kX>(;{{T4L8=m9ffm_hPAQ=hYj5S))8~E)-jm3%xzzyW>x>=@PlSgW~x9D3YcqklPlN z(bT2Gz6K+CY=^)6m57GO@c9d`lO@bn)l|2Oe=f1X@$;99KWnM8ETC*^Bu%B3Z4j^Y zUtHVQk*g~dbPXS!xk@u$549aT7@v(Vq=hy(R833_s1G2Lo*53`S9b`yR7 zQaOR`JM@$yIZ7GLZ4_#~99K>w2`sU`q;Km;BRpY=SFbO&MzbBW9n{!Tt5$@*mg(so zbxx(Ir~&NX^ch4~IM7!i1`v_!PF{-Sz5Ak5h2<+NKrh9?)m{Sj2-)*9lMP#AH1^7l z#$J=_9!zxnOcZIwKTe2x=#ENrne800(B)e(+GYqWufl`4Ha1Pt9 zK@(9BtfE(sV(Zzvlz)m({TF~b|2G7KvOS&(W8i(1W^eoCcNv#3EGro3-8SC+&2;K9 zJ{kaL9VO%^>VL{_KOO&aJ(U*@PeD-|s=P$g^c2b1C6T1>+tS`WVXOP}{0Q?W2@Xg9 
zJ-JkvYSbjCkd*a24o5#_d(T?&hAR@L#VzZUZEex2j}}Z zsL5#?WchvlbdKB(L&Iz=a`d-9OG`_O;$G4mIM7lV|9M{V2lnKk3DfL1EU6hkeMb%G ztgw(l3FdiRQ`LAcuwUsThR`l_XI%fP)6r%Fiv?U9bnZ4PKN(t|AXi<$%+OI3ch1~~ zTrdz(6f8F+?q*^|T~S@1_{fP&&|9y3*m4k}MvwTyu1b;|b+t1+JvpV1ciqa=#!MLF zH%^CR8u)AO_EBv}AW8P6j18Q^zR_$iC<2p&`X31^OX6VNTKs(5vMPAGE8((d-?Xcv zp`9}(|e}dINTq_(tg^%~NHjs~1RXsojFoxHxP1`f4z^rfBso&W>l~iNyVntzstu2s|E4n^o#b_kg{*jv z1TC>CVH`=={hfH~-%uZ?&7@r()9v|5zaJ*EPmX|mSAzwU$OfS~5A@1w>J5jZYHHO= z;EUGeoD#K`H2=_^$)^s2Y|n4!_3?Hf*!06D+cF=n|GC#wd-iE`1*vdvp9!P4Zddu@0-`IO+FixKY| g|L4!!@jLvF(hXfnuSqx2SFB>ZP}Nqccm|C8Z_DxOO8@`> diff --git a/mypi.ini b/mypi.ini new file mode 100644 index 000000000..64218bc23 --- /dev/null +++ b/mypi.ini @@ -0,0 +1,7 @@ +[mypy] +check_untyped_defs = True +disallow_any_generics = True +disallow_untyped_calls = True +disallow_untyped_defs = True +ignore_missing_imports = True +strict_optional = False diff --git a/requirements-ci.txt b/requirements-ci.txt new file mode 100644 index 000000000..cf8aff4df --- /dev/null +++ b/requirements-ci.txt @@ -0,0 +1,14 @@ +numpy==1.23.5 +opencv-python==4.7.0.72 +onnx==1.14.0 +insightface==0.7.3 +psutil==5.9.5 +tk==0.1.0 +customtkinter==5.1.3 +torch==2.0.1 +torchvision==0.15.2 +onnxruntime==1.15.0 +tensorflow==2.12.0 +opennsfw2==0.10.2 +protobuf==4.23.2 +tqdm==4.65.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index e25726fd4..859654edc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,9 +6,12 @@ onnx==1.14.0 insightface==0.7.3 psutil==5.9.5 tk==0.1.0 +customtkinter==5.1.3 pillow==9.5.0 torch==2.0.1+cu118; sys_platform != 'darwin' torch==2.0.1; sys_platform == 'darwin' +torchvision==0.15.2+cu118; sys_platform != 'darwin' +torchvision==0.15.2; sys_platform == 'darwin' onnxruntime==1.15.0; sys_platform == 'darwin' and platform_machine != 'arm64' onnxruntime-silicon==1.13.1; sys_platform == 'darwin' and platform_machine == 'arm64' onnxruntime-gpu==1.15.0; sys_platform != 'darwin' @@ -16,4 +19,5 @@ 
tensorflow==2.13.0rc1; sys_platform == 'darwin' tensorflow==2.12.0; sys_platform != 'darwin' opennsfw2==0.10.2 protobuf==4.23.2 -tqdm==4.65.0 \ No newline at end of file +tqdm==4.65.0 +gfpgan==1.3.8 \ No newline at end of file diff --git a/roop/__init__.py b/roop/__init__.py index 8d1c8b69c..e69de29bb 100644 --- a/roop/__init__.py +++ b/roop/__init__.py @@ -1 +0,0 @@ - diff --git a/roop/analyser.py b/roop/analyser.py deleted file mode 100644 index 804f7a8d9..000000000 --- a/roop/analyser.py +++ /dev/null @@ -1,27 +0,0 @@ -import insightface -import roop.globals - -FACE_ANALYSER = None - - -def get_face_analyser(): - global FACE_ANALYSER - if FACE_ANALYSER is None: - FACE_ANALYSER = insightface.app.FaceAnalysis(name='buffalo_l', providers=roop.globals.providers) - FACE_ANALYSER.prepare(ctx_id=0, det_size=(640, 640)) - return FACE_ANALYSER - - -def get_face_single(img_data): - face = get_face_analyser().get(img_data) - try: - return sorted(face, key=lambda x: x.bbox[0])[0] - except IndexError: - return None - - -def get_face_many(img_data): - try: - return get_face_analyser().get(img_data) - except IndexError: - return None diff --git a/roop/capturer.py b/roop/capturer.py new file mode 100644 index 000000000..fd49d468d --- /dev/null +++ b/roop/capturer.py @@ -0,0 +1,20 @@ +from typing import Any +import cv2 + + +def get_video_frame(video_path: str, frame_number: int = 0) -> Any: + capture = cv2.VideoCapture(video_path) + frame_total = capture.get(cv2.CAP_PROP_FRAME_COUNT) + capture.set(cv2.CAP_PROP_POS_FRAMES, min(frame_total, frame_number - 1)) + has_frame, frame = capture.read() + capture.release() + if has_frame: + return frame + return None + + +def get_video_frame_total(video_path: str) -> int: + capture = cv2.VideoCapture(video_path) + video_frame_total = int(capture.get(cv2.CAP_PROP_FRAME_COUNT)) + capture.release() + return video_frame_total diff --git a/roop/core.py b/roop/core.py index f09674b86..050d1377e 100755 --- a/roop/core.py +++ b/roop/core.py @@ 
-2,78 +2,136 @@ import os import sys -# single thread doubles performance of gpu-mode - needs to be set before torch import -if any(arg.startswith('--gpu-vendor') for arg in sys.argv): +# single thread doubles cuda performance - needs to be set before torch import +if any(arg.startswith('--execution-provider') for arg in sys.argv): os.environ['OMP_NUM_THREADS'] = '1' +# reduce tensorflow log level +os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' +import warnings +from typing import List import platform import signal import shutil -import glob import argparse -import psutil import torch +import onnxruntime import tensorflow -from pathlib import Path -import multiprocessing as mp -from opennsfw2 import predict_video_frames, predict_image -import cv2 import roop.globals -from roop.swapper import process_video, process_img, process_faces, process_frames -from roop.utils import is_img, detect_fps, set_fps, create_video, add_audio, extract_frames, rreplace -from roop.analyser import get_face_single +import roop.metadata import roop.ui as ui - -signal.signal(signal.SIGINT, lambda signal_number, frame: quit()) -parser = argparse.ArgumentParser() -parser.add_argument('-f', '--face', help='use this face', dest='source_img') -parser.add_argument('-t', '--target', help='replace this face', dest='target_path') -parser.add_argument('-o', '--output', help='save output to this file', dest='output_file') -parser.add_argument('--keep-fps', help='maintain original fps', dest='keep_fps', action='store_true', default=False) -parser.add_argument('--keep-frames', help='keep frames directory', dest='keep_frames', action='store_true', default=False) -parser.add_argument('--all-faces', help='swap all faces in frame', dest='all_faces', action='store_true', default=False) -parser.add_argument('--max-memory', help='maximum amount of RAM in GB to be used', dest='max_memory', type=int) -parser.add_argument('--cpu-cores', help='number of CPU cores to use', dest='cpu_cores', type=int, 
default=max(psutil.cpu_count() / 2, 1)) -parser.add_argument('--gpu-threads', help='number of threads to be use for the GPU', dest='gpu_threads', type=int, default=8) -parser.add_argument('--gpu-vendor', help='choice your GPU vendor', dest='gpu_vendor', choices=['apple', 'amd', 'intel', 'nvidia']) - -args = parser.parse_known_args()[0] - -if 'all_faces' in args: - roop.globals.all_faces = True - -if args.cpu_cores: - roop.globals.cpu_cores = int(args.cpu_cores) - -# cpu thread fix for mac -if sys.platform == 'darwin': - roop.globals.cpu_cores = 1 - -if args.gpu_threads: - roop.globals.gpu_threads = int(args.gpu_threads) - -# gpu thread fix for amd -if args.gpu_vendor == 'amd': - roop.globals.gpu_threads = 1 - -if args.gpu_vendor: - roop.globals.gpu_vendor = args.gpu_vendor -else: - roop.globals.providers = ['CPUExecutionProvider'] - -sep = "/" -if os.name == "nt": - sep = "\\" - - -def limit_resources(): +from roop.predicter import predict_image, predict_video +from roop.processors.frame.core import get_frame_processors_modules +from roop.utilities import has_image_extension, is_image, is_video, detect_fps, create_video, extract_frames, get_temp_frame_paths, restore_audio, create_temp, move_temp, clean_temp, normalize_output_path + +if 'ROCMExecutionProvider' in roop.globals.execution_providers: + del torch + +warnings.filterwarnings('ignore', category=FutureWarning, module='insightface') +warnings.filterwarnings('ignore', category=UserWarning, module='torchvision') + + +def parse_args() -> None: + signal.signal(signal.SIGINT, lambda signal_number, frame: destroy()) + program = argparse.ArgumentParser() + program.add_argument('-s', '--source', help='select an source image', dest='source_path') + program.add_argument('-t', '--target', help='select an target image or video', dest='target_path') + program.add_argument('-o', '--output', help='select output file or directory', dest='output_path') + program.add_argument('--frame-processor', help='pipeline of frame 
processors', dest='frame_processor', default=['face_swapper'], choices=['face_swapper', 'face_enhancer'], nargs='+') + program.add_argument('--keep-fps', help='keep original fps', dest='keep_fps', action='store_true', default=False) + program.add_argument('--keep-audio', help='keep original audio', dest='keep_audio', action='store_true', default=True) + program.add_argument('--keep-frames', help='keep temporary frames', dest='keep_frames', action='store_true', default=False) + program.add_argument('--many-faces', help='process every face', dest='many_faces', action='store_true', default=False) + program.add_argument('--video-encoder', help='adjust output video encoder', dest='video_encoder', default='libx264', choices=['libx264', 'libx265', 'libvpx-vp9']) + program.add_argument('--video-quality', help='adjust output video quality', dest='video_quality', type=int, default=18) + program.add_argument('--max-memory', help='maximum amount of RAM in GB', dest='max_memory', type=int, default=suggest_max_memory()) + program.add_argument('--execution-provider', help='execution provider', dest='execution_provider', default=['cpu'], choices=suggest_execution_providers(), nargs='+') + program.add_argument('--execution-threads', help='number of execution threads', dest='execution_threads', type=int, default=suggest_execution_threads()) + program.add_argument('-v', '--version', action='version', version=f'{roop.metadata.name} {roop.metadata.version}') + + # register deprecated args + program.add_argument('-f', '--face', help=argparse.SUPPRESS, dest='source_path_deprecated') + program.add_argument('--cpu-cores', help=argparse.SUPPRESS, dest='cpu_cores_deprecated', type=int) + program.add_argument('--gpu-vendor', help=argparse.SUPPRESS, dest='gpu_vendor_deprecated') + program.add_argument('--gpu-threads', help=argparse.SUPPRESS, dest='gpu_threads_deprecated', type=int) + + args = program.parse_args() + + roop.globals.source_path = args.source_path + roop.globals.target_path = 
args.target_path + roop.globals.output_path = normalize_output_path(roop.globals.source_path, roop.globals.target_path, args.output_path) + roop.globals.frame_processors = args.frame_processor + roop.globals.headless = args.source_path or args.target_path or args.output_path + roop.globals.keep_fps = args.keep_fps + roop.globals.keep_audio = args.keep_audio + roop.globals.keep_frames = args.keep_frames + roop.globals.many_faces = args.many_faces + roop.globals.video_encoder = args.video_encoder + roop.globals.video_quality = args.video_quality + roop.globals.max_memory = args.max_memory + roop.globals.execution_providers = decode_execution_providers(args.execution_provider) + roop.globals.execution_threads = args.execution_threads + + # translate deprecated args + if args.source_path_deprecated: + print('\033[33mArgument -f and --face are deprecated. Use -s and --source instead.\033[0m') + roop.globals.source_path = args.source_path_deprecated + roop.globals.output_path = normalize_output_path(args.source_path_deprecated, roop.globals.target_path, args.output_path) + if args.cpu_cores_deprecated: + print('\033[33mArgument --cpu-cores is deprecated. Use --execution-threads instead.\033[0m') + roop.globals.execution_threads = args.cpu_cores_deprecated + if args.gpu_vendor_deprecated == 'apple': + print('\033[33mArgument --gpu-vendor apple is deprecated. Use --execution-provider coreml instead.\033[0m') + roop.globals.execution_providers = decode_execution_providers(['coreml']) + if args.gpu_vendor_deprecated == 'nvidia': + print('\033[33mArgument --gpu-vendor nvidia is deprecated. Use --execution-provider cuda instead.\033[0m') + roop.globals.execution_providers = decode_execution_providers(['cuda']) + if args.gpu_vendor_deprecated == 'amd': + print('\033[33mArgument --gpu-vendor amd is deprecated. 
Use --execution-provider cuda instead.\033[0m') + roop.globals.execution_providers = decode_execution_providers(['rocm']) + if args.gpu_threads_deprecated: + print('\033[33mArgument --gpu-threads is deprecated. Use --execution-threads instead.\033[0m') + roop.globals.execution_threads = args.gpu_threads_deprecated + + +def encode_execution_providers(execution_providers: List[str]) -> List[str]: + return [execution_provider.replace('ExecutionProvider', '').lower() for execution_provider in execution_providers] + + +def decode_execution_providers(execution_providers: List[str]) -> List[str]: + return [provider for provider, encoded_execution_provider in zip(onnxruntime.get_available_providers(), encode_execution_providers(onnxruntime.get_available_providers())) + if any(execution_provider in encoded_execution_provider for execution_provider in execution_providers)] + + +def suggest_max_memory() -> int: + if platform.system().lower() == 'darwin': + return 4 + return 16 + + +def suggest_execution_providers() -> List[str]: + return encode_execution_providers(onnxruntime.get_available_providers()) + + +def suggest_execution_threads() -> int: + if 'DmlExecutionProvider' in roop.globals.execution_providers: + return 1 + if 'ROCMExecutionProvider' in roop.globals.execution_providers: + return 2 + return 8 + + +def limit_resources() -> None: # prevent tensorflow memory leak gpus = tensorflow.config.experimental.list_physical_devices('GPU') for gpu in gpus: tensorflow.config.experimental.set_memory_growth(gpu, True) - if args.max_memory: - memory = args.max_memory * 1024 * 1024 * 1024 - if str(platform.system()).lower() == 'windows': + # limit memory usage + if roop.globals.max_memory: + memory = roop.globals.max_memory * 1024 ** 3 + if platform.system().lower() == 'darwin': + memory = roop.globals.max_memory * 1024 ** 6 + if platform.system().lower() == 'windows': import ctypes kernel32 = ctypes.windll.kernel32 kernel32.SetProcessWorkingSetSize(-1, ctypes.c_size_t(memory), 
ctypes.c_size_t(memory)) @@ -82,199 +140,99 @@ def limit_resources(): resource.setrlimit(resource.RLIMIT_DATA, (memory, memory)) -def pre_check(): +def release_resources() -> None: + if 'CUDAExecutionProvider' in roop.globals.execution_providers: + torch.cuda.empty_cache() + + +def pre_check() -> bool: if sys.version_info < (3, 9): - quit('Python version is not supported - please upgrade to 3.9 or higher') + update_status('Python version is not supported - please upgrade to 3.9 or higher.') + return False if not shutil.which('ffmpeg'): - quit('ffmpeg is not installed!') - model_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../inswapper_128.onnx') - if not os.path.isfile(model_path): - quit('File "inswapper_128.onnx" does not exist!') - if roop.globals.gpu_vendor == 'apple': - if 'CoreMLExecutionProvider' not in roop.globals.providers: - quit("You are using --gpu=apple flag but CoreML isn't available or properly installed on your system.") - if roop.globals.gpu_vendor == 'amd': - if 'ROCMExecutionProvider' not in roop.globals.providers: - quit("You are using --gpu=amd flag but ROCM isn't available or properly installed on your system.") - if roop.globals.gpu_vendor == 'nvidia': - CUDA_VERSION = torch.version.cuda - CUDNN_VERSION = torch.backends.cudnn.version() - if not torch.cuda.is_available(): - quit("You are using --gpu=nvidia flag but CUDA isn't available or properly installed on your system.") - if CUDA_VERSION > '11.8': - quit(f"CUDA version {CUDA_VERSION} is not supported - please downgrade to 11.8") - if CUDA_VERSION < '11.4': - quit(f"CUDA version {CUDA_VERSION} is not supported - please upgrade to 11.8") - if CUDNN_VERSION < 8220: - quit(f"CUDNN version {CUDNN_VERSION} is not supported - please upgrade to 8.9.1") - if CUDNN_VERSION > 8910: - quit(f"CUDNN version {CUDNN_VERSION} is not supported - please downgrade to 8.9.1") - - -def get_video_frame(video_path, frame_number = 1): - cap = cv2.VideoCapture(video_path) - amount_of_frames = 
cap.get(cv2.CAP_PROP_FRAME_COUNT) - cap.set(cv2.CAP_PROP_POS_FRAMES, min(amount_of_frames, frame_number-1)) - if not cap.isOpened(): - print("Error opening video file") + update_status('ffmpeg is not installed.') + return False + return True + + +def update_status(message: str, scope: str = 'ROOP.CORE') -> None: + print(f'[{scope}] {message}') + if not roop.globals.headless: + ui.update_status(message) + + +def start() -> None: + for frame_processor in get_frame_processors_modules(roop.globals.frame_processors): + if not frame_processor.pre_start(): + return + # process image to image + if has_image_extension(roop.globals.target_path): + if predict_image(roop.globals.target_path): + destroy() + shutil.copy2(roop.globals.target_path, roop.globals.output_path) + for frame_processor in get_frame_processors_modules(roop.globals.frame_processors): + update_status('Progressing...', frame_processor.NAME) + frame_processor.process_image(roop.globals.source_path, roop.globals.output_path, roop.globals.output_path) + release_resources() + if is_image(roop.globals.target_path): + update_status('Processing to image succeed!') + else: + update_status('Processing to image failed!') return - ret, frame = cap.read() - if ret: - return cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) - - cap.release() - - -def preview_video(video_path): - cap = cv2.VideoCapture(video_path) - if not cap.isOpened(): - print("Error opening video file") - return 0 - amount_of_frames = cap.get(cv2.CAP_PROP_FRAME_COUNT) - ret, frame = cap.read() - if ret: - frame = get_video_frame(video_path) - - cap.release() - return (amount_of_frames, frame) - - -def status(string): - value = "Status: " + string - if 'cli_mode' in args: - print(value) + # process image to videos + if predict_video(roop.globals.target_path): + destroy() + update_status('Creating temp resources...') + create_temp(roop.globals.target_path) + update_status('Extracting frames...') + extract_frames(roop.globals.target_path) + temp_frame_paths = 
get_temp_frame_paths(roop.globals.target_path) + for frame_processor in get_frame_processors_modules(roop.globals.frame_processors): + update_status('Progressing...', frame_processor.NAME) + frame_processor.process_video(roop.globals.source_path, temp_frame_paths) + release_resources() + # handles fps + if roop.globals.keep_fps: + update_status('Detecting fps...') + fps = detect_fps(roop.globals.target_path) + update_status(f'Creating video with {fps} fps...') + create_video(roop.globals.target_path, fps) else: - ui.update_status_label(value) - - -def process_video_multi_cores(source_img, frame_paths): - n = len(frame_paths) // roop.globals.cpu_cores - if n > 2: - processes = [] - for i in range(0, len(frame_paths), n): - p = POOL.apply_async(process_video, args=(source_img, frame_paths[i:i + n],)) - processes.append(p) - for p in processes: - p.get() - POOL.close() - POOL.join() - - -def start(preview_callback = None): - if not args.source_img or not os.path.isfile(args.source_img): - print("\n[WARNING] Please select an image containing a face.") - return - elif not args.target_path or not os.path.isfile(args.target_path): - print("\n[WARNING] Please select a video/image to swap face in.") - return - if not args.output_file: - target_path = args.target_path - args.output_file = rreplace(target_path, "/", "/swapped-", 1) if "/" in target_path else "swapped-" + target_path - target_path = args.target_path - test_face = get_face_single(cv2.imread(args.source_img)) - if not test_face: - print("\n[WARNING] No face detected in source image. 
Please try with another one.\n") - return - if is_img(target_path): - if predict_image(target_path) > 0.85: - quit() - process_img(args.source_img, target_path, args.output_file) - status("swap successful!") - return - seconds, probabilities = predict_video_frames(video_path=args.target_path, frame_interval=100) - if any(probability > 0.85 for probability in probabilities): - quit() - video_name_full = target_path.split("/")[-1] - video_name = os.path.splitext(video_name_full)[0] - output_dir = os.path.dirname(target_path) + "/" + video_name if os.path.dirname(target_path) else video_name - Path(output_dir).mkdir(exist_ok=True) - status("detecting video's FPS...") - fps, exact_fps = detect_fps(target_path) - if not args.keep_fps and fps > 30: - this_path = output_dir + "/" + video_name + ".mp4" - set_fps(target_path, this_path, 30) - target_path, exact_fps = this_path, 30 + update_status('Creating video with 30.0 fps...') + create_video(roop.globals.target_path) + # handle audio + if roop.globals.keep_audio: + if roop.globals.keep_fps: + update_status('Restoring audio...') + else: + update_status('Restoring audio might cause issues as fps are not kept...') + restore_audio(roop.globals.target_path, roop.globals.output_path) else: - shutil.copy(target_path, output_dir) - status("extracting frames...") - extract_frames(target_path, output_dir) - args.frame_paths = tuple(sorted( - glob.glob(output_dir + "/*.png"), - key=lambda x: int(x.split(sep)[-1].replace(".png", "")) - )) - status("swapping in progress...") - if roop.globals.gpu_vendor is None and roop.globals.cpu_cores > 1: - global POOL - POOL = mp.Pool(roop.globals.cpu_cores) - process_video_multi_cores(args.source_img, args.frame_paths) + move_temp(roop.globals.target_path, roop.globals.output_path) + # clean and validate + clean_temp(roop.globals.target_path) + if is_video(roop.globals.target_path): + update_status('Processing to video succeed!') else: - process_video(args.source_img, args.frame_paths) - 
def destroy() -> None:
    """Remove any temporary artifacts for the current target and terminate the process."""
    target = roop.globals.target_path
    if target:
        clean_temp(target)
    quit()


def run() -> None:
    """Program entry point: parse CLI arguments, run pre-flight checks, then process either headless or through the UI."""
    parse_args()
    if not pre_check():
        return
    # every configured frame processor must pass its own pre-flight check before work starts
    for processor_module in get_frame_processors_modules(roop.globals.frame_processors):
        if not processor_module.pre_check():
            return
    limit_resources()
    if not roop.globals.headless:
        window = ui.init(start, destroy)
        window.mainloop()
        return
    start()
from typing import Any
import insightface

import roop.globals
from roop.typing import Frame

FACE_ANALYSER = None


def get_face_analyser() -> Any:
    """Return the process-wide insightface analyser, creating and preparing it lazily on first use."""
    global FACE_ANALYSER

    if FACE_ANALYSER is None:
        analyser = insightface.app.FaceAnalysis(name='buffalo_l', providers=roop.globals.execution_providers)
        analyser.prepare(ctx_id=0, det_size=(640, 640))
        FACE_ANALYSER = analyser
    return FACE_ANALYSER


def get_one_face(frame: Frame) -> Any:
    """Return the left-most detected face in the frame, or None when no face is found."""
    faces = get_face_analyser().get(frame)
    try:
        # min() raises ValueError when the detection list is empty
        return min(faces, key=lambda detected: detected.bbox[0])
    except ValueError:
        return None


def get_many_faces(frame: Frame) -> Any:
    """Return every detected face in the frame, or None when detection fails."""
    try:
        return get_face_analyser().get(frame)
    except IndexError:
        return None
# names every frame-processor module is required to expose
FRAME_PROCESSORS_INTERFACE = [
    'pre_check',
    'pre_start',
    'process_frame',
    'process_image',
    'process_video'
]


def load_frame_processor_module(frame_processor: str) -> Any:
    """Import a frame-processor module by short name and validate its interface.

    :param frame_processor: module name under ``roop.processors.frame`` (e.g. ``face_swapper``)
    :return: the imported, interface-complete module
    :raises SystemExit: when the module cannot be imported or is missing a
        required method -- with a diagnostic message instead of the former
        silent ``sys.exit()`` that gave the user no hint of what went wrong
    """
    try:
        frame_processor_module = importlib.import_module(f'roop.processors.frame.{frame_processor}')
        for method_name in FRAME_PROCESSORS_INTERFACE:
            if not hasattr(frame_processor_module, method_name):
                # a processor missing part of the interface is a programming error: fail loudly
                sys.exit(f'Frame processor {frame_processor} is missing method {method_name}.')
    except ImportError:
        sys.exit(f'Frame processor {frame_processor} not found.')
    return frame_processor_module
def multi_process_frame(source_path: str, temp_frame_paths: List[str], process_frames: Callable[[str, List[str], Any], None], progress: Any = None) -> None:
    """Fan the frame paths out across a thread pool, one submission per frame, and wait for all of them.

    :param source_path: path to the source face image, forwarded to ``process_frames``
    :param temp_frame_paths: extracted frame files to process
    :param process_frames: worker callable invoked with (source_path, [frame_path], progress)
    :param progress: optional progress bar shared by all workers
    """
    with ThreadPoolExecutor(max_workers=roop.globals.execution_threads) as executor:
        futures = [
            executor.submit(process_frames, source_path, [temp_frame_path], progress)
            for temp_frame_path in temp_frame_paths
        ]
        # block until every frame is done and propagate the first worker exception, if any
        for future in futures:
            future.result()


def process_video(source_path: str, frame_paths: list[str], process_frames: Callable[[str, List[str], Any], None]) -> None:
    """Process every extracted frame of a video while rendering a tqdm progress bar."""
    progress_bar_format = '{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt}{postfix}]'
    with tqdm(total=len(frame_paths), desc='Processing', unit='frame', dynamic_ncols=True, bar_format=progress_bar_format) as progress:
        progress.set_postfix({'execution_providers': roop.globals.execution_providers, 'threads': roop.globals.execution_threads, 'memory': roop.globals.max_memory})
        multi_process_frame(source_path, frame_paths, process_frames, progress)
FACE_ENHANCER = None
THREAD_SEMAPHORE = threading.Semaphore()
THREAD_LOCK = threading.Lock()
NAME = 'ROOP.FACE-ENHANCER'


def pre_check() -> bool:
    """Download the GFPGAN weights ahead of time; always reports success."""
    download_directory_path = resolve_relative_path('../models')
    conditional_download(download_directory_path, ['https://huggingface.co/henryruhs/roop/resolve/main/GFPGANv1.4.pth'])
    return True


def pre_start() -> bool:
    """Validate that a usable target (image or video) has been selected."""
    target = roop.globals.target_path
    if is_image(target) or is_video(target):
        return True
    update_status('Select an image or video for target path.', NAME)
    return False


def get_face_enhancer() -> Any:
    """Return the shared GFPGAN enhancer, creating it lazily under a lock."""
    global FACE_ENHANCER

    with THREAD_LOCK:
        if FACE_ENHANCER is None:
            model_path = resolve_relative_path('../models/GFPGANv1.4.pth')
            # todo: set models path https://github.com/TencentARC/GFPGAN/issues/399
            FACE_ENHANCER = gfpgan.GFPGANer(model_path=model_path, upscale=1)  # type: ignore[attr-defined]
    return FACE_ENHANCER


def enhance_face(temp_frame: Frame) -> Frame:
    """Run GFPGAN over the whole frame; serialized via a semaphore."""
    with THREAD_SEMAPHORE:
        _, _, temp_frame = get_face_enhancer().enhance(
            temp_frame,
            paste_back=True
        )
    return temp_frame


def process_frame(source_face: Face, temp_frame: Frame) -> Frame:
    """Enhance the frame only when it actually contains a face; ``source_face`` is unused here."""
    if get_one_face(temp_frame):
        return enhance_face(temp_frame)
    return temp_frame


def process_frames(source_path: str, temp_frame_paths: List[str], progress: Any = None) -> None:
    """Enhance every listed frame file in place, advancing the progress bar when supplied."""
    for temp_frame_path in temp_frame_paths:
        enhanced = process_frame(None, cv2.imread(temp_frame_path))
        cv2.imwrite(temp_frame_path, enhanced)
        if progress:
            progress.update(1)


def process_image(source_path: str, target_path: str, output_path: str) -> None:
    """Enhance a single target image and write the result to the output path."""
    enhanced = process_frame(None, cv2.imread(target_path))
    cv2.imwrite(output_path, enhanced)


def process_video(source_path: str, temp_frame_paths: List[str]) -> None:
    """Enhance every extracted video frame through the shared frame-processing pipeline."""
    roop.processors.frame.core.process_video(None, temp_frame_paths, process_frames)
from typing import Any, List
import cv2
import insightface
import threading

import roop.globals
import roop.processors.frame.core
from roop.core import update_status
from roop.face_analyser import get_one_face, get_many_faces
from roop.typing import Face, Frame
from roop.utilities import conditional_download, resolve_relative_path, is_image, is_video

FACE_SWAPPER = None
THREAD_LOCK = threading.Lock()
NAME = 'ROOP.FACE-SWAPPER'


def pre_check() -> bool:
    """Download the inswapper model weights ahead of time; always reports success."""
    download_directory_path = resolve_relative_path('../models')
    conditional_download(download_directory_path, ['https://huggingface.co/henryruhs/roop/resolve/main/inswapper_128.onnx'])
    return True


def pre_start() -> bool:
    """Validate the source image (must exist and contain a face) and the target path."""
    if not is_image(roop.globals.source_path):
        update_status('Select an image for source path.', NAME)
        return False
    if not get_one_face(cv2.imread(roop.globals.source_path)):
        update_status('No face in source path detected.', NAME)
        return False
    if not is_image(roop.globals.target_path) and not is_video(roop.globals.target_path):
        update_status('Select an image or video for target path.', NAME)
        return False
    return True


def get_face_swapper() -> Any:
    """Return the shared inswapper model, loading it lazily under a lock."""
    global FACE_SWAPPER

    with THREAD_LOCK:
        if FACE_SWAPPER is None:
            model_path = resolve_relative_path('../models/inswapper_128.onnx')
            FACE_SWAPPER = insightface.model_zoo.get_model(model_path, providers=roop.globals.execution_providers)
    return FACE_SWAPPER


def swap_face(source_face: Face, target_face: Face, temp_frame: Frame) -> Frame:
    """Replace ``target_face`` in the frame with ``source_face`` and paste the result back."""
    swapper = get_face_swapper()
    return swapper.get(temp_frame, target_face, source_face, paste_back=True)
def process_frames(source_path: str, temp_frame_paths: List[str], progress: Any = None) -> None:
    """Swap the source face into every listed frame file in place.

    :param source_path: path to the source face image (face is detected once, up front)
    :param temp_frame_paths: extracted frame files to rewrite
    :param progress: optional progress bar advanced once per frame
    """
    source_face = get_one_face(cv2.imread(source_path))
    for temp_frame_path in temp_frame_paths:
        temp_frame = cv2.imread(temp_frame_path)
        try:
            cv2.imwrite(temp_frame_path, process_frame(source_face, temp_frame))
        except Exception as exception:
            # best-effort: report the failure and keep going with the remaining frames
            print(exception)
        if progress:
            progress.update(1)


def process_image(source_path: str, target_path: str, output_path: str) -> None:
    """Swap the source face into a single target image and write the result."""
    source_face = get_one_face(cv2.imread(source_path))
    target_frame = cv2.imread(target_path)
    cv2.imwrite(output_path, process_frame(source_face, target_frame))


def process_video(source_path: str, temp_frame_paths: List[str]) -> None:
    """Swap faces across all extracted video frames via the shared frame pipeline."""
    roop.processors.frame.core.process_video(source_path, temp_frame_paths, process_frames)
for face in many_faces: - target_frame = swap_face_in_frame(source_face, face, target_frame) - else: - face = get_face_single(target_frame) - if face: - target_frame = swap_face_in_frame(source_face, face, target_frame) - return target_frame - - -def process_frames(source_img, frame_paths, progress=None): - source_face = get_face_single(cv2.imread(source_img)) - for frame_path in frame_paths: - frame = cv2.imread(frame_path) - try: - result = process_faces(source_face, frame) - cv2.imwrite(frame_path, result) - except Exception as exception: - print(exception) - pass - if progress: - progress.update(1) - - -def multi_process_frame(source_img, frame_paths, progress): - threads = [] - num_threads = roop.globals.gpu_threads - num_frames_per_thread = len(frame_paths) // num_threads - remaining_frames = len(frame_paths) % num_threads - - # create thread and launch - start_index = 0 - for _ in range(num_threads): - end_index = start_index + num_frames_per_thread - if remaining_frames > 0: - end_index += 1 - remaining_frames -= 1 - thread_frame_paths = frame_paths[start_index:end_index] - thread = threading.Thread(target=process_frames, args=(source_img, thread_frame_paths, progress)) - threads.append(thread) - thread.start() - start_index = end_index - - # threading - for thread in threads: - thread.join() - - -def process_img(source_img, target_path, output_file): - frame = cv2.imread(target_path) - face = get_face_single(frame) - source_face = get_face_single(cv2.imread(source_img)) - result = get_face_swapper().get(frame, face, source_face, paste_back=True) - cv2.imwrite(output_file, result) - print("\n\nImage saved as:", output_file, "\n\n") - - -def process_video(source_img, frame_paths): - do_multi = roop.globals.gpu_vendor is not None and roop.globals.gpu_threads > 1 - progress_bar_format = '{l_bar}{bar}| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt}{postfix}]' - with tqdm(total=len(frame_paths), desc="Processing", unit="frame", dynamic_ncols=True, 
bar_format=progress_bar_format) as progress: - if do_multi: - multi_process_frame(source_img, frame_paths, progress) - else: - process_frames(source_img, frame_paths, progress) diff --git a/roop/typing.py b/roop/typing.py new file mode 100644 index 000000000..1cff74406 --- /dev/null +++ b/roop/typing.py @@ -0,0 +1,7 @@ +from typing import Any + +from insightface.app.common import Face +import numpy + +Face = Face +Frame = numpy.ndarray[Any, Any] diff --git a/roop/ui.json b/roop/ui.json new file mode 100644 index 000000000..bc0314352 --- /dev/null +++ b/roop/ui.json @@ -0,0 +1,155 @@ +{ + "CTk": { + "fg_color": ["gray95", "gray10"] + }, + "CTkToplevel": { + "fg_color": ["gray95", "gray10"] + }, + "CTkFrame": { + "corner_radius": 6, + "border_width": 0, + "fg_color": ["gray90", "gray13"], + "top_fg_color": ["gray85", "gray16"], + "border_color": ["gray65", "gray28"] + }, + "CTkButton": { + "corner_radius": 6, + "border_width": 0, + "fg_color": ["#3a7ebf", "#1f538d"], + "hover_color": ["#325882", "#14375e"], + "border_color": ["#3E454A", "#949A9F"], + "text_color": ["#DCE4EE", "#DCE4EE"], + "text_color_disabled": ["gray74", "gray60"] + }, + "CTkLabel": { + "corner_radius": 0, + "fg_color": "transparent", + "text_color": ["gray14", "gray84"] + }, + "CTkEntry": { + "corner_radius": 6, + "border_width": 2, + "fg_color": ["#F9F9FA", "#343638"], + "border_color": ["#979DA2", "#565B5E"], + "text_color": ["gray14", "gray84"], + "placeholder_text_color": ["gray52", "gray62"] + }, + "CTkCheckbox": { + "corner_radius": 6, + "border_width": 3, + "fg_color": ["#3a7ebf", "#1f538d"], + "border_color": ["#3E454A", "#949A9F"], + "hover_color": ["#325882", "#14375e"], + "checkmark_color": ["#DCE4EE", "gray90"], + "text_color": ["gray14", "gray84"], + "text_color_disabled": ["gray60", "gray45"] + }, + "CTkSwitch": { + "corner_radius": 1000, + "border_width": 3, + "button_length": 0, + "fg_color": ["#939BA2", "#4A4D50"], + "progress_color": ["#3a7ebf", "#1f538d"], + "button_color": 
["gray36", "#D5D9DE"], + "button_hover_color": ["gray20", "gray100"], + "text_color": ["gray14", "gray84"], + "text_color_disabled": ["gray60", "gray45"] + }, + "CTkRadiobutton": { + "corner_radius": 1000, + "border_width_checked": 6, + "border_width_unchecked": 3, + "fg_color": ["#3a7ebf", "#1f538d"], + "border_color": ["#3E454A", "#949A9F"], + "hover_color": ["#325882", "#14375e"], + "text_color": ["gray14", "gray84"], + "text_color_disabled": ["gray60", "gray45"] + }, + "CTkProgressBar": { + "corner_radius": 1000, + "border_width": 0, + "fg_color": ["#939BA2", "#4A4D50"], + "progress_color": ["#3a7ebf", "#1f538d"], + "border_color": ["gray", "gray"] + }, + "CTkSlider": { + "corner_radius": 1000, + "button_corner_radius": 1000, + "border_width": 6, + "button_length": 0, + "fg_color": ["#939BA2", "#4A4D50"], + "progress_color": ["gray40", "#AAB0B5"], + "button_color": ["#3a7ebf", "#1f538d"], + "button_hover_color": ["#325882", "#14375e"] + }, + "CTkOptionMenu": { + "corner_radius": 6, + "fg_color": ["#3a7ebf", "#1f538d"], + "button_color": ["#325882", "#14375e"], + "button_hover_color": ["#234567", "#1e2c40"], + "text_color": ["#DCE4EE", "#DCE4EE"], + "text_color_disabled": ["gray74", "gray60"] + }, + "CTkComboBox": { + "corner_radius": 6, + "border_width": 2, + "fg_color": ["#F9F9FA", "#343638"], + "border_color": ["#979DA2", "#565B5E"], + "button_color": ["#979DA2", "#565B5E"], + "button_hover_color": ["#6E7174", "#7A848D"], + "text_color": ["gray14", "gray84"], + "text_color_disabled": ["gray50", "gray45"] + }, + "CTkScrollbar": { + "corner_radius": 1000, + "border_spacing": 4, + "fg_color": "transparent", + "button_color": ["gray55", "gray41"], + "button_hover_color": ["gray40", "gray53"] + }, + "CTkSegmentedButton": { + "corner_radius": 6, + "border_width": 2, + "fg_color": ["#979DA2", "gray29"], + "selected_color": ["#3a7ebf", "#1f538d"], + "selected_hover_color": ["#325882", "#14375e"], + "unselected_color": ["#979DA2", "gray29"], + 
"unselected_hover_color": ["gray70", "gray41"], + "text_color": ["#DCE4EE", "#DCE4EE"], + "text_color_disabled": ["gray74", "gray60"] + }, + "CTkTextbox": { + "corner_radius": 6, + "border_width": 0, + "fg_color": ["gray100", "gray20"], + "border_color": ["#979DA2", "#565B5E"], + "text_color": ["gray14", "gray84"], + "scrollbar_button_color": ["gray55", "gray41"], + "scrollbar_button_hover_color": ["gray40", "gray53"] + }, + "CTkScrollableFrame": { + "label_fg_color": ["gray80", "gray21"] + }, + "DropdownMenu": { + "fg_color": ["gray90", "gray20"], + "hover_color": ["gray75", "gray28"], + "text_color": ["gray14", "gray84"] + }, + "CTkFont": { + "macOS": { + "family": "Avenir", + "size": 12, + "weight": "normal" + }, + "Windows": { + "family": "Corbel", + "size": 12, + "weight": "normal" + }, + "Linux": { + "family": "Montserrat", + "size": 12, + "weight": "normal" + } + } +} diff --git a/roop/ui.py b/roop/ui.py index d8891fba4..e36a160a5 100644 --- a/roop/ui.py +++ b/roop/ui.py @@ -1,315 +1,225 @@ -import tkinter as tk -from typing import Any, Callable, Tuple -from PIL import Image, ImageTk -import webbrowser -from tkinter import filedialog -from tkinter.filedialog import asksaveasfilename -import threading - -from roop.utils import is_img - -max_preview_size = 800 - - -def create_preview(parent): - global preview_image_frame, preview_frame_slider, test_button - - preview_window = tk.Toplevel(parent) - # Override close button - preview_window.protocol("WM_DELETE_WINDOW", hide_preview) - preview_window.withdraw() - preview_window.title("Preview") - preview_window.configure(bg="red") - preview_window.resizable(width=False, height=False) - - frame = tk.Frame(preview_window, background="#2d3436") - frame.pack(fill='both', side='left', expand='True') - - # Preview image - preview_image_frame = tk.Label(frame) - preview_image_frame.pack(side='top') - - # Bottom frame - buttons_frame = tk.Frame(frame, background="#2d3436") - buttons_frame.pack(fill='both', side='bottom') 
- - current_frame = tk.IntVar() - preview_frame_slider = tk.Scale( - buttons_frame, - from_=0, - to=0, - orient='horizontal', - variable=current_frame - ) - preview_frame_slider.pack(fill='both', side='left', expand='True') - - test_button = tk.Button(buttons_frame, text="Test", bg="#f1c40f", relief="flat", width=15, borderwidth=0, highlightthickness=0) - test_button.pack(side='right', fill='y') - return preview_window - - -def show_preview(): - preview.deiconify() - preview_visible.set(True) - - -def hide_preview(): - preview.withdraw() - preview_visible.set(False) +import os +import customtkinter as ctk +from typing import Callable, Tuple +import cv2 +from PIL import Image, ImageOps -def set_preview_handler(test_handler): - test_button.config(command = test_handler) +import roop.globals +import roop.metadata +from roop.face_analyser import get_one_face +from roop.capturer import get_video_frame, get_video_frame_total +from roop.predicter import predict_frame +from roop.processors.frame.core import get_frame_processors_modules +from roop.utilities import is_image, is_video, resolve_relative_path +ROOT = None +ROOT_HEIGHT = 700 +ROOT_WIDTH = 600 -def init_slider(frames_count, change_handler): - preview_frame_slider.configure(to=frames_count, command=lambda value: change_handler(preview_frame_slider.get())) - preview_frame_slider.set(0) +PREVIEW = None +PREVIEW_MAX_HEIGHT = 700 +PREVIEW_MAX_WIDTH = 1200 +RECENT_DIRECTORY_SOURCE = None +RECENT_DIRECTORY_TARGET = None +RECENT_DIRECTORY_OUTPUT = None -def update_preview(frame): - img = Image.fromarray(frame) - width, height = img.size - aspect_ratio = 1 - if width > height: - aspect_ratio = max_preview_size / width - else: - aspect_ratio = max_preview_size / height - img = img.resize( - ( - int(width * aspect_ratio), - int(height * aspect_ratio) - ), - Image.ANTIALIAS - ) - photo_img = ImageTk.PhotoImage(img) - preview_image_frame.configure(image=photo_img) - preview_image_frame.image = photo_img +preview_label = None 
+preview_slider = None +source_label = None +target_label = None +status_label = None + + +def init(start: Callable[[], None], destroy: Callable[[], None]) -> ctk.CTk: + global ROOT, PREVIEW + ROOT = create_root(start, destroy) + PREVIEW = create_preview(ROOT) -def select_face(select_face_handler: Callable[[str], None]): - if select_face_handler: - path = filedialog.askopenfilename(title="Select a face") - preview_face(path) - return select_face_handler(path) - return None - + return ROOT -def update_slider_handler(get_video_frame, video_path): - return lambda frame_number: update_preview(get_video_frame(video_path, frame_number)) +def create_root(start: Callable[[], None], destroy: Callable[[], None]) -> ctk.CTk: + global source_label, target_label, status_label -def test_preview(create_test_preview): - frame = create_test_preview(preview_frame_slider.get()) - update_preview(frame) + ctk.deactivate_automatic_dpi_awareness() + ctk.set_appearance_mode('system') + ctk.set_default_color_theme(resolve_relative_path('ui.json')) + root = ctk.CTk() + root.minsize(ROOT_WIDTH, ROOT_HEIGHT) + root.title(f'{roop.metadata.name} {roop.metadata.version}') + root.configure() + root.protocol('WM_DELETE_WINDOW', lambda: destroy()) + source_label = ctk.CTkLabel(root, text=None) + source_label.place(relx=0.1, rely=0.1, relwidth=0.3, relheight=0.25) -def update_slider(get_video_frame, create_test_preview, video_path, frames_amount): - init_slider(frames_amount, update_slider_handler(get_video_frame, video_path)) - set_preview_handler(lambda: preview_thread(lambda: test_preview(create_test_preview))) + target_label = ctk.CTkLabel(root, text=None) + target_label.place(relx=0.6, rely=0.1, relwidth=0.3, relheight=0.25) + source_button = ctk.CTkButton(root, text='Select a face', command=lambda: select_source_path()) + source_button.place(relx=0.1, rely=0.4, relwidth=0.3, relheight=0.1) -def analyze_target(select_target_handler: Callable[[str], Tuple[int, Any]], target_path: tk.StringVar, 
frames_amount: tk.IntVar): - path = filedialog.askopenfilename(title="Select a target") - target_path.set(path) - amount, frame = select_target_handler(path) - frames_amount.set(amount) - preview_target(frame) - update_preview(frame) + target_button = ctk.CTkButton(root, text='Select a target', command=lambda: select_target_path()) + target_button.place(relx=0.6, rely=0.4, relwidth=0.3, relheight=0.1) + keep_fps_value = ctk.BooleanVar(value=roop.globals.keep_fps) + keep_fps_checkbox = ctk.CTkSwitch(root, text='Keep fps', variable=keep_fps_value, command=lambda: setattr(roop.globals, 'keep_fps', not roop.globals.keep_fps)) + keep_fps_checkbox.place(relx=0.1, rely=0.6) -def select_target(select_target_handler: Callable[[str], Tuple[int, Any]], target_path: tk.StringVar, frames_amount: tk.IntVar): - if select_target_handler: - analyze_target(select_target_handler, target_path, frames_amount) + keep_frames_value = ctk.BooleanVar(value=roop.globals.keep_frames) + keep_frames_switch = ctk.CTkSwitch(root, text='Keep frames', variable=keep_frames_value, command=lambda: setattr(roop.globals, 'keep_frames', keep_frames_value.get())) + keep_frames_switch.place(relx=0.1, rely=0.65) + keep_audio_value = ctk.BooleanVar(value=roop.globals.keep_audio) + keep_audio_switch = ctk.CTkSwitch(root, text='Keep audio', variable=keep_audio_value, command=lambda: setattr(roop.globals, 'keep_audio', keep_audio_value.get())) + keep_audio_switch.place(relx=0.6, rely=0.6) -def save_file(save_file_handler: Callable[[str], None], target_path: str): - filename, ext = 'output.mp4', '.mp4' + many_faces_value = ctk.BooleanVar(value=roop.globals.many_faces) + many_faces_switch = ctk.CTkSwitch(root, text='Many faces', variable=many_faces_value, command=lambda: setattr(roop.globals, 'many_faces', many_faces_value.get())) + many_faces_switch.place(relx=0.6, rely=0.65) - if is_img(target_path): - filename, ext = 'output.png', '.png' + start_button = ctk.CTkButton(root, text='Start', command=lambda: 
select_output_path(start)) + start_button.place(relx=0.15, rely=0.75, relwidth=0.2, relheight=0.05) - if save_file_handler: - return save_file_handler(asksaveasfilename(initialfile=filename, defaultextension=ext, filetypes=[("All Files","*.*"),("Videos","*.mp4")])) - return None + stop_button = ctk.CTkButton(root, text='Destroy', command=lambda: destroy()) + stop_button.place(relx=0.4, rely=0.75, relwidth=0.2, relheight=0.05) + preview_button = ctk.CTkButton(root, text='Preview', command=lambda: toggle_preview()) + preview_button.place(relx=0.65, rely=0.75, relwidth=0.2, relheight=0.05) -def toggle_all_faces(toggle_all_faces_handler: Callable[[int], None], variable: tk.IntVar): - if toggle_all_faces_handler: - return lambda: toggle_all_faces_handler(variable.get()) - return None + status_label = ctk.CTkLabel(root, text=None, justify='center') + status_label.place(relx=0.1, rely=0.9, relwidth=0.8) + return root -def toggle_fps_limit(toggle_all_faces_handler: Callable[[int], None], variable: tk.IntVar): - if toggle_all_faces_handler: - return lambda: toggle_all_faces_handler(variable.get()) - return None +def create_preview(parent: ctk.CTkToplevel) -> ctk.CTkToplevel: + global preview_label, preview_slider -def toggle_keep_frames(toggle_keep_frames_handler: Callable[[int], None], variable: tk.IntVar): - if toggle_keep_frames_handler: - return lambda: toggle_keep_frames_handler(variable.get()) - return None + preview = ctk.CTkToplevel(parent) + preview.withdraw() + preview.title('Preview') + preview.configure() + preview.protocol('WM_DELETE_WINDOW', lambda: toggle_preview()) + preview.resizable(width=False, height=False) + + preview_label = ctk.CTkLabel(preview, text=None) + preview_label.pack(fill='both', expand=True) + preview_slider = ctk.CTkSlider(preview, from_=0, to=0, command=lambda frame_value: update_preview(frame_value)) -def create_button(parent, text, command): - return tk.Button( - parent, - text=text, - command=command, - bg="#f1c40f", - relief="flat", - 
def update_status(text: str) -> None:
    """Show a status message in the main window and force an immediate redraw."""
    status_label.configure(text=text)
    ROOT.update()


def select_source_path() -> None:
    """Ask the user for a source face image; update globals and the preview thumbnail.

    On cancel or a non-image selection the source path and thumbnail are cleared
    so the UI state stays consistent.
    """
    global RECENT_DIRECTORY_SOURCE

    PREVIEW.withdraw()
    # fix: dialog title previously read 'select an source image'
    source_path = ctk.filedialog.askopenfilename(title='select a source image', initialdir=RECENT_DIRECTORY_SOURCE)
    if is_image(source_path):
        roop.globals.source_path = source_path
        RECENT_DIRECTORY_SOURCE = os.path.dirname(roop.globals.source_path)
        image = render_image_preview(roop.globals.source_path, (200, 200))
        source_label.configure(image=image)
    else:
        roop.globals.source_path = None
        source_label.configure(image=None)
+ else: + roop.globals.target_path = None + target_label.configure(image=None) -def preview_thread(thread_function): - threading.Thread(target=thread_function).start() +def select_output_path(start: Callable[[], None]) -> None: + global RECENT_DIRECTORY_OUTPUT -def open_preview_window(get_video_frame, target_path): - if preview_visible.get(): - hide_preview() + if is_image(roop.globals.target_path): + output_path = ctk.filedialog.asksaveasfilename(title='save image output file', initialfile='output.png', initialdir=RECENT_DIRECTORY_OUTPUT) + elif is_video(roop.globals.target_path): + output_path = ctk.filedialog.asksaveasfilename(title='save video output file', initialfile='output.mp4', initialdir=RECENT_DIRECTORY_OUTPUT) else: - show_preview() - if target_path: - frame = get_video_frame(target_path) - update_preview(frame) - - -def preview_face(path): - img = Image.open(path) - img = img.resize((180, 180), Image.ANTIALIAS) - photo_img = ImageTk.PhotoImage(img) - face_label.configure(image=photo_img) - face_label.image = photo_img - - -def preview_target(frame): - img = Image.fromarray(frame) - img = img.resize((180, 180), Image.ANTIALIAS) - photo_img = ImageTk.PhotoImage(img) - target_label.configure(image=photo_img) - target_label.image = photo_img - - -def update_status_label(value): - status_label["text"] = value - window.update() - - -def init( - initial_values: dict, - select_face_handler: Callable[[str], None], - select_target_handler: Callable[[str], Tuple[int, Any]], - toggle_all_faces_handler: Callable[[int], None], - toggle_fps_limit_handler: Callable[[int], None], - toggle_keep_frames_handler: Callable[[int], None], - save_file_handler: Callable[[str], None], - start: Callable[[], None], - get_video_frame: Callable[[str, int], None], - create_test_preview: Callable[[int], Any], -): - global window, preview, preview_visible, face_label, target_label, status_label - - window = tk.Tk() - window.geometry("600x700") - window.title("roop") - 
window.configure(bg="#2d3436") - window.resizable(width=False, height=False) - - preview_visible = tk.BooleanVar(window, False) - target_path = tk.StringVar() - frames_amount = tk.IntVar() - - # Preview window - preview = create_preview(window) - - # Contact information - support_link = tk.Label(window, text="Donate to project <3", fg="#fd79a8", bg="#2d3436", cursor="hand2", font=("Arial", 8)) - support_link.place(x=180,y=20,width=250,height=30) - support_link.bind("", lambda e: webbrowser.open("https://github.com/sponsors/s0md3v")) - - left_frame = tk.Frame(window) - left_frame.place(x=60, y=100, width=180, height=180) - face_label = tk.Label(left_frame) - face_label.pack(fill='both', side='top', expand=True) - - right_frame = tk.Frame(window) - right_frame.place(x=360, y=100, width=180, height=180) - target_label = tk.Label(right_frame) - target_label.pack(fill='both', side='top', expand=True) - - # Select a face button - face_button = create_background_button(window, "Select a face", lambda: [ - select_face(select_face_handler) - ]) - face_button.place(x=60,y=320,width=180,height=80) - - # Select a target button - target_button = create_background_button(window, "Select a target", lambda: [ - select_target(select_target_handler, target_path, frames_amount), - update_slider(get_video_frame, create_test_preview, target_path.get(), frames_amount.get()) - ]) - target_button.place(x=360,y=320,width=180,height=80) - - # All faces checkbox - all_faces = tk.IntVar(None, initial_values['all_faces']) - all_faces_checkbox = create_check(window, "Process all faces in frame", all_faces, toggle_all_faces(toggle_all_faces_handler, all_faces)) - all_faces_checkbox.place(x=60,y=500,width=240,height=31) - - # FPS limit checkbox - limit_fps = tk.IntVar(None, not initial_values['keep_fps']) - fps_checkbox = create_check(window, "Limit FPS to 30", limit_fps, toggle_fps_limit(toggle_fps_limit_handler, limit_fps)) - fps_checkbox.place(x=60,y=475,width=240,height=31) - - # Keep frames 
checkbox - keep_frames = tk.IntVar(None, initial_values['keep_frames']) - frames_checkbox = create_check(window, "Keep frames dir", keep_frames, toggle_keep_frames(toggle_keep_frames_handler, keep_frames)) - frames_checkbox.place(x=60,y=450,width=240,height=31) - - # Start button - start_button = create_button(window, "Start", lambda: [save_file(save_file_handler, target_path.get()), preview_thread(lambda: start(update_preview))]) - start_button.place(x=170,y=560,width=120,height=49) - - # Preview button - preview_button = create_button(window, "Preview", lambda: open_preview_window(get_video_frame, target_path.get())) - preview_button.place(x=310,y=560,width=120,height=49) - - # Status label - status_label = tk.Label(window, width=580, justify="center", text="Status: waiting for input...", fg="#2ecc71", bg="#2d3436") - status_label.place(x=10,y=640,width=580,height=30) - - return window \ No newline at end of file + output_path = None + if output_path: + roop.globals.output_path = output_path + RECENT_DIRECTORY_OUTPUT = os.path.dirname(roop.globals.output_path) + start() + + +def render_image_preview(image_path: str, size: Tuple[int, int]) -> ctk.CTkImage: + image = Image.open(image_path) + if size: + image = ImageOps.fit(image, size, Image.LANCZOS) + return ctk.CTkImage(image, size=image.size) + + +def render_video_preview(video_path: str, size: Tuple[int, int], frame_number: int = 0) -> ctk.CTkImage: + capture = cv2.VideoCapture(video_path) + if frame_number: + capture.set(cv2.CAP_PROP_POS_FRAMES, frame_number) + has_frame, frame = capture.read() + if has_frame: + image = Image.fromarray(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)) + if size: + image = ImageOps.fit(image, size, Image.LANCZOS) + return ctk.CTkImage(image, size=image.size) + capture.release() + cv2.destroyAllWindows() + + +def toggle_preview() -> None: + if PREVIEW.state() == 'normal': + PREVIEW.withdraw() + elif roop.globals.source_path and roop.globals.target_path: + init_preview() + update_preview() 
+ PREVIEW.deiconify() + + +def init_preview() -> None: + if is_image(roop.globals.target_path): + preview_slider.pack_forget() + if is_video(roop.globals.target_path): + video_frame_total = get_video_frame_total(roop.globals.target_path) + preview_slider.configure(to=video_frame_total) + preview_slider.pack(fill='x') + preview_slider.set(0) + + +def update_preview(frame_number: int = 0) -> None: + if roop.globals.source_path and roop.globals.target_path: + temp_frame = get_video_frame(roop.globals.target_path, frame_number) + if predict_frame(temp_frame): + quit() + for frame_processor in get_frame_processors_modules(roop.globals.frame_processors): + temp_frame = frame_processor.process_frame( + get_one_face(cv2.imread(roop.globals.source_path)), + temp_frame + ) + image = Image.fromarray(cv2.cvtColor(temp_frame, cv2.COLOR_BGR2RGB)) + image = ImageOps.contain(image, (PREVIEW_MAX_WIDTH, PREVIEW_MAX_HEIGHT), Image.LANCZOS) + image = ctk.CTkImage(image, size=image.size) + preview_label.configure(image=image) diff --git a/roop/utilities.py b/roop/utilities.py new file mode 100644 index 000000000..433cb43b1 --- /dev/null +++ b/roop/utilities.py @@ -0,0 +1,141 @@ +import glob +import mimetypes +import os +import platform +import shutil +import ssl +import subprocess +import urllib +from pathlib import Path +from typing import List, Any +from tqdm import tqdm + +import roop.globals + +TEMP_FILE = 'temp.mp4' +TEMP_DIRECTORY = 'temp' + +# monkey patch ssl for mac +if platform.system().lower() == 'darwin': + ssl._create_default_https_context = ssl._create_unverified_context + + +def run_ffmpeg(args: List[str]) -> bool: + commands = ['ffmpeg', '-hide_banner', '-hwaccel', 'auto', '-loglevel', roop.globals.log_level] + commands.extend(args) + try: + subprocess.check_output(commands, stderr=subprocess.STDOUT) + return True + except Exception: + pass + return False + + +def detect_fps(target_path: str) -> float: + command = ['ffprobe', '-v', 'error', '-select_streams', 'v:0', 
'-show_entries', 'stream=r_frame_rate', '-of', 'default=noprint_wrappers=1:nokey=1', target_path] + output = subprocess.check_output(command).decode().strip().split('/') + try: + numerator, denominator = map(int, output) + return numerator / denominator + except Exception: + pass + return 30.0 + + +def extract_frames(target_path: str) -> None: + temp_directory_path = get_temp_directory_path(target_path) + run_ffmpeg(['-i', target_path, '-pix_fmt', 'rgb24', os.path.join(temp_directory_path, '%04d.png')]) + + +def create_video(target_path: str, fps: float = 30.0) -> None: + temp_output_path = get_temp_output_path(target_path) + temp_directory_path = get_temp_directory_path(target_path) + run_ffmpeg(['-r', str(fps), '-i', os.path.join(temp_directory_path, '%04d.png'), '-c:v', roop.globals.video_encoder, '-crf', str(roop.globals.video_quality), '-pix_fmt', 'yuv420p', '-vf', 'colorspace=bt709:iall=bt601-6-625:fast=1', '-y', temp_output_path]) + + +def restore_audio(target_path: str, output_path: str) -> None: + temp_output_path = get_temp_output_path(target_path) + done = run_ffmpeg(['-i', temp_output_path, '-i', target_path, '-c:v', 'copy', '-map', '0:v:0', '-map', '1:a:0', '-y', output_path]) + if not done: + move_temp(target_path, output_path) + + +def get_temp_frame_paths(target_path: str) -> List[str]: + temp_directory_path = get_temp_directory_path(target_path) + return glob.glob((os.path.join(glob.escape(temp_directory_path), '*.png'))) + + +def get_temp_directory_path(target_path: str) -> str: + target_name, _ = os.path.splitext(os.path.basename(target_path)) + target_directory_path = os.path.dirname(target_path) + return os.path.join(target_directory_path, TEMP_DIRECTORY, target_name) + + +def get_temp_output_path(target_path: str) -> str: + temp_directory_path = get_temp_directory_path(target_path) + return os.path.join(temp_directory_path, TEMP_FILE) + + +def normalize_output_path(source_path: str, target_path: str, output_path: str) -> Any: + if source_path 
and target_path: + source_name, _ = os.path.splitext(os.path.basename(source_path)) + target_name, target_extension = os.path.splitext(os.path.basename(target_path)) + if os.path.isdir(output_path): + return os.path.join(output_path, source_name + '-' + target_name + target_extension) + return output_path + + +def create_temp(target_path: str) -> None: + temp_directory_path = get_temp_directory_path(target_path) + Path(temp_directory_path).mkdir(parents=True, exist_ok=True) + + +def move_temp(target_path: str, output_path: str) -> None: + temp_output_path = get_temp_output_path(target_path) + if os.path.isfile(temp_output_path): + if os.path.isfile(output_path): + os.remove(output_path) + shutil.move(temp_output_path, output_path) + + +def clean_temp(target_path: str) -> None: + temp_directory_path = get_temp_directory_path(target_path) + parent_directory_path = os.path.dirname(temp_directory_path) + if not roop.globals.keep_frames and os.path.isdir(temp_directory_path): + shutil.rmtree(temp_directory_path) + if os.path.exists(parent_directory_path) and not os.listdir(parent_directory_path): + os.rmdir(parent_directory_path) + + +def has_image_extension(image_path: str) -> bool: + return image_path.lower().endswith(('png', 'jpg', 'jpeg')) + + +def is_image(image_path: str) -> bool: + if image_path and os.path.isfile(image_path): + mimetype, _ = mimetypes.guess_type(image_path) + return bool(mimetype and mimetype.startswith('image/')) + return False + + +def is_video(video_path: str) -> bool: + if video_path and os.path.isfile(video_path): + mimetype, _ = mimetypes.guess_type(video_path) + return bool(mimetype and mimetype.startswith('video/')) + return False + + +def conditional_download(download_directory_path: str, urls: List[str]) -> None: + if not os.path.exists(download_directory_path): + os.makedirs(download_directory_path) + for url in urls: + download_file_path = os.path.join(download_directory_path, os.path.basename(url)) + if not 
os.path.exists(download_file_path): + request = urllib.request.urlopen(url) # type: ignore[attr-defined] + total = int(request.headers.get('Content-Length', 0)) + with tqdm(total=total, desc='Downloading', unit='B', unit_scale=True, unit_divisor=1024) as progress: + urllib.request.urlretrieve(url, download_file_path, reporthook=lambda count, block_size, total_size: progress.update(block_size)) # type: ignore[attr-defined] + + +def resolve_relative_path(path: str) -> str: + return os.path.abspath(os.path.join(os.path.dirname(__file__), path)) diff --git a/roop/utils.py b/roop/utils.py deleted file mode 100644 index 3ec687294..000000000 --- a/roop/utils.py +++ /dev/null @@ -1,72 +0,0 @@ -import os -import shutil -import roop.globals - -sep = "/" -if os.name == "nt": - sep = "\\" - - -def path(string): - if sep == "\\": - return string.replace("/", "\\") - return string - - -def run_command(command, mode="silent"): - if mode == "debug": - return os.system(command) - return os.popen(command).read() - - -def detect_fps(input_path): - input_path = path(input_path) - output = os.popen(f'ffprobe -v error -select_streams v -of default=noprint_wrappers=1:nokey=1 -show_entries stream=r_frame_rate "{input_path}"').read() - if "/" in output: - try: - return int(output.split("/")[0]) // int(output.split("/")[1].strip()), output.strip() - except: - pass - return 30, 30 - - -def run_ffmpeg(args): - log_level = f'-loglevel {roop.globals.log_level}' - run_command(f'ffmpeg {log_level} {args}') - - -def set_fps(input_path, output_path, fps): - input_path, output_path = path(input_path), path(output_path) - run_ffmpeg(f'-i "{input_path}" -filter:v fps=fps={fps} "{output_path}"') - - -def create_video(video_name, fps, output_dir): - hwaccel_option = '-hwaccel cuda' if roop.globals.gpu_vendor == 'nvidia' else '' - output_dir = path(output_dir) - run_ffmpeg(f'{hwaccel_option} -framerate "{fps}" -i "{output_dir}{sep}%04d.png" -c:v libx264 -crf 7 -pix_fmt yuv420p -y 
"{output_dir}{sep}output.mp4"') - - -def extract_frames(input_path, output_dir): - hwaccel_option = '-hwaccel cuda' if roop.globals.gpu_vendor == 'nvidia' else '' - input_path, output_dir = path(input_path), path(output_dir) - run_ffmpeg(f' {hwaccel_option} -i "{input_path}" "{output_dir}{sep}%04d.png"') - - -def add_audio(output_dir, target_path, video, keep_frames, output_file): - video_name = os.path.splitext(video)[0] - save_to = output_file if output_file else output_dir + "/swapped-" + video_name + ".mp4" - save_to_ff, output_dir_ff = path(save_to), path(output_dir) - run_ffmpeg(f'-i "{output_dir_ff}{sep}output.mp4" -i "{output_dir_ff}{sep}{video}" -c:v copy -map 0:v:0 -map 1:a:0 -y "{save_to_ff}"') - if not os.path.isfile(save_to): - shutil.move(output_dir + "/output.mp4", save_to) - if not keep_frames: - shutil.rmtree(output_dir) - - -def is_img(path): - return path.lower().endswith(("png", "jpg", "jpeg", "bmp")) - - -def rreplace(s, old, new, occurrence): - li = s.rsplit(old, occurrence) - return new.join(li)