From b82ce31a0021dde922d58a783889a12452db7db2 Mon Sep 17 00:00:00 2001 From: MotionGPT <519396593@qq.com> Date: Sat, 9 Sep 2023 23:48:58 +0800 Subject: [PATCH] Release MotionGPT V1.0 --- README.md | 396 +- app.py | 365 ++ assets/css/custom.css | 359 ++ assets/images/figure10.webp | Bin 0 -> 27082 bytes assets/images/figure12.webp | Bin 0 -> 72888 bytes assets/images/figure13.webp | Bin 0 -> 58108 bytes assets/images/pipeline.webp | Bin 0 -> 65444 bytes assets/images/table15.webp | Bin 0 -> 48652 bytes assets/images/table7.webp | Bin 0 -> 92374 bytes assets/images/table8.webp | Bin 0 -> 116696 bytes configs/assets.yaml | 32 + configs/config_h3d_stage1.yaml | 60 + configs/config_h3d_stage2.yaml | 62 + configs/config_h3d_stage3.yaml | 63 + configs/default.yaml | 141 + configs/evaluator/tm2t.yaml | 19 + configs/lm/default.yaml | 7 + configs/lm/gpt2_medium.yaml | 7 + configs/lm/t5_large.yaml | 7 + configs/lm/t5_small.yaml | 7 + configs/render.yaml | 23 + configs/vq/default.yaml | 15 + configs/webui.yaml | 75 + demo.py | 240 ++ demos/inbetween.txt | 50 + demos/m2t.txt | 50 + demos/pred.txt | 10 + demos/t2m.txt | 50 + fit.py | 289 ++ mGPT/__init__.py | 0 mGPT/archs/__init__.py | 0 mGPT/archs/mgpt_lm.py | 592 +++ mGPT/archs/mgpt_vq.py | 190 + mGPT/archs/tm2t_evaluator.py | 111 + mGPT/archs/tools/embeddings.py | 322 ++ mGPT/archs/tools/quantize_cnn.py | 414 ++ mGPT/archs/tools/resnet.py | 82 + mGPT/archs/tools/token_emb.py | 73 + mGPT/archs/tools/transformer_layers.py | 285 ++ mGPT/callback.py | 200 + mGPT/config.py | 217 + mGPT/data/HumanML3D.py | 123 + mGPT/data/Kit.py | 88 + mGPT/data/__init__.py | 103 + mGPT/data/build_data.py | 15 + mGPT/data/humanml/README.md | 1 + mGPT/data/humanml/__init__.py | 7 + mGPT/data/humanml/common/quaternion.py | 423 ++ mGPT/data/humanml/common/skeleton.py | 199 + mGPT/data/humanml/dataset_m.py | 156 + mGPT/data/humanml/dataset_m_vq.py | 54 + mGPT/data/humanml/dataset_t2m.py | 211 + mGPT/data/humanml/dataset_t2m_cb.py | 211 + mGPT/data/humanml/dataset_t2m_eval.py | 92 + mGPT/data/humanml/dataset_t2m_m2t.py | 119 + mGPT/data/humanml/dataset_t2m_token.py | 86 + mGPT/data/humanml/scripts/motion_process.py | 529 +++ mGPT/data/humanml/utils/paramUtil.py | 63 + mGPT/data/humanml/utils/word_vectorizer.py | 79 + mGPT/data/tools/__init__.py | 2 + mGPT/data/tools/collate.py | 99 + mGPT/data/tools/easyconvert.py | 72 + mGPT/data/tools/geometry.py | 566 +++ mGPT/data/tools/tensors.py | 26 + mGPT/data/transforms/__init__.py | 15 + mGPT/data/transforms/base.py | 84 + mGPT/data/transforms/identity.py | 44 + .../data/transforms/joints2jfeats/__init__.py | 2 + mGPT/data/transforms/joints2jfeats/base.py | 59 + mGPT/data/transforms/joints2jfeats/rifke.py | 159 + mGPT/data/transforms/joints2jfeats/tools.py | 97 + mGPT/data/transforms/joints2rots/config.py | 119 + .../data/transforms/joints2rots/customloss.py | 217 + mGPT/data/transforms/joints2rots/prior.py | 229 ++ mGPT/data/transforms/joints2rots/smplify.py | 284 ++ mGPT/data/transforms/rots2joints/__init__.py | 3 + mGPT/data/transforms/rots2joints/base.py | 56 + mGPT/data/transforms/rots2joints/smplh.py | 192 + mGPT/data/transforms/rots2joints/smplx.py | 201 + mGPT/data/transforms/rots2rfeats/__init__.py | 5 + mGPT/data/transforms/rots2rfeats/base.py | 60 + .../transforms/rots2rfeats/globvelandy.py | 128 + mGPT/data/transforms/smpl.py | 191 + mGPT/data/transforms/xyz.py | 81 + mGPT/data/utils.py | 81 + mGPT/losses/__init__.py | 1 + mGPT/losses/base.py | 61 + mGPT/losses/mgpt.py | 97 + mGPT/metrics/__init__.py | 1 + 
mGPT/metrics/base.py | 46 + mGPT/metrics/m2m.py | 95 + mGPT/metrics/m2t.py | 345 ++ mGPT/metrics/mm.py | 129 + mGPT/metrics/mr.py | 97 + mGPT/metrics/t2m.py | 259 ++ mGPT/metrics/utils.py | 607 +++ mGPT/models/__init__.py | 0 mGPT/models/base.py | 204 + mGPT/models/build_model.py | 8 + mGPT/models/mgpt.py | 494 +++ mGPT/models/utils/__init__.py | 0 mGPT/models/utils/adain.py | 66 + mGPT/models/utils/blocks.py | 146 + mGPT/models/utils/cross_attention.py | 412 ++ mGPT/models/utils/position_encoding.py | 192 + mGPT/models/utils/position_encoding_layer.py | 30 + mGPT/models/utils/tools.py | 37 + mGPT/render/__init__.py | 0 mGPT/render/anim.py | 155 + mGPT/render/blender/__init__.py | 1 + mGPT/render/blender/camera.py | 52 + mGPT/render/blender/data.py | 3 + mGPT/render/blender/floor.py | 73 + mGPT/render/blender/joints.py | 378 ++ mGPT/render/blender/materials.py | 135 + mGPT/render/blender/meshes.py | 93 + mGPT/render/blender/render.py | 177 + mGPT/render/blender/sampler.py | 15 + mGPT/render/blender/scene.py | 96 + mGPT/render/blender/tools.py | 56 + mGPT/render/blender/vertices.py | 17 + mGPT/render/matplot/plot_3d_global.py | 151 + mGPT/render/pyrender/hybrik_loc2rot.py | 140 + mGPT/render/pyrender/j3ds_render_smpl.py | 48 + mGPT/render/pyrender/smpl_render.py | 130 + mGPT/render/renderer.py | 179 + mGPT/render/rendermotion.py | 134 + mGPT/render/video.py | 67 + mGPT/render/visualize.py | 747 ++++ mGPT/utils/__init__.py | 0 mGPT/utils/demo_utils.py | 79 + mGPT/utils/easyconvert.py | 84 + mGPT/utils/fixseed.py | 18 + mGPT/utils/geometry_conver.py | 550 +++ mGPT/utils/geometry_tools.py | 566 +++ mGPT/utils/joints.py | 444 ++ mGPT/utils/load_checkpoint.py | 34 + mGPT/utils/logger.py | 68 + mGPT/utils/misc.py | 29 + mGPT/utils/rotation_conversions.py | 551 +++ mGPT/utils/sample_utils.py | 18 + mGPT/utils/temos_utils.py | 133 + mGPT/utils/tensors.py | 74 + prepare/download_pretrained_models.sh | 9 + prepare/download_smpl_model.sh | 14 + prepare/download_t2m_evaluators.sh | 14 + .../instructions/template_instructions.json | 3623 +++++++++++++++++ prepare/instructions/template_pretrain.json | 35 + prepare/merge_smplh_mano.py | 130 + prepare/prepare_t5.sh | 4 + prepare/requirements_render.txt | 6 + prepare/smplh.sh | 31 + render.py | 152 + requirements.txt | 26 + scripts/fbx_output.py | 354 ++ scripts/fbx_output_smplx.py | 427 ++ scripts/fit_motion.sh | 1 + scripts/fit_motion_parallel.sh | 31 + scripts/get_motion_code.py | 70 + scripts/plys2npy.py | 56 + scripts/visualize_motion.sh | 10 + scripts/visualize_motion_parallel.sh | 25 + setup.py | 20 + test.py | 142 + train.py | 94 + 165 files changed, 24459 insertions(+), 81 deletions(-) create mode 100644 app.py create mode 100644 assets/css/custom.css create mode 100644 assets/images/figure10.webp create mode 100644 assets/images/figure12.webp create mode 100644 assets/images/figure13.webp create mode 100644 assets/images/pipeline.webp create mode 100644 assets/images/table15.webp create mode 100644 assets/images/table7.webp create mode 100644 assets/images/table8.webp create mode 100644 configs/assets.yaml create mode 100644 configs/config_h3d_stage1.yaml create mode 100644 configs/config_h3d_stage2.yaml create mode 100644 configs/config_h3d_stage3.yaml create mode 100644 configs/default.yaml create mode 100644 configs/evaluator/tm2t.yaml create mode 100644 configs/lm/default.yaml create mode 100644 configs/lm/gpt2_medium.yaml create mode 100644 configs/lm/t5_large.yaml create mode 100644 configs/lm/t5_small.yaml create mode 100644 
configs/render.yaml create mode 100644 configs/vq/default.yaml create mode 100644 configs/webui.yaml create mode 100644 demo.py create mode 100644 demos/inbetween.txt create mode 100644 demos/m2t.txt create mode 100644 demos/pred.txt create mode 100644 demos/t2m.txt create mode 100644 fit.py create mode 100644 mGPT/__init__.py create mode 100644 mGPT/archs/__init__.py create mode 100644 mGPT/archs/mgpt_lm.py create mode 100644 mGPT/archs/mgpt_vq.py create mode 100644 mGPT/archs/tm2t_evaluator.py create mode 100644 mGPT/archs/tools/embeddings.py create mode 100644 mGPT/archs/tools/quantize_cnn.py create mode 100644 mGPT/archs/tools/resnet.py create mode 100644 mGPT/archs/tools/token_emb.py create mode 100644 mGPT/archs/tools/transformer_layers.py create mode 100644 mGPT/callback.py create mode 100644 mGPT/config.py create mode 100644 mGPT/data/HumanML3D.py create mode 100644 mGPT/data/Kit.py create mode 100644 mGPT/data/__init__.py create mode 100644 mGPT/data/build_data.py create mode 100644 mGPT/data/humanml/README.md create mode 100644 mGPT/data/humanml/__init__.py create mode 100644 mGPT/data/humanml/common/quaternion.py create mode 100644 mGPT/data/humanml/common/skeleton.py create mode 100644 mGPT/data/humanml/dataset_m.py create mode 100644 mGPT/data/humanml/dataset_m_vq.py create mode 100644 mGPT/data/humanml/dataset_t2m.py create mode 100644 mGPT/data/humanml/dataset_t2m_cb.py create mode 100644 mGPT/data/humanml/dataset_t2m_eval.py create mode 100644 mGPT/data/humanml/dataset_t2m_m2t.py create mode 100644 mGPT/data/humanml/dataset_t2m_token.py create mode 100644 mGPT/data/humanml/scripts/motion_process.py create mode 100644 mGPT/data/humanml/utils/paramUtil.py create mode 100644 mGPT/data/humanml/utils/word_vectorizer.py create mode 100644 mGPT/data/tools/__init__.py create mode 100644 mGPT/data/tools/collate.py create mode 100644 mGPT/data/tools/easyconvert.py create mode 100644 mGPT/data/tools/geometry.py create mode 100644 mGPT/data/tools/tensors.py create mode 100644 mGPT/data/transforms/__init__.py create mode 100644 mGPT/data/transforms/base.py create mode 100644 mGPT/data/transforms/identity.py create mode 100644 mGPT/data/transforms/joints2jfeats/__init__.py create mode 100644 mGPT/data/transforms/joints2jfeats/base.py create mode 100644 mGPT/data/transforms/joints2jfeats/rifke.py create mode 100644 mGPT/data/transforms/joints2jfeats/tools.py create mode 100644 mGPT/data/transforms/joints2rots/config.py create mode 100644 mGPT/data/transforms/joints2rots/customloss.py create mode 100644 mGPT/data/transforms/joints2rots/prior.py create mode 100644 mGPT/data/transforms/joints2rots/smplify.py create mode 100644 mGPT/data/transforms/rots2joints/__init__.py create mode 100644 mGPT/data/transforms/rots2joints/base.py create mode 100644 mGPT/data/transforms/rots2joints/smplh.py create mode 100644 mGPT/data/transforms/rots2joints/smplx.py create mode 100644 mGPT/data/transforms/rots2rfeats/__init__.py create mode 100644 mGPT/data/transforms/rots2rfeats/base.py create mode 100644 mGPT/data/transforms/rots2rfeats/globvelandy.py create mode 100644 mGPT/data/transforms/smpl.py create mode 100644 mGPT/data/transforms/xyz.py create mode 100644 mGPT/data/utils.py create mode 100644 mGPT/losses/__init__.py create mode 100644 mGPT/losses/base.py create mode 100644 mGPT/losses/mgpt.py create mode 100644 mGPT/metrics/__init__.py create mode 100644 mGPT/metrics/base.py create mode 100644 mGPT/metrics/m2m.py create mode 100644 mGPT/metrics/m2t.py create mode 100644 mGPT/metrics/mm.py create 
mode 100644 mGPT/metrics/mr.py create mode 100644 mGPT/metrics/t2m.py create mode 100644 mGPT/metrics/utils.py create mode 100644 mGPT/models/__init__.py create mode 100644 mGPT/models/base.py create mode 100644 mGPT/models/build_model.py create mode 100644 mGPT/models/mgpt.py create mode 100644 mGPT/models/utils/__init__.py create mode 100644 mGPT/models/utils/adain.py create mode 100644 mGPT/models/utils/blocks.py create mode 100644 mGPT/models/utils/cross_attention.py create mode 100644 mGPT/models/utils/position_encoding.py create mode 100644 mGPT/models/utils/position_encoding_layer.py create mode 100644 mGPT/models/utils/tools.py create mode 100644 mGPT/render/__init__.py create mode 100644 mGPT/render/anim.py create mode 100644 mGPT/render/blender/__init__.py create mode 100644 mGPT/render/blender/camera.py create mode 100644 mGPT/render/blender/data.py create mode 100644 mGPT/render/blender/floor.py create mode 100644 mGPT/render/blender/joints.py create mode 100644 mGPT/render/blender/materials.py create mode 100644 mGPT/render/blender/meshes.py create mode 100644 mGPT/render/blender/render.py create mode 100644 mGPT/render/blender/sampler.py create mode 100644 mGPT/render/blender/scene.py create mode 100644 mGPT/render/blender/tools.py create mode 100644 mGPT/render/blender/vertices.py create mode 100644 mGPT/render/matplot/plot_3d_global.py create mode 100644 mGPT/render/pyrender/hybrik_loc2rot.py create mode 100644 mGPT/render/pyrender/j3ds_render_smpl.py create mode 100644 mGPT/render/pyrender/smpl_render.py create mode 100644 mGPT/render/renderer.py create mode 100644 mGPT/render/rendermotion.py create mode 100644 mGPT/render/video.py create mode 100644 mGPT/render/visualize.py create mode 100644 mGPT/utils/__init__.py create mode 100644 mGPT/utils/demo_utils.py create mode 100644 mGPT/utils/easyconvert.py create mode 100644 mGPT/utils/fixseed.py create mode 100644 mGPT/utils/geometry_conver.py create mode 100644 mGPT/utils/geometry_tools.py create mode 100644 mGPT/utils/joints.py create mode 100644 mGPT/utils/load_checkpoint.py create mode 100644 mGPT/utils/logger.py create mode 100644 mGPT/utils/misc.py create mode 100644 mGPT/utils/rotation_conversions.py create mode 100644 mGPT/utils/sample_utils.py create mode 100644 mGPT/utils/temos_utils.py create mode 100644 mGPT/utils/tensors.py create mode 100644 prepare/download_pretrained_models.sh create mode 100644 prepare/download_smpl_model.sh create mode 100644 prepare/download_t2m_evaluators.sh create mode 100644 prepare/instructions/template_instructions.json create mode 100644 prepare/instructions/template_pretrain.json create mode 100644 prepare/merge_smplh_mano.py create mode 100644 prepare/prepare_t5.sh create mode 100644 prepare/requirements_render.txt create mode 100644 prepare/smplh.sh create mode 100644 render.py create mode 100644 requirements.txt create mode 100644 scripts/fbx_output.py create mode 100644 scripts/fbx_output_smplx.py create mode 100644 scripts/fit_motion.sh create mode 100644 scripts/fit_motion_parallel.sh create mode 100644 scripts/get_motion_code.py create mode 100644 scripts/plys2npy.py create mode 100644 scripts/visualize_motion.sh create mode 100644 scripts/visualize_motion_parallel.sh create mode 100644 setup.py create mode 100644 test.py create mode 100644 train.py diff --git a/README.md b/README.md index b4b906e..bb12062 100644 --- a/README.md +++ b/README.md @@ -1,119 +1,339 @@ -# Official repo for MotionGPT -### [MotionGPT: Human Motion as a Foreign 
Language](https://motion-gpt.github.io/) +
+

Official repo for MotionGPT

-### [Project Page](https://motion-gpt.github.io/) | [Arxiv](https://arxiv.org/abs/2306.14795) | [Paper](https://arxiv.org/pdf/2306.14795.pdf) +
-MotionGPT is a **unified** and **user-friendly** motion-language model to learn the semantic coupling of two modalities and generate high-quality motions and text descriptions on **multiple motion tasks**. +
+

MotionGPT: Human Motion as a Foreign Language

+ +

+ Project Page • + Arxiv Paper • + HuggingFace Demo • + FAQ • + Citation +

+
+
+ https://github.com/OpenMotionLab/MotionGPT/assets/120085716/960bf6ed-0cce-4196-8e2c-1a6c5d2aea3a + +
+ + + + +## 🏃 Intro MotionGPT +MotionGPT is a **unified** and **user-friendly** motion-language model to learn the semantic coupling of two modalities and generate high-quality motions and text descriptions on **multiple motion tasks**. +
+ Technical details -## Intro MotionGPT Though the advancement of pre-trained large language models unfolds, the exploration of building a unified model for language and other multi-modal data, such as motion, remains challenging and untouched so far. Fortunately, human motion displays a semantic coupling akin to human language, often perceived as a form of body language. By fusing language data with large-scale motion models, motion-language pre-training that can enhance the performance of motion-related tasks becomes feasible. Driven by this insight, we propose MotionGPT, a unified, versatile, and user-friendly motion-language model to handle multiple motion-relevant tasks. Specifically, we employ the discrete vector quantization for human motion and transfer 3D motion into motion tokens, similar to the generation process of word tokens. Building upon this “motion vocabulary”, we perform language modeling on both motion and text in a unified manner, treating human motion as a specific language. Moreover, inspired by prompt learning, we pre-train MotionGPT with a mixture of motion-language data and fine-tune it on prompt-based question-and-answer tasks. Extensive experiments demonstrate that MotionGPT achieves state-of-the-art performances on multiple motion tasks including text-driven motion generation, motion captioning, motion prediction, and motion in-between. pipeline +
## 🚩 News -- [2023/9/09] Plan to release MotionGPT V1.0 🔥🔥🔥 -- [2023/6/20] Upload paper and init project +- [2023/09/09] Release training and demo of MotionGPT V1.0 🔥🔥🔥 +- [2023/06/20] Upload paper and init project ## ⚡ Quick Start +
+ Setup and download + +### 1. Conda environment + +``` +conda create python=3.10 --name mgpt +conda activate mgpt +``` + +Install the packages in `requirements.txt` and install [PyTorch 2.0](https://pytorch.org/). + +``` +pip install -r requirements.txt +``` + +We test our code on Python 3.10.6 and PyTorch 2.0.0. + +### 2. Dependencies + +Run the scripts to download the dependency materials: + +``` +bash prepare/download_smpl_model.sh +bash prepare/prepare_t5.sh +``` + +For text-to-motion evaluation: + +``` +bash prepare/download_t2m_evaluators.sh +``` + +### 3. Pretrained model + +Run the script to download the pretrained model: + +``` +bash prepare/download_pretrained_models.sh +``` + +### 4. (Optional) Download manually + +Visit [Google Drive](https://drive.google.com/drive/folders/1U93wvPsqaSzb5waZfGFVYc4tLCAOmB4C) to download the above dependencies. + +Visit [Hugging Face](https://huggingface.co/collections/bill-jiang/motiongpt-64fb247401aedd0e86ad4d67) to download the pretrained models. + +
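Before downloading the heavier dependencies, a quick sanity check (not part of the official scripts) can confirm that the environment matches the tested versions:

```python
# Hypothetical sanity check; not part of the MotionGPT scripts.
import sys
import torch

print(f"Python: {sys.version.split()[0]}")             # tested with 3.10.6
print(f"PyTorch: {torch.__version__}")                 # tested with 2.0.0
print(f"CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
    print(f"GPU: {torch.cuda.get_device_name(0)}")
```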
+ ## ▶️ Demo +
+ Webui + +Run the following script to launch the web UI, then visit [0.0.0.0:8888](http://0.0.0.0:8888): + +``` +python app.py +``` + +
+ +
+ Batch demo + +We support txt-file input; the output motions are saved as npy files and the output texts as txt files. Please check `configs/assets.yaml` for the path configuration; `TEST.FOLDER` is the output folder. + +Then, run the following script: + +``` +python demo.py --cfg ./configs/config_h3d_stage3.yaml --example ./demos/t2m.txt +``` + +Some parameters: + +- `--example=./demos/t2m.txt`: input file of text prompts +- `--task=t2m`: evaluation task, one of t2m, m2t, pred, inbetween + +The outputs: + +- `npy file`: the generated motions with the shape of (nframe, 22, 3) +- `txt file`: the input text prompt or the text output +
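The generated npy files can be inspected directly in Python; a minimal sketch (the file name is illustrative and depends on your prompts and `TEST.FOLDER`):

```python
# Illustrative only: inspect a motion generated by demo.py.
import numpy as np

motion = np.load("results/example_0.npy")  # hypothetical output path
print(motion.shape)                        # expected (nframe, 22, 3): 22 joints in 3D per frame

# Root (pelvis) trajectory, useful as a quick plausibility check.
root_xyz = motion[:, 0, :]
print("frames:", motion.shape[0],
      "root displacement:", np.linalg.norm(root_xyz[-1] - root_xyz[0]))
```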
+ ## 💻 Train your own models +
+ Training guidance + +### 1. Prepare the datasets + +1. Please refer to [HumanML3D](https://github.com/EricGuo5513/HumanML3D) for text-to-motion dataset setup. + +2. Copy the instruction data in `prepare/instructions` into the same folder as the HumanML3D dataset. + +### 2.1. Train the motion tokenizer model + +Please first check the parameters in `configs/config_h3d_stage1.yaml`, e.g. `NAME`, `DEBUG`. + +Then, run the following command: + +``` +python -m train --cfg configs/config_h3d_stage1.yaml --nodebug +``` + +### 2.2. Pretrain the MotionGPT model + +Please update the parameters in `configs/config_h3d_stage2.yaml`, e.g. `NAME`, `DEBUG`, `PRETRAINED_VAE` (set it to the path of the latest checkpoint from the previous step). + +Then, run the following command: + +``` +python -m train --cfg configs/config_h3d_stage2.yaml --nodebug +``` + +### 2.3. Instruction-tune the MotionGPT model + +Please update the parameters in `configs/config_h3d_stage3.yaml`, e.g. `NAME`, `DEBUG`, `PRETRAINED` (set it to the path of the latest checkpoint from the previous step). + +Then, run the following command to store all motion tokens of the training set for convenience: + +``` +python -m scripts.get_motion_code --cfg configs/config_h3d_stage3.yaml +``` + +After that, run the following command: + +``` +python -m train --cfg configs/config_h3d_stage3.yaml --nodebug +``` + +### 3. Evaluate the model + +Please first put the trained model checkpoint path in `TEST.CHECKPOINT` in `configs/config_h3d_stage3.yaml`. + +Then, run the following command: + +``` +python -m test --cfg configs/config_h3d_stage3.yaml --task t2m +``` + +Some parameters: + +- `--task`: evaluation task, one of t2m (text-to-motion), m2t (motion translation), pred (motion prediction), inbetween (motion in-between) + +Due to a Python package conflict, the released implementation of the linguistic metrics for the motion translation task uses [nlg-metricverse](https://github.com/disi-unibo-nlp/nlg-metricverse), whose results may not be consistent with those computed by [nlg-eval](https://github.com/Maluuba/nlg-eval). We will fix this in the future. + +
+ ## 👀 Visualization -## ❓ FAQ -
Question-and-Answer -
+
+ Render SMPL + +### 1. Set up Blender (WIP) + +Refer to [TEMOS-Rendering motions](https://github.com/Mathux/TEMOS) for Blender setup, then install the following dependencies: + +``` +YOUR_BLENDER_PYTHON_PATH/python -m pip install -r prepare/requirements_render.txt +``` + +### 2. (Optional) Render rigged cylinders + +Run the following command using Blender: + +``` +YOUR_BLENDER_PATH/blender --background --python render.py -- --cfg=./configs/render.yaml --dir=YOUR_NPY_FOLDER --mode=video --joint_type=HumanML3D +``` + +### 3. Create SMPL meshes + +``` +python -m fit --dir YOUR_NPY_FOLDER --save_folder TEMP_PLY_FOLDER --cuda +``` + +This outputs: + +- `mesh npy file`: the generated SMPL vertices with the shape of (nframe, 6893, 3) +- `ply files`: the ply mesh files for Blender or MeshLab + +### 4. Render SMPL meshes + +Run the following command to render SMPL using Blender: -### The purpose and ability of MotionGPT: +``` +YOUR_BLENDER_PATH/blender --background --python render.py -- --cfg=./configs/render.yaml --dir=YOUR_NPY_FOLDER --mode=video --joint_type=HumanML3D +``` + +Optional parameters: + +- `--mode=video`: render an mp4 video +- `--mode=sequence`: render the whole motion in a single png image. +
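To check the fitted meshes without Blender, the rough sketch below loads the mesh npy and exports a single frame as a point cloud; `trimesh` is an extra dependency not required by this repo, and the file names are illustrative:

```python
# Illustrative only: peek at the SMPL vertices produced by fit.py.
import numpy as np
import trimesh  # extra dependency, not used by MotionGPT itself

verts = np.load("TEMP_PLY_FOLDER/example_mesh.npy")  # hypothetical path, shape (nframe, n_vertices, 3)
print(verts.shape)

# Export the first frame as a point cloud to view in MeshLab.
trimesh.PointCloud(verts[0]).export("frame_000.ply")
```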
+ +## ⚠️ FAQ + +
Question-and-Answer + +### The purpose and ability of MotionGPT
- The motivation of MotionGPT. + The motivation of MotionGPT. **Answer:** We present MotionGPT **to address various human motion-related tasks within a single unified model**, by unifying motion modeling with language through a shared vocabulary. To train this unified model, we propose **an instructional training scheme under the protocols for multiple motion-language tasks**, which further reveals the potential of Large Language Models (LLMs) in motion tasks beyond the success of language generation. However, this combination is non-trivial since it needs to model and generate two distinct modalities from scratch. Contrary to previous work that leverages CLIP to extract text embeddings as motion generation conditions, like T2M-GPT, MotionGPT introduces **motion-language pre-training on LLMs**, so it can leverage the strong language generation and zero-shot transfer abilities of pre-trained language models, as well as generate human language and motion in a unified model. +
- Instruction tuning, reasoning, and zero-shot learning. -**figure** + Instruction tuning and zero-shot learning. +figure12 + +**Answer:** We propose instruction tuning to **train a single MotionGPT across all motion-related tasks**, while task-specific tuning is to train and evaluate MotionGPTs on a single task. We employ these two training schemes to study the ability of MotionGPT across multiple tasks. As shown in this figure, we provide **zero-shot cases**. Benefitting from strong language models, MotionGPTs can understand unseen words in the text-to-motion training set, like "**scuttling**" and "**barriers**", and generate correct motions based on the meaning of sentences. However, it still struggles to generate **unseen motions**, like gymnastics, even if MotionGPTs understand the text inputs. -**Answer:** We propose instruction tuning to train a single MotionGPT across all motion-related tasks, while task-specific tuning is to train and evaluate MotionGPTs on a single task. We employ these two training schemes to study the ability of MotionGPT across multi-tasks. As shown in this figure, we provide zero-shot cases. Benefitting from strong language models, MotionGPTs can understand unseen works in the text-to-motion training set, like "scuttling" and "barriers", and generate correct motions based on the meaning of sentences. However, it still struggles to generate unseen motions, like gymnastics, even if MotionGPTs understand the text inputs. Moreover, this reasoning provides inspired insight for our future research. We will explore this direction and provide more detailed zero-shot learning evaluations.
- While in view of the recent success of LLMs, the authors should pay attention to unifying current available datasets to exploit the scalable potential of language models when processing large-scale data besides increasing model size. + In view of the recent success of LLMs, MotionGPT should pay attention to unifying current available datasets to exploit the scalable potential of language models when processing large-scale data besides increasing model size. + +**Answer:** We have faced this **limited dataset issue** while implementing MotionGPT and in our further research. It is a hard but valuable work to unify and collect a larger motion dataset. Fortunately, some researchers are working on this problem, as seen in recent work like [Motion-X](https://motion-x-dataset.github.io/) and other datasets, which hold promise for advancing large-scale motion models. We intend to further evaluate MotionGPT on these larger datasets once they become available. -**Answer:** We appreciate your insight and totally agree with this suggestion. We have faced this limited dataset issue while implementing MotionGPT and in our further research. It is a hard but valuable work to unify and collect a larger motion dataset. Foruthertaly, some researchers are working on this problem, as seen in recent work like Motion-X and other datasets, which hold promise for advancing large-scale motion models. We intend to further evaluate MotionGPT on these larger datasets once they become available.
- How well MotionGPT learns the relationship between motion and language? + How well does MotionGPT learn the relationship between motion and language? +figure10figure12 -**Answer:** Unlike the previous motion generators using the text encoder of CLIP for conditions, please note that MotionGPTs leverage language models to learn the motion-language relationship, instead of relying on text features from CLIP. According to our zero-shot results (cf. Fig. 12) and performances on multi-tasks (cf. Fig. 10), MotionGPTs establish robust connections between simple/complex texts and simple motions in evaluations, but they fall short when it comes to complex-text to complex motion translation. -
-
+**Answer:** **Unlike** the previous motion generators using the **text encoder of CLIP** for conditions, please note that MotionGPTs leverage language models to learn the motion-language relationship, instead of relying on text features from CLIP. According to our zero-shot results (cf. **Fig. 12**) and performances on multi-tasks (cf. **Fig. 10**), MotionGPTs establish robust connections between simple/complex texts and simple motions in evaluations, but they fall short when it comes to complex-text to **complex motion translation**. +
+### More technical details -### More technical details:
- Why choose T5 as the base model? an encoder-decoder architecture. Have you tried a decoder-only model like LLaMA? + Why choose T5, an encoder-decoder architecture, as the base model? How about a decoder-only model, like LLaMA? +table15 + +**Answer:** The **first language model that we used** to build MotionGPTs is **LLaMA-13B**. However, it shows insufficient performance and low training efficiency. We assume the reason is the limited dataset size compared to the large parameters and language data of LLaMA. We tried a smaller decoder-only backbone, **GPT2-Medium**, and provide the results in **Tab. 15**. We thus chose **T5-770M**, a small but common language model, as our final backbone, because many previous vision-language multimodal works, like **Unified-IO** and **BLIP**, have chosen T5; this encoder-decoder architecture shows strong power to address multi-modal tasks. In addition, the main advantage of a decoder-only model is self-supervised training without paired data; since we train on paired data, this advantage is greatly weakened. We are still working on collecting a large motion dataset for larger motion-language models. -**Answer:** The first language model that we used to build MotionGPTs is LLaMA-13B. However, it shows insufficient performance and low training efficiency. We assume the reason is the limited dataset size compared to the large parameters and language data of LLaMA. We tried a smaller size decoder-only backbone GPT2-Medium and provide the results in Tab. 15. Then, we thus choose T5-770M, a small but common language model, as our final backbone, because many previous vision-language multimodal works, like Unified-IO and BLIP, have chosen T5, this encoder-decoder architecture. It shows a strong power to address multi-modal tasks. In addition, the decoder-only model has the advantage for self-supervised without pair data while we have paired data which this advance is greatly weakened. We are still working on collecting a large motion dataset for larger motion-language models.
- How do you merge the text vocab and motion vocab in detail? concatenating them together? + How to merge the text vocab and motion vocab in detail? By concatenating them together? + +**Answer:** To ensure **a shared distribution between language and motion**, we initialize the motion tokens separately and concatenate them alongside the language tokens. This step ensures a balanced representation that encompasses both modalities. Besides, the token embeddings are actively trained during the entirety of **stages 2 and 3**, ensuring a comprehensive fusion of language and motion knowledge. -**Answer:** To ensure a shared distribution between language and motion, we initialize the Motion tokens separately and concatenate them alongside the language tokens. This step ensures a balanced representation that encompasses both modalities. Besides the token embeddings are actively trained during the entirety of stages 2 and 3, ensuring a comprehensive fusion of language and motion knowledge. We will also elaborate on this concatenation in the final version.
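A minimal sketch of this idea with Hugging Face `transformers`, assuming a T5 backbone, 512 VQ codes, and two extra boundary tokens; the exact token naming and initialization in MotionGPT may differ:

```python
# Sketch: extend a T5 vocabulary with discrete motion tokens (token naming is an assumption).
from transformers import T5Tokenizer, T5ForConditionalGeneration

codebook_size = 512
tokenizer = T5Tokenizer.from_pretrained("t5-base")
model = T5ForConditionalGeneration.from_pretrained("t5-base")

# One token per VQ code, plus start/end markers for a motion span.
motion_tokens = [f"<motion_id_{i}>" for i in range(codebook_size + 2)]
tokenizer.add_tokens(motion_tokens)

# Grow the shared embedding table so the motion tokens get trainable embeddings,
# which are then learned jointly with the language tokens during stages 2 and 3.
model.resize_token_embeddings(len(tokenizer))
```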
- For tuning on each task, do you tune the entire model or just part of it? + For tuning on each task, tune the entire model or just part of it? + +**Answer:** To address individual tasks, we adopt a focused approach where the entire model is fine-tuned. Our rationale lies in the fact that, for each specific task, our emphasis is on optimizing task-specific performance, without retaining an excessive amount of intelligence learned from other tasks. Besides, we only exclusively fine-tune the text-to-motion task, while other tasks are reported without specific tuning. -**Answer:** To address individual tasks, we adopt a focused approach where the entire model is fine-tuned. Our rationale lies in the fact that, for each specific task, our emphasis is on optimizing task-specific performance, without retaining an excessive amount of intelligence learned from other tasks. Besides, we only exclusively fine-tune the Text-to-Motion task, while other tasks are reported without specific tuning.
-
-### More experimental details: +### More experimental details
- Can MotionGPT perform motion editing or motion composition similar to MotionDiffuse and MDM? + Can MotionGPT perform motion editing or motion composition similar to MotionDiffuse and MDM? - | Method | FID $\downarrow$ | DIV $\rightarrow$ | ADE $\downarrow$ | FDE $\downarrow$ | - | :------------------- | :--------------- | :----------------- | :--------------- | :---------------- | - | Real | 0.002 | 9.503 | - | - | - | MDM | 6.031 | 7.813 | 5.446 | 8.561 | - | T2M-GPT | 2.056 | 8.635 | 6.161 | 8.302 | - | **MotionGPT (Ours)** | **0.905** | **8.972** | **4.745** | **6.040** | +| Method | FID $\downarrow$ | DIV $\rightarrow$ | ADE $\downarrow$ | FDE $\downarrow$ | +| :------------------- | :--------------- | :---------------- | :--------------- | :--------------- | +| Real | 0.002 | 9.503 | - | - | +| MDM | 6.031 | 7.813 | 5.446 | 8.561 | +| T2M-GPT | 2.056 | 8.635 | 6.161 | 8.302 | +| **MotionGPT (Ours)** | **0.905** | **8.972** | **4.745** | **6.040** | -Comparison of motion prediction on HumanML3D dataset using motion data only. +**Comparison of motion prediction on HumanML3D dataset using motion data only.** + +**Answer:** Referring to MDM, motion editing has two categories: **body part editing** and **motion completion** in the temporal domain. MotionGPT is capable of the latter, which includes **motion prediction** and **motion in-between**. It outperforms both **MDM** and **T2M-GPT** in the table above. However, when it comes to body part editing, the vector quantization(VQ)-based methods, like MotionGPT and T2M-GPT, are not as suitable as diffusion-based models that utilize diffusion inpainting on raw motion data. Editing body parts with LLM and prompts is a promising direction but still needs exploration. -**Answer:** Referring to MDM, motion editing has two categories: body part editing and motion completion in the temporal domain. MotionGPT is capable of the latter, which includes motion prediction and motion in-between. It outperforms both MDM and T2M-GPT in table above. However, when it comes to body part editing, the vector quantization(VQ)-based methods, like MotionGPT and T2M-GPT, are not as suitable as diffusion-based models that utilize diffusion inpainting on raw motion data. We agree that editing body parts with LLM and prompts is a promising direction but still needs exploration.
- How do you implement the MDM on the motion prediction and in-between tasks? + How to implement the MDM on the motion prediction and in-between tasks? + +**Answer:** Please follow the approach outlined in **Appendix B.4** and **Line-296** of our paper, where we highlight that MDM achieves the motion in-between task using a masked motion "in-painting" technique. Specifically, this involves fixing the initial and final portions of the motion and allowing the model to generate the central portion. To adapt this concept for motion prediction, we similarly fix a portion of the motion – in our case, **the first 20%** – and generate the subsequent sequence. -**Answer:** Thank you for your inquiry. We follow the approach outlined in Appendix B.4 and Line-296 of our paper, where we highlight that MDM achieves the motion in-between task using a masked motion "in-painting" technique. Specifically, this involves fixing the initial and final portions of the motion and allowing the model to generate the central portion. To adapt this concept for motion prediction, we similarly fix a portion of the motion – in our case, the first 20% – and generate the subsequent sequence.
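A small sketch of the conditioning masks described above; the frame counts and the mask convention are assumptions for illustration, not MDM's actual interface:

```python
# Sketch: build observation masks for motion completion (assumed convention:
# True = frame is given as a condition, False = frame must be generated).
import numpy as np

def prediction_mask(n_frames: int, observed_ratio: float = 0.2) -> np.ndarray:
    """Motion prediction: keep the first 20% of frames, generate the rest."""
    mask = np.zeros(n_frames, dtype=bool)
    mask[: int(n_frames * observed_ratio)] = True
    return mask

def inbetween_mask(n_frames: int, prefix: int, suffix: int) -> np.ndarray:
    """Motion in-between: keep the beginning and the end, in-paint the middle."""
    mask = np.zeros(n_frames, dtype=bool)
    mask[:prefix] = True
    mask[n_frames - suffix:] = True
    return mask

print(prediction_mask(10))       # first 2 of 10 frames observed
print(inbetween_mask(10, 2, 2))  # both ends observed, middle generated
```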
- Motion down-sample, if only given a start frame and an end frame as the in-between input, would the model perform well? + Motion down-sample, if only given a start frame and an end frame as the in-between input, would the model perform well? + +**Answer:** VQ-based methods, such as MotionGPT and T2M-GPT, employ downsampling tricky to enhance the density of the codebook or tokens and reduce computing costs. This indeed becomes a constraint when the operation granularity is smaller than the down-sample rate. However, to address this issue, only the start and end frames are provided as in-between inputs. Some technical tricks can be used, such as repeating a single start or end frame up to the window size as inputs and removing the redundant parts in outputs. This does not significantly impact the effectiveness of the model, as there are often static beginnings or endings in the ground truth (GT) motion data. -**Answer:** VQ-based methods, such as MotionGPT and T2M-GPT, employ downsampling tricky to enhance the density of the codebook or tokens and reduce computing costs. This indeed becomes a constraint when the operation granularity is smaller than the down-sample rate. However, to address this issue, only the start and end frames are provided as in-between inputs. Some technical tricks can be used, such as repeating a single start or end frame up to the window size as inputs and removing the redundant parts in outputs. This does not significantly impact the effectiveness of the model, as there are often static beginnings or endings in the ground truth (GT) motion data.
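The frame-repetition trick can be sketched as follows, assuming the window size equals the down-sample factor of 4 discussed below; the helper is illustrative, not the repository's implementation:

```python
# Sketch: pad a single conditioning frame so it fills one VQ window.
import numpy as np

def pad_to_window(frame: np.ndarray, window: int = 4) -> np.ndarray:
    """Repeat a single (n_joints, 3) frame `window` times so the tokenizer sees a full segment."""
    return np.repeat(frame[None], window, axis=0)

start = np.random.randn(22, 3)   # a single start frame
segment = pad_to_window(start)   # shape (4, 22, 3), fed to the motion tokenizer
# After generation, the duplicated frames can simply be trimmed from the output.
print(segment.shape)
```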
- How is the down-sample rate chosen? It is a fundamental hyper-parameter that decides the overall granularity of the model. + How is the down-sample rate chosen? It is a fundamental hyper-parameter that decides the overall granularity of the model. | Downsampling | MPJPE $\downarrow$ | MPJPE $\downarrow$ | ACCL $\downarrow$ | FID $\downarrow$ | DIV $\rightarrow$ | | ------------ | ------------------ | ------------------ | ----------------- | ---------------- | ----------------- | @@ -122,20 +342,20 @@ Comparison of motion prediction on HumanML3D dataset using motion data only. | $l=4$ | 55.8 | 40.1 | 7.5 | **0.067** | 9.675 | | $l=8$ | 62.7 | 45.3 | 8.7 | 0.223 | **9.584** | -**Answer:** We selected the down-sample rate based on the frames-per-second (FPS) of the HumanML3D and KIT-ML datasets, which is 20 fps. Therefore, down-sampling by a factor of 4 to achieve 5 fps can ensure distinctiveness in motion frames, and prevents redundancy, and acceleration training. This choice was also made to ensure a fair comparison, as we utilized the same down-sample rate as T2M-GPT. As shown in the above table, we provide an ablation study on these parameters, where a factor of 4 achieves the best Frechet Inception Distance (FID) in motion reconstructions. -
+**Answer:** We selected the down-sample rate based on the frames-per-second (FPS) of the HumanML3D and KIT-ML datasets, which is **20 fps**. Therefore, down-sampling by a factor of 4 to achieve **5 fps** can ensure distinctiveness in motion frames, and prevents redundancy, and acceleration training. This choice was also made to ensure a fair comparison, as we utilized the same down-sample rate as T2M-GPT. As shown in the above table, we provide an ablation study on these parameters, where a factor of 4 achieves the best Frechet Inception Distance (FID) in motion reconstructions. +
- Failure analysis. Zero-shot ability on handling words that have semantic meaning but could be unseen. - -**Answer:** As shown in Fig. 12, we provide both zero-shot cases and failure cases. Benefitting from strong language models, MotionGPTs can understand unseen works in the text-to-motion training set, like "scuttling" and "barriers", and generate correct motions based on the meaning of sentences. However, it still struggles to generate unseen motions, like gymnastics, even if MotionGPTs understand the text inputs. -
+ Failure analysis. Zero-shot ability to handle words that have semantic meaning but could be unseen. +figure12 +**Answer:** As shown in **Fig. 12**, we provide both **zero-shot cases** and **failure cases**. Benefitting from strong language models, MotionGPTs can understand unseen works in the text-to-motion training set, like "**scuttling**" and "**barriers**", and generate correct motions based on the meaning of sentences. However, it still struggles to generate unseen motions, like gymnastics, even if MotionGPTs understand the text inputs. +
- Do TM2T, T2M, and poseGPT capture all human motion in their training dataset's discrete latent code? + Do TM2T, T2M, and poseGPT capture all human motion in their training dataset's discrete latent code? | Method | MPJPE$\downarrow$ | MPJPE $\downarrow$ | ACCL $\downarrow$ | FID $\downarrow$ | DIV $\rightarrow$ | | ---------------- | ----------------- | ------------------ | ----------------- | ---------------- | ----------------- | @@ -154,65 +374,71 @@ Comparison of motion prediction on HumanML3D dataset using motion data only. **Comparison of FID in text-to-motion task on KIT-ML dataset.** +**Answer:** Given sufficient training or testing data from the same dataset, motion reconstruction is not a challenging task for both VAE and VQ-VAE. We have provided the evaluation on motion reconstruction in **Tab.8**. However, when dealing with a **limited amount of motion data**, like the KIT dataset, **the VAE model shows better ability in motion interpolation, surpassing VQ-VAE**. +A relevant evaluation is shown above (also in **Tab.7**), where MLD (VAE) outperforms MotionGPT and T2M-GPT (VQ-VAEs) on FID. +The real challenge lies in reconstructing complex motions, such as diving or gymnastics sports. Existing motion generators struggle to accurately reconstruct **complex motions** using a codebook extracted from daily motion datasets. Collecting these complex yet valuable motions is still a significant challenge to the motion research community. -**Answer:** Given sufficient training or testing data from the same dataset, motion reconstruction is not a challenging task for both VAE and VQ-VAE. We have provided the evaluation on motion reconstruction in Tab.8. However, when dealing with a limited amount of motion data, like the KIT dataset, the VAE model shows better ability in motion interpolation, surpassing VQ-VAE. -A relevant evaluation is shown above (also in Tab.7), where MLD (VAE) outperforms MotionGPT and T2M-GPT (VQ-VAEs) on FID. -The real challenge lies in reconstructing complex motions, such as diving or gymnastics sports. Existing motion generators struggle to accurately reconstruct complex motions using a codebook extracted from daily motion datasets. Collecting these complex yet valuable motions is still a significant challenge to the motion research community.
-
-### About performances: +### About performances +
- Motion quality and performance gain. + Motion quality and performance gain. -| Method | FID $\downarrow$ | -|:--|:--| -| MDM | $0.544^{\pm.044}$ | -| MotionGPT | $0.160^{\pm.008}$ | +| Method | FID $\downarrow$ | +| :-------- | :----------------------------- | +| MDM | $0.544^{\pm.044}$ | +| MotionGPT | $0.160^{\pm.008}$ | | T2M-GPT | $\boldsymbol{0.116}^{\pm.004}$ | -Comparison of FID in text-to-motion task on HumanML3D dataset. +**Comparison of FID in text-to-motion task on HumanML3D dataset.** -| Method | FID $\downarrow$ | -|:--|:--| -| T2M-GPT | $0.514^{\pm.029}$ | -| MotionGPT | $0.510^{\pm.016}$ | +| Method | FID $\downarrow$ | +| :-------- | :----------------------------- | +| T2M-GPT | $0.514^{\pm.029}$ | +| MotionGPT | $0.510^{\pm.016}$ | | MDM | $\boldsymbol{0.497}^{\pm.021}$ | -Comparison of FID in text-to-motion task on KIT-ML dataset. +**Comparison of FID in text-to-motion task on KIT-ML dataset.** + +**Answer:** The FID metrics primarily focus on the motion quality rather than the correlation between motion and text. While MDM serves as a successful benchmark for motion generation, both MotionGPT and T2M-GPT outperform MDM by a margin of 0.38~0.43 on the FID scale. **However**, **the difference in motion quality among these three works is not significant in video supply**. Additionally, MDM outperforms two vector quantized methods, MotionGPT and T2M-GPT, in terms of FID on the KIT dataset. This can be attributed to the limited number of 3,911 motion sequences, which makes it **challenging to construct a comprehensive motion codebook**. More importantly, MotionGPT contributes to multiple motion tasks with LLM, particularly in generating both text and motion within a single model, rather than aiming to improve the FID metric. -**Answer:** The FID metrics primarily focuses on the motion quality rather than the correlation between motion and text. While MDM serves as a successful benchmark for motion generation, both MotionGPT and T2M-GPT outperform MDM by a margin of 0.38~0.43 on the FID scale. However, the difference in motion quality among these three works is not significant in video supply. Additionally, MDM outperforms two vector quantized methods, MotionGPT and T2M-GPT, in terms of FID on the KIT dataset. This can be attributed to the limited number of 3,911 motion sequences, which makes it challenging to construct a comprehensive motion codebook. More importantly, MotionGPT contributes to multiple motion tasks with LLM, particularly in generating both text and motion within a single model, rather than aiming to improve the FID metric.
- Limited performance gain with strong language models. + Limited performance gain with strong language models. + +**Answer:** We thought MotionGPT, using a **significantly larger language model**, would surpass all existing methods in all tasks. **However**, the evaluation shows MotionGPT achieves SOTA results in 18 out of 23 metrics, where many improvements are only small gains. This can be attributed to the limited size of the dataset. Both **HumanML3D (14,616 motions) and KIT (3,911 motions)** are **limited** in vocabulary size and overall dataset size, particularly when compared to billion-level language datasets, which affects the efficacy of large-scale models. Benefitting from recent dataset works, like [Motion-X](https://motion-x-dataset.github.io/), we will evaluate the performance gain of MotionGPT in larger datasets once they become available. -**Answer:** We thought MotionGPT, using a significantly larger language model, would surpass all existing methods in all tasks. However, the evaluation shows MotionGPT achieves SOTA results in 18 out of 23 metrics, where many improvements are only small gains. This can be attributed to the limited size of the dataset. As mentioned in R3, both HumanML3D (14,616 motions) and KIT (3,911 motions) are limited in vocabulary size and overall dataset size, particularly when compared to billion-level language datasets, which affects the efficacy of large-scale models. Benefitting from recent dataset works, like Motion-X, we will evaluate the performance gain of MotionGPT in larger datasets once they become available.
- Performance Gain on R-Precision in KIT. -**Answer:** The evaluation of R-Precision in the KIT dataset relies on the text encoder, which is built using a limited set of 6,353 textual descriptions. In contrast, MotionGPTs benefit from LLM and large language data, enabling them to generate longer and more nature language descriptions for motion. However, this leads to a discrepancy between the generated descriptions and the GT descriptions, resulting in a lower R-Precision. + Performance Gain on R-Precision in KIT. + +**Answer:** The evaluation of R-Precision in the KIT dataset relies on the text encoder, which is built using a limited set of 6,353 textual descriptions. In contrast, MotionGPTs benefit from LLM and large language data, enabling them to **generate longer and more natural language descriptions** for motion. However, this leads to **a discrepancy between the generated descriptions and the GT descriptions**, resulting in a lower R-Precision. +
- MotionGPT seems to sacrifice accuracy in exchange for additional functionalities. -**Answer:** As shown in Fig. 10, MotionGPT achieves SOTA on 18 out of 23 metrics across four motion-related tasks. Additionally, as mentioned by R3, both HumanML3D and KIT are limited in overall dataset size, particularly when compared to billion-level language datasets. This affects the efficacy of large-scale models. We will further employ a larger motion-text dataset to evaluate MotionGPT. Besides, MotionGPTs introduce motion-language pre-training, as well as its zero-shot ability, which is a promising direction worth exploring and could stimulate self-training procedures for further research. + MotionGPT seems to sacrifice accuracy in exchange for additional functionalities. +figure10 + +**Answer:** As shown in **Fig. 10**, MotionGPT achieves SOTA on **18 out of 23** metrics across four motion-related tasks. Additionally, both HumanML3D and KIT are limited in overall dataset size, particularly when compared to billion-level language datasets. This affects the efficacy of large-scale models. We will further employ a larger motion-text dataset to evaluate MotionGPT. Besides, MotionGPTs introduce motion-language pre-training, as well as its zero-shot ability, which is a promising direction worth exploring and could stimulate self-training procedures for further research. +
-
+### About illustrations -### More visualizations:
- Visualize some of the tokens in the vocabulary that VQ-VAE learned. + Visualize some of the tokens in the vocabulary that VQ-VAE learned. +figure13 + +**Answer:** As shown in **Fig.13**, we visualize these **motion tokens** in **motion vocabulary $V_m$** and their corresponding localized spatial-temporal contexts, depicted within **4-frame motion segments**. However, MotionGPT falls short in generating descriptions for each individual token, as the training is conducted on token sequences. -**Answer:** As shown in Fig.13, we visualize these motion tokens in motion vocabulary $V_m$ and their corresponding localized spatial-temporal contexts, depicted within 4-frame motion segments. However, MotionGPT falls short in generating descriptions for each individual token, as the training is conducted on token sequences.
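A rough sketch of how such a visualization could be produced, assuming the motion tokenizer exposes a `decode` method for codebook indices; the model loading mirrors `app.py`, but the decode interface is an assumption:

```python
# Sketch: decode a few codebook entries into short motion segments (loading mirrors app.py).
import torch
from mGPT.config import parse_args
from mGPT.data.build_data import build_data
from mGPT.models.build_model import build_model

cfg = parse_args(phase="webui")            # same config entry point as app.py
datamodule = build_data(cfg, phase="test")
model = build_model(cfg, datamodule)
model.load_state_dict(torch.load(cfg.TEST.CHECKPOINTS, map_location="cpu")["state_dict"])
model.eval()

with torch.no_grad():
    for idx in range(0, 512, 64):          # sample a few entries of the vocabulary V_m
        token = torch.tensor([[idx]])      # a single motion token index
        feats = model.vae.decode(token)    # assumed API; app.py only shows vae.encode
        joints = model.datamodule.feats2joints(feats.cpu())
        print(idx, joints.shape)           # each token covers roughly a 4-frame segment
```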
-
- -## Citation +## 📖 Citation If you find our code or paper helps, please consider citing: @@ -223,6 +449,14 @@ If you find our code or paper helps, please consider citing: journal={arXiv preprint arXiv:2306.14795}, year={2023} } + +@inproceedings{chen2023executing, + title={Executing your Commands via Motion Diffusion in Latent Space}, + author={Chen, Xin and Jiang, Biao and Liu, Wen and Huang, Zilong and Fu, Bin and Chen, Tao and Yu, Gang}, + booktitle={Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition}, + pages={18000--18010}, + year={2023} +} ``` ## Acknowledgments diff --git a/app.py b/app.py new file mode 100644 index 0000000..a27ae06 --- /dev/null +++ b/app.py @@ -0,0 +1,365 @@ +import gradio as gr +import random +import torch +import time +import cv2 +import os +import numpy as np +import pytorch_lightning as pl +import moviepy.editor as mp +from pathlib import Path +from mGPT.data.build_data import build_data +from mGPT.models.build_model import build_model +from mGPT.config import parse_args +from scipy.spatial.transform import Rotation as RRR +import mGPT.render.matplot.plot_3d_global as plot_3d +from mGPT.render.pyrender.hybrik_loc2rot import HybrIKJointsToRotmat +from mGPT.render.pyrender.smpl_render import SMPLRender +from transformers import WhisperProcessor, WhisperForConditionalGeneration +import librosa + +# Load model +cfg = parse_args(phase="webui") # parse config file +cfg.FOLDER = 'cache' +output_dir = Path(cfg.FOLDER) +output_dir.mkdir(parents=True, exist_ok=True) +pl.seed_everything(cfg.SEED_VALUE) +if cfg.ACCELERATOR == "gpu": + device = torch.device("cuda") +else: + device = torch.device("cpu") +datamodule = build_data(cfg, phase="test") +model = build_model(cfg, datamodule) +state_dict = torch.load(cfg.TEST.CHECKPOINTS, map_location="cpu")["state_dict"] +model.load_state_dict(state_dict) +model.to(device) + +audio_processor = WhisperProcessor.from_pretrained(cfg.model.whisper_path) +audio_model = WhisperForConditionalGeneration.from_pretrained(cfg.model.whisper_path).to(device) +forced_decoder_ids = audio_processor.get_decoder_prompt_ids(language="zh", task="translate") +forced_decoder_ids_zh = audio_processor.get_decoder_prompt_ids(language="zh", task="translate") +forced_decoder_ids_en = audio_processor.get_decoder_prompt_ids(language="en", task="translate") + +# HTML Style +Video_Components = """ +
+ + + + + + + +
+""" + +Text_Components = """ +

{msg}

+""" + + +def motion_token_to_string(motion_token, lengths, codebook_size=512): + motion_string = [] + for i in range(motion_token.shape[0]): + motion_i = motion_token[i].cpu( + ) if motion_token.device.type == 'cuda' else motion_token[i] + motion_list = motion_i.tolist()[:lengths[i]] + motion_string.append( + (f'' + + ''.join([f'' for i in motion_list]) + + f'')) + return motion_string + + +def render_motion(data, feats, method='fast'): + fname = time.strftime("%Y-%m-%d-%H_%M_%S", time.localtime( + time.time())) + str(np.random.randint(10000, 99999)) + video_fname = fname + '.mp4' + feats_fname = fname + '.npy' + output_npy_path = os.path.join(output_dir, feats_fname) + output_mp4_path = os.path.join(output_dir, video_fname) + np.save(output_npy_path, feats) + + if method == 'slow': + if len(data.shape) == 4: + data = data[0] + data = data - data[0, 0] + pose_generator = HybrIKJointsToRotmat() + pose = pose_generator(data) + pose = np.concatenate([ + pose, + np.stack([np.stack([np.eye(3)] * pose.shape[0], 0)] * 2, 1) + ], 1) + shape = [768, 768] + render = SMPLRender(cfg.RENDER.SMPL_MODEL_PATH) + + if not os.environ.get("PYOPENGL_PLATFORM"): + os.environ["DISPLAY"] = ":0.0" + os.environ["PYOPENGL_PLATFORM"] = "egl" + + size = (shape[1], shape[0]) + fps = 20.0 + fourcc = cv2.VideoWriter_fourcc('M', 'P', '4', 'V') + videoWriter = cv2.VideoWriter(output_mp4_path, fourcc, fps, size) + r = RRR.from_rotvec(np.array([np.pi, 0.0, 0.0])) + pose[:, 0] = np.matmul(r.as_matrix().reshape(1, 3, 3), pose[:, 0]) + for i in range(data.shape[0]): + img = np.zeros([shape[0], shape[1], 3]) + aroot = data[[i], 0] + np.array([[0.0, 0.0, 30.0]]) + aroot[:, 1] = -aroot[:, 1] + params = dict(pred_shape=np.zeros([1, 10]), + pred_root=aroot, + pred_pose=pose[[i]]) + renderImg = render.render(img.copy(), params) + renderImg = (renderImg * 255).astype(np.uint8) + videoWriter.write(renderImg) + videoWriter.release() + output_video_h264_name = output_mp4_path[:-4] + '_h264.mp4' + command = 'ffmpeg -y -i {} -vcodec h264 {}'.format( + output_mp4_path, output_video_h264_name) + os.system(command) + output_mp4_path = output_video_h264_name + video_fname = video_fname[:-4] + '_h264.mp4' + elif method == 'fast': + output_gif_path = output_mp4_path[:-4] + '.gif' + if len(data.shape) == 3: + data = data[None] + if isinstance(data, torch.Tensor): + data = data.cpu().numpy() + pose_vis = plot_3d.draw_to_batch(data, [''], [output_gif_path]) + out_video = mp.VideoFileClip(output_gif_path) + out_video.write_videofile(output_mp4_path) + + return output_mp4_path, video_fname, output_npy_path, feats_fname + + +def load_motion(motion_uploaded, method): + file = motion_uploaded['file'] + + feats = torch.tensor(np.load(file), device=model.device) + if len(feats.shape) == 2: + feats = feats[None] + # feats = model.datamodule.normalize(feats) + + # Motion tokens + motion_lengths = feats.shape[0] + motion_token, _ = model.vae.encode(feats) + + motion_token_string = model.lm.motion_token_to_string( + motion_token, [motion_token.shape[1]])[0] + motion_token_length = motion_token.shape[1] + + # Motion rendered + joints = model.datamodule.feats2joints(feats.cpu()).cpu().numpy() + output_mp4_path, video_fname, output_npy_path, joints_fname = render_motion( + joints, + feats.to('cpu').numpy(), method) + + motion_uploaded.update({ + "feats": feats, + "joints": joints, + "motion_video": output_mp4_path, + "motion_video_fname": video_fname, + "motion_joints": output_npy_path, + "motion_joints_fname": joints_fname, + "motion_lengths": motion_lengths, 
+ "motion_token": motion_token, + "motion_token_string": motion_token_string, + "motion_token_length": motion_token_length, + }) + + return motion_uploaded + + +def add_text(history, text, motion_uploaded, data_stored, method): + data_stored = data_stored + [{'user_input': text}] + + if 'file' in motion_uploaded.keys(): + text = Text_Components.format(msg=text) + motion_uploaded = load_motion(motion_uploaded, method) + output_mp4_path = motion_uploaded['motion_video'] + video_fname = motion_uploaded['motion_video_fname'] + output_npy_path = motion_uploaded['motion_joints'] + joints_fname = motion_uploaded['motion_joints_fname'] + + text = text + Video_Components.format(video_path=output_mp4_path, + video_fname=video_fname, + motion_path=output_npy_path, + motion_fname=joints_fname) + else: + text = f"""

{text}

""" + history = history + [(text, None)] + return history, gr.update(value="", + interactive=False), motion_uploaded, data_stored + + +def add_audio(history, audio_path, data_stored): + audio, sampling_rate = librosa.load(audio_path, sr=16000) + input_features = audio_processor( + audio, sampling_rate, return_tensors="pt" + ).input_features # whisper training sampling rate, do not modify + input_features = torch.Tensor(input_features).to(device) + predicted_ids = audio_model.generate(input_features, + forced_decoder_ids=forced_decoder_ids) + text_input = audio_processor.batch_decode(predicted_ids, + skip_special_tokens=True) + text_input = str(text_input).strip('[]"') + data_stored = data_stored + [{'user_input': text_input}] + gr.update(value=data_stored, interactive=False) + history = history + [(text_input, None)] + + return history, data_stored + + +def add_file(history, file, txt, motion_uploaded): + + motion_uploaded['file'] = file.name + txt = txt.replace(" ", "") + " " + return history, gr.update(value=txt, interactive=True), motion_uploaded + + +def bot(history, motion_uploaded, data_stored, method): + + motion_length, motion_token_string = motion_uploaded[ + "motion_lengths"], motion_uploaded["motion_token_string"] + + input = data_stored[-1]['user_input'] + prompt = model.lm.placeholder_fulfill(input, motion_length, + motion_token_string, "") + data_stored[-1]['model_input'] = prompt + batch = { + "length": [motion_length], + "text": [prompt], + } + + outputs = model(batch, task="t2m") + out_feats = outputs["feats"][0] + out_lengths = outputs["length"][0] + out_joints = outputs["joints"][:out_lengths].detach().cpu().numpy() + out_texts = outputs["texts"][0] + output_mp4_path, video_fname, output_npy_path, joints_fname = render_motion( + out_joints, + out_feats.to('cpu').numpy(), method) + + motion_uploaded = { + "feats": None, + "joints": None, + "motion_video": None, + "motion_lengths": 0, + "motion_token": None, + "motion_token_string": '', + "motion_token_length": 0, + } + + data_stored[-1]['model_output'] = { + "feats": out_feats, + "joints": out_joints, + "length": out_lengths, + "texts": out_texts, + "motion_video": output_mp4_path, + "motion_video_fname": video_fname, + "motion_joints": output_npy_path, + "motion_joints_fname": joints_fname, + } + + if '' == out_texts: + response = [ + Video_Components.format(video_path=output_mp4_path, + video_fname=video_fname, + motion_path=output_npy_path, + motion_fname=joints_fname) + ] + elif '' in out_texts: + response = [ + Text_Components.format( + msg=out_texts.split("")[0]), + Video_Components.format(video_path=output_mp4_path, + video_fname=video_fname, + motion_path=output_npy_path, + motion_fname=joints_fname), + Text_Components.format( + msg=out_texts.split("")[1]), + ] + else: + response = f"""

{out_texts}

""" + + history[-1][1] = "" + for character in response: + history[-1][1] += character + time.sleep(0.02) + yield history, motion_uploaded, data_stored + + +with open("assets/css/custom.css", "r", encoding="utf-8") as f: + customCSS = f.read() + +with gr.Blocks(css=customCSS) as demo: + + # Variables + motion_uploaded = gr.State({ + "feats": None, + "joints": None, + "motion_video": None, + "motion_lengths": 0, + "motion_token": None, + "motion_token_string": '', + "motion_token_length": 0, + }) + data_stored = gr.State([]) + + gr.Markdown( + "# Welcome to MotionGPT! \n ## You can type or upload a numpy file contains motion joints." + ) + + chatbot = gr.Chatbot([], elem_id="mGPT", height=600, label="MotionGPT") + + with gr.Row(): + with gr.Column(scale=0.85): + txt = gr.Textbox( + show_label=False, + placeholder="Enter text and press enter, or insert motion", + container=False) + with gr.Row(): + aud = gr.Audio(label='Speak', source="microphone", type='filepath') + btn = gr.UploadButton("📁 Upload motion", + elem_id="upload", + file_types=["file"], + variant='primary') + regen = gr.Button("🔄 Regenerate", elem_id="regen") + clear = gr.ClearButton([txt, chatbot, aud], value='🗑️ Clear') + + with gr.Column(scale=0.15, min_width=150): + method = gr.Dropdown(["slow", "fast"], + label="Render method", + interactive=True, + elem_id="method", + value="fast") + language = gr.Dropdown(["English", "中文"], + label="Speech language", + interactive=True, + elem_id="language", + value="English") + + txt_msg = txt.submit( + add_text, [chatbot, txt, motion_uploaded, data_stored, method], + [chatbot, txt, motion_uploaded, data_stored], + queue=False).then(bot, [chatbot, motion_uploaded, data_stored, method], + [chatbot, motion_uploaded, data_stored]) + txt_msg.then(lambda: gr.update(interactive=True), None, [txt], queue=False) + file_msg = btn.upload(add_file, [chatbot, btn, txt, motion_uploaded], + [chatbot, txt, motion_uploaded], + queue=False) + aud_msg = aud.stop_recording( + add_audio, [chatbot, aud, data_stored], [chatbot, data_stored], + queue=False).then(bot, [chatbot, motion_uploaded, data_stored, method], + [chatbot, motion_uploaded, data_stored]) + regen_msg = regen.click(bot, + [chatbot, motion_uploaded, data_stored, method], + [chatbot, motion_uploaded, data_stored]) + +demo.queue() + +if __name__ == "__main__": + demo.launch(server_name="0.0.0.0", server_port=8888, debug=True) diff --git a/assets/css/custom.css b/assets/css/custom.css new file mode 100644 index 0000000..c0929b1 --- /dev/null +++ b/assets/css/custom.css @@ -0,0 +1,359 @@ +/* Borrowed from https://huggingface.co/spaces/project-baize/chat-with-baize */ + +:root { + --chatbot-color-light: #f6f6f6; + --chatbot-color-dark: #121111; +} + +/* Light mode (default) */ +#mGPT { + background-color: var(--chatbot-color-light) !important; + color: #000000 !important; +} +[data-testid='bot'] { + background-color: #ffffff !important; +} +[data-testid='user'] { + background-color: #95ec69 !important; +} + +/* Dark mode */ +.dark #mGPT { + background-color: var(--chatbot-color-dark) !important; + color: #ffffff !important; +} +.dark [data-testid='bot'] { + background-color: #2c2c2c !important; +} + +.dark [data-testid='user'] { + background-color: #26b561 !important; +} + +#mGPT { + height: 100%; + min-height: 500px; +} + +[class*='message'] { + border-radius: var(--radius-xl) !important; + border: none; + padding: var(--spacing-xl) !important; + font-size: var(--text-xl) !important; + line-height: var(--line-lg) !important; + min-height: 
calc(var(--text-lg) * var(--line-lg) + 2 * var(--spacing-xl)); + min-width: calc(var(--text-lg) * var(--line-lg) + 2 * var(--spacing-xl)); +} +[data-testid='bot'] { + max-width: 85%; + width: auto !important; + border-bottom-left-radius: 0 !important; +} +[data-testid='user'] { + max-width: 85%; + width: auto !important; + border-bottom-right-radius: 0 !important; +} + +/* Text & Video */ +#method { + line-height: 1.95 !important; +} + +.side-content { + max-width: 340px; +} + +@media only screen and (min-width: 768px) { + .side-content { + float: left; + overflow-wrap: break-word; + padding-right: 2rem; + } + + .side-video { + float: right; + } +} + +/* Buttom */ +#upload { + color: #000000; +} + +.videodl-button { + position: absolute; + left: 80%; + top: 5px; + width: 24px; + height: 24px; +} + +.videodl-button svg { + width: 24px; + height: 24px; +} + +.npydl-button { + position: absolute; + left: 90%; + top: 5px; + width: 24px; + height: 24px; +} + +.npydl-button svg { + width: 24px; + height: 24px; +} + +/* Table */ +table { + margin: 1em 0; + border-collapse: collapse; + empty-cells: show; +} +td, +th { + border: 1.2px solid var(--border-color-primary) !important; + padding: 0.2em; +} +thead { + background-color: rgba(175, 184, 193, 0.2); +} +thead th { + padding: 0.5em 0.2em; +} +/* Inline code */ +#mGPT code { + display: inline; + white-space: break-spaces; + border-radius: 6px; + margin: 0 2px 0 2px; + padding: 0.2em 0.4em 0.1em 0.4em; + background-color: rgba(175, 184, 193, 0.2); +} +/* Code block */ +#mGPT pre code { + display: block; + overflow: auto; + white-space: pre; + background-color: hsla(0, 0%, 0%, 80%) !important; + border-radius: 10px; + padding: 1.4em 1.2em 0em 1.4em; + margin: 1.2em 2em 1.2em 0.5em; + color: #fff; + box-shadow: 6px 6px 16px hsla(0, 0%, 0%, 0.2); +} +/* Hightlight */ +#mGPT .highlight { + background-color: transparent; +} +#mGPT .highlight .hll { + background-color: #49483e; +} +#mGPT .highlight .c { + color: #75715e; +} /* Comment */ +#mGPT .highlight .err { + color: #960050; + background-color: #1e0010; +} /* Error */ +#mGPT .highlight .k { + color: #66d9ef; +} /* Keyword */ +#mGPT .highlight .l { + color: #ae81ff; +} /* Literal */ +#mGPT .highlight .n { + color: #f8f8f2; +} /* Name */ +#mGPT .highlight .o { + color: #f92672; +} /* Operator */ +#mGPT .highlight .p { + color: #f8f8f2; +} /* Punctuation */ +#mGPT .highlight .ch { + color: #75715e; +} /* Comment.Hashbang */ +#mGPT .highlight .cm { + color: #75715e; +} /* Comment.Multiline */ +#mGPT .highlight .cp { + color: #75715e; +} /* Comment.Preproc */ +#mGPT .highlight .cpf { + color: #75715e; +} /* Comment.PreprocFile */ +#mGPT .highlight .c1 { + color: #75715e; +} /* Comment.Single */ +#mGPT .highlight .cs { + color: #75715e; +} /* Comment.Special */ +#mGPT .highlight .gd { + color: #f92672; +} /* Generic.Deleted */ +#mGPT .highlight .ge { + font-style: italic; +} /* Generic.Emph */ +#mGPT .highlight .gi { + color: #a6e22e; +} /* Generic.Inserted */ +#mGPT .highlight .gs { + font-weight: bold; +} /* Generic.Strong */ +#mGPT .highlight .gu { + color: #75715e; +} /* Generic.Subheading */ +#mGPT .highlight .kc { + color: #66d9ef; +} /* Keyword.Constant */ +#mGPT .highlight .kd { + color: #66d9ef; +} /* Keyword.Declaration */ +#mGPT .highlight .kn { + color: #f92672; +} /* Keyword.Namespace */ +#mGPT .highlight .kp { + color: #66d9ef; +} /* Keyword.Pseudo */ +#mGPT .highlight .kr { + color: #66d9ef; +} /* Keyword.Reserved */ +#mGPT .highlight .kt { + color: #66d9ef; +} /* Keyword.Type */ +#mGPT 
.highlight .ld { + color: #e6db74; +} /* Literal.Date */ +#mGPT .highlight .m { + color: #ae81ff; +} /* Literal.Number */ +#mGPT .highlight .s { + color: #e6db74; +} /* Literal.String */ +#mGPT .highlight .na { + color: #a6e22e; +} /* Name.Attribute */ +#mGPT .highlight .nb { + color: #f8f8f2; +} /* Name.Builtin */ +#mGPT .highlight .nc { + color: #a6e22e; +} /* Name.Class */ +#mGPT .highlight .no { + color: #66d9ef; +} /* Name.Constant */ +#mGPT .highlight .nd { + color: #a6e22e; +} /* Name.Decorator */ +#mGPT .highlight .ni { + color: #f8f8f2; +} /* Name.Entity */ +#mGPT .highlight .ne { + color: #a6e22e; +} /* Name.Exception */ +#mGPT .highlight .nf { + color: #a6e22e; +} /* Name.Function */ +#mGPT .highlight .nl { + color: #f8f8f2; +} /* Name.Label */ +#mGPT .highlight .nn { + color: #f8f8f2; +} /* Name.Namespace */ +#mGPT .highlight .nx { + color: #a6e22e; +} /* Name.Other */ +#mGPT .highlight .py { + color: #f8f8f2; +} /* Name.Property */ +#mGPT .highlight .nt { + color: #f92672; +} /* Name.Tag */ +#mGPT .highlight .nv { + color: #f8f8f2; +} /* Name.Variable */ +#mGPT .highlight .ow { + color: #f92672; +} /* Operator.Word */ +#mGPT .highlight .w { + color: #f8f8f2; +} /* Text.Whitespace */ +#mGPT .highlight .mb { + color: #ae81ff; +} /* Literal.Number.Bin */ +#mGPT .highlight .mf { + color: #ae81ff; +} /* Literal.Number.Float */ +#mGPT .highlight .mh { + color: #ae81ff; +} /* Literal.Number.Hex */ +#mGPT .highlight .mi { + color: #ae81ff; +} /* Literal.Number.Integer */ +#mGPT .highlight .mo { + color: #ae81ff; +} /* Literal.Number.Oct */ +#mGPT .highlight .sa { + color: #e6db74; +} /* Literal.String.Affix */ +#mGPT .highlight .sb { + color: #e6db74; +} /* Literal.String.Backtick */ +#mGPT .highlight .sc { + color: #e6db74; +} /* Literal.String.Char */ +#mGPT .highlight .dl { + color: #e6db74; +} /* Literal.String.Delimiter */ +#mGPT .highlight .sd { + color: #e6db74; +} /* Literal.String.Doc */ +#mGPT .highlight .s2 { + color: #e6db74; +} /* Literal.String.Double */ +#mGPT .highlight .se { + color: #ae81ff; +} /* Literal.String.Escape */ +#mGPT .highlight .sh { + color: #e6db74; +} /* Literal.String.Heredoc */ +#mGPT .highlight .si { + color: #e6db74; +} /* Literal.String.Interpol */ +#mGPT .highlight .sx { + color: #e6db74; +} /* Literal.String.Other */ +#mGPT .highlight .sr { + color: #e6db74; +} /* Literal.String.Regex */ +#mGPT .highlight .s1 { + color: #e6db74; +} /* Literal.String.Single */ +#mGPT .highlight .ss { + color: #e6db74; +} /* Literal.String.Symbol */ +#mGPT .highlight .bp { + color: #f8f8f2; +} /* Name.Builtin.Pseudo */ +#mGPT .highlight .fm { + color: #a6e22e; +} /* Name.Function.Magic */ +#mGPT .highlight .vc { + color: #f8f8f2; +} /* Name.Variable.Class */ +#mGPT .highlight .vg { + color: #f8f8f2; +} /* Name.Variable.Global */ +#mGPT .highlight .vi { + color: #f8f8f2; +} /* Name.Variable.Instance */ +#mGPT .highlight .vm { + color: #f8f8f2; +} /* Name.Variable.Magic */ +#mGPT .highlight .il { + color: #ae81ff; +} /* Literal.Number.Integer.Long */ diff --git a/assets/images/figure10.webp b/assets/images/figure10.webp new file mode 100644 index 0000000000000000000000000000000000000000..0da88c98f3280934822e0066d93fa9f48e7b4266 GIT binary patch
zVYo%rwfQ=>KL?}h?>swAL}qt8eCkDcW?VKT>n&vo@4rLxULBe0;TgVz!KytU!G z$~nt9;|46hvvobgdv!W&qEU#pm8wnENv8xkQ}v`BewUbv^ouZzClVMJ8}XQdy$iIt zeNr-LRzl`MVrTHNp01EFs$dQc6?<2A!!-}NuoTYLFe_D@VSZ{UvG$oRtuaXDGdPb( zS-PQ4fJAFP=4O>z(;ofw+`l=BiF@pNT#=ot>=DY%l zlI;!`e7&z|jk>RmFK9T1M%(^sg`*zQ2pB@++^q07yHP{jL^lar<-7;oUBeasYX%(Q zU5b)kdP|4FdBDA<_&oa5EWrKUe?hdOjP4wdsE@7kLXX4yIF}sNv#VoUZ?|GlOW*+G z`3Qq;xO3$-Gd%Zkt&i-l=M@k8ySV3-1C^f zWz56BkO13H#p{TUf{hi=Jt7dmbM;3bYB%0SXAo|Z7U?*f+=(wu2eNq|NF}k`7XL2D z5H$Rl{2vcCOQa14?23+E)8o0>w>V}@j8VkpoRMVF<+N}e76`cZBF7;Aa?rhKZK{8e zj2HprwmrDwSXd58a-2un$J$DvBGzo=pt?%k zqpZ5uNfO^N)cl~1ZYrfdZb+S8%{Qpq_K@|q>ek;bIF~c~XQ|66&`zcc`b`zmFdY^M zo^5KWdxSczMbjsK%EIrDxNpXHEOB&kL}qtRj1{e^(|2BqGhg&&Tnw1hm8VU-O+u}k zgSSz9-|~RKS4o!>(a75N2e<@`E3qo#_u@1rRiS?VK|2oI{!}VYWmHmYIk>J%EC1Av zgyXE=3X;9$lb^}Im);=!KLr`uKu+jsX?;3-5fr@mN|^pEZSj6V6){}1eYp(y-2tA#6?i#F#Ikn5IN zxpbcA@U^UjSSej)TGJ@sr|;8{`OfzqKJT3W1D$Gis-7~wAv4l)xvFj;AL*xE-kkw_ z9fL<3>bgGqAsTlec(Rkw@f+k8JiYzXs&lkwPJiOTE#A)oeb5_PIJXO=U_W~}+(H?$D-j&+ zG+o27_VI^5x{8i9i4;X&j%R}xcR#~N=1bBl{0}{kO4$SXfA#=;R^U%6@>Z>*rIZxw zCp3G@cj5NtUxp4sbsq}7?NZp)L)=J}%MoXp|H4-B^N)sGjXSN@*{v~i^;F?+6^bw` z69HZ}EiArxk}+Vq*Gs##q|FY8@9P{ZRKJO-oIn$DEQ`NSVQeErH*i)^dRjfh=w8=d zL0NC|4oWn}@1CLNnY2ktIO9EMNoK!o8kTZZV*O^)0C>-|3EZ};C6K-%FXE?B=(q@A zO~i()#8b8dVfN1KyL1sg$Sv~dIl{kOQZ!0W#>DRrP4dZD0bN)7DdpjhAzzcs=79tC z5@h$^x^{>Ev+ueB2#=6`*VC)nMk?Q)&E1r3z}p9!F9FFbtn3PfEZfuM*EQ`c=n6kG zGBfdoT7W0{TzbvKqW|(sE0b#4QwMnk2kD7YrsSGxc2v1P^ zzE5gnZDiZkGUEE5sC#@O`ws)ZS7u{2ylF+-l9q`y>iE4a^JvAn?Nz?9XEH{XDCeT)Ax;i_7?6A8+u_XOF8wc+M}l_H9+J6H$5oJtSWuW z**{&WbB-U+Rm#lIJUuZ%U~2iJtEGdFbD6YP&5(2t>gcju@eX>`w$0JK^-Z5}=3fP^ zSali`&dLqXzaIbg+Vi-CULgKKfN8yhMdB_#MrjeKAZI2uS??t|N4b9o+`#Zl)S*%J z1$xN;Ym7{QYu|@(z`Q>#06H#<+f>xEo>+zPWVyB0al4+zMN@nimdPt8VM@+>636BY zUzLn_W%~bkI>2P!^C;{LI42CAVJoL>TVwEUvRB;K1 zovSfyee1HE3>dh`pVL8m%95-}?L}v4+=|)^U2L`qj)o>PLlU42Sd9~>&Nm_{y8^Nz zc`)S#xPK%7n6O|F-)xZzu*^40el_CX3BH?iP-U7e3ieMC9RvW?&4(CN4CG1+(t+1c zEZE6-GF7bw0ooDeBVHzRbfXA5WH~d5)ZD*^r|H^ed$mxY_Kbsg)12=%x6g6xSTRlS zHN8V)RjPNxMtV3yVG>`B+7f4>2SPKHDstFWGX=#-7Z|_LaPZs|%poeF(ILWh0|Sul zHH9{J2{U-{p$Mb`4$8H$9-P_bFhak`3pD7X zcMz;u^qVpOU=@cT#z)y(N%94|&O{AHA!qgL+bL-K2S5nrpY$wIcMUF|;uw;L_3KVf z)Wtug*%x_#$HkxKG!f3#J5LIc0sUw{n;YXlStw___6$r)g}SzguOa8P%-<9#fQVMV zXpqO>zKh=q6m`a1IzSjwvPHUf>b*L>;$PEzizR~X#Qe5w%25w*IV-8CwpdDyoYC;} z=wN~!Gcwr3gtesDT4(sDH3jEsYHmISz_sj*CDJd+ORJolSb!@bT$k=@6 zWQ>@VZX2>nYoC=tjUxG8zvy&0yr#U%drpalm!)73I&Xs?vD0OSb6ghRl^5Gk-VH^- z+<|6bS88O!$}sKk4vH*G$_dN?)p>BCsqiV+?_ZgWXsw52F1TOJ{7D+@N^#`jV^G@2 zto|4*?kE>KtU~F@X2W33X!7_dE;;T!) 
z+%7JCWTB^W0Xu-2Z#Bgc(3eXD4Ab49JJcDHuo4K01woImKhXf|q8(Wv#WG4}-bzNG zmb!qft~nNa!Ws-KSh1a#2VFpX^W*lD<@&wQYm(mE+19{(|rO;r!y@<$VlTb+_!(UmtVOvf#2MB4|CpWO3Av z`PTPFN55QdCeOr~S(}4gPuVe;uHvTLm1~va|K6HGD*W&a{$~yR$H>KJP3e&nDWa}^ zIRx=;_+6ADypF@0iIHa=>;qx36wQ8j&;-3~a~Fq4DNcT1vd@pnce9rC{M#s9kdDjX z4fyM?Bknp;wg2Pp%xu{K*39b*F*|0b+{v)NYJUTOlBShiS@>cl{EXtm5@@0r(<-#~h^ z%Y?O`(j2Sm)W~(6Q2Px9v<9U_Q@OwifqI(X3yu**O8Qk@nfyb>i*rXbwDSNL<;+YV zIx#Q+yw)YbV@>#TGDzBp8DA!xy__1*s2!jFA&j@gA);buhVr4FB5k_#fL=|^(uell z+w#x)D8OWzcOYB=U}U6oRzb8b3u+|Y9K^dVH_^@_nx%@d-GlW2_GvW)etS9=6ohQ( zkLk}FsEonTE1fK%N7c({pW{<27$n%Axwgi88|}m)trhl^B-fn7QiPy7`McH(RxBrd z>bYL#wo4&2ffXIP%kDDD*?jWdbUNkB=WT7`HCO^2o06JDC>wStuZ@24jl0YYHz^8m zo1iMM)=~n+fPJRFP^dTt0(-^jFtws;_I$UF-s=@pY$F+Vpg*-1DnCxu%#qdoUz{X% zdT2xswCLA(fWY9eH#2$Y_cUNiYfXH?LhSEm7RIV16xVNjaQH1x+YD14sP%*^*nBcL zW=e?msyJ^qg$kp8-rYvG)tDdqZyWDeB_Qg5GUf-TR8GF-%nOp+Qvx29RM1%!YXS)K zz&A-i#s?1b3^`HTER~Wt0gRabar0)UGz$V~jRB&P>>_@64se&csI&JZu1$M6JVX4J z&R-zgUf2(0JXU%69E_Y7j9RU_#^J<}Y-P1v3Gyd~7eYg&KMk73;5xulM`KV#{8vSF zzq-j!A_IS7e!UYNF0xom?#dhh$;4&ByLP4Kd?ky}yu|)`i02UVxu)Y_Tr9B0venx>32{N#zCSSn3^Xg7-qzf1DKztpjbYzBDX>mj6;mR4X zgP3iH%?ntL!v|Y^JXE@kqQ4@nhE>Hq=yvvR!BN3C>P0I-MTnzb! znZTQe>a0t@SLr8XhRL#%&~#blVe)S;wN7JiR^f{yk@gM z$AwFKoVUv5o2Y9h)!^56sQAbSj(ep|0Eo~1^4$@oh7W&*NS<29nNBczh=EX|Z1->UI8W zN9Ev{%;9>=_+859EmmH822<+6xTG7b-X>De6J)`a@Qk+y{C#^9stO!N`r3+cXeNYVg~=2D0fG(vLE{(d$2&B z!40WrilU6zy$e)b7x3hUwZvZ-1uxgfel!v)zf%?OE)v#@f0^m8Fc!ayIi;rZ$;OP< z4T~o}4qzfNlQClSEc5KWz4-h-S87Hy)*IwuAw35pH=FWUma$%z2ovycX-zino^X*- z&!(Yx{bfW1%zqnlN6ojXnQFJpT@vawJSn#?F$i_n)F#}1j)|BqPq)`DAk^zf7Hr zyYC`b{zNfO6}v*uUjAvuUAvzPkj!cJ>2cL`AKg>?cmGNHLfQ#3 zFO1o*AGEQ*!fv z{v`i+u`oa1m4Cafp4b|6988T{bR3r{H4OJlrQ-bdof+EE@tWiI$C#zvhz$a)a6a7=J)&ynZe2%PM1_~b z=}ntEt~HIF;{{q3f!=bK9<5x}<)UI{IrX6GZW7-7v5Dm(2IB&NnwSzDWPce(2FeouG%1z=3a&zKwuDVR%L?lA2a(pG5@s8UY}` z>PVm?VLYcC#>t{ns9pUG614dd(d3Kqa+S}4&D``C6xIi~6!iFHda=D;ePdsbzi_t? zjEfF_i@q~%1O14@glKCIi%f}c$>6oH2VHZFntQW8nozV-<7!n1xm(y5m+%dP$|S#1 z4G&goz!%uRmz<82SMv~9d*V=sUf7zj`Pg`F6t?NW9pl|jgOomsX6WpPE!ClZPhmd& z-o~Hqu*%MRwsg&)1B~1KQEH&hStf9(XEQ^$$Vk*KF#hv3#W355XOLt*t_2rw#1qcQ z$DeXoJ7UO5r{aBcA@+B;U37jl_c2^uk_d8sG~KxV0(UTKl(~0)_;#0wp!XRxaaz6W=8|8|@bA8y~*?`fHkNI za;5S!Gf(p(X|L|la`Gi%S^Woo7d(;Pa58w3`(+=M*YKv>VNs#jc6?PKsIBP2^PQqH z3}B_9!Kd%a6fAoz7I^S^Wt_Yp3@}1c_@lp{?{R!aRa=V|cG)=i``W2nC=wH_V20Re zOI5)(I1d~#h|^GALJbnR=X{r%r2n*Cp|_DtF@$W75B)iqbVZw%AlSVi1ldti(fv z1~JF5N9f6(=#;G(G&$RtN)^3YRx%cE96;{DGY1;2ZMd|wHEeB^uuP7@7is~)f1>Zr z?XgJ0eAo@q)Z7AwaifdOY$0Pz<<*_Tox&D3DNw#2$W|D@xs^dO=k>Whl2B|zlY%#I z{>9%tw;4jq%zB;5@_M+k)|e(RBsZ@Ch{Cr^Mce5c*4D3{>Jz_7c(oD=2Tbj(%1EtUK6%;tM=r*XOfg_WQ|eX%8+$~Ahx82D$IJ7Xp^y+L-f&iqedx5-mr@fOF@#pJ87F= zXxw=iX7zZZu>GXpdmssA4Lw%WITtgAV}{CM4}c~V?L{rH+nx2>&5G%Gh zFNVDWL}!x0v3dV1W0U?C=(d^*2~zz--Il~_T;Vn}IJt&v+w=iJ9$F)C#%~4QEg7;! 
z0xL3=+DP`mO5xn8L%0+5z!QGFi@al}c6FB~QO{acRw62HrpeaB!*9O*d{)k7uJhlp zbp0-{7)uwPZ)kp4liDS~ZF<1))O?y`Ru@bEm?1@l?F4#PZw9$zN>#zO z*%sMLVAu z)U(^*`M9?;SoU|?`j)H7q+6qRhU2lJ6_7pb+9jnPiY#MIcEhu{6v}kMcPtpgZLUw3 zd`SB7NeETNI3-ZtQiL#Pa`<1JSL7c;LFmIs-;14Mgj0yywhUx!F+=t+6qe$WgCA#( zXY3p6+Z#|oyfUtz6^e{gXdB53({2ao5p;ws`D9-9<ao{iAz`{QM=&|Z4L_Ua3q31+Ci!eJXuR;njM!}qrhGomvF)HcH*Tz%Pf zM^&J6;u`NHm9QyHNeiql3)dlP)p0;>{W0TL!8K(E^5{XlzD??F^N6-hFgF6$jw<*&F&-wD;x6` zD9Z)m`5AyY@RROjyOTdD_6{wJh^cKm7vOIZjoQjjMs~U}MIygXlfL)4vZ&qZxKWLMy}}3dh2*_QY+cO4t}#?ek9x{~|!@@A$Xy@yv$jRZZ1I{E>_>(NV|d7Y2l&z2mb3 z!P*RME{D#MxG&Ys{GtrTIS{O7R|qlCSUjEK|exQb{^lN{v5afP&{WJxLMC7u))} zQX$v>5a>yQa4#mZtBINXoLj_IxZTmzFwedB)iO8`@D`8=a+niaNbY?=3+}BRkZ(dy zv9vnUmYq9QqGSIB0*Fl!1X{Lad_ZJ3MR~U!8AWdFzS~^_mUPQ%JA`wJzAJ8vgLu*p&EE~y8y}{r|l6`Xe`GX zKn}iX2GR8H&B=y#QU{sCzk-7z<1&g_?yu@+7O-zHvSwfxx$0!d9T3T86fw z4wh`cAs~I8(RmmnfLk|66!b%#Kd&r=BJkLaQ9uR2Sm zk0IPc)+&6U`r@bX=_Zpd+;{#3`6_oCLZ8RE?(m>LVk^TUtE?e#4k6cVMlj6E2CWUV zkw#YkD^nT`CxGlfv1AbypT_ss!BSAlQXXSB#zv{hV{1l%yLR=+3F9s`_j>k;S$zP2>dyF8z^Rdt4(7XR3*Ox=lQkWY zt5T2ZQO0Pwdw(tuTvp`4#-G`Zh}E8xy_ArOX~vYYcXgbBYhd3CNJE9+YY0o^4KyO&VCM3!Z(faOu3<bcj^K8rre^$h3(T}G3G zHBt)9s^M%HGEdKt?9Wf`$p-052`$jzr*!=7DE2Hm0bO*ewF+OKQyH9vuKEAO%o{Gz z-p}&>u={ZV5lDFXHRB#L^0oENy%M|1lm55)#LTld5zM1 z1^IV)^YL4GR_^sl$p9J6 z(mTkjlD2#>*7TA?YB7|q6!6vI7_1rw7`Adz>1vKim|`dmi>rxIQs&-Xx5ReEv_9W9 z=oQ#+{*HiP9V4B+HC!5M`s&sp84XdRUO5i@wF2St)@krfSwL8f>a>3@+aB8nD`t|t zkC&1i6R~}mceV|^o&6dCqJ^14-{!q<@Ag)UQY?af)~VHi@UOZcQozACB|9W^9Ayy5 za`J6=mK9%@L4SR+&4f?LrN$-W=$%2Z*V&3 zjw`=Hv|F#jCTEv6r&Py^Fz5E>1N>5wBagmmq}rS<07A-W3$ZDKP7Ft(!RYE1w z4yu+AU&AbW3D<3o!O&#I0?Gs+*|3c|Jr9oaHF8JX4hl?f0+(K@^#UgR*~Gb7tsCcI z@#v`($vlMsD@yy7nQ=wvHQ#a`iKHXMkIg;m*WYLafmGS)SnVBa_Bb@eg9w?RN0WW= z9~29{-EW=jM;o^cQqcqh#AKnUY5VwBQjP39L1`O}J{hTQ-5y8M{!rR4*k?R2Lb!851 zIfPDcpYQ05a6-U1dF=ck;-@=jv!t{pYsM!w6Aaf`#GK|89cUer&r5|vtOOG)@c{vF z;1&*pN44N?x~?;D_;RGymNDh7Rz3%KX3E_Nw;a6-;9H8*E-1_uY)^i?vcVu|c=E&% zdL^hG`1l;dC#*l~dl>ZvmMJy6&ncERx0j7n-&$2cQTa=^wBueDXKET6Ld~e9fwqB= z;XSG;mi+LG`)rR{PK{ z;)i@%dq~4=lgFN(aP=pES68s72>VW>=R@y_E+~1f{CVB=ii!4jCTh`SooPr3OKY#P z?>j9d{x$9!i~+pn@-8o*dKsS9s?{c0=;+hn^W=TWM=`SUg0A1T#47DQxFremj{`r? 
z0F6Onv}W+DfoRA z3$_tsyW}fhIV%`eHD-HBM@DI}HER+U44q9^bi3`u|31>0m|)As|XfibOI*Ez%J0dfz53ZX{ z8t-!=1=|>ohVu22N#b0BF4CZJ?di1Q`CVjG-2{EiO6fjw^6QnQH1qY*53l}oTN}i` zP|_0tn8G4A<<pC2|={lAKrlQ;dD4O&7oEkBaipli6d@BLAsmf$g5M!L^k1}QO}1#kn} zmRAHdjoG1b_e2 z^}<^t&Nk21G&0*LeqIvxhK76c%p zBw(rU08!=k_Hat?L@l=(X|r_rS5juLk1OejuOf^(c6Cr2x7VslGiI3?L9W%HRWu2t#l_?h^6*$z?= zK^VQ80SFM7A3kgjaZDm4pjK}?ESfZp3ntu?f-%x&_E>u4R@Wj?O@^oAVxK-F?;{c^ zZx1%(scT6{1gQxAl^=cVWAlf0qOyhTrpxliDOyFBQJ!{Z=ly1WXkw+hc%2~Fg32DqHnxRzZSQeoqnBX`}3nV@$#zx!ZT~n+v&LL8|r| z7ibFU4azBn1xOs^!<~JSL`K7sRz~sr4Zwl7BZ-o=Xbp9l^CW;@nk&hM=NW!cQD_hr zYQxI=@PXkROtqx{jP*5eD0EWnj%TR5ai}3sgVuwoSECER7xZ4NgM^fHn0jJKfQHQN z1OgLe1In-=$SQymgkcYr|2P0TH-;+fFFz}?{Ax2kE25QZHUN1;$@q!qiVuW%qc(`; z1}4`h=Q|(Hah-=vG)E^(;I6+RVN$b^^CAe6mNy0;zUT18!hf~wOk!(SlCv;c=Vc4d zhMZYcy53ej>C9TBuLDnz7DQD(uU z0v~_YWu~}%_Q$o#k1W58$wCe!g zRAX}K0+_Q`#Duly&j-lUMJjpDUhZ`O!#c_G{s1K}6c(X@EA1>> zw4P&L7F`I{+~qD_X|jf4o3>lzSbM$-am?$$mZ_> zP1dKd$cZgZ_b$<6dp-Y?^vtA?0;$t^r^!fLr?#8RpUJa#CMf#j^;QG44>+mrxqv|sBohoEk@h)^?%aO`K_n za(Wul&Oj^2@D0oxGUkem>CP8AxtZ~6XT3BHCLThq08>0Pb~EkV%3KHll5KVA%{rxZ z+NkW`V4%9=3>VCXYw{kU%nqQaO7<7)i)F+UL8d|j30D_&$1ppsq&K_#vMsdK3s+hI zSQysxJ=eMlQNSDATa{LCXQB04W8-j_fF;YiG*a3`U*+b;Pz0IO{PXh625$!jB_cT0 zVc^lV4rpi{>#Z86S5$6NP;=SAR^YXTzLz)XtR#0BnmGB6$hWe1Uh;bfr)?#%vjJyS z;AJu`MlqWr5oL;r+*(b{p3sRWf9Q#{L6sn3c#!ROAp$>OrEaoJ`HE@Ik7#(?eWhIE z<_(i?zV{2jd#6u&y*|?&n^1Ig1Rzz?j>4P?AUB@DRW4rtzGJ&MqSRiJ77rBRk9zC% z&@(czeVM4d4x}3@WTf=;z_@05YRG!+nYsk{2iGcpB8Id?gFBE?Rc5Wx{?{?Hia6!{ zU;<@t*GqvE|4=)N$^D2qQB{jIjjm@lRSJq}s`v6gE5`QlL>0o*4Om)V{WhBtgnziH zi9%k6@;UH&L31XQXiFsi=olKCIQHkK!vEznorDiXy{2URA^2aP?9(l?prp*ivFWm% zCzH0JWT8%eMxO2C^pt6{F4F;Ti0ris8|fRl+u#cvLPRg2R-$1KvLg)Qo^W^`x; zYZmpla5mr#&9tWMdTxJudymU{*&k0_XZ#>DLtF3|qpyj79rdSr6uFuTV*YMHtIAL? z4-kVoO7=)bnB9NV4FpX_UW1BX{ScwRToXondknmx9f|rX%yUA%pdEdA-64rw37DeC zu+I2wu~Xg{x!^$U`5}*G7{yo24`QQ9ti9%_r$F}&V)=^;LLhfOtaW4XJ1>^#B!qNS zqhX!JS*H`?jPgC;zokpzm@S@6)%K-=_xnhNbqg(#zJ6%Vt&KV>+f?}kxC##$R&WsM zU)Y|?xU%&X2-T;wm#F$F_YEex7kv49@^VQle);JGf< z(lJ>`lgYJd5|?{`RtQcX}z0KAkPLt>`3!I5(wa`g93u>;g(H5O;&H$Gj>5C-HUw8;-H z-D5+Jq)16Z_GLhUt9_(Aa-`!mB-?UJ(K-;;WbLTdT*AbRpem5KEf^61cQUU*5j#07Uiil|%JTZBcJb^oz z7*}%HDRThwJ9_Sa*JPEU$A0w|h^Yi*1Dplm4@dggGQrj70+6G_Zbg8w!;7O)(WeUY zBlM8h@zUqa703!{uQPqE65dM_HB__ih|(de)*n=V#bwU1i$<4lq7uhD4}ks+bJuCO zxlFYOUZGfIy$!^kdj*wVhMv2>cdEZl=7iKKwK*A*D+9TV&^ffgj2z+;pt&MX6u{4I zC9iokp4HC_^VnfxE%ZHb%f-f~jQz@zda*ggI4h$xehe*UCp`~(jdU|zY@uu-MW*a+ z-4C}D(EBtHZ~&FPQgJ>=j>r8_i4-4Fr#pn37(A8$&iJ53nTDqFXxXy(L=Yw3uXgr4 zKi{EC@R`d65x!v-%7ktKbC0~>f=NlG02P5#k5l5rM*Z!Qc(8eV{0&j~iJ7bDr*bfb#k4Zjf_5P2w!75U@_)v!tT4V+{Q5+4s9NVS$#&(m^QC&&#vaG>Fy|5#~Cv=_ZPPseY)P zJDhh{Hp9YNOLf9!8AWj8=SUGdO7s}9WljUW=i1NBa9`wgy%*){M4Uhxd#q7$_|rDA zZp1vc$}E}UyU^whthH7d3DY}!{y+t&9j!>)8%DX<%82y^wTg{T*Xn=s*K(<68W%m^ zfo{2ijEyPtn@x5etLuC!RaOgUdF7ozNoiq3ASx_G%HfHUuf-Ck-Jbu2*MS;MC(XmMA__lb|?JE z7uIi!Byyos>W~zVltI7n2T@Q<>Mq+gi+2S$|2;01^RSj04?TIU3zF6Sj+e7}7r$`! 
zFqD+tNgZEPR0pXVAWH_GDwA<=11kU_NjR;OZ{#)U)C2XYQqVe?gDN z>8JikYn-p^5Uw|AJ3Q}2bgh)$$4G$Rum7s`e8>vTwXW=(bHKoij3J5ZBrHv62dx!} zss&BgxLSQ;i6|hfjY?9-h`|$}6;8L)M<5N&ax)6^+sR_?Hbp)6btE?6H z%k*CEy-j>9PIl*d5ab0s`(~jl>7}a3ox{5ua3;bZ=hW9cC)nEde~Thuhr z_r=Nn1IbroJVDs763!_%v7@JDbxWIl3ER#-vsnR8v+2un!oM7shLA`B=)UY=PmECJ z{- z4Vq%KCnVZ(ukoP?8#c=8Rb)H{bRk6SKZ5pa6*g!HE4k%cE)=UG_NIbeEz3XxOHy_* zj*iJZv|F2)tntGd$Q3$C|9;`|gj^}@u++oJ1bJlrZ*WLehaD2|Ergi?Z)48rFh_Axlc&<+Ycq0ErT%GZ2RRP^ z-0R&OR8bC5{rNA58ZylhB}nFRioyd>p|H-ZSZ!p~q?t#h*+ zu4Jo2DzjjyXNH(zC!;3Rjis26`C!h*y&8-5z#!uf$s9$WfO^&xA7UqSN=bY#`HtJZ1Ltd-KASCI;e_kVy<6w%6 z%7t!oCD5fROXAtND>1y127E-;TZfPrjNbLy7u_bQfR#jWlw}9pq|DmI66Vq~I054q ziV6Z)D3?lxoe%)KqUfiwAt8rvg~xWo6R&MEno5F-L($IZ=_IAsedpV4r3KSI0?tRv zD}>kmM%*K$$h(jO`edBx%`n$~X^mqsFX~oaieO55C%MN^^4{9ia21g|rF^a;X{rO6 zT%ZGs3j|U@&&q{X7-2l{ApA1 zp*+mi04p_~dG<(&DwAcvJHwJAD)^e&tMBQk5h06OnU1d5&Ut1K=U`n(&cxs(@Iz3^m}gr zIkL06_NRr3=}$q>LJWA&B6$N2U698!Zw2G8zu9O_IfD9-++>}D-Ef75oh1D!` zXDy(OhioP1L$-XOMmi1YLyJ` zPt1l4@2ZyA#p$IS3dEAJHgf02)wK5n%0V)bUlY||MT;?k)IHq2${N&=HMYzQ`kIi@ zr?b4{m2M`AFJY5Em5l7@uyp(CIHI8+GOKGtt)aV&-$Yg!x`cY)y?p?45iBTSltT%A z$wUWi4T?o!Ih{@1$#kP2B0%Sk!e899Mws`DOwr!lOb4J*k6shcw{QmYFc{8)zX46b z!`~d{SFTK-drFV?^Rbo~%0o0`_g})GG5K{8ZrlLXcfFaJTaSucNB zh%$ra{3$gYlJH1Qr=38oK6! zsfo#5Xqxf}zpW1B)@lL^cmo$>*BGO)g~;|u6k=}A9my+&UhDNWabE{5#%jh_e6j9v zkWv3qOF@cE)-R#i&HnaVsKE=CHidO?QLks7jhA!P(;I8gF&LNo?% zvR|Rp&YbV|lfZ6qUxfbk!Z;Y`pTtDo6kaW1QpZ9YdX-S=>M*A+&w7JSHX9fCRj`-n zy@oIapuU%h1Y|nYDQ7oM(eysmCr04`h_y>0AkX_+-jI6S26e;leiC6P8>=c%Ya>J( z5%^_k1SrPhP=&s{U5-*u+^ft$BZ-@_v}(w8iE|QE9vlEI`!a`b9^xrlBCc#s%&AO++ZZ3CD7IDJH+LmH z-om-|-xt0vUBuXj)*NF8$jvo?q_)5>Lnw1H%l$$!Z#X85`F`vGV%(#$gIF^^CDw`v z;jTQ~S;ck2>Lno`Fd*dh2|Da$*^JW>;$0_noRh`Q!}AfrcfLk%x$$=5C*&dwo1STs z-4H1O5Gdw!^zYxy?KVIe7YSaebuI=P+66(Yf&KT{J^cSiTQ6OC6grJhRti!u9yXlS z1HMFNZAee?vPJEF`h}h^jtKD@;_M=5!9aYV3EFCA)UJV4l0z+n_MI8VoUb2~`-D0W zcYo=vNmEPYE}a+D$1!9YK0BvQTk_$}wyzF*R+rz+cdV#D1S-!!+{;G z|6}o*Sd#?NbibS8_jzC0VjuhM;JG0!FCKmjg~G~ctngraiGWGHv1yYlr)1MC15svY z>3?wuSu;5XDl3)X1F0AnNx>AB1zE4fL)ZIA1Un%tj)Prn-QD|Ac3(8@RThI8K!i zBPp`NyJF!@X?DNgDVY$|g|)HD-pv`sn=`(e2h>8UKFS2HQ(PRqkS4a}U4Ww(njSee zBx0D>vNY2v_9fD)JE*Mb{+&b#?P^4+~x!5p;cY|T0A#_bsazp_x<~dm|_)HOqo+dpsBXic!7yh#^ zE@+CVMdK%+pNOG&5n1f2vdHyU(80>$$knX`U>QS2A7q@4rhIaU=~V>o6rV!BLNUF zkrrhl%h200y>p4NsF6QS7}etU$$wyDxW}^p`I@ zYWoIIhl8lzna$2Uf`s*jlyDPZV2+{lnhI%3ii2(lgLMG^{2oP>mloH-%F>zEwTgke zBDPQI(!^U6JE6bd_=Dub`89{~qUnt|`Nrn8LhZ!%dYW&xrXFQ;rWWAtGnTd8q6hS9knMZR)8=&4yN2a_-i-SL z<2Tlmw4~&-;E+E>c*KSbCLw_m>M%JKGw;QEi4M9Bja+#Em2Bl^)H zkkh2{3C?pZ^cILzh;mb*XHoPvhOnZ^MUocnJ#pl;H>eiHUWv<$@u^>)?7%jC_`5#A zp7<$Li!~SDD4KU`y<`oR%^6|_(=<~sWiE+L#mh&BC>xFIv;uBmN4hEnto>`eHFxxm zkLkCu5193saA5rCH1{v0z7emzT#Up#P4 z_9u}ktcw;wNSQDidrCUfPH$ytL4^qOMax{0Q)Mp#O3EqcNI zb|}Gr{n-KaNF&|TPAHHS>{6uQjkvV@8<@xuOyaTlY(RhlL3(<*)?9eJ9zuUQa5=QB z8}7imVuRJ@o554iZh!y;RBr(=@t!x2oOV5YTESfQTv{~f>hOCN?Q{pIIX^f600003 zbPU&q;~wCDC3BjNDf1R&Uas-H76t#Oes4_th1P$EG> zri|FMoDKK@glE)C@wk*N=Jcc5iL%Ug#+$t#H)U@HEQKy07OGFh9TVtT4Fs<6(xf=! 
z0w+eHnvpiDWJ4G^6Or)!3hDeAf%*j{fX`^)Oat>e+NgFqn!M;H{yFAx{aPaPGf*D6 z|F<^%P>+AH(bRwyrs*-r&#=ni*1Ym|8G8c7^qJBYf3!KJ5@4ql(yh3Hnvj2rfp_pW zWLb>yy&l*(0Pxp@_#mmbGLTx-!4Hhu_&A6zLoOTWU3~OfZ9WyT^`;qzBikhe zv2s&!s49VIV8odjfct@ye!9`-tjP#wkT2~FJLCqV#NO>4n&fe<_Zx@-gfMzl(4 zko$4f`7WAnp7ghW&mZK=6|Z+r7~31G#iAiC##!vfTc%<{%glQ*h&YR`Nb0+7N2$M7 zNXc!J4ZmtRU(b9(Rc?oMVD1KW9qW#fXrwEzT{b%N`CHC78HT!KKH^*RlT+dvEHEpmTWx~1cVp9iwK0NUmv z^aFCE-rZ6vIzQ?*p?H#-lLW8ft|~mjHqh;8j$6um^^XrA6<`2g4-LBM2T!P9KU=5V z8r?`*<*9TYpZB&UcN~j1O<)X)N$a-ge>Fmy(NMc2 z@9X!hKbp&^(xMmy1i*+4`48`Ms^Zm^TW?tF#onrfH0&TdaFgcoULoCi-*3D)L2-a` zdpdjH6|YWK@c@9MQ>)exCOF~i^Z|=9pNH4O4H^Wx`SKz<+?;u1vdF0p5~8DSf!Uoo$CTwdTQk;XbtKXJM$ zWwrdXk?<*N&VEtnIxx5_9X1xNa0Kg8c*C|Pb{}mAsW}h&Ml3C;u`C<>hPMl|Db2@- zfX2mjPl#=5)jv^_%OOL>Jle& zDOU_j*56+-B3bc>`~|iT(ia#fd#{_TGS1zBg5f- zD9V=5xygp+Z#oW*&t$koz=GhU?sKjnN;1DT5 z)5jqdvNV+im|8ZOv57S6kY*q;670zO6Z|-YtX}0hCjY2vQ7AZ8FNtq}^Y<$Yf*9_T z;E!AFF#)PtR6bW6e&RQ4<`)D|hX*n?d;G{&)Xb99Z7<7Ar)B z&*~jlQar$9JsKw#N`im8Q>ntVe`^C;iy4M)31~_gc7<;~@~|kG)O=!H?%ltRW!rAS zb5)Mn!dO!kbf|ypHci@W&m~#ID$n{nWU91!cx7u?3kml+(i&-V_-+MblW-s9Z&7Py4Z^}%EWH^Dm zsfYXBXj_@L5=@qZI^T(SrYqG7-agt-U*k6UWm0Wd*2b=;th|dN+{5%#3&w}l0`3cG z(}Y?4&XaETGH`-$uGNAA=!DBj!GCFsg1q2qjZcl(-YXhC>>vv-)sBU8fj z>P{sJptA$IO9W~lUX;V&vjgSXf7KR(@ILv_cAovK3t$Dcnruw z0C0-)2FTaGS2Vio(x^viD(0m`$(kP1$#0hfb^V|Lsey%b4c&x67_j`Hr0v77 zHwoQbu!02S89^g9J^^9jwL9fPcz5GqZLlje#;KTfm_!kAiQD60GeZez$`@X)y)K6~ z_z;1W2_!J42UQgfy0{e<-sfVe=-&(ko*`OH-A|8T_$TeWiRbKIL$O`j;KU}{H%7c_ z8AllgSDtF74}WdX7gHye9d}n9HohN6!AosX|2OT6t8Puz(fhMpER3iMbBW}~ENn?0GuYeCFMC*ER=9~Ue0vg`JBWMmueNyKz zL}m&j?`~ZK>Nm#pBFpEkd%1yK$fumxi1rl;YSYS^E0^o6T?qaB$uUNE>Zg7o+kzwc z4gKYqO0F=_qE=vf3zzW5!xjGj;$uekr}f@khMfsg zgmgqa;PsgO0UmQUIW0v2Bxg?LYExUJ-o?(F8ufSeG{c5WCv)aC#DIFgrh%Ln&O`yo zG+t~t3*tavu+7g(O)*<@!sXLbykSrqJQ@UA=*Qv10gm3wuOkrnzVl$vmrb_=+4{6z z4`%WMz7s{T3vXu|z5$kJ#++f_{KOW)sj?a z|B+#JIv;|%z{qYGKtYsUYtFqQ(>2 zk#UtO$>9uP6K{@qDGr2gm#NM$-M0nnLCKM)QXuYjoEFhy4VcCme_QPVyF=+;0131; zAIuf|BY4<3z1jS9`cgPqBCK~mblcjcg~_1>3k-X&HZshCF&h@-UQOfJWBrv;LA(tL zfz-8s+PWt380sQ7a4?V|msGM;O$L}ySLOXcbl`%rg(sfg=;DF7#i`mIPB2Q+o}04+ zJu-V*Xc{KOtGCAb`R&ASFd9KHW?~f^!%asOYOC4 zEwkKa>_IN279zZnqYEv3xJ3ikbp2X%LrEWXa1zKdqxP@fbp3I6HtN<-1p#Oom*x>L zabW(TZV8iUo+#iIpmtS7C%b~L_jdZkTnJ$ROZ`4vI{@HOdx9RY z7oUHGqtpu@pYkxY2cR{O!!$IS;~(O;XuEU%j2)%=`_4Ln;Rym=FEZam5UKe%dcWF= zAC>~<8ncnp-8u+?;bQ}nPuqfqDm1n95Rz2hKh zwoDsD8MN{lnSdMKv2) zr73r{n~*XW`zsnq2w?bl!9)PZ9!`EGvoY03!5iqOJUdM|Ik~08E2ZO&Fvy6!Px-P8 zd;GPhbae3OYEa@a&6phlM~U?{s~edPw}BNs;p|HiI_rA+i-^1$!h-nF1o2H~Xg~R# z<|v_`1N4~cj=ecvsh=M89N$`rA?%aN-hOnjS-pd<0qm~?>+y~t&BE!ZAaZ}`c)C(2 zNdGgTf9Txoa-GsCI;*zC2FNGtonYrY14I{f+u?J$v(BO&@q7Sxtj@qK( z{v0_xDQn`PDw0P$78$HNN9SZbWaWlll|NmsZ#(eSfTQy#GX&nnG?8 zzoUsN-nEu>pdN1}I)z4KbCJ1a?_F=}y7r~##_oI{Ee9+3vNN8SKD$?l?1kD2_kw~_ zJF}6RtX>A1brbJEH%W5HMmyc-LStI8R8Otw(Zx@0v`L0EUma9BiNM~(FKQlV`i^+{ z@*PUdsL|1fNto;gbnsgHb5>k<*6j~Z1krR7wUDXsW$bqQ_IRB()Leh$jEu(c!+4{NOK<5k&aeJ8A_dPP0Tv5rQxaszy2 z#|PgQ{P;L4GU=fE44%i-&k_ySg1-_7^61BdDvs+x0Z=!O@)#ohjx0LcIBa zPCr_|X^p&HBoZ8<$<(>eYe2E2)Q&PqluXJ2l(VUl4ZEX_iRF|LHDiW(^=@6J3;f!J zC>P#CWncROG%^IYHdYy7eBaoDURKBjmel;_v@$k|$(T991x3~bTRaokh#;poK1K5Fysd$XC%5|HYc!?O9gA%Y;^#Z5kg1aSIDy(Ns0d z46FCumIG6{*%>m-W&ee1Izkq|V`jFcy^p?8=F9OUXdKejySW>myK8H3W-~w%LXmM<*fRxEs064p$fp`%v1%dbtRV`VQ5#lwJH+r%zA4Jc8{^9ab%oQ|g|7dqb3 z>ZQQk)$x^%W&VV1H`M-byJbFR`qU)anR~U1hhpB9CO!!bF9MLmS-$&-;O&i6rT12! 
zAQiR7<$U;Bg47?y+k!AXoPUCzC%uhO%)d6M!tv)Nqqqld$|vb5*YrEedN(_luXw8C zo^$Lh043a^6oeXu9ex9xWNGB!=em@IUGq$K)PGa1&127-L$@B_F^TX`42??Vtk;DR zCB6uJG+;DesBk>B&C8Uwzrd4~slvF#tFy&CEzJpFgG8F-O>aGPhz@NhOi}BuO@UXh zBYrs8&Awp3LGBovVtEH^S2`jb+h9_*U@i!ILc3IGBt;G(LtoH{@uUa&Sv9=yL(wLp zg5NYX#1*K9%_pMKyXZne32QGZ22_i>1Qj{s^|ZFQOi?}8yqG_F)L{s3kP7M!0ovLaMfM*)$ z@?RX9)E63eLpwq3gOGrj3x;Wx1TsKT<(nlf*m-O*_egP0F4`GcYWiy~7t~#%FD#0i) z`;Vywe+={D&WRvbJ7hw4(8Df`RNE)ayqF+UjSDuu>F5_l!fmW z43j3x>dDj*F;8KduuP8h*w+h7lZ5W~xvg;Q9CrEy?`-+Q7M9Hf}WR(XV1&;JXZ*>^X!{00-l`PjmAfOyFI z&!%~PjiL5%I#1aS+@e|A0-0Ez*Xt}?;}q7iW7n18n>1rBjW0vigXqMkOl0A3KP)cY znvX!$x!@F`TPi-#=RU`~l zd2~)Dd?enhIHoHCsr5~DEp{~w1;M$$wWZ)H(k94>8?9S%X{^5p!gInWodW zq)uy_*lP%yG;+vW(o+E~f*baF;9IRT<{l>jGL~q+OtIsU518tb35W`=)(HeS@x8{i zm4-lt$`yCN&Et1jUAIM7KQ{qr(Z(uHTke`bVxc%neUO6E8Rj{54G8 zNEbRJ2rJW~u(ZAA4sU_}a5QK{JR%>Di}Rf&pKvwdM093;{r13JL;VW}Kl3Z`B0Qz0 zDzyn=3GBVMgfOjBp@0pPfGl;W+*nq^Z|)2FcZmcQd{GV8fx@1djxI?Msa$m+F#Nqxg|W=lq@=eC~%#N4HO^*`LLiehGgfJy)SE^y+fkL6GmG`f}%kpOe|7Bg=7ik+~ieb1f()C-K}IYc^Gr}a2)XIX=ThaKkU{@|-12oKd+Tj@%21uAE-Xm7pW#mb~b_ z=*ifnNyJ@wRQ)lBG8UHD-<&kQP(q8$f>=mJrlQq^{;}i9dxBxSM63C?iFSk}MjZAt zaq+$JqLrvDR{Ihi@k#2yl{vOY4;*+2IdVixQ$5bmOnry^dUcdZvrW3r(^$MPHn%lA z6f!@KCxF}JB(juKO(UvrGCZN{p+j1g4hPH6*KMw`<=7d)6!c90AfwG?SRM+eFyD@* zRtYzMFw(alGKfS<{stSq+x3iWB{$!ZXI3zhIeE^UqR`=%{`l?ht#R0T14b~=0X)=7 zSZ8X06*Ei*Aff9E8@$DWEy5pattSUFTZ0#rhycM~YjWlC?>%~-(+Zl>7g0-AxO>D; zDL&{&#M?Xr281}C8*KSE!>H~Fw5LV`Xel3Zl_;Tu1~X9HhD~BD-ZRCa8_|OTjST%5 z@3AFw8pmt;pGI6E!e24;94y<5>!bVSG4F@c}Lbfj@J3bOvOKsaJOKb8wOaR zsp&e;86c`AC28{VgL?^WkeTCfZ-dz-%8@9t(1S6}sKCO0hr<$hDQR@QZH+-C`kqBC zW%y!E<3x<1jl}(cNOx`U%-65ULRQGhTX(KfSI8Eh76PGTl21fsFSn~~z-F)-#?a!v za5$o058Y-nAG1BZnV8y6L?O>gzJwNodoaEVVKDoN6aPTVlj0V4wM*eCoh|0=m1K-U z!f%$Yn|?BP&#)`pRT9ZKCTLPRx@(HK7?^$MpiE5N``K^;DQFOz3#Tce3;1}`7qn42 z{6h4M&p>a&bdXWvic0Q6VQ!e77tA?9yG)s>`+tPUM^@e3IW}f_`&~;Qw)+bIGDFFu zBi136bQtJ|SeVY2URbPn+Y4hP&MngBPf+R<+hZ* zjlr@XII^uo+Vl*Gjn;asSZznjZ^?l*M%QLr*u^j6hpk)3RGIXxRI1x!w#GNHr2v5e zlw}=OiWjzR6KYI!S(PVQxaG^V9XX&+IHFD;&jSUL4@a{V1z!2>BkqGqNCX-oajk0S zYTeYsrPBeS^))IuI)7$vnzEK9+Vzu-yJJH*TVd-X3=2!4{`Sz&4RW|8`CM>G$uUzr z8~A*q_BX10F+VSIj(7jgoqWGt6I9`*Lgi9?L6}QK#|!J26-{6=?74-ITyxSDf`K%& zYl$o;5_fEqxJxjyzBt&;>@E?brZy$p6vl}?2l)d`0QdR`%cmF(u{4RfCcwTAFf7DU z@y0H~SZYJjlI8&FVk_bqdXu6nNV@XdMP)x8mp{Ggh_ zxmyOfWAaqIV%)X5kth;;PXV$22|X4U4{@|`j|9AWRX1!X)4_<2qi@@M(lTnggv%V( z^=qCGn&6uAnvb0>ghMQ)oNcniOMz+Feg9~k zL6i{hxMVaf`=oQu*P80?<1wn?1S5o?=7FjHt4nV+6mbYM4fJZU1UZYpr|cjF2)_$1w|JE z&8eG38eKRFMW9GKg`b!l^1A|RfXCTN!Lc(^Ue83qW;f{Q~7l=T5 zt`c3U7_`&Zhwz((z#Z0pW-0H1FW1{cJv&{WqTU7v-x~SZ>E*C+Da*;9sW`;86szVd zKJx2aiAXZeQu^G$52#5FaE`3LPE^?@E|W-DJsoK?JL)T5dY^3k6kj1p0h``LVrpQ& ztR;M7Gxh_I&-=6b;BgOEhK4E9_+Y|zVcQNLcX`^feif_F!6R$%NCvT=5u&7pc*iaf zu#tiMUsOER_6XaPjUIgiz0-tf_OJ&%Q>>^q{w2*=^P$ptc`%B~YUr8x>e~mH_E4p1 zN`E>!F3)XVx3)M8G$fj?L2N9{q&L+%1nzsi^@Dj&A{;AUhx>S`R&r)AL$u;p8$E6S zl`*5x(20z?MY$5G6p%2tKr2&;r?n>>4wfceWOUx{!S(QDyTiw>R^rbuMbNo_to z%dfJT4?r9%WZ-yjt}R^xVR&TE zI5sS+xm9fThj962vui6eN(U_w)9>LH7`|dEh@;qY9ZNf|(vdr1xA0i3LuVeDN4KVI z=bcL1eoanT$M2uMj=O~%mA{EHy6T`M8i()Yl`Qku%X48N$TtwE{UI{m+!cX`;O_F@ zY2hr0?gO}bk#zk2C8jR`L+QUO{k^<-9m#;3I3adz}OtYyto@q2HMJIx3o$}p`!ZIKQe5*?Hv6n>l$)RU}pm4`o zxDVt5C4o3p-;ngDwPSGks43@q&#meb*Fa74TWniWeoA6s_OJ%hz}T+X+_KvVS> z0YuPL{-d9e-p2v)EYE2J#ygfjI}-=&)wO@oL@)z$u#&RQ{ErwfV8rDCc@eCr$CJP9 z6Hish6v#a^b*mQ4nwFDyiRDHyVBQI{9{B?)hw^i%6{hd@B;qCyI43LJ#z|Lz^M!E%798zG!Q_!5M&rO1kkXvz{c4;)aSW%r^1 z?YI)Y&mWo}jMut7Kr`UPt?a_@ z@oUlB?}g#3@7QnEn-O69$$4Y{x%SEbla~ho{0#V&0dl^J0n+z-0LUx$_Pvi@|30Ta z_nzgxy`MwC;#zWC7I22v&PvefN8X`)GdJe#*WOZ!z9(3-ZPQ 
z4L?Es{C$87fb}xQK0&)*3xMVA;L;p`@wcosuNu$~Si6D#+WO@G+`IcZe=GS^ua|#D ze8o>Bc=c=lI(ZX)?)%Am&I93}@9Xm01Yo=q%h%Xjd#?pgy#qYuI^8M_xm6i+D%0gw zWGJjkf3+w>Z&n0fs|@~seh_{iO+TamapK#qvJ;T0ycZIH6Le;N4k1#4KqhefVqy1f zDJ(F2;=)s#r?{FK4Q+Y^Nn_7X7*$3^(m0KG3HB&2wC5O9y}zp;v_m)lH+{h3Muq5G z=eJ#KJKbGRf-Bgyis$YRG(&Pk&s~QG%2BC?h}RozUfz27oh-(5b$OT?c6+}UBeitH z{$0^5_t5Dn9zz`D>C7>D0geVr`mwnV0UVvsa%8Q;8G?US`UT{pzVUu3*U4(RF#gAV zGFucW0hYB4pk4u|C%78_M-rmevOIBLqm8qyva&V^g4;{RG;YG0i;Vjqx?!P%{+dWr zd5`^o?t<=Ve~1utTVZV~YPX_OF?N47n;g$&=;c&1@NAfs2JCz!9EzYUxDKrc;jZn-l#C!$vl?2M% zN45<)8N7(n_v@0|TSHVMfYm9l5|^#347ACdV+;NUMerE+xH)z2=|HqmXK(BS{p3+m zKX|kXOiot7 z0dX6uK9a8I8(ilq`kzS0EhCU2%zH*`?ZTfX5>%d!SQH`?0|sTc6st=a7%%Plk)+Ja zDN+|vesLc<@d3Bq$n6r5>_1Q<2F2<#nx!3A+O?NAxX!ZR%l5BqO{bD-HS+2KDULgl z;E{nedUMt_f*Uh;k+YrmCo_Q(HG2UHeqU1Q{j=kN5zBGLrDm8GlJ4a(6KTh@19iu0 z*4h`oYQ9Ul0R(Q`;MMrk)mbag9BtDwn&0e#Q^EoQ?ZH7xNjB4R7rgo$+{MT=N>J-{ z1fuv2&{RkBS@?Ha(B4NfnKQq;Xm-YD z>rD~Dce^ko>xhhXdbQHlU;|ZaENr5YTBw)<@H)C!U8S{*;yV^xqlDq=$z)V4_0Ili zs<3>MT<2-8kiee&fQj7lZ0Olo$X12zuwodl51ml^w`37=Ax7iGa_5L-^`t+soTz>V zC!h91&We>LI8y933)scRo-_Qp;_tQlb#ZX+IhI>{8C7A#jXN^E*={z1iZX70SnUCi;U%+%cao2JCmJ3&)uN>s zA7&C2tH?!SimLW-p*8#POkl|xI7HlTFd>Oj_y&1>r-${P$b=}_aQ?o?=yoh^$Z)!e zRQxmf{D-q|A?4L;CmQ?1l&XZaau#lh3GY*yl1X72t$S*52CHoU!ILliV?IHSePNX( zSyrXmW(xS{dPVQRKhi+Yjc!#HInb$NyQ`mTcj@B#o5_nN+eCvmgZr!w1nce{qZ`xfgDJ>j!Bx&eyU&T@5G(?DU)bp?+*>k*z=;9WO z2x?q%ig!aHEjL`I`|BP+GX^OYW>L@A@N=zn80*JVhA{Xn@Lo7Ux%Oz!n3RrF4TFfW zB4M^w$T4=7Tm8}=&+-J6|D6Kn1JgsZIkhK)IN^H8Q?8>^e1|yf#Dl9+tKx{gBePV^ zXy$oHKZ568O;m!ZtF_y6jlBkix-MCrdOfGNDF0kqPDB;ny|0gm@_#Q6780 z8FC(-{MQ=l(AnJ=D3;F8%LT=1&=~{chs$Vkr7|+f$M?i-QHzFCn5&7w>*$hT<4=_+ zlZ79y(YC+;;FkN>@4DZGz)2NF`+iht8cc1b6C`WBQZ3d+mRfg5WH9G5#^s{}X?~f+ zN%6{@%P*Y(&0TJs#s2O%R@0VZ%B6RsWrz}!GwhpRVA!Nmgp4dtQq5jzAZi-%1Z$oM z#P90S+32Av+1+?n7{@z`0qATh-aCfZ!mRUc6<&og`Q)`5dkPmyu1_CV?uB9X?SI%% z?`P9{R;^!Jf!61Zn9y~LW+XX@7!tk(_zU14VAgXqY~}TdOFERf8rsYkPCzP$J`RwM zw+UVZbBCr?_Ql1fQRqXH(uJ1e8Ap;Ot{ygIc33Ytk%<%;YXmXY8F~%=csQ$4O)eI~ zrQMkH98}^zz0)9|?!%AMV!(7c8R=i^twtFke{xn$E=6(+n>!@aFh6e#f}usKbDbX8 zm4|W2K!%43J7cha<1pGisi8AYF?x(&WQlK^#+L7h-WxC*N-Eh^5$;8sPZ@1(KPd74 zHUvW9EJ5|m%>5+*nW>p{;i|XWBYV!}JR!~|cgKyepKIZOs)=Un|^H#Ttns47Vl-Esr#m zOc@;B>4-UST-BdCM*)Pk;BIo$xExP>8;T_q&I9DL%-{J6v>r4d8&)SBAdO;**D5gA zPDt}Lsbb`86XL9t?Sdf5{V6;o_yiN91I~#g1WX7We_Qp;2zZo3pvASK%Ffv59fnBZ zo|&M78tqVsCm?~~w5mTd>)ovw@u*D_OZGY`38<_ahD3iL^Iw<~B=M&g!j>=Z1E7{? 
z>J(YzS7@zL2f(FePzJvnx9R`BVmhP`KMyMoiD~j2g({z|UwxYGX(_WwFtW)dHtP)8 z<*HFNY$~$Lc!^V;@`~h}$;fLV)y|Tqq!Nh?VNv;BFv9dnzDyuH+%#?u%aBfOwuCCFPQlT2VA>xZle}F0 z={&J&1&JuhQW-4Bs?R*iX1GemnaQrhixj=F!PI(P)R0I@@>2tHd}6ywriYU2;o*nl zMKD`?r&qgZpmVztNm39!nW+=;4k#*qpV+yup>&TwvOyJ78hf>;)Nvx7LJhDte`Sx$ zJ-cLE28n(#TIe-?qoir=sXtJ#x8)qPfK5Y0%kP;G4de?)s4;hIIqXVGhi z#d%p>zNzaE#n-QNZa6Sbg@4@+5@=U~;%@v(mB~eD5>s&t&?%G**M5EAmc>!CwI+$j zSyRN0hL-JpNS&(R4z*t^T9k+mH_PM*$dic^`KbxtSV~FMz<~v&GHvU^Dv-;Fgx&Aq znA8Dpf&M)%YpZN_x(WtV#WFnYZHumAOK-+QD<}kMk+-DmC_U%(5TPL?&Cb>~&-Q~f z_;N!UkdOz5=$ZiJExK*06*ctzK!pDM!Ie93DwuJNh$BnUm;P=gy6)Z;HV%$CM#h%$ z&f?sYper2-Za)Gs(qjeCM`7xEW|!{onO+51W6Ip=d$#scQ5+$>>dx#vsWb`w)e!zo zlw)Ac{Bi!wVCPToM*4^-miT(!iWg=$u;p1v&r}=3RsW^ze-);#9Z-+id$8EmTR3M| zccF?|4ox-l{A(OsIygLfh!%CYfAYc#e6G3xQfxJ4c;Ov{UQ^*WEs7 zC?H`NMvl>Q>wi+87a@by;j{!rWzaTF{-UZ~J!!QR*AR$9-SzyPe`T(2`bxu)ia|{s z)mgjrTz2`PiZ+_zAM^*f8^|3Y47u=j2BF6g8`6KQmFp z14+)=yqBKsp6n#xY0ge!pM4v20;P|!!=oRW?@@r87?isArjF;xvest(%;{9Y&HG!k z_i5uxvaEIga^qMwc;62PK~;8@&bq%MrudoMmR5i?vX79CJX+iao)R_3ksg7lQA|I43C$j?LgH1zTq~NMX9T82mr@Lv-*^{gC>e=l8|t=o(i6 z7#lQsvfsv+^!9N|I=SP%CD+e2PXGn@teD$CX@HPpiS|$qxL}lYs@_Z68|g*9Vo<-U zou`aT0QE_zs*#~YGE^}^w_aN%E66{xvE0!y*o6Ph+je=CRzTMaoXRC8iH!H(tkd@R z4}wtkg6Q>x1TK~eKt5PXmD_uISN|--<;>2MUl=5l zj@4WhIJ|Ht-&j9#lQXyq++!Y7aUHPFFfMYNPSou-Zkn*;;OcwRwY|I&vSlJmi!J{j zbL6uM95k7LC)m?FFal9gt4J*~|0(<*6OIZH1#Ml%+s{zJKS zU+oo$5)|_cg7r@>uVPuD{fryXDUh1*7=Pa2BX40^?T!RWJpaUr84Au3(yB<-5CfAe z6^9wf4Pplo6&RLTIFIxD7X!;CmFFI6i~y$X3$X2m?~HREa;wn)6caoPkhLdzkTJLE=RvGH z20j0X;+uw%ii7g};XASdVZ!g{>EEjRFM9oNzx)rZov91?{m+?jz|!ifzgpp-N#c%} z*mc;O9oFvD5}b|I$Y=QL_Hxt3`_4Ty)FcX|tfC)FXm4)+(A z^60{K9nM(;&pXKncvL`R!2@ozyouCl=CjJD1odhO>P*kP0Q#q1_5!%!#6hs#vSKgw zjPl+&TK81kkL-ZwAMISv*$*MKXT%c_AEx{8N)?sKhwe;o97jVXwV%Pfw}B&#kU4c_ z=rz(LeJo=Rz3AQo;LRK~>hhFAT=$&4x-`Z-<0Nx@o%Wx?QiU6>6W@p-bUbu-wTKX| zEp|WyNhHCW@||Ym-6#Qd!t^Zx-jTOf4SJ1H+`!wPi?d9WQz#8id|lpMxBmBg!2Gkw z5h$bk`H5H46v@w>CXa2>qiM;AeZY?j6TMG-8HR;LtWt3$4v=p^Olo>z-c8pit#*~p zvys&$=iV*W%NQjcw-BeS6=d~J%XFClt4Dgiz|4p0c>JA&L}_ujl>t^>FB_pC7w-9% z91g6qW7Na*H*f54YT7Njh{5Zgh$7#08dNWA?yr{Q(h~!N@^oWIWF+M589jzuqj4?` zK)I;GLm5=o_HIJROjs)VZyACcKd_~}LAx88Xv|q`=5`Kq0lV*T?4H!}{&%c14&5G! 
z`tXogYCNtyMZaL#N@X&BK%pYA<|6CzUTi87Cq>Lp$DO-%yYbt-T%P8F(Rho;k^l_h z}c~BjaSsN`vEcIfcj%X?dRW^FtguUIb7DMLYyqCyy$3R3tdQ-jM%VF zek?XHzUrdE{M8;5-zSRDH7(*XU6JSIRT}jmvHO)txiHMJ9>&B=L57pd5MlA^p(GYh zWSTnLD3SIKMWo2V3GAfqttWrxsOL~SglXyDb)6Rq#o3x~&+>a>Jqbdq;@9t!S!83Lp^~yI*=vhZhS>q`dUkj{|2y zUH}JY?cV5-gCmpMJez%m0uX#~gyl6+(en6Hssgq7)W%?|FEEZw<-y)F-a`>MDbHRP zTL4?5XjclIPjLNEHJOIQEj9w}z-EY$6DAE*4RwjH!Zo}(_nAxs$=M15vmed+x<&|y z5l-B1$d5CsyiRLJ2aQm29R$t^PKJ|y*EohO##bfhkGA#0a>h8dhpz)JIL>aZT-FK` z{D{J)RBpMtw_x=YTNHr&g@V1ZnoyUj<2b-r4~<%~Q5bY|{XhV(zJ-&;f{i(^jWmzE z()n{cHTW*vMzbYPvQVJm;I1KG?sqjY;!M_Uj>Bd}4RDO{L zK9?%#)2>%snUVOG)v%~;zdapkEen=d8+ILmEv*4Z7`hB4nlzJN?V<8H}Cnb!80Gu>2u zGzK#I*lQp@0VtBOHUNI~5wgj73$!BP+ac4pbbRYv+Sx8`>Z^&->@oP6BO`00CAAx= zJkj`p-@EHux@3CA=F2&4P9>8@%UxMf;x}8mM+Bcr{xEoUs-jJx4c7ul#ly#xI`)}# zU@+>9yj3G4S{|DC4K&!!*WF`sVK>N*Teeu^8;qj} z)OQb7GDe|gKY#%!Hd&ew1{(L*&08jcj2?vcR(}1`u>dH;w6gaID9bFI29zN-^ZUv* z*pUsE>oF!gVn905v2W#H)J5m(E7`eDtF)A4K3InP_lDlVOG+<~s|jrpv-G6aZ^0wt zSci_8dZ97-5kt=tj6c=^2WcSFFz=3GX>*w-zZ$9m1IhL(N_)z?)IrnDxp`5F59$eu zEi^nPj#K`Kxv%SDCyh1wB~5Y;QlcR`-!QG%=*UbSWQktU*+46+?LSKDh$_4eN4-Jb znbLJ-18+-|NwxDs{i+^QHXJ7^`ns zu-?NB@ z;p&>=dZf8qIQ5)l0eJ_dfpKrO82jb4X^t8I*fR>^5 z9z!o$0e{3tpu{2VWhq@~m!HvzD8PE60ltBMQgShkFtax=t49gwevrhj2%fFjg2;FV zRr>MUO$uQC@|BY!Jj%K)fyoNZDWtq8U`uDHK>n~Ed%eH<_WOIlnLA%pM2z`k#tFBr zRT~IY=66}>Z<(*;y40eyH(L?>*R=9~HdbU;P0zNCIgW)U`LFSYN~(Am_iVfeSaT<< z3Yyww#+mCzQ^YgYDGM@vt=u~x8jkYj4m>hjeN;GoD?f{v=)AlVbwjvbHVP4OfBayl zpR8H}l8B_dZm7BST2{-Lh$df!Zvp3`wqWlcu!=^%E&rb3b;gqs2GQ)pZ!5Y>W|9#R z3cM2(NyL2H6=FOXC65h|9-F452>0h-mUkNZ1#+yZ4jC!)dc6^Rp*=C-t z_rD78yPjl}BdLob;wi3y_nNBjVPhv~3~(re4a&zVGyK=!e?kkKLp}s=EG8Voy7KE0 zdC2EYB|a6p=x`}M^}n9ZBdx2r4*GjI2v}6YoC*&gv1HX}U|6T_(8`px(!Oi^f5iGH1)N8p2zW35FYPXFe=xgBN{CEw3f=#$CsxwW zRc3*0@t=CNu97E21CnC@gO>gScm8*Ll1QWGgpBp$4>|P`WzhXkQ`0W|KMv6Ts|o!# z_WiHD|Eb31|1wM}S5Ymj_tgJp&i`KoE&|^Ah~iE|q{r|ga4CsZlek#bfQ>=;x|!fF z!LT0qdL_5g76NO{fNF}fFEjP=KfhrsSrqefM1An>tg2NlHavdxCD<=i=Hc+nL=Ppn zRNOWRiyfs>v<}FDJ$*UD$WzsZ(N9KvuE#fUA1zyy5IG7k%(EnGZc z>L=O)JM*^u)%i#F2fOaIjltqg2nM$-g5lU17S|P_=^$ z3Z!{wZ3Lh7EWAI%(?FXBcdrXMc93u;kOID1nj7fZbeK*tkz0Dy(^{1H zL%?>iBg?L^QTo<^#0zQDYd)jzpFaaJ9g0Jx$ZkD@voGB3oREavW!U}C$Y>@&6JESw zW0U~vjtx4X?cl-q85e_pre5~euF11va(%=&%;8P4;U-y;7ESU~vkC92pPG8@dIN@d zj+pE4Lkz~n`{~nl;oA?eE>9c}K~&Q0s8L?6(?;K_5ndQWYOz)bU(?P#OZbi6+{x6l?ghdyqSt&71of?@th$Y4B`L`w7Wl?MJkZUMhHXL^XSK6@le8B^II(Rr2Sy6Z$ zh9cgaTsR%cznU*AuNmt$kdjrQIDs04dckrlFPuo}l;-se$%!e5km`ha!ANEX^E=zU z^JBWp-Y|MPH43~298iXGN|W3G87SE)hK82pA7B|f&k~pKNK1)2b_uCRI+=N=$rnMa z8g|lo<%QNV-prACaw4dyu+(0RSYZ81R@0vn`{0|GB5e<77_u`^iwmAco3EuF^+T;= zS11U(qqfr&=YO=n3cpW)^M;A6&h=;Wuv=7~6YvzU9w4)^MFEa~HKY(Z5sQf+ zp4$x64*8{xbKuB6$*czN7Mm0iczV?ALtyn1*#QdFC4IrR(};h+Fj}gYYztswkFm3l zL*FKjCEC${*qTslWyn}x*xjW5!B(^wI`d#FYi!>@qb>gXC;5O59iMbUc>Kqn-fD^m z^`x)(xnJI11C>XK%8fCy_!{za2Re3ui>g*WTP|d~0gq6$<9_s{j(LdbL_{bAv)1}u zS41@GU2ZQfaKOOPJRT8a(GHJ)-5Hk^eD9KTux;p5;g`)$cNPk9g&e7G|9oc zk4HL&)KL$?4K1T`*3X-~#Js+`q^4H0O|D6Zw;)mI?mw$u{~7+q z{;2&|M$rBJe;@K7=ruC_rpG#5Ea<2TuYax##fGM$`kYVV#5tB>6Mpa(nUs8cF%gGq zt2>ykZQhadMxepWZ8zNX;d0HkMN8Vmi8q-nG~%OVa{liXxBu7B3yqO+joRaODUcip z*Qh^01qR3T{~<0@=L7v*Z8nTy{<(c=+zF=^s*Io%0Q&C>4cZNBn7ZLB0&Y4tt4@hV z#1H03mk&dpB2101(~tRS-ensc^PbbAxfaA+-ioyC7~AjEX3lIKV6#}e$vp`l2-Qhi z=NQ3c*t0e2@!SU#>uGC0nanJCu33x5==C=Vfm>KqhW+5Nd0RrVG2ul<8}jy1=2tSX zfk(0X1S8zRvB+7&KZZ&W`FL@*%K4s4T?B2}<=<6&W$kEffJC_m42$29--%V{JXtSwmy`o&wQV@tbn z=RIj)A+|t|0~S@Byi6oL@Ve(!f)5>Jil8mlI(RS>y&A6#*&vvF2QQZM-9?(K?ARX~ z#`>6VZEUv*#wid$xAurFti9h5oUNx6QYHg4RXuG%G?YRFO0yl=%NRRYPqKr$mNkag 
zCHyBpxBCgj(XP=PQ5XLFET)@~F%K45r2%`ya*4oogJBe(pxV^|jk_$)$&Pa7tj*%o zfsDQKM;Y6sr(3L;Abq@*d*L5eHhR$>@-cBzOL&ODQoHzdbB!Q;b|!ufcLv^2j6&b z89C5N@GBVCr}M2#i?6g_nB&HK_v4oVo94D9SX+ww6DB0 zU?hV40P+EEeYrV(J;7*&9z~Z8K+oY>dj~g}RQ~cgfEig*Iyt;utEipgBPJMV-~#x4 z85sJE#BlNHZ=YT=QN1Twf=&*xG7>XXLr>3gGAu8tF)p&OLXwm}NiCS>L38@9YaM1- zhtwQa5WkAK{M|#<1!mK^bN^C&m3YoXo9E(Nn2nU8@4| zhh-1=Zo8G8={d2*DZq&WCjgo|A}?|LP5X`Hy;n;K6@uicWolKAZKz_PE}a)|N(7i&a|$`d>a~BqSy)W5 zeVwcVE>0D()vN%uvBQtKdt!Z{9_3gu*c_xJOJ){wKl%eT#F2i+f?pzcQHMBhg+(r&oRMK`7vCCy^8AQxazG? z(7t&Ok{bDJ@JZHCxV zzy6a|#_V-v*&?vm6-mOISzy)e2`52)CuBI3Nm%@PlQk^;7-JH%lKL7X&yy~&WUtFK zkLJyyb2|=wN#?3N>+8KZu8a`B8WwKYtEQ0VdQ=`o4cB7XXnJEv$9i4Ol)`!>ts~DM za?;rExlUc&L&(ux!%?Nf+ANM<&!hXY5?hCxtt`b?9X5SXK(*q0!iidIr>!yq>$(0N zo%O21PPvZGzHgm;XJ*kkrMOwPjF&kiGv- zC)DdtPsl$(v}P@(^@1W{p48O5m>ptFf0d*wu_p|sdxi#^)D3a;!FjpGnU&J$$b$x@ zx!`GnebVazhD~_KCk2;wdP+2PW=}9CC|1vRPG5V5J0-${4=tR)bGjCnFnQF2Fmz5MTP>QA9EG&M;R&r^fL=qZqB4l>sxe6OsdC#k z*SU;MTy~YU_?8JZTL~J|@H-4{42sdNnb{;3c)~apU--a!Ig8zfMAOih7Xh=YY=Ys# zwlb6V;X&}A<~+D1(nvNQw3Zxc2e%;3<_3DY4dgvgze`_crDY4s^ZFauNTpFLsGDu< zW<@)oAKto0%H=FCE^?(xs{5%7AusJ+b;FCA&ypr{l!33jD8BOAY!7#)NP2?H>9)je0G>o+%xy0c9OH@L! z)m>YW{m<20_^=d5$`6^ZT3I-vlI*$(%sx+H^4;M+aGm;FSY8NE18Z;a}w zrDM(I!HR3pw5ML|%of59LTx~Ce!%G7^t7-2b&mH}UIc>AVk-0eNzir< zh>{0K)Wh0Fcy=Kvp(gpxd4@k>QT79fH~+w`djFN~by)Yo;?i?r_gF#d6O8f_&?%8< zSS`VJVV#Q$_E1*(V#-vQsYkL=ch2UUCre$oDXfD=Wiq@nfx3nlO2A$#={fP#G9&!E zjUa&BdUzeEQJw$<+4MyGE4S%WR!7jTviF$231~oVcLFzP(;IR!u$8%xO&s{erwypJu-9BH?8@_AYfci?96wbz^kqnyB z1zO%VHtpgEk6<`k4Znx^=8ml(GFh_vZD$x=^5I+e)rv zCTMM{aH$RFX_)CkfdcN=aZ?v&>j{wlHTL}ldR3J4mDH(`u)&+!U-m#wrdY&THv5i) z_AA60a5_}L!3WC8GJICb-Kt5yrimNa4iDAYo?o*4Vk>oX54jF<%Fn{k5vF@Ud(9;i zO%X^=as70;(Fh^D3I@kl%U=CrvBTz=&ob`j;>4=Ysq&JZc~u7yy~e~Zi+5k$4{C_H z6=hiB;L+6Wck!jZ22#%LxK1uUP0O$lCrlVnj8K}FH0-OX!$CZT^gkNwe3ZfRe*;Ye z1y7%Gb=hSaG9u0KfOXjuFuh)qg$tX{oleOvd#i@*gLbr3G4?o?KjSs6Y6cD9VpWRb zV9zT?J0BC2HPDo zhR4jA{edEkoG+ZkppUhPlfT;g{mQTanhntEFpCv`h;)+hR>F4i17INHFnO3mF|T{< ztXhwHluw3U64z9bFvnn@V(f##N`EyzV}GzMkWUG|K=t7?%MK^{b$BeXwjG&YfVRq5 z7w;q{KCN{a#riE(koRDHlg`c886ew9K-q4~ehG+qbP$>rkKvnm_QQr8&bcv+2*dib zh=F8xqUY_kDb$=krUHXVk22<1S!(8i7QyM&H+;9D$haHA|M2|ZMmY}zZQbchLPdQF{~O8ZTnU{E_KBY@!-fj{?TieS&P{xRIb zFk7CheJ4PAwZ|3);s|S*vvMr~BIz(q*4Dr=L*GO+ODNLEIrW}j35}N#I?-&#C%V!R zc!$~NJ{R4vMmqXM>r*^&;NzVfR}`lDXAd_Lgs^Jxcz_J5L-`4K7ROW4?Am2z=olCP zHabPcvY*~Q0l~F?k`R}oMhw?7RGBD#GA_Ao`{^91jBkL$L-P5x!#h(|HlTy2LF5k$ znS)0oZ+m{0F+8Pj?H^m15|3#^#Xh#-jS^9xEDrP7cm%GO?Pz1*_w#8R614`~#k!@Y zd{I`jkwjj`I{GmPln~N7dx74!3_jR42z#DXdAqPq*HzF7!u%&{|KIw;s|k+G<=;=@X1lrk5Rb(7)+%$4vA4z zyM(}crZ6Vda-_jboM?~VyX+A^kB$5$5ObU;=~z<|;iKyXsj;yA34$*O38Q2vDD$cw4Q!jRQoX^xm8i|bb;-CgUCyAR^Z?d~ zB)&{qFn3J^IX7^`$37B4{QMcx@=6t=HZ^ztR5%%(yZ&(j- zb7*GamSGSH{Ekj<0DcS5)#P;b8&ZB{?hBG1>rBy$q0~b6v{t7iC=0YUZP4$SgU0E)d&UD;%Ir8h2|@^+lyXTN#9Q2f;MM@gG9wp z95}Z*|LcLldWd9Hr(U=Sr3ArG8~;&y`E9hZ2^Svv+w^JDHL1jZ22}|pVl|=I>|6G& z%9iT)bP}Do$5eG2kFSnJWlww5E0pPNK6MtfUK_-a@OeChAHy&q@hckD@%-Z63p|zq z&%(&56F+pw^xl)uP5BQz9rX<=Bn~cfuJi`sSWIQBkXuBwiz|K*-fkMm zRzr&4WC!jsQYYntj`5uPT>8Jj{%m~;Rw7j9IY%(04L#4~Pqzf*t5Cu<4F^D%c8lad zMBuB&7C0C<+iG69Jg%vRd_H2w7uc2Ak}Fi3<^d0D;bc#}PQ@NjS15Q+XfECvDFLlF zjx;h;TSXr1JjRVaHNh}A&LX?t)V3yK zJh?d?p3Sir!hB=X*}uE}O+0A49&SMOm)CJ!dHF6@2)<^cU!|5n7s3f(GVScM*Q_ zzGZ}@om#sYT)Rb3kf=~R4c!V~=e}^O&G{G+CSJwJDb|&>LpN0Rrp#PRu#d~KdR0l0 zpi1Ft$VDu{eBd>Y&a9oWRL!w3bvMz61T??l zL9s0iP*8C#Gpy+lQ|WnMkIWGci=o4Tl4~z^ombQw9-Ju|E)s0W4%x-Kp+-06w0 zgJ>kLgkn!R&5!C8y*K)7jz2U~%JMHNven+S8kNJ1GkLbx@o4A zpx$2<7@{Ze8+Ag0Qp_Fk+Ixa*`Z~|~9=r6gMGT*k^@1d0UaX^hd7{zJtsbXn0S@44 
z%=-*yw5ByPOBrPc@*|=6kXC$P$`&lIK77{tg%h|5$z{Q2mZq*UyBUd(X2@x{XBmh5 zBSk=z=uhrM+o%bPN>B_;5mA#+fiN~>#-}-SgN=YXf9II>YkxE;vS)D?;S5HPaTm#> zxx!STgJ1OUI+F~6mNSX@SYp>lgEHv`hl;=sXvV!!)3jAg#@+9)Vsza*5IeWB_=Z86 zGwy_h)RY{;w4bC$=Zy%aYzgI|8a00N;C1-495zq4=|Y7X;E^B#u14&nsVsx=Q19suT0f9I~@6L5>DZ~7^n z3umc>0zGeMntAN>!bYyE;T{lt3^VJsblojz|0wS6mt<3%*nZ3~{G|S(v(rD-n?0JA zAjsEuYLO_+Ym&?{kIw^c4A84`Hi4zN1&pQA;@~DRwlN@P>pM&iBP?Etd$M84Tk$W1 zZ`Q06Ig~?YD(hZ#PB^ty&qLln2$<*TB&%m3isKr`&sSjh?%Pct4=N6)Wan9(#DLz+ zE;86-n#$D^huo`HH%chkAADvd?_#Wd&{&ohev zx^4d-fskrX$=yrGI}7AQs`2lD6TQn4dnw$>_8q~7o9@#O5vu@Tyxhc%jn9lnWVXw^ zCS7X5B9;^+$+;-JIr5C}6@0qHv_?w7kpLl_ve+J?sfush_s7TtuvdB%WL0%vR3f~k z`>GsZ(ZlRIk^w?dm*Qqh0-wm=u_pxL2W?+c$+yx^9$n#9#ChYD%e}L3IbU20@XZr9W^_SmKx*N&@eF5v55! zpF`H{111~{HX4lEH|jv$9CVHKXSZWZ#BARGvKqRdbpt5U8WPDXx7o(vQGF~Q`#6Um zJsCMMOZmTFJge48-$FvVYE9VUrqrD2g*hfraifhDTpDxA<)#>IC*^)+l7EYx7uUgY zOS={x1wujG6CSC^{DzpG<}ya9w#xM=1F>Bwbk@{?&kT}`SzD04Oy>Zf)*p4jgw<)? z%P=Z&L_`A}1o$o)vBNGeH5BlB2pPVqRAI%jooxdvyAuno*EI+F8#GfvTKyCrc4yY!_H7e_@pXciO>%{icL=wO zPWnuau`#OTX@Wddx=EUO;bm$z7IoqKw|##nzT9~1&W2q^qB)Ct(S3#E1L8^s6IwWl z(N-VL5%tt3cnV~TNRx4Nzu!mCekF)|%7~Cwe@ZTa-k&6;JYoef`_&HtNd}DyBi}@1 zd*J$bk+89l;bikP{m~2%P{j2th4eR(^QlxU#r{#Z#+i=+S(g>si8@lBmjFxK_>*5HsJSO+sZ%*C^FHFhr`y^c@Dl8!7 ziBJD1QGO5A<8GdGx!6s*?PpzcC}@~F&Kf8*&(zob{{xZBQ^kbpKofX+G2tU z>5QQ<+7As6Dn@C#Ba2z#expePrls6fGrJ3R_^}`A#>7bMj_}7ByP=S0!@*qAw@CkU z5-2O})RG1S%|49}{i30Z`Yp%RdK-yOYhuGSSdGt3++GA`%m zC=Ax!sl$2s8u9F@ZLiTT=~ClbiS(w<+Ao_57p)wHNSW~HB-fm zZx<|D(&g+nv}TIwJQY-P@PmQP$_3Qph$q$NLG4v64zI10x9k4g(k;*z>qSNob+}+N zkWo1wtpxNt@T}|ZxsYbXz@$EY@nMkcAf|Trk8P|hs=wJaNCGxbq6Uow*2R_=Hi_vg zb?6nKFXp^YAbTly$;w4D zmklwsBsa2+6sGZHCPHq}dW&elS7vU*t8Eokf(*?HYi1z1Gor)#i|8n2BPe`QO%v6m z817L;n(a$gKxN(-j(+)RQW?vIbB{IRFE+ngrd|2rND~W$(6RiYyUEcJ$Vv#*g?hvg@v1-| zT^#mQGa9&>qy3Z&#uilR5{*WjS}-+KLiO9VyYqBrp@u(riBE{+WZ-(2JY!`5s(ele z77VuDZ}G!2s930la-7yqHU;>?cTaUF*WZsc4tIgk2iGpewlYF6;k~8;;(B?;P4e?| zsw2*KK^C=^>@$e_uQo`HL10fkF&c!{J$Kp`#+K^h1t-Wt$p}y>41=M^g-6ii(v)Z|LT-pa^ zc4qG`iNuR?pXEz!urn8_esGPo13mXby8x)4hd%KGA2&BTPmgR6a3$vdewlKqI}qJ}{kr?i-|*a6_q_hY=P< zJIwFyJ~YuEOQe!f&f0JD#)0fhOW>iY4l24}z2ZRbve@P`h|CUCltf;aS19gCw$f0n zIwyY(HFkMOK+M{jVvIzd(CLnY5EI7M#;#$+QQyEw|DBlyGb&b?M{n@iIJ*=GHBb8C zLonQbBnd7nMs%+1Y>=2Zs(42mUD9O0jLJZ)~@aP>MG~RYc z8LHZj4?=Z!lf5e8jcqa7H?%4XHU^MZ)LmiN&i)}BQjrue&(N#ayR4t~bjnK_1V zV=Dm(x%Jc?JXOZSI&r;aN-LD78zR;u`g%fFZnE%XZ5z~a!LNBigg4AtxH(nyZi#MG zyu8Ea>^d!te+&F-yQNW2V15~c3lcf#QNrN+G z3~gy6%&fJDL?&4d_u%u>Q+s_*)ZHXwp^PPK!#c?@yvSws{{T5a#=oZU_pk~7t9xKy z${bitp!j%%9j@8>+N}L5SKXBl7TFCGvDc8Yd`85VGS>6op^`eG9Q$n}PD}-I>s*8{ zebWQ7OzpIE_Pl3NE0)#wAu+LCV0n=5QIxQu?4(iDYqfdIPU^!}`(~o=DC2O6CKm8R zceL>-cv?`Z$IpTVe{jve88RJiF)&&9&!G3AIqRf33jr|^d zP0l~?Ow_L_vWHN5;n32mSn!T}y7cw*ivT6x!?XpX<|4F)o?32h^rjh6Mb zIRSgT2Ux^9SLsDy!o*QII0DbNg+vE}aN=>k%gB z&jymE8LptfT(^Wx4A}9~hQ?N?L~r(>m=0}Oj#JYL9T0~Mc1SE4o@MJp9dkJfjM4Co ztSWY9V6dT*GnFN(@rV3!va)b`gPjZbwAy+W?2XWZ1l$oDoju-#sPRo}M`?TWz$J=R zwZ0vBUN|k-e*A>Rzo?YQd>er1>}>JYIgZkytT{gtFn>m10myKBwageb&{7nbdCrj~ zR=*e`!qzUW_$uj&^z_&`v$`_|2MjLO8&~ui0TqTVTn!}3AD-MwtY?zSkdjq8B5Yuj zVX(;0v%p4j@~gC-BpEvQH8M4GOc6}S5hoFhRGmO|^smfp7n1Asqs6J3SJsNFiewfp z$G8{(_R1I(?xWk9nQ!VlDgi&qy>iMpJOV%LwX;SI$)DN1EFG0i-^G;r+(DX-PGiMx znv`8Vq_`4l=QCAUhZZ>6LK!kDbM4T*BL#x*uEHCW zFu=@X;Ve(5AG8i^7+=e8s!25&XpUu`wIjq6tLYG(Bd%-1u_$-RF@de1a=O@lpS)L5 z)>iBHXZKVL$1F`T%B@(X(3rwsvkaINZQ9+~uLjO7x_%8gE_;=4MyaiWct@K-ynShm zChf#IfG8^fpoib+fc(0c={Q3Q?F&d6_wvDkZfnyvW{22PnGvvrX5fCQBV-xyc!Pf+ zZh55=c%Cf9P59>kWy#-(&CS%h+x4rBc2Hjq_|ng>Ulu4I@HjxkV^WPolcr)CT`L?} zxFa*@`1k@VApNJCJ5}c8amQAlIOMg7v<=s4+(rTf0r!V0)RVICSY1lpcw!|+kM^RZ 
z!DTl@Q(Tj40n5VMC1&2?QFb+id{vahljd#`8X{iuXzGZQUcy(GDAdMca(ZSFYcR1$ z=IW;^$rTk^b{glxahm#*w6;coVEP~Sl`F0kF>r13e92ZcOx_bI6}J5T98WPZsKb{F zu{SI)&VSLs3jAcsS6&-zKC?10jJpZ?<^Lu$~S%q#nWrCQ%RZ17mT0_~ zjTKgZtvB@c4sPMR`fD&eS;*A6CBj^s3l!8s&b^}L_Df`hqPaE!KhfHf zHNZWH;AvQ#`1?2lrdqGubWdX;xPbnH0fBQy**>$A;^1{Q>Zm1Uw^iUa>_YpnDIyMU zInPBD|DA18 z6ffT0R<}9u^K^?ilPmMh?ge(iNO%6&g)T%Prc0RPxQw%7wxRLbja6>QSa}q)e%>e%*pVCaK#QM z!glwxY{QUnb~DASV_rqC1%WsUvRO#{VGer)_Yuu+g=a#g;3&dK@`a>Lwqm1qOM+PK zO0L4MB`0p~X#s2DS)*Txh)voNd>7;1D|!S%(=mL8Dsk#v`b=|^wIHyT;Q*4pr@a>f z@|SGdTHQjugCSCXygQb9jaT&z+FF2^Jp~NvB_K*$H3gTRlxZdwT|&r#E0iL(gh7DM zEjWKZWN7UrytK?s96f0Uw*K355iOLHe!`K9KichwU^DBy1yE#@S~9ogMf++@nyUhk z6BrNYm!U(t14KnEXWY7aAw-s2`p&}Ue;)beDQdobuCjqdMYm5WO`|WVQ5;kaE+-Kz z@ozbphbf#s(P7g1u%$a5cOcp9_ksE#e)61s1lsRyUZdKYwVUaNi@)1s6GddJ*OnIk zJZK@HPy0-KP;ZQk!ht*Hk@p{B=vcs6DCAYhA#MO-dLaDk=+K{pSZw3t>cObAFc@`8 zVx!@xFiBP&Y8P7U#yWw73my;$mCuw4xIx9n`Figzjqyhh@>p|OY>nN^=_P_5LbBf^Ceqbpn1<4bY0jEt zcyIY8Pl)W~2eS57P&(b(NlVdURPBc`DcV4K6@OG$rE2r=qB%mrqz31Eg#ikbxc375 z*U@tRyq0S(fQ&JYy`4TGpz|Fj1h2$IO;yRBw@K8UstM;}1S`Wo-tiT_6MFJ|rzaoa z%k+^UV%jW4HGN@x?B65VffN3e6yxxNHjho=JgI50om1!tOv;Mzs1)1lJ>A(s9G0L! zaiJ-c(_zj^^W`zY%#Eg{)a$x`%mCe~9r%F7`$H<|<0ag7=ufF3o{#IXfXd62%jw|W z1t;lCJ6Q$(rzYXT>2C~UMIi^G2{xN!_H_N9x2! z?7Gw}qXMe|9BtwLshPE5tdk;j)-66V{i+zQ3re+;@|<3K2+%{w%(l7mGLjs-vK}!s z0S)Dft|df@R4AhOb$h~y!+Ey%>ylDdjkwPIIRi5NVJn!mC7?D=t8{=JicX=*0!7b*o_+76p&~{E?4GURYjE?nyR*_Lg!5TE|*_Q+v*ZjW1x!ZE8r!7BO^rhGD4QT_FXX$tIP?|^IGCiZBNaA>uihIPLEcfv1M zqIYngX3OD+xNvCu>B_!+h}$J*@!E!7>2%HqLZf7R)a(N|PWbHpWkjimREj^kqbj+w zu{l7-#qpH?5KuoHff{@hR!(gZ`QVapGisM$W$w6hAvrcDMrvr_Y(p{dVTTEl91cO_vo1Jo^n=2-=vUJGeeYS>cuF`V`?Y5%ACa za+;^b^n^m45S~_-6VD}0P85%d5GiW9vcNw!v&aS`+h4g|N%hZnD4`=1Le-J4m5Bq{;Q10mfX#y^h0{{YvI8)J*?8--%R zLD$%Z=|RMLOA=WE`F#@Jmy8MGheM_=Qe`SeC5lFC6W870xs_i^xPn3J%N1(7(B(+{ zfs;~-5LPc8LUW;;oU=@#ua?cwq~l{UT5M-`G2$Amk+i(aH)|W&SESzTY6)k8Xm#!V zb`HOQABc!nkZo(4xx4|MvhO8dhx?B=iR_9!}HNUR)4<_Ck%wN>pt3GIP^Tu){pS*=?! zutC67>^q1OFX>q0M@VpgeOnXXQwRj%KjmLSOlU!~hJ6D=FNUN)_S#s+)+NP0p9ZA@ za*8IUH|1RX%Xzaf1m^}!Ps7}K?#n2+^NwL$0SW~#A}+mvffFg?Z=awhCd@G>*Kk}8 z46MS|{Ve}_K}n~DD_;>~q6&m4sEC|HhNNA|?A!~eDkFDV8w>uAi!5r1;o>OvF`0)V zEMIq`9l_n?9+Zc{GI_isT zA2^1S-0iyA{1bW$S>+8?zfEtvk-8{O+U^oO7Af zK=soUx&3txy(x2+_Ms-#)~l#2c@d>pj$MG zIkX&)C3bg)YX2~W{nM5kuQc^z>e0%l9hqr696oZqJ(BQ~ZipxDRj3fEZ8O5QV;;A` zC~ack52LNM_28Sbodm9R40b=HHW4a{m+E1{Rrx2%J!OIz_Qk5Kq+0+UnBXSdRQuoP zK8}WA+i!*%eyKNh1h`^6=oC{o5~Lsj0Xj_D*!P2~?yOqBIw=Rdh&YA+8YpSlIOOY% z)dA5!R*|u_#T!TT6mt8e=8Zku0PvI~FYK+p*C_G>kfvHSDV);t;KJWJ_(a`G!CM^E z{ZXaltdx#!H$ZAZ)iTD<#`Lj5oYiG%x36Z_jAf~O$!0*53JuGW`sUCm6RMz?B;Qy! z-F7{Oi;@6PI)P8OR0(DTaw=1-BcgHHvy?P#l|B0v*k7>0Lr_u2jrKGsRw?aIKA;$O9bTO<6^AnJhbtytA| z^B|xB%9`f|4V>MRP*_M@G-a8;F+;6+AVMc^ zFlLSIApmE=`N$-FCg#WxpN2PZ_T|^fsjq0uy!amt*Y%P{-J{3g3z8JxxrQS{pGYT3 zsW`>5hxzF@?>Cc?h0zq-KxC0kxj#}rB9GZD;sC})FG|t5DDoLqA*G>5%kB89&jCHq z&Kru=)HyC2T<(X
  • PB6G#|1I?KDX;#{Cm_t8)ih2+hoK*ori-F9u6#REH zv1CO$^MqI{oeQgq?OsvOvU(?p-fcqScGY`sS&L0ooQ5SnJUY299A0`mzp2AzFR%y} zO|yOv?Oi)3#dc8uxno8pVoxCmnTRGA^)-KutJaNPFXIOlZ|j=2YY3|JG5w>Y9Cv8d zt&VkTe%bJi;wq%G8Awl8kafjJ^teZU7o~mpo(yu&P3q#1>~jC-1wp?DmO*Kp)r|wK z6!S*%6|Mcy`9`yN$P;KpbjFtt<}YFGvbbftw#0p9Uq}8BIb1i`3*ib4PV1ApM!E)` zH|?`=u!3R*P&@`W{+Fx-$bplp_!(3y6pdg+=niPb7?TjowPFdv7zbV=VrmqAvH4cZ zq>O_=SxuGmgujeR5mh{jX3$n6!v)xTU=qE?eRuAV$|dKhUfM>1Ev6O9m;U)W9ayDG zStoZ#`6zu$*~tD?tlH{zYEG)mIKo2+NLirzg)Cv#Mbe8Yr;Z5{HLC||!V`xf- z0uKxpP3}{Bn6&mBJ%qH85^$F9FIMP8de@g;xN zsTYmOlAj~&nRRqJUqeLJHhMghQv#J|o|W8eh7}YlIDZPP0$JkYb}rU@L|}NOy_Ih$ zxLCVidxh|B46$>K?m_JvVM><0*|)Nw82IFZk^7P-lC+{p-bCfu?>FVw)nF>Q=f@IK z#0y|>8$DO^BmraV!)xfH^J)HRNG`-E_IDWf1|!9x6dCXAfS~f@+}PKhphpa&JEO_< zdBB@n)HeXT;Z4Idl?wrgHPx{N&)ywJ%USTS9F2F|vB9t@1q@RXb06nqOy%2Qd`F+2{g-M>jzCFnBW$0u*fFktw$Z zs>YiABLQ~Ki)oUaz5wk{a6k@++xfvIx`+>xlG02htn{RF+T}MmrvyE-ryTB@p382=s( zlr{Fx0bo%B#$f4}Rw_XfbMb-NU0e%>CbC?my{7Z^6MdPPVN%~~IPU|1;YghdPH`;G zWKx^xx?X)_)i6G5exXb`JLi>417B{~?m3f}kP@9%QA|v3&bhz~2cJ-9^r<)o8PX+g zthCm=ra#$*dpr*VLh{G*ghK35FNq&1 zk_95zT4V5$5Foiy?%W=)Ms)4MqmXMU_UXhdcipP*gRW^TW{j7)M89^k^Dnfd*AouK zfOax6SR8~`>5|ZqJst=o9-0{2QDb_`Ik&-*PSDsisUe@u#^qKDP%jo7_`-MCfrd0# zEh}w)a6=!Vfv~Ze4wtz*&qXqLTJj$E{+=;c0=p0vlb@eIy{b0$VC|6ERntosN1Z=U zcgw#Trs=yuiJrU%|4kNb2sl7}cbVwdyI36x?1ptZ!0B{1SMEJTxXJj)dP8wft=8Pb z%!YPYWlt>uvK4Owx1IdizD9BG2SjMfd}$St%fwH9q(=>{fOszF)Z3;ECXsao zDmmfSdqDY>y8%}9)Qp6YosrM&R7x$MX>6(NM(@P?@s9thc=^LJthVM`m!y${soQ<_ zTp|#fvw=5u%?b<&8G&z%jWkp$=cRAzqM&C#w5Z8skX^57$ca*D%;U^phw^gDxTV+2U)gGwn2nNm`BUpBrzA3PI=Tldai< zZJrHrYz?@Al-k7slP0JUHeu{WL>U=~KvLq&q=5q9`5}?WSJA;BOPtKJaBS3=Vd)zVdUtLU)Qhp?%&vSHzq~P z8fqjhqUg?Z5M>u>3>%au;_NRWo_%m=rmqC15RFD3iWT>G%U9YN%eh7sm$TQd-1L9Pf z7WnE`cipwRIRul{s|WlQ8$J*SrDU-87(=lW&fVJH1VOY>KUB!4ziw@gQm5ODI$ZSM z*0zIY?EzwEs!pactDlSkAf9G)y@WQ~;>0~eS^*PdXL<9~MdG5oL$F>x*^1lZV$AhC zo5|KTWEu?}~cEh*)=i#(a1M`5+Nurj8&^k;e^T^@q`?6jb_+=Bx4 z2Q4kuk`lnXbS7#-5Dk7*Xt*}R@5)x0R#+|42N5#|qhTG^nKO~HRCBerh=Y~sD^)Xb zV!Z+esIKK6rSOUbiznF7ajuzTih!2EYHjR7>r7Ycu*$)7oDOH|K(A)IY(B^SUj47X zCEe>hTLc}Q>VYM>1^vF`7`3_v!M`f2L>#^T!Su)_YY=t5;fz<^?U_O^W|+WG#p0y& ztuS^=x3Hnm3RKGuv!le$YB6-jano=M>MusSWd#0w z-ZD<8v^T8Cjp$CtnZ| zi*k_UJ^z9y1h5_U3;5ycaQzK3Cr*lCdtb@PoOQxR;~LV>5x5w#O$4S2|6<#jiJGJ=cKBZ(9!tbXR{0N{3CDw3__|!(2 z(F;8m8?0F2UauS7%fy&lRzQDZjB{H|}5+Jqb{3JZz3O^!$< z2qQ*KTX1>*f6bZ65&F846FpK3vpU}Bk|j8BgljTK* z7!FqFwlY`V(>NCy8snqf^iH^Q^_Ycb(m2PjOhF)%qN<<(^1|nl+-D!9d*W^UUyAry zhVO(bT3V(67_`Pwz`H^uj62{{2dFh4j2%qa(HwGoW*?*{42!A5Yc#?~GT#P&7DS<4C_@g)+NL%&Jktdrf+?w?V&i)GUA2d|<5Ba3LTGdTQ3N!ZN~2R|#y>0IY^EH2SP zr!am%P_qcXyzZ-y-vrg_*1;Y@fgp;0y;oM~`z!aa1g(gY)ZL?40;VqgNP*FOPgLX|1w z^Y`1q1is7r%Su@SrHw(Dkw2^fk2Riw^IE(6{x8K8D+ z^xR*kYhGZtXnMOvJh^FD!YY`@ssPnZ2gj$rVMa+#6kpU;CEWI#J}9h@QP_SOTNdOU zFWwBX(~qYK-~eKxRL;!dZ`O>qeZ*ZyErf?u@#JJ*nhDC`Ug{dAoc4fSf5NS+f*f9x zX_l2oRFP>L&{AZO6k$zL4qx3`)D5Q(nD1N87i5W!;o!bgeQLd(K!v&WeBBfU>tELD zs#1Sx-d!-tVq-Tqsu-r+JVFf6$EJMudN;c$@qn)^&3ZJMR}? 
z8n#fr36X1JDpcnK(%lnHIq(*m^0Ym^KP+4=a8;M4ap-c zQnvcjwE4OW+A&Ons6xn_PFfny!aaGT9@#Q@FTy94rLSOw)F4nfDa)3QAw7@YQh7r0j zmlRF0ZOoeSjo!J^8M&|l);fRy!h!!fexjff)|aAg-Ht4C6nV6V|1yOFh;Py#%;RWZ z-~ba;fVe>2l3j1HLOKoz-8&ZNpx7|y$rRx?qE@9wov`UA6h>E3U-q4x-8kt38}soY z0K30tq+HL|C|hwwG~NAx9k0y_1ZOE=g9?OiM?L=acP2K&cv?Wos{mGtZLAiGs2U41 zrr~SwuYGz{u171(#x#F}aA;974ADmbbh48+{O6$azz%iv!q^i>#+3w8xR{Jg6|wK9 z8tUuHEzp(H?7E^WRE6^vw$bjjAb%KWI(hQq@?WrlO4HsY$tAH}go3fxd?0}mZyXJa_ufK=n*NfuBrW2IkG;mXdF3e(@M0Gp@hv{s_|Ava?JmGz>Kxm zM6b9)%d``N9rh8#9BI$?=G~8G3>(XtTFV9Web*vLGm@*dT4hiEP2D8IJJ>$ZARo>( z=!~vv5HR8EUIqS3a!M}X00Q?zxQ1~_)+e&TfCYYl=IJm4|MnC4h9X^~TGtE>5*STH zUERMJw3!ugrHizgLg>B^d_-pTiEAt93V)Wz*xEBL_Tk2gJej|dGOoj3e89xX_$-)( zCt&L#`6P}k4=F)b$n!-!mrDz;(GKJZSD z8ZvWKpIsN}zu@(pKCbAG)9aY&)Bh`XRt7Le1Ix=O7PuK=9!x0000HV_Ho) zP0BWQS=$io3T@khR`7gekFRRGGwRPXOJf6#BN9HBDc|6BnT;4l?%#W`s`VboYL%o% z|2=kC$OevV^JBnLH!eFlUO+af(&Me;qdoRSqVxxfX+Fwvai5!4ljq`rVqYmuuY8PdWS<+vh@OPhp#fbid<&a>np-+`)k1H z>0Avl*1!M|3;(7rx&khph1BOW+e$CAgfN0&4kq9sf;*$b14HH}Gx)Un()KBYL&D_7 zXG|{|4Ga`*QPFeg+cS+yr}U2R8aLPZ-+@~?sWhNDa^-QwgC`Kge@>L692iThd`Okj z9DUAT3WV@FMHe zltJ!`njUNfnvN1uo#q40*K?Wc72d!3ULS-%4Md@UeJN#J*udIH&n%zouQv1C+NxWR zbjqac(36h#ZfT{CUJMLrcdmezXJ#L&*X$&^{rz-QBA_>%8;r7yQOt+Fxk`;^E z$^Y(nUu47Eq?bj~cIj=d&H^{*b@8p~w3h#As<|%~N9?)FlsAl2uBMyzqq=Z`Y?KVc zc3Yexie8i2SO+$)`GPQ^#Qdxn3L^LRP5P(9|GM*5%_5MI7y8cET}-`FpNHDOe;DKh z8`C7Ci)^L3y`8FI?$!2QXZah6Q9|wZ{n`Mu4f(oU`3HBLHmm9aQl{28q5}M-BQ(LR}SC0`2F8 zsmuTi;?x=czNNR7Y6MB4%?{ze|2-KN}rJr~C~7Qj!82!_v+_Ow=G002#^sBMa+y3Mh78>EoL z%W+zOPx!&IpNv+1HS+ig-s~AXB4(H_M-Nu-flF)6*H@{BNO)bnON+705N)~g`wpmN z68zIXecmDwlmgifD+-cm1TE>z9q8^*fiTBfnTg3(B1ddHcQMmheeJT|nYhr*SieNm zD_@|Z0Rebt3USRU$NsHXK}>6NiNI@=6?j9Agbd!C9E(-}@L%O`iHqG5`_WqT0EO2o zNtI4zfsYSG)^ZRf8*P*ZC$~Q5f~jq!ROg)(zdzloTX&EEK$f=etWJB;d}$y`o$rw} zg7mv4n80F*4`~`PFm3iCu)3|2DyiHDf*MtB z&Z75`ul%EvWc52p`};k~*~Yi~&gc@MNa&JGQ^9E3UmrlXr*320>Hp(yZb&nrE5lK+ zPi_YT-rDjBf0n5UIWhEl$Al;JOy#n_5pJDSij=fP)R!&MvYD2?)9EMsI)At5{3v+I zPN4WyTQw!?{+{bF&BAKGY3pPF00C#?s+*!NXYXFhyAXuSxkFL%V>33DQ*0XCi9hS- zuXA&lOl>g!!8++<%?uHe^}s@)D!QVO+LbM4D+)5bR%G8nIB>biz1eF@&u|Sa_r{tC z3I+J0G*w&E87>Y#qvy^Sj%O0v+ z?mvJ97FF&z#wIxgtnQ-c7Zi*4@w)lJX3EEh4>?z=X%=G>IRL=qYqrW!m z5P_czlCRDigCfA(AR|-$E4q6w<+D?Hybz5|aDL?uI}ic*tezbFULYrqw$^LQiLS{t zsre?ok~t6p(=blUND(N7KvwW%ezCNeHHF1}k1?2{kL|Im8sb&wUFj-51$k=}uIyRT z%h;dmZ(q{L#W^Jc2u{aq0MYqlUv@FYiZ(fC4#+n=<}J^9@~G5JrIne-j&<{vGk9aw zprM)Y)o&<#u3A^b>L9EAT`N!}l9)bn87IJ0H~Cd{aE|Cy&AnU&vURG=7W=%GfPn1jNd@n6?&EC?4=xgn!cIXx-B=OBsVi zW=iU(oufyubK__Tl%6*?YWUz0dCWf6VJ69X+wMMs&AUr;gd(7(|rr@~* zGHrfNxu%)>Yf%GaP%3$W0g37KoK0DH_}_t~!2~&7NlOiw zxf=|FMV-sc2=_7d7(i@Pu?M&%)^k&#BRk+f_ui}k#(39znx^omLP4h6kw$@^!&Sd_2yl zjen1of%42D4Bz|}GLdo*dp!V?n11!yF!;y@L$J~7_nJmd4t-XLvBj6HN zd@NGI-Q=nrNS3;pqnv0)9q5yPW?fKm5V-d1L}(9gw3iwmcz%`<^ppHy?MIFLKb+jU ze8?*VM0!IwEX$;ociboc5K>NVoEWvO)~7bD{yCddmgc`Y0gEKpUcoi!is}wn2PE28 z>dH8asFnuRCwMvRD2;DX)TuF|QO{@Fo3(3X{u8=5|BFk8*N!i1GXoR!ufH5NTGLg_ zduKT15M~Kp!Lf6D+P6Vx$ z^nTDI-w@s~g2&NOy}>7YHB-Y7#s3vYQa-UzIPFC10!MM*CTZM3WJ>`rL;t8?2yBg1 zr5Z_0o42XT%@Up~KFR(2hmAV&e>GDHA(iF4<+`M?FBeQi$Y~uu zK$Ge}&>g@Y(E=jLy66;A&E=3*4JX}8Sg11@KUsF7Vj?vgvf+Dx+hV z_LIZhoMlbsXpd)w8Q1kh0K0d!lA+Nh^BK*7FQPwZ8sjq8)iyx>_>$}__Igtd&cC_A zeJEdzoKgGA2<#1YG=?={Ybz`KPpGB)*GKlKZtUoc+!s3*Q+sMP7u?M_tPCv+(v`O0&LU?j3qjtZ}O~NA-yo0A1uF?ov|2I!PjN zJl~T4Wtvbhq!1^eK6$mZW~fzvZd)-0lIRRD>?O&rV9Xo;NOdaYMI7Cm@8V7a70q*F zh&JYsfO-v7BKjVQRrgD*1Yncn#Anowvwq*iQ7Oxcj;Mat&UorIwQrN)PK?&NcHk<4Jo`QzdvOV)_5pK*XXK4TKFsM zeM1!AjK__j^=(zXvHwY#QNB-H*EPAoaX5Nzj;0-&3XZChJV(ZDD`FShc zh@)XkI_Ez4kwCQP!tu)53XI~s%xUUiDE7{S)q*OSL`OyhVg@I}Ef^8Gq_{wW_NT45 
[base85-encoded GIT binary patch data for the bundled .webp image assets omitted — no recoverable text content]
z7}EGowl&|~oi18RJEyO+*))8{nChZ;W&d94wpR_nSS8**3CiSZR}G4>r9U-*p#nlh zU^6{C6?;8D#nQhygb}ODnGZDLfq42mQZ^o%1yACdD?QWm$=t7Y{P$|sd=So}ixOh< z0LKL30B)Gb2Dz*u9RQWNdQ$zEudd@Y)$&UEGDHh&|j7dvXA%j43aTXzhWFn@XteMtn2 zJW7aDOl@`rr|M1JzNni9)fC8^RZIXX>lNr{dMM|GB!{6VMMWj>zq}mO9_7 zLv+)p|8q~J)`*!~?S%~i>hx0qLtVZWaW8|_IBNBH8HAyCSd}#|?ACT1(0egad}h5E zq)~FQG?K?|TznGk8VD94(2eYeig87xpjrQo$#T)hB*HgW!}UQ^^wf;r(&DlKI3H_; zi6eEB>S9g>v+tr{&}qBU*9zrujJ?yySw|BfoIW`h3`}N+^5)rwCRgHPG4ITi%%6E4 zHcA(rII=E<_PM?dk=%CTvK3YV*B;)tQx%@YpM;NG-KSo;1UXJ!5G(R0au=BUaLD4? z?0V<*ng20+z4ke9qR%9G1(R}Mp2g_R2zNniKtIu;Kp#gfc#LCUOq)1IGoi;EUjw4% z9=vuaa+Cqqy-zO7r1M{_r?+48Sdc6Ni);kRpPrBBcGUW%UWz{*8b%|YMP~T?V7wAf zl;*6wkCLtb^g&~)b@ao(^nj!3cYNcTFh_ppeC@%)#xe^@t*eH@z?>h7@(u?WAm1qA3wwEWkF;X-TX z&PmUlv#Di#?b9sB#5qK4*4`bCt7>l$x(mJV8VpVKG#iDLb4W7L>q1s0MP*1cuP~P|F)c5D9s>BkCR}9;qHxv#=z9;<@akKek;z*d38b= z5P^pwHP;w6&7LHm-JcaxjgX+6v-QzaOlPl<6IQ@3$Gzpd?$jN({Bn`(;yFm}$80e# zlHo6hN;-r7Z_P1h1teoW`IJuq+`t58BNr?O+aPRKJtEz)v$D;YT!E6i@8M<7YN$akIYjBO`%p`3eY!49?&~Z@%bk2fGc7WS{25x|(gEoy`U-GGNT8M+-UERs%tF)h*j&ifzgTu0IAVM#-iEVOeEk#3mC zC_EHH(U+a8xt*U#*ku$lYnwfOdc#b18|Sk~W!wkaudTL*n)NE~t16XT2eIvE(EFF? zhwvU9mLazE9=N=MMX9|AbC*Bhy0yxeu!f2SKdLv&VrR9R+f;4~VH_H?&Q01sCnhu? zyw%#g7Z4ZKY>0un2y5$aZgGGJXhcP5Pbkk&;BI+HUvPXON~0s1*sn*tvg6Mpz0wSYv!r@u zbM9YBt+E^tU=)d=X^Wum4N^0bN`B$+ooeFy8Cy#jI3+E@6rHg1R|WP6K37&D_&CXF zY{BiCRCj8E(@hNI8+WmKzgpBPf*EwRm7yk_ldRX!1NJoabBuf{ zujoTtY@q|pbk0N=)Spu9QP}E`YS;umTiZb*4JpoPi3Ppes6j*J95EU~!0-NF=`u#p zqD9HyY}lKUr-gqo_P8z5#nS&czvkNm` z=R9VpClt;g+Opb}z{@CgI1LgOKJP-e%<+Lpc7lmA=c7xYae580a4r_?!MO%huA>!HnoTs4e@ooHGY-R$J;G-z=yH?YYJo5LW5>#D{gzCA--?)71FAlFbN z{!J*bDpJ&FQ2xsC)tyC_dOzEkW451l+H@WvCHV`F2_E#S-qPld@ywK-th;1kJq zdqp)VUpv*xdLh->qG!%{XNi(mr^ae_irJUezhI0HMpVQ|rvz5FbFM$Tmq0=WqA}3+ z@v|pv$gU|**0C2yf0;6s?GR|ZYbs+P9(FZR3s5LDZc$?=g2nG1$T(Uhw?|>{_#OpE&I6H-jr@M#v%?gmj$`P{D%q{eGY`Hcvcx zI?Cmb1Mb(q51qxbK7fp()c&$=X(nOpoux@MjyOWr4iNHAKipxOrV zcepHg7fy`N2Xns))tel*Q!k?yaF-0-r!1$29_e+p^slyG)k_{+(D($t6$Bg&oQJ3x zeoJ#bZJDxAvPS~kU3|3aT4G~XMVqvD6gWDe-9m@d$d9M{b(JBeM5d$aY#10Q=r1*b z3GdWQHRJ7E(Q5XOFk?G8Hf;6z`WK*+V}D<+zB~oaRI9Py%s>A)n5^HUt@TpkuN1zW z7gv;rp9}2qMxkS?uNjB53766h$0DD%l1o*KiIWpcZ?Pr-`pyW}~c003KNQ*i(Q literal 0 HcmV?d00001 diff --git a/assets/images/table15.webp b/assets/images/table15.webp new file mode 100644 index 0000000000000000000000000000000000000000..99ea2c031ea9bf8126a91aafdcc680a4b60717b1 GIT binary patch literal 48652 zcmb5ULy#_96ExViZQJH;^R{iC~h2rUOL2Ib-8_ezkwzKJt|S zvA&Vt>aYHf{;z!l-%~%;KZx&)llp&uaRG+EoR8ken2&X*fD?fhKq{c-SNVkX$p6N_ z^fqn_FbP-&5dP9`V{Y8l19|}HzuB+ZTYcMoTY!ic-lzB*{d@l@KS|#J0Ml<9K;WzV zL||s`rJJ^o8-V%-x{v!A|GBs2??!mj_W~#fSbp`~=0DFp?|lg@_&0pO|Azh?{q}t@ zPRYaOrJ3p0>Kk5xVQbMBzMERKh99IBZ4!p5MO*m&|AvBo#++T;ZV(C*KKE@7PCTg>t7yKcry@CD}-smzsb@>HbJVnXvLHij;mqMjCW14t=@i zuI7|WAs4kuV2B&sF?DqMCUHE=$z})(Uo&g08_6h+%T&@Fyd>ck4Wc~U>_}hyR_n&` zUO|xwe~Usn?-r~u0=tA*x;xDwrc;Kv8t3mg*5+atm3l`buEkyjP>;M+v-%$!oei}?D%Tt1zeG*x1(O!$f~ zEnFhDLt={6l=Mj=G(i$Se%Yi(P<~-2b8ZpC=f?z#Pp4vG-tLXKOs!Gcw08Mde@4#8 zgy_C=lnHi13JD)4wO72hUqpA|AqlHfvEBbc+}`(6Uo&O;E85FZpd9w5B63yV1XA>! 
z$D%FH8ThMKwqngQzSl%Md2GD5kDk+QC5%hdFZ8oSY4uvKc4tIz^P=<@a7`U3N_IK(s&UC&sJej{>fvx+0;si z*lq0B-*sML2N&P-Swd3jD6LXb=PJ2U^Krb<9=MZJX&(9D*lAgF4)|Rz?5Xw19>sVJ zy3`;e-_%{X#v^RZAm7PE$ORh@r5W&#@kOh?w+ubYREbSyXJwathDa65s8M82rUqlO zn`P*-^JQsXStNr|QR_~FP`__lMQ}3q)4~XC z=T-K1x*@6a1jXuFqMo7oQ+AXpV5PdOv|&{j9Jq$QE6Sk?r(b2$RjbQM7k>q3ykVPZ zghwL^HtOB?6UyQT7Lw^!&FqPyuL4J{G}ec{zK+bqP`C_oGz|>zscoszGtv7-bXDLF zEM{9~Ep6w>MxhhUlBzv{SE+NXIiY!%W{oi4s0h&)!l?AG$v~x@M3?OZW7O8Ly>CYQ zVdNbmGdu>+m%j&@>PWZ2;tj>jYZn7@=v^yw^ymyp+_Z&Ju{5JRUgSt_%IRi1(fW|4 zZX2U5$Rdp>80gzZo_IGJ*OuQVB=Ud0|A7Nf_eUn({l?kNz|;~2FI=3ejt!R)#-4=t zX$L_>j{nXdc{au6mb}Uk{tYCbf)r2xv$+#KPeV~wvQ%^!B~0fWgRFq}oa zZqm2Gnm_gL-9ZHRpK1Od;jlESn3=#AX;AO!b04d`!7fg8<$4+e^8bcIJ0(3ikHUww zE6f!AvTC>4`ELC72ZVG5A7IsNv%F^R$R(g4#(x{WIUCSEP<$CLFELq{RI6mw)R|ch zQdCU*Q}s?3l-1jAkY$Ud2wM+6PLc3iZd(?t9tCnJr1;VWJR8o2pf%BeiHlJ)o7hg>Ha`!33ud zJX>BetqTfIt}+VrNy&3c2Yuyidcr)^0HcE9f1~!KjY87bfj7I6w&wP2Zj~#J!lko$ z@mde4vEqVpc5ma2bq&3M#T=-%(N*37v9g7P4q6S5-~@gKDulz3lbgjTvar~+p7H%9 zAIeTQlXOBNnlA?ArgU2y`>>|mDRHWmXo=SOWV4jojNy?mW+X@H$(%zTuHP0$qRql3 z$bqJ*QAJ1QD$-=e8jSxbJ&3$lx^#iTO>Z5aeeg==<>T$4oj-vjZ0cp<2Mv`QJ- z=`$FY!Tj;UL~bg0VtY@W4~3DDW|k4W9&APsp9xG42W^(tUM3e=F6#NV2wA z7fh75CxGupW3~AdEmR7X)PkyTbmhg@60Mft$VHI{_bRCK|82ehh6jF^NCo_wi}#Z3U0@D6E_Hv>r*vNMAU-WPONuGC*7Ov@+d~x{-2=%_FIji zo{2pZYD`B#8$_0Jnn~#YXR-B6LSD^px9=9+PzWKJaG{mL&!;{QP6Cuq2&(bJ<{q~C z5#?OzWf9e!7nkL+7LFJ&Np0${#s+PcQwiYUrL$p7%YvAWbXJsGmX*J3 zYY-e(N#|Df>GeuY2CQKW<3e|~cr^lo8WJ?qF2z;xFZxVges@5_d#$zYOyB`x=5*FE z#f2u@s=M$|$tIB8Khuss zGwCx;Rm@wVRa6C#qoIy!k#fsL-b;7HQw6Y{305ksZ;1Jq{@|L~r6)`B%AJ|N; zI(Smh{t`ee$Q(PID?F6XkO=d0NMc5u&rQ@&CPy1@_imk;6A@X<*sWW#-~~|$s7_GI zJ|r=avK8AAxOhWIP4*bdH^4-9 zDnxXxx~(>DDDR1vVNbw z7_H_CHv)4wTyEyC{JNGqbsAKv_zZmq-N6?*OeK)+DzMA$Lvdu+s5pjbHp}u$A>q^9 ztJ2cqMt`CNdKBDW=2j=ynYcnJbxEYm4a9Xa&J3G91mvi{xKn(IlQ z%`><8u3oJ@tr7C;3)az;h^sRuj(>ZHuN*;I`v4ho%Dk{PN#;7hvg)9oe0CDg^eX`e zcWAB+Y6A<-OvMJNUsfO7+SH+kW-^Ud)zlP0Z^2mHLrW^R!i%UmmeO)*C808wtmQ#( zZ*~HUmWA`!Ym{V<+^7yZTALg4fM@HWw~LS+5J$S6te zyr`xSUB^n^*e)*S;7{LB9yStg8y<)m4A9g)I&bTApV|hKznWhck0-yb71pd3gXMd#wRU1BjGJ| zz(7^&EUdX0CHF^%BGU9~rQyXkr`;wp<1{uXX2=pZJOrvs#Zebu(T9V`bW7hBkW+_G zE6D6S0s7R*_|niR(h#Tzsv}~T{Q-zAJT=mv1e*7Ke_c?V@!5Z`zm^|&#ZmeNsu9AT z6{Pgtdoma*qRl8g%dVmsaTa&g?;eAL)H9l z6FR?`iN$ScB5pz^*r6@^sA881$H%>4@oSL)sH`&)jb|IX==_jbOR_2t%Y7` zu#G!a^(U*62=KdeMe9?XF=-IHmePtgU6XSbt4e}(PSW>mm6Vs22TzoKy(<$4*h)&L zYSCs7i&_Mc^824a{8>23Lpo(z{L5)U`UJj7h5X>F3}%OIjrETzF-|Jo`{n(E+j4?P zM7pi~12)v&oI$7JJb(8|li3G_d;=1l{aTYqnkAMf=JUc4=g=7X>YST$?o) zIMUB431RA~bWms1537@RxVwQ1VLLnrDsZAXa54#-zm#I@r9Pa)*P9!q8LO6S@b4-0 zp|@SSs7XewjM{i^+5(Sw4w@p@*6`BApz0M`?$A*oTh8I~ZQILq&< zkb#H~V81(%Ex4`{7;C!?8(aW=mRkLysy>3>!KHdEIVf%&K8D}&X`tR+8k~Jvo%-eojkLA+)3}k<;!D*PCN#71);x85m5%#B* z(dtUzwZ}P+kY2)n zx7*OqkixUmF@6TT z(QSUp&_JjX0fz-YA)ortzy^g>@h}D6qNWv>JO;qB$>?zo5QC?*AIn2~cYf`yoP~*) zUf*`pJ|~1+T(-jeluJ;$S}9UXv)bfx`PZwt)#VPnw0nHo2{(Pa9LEfYuwU>gkSpIv zjjH(y&ci+^QAUEEc;~N) zESWN*=3+BNH``$qzL24J;3_t-#oQgNBMjqw|K_f(8Wa7_LXYLy2NmGSqsjSLq;}^4>MPle& zkn(BO&s7f5q@?IxOwa%eWtNY8sf5A0FQQ5L6&Rk`xK5Lo$N?@<2H@IOYfeeFH-SP} zl1#1ZW)G4DtuvED0u~b>ezEW7soWjlpj}QgRl(y?ElX^~mPhsj(WLZwi>&?C-Awkj z#`z_plE`T^y~03XC6i5pPg^gSm? 
z?^_ zcPxR&PXqCh!xWX5K|_z+EdsMSXmehM01Sf!*#7#o$FxwhRJM~91rbGqP`G?}c%7?% z;o|EcJ=Jt2wpg6l0A&PJj0la;A7J;Hb2`l*f;gM(M>(rrIEPV{-ir2hnkVyi-zjQ4 zf0c%7c!S=Ozkxdvu8_IOPiB$Zq_aAoAYuj=uad&O`R_$8tqlhFH0udT{6ztEjCzeV z%}gz{&`g4V!gzHNm^^{la{GLYBRHpbPL;}fJYZ3omCJ$eARDa_cfZRb`S+LWQW!Dj zpM1XkIcYTx6qI~|ewt_J$$kOJjWT(NUMFk(2wAEM`6eb}L z{ijdKt|q@l_4^NOm&`+={8@vIY^9~w%IgfOD+$(?b8nT|z)XuCqwiEBcQglC??QI> zG&`+(TlYUDEfI94N4Qz9E|NC7(bQ*)T~)S;PJn>v@e+Xhji}wTZ485a3(Q;GEP!!s zRGtDtPNI&?s;o+qUQqIce^@Gmy{fI>VsAtO)_xdqFG}$Kfoa7(b+`__kT(Q#Ozar) zI1(JTkc*T1&=4De-}t<=nArkZfVk>1TJ&Inbtn5I_Udn*Y*Kfm&jwJ8aV=|8qUnz4 zly1_%K}?DZhDAVmmAda%AW>A$WY@@N(0Bb2MJSb$)-k%kr+=t|5bnMVMwwMl(>y(E z*G>pae#|rj;{FF_44VxE^xGX2MH@Gu_h7Z4rpt%vwGL80QKt^{e+uPtjCG@6 zlBc0?ci&8TUd|oRw^V zlBf|;%QKsuZy`Y@7hMVRljdy<^Rg4MI(o5>gR3;4R1vL;=EN8`R^NhIINj8F6{7R< ziyzx1RwzK80y&Qf7&hcf3@ue5t4Ldlmc1la*jQj2Mj3|O0x%-8YD|{WP!MWvoxIvr z-e>ozQn5xVzISo$ZAe}I;cFWAppZoBQF%oqt9MBiI`0CeIT*Uh@y9A?a{2{XW&7le zy?HyOr#dfT)PZqbpMSR3#UKhLAcDEoRTF-5vIki+--VG)!pw7E^n8-ea_lx`J0b*% zmr1tbgDfQnSoPGqo{xw`ZF(3IX^r7S1330n!fR4HAw^tjut#7nP!ZIAnKV7j`rw{e zk(B$GxXOWxFI%hAu!{~c74esVL9LYN0=VN&B>VUmdy+<*Xb_|>D+arXCH(JnrQ2@W zoWVvz|1e;^Cjpp*18jvRboFZAiQ4>67dLhbdn#tvE6SS9KMPm9!)9U71Ygk>8CfkX zvk#|Lc%MCPerfs@JIAtS_beiyZBUUN9(~RM@6}Y0(dOrEZyTl6^F2T*rn3O^4@a+I z5!R|3C!ZLC{SD8g20&DN2N8^W@t>f@P{nQeq8FrLC1Suw|sLVbcZLHz)M0&C{lRGF#xq|}A zwHvKnV;j@aB!{1+P!lM|{G{Pj4ZyGovD{5^$1wuN9t2|*PyH*4F!Ez{VFjyg4Xq6H z%k$8>9tOP8k33{6K{ovAfs;A?|)8Dve%*zz2ZD`#p z3(~s_O;auOL}Q~v18SQY3vsgb$ln|>6g!%^+d8dSKc4?W3EW$SOoO1zP4i% zIpRFV#1O+hXfEEoFvx|hp9SnFV1Q?7sy>XhvM6E$Q>VsyU!Zw#Ka-RN4wNgQU z!IL=I?z!=E{U9;3*j!5?ca1RA(P%{e+kmm=o%*}lY#Y*^_m{~5fE;#GY;If~r9Z7` ztR9-2_~ZiPa^c|LFDh{HW$k#OJIX}aHovV+KX=i?+fPw59r#1`G_D&Fl}ybXeCOuJNQvo^H|cQ%&rS}$Tb!qL7~|nb{-N~9 zPRhnS)nq0s6sOQ8jqaFUUMGx(>bm1x?KCAH*YX&)9hLg$SPptQU>5;P@4+t|?sCY4 z0XdP(8q&*3q6MunhvN?wkfYww6n9&H^%KL8v2K6{l&M|*ICO_U#6hduJMK^g{65XT zO$Ie_nayb%)O0bmoOpGZ&wH>d4{J`` zTLNQtpY001X40TTpMyge(+@9%mAGRYpZ6u7!>#1rM#_j49R7s=oC~xh6JCeVY2YM? z`)~l2bSu>(8kO46kPWkw4#6B=Zye$YEp&iNvv?tKD`B_{3G$8TdB~^Z&OCs6+EFUJ zFS&|!_L;00NT?4!U4IETTWWG4)5IX*3#v+?eV=dSc^Czz~EL#pZI z5$YmQ=ABIV@qj3!e2&GgBl5=uG4!I&2Ou{HGEHbd_^paEgBOhG50;d`|Ll+{8^)V( z9!|!+IC^@g1V?Jtpt8w2AN|pQr>rM#yFb~r6}a?v)684-rfR6UXyIWk>-o%pSq{{A zduXkOQ9-lBxdHU7)c1pqIG8UpYB6Ut4tTe1X#tghTu=y=G$=~0gpRLkKNU?a!7t!j z(MqH&7IQK9Vee%E|78guExuuk8h`4Xu|!V;o}to9eCgtymmyIdIF*|}K*#@u2lw%V z_$*h|E|wj()?tmDJC2MX$P=0f{ni}240Hnv4e1Zi3u*>lCc^9Ggdy8huQ+m( z63q2nEm!0td%6*LU*I`p8~S8E0^8E-mCZWvd}rdb4LmJN^Cc<|xQW`^NzU!j=FY|@ zO+B|W^GIyPjeds6yF{UZ2f-0Vi`f~Zn5*8s&5zoI?)|p63`F=2M>9#Y5~Jw~j@c-^ zCiFTb>Oq`={KTBSE0X`+X>X0gq7$*C2F&9omd>hFB8*qwlQp{f}u)3Yu z|3bO2z1=dSGQww~n|OfDE0Sm@{ChEot32X{h!P< zIq-U!qMiMX?~zy*k6((tyi0l$HB%!q>Zbip6=s}F5*q26crVv3!Hav=tZIA(n`n?v zW(eOG-$W74LrqwL?WsM8c?) 
z$69WJDs=wYh9L(l%#sDJs4bF}Vd!psFq+g3EAlR0Yb066Q~h>_g<8mA zm$FJU7(DcTw;rc!CRYU1DQVV;wpY@cWp@{2Sc7;qHedYBlq1m&7Uw`@gt}L6q(n!H zf6i%CCKLeckO8M3P)P>v$G=wIIjT~Bumru33)tXI{FeY?TdMPIVz}A6lfUY4jQXSP%s;;|0(ZDa1HDF1cs9Z1a zx&(rvhjtd{V-F#xc_GGD?he-Kpc(98XNHLwywGSHA$BWG-c@J=4EHf{N})8A1Zxu) zWc29I%9k>EY!}weM9TaAWUH9WFOiPp{Odi8o{NtNKKd~az~+H9dmPFa9bg8!eYX~q zf4IK$A*XHVg}tpPrls!9cFa;+0cPWF-`j!@k{9&kfb+>|Ac&AV+<2fu;>Im(ku}R6 z=B6%#mch%ZIXFyOICX&ScYp-ky%Xv-ZyM3}lmh4b#yL$gMnX;jc^)8&a|?e6z%x;&pR)MddC-={CH>(L9pX<~w5mS}b= zTq6?sJh`p>lK*pUBtT0_gSiy76Hmli5pPMZH0pwzv0+A$O#Lw};m!J0@M9Nn$%5t= zwL&pZq&o;11I9HdF~2|>GaBEt2z8lf`kE&)dDA6TZ}Oxj&3vb_zEagQ2y->+%|XgV zVGGY}d9ltBQqi-ciBq(kFnWP5rUm2hR>Ju!J|p8wbtY{8hg^*9I?~57ILXptmp#wy z3)m_@NLnYu0+xI>7f*yx&dG!1wx%+YTdoO$@3BvUNT)65Q?w~fw&$t@T@J_KFJ(uk zCLIGf^QVb zJBLE@EK>}e{O#q&V9d|cE~BZTpn0gtC6T>)z_iuh2>iGlJm8d~8#r?AvF0j9?3L?e zOE#*DO8q2rKygJigOOLQ*egH>yXq+vvv2YH=mEn|2L4g07I>$W|CoErv~=gF z8U2a4p9M6CtZH0B8LUh>tdQaMqljHEGYF2JeFRD$?qS_l`VB{#3AVO6K5#_uPPMrO?e{n2O!q}vIt#A5#66-c8YjfVUo^jPX zSqfHDW3TdLQ=p;R;i=NGlU)epT6;#;&Im=0EQvacFKL7gNGqN>Zs?2uL_+~spO9@H zGm1+)h5L)77!T$h65y`gN5y9f^zvvYjGLg3DbudA6POu?H_32t0#f$D>Tg{imJX(? zHonEvcbgoKEZ>M5G6kFRi2hYZbul?Ke>Z*~>CvDbPZmr=1vV(PNq0p;1gS!yiF5W&YQ< zxYG{>9jx(9(>ZD08P%jwfe9jo7*-!K?J-8Up_`#M5sCbKXY<~jSZo&uW~WTDcUucB zMPq{-QtamSiI5y6`f(?3k#=kW=cR^bp^$OllTw#C_|_&v$rzdBwf@eK_Pc?uI27&i z=Z9quOIrhxD2QSW!uqrE4krF6x3>idrmTJ;q0JFD^t0w{nAD=l=>r6B=Kd$Bv5Aar zAs!MUe4ojH&mUDC{?BNcFHNcJ@E--os~2aAJ}h_*VC|Yq{IIl2@{Zi@*~VvVDE^T3RLVjNKyn;r_5nO_C%KIHio1M%5wc! zpa0l(Y2Q~6fG$ZHH)|d5k&%gRi|N%Xc=im5Z>rbyxQmIrGzTl~sKVL8n2){C;bhm^ z#5U1u$p}DLlvRjH&fO+|7>7n=PZ^lxE+WxbkB(J`QMSmLICGo8H=cav*YT1a0u@Hm zKI}MP1S@Z5vY3PSb19K(cK5vYhesahsP3YO`&M*fyyda(<8f*Ar=WE6yOC zPNY8%BYH8OB4^L7n=8*_f_zS)|K5_lB1V4pAC}b5MxxM)UM>V1X}ppzx^r{Es&EjZ zNB0RM=7`6pN6ZhPkA^f)Kg(=v%O}X5e)F})X+o zCPDn*30?o00t#9ky4{KWms>Tl_eCgB^V3};-kPrP1i_wUS{^B~a%e=-Y-K+~jrvs~ zjP-eA_%H1T%dWrB#ykkW4ZPf55w@@UaCU3fZQMIkcl=U!<+4zJQIuA2L-NyRNCjfp zh;CqK)T3!)MmMP(dZxoqR$-HZtqAIDwQXF3E5D;Znk*-!SkLa0F0xcjkVwZbtxGWt z^MPSDaZ?IO$aQc^FZO$;&C2rRhr?yPKkH-;dzbb8?w8cd`qUoB zI!&blHR>DJ0^oJI$1 zVO_huXKE_)1>O(XRL2IP0aqL^S4C<}jXBtex}9hqsSgU(KM)u@yzQ6bNtC#;|C8w<`{MzP}+a=F9B> zEbpvmu+7?DAR>euXFQGXQkdkBP(hwq(W``Rd-B?kQqbNPN8b#h$0mL;pHVax{%QFb zW2Ffpx8CBgExt+ss_3(ls_@F}EuW!k<^hmcEj+J!7>!3G@|l8){jB(XvNnRlc}^x0 zx`-NK+z@{->f!tP8I4tCSqf{Gg2Nqdl`Zro2$lxO!v0H>hxQE9)+xDEPpBW^xOE5n zD3SjuNy8I>fws}wg5Z}D`BC)vbxb5!meEQ{fV0vp;GsA7G~V9@@&Mf~I2zq;8P{v? z>H$V|r>JcCE&_q0A%DE2=gib|4cR-`^M(`>E zIkFr~Q|Cb4R0Kq1(W=+*<~6tleuN&xi)%cX9GM4F6&+vXku0&;;9ko555rjr8O9L^ zAiVjW++V$D=LmZ_(~fnOuTs2iTvppwHTwq*44r)D7$wP!E*!44gpAtTgG_x>ze9)n zM5p&RHlJPl)Ke!(p=`$#{Npo>fBE>^cAE{-{Ce87j>ZK}zpw-YSHDP}`Le0Rnm zFF=epGHSUs5UFkBYJVn@(!#8b$daD3Jw?~(<{dbfJ=ObT4Jk;gOXx(Bx>#jn&~BDe{l}wajgfLDN406sD1hUiVY^BUO~<; z53GK4U{Ewe580K49m7=vZYNGSZ>J$fU(eGMtfIe^6wnmKA|W-QTu^Ys(?5R?S;Jj! 
z;rgJ9wa&ms^cN%_icSf1PLipQU}wM?w4Yc}Bpe#6w7^I)3|f4`1p9)3PX^ba<9#A2 z|Dnv7`bjiP+X1;jnyWN*=}+{JD92VIvBqH+gFr;}GtQ*45(J2+2l0O2@JIC$J_t(% z{E7LhJqxdgveq515=h%``h<8PeF6+)0`5?c>-V7manXI1%MD~P!E%38W`oDZ+{Rh| zQ46^8u-twO9%tOYp*xk=n7IDWBDl&zBC6MSyNe`0#=ibJHjWxc1INjkJNc-Tq!s%) zu4~=PUXopn7Tc+2pfcR%Qun@Bf{PW|91fo|woc$;p9Lv+C!1)j@zA9eL^g+_3Q z^Zl;fw{W9j&Lie30}+k=Wf3hB>U8X5tWa~Fo8|t~HazlMRyzS(@CIZt=PcN~M??1o>xS!*4*`$E-=i zq~u$+?Uw6H2)T#zN~~yAIG{e&UrCE5@*BdYXt)OC-p-k8(8=$3=te<~?6H{V#wux2 zQQqjNnI$%WhB_u>|B-o5?ndz}h5F78#@ulh@{(>s{(qRYM|?-}0 zZPrh&OGj*=ly1{nb7UvnUvb8vwQQP3APd~nX_<{T7o+Wz&qiw{ZJ!HvrWUk);vR(> z+*J0ELu|UczmkoAFKve1Cjb1f_>Iic^VMX9`8|sP+%-i`U%L&eH`3c}4WyQ|o)EYhmg_xS1Nw%3gpJZBAY~G}R-ERd^ z^!mPYr4|8&ZI`mW$$0bvGEJDc*GK9B6+YJ$C7AMGLTl(|7QHV^k{_ZY)U(k_`2vAH znFRXNbxR4X_$iZEEhFc#(}Xs5x@FX{h%vIV*j7@Nro@Nwg+7}s;kay@u4PRq_}5Pw z|50JxP2!r9;h*SFb~7hCBGR83zjrSPa2e;pBHuqqPZ~>qW_$b!5g6h8Kx*HdaLnKT z4pEZ{{^F7JP_n4l=IX3a_VB^2$>Jg-5uk%`Y6e4@8lKja{DLDaFgHTh$}Za2Nbw-k zrsjTbQXOR!4qMSbtlp|q^{Gnn6w-Lxg?p$?URTC-x*g!QI4@}Ak60@Y^ycCM1TWmW zn7!;`lO&m>W-Ex7Ay-qzxk>~WF>U+z+%NP!usH=`dmqVWNOYzS>$|f-abZyp)U=w| zT%sThc;a9h%1t(`UpJ^mKhS?Tkri;&%}=LgWJuX{moK)hIL?BmgT+M7$$)HLYy~4a zF0ClZtwj3UA0-O4uzBqxJ&TVq&2R%jZd8XAiiaNWh^36Cy~;+p|5|);w4#$51m7lh zNB@cbT~cK$cYGz`3vIyOtS;_(`fYU`p9s~eg;kF|Z3I&B$FF?k+`sNy)6c~+h?VKa zR{Ok%-l}8|T4Z3R;+3%MI8;Erh!+_jfi;q}WT)*9cQ(!ITBp>ma&A?JUJwTru+Ky2 z*6dTHiuPOA*y3t6LF=e_Ok&iox=cl1A}*(AFEr;2+BBu;ZLHU+gp0lcQp^R4$)a=Z z_jV5o%0H{J3nQTK9Mh+=t>(sXXz|Cre((7`O&U<-V3Mqi^9yF62vy~SON*SJlM|{z zW}7v$h0I^?_e%?c+G~cc(cf9Yc!#+ zTV){C$hslgKbq3~=ebmU9LbdNs>DCq_sshtqjJQ%H&FKA&xm7-`hkHV<;V2nMI`O| z1cpa5IqVx1G0!&)@kvHZWZq(X3EhS^w1YuRiUI0hX;e$fj3{tXs|Snn&k6+N)c+wLgmo zWdC{q^2710BA&u0nD$yN0%F~F`5s6|wVeDk}7>A{;`xB+&o~&N1luH6ejX>2;#;JKyG!<}KO>p~cz93c6=j z(Ls+8#D7V#8LsSCv=w$|#)1?TEJ#_Hjru)PIn^^paz7Uqa2vJ-`nZ89U- zX&;Ox;)^G%uqxhOIC@*)imA;_5+= z+nmcp9m!CI(_i8e)I)CjD^}>h&YWyHWZLC;UszQHWIwAv24?q1h3Z4oR#_@P*FWa*2nL-5&nRT#-+tX5%)OCSW6Vw)0 zRkw^&t>f+MzsUg~hqOBH=Y{K-=qEA5`?uzdcf)KSL46GTsn)G@!N98D2H5G|@;VTa zCW5tmggT-t3U?~_9{s3jt=x6y77Ql#vEgV0gyDTg4<3U!PRjpgmm4~Sph2!X^R7mn zw2==JhSBqU7Dhbi75N>fc5+dh&*%MfI20#MrzH_(-YEp#Z|DwG!+DRObYSzsG!#Kj zyV%vw?)KfFjV@viT^JE30y%9rcwNBxZ1p*gUzVdf{o01Jf5$jzh!uO+DH^wL!>r3T zRAO6-J|!API>}sdoY_L&*n!_*&L{p${L4Sc8oahAsEF$?bz$S? 
zYt_-iT2AiDDivPgp0Z|zn1r#MoPv?K$K;MQPb;z&WS!8by987)f-Y|aKnNy(FYnUL zMh8wkZ*JbC2=WA5LS2)UVz~Qo=T1&S7zFMGe?j48zhUDhr2I*r+ew8~6`%{WPYiK^ z;iu1oL%z|7lE`lf8x{5_eS#_n$jLR$qOjD<*l)kI`4;yznB{_tmPpBZt*x%!(`W$W z$)O7`9Svt80fUE3Tb}5#hLKDob?YbGxDupFhc|)SVt40x<~)T=s{#Lv?=8RQMyFdS=;&eqhiH>BB+^{)li77v6_*#0P5kgr&)(-a$2U z&1!ee1zt9}gq#O{jVItS1Z%akI{PV!B@zJI&@QC~WKJ9-(wK>QzT(@&?|dId*#<5HF6}B0bEssyY=@qky(}kpJka(41(BgK68or zXQc#yimBO1*6AF`?!J{CH0Wv}qT0QyIZ;@!UR>eBCOTI)l*Aovs3A_nd^(Ha8fN}a z)ArD{IWfwj(`FnN&pj>Jkc5V+e&|)C*8m&SVq=@rHB`NIaYO;ywrqS3lHdzP$s$zAb#4RS^0?FP%0C|=LP0ej>n!H8kS9`V zo!U7DO_a6JJ-C3TB#X0O-=`n#R@Ji#nK`NUsn}D&KWx2)tVoYN*N%Jy2)~Iqy%78h z#io?7*^5{X3q>5yj+g_lS4<`mz|77rCWC|6A(|Hw`<^Y>GFDuo=0o~rL!DN^*92Tq z7m>^BPl(5N2r-_^&?vfnZ0hmraH7HqN34y~T?OnqET#jV^dt}|EdUbBt{a1r>>rzlx@CNlBjGaHll1@my{ z1-ScXNThr;H7z~N5)|2b%_h0*F#7H6iM^#qF&kM#9~@tz15F!o%>B+7Tn3j>4|R#y zqaE7?Q1ylHt*cDkSGb&pdWs6|b+|?Pq-W8eob!QiyBiB_(^T-89OKAk(T@x_8O|`O zI`EqiRxdt(*a(*Q&V&{*Rd&h^gM*gAwjys+ziBz(|ZxVd*$i&^$CLG(5oXd;{NbZvfaMlgbI(1spCKd9|GK#Zoj zk6kc8KLUW~uS!HjbTl~+b(ZyYi!6Ia`%c&CN5xuAx?RlU`tz&8FVVx^p7#mWb2B=D zXf7`3&4ldSqW{7U5&ZNm)u$iE8nr^_$Y; z!L%Uk=q6SzPPmd8pmV%y)Dq?elG$IuP}(X&U5PkVs9QENswp)oL%D0cbU5ZcI|LzB zxhJ9IkoG} z&m{T(3N$))z4ZpALpupM`_oJ6cJl{i)Bx=%$gPq}Xr3^VcQ8gKaU$M-(tyqF*Vhvu zdaQXS{p*I23Y?!B6Jdc|{wbF$Rlr7F`?t_e&x^-F$Ptb#K_t+q<1du88hz(*q|Ta& zV%+0kSAJ0j9m_M~S7cwE@>k*yLIaf#Y}`0^>sGq!=KY>{N}mc+V;6%7Yrpa>s}mug z+@MJC%f{`VmEAE2SkGo=z(=1TfpEO|^zFvbS^peV()`kq`d!+DEKdh21bUcbU}M$r zRc0}N5r_;TuRjdvRq!43+cVNxSfQ0+S_}~C3C#*cdsaAG?GG&$l~*>kKKbz(c>;=hhJG6SWS(=`mq8M) z?scL4^67tgLrmm7l4$D0;&gq^-@U=L2(LB*p@&k7STzppzy-~&;6h>`A&q&QLp*Lv z6_ANA97g2*7|wyg64kD2@?K5RJsdW=yf~afDw6+t(m-KqNc>}dr6Kt#utu`7+$XdC zDJSpOip2;kS)_M0=oWhk@N*7!hQGna$REQknO@lkLnJo-`pa#Fn_}EUna90@bMK-4 ze*sBAw!dbY@YCBmkgoS41=6Pf-z6*8&qbSicRsGa;l%h%asZ&}=uad0vtmaeFFd^F zk~LEJo2?97tG^_)SZ%k-pF)uN=Y<)dNsL!{Pdi9rTt0!^NK9DdNy9x(G~`gP3p2K+ z%eEP8lcy4kAjSVcOy=TiEg4jeA|Y1eBU^H)R7mVjU$dKQL^e~bo)&m)m z0{+0j?*}nu0bjz-0FmNFD93LB*UEYieL8XxDLzxJ*ybMDM<*Kyh4qjc!qvALuvuIq zqaMm!S&@lZbJALH0Q0$AifAJZk2n5dDZLgyI2hU%Kw>H@-JbvbuQ#`LiwCJmdO*efg4?}QE=WW z<>qS|(SYGY)1I;5=2Gip&?*m8gSy`V>D{fz|0W>a&ieK~Y8^IM)bNvk@({|+ET;E(?7O;IMYCg3uJ;{ep~0K-|TvA9%wZjjy_p4!2k z&oDWbVV&w3$o|9W+}$O~x{Z`|Yej0Oj$gjrrXLfz_wv6a59Hw@t7FWTO9umQrbCG6 zD2;(!M1nREH_H76NVQtPoSS+y;4*CVkU1QGf^Y{5c04(d22-NdR-H!(kzuF2=y^Kj z@yJkCfXE^>!pl~Czut|J`3UB{OcXYv$m%m9z6bT#7W*H2K;p+f>CF}h-(K4mIAoLd z>=d=@wt`!rlRUteMLdQ2B3rVk{-ba}^f=f7UmPN94Yk!-)w#EKlhoAHOa4<)*-EV8 zs+7VCcH21*OVXCKwIh=XDOW}{w??w*w4{pde_NVYw<7&+w#%NVL-{gY8&C^YJRJJg z2QilB(g27)WbH|d_@y1h(cGIU@w+4|h6ldnjCC_*(sd0Y-mVacfABEY^cmfZyaC-I*hR!li@P>LXpzvktmi$snhLhtv1p9}upmBIO9mjuz=O*FCNl7ehZXf2P z;I%Qa7i89;oN&Lqffgwb=6XvT$L=n@)N_HY^ z61CnZYmVCJCfQ!k5^M_mTr}u9$rNEJFqQ+J*5912VOhq7VHCjKF5K&3XtV5So`w$$ zMed-DaR3xJT6@prh2%9*fezWifYN)Mp>q7U9-}c~osqWphx;W+8Tb{B#m+ENM>!?b z03nqLTF3328@om72%(?dEn345edIzV0(^Jwd~aNF@?~H0HUbu~*<$gjUq6#MBs_CD zRN(HxSh|hqtJ7-Eb>&;f=3iB!|G^>()dj{H078V89hs=xbpfvCwP75;E9TItytNX_ z^NLG+2S)N_hN2u3luOs)TM=|NApC2A-{6O(ug08Dyhcgp6k(w^_9#c$-{gPih#@!yKmjrmXxIM|UE z&@|uFUFrHyN213-xf!0`aTJB*fj=3C3-2nFf%w?Eh!Mf)4=ht%e^-?niUdDZ!U)1J z_Q_w%Y=XhC?hfTMYX8(?sy;H7S;Y(!y9E9k>0A3#OGdT;i=KkYoz|y8%BH#RTDGDt zCK8z8(-XjNG`@%=vM(Hod{8<_2R%msVR2huM67p(p*UWqFW8&Sy9QCepP|b z*MaZ<+OU_2sOrX#Q4iDs5cUq%#) z40u@yczP6fVJgUqYr?g#3P=pd4oGM_Wv_K<4o1stp@S~iLsE3ydU1P%@7FYY?of3t zGA;w72fB5KMOKf6|7Z)Wbm+QOgi@0Ax;G7PD*S>bZ9Ev!@#Wg20o3@cHZ#(Hz|=J0 zGdLIU5!pKzHH=5xV5>_e#qIL)FaPL;e^$w#!Sw^7-~M>HN}Jr*g0EWI?`?{porx0d 
z@2|gkmB>FvuG5r7heXJGlxeYLC+b88pBcaIf``Zx;4P-&kzg^mX7jxac2}{ycCj1qQ~hzaCqbp@S9_C37z>Cq&S8>1hBCB-%r0zk3=jwT;fN-%et}q;O>%=y>ZOA5tI<$kyk;YqHdO z0YaO|lO#GOlwsTU!y^mQu<t1dX70(!q zxD9>FC*m~{JF3P<+}aWameYl!U)gPe9zbYBnBkZ1EqszcE*Cc{Et)ZLM-8c++}N~L z==h$>9u5*a>j;|NTh;(~(2e&YDxCrjUpq$&hY(jSfkh@4D$|f#=ReLKK?W?>J@5~D z{ZhqAb>Mp3FV&oZxkp5`hTLI9Hdr{ALv+(D>e1kvXkWzV^vad{*QwhTW_b&r_GnJ}U z9$9h;SVYvC$inx39KiDrVFjrS*&E<8{Xr;Q8$s6F@4Y z#~<5;2uFH;;Tf_yy}~7#s+dI@!uAv21NU@Rl@lZ!T0X3a6v5cyAs&ch0dHvQvw9&O z(1w3dPj$u~4tpGL*7t95rPdI!sgdl@^kBhJd#vRbej$Y}k<_yKEtQWyTc#*%hW&>K z1W4r)#nkM}t~j7IG&n0;q_;m_v=RO^QK<6j(NDU>cm<}fLaWMz`qW}AAX*V{L=|4s zl^)ug^iwk5OzATj zkWc^uIVdWcNcY)jcUM0>40rZva-xDt54Vu0+rIh9Dc-q6SHY{nn^jMQjH4n*{KG2K z6VvJh>?wujbV*p@#}4xmcT>sNFu|J3R8Yj}_vPUv)gp=C7}ivlo=$nMY<1&uR@$G=Wc-8-lYt~+JHa#bBaIT3Pt>Y5KkCHcL%xAQl31Mfd@g<^qH z%kw0`K+3giN*$!nF=;p#jIPLn4CQiHptw4k9V?K@iZMz~BuK*Mm(WzWAO`{XJkt+- zfnF^KEN?z^lm8{kQCh@MlWSQwAKjjJtw)9+%f?PkpP|oUHu*RYOpF-)|2*(DfKbYo z;unEtZm*h%>36IhlZ*2OeanWe&;`_kj6jXa9E_)8BLcpmCc;93x#2;qkOTl6D(tAP zBek1V0E4I&pna1Z#*u+0>?XdwS!Uf~H^0S7hiR)^QaC4dF~9(t4FGKRq8?JKxW~br zJdEEv;=k%NxgB0Xv`_$?IDmNKBGREPOEii?cdw4tbzg}+l5n*Nn6`~VSHcJfz>k^N zCSafETq8$T_=DLt-tA)_aA|xZ#{nX^=AAUsRh9*0rd@xRIN>I3Y@53$`{=sqNMv z*-u^_vrKH0?xMo0&C}{@?FpCC2@&95zX%w zSG&cQ1Ji}GP#+$?&qn(UV(4k0L7$Ko zwavEANUN+MK^M!7x<7jZ+!;-MV>+h4sw+3GkJy7Fu~Ok{(B0+lKvRDYaw&8&HX-k< zT&N=TVQ!$Oo@gTH+AJ2x(Yok&DWj-0(oaS7rhBvN1Kt=ZcK46+e0 zrX|Fn*l4X=mx0#B!5XyyaAG_HQ2MxO5%pDjIWRH{Bb{;Ge)$^Jpj&K$9ZZ$Dq>nW! z9azoK_J|e5RcMBiP4h~*k`BjMjpI&=gz;o#WiQ-rvgRo*nd1ToC`3gWNmaagg3%^| z#S#VyVnjzaUH$+sr}n2Ed&vzEa`0M7Sp4zBrPB}q`Gm8xS9Sp8wDG|}TB;Y13t=Ay zsQC$(2T;V4U}^9wyj}41`J7^x)kZ@97Dy$dHX^qVEi%T0h@x~yXgYvn)v3SDY%AY9 z6}=bdOW$`Rw~zIJ$GYjf=D4ZxRjAG8w3H1!8D@OVU2@MktfB)4id&?on)`pK`X=dq z`Y{6R@LHQ=22o9iEt|jj6Q?d&Y)iq?P?7g3y13J%sH*q^$10s7oue*9NQVPm-}pyi zf@BeYQ1pBaSMN3v0L|ck6cGDu-wWGjDxmZx`hihI#Av_9-63;BMo=Y5Mi1LAf@I-f zZlST$^+BgMe!)wz`X|4uhFq`}l;ip(kcj%;9^`%N`2>@ZzUQAGdJr2dZo%x2ed~Tl z&*iy~N|uPJzrvkun-Og{fc|sCD&MNsgKGr7p|{u5D<7R-UZ$h(P0`|e~TY3`6T0Q zHu0e-W&R>0&ALy!@Ch*K>8jAuepFP{oMw0Iv|B%=oROxPbS0YUYyejWPz0DW9s4uh zRU_P|Buf>PS0)nDF%qnAO%~qCBs0j~bb9}*8d8_;F5s1M?zjSfN6P4})Q@9kS~XT? zoOFOHjr<}iSrQk`>bV7f)`Q2mEbOA1qb-m4@XxBB-w2cQ|1-yr;no0DoKMLZ-QY^_E?BrSO z+ryP1CKI(lPG0wymFb#ChQHqIZBn3YZ@NA+Rs=k3;5ki4SlGYWth0}WV^J0T0Ltwk zs1Kx~Y=?1LjI~HLUZt?!vivcwN%YxGJ44*>Glz6|wOD2om6e;uU;QpNUe!d#qJr>Frr0#7d!|EadH2bb?Fj zBX30?*M3M5G}=cmN3DY#hZq9Mjp@Rrt^z`Sd?`oXT4Js$dbG_30dR-&2a6D_9FP3I_!2h&aaTitB1i_H0E?iE6y&fGWU zyX#d8ADB-^IBuU(q+hr{vf*AiSC>aAP!ucjF3CJfXgn$9JyECR)bb z!HB;*9E0<$&Z$YS=$XLmAu5+YOl_mT>clV+Li&Ui?R(n_;ga%FI zt??PF$9K{-w))9&S|qXpd2p4eaU$E(K2b;d^G5`c)P$CpM&E1Wg-qVRjr02k^mt@> z)b8)gE}|6cl>HU|)$*I&#P#FA^D(fax+jK|4OC2+n)5xHPu{5@5U&t@PlD)K=)+dW zq9Sn2TFgW9dXU^_c2%C=^ikDfYsyp@3t9$=8b8$EH+@Yx^(F#t#J#D8&yr`=;|X!z z9NVKwKI3q(^{M!a>rhU2?5=TlpF%~?Iw>PS^$a}it?f})r=YW<2T5NXGCYllP=>|+_%(=PMyqYmC(-hSPTTiK<(oZd*0K`z!jJ7 z6fp`ZBQV5*=()F4A{kYlX;ka?K>1f=kTfeSWdbbf8Jwy}S5Mna& zgYG_raPo7x7N&r&G+3pQg3p}UVn+>V4HdMBA)q5#HvfRC+F4hK>8hLJ;H~?{M|Rt* zInM%s296`zb6dekujCamzeLLXLNA&D-|8vU44>H?qVwmHpmeTc>({YR1ZHuMgEq`SVUp{E&6Xpwq7&5O^f^`?FAkCS#MKX;w#%BB3;h^89~>;Mwjq z6y2|HDJJ|5Hyj)oRZO!*zcrJ^AOrnPg1dKcKj7z0I&a$y? 
z-iNZ^hp#LIAdr5}$O~-c9@wCb03`Ss8>F5iLfcy!&4hk0OTmI4C$P< z%JHa`_Oj_z$^Xh<-YQk~rV-DZ*M!Jz{)tLHgJV%xjHHTOjM3NI)ox~C1s^=H1uE%L z1MD7UXBM{6)sPN*V z-P$UpBzVd;Z76k6Xarr{zm8V<{po`MvRP~=j-ruvjx3$9o$oV4q5D`{k&$}bsDH0A z9zN890?0e@aJK}a^0x0hT4aG%I3zc(T#XdfvK!GFjh=hMsW|3IFkv$FSH?b1AHvPY zaG`1#{X?%FJGKU=1V{bUz(gkV6Br-jXq$Mu#6-2mQo4Z)mwm+s=MdM$!dh9|4O&yP z(0vt{ifu4kiQn-Cgkw*?UxsROo1N=fjcL&72UF!k_^>vRy7toPxx6wX7$Ijk3bmxLq zv=5ah_^w114F4rP$B8;~v%mjn)mS#)#@za_YJz|q$A?1nAGVG+yW27;I!-$F`LABvL`PC-&C95he(SwL_DT zI`M-la;KBr(UF^=LD%-?M>Qe%yF*-ZAJE(HS^@(}3F2>LVj~HFH3ccsf_C^gI8Q#Y z`x6KVivoEi2GtaBHp{Za+`qT%6G^PbY{SqYyel@Uq}i&x8RRxfBBqpM#(yFjjw2WN zGt&jq4bg6IDu{OtR)nO;khcYTolj@fFgXukO7#Q@{as|oTZYVZVu zlg5~L5od;_?ql|lDvk_`oh?o z?4+P^WsDrq0lmfyy$o9YCMkNlyr>A2d6#tA@lD(c-x@=ZQYSWSWhDSAMKu~0T`GG& zafc6UeRP`%ux<+NPrsF9_z*GBRU6SG;ep4(pX6=@C@1i2UkPyg*rJ$RuyqrVeD8-+ zjX)_%d4^G(;`dRZU4VQ39Vf^wDYmD&1Rcuxaz%gk4mEe<$lmjNYEYCP)pC4VH@Ss; zI#Fal+|N`nw`%+TkV>ZuG&J+QvhLrU$`Teqq5|C*uUf;(jqql%&Q z3Su%Y%7q`-K*`3JeqtrN443C8h@M2S_SDpo5E>@+>@^Kf zaJ(2Adx6tU6GezwptVW#U9i;#mQ;C^*nt^Ij$Xd${U`liDHUo$O;s+S{6<$NFX*PY zf!3=R^gYLR<%@x%q((qM`kqimgR6q=nfb^V!&qg{%{>H22V#uTYXeiphnOokpj4&! z&p110qaDEUlVJQWXtEEr_-O1ODt!%yk&DYxWP4Zn_Y4r?S^vLdB>^bv{rruYKB*2p z`OON%1G0D|on9*Mjj8|N?X=Sw+C9yAe<)wV+{}$VfegUWZ}7|CnbAd>C<4a0D&mBy zI?kkK7^d{AL#zsP%^HG-#vkST7)TCtf?7j4sw(IBNY}ly-h&y6PwrpR1NxHXj^*=Z zs`xw~Yl5`JGq_G5M+Je;hxPsN*=PSB*^l0Nz6R?QT!c!M0#We8XW^FTDNuT4$nAZQ zB%VdKX^}+fd}I8#Em_D{nDvm>vjS|n8#o^tAdMJeAx~5Ob+NyQru<)}39ojCFq)5Y z-F>mU$CB&*V57~dji=4yX{2?%emWsf^=+C#DJ_GREEUgU>}D+o7>Pb~u!MEwa*|BC zt1lg?!yY#(Au_LB7;5&I1G`z@i-q8ahZbqgt;=%B>S%XS@PUucB{tvCJ^yYVsdM=* zr@C-h`~Mo(nS#r35CfCDi?uri<~}@n+s!V|g*{)Oon2$XspZ8?khO`hN-^}_D2aD{ z>eUQB@b!R8l=N``>clxr5LL z@?s#5FcgJnFmFau5m=fJoU^u7jcs};^rsrwrHP{E<&Sr#IxY35FD%@j^hwwftiTUA zr1CvTmdjHC5OEeG#t`#w7cJKUxMh9l@6c^mRmf7+;U#_%0Q#fxrn(NGcD}G7;;Z@B za?_W#ibm4B-)HrbSNM^Fu+nD7+mgsj&x5@9_=AmN<5xw32$%{G*#RI^?-1g4nN2eE zNV_X89neShH05%&Z$YvWhDm7xzXWcEnFA#&;mCuzIY1DyUF)IYa}vN& zyFGR)a?PDh(XT@oO5g_E;J%sShYRA&)MQ&EUmy>7q<{^!!@n)$9S2C>HfY2kxg}UF z7{PX>dHxal<)h08b>Gm7KPg*ZedGIqfpQvSBsHqRkMabhkQZfJK+H>G!|4`$`;eOA z&#kWB>QGMEQSl)7*y>kP*cKH;$TXgN-#>Y z>nNC44yTK)(PK(18tfnA*yzx6`{t*mXV2DJVo?}dB-CZvkBhre2#aiMT&?d|AMf=6 zCAqkS%!I@7WEE8ITZ4dz#E!I-1-KO`5#P)69yTckzEzb zmPaxL4&Btt$QR^l1!NH-)NLn2E)`W7bq;f0cwF(=t0R=C&R`VjuLh4(|4pveAjoCN zdj_px^IlNLV^h0QrfP+KRPi}6xBTcaay~1s;|g#&@W2ups$a%6!u^Z-AVbe&W$e-W!yN3;0h^47 zYpR71fef&z=%cX-B>2wG!ve~Ni#HgpxoKNswo8W{=v9cW#LK9c%B9rn)2((`9tsnkbmqi z=1+nkVl5lT?N{7?s2PY*c5H<{#wHR{ey|sqcxQnULRWU+aoIh=a z&nQON$hcH!?+4!v=`n%VjL*%{JyzpQv)Cl|OZV3=>iWkzD6aZW7G#6thlEESr*rPO z@+%pc1^;@?22g5TegLiL59S5-jqUx-EeMNTp0rwqN4)V|K#h2R*Lw~VOW}?X3S?Xi z{IP|Fdn5!_1%o1AR25-J_-d6f_MTqQk7o;%1)}`fen-0VWG_Zy-HULWe^K^F67%T0 zw?!rdc>)%tXqOv+Isud;RW!gbD0{l42J$d&EC^J*6N%9hK*WAI5wm%mb{O5-+=J! 
zDfZ@semse#1L=GM>Xgk^Am^5E33t_eTbfWrhdnv?)z9h3k7l-63S@hE1)yu7eI zkHe!qA1$CsqthOu{pB5?ny7#y-Sg-*7|WN>oBYrs%F@l03#>6=TnD|}_NxHA;s7eDK3F zKsMmd@)eHhmjeVuR}@Z0AE=d!M^ervFYj(!&ONU0J&?^7j*ELz*LY;;dF)m0MRkJx zX{_o}7CF4)X)}^^$2wLGU76SWw1N z*-m(n7LqHps^{S0TEb53>;=q=Ym86``80Ttr5B602)3Dk67H zo=s^y3}-Bes`Cz(k_+7y_2bD10Nw+bAhDu2uL3uX=^BpFRV=e9j~>yeonfvOk&${*_53fTnd@F)<6LY<8K$e~FL>xcTvl#$QJ+sgX|TP@UOv$V z*R_1XfcsQ(!~SH<8d9fmwIefo+0lL{b)@WR+uIusHeNufjNW?~#=_MdBnTCDEng17 z;h{r4ohHHD1lt&LHQDX@{KRhM;U|e*BPbCGaON=FbKi{@{kAqVp?IG{QCImBwBDEbdKfj)dibw*0`iKc=nC+7g5?JlShFb8Q z0%9`oGz+XSy(BH4V$rcFF)Zp)zj0pZEn2Le8yGeQxP!_2xZ=IA0ACbBpktLKRmT2o zLxi8o)P!_3#>Bb<%r3ZdBHye69P*qYFlt`S{|mW~H{5NEF3XV78n8NV%wNG-u_~ZP z=S0Q?P1Ll@IX(7sQ}x)pSteA}F+2hoKk=P|c4_|smQwh$0$u+4Ecjb%SEE-MUr%h% z`q0CZXR{yLsS;SzS1~)OYm)|+2Z7fsY4+8>Q@EvspW1ELXoEHui&IY&x=e){ial+a zkjL-Ej_cwk+ai#ybNF(JSl&k|z16T#12o6xmi+M}b#$4Q5=Kct98AA{JeZ~#|38?B z%H46m4rN2h1yD=VE-n6kSVn^G;>g?GJ&Pb7uK2WBv*LPNb)oO8f#|sOnzriGZ6Sv-t zeh~FTAV5~NSJAE4^Cw|+1P99g_tQ&gaol}=Fwq6ZNVlyoELWZE zASzqNSkCYZe(igrqI6K9|E#3rqd_~~P3sQI8ajRDEQJY~rb?3g_2uzJsB6wq%n$Hd z;L8kCB4|jp{T&H(^mpW~OMXz7(ITg?ZY0{3CakwJlF}fJE6<|T&_?Fwb!aCfkxZgr zwX8@$RvSw4dR>9Q{d4PUpmb8SJHYbyaC}wp-HDiz3UTxvXPysm!&5RCpDMC*&Fe=K zNbEd9G!R>cdf36u(6dDk_>=Oj1agT|Snos?TG)Dt>y*y0&?sst0mJzAxK;q36r@`D z$1YLEu#7|vq(o?-gnphRb)9abS~Ey=Ufbs}wRR2I`9+3HfD>u>c9^5Ne6>2A%ro`4 z1X=ZMe@w^oq${ z+j$3c%3;?Jr0oZ2Bv_1W@C=Ng8cuBSYQnTvq*DLW#xxoZWHA_ynW!|aRP{i8)nEm>WQRO_2B}iD$b&1uAUR@HGr?;O-P=((Dx4 z_x&ou=`~@E%r1sb3iyoV5a{_mqi8dH$H*l?2j1+Y5D;iBU$QVD=3!SVJZmhE5d{l6UEz>apDu)FgLJ<&v5Mc6yDIEeRwnk1Uw+kXbfKO+8neKB(79OcY#^BHK z=7f-=YIx*odph@5E4bJHGX{gAj=}kP02p!C2sZ}y-1Pz0@0h29Zcgc39|NI zJ4kxCO(lo)3zP~vRRI$;j(IE9tnTGb&UV9L}t~kt>J(# z^HJQQ0K1tuk#sU&qblAiqbza{nTlf)McM4X$_xV~3&UN=0#1I{!`HWaxW^3kCCG3r zqU|Z~*3*~N3lnyN`0Z$0yoWWDY)8eKps7GvH2`H2RC1u_*Lk?OuifFQVI==18 zVJg`#TH5fLkL$?U^cvih9 zbl5i)-s8%chpHW5D7x|BZlm3xyps6-G7mk~u2>RP|F)@wozK5%_3(4`(N4K7nO=Hjx{SnHs1P z?ZTQNLRPV2`p7zb7oxtV|+4a_wd)V>@R=uMy zQ+xdDu{`rWd*?du|NHEi6cv`S@luKjuN=OVI(Kel8eVIy3+C;Kaa*=hnZCd+x$6n& zB6^f-ghT|&Y*%7xNCc5i*!URQ&h0P&_Lu%50dX9Nv_c??S`CXZwxhFr<%d2c>MYz0 zGEekwalY2c_dB7-=fj{h7(CZoAEMJLhp};A%;%bm?uBOjaA&EnODqs+VPs@fn2i7F zazrnZoh8UV$l?jMr3!(Y7lqzeU}2>=7GDpdW_l@M`PGQFn*#uIG;29z<=u2anF#~9 zm>nI*+nRrf9u{38$I#zAX`+iQbPb!bV%QN@_81@Q?A8^xZF8ddufX%$#GN>*0nP{} z5Td=iXs0`gJDma?UNz=&iwiAw&Oub$k)wZCBN+N2M((@v5TM7gvZf;ztOD08WSI0@ z^aqxX3$i;=d~=}+p#Ia|f@7f_31x@FM+qKpyoP*xEHDh70NsWI+Ibqdc8C`w-zvq9 z%gaL7y<=SA(3hBwV4f&I0n-}OcJp5q1ypLouv-Use`T0YP^uz*FZ)Y>({irpriV5z zUE_n#>*m+Cs-?_i3Riy$L=^=(jnRheQ1kQeA7+diUo%0JOj?hgoErzdiyQcC&)T+o zJY{0nGY27wz84mXoL&#q?p*W`c0nJ@1y!NpNBNDSWx|VyiD^t<{srh*=M)e|?BH{% z_|ezMXc5`8RFE!KH-J+c0f7Uwoc}a)M-83bL&Jc=6S;Bxtx>+xZWJSP#t^!d$TfTnl&kWouOe6Wy@#IDc(|& z7+-a~4t|I;gpzzBNKD2-$^v?-Si21!&^;gR5QP!kh`ID>G!kdRZD(9G?Yn5q`4Kwk zPax0{ui=62#`h38Gu?3N0v@H*iyEj^R_sImvw7)Pi-y8;OifMw$nhV-0c3+RzRXo|mL86!*49+^4nWCileadzl8ix|A;de17NVvMNyex>gzq(Q|7o!W6~~= zkL1KAv&5M2CF_+pC@ zc1|Sr{#Ud<%Yp9F$nG|+*eaBbC7J}1?9@L17XT&HGMv15&%ndB9!G?4^9X)yoq5pz5i zDYkvI-aB*0IifxST2mZfZN_HWkadCgC-I+a<<-oft-Rf`@lLmdm*9Qi8LFghAqX7> zOn36(7fWx)wJbYUXrsiV@D^E9gldmXtxi?J8m7ayOyEqW$PtFtTSrc)xr0K>-43|w zKp}54W&5+{>$M`eVc4dAxF`~+5-a+$!fGtDIx|Bp%E~2F) zB;ti{GY`X9>>UG#!!=1*iiO8>D=rw6gRk9Woe?90cXe5B*%gvvM|nw3?0JOb+7WUG zL(N9wW>#Tu!B6EA!}W2zAoH>U9jKBPQ03QSh%c32dM?JN4AN0cLj;fw!onD+kpTPn zgTgOPxNd0vd6;XSMT$wG;B?#i|H2u_fz!-xZ57DQCl>LB|3j2RFPTIvX>1PcuMwFb zmdy(5H)GnycTUmq1a`+&ShrQ+Y!-bVWKm{Y_Sp5m{0t;YfI6HyvV*KIrvH&!oLC-->!CTvfa%km}mYQA&bu1IE@?F zMeIsc0<)g(T>v1o7(Qsh(FPo9%C%SP)+LxR05glj!5cdBj>a<=^TEvovMm#L0_9O< 
z))o#a0GN`(&z2(W6Bk+@v5Ot)HG& zY8Tovs)KYi`>nOv6|>ml%|9WQhw#PXv7qSOv>6pHlwl5-i#9`zJ89UCn%)pA#E7Rj z8t&sZ6deI~kX!YP|E7*pO-CmrYBd8m8*?VyC`_nx=_X|KWpX>Tj9}&R;JSvgjM_LI zVmNhgG`z+{qctB|T)9KZN2lSo!X%gs`diQiL*z!7z!$A~lhwwxmd=VVQ&vQdOpHDs z{>?u-{1#-1(B;~Ct8@z_Og-!*m5hjTY`1zQhmb4sS1QT{`Ou`NLULLqhd|op;dGDk zTxS8ApI7~*5R?Tr5Wc2qTX2d9@=EN+0OlaFS`bQrExdDR7%pyA%tuiNZ+B1T4-1C}>+W#d*|#a{cPl zw#PsHNp(CVxjrjy0k6z~r!j_r;4o{^Z3ucAo&pD;4YUkm+N~!D<@0f_DtS|-9V_Vg z_E9`)DxZR;PITLXKhzvl2RPX)CpMXBpLC41^hI2EZ)RD%9`%nn7+Y@#yJ+g;+VDA| zoGg2L;-Z?D-V+9xkz{UP*`P6CfU z;q~gsqTDduFF(#gvGYd#k7RO_mtgg!-$2kp`sCMKAOvRh=p~IH_<2E!m)GojV=zb> z#ChuUMS@L(h3%<~!V|{ClyaK}hU{LUlcmAT&8e^L9qzmdA!YA^>1L}O$DPc!*U*gU zhG{+*_;51y3ekuWU-o*2;an8ix8F<%T8<?#U;v@Rq6`}b`69GLZH3h05F;k6=@d{Kj!DblqCZXO1gC>L;pk8Ktb{nHsExx@c zUo3YLKp`}qzKIY|4Cy3cfGPeY@d;h$$ym5vD`C0-Or!|e$4Q{XNhF`gz_DQM$%OhmA!dc%^(Pt~SBY0ogVwBvOHQ6dX* z(~(;)W=o{cYeG~wXLN?a6mIEU9lRHIsW!6uva7+;L86;?JlR1mn9%Jen487W7>#`S za(Jb?*V6*jWzz+h6XY)c{;Fn6p>{MG<@Gr!${RFN9Uhqi#?AtVwo`|~2P?>Y^**Sj z$1q6JL>-vI*E<5Re^=>*w@F|mQl5`~zKE^MWcGmr7m_re0I@|~3ZhzpSg?WmN%b;% zYoV1)_8x~sm5yvN4epK3r!mIkjNN;_vhxT#JKPvqfCD;Kk%Z26_z)x;j*QbSMx$uV zToFYf@NWWB7Inqd&QE;{Jm)#K)=2alkX^UXk$JZ0&;++=-uk}Lez}_2JtN)@^ib-5 z_Lk|?7-2Jg&j9KpgWqZ=EB3wZ24;1_#hP;jl44a-Db;NZ>`dc1RL=Oz)OaVUHL;sZ z-)dhT1cEJSeNqq8y8np(G;9W0E_tEtoafXCS)ByJS%V=DmKYc+OISYg1*hk<@)H@E zNQYihey2-)rK(&!ovsU^kVqT~7@|CzO3|oET3&a|%KsT?$n$cqI#>Z^D2`ZZHs5j} zu1((0WYr?@9(lSd8WdLU=JO*EP9!h@Zd-0BI5Z`3BmiojG0qan;ql_9&Prt`cw5WW z6%hmqzro7n!+SYUWRUK5(u`vvAQt(CM$}nQ2w=c+OXT?_PedWD;Oh}gYOCuUWxqgD zKp~(3hW;uZW)ccT18OQyQv10t2@$_BFCCU}?|@u`K(gYQW000000000000000 z0000000026WQBkL0000000007f)$>g*?Mf>Cx-p>Lpofcn5mcDqF`~R6mHxJls&-w>Gi&-hlG8Ahw!#; zgN}~=%%XM6T}xaddP*0AFq{5mwt47wCTvHWG;86PC~W9049X2X$cS+ZqpeRqG6+vP z?AJ~T(f{Iw)+#T31*- zc{`ov|B0CnN!9>Lz5O*&FE53PSUV0X5^ys6>@^eJ3j58iDa;X)-eP;JB>wnyzm|yY%sT64UdVV=3YV zDHIvC_JlK62hxb#-vKRAluA53!u%psQ10juyzV=q2JMA_n8;6Zewtf!{DWGj4_x*^ zL&4FLD6T?TaNebjN3U-SU_dESEwg`C1q*{Kx8aa6ry?Hn9;)P`4UAiosJu+KA`NCt z``6e_F7-wo?%SUiiqo;h@0#@9jjzC+gbdWTb5WIbqA(p;1*wF_1J)5o< zMH-O3HekyFSEJncErOFtU%^DfIfiP;(q#%&UjHj{oHr7mI0&*Yom;3CaaWZscy*9K z_x>^Ud~pJxictz}zvydSC=+*QWbwyE2Dq+LN#&orhodLOOt&y`xj`Fx+MWG_^hxQ> z%UGNRLxE!1D!>o^xt&Sd-NuwNPhY)Z1dCEYBKj~*okOrF%Cc>jZQI(*wr$(CZQHhO z+qP}nHvZ|nAGcLmooZENL}Jc?N*H$JSEDZm^8zxh8S@YnjE*DcrD^0zSeit|XzO7b zy8>C7y^ zC?4b~Z198cIvG-xrtotZKSTd1by>F8>uWufGh)}2(fT;9|NG~Rz8M)$i}Kv-fFQXW zvASxeygk!;X0o|{M2%wdc7?OzS&=9SPk)&q?(|tW3q>W8xT~M1CmNVj0FIB`e@lL zCq||~(C`D}uZYM-cm4YtL70e`&@HAesw0yYc$?}!N)s=8qDNH*lROdwduHMKt?h6d zg5)Aa^Fr3@oKG3!S^1$$^V<#9`C^ zM9cer9x}?Jfqnfmgdboe^@2g+6dnMOv){VpmM~z#X56H5>QbzGkeEUO2K*K{O93i3 z!|F@4HJE+r+2)L#p4T3vozvp)OJvefy1P(GO)CXY1(DySZ!tS$x>s{e7TEZdd!Kvq zrz_K}aCW~72<&aFoYE|Ne1z{uezOw*^^I|r2oFlT=vEG2JvE6C#*2-NymF47{@e`n z6ZO=C9}L6mS~YWnvfXq$ds9D3xoy3VNy-tj`IN(kVth%zFo8nb9H}+lTaAylrAqj$ zDU2w4YZk!(#2#$Ockuun|Gh&uTrfCn_yAdxtRG@=UFDnEEE~eF_9$V$qb|9{uS_GH zLI&~<(K(Nqdm=`b&&RBTPIt;#s^ay7_Qs zU_{kenD+>cmCswcv__1Cq)-AOCdDfLO5OmG$QIKnZXAadSjhdj7zpg&nO$Xj0d(*w zq`%2iMhCm?%H2HPvz&Tv%e`a0Wx`1psWR(fZ{FY+T8Fnu`GrjI{fpm!{=a$OtI&*E zElBl#fM>YaX=#3^QMoz?er(fkBC*#pT_u#G9;a1FBq`9CXD+)OUPUVhIZQ~(C3)dQ zM_Wm_1ya>Skm(T3CspHM2nx-6(k&z``m?WBNAZyX^>4ngHTx$bQw)j2qfgnwgTTX| z#A=fnW2~qKHf##qy5)l*4%kEg04W4nxwi(CBHcOEW&&u;I-`L(0dWT2`F3v@SUC5_hGKm2|)paG}z$u(9%eFS`jX#(4W1KBGxxpDi{)Bs+c6t9EyBfyDi#9NJS;OQ~FzK7vLUyx{iJu zA8b6^{ye?U+rRH7yZx)nK$b?^9)Ro`YH~B=XWxXCkeq#h#gMFo%OPD0!Hz z8*#XyHB1Ex%@i$c(v_3kKCqVk@~=72(;pO#Z<%C>1YSwy`!>@V2fS%5@4p&hRkJ9s z0jpwDJ!MCsstOzDpRogP_+;Hrk zctGHh(J;bjjkY~`Q!D^aWEL8xnR5Wgt1VuTjU^NW2_bqcHOoVD>rMvU$&W1_Be9kU 
zji(QxLUos79&LPvg?BB%h^A%9ylwj?u^(nZVfy4hdvGm5hLR!-3h$eCLSgKqs5}*=*mY&XgHW zgIoz$Wo45vz2e7Pi&DiMF2^K2Dx_NhMMaH=WFd~s`Wb}9mGN_2UuQ|E_L#PbGox4? zJgnU!?59+N%9*d&o7TI%!eb)DSh4OTC#tzr$mBFqce4S86>rm;l)wrFaKxL2HYgGp(n_Okwr!*4`HE#Ji|ZAotHo37wUNMSSZ z;m}M)J93x|liJ=#+ayT)&d8f8&(82~Tfuf3H4_<8N~!7kI8+0`dV#<)y7Ry5xxaMe z>A~yz7I&r;F6N=Au~B$%j?2UWGKLT~VX}aq>EjD)Ipv$4Q@PXohhv{3b}r z+az)kJcz2ZCLkQ($-?&U@uN6t2q(ZZ#8e+FU2DD7Tj=^EnO847x_(SpMoHopmi4wB zJ(j{S{6IL^f&2}s;yCe!M=i}$#LffYm`Iv`v80*~AC5ulA<)1=hR+LQW9J2mXV}r} zcrF+;sxpObL0cG8gq#%BV&95dRnlqHH=F#0?@5D+-GE(toi#8X4wFY|mWWW`EpR41 zdZ=SO7)&iw{Y@Arz#JOT$k-bHy6e1Yg&9)r8wu>bph*gScoAM`kVox(J6ov z*+=p()1tafQsqPoIL(v%A_G|xo82lOc+^H7RBYl+AaESaAu)lxiAj16%*1n56CB&w zl>j2fy+b^ncX^0jh6!7&U=^R9?gH7t8{Mgiss<3gA)-`o*6skXwC8a<(E=74 z_&vG6We8Llq4^fZp& zKl?M*1j*_YCk&kB3~mg?Z3dH4lbxsUl#xqyCq|7;iQ?oDGzCf`>zKD=dCGww8;4CJ z<%kC`Nb`)02q=4`4cGjcZ_UMu)Xfk4jS?O;b8{{|Gv`38RR;LpWOxy)Q!3hIqe?iM z7S)u8oZi!I(u7~3^HZU=_@P=d;-vCLD{FLBT@l41c5~7romB6~)+p!dzO)Wa{6k2p z+ZVV6R0!M(g@14z=7P02$v0tD*qE9z;;A7>Q`F8;GfoVhyF>X|3pT|Sv7!RaKLQkN zAQsJ-$MIAe26i}lr@rD+_9zqFt?uAlyX}lO$O1hehnJ|Oulu@l4)r1(^G}T3~ ze1D=j!&hfK-_uCq#kG=wZQMeiL|_=6Z45%&ZC+xFr7|WZW9ucM0NG#EB%}oLNMW9m zqjSie1_mf6&8&0*>ANiNbQ3Wql>mU}RkXgM0r#=XAnrY;r{3)h@$2ux_-PCLARFmI zl}p|oS%%Cu*h3L0wJbIx;(0@|n%CpVs=NZIrUSceE4TBTu{`9*nB8wmb=yu=``INO zoepi~GX$ihxA7GAmf^H7P{_c=fKm{6a1L`mxQ1YK&hKz&NaLKg)J@U7AZBY{psY`1 zi3PAL)-efbzj%4ye|Zbc-7}^@7z_DC5G95L#`-Zv{VCShGdV{5>zAI0fqO<>M^Uz% z7j;X8(UTwZmC-}kgDY6PJ7`^wmlvR-zjn60ccUGVaztn!9v}-_n#7stk<9w4)FMw7 znc8t#0Bh(X+ZEdAWF62=Ee$2#GBD&cjziwP9f3f04nQvoPkl`bpM_$0n0umO@4|<& z|F#hIXNLd6{E*&=tFEG!Rl7kx9u2X6Xkq0F9ZN(EytH49hd4+?FU?@?q`sk7ffPtL z46Mc~+CK&Mx-06v^`lh3d6buaJW$|hCKFWVU5rx=gF5K2NH55TPFfTl^=0JINx!#c zT|n~*@;~kwFlZ3c8QUdFCH|+-ihRMH7@fyTMO-gq^k66YK!RBCMv;PS!Ijm}`bye^ z6)t2ofg8GVZ&(^j(-Sy9jf+N1lw^>k#9h^%mrQplK7bjvb@xz&pl7{qh&Rq^mC-B?Z9~75iM64YW_vlSDIKsm8r=YjNLbA&x+wLy4iPOIUf;CTlXQ|%v;8l6e}tX#zs$Ro zh>&MF^!0R&Zx8f(Pvl&`O@EvqzOdiR_%TIonyFkAL9QwN z*UREp#}xaI>3wbUl(mu-=!Mi zowsWUzBrZdp@T$t{0|0^rJG`yL5JSAA_aaveHk6aKw`-sI>*1yIk1_9@N#8!={Ij8 z52k)Y6sRwfeImgcO`I!e61@Oz**U_X?JBTzYsGj$iOxUxTp8EY-hp9X_^qST^T!?E>9$Q~=IJ&{obZzXS15Lwt9q~f41N0Rz#LT*0cAf;fC@=nT^9z_$6wOYxVe&S3g^@#z_7YY z$ezhFD$O$a8i%aIPn!YeOyNp1T#P;Z1t#It<-lG4Z~-AF{wB5X+aReRh!loiAcNwx z95*`O?Z5Q?{Q^H=x7>Mf$BVi9<0lgtbJcm&IhV#a^SP7c9_?J_^1ynxqTQ`t^ryO( zD!83GUBglcIP;DTgQTx?>q+Kb#A4}yb3OJ+|6ZfZJW-wtlx9xGpq zi%E2Hhkf-pf?kV6sgmxE(<_IAjbCM0J z!W^e^s;lA|rG7MeKoP&jvcSVNML@4>9K51$g9_86t`dME3#LwG z+W%>pHS@pZl_oMIfZ=dSN>9=M?X6r<)0596Kq?#rL53!%5n)<)`x-t#!3kJ>*6MEl zwYn6S){9Q9WlLO6p514U34J}sVdUk2r9-v&Gh}CEt%EfL54Vz)wgUcJ!s+OM+ztiH z_1bdx#X7%W{+Tk@o^ouq#mTKmfjt8*x1M} zNgSN&K6{XCzDA;s@5B|?s&)YGHm4JSRxH7sA{pRK$LnC#>uX=%_NYf0ii9#2O>X%h z>{M~CJK1dL5j&CfXz_R#pW~s~>UM`L|5{&J#Q(&mx@oA;SzN7L05X zmNqIivv^q2-`ngVeNT29&1Fm8Jx z|6R!nHRh!MtBE3xo(9afNoJC?m8kc(a+m=KYcG#DJPTY8u~PQ?G*Ugk4bK<;Iq7(N z2>$hfgAHY*hlbgJF&#Ce8n_U^_Uo}iKe`JrzT+dh$Sl9g4kF?nKoCcOdciQEyp4h2 zdAnW_K`}~2G)Sj9=d0s+f+RXlBkVJ+jc9RTV?@9D)eLhrU~eQyE~S0-(&wkxDL$x4?;Xamm(EPjP}5Z`R9Ym9FcO zO5{#dtP}9PU);zm5pkWgDZ3fI>WkNs(cZ~H(%(ysUThLeN-#a)oL+)QZJO8tG-pf!;_bB3y?S@ai!=LYOAEfKlWwX2m%7H^RA-W8^CyON{w$d-034m?Tq)* zbf`kFL3quDzRc@PGO{i}v!zjKYw@L<#A1*b08fjGKG8&04BAQ;&Tu}F76|Bax;s|^ zwi5Vy4CG=px#^{G=Iy=m$Ld1n846~{UP_C%U1ZjuT7FvjLu6DrLL^o2pD|~57eB7G zug3lX)&-%H)j)abiI1su-Ua>E$sXz3-#$$|!goH|cSxfGQfeXDHCBua;C+tTxpM-# zLu74VZVWUbwkQvhyrwt?EES+>*1^JOs=(lei=?)T_TEm-Q`Am|>yb@>McrUOE0wTM z0*Zv(KR(jHm5D#j#tmsDKvlQ*ngQMCs}Gj~rPQ z18E7$Z){*OSFaPO=!3ha08DvXp03dL+=HAXL 
zcQ0iXTR~M)%g4>yt!2=TW|wm)GLdC*->yh3tza=RsA*8zdCeOWU;io;Tcv)88q%z|GV;difa^V*N5ND1P1|s%RdROyb#w z>dRnqS^WJHQ$PKaR9$47jE$JPSov8G)Ap~0;`HuYM<%nZ9&hEv3kn0VCk?Jnj42Dw znQN_0Xw2+{ltmThYqE$7t6r5HAn$6{qVS~(bJ=v@Mf!1Hvd+s~qO9w?`S;K55}HmW z6g4ZG6#8gpHZ~e2jlf}S8hqDOjGtib1m&M^=XHU_(^xdlYoU&tcOUWpsG&}DA5nQd zTP^TKgbLdWA}{2eKZ~ZN_se&j3MxYjJLu}b;Z*z0`#;<5x#@4ex2uqx#Hpxu0KV(n(CjByYdw@A9A zEVz}RZO7-xsN+r9X7`8ghrwfQqCA3!ttho}1U*5CewW?Fxk|8(tx2kT^02tScqK&r+WpwHKr#IoJYM!ObO%}9G@OJHKl zk|bwHzLId*Zv^d+m7j~DNWO%^(p?$vHrvQ~_TR3(UzITBo?{3h2?uIh5Sa^7^LbJ7 z`kwRIgGNEsHXI7cFh6X@6i;p{Pnsfj7*;!0^nf}rj7*6kQ|p#Z8C2&Z?gE5FzdL<7 z-Yf?@$@-LX3H-PfA}LBM+5kb`=f7G_LaTIRCitZLb}V-TOXeYD;ZJUj?IwivTo-o?;@E!jgRpgiBiQ+639>rauoghQ)Yu9=;mQ zcLGCfrPE7%H-`Mp)SGy?bO~)Jv}kU*3(LHIs1a!SkYK+-N3}kBBrZ}R74D=r!X_M0 zS{w*4j%pLtuXJBvo;wg_i}ON7*Cvk!_ZBwmF0>$%Sxk&Ta{_%za5P?qEo{eb_YnkI ze>%ArZ%-&^j1${1&=gZkmO06jsU@o*=BIR2Ge(N;Ws>iWKJR`n7K>-wcp_>hX;KH9 zPNub%A-*C~X8>Qgt`E4OuOZwDbJR5?C|co&_c_}fB-|En4Wne)meI2BE>zks4{wUi zV)Ws`z}9qUSZ+kq1s17;9{HcdfEh_| zTLA4yLLv{ej18pFhxN^Bd8_>QP3$9v;@OWj`Hys)C>)}lI?OU~DR($(Zt6l5Gf5%W z&I>-++#<*_1FVBcK&U46IF}N4u5Xf$SOWd;wo`F~dDwxI^{3BN1}E;+!$L1l!ISN& zq)0rYt^J_ln!r*kud?=EuLtE$Yfy^2LReMcP03^05c_b1xun<^3 zgYu0EWv_7Zw44ZboT<(6~>KLB_0B=X#j==MdLm zID}mg0RI%viln5^j-tRM@PFD6N$e>E53rnCFyPu3H{-o*OY>XRC% zt)@9S{Z$74U7`!uqm50ps&G<$%Ojc@F($klu*J06e^qGro+0uNv?A8OW*C+;5r zyEDc|&gaPkO_QzWaTy=w9p5{ya2+ZjZ+yGY?m8kUJ=5&Wq zx4>wptyOWUN0f2r|w@H$YUK?=CC<%blBkeQmr`fV9d)q@^v4WT;T~9nsgY>+v@r3upFWK5a z@2Bwdi(Uh;M{AC(LWlRpfFu+g;Zm$ zkHc=uFCv3gI(X*v)cz-~py43|f`lZ9y$OM}F#fNtP7GV9*E)tJ&ALe>e|RCLfWEtd zpIJKqL7H|Ywr~e&0XqY~`Do}nj`e(DZZv#*+|T9$TI7iSFx0;x{3G2+XFbr-6*|v9 z*}IjvTCE*^}Y^n?tMOoCtNb~~>3LJ^7hQCy?Ti{GF*<|o7LI#y%EU9y4!TaaD2 zjj&isTY%0|9_k#(L%^oHFyY{oVErEeX)(9ETz1HJqe*A1!itAYM)ZpgImKls(k3yU z{_z&<47GDd#@u+^nVI9Q_3ihkdXBxOsJBjwIUPuyx@$22oRdIvQlBGoi>QzA4f7g} zl(VVYLhB-Ek{F8o+!ZsGbO!;9@+07}KR`s_fKLrZ(h+3m>1)-FaYS_eK1?(6EzASU zcl_1XM&?BXm!Yv57)s6)a>63zv)qTGvio8%kc#Pk1D4l~@@*ZbE>#Voz*=Q@&nehK zeNSffy44$C@Y4Byc(c5KKz4Nks`g8}v~;3x3hvyI9hUOjEK>#k=|pLH{0lSaW3mka zlp^8mO_g^FwSyj%*Uvum?+$8~HfXi3op~h-eY^O+&^bZ>DEhmk=5C?;W_d@Y|)OjjTO9~Ko{j(V$p^hPeF&AI1mJ(s-N z8stzd5#IQD^p_~yF7Ho)vvY2c=$fLxnc3|i(nn$(SCNO^$$H4Sqkh7DcuKyxh^Gvw zp_U=iSh}<$Hc3$OIuKGSgpMKfjYaUmxz^cg?PPv?WU+BWNF}=o;^$7-wQN^_HEAfk z9~%ZII^$5ME+UMM<-E-Vshnl#_ReNi(4y*(Lhmz+xO8BJB`=+1O z*l_U_XdDMa15zHorZfk#ej9HeHR6^MLJ>UUuYR9eY%%RN<2Ji6ElU_sq>SZsX4y@CrP z*`Vh@?FjsQZuGsAq3V(doPY2Yz{lam1O@v%mrHW#MrQ>%WNS2@ydwGgZ&r7(DOn{@ zJ|a@VKm2Rlk1M^TbPe93jYQN^9sc=6d$^SFS_4>q?_xoGVzA}Cfcy>U6_?o> z#}bgGFmGOJ@*dqxwKQxZ_*C{X&u@{oq1gKInPR8cl-6q?Ny}5E9SKtPM|Z=qcx65{ z5?Q5iGgL?v^Tkwg<@T8ApSRp2=oYBAbGxaR#PL00-dL|c7>m7Dy-E(Av)ae-`ltu*9e1BT9CBT@ zWm;OSdj5;(PM{@YGCT$t8_Kj+WXzx_E3E@<%gb7uR(?{TT4Le z2T~oi!>j(9HJk@O^c4cQDz5xkY%-Kth7t6d&6fn5U_L7VQM-4 z1?F{AMXF4UxwY)BIBBbQZosKZN5r;6UPy*z`tdN2>YS%iKvBEMXNK&v)yRBPgb*jZ?v5i5%Zbj*2J=;3on+ zsIB!|nv|Bt_G2nO2igmM+4QN@JORKLyf*QRPQ3;2Xgs&c$xo|}CE(Wd+DO*I#DEhp zs*x~Djg#P%TN`n6Ebo?0Y}K&n!1vtDz_1vyNID+Let#=&PQ_ooHx$a_Uz$ zi?X%N-JkI>y4Gzv2PJA28Xh?&B^g)@$SHMLDPD>x_}lMtf405Py<)Ut962MujkL3C z#KZVC8CoEjo)EvBkg3Z&74{PX=0-reZtkV9l`lh~|L$#i=y?s>*5t4t`o8tc2n;HF zi^9FI4>@%hP|JAX^E+Z7=fk%S>Ix2u;>VWM1?%}Hg;bgav0?TN&gyAtB2=}4b9YnZ zzDXLmI)8o1{qMnImCYP0hZ3w zSu;)}tu|XkQYp>>2B^A;)`|}FYS*T1v?l5W`By;&y3XWwK048hbzf6PyGeshNJ};k zDV8ea>J+@F((zzKwUL#QU*@;)irFtE#WrX83vBev#TY6E+|2Z;a*l%u(1~Bkb9Dq_ zInwDX#znkfHTQNKS(I~r_&9@4%Z(-_crK08-F?6K@J_wHRC63R#^s4xxJ9(@=Mw7r zj56@LWVa7MtRbvUC?K2I&K5%XBgbo1SNsO8>gJ=^ZPoKuf<8Tu^rbeJ;X2H)JcU5o zb)}q~(u6JONX%Ya4b}E zL-yK585msg+Nl^Aoj6Vt9Wwlu){$S`!gyVmg5Ht&G=OjQABU~#S%KW^5W 
zPjmmxf45-93{rf7Z+2)v-_HvKmm_D|W-9;S?m9l6B&N6)TgO7aR;JZ=xGle-LSKMZ z`@1`QKPmpjYA^Zw1>k5AOAz`_k?lSQl}|k}`CK_~Df0)t_eo<}cT4DRs0`atOdqLj`&uDkK&1ii%vXinYpUE3LgZUb3?^fPr9*nLL&GgM*3pcHA||g3kc4+ zUJlrzw~P_JMf^}-)u>!|RA?)$;^>W`#^Yn)zL+l^D2pc}3^E7g>8HcX z#@!2Op-uA&U*8ur&8h_|re?`%BB}7ZL{U#gDL7BkMOnt+Mk!p%!vJf#M% ze5kWkqBepTXW$$eFbsY?pR&iySr|(mDFWs`u!&iRA$lTp7GJ`y(!3M)cH5SiZF>5` zElqud6TDC@FijDRxT*6-rPD3VA{%a^dex0}(ZN`+l%Kyn5{Pa$y#M z^#Dl^S>Xw#C?0W#9_XmAP~pMQ5fj? zMijKDR2z$6s+$aJSRMCCQ^MBBB>b3&X4eXkZ!` z+oz-kMe$T)rV$8iSwBVewn{b8H1jp)_j&oLj~y_Z{)%NRP2=oZ)$9`amL|6jt9&%% zKSs_WZTHd6jug8-=}_hBFN^t@9*f*$YGPZe!U;r{1|!-D$JMYA+^C=mXL-0CKajpH zfl5w!EamF#==v!gx-S@^Gp}C z*&-iJm+)AxK4)cGJNxVm8uvfvM6@zX2YwxF*8+&x!F~SHnI^tELDG7*h}}{K3WOGH zT#re)vAGp&LHREuXxCJIzk}lRK>tjH^j&G0x$#)k9i6Z43>p20|I!Ef}CLePAlm`N!zfxq62`up!K zL$$`Xpwg8Yxols?tUS)|i#Wsd9y=IcQ^luGGdRwa00qn>H>=c8sdbOxDJ8r_9%dyz z_MO0Cp>y*m2d;Tw{o;Z@ftvY!47~TFxr;~l$9X2Q(EWaHs$NnnLRgM#Ddvd;&w-Vj zla6VZQ&D3SzMs%JImn9Sxde~Ez&wwZ98vce*krpuW+pT?7N|O*#Bd`kKLoYjv;eG$ zP{e(s5OPB*I+p7{q1?hy;AS?M252Y1fe*eBcB}=u;u@z$eO)Jx&ixg%s>frm`-leZ z(!lU#69MmCVDbwobLpXI9-d>so6h)kX)gf_il5#YSAjiA`SY?4p*H-7$dsJhumVYK z&$VQ*_`6MRUk0_ECmYZr=04qso@CmX7`cUa+lLC(gz$dk@lL~abnm&?2tQsCc#?Xb z8r)NJ=yB6*j2bB|ueAZnN{?tvMbTx@``M4#m(sfm9>7@EK1G{h?E5(HIH90M6T0ph z_wCqcUhc4{}-=Pelfc1)1g700+ z57>~Q<{4Hz)h_%7|IN1+s~oG;oV2io zR%x?-1-}h>=*aJ67Andjs;+?I>JJrgD0UmSrV49`n-db_OGN~0_k0vRn+65o^!qjs zQ|l!GJ>fkgm;>Dro|A@IofPW~8Et#Juhdf7xVE#9mktcZ9df|qb;c-CBkv!oSSACe zh33s>^g2{4t<2ppMd_}AXDOZOJJLCzT1xM&mtf#o`v{3_*dhuE?`FJ*xHIqKhAB2- z=gB)->l|Ocp=b{+R%}S0X3mUOWS?y*3LC&TqfN@olOa7%QNw9+{3qs_n&}=Emvrs& z#u+aHJ-Z(4OsFki^_|A7)}Yc+tg8OimS+%6mYowI9!1KSIrxeF4X}U;u$S)SSDC)- z+EkCncvfEZ91o9JdC7|R`kb#wxgtL(3$m;a@As>fANT+z3Ah%e9TS-UX_Ji+M>4lJ zx>2-?4ylL^q~pp}M^c=t5AS)s@X3$tbSIj>sB)7fi%ppF=VPqQK)S!F^g5khAVG)NNyY+0VXL?e%;C-97}bd6zL$j^2vEzzYB(ev zoYFc>X;&~t+?1YR8EDoA(hexjRQ9n zGWyYjuz9$Md58{(H7)pBxjJ(_+5t_#f9{B;QAAeL`LA@YEcvlAlpJ0&$lu)TvsTFi z8)&`_m`oWbNM|lQHR~h;>Cb&h)Y$TO=kos^cfVzkHIp6_%qxAvD%FcE!38lpeXZ3R z1=(|NK6XbsBvQV&Xm6Zx8zGrC_~T@kekPJEN%TSBbEOYR;QVDVAW$>`knuq!0=C0Z z(MBLj%c3rj2_Ah((GW+1n*YjzLmGg|l{}gaRtCYi(kb@0#*+wKh$_m9;N^n(I zP}N)cDx}|#n2t^en+9U@*i3@@S1anG%o0r25;cFok^%doALP~_Blc6xJNnacL_p#r zinY3rhoBM{#t-LlPxw6gVmGqYc55OXWO11ZPO+5=tlnvjHC*uH@hD?7VNG(^JBXIP zgRLs(GpPy!;SOHJC{@%D+h}GQ+V*%To4}lraDhreToZ_uYAanZ73BJTfQ^q`_?MkJ z81QZo&ZnOnRzA~+Ov-)~l;8}jSOGbz$H=FBT`Mu=wPU=^lo=;A!TwmE!QGy%1{o~8Zd2QS{e z(KqhW5CdS#uA#~u)a>A*Gvf7J=%8%urWa2xZ$B)0xaPsMc)wrO32Yz7AvjtUM3=p=|0Nc1gkT9e>a zfK`7H(5&|ClQGljXykA zBExX|KEL|p51W6QjdBg_Cuj@TM+glK=TY81u+pEUg8qDOryfgtnB|72|GBml-$9wD zE`^S=k$KI;hT7Y-@1drBExep)TNq`amHm!N1D@F(L}IIh01&tf9i64AB{PMO|A>D3 z@HkH}Z1}RWD3YkQb91foh2v@rx6&eN^bL44)n%M915~VMOE5P%if1-$&6?&z^+U#8hf;Lc_y6EUm)ZGG*w=A7_?8L`BVBv6Y}+|Io@T+t z{uDZ^I`3TYKDciq#VG|V@>0qS4&bh}BKvcXpe_7Wy51 zqQYXga8CX@90uScIgv;VmKQzE!WVuYCX!I+ZG2*K)|KWHusIXjIh$d06kar6mG1!z zdR8kgKbF!NDhq=D8UJOP^=JA1Q@>By4U8`$$LNcSM$qZrpf|*7`uVUd6laIllgrEt z0ty?sVtJoR!BYEnaEG4O%I95o(^&3;5Itz$#564WbL zGb;{y8I(E}%}DF^KD7d=nQX)L3z}yA+_jPl0Mz-K%HO4y>N2o$+hB(Oi zi~;17gga2XHTM1U`TPX!c&i^V)`>HJzb#E)pyBs=>@KsPCFaM5uInZ@#9 z#vcius4?}VtISfdLvP}E@q;O&6Rhb1F*_K>MXXK8t}3svk492^Zvbsp$C)e&q;!dW z7-!!m?QH-1c*x+spbIP`;!$Yc3!URzaDHZU9_W+-CxgdV)k$~mX*|rZ`|X7-r=37V zS7YljBfnWB0uX^xifYAQ9ydv6-cv<6cbd#)p(G*{QQ;ZLd^!sWFeb=hVSc0nl;0`> zI#=lTmr7`C!5`*Ra1eXqCkZynjq@%yyDFAD4zSvVbg`%DU;3(O$IqL~CmH(74 z5A6NuUT%H(+l??7H2B_pL27#V{qMgVer~fcmr~s_KBC_<#0X)>4LI3~& literal 0 HcmV?d00001 diff --git a/assets/images/table7.webp b/assets/images/table7.webp new file mode 100644 index 
0000000000000000000000000000000000000000..2be33403d6ad0036f91df9f281feb3e4421cea8a GIT binary patch literal 92374 zcmd40V~{Ru(AuPi>n{ArR8I{X?*rjEZ;S66K>c3v`}Iv0 zwHp(o1s+UMJM!ad(v z-zR|ZzqfXB0k^NcA3u^l$ABL|^WE^LF#zzB`n@O>=p#83nB{ZT{OvioD#oVq)4hLL zgiGhEbN9RepVC|F=5Y=&sk_?6;}mFId$~iYG*Zm`la4_<_Im~?)y4PkW)z`o3A(+hDAGg1@pR$H^ozD`<`+4_&=Ykd}dk9aZ6 z0#af;mx`9V3*hw|M8{I0W}l~s=G<&1(~wr0P?6~Byp}vz2`fq)wj29!pr`56>@>XC z*L|+#!)S>6&!vKU)m~8(g-^+l&_M(K9nl<&DpH&-dq60G`+Ey&&^B;^a`XVaFp8G( zta`XgC#=jWpwSRO0s0yHt+Jux%$u>@cwKheX8Q;kH}uA!p^ABl zs}xYtFCYax2!*s7>+o8zIdy8*Fh;PNUz zn6b2>P+2l1LVu{=ZXQly6~NkdXkugh+&P9|q%s zyY$^SOt6EJwiNAq=>rdF#MCj07pSoF{^QsCqw)facOWyrZqsJxwUiG#8UBC&esL~! zYpO)o`gJw@8-xF4W}qF?5(cS6aml9ie=Q&Q+$kieI;2ZwF7}@dt0$pfK^gg}wyoAX zchB=rrmTWz;UN?ev9JFZW$5^I2*K*!=De{Ui_aFa&s{p`dSrWs;VDUZ$(wCVM@V1b zcYC@ldzWbK9v7RxJD)>QcJWUU^(qWpwBI>uO4Ek8ud|HrE7}GU6YAQG^tZ%yuzm#A zLeU0;)@^1}FGwgUx^en6J4iE=!jf@Je`K1q0)!qNPVmdV05&TI{i3g6lU4AB5Jt90 zqy*EDMj-1eWJZ*pTjkebg84Y&17ff{0=Lyz=G7JBDM1_uFRC6kYUJ4xtcRFCt-_@1 z42@k(fNLaG16DolHlftSq!oF{eIc7U9(^{8J5HBMmpK7!Yl1q*6SzgpmXa}(#HSPT zvvTu4-xSgkKG>`sbZh2n3l@Tb+<->qadqBjgq&{>M5UE# zIFk4E`TRa19H)tDeEhMEuqs)@KV<2Yzrd8>sXh!V-%f06xMBawB-|oAwHzK1W;rTa zd}($JbyH>hubZ;EJWle^ocXk@n-H@E5E>b$j`QZzvjNc7VsU{rXYB)&S%}>jSM*y( zoE0kmUQ<$ba=& zd+=q@Ha4=~ozdee$;@Cov5!?uwr!+5UoC~H48DWFSLswUBzuT_f;~Yy*P`FoT^&jB zA6MMidJ+`%*^#pv8up)={@>Ob#GXKfM#bdBg4cS$T=zVb_^xjGsOrT0M$S$rATI^R zhmeM!tOz)K)fpqm-fHWkuzvtMQ)OjMi9?+5YG=^r$bZ75$M)1y;aeGBEKjW~Hzfbr z(dN)jGr?zxBo*Cf+tGG%^&AZG6gm}X3ps8`%ZqBv{kOf>8X*M|;oliEhB%^Vxc}6{ zzqi84zrlUrbWD0OGt?)IvP8+L^zWguz|qYD*)B~d$r|2YOJg!D+1p`CkaqFq{aXr{ z!&l(`UvmjjVBk5WUN5H1sNJ{7U=E0L?k zV|zo(JhGD|E!pVdPM&*}Q{Etj~Kuy#f}=O^RD(EZ=NDAlrG!= z791oee~N|uQsDLf)JV?1box`yGc)T@r{*BdB&53rx6IBuqsH(sABhylUq!6vX7nCk z8jNFSvW=UO#}0M~MIN+0PEM$%CFW9>)!moL0ia8T{G+qE@9QG*Y&_-#4M6I?p&iSr znC=2!>PDY49Ai$%`|CKZYUUjV3|f(oM~5C5Vi4TgX~2N}98=B3LN*AUIU z06QqOL7z-}7D%_soj=Wb2=i5=@TO(m0tTC?oLz9Kdt(QJ0RZ|j9`NNAZCtD=tTW6# z)M*cb?7MeAjQA zpLj&-Ff10NLRXOt~ z*Nr!vLr}W^q2upP3hup*4hVyL6s*Wo@<<#+YSC)#?;Yg#3vg8QtHpoNnd)*i8mg~2 zu))1>!US9YsgvP(B{EeHkR5Cd0p!KjJs+(RL0p~%cf}Dy=ZkT@^b+P#h8qGHi03`@ znytF)669j_KQN~L1Oe#!snwLl#rlxAz$@`2t!O;;so#7LAAQD0KmmM?NzRf@O|ef7 zV$n6|k18kptA19w0hE!uM>vrdX;i~~HgMBK9#?V66S*tNb}cH2v=>4bI_|@Qj&a{O z&bUGBh)$MJI>Y|x{Av%0{4rV(^mhUaYJSi#-$CQ-yC9=!$e8y(xMa8n7HTSHmm*e7 zFsY9@6}NkKLY8>>&H%U1QUMG(9L@l2BXf0h@)eZ$8HtLaFqWdA-X?MSg%Gk9e8LQ0aySt!y!~7F|pE#Cr;afQ%I>}bd)D&fBPMSF_$a$~4zj$$!Mh8*h zO#P}P%HC+^_I$h>z=f^%lF5rgArX@YGyyEaN9+71Yjs!UuO7MyLkfwFI{CE`*p7;N z#_zwn?5>qeQ=9j03{r6?@VNfB#ZDRUs~d4;3gvgkp92K#(_)sTsBWQSl%`N75@Y6aXz>%gXnJ~1&3uwt@c0yLQra3SM3d>zu0!|I$n9l5@QM)`53jgbSMps;fNqEWnV`6 zBNNIdk|-*V=&`?XA%fo)t${2HYpR*;wlyuCIig2%ZcP-_5&-&2Ht~_uf=i#0jU~ z|Inmb!PyKYyN{=S7{y@C{}_|yrVwG$=oOoTM2n+q;h1*JRlb4>x zAOk}3nYTzanzmq;XixTYl-!36|jt*De0KMc^*3R20^w+i=O_YtjqbRN9dHdVAmd> zp#NRcu@^`RvuX09d%92& z%JfrosFo}P*M4Iy&V;}|XBWdm{-VKv1QHF<8&}HeXs(}-dBWLth8N^x=GW3L(^+ZB z_1j}O#J?^!C1Vh&C!eZsv>0Afx+72Py+9U9p}HTmUzv526bGYrPOFy6y-6*ZitK^f zlO5d%Hetm!y^*<+e=Z||-ZAg|7&4wyV|d;EdU;o|+7K3=(?E6zToMTlqiXR(SH3)U z+GCo%{L~$tuC11`(6Gg~#8)$?O~;9?mB;#RK7 zUXkP@RVQkqSsuPQO-ULJ2b`{eEg>M`_P@)6yXCpcyMeG!@O!~fWA__H>g!tllg z?G?8>9ft{wvGXit{u|_V^*WbZNTbr|eVD$AL-hx)&5#Le-hj0WzF-*^gMZS8TO+|J zGOupoAKGIwm&yLtM=pxgOuO#W(Qdt{XKK1(0HmaABwXIN*DU78N?><6cKth7>@?9w zBhpf^1*@BQs*qJ)RN1LBxZM)AE2k$>)jd+KXkgYOeq1D>i*tic5L`|9>bTE^qJ(uV zc8)QwmD3VfB8-EuE3fF({y$rW7eJMh=qi+#CN$NcbXN8`3)7-zZdv`5LZapj^|jGK zJPUnBmaVd?Ke+kNUTzmlo=u-0M^R8;;puidZP|y+6gUf#?San5j}aCx*5%Ph2+F1Z zFbzy<2YRjpwFcw)#wJs-ItPAXNFx<`+o0pB?bg^Nt0WYIGQRs6W(x{ht4offAEt;f 
(GIT binary patch data for the binary assets added by this patch; the base85-encoded blob contents are not human-readable and are omitted here.)
z)s@3z)iM4|>k2}T)RhU}5_dX>gsKxd85=WDFJ!iEXv|JkztcOeW}O*KinJRGrl6Qy zOD*}~D7t^aqdJl9=|wo%DO}>R>z0O%EM{%y33bfmwe0%$pie+-E~dmlQa0KWwdqRofjqfRzLcq^xS=Zu zbzHzqbgFm#K1YG^$vk%Lssh>RyAHt}PTDj*BEa$Z9waw4BJh}f25DD}yTO?y`%buFcyU_GuJ=+Cm3 zT0Nenlv%hpC54(S!3l#fjOETHhy6dk9kf~DxB1@^a7ksxWI7B)xD`KKIOZ*FS;1Bq0fYxzk5_CH=V73!^*{t+oZ{_ zgyW9;vT?_d=ugfI*%2m+%cty1w<+F(;K>%Why7$wz{=YEtKh-y>WQoh2E~@&RI*^h z=GMK9utkk5if0Aa|(OIff_qsDZY!u%~PRqW@p9#H2*5ob~B2oTAyXsQPG8XexxN_`D(Hbi1iqXbFt!Uhi!*as9N z3Mb?}%#`q8ZpVx0lyB^8Zm4$Rz%_`YcjoSs!NXQdugeYiawk_w8uciRY9W&=x8v^K zRZ$}bJNFI=ZCORA_V^oSrUWQq71^NVIlVhnid3?eCDIAO47v`)#MI$VGi zPV6yEtf;XPrrz=VF4+A-p&7UDouzsvTtP9}w+!T-1L6X|^Ijw#p37)EJ9_dv1dE4` zz(NS`5)DW-k07)Zt|XF*Y{<*zSsiUt($at}lZy3q^Igqr4O>CeBBy#GV_0LZ2%2#7 z$5@$+*GHOG2{}ayfe1~Kt?#clMr}W&1mqNHJhGTGy9qPm__$D+1S_8inf{y3WSk<} zYT%>4Bz!pu8f%Tx+| zyj&%dvH*dzhg zY`J+4{}Ee7E1vZ8^{WRMXqyF9x7fonR^nU%&k{KvKbfd$-f@S`ZqEaawRgeaSF9$XOKeLu znNviadBeN3=ouz=OyV!-9Yf>*|N4ltsMbfX#GG|Y@Def-l`yr+ZU{V+#BpG4Hxqev z{KJL3GAOv!U~?qqAT$ehVjvqv#fS?H5$t#-oxOTV(j!*C8jVBzE~|G=%NWp7%&hle{{jZY(UC+7(f~rs{yIpS<^n9K${eBZ_t(u5w-M|Zob$~ze zH_i+2BuA{a1S}W_mo84qX72;y#vy-&_E>+dG^>?ohs(SmKhYO=E=hx6`H}va(2@xTVtgpJJQ%CYw}9x)uAI zz5LqN$b-|)I7-N?lad8;Ujm=?GL7U;zoZlEFO?ajRy|F)*-OTb?WCwAGM4RaIyy6O ze23`kQh~Awim5LBDP2pUN#H;eBa#mwISmdTv~EnBC#hc)J(%FP63<5%=W#u>L5fZB z*f9lbi68~X;_#Rc6IyWw=at>6Yl54gt(KV+GzS1agTk-_WCExBI84mWany83?qwJQ zq!{7f+`!P9E}UV@+4XFsb2!30!(J{Zu(4nMc;1EH2KZyguBi$hyxQdF{7TqJ1xyNM zivqqCb_r!|R#pGwK(VUw(l1G5j-}`et?pZNBGDuPTpH1YtZOD>ZDij|wm9TsSU~s0 z#?S%lJicY%76IRD4S|-qw<;`5#Cld|qN%a0ZV~|>Gp#E4o$Uf0JvFUZxvc~3(zZrS zB3pTbE7Y*ByGow{2j1}`bBZ}Colx;T8xva0aes^BsJ91M*gOc`NSnvilv8AN(EehP z^iAf8aIm)nV{+LT75l|4JFwnsEYLsOe;N^aIP4Qk$5l?nRc3hCA@Db*?95|6eW_hS z%>ZszbO05@F)$g@Yobu@qH)~)-QAjHUMWN%1VQm`5B-IbP`IwClD^NtN~_?HFV_}| zl>iw@QraUCVg2yZY+aT?v(B6nedcIqgea&;ucm01K*a;z-*mXQqs<-oFc2LNmppOU zMIoNjYx^40{NbgDatlXio`H~uuk88#4vMU|-WyRkSM}oZGPGWq3o&PIdMW-TSezuG zs-JE>L@ULq%x|hSN=U|uwzvn%Pl!e!>Q!I1 z)h&_>;0G%3MXAzXP|?@ zU29q$C>R1IF-D|D8{wZT3LraEfDv%=Ro_{$$$#F%$Fk;<04Al7b3V#PO>=-!6}$W_ z@e_yNmj@c&_Xr7XMj(42+^Z1yXm+g8%hlK6X5>zuXe*BHP-+G*kz0fuT4#a$Wl58r zar3&x!ZD;(q)Ob^5=+0J7Ps931}Lf>JvFUZ#j?QoLj-?%g*=vi3|j6wc;tQzOj`6;ZK`}t~6;^lru*@{e3&P)($&qg z8Et0{8!B4dr`nVIKG9dGVzc?i88YK;|LK#3`9>eZf||phX3U!YHD0<&yf9Ga)BNF- zF=Tm6U%SYK#-DGv3sQ5S2@u|;DAkoVt>I0k1Z@LSlmC7gnS^XKf%4R)^i2`Y6p&Kv z0Q5protRhV244h(TD&|h z?O|qg0J+;FGTkax>u`C*`taJ7DUwl=!WGWY9Y(~6*kvG3qRCr>YQEbuWwm?RA!w+> zlKZ2g9|k!8G3a{Q0Wa3ncPR4Un61$7{DNNU{6`qC8OjE9=tdBgsrBW8pJO=ls_2v^ z^#2ZCCFYqz>k3Mw9%MAkmg&d>Vwd7jsGy=F2Y&kffr--aWCL1JI*C+Ftf%VSs1Iy`ImVx8lYMJK!nsSh^arI@y!iUM}7j32m2tO_PCx2t+`6aCw& zd9z4F)lj(~J{Z?95~cVuS?misb5%b`H&7TCwE`A_h2E~ zjWaL_Iw6R>omtxw4Qg-RZg(>Y4kaL8xxIf~1#f||H=QF$QJVdtl3-{@!c9V+C}}74 z%u$z|+d;i-yjjQ2l4QKjH_4bIq^vJKu^Rk~YY=clfS#J^${4lv-SgR*g7veKByMw` z5$mbNOhmB=9Vb}2tqcg14O@T($m+yQB3g?mn}oBjIS%VyA#;k;9fT0F!a(cTQBBRA z1ChS#w%jDNxnk&&e$#tDA214KLqPRXzE+Kn`)a09pymr~lXG5R-%4~;S=V?GOBe}5 z@VeTIQT98+#|t?7UM<33ORvv(4ZHW5lj0ftkf%0qi^3v$RA&}b^J$FajD+JvwpV%* zgJ$xN7teNA6*^}Jt>5cCQooQ=Zf*AWnEcGrXqNbC68~)Yb1(g|n)UV0fJo6c*99H4 zxv~fH@0fO}`KilB(Yx?uF;h0o4PF>3cgu(=*4E>$>0gTaGMG2AQ z4i!Yr_ztR$?o(PM2}E79xtD!1gT{66~7#~hd9hm zs1TX_e`o3UBo0>2S}LuTsh(!aZY!zc>Ov7x=J7WbFJ90sVFO}+u$PO3bT#UU(g46I zTbp4WoISIfImuOe$TgAmCxR=%PYD!Fqt^2PK0v|0r|DK^!qmY^08cFHoMPXGI80fH zpqaVQ(tA`&doWt}tUJBK+mN9aQ63EuMCU$j=t3BK3$*02e|AoZhmm_;uHA@ayCA2< zl_^4A`$P)$kR9}S@ug(@P`#+ehXbv&|M<_KiEz>Xq7N9M;mCDEEA-CkcgKk8mS7rk z)WO~c)KN*ukBJp`6Zz*&j!w2Wl_ry2N}k25hMFxABQifC^$VSTU;ME9i1Kf*F3!W5 zua#Knbr@{$sy`|5xZ0U79zayPJa@J18OB4f3D8$D>Cf;xC)GBZ+bsz+N}G0HJly!z 
zXT!kBpqj;-Fl?@Rq;9(GlX6>IF=EFu=~k38Ls^PSv|SJLWo{ow}H=-kvi}@4zsMsD%=Q~gns%%eyrh>(k-OZ2U-ZBl`3cdndO!Q5NK_myfM80T(!5^^zsH zAfZOVGNqwm?jQk)R?C;zJ9SQ0ff`3?+;)RCf~t@eV0%4|>d>0ofdEs3JWmFT#|v`+uA}c;pYH~{Ju&@swaG@ zQ=0R^Ld<)%{$1PR)8Yx!HXBIMFlZPLuvP5$lISDRQTU4J_MP0A*z%_QRb!Qo4T~E* zH|Ih}q5H!IsAwe%=I8on_NFm!20^4m+I=`GjXTOF#UV$Y$-W1SIY4<$>Ym|P6MAAdSmBqs_a@d%bJtJqNn(J@OnA?rj%wLut$wPcI1bZ{*yY8v|<%&}NCqNvzLe<8b*nCt1 zZZMeZEYu=GN~`0K3f1tNN<3jS6sl<^X;U{fUFPhU9Qr4sq0EURt>-0AHuTCj5`~I0 z7w&4O*cOIv2QCq-xi#%nq|5i`L57r)V4emGoWO)VMZq35B{Gw`7s^PW5XD&!Lg{0e zmO(8rFeFW{G+7iTu$Z_|d;!BUZbIvtpmhMvSGeQ z#^-xxgFwKjlgc>SuL&tNw0yt-FnW+J)?tnO2_~|6c|J*3t+2Yc!0)?XKeulFLLi{H zng6RT|H1&L_1{G2g!qKJ*dxNzN#)Eb92v#&FAxVY5zMaHZ|gU3wZwp5<8uKWi^)D` zO2#ZZs69|#yA}lxeZ|I-d*r^g{q2ts1aN3P_3^MzU9bCP`?W9!$!;?|saS`)ZifVj zz(B?`s*i^hI#?h01%7_t4De&wm$9^1WD(ai2M;FHT4Zmq*pWm4-}!djumrNO3%9$U z9;6(u|F)Ivwk;X~5?pN^F)zBe>ds#+45g*RZsOsH6RByw*bALuUzU8G;%QWR4ktR_ zL3s%afAGRT?C0NJY-~C}WV&2Je*S9+gi!1QcW1;q`!WtKSN=jFxe(}5OU6KBP44w6 zdeZt9`GT$KHRX;b-2SmsbG_3!59@)SfIKdc~EBHgVaDP>vRb>m#J>*Ozmakq9Nt)k$Y){7Lu8a(%nL zgDnVP7t;y}n&FHwQ>_X51$8Fs?Z>~}A;1z;aWoig6V zN=iqs@N%)RYAt2-3eI`rrZ}kq9fb43F?ZC3ZjmRy07rlrSp&Qr7drD;DPqB}Ztk@V z#&w#sHfoCURZbihFGS;aZU*KQ;-zBVy2#}nQ|KheOKZ$1^3-^t{!L`N9-tDnNJpJB=PhaaJi zyd`Oq3^)JceU!F^76N3qY)>X^Ep2=IJ?eDno*}ETHnXfBEhjptJAVORsSw}#$lq}L zndJed_Xu_EwvPO~nf%_M-!{u?^|UBhFR&CaqDc$*@mL5=Hfi5tl6~L+!hebnNO-xX zk;VDt_T^Z8qzc)Y>+ijAw76CLm_;|Hv9DC43~Zh9s5s{D?pHG+rk_hswfBE@-xkQv z3N)m;9J#aea=`f{AVhq!_7p%H*;pi$Gm)I!gOMQ4^$?`+M3s{rX+`?? zwT&=L`>%~g(09MDI+aYsCdL#RBc{ z!?hfwiZl?~v6JVsdyRhm+X{$2pQ)_2{B=MrV}YV)@@6cy#HVvlmTpzQeF=e4XF?n3 ztI!Xa(^%u}KBFW(;IQhv{F>Pld!?{P{m_i4d>4EZ#~2zBpAMq6Kn4&1pPIl(c5M|GZy_~D4{>+{Q0gwlnLOKIduu;|dDR z0Chueh`G=PJ&u*5lTV;*Eag4U*9x)cbc=a5p_GO}v@_Jda~oqZsiS>{T&UlxsK$BP zvpEbR738kTsAzd$PY#r9X@Jw^GV*svPzV#4f(IlU4cY)Q6$)_u{dx@+_13?l0o-NJ zT42;)iu+%MVlYTseK2ecelcS-@UvH5OvG3X5lP~M*gz&l6++6u2b0SB-Pz+^Xt9s) zt?cJ7R=uE5X1%$nlz6i#wt?u>kM^1G}PV$nlyUT?Vm5a<345nHQ#pDa5V#`b%BB`w4QB&}Wtk#Ns6gjW2;CudNcS ze#u8=r(y!MOfhP-y40oL*ZygZBEE%{>?RXcmKbiKlV|x9=^=9R47cX3F_vP+TWuqu+XBV+ejiMkLM7$v2z1x{VZS<7r4u&AaDp}^8Gl* zYXI#EURmVx2AWItDLLD?Oxt(=GS*JA_SZL01s$KO!jpO8bpK|?^dplWVOo}u zVe7VXLBoOlktE*;RY~@QEi@tJd5ufj;@YS7eAAKWH@CNR`Pfx=?fWH;^`LA=%p~sC z)vtyo0<)$HGZ#@V4C<5b;5rqQ3xq1SAo_Z6Yi5GTukIg zm``v6(6dG`wCH{b>pPx5y_g?0UTG@OIGEsq*Bv-1bEkS=8OkDHFY2g=D$aa(8R+kv z1d#SBT1YXd?X(srqAzyJL!mNuew`zapZ1Xp3(CKlm>4(PYNs;pkxOyFFTaxaQ=$K> zE$U^?k#YYIr=mXI&tt4B_uR$QxwCSl#O_G!ZT6P3c)KFX6!8DSJj@|D(9&IEsXa<< z$iQQL?&H~6aC2QVOxNkLlPO{rj&E?g;u!ben1$d>;F(-)fo0E6h#u)WnN|_=NLJLT z>`)y=AKLK?k{;u(sq%}dsuEG<{aazHVJNHCHXutzc}X{XBstZhcc#IXtD#|Y>E-KC zK%eB5>2#_t0WkjMc?-ryf|M})Ta=}kP%X>BELHP6nNOlgUc8;{gS#+OzSW!IJ8=-w z`%p=C)>ywlfB7+GxEw;}Bni@M)$!_@b}<}g%pCd$;gGbUQ}zqYE1*_vS(+ai|J#*w zI4qs8$5!LFZu<3W+-B%pj6_O^MtE$a3ujdy8R&AlmlyO(KS-7wKLKiHRSuah9R)=Z z(JJ7JfyL8HxK$f=Rk2U$jHbrlO-8;j(G`JFn1gslxJn=TCVoFl7dLlL1X0^S>}dX{ z5W1D}w2nD6QhUJ(xYkcE03>s@=BOmC@UgX#gSn-#;%?_BIkG1T$Iny3^KQ+?0C$`O zb=emo+9>ZO%0ZN0^M5YDEI$1!+IWKjWKOIQKBFZKo|e=iM<-1PnaJ(f>rm8jD4$BI zTDmdimsb1JI}`I{2uxc~e_VT2NBcL~hLPC+Y?1pG36{R2lIROMaHDrsAfwTJDw`<+@;aALfr%%& zSCGz-Z8fxJex1cTBI2n-RE#i4{vg@Ix;VRatIs!9MQSK4UaUZ+4-=&i73e4C;c(GlM*Hs|L+M$8nC()+Ix&=ap$ef>})1KAaL00t1k5|2Q+ z_L~c&$PhfLHx{6-`!gYFbijcV9RS{PXQI7LMdVwYtj?jK<#<7loNwj2nItAnimFwa zfD0ruP|{NEz|e__j)5C4In$X;T%Z3l^ld4(nEat)NMhXAa85M78EEK|_!*6t=BG0} zEiNErgp`*=F}Di%7dgDA3la&MlyM>;a`caxlIM3y)(2y+)ia3p>BSD02GwkP%!u^j zd*ZIYt3eue`4=nFcKwG>F8O!2KLf(%+5dZNmgit2d2zwuQDRva2ko3)kt#{ zkJk8i(?z=3_K0pHD@J$qW&X(?uEP$HrLa2U0kq!5I4jqRtitRLp3g_l%@-}UrP+?i 
zAGuI_gp{E1{~P4*{Tv~exM+YD&hGvck3iVafWl@y+9t^1i4VsN@RsoJU*{n=Fm$Mr zaKKp~+duq$4d|WQ@Jt*nI%D}&UfDIxgOm)0AL4M`voUf!2*2w7KXo@gk~(pb>lTGs32Z!I!< zUc!<%R~1&KYvJj*AQg#l%jI zy>nz$ryR<6u#P>E9}dc@Z0Ty*UYh0uIB7?m#43{(kae!hOD?#bRbq&Y><(87d>dW7 z(kwIuwVE16%K4g|3tQ`n7y?8!6oI&QP>}DRVOQIVx(9{-e)ZY2GW3W|;Rw+7Y#v%u zwns09J9bJ*J|7wlob+G=!^E{-wq&jf?!&2-6py{78~UYmxaF@ZhpdQ1fgD zj7CGAMnU(#RF10pfjJXYEc!o>q6cvVpDk(zK-#0bEOzgrm}UCJ?suz5qJ2OE*3I%9 zCkBIzvGP_z*0#p)-U;tHT`ngvn1Uf{VMsXq0+@Kl^u`6=6jIu({ zJVQo`FUFD($~W%CdszBTB~<0#Uj6U$`@NKy3;*HRk?Qi(hJgeG_a}%`lTk3|#Nja2 zuMm(wJX4Pb8+P4!>A?>emN&dgM7TJ<8K8NeisbT7PX*?`@UI!BqccD2%As1rTapCW z7~9lHUrj3uLkt*f!oK5WQeaI0Hq=1Wx&9xR1ws2#LMJg&9S%&mYXjF^bXctPVfNU( zb@p?}?U>T#Z~Hw`cwzPoDiMVOl7)Umc-Fyf*5ETO`1kTUQ zP&yi#*~H*#A{%(D0{BvUbJlV(4l!FJ49na3s54*5$)g@CAIn4&bgkB*vl%3er8vF~ z(vUHGEoZEj$s_(GCZduHo5;&4frO|GTOH>-e5IgQDDXI-SfP0&-PjpG53;Q>*D4Nn@@Y(}3>#IE!Sk=8#pMO;bHKsr`EtVpoW&V{55 zMiaq8qe&18HUUfba_O!%7+%#sqvYs|@(`PgepYY6=aqL_e6-FCml z;UW)c>N_9(q(Q0LaL=5vhtFgb8jt~UDF?t>oJIrA0XdxQ`vN_h0*I6ZE=%wB1~_3R zHs?sGdd?OIKL}^`%zHifB(-!^?PI5(%dMF_vr1UHY;x0>eD(1-XEa+l!8nYran+Y= z9U4T3FuHAAXw?XH3@>kcHu9H}i8>+gzy^%6b#7&H-@2fFwL+ns;NF#Gy9=@}{%wZ2 zpRb37#7u-U{ojLeT$3o-W5>BP3XC&bNyiw~T%*-w10sA>2xxJG2twC2vB!Y=0lBj9 zm6Jto-b#D$b^Nq~-zT1v_A}D}BF7o~5$#(hdyb|#y*LPfDa@lVMX5!F=RdW62+@cz zp{`Bd4Sb9EfkJ=Tk!KY=gNP-qY(Sc7j+LA?{Hrw$H-OY%la>h~Yg`54eydjxEh7{XL1}ph_9=?IQZCl|N|i#TgfbOP zpjAE*^9B$qE_R@$P!EmugELVzX`W6TfK_HRJhtLa+^7<+;kc|n7B?&BhZ?EFCS^63 z77i z^RfEuIKN^G^9*}xD^M0lX@a%=7BDdz)SSF1>%Q4tFTjmxR^gqTX4!|yOxRy%L2KSs z$@Ex_b%Hpt@zFZS4rf~vKrN#|@#Gt``3((yJe3sV$Qu4VAH3g?;o>KvVia@OD{6$(bD5IhPfN)7LW5$EGLm<0pOblD}1NY*%jfAfb@_<>g^ zrcgei#?#&Oq5m>P6z$8lU8S~iC0PKtOF`7UF^8KniS)Uid{Eo>EdrRzyAoi`H`wWOZm{|n`|)d(i;KxPKYXM+~dyQ@R6Aw_MVhFtYS zWh_i_&B-uHn5mk7VWT<|SI;a)W0U60kHVu!_94yi{ z{-PU{M6hX<5A(3GP@6!~r@;@|9XYlpwoev3eU`4_=;p#im3KhwgyWndwgHr$H2Vst zC(&`*boph~wArIgE#C|YUs4VyAj+PkB5b}|1CyE5qsDA}xC{%PZhg4RUIm?f) zEXQ#PkO|WcD#g%sr$0)8_bT^gahQU!8!rl+U0W*7V?(P0im*ot6IzupOXTBB6SFlz zQhTFhxx5>c34;(6uDj0ZVV`mzX`3eKtDB3|+Hgxkp|6q=#+3Tk{;~yc$7+_XPY9k4 zn9(dUu!uxPnRS*rx_ay#)sHf;KBLQdJy0NGOx@q**|0zkIJ-OIrfM6kp8yTirdh&Y z-sgar)RMR5-v6NJ>60ofjt3<`L>&CTyPYXu{l%Y!(5KsGt>~?@!_JvPoR?Xszb{&~ ztGZu)^y$<5X-fdDM88Em++DK8rm?A(J`0JHSGR4oFDc?eprPJ{O9u`#gld{u^DYS} z+qv#S#H1Ym@g`}Ikoar)a9RLlCG&8a{=RZtA<~l(gnME_86QYBzIqW|F3_1Giyfsm zEMqaSN)OtMiqjLYWMK`3+Fk_k4ra7`2a?JUyI2Ws5bxq5#Pe=m30_D##YriGR;G5U{u|mI3pv)6s$?rg(6nk|#sD51}CDHGO&~3Tu*?4&zJ_kKmE*dv z2OpgaiJfg(O>!k;{jqk!AIQV^5uH`Sfn#`aVV&wjxoX4+G{pKfID~&E<=s+}5TrZ= zoZw841@YDk_${#-D_Xxsv8EKyyplD8dBJ|9TL4`#5pNtBG?gADO3TS=;uuEkO+;xQW0GG?7@;P>a=3?h^oY_=k>Mp_> zySmf!6}PXt=Hy1a<1UqbrS#g0HK-`vRB2|HWC3*)8@hKpPt1QK!}iXGT$AhG@#S)9 zrZW(b^cO^yRXP$*75t z9odP5iNdGms63Gl2XGO58rO<;!hWm8JJ@J}yuxAXonjTP0alZvWfp?eeKc6foTlG9 zsIuWPap`jeo~JmX8@R-jKWED)RKZ!d7}bQ@oL&?_XRlNZUse=UU0 z;!8-d))5NEHimA20zmcTE5Y=oshKt9%Y?sl@&5+JdzWG&L|799i=RMf`_*&}X z4AmUHisVmcSiB-Y@b}Q<;&p57@_bpesfh7xSjt3Z@^JqZ?89ubgsR%5Zq=%{7|e(m z4SqCBg#el?F^}f~*Sl45foyys?YrPjf@1(YnBFVJe#M%y za!@(TPj^>~T4?_greqGs1T^bSrccDbwk&L@z*-#w;7L_vJUP*N9O^G!%sQO)W{(fn5(xj9);hc0jv zZKY*G&?}g-l^50QH?@0yp-pEK%vF)y+lluU2@@&P;;?Eu9tkD9z^188E$23q%peABiX zX=DgH?8@olWmNuLBvBxvh%f0o`;1l%^1jb{X7-Q_anMoACOI~P(7sv-zD{>>!IN@O z{n6WjB9+L49$Bf~g+Z9orXo{eNJ^*&xEsZNKLWuKfXVP`5Yzvrw!^Pma7yjPY=}5Y z1}!CA(Sc+7K6vLsmnWBba{$Lm8xW1ss&wXM0xIj1!{MB9>gzneWOj$+j-M9i%Y^C8 zERklv&{Lh6n)o~;cLaaH7n8z>Ulw(!H!OvlGb?ZI06^KQ;ATqn_Qf08`apk%LVU(g z0c4c9yB;)ky&q{he1k9O*Gh>Q#Jl)UKCq!o4-dkqK?_46{rYysKG!T&A+$`|nTM$n zJc{~!%evm6Uem@eeu+!xis;C=xF{|Xg|lEht03Pc9^DAs;drJ*sZEi5TGdJ*H2{D% 
z4#e|z$Znidzv~Rf$v@$>tM5~vVe2fq1oecEMLO{Susb$XJ)3I-wvlqW$i7fLvfE0` zIxki>JYmS&qJ$i@v5NlUU^z3<-@92?oBO}MWzwRp{JDeJTxI!W${6 zl0Wa`v!niV0U$2XA2k136+DwzyfQ^6k)}6BRATW5dMfgfhGgT5K;bN|32YIJ(`%by z=>^tgH!EUMd{nqK#nl00LdaGi@H_kXUu8^rIVaC1d{yEc-N0Cn!HPLGSNP^*vHPE= zUP-Kth&qQ?v`15`BN7;~GFd1S4t=%X2)rGgJuX^gBEd`x4B=%+j*t%{_D$e2XyQoMw|BzrNv&GyT60IQ(i~nJ2pT@3$&>9I z>o12(I0>%smE-FWJdk__PxvFKL24srCApP`a<^X$y$39M&*Cd57;NzJY0_c zcbUzdabnzyo*7Fn1S@yS#=>i-FgSfq_sGh)cX}7!{jMtDfQp2ahc)gQ zP)K|^S1<0dcvXEsJSiKKh>T;*4Z1NU?CK2pt#qjkG(latX(`>?YCh^rHnt7+E{=wV z`TB1V9?Hk~W`JbDydq@5Cx$mSwDMS+XQIo@<|oY!4syznF&!$b)ItO|oYSY%5t

      VyVd4#eb-=H)-wmUj+N*AlLtsmnMGWjze zhh`IC?jO22SgA~w%Ylkr*>hRU(Bhv?>j91|@4{y#(6+;%8nvBGlq%<(<^4hQkEp3GJZGUuu)Hq%B#D-mK?e+pH zDu-P1MpV2}>0LuYKSamDE1JadJ7PKjD&oLr>Qhi-E_3WL*a`^xbguLKXWH@0SHvg~ zJbq~=p%1|M=Nk;1K=HAg#f}cArcI1kYc^~y7euP^`(wJ%JJe>E0mA*1xVDA}M;rNK zDVv2E?C6T_37@Y%=|;bY<{;_7;pIJG#WungV@JX(goef^jF7N#iKq7zJ24k*Q?|Ii z=|kDTJ5#|##Guyy1sN;ABrXZ^ zyY69a7Kwsy!d|(=f&tXadWHgt5?0nQk-y76Kmxt4moNPtT}|fq4Q+92ILr}6#Xkb> zvnDTCyxY`zF$*?eol&M12e;7_pVE$Ae>EUnpu zM2}*PzV+AgPi}Xq9@E8nn61{16f-@naJP1$0xaH99$pLZK#Eb>`ZK!_Ueiq!0;@y3 zNfm1&=C&?143JVQlkjJnk-%>YG{U!@)N*D@*pd#K_SH{F`y(XRwpTTw!w12prHo27 z>Eh!QUAo6abSY27%IP-;ZbqHU;%r#mUb*4C3^eWp#L%sZ_abF=WMcbeQ*8_lR>UVN zVOEi3HSXu>IKle`xtZ~=iCN6D=IXbI)ns}QDEQ$hGB#=DZOx*WVDa3##}7L9WE{c&-yz%eZ_xZ&q0D-XsLSXR0Xe4!2f;jl>{ zo4!fY6XknYF-oJ#aIM3BSKbLPgWSbP!sK(xG@IJQdTvE~usbRmv5XNk8s7 zh16z#I<9=MvyFS{H2wYO=jkiWocD}^%j`JtMbK%ST#Y zwo_xHuFhn&Xg|7tG2<$_bKMeZT%B~{Z=5|3uwIesd;A*3GQSVpfW zNnwA{mlBc?Q0X4x7Rv#4rHwVOJ~KVQE&14{n?WMb8RD?r?ZP+*nH6B_`*kbK6q}P& zF7E@Tu)8|WLvB*j@r-g(ob_pk6c#|7-i}6_FO+4TO0C%ZSp^6{l+E<<$9V z%a)pJ{MYikul3e@Woot*$|KmhPgZHdj!3!tBNM#>-h@JtydHaG_R)vi^FS%pzN^tV zI1`b>Loo~n5^pAlvLU@ z!@tZi(jJlX{)FVE(7>PX5ho(1H%)olZ6r67D4C(o)(qclm5)N&AxCWL?J4Y+sz7E+ zV1~(M5h@@>Gw2#Ky4&UPX+G8L4{yhZtRF_xgX}sTj5uLv)Z6nM%Sms`a zXgqoEN-W)tL~d3xJYJnsyW07@Ysn+56R$`!cAkTjbC!5JU&u`FVJK+hqVVM6-2pQY zh*Yxf_v37S%%&naz}uzy=;oqKLcmslUvvRqwM^9u)r1|N7zF@K>qfpAP$u?4000Y8 zoQUo3(xH;4VzavwkJW*wqLAuJPC_QqNs9R=5C8(6Ng9Gad#x<_!qw}SMh+B%*R+EX zsyC^$uz-)cq4XG+vDc7TGCN{015`&?uFEf)by;SM9F=yz5ti?KL_p=1!H=J-^OTg8i5U6 z7gJz?4r46UI{evC{)RMn$oj_RW|fV;kv*9o8nZ5(FPoMJx_`0a#<-u>zC*-e`1deo zCh@jn1qTzQ9{u)+a`;qtPbo&DFwB=N+_opYA2sS@xx^X=2dS zkaFC4;h<$cY0G0`U|yeQgf`@Rd)-asRLY2}Fyj=<<`Y_Wnuo$T29p2rhutn~NNg zSVy4`7T_1E!_^>^x8DiH3e86AG6M!t3#xFNlMsC{-qd&KZssuUj*h98)9Q_z$DR)5 zV7Zn1zp)(1NwmZ#76MnvoET(R$tp^Cob6!ky--2Ho20h>Y#J_~k-yJHL_b_lh~=Aq zg|Nm{O6@PEogq~ke>YQ-j0K*~kp9>J004e_K&^8`lWPef=6dlo0000)J7^-)y(=5S z@yyhy_Xqg3esVym%$5g(JNnE9JXh=hVLxHE=7G0@;7T<@?^xoiZn1#jvWKXRdTiyL zrUiAR(AY^~m~+UUEfvo1hzg%d_tJ_#q7A~2A07Aqg=#0hH>35smsuj%1Akk#d_qH& zqS@0J6kiWdqfiy7s|9hdk7wBU zH^$*Y@Th04fJc(ee@mz<-xTj_bgD%U@y?1CHq#QjP7|Z?jYaFo(eMV4P@B-G^3>#x zM|0i+fM z^cu^M2tw$CrROwd7|lr;<9v~}_Pm}0yoM}Z={e1TllFdicH?Gl40@7R~=bAQZ^tpkdWAUV-d!(p))kY;D!TqVj?-~=-`+9yQ>)4@e|@zo=Wu8)kmR>+vv znFO6Z%w=PInh>V59lKoxV7#@^?gn(dwA_;a;_;f_|1UM(fGL4OqcZgKKIjk4=-YgE+*vZ^M z(^OvNdSA_&UWVM`bxgBL{yAgFv4I*+85%qwo|fWbSG`Ah23gHJ?SX5_=L-im%T0hx6+geOju0f9_SAuya!^puPoQ$OBS?WRE%K{%|BPK($?(7+bvP`U` z$1a^FuXr4daO3d`+2_gFq+OtAQtoTlgBsyA9@|g2wf@P2s2T~Fy7ZqA56sV>-HNDh z`D8@B?av6?E?aKb!ughA!cm$0>y-^)qL)YD=hWv&iR+>@4AiTogb20~U|T3RzTt93 z5WTmY|1j`C|8etXmpixddAF)6gRVfjl(uCvnFtP>ZuHZfM%&1Y+_8Jht@G3eZuMlG zvuoWm^Kv%!PMv-qm$9ec=aooXr(N1(Abd3sDz=sS#)W3mr0&>o9|8CpSG)1J7~f9x zq)4~?hvmBa=SW2bmzU3Pdh=hD77zmCVI*j-0bbFO6t)=KXW{Qy3UD8`K^!)7x=+Ttx=%4ALkK1q;jFfwPO*Mu!x?rnj6 ze)l}q=UE14Pr{!)wg;qr5TDjQjt`B=7sHXJ^~SHGfM{f z-KOpAckeY- zjNNIql5^5g=5U)NEBX#NKHnrcR?WdZPZIFMjSccWsrLPe+>$)hPjI!HX47stM5&l{ zz5ABWfVk7U+yScZN3IhP@+?e*{DD6xmXy&XZ`*91lJ4T~_a^bLA2m8em&@UiEaF;c z{(a^oZ+qluTWzqcVDu*^Fr~NV4V9vBpWXh1|I9<0^Em!;IzlMEs7tkR0*BSYb?DBJ zG73a3*Frp5FI5DY4=K4n*Vtgd5nN!;m!uwXEXgq9+qVU4b6WZ3%8$g5MM%2mIx*;& z)`t5Qkku56Tms|gylQQjR-Xi?TE2iKWzhz!6;PxYqxJumG~Xm_`ks&V+@&Y5D)`l+Udr& z^e4>jf!Tdhez`%v()X5PAO1Uy#`|MwRPk$`>`?WvR+mr(P*U%7dvrzlT*i+QJoj?B76qCo+)8f_|`?s}sGjK&g?uFWRSVsZGsZjg8l5Q?i&V|33R3ilua` zFn;L2jN&l)AO}k3|8+e1uRfUn#^kt0OH4FuKF?73Cb*4mv2qyV{RI^)6K^qCaQJz> zPI~R=LIr@0TX!Wl5t2zo3rFu$7;!!ntrrMfQlV*w(L(l}t@yNS+bawSGfk~M3j)vhoq`}iwsNn(!Elzf4cT?h*s7!T7_1p0 
z#h<(_^|GD0E!j&g>mzPC-dy5|9nq9PgsUdNSBVvoB~<)nmrSLo=#-XJ=micQs1IOB z_vC+>INR{hnZ|nK3s2@3ZtK4r9*^JNQIaR+G-IbrrLqiwce2XFkE9b8CBt7-?BDF3 z#rBrI&B)W5hL6oHL3eX!?>h>i+Ce|&+*261m$2KQj$VK^Bb((8ZvK|!sENqD=~c>N zHz%g!*3Le^eF6K2TVO+h6~>q-(`1(^nnvl@t5|~^V+Tw=6h-O(AVw@-g6T6$Mcopz z;@w4ML2GoUY=-(rh$dp98x>$*VkQ<9g9Slf9N$klW&CwjkBI>dP|?K7{HO%yNaayf z%e}5f*GCOS_nb{uZ}h1K{Nvs2a{Mi5C42H-8haAi6RT~b6Nr?Rjde??k$UoSzor-0 zSSrBYMRC{gBQ;-?Hi00GkYV6kx*ejVSg)@WV*&<2N~6`cn)kV#z-z&yt^wcD3kNrX zb^SVXo&2Tlc2c5-U*$e$yIB38g)w1;u3*Y;ny% zhDmBTmtVxiRr%!m;{KSsbt5$eMPaXQt==**`Zovzlw(^=(P*LvVKMr8bMRar%Db;6 zA<}f2E_xHgaug#$oLv;!2I2$31&Vyxp|5cEv+wfp-p>PN?KN5$l6P>Ai)*c7J>{gpN(F?{ASy38nXY$A#!(J*$# zmX|hd*>{d#J9n{Wi6kSA{hH6uU!;m*64<~;@Q?qs+>Q4VnG|q3oQTq;l82fThGjp= zp#7bZMUm27TdxL8chiXRvb{^(GAg2t7HPx=(Ctq@PQ%XaciXZX%@=AouYwRrhy~j?}bH9MNxct|MDgq@{N3y<)JdOdQuN%SZ%sNys;^Pwu zg1(X5B8T09KDclf5+QO`YZkk#R#EziN>GI=KNQl%6meTsJm;X{+LEl=!`J|1K%2k6 zdnvJPKfueQIV$4MZ0E+vSjf;8y6#yok)hhpR*7#cpv}Q z5$UfB!uTU)sdsV>X~enkNLu3WPl`LQY2Hh)9kdae zLq{dU@@ZgYmDyrOsZ3I4UlqAGf)}bXab!Y^0hPN5_mUWA;roocc*5n<6D0P51YH2^t!B7nAFVV5ch}!O%-3SwLn>D zURc6wVRcVtznxze)hl1&zw&%&zuo_Q0{h{rn*K2{cHnqwuQg7KI@xi&BO|kJv0=Qm z0SVv9DdvuBDmVd;ZsMOh2-AGkLb*nZDJLB2L!N1+XC5AqecMg?J9Ey0xbId2N&95$ zk+!Vn>R=CXtf4)n9h&#C+3np5nspo6cweQ4xU)zuI&!@G^Za~A zZ-adY_+AL9m*T%)6%HuQVah7Az2~9N-;`wTqBqw(pVP!&@ozx}4t|WOef!&$n>zBd zOz3*5Di;cdwlM*D5i^gX%$L_l&b(h!yflJS-QhyQVx;V`8bFQN{#i4`S2u%Sb2-8k z3G?*hLlNXsJEI6oS;5n*Oj{5T zo>U-I3`79I=9CdRWg%yRi%$T!b@)c20K6rXaIbv7dkeq9OT2ol>LB0%D`fQG*upJt z2XPt3Q((>sD}c5wIH(g?d4A+#CMZzB_qO zTdHaq&1 zp0-smm+w1$)*Uy#`q}Ssg8iIX{hetXK082D5~NcLG5VfM(sf#BFy>2`wM-2jCyXx1 zaqwARY#sw_#!_jfxhXd8BMbwc(>@_Dq};P@w38t#QmQB3)qN#E{4}jt5nCXWMcdD1 zE#@MH<6FwVB&G9nzjzKi#guIfyzFqLtpbPo!CL)QW<{44qjK0H6$9X}>t1)rW18dLU5ke4?Gt2e@FH&%*n?mrAm$Da2CvZkh^VEW>XX|g zaOH*kKZ4TvXE;hpm<2T%h08?Uel0Fp!uO)svsLVNnA~Jml8fxRL@&VMU7(D3@@#CG z(szAxkRb-Zqz#3p4D`Q<0pOzHGD)0jlEO)`fI|f)hmYCt0j=gVoXf>%I;N);&NL5(fo3CQ$e%h-5iZ zzfV*@3cw5L>Zs#%2;Yt6sJ+|QPP%xsr9h}pr^Q;d- zap}kkE@M$Uc5D!rv~j72yR5>eqM2q$GZKVMbhzF*;&20UqST0D(sAQ!KYq~_P;B|K z&IT5(J2gG*%w_tNgZ+p0si+-#e8En5_tXnvETNiv zjwj>OD<*r~unE7ogGMzUbTa9?jO09tyT$@_19f7497d<;S(p?(P$p`AC3#t%m)hz` zU<=zn@bDs+*OLPCTH@`}1L37+Xe;T3vEsjY=?Rn-`iVWru-as<0yQxp@+E@$?LWuA zP}|u>MGgL9I3SFX@lT3WEru{8PbEkC+R3rV@{Eu>-qbi3NThi@Syw*>BnB>@Mz0U? 
zOv=+7K96KC#<2}phV24Y)`z!ye8@RLFi2C}_%Lk_os3IyVy)(XVK z&&GR3s9jA22reu%JIy^ha7bzPY?ENot^$GR$-X%C4X7=Yn$ORJ9%~sAsd5+yCbVg# zI72_Ca}3*_T~46oYCB|FL9K`d@*!9>FqP7-VE>Viv3B)uIZ;)DG~S&#UFdd&wpKS* z2m@;!uv5{$G@I1D+$p!by^83xIx+w&`2Mq-58co7o$b~%%f31IV=N4v!Z*TS&p%Xc zm}g>5RqfI3NdQs8wVlZgGaflsN$zRubqQVk`X&KrAu8S)`6YJ`G&`a^_nyqR6<01>=!5Lq zd>5IIrF+TRwfe#ef0eq4^RI@}QJ9K486AFF5^KKK;Xfq_r=t7IGp13{&_Gy4#O!K* zC2xLWVj_3}>10J}F=8I;Jfz_nva0=vy{rWg2?$&1 z7!OApWHdrXTnpnY((fg#+zS3A%NCY7%iaB9C2Q)1AiPjfR35E{i+S{o7A;^XUer1@ z8&DvIg=#aTtic$EN6A$CYd*Hnlse5XU#KoqtG`QHmrir%|5=}jGL$+XF@=z~CgFhf zr~4>GtzMc^FAnzping<1l|J(8?>(s0jiGEdX%lUfr z3?Vz>hNp4F6+1@r;vkCV4OlLRN;dsI(NVN4ks2Y~JH=<}n7E@Nf#(+0CQ^QnJOV%7 zJnQAXaBPYo2uW_s-Uy*t2XysJc_NuCFqbX5v->B;DC>z6_Yi`!Zn(mB%qXUQCS{w3|K4jQ1qGNY&2A?r`1k z(>B8&VI<){vMyKQ7W&HI(>g~aNn2|l9_t(acs{6S_c6+;Fc+jWQap;+P~h(oo@o>K zyMm|!x820e%XD7)#M9O5Y7!1SV`o>VqaleZr&G>v$sYa*#LfbveFzk%yUH^ZX?UJB z&mXMHb2oFa4=p+qE&ETq>=w~R??-@8U!AXo%i*!=qz*QnD0eP^2Z|-A(XQwfRKLdc z{QY)vLQ1OZ;g#;$o|EmB7#0+6Vv3Y5v3OK8P5#W-dbClN^k)+ygEs}BJ|A6r>Q6RYM|B`arZ;(7CNV2SwGZF!4Mjf#N&+nDpkM57(D0Wi~*;r9NmkC4PFTRoz(G6ULJId-X}Z8(p(xaxW%lH(n7EIgw%FT{D9; zm<~?4-WGr&6R=TS;S!q#a@BWvZYprFmDulm@D(vp=5QJW-*@BN?8pZ2Jlc~{k=bxZ zg__G-xGG$E?1lv9Ozi@`gP-A|I6x1-h=*)|&>L%+t;pIm{foqEJx9!!JLVHv&QN~~ zjc-h5v~bk+79@@16E!-6@bx)rBEmzb3h`YQbDs#^5ghIXE_~iC=l0b(974Yq{uv|! z!em%({Ssa*WnK$F zBoZ%k=JMED%?dTgx~~%EeO~*guB|!&(B|bR}SF#5oZz4CGk)8WN+kig>3 zwI!A4uo;{@?=2%#_Q6g>0GVmsYPo)$$a4h*=557Vc#I_m;%(^|4_xOx`OjpaVE=ti zLb&9Z=V)3ON9K?eh=M@U2_R%j3sNk~aM%ory(4|?h@4EZ(q??*?sRh(uv*gQVn&5Q(ZuJGCEp;*=@a41}Po>;%!D+Tlp>|*)(Sb)_ z1Px(ti>7t4%%-f*`0J%!ht8a5>>Z?a2I1*q*pdc+(tA=>4tuwW&IKf|6EKTmi?;6~ zj;VhxOLud49O~yhWM2L;CIb169azjF)#n)5G+*^>7aSnTpv!+mQBuO3)_JzCX zs{m$x7%4Y)w|y`Hk|H>T72bSwZ_rz$X&^GA6`*kI)}NB@xQekv%t`z%&ViBFKd?|? z20-d`k3!Og9L6CP5@)xOGk|u?FxHL|Q*dj?Oe3~+;W_n86>Ajwt{Jd>0P8A0FfoP@T(NXRbIT3pQ0767jcJdkc${9d2ms|uCvsgmyk5SDc zpb(}SwueJrF_0esIm8pt97za$oW*tZNJnkdHa?pk^8J$WQ07_+FzTIFSYhC)%ktlv5^lhn_Chg*syXEc`Bh#Z^eC7%p?5TXC8XEmk_` z^DR}i?%yG^gpn0De!yUhN}}43Os_plQKpWCDQ#VvkjNNeWv^}0Ll=1Q5S(0h#aBYL zNtEcvouInKBY#c3D;9L+K-Y%;3ZoOW={sD)(wlh5*ct9o3C&+Z+w}A2?5d;7@M{EZb7n z{Y!V57?XB|1J{E0Dal*1lQdS9Rbg`<7?kaH(WyQk%t(Vbtf?>gcG0`2U3$eA_BO;F zqN#*V?h$xvNK-T@H2((Mxwi#eT43%^OC6H(;vDK-N2|e#Xm$K80s^jPohX+|)$;?a z%l-dwBYpM=W!F^_f`URf4bw*pXJd#bV_$0YOKk1Ti>FXigTxGwHyjz#Dkx%4=QGVd z=r5|0ivN8-S7DTu+w2SuYpz0}o}HU^kelRYH$tS2`xp7eUnG0#pW$HAk#u6(GK{U@ ze$ryXX8TeaL;xV&@m9+>^tTiNaLJ3+OK+|g9`I(a0}X9u_`WUf$L9GxPQ3Pv@GsCx z6D_NWNwxgrl;;e!y-VxSFZX%s11d1xB8+C8qS*!W_*uy@x*Xhu9vtx~y6dax1^zu2 zRN^GttYvb-V%uZTThunZ(RX)`-{zcci@snF8C7}k3il85gw9)f_YH&GyhT3x5Fm4`Hfk{B280}q{Mj5U#s){fDRxSK#I-0zS zLW;v7P37!pgDS8VA*Ef86ZIRw6I>_yvwz_;xKs@_S;eSA5~&7Pxbt23cSOz&%byb9SdKMhbh}le} zKr_b#xc|Z2bB|@CE`dxI}!ED42XsGcGRNZJ~Z7@CsOiy$6yRC9&yj=7A$7k&gSi?~DF5m}J_+;GgLruxd2u zkc@KmTHkckL6|_$DBC#|h5{jjbIvceaB?U*jM&VXcEvc~Ph z!3(- z2T?jI3LBDMJDVn4Wh&QC9=)ulu4_yOzly5z0Z7txE5$pJoYr%H90S!Ygo}sBf8L09 z!kFKE@pC8aospf?EP6cnmygGx=+S&{doyLw!uwuUv3u?z7}i3l$_SVL>;}mT?awi`D}>q)`i>?LBI;r*P-DW4lm|ehFGHz57b-A`Ae|$tOS@13fL#oUSiy!4uL{q) z1NNGqlO^f@V4fayh&TIKwGGZYb{pd8*yC!?)WOn5akOsz_Fix9Y!)qyL;V?C={~zu zIW_eer8HN}sNKq<29PI}t{PAnA?C(KrL2sw#^8W*Y|jN6+DBm!#_3;jm}8~-zOxar z*Ls2a$iK93KN3x2xrq8|y!Ev#aKY$3f_4+q)Kd#kwS)0W*Nid9a56 zz8rt{fot!OWBxTRlrG)jw+*BbHSHB;At&VoDlA|bx;>_ymFiMngTTK|n@@h0o-t3% z3u_=)K*CDUWt-7>i(HSY1<^S@;Rnx2B$r`DX@QKcCnFOG)ANfG4-GHF?T`?HJoWUj z3Sof|f=_T#7811CzRP?M^tE>jnv{Bv)$}0`fCQX*jnN#jb8c7Iw)v#+TN@x6za{Db zta?Op=kuH$ZdUJO8$pCc*f~gOj2Gu=)=#B17i-N<3V72D> z#U{beY?8zqtrkRV2A-R^OIq%K;b3USpB6ggTdL5M;mFKYmwGGF3aE~7;FG<^46MK4Xk+$!{RJ>piw?c{h 
zU++E!L^6)jhcz#1lVTh$T6Qs0q6z|{VfGXgiS*qag4vlUfD$UHZbu_)D*QO z9meNTXHpAK$<|=OT?lB^pMm@ZJmTSmff2^Txdn9$jQ1Pls zY7~lD_p~ArZ-)f9Ntt56CD{!kHSMG7FAEg-4Md9E!;$3$ob~Uj!P6)nJpe!tsB+ zcpemyukYmk{yx#K>+)Wd#wc)j=;osY04h=jNu0tkL8rSjjU=_)RS29p< zIvl_MAGvE#JB{Yk>}%BUq1UA@%B@$;Cyb7W$O+2_Zq#KFq8t7eP>iN5tvpA@#DJFl za)8!?%S%0-4Sf%V63)oUBh{5rq?!}Hz^(xG|3g3+TBxeZsgO7? zQk(`+4}&O{7oa=lSw(CY_}f$GQ3CnI(1p!1!)VIy>9thaKXS@*mzvW0tXncVS!F+L zX)a-CL%w~?2j%2)E37A>bq27o$Bd!yq25RB*X)@@a=OA*?oc_$2n1AsjTfHZKDf8e?&9LOt1_h2S!>9& zP)cr`V5yo#M?2RvS$)k}+hV4Bu|b-!VB)p)SDF3Ndb3@_rtFN_<@hKWvOt^E2D9lI z3nEW4fYp0_W1%~peweV4f`+iTUOn)tJr6VM7iBv80{K~;%%p%9)W2B8Xn5ZlMX!aR zol?2=0f!S=$D;rCMpASS{#LfD7Bu2bwswDRlN4Y!G3g}NU(B@WB>MP|CUnsd>VK$l++F%(qd*_lYZG%UjJI)M zi8T~hoYk&Xr2MlFvokvGF9!_Q({Qhi?J%uUNPJl!~mDp4#Y_ z6wnUk5d)?SS_i8CjUTL#X7~9m05KYsAJ_-1k40QwzqOMO!4Bnj=r!DDfm6tDV1&|O zwLjPwL!=Z?gPo` zs_0*ClAHPIF_FQw7RS! zDe${(7IW8S|6y4YI5FtwxXHu==q)IgQ`*nr&YNV!0k$mgY7j`tKhTl1dFF~2$WsXh zYyx?*I=Ff~LxLrfxO39gkLd#T`KB<>z9iS*4*t%rL5yxu8YjKfrd0ntgWRNLQZG8mID3>AG&f#; znJXq<35T~9=V|Jcah@sNQ}DExhBfWwndH$({N<>n^l0_O6e?0qT{~F8km>XE3M6%W z;!BSZSB8@fZ#7IMB0|GkPPMr<$__OcfZV+Zfk$w>XLS>?QIqs!4^PMe3rRR7E=FWLCazTb zPlv!hxn&H)s`t@0cnb7WP2L}XZ?7L^L4#&JTT7DDz`T<@zNZ$th}O-lnhMMrHq<_l`MTEX@-Lyr+K*^tEKt1OyXkCBp>>21Cpxn_E0^2)ptx4-d zaSM&MdayFOAY1=8?=+DEaVH%H`xls4He)KG9}|FMt9*I_?IuvA1@ zS&nUv6zf!HnN7)ong3n$?GtI8EK0B}Aw?*)O`p6(v@v?xQs4s$C?FIta8UIQ|t zekU@ar#nERVZlgvD~ZU^`+Vr}++{nbuXR(hF`pzFD)b8X!pByx3yrm+LVoS~SEcx* zc|JcV!_8efwljQULmc5ni)i4;w;Y|3@YwE$C&;ttO|}T);k3{k~1fz-GV-r*#(qnVVD0tqDD@sgU;gm;hm1 zAyc$KpblX!Wm7dp6dN-Adgq0tWc;$~Q2=B)C~p{ro`<)7k#5WO47(Jb>(c`+fpaxc z_4n~J;{E2LzlA}nrlxCmkT4^x{D@&U$A%IT-aD6ZBC05s+aqlam()$*y}=gaKo5G? z(p%Gbnf6AzyPx-!9Bpv?wGe5LXeo;h=O`qWhz!1900000OK^yVUwgRXSvJYqu}+CK zNqWS0o2qG@k$D9%Rf3x$iSl@zZ~y=R000000000000XHt$u{|2_ccXL$2FcWJFknw z`R~$P4ewhLmhR?Gf*Zyitn_T&yh)dQk5ivN*)fT%^(?qVB1i;9TRn7J;Ei*m%(^Sy z693W)fW091XHzy>Nq#WT-*@Aj!_u3->(hqq5uPY5y3q^t-UTVw$<{-d23DR_piLOX z9rtYu)0!|Jb|ZX?A)-Cf2$nPu6~`KEDu;%?_}2RApK3US7R36UHk8XUQA_FD##i5E zY`Bu4P5h6|V%++^^^ND5AlA7mu6!KcTH_c_$id;V`Ylmzk~|BcW& zH{37IhLBtvJ{=cg!OF;8c?Pm0Udb`QDcwy1j^;4U{}#Xc7ESAs8#POe_N<2OE_Xp! 
zZY92|-x1<~5qX%o96ojH;hP4R7DjD(qRO406AbJ;p@cEt5S;J$WkqKg zsWh@4ZnKp=9I2C7y=b8L*nmn6w2gwqd>wB8lgdiGkacjy6QPKQZuhju!_5MpVW2_X(#KN_7&0g13Eu6E+1^v-6eWU~{$!GVFS%)RP{UQZ zIN;6xP3i`akIq4%KSFWOb3;Wx{ccFBLB)f|*dL=-bqoy=0eUznE~;em{hH-wcpR98T;8iEp{cY7GL-Bjln43um-(&oT>~SwP$ffVX{xjVQxT5!F=`QLOp#& z;*?`2wSjuG%+-!y=aFZdk>ZRs1#H>?tm@6HSVC7q#=`#ZgX!HjPAoIoPit9e-iDfq=^-_SD_GqujK{T*QKS3q6}S;@pi)Tn4UovX4e z)gb&Gr@Xros8Fp++B6Jz5xx?-3+w!Hq6e39QTO`8KrC5Ac}<6#E-6S|#(0*ebQSfP zY0i`12l`I~U9tC?;>wT&JfEGd7^X;2uS0pUX~#4cCL)sJeJ^G=<4Wu)?SFyR)65mZ z9$f2YLXs43>EcOiYb~5{U}Z0*UpU3I_{m5r{!hUTfv?txe6G^}S;_0u7K4J(bxS%U-*Nv^oDg1B0K0zgN?Y~zl}DU~ zMV50Qza_V8P*4~>TtuS^VZL~%)V~KX(Q8J* zjSzY4FrgReZq6VF_E%1 z!g1R%HUhen)<*=A8MKE<$JRy=BgC88WRrxg(qb0E8PlvLd$x~;Z@5ucC*>p5S3ey` zl=d|Y{yi5*CIjCFwqLMaHjUbjMHxYl`6@ZK>dOFDM3&p)>d?<_P@L|$X z7(NCe0JxpuI;yu-)`xuF%9jBimPl=Xec|{t16UnSWQ3G0NM-Ul*HWJWC8%2VXNaCQ z-(XDFRnaw@fzvX>w*cDVYCmBwQ|4~hRaSFJ^6C$#akpQY;nF>@lCWgOteojDCYAL{ zpzv#6jH)D3iQy#gt-t<2S%i7^BdAHGxx^l(ed%@#0dK>0Z; zsL>oip+)Lu&49>h!u|*K9}5z?k$jSCggL_r^nNR8pZCa>t1eT6SySPYF72(E(g?0} zn4-`6^NPrW5Zx4Q_PeZbmdHv>(=aZB$MDt#y8#Xvl``gE6rwB*S*6#R((h^UPR9MM zU)C0~unDP0J$W{hU%hPvoN!c~HHrf?Adf1QdxJ75&WKsGSzDgiEd*I)ZAh58tT(GI zW()|{+KeP;a3Fh)Nbl@zXND42T*@gzU*vAxLx)wrGxL>rUr32BR;q}D zFR_-BDuJDaI%-Q;;wov%J%(`!_{%2X6$3lj=yZ*+Aw&Im9X0>;51r$(ut6_s45_Ag zgk2Rc2QeK!J&uJ`WNOX)q6gbJs>@6Cx0gP0MmHw%deeG~hZ{_q z$B{%6Z$JXF{PurhFHwO07BJHOVemmk0bP3rX98OK^1BFUb^cq+YGUU+3!+hwISG(= z(np(MA@ker9`*6H{%>!SYLO7~x~fi-TKKTmDoF~$#xcX4J&DYO-wYfzQOeNYwAXJR zU#yK_C!tk+=eA+;vNuH@p5m#=>CZN96UvlqmnFoG8TX{&EjT;s?IF!CEvLmsNu4=` zhcs7$UG)${4i4zPh!!EZzxSA&Zs~!BaVzG!+y5eC1SKtvN+J5O=V@9P@-ZR+sAU7b zTg!OzniVd|uKN`5FG{tRyK}YfpPSa{7-8nu!Ju+KYCdHM^L@^5Jg7}YsRxA$j`?`m zC{ZD#of<9Yf4~$UA8vb}+<=WniiF}3R6oXri6~BSVTz#4s z8Mi1}6{6<#K2Dw?W~a>^QJxT#t8s_KtWc1pUHmW{0QgXTbVDv73E5dz?^ahD2}}dL zls(h%Q%Gq3daMB0*lccZeJVo*P6X}ezLcR(?}}+PF!g)~J}PcqXjA7HtSR5!Fl+3% z_<@`-PR#Hh_^&M+`3S*z700)>^lbcGMfjIbeC+W$lCw!+FK#+(mk@#UD0z6wZx=U{ z%ep6@g0ktSg~=k+k#yXp=S{ball3;{R57Ip_>6nZHzddOSci_EBc=^A3YtIa0ff7m zxJ07Q`zvY+(x^4VWTWf+twZ#a$U!!og-T}dvE_6=jB5!MEdB=<(`tj!JLK>q#!&{V z0YjN<275sim*e{gESzY{Kif$SQT^d#&^^y z-clOUfk_Z>r|};VGEEssdM@W+lz+*tJzbg5pW}+S%yFG2bY#|U)M85+Ed zd8$ZeBg_8+0eV1Y8~`IVB|-;!IMsPT{lot}I+4~esM|`3=DEF0q7O9p&Z4|5&kvBi zFu7=_Tj%0O`6?v7vlWD-fNU^p*4tWn`ocpM?y_jX5_{t?$sOY^N9$sh7V>4`m06k9 zh3^9!MYmIM&l=kq6U++=j{0*VWg8&-x~uscj);GgU?jHXa2LT}O7~UyR+M>uIkj)q`1Tks4;;r2rR{74IL zCAx^RQq+f)kUBj9KXaj+Svm^krZH`m$cY6zrgQicTb3;6F*a5K|%@Wv4Uydj1~Q<$)f$kI+P*hi$!yV11+Oo z0+APxiSs(gF-p+To(>gXl8*TWYVT&H(iCvKR5vrfe&voeX)=Z9-V8rR$1_}dF6X)o z5yq=goa4(Z7lQRQLbe`u12z0ze!i^HIE~Rsq4*`wIM`Ln!i&Z-k}RQ zwc81^&bjJs?RdZA)k0w}sqV?z!jd7cFtWy}N2Rkj##`TO1yar>A}V^1MQ%vA$&(_M z@Zmg2gODYfb?y`AdpzWH%PQQEKKDH5CZPEF2s7+n{g3-FqtdzR6t#jM6`({Ya86K8 zti?5C8^hu#*6442)6x!FX3tP!n#>Z^Z#PUnI6?mj)C_;PA-6h-YUY2fJBE3mkRw`l6-urt4ySie+R{1I0G@^G-tsJ$*b#sRq zc#Gr5k?m7qIlhZzj&17@tDO9ImKSH7Dm-oT4*|RD{4xQlSLn)<(un~H{7Bn6a4(V` zQCX$E>$Px!C5fql8C~H|2C^U%S_K>*2oP2)WnIEzD97xyv6}J=Wl5rhpLtsPWNi!s zMuez#7w~Zsu0a=5(lbRAmS$0cL02|?K<1)~Gn$zq_AYyu2NsF-67|yI7e1mH3;_eC zr32xuyy^hQkV=FsT!eyxcREaDK_&lQWgQnYoe&!V9b6fqPGnE|^*+&XmZax8#D1rp zH`!M~fd>C@p#Ukv5MAuH(52fbD{~FLHj7PkJfKg+oNnW$+ShZ$JZJ?`BM0);>+sb< zk2P3pP|NRV`Vz)REKn>ld>4KUVAek>FDLf%W6&dE7{(q~hN)#74%ZWOBV{!tTUeI_!;{45v^iRf>Dxv*Zn zQ9h#@^7f<{lJR%8dHaUu?1cg{t%-b?34B0>j7;^ve zGW;kt0+5%{h972FP`eY>*THfBJ;+_3Re9ufPGL|T??~LdS5cSzB>e7dnlNAubjcCW zlnwUjE^HSuc_-0P=?+^=KOLK*(4R$^UTx}4~#^}vd`N@lTOccvzBkzHUh}nvO zb|8-$DF6WD;Ga@83P22O0ftPT&d6WLT#5Xh%k%S$z??|h{kEOvmdEva`x4_Ma<*xCqEl=~!`&EaIl060!TMJoswW~=;6pj@>fI|M{CA`N0xHXpmDDEs 
zO3YjEis!=5o66ST%Ke#4@RBQT(rCk#S__8yH_2o;IVre8Uk|uKXt{&S z*k4nX5i7wjSjj)Pvb0A)eYU3fpd7}Z0y&gQTf6S;-dIM@Ee_8z=6d_29JBN%&v^UL z&p5nxd~upG{E{JZD(a!D%4@zyYM868Cra(kEjZQrQjj&N4t=C?NI7v?;)n_Djb4?z z7OxjjD^@IHJnY{D@7nMF_7W$R~cS!WuoY)(W7a$&}|yRr2ue!hhspFf)`> zSwAxI%Vit>;eL0CmKF8c%8+Lrwly5J>+?1p44$^>pD9c-+d^zughrU~t{ke-4KiZp z;AcO|HX`hX(8Scdtq>cE;JV~WITrp6;?-!FP!z>#NkbbfEHq;6wfAvLee-qJlEW=K z9bU1ZfXVP>8*^}1nqT+|l+V|UEA3K`BhsAi%j$8ou&q*TjKqlmIW*5%{Zvrtf3XE5 zT--(_%*Paj%kZs}>u37_k|!MOpbxoztTQB&vv z|AR-~1Qqsz4=!CXZDQL(T1u@@x;lQfWhJff&aIu-?EaQfZ3 zQ>^5t!-K5}+AJg4VgR_-uZIF}kXTKOi)L@ZX*CTAPc*Hopq_*_CpNi{@%dKsRjGSR zT5@f#`?LprdaBF;dqXy9d2GkwUoim-flL{RuSQJT5Y|bcby>X5j_e;&l(2(7_ePU0u1zXmXa2qm>fDDqUGzeZbe)evfoHKE&8BdEYToKT_ZvV znaA=C)WFJKGmA@)DOSL{1rSc4+;!KYKs7LxJ$G%j+DFkz9em?=^0S?ad)v@XjNkB2 zznyq?k0d7EwKG>YK%Fp=l16a|pj{Z!^Lg2lYdPPu(m&3}|M7A`Dh_w-Uu4X(>OYFm za6Ee0)pWTed_KvN2rmep9teoOP4V^GX0bV>$b9hTj)vXhN@MizY6LZ-nVRCALZl>z z>(GdL9JC{hBZN@~q`bP(oTsyJsYATrW2=!Xa$-=xeeM*gT51!2*x_ zUDgZ#5<`aSfG$*M8@1cgC`GqdY^NmUy(q+{IK_2w6M>TwSK!n9on07L3jx4i`TJ_T zw{(|VwVn9|usl2>4Wq73J7a~KKtwbt-(Zi2_9k#lL{j18t74gTiVl;LH6JqB*(Uf} z_>ce^^H3}qbN-ic(o=-0z`%BS^8~Fx`aiZ0Y-RJgQ3(4-myuZ1cHi4B9UrsF3bVxU+R+%XAW4@^f6eb z_rMeZoraB~ht4r0R)6Pk62)15<*7KFJ7+B?g~L%pQlqf} z%!==%ZhFTtJ89Vwh5)sKC_@2mplAewO>_HHUVMgg%c{eYx9XESraN+4@B?`5ys#Q z%~sp|#)jp4=V?sNESrjZ;p9oHXjXRD4J#T}f%2U%If|&pZw4a}J#7mFHJw(c$QH2A z_2c@FEB^jlfYt)^bQNfFaIV`AnNqx{HMRdeTXMcT=SN#D?S-t7U{<;_9`RgYl7jc< zdK|=_Y(U%M65aU4NmO!F00pC4$<~Z`5z5rnV5yl^=YRpA2A+6E>QUuuF-}08So)0L zlIDKB`9YileH&i1g8M6Y*_s4s_whUSpQ_dv;y*laqF_AhM4e^3=;K}dR*z2GZU}VC(y!?r%+s+m`&lWTgvTBFXe4j_@o)1@~ zku@4Rie!LB{bi@q1~Z%nj9ni2Qh)Tj8l?LBtP$p5D}o-U<#rLabCFVJPIFAKayEW3 zSZ>G6&arXVfMiUWY23{E*S+ftMi;j7nY&y}D z94}10zY>v%8trm7Q&>AQaJipmZR7AKmj!qL9!!7cZ;_M2@|FjtswS&Zq(or$emeLR z7f^g)&F@cGEV-9wp;=k;w|1k-FeQ&*3c6(LQgSnEtxn5D0#{5aZA^LIFqiQ<%@YL7 zfF#I(#bbj`y2nAx1Cm4ZNY;bNB@<#=Fg`?4%xZ+%{Wyo>dV2ddNA)wS@68w~n(xn{@TDB#4u+9{hJ?h^L&e+F$6UHBiWlAKGlac|ehHv}H~a zo*k&-VnWztUL3-)hZEFLXBHvnB30eYi{}Nq2~t!iJFS983G3 z3dFT17PrFZ*8n%~AL+vmFzl}MCqzD!_UH*?aex80qLf^k^sU2r&SpW;t0y(bG*_*t za}7^!1J0lQOkYP5od_(^0sn}!{<)A9Ocb0bf7nVvImG=FdU!7CuCN?DG3dA@w`-XJ z?g0`03IyMW6)$yT)t-BVLI&~T?6}6f977(!$Auu_|Ew3AE&RDSbEY<(N*J6r5NYTx zny;9fLS>&TCGu~}aNubn;nbK2z0}LveIJS-BJ_OFR&WoO%1GqgnE`L`|P}x0toI>h_)_GyURoiYDw$ z$3d7F{&Tz0xFq8#2YsODV_=-&j}!q5L`$VwWR-%<<2Va%jY%!e2kfyN+w-70K&b#;zI#>FLg5Zp9`{wK<-SR+ouS=n5c}%Z?+SJ|a%9|K9Ooi1tCNys{' + + ''.join([f'' for i in motion_list]) + + f'')) + return motion_string + + +def load_example_input(txt_path, task, model): + with open(txt_path, "r") as file: + Lines = file.readlines() + Lines = [line for line in Lines if line.strip()] + count = 0 + texts = [] + # Strips the newline character + motion_joints = [torch.zeros((1, 1, 22, 3))] * len(Lines) + motion_lengths = [0] * len(Lines) + motion_token_string = [''] + motion_head = [] + motion_heading = [] + motion_tailing = [] + motion_token = torch.zeros((1, 263)) + for i, line in enumerate(Lines): + count += 1 + if len(line.split('#')) == 1: + texts.append(line) + else: + feat_path = line.split('#')[1].replace('\n', '') + if os.path.exists(feat_path): + feats = torch.tensor(np.load(feat_path), device=model.device) + feats = model.datamodule.normalize(feats) + + motion_lengths[i] = feats.shape[0] + motion_token, _ = model.vae.encode(feats[None]) + + motion_token_string = motion_token_to_string( + motion_token, [motion_token.shape[1]])[0] + motion_token_length = motion_token.shape[1] + + motion_splited = motion_token_string.split('>') + + split = motion_token_length // 5 + 1 + split2 
= motion_token_length // 4 + 1 + split3 = motion_token_length // 4 * 3 + 1 + + motion_head.append(motion_token[:, :motion_token.shape[1] // + 5][0]) + + motion_heading.append(feats[:feats.shape[0] // 4]) + + motion_tailing.append(feats[feats.shape[0] // 4 * 3:]) + + if '' in line: + motion_joints[i] = model.feats2joints( + feats)[:, :feats.shape[1] // 5] + else: + motion_joints[i] = model.feats2joints(feats) + + motion_split1 = '>'.join( + motion_splited[:split] + ) + f'>' + motion_split2 = f'' + '>'.join( + motion_splited[split:]) + + motion_masked = '>'.join( + motion_splited[:split2] + ) + '>' + f'' * ( + split3 - split2) + '>'.join(motion_splited[split3:]) + + texts.append( + line.split('#')[0].replace( + '', motion_token_string).replace( + '', motion_split1).replace( + '', motion_split2).replace( + '', motion_masked)) + + return_dict = { + 'text': texts, + 'motion_joints': motion_joints, + 'motion_lengths': motion_lengths, + 'motion_token': motion_token, + 'motion_token_string': motion_token_string, + } + if len(motion_head) > 0: + print(len(motion_head)) + return_dict['motion_head'] = motion_head + + if len(motion_heading) > 0: + print(len(motion_heading)) + return_dict['motion_heading'] = motion_heading + + if len(motion_tailing) > 0: + print(len(motion_tailing)) + return_dict['motion_tailing'] = motion_tailing + + return return_dict + + +def main(): + # parse options + cfg = parse_args(phase="demo") # parse config file + cfg.FOLDER = cfg.TEST.FOLDER + + # create logger + logger = create_logger(cfg, phase="test") + + task = cfg.DEMO.TASK + text = None + + output_dir = Path( + os.path.join(cfg.FOLDER, str(cfg.model.model_type), str(cfg.NAME), + "samples_" + cfg.TIME)) + output_dir.mkdir(parents=True, exist_ok=True) + + logger.info(OmegaConf.to_yaml(cfg)) + + # set seed + pl.seed_everything(cfg.SEED_VALUE) + + # gpu setting + if cfg.ACCELERATOR == "gpu": + os.environ["CUDA_VISIBLE_DEVICES"] = ",".join( + str(x) for x in cfg.DEVICE) + device = torch.device("cuda") + + # Dataset + datamodule = build_data(cfg) + logger.info("datasets module {} initialized".format("".join( + cfg.DATASET.target.split('.')[-2]))) + + # create model + total_time = time.time() + model = build_model(cfg, datamodule) + logger.info("model {} loaded".format(cfg.model.target)) + + # loading state dict + if cfg.TEST.CHECKPOINTS: + logger.info("Loading checkpoints from {}".format(cfg.TEST.CHECKPOINTS)) + state_dict = torch.load(cfg.TEST.CHECKPOINTS, + map_location="cpu")["state_dict"] + model.load_state_dict(state_dict) + else: + logger.warning( + "No checkpoints provided, using random initialized model") + + model.to(device) + + if cfg.DEMO.EXAMPLE: + # Check txt file input + # load txt + return_dict = load_example_input(cfg.DEMO.EXAMPLE, task, model) + text, in_joints = return_dict['text'], return_dict['motion_joints'] + + batch_size = 64 + if text: + for b in tqdm(range(len(text) // batch_size + 1)): + text_batch = text[b * batch_size:(b + 1) * batch_size] + in_joints_batch = in_joints[b * batch_size:(b + 1) * batch_size] + batch = { + "length": + return_dict["motion_lengths"][b * batch_size:(b + 1) * + batch_size], + "text": + text_batch + } + if 'motion_head' in return_dict: + batch["motion"] = return_dict['motion_head'][b * + batch_size:(b + + 1) * + batch_size] + if 'motion_heading' in return_dict: + batch["motion_heading"] = return_dict['motion_heading'][ + b * batch_size:(b + 1) * batch_size] + if 'motion_tailing' in return_dict: + batch["motion_tailing"] = return_dict['motion_tailing'][ + b * batch_size:(b 
+
+            outputs = model(batch, task=cfg.model.params.task)
+            logger.info('Model forward finished! Start saving results...')
+            joints = outputs["joints"]
+            lengths = outputs["length"]
+            output_texts = outputs["texts"]
+
+            for i in range(len(joints)):
+                xyz = joints[i][:lengths[i]]
+                xyz = xyz[None]
+
+                try:
+                    xyz = xyz.detach().cpu().numpy()
+                    xyz_in = in_joints_batch[i][None].detach().cpu().numpy()
+                except:
+                    xyz = xyz.detach().numpy()
+                    xyz_in = in_joints[i][None].detach().numpy()
+
+                id = b * batch_size + i
+
+                np.save(os.path.join(output_dir, f'{id}_out.npy'), xyz)
+                np.save(os.path.join(output_dir, f'{id}_in.npy'), xyz_in)
+
+                with open(os.path.join(output_dir, f'{id}_in.txt'), 'w') as f:
+                    f.write(text_batch[i])
+
+                with open(os.path.join(output_dir, f'{id}_out.txt'), 'w') as f:
+                    f.write(output_texts[i])
+
+                # pose_vis = plot_3d.draw_to_batch(xyz_in, [''], [os.path.join(output_dir, f'{i}_in.gif')])
+                # pose_vis = plot_3d.draw_to_batch(xyz, [''], [os.path.join(output_dir, f'{i}_out.gif')])
+
+    total_time = time.time() - total_time
+    logger.info(
+        f'Total time spent: {total_time:.2f} seconds (including model loading time and exporting time).'
+    )
+    logger.info(f"Testing done, the npy are saved to {output_dir}")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/demos/inbetween.txt b/demos/inbetween.txt
new file mode 100644
index 0000000..d69e03a
--- /dev/null
+++ b/demos/inbetween.txt
@@ -0,0 +1,50 @@
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/012657.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M003137.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M011458.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/009410.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M001298.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M010526.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M001632.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M009521.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/000086.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M005152.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M008910.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M010563.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/000307.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/008588.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M011731.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/000921.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M004975.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/010698.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/P_M008159.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/005413.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/004867.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M002246.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M010043.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M014536.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/J_M002982.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M011785.npy
+Complete the masked motion: #datasets/humanml3d/new_joint_vecs/002093.npy
+Complete the masked motion: 
#datasets/humanml3d/new_joint_vecs/M013476.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M000343.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M012561.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M000472.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M006819.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/000363.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M014253.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/013112.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M000379.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/008567.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/013514.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M003365.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/002550.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/011095.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/R_000889.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/006236.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/009031.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/001676.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M013314.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M012611.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/M005468.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/008357.npy +Complete the masked motion: #datasets/humanml3d/new_joint_vecs/V_M007878.npy diff --git a/demos/m2t.txt b/demos/m2t.txt new file mode 100644 index 0000000..749d22d --- /dev/null +++ b/demos/m2t.txt @@ -0,0 +1,50 @@ +Explain the motion illustrated in using language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M000307.npy +Describe the action being represented by using text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/009877.npy +What kind of action is being demonstrated in ? Explain it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/000067.npy +Describe the movement demonstrated in in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M005468.npy +Generate a sentence that explains the action in .#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M012506.npy +Generate text. Input: . Output: #/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/012046.npy +Please describe the movement depicted in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/002932.npy +Provide a description of the motion being displayed in using language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/014021.npy +Give me a brief summary of the movement depicted in .#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M008498.npy +Generate text for :#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M005787.npy +What is being displayed in ? 
Please explain it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/005596.npy +Describe the movement demonstrated in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M011797.npy +What is being shown in ? Please explain it using words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M014046.npy +Describe the motion portrayed in in natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M000379.npy +Please provide a text-based explanation of the action being displayed in .#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/002323.npy +Explain the movement illustrated in using text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/001931.npy +Please explain the action depicted in using words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/010956.npy +Generate text. Input: . Output: #/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M004851.npy +What is being shown in ? Please describe it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M005818.npy +Please explain the movement being represented by using text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/006036.npy +What kind of action is demonstrated in ? Explain it in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M000439.npy +Describe the motion displayed in in natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M000358.npy +What kind of action is being displayed in ? Explain it in plain English.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/004749.npy +Please describe the movement depicted in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M002544.npy +What does the convey?#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/008900.npy +Explain the motion being shown in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/004708.npy +Provide a text-based explanation of what is happening in .#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/006987.npy +What does the communicate? Please describe it in language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M003350.npy +Please provide a description of the motion in using words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/005638.npy +Describe the motion displayed in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/000523.npy +Generate text for :#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M000086.npy +Describe the movement being shown in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M011673.npy +What kind of movement is being shown in ? 
Explain it using language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M002246.npy +Can you explain what is happening in using plain English?#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M013164.npy +Describe the movement being demonstrated by using words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/003039.npy +What kind of motion is shown in ? Explain it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/003596.npy +What does the demonstrate?#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/001772.npy +What is happening in ? Please explain it using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M011785.npy +Can you explain what is happening in using natural language?#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/005052.npy +Describe the motion portrayed in using words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/004319.npy +Provide a text-based explanation of the action shown in .#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M006036.npy +What kind of action is shown in ? Explain it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/009031.npy +What kind of movement is being represented by ?#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/003812.npy +Describe the movement shown in using natural language.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M003149.npy +Describe the motion that is being represented by in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/006979.npy +What is happening in ? Please describe it using text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/000076.npy +What kind of motion is illustrated in ? Please describe it in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/000717.npy +What is being shown in ? 
Please describe it in text.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M004129.npy +Describe the motion being represented by in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/M006871.npy +Explain the motion being demonstrated in in words.#/apdcephfs/share_1227775/billljiang/memData/datasets/humanml3d/new_joint_vecs/011504.npy diff --git a/demos/pred.txt b/demos/pred.txt new file mode 100644 index 0000000..294e458 --- /dev/null +++ b/demos/pred.txt @@ -0,0 +1,10 @@ +Predict motion: #datasets/humanml3d/new_joint_vecs/M009643.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/M008862.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/004163.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/005166.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/009181.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/000179.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/013811.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/000556.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/010967.npy +Predict motion: #datasets/humanml3d/new_joint_vecs/007567.npy diff --git a/demos/t2m.txt b/demos/t2m.txt new file mode 100644 index 0000000..1db7c54 --- /dev/null +++ b/demos/t2m.txt @@ -0,0 +1,50 @@ +Please create a motion that represents the power of the figure takes a few slighly hurried steps without raising their arms, it looks they are about to start running but haven't quite yet begun. to create a better world for all. +I need a motion that represents the power of a man steps forward, then picks something up with his right hand, then with his right hand, brings them close together, and sets them back down in the same order. to create progress. Can you generate it for me? +Describe the movements of a person walking down a flight of stairs in person walks up then takes a large step to their left and then goes back onto the same path they were on. +I need a motion that represents the power of a person lifts both hands above their head, moves both hands rapidly outwards and back to above their head, then lowers both arms. to create positive change in the fight against hunger and malnutrition. Can you generate it for me? +Generate a motion that embodies the idea of a person takes two steps forward, then walks sideways three steps, then walks forward diagonally and to the right three steps. +Give me a gesture that visualizes a person walks forward one foot in front of the other until he loses his balance and tilts far to the left then stumbles to his left. +Produce a movement that visualizes Input: a person's right hand moves towards the ground, the left hand, left leg, and right leg touch the ground in order. the person turns and starts the same movement beginning with the person's left hand. +I need a motion that represents the power of a person's squats down using mainly their right leg, their left leg crosses their right leg, and then they stand back up to create a better world for future generations. Can you generate it for me? +hands go to the chest moving back and forth, left hand place on the right upper arm as while as right hand is place on the left upper arm. describes the movements of someone practicing swing dance. 
+Create a sequence of movements that exemplifies person walks to the right then turns around and walks to the left before turning around and returning to starting position +Describe the movements of a person doing a plank in a person stands with arms out partway to each side, lifts his right leg, swings it forward briefly, then sweeps it in a semicircle behind him before moving it forward and standing again; he then repeats the action.. +I would like to see the motion of a person is attempting to jump rope by hopping from one leg to the other as if running in place, but has to reset every two to three jumps. +Demonstrate a dance that symbolizes the feeling of the figure walks from the bottom right to the top left of the square, and bends down as if picking something up twice. then it turns around. +I need a motion that represents the power of a man waves his hands from side to side then does a submersion move and passes his right hand from the top left to right lower. to create a better world for future generations. Can you generate it for me? +Give me a motion that reflects the idea of a person standing up throws something forward from above their head, then throws something again forward from above their head with more force which makes them take one step forward with their right foot. +Give me a motion that expresses a person walks to the right makes a u-turn clockwise and returns to the left of their initial position facing away +I need a motion that represents the transformation of a person makes a shaking motion in front of their face with their right hand and then makes the motion of picking up an object and taking a drink with their right hand.. Can you generate it for me? +Create a dance that symbolizes a person bends down to pick something up with their left arm, sets it down with their right arm, and then starts to pour something into it. +I want a motion that represents the power of a man gets up from the ground pushing off with his right hand then walks in a counter counterclockwise circle back to where he began then lays down flat on the ground on his back. to create positive change in the arts. Can you generate that? +Develop a movement that represents Input: the person is in a sitting position with his arms in front them when they raise their arms out to the side, lower them, and raise their arms again before bringing them back to their original position. +Please create a motion that represents the power of someone put their arms on their chest, put one hand on their hip and put the other end out like a teapot, and then moved head around in a circle. to inspire change. +Describe the movements of a person doing a handstand press in the person throws out their right arm in front of them then brings both hands to their mouth before lowering them together to the center of their body.. +Create a motion that depicts Input: a person rotates their head, then rotates the arms from the shoulders left, right, then left, and then rotates the arms from the elbows in each direction. +Develop a motion that symbolizes a person lifts up their arm at a 120 degree angle twice and then reverts their arm to the opposite lower part of their body. +Describe the movements of a person walking down a flight of stairs in a person who is standing with his arms by his sides does three straight jumping jacks and returns to standing with his arms by his sides.. 
+Please create a motion that represents the magic of the person steps a little wider than shoulder width apart first with their right foot, then with their left before squatting 4 times.. +Please create a motion that represents the uniqueness of a man is mixing something infront of his body and seems to pick it up with his right hand and then proceed to mix it back. then picks it up with his left hand while standing still. +Create a motion for the caption: a person takes a step forward, squats down, places their left hand on the ground in front of them, moves their hand slightly counter counterclockwise then stands up. +Create a choreography for the caption: a person makes an underhanded throw with his right arm, as if rolling a ball, before raising both arms into the air as if throwing a ball, then he takes two steps back and runs forwards quickly. +Generate a motion that conveys a person does a single knee down with left leg with right leg stepping forward while raising right arm up to head level and placing its forearm in front of face, and then resume the original position. +I need a human motion that conveys the feeling of a person is holding his arms straight out to the sides then lowers them, claps, and steps forward to sit in a chair. Can you generate it for me? +Please create a motion that represents the chaos of subject is sitting flat on the ground feat straight in front then the subject stands straight up then sits back down with feet straight out in front again. +Show me a sequence of movements that evokes with hips swaying like a woman walking, this person takes 4 steps up the stairs, turns to the right on steps 5 & 6, then walks down the stairs in 4 more steps, walking back to where they started. +Develop a motion that captures the idea of a person who is standing with his arms extended at shoulder height drops his hands to his knees and holds that position before raising his arms to his original position. +Generate a dance that interprets Input: a man walks forward and picks up an object with his right hand, then puts the object back down and steps backward. +Create a person squats down to the ground, picks up a box, then stands back up, and places the box on a higher surface. to describe a person playing volleyball. +Generate Motion: a person walks up to shake with their right hand, turns slightly right to shake again, and turns right again to shake for a final time. +Can you generate a motion that portrays a person steps forward with their left foot, then steps with their right foot, does a 180 degree turn to the right on their right foot, steps forward with their left foot and right again. in a realistic way? +Show me a motion that represents man stands holding both arms up at his sides at a right anglefor 7 seconds then brings both hands down together to his left side and squeezed an object. +Show me a gesture that conveys a person is standing and moves their arms up to their face to take a sip of something in their hand. +I want a motion that represents the power of the person is in a sitting position with his arms in front them when they raise their arms out to the side, lower them, and raise their arms again before bringing them back to their original position. to create positive change in the world. Can you generate that? +I want to see a motion that represents the pain of a person is appears to be holding a broom, left hand over right hand, and sweeps toward the left, and then starts rotating to the right while sweeping toward the right.. Can you generate that? 
+Generate a person steps forward and shakes both hands together in a begging manner, and then steps back. the person steps forward again and shakes both hands in a begging manner but more aggressively. for a person practicing bouldering. +I want a motion that represents the power of a person standing on one foot holds their left hand up while moving their right foot in a side to side motion. to create a better world. Can you generate that? +I want a motion that represents the power of starting from their left foot in the air, person stands ready with fists up then takes two swings with their right hand downward then two more high and to their right and finally two lefts downward mirroring the rights from before. to create positive change in the field of peacebuilding and conflict resolution. Can you generate that? +Create a sequence of movements that embodies the meaning of man reaches down to the left as to pick up item and then reaches to the right as if emptying item then replaces it to the left. +Create person turns around from front to rear and whilst holding his hand and arm stomach level then turns around again and goes to original position to describe a person performing a tap dance. +Show me a motion that exemplifies the sentiment of Input: a person is running on the spot, then turns left and jabs with both arms, then turns right and continues running on the spot. +Demonstrate a dance that conveys Input: a person who is standing with his arms by his sides jumps in place twice and then shifts his body right and left while remaining in place. +I need a motion that represents the power of a man is locking his hands behind his back and sweeping his legs left and right, in a dance like motion. to create progress. Can you generate it for me? diff --git a/fit.py b/fit.py new file mode 100644 index 0000000..1ea6510 --- /dev/null +++ b/fit.py @@ -0,0 +1,289 @@ +# borrow from optimization https://github.com/wangsen1312/joints2smpl + +from __future__ import division, print_function + +import argparse +import os +import random +import shutil +import sys +from os import listdir, walk +from os.path import isfile, join +from pathlib import Path + +import h5py +import joblib +import natsort +import numpy as np +import smplx +import torch +import trimesh + +from mGPT.data.transforms.joints2rots import config +from mGPT.data.transforms.joints2rots.smplify import SMPLify3D +from mGPT.utils.joints import mmm_to_smplh_scaling_factor +from mGPT.utils.temos_utils import subsample +from scripts.plys2npy import plys2npy + +sys.path.append(os.path.join(os.path.dirname(__file__), "src")) + +# parsing argmument +parser = argparse.ArgumentParser() +parser.add_argument("--batchSize", + type=int, + default=1, + help="input batch size") +parser.add_argument( + "--num_smplify_iters", + type=int, + default=100, + help="num of smplify iters" # 100 +) +parser.add_argument("--cuda", type=bool, default=True, help="enables cuda") +parser.add_argument("--gpu_ids", type=int, default=0, help="choose gpu ids") +parser.add_argument("--num_joints", type=int, default=22, help="joint number") +parser.add_argument("--joint_category", + type=str, + default="AMASS", + help="use correspondence") +parser.add_argument("--fix_foot", + type=str, + default="False", + help="fix foot or not") +parser.add_argument( + "--data_folder", + type=str, + default="", # ./demo/demo_data/ + help="data in the folder", +) +parser.add_argument( + "--save_folder", + type=str, + default=None, + # default="./../TMOSTData/demo/", + help="results 
save folder", +) +parser.add_argument("--dir", type=str, default=None, help="folder use") +parser.add_argument("--files", + type=str, + default="test_motion.npy", + help="files use") +opt = parser.parse_args() +print(opt) + +# ---load predefined something +device = torch.device("cuda:" + str(opt.gpu_ids) if opt.cuda else "cpu") +print(config.SMPL_MODEL_DIR) +# smplmodel = smplx.create(config.SMPL_MODEL_DIR, +# model_type="smplh", gender="neutral", ext="npz", +# batch_size=opt.batchSize).to(device) +smplmodel = smplx.create( + config.SMPL_MODEL_DIR, + model_type="smpl", + gender="neutral", + ext="pkl", + batch_size=opt.batchSize, +).to(device) + +# ## --- load the mean pose as original ---- +smpl_mean_file = config.SMPL_MEAN_FILE + +file = h5py.File(smpl_mean_file, "r") +init_mean_pose = (torch.from_numpy( + file["pose"][:]).unsqueeze(0).float().repeat(opt.batchSize, 1).to(device)) +init_mean_shape = (torch.from_numpy( + file["shape"][:]).unsqueeze(0).float().repeat(opt.batchSize, 1).to(device)) +cam_trans_zero = torch.Tensor([0.0, 0.0, 0.0]).unsqueeze(0).to(device) +# +pred_pose = torch.zeros(opt.batchSize, 72).to(device) +pred_betas = torch.zeros(opt.batchSize, 10).to(device) +pred_cam_t = torch.zeros(opt.batchSize, 3).to(device) +keypoints_3d = torch.zeros(opt.batchSize, opt.num_joints, 3).to(device) + +# # #-------------initialize SMPLify +smplify = SMPLify3D( + smplxmodel=smplmodel, + batch_size=opt.batchSize, + joints_category=opt.joint_category, + num_iters=opt.num_smplify_iters, + device=device, +) +print("initialize SMPLify3D done!") + +paths = [] +if opt.dir: + output_dir = Path(opt.dir) + # file_list = os.listdir(cfg.RENDER.DIR) + # random begin for parallel + file_list = natsort.natsorted(os.listdir(opt.dir)) + begin_id = random.randrange(0, len(file_list)) + file_list = file_list[begin_id:] + file_list[:begin_id] + for item in file_list: + if item.endswith(".npy"): + paths.append(os.path.join(opt.dir, item)) +elif opt.files: + paths.append(opt.files) + +print(f"begin to render {len(paths)} npy files!") + +# if opt.save_folder is None: +# save_folder = os.path.pardir(opt.dir) + "results_smplfitting" +# if not os.path.isdir(save_folder): +# os.makedirs(save_folder, exist_ok=True) + +if not os.path.isdir(opt.save_folder): + os.makedirs(opt.save_folder, exist_ok=True) + +for path in paths: + dir_save = os.path.join(opt.save_folder, "results_smplfitting", + "SMPLFit_" + os.path.basename(path)[:-4]) + + if os.path.exists(path[:-4] + "_mesh.npy"): + print(f"npy is fitted {path[:-4]}_mesh.npy") + # check_file = "" + # try: + # data = np.load(path) + # except: + continue + + # if os.path.exists(dir_save): + # print(f"npy is fitted or under fitting {dir_save}") + # continue + + data = np.load(path) + if len(data.shape) > 3: + data = data[0] + + # check input joint or meshes + if data.shape[1] > 1000: + print("npy is a mesh now {dir_save}") + continue + + print(f"begin rendering {dir_save}") + + if not os.path.isdir(dir_save): + os.makedirs(dir_save, exist_ok=True) + + if opt.num_joints == 22: + # humanml3d amass + frames = subsample(len(data), last_framerate=12.5, new_framerate=12.5) + data = data[frames, ...] + elif opt.num_joints == 21: + # kit + # purename = os.path.splitext(opt.files)[0] + # data = np.load(opt.data_folder + "/" + purename + ".npy") + # downsampling to + frames = subsample(len(data), last_framerate=100, new_framerate=12.5) + data = data[frames, ...] 
+ # Convert mmm joints for visualization + # into smpl-h "scale" and axis + # data = data.copy()[..., [2, 0, 1]] * mmm_to_smplh_scaling_factor + data = data.copy() * mmm_to_smplh_scaling_factor + + # run the whole seqs + num_seqs = data.shape[0] + + pred_pose_prev = torch.zeros(opt.batchSize, 72).to(device) + pred_betas_prev = torch.zeros(opt.batchSize, 10).to(device) + pred_cam_t_prev = torch.zeros(opt.batchSize, 3).to(device) + keypoints_3d_prev = torch.zeros(opt.batchSize, opt.num_joints, + 3).to(device) + + for idx in range(num_seqs): + print(f"computing frame {idx}") + + ply_path = dir_save + "/" + "motion_%04d" % idx + ".ply" + if os.path.exists(ply_path[:-4] + ".pkl"): + print(f"this frame is fitted {ply_path}") + continue + + joints3d = data[idx] # *1.2 #scale problem [check first] + keypoints_3d[0, :, :] = torch.Tensor(joints3d).to(device).float() + + if idx == 0: + pred_betas[0, :] = init_mean_shape + pred_pose[0, :] = init_mean_pose + pred_cam_t[0, :] = cam_trans_zero + else: + # ToDo-use previous results rather than loading + data_param = joblib.load(dir_save + "/" + "motion_%04d" % + (idx - 1) + ".pkl") + pred_betas[0, :] = torch.from_numpy( + data_param["beta"]).unsqueeze(0).float() + pred_pose[0, :] = torch.from_numpy( + data_param["pose"]).unsqueeze(0).float() + pred_cam_t[0, :] = torch.from_numpy( + data_param["cam"]).unsqueeze(0).float() + + if opt.joint_category == "AMASS": + confidence_input = torch.ones(opt.num_joints) + # make sure the foot and ankle + if opt.fix_foot == True: + confidence_input[7] = 1.5 + confidence_input[8] = 1.5 + confidence_input[10] = 1.5 + confidence_input[11] = 1.5 + elif opt.joint_category == "MMM": + confidence_input = torch.ones(opt.num_joints) + else: + print("Such category not settle down!") + + # ----- from initial to fitting ------- + ( + new_opt_vertices, + new_opt_joints, + new_opt_pose, + new_opt_betas, + new_opt_cam_t, + new_opt_joint_loss, + ) = smplify( + pred_pose.detach(), + pred_betas.detach(), + pred_cam_t.detach(), + keypoints_3d, + conf_3d=confidence_input.to(device), + # seq_ind=idx, + ) + + # # -- save the results to ply--- + outputp = smplmodel( + betas=new_opt_betas, + global_orient=new_opt_pose[:, :3], + body_pose=new_opt_pose[:, 3:], + transl=new_opt_cam_t, + return_verts=True, + ) + + # gt debuggin + if False: + mesh_p = trimesh.Trimesh( + vertices=keypoints_3d.detach().cpu().numpy().squeeze(), + process=False) + mesh_p.export(dir_save + "/" + "%04d" % idx + "_gt.ply") + + mesh_p = trimesh.Trimesh( + vertices=outputp.vertices.detach().cpu().numpy().squeeze(), + faces=smplmodel.faces, + process=False, + ) + mesh_p.export(ply_path) + print("Output: " + ply_path) + + # save the pkl + param = {} + param["beta"] = new_opt_betas.detach().cpu().numpy() + param["pose"] = new_opt_pose.detach().cpu().numpy() + param["cam"] = new_opt_cam_t.detach().cpu().numpy() + joblib.dump(param, + dir_save + "/" + "motion_%04d" % idx + ".pkl", + compress=3) + print("Output: " + dir_save + "/" + "motion_%04d" % idx + ".pkl") + + print("merge ply to npy for mesh rendering") + plys2npy(dir_save, os.path.dirname(path)) + +# # rendering +# if True: +# from tmost.utils.demo_utils import render_batch +# # render_batch(opt.dir, mode="sequence") # sequence +# render_batch(opt.dir, mode="video") diff --git a/mGPT/__init__.py b/mGPT/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/archs/__init__.py b/mGPT/archs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/archs/mgpt_lm.py b/mGPT/archs/mgpt_lm.py new 
file mode 100644 index 0000000..c30307a --- /dev/null +++ b/mGPT/archs/mgpt_lm.py @@ -0,0 +1,592 @@ +import os +from typing import List, Union +import numpy as np +import math +import time +import heapq +import torch +from torch import Tensor, nn +from torch.distributions.distribution import Distribution +from transformers import AutoModelForSeq2SeqLM, T5ForConditionalGeneration, T5Tokenizer, AutoTokenizer, GPT2LMHeadModel, GPT2Tokenizer +import random +from typing import Optional +from .tools.token_emb import NewTokenEmb + + +class MLM(nn.Module): + + def __init__( + self, + model_path: str, + model_type: str = "t5", + stage: str = "lm_pretrain", + new_token_type: str = "insert", + motion_codebook_size: int = 512, + framerate: float = 20.0, + down_t: int = 4, + predict_ratio: float = 0.2, + inbetween_ratio: float = 0.25, + max_length: int = 256, + lora: bool = False, + quota_ratio: float = 0.5, + noise_density: float = 0.15, + mean_noise_span_length: int = 3, + **kwargs, + ) -> None: + + super().__init__() + + # Parameters + self.m_codebook_size = motion_codebook_size + self.max_length = max_length + self.framerate = framerate + self.down_t = down_t + self.predict_ratio = predict_ratio + self.inbetween_ratio = inbetween_ratio + self.noise_density = noise_density + self.mean_noise_span_length = mean_noise_span_length + self.quota_ratio = quota_ratio + self.stage = stage + + # Instantiate language model + self.tokenizer = AutoTokenizer.from_pretrained(model_path, legacy=True) + if model_type == "t5": + self.language_model = T5ForConditionalGeneration.from_pretrained( + model_path) + self.lm_type = 'encdec' + elif model_type == "gpt2": + self.language_model = GPT2LMHeadModel.from_pretrained(model_path) + self.lm_type = 'dec' + else: + raise ValueError("type must be either seq2seq or conditional") + + if self.lm_type == 'dec': + self.tokenizer.pad_token = self.tokenizer.eos_token + + # Add motion tokens + self.tokenizer.add_tokens( + [f'' for i in range(self.m_codebook_size + 3)]) + + if new_token_type == "insert": + self.language_model.resize_token_embeddings(len(self.tokenizer)) + elif new_token_type == "mlp": + shared = NewTokenEmb(self.language_model.shared, + self.m_codebook_size + 3) + # lm_head = NewTokenEmb(self.language_model.lm_head, + # self.m_codebook_size + 3) + self.language_model.resize_token_embeddings(len(self.tokenizer)) + self.language_model.shared = shared + # self.language_model.lm_head = lm_head + + # Lora + if lora: + from peft import LoraConfig, TaskType, get_peft_model, get_peft_model_state_dict + from peft.utils.other import fsdp_auto_wrap_policy + peft_config = LoraConfig( + bias="none", + task_type="CAUSAL_LM", + # inference_mode=False, + r=8, + lora_alpha=16, + lora_dropout=0.05) + self.language_model = get_peft_model(self.language_model, + peft_config) + + def forward(self, texts: List[str], motion_tokens: Tensor, + lengths: List[int], tasks: dict): + if self.lm_type == 'encdec': + return self.forward_encdec(texts, motion_tokens, lengths, tasks) + elif self.lm_type == 'dec': + return self.forward_dec(texts, motion_tokens, lengths, tasks) + else: + raise NotImplementedError("Only conditional_multitask supported") + + def forward_encdec( + self, + texts: List[str], + motion_tokens: Tensor, + lengths: List[int], + tasks: dict, + ): + + # Tensor to string + motion_strings = self.motion_token_to_string(motion_tokens, lengths) + + # Supervised or unsupervised + # condition = random.choice( + # ['text', 'motion', 'supervised', 'supervised', 'supervised']) + condition = 
random.choice(['supervised', 'supervised', 'supervised']) + + if condition == 'text': + inputs = texts + outputs = texts + elif condition == 'motion': + inputs = motion_strings + outputs = motion_strings + else: + inputs, outputs = self.template_fulfill(tasks, lengths, + motion_strings, texts) + + # Tokenize + source_encoding = self.tokenizer(inputs, + padding='max_length', + max_length=self.max_length, + truncation=True, + return_attention_mask=True, + add_special_tokens=True, + return_tensors="pt") + + source_attention_mask = source_encoding.attention_mask.to( + motion_tokens.device) + source_input_ids = source_encoding.input_ids.to(motion_tokens.device) + + if condition in ['text', 'motion']: + batch_size, expandend_input_length = source_input_ids.shape + mask_indices = np.asarray([ + self.random_spans_noise_mask(expandend_input_length) + for i in range(batch_size) + ]) + target_mask = ~mask_indices + input_ids_sentinel = self.create_sentinel_ids( + mask_indices.astype(np.int8)) + target_sentinel = self.create_sentinel_ids( + target_mask.astype(np.int8)) + + labels_input_ids = self.filter_input_ids(source_input_ids, + target_sentinel) + source_input_ids = self.filter_input_ids(source_input_ids, + input_ids_sentinel) + + else: + target_inputs = self.tokenizer(outputs, + padding='max_length', + max_length=self.max_length, + truncation=True, + return_attention_mask=True, + add_special_tokens=True, + return_tensors="pt") + + labels_input_ids = target_inputs.input_ids.to(motion_tokens.device) + lables_attention_mask = target_inputs.attention_mask.to( + motion_tokens.device) + + labels_input_ids[labels_input_ids == 0] = -100 + outputs = self.language_model( + input_ids=source_input_ids, + attention_mask=source_attention_mask + if condition == 'supervised' else None, + labels=labels_input_ids, + decoder_attention_mask=lables_attention_mask + if condition == 'supervised' else None, + ) + + return outputs + + def forward_dec( + self, + texts: List[str], + motion_tokens: Tensor, + lengths: List[int], + tasks: dict, + ): + self.tokenizer.padding_side = "right" + + # Tensor to string + motion_strings = self.motion_token_to_string(motion_tokens, lengths) + + # Supervised or unsupervised + condition = random.choice( + ['text', 'motion', 'supervised', 'supervised', 'supervised']) + + if condition == 'text': + labels = texts + elif condition == 'motion': + labels = motion_strings + else: + inputs, outputs = self.template_fulfill(tasks, lengths, + motion_strings, texts) + labels = [] + for i in range(len(inputs)): + labels.append(inputs[i] + ' \n ' + outputs[i] + + self.tokenizer.eos_token) + + # Tokenize + inputs = self.tokenizer(labels, + padding='max_length', + max_length=self.max_length, + truncation=True, + return_attention_mask=True, + return_tensors="pt") + + labels_input_ids = inputs.input_ids.to(motion_tokens.device) + lables_attention_mask = inputs.attention_mask.to(motion_tokens.device) + + # print(labels_input_ids[0:5]) + + outputs = self.language_model(input_ids=labels_input_ids, + attention_mask=lables_attention_mask, + labels=inputs["input_ids"]) + + return outputs + + def generate_direct(self, + texts: List[str], + max_length: int = 256, + num_beams: int = 1, + do_sample: bool = True, + bad_words_ids: List[int] = None): + + # Device + self.device = self.language_model.device + + # Tokenize + if self.lm_type == 'dec': + texts = [text + " \n " for text in texts] + + source_encoding = self.tokenizer(texts, + padding='max_length', + max_length=self.max_length, + truncation=True, + 
return_attention_mask=True, + add_special_tokens=True, + return_tensors="pt") + + source_input_ids = source_encoding.input_ids.to(self.device) + source_attention_mask = source_encoding.attention_mask.to(self.device) + + if self.lm_type == 'encdec': + outputs = self.language_model.generate( + source_input_ids, + max_length=max_length, + num_beams=num_beams, + do_sample=do_sample, + bad_words_ids=bad_words_ids, + ) + elif self.lm_type == 'dec': + outputs = self.language_model.generate( + input_ids=source_input_ids, + attention_mask=source_attention_mask, + pad_token_id=self.tokenizer.pad_token_id, + do_sample=do_sample, + max_new_tokens=max_length) + self.tokenizer.padding_side = 'left' + + outputs_string = self.tokenizer.batch_decode(outputs, + skip_special_tokens=True) + + print(texts[:2]) + print(outputs_string[:2]) + + outputs_tokens, cleaned_text = self.motion_string_to_token( + outputs_string) + + return outputs_tokens, cleaned_text + + def generate_conditional(self, + texts: Optional[List[str]] = None, + motion_tokens: Optional[Tensor] = None, + lengths: Optional[List[int]] = None, + task: str = "t2m", + with_len: bool = False, + stage: str = 'train', + tasks: dict = None): + + self.device = self.language_model.device + + if task in ["t2m", "m2m", "pred", "inbetween"]: + + if task == "t2m": + assert texts is not None + motion_strings = [''] * len(texts) + if not with_len: + if tasks is None: + tasks = [{ + 'input': + ['Generate motion: '], + 'output': [''] + }] * len(texts) + + lengths = [0] * len(texts) + else: + tasks = [{ + 'input': [ + 'Generate motion with frames: ' + ], + 'output': [''] + }] * len(texts) + + elif task == "pred": + assert motion_tokens is not None and lengths is not None + texts = [''] * len(lengths) + tasks = [{ + 'input': ['Predict motion: '], + 'output': [''] + }] * len(lengths) + + motion_strings_old = self.motion_token_to_string( + motion_tokens, lengths) + motion_strings = [] + for i, length in enumerate(lengths): + split = length // 5 + motion_strings.append( + '>'.join(motion_strings_old[i].split('>')[:split]) + + '>') + + elif task == "inbetween": + assert motion_tokens is not None and lengths is not None + texts = [''] * len(lengths) + tasks = [{ + 'input': [ + "Complete the masked motion: " + ], + 'output': [''] + }] * len(lengths) + motion_strings = self.motion_token_to_string( + motion_tokens, lengths) + + inputs, outputs = self.template_fulfill(tasks, lengths, + motion_strings, texts, + stage) + + outputs_tokens, cleaned_text = self.generate_direct(inputs, + max_length=128, + num_beams=1, + do_sample=True) + + return outputs_tokens + + elif task == "m2t": + assert motion_tokens is not None and lengths is not None + + motion_strings = self.motion_token_to_string( + motion_tokens, lengths) + + if not with_len: + tasks = [{ + 'input': ['Generate text: '], + 'output': [''] + }] * len(lengths) + else: + tasks = [{ + 'input': [ + 'Generate text with frames: ' + ], + 'output': [''] + }] * len(lengths) + + texts = [''] * len(lengths) + + inputs, outputs = self.template_fulfill(tasks, lengths, + motion_strings, texts) + outputs_tokens, cleaned_text = self.generate_direct( + inputs, + max_length=40, + num_beams=1, + do_sample=False, + # bad_words_ids=self.bad_words_ids + ) + return cleaned_text + + def motion_token_to_string(self, motion_token: Tensor, lengths: List[int]): + motion_string = [] + for i in range(len(motion_token)): + motion_i = motion_token[i].cpu( + ) if motion_token[i].device.type == 'cuda' else motion_token[i] + motion_list = 
motion_i.tolist()[:lengths[i]] + motion_string.append( + (f'' + + ''.join([f'' for i in motion_list]) + + f'')) + return motion_string + + def motion_token_list_to_string(self, motion_token: Tensor): + motion_string = [] + for i in range(len(motion_token)): + motion_i = motion_token[i].cpu( + ) if motion_token[i].device.type == 'cuda' else motion_token[i] + motion_list = motion_i.tolist() + motion_string.append( + (f'' + + ''.join([f'' for i in motion_list]) + + f'')) + return motion_string + + def motion_string_to_token(self, motion_string: List[str]): + motion_tokens = [] + output_string = [] + for i in range(len(motion_string)): + string = self.get_middle_str( + motion_string[i], f'', + f'') + string_list = string.split('><') + token_list = [ + int(i.split('_')[-1].replace('>', '')) + for i in string_list[1:-1] + ] + if len(token_list) == 0: + token_list = [0] + token_list_padded = torch.tensor(token_list, + dtype=int).to(self.device) + motion_tokens.append(token_list_padded) + output_string.append(motion_string[i].replace( + string, '')) + + return motion_tokens, output_string + + def placeholder_fulfill(self, prompt: str, length: int, motion_string: str, + text: str): + + seconds = math.floor(length / self.framerate) + motion_splited = motion_string.split('>') + token_length = length / self.down_t + predict_head = int(token_length * self.predict_ratio + 1) + masked_head = int(token_length * self.inbetween_ratio + 1) + masked_tail = int(token_length * (1 - self.inbetween_ratio) + 1) + + motion_predict_head = '>'.join( + motion_splited[:predict_head] + ) + f'>' + motion_predict_last = f'' + '>'.join( + motion_splited[predict_head:]) + + motion_masked = '>'.join( + motion_splited[:masked_head] + ) + '>' + f'' * ( + masked_tail - masked_head) + '>'.join(motion_splited[masked_tail:]) + + if random.random() < self.quota_ratio: + text = f'\"{text}\"' + + prompt = prompt.replace('', text).replace( + '', + motion_string).replace('', f'{length}').replace( + '', '%.1f' % seconds).replace( + '', motion_predict_head).replace( + '', + motion_predict_last).replace( + '', motion_masked) + + return prompt + + def template_fulfill(self, + tasks, + lengths, + motion_strings, + texts, + stage='test'): + inputs = [] + outputs = [] + for i in range(len(lengths)): + input_template = random.choice(tasks[i]['input']) + output_template = random.choice(tasks[i]['output']) + length = lengths[i] + inputs.append( + self.placeholder_fulfill(input_template, length, + motion_strings[i], texts[i])) + outputs.append( + self.placeholder_fulfill(output_template, length, + motion_strings[i], texts[i])) + + return inputs, outputs + + def get_middle_str(self, content, startStr, endStr): + try: + startIndex = content.index(startStr) + if startIndex >= 0: + startIndex += len(startStr) + endIndex = content.index(endStr) + except: + return f'' + + return f'' + content[ + startIndex:endIndex] + f'' + + def random_spans_noise_mask(self, length): + # From https://github.com/google-research/text-to-text-transfer-transformer/blob/84f8bcc14b5f2c03de51bd3587609ba8f6bbd1cd/t5/data/preprocessors.py + + orig_length = length + + num_noise_tokens = int(np.round(length * self.noise_density)) + # avoid degeneracy by ensuring positive numbers of noise and nonnoise tokens. 
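+        # For illustration: with length=256, noise_density=0.15 and
+        # mean_noise_span_length=3, this gives round(256 * 0.15) = 38 noise tokens
+        # split into round(38 / 3) = 13 spans, which are interleaved with non-noise
+        # spans below (T5-style span corruption).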
+ num_noise_tokens = min(max(num_noise_tokens, 1), length - 1) + num_noise_spans = int( + np.round(num_noise_tokens / self.mean_noise_span_length)) + + # avoid degeneracy by ensuring positive number of noise spans + num_noise_spans = max(num_noise_spans, 1) + num_nonnoise_tokens = length - num_noise_tokens + + # pick the lengths of the noise spans and the non-noise spans + def _random_segmentation(num_items, num_segments): + """Partition a sequence of items randomly into non-empty segments. + Args: + num_items: an integer scalar > 0 + num_segments: an integer scalar in [1, num_items] + Returns: + a Tensor with shape [num_segments] containing positive integers that add + up to num_items + """ + mask_indices = np.arange(num_items - 1) < (num_segments - 1) + np.random.shuffle(mask_indices) + first_in_segment = np.pad(mask_indices, [[1, 0]]) + segment_id = np.cumsum(first_in_segment) + # count length of sub segments assuming that list is sorted + _, segment_length = np.unique(segment_id, return_counts=True) + return segment_length + + noise_span_lengths = _random_segmentation(num_noise_tokens, + num_noise_spans) + nonnoise_span_lengths = _random_segmentation(num_nonnoise_tokens, + num_noise_spans) + + interleaved_span_lengths = np.reshape( + np.stack([nonnoise_span_lengths, noise_span_lengths], axis=1), + [num_noise_spans * 2], + ) + span_starts = np.cumsum(interleaved_span_lengths)[:-1] + span_start_indicator = np.zeros((length, ), dtype=np.int8) + span_start_indicator[span_starts] = True + span_num = np.cumsum(span_start_indicator) + is_noise = np.equal(span_num % 2, 1) + + return is_noise[:orig_length] + + def create_sentinel_ids(self, mask_indices): + # From https://github.com/huggingface/transformers/blob/main/examples/flax/language-modeling/run_t5_mlm_flax.py + start_indices = mask_indices - np.roll(mask_indices, 1, + axis=-1) * mask_indices + start_indices[:, 0] = mask_indices[:, 0] + + sentinel_ids = np.where(start_indices != 0, + np.cumsum(start_indices, axis=-1), + start_indices) + sentinel_ids = np.where(sentinel_ids != 0, + (len(self.tokenizer) - sentinel_ids), 0) + sentinel_ids -= mask_indices - start_indices + + return sentinel_ids + + def filter_input_ids(self, input_ids, sentinel_ids): + # From https://github.com/huggingface/transformers/blob/main/examples/flax/language-modeling/run_t5_mlm_flax.py + batch_size = input_ids.shape[0] + + input_ids_full = np.where(sentinel_ids != 0, sentinel_ids, + input_ids.to('cpu')) + + # input_ids tokens and sentinel tokens are >= 0, tokens < 0 are + # masked tokens coming after sentinel tokens and should be removed + input_ids = input_ids_full[input_ids_full >= 0].reshape( + (batch_size, -1)) + input_ids = np.concatenate( + [ + input_ids, + np.full((batch_size, 1), + self.tokenizer.eos_token_id, + dtype=np.int32), + ], + axis=-1, + ) + + input_ids = torch.tensor(input_ids, device=self.device) + + return input_ids diff --git a/mGPT/archs/mgpt_vq.py b/mGPT/archs/mgpt_vq.py new file mode 100644 index 0000000..077dc48 --- /dev/null +++ b/mGPT/archs/mgpt_vq.py @@ -0,0 +1,190 @@ +# Partially from https://github.com/Mael-zys/T2M-GPT + +from typing import List, Optional, Union +import torch +import torch.nn as nn +from torch import Tensor, nn +from torch.distributions.distribution import Distribution +from .tools.resnet import Resnet1D +from .tools.quantize_cnn import QuantizeEMAReset, Quantizer, QuantizeEMA, QuantizeReset +from collections import OrderedDict + + +class VQVae(nn.Module): + + def __init__(self, + nfeats: int, + 
quantizer: str = "ema_reset", + code_num=512, + code_dim=512, + output_emb_width=512, + down_t=3, + stride_t=2, + width=512, + depth=3, + dilation_growth_rate=3, + norm=None, + activation: str = "relu", + **kwargs) -> None: + + super().__init__() + + self.code_dim = code_dim + + self.encoder = Encoder(nfeats, + output_emb_width, + down_t, + stride_t, + width, + depth, + dilation_growth_rate, + activation=activation, + norm=norm) + + self.decoder = Decoder(nfeats, + output_emb_width, + down_t, + stride_t, + width, + depth, + dilation_growth_rate, + activation=activation, + norm=norm) + + if quantizer == "ema_reset": + self.quantizer = QuantizeEMAReset(code_num, code_dim, mu=0.99) + elif quantizer == "orig": + self.quantizer = Quantizer(code_num, code_dim, beta=1.0) + elif quantizer == "ema": + self.quantizer = QuantizeEMA(code_num, code_dim, mu=0.99) + elif quantizer == "reset": + self.quantizer = QuantizeReset(code_num, code_dim) + + def preprocess(self, x): + # (bs, T, Jx3) -> (bs, Jx3, T) + x = x.permute(0, 2, 1) + return x + + def postprocess(self, x): + # (bs, Jx3, T) -> (bs, T, Jx3) + x = x.permute(0, 2, 1) + return x + + def forward(self, features: Tensor): + # Preprocess + x_in = self.preprocess(features) + + # Encode + x_encoder = self.encoder(x_in) + + # quantization + x_quantized, loss, perplexity = self.quantizer(x_encoder) + + # decoder + x_decoder = self.decoder(x_quantized) + x_out = self.postprocess(x_decoder) + + return x_out, loss, perplexity + + def encode( + self, + features: Tensor, + ) -> Union[Tensor, Distribution]: + + N, T, _ = features.shape + x_in = self.preprocess(features) + x_encoder = self.encoder(x_in) + x_encoder = self.postprocess(x_encoder) + x_encoder = x_encoder.contiguous().view(-1, + x_encoder.shape[-1]) # (NT, C) + code_idx = self.quantizer.quantize(x_encoder) + code_idx = code_idx.view(N, -1) + + # latent, dist + return code_idx, None + + def decode(self, z: Tensor): + + x_d = self.quantizer.dequantize(z) + x_d = x_d.view(1, -1, self.code_dim).permute(0, 2, 1).contiguous() + + # decoder + x_decoder = self.decoder(x_d) + x_out = self.postprocess(x_decoder) + return x_out + + +class Encoder(nn.Module): + + def __init__(self, + input_emb_width=3, + output_emb_width=512, + down_t=3, + stride_t=2, + width=512, + depth=3, + dilation_growth_rate=3, + activation='relu', + norm=None): + super().__init__() + + blocks = [] + filter_t, pad_t = stride_t * 2, stride_t // 2 + blocks.append(nn.Conv1d(input_emb_width, width, 3, 1, 1)) + blocks.append(nn.ReLU()) + + for i in range(down_t): + input_dim = width + block = nn.Sequential( + nn.Conv1d(input_dim, width, filter_t, stride_t, pad_t), + Resnet1D(width, + depth, + dilation_growth_rate, + activation=activation, + norm=norm), + ) + blocks.append(block) + blocks.append(nn.Conv1d(width, output_emb_width, 3, 1, 1)) + self.model = nn.Sequential(*blocks) + + def forward(self, x): + return self.model(x) + + +class Decoder(nn.Module): + + def __init__(self, + input_emb_width=3, + output_emb_width=512, + down_t=3, + stride_t=2, + width=512, + depth=3, + dilation_growth_rate=3, + activation='relu', + norm=None): + super().__init__() + blocks = [] + + filter_t, pad_t = stride_t * 2, stride_t // 2 + blocks.append(nn.Conv1d(output_emb_width, width, 3, 1, 1)) + blocks.append(nn.ReLU()) + for i in range(down_t): + out_dim = width + block = nn.Sequential( + Resnet1D(width, + depth, + dilation_growth_rate, + reverse_dilation=True, + activation=activation, + norm=norm), nn.Upsample(scale_factor=2, + mode='nearest'), + 
nn.Conv1d(width, out_dim, 3, 1, 1)) + blocks.append(block) + blocks.append(nn.Conv1d(width, width, 3, 1, 1)) + blocks.append(nn.ReLU()) + blocks.append(nn.Conv1d(width, input_emb_width, 3, 1, 1)) + self.model = nn.Sequential(*blocks) + + def forward(self, x): + return self.model(x) diff --git a/mGPT/archs/tm2t_evaluator.py b/mGPT/archs/tm2t_evaluator.py new file mode 100644 index 0000000..4654440 --- /dev/null +++ b/mGPT/archs/tm2t_evaluator.py @@ -0,0 +1,111 @@ +import torch +import torch.nn as nn +from torch.nn.utils.rnn import pack_padded_sequence + + +class MovementConvEncoder(nn.Module): + def __init__(self, input_size, hidden_size, output_size): + super(MovementConvEncoder, self).__init__() + self.main = nn.Sequential( + nn.Conv1d(input_size, hidden_size, 4, 2, 1), + nn.Dropout(0.2, inplace=True), + nn.LeakyReLU(0.2, inplace=True), + nn.Conv1d(hidden_size, output_size, 4, 2, 1), + nn.Dropout(0.2, inplace=True), + nn.LeakyReLU(0.2, inplace=True), + ) + self.out_net = nn.Linear(output_size, output_size) + # self.main.apply(init_weight) + # self.out_net.apply(init_weight) + + def forward(self, inputs): + inputs = inputs.permute(0, 2, 1) + outputs = self.main(inputs).permute(0, 2, 1) + # print(outputs.shape) + return self.out_net(outputs) + + +class MotionEncoderBiGRUCo(nn.Module): + def __init__(self, input_size, hidden_size, output_size): + super(MotionEncoderBiGRUCo, self).__init__() + + self.input_emb = nn.Linear(input_size, hidden_size) + self.gru = nn.GRU( + hidden_size, hidden_size, batch_first=True, bidirectional=True + ) + self.output_net = nn.Sequential( + nn.Linear(hidden_size * 2, hidden_size), + nn.LayerNorm(hidden_size), + nn.LeakyReLU(0.2, inplace=True), + nn.Linear(hidden_size, output_size), + ) + + # self.input_emb.apply(init_weight) + # self.output_net.apply(init_weight) + self.hidden_size = hidden_size + self.hidden = nn.Parameter( + torch.randn((2, 1, self.hidden_size), requires_grad=True) + ) + + # input(batch_size, seq_len, dim) + def forward(self, inputs, m_lens): + num_samples = inputs.shape[0] + + input_embs = self.input_emb(inputs) + hidden = self.hidden.repeat(1, num_samples, 1) + + cap_lens = m_lens.data.tolist() + + # emb = pack_padded_sequence(input=input_embs, lengths=cap_lens, batch_first=True) + emb = input_embs + + gru_seq, gru_last = self.gru(emb, hidden) + + gru_last = torch.cat([gru_last[0], gru_last[1]], dim=-1) + + return self.output_net(gru_last) + + +class TextEncoderBiGRUCo(nn.Module): + def __init__(self, word_size, pos_size, hidden_size, output_size): + super(TextEncoderBiGRUCo, self).__init__() + + self.pos_emb = nn.Linear(pos_size, word_size) + self.input_emb = nn.Linear(word_size, hidden_size) + self.gru = nn.GRU( + hidden_size, hidden_size, batch_first=True, bidirectional=True + ) + self.output_net = nn.Sequential( + nn.Linear(hidden_size * 2, hidden_size), + nn.LayerNorm(hidden_size), + nn.LeakyReLU(0.2, inplace=True), + nn.Linear(hidden_size, output_size), + ) + + # self.input_emb.apply(init_weight) + # self.pos_emb.apply(init_weight) + # self.output_net.apply(init_weight) + # self.linear2.apply(init_weight) + # self.batch_size = batch_size + self.hidden_size = hidden_size + self.hidden = nn.Parameter( + torch.randn((2, 1, self.hidden_size), requires_grad=True) + ) + + # input(batch_size, seq_len, dim) + def forward(self, word_embs, pos_onehot, cap_lens): + num_samples = word_embs.shape[0] + + pos_embs = self.pos_emb(pos_onehot) + inputs = word_embs + pos_embs + input_embs = self.input_emb(inputs) + hidden = self.hidden.repeat(1, 
num_samples, 1) + + cap_lens = cap_lens.data.tolist() + emb = pack_padded_sequence(input=input_embs, lengths=cap_lens, batch_first=True) + + gru_seq, gru_last = self.gru(emb, hidden) + + gru_last = torch.cat([gru_last[0], gru_last[1]], dim=-1) + + return self.output_net(gru_last) diff --git a/mGPT/archs/tools/embeddings.py b/mGPT/archs/tools/embeddings.py new file mode 100644 index 0000000..b53470c --- /dev/null +++ b/mGPT/archs/tools/embeddings.py @@ -0,0 +1,322 @@ +# This file is taken from signjoey repository +import math + +import torch +from torch import Tensor, nn + + +def get_activation(activation_type): + if activation_type == "relu": + return nn.ReLU() + elif activation_type == "relu6": + return nn.ReLU6() + elif activation_type == "prelu": + return nn.PReLU() + elif activation_type == "selu": + return nn.SELU() + elif activation_type == "celu": + return nn.CELU() + elif activation_type == "gelu": + return nn.GELU() + elif activation_type == "sigmoid": + return nn.Sigmoid() + elif activation_type == "softplus": + return nn.Softplus() + elif activation_type == "softshrink": + return nn.Softshrink() + elif activation_type == "softsign": + return nn.Softsign() + elif activation_type == "tanh": + return nn.Tanh() + elif activation_type == "tanhshrink": + return nn.Tanhshrink() + else: + raise ValueError("Unknown activation type {}".format(activation_type)) + + +class MaskedNorm(nn.Module): + """ + Original Code from: + https://discuss.pytorch.org/t/batchnorm-for-different-sized-samples-in-batch/44251/8 + """ + + def __init__(self, norm_type, num_groups, num_features): + super().__init__() + self.norm_type = norm_type + if self.norm_type == "batch": + self.norm = nn.BatchNorm1d(num_features=num_features) + elif self.norm_type == "group": + self.norm = nn.GroupNorm(num_groups=num_groups, num_channels=num_features) + elif self.norm_type == "layer": + self.norm = nn.LayerNorm(normalized_shape=num_features) + else: + raise ValueError("Unsupported Normalization Layer") + + self.num_features = num_features + + def forward(self, x: Tensor, mask: Tensor): + if self.training: + reshaped = x.reshape([-1, self.num_features]) + reshaped_mask = mask.reshape([-1, 1]) > 0 + selected = torch.masked_select(reshaped, reshaped_mask).reshape( + [-1, self.num_features] + ) + batch_normed = self.norm(selected) + scattered = reshaped.masked_scatter(reshaped_mask, batch_normed) + return scattered.reshape([x.shape[0], -1, self.num_features]) + else: + reshaped = x.reshape([-1, self.num_features]) + batched_normed = self.norm(reshaped) + return batched_normed.reshape([x.shape[0], -1, self.num_features]) + + +# TODO (Cihan): Spatial and Word Embeddings are pretty much the same +# We might as well convert them into a single module class. +# Only difference is the lut vs linear layers. +class Embeddings(nn.Module): + + """ + Simple embeddings class + """ + + # pylint: disable=unused-argument + def __init__( + self, + embedding_dim: int = 64, + num_heads: int = 8, + scale: bool = False, + scale_factor: float = None, + norm_type: str = None, + activation_type: str = None, + vocab_size: int = 0, + padding_idx: int = 1, + freeze: bool = False, + **kwargs + ): + """ + Create new embeddings for the vocabulary. + Use scaling for the Transformer. 
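+        "Scaling" multiplies the looked-up embedding by a constant factor, which
+        defaults to sqrt(embedding_dim) as in the Transformer paper unless an
+        explicit scale_factor is supplied.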
+ + :param embedding_dim: + :param scale: + :param vocab_size: + :param padding_idx: + :param freeze: freeze the embeddings during training + """ + super().__init__() + + self.embedding_dim = embedding_dim + self.vocab_size = vocab_size + self.lut = nn.Embedding(vocab_size, self.embedding_dim, padding_idx=padding_idx) + + self.norm_type = norm_type + if self.norm_type: + self.norm = MaskedNorm( + norm_type=norm_type, num_groups=num_heads, num_features=embedding_dim + ) + + self.activation_type = activation_type + if self.activation_type: + self.activation = get_activation(activation_type) + + self.scale = scale + if self.scale: + if scale_factor: + self.scale_factor = scale_factor + else: + self.scale_factor = math.sqrt(self.embedding_dim) + + if freeze: + freeze_params(self) + + # pylint: disable=arguments-differ + def forward(self, x: Tensor, mask: Tensor = None) -> Tensor: + """ + Perform lookup for input `x` in the embedding table. + + :param mask: token masks + :param x: index in the vocabulary + :return: embedded representation for `x` + """ + + x = self.lut(x) + + if self.norm_type: + x = self.norm(x, mask) + + if self.activation_type: + x = self.activation(x) + + if self.scale: + return x * self.scale_factor + else: + return x + + def __repr__(self): + return "%s(embedding_dim=%d, vocab_size=%d)" % ( + self.__class__.__name__, + self.embedding_dim, + self.vocab_size, + ) + + +class SpatialEmbeddings(nn.Module): + + """ + Simple Linear Projection Layer + (For encoder outputs to predict glosses) + """ + + # pylint: disable=unused-argument + def __init__( + self, + embedding_dim: int, + input_size: int, + num_heads: int, + freeze: bool = False, + norm_type: str = "batch", + activation_type: str = "softsign", + scale: bool = False, + scale_factor: float = None, + **kwargs + ): + """ + Create new embeddings for the vocabulary. + Use scaling for the Transformer. + + :param embedding_dim: + :param input_size: + :param freeze: freeze the embeddings during training + """ + super().__init__() + + self.embedding_dim = embedding_dim + self.input_size = input_size + self.ln = nn.Linear(self.input_size, self.embedding_dim) + + self.norm_type = norm_type + if self.norm_type: + self.norm = MaskedNorm( + norm_type=norm_type, num_groups=num_heads, num_features=embedding_dim + ) + + self.activation_type = activation_type + if self.activation_type: + self.activation = get_activation(activation_type) + + self.scale = scale + if self.scale: + if scale_factor: + self.scale_factor = scale_factor + else: + self.scale_factor = math.sqrt(self.embedding_dim) + + if freeze: + freeze_params(self) + + # pylint: disable=arguments-differ + def forward(self, x: Tensor, mask: Tensor) -> Tensor: + """ + :param mask: frame masks + :param x: input frame features + :return: embedded representation for `x` + """ + + x = self.ln(x) + + if self.norm_type: + x = self.norm(x, mask) + + if self.activation_type: + x = self.activation(x) + + if self.scale: + return x * self.scale_factor + else: + return x + + def __repr__(self): + return "%s(embedding_dim=%d, input_size=%d)" % ( + self.__class__.__name__, + self.embedding_dim, + self.input_size, + ) + +def get_timestep_embedding( + timesteps: torch.Tensor, + embedding_dim: int, + flip_sin_to_cos: bool = False, + downscale_freq_shift: float = 1, + scale: float = 1, + max_period: int = 10000, +): + """ + This matches the implementation in Denoising Diffusion Probabilistic Models: Create sinusoidal timestep embeddings. 
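+    Concretely, with half_dim = embedding_dim // 2 and frequencies
+    w_i = exp(-ln(max_period) * i / (half_dim - downscale_freq_shift)),
+    each timestep t is mapped to the concatenation of sin(t * w_i) and cos(t * w_i)
+    over all i (optionally scaled, sin/cos-swapped, and zero-padded below).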
+ + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param embedding_dim: the dimension of the output. :param max_period: controls the minimum frequency of the + embeddings. :return: an [N x dim] Tensor of positional embeddings. + """ + assert len(timesteps.shape) == 1, "Timesteps should be a 1d-array" + + half_dim = embedding_dim // 2 + exponent = -math.log(max_period) * torch.arange( + start=0, end=half_dim, dtype=torch.float32, device=timesteps.device + ) + exponent = exponent / (half_dim - downscale_freq_shift) + + emb = torch.exp(exponent) + emb = timesteps[:, None].float() * emb[None, :] + + # scale embeddings + emb = scale * emb + + # concat sine and cosine embeddings + emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=-1) + + # flip sine and cosine embeddings + if flip_sin_to_cos: + emb = torch.cat([emb[:, half_dim:], emb[:, :half_dim]], dim=-1) + + # zero pad + if embedding_dim % 2 == 1: + emb = torch.nn.functional.pad(emb, (0, 1, 0, 0)) + return emb + + +class TimestepEmbedding(nn.Module): + def __init__(self, channel: int, time_embed_dim: int, act_fn: str = "silu"): + super().__init__() + + self.linear_1 = nn.Linear(channel, time_embed_dim) + self.act = None + if act_fn == "silu": + self.act = nn.SiLU() + self.linear_2 = nn.Linear(time_embed_dim, time_embed_dim) + + def forward(self, sample): + sample = self.linear_1(sample) + + if self.act is not None: + sample = self.act(sample) + + sample = self.linear_2(sample) + return sample + + +class Timesteps(nn.Module): + def __init__(self, num_channels: int, flip_sin_to_cos: bool, downscale_freq_shift: float): + super().__init__() + self.num_channels = num_channels + self.flip_sin_to_cos = flip_sin_to_cos + self.downscale_freq_shift = downscale_freq_shift + + def forward(self, timesteps): + t_emb = get_timestep_embedding( + timesteps, + self.num_channels, + flip_sin_to_cos=self.flip_sin_to_cos, + downscale_freq_shift=self.downscale_freq_shift, + ) + return t_emb diff --git a/mGPT/archs/tools/quantize_cnn.py b/mGPT/archs/tools/quantize_cnn.py new file mode 100644 index 0000000..98ca858 --- /dev/null +++ b/mGPT/archs/tools/quantize_cnn.py @@ -0,0 +1,414 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F + +class QuantizeEMAReset(nn.Module): + def __init__(self, nb_code, code_dim, mu): + super().__init__() + self.nb_code = nb_code + self.code_dim = code_dim + self.mu = mu + self.reset_codebook() + + def reset_codebook(self): + self.init = False + self.code_sum = None + self.code_count = None + device = "cuda" if torch.cuda.is_available() else "cpu" + self.register_buffer('codebook', torch.zeros(self.nb_code, self.code_dim).to(device)) + + def _tile(self, x): + nb_code_x, code_dim = x.shape + if nb_code_x < self.nb_code: + n_repeats = (self.nb_code + nb_code_x - 1) // nb_code_x + std = 0.01 / np.sqrt(code_dim) + out = x.repeat(n_repeats, 1) + out = out + torch.randn_like(out) * std + else : + out = x + return out + + def init_codebook(self, x): + out = self._tile(x) + self.codebook = out[:self.nb_code] + self.code_sum = self.codebook.clone() + self.code_count = torch.ones(self.nb_code, device=self.codebook.device) + self.init = True + + @torch.no_grad() + def compute_perplexity(self, code_idx) : + # Calculate new centres + code_onehot = torch.zeros(self.nb_code, code_idx.shape[0], device=code_idx.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, code_idx.shape[0]), 1) + + code_count = code_onehot.sum(dim=-1) # nb_code + prob = 
code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + return perplexity + + @torch.no_grad() + def update_codebook(self, x, code_idx): + + code_onehot = torch.zeros(self.nb_code, x.shape[0], device=x.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, x.shape[0]), 1) + + code_sum = torch.matmul(code_onehot, x) # nb_code, w + code_count = code_onehot.sum(dim=-1) # nb_code + + out = self._tile(x) + code_rand = out[:self.nb_code] + + # Update centres + self.code_sum = self.mu * self.code_sum + (1. - self.mu) * code_sum # w, nb_code + self.code_count = self.mu * self.code_count + (1. - self.mu) * code_count # nb_code + + usage = (self.code_count.view(self.nb_code, 1) >= 1.0).float() + code_update = self.code_sum.view(self.nb_code, self.code_dim) / self.code_count.view(self.nb_code, 1) + + self.codebook = usage * code_update + (1 - usage) * code_rand + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + + + return perplexity + + def preprocess(self, x): + # NCT -> NTC -> [NT, C] + x = x.permute(0, 2, 1).contiguous() + x = x.view(-1, x.shape[-1]) + return x + + def quantize(self, x): + # Calculate latent code x_l + k_w = self.codebook.t() + distance = torch.sum(x ** 2, dim=-1, keepdim=True) - 2 * torch.matmul(x, k_w) + torch.sum(k_w ** 2, dim=0, + keepdim=True) # (N * L, b) + _, code_idx = torch.min(distance, dim=-1) + return code_idx + + def dequantize(self, code_idx): + x = F.embedding(code_idx, self.codebook) + return x + + + def forward(self, x): + N, width, T = x.shape + + # Preprocess + x = self.preprocess(x) + + # Init codebook if not inited + if self.training and not self.init: + self.init_codebook(x) + + # quantize and dequantize through bottleneck + code_idx = self.quantize(x) + x_d = self.dequantize(code_idx) + + # Update embeddings + if self.training: + perplexity = self.update_codebook(x, code_idx) + else : + perplexity = self.compute_perplexity(code_idx) + + # Loss + commit_loss = F.mse_loss(x, x_d.detach()) + + # Passthrough + x_d = x + (x_d - x).detach() + + # Postprocess + x_d = x_d.view(N, T, -1).permute(0, 2, 1).contiguous() #(N, DIM, T) + + return x_d, commit_loss, perplexity + + + +class Quantizer(nn.Module): + def __init__(self, n_e, e_dim, beta): + super(Quantizer, self).__init__() + + self.e_dim = e_dim + self.n_e = n_e + self.beta = beta + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + def forward(self, z): + + N, width, T = z.shape + z = self.preprocess(z) + assert z.shape[-1] == self.e_dim + z_flattened = z.contiguous().view(-1, self.e_dim) + + # B x V + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.matmul(z_flattened, self.embedding.weight.t()) + # B x 1 + min_encoding_indices = torch.argmin(d, dim=1) + z_q = self.embedding(min_encoding_indices).view(z.shape) + + # compute loss for embedding + loss = torch.mean((z_q - z.detach())**2) + self.beta * \ + torch.mean((z_q.detach() - z)**2) + + # preserve gradients + z_q = z + (z_q - z).detach() + z_q = z_q.view(N, T, -1).permute(0, 2, 1).contiguous() #(N, DIM, T) + + min_encodings = F.one_hot(min_encoding_indices, self.n_e).type(z.dtype) + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean*torch.log(e_mean + 1e-10))) + return z_q, loss, perplexity + + def quantize(self, z): + + assert z.shape[-1] == self.e_dim + + 
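+        # Nearest-neighbour lookup: squared distances to every codebook entry are
+        # computed via the expansion ||z - e||^2 = ||z||^2 + ||e||^2 - 2 * z . e,
+        # so one matmul against the embedding weights covers the whole codebook.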
# B x V + d = torch.sum(z ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight ** 2, dim=1) - 2 * \ + torch.matmul(z, self.embedding.weight.t()) + # B x 1 + min_encoding_indices = torch.argmin(d, dim=1) + return min_encoding_indices + + def dequantize(self, indices): + + index_flattened = indices.view(-1) + z_q = self.embedding(index_flattened) + z_q = z_q.view(indices.shape + (self.e_dim, )).contiguous() + return z_q + + def preprocess(self, x): + # NCT -> NTC -> [NT, C] + x = x.permute(0, 2, 1).contiguous() + x = x.view(-1, x.shape[-1]) + return x + + + +class QuantizeReset(nn.Module): + def __init__(self, nb_code, code_dim): + super().__init__() + self.nb_code = nb_code + self.code_dim = code_dim + self.reset_codebook() + self.codebook = nn.Parameter(torch.randn(nb_code, code_dim)) + + def reset_codebook(self): + self.init = False + self.code_count = None + + def _tile(self, x): + nb_code_x, code_dim = x.shape + if nb_code_x < self.nb_code: + n_repeats = (self.nb_code + nb_code_x - 1) // nb_code_x + std = 0.01 / np.sqrt(code_dim) + out = x.repeat(n_repeats, 1) + out = out + torch.randn_like(out) * std + else : + out = x + return out + + def init_codebook(self, x): + out = self._tile(x) + self.codebook = nn.Parameter(out[:self.nb_code]) + self.code_count = torch.ones(self.nb_code, device=self.codebook.device) + self.init = True + + @torch.no_grad() + def compute_perplexity(self, code_idx) : + # Calculate new centres + code_onehot = torch.zeros(self.nb_code, code_idx.shape[0], device=code_idx.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, code_idx.shape[0]), 1) + + code_count = code_onehot.sum(dim=-1) # nb_code + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + return perplexity + + def update_codebook(self, x, code_idx): + + code_onehot = torch.zeros(self.nb_code, x.shape[0], device=x.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, x.shape[0]), 1) + + code_count = code_onehot.sum(dim=-1) # nb_code + + out = self._tile(x) + code_rand = out[:self.nb_code] + + # Update centres + self.code_count = code_count # nb_code + usage = (self.code_count.view(self.nb_code, 1) >= 1.0).float() + + self.codebook.data = usage * self.codebook.data + (1 - usage) * code_rand + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + + + return perplexity + + def preprocess(self, x): + # NCT -> NTC -> [NT, C] + x = x.permute(0, 2, 1).contiguous() + x = x.view(-1, x.shape[-1]) + return x + + def quantize(self, x): + # Calculate latent code x_l + k_w = self.codebook.t() + distance = torch.sum(x ** 2, dim=-1, keepdim=True) - 2 * torch.matmul(x, k_w) + torch.sum(k_w ** 2, dim=0, + keepdim=True) # (N * L, b) + _, code_idx = torch.min(distance, dim=-1) + return code_idx + + def dequantize(self, code_idx): + x = F.embedding(code_idx, self.codebook) + return x + + + def forward(self, x): + N, width, T = x.shape + # Preprocess + x = self.preprocess(x) + # Init codebook if not inited + if self.training and not self.init: + self.init_codebook(x) + # quantize and dequantize through bottleneck + code_idx = self.quantize(x) + x_d = self.dequantize(code_idx) + # Update embeddings + if self.training: + perplexity = self.update_codebook(x, code_idx) + else : + perplexity = self.compute_perplexity(code_idx) + + # Loss + commit_loss = F.mse_loss(x, x_d.detach()) + + # Passthrough + x_d = x + (x_d - x).detach() + + # Postprocess + x_d = x_d.view(N, T, 
-1).permute(0, 2, 1).contiguous() #(N, DIM, T) + + return x_d, commit_loss, perplexity + + +class QuantizeEMA(nn.Module): + def __init__(self, nb_code, code_dim, mu): + super().__init__() + self.nb_code = nb_code + self.code_dim = code_dim + self.mu = mu + self.reset_codebook() + + def reset_codebook(self): + self.init = False + self.code_sum = None + self.code_count = None + self.register_buffer('codebook', torch.zeros(self.nb_code, self.code_dim).cuda()) + + def _tile(self, x): + nb_code_x, code_dim = x.shape + if nb_code_x < self.nb_code: + n_repeats = (self.nb_code + nb_code_x - 1) // nb_code_x + std = 0.01 / np.sqrt(code_dim) + out = x.repeat(n_repeats, 1) + out = out + torch.randn_like(out) * std + else : + out = x + return out + + def init_codebook(self, x): + out = self._tile(x) + self.codebook = out[:self.nb_code] + self.code_sum = self.codebook.clone() + self.code_count = torch.ones(self.nb_code, device=self.codebook.device) + self.init = True + + @torch.no_grad() + def compute_perplexity(self, code_idx) : + # Calculate new centres + code_onehot = torch.zeros(self.nb_code, code_idx.shape[0], device=code_idx.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, code_idx.shape[0]), 1) + + code_count = code_onehot.sum(dim=-1) # nb_code + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + return perplexity + + @torch.no_grad() + def update_codebook(self, x, code_idx): + + code_onehot = torch.zeros(self.nb_code, x.shape[0], device=x.device) # nb_code, N * L + code_onehot.scatter_(0, code_idx.view(1, x.shape[0]), 1) + + code_sum = torch.matmul(code_onehot, x) # nb_code, w + code_count = code_onehot.sum(dim=-1) # nb_code + + # Update centres + self.code_sum = self.mu * self.code_sum + (1. - self.mu) * code_sum # w, nb_code + self.code_count = self.mu * self.code_count + (1. 
- self.mu) * code_count # nb_code + + code_update = self.code_sum.view(self.nb_code, self.code_dim) / self.code_count.view(self.nb_code, 1) + + self.codebook = code_update + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + + return perplexity + + def preprocess(self, x): + # NCT -> NTC -> [NT, C] + x = x.permute(0, 2, 1).contiguous() + x = x.view(-1, x.shape[-1]) + return x + + def quantize(self, x): + # Calculate latent code x_l + k_w = self.codebook.t() + distance = torch.sum(x ** 2, dim=-1, keepdim=True) - 2 * torch.matmul(x, k_w) + torch.sum(k_w ** 2, dim=0, + keepdim=True) # (N * L, b) + _, code_idx = torch.min(distance, dim=-1) + return code_idx + + def dequantize(self, code_idx): + x = F.embedding(code_idx, self.codebook) + return x + + + def forward(self, x): + N, width, T = x.shape + + # Preprocess + x = self.preprocess(x) + + # Init codebook if not inited + if self.training and not self.init: + self.init_codebook(x) + + # quantize and dequantize through bottleneck + code_idx = self.quantize(x) + x_d = self.dequantize(code_idx) + + # Update embeddings + if self.training: + perplexity = self.update_codebook(x, code_idx) + else : + perplexity = self.compute_perplexity(code_idx) + + # Loss + commit_loss = F.mse_loss(x, x_d.detach()) + + # Passthrough + x_d = x + (x_d - x).detach() + + # Postprocess + x_d = x_d.view(N, T, -1).permute(0, 2, 1).contiguous() #(N, DIM, T) + + return x_d, commit_loss, perplexity diff --git a/mGPT/archs/tools/resnet.py b/mGPT/archs/tools/resnet.py new file mode 100644 index 0000000..062346e --- /dev/null +++ b/mGPT/archs/tools/resnet.py @@ -0,0 +1,82 @@ +import torch.nn as nn +import torch + +class nonlinearity(nn.Module): + def __init__(self): + super().__init__() + + def forward(self, x): + # swish + return x * torch.sigmoid(x) + +class ResConv1DBlock(nn.Module): + def __init__(self, n_in, n_state, dilation=1, activation='silu', norm=None, dropout=None): + super().__init__() + padding = dilation + self.norm = norm + if norm == "LN": + self.norm1 = nn.LayerNorm(n_in) + self.norm2 = nn.LayerNorm(n_in) + elif norm == "GN": + self.norm1 = nn.GroupNorm(num_groups=32, num_channels=n_in, eps=1e-6, affine=True) + self.norm2 = nn.GroupNorm(num_groups=32, num_channels=n_in, eps=1e-6, affine=True) + elif norm == "BN": + self.norm1 = nn.BatchNorm1d(num_features=n_in, eps=1e-6, affine=True) + self.norm2 = nn.BatchNorm1d(num_features=n_in, eps=1e-6, affine=True) + + else: + self.norm1 = nn.Identity() + self.norm2 = nn.Identity() + + if activation == "relu": + self.activation1 = nn.ReLU() + self.activation2 = nn.ReLU() + + elif activation == "silu": + self.activation1 = nonlinearity() + self.activation2 = nonlinearity() + + elif activation == "gelu": + self.activation1 = nn.GELU() + self.activation2 = nn.GELU() + + + + self.conv1 = nn.Conv1d(n_in, n_state, 3, 1, padding, dilation) + self.conv2 = nn.Conv1d(n_state, n_in, 1, 1, 0,) + + + def forward(self, x): + x_orig = x + if self.norm == "LN": + x = self.norm1(x.transpose(-2, -1)) + x = self.activation1(x.transpose(-2, -1)) + else: + x = self.norm1(x) + x = self.activation1(x) + + x = self.conv1(x) + + if self.norm == "LN": + x = self.norm2(x.transpose(-2, -1)) + x = self.activation2(x.transpose(-2, -1)) + else: + x = self.norm2(x) + x = self.activation2(x) + + x = self.conv2(x) + x = x + x_orig + return x + +class Resnet1D(nn.Module): + def __init__(self, n_in, n_depth, dilation_growth_rate=1, reverse_dilation=True, activation='relu', norm=None): + 
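+        """A stack of n_depth ResConv1DBlock layers whose dilation grows as dilation_growth_rate ** depth; with reverse_dilation the blocks are applied from the largest dilation down to 1."""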
super().__init__()
+
+        blocks = [ResConv1DBlock(n_in, n_in, dilation=dilation_growth_rate ** depth, activation=activation, norm=norm) for depth in range(n_depth)]
+        if reverse_dilation:
+            blocks = blocks[::-1]
+
+        self.model = nn.Sequential(*blocks)
+
+    def forward(self, x):
+        return self.model(x)
\ No newline at end of file
diff --git a/mGPT/archs/tools/token_emb.py b/mGPT/archs/tools/token_emb.py
new file mode 100644
index 0000000..9bfa610
--- /dev/null
+++ b/mGPT/archs/tools/token_emb.py
@@ -0,0 +1,73 @@
+import torch
+from torch import Tensor, nn
+
+class NewTokenEmb(nn.Module):
+    """
+    For adding new tokens to a pretrained model
+    """
+
+    def __init__(self,
+                 old_embeddings: nn.Embedding,
+                 new_num_tokens: int = None) -> None:
+
+        super().__init__()
+
+        self.num_tokens = old_embeddings.num_embeddings + new_num_tokens
+        self.old_num_tokens = old_embeddings.num_embeddings
+        self.new_num_tokens = new_num_tokens
+        self.embedding_dim = old_embeddings.embedding_dim
+
+        # For text embeddings
+        self.text_embeddings = nn.Embedding(
+            self.num_tokens,
+            self.embedding_dim,
+            device=old_embeddings.weight.device,
+            dtype=old_embeddings.weight.dtype)
+        with torch.no_grad():
+            self.text_embeddings.weight.data[:old_embeddings.
+                                             num_embeddings] = old_embeddings.weight.data
+            self.text_embeddings.weight.data[
+                self.old_num_tokens:] = torch.zeros(
+                    self.new_num_tokens,
+                    self.embedding_dim,
+                    dtype=old_embeddings.weight.dtype,
+                    device=old_embeddings.weight.device)
+        self.text_embeddings.weight.requires_grad_(False)
+
+        # For motion embeddings
+        self.motion_embeddings = nn.Embedding(
+            new_num_tokens,
+            self.embedding_dim,
+            device=old_embeddings.weight.device,
+            dtype=old_embeddings.weight.dtype)
+        with torch.no_grad():
+            self.motion_embeddings.weight.data[:self.
+                                               old_num_tokens] = torch.zeros(
+                                                   new_num_tokens,
+                                                   self.embedding_dim,
+                                                   dtype=old_embeddings.weight.
+                                                   dtype,
+                                                   device=old_embeddings.
+                                                   weight.device)
+        self.word2motionProj = nn.Linear(self.old_num_tokens, new_num_tokens)
+
+    def forward(self, input: Tensor) -> Tensor:
+
+        with torch.no_grad():
+            self.motion_embeddings.weight.data[:self.
+                                               old_num_tokens] = torch.zeros(
+                                                   self.new_num_tokens,
+                                                   self.embedding_dim,
+                                                   dtype=self.motion_embeddings
+                                                   .weight.dtype,
+                                                   device=self.
+                                                   motion_embeddings.weight.
+                                                   device)
+
+            self.motion_embeddings.weight.data[
+                self.old_num_tokens:] = self.word2motionProj(
+                    self.text_embeddings.weight.data[:self.old_num_tokens].permute(
+                        1, 0)).permute(1, 0)
+
+        return self.text_embeddings(input) + self.motion_embeddings(input)
+
diff --git a/mGPT/archs/tools/transformer_layers.py b/mGPT/archs/tools/transformer_layers.py
new file mode 100644
index 0000000..7b53429
--- /dev/null
+++ b/mGPT/archs/tools/transformer_layers.py
@@ -0,0 +1,285 @@
+# -*- coding: utf-8 -*-
+import math
+import torch
+import torch.nn as nn
+from torch import Tensor
+
+# Taken from https://github.com/joeynmt/joeynmt/blob/fb66afcbe1beef9acd59283bcc084c4d4c1e6343/joeynmt/transformer_layers.py
+
+
+# pylint: disable=arguments-differ
+class MultiHeadedAttention(nn.Module):
+    """
+    Multi-Head Attention module from "Attention is All You Need"
+
+    Implementation modified from OpenNMT-py.
+    https://github.com/OpenNMT/OpenNMT-py
+    """
+
+    def __init__(self, num_heads: int, size: int, dropout: float = 0.1):
+        """
+        Create a multi-headed attention layer.
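+        Keys, values and queries are each projected by their own linear layer
+        into num_heads sub-spaces of size size // num_heads, combined with
+        scaled dot-product attention, and mapped back through a final output
+        projection.
+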
+ :param num_heads: the number of heads + :param size: model size (must be divisible by num_heads) + :param dropout: probability of dropping a unit + """ + super().__init__() + + assert size % num_heads == 0 + + self.head_size = head_size = size // num_heads + self.model_size = size + self.num_heads = num_heads + + self.k_layer = nn.Linear(size, num_heads * head_size) + self.v_layer = nn.Linear(size, num_heads * head_size) + self.q_layer = nn.Linear(size, num_heads * head_size) + + self.output_layer = nn.Linear(size, size) + self.softmax = nn.Softmax(dim=-1) + self.dropout = nn.Dropout(dropout) + + def forward(self, k: Tensor, v: Tensor, q: Tensor, mask: Tensor = None): + """ + Computes multi-headed attention. + + :param k: keys [B, M, D] with M being the sentence length. + :param v: values [B, M, D] + :param q: query [B, M, D] + :param mask: optional mask [B, 1, M] or [B, M, M] + :return: + """ + batch_size = k.size(0) + num_heads = self.num_heads + + # project the queries (q), keys (k), and values (v) + k = self.k_layer(k) + v = self.v_layer(v) + q = self.q_layer(q) + + # reshape q, k, v for our computation to [batch_size, num_heads, ..] + k = k.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) + v = v.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) + q = q.view(batch_size, -1, num_heads, self.head_size).transpose(1, 2) + + # compute scores + q = q / math.sqrt(self.head_size) + + # batch x num_heads x query_len x key_len + scores = torch.matmul(q, k.transpose(2, 3)) + # torch.Size([48, 8, 183, 183]) + + # apply the mask (if we have one) + # we add a dimension for the heads to it below: [B, 1, 1, M] + if mask is not None: + scores = scores.masked_fill(~mask.unsqueeze(1), float('-inf')) + + # apply attention dropout and compute context vectors. + attention = self.softmax(scores) + attention = self.dropout(attention) + # torch.Size([48, 8, 183, 183]) [bs, nheads, time, time] (for decoding) + + # v: torch.Size([48, 8, 183, 32]) (32 is 256/8) + # get context vector (select values with attention) and reshape + # back to [B, M, D] + context = torch.matmul(attention, v) # torch.Size([48, 8, 183, 32]) + context = context.transpose(1, 2).contiguous().view( + batch_size, -1, num_heads * self.head_size) + # torch.Size([48, 183, 256]) put back to 256 (combine the heads) + + output = self.output_layer(context) + # torch.Size([48, 183, 256]): 1 output per time step + + return output + + +# pylint: disable=arguments-differ +class PositionwiseFeedForward(nn.Module): + """ + Position-wise Feed-forward layer + Projects to ff_size and then back down to input_size. + """ + + def __init__(self, input_size, ff_size, dropout=0.1): + """ + Initializes position-wise feed-forward layer. + :param input_size: dimensionality of the input. + :param ff_size: dimensionality of intermediate representation + :param dropout: + """ + super().__init__() + self.layer_norm = nn.LayerNorm(input_size, eps=1e-6) + self.pwff_layer = nn.Sequential( + nn.Linear(input_size, ff_size), + nn.ReLU(), + nn.Dropout(dropout), + nn.Linear(ff_size, input_size), + nn.Dropout(dropout), + ) + + def forward(self, x): + x_norm = self.layer_norm(x) + return self.pwff_layer(x_norm) + x + + +# pylint: disable=arguments-differ +class PositionalEncoding(nn.Module): + """ + Pre-compute position encodings (PE). + In forward pass, this adds the position-encodings to the + input for as many time steps as necessary. + + Implementation based on OpenNMT-py. 
+ https://github.com/OpenNMT/OpenNMT-py + """ + + def __init__(self, size: int = 0, max_len: int = 5000): + """ + Positional Encoding with maximum length max_len + :param size: + :param max_len: + :param dropout: + """ + if size % 2 != 0: + raise ValueError("Cannot use sin/cos positional encoding with " + "odd dim (got dim={:d})".format(size)) + pe = torch.zeros(max_len, size) + position = torch.arange(0, max_len).unsqueeze(1) + div_term = torch.exp((torch.arange(0, size, 2, dtype=torch.float) * + -(math.log(10000.0) / size))) + pe[:, 0::2] = torch.sin(position.float() * div_term) + pe[:, 1::2] = torch.cos(position.float() * div_term) + pe = pe.unsqueeze(0) # shape: [1, size, max_len] + super().__init__() + self.register_buffer('pe', pe) + self.dim = size + + def forward(self, emb): + """Embed inputs. + Args: + emb (FloatTensor): Sequence of word vectors + ``(seq_len, batch_size, self.dim)`` + """ + # Add position encodings + return emb + self.pe[:, :emb.size(1)] + + +class TransformerEncoderLayer(nn.Module): + """ + One Transformer encoder layer has a Multi-head attention layer plus + a position-wise feed-forward layer. + """ + + def __init__(self, + size: int = 0, + ff_size: int = 0, + num_heads: int = 0, + dropout: float = 0.1): + """ + A single Transformer layer. + :param size: + :param ff_size: + :param num_heads: + :param dropout: + """ + super().__init__() + + self.layer_norm = nn.LayerNorm(size, eps=1e-6) + self.src_src_att = MultiHeadedAttention(num_heads, + size, + dropout=dropout) + self.feed_forward = PositionwiseFeedForward(size, + ff_size=ff_size, + dropout=dropout) + self.dropout = nn.Dropout(dropout) + self.size = size + + # pylint: disable=arguments-differ + def forward(self, x: Tensor, mask: Tensor) -> Tensor: + """ + Forward pass for a single transformer encoder layer. + First applies layer norm, then self attention, + then dropout with residual connection (adding the input to the result), + and then a position-wise feed-forward layer. + + :param x: layer input + :param mask: input mask + :return: output tensor + """ + x_norm = self.layer_norm(x) + h = self.src_src_att(x_norm, x_norm, x_norm, mask) + h = self.dropout(h) + x + o = self.feed_forward(h) + return o + + +class TransformerDecoderLayer(nn.Module): + """ + Transformer decoder layer. + + Consists of self-attention, source-attention, and feed-forward. + """ + + def __init__(self, + size: int = 0, + ff_size: int = 0, + num_heads: int = 0, + dropout: float = 0.1): + """ + Represents a single Transformer decoder layer. + + It attends to the source representation and the previous decoder states. + + :param size: model dimensionality + :param ff_size: size of the feed-forward intermediate layer + :param num_heads: number of heads + :param dropout: dropout to apply to input + """ + super().__init__() + self.size = size + + self.trg_trg_att = MultiHeadedAttention(num_heads, + size, + dropout=dropout) + self.src_trg_att = MultiHeadedAttention(num_heads, + size, + dropout=dropout) + + self.feed_forward = PositionwiseFeedForward(size, + ff_size=ff_size, + dropout=dropout) + + self.x_layer_norm = nn.LayerNorm(size, eps=1e-6) + self.dec_layer_norm = nn.LayerNorm(size, eps=1e-6) + + self.dropout = nn.Dropout(dropout) + + # pylint: disable=arguments-differ + def forward(self, + x: Tensor = None, + memory: Tensor = None, + src_mask: Tensor = None, + trg_mask: Tensor = None) -> Tensor: + """ + Forward pass of a single Transformer decoder layer. 
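+        Applies pre-norm target self-attention (masked by trg_mask), then
+        source-target attention over the encoder memory, and finally a
+        position-wise feed-forward layer, with dropout and residual
+        connections around each sub-layer.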
+ + :param x: inputs + :param memory: source representations + :param src_mask: source mask + :param trg_mask: target mask (so as to not condition on future steps) + :return: output tensor + """ + # decoder/target self-attention + x_norm = self.x_layer_norm(x) # torch.Size([48, 183, 256]) + h1 = self.trg_trg_att(x_norm, x_norm, x_norm, mask=trg_mask) + h1 = self.dropout(h1) + x + + # source-target attention + h1_norm = self.dec_layer_norm( + h1) # torch.Size([48, 183, 256]) (same for memory) + h2 = self.src_trg_att(memory, memory, h1_norm, mask=src_mask) + + # final position-wise feed-forward layer + o = self.feed_forward(self.dropout(h2) + h1) + + return o diff --git a/mGPT/callback.py b/mGPT/callback.py new file mode 100644 index 0000000..19d6421 --- /dev/null +++ b/mGPT/callback.py @@ -0,0 +1,200 @@ +import os +from pytorch_lightning import LightningModule, Trainer +from pytorch_lightning.callbacks import Callback, RichProgressBar, ModelCheckpoint + + +def build_callbacks(cfg, logger=None, phase='test', **kwargs): + callbacks = [] + logger = logger + + # Rich Progress Bar + callbacks.append(progressBar()) + + # Checkpoint Callback + if phase == 'train': + callbacks.extend(getCheckpointCallback(cfg, logger=logger, **kwargs)) + + return callbacks + +def getCheckpointCallback(cfg, logger=None, **kwargs): + callbacks = [] + # Logging + metric_monitor = { + "loss_total": "total/train", + "Train_jf": "recons/text2jfeats/train", + "Val_jf": "recons/text2jfeats/val", + "Train_rf": "recons/text2rfeats/train", + "Val_rf": "recons/text2rfeats/val", + "APE root": "Metrics/APE_root", + "APE mean pose": "Metrics/APE_mean_pose", + "AVE root": "Metrics/AVE_root", + "AVE mean pose": "Metrics/AVE_mean_pose", + "R_TOP_1": "Metrics/R_precision_top_1", + "R_TOP_2": "Metrics/R_precision_top_2", + "R_TOP_3": "Metrics/R_precision_top_3", + "gt_R_TOP_3": "Metrics/gt_R_precision_top_3", + "FID": "Metrics/FID", + "gt_FID": "Metrics/gt_FID", + "Diversity": "Metrics/Diversity", + "MM dist": "Metrics/Matching_score", + "Accuracy": "Metrics/accuracy", + } + callbacks.append( + progressLogger(logger,metric_monitor=metric_monitor,log_every_n_steps=1)) + + # Save 10 latest checkpoints + checkpointParams = { + 'dirpath': os.path.join(cfg.FOLDER_EXP, "checkpoints"), + 'filename': "{epoch}", + 'monitor': "step", + 'mode': "max", + 'every_n_epochs': cfg.LOGGER.VAL_EVERY_STEPS, + 'save_top_k': 8, + 'save_last': True, + 'save_on_train_epoch_end': True + } + callbacks.append(ModelCheckpoint(**checkpointParams)) + + # Save checkpoint every n*10 epochs + checkpointParams.update({ + 'every_n_epochs': + cfg.LOGGER.VAL_EVERY_STEPS * 10, + 'save_top_k': + -1, + 'save_last': + False + }) + callbacks.append(ModelCheckpoint(**checkpointParams)) + + metrics = cfg.METRIC.TYPE + metric_monitor_map = { + 'TemosMetric': { + 'Metrics/APE_root': { + 'abbr': 'APEroot', + 'mode': 'min' + }, + }, + 'TM2TMetrics': { + 'Metrics/FID': { + 'abbr': 'FID', + 'mode': 'min' + }, + 'Metrics/R_precision_top_3': { + 'abbr': 'R3', + 'mode': 'max' + } + }, + 'MRMetrics': { + 'Metrics/MPJPE': { + 'abbr': 'MPJPE', + 'mode': 'min' + } + }, + 'HUMANACTMetrics': { + 'Metrics/Accuracy': { + 'abbr': 'Accuracy', + 'mode': 'max' + } + }, + 'UESTCMetrics': { + 'Metrics/Accuracy': { + 'abbr': 'Accuracy', + 'mode': 'max' + } + }, + 'UncondMetrics': { + 'Metrics/FID': { + 'abbr': 'FID', + 'mode': 'min' + } + } + } + + checkpointParams.update({ + 'every_n_epochs': cfg.LOGGER.VAL_EVERY_STEPS, + 'save_top_k': 1, + }) + + for metric in metrics: + if metric in 
metric_monitor_map.keys(): + metric_monitors = metric_monitor_map[metric] + + # Delete R3 if training VAE + if cfg.TRAIN.STAGE == 'vae' and metric == 'TM2TMetrics': + del metric_monitors['Metrics/R_precision_top_3'] + + for metric_monitor in metric_monitors: + checkpointParams.update({ + 'filename': + metric_monitor_map[metric][metric_monitor]['mode'] + + "-" + + metric_monitor_map[metric][metric_monitor]['abbr'] + + "{ep}", + 'monitor': + metric_monitor, + 'mode': + metric_monitor_map[metric][metric_monitor]['mode'], + }) + callbacks.append( + ModelCheckpoint(**checkpointParams)) + return callbacks + +class progressBar(RichProgressBar): + def __init__(self, ): + super().__init__() + + def get_metrics(self, trainer, model): + # Don't show the version number + items = super().get_metrics(trainer, model) + items.pop("v_num", None) + return items + +class progressLogger(Callback): + def __init__(self, + logger, + metric_monitor: dict, + precision: int = 3, + log_every_n_steps: int = 1): + # Metric to monitor + self.logger = logger + self.metric_monitor = metric_monitor + self.precision = precision + self.log_every_n_steps = log_every_n_steps + + def on_train_start(self, trainer: Trainer, pl_module: LightningModule, + **kwargs) -> None: + self.logger.info("Training started") + + def on_train_end(self, trainer: Trainer, pl_module: LightningModule, + **kwargs) -> None: + self.logger.info("Training done") + + def on_validation_epoch_end(self, trainer: Trainer, + pl_module: LightningModule, **kwargs) -> None: + if trainer.sanity_checking: + self.logger.info("Sanity checking ok.") + + def on_train_epoch_end(self, + trainer: Trainer, + pl_module: LightningModule, + padding=False, + **kwargs) -> None: + metric_format = f"{{:.{self.precision}e}}" + line = f"Epoch {trainer.current_epoch}" + if padding: + line = f"{line:>{len('Epoch xxxx')}}" # Right padding + + if trainer.current_epoch % self.log_every_n_steps == 0: + metrics_str = [] + + losses_dict = trainer.callback_metrics + for metric_name, dico_name in self.metric_monitor.items(): + if dico_name in losses_dict: + metric = losses_dict[dico_name].item() + metric = metric_format.format(metric) + metric = f"{metric_name} {metric}" + metrics_str.append(metric) + + line = line + ": " + " ".join(metrics_str) + + self.logger.info(line) diff --git a/mGPT/config.py b/mGPT/config.py new file mode 100644 index 0000000..f537614 --- /dev/null +++ b/mGPT/config.py @@ -0,0 +1,217 @@ +import importlib +from argparse import ArgumentParser +from omegaconf import OmegaConf +from os.path import join as pjoin +import os +import glob + + +def get_module_config(cfg, filepath="./configs"): + """ + Load yaml config files from subfolders + """ + + yamls = glob.glob(pjoin(filepath, '*', '*.yaml')) + yamls = [y.replace(filepath, '') for y in yamls] + for yaml in yamls: + nodes = yaml.replace('.yaml', '').replace('/', '.') + nodes = nodes[1:] if nodes[0] == '.' 
else nodes + OmegaConf.update(cfg, nodes, OmegaConf.load('./configs' + yaml)) + + return cfg + + +def get_obj_from_str(string, reload=False): + """ + Get object from string + """ + + module, cls = string.rsplit(".", 1) + if reload: + module_imp = importlib.import_module(module) + importlib.reload(module_imp) + return getattr(importlib.import_module(module, package=None), cls) + + +def instantiate_from_config(config): + """ + Instantiate object from config + """ + if not "target" in config: + raise KeyError("Expected key `target` to instantiate.") + return get_obj_from_str(config["target"])(**config.get("params", dict())) + + +def resume_config(cfg: OmegaConf): + """ + Resume model and wandb + """ + + if cfg.TRAIN.RESUME: + resume = cfg.TRAIN.RESUME + if os.path.exists(resume): + # Checkpoints + cfg.TRAIN.PRETRAINED = pjoin(resume, "checkpoints", "last.ckpt") + # Wandb + wandb_files = os.listdir(pjoin(resume, "wandb", "latest-run")) + wandb_run = [item for item in wandb_files if "run-" in item][0] + cfg.LOGGER.WANDB.params.id = wandb_run.replace("run-","").replace(".wandb", "") + else: + raise ValueError("Resume path is not right.") + + return cfg + +def parse_args(phase="train"): + """ + Parse arguments and load config files + """ + + parser = ArgumentParser() + group = parser.add_argument_group("Training options") + + # Assets + group.add_argument( + "--cfg_assets", + type=str, + required=False, + default="./configs/assets.yaml", + help="config file for asset paths", + ) + + # Default config + if phase in ["train", "test"]: + cfg_defualt = "./configs/default.yaml" + elif phase == "render": + cfg_defualt = "./configs/render.yaml" + elif phase == "webui": + cfg_defualt = "./configs/webui.yaml" + + group.add_argument( + "--cfg", + type=str, + required=False, + default=cfg_defualt, + help="config file", + ) + + # Parse for each phase + if phase in ["train", "test"]: + group.add_argument("--batch_size", + type=int, + required=False, + help="training batch size") + group.add_argument("--num_nodes", + type=int, + required=False, + help="number of nodes") + group.add_argument("--device", + type=int, + nargs="+", + required=False, + help="training device") + group.add_argument("--task", + type=str, + required=False, + help="evaluation task type") + group.add_argument("--nodebug", + action="store_true", + required=False, + help="debug or not") + + + if phase == "demo": + group.add_argument( + "--example", + type=str, + required=False, + help="input text and lengths with txt format", + ) + group.add_argument( + "--out_dir", + type=str, + required=False, + help="output dir", + ) + group.add_argument("--task", + type=str, + required=False, + help="evaluation task type") + + if phase == "render": + group.add_argument("--npy", + type=str, + required=False, + default=None, + help="npy motion files") + group.add_argument("--dir", + type=str, + required=False, + default=None, + help="npy motion folder") + group.add_argument("--fps", + type=int, + required=False, + default=30, + help="render fps") + group.add_argument( + "--mode", + type=str, + required=False, + default="sequence", + help="render target: video, sequence, frame", + ) + + params = parser.parse_args() + + # Load yaml config files + OmegaConf.register_new_resolver("eval", eval) + cfg_assets = OmegaConf.load(params.cfg_assets) + cfg_base = OmegaConf.load(pjoin(cfg_assets.CONFIG_FOLDER, 'default.yaml')) + cfg_exp = OmegaConf.merge(cfg_base, OmegaConf.load(params.cfg)) + if not cfg_exp.FULL_CONFIG: + cfg_exp = get_module_config(cfg_exp, 
cfg_assets.CONFIG_FOLDER) + cfg = OmegaConf.merge(cfg_exp, cfg_assets) + + # Update config with arguments + if phase in ["train", "test"]: + cfg.TRAIN.BATCH_SIZE = params.batch_size if params.batch_size else cfg.TRAIN.BATCH_SIZE + cfg.DEVICE = params.device if params.device else cfg.DEVICE + cfg.NUM_NODES = params.num_nodes if params.num_nodes else cfg.NUM_NODES + cfg.model.params.task = params.task if params.task else cfg.model.params.task + cfg.DEBUG = not params.nodebug if params.nodebug is not None else cfg.DEBUG + + # Force no debug in test + if phase == "test": + cfg.DEBUG = False + cfg.DEVICE = [0] + print("Force no debugging and one gpu when testing") + + if phase == "demo": + cfg.DEMO.RENDER = params.render + cfg.DEMO.FRAME_RATE = params.frame_rate + cfg.DEMO.EXAMPLE = params.example + cfg.DEMO.TASK = params.task + cfg.TEST.FOLDER = params.out_dir if params.out_dir else cfg.TEST.FOLDER + os.makedirs(cfg.TEST.FOLDER, exist_ok=True) + + if phase == "render": + if params.npy: + cfg.RENDER.NPY = params.npy + cfg.RENDER.INPUT_MODE = "npy" + if params.dir: + cfg.RENDER.DIR = params.dir + cfg.RENDER.INPUT_MODE = "dir" + if params.fps: + cfg.RENDER.FPS = float(params.fps) + cfg.RENDER.MODE = params.mode + + # Debug mode + if cfg.DEBUG: + cfg.NAME = "debug--" + cfg.NAME + cfg.LOGGER.WANDB.params.offline = True + cfg.LOGGER.VAL_EVERY_STEPS = 1 + + # Resume config + cfg = resume_config(cfg) + + return cfg diff --git a/mGPT/data/HumanML3D.py b/mGPT/data/HumanML3D.py new file mode 100644 index 0000000..4f169fd --- /dev/null +++ b/mGPT/data/HumanML3D.py @@ -0,0 +1,123 @@ +import numpy as np +import torch +from os.path import join as pjoin +from .humanml.utils.word_vectorizer import WordVectorizer +from .humanml.scripts.motion_process import (process_file, recover_from_ric) +from . 
import BASEDataModule +from .humanml import Text2MotionDatasetEval, Text2MotionDataset, Text2MotionDatasetCB, MotionDataset, MotionDatasetVQ, Text2MotionDatasetToken, Text2MotionDatasetM2T +from .utils import humanml3d_collate + + +class HumanML3DDataModule(BASEDataModule): + def __init__(self, cfg, **kwargs): + + super().__init__(collate_fn=humanml3d_collate) + self.cfg = cfg + self.save_hyperparameters(logger=False) + + # Basic info of the dataset + cfg.DATASET.JOINT_TYPE = 'humanml3d' + self.name = "humanml3d" + self.njoints = 22 + + # Path to the dataset + data_root = cfg.DATASET.HUMANML3D.ROOT + self.hparams.data_root = data_root + self.hparams.text_dir = pjoin(data_root, "texts") + self.hparams.motion_dir = pjoin(data_root, 'new_joint_vecs') + + # Mean and std of the dataset + dis_data_root = pjoin(cfg.DATASET.HUMANML3D.MEAN_STD_PATH, 't2m', "VQVAEV3_CB1024_CMT_H1024_NRES3", "meta") + self.hparams.mean = np.load(pjoin(dis_data_root, "mean.npy")) + self.hparams.std = np.load(pjoin(dis_data_root, "std.npy")) + + # Mean and std for fair evaluation + dis_data_root_eval = pjoin(cfg.DATASET.HUMANML3D.MEAN_STD_PATH, 't2m', "Comp_v6_KLD01", "meta") + self.hparams.mean_eval = np.load(pjoin(dis_data_root_eval, "mean.npy")) + self.hparams.std_eval = np.load(pjoin(dis_data_root_eval, "std.npy")) + + # Length of the dataset + self.hparams.max_motion_length = cfg.DATASET.HUMANML3D.MAX_MOTION_LEN + self.hparams.min_motion_length = cfg.DATASET.HUMANML3D.MIN_MOTION_LEN + self.hparams.max_text_len = cfg.DATASET.HUMANML3D.MAX_TEXT_LEN + self.hparams.unit_length = cfg.DATASET.HUMANML3D.UNIT_LEN + + # Additional parameters + self.hparams.debug = cfg.DEBUG + self.hparams.stage = cfg.TRAIN.STAGE + self.hparams.w_vectorizer = WordVectorizer( + cfg.DATASET.WORD_VERTILIZER_PATH, "our_vab") + + # Dataset switch + self.DatasetEval = Text2MotionDatasetEval + + if cfg.TRAIN.STAGE == "vae": + if cfg.model.params.motion_vae.target.split('.')[-1].lower() == "vqvae": + self.hparams.win_size = 64 + self.Dataset = MotionDatasetVQ + else: + self.Dataset = MotionDataset + elif 'lm' in cfg.TRAIN.STAGE: + self.hparams.code_path = cfg.DATASET.CODE_PATH + self.hparams.task_path = cfg.DATASET.TASK_PATH + self.hparams.std_text = cfg.DATASET.HUMANML3D.STD_TEXT + self.Dataset = Text2MotionDatasetCB + elif cfg.TRAIN.STAGE == "token": + self.Dataset = Text2MotionDatasetToken + self.DatasetEval = Text2MotionDatasetToken + elif cfg.TRAIN.STAGE == "m2t": + self.Dataset = Text2MotionDatasetM2T + self.DatasetEval = Text2MotionDatasetM2T + else: + self.Dataset = Text2MotionDataset + + # Get additional info of the dataset + self._sample_set = self.get_sample_set(overrides={"split": "test", "tiny": True}) + self.nfeats = self._sample_set.nfeats + cfg.DATASET.NFEATS = self.nfeats + + + + def feats2joints(self, features): + mean = torch.tensor(self.hparams.mean).to(features) + std = torch.tensor(self.hparams.std).to(features) + features = features * std + mean + return recover_from_ric(features, self.njoints) + + def joints2feats(self, features): + features = process_file(features, self.njoints)[0] + return features + + def normalize(self, features): + mean = torch.tensor(self.hparams.mean).to(features) + std = torch.tensor(self.hparams.std).to(features) + features = (features - mean) / std + return features + + def denormalize(self, features): + mean = torch.tensor(self.hparams.mean).to(features) + std = torch.tensor(self.hparams.std).to(features) + features = features * std + mean + return features + + def renorm4t2m(self, features): + # 
renorm to t2m norms for using t2m evaluators + ori_mean = torch.tensor(self.hparams.mean).to(features) + ori_std = torch.tensor(self.hparams.std).to(features) + eval_mean = torch.tensor(self.hparams.mean_eval).to(features) + eval_std = torch.tensor(self.hparams.std_eval).to(features) + features = features * ori_std + ori_mean + features = (features - eval_mean) / eval_std + return features + + def mm_mode(self, mm_on=True): + if mm_on: + self.is_mm = True + self.name_list = self.test_dataset.name_list + self.mm_list = np.random.choice(self.name_list, + self.cfg.METRIC.MM_NUM_SAMPLES, + replace=False) + self.test_dataset.name_list = self.mm_list + else: + self.is_mm = False + self.test_dataset.name_list = self.name_list diff --git a/mGPT/data/Kit.py b/mGPT/data/Kit.py new file mode 100644 index 0000000..1eecaa1 --- /dev/null +++ b/mGPT/data/Kit.py @@ -0,0 +1,88 @@ +import numpy as np +import torch +from os.path import join as pjoin +from .humanml.utils.word_vectorizer import WordVectorizer +from .humanml.scripts.motion_process import (process_file, recover_from_ric) +from .HumanML3D import HumanML3DDataModule +from .humanml import Text2MotionDatasetEval, Text2MotionDataset, Text2MotionDatasetCB, MotionDataset, MotionDatasetVQ, Text2MotionDatasetToken + + +class KitDataModule(HumanML3DDataModule): + def __init__(self, cfg, **kwargs): + + super().__init__(cfg, **kwargs) + + # Basic info of the dataset + self.name = "kit" + self.njoints = 21 + + # Path to the dataset + data_root = cfg.DATASET.KIT.ROOT + self.hparams.data_root = data_root + self.hparams.text_dir = pjoin(data_root, "texts") + self.hparams.motion_dir = pjoin(data_root, 'new_joint_vecs') + + # Mean and std of the dataset + dis_data_root = pjoin(cfg.DATASET.KIT.MEAN_STD_PATH, 'kit', + "VQVAEV3_CB1024_CMT_H1024_NRES3", "meta") + self.hparams.mean = np.load(pjoin(dis_data_root, "mean.npy")) + self.hparams.std = np.load(pjoin(dis_data_root, "std.npy")) + + # Mean and std for fair evaluation + dis_data_root_eval = pjoin(cfg.DATASET.KIT.MEAN_STD_PATH, 't2m', + "Comp_v6_KLD005", "meta") + self.hparams.mean_eval = np.load(pjoin(dis_data_root_eval, "mean.npy")) + self.hparams.std_eval = np.load(pjoin(dis_data_root_eval, "std.npy")) + + # Length of the dataset + self.hparams.max_motion_length = cfg.DATASET.KIT.MAX_MOTION_LEN + self.hparams.min_motion_length = cfg.DATASET.KIT.MIN_MOTION_LEN + self.hparams.max_text_len = cfg.DATASET.KIT.MAX_TEXT_LEN + self.hparams.unit_length = cfg.DATASET.KIT.UNIT_LEN + + # Get additional info of the dataset + self._sample_set = self.get_sample_set(overrides={"split": "test", "tiny": True}) + self.nfeats = self._sample_set.nfeats + cfg.DATASET.NFEATS = self.nfeats + + def feats2joints(self, features): + mean = torch.tensor(self.hparams.mean).to(features) + std = torch.tensor(self.hparams.std).to(features) + features = features * std + mean + return recover_from_ric(features, self.njoints) + + def joints2feats(self, features): + features = process_file(features, self.njoints)[0] + # mean = torch.tensor(self.hparams.mean).to(features) + # std = torch.tensor(self.hparams.std).to(features) + # features = (features - mean) / std + return features + + def normalize(self, features): + mean = torch.tensor(self.hparams.mean).to(features) + std = torch.tensor(self.hparams.std).to(features) + features = (features - mean) / std + return features + + def renorm4t2m(self, features): + # renorm to t2m norms for using t2m evaluators + ori_mean = torch.tensor(self.hparams.mean).to(features) + ori_std = 
torch.tensor(self.hparams.std).to(features) + eval_mean = torch.tensor(self.hparams.mean_eval).to(features) + eval_std = torch.tensor(self.hparams.std_eval).to(features) + features = features * ori_std + ori_mean + features = (features - eval_mean) / eval_std + return features + + def mm_mode(self, mm_on=True): + # random select samples for mm + if mm_on: + self.is_mm = True + self.name_list = self.test_dataset.name_list + self.mm_list = np.random.choice(self.name_list, + self.cfg.METRIC.MM_NUM_SAMPLES, + replace=False) + self.test_dataset.name_list = self.mm_list + else: + self.is_mm = False + self.test_dataset.name_list = self.name_list diff --git a/mGPT/data/__init__.py b/mGPT/data/__init__.py new file mode 100644 index 0000000..948f49c --- /dev/null +++ b/mGPT/data/__init__.py @@ -0,0 +1,103 @@ +import pytorch_lightning as pl +from torch.utils.data import DataLoader + + +class BASEDataModule(pl.LightningDataModule): + def __init__(self, collate_fn): + super().__init__() + + self.dataloader_options = {"collate_fn": collate_fn} + self.persistent_workers = True + self.is_mm = False + + self._train_dataset = None + self._val_dataset = None + self._test_dataset = None + + def get_sample_set(self, overrides={}): + sample_params = self.hparams.copy() + sample_params.update(overrides) + return self.DatasetEval(**sample_params) + + @property + def train_dataset(self): + if self._train_dataset is None: + self._train_dataset = self.Dataset(split=self.cfg.TRAIN.SPLIT, + **self.hparams) + return self._train_dataset + + @property + def val_dataset(self): + if self._val_dataset is None: + params = self.hparams.copy() + params['code_path'] = None + params['split'] = self.cfg.EVAL.SPLIT + self._val_dataset = self.DatasetEval(**params) + return self._val_dataset + + @property + def test_dataset(self): + if self._test_dataset is None: + # self._test_dataset = self.DatasetEval(split=self.cfg.TEST.SPLIT, + # **self.hparams) + params = self.hparams.copy() + params['code_path'] = None + params['split'] = self.cfg.TEST.SPLIT + self._test_dataset = self.DatasetEval( **params) + return self._test_dataset + + def setup(self, stage=None): + # Use the getter the first time to load the data + if stage in (None, "fit"): + _ = self.train_dataset + _ = self.val_dataset + if stage in (None, "test"): + _ = self.test_dataset + + def train_dataloader(self): + dataloader_options = self.dataloader_options.copy() + dataloader_options["batch_size"] = self.cfg.TRAIN.BATCH_SIZE + dataloader_options["num_workers"] = self.cfg.TRAIN.NUM_WORKERS + return DataLoader( + self.train_dataset, + shuffle=False, + persistent_workers=True, + **dataloader_options, + ) + + def predict_dataloader(self): + dataloader_options = self.dataloader_options.copy() + dataloader_options[ + "batch_size"] = 1 if self.is_mm else self.cfg.TEST.BATCH_SIZE + dataloader_options["num_workers"] = self.cfg.TEST.NUM_WORKERS + dataloader_options["shuffle"] = False + return DataLoader( + self.test_dataset, + persistent_workers=True, + **dataloader_options, + ) + + def val_dataloader(self): + # overrides batch_size and num_workers + dataloader_options = self.dataloader_options.copy() + dataloader_options["batch_size"] = self.cfg.EVAL.BATCH_SIZE + dataloader_options["num_workers"] = self.cfg.EVAL.NUM_WORKERS + dataloader_options["shuffle"] = False + return DataLoader( + self.val_dataset, + persistent_workers=True, + **dataloader_options, + ) + + def test_dataloader(self): + # overrides batch_size and num_workers + dataloader_options = self.dataloader_options.copy() + 
dataloader_options[
+            "batch_size"] = 1 if self.is_mm else self.cfg.TEST.BATCH_SIZE
+        dataloader_options["num_workers"] = self.cfg.TEST.NUM_WORKERS
+        dataloader_options["shuffle"] = False
+        return DataLoader(
+            self.test_dataset,
+            persistent_workers=True,
+            **dataloader_options,
+        )
diff --git a/mGPT/data/build_data.py b/mGPT/data/build_data.py
new file mode 100644
index 0000000..057a556
--- /dev/null
+++ b/mGPT/data/build_data.py
@@ -0,0 +1,15 @@
+from omegaconf import OmegaConf
+from os.path import join as pjoin
+from mGPT.config import instantiate_from_config
+
+
+def build_data(cfg, phase="train"):
+    data_config = OmegaConf.to_container(cfg.DATASET, resolve=True)
+    data_config['params'] = {'cfg': cfg, 'phase': phase}
+    if isinstance(data_config['target'], str):
+        return instantiate_from_config(data_config)
+    elif isinstance(data_config['target'], list):
+        data_config_tmp = data_config.copy()
+        data_config_tmp['params']['dataModules'] = data_config['target']
+        data_config_tmp['target'] = 'mGPT.data.Concat.ConcatDataModule'
+        return instantiate_from_config(data_config_tmp)
diff --git a/mGPT/data/humanml/README.md b/mGPT/data/humanml/README.md
new file mode 100644
index 0000000..4bf224f
--- /dev/null
+++ b/mGPT/data/humanml/README.md
@@ -0,0 +1 @@
+This code is based on https://github.com/EricGuo5513/text-to-motion.git
\ No newline at end of file
diff --git a/mGPT/data/humanml/__init__.py b/mGPT/data/humanml/__init__.py
new file mode 100644
index 0000000..8e75358
--- /dev/null
+++ b/mGPT/data/humanml/__init__.py
@@ -0,0 +1,7 @@
+from .dataset_t2m import Text2MotionDataset
+from .dataset_t2m_eval import Text2MotionDatasetEval
+from .dataset_t2m_cb import Text2MotionDatasetCB
+from .dataset_t2m_token import Text2MotionDatasetToken
+from .dataset_t2m_m2t import Text2MotionDatasetM2T
+from .dataset_m import MotionDataset
+from .dataset_m_vq import MotionDatasetVQ
diff --git a/mGPT/data/humanml/common/quaternion.py b/mGPT/data/humanml/common/quaternion.py
new file mode 100644
index 0000000..dca3d89
--- /dev/null
+++ b/mGPT/data/humanml/common/quaternion.py
@@ -0,0 +1,423 @@
+# Copyright (c) 2018-present, Facebook, Inc.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+import torch
+import numpy as np
+
+_EPS4 = np.finfo(float).eps * 4.0
+
+_FLOAT_EPS = np.finfo(np.float64).eps
+
+# PyTorch-backed implementations
+def qinv(q):
+    assert q.shape[-1] == 4, 'q must be a tensor of shape (*, 4)'
+    mask = torch.ones_like(q)
+    mask[..., 1:] = -mask[..., 1:]
+    return q * mask
+
+
+def qinv_np(q):
+    assert q.shape[-1] == 4, 'q must be a tensor of shape (*, 4)'
+    return qinv(torch.from_numpy(q).float()).numpy()
+
+
+def qnormalize(q):
+    assert q.shape[-1] == 4, 'q must be a tensor of shape (*, 4)'
+    return q / torch.norm(q, dim=-1, keepdim=True)
+
+
+def qmul(q, r):
+    """
+    Multiply quaternion(s) q with quaternion(s) r.
+    Expects two equally-sized tensors of shape (*, 4), where * denotes any number of dimensions.
+    Returns q*r as a tensor of shape (*, 4).
+ """ + assert q.shape[-1] == 4 + assert r.shape[-1] == 4 + + original_shape = q.shape + + # Compute outer product + terms = torch.bmm(r.view(-1, 4, 1), q.view(-1, 1, 4)) + + w = terms[:, 0, 0] - terms[:, 1, 1] - terms[:, 2, 2] - terms[:, 3, 3] + x = terms[:, 0, 1] + terms[:, 1, 0] - terms[:, 2, 3] + terms[:, 3, 2] + y = terms[:, 0, 2] + terms[:, 1, 3] + terms[:, 2, 0] - terms[:, 3, 1] + z = terms[:, 0, 3] - terms[:, 1, 2] + terms[:, 2, 1] + terms[:, 3, 0] + return torch.stack((w, x, y, z), dim=1).view(original_shape) + + +def qrot(q, v): + """ + Rotate vector(s) v about the rotation described by quaternion(s) q. + Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3) for v, + where * denotes any number of dimensions. + Returns a tensor of shape (*, 3). + """ + assert q.shape[-1] == 4 + assert v.shape[-1] == 3 + assert q.shape[:-1] == v.shape[:-1] + + original_shape = list(v.shape) + # print(q.shape) + q = q.contiguous().view(-1, 4) + v = v.contiguous().view(-1, 3) + + qvec = q[:, 1:] + uv = torch.cross(qvec, v, dim=1) + uuv = torch.cross(qvec, uv, dim=1) + return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape) + + +def qeuler(q, order, epsilon=0, deg=True): + """ + Convert quaternion(s) q to Euler angles. + Expects a tensor of shape (*, 4), where * denotes any number of dimensions. + Returns a tensor of shape (*, 3). + """ + assert q.shape[-1] == 4 + + original_shape = list(q.shape) + original_shape[-1] = 3 + q = q.view(-1, 4) + + q0 = q[:, 0] + q1 = q[:, 1] + q2 = q[:, 2] + q3 = q[:, 3] + + if order == 'xyz': + x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2 * (q1 * q1 + q2 * q2)) + y = torch.asin(torch.clamp(2 * (q1 * q3 + q0 * q2), -1 + epsilon, 1 - epsilon)) + z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2 * (q2 * q2 + q3 * q3)) + elif order == 'yzx': + x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2 * (q1 * q1 + q3 * q3)) + y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2 * (q2 * q2 + q3 * q3)) + z = torch.asin(torch.clamp(2 * (q1 * q2 + q0 * q3), -1 + epsilon, 1 - epsilon)) + elif order == 'zxy': + x = torch.asin(torch.clamp(2 * (q0 * q1 + q2 * q3), -1 + epsilon, 1 - epsilon)) + y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2 * (q1 * q1 + q2 * q2)) + z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2 * (q1 * q1 + q3 * q3)) + elif order == 'xzy': + x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2 * (q1 * q1 + q3 * q3)) + y = torch.atan2(2 * (q0 * q2 + q1 * q3), 1 - 2 * (q2 * q2 + q3 * q3)) + z = torch.asin(torch.clamp(2 * (q0 * q3 - q1 * q2), -1 + epsilon, 1 - epsilon)) + elif order == 'yxz': + x = torch.asin(torch.clamp(2 * (q0 * q1 - q2 * q3), -1 + epsilon, 1 - epsilon)) + y = torch.atan2(2 * (q1 * q3 + q0 * q2), 1 - 2 * (q1 * q1 + q2 * q2)) + z = torch.atan2(2 * (q1 * q2 + q0 * q3), 1 - 2 * (q1 * q1 + q3 * q3)) + elif order == 'zyx': + x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2 * (q1 * q1 + q2 * q2)) + y = torch.asin(torch.clamp(2 * (q0 * q2 - q1 * q3), -1 + epsilon, 1 - epsilon)) + z = torch.atan2(2 * (q0 * q3 + q1 * q2), 1 - 2 * (q2 * q2 + q3 * q3)) + else: + raise + + if deg: + return torch.stack((x, y, z), dim=1).view(original_shape) * 180 / np.pi + else: + return torch.stack((x, y, z), dim=1).view(original_shape) + + +# Numpy-backed implementations + +def qmul_np(q, r): + q = torch.from_numpy(q).contiguous().float() + r = torch.from_numpy(r).contiguous().float() + return qmul(q, r).numpy() + + +def qrot_np(q, v): + q = torch.from_numpy(q).contiguous().float() + v = torch.from_numpy(v).contiguous().float() + return qrot(q, v).numpy() + + +def qeuler_np(q, order, 
epsilon=0, use_gpu=False): + if use_gpu: + q = torch.from_numpy(q).cuda().float() + return qeuler(q, order, epsilon).cpu().numpy() + else: + q = torch.from_numpy(q).contiguous().float() + return qeuler(q, order, epsilon).numpy() + + +def qfix(q): + """ + Enforce quaternion continuity across the time dimension by selecting + the representation (q or -q) with minimal distance (or, equivalently, maximal dot product) + between two consecutive frames. + + Expects a tensor of shape (L, J, 4), where L is the sequence length and J is the number of joints. + Returns a tensor of the same shape. + """ + assert len(q.shape) == 3 + assert q.shape[-1] == 4 + + result = q.copy() + dot_products = np.sum(q[1:] * q[:-1], axis=2) + mask = dot_products < 0 + mask = (np.cumsum(mask, axis=0) % 2).astype(bool) + result[1:][mask] *= -1 + return result + + +def euler2quat(e, order, deg=True): + """ + Convert Euler angles to quaternions. + """ + assert e.shape[-1] == 3 + + original_shape = list(e.shape) + original_shape[-1] = 4 + + e = e.view(-1, 3) + + ## if euler angles in degrees + if deg: + e = e * np.pi / 180. + + x = e[:, 0] + y = e[:, 1] + z = e[:, 2] + + rx = torch.stack((torch.cos(x / 2), torch.sin(x / 2), torch.zeros_like(x), torch.zeros_like(x)), dim=1) + ry = torch.stack((torch.cos(y / 2), torch.zeros_like(y), torch.sin(y / 2), torch.zeros_like(y)), dim=1) + rz = torch.stack((torch.cos(z / 2), torch.zeros_like(z), torch.zeros_like(z), torch.sin(z / 2)), dim=1) + + result = None + for coord in order: + if coord == 'x': + r = rx + elif coord == 'y': + r = ry + elif coord == 'z': + r = rz + else: + raise + if result is None: + result = r + else: + result = qmul(result, r) + + # Reverse antipodal representation to have a non-negative "w" + if order in ['xyz', 'yzx', 'zxy']: + result *= -1 + + return result.view(original_shape) + + +def expmap_to_quaternion(e): + """ + Convert axis-angle rotations (aka exponential maps) to quaternions. + Stable formula from "Practical Parameterization of Rotations Using the Exponential Map". + Expects a tensor of shape (*, 3), where * denotes any number of dimensions. + Returns a tensor of shape (*, 4). + """ + assert e.shape[-1] == 3 + + original_shape = list(e.shape) + original_shape[-1] = 4 + e = e.reshape(-1, 3) + + theta = np.linalg.norm(e, axis=1).reshape(-1, 1) + w = np.cos(0.5 * theta).reshape(-1, 1) + xyz = 0.5 * np.sinc(0.5 * theta / np.pi) * e + return np.concatenate((w, xyz), axis=1).reshape(original_shape) + + +def euler_to_quaternion(e, order): + """ + Convert Euler angles to quaternions. + """ + assert e.shape[-1] == 3 + + original_shape = list(e.shape) + original_shape[-1] = 4 + + e = e.reshape(-1, 3) + + x = e[:, 0] + y = e[:, 1] + z = e[:, 2] + + rx = np.stack((np.cos(x / 2), np.sin(x / 2), np.zeros_like(x), np.zeros_like(x)), axis=1) + ry = np.stack((np.cos(y / 2), np.zeros_like(y), np.sin(y / 2), np.zeros_like(y)), axis=1) + rz = np.stack((np.cos(z / 2), np.zeros_like(z), np.zeros_like(z), np.sin(z / 2)), axis=1) + + result = None + for coord in order: + if coord == 'x': + r = rx + elif coord == 'y': + r = ry + elif coord == 'z': + r = rz + else: + raise + if result is None: + result = r + else: + result = qmul_np(result, r) + + # Reverse antipodal representation to have a non-negative "w" + if order in ['xyz', 'yzx', 'zxy']: + result *= -1 + + return result.reshape(original_shape) + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. 
+ Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def quaternion_to_matrix_np(quaternions): + q = torch.from_numpy(quaternions).contiguous().float() + return quaternion_to_matrix(q).numpy() + + +def quaternion_to_cont6d_np(quaternions): + rotation_mat = quaternion_to_matrix_np(quaternions) + cont_6d = np.concatenate([rotation_mat[..., 0], rotation_mat[..., 1]], axis=-1) + return cont_6d + + +def quaternion_to_cont6d(quaternions): + rotation_mat = quaternion_to_matrix(quaternions) + cont_6d = torch.cat([rotation_mat[..., 0], rotation_mat[..., 1]], dim=-1) + return cont_6d + + +def cont6d_to_matrix(cont6d): + assert cont6d.shape[-1] == 6, "The last dimension must be 6" + x_raw = cont6d[..., 0:3] + y_raw = cont6d[..., 3:6] + + x = x_raw / torch.norm(x_raw, dim=-1, keepdim=True) + z = torch.cross(x, y_raw, dim=-1) + z = z / torch.norm(z, dim=-1, keepdim=True) + + y = torch.cross(z, x, dim=-1) + + x = x[..., None] + y = y[..., None] + z = z[..., None] + + mat = torch.cat([x, y, z], dim=-1) + return mat + + +def cont6d_to_matrix_np(cont6d): + q = torch.from_numpy(cont6d).contiguous().float() + return cont6d_to_matrix(q).numpy() + + +def qpow(q0, t, dtype=torch.float): + ''' q0 : tensor of quaternions + t: tensor of powers + ''' + q0 = qnormalize(q0) + theta0 = torch.acos(q0[..., 0]) + + ## if theta0 is close to zero, add epsilon to avoid NaNs + mask = (theta0 <= 10e-10) * (theta0 >= -10e-10) + theta0 = (1 - mask) * theta0 + mask * 10e-10 + v0 = q0[..., 1:] / torch.sin(theta0).view(-1, 1) + + if isinstance(t, torch.Tensor): + q = torch.zeros(t.shape + q0.shape) + theta = t.view(-1, 1) * theta0.view(1, -1) + else: ## if t is a number + q = torch.zeros(q0.shape) + theta = t * theta0 + + q[..., 0] = torch.cos(theta) + q[..., 1:] = v0 * torch.sin(theta).unsqueeze(-1) + + return q.to(dtype) + + +def qslerp(q0, q1, t): + ''' + q0: starting quaternion + q1: ending quaternion + t: array of points along the way + + Returns: + Tensor of Slerps: t.shape + q0.shape + ''' + + q0 = qnormalize(q0) + q1 = qnormalize(q1) + q_ = qpow(qmul(q1, qinv(q0)), t) + + return qmul(q_, + q0.contiguous().view(torch.Size([1] * len(t.shape)) + q0.shape).expand(t.shape + q0.shape).contiguous()) + + +def qbetween(v0, v1): + ''' + find the quaternion used to rotate v0 to v1 + ''' + assert v0.shape[-1] == 3, 'v0 must be of the shape (*, 3)' + assert v1.shape[-1] == 3, 'v1 must be of the shape (*, 3)' + + v = torch.cross(v0, v1) + w = torch.sqrt((v0 ** 2).sum(dim=-1, keepdim=True) * (v1 ** 2).sum(dim=-1, keepdim=True)) + (v0 * v1).sum(dim=-1, + keepdim=True) + return qnormalize(torch.cat([w, v], dim=-1)) + + +def qbetween_np(v0, v1): + ''' + find the quaternion used to rotate v0 to v1 + ''' + assert v0.shape[-1] == 3, 'v0 must be of the shape (*, 3)' + assert v1.shape[-1] == 3, 'v1 must be of the shape (*, 3)' + + v0 = torch.from_numpy(v0).float() + v1 = torch.from_numpy(v1).float() + return qbetween(v0, v1).numpy() + + +def lerp(p0, p1, t): + if not isinstance(t, torch.Tensor): + t = 
torch.Tensor([t]) + + new_shape = t.shape + p0.shape + new_view_t = t.shape + torch.Size([1] * len(p0.shape)) + new_view_p = torch.Size([1] * len(t.shape)) + p0.shape + p0 = p0.view(new_view_p).expand(new_shape) + p1 = p1.view(new_view_p).expand(new_shape) + t = t.view(new_view_t).expand(new_shape) + + return p0 + t * (p1 - p0) diff --git a/mGPT/data/humanml/common/skeleton.py b/mGPT/data/humanml/common/skeleton.py new file mode 100644 index 0000000..b2ae85a --- /dev/null +++ b/mGPT/data/humanml/common/skeleton.py @@ -0,0 +1,199 @@ +from .quaternion import * +import scipy.ndimage.filters as filters + +class Skeleton(object): + def __init__(self, offset, kinematic_tree, device): + self.device = device + self._raw_offset_np = offset.numpy() + self._raw_offset = offset.clone().detach().to(device).float() + self._kinematic_tree = kinematic_tree + self._offset = None + self._parents = [0] * len(self._raw_offset) + self._parents[0] = -1 + for chain in self._kinematic_tree: + for j in range(1, len(chain)): + self._parents[chain[j]] = chain[j-1] + + def njoints(self): + return len(self._raw_offset) + + def offset(self): + return self._offset + + def set_offset(self, offsets): + self._offset = offsets.clone().detach().to(self.device).float() + + def kinematic_tree(self): + return self._kinematic_tree + + def parents(self): + return self._parents + + # joints (batch_size, joints_num, 3) + def get_offsets_joints_batch(self, joints): + assert len(joints.shape) == 3 + _offsets = self._raw_offset.expand(joints.shape[0], -1, -1).clone() + for i in range(1, self._raw_offset.shape[0]): + _offsets[:, i] = torch.norm(joints[:, i] - joints[:, self._parents[i]], p=2, dim=1)[:, None] * _offsets[:, i] + + self._offset = _offsets.detach() + return _offsets + + # joints (joints_num, 3) + def get_offsets_joints(self, joints): + assert len(joints.shape) == 2 + _offsets = self._raw_offset.clone() + for i in range(1, self._raw_offset.shape[0]): + # print(joints.shape) + _offsets[i] = torch.norm(joints[i] - joints[self._parents[i]], p=2, dim=0) * _offsets[i] + + self._offset = _offsets.detach() + return _offsets + + # face_joint_idx should follow the order of right hip, left hip, right shoulder, left shoulder + # joints (batch_size, joints_num, 3) + def inverse_kinematics_np(self, joints, face_joint_idx, smooth_forward=False): + assert len(face_joint_idx) == 4 + '''Get Forward Direction''' + l_hip, r_hip, sdr_r, sdr_l = face_joint_idx + across1 = joints[:, r_hip] - joints[:, l_hip] + across2 = joints[:, sdr_r] - joints[:, sdr_l] + across = across1 + across2 + across = across / np.sqrt((across**2).sum(axis=-1))[:, np.newaxis] + # print(across1.shape, across2.shape) + + # forward (batch_size, 3) + forward = np.cross(np.array([[0, 1, 0]]), across, axis=-1) + if smooth_forward: + forward = filters.gaussian_filter1d(forward, 20, axis=0, mode='nearest') + # forward (batch_size, 3) + forward = forward / np.sqrt((forward**2).sum(axis=-1))[..., np.newaxis] + + '''Get Root Rotation''' + target = np.array([[0,0,1]]).repeat(len(forward), axis=0) + root_quat = qbetween_np(forward, target) + + '''Inverse Kinematics''' + # quat_params (batch_size, joints_num, 4) + # print(joints.shape[:-1]) + quat_params = np.zeros(joints.shape[:-1] + (4,)) + # print(quat_params.shape) + root_quat[0] = np.array([[1.0, 0.0, 0.0, 0.0]]) + quat_params[:, 0] = root_quat + # quat_params[0, 0] = np.array([[1.0, 0.0, 0.0, 0.0]]) + for chain in self._kinematic_tree: + R = root_quat + for j in range(len(chain) - 1): + # (batch, 3) + u = 
self._raw_offset_np[chain[j+1]][np.newaxis,...].repeat(len(joints), axis=0) + # print(u.shape) + # (batch, 3) + v = joints[:, chain[j+1]] - joints[:, chain[j]] + v = v / np.sqrt((v**2).sum(axis=-1))[:, np.newaxis] + # print(u.shape, v.shape) + rot_u_v = qbetween_np(u, v) + + R_loc = qmul_np(qinv_np(R), rot_u_v) + + quat_params[:,chain[j + 1], :] = R_loc + R = qmul_np(R, R_loc) + + return quat_params + + # Be sure root joint is at the beginning of kinematic chains + def forward_kinematics(self, quat_params, root_pos, skel_joints=None, do_root_R=True): + # quat_params (batch_size, joints_num, 4) + # joints (batch_size, joints_num, 3) + # root_pos (batch_size, 3) + if skel_joints is not None: + offsets = self.get_offsets_joints_batch(skel_joints) + if len(self._offset.shape) == 2: + offsets = self._offset.expand(quat_params.shape[0], -1, -1) + joints = torch.zeros(quat_params.shape[:-1] + (3,)).to(self.device) + joints[:, 0] = root_pos + for chain in self._kinematic_tree: + if do_root_R: + R = quat_params[:, 0] + else: + R = torch.tensor([[1.0, 0.0, 0.0, 0.0]]).expand(len(quat_params), -1).detach().to(self.device) + for i in range(1, len(chain)): + R = qmul(R, quat_params[:, chain[i]]) + offset_vec = offsets[:, chain[i]] + joints[:, chain[i]] = qrot(R, offset_vec) + joints[:, chain[i-1]] + return joints + + # Be sure root joint is at the beginning of kinematic chains + def forward_kinematics_np(self, quat_params, root_pos, skel_joints=None, do_root_R=True): + # quat_params (batch_size, joints_num, 4) + # joints (batch_size, joints_num, 3) + # root_pos (batch_size, 3) + if skel_joints is not None: + skel_joints = torch.from_numpy(skel_joints) + offsets = self.get_offsets_joints_batch(skel_joints) + if len(self._offset.shape) == 2: + offsets = self._offset.expand(quat_params.shape[0], -1, -1) + offsets = offsets.numpy() + joints = np.zeros(quat_params.shape[:-1] + (3,)) + joints[:, 0] = root_pos + for chain in self._kinematic_tree: + if do_root_R: + R = quat_params[:, 0] + else: + R = np.array([[1.0, 0.0, 0.0, 0.0]]).repeat(len(quat_params), axis=0) + for i in range(1, len(chain)): + R = qmul_np(R, quat_params[:, chain[i]]) + offset_vec = offsets[:, chain[i]] + joints[:, chain[i]] = qrot_np(R, offset_vec) + joints[:, chain[i - 1]] + return joints + + def forward_kinematics_cont6d_np(self, cont6d_params, root_pos, skel_joints=None, do_root_R=True): + # cont6d_params (batch_size, joints_num, 6) + # joints (batch_size, joints_num, 3) + # root_pos (batch_size, 3) + if skel_joints is not None: + skel_joints = torch.from_numpy(skel_joints) + offsets = self.get_offsets_joints_batch(skel_joints) + if len(self._offset.shape) == 2: + offsets = self._offset.expand(cont6d_params.shape[0], -1, -1) + offsets = offsets.numpy() + joints = np.zeros(cont6d_params.shape[:-1] + (3,)) + joints[:, 0] = root_pos + for chain in self._kinematic_tree: + if do_root_R: + matR = cont6d_to_matrix_np(cont6d_params[:, 0]) + else: + matR = np.eye(3)[np.newaxis, :].repeat(len(cont6d_params), axis=0) + for i in range(1, len(chain)): + matR = np.matmul(matR, cont6d_to_matrix_np(cont6d_params[:, chain[i]])) + offset_vec = offsets[:, chain[i]][..., np.newaxis] + # print(matR.shape, offset_vec.shape) + joints[:, chain[i]] = np.matmul(matR, offset_vec).squeeze(-1) + joints[:, chain[i-1]] + return joints + + def forward_kinematics_cont6d(self, cont6d_params, root_pos, skel_joints=None, do_root_R=True): + # cont6d_params (batch_size, joints_num, 6) + # joints (batch_size, joints_num, 3) + # root_pos (batch_size, 3) + if skel_joints is 
not None: + # skel_joints = torch.from_numpy(skel_joints) + offsets = self.get_offsets_joints_batch(skel_joints) + if len(self._offset.shape) == 2: + offsets = self._offset.expand(cont6d_params.shape[0], -1, -1) + joints = torch.zeros(cont6d_params.shape[:-1] + (3,)).to(cont6d_params.device) + joints[..., 0, :] = root_pos + for chain in self._kinematic_tree: + if do_root_R: + matR = cont6d_to_matrix(cont6d_params[:, 0]) + else: + matR = torch.eye(3).expand((len(cont6d_params), -1, -1)).detach().to(cont6d_params.device) + for i in range(1, len(chain)): + matR = torch.matmul(matR, cont6d_to_matrix(cont6d_params[:, chain[i]])) + offset_vec = offsets[:, chain[i]].unsqueeze(-1) + # print(matR.shape, offset_vec.shape) + joints[:, chain[i]] = torch.matmul(matR, offset_vec).squeeze(-1) + joints[:, chain[i-1]] + return joints + + + + + diff --git a/mGPT/data/humanml/dataset_m.py b/mGPT/data/humanml/dataset_m.py new file mode 100644 index 0000000..241cabc --- /dev/null +++ b/mGPT/data/humanml/dataset_m.py @@ -0,0 +1,156 @@ +import os +import rich +import random +import pickle +import codecs as cs +import numpy as np +from torch.utils import data +from rich.progress import track +from os.path import join as pjoin + + +class MotionDataset(data.Dataset): + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length=196, + min_motion_length=20, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + + # restrian the length of motion and text + self.max_motion_length = max_motion_length + self.min_motion_length = min_motion_length + self.unit_length = unit_length + + # Data mean and std + self.mean = mean + self.std = std + + # Data path + split_file = pjoin(data_root, split + '.txt') + motion_dir = pjoin(data_root, 'new_joint_vecs') + text_dir = pjoin(data_root, 'texts') + + # Data id list + self.id_list = [] + with cs.open(split_file, "r") as f: + for line in f.readlines(): + self.id_list.append(line.strip()) + + # Debug mode + if tiny or debug: + enumerator = enumerate( + track( + self.id_list, + f"Loading HumanML3D {split}", + )) + maxdata = 100 + subset = '_tiny' + else: + enumerator = enumerate(self.id_list) + maxdata = 1e10 + subset = '' + + new_name_list = [] + motion_dict = {} + + # Fast loading + if os.path.exists(pjoin(data_root, f'tmp/{split}{subset}_motion.pkl')): + with rich.progress.open(pjoin(data_root, f'tmp/{split}{subset}_motion.pkl'), + 'rb', description=f"Loading HumanML3D {split}") as file: + motion_dict = pickle.load(file) + with open(pjoin(data_root, f'tmp/{split}{subset}_index.pkl'), 'rb') as file: + new_name_list = pickle.load(file) + else: + for idx, name in enumerator: + if len(new_name_list) > maxdata: + break + try: + motion = [np.load(pjoin(motion_dir, name + ".npy"))] + + # Read text + with cs.open(pjoin(text_dir, name + '.txt')) as f: + text_data = [] + flag = False + lines = f.readlines() + + for line in lines: + try: + line_split = line.strip().split('#') + f_tag = float(line_split[2]) + to_tag = float(line_split[3]) + f_tag = 0.0 if np.isnan(f_tag) else f_tag + to_tag = 0.0 if np.isnan(to_tag) else to_tag + + if f_tag == 0.0 and to_tag == 0.0: + flag = True + else: + motion_new = [tokens[int(f_tag*fps/unit_length) : int(to_tag*fps/unit_length)] for tokens in motion if int(f_tag*fps/unit_length) < int(to_tag*fps/unit_length)] + + if len(motion_new) == 0: + continue + new_name = '%s_%f_%f'%(name, f_tag, to_tag) + + motion_dict[new_name] = { + 'motion': motion_new, + "length": [len(m[0]) for m in motion_new]} + 
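+ # Keep the segment-specific key ('<name>_<start>_<end>') so the cropped sub-clip can be sampled independently of the full clip.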
new_name_list.append(new_name) + except: + pass + + if flag: + motion_dict[name] = { + 'motion': motion, + "length": [len(m[0]) for m in motion]} + new_name_list.append(name) + except: + pass + + if tmpFile: + os.makedirs(pjoin(data_root, 'tmp'), exist_ok=True) + + with open(pjoin(data_root, f'tmp/{split}{subset}_motion.pkl'),'wb') as file: + pickle.dump(motion_dict, file) + with open(pjoin(data_root, f'tmp/{split}{subset}_index.pkl'), 'wb') as file: + pickle.dump(new_name_list, file) + + self.motion_dict = motion_dict + self.name_list = new_name_list + self.nfeats = motion_dict[new_name_list[0]]['motion'][0].shape[1] + + def __len__(self): + return len(self.name_list) + + def __getitem__(self, item): + data = self.motion_dict[self.name_list[item]] + motion_list, m_length = data["motion"], data["length"] + + # Randomly select a motion + motion = random.choice(motion_list) + + # Crop the motions in to times of 4, and introduce small variations + if self.unit_length < 10: + coin2 = np.random.choice(["single", "single", "double"]) + else: + coin2 = "single" + + if coin2 == "double": + m_length = (m_length // self.unit_length - 1) * self.unit_length + elif coin2 == "single": + m_length = (m_length // self.unit_length) * self.unit_length + idx = random.randint(0, len(motion) - m_length) + motion = motion[idx:idx + m_length] + + # Z Normalization + motion = (motion - self.mean) / self.std + + return None, motion, m_length, None, None, None, None, diff --git a/mGPT/data/humanml/dataset_m_vq.py b/mGPT/data/humanml/dataset_m_vq.py new file mode 100644 index 0000000..dfa3ae5 --- /dev/null +++ b/mGPT/data/humanml/dataset_m_vq.py @@ -0,0 +1,54 @@ +import random +import codecs as cs +import numpy as np +from torch.utils import data +from rich.progress import track +from os.path import join as pjoin +from .dataset_m import MotionDataset +from .dataset_t2m import Text2MotionDataset + + +class MotionDatasetVQ(Text2MotionDataset): + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length, + min_motion_length, + win_size, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + super().__init__(data_root, split, mean, std, max_motion_length, + min_motion_length, unit_length, fps, tmpFile, tiny, + debug, **kwargs) + + # Filter out the motions that are too short + self.window_size = win_size + name_list = list(self.name_list) + for name in self.name_list: + motion = self.data_dict[name]["motion"] + if motion.shape[0] < self.window_size: + name_list.remove(name) + self.data_dict.pop(name) + self.name_list = name_list + + def __len__(self): + return len(self.name_list) + + def __getitem__(self, item): + idx = self.pointer + item + data = self.data_dict[self.name_list[idx]] + motion, length = data["motion"], data["length"] + + idx = random.randint(0, motion.shape[0] - self.window_size) + motion = motion[idx:idx + self.window_size] + motion = (motion - self.mean) / self.std + + return None, motion, length, None, None, None, None, diff --git a/mGPT/data/humanml/dataset_t2m.py b/mGPT/data/humanml/dataset_t2m.py new file mode 100644 index 0000000..3460681 --- /dev/null +++ b/mGPT/data/humanml/dataset_t2m.py @@ -0,0 +1,211 @@ +import os +import rich +import random +import pickle +import codecs as cs +import numpy as np +from torch.utils import data +from rich.progress import track +from os.path import join as pjoin + + +class Text2MotionDataset(data.Dataset): + + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length=196, + 
min_motion_length=40, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + + # restrian the length of motion and text + self.max_length = 20 + self.max_motion_length = max_motion_length + self.min_motion_length = min_motion_length + self.unit_length = unit_length + + # Data mean and std + self.mean = mean + self.std = std + + # Data path + split_file = pjoin(data_root, split + '.txt') + motion_dir = pjoin(data_root, 'new_joint_vecs') + text_dir = pjoin(data_root, 'texts') + + # Data id list + self.id_list = [] + with cs.open(split_file, "r") as f: + for line in f.readlines(): + self.id_list.append(line.strip()) + + # Debug mode + if tiny or debug: + enumerator = enumerate(self.id_list) + maxdata = 100 + subset = '_tiny' + else: + enumerator = enumerate( + track( + self.id_list, + f"Loading HumanML3D {split}", + )) + maxdata = 1e10 + subset = '' + + new_name_list = [] + length_list = [] + data_dict = {} + + # Fast loading + if os.path.exists(pjoin(data_root, f'tmp/{split}{subset}_data.pkl')): + if tiny or debug: + with open(pjoin(data_root, f'tmp/{split}{subset}_data.pkl'), + 'rb') as file: + data_dict = pickle.load(file) + else: + with rich.progress.open( + pjoin(data_root, f'tmp/{split}{subset}_data.pkl'), + 'rb', + description=f"Loading HumanML3D {split}") as file: + data_dict = pickle.load(file) + with open(pjoin(data_root, f'tmp/{split}{subset}_index.pkl'), + 'rb') as file: + name_list = pickle.load(file) + for name in new_name_list: + length_list.append(data_dict[name]['length']) + + else: + for idx, name in enumerator: + if len(new_name_list) > maxdata: + break + try: + motion = np.load(pjoin(motion_dir, name + ".npy")) + if (len(motion)) < self.min_motion_length or (len(motion) + >= 200): + continue + + # Read text + text_data = [] + flag = False + with cs.open(pjoin(text_dir, name + '.txt')) as f: + lines = f.readlines() + for line in lines: + text_dict = {} + line_split = line.strip().split('#') + caption = line_split[0] + t_tokens = line_split[1].split(' ') + f_tag = float(line_split[2]) + to_tag = float(line_split[3]) + f_tag = 0.0 if np.isnan(f_tag) else f_tag + to_tag = 0.0 if np.isnan(to_tag) else to_tag + + text_dict['caption'] = caption + text_dict['tokens'] = t_tokens + if f_tag == 0.0 and to_tag == 0.0: + flag = True + text_data.append(text_dict) + else: + motion_new = motion[int(f_tag * + fps):int(to_tag * fps)] + if (len(motion_new) + ) < self.min_motion_length or ( + len(motion_new) >= 200): + continue + new_name = random.choice( + 'ABCDEFGHIJKLMNOPQRSTUVW') + '_' + name + while new_name in new_name_list: + new_name = random.choice( + 'ABCDEFGHIJKLMNOPQRSTUVW') + '_' + name + name_count = 1 + while new_name in data_dict: + new_name += '_' + name_count + name_count += 1 + data_dict[new_name] = { + 'motion': motion_new, + "length": len(motion_new), + 'text': [text_dict] + } + new_name_list.append(new_name) + length_list.append(len(motion_new)) + + if flag: + data_dict[name] = { + 'motion': motion, + "length": len(motion), + 'text': text_data + } + new_name_list.append(name) + length_list.append(len(motion)) + except: + pass + + name_list, length_list = zip( + *sorted(zip(new_name_list, length_list), key=lambda x: x[1])) + + if tmpFile: + os.makedirs(pjoin(data_root, 'tmp'), exist_ok=True) + with open(pjoin(data_root, f'tmp/{split}{subset}_data.pkl'), + 'wb') as file: + pickle.dump(data_dict, file) + with open(pjoin(data_root, f'tmp/{split}{subset}_index.pkl'), + 'wb') as file: + pickle.dump(name_list, file) + + self.length_arr = 
np.array(length_list) + self.data_dict = data_dict + self.name_list = name_list + self.nfeats = data_dict[name_list[0]]['motion'].shape[1] + self.reset_max_len(self.max_length) + + def reset_max_len(self, length): + assert length <= self.max_motion_length + self.pointer = np.searchsorted(self.length_arr, length) + print("Pointer Pointing at %d" % self.pointer) + self.max_length = length + + def __len__(self): + return len(self.name_list) - self.pointer + + def __getitem__(self, item): + idx = self.pointer + item + data = self.data_dict[self.name_list[idx]] + motion, m_length, text_list = data["motion"], data["length"], data[ + "text"] + + # Randomly select a caption + text_data = random.choice(text_list) + caption = text_data["caption"] + + all_captions = [ + ' '.join([token.split('/')[0] for token in text_dic['tokens']]) + for text_dic in text_list + ] + + # Crop the motions in to times of 4, and introduce small variations + if self.unit_length < 10: + coin2 = np.random.choice(["single", "single", "double"]) + else: + coin2 = "single" + + if coin2 == "double": + m_length = (m_length // self.unit_length - 1) * self.unit_length + elif coin2 == "single": + m_length = (m_length // self.unit_length) * self.unit_length + + idx = random.randint(0, len(motion) - m_length) + motion = motion[idx:idx + m_length] + + # Z Normalization + motion = (motion - self.mean) / self.std + + return caption, motion, m_length, None, None, None, None, all_captions diff --git a/mGPT/data/humanml/dataset_t2m_cb.py b/mGPT/data/humanml/dataset_t2m_cb.py new file mode 100644 index 0000000..6f7e1fe --- /dev/null +++ b/mGPT/data/humanml/dataset_t2m_cb.py @@ -0,0 +1,211 @@ +import rich +import random +import pickle +import os +import numpy as np +import codecs as cs +from torch.utils import data +from os.path import join as pjoin +from rich.progress import track +import json +import spacy + +class Text2MotionDatasetCB(data.Dataset): + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length=196, + min_motion_length=20, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + stage='lm_pretrain', + code_path='VQVAE', + task_path=None, + std_text=False, + **kwargs, + ): + self.tiny = tiny + self.unit_length = unit_length + + # Data mean and std + self.mean = mean + self.std = std + + # Data path + split = 'train' + split_file = pjoin(data_root, split + '.txt') + motion_dir = pjoin(data_root, code_path) + text_dir = pjoin(data_root, 'texts') + + if task_path: + instructions = task_path + elif stage == 'lm_pretrain': + instructions = pjoin(data_root, 'template_pretrain.json') + elif stage in ['lm_instruct', "lm_rl"]: + instructions = pjoin(data_root, 'template_instructions.json') + else: + raise NotImplementedError(f"stage {stage} not implemented") + + # Data id list + self.id_list = [] + with cs.open(split_file, "r") as f: + for line in f.readlines(): + self.id_list.append(line.strip()) + + # Debug mode + if tiny or debug: + enumerator = enumerate(self.id_list) + maxdata = 100 + subset = '_tiny' + else: + enumerator = enumerate( + track( + self.id_list, + f"Loading HumanML3D {split}", + )) + maxdata = 1e10 + subset = '' + + new_name_list = [] + data_dict = {} + + # Fast loading + for i, name in enumerator: + if len(new_name_list) > maxdata: + break + try: + # Load motion tokens + m_token_list = np.load(pjoin(motion_dir, f'{name}.npy')) + # Read text + with cs.open(pjoin(text_dir, name + '.txt')) as f: + text_data = [] + flag = False + lines = f.readlines() + + for line in lines: + try: 
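+ # Each annotation line follows 'caption#tokens#start_time#end_time'; start/end of 0.0 means the caption describes the whole clip.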
+ text_dict = {} + line_split = line.strip().split('#') + caption = line_split[0] + t_tokens = line_split[1].split(' ') + f_tag = float(line_split[2]) + to_tag = float(line_split[3]) + f_tag = 0.0 if np.isnan(f_tag) else f_tag + to_tag = 0.0 if np.isnan(to_tag) else to_tag + + text_dict['caption'] = caption + text_dict['tokens'] = t_tokens + if f_tag == 0.0 and to_tag == 0.0: + flag = True + text_data.append(text_dict) + else: + m_token_list_new = [ + tokens[int(f_tag * fps / unit_length + ):int(to_tag * fps / + unit_length)] + for tokens in m_token_list + if int(f_tag * fps / unit_length) < + int(to_tag * fps / unit_length) + ] + + if len(m_token_list_new) == 0: + continue + new_name = '%s_%f_%f' % (name, f_tag, + to_tag) + + data_dict[new_name] = { + 'm_token_list': m_token_list_new, + 'text': [text_dict] + } + new_name_list.append(new_name) + except: + pass + + if flag: + data_dict[name] = { + 'm_token_list': m_token_list, + 'text': text_data + } + new_name_list.append(name) + except: + pass + + if tmpFile: + os.makedirs(pjoin(data_root, 'tmp'), exist_ok=True) + with open( + pjoin(data_root, + f'tmp/{split}{subset}_tokens_data.pkl'), + 'wb') as file: + pickle.dump(data_dict, file) + with open( + pjoin(data_root, + f'tmp/{split}{subset}_tokens_index.pkl'), + 'wb') as file: + pickle.dump(new_name_list, file) + + self.data_dict = data_dict + self.name_list = new_name_list + self.nlp = spacy.load('en_core_web_sm') + self.std_text = std_text + self.instructions = json.load(open(instructions, 'r')) + self.tasks = [] + for task in self.instructions.keys(): + for subtask in self.instructions[task].keys(): + self.tasks.append(self.instructions[task][subtask]) + + def __len__(self): + return len(self.name_list) * len(self.tasks) + + def __getitem__(self, item): + data_idx = item % len(self.name_list) + task_idx = item // len(self.name_list) + + data = self.data_dict[self.name_list[data_idx]] + m_token_list, text_list = data['m_token_list'], data['text'] + + m_tokens = random.choice(m_token_list) + text_data = random.choice(text_list) + caption = text_data['caption'] + if self.std_text: + doc = self.nlp(caption) + word_list = [] + pos_list = [] + for token in doc: + word = token.text + if not word.isalpha(): + continue + if (token.pos_ == 'NOUN' + or token.pos_ == 'VERB') and (word != 'left'): + word_list.append(token.lemma_) + else: + word_list.append(word) + pos_list.append(token.pos_) + + caption = ' '.join(word_list) + + all_captions = [ + ' '.join([token.split('/')[0] for token in text_dic['tokens']]) + for text_dic in text_list + ] + + coin = np.random.choice([False, False, True]) + + if coin: + # drop one token at the head or tail + coin2 = np.random.choice([True, False]) + if coin2: + m_tokens = m_tokens[:-1] + else: + m_tokens = m_tokens[1:] + + m_tokens_len = m_tokens.shape[0] + + tasks = self.tasks[task_idx] + + return caption, m_tokens, m_tokens_len, None, None, None, None, all_captions, tasks diff --git a/mGPT/data/humanml/dataset_t2m_eval.py b/mGPT/data/humanml/dataset_t2m_eval.py new file mode 100644 index 0000000..4162981 --- /dev/null +++ b/mGPT/data/humanml/dataset_t2m_eval.py @@ -0,0 +1,92 @@ +import random +import numpy as np +from .dataset_t2m import Text2MotionDataset + + +class Text2MotionDatasetEval(Text2MotionDataset): + + def __init__( + self, + data_root, + split, + mean, + std, + w_vectorizer, + max_motion_length=196, + min_motion_length=40, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + super().__init__(data_root, split, 
mean, std, max_motion_length, + min_motion_length, unit_length, fps, tmpFile, tiny, + debug, **kwargs) + + self.w_vectorizer = w_vectorizer + + + def __getitem__(self, item): + # Get text data + idx = self.pointer + item + data = self.data_dict[self.name_list[idx]] + motion, m_length, text_list = data["motion"], data["length"], data["text"] + + all_captions = [ + ' '.join([token.split('/')[0] for token in text_dic['tokens']]) + for text_dic in text_list + ] + + if len(all_captions) > 3: + all_captions = all_captions[:3] + elif len(all_captions) == 2: + all_captions = all_captions + all_captions[0:1] + elif len(all_captions) == 1: + all_captions = all_captions * 3 + + # Randomly select a caption + text_data = random.choice(text_list) + caption, tokens = text_data["caption"], text_data["tokens"] + + # Text + max_text_len = 20 + if len(tokens) < max_text_len: + # pad with "unk" + tokens = ["sos/OTHER"] + tokens + ["eos/OTHER"] + sent_len = len(tokens) + tokens = tokens + ["unk/OTHER"] * (max_text_len + 2 - sent_len) + else: + # crop + tokens = tokens[:max_text_len] + tokens = ["sos/OTHER"] + tokens + ["eos/OTHER"] + sent_len = len(tokens) + pos_one_hots = [] + word_embeddings = [] + for token in tokens: + word_emb, pos_oh = self.w_vectorizer[token] + pos_one_hots.append(pos_oh[None, :]) + word_embeddings.append(word_emb[None, :]) + pos_one_hots = np.concatenate(pos_one_hots, axis=0) + word_embeddings = np.concatenate(word_embeddings, axis=0) + + # Random crop + if self.unit_length < 10: + coin2 = np.random.choice(["single", "single", "double"]) + else: + coin2 = "single" + + if coin2 == "double": + m_length = (m_length // self.unit_length - 1) * self.unit_length + elif coin2 == "single": + m_length = (m_length // self.unit_length) * self.unit_length + + idx = random.randint(0, len(motion) - m_length) + motion = motion[idx:idx + m_length] + + # Z Normalization + motion = (motion - self.mean) / self.std + + return caption, motion, m_length, word_embeddings, pos_one_hots, sent_len, "_".join( + tokens), all_captions diff --git a/mGPT/data/humanml/dataset_t2m_m2t.py b/mGPT/data/humanml/dataset_t2m_m2t.py new file mode 100644 index 0000000..259078c --- /dev/null +++ b/mGPT/data/humanml/dataset_t2m_m2t.py @@ -0,0 +1,119 @@ +import random +import numpy as np +from torch.utils import data +from .dataset_t2m import Text2MotionDataset +import codecs as cs +from os.path import join as pjoin + + +class Text2MotionDatasetM2T(data.Dataset): + + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length=196, + min_motion_length=40, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + + self.max_motion_length = max_motion_length + self.min_motion_length = min_motion_length + self.unit_length = unit_length + + # Data mean and std + self.mean = mean + self.std = std + + # Data path + split_file = pjoin(data_root, split + '.txt') + motion_dir = pjoin(data_root, 'new_joint_vecs') + text_dir = pjoin(data_root, 'texts') + + # Data id list + self.id_list = [] + with cs.open(split_file, "r") as f: + for line in f.readlines(): + self.id_list.append(line.strip()) + + new_name_list = [] + length_list = [] + data_dict = {} + for name in self.id_list: + # try: + motion = np.load(pjoin(motion_dir, name + '.npy')) + if (len(motion)) < self.min_motion_length or (len(motion) >= 200): + continue + + + text_data = [] + flag = False + + with cs.open(pjoin(text_dir, name + '.txt')) as f: + for line in f.readlines(): + text_dict = {} + line_split = line.strip().split('#') + 
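+ # Fields: caption, word/POS tokens, then segment start/end in seconds (converted to frames at 20 fps below).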
caption = line_split[0] + tokens = line_split[1].split(' ') + f_tag = float(line_split[2]) + to_tag = float(line_split[3]) + f_tag = 0.0 if np.isnan(f_tag) else f_tag + to_tag = 0.0 if np.isnan(to_tag) else to_tag + + text_dict['caption'] = caption + text_dict['tokens'] = tokens + if f_tag == 0.0 and to_tag == 0.0: + flag = True + text_data.append(text_dict) + else: + try: + n_motion = motion[int(f_tag*20) : int(to_tag*20)] + + if (len(n_motion)) < min_motion_length or (len(n_motion) >= 200): + continue + + new_name = "%s_%f_%f"%(name, f_tag, to_tag) + data_dict[new_name] = {'motion': n_motion, + 'length': len(n_motion), + 'text':[text_dict]} + new_name_list.append(new_name) + except: + print(line_split) + print(line_split[2], line_split[3], f_tag, to_tag, name) + if flag: + data_dict[name] = {'motion': motion, + 'length': len(motion), + 'name': name, + 'text': text_data} + + new_name_list.append(name) + length_list.append(len(motion)) + # except: + # # Some motion may not exist in KIT dataset + # pass + + self.length_arr = np.array(length_list) + self.data_dict = data_dict + self.name_list = new_name_list + self.nfeats = motion.shape[-1] + + + def __len__(self): + return len(self.data_dict) + + def __getitem__(self, item): + name = self.name_list[item] + data = self.data_dict[name] + motion, m_length = data['motion'], data['length'] + + "Z Normalization" + motion = (motion - self.mean) / self.std + + return name, motion, m_length, True, True, True, True, True, True diff --git a/mGPT/data/humanml/dataset_t2m_token.py b/mGPT/data/humanml/dataset_t2m_token.py new file mode 100644 index 0000000..d9f1e54 --- /dev/null +++ b/mGPT/data/humanml/dataset_t2m_token.py @@ -0,0 +1,86 @@ +import random +import numpy as np +from torch.utils import data +from .dataset_t2m import Text2MotionDataset +import codecs as cs +from os.path import join as pjoin + + +class Text2MotionDatasetToken(data.Dataset): + + def __init__( + self, + data_root, + split, + mean, + std, + max_motion_length=196, + min_motion_length=40, + unit_length=4, + fps=20, + tmpFile=True, + tiny=False, + debug=False, + **kwargs, + ): + + self.max_motion_length = max_motion_length + self.min_motion_length = min_motion_length + self.unit_length = unit_length + + # Data mean and std + self.mean = mean + self.std = std + + # Data path + split_file = pjoin(data_root, split + '.txt') + motion_dir = pjoin(data_root, 'new_joint_vecs') + text_dir = pjoin(data_root, 'texts') + + # Data id list + self.id_list = [] + with cs.open(split_file, "r") as f: + for line in f.readlines(): + self.id_list.append(line.strip()) + + new_name_list = [] + length_list = [] + data_dict = {} + for name in self.id_list: + try: + motion = np.load(pjoin(motion_dir, name + '.npy')) + if (len(motion)) < self.min_motion_length or (len(motion) >= 200): + continue + + data_dict[name] = {'motion': motion, + 'length': len(motion), + 'name': name} + new_name_list.append(name) + length_list.append(len(motion)) + except: + # Some motion may not exist in KIT dataset + pass + + self.length_arr = np.array(length_list) + self.data_dict = data_dict + self.name_list = new_name_list + self.nfeats = motion.shape[-1] + + + def __len__(self): + return len(self.data_dict) + + def __getitem__(self, item): + name = self.name_list[item] + data = self.data_dict[name] + motion, m_length = data['motion'], data['length'] + + m_length = (m_length // self.unit_length) * self.unit_length + + idx = random.randint(0, len(motion) - m_length) + motion = motion[idx:idx+m_length] + + "Z Normalization" + 
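+ # Z-score each feature dimension with the dataset-level mean and std passed to the constructor.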
motion = (motion - self.mean) / self.std + + return name, motion, m_length, True, True, True, True, True, True diff --git a/mGPT/data/humanml/scripts/motion_process.py b/mGPT/data/humanml/scripts/motion_process.py new file mode 100644 index 0000000..8b3395c --- /dev/null +++ b/mGPT/data/humanml/scripts/motion_process.py @@ -0,0 +1,529 @@ +from os.path import join as pjoin + +from ..common.skeleton import Skeleton +import numpy as np +import os +from ..common.quaternion import * +from ..utils.paramUtil import * + +import torch +from tqdm import tqdm + +# positions (batch, joint_num, 3) +def uniform_skeleton(positions, target_offset): + src_skel = Skeleton(n_raw_offsets, kinematic_chain, 'cpu') + src_offset = src_skel.get_offsets_joints(torch.from_numpy(positions[0])) + src_offset = src_offset.numpy() + tgt_offset = target_offset.numpy() + # print(src_offset) + # print(tgt_offset) + '''Calculate Scale Ratio as the ratio of legs''' + src_leg_len = np.abs(src_offset[l_idx1]).max() + np.abs(src_offset[l_idx2]).max() + tgt_leg_len = np.abs(tgt_offset[l_idx1]).max() + np.abs(tgt_offset[l_idx2]).max() + + scale_rt = tgt_leg_len / src_leg_len + # print(scale_rt) + src_root_pos = positions[:, 0] + tgt_root_pos = src_root_pos * scale_rt + + '''Inverse Kinematics''' + quat_params = src_skel.inverse_kinematics_np(positions, face_joint_indx) + # print(quat_params.shape) + + '''Forward Kinematics''' + src_skel.set_offset(target_offset) + new_joints = src_skel.forward_kinematics_np(quat_params, tgt_root_pos) + return new_joints + + +def extract_features(positions, feet_thre, n_raw_offsets, kinematic_chain, face_joint_indx, fid_r, fid_l): + global_positions = positions.copy() + """ Get Foot Contacts """ + + def foot_detect(positions, thres): + velfactor, heightfactor = np.array([thres, thres]), np.array([3.0, 2.0]) + + feet_l_x = (positions[1:, fid_l, 0] - positions[:-1, fid_l, 0]) ** 2 + feet_l_y = (positions[1:, fid_l, 1] - positions[:-1, fid_l, 1]) ** 2 + feet_l_z = (positions[1:, fid_l, 2] - positions[:-1, fid_l, 2]) ** 2 + # feet_l_h = positions[:-1,fid_l,1] + # feet_l = (((feet_l_x + feet_l_y + feet_l_z) < velfactor) & (feet_l_h < heightfactor)).astype(np.float64) + feet_l = ((feet_l_x + feet_l_y + feet_l_z) < velfactor).astype(np.float64) + + feet_r_x = (positions[1:, fid_r, 0] - positions[:-1, fid_r, 0]) ** 2 + feet_r_y = (positions[1:, fid_r, 1] - positions[:-1, fid_r, 1]) ** 2 + feet_r_z = (positions[1:, fid_r, 2] - positions[:-1, fid_r, 2]) ** 2 + # feet_r_h = positions[:-1,fid_r,1] + # feet_r = (((feet_r_x + feet_r_y + feet_r_z) < velfactor) & (feet_r_h < heightfactor)).astype(np.float64) + feet_r = (((feet_r_x + feet_r_y + feet_r_z) < velfactor)).astype(np.float64) + return feet_l, feet_r + + # + feet_l, feet_r = foot_detect(positions, feet_thre) + # feet_l, feet_r = foot_detect(positions, 0.002) + + '''Quaternion and Cartesian representation''' + r_rot = None + + def get_rifke(positions): + '''Local pose''' + positions[..., 0] -= positions[:, 0:1, 0] + positions[..., 2] -= positions[:, 0:1, 2] + '''All pose face Z+''' + positions = qrot_np(np.repeat(r_rot[:, None], positions.shape[1], axis=1), positions) + return positions + + def get_quaternion(positions): + skel = Skeleton(n_raw_offsets, kinematic_chain, "cpu") + # (seq_len, joints_num, 4) + quat_params = skel.inverse_kinematics_np(positions, face_joint_indx, smooth_forward=False) + + '''Fix Quaternion Discontinuity''' + quat_params = qfix(quat_params) + # (seq_len, 4) + r_rot = quat_params[:, 0].copy() + # print(r_rot[0]) + '''Root Linear 
Velocity''' + # (seq_len - 1, 3) + velocity = (positions[1:, 0] - positions[:-1, 0]).copy() + # print(r_rot.shape, velocity.shape) + velocity = qrot_np(r_rot[1:], velocity) + '''Root Angular Velocity''' + # (seq_len - 1, 4) + r_velocity = qmul_np(r_rot[1:], qinv_np(r_rot[:-1])) + quat_params[1:, 0] = r_velocity + # (seq_len, joints_num, 4) + return quat_params, r_velocity, velocity, r_rot + + def get_cont6d_params(positions): + skel = Skeleton(n_raw_offsets, kinematic_chain, "cpu") + # (seq_len, joints_num, 4) + quat_params = skel.inverse_kinematics_np(positions, face_joint_indx, smooth_forward=True) + + '''Quaternion to continuous 6D''' + cont_6d_params = quaternion_to_cont6d_np(quat_params) + # (seq_len, 4) + r_rot = quat_params[:, 0].copy() + # print(r_rot[0]) + '''Root Linear Velocity''' + # (seq_len - 1, 3) + velocity = (positions[1:, 0] - positions[:-1, 0]).copy() + # print(r_rot.shape, velocity.shape) + velocity = qrot_np(r_rot[1:], velocity) + '''Root Angular Velocity''' + # (seq_len - 1, 4) + r_velocity = qmul_np(r_rot[1:], qinv_np(r_rot[:-1])) + # (seq_len, joints_num, 4) + return cont_6d_params, r_velocity, velocity, r_rot + + cont_6d_params, r_velocity, velocity, r_rot = get_cont6d_params(positions) + positions = get_rifke(positions) + + # trejec = np.cumsum(np.concatenate([np.array([[0, 0, 0]]), velocity], axis=0), axis=0) + # r_rotations, r_pos = recover_ric_glo_np(r_velocity, velocity[:, [0, 2]]) + + # plt.plot(positions_b[:, 0, 0], positions_b[:, 0, 2], marker='*') + # plt.plot(ground_positions[:, 0, 0], ground_positions[:, 0, 2], marker='o', color='r') + # plt.plot(trejec[:, 0], trejec[:, 2], marker='^', color='g') + # plt.plot(r_pos[:, 0], r_pos[:, 2], marker='s', color='y') + # plt.xlabel('x') + # plt.ylabel('z') + # plt.axis('equal') + # plt.show() + + '''Root height''' + root_y = positions[:, 0, 1:2] + + '''Root rotation and linear velocity''' + # (seq_len-1, 1) rotation velocity along y-axis + # (seq_len-1, 2) linear velovity on xz plane + r_velocity = np.arcsin(r_velocity[:, 2:3]) + l_velocity = velocity[:, [0, 2]] + # print(r_velocity.shape, l_velocity.shape, root_y.shape) + root_data = np.concatenate([r_velocity, l_velocity, root_y[:-1]], axis=-1) + + '''Get Joint Rotation Representation''' + # (seq_len, (joints_num-1) *6) quaternion for skeleton joints + rot_data = cont_6d_params[:, 1:].reshape(len(cont_6d_params), -1) + + '''Get Joint Rotation Invariant Position Represention''' + # (seq_len, (joints_num-1)*3) local joint position + ric_data = positions[:, 1:].reshape(len(positions), -1) + + '''Get Joint Velocity Representation''' + # (seq_len-1, joints_num*3) + local_vel = qrot_np(np.repeat(r_rot[:-1, None], global_positions.shape[1], axis=1), + global_positions[1:] - global_positions[:-1]) + local_vel = local_vel.reshape(len(local_vel), -1) + + data = root_data + data = np.concatenate([data, ric_data[:-1]], axis=-1) + data = np.concatenate([data, rot_data[:-1]], axis=-1) + # print(dataset.shape, local_vel.shape) + data = np.concatenate([data, local_vel], axis=-1) + data = np.concatenate([data, feet_l, feet_r], axis=-1) + + return data + + +def process_file(positions, feet_thre): + # (seq_len, joints_num, 3) + # '''Down Sample''' + # positions = positions[::ds_num] + + '''Uniform Skeleton''' + positions = uniform_skeleton(positions, tgt_offsets) + + '''Put on Floor''' + floor_height = positions.min(axis=0).min(axis=0)[1] + positions[:, :, 1] -= floor_height + # print(floor_height) + + # plot_3d_motion("./positions_1.mp4", kinematic_chain, positions, 'title', 
fps=20) + + '''XZ at origin''' + root_pos_init = positions[0] + root_pose_init_xz = root_pos_init[0] * np.array([1, 0, 1]) + positions = positions - root_pose_init_xz + + # '''Move the first pose to origin ''' + # root_pos_init = positions[0] + # positions = positions - root_pos_init[0] + + '''All initially face Z+''' + r_hip, l_hip, sdr_r, sdr_l = face_joint_indx + across1 = root_pos_init[r_hip] - root_pos_init[l_hip] + across2 = root_pos_init[sdr_r] - root_pos_init[sdr_l] + across = across1 + across2 + across = across / np.sqrt((across ** 2).sum(axis=-1))[..., np.newaxis] + + # forward (3,), rotate around y-axis + forward_init = np.cross(np.array([[0, 1, 0]]), across, axis=-1) + # forward (3,) + forward_init = forward_init / np.sqrt((forward_init ** 2).sum(axis=-1))[..., np.newaxis] + + # print(forward_init) + + target = np.array([[0, 0, 1]]) + root_quat_init = qbetween_np(forward_init, target) + root_quat_init = np.ones(positions.shape[:-1] + (4,)) * root_quat_init + + positions_b = positions.copy() + + positions = qrot_np(root_quat_init, positions) + + # plot_3d_motion("./positions_2.mp4", kinematic_chain, positions, 'title', fps=20) + + '''New ground truth positions''' + global_positions = positions.copy() + + # plt.plot(positions_b[:, 0, 0], positions_b[:, 0, 2], marker='*') + # plt.plot(positions[:, 0, 0], positions[:, 0, 2], marker='o', color='r') + # plt.xlabel('x') + # plt.ylabel('z') + # plt.axis('equal') + # plt.show() + + """ Get Foot Contacts """ + + def foot_detect(positions, thres): + velfactor, heightfactor = np.array([thres, thres]), np.array([3.0, 2.0]) + + feet_l_x = (positions[1:, fid_l, 0] - positions[:-1, fid_l, 0]) ** 2 + feet_l_y = (positions[1:, fid_l, 1] - positions[:-1, fid_l, 1]) ** 2 + feet_l_z = (positions[1:, fid_l, 2] - positions[:-1, fid_l, 2]) ** 2 + # feet_l_h = positions[:-1,fid_l,1] + # feet_l = (((feet_l_x + feet_l_y + feet_l_z) < velfactor) & (feet_l_h < heightfactor)).astype(np.float64) + feet_l = ((feet_l_x + feet_l_y + feet_l_z) < velfactor).astype(np.float64) + + feet_r_x = (positions[1:, fid_r, 0] - positions[:-1, fid_r, 0]) ** 2 + feet_r_y = (positions[1:, fid_r, 1] - positions[:-1, fid_r, 1]) ** 2 + feet_r_z = (positions[1:, fid_r, 2] - positions[:-1, fid_r, 2]) ** 2 + # feet_r_h = positions[:-1,fid_r,1] + # feet_r = (((feet_r_x + feet_r_y + feet_r_z) < velfactor) & (feet_r_h < heightfactor)).astype(np.float64) + feet_r = (((feet_r_x + feet_r_y + feet_r_z) < velfactor)).astype(np.float64) + return feet_l, feet_r + # + feet_l, feet_r = foot_detect(positions, feet_thre) + # feet_l, feet_r = foot_detect(positions, 0.002) + + '''Quaternion and Cartesian representation''' + r_rot = None + + def get_rifke(positions): + '''Local pose''' + positions[..., 0] -= positions[:, 0:1, 0] + positions[..., 2] -= positions[:, 0:1, 2] + '''All pose face Z+''' + positions = qrot_np(np.repeat(r_rot[:, None], positions.shape[1], axis=1), positions) + return positions + + def get_quaternion(positions): + skel = Skeleton(n_raw_offsets, kinematic_chain, "cpu") + # (seq_len, joints_num, 4) + quat_params = skel.inverse_kinematics_np(positions, face_joint_indx, smooth_forward=False) + + '''Fix Quaternion Discontinuity''' + quat_params = qfix(quat_params) + # (seq_len, 4) + r_rot = quat_params[:, 0].copy() + # print(r_rot[0]) + '''Root Linear Velocity''' + # (seq_len - 1, 3) + velocity = (positions[1:, 0] - positions[:-1, 0]).copy() + # print(r_rot.shape, velocity.shape) + velocity = qrot_np(r_rot[1:], velocity) + '''Root Angular Velocity''' + # (seq_len - 1, 4) + 
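+ # Frame-to-frame root rotation q_t * q_{t-1}^{-1}: the quaternion that carries the previous root orientation to the current one.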
r_velocity = qmul_np(r_rot[1:], qinv_np(r_rot[:-1])) + quat_params[1:, 0] = r_velocity + # (seq_len, joints_num, 4) + return quat_params, r_velocity, velocity, r_rot + + def get_cont6d_params(positions): + skel = Skeleton(n_raw_offsets, kinematic_chain, "cpu") + # (seq_len, joints_num, 4) + quat_params = skel.inverse_kinematics_np(positions, face_joint_indx, smooth_forward=True) + + '''Quaternion to continuous 6D''' + cont_6d_params = quaternion_to_cont6d_np(quat_params) + # (seq_len, 4) + r_rot = quat_params[:, 0].copy() + # print(r_rot[0]) + '''Root Linear Velocity''' + # (seq_len - 1, 3) + velocity = (positions[1:, 0] - positions[:-1, 0]).copy() + # print(r_rot.shape, velocity.shape) + velocity = qrot_np(r_rot[1:], velocity) + '''Root Angular Velocity''' + # (seq_len - 1, 4) + r_velocity = qmul_np(r_rot[1:], qinv_np(r_rot[:-1])) + # (seq_len, joints_num, 4) + return cont_6d_params, r_velocity, velocity, r_rot + + cont_6d_params, r_velocity, velocity, r_rot = get_cont6d_params(positions) + positions = get_rifke(positions) + + # trejec = np.cumsum(np.concatenate([np.array([[0, 0, 0]]), velocity], axis=0), axis=0) + # r_rotations, r_pos = recover_ric_glo_np(r_velocity, velocity[:, [0, 2]]) + + # plt.plot(positions_b[:, 0, 0], positions_b[:, 0, 2], marker='*') + # plt.plot(ground_positions[:, 0, 0], ground_positions[:, 0, 2], marker='o', color='r') + # plt.plot(trejec[:, 0], trejec[:, 2], marker='^', color='g') + # plt.plot(r_pos[:, 0], r_pos[:, 2], marker='s', color='y') + # plt.xlabel('x') + # plt.ylabel('z') + # plt.axis('equal') + # plt.show() + + '''Root height''' + root_y = positions[:, 0, 1:2] + + '''Root rotation and linear velocity''' + # (seq_len-1, 1) rotation velocity along y-axis + # (seq_len-1, 2) linear velovity on xz plane + r_velocity = np.arcsin(r_velocity[:, 2:3]) + l_velocity = velocity[:, [0, 2]] + # print(r_velocity.shape, l_velocity.shape, root_y.shape) + root_data = np.concatenate([r_velocity, l_velocity, root_y[:-1]], axis=-1) + + '''Get Joint Rotation Representation''' + # (seq_len, (joints_num-1) *6) quaternion for skeleton joints + rot_data = cont_6d_params[:, 1:].reshape(len(cont_6d_params), -1) + + '''Get Joint Rotation Invariant Position Represention''' + # (seq_len, (joints_num-1)*3) local joint position + ric_data = positions[:, 1:].reshape(len(positions), -1) + + '''Get Joint Velocity Representation''' + # (seq_len-1, joints_num*3) + local_vel = qrot_np(np.repeat(r_rot[:-1, None], global_positions.shape[1], axis=1), + global_positions[1:] - global_positions[:-1]) + local_vel = local_vel.reshape(len(local_vel), -1) + + data = root_data + data = np.concatenate([data, ric_data[:-1]], axis=-1) + data = np.concatenate([data, rot_data[:-1]], axis=-1) + # print(dataset.shape, local_vel.shape) + data = np.concatenate([data, local_vel], axis=-1) + data = np.concatenate([data, feet_l, feet_r], axis=-1) + + return data, global_positions, positions, l_velocity + + +# Recover global angle and positions for rotation dataset +# root_rot_velocity (B, seq_len, 1) +# root_linear_velocity (B, seq_len, 2) +# root_y (B, seq_len, 1) +# ric_data (B, seq_len, (joint_num - 1)*3) +# rot_data (B, seq_len, (joint_num - 1)*6) +# local_velocity (B, seq_len, joint_num*3) +# foot contact (B, seq_len, 4) +def recover_root_rot_pos(data): + rot_vel = data[..., 0] + r_rot_ang = torch.zeros_like(rot_vel).to(data.device) + '''Get Y-axis rotation from rotation velocity''' + r_rot_ang[..., 1:] = rot_vel[..., :-1] + r_rot_ang = torch.cumsum(r_rot_ang, dim=-1) + + r_rot_quat = 
torch.zeros(data.shape[:-1] + (4,)).to(data.device) + r_rot_quat[..., 0] = torch.cos(r_rot_ang) + r_rot_quat[..., 2] = torch.sin(r_rot_ang) + + r_pos = torch.zeros(data.shape[:-1] + (3,)).to(data.device) + r_pos[..., 1:, [0, 2]] = data[..., :-1, 1:3] + '''Add Y-axis rotation to root position''' + r_pos = qrot(qinv(r_rot_quat), r_pos) + + r_pos = torch.cumsum(r_pos, dim=-2) + + r_pos[..., 1] = data[..., 3] + return r_rot_quat, r_pos + + +def recover_from_rot(data, joints_num, skeleton): + r_rot_quat, r_pos = recover_root_rot_pos(data) + + r_rot_cont6d = quaternion_to_cont6d(r_rot_quat) + + start_indx = 1 + 2 + 1 + (joints_num - 1) * 3 + end_indx = start_indx + (joints_num - 1) * 6 + cont6d_params = data[..., start_indx:end_indx] + # print(r_rot_cont6d.shape, cont6d_params.shape, r_pos.shape) + cont6d_params = torch.cat([r_rot_cont6d, cont6d_params], dim=-1) + cont6d_params = cont6d_params.view(-1, joints_num, 6) + + positions = skeleton.forward_kinematics_cont6d(cont6d_params, r_pos) + + return positions + +def recover_rot(data): + # dataset [bs, seqlen, 263/251] HumanML/KIT + joints_num = 22 if data.shape[-1] == 263 else 21 + r_rot_quat, r_pos = recover_root_rot_pos(data) + r_pos_pad = torch.cat([r_pos, torch.zeros_like(r_pos)], dim=-1).unsqueeze(-2) + r_rot_cont6d = quaternion_to_cont6d(r_rot_quat) + start_indx = 1 + 2 + 1 + (joints_num - 1) * 3 + end_indx = start_indx + (joints_num - 1) * 6 + cont6d_params = data[..., start_indx:end_indx] + cont6d_params = torch.cat([r_rot_cont6d, cont6d_params], dim=-1) + cont6d_params = cont6d_params.view(-1, joints_num, 6) + cont6d_params = torch.cat([cont6d_params, r_pos_pad], dim=-2) + return cont6d_params + + +def recover_from_ric(data, joints_num): + r_rot_quat, r_pos = recover_root_rot_pos(data) + positions = data[..., 4:(joints_num - 1) * 3 + 4] + positions = positions.view(positions.shape[:-1] + (-1, 3)) + + '''Add Y-axis rotation to local joints''' + positions = qrot(qinv(r_rot_quat[..., None, :]).expand(positions.shape[:-1] + (4,)), positions) + + '''Add root XZ to joints''' + positions[..., 0] += r_pos[..., 0:1] + positions[..., 2] += r_pos[..., 2:3] + + '''Concate root and joints''' + positions = torch.cat([r_pos.unsqueeze(-2), positions], dim=-2) + + return positions +''' +For Text2Motion Dataset +''' +''' +if __name__ == "__main__": + example_id = "000021" + # Lower legs + l_idx1, l_idx2 = 5, 8 + # Right/Left foot + fid_r, fid_l = [8, 11], [7, 10] + # Face direction, r_hip, l_hip, sdr_r, sdr_l + face_joint_indx = [2, 1, 17, 16] + # l_hip, r_hip + r_hip, l_hip = 2, 1 + joints_num = 22 + # ds_num = 8 + data_dir = '../dataset/pose_data_raw/joints/' + save_dir1 = '../dataset/pose_data_raw/new_joints/' + save_dir2 = '../dataset/pose_data_raw/new_joint_vecs/' + + n_raw_offsets = torch.from_numpy(t2m_raw_offsets) + kinematic_chain = t2m_kinematic_chain + + # Get offsets of target skeleton + example_data = np.load(os.path.join(data_dir, example_id + '.npy')) + example_data = example_data.reshape(len(example_data), -1, 3) + example_data = torch.from_numpy(example_data) + tgt_skel = Skeleton(n_raw_offsets, kinematic_chain, 'cpu') + # (joints_num, 3) + tgt_offsets = tgt_skel.get_offsets_joints(example_data[0]) + # print(tgt_offsets) + + source_list = os.listdir(data_dir) + frame_num = 0 + for source_file in tqdm(source_list): + source_data = np.load(os.path.join(data_dir, source_file))[:, :joints_num] + try: + dataset, ground_positions, positions, l_velocity = process_file(source_data, 0.002) + rec_ric_data = 
recover_from_ric(torch.from_numpy(dataset).unsqueeze(0).float(), joints_num) + np.save(pjoin(save_dir1, source_file), rec_ric_data.squeeze().numpy()) + np.save(pjoin(save_dir2, source_file), dataset) + frame_num += dataset.shape[0] + except Exception as e: + print(source_file) + print(e) + + print('Total clips: %d, Frames: %d, Duration: %fm' % + (len(source_list), frame_num, frame_num / 20 / 60)) +''' + +if __name__ == "__main__": + example_id = "03950_gt" + # Lower legs + l_idx1, l_idx2 = 17, 18 + # Right/Left foot + fid_r, fid_l = [14, 15], [19, 20] + # Face direction, r_hip, l_hip, sdr_r, sdr_l + face_joint_indx = [11, 16, 5, 8] + # l_hip, r_hip + r_hip, l_hip = 11, 16 + joints_num = 21 + # ds_num = 8 + data_dir = '../dataset/kit_mocap_dataset/joints/' + save_dir1 = '../dataset/kit_mocap_dataset/new_joints/' + save_dir2 = '../dataset/kit_mocap_dataset/new_joint_vecs/' + + n_raw_offsets = torch.from_numpy(kit_raw_offsets) + kinematic_chain = kit_kinematic_chain + + '''Get offsets of target skeleton''' + example_data = np.load(os.path.join(data_dir, example_id + '.npy')) + example_data = example_data.reshape(len(example_data), -1, 3) + example_data = torch.from_numpy(example_data) + tgt_skel = Skeleton(n_raw_offsets, kinematic_chain, 'cpu') + # (joints_num, 3) + tgt_offsets = tgt_skel.get_offsets_joints(example_data[0]) + # print(tgt_offsets) + + source_list = os.listdir(data_dir) + frame_num = 0 + '''Read source dataset''' + for source_file in tqdm(source_list): + source_data = np.load(os.path.join(data_dir, source_file))[:, :joints_num] + try: + name = ''.join(source_file[:-7].split('_')) + '.npy' + data, ground_positions, positions, l_velocity = process_file(source_data, 0.05) + rec_ric_data = recover_from_ric(torch.from_numpy(data).unsqueeze(0).float(), joints_num) + if np.isnan(rec_ric_data.numpy()).any(): + print(source_file) + continue + np.save(pjoin(save_dir1, name), rec_ric_data.squeeze().numpy()) + np.save(pjoin(save_dir2, name), data) + frame_num += data.shape[0] + except Exception as e: + print(source_file) + print(e) + + print('Total clips: %d, Frames: %d, Duration: %fm' % + (len(source_list), frame_num, frame_num / 12.5 / 60)) diff --git a/mGPT/data/humanml/utils/paramUtil.py b/mGPT/data/humanml/utils/paramUtil.py new file mode 100644 index 0000000..a9f1708 --- /dev/null +++ b/mGPT/data/humanml/utils/paramUtil.py @@ -0,0 +1,63 @@ +import numpy as np + +# Define a kinematic tree for the skeletal struture +kit_kinematic_chain = [[0, 11, 12, 13, 14, 15], [0, 16, 17, 18, 19, 20], [0, 1, 2, 3, 4], [3, 5, 6, 7], [3, 8, 9, 10]] + +kit_raw_offsets = np.array( + [ + [0, 0, 0], + [0, 1, 0], + [0, 1, 0], + [0, 1, 0], + [0, 1, 0], + [1, 0, 0], + [0, -1, 0], + [0, -1, 0], + [-1, 0, 0], + [0, -1, 0], + [0, -1, 0], + [1, 0, 0], + [0, -1, 0], + [0, -1, 0], + [0, 0, 1], + [0, 0, 1], + [-1, 0, 0], + [0, -1, 0], + [0, -1, 0], + [0, 0, 1], + [0, 0, 1] + ] +) + +t2m_raw_offsets = np.array([[0,0,0], + [1,0,0], + [-1,0,0], + [0,1,0], + [0,-1,0], + [0,-1,0], + [0,1,0], + [0,-1,0], + [0,-1,0], + [0,1,0], + [0,0,1], + [0,0,1], + [0,1,0], + [1,0,0], + [-1,0,0], + [0,0,1], + [0,-1,0], + [0,-1,0], + [0,-1,0], + [0,-1,0], + [0,-1,0], + [0,-1,0]]) + +t2m_kinematic_chain = [[0, 2, 5, 8, 11], [0, 1, 4, 7, 10], [0, 3, 6, 9, 12, 15], [9, 14, 17, 19, 21], [9, 13, 16, 18, 20]] +t2m_left_hand_chain = [[20, 22, 23, 24], [20, 34, 35, 36], [20, 25, 26, 27], [20, 31, 32, 33], [20, 28, 29, 30]] +t2m_right_hand_chain = [[21, 43, 44, 45], [21, 46, 47, 48], [21, 40, 41, 42], [21, 37, 38, 39], [21, 49, 50, 51]] + + 
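+# Illustrative sketch (assumed import paths): how these constants are consumed by the
+# Skeleton helper in common/skeleton.py. t2m_raw_offsets holds one unit offset direction
+# per joint (22 joints, tree rooted at joint 0, the pelvis); Skeleton scales them into
+# bone offsets taken from a reference pose before running IK/FK.
+#
+#     import torch
+#     from mGPT.data.humanml.common.skeleton import Skeleton
+#     from mGPT.data.humanml.utils.paramUtil import t2m_raw_offsets, t2m_kinematic_chain
+#
+#     positions = ...  # (seq_len, 22, 3) global joint positions of one HumanML3D clip
+#     skel = Skeleton(torch.from_numpy(t2m_raw_offsets), t2m_kinematic_chain, 'cpu')
+#     skel.get_offsets_joints(torch.from_numpy(positions[0]))       # bone offsets from the first frame
+#     quat = skel.inverse_kinematics_np(positions, [2, 1, 17, 16])  # face joints: r_hip, l_hip, sdr_r, sdr_l
+#     joints = skel.forward_kinematics_np(quat, positions[:, 0])    # back to (seq_len, 22, 3) joints
+#
+# The same 22 joints yield the 263-dim HumanML3D feature vector built in motion_process.py:
+# 4 root values + 21*3 local positions + 21*6 rotations + 22*3 velocities + 4 foot contacts = 263.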
+kit_tgt_skel_id = '03950' + +t2m_tgt_skel_id = '000021' + diff --git a/mGPT/data/humanml/utils/word_vectorizer.py b/mGPT/data/humanml/utils/word_vectorizer.py new file mode 100644 index 0000000..d272058 --- /dev/null +++ b/mGPT/data/humanml/utils/word_vectorizer.py @@ -0,0 +1,79 @@ +import numpy as np +import pickle +from os.path import join as pjoin + +POS_enumerator = { + 'VERB': 0, + 'NOUN': 1, + 'DET': 2, + 'ADP': 3, + 'NUM': 4, + 'AUX': 5, + 'PRON': 6, + 'ADJ': 7, + 'ADV': 8, + 'Loc_VIP': 9, + 'Body_VIP': 10, + 'Obj_VIP': 11, + 'Act_VIP': 12, + 'Desc_VIP': 13, + 'OTHER': 14, +} + +Loc_list = ('left', 'right', 'clockwise', 'counterclockwise', 'anticlockwise', 'forward', 'back', 'backward', + 'up', 'down', 'straight', 'curve') + +Body_list = ('arm', 'chin', 'foot', 'feet', 'face', 'hand', 'mouth', 'leg', 'waist', 'eye', 'knee', 'shoulder', 'thigh') + +Obj_List = ('stair', 'dumbbell', 'chair', 'window', 'floor', 'car', 'ball', 'handrail', 'baseball', 'basketball') + +Act_list = ('walk', 'run', 'swing', 'pick', 'bring', 'kick', 'put', 'squat', 'throw', 'hop', 'dance', 'jump', 'turn', + 'stumble', 'dance', 'stop', 'sit', 'lift', 'lower', 'raise', 'wash', 'stand', 'kneel', 'stroll', + 'rub', 'bend', 'balance', 'flap', 'jog', 'shuffle', 'lean', 'rotate', 'spin', 'spread', 'climb') + +Desc_list = ('slowly', 'carefully', 'fast', 'careful', 'slow', 'quickly', 'happy', 'angry', 'sad', 'happily', 'angrily', 'sadly') + +VIP_dict = { + 'Loc_VIP': Loc_list, + 'Body_VIP': Body_list, + 'Obj_VIP': Obj_List, + 'Act_VIP': Act_list, + 'Desc_VIP': Desc_list, +} + + +class WordVectorizer(object): + def __init__(self, meta_root, prefix): + vectors = np.load(pjoin(meta_root, '%s_data.npy'%prefix)) + words = pickle.load(open(pjoin(meta_root, '%s_words.pkl'%prefix), 'rb')) + word2idx = pickle.load(open(pjoin(meta_root, '%s_idx.pkl'%prefix), 'rb')) + self.word2vec = {w: vectors[word2idx[w]] for w in words} + + def _get_pos_ohot(self, pos): + pos_vec = np.zeros(len(POS_enumerator)) + if pos in POS_enumerator: + pos_vec[POS_enumerator[pos]] = 1 + else: + pos_vec[POS_enumerator['OTHER']] = 1 + return pos_vec + + def __len__(self): + return len(self.word2vec) + + def __getitem__(self, item): + word, pos = item.split('/') + if word in self.word2vec: + word_vec = self.word2vec[word] + vip_pos = None + for key, values in VIP_dict.items(): + if word in values: + vip_pos = key + break + if vip_pos is not None: + pos_vec = self._get_pos_ohot(vip_pos) + else: + pos_vec = self._get_pos_ohot(pos) + else: + word_vec = self.word2vec['unk'] + pos_vec = self._get_pos_ohot('OTHER') + return word_vec, pos_vec diff --git a/mGPT/data/tools/__init__.py b/mGPT/data/tools/__init__.py new file mode 100644 index 0000000..04a49ec --- /dev/null +++ b/mGPT/data/tools/__init__.py @@ -0,0 +1,2 @@ +from .tensors import lengths_to_mask +from .collate import collate_text_and_length, collate_pairs_and_text, collate_datastruct_and_text, collate_tensor_with_padding diff --git a/mGPT/data/tools/collate.py b/mGPT/data/tools/collate.py new file mode 100644 index 0000000..fec416c --- /dev/null +++ b/mGPT/data/tools/collate.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. 
+# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import List, Dict +from torch import Tensor + + +def collate_tensor_with_padding(batch: List[Tensor]) -> Tensor: + dims = batch[0].dim() + max_size = [max([b.size(i) for b in batch]) for i in range(dims)] + size = (len(batch),) + tuple(max_size) + canvas = batch[0].new_zeros(size=size) + for i, b in enumerate(batch): + sub_tensor = canvas[i] + for d in range(dims): + sub_tensor = sub_tensor.narrow(d, 0, b.size(d)) + sub_tensor.add_(b) + return canvas + + +def collate_datastruct_and_text(lst_elements: List) -> Dict: + collate_datastruct = lst_elements[0]["datastruct"].transforms.collate + + batch = { + # Collate with padding for the datastruct + "datastruct": collate_datastruct([x["datastruct"] for x in lst_elements]), + # Collate normally for the length + "length": [x["length"] for x in lst_elements], + # Collate the text + "text": [x["text"] for x in lst_elements]} + + # add keyid for example + otherkeys = [x for x in lst_elements[0].keys() if x not in batch] + for key in otherkeys: + batch[key] = [x[key] for x in lst_elements] + + return batch + +def collate_length_and_text(lst_elements: List) -> Dict: + + batch = { + "length_0": [x["length_0"] for x in lst_elements], + "length_1": [x["length_1"] for x in lst_elements], + "length_transition": [x["length_transition"] for x in lst_elements], + "length_1_with_transition": [x["length_1_with_transition"] for x in lst_elements], + "text_0": [x["text_0"] for x in lst_elements], + "text_1": [x["text_1"] for x in lst_elements] + } + + return batch + +def collate_pairs_and_text(lst_elements: List, ) -> Dict: + if 'features_0' not in lst_elements[0]: # test set + collate_datastruct = lst_elements[0]["datastruct"].transforms.collate + batch = {"datastruct": collate_datastruct([x["datastruct"] for x in lst_elements]), + "length_0": [x["length_0"] for x in lst_elements], + "length_1": [x["length_1"] for x in lst_elements], + "length_transition": [x["length_transition"] for x in lst_elements], + "length_1_with_transition": [x["length_1_with_transition"] for x in lst_elements], + "text_0": [x["text_0"] for x in lst_elements], + "text_1": [x["text_1"] for x in lst_elements] + } + + else: + batch = {"motion_feats_0": collate_tensor_with_padding([el["features_0"] for el in lst_elements]), + "motion_feats_1": collate_tensor_with_padding([el["features_1"] for el in lst_elements]), + "motion_feats_1_with_transition": collate_tensor_with_padding([el["features_1_with_transition"] for el in lst_elements]), + "length_0": [x["length_0"] for x in lst_elements], + "length_1": [x["length_1"] for x in lst_elements], + "length_transition": [x["length_transition"] for x in lst_elements], + "length_1_with_transition": [x["length_1_with_transition"] for x in lst_elements], + "text_0": [x["text_0"] for x in lst_elements], + "text_1": [x["text_1"] for x in lst_elements] + } + return batch + + +def collate_text_and_length(lst_elements: Dict) -> Dict: + batch = {"length": [x["length"] for x in lst_elements], + "text": [x["text"] for x in lst_elements]} + + # add keyid for example + otherkeys = [x for x in lst_elements[0].keys() if x not in batch and x != "datastruct"] + for key in otherkeys: + batch[key] = [x[key] for x in 
lst_elements] + return batch diff --git a/mGPT/data/tools/easyconvert.py b/mGPT/data/tools/easyconvert.py new file mode 100644 index 0000000..3649a93 --- /dev/null +++ b/mGPT/data/tools/easyconvert.py @@ -0,0 +1,72 @@ +from .geometry import * + +def nfeats_of(rottype): + if rottype in ["rotvec", "axisangle"]: + return 3 + elif rottype in ["rotquat", "quaternion"]: + return 4 + elif rottype in ["rot6d", "6drot", "rotation6d"]: + return 6 + elif rottype in ["rotmat"]: + return 9 + else: + return TypeError("This rotation type doesn't have features.") + + +def axis_angle_to(newtype, rotations): + if newtype in ["matrix"]: + rotations = axis_angle_to_matrix(rotations) + return rotations + elif newtype in ["rotmat"]: + rotations = axis_angle_to_matrix(rotations) + rotations = matrix_to("rotmat", rotations) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = axis_angle_to_matrix(rotations) + rotations = matrix_to("rot6d", rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = axis_angle_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + return rotations + else: + raise NotImplementedError + + +def matrix_to(newtype, rotations): + if newtype in ["matrix"]: + return rotations + if newtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 9)) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = matrix_to_rotation_6d(rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = matrix_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + rotations = matrix_to_axis_angle(rotations) + return rotations + else: + raise NotImplementedError + + +def to_matrix(oldtype, rotations): + if oldtype in ["matrix"]: + return rotations + if oldtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 3, 3)) + return rotations + elif oldtype in ["rot6d", "6drot", "rotation6d"]: + rotations = rotation_6d_to_matrix(rotations) + return rotations + elif oldtype in ["rotquat", "quaternion"]: + rotations = quaternion_to_matrix(rotations) + return rotations + elif oldtype in ["rotvec", "axisangle"]: + rotations = axis_angle_to_matrix(rotations) + return rotations + else: + raise NotImplementedError diff --git a/mGPT/data/tools/geometry.py b/mGPT/data/tools/geometry.py new file mode 100644 index 0000000..e6eafa2 --- /dev/null +++ b/mGPT/data/tools/geometry.py @@ -0,0 +1,566 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. +# Check PYTORCH3D_LICENCE before use + +import functools +from typing import Optional + +import torch +import torch.nn.functional as F + + +""" +The transformation matrices returned from the functions in this file assume +the points on which the transformation will be applied are column vectors. +i.e. the R matrix is structured as + + R = [ + [Rxx, Rxy, Rxz], + [Ryx, Ryy, Ryz], + [Rzx, Rzy, Rzz], + ] # (3, 3) + +This matrix can be applied to column vectors by post multiplication +by the points e.g. + + points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point + transformed_points = R * points + +To apply the same matrix to points which are row vectors, the R matrix +can be transposed and pre multiplied by the points: + +e.g. 
+ points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point + transformed_points = points * R.transpose(1, 0) +""" + + +# Added +def matrix_of_angles(cos, sin, inv=False, dim=2): + assert dim in [2, 3] + sin = -sin if inv else sin + if dim == 2: + row1 = torch.stack((cos, -sin), axis=-1) + row2 = torch.stack((sin, cos), axis=-1) + return torch.stack((row1, row2), axis=-2) + elif dim == 3: + row1 = torch.stack((cos, -sin, 0*cos), axis=-1) + row2 = torch.stack((sin, cos, 0*cos), axis=-1) + row3 = torch.stack((0*sin, 0*cos, 1+0*cos), axis=-1) + return torch.stack((row1, row2, row3),axis=-2) + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def _copysign(a, b): + """ + Return a tensor where each element has the absolute value taken from the, + corresponding element of a, with sign taken from the corresponding + element of b. This is like the standard copysign floating-point operation, + but is not careful about negative 0 and NaN. + + Args: + a: source tensor. + b: tensor whose signs will be used, of the same shape as a. + + Returns: + Tensor of the same shape as a with the signs of b. + """ + signs_differ = (a < 0) != (b < 0) + return torch.where(signs_differ, -a, a) + + +def _sqrt_positive_part(x): + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + + +def matrix_to_quaternion(matrix): + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + m00 = matrix[..., 0, 0] + m11 = matrix[..., 1, 1] + m22 = matrix[..., 2, 2] + o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22) + x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22) + y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22) + z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22) + o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2]) + o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0]) + o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1]) + return torch.stack((o0, o1, o2, o3), -1) + + +def _axis_angle_rotation(axis: str, angle): + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". + angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). 
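+ Example: for axis "Z" and angle pi/2, the matrix is approximately [[0, -1, 0], [1, 0, 0], [0, 0, 1]].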
+ """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + if axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + if axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + + +def euler_angles_to_matrix(euler_angles, convention: str): + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = map(_axis_angle_rotation, convention, torch.unbind(euler_angles, -1)) + return functools.reduce(torch.matmul, matrices) + + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +): + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). + """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str): + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + + +def matrix_to_euler_angles(matrix, convention: str): + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). 
+ """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def random_quaternions( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random quaternions representing rotations, + i.e. versors with nonnegative real part. + + Args: + n: Number of quaternions in a batch to return. + dtype: Type to return. + device: Desired device of returned tensor. Default: + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Quaternions as tensor of shape (N, 4). + """ + o = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad) + s = (o * o).sum(1) + o = o / _copysign(torch.sqrt(s), o[:, 0])[:, None] + return o + + +def random_rotations( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random rotations as 3x3 rotation matrices. + + Args: + n: Number of rotation matrices in a batch to return. + dtype: Type to return. + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Rotation matrices as tensor of shape (n, 3, 3). + """ + quaternions = random_quaternions( + n, dtype=dtype, device=device, requires_grad=requires_grad + ) + return quaternion_to_matrix(quaternions) + + +def random_rotation( + dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate a single random 3x3 rotation matrix. + + Args: + dtype: Type to return + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type + requires_grad: Whether the resulting tensor should have the gradient + flag set + + Returns: + Rotation matrix as tensor of shape (3, 3). + """ + return random_rotations(1, dtype, device, requires_grad)[0] + + +def standardize_quaternion(quaternions): + """ + Convert a unit quaternion to a standard form: one in which the real + part is non negative. + + Args: + quaternions: Quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Standardized quaternions as tensor of shape (..., 4). + """ + return torch.where(quaternions[..., 0:1] < 0, -quaternions, quaternions) + + +def quaternion_raw_multiply(a, b): + """ + Multiply two quaternions. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. 
+ + Returns: + The product of a and b, a tensor of quaternions shape (..., 4). + """ + aw, ax, ay, az = torch.unbind(a, -1) + bw, bx, by, bz = torch.unbind(b, -1) + ow = aw * bw - ax * bx - ay * by - az * bz + ox = aw * bx + ax * bw + ay * bz - az * by + oy = aw * by - ax * bz + ay * bw + az * bx + oz = aw * bz + ax * by - ay * bx + az * bw + return torch.stack((ow, ox, oy, oz), -1) + + +def quaternion_multiply(a, b): + """ + Multiply two quaternions representing rotations, returning the quaternion + representing their composition, i.e. the versor with nonnegative real part. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions of shape (..., 4). + """ + ab = quaternion_raw_multiply(a, b) + return standardize_quaternion(ab) + + +def quaternion_invert(quaternion): + """ + Given a quaternion representing rotation, get the quaternion representing + its inverse. + + Args: + quaternion: Quaternions as tensor of shape (..., 4), with real part + first, which must be versors (unit quaternions). + + Returns: + The inverse, a tensor of quaternions of shape (..., 4). + """ + + return quaternion * quaternion.new_tensor([1, -1, -1, -1]) + + +def quaternion_apply(quaternion, point): + """ + Apply the rotation given by a quaternion to a 3D point. + Usual torch rules for broadcasting apply. + + Args: + quaternion: Tensor of quaternions, real part first, of shape (..., 4). + point: Tensor of 3D points of shape (..., 3). + + Returns: + Tensor of rotated points of shape (..., 3). + """ + if point.size(-1) != 3: + raise ValueError(f"Points are not in 3D, f{point.shape}.") + real_parts = point.new_zeros(point.shape[:-1] + (1,)) + point_as_quaternion = torch.cat((real_parts, point), -1) + out = quaternion_raw_multiply( + quaternion_raw_multiply(quaternion, point_as_quaternion), + quaternion_invert(quaternion), + ) + return out[..., 1:] + + +def axis_angle_to_matrix(axis_angle): + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def matrix_to_axis_angle(matrix): + """ + Convert rotations given as rotation matrices to axis/angle. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + + +def axis_angle_to_quaternion(axis_angle): + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). 
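# --- Editor's note: usage sketch, not part of the patch. It chains the
# --- axis-angle and 6D conversions from this file; rotation_6d_to_matrix /
# --- matrix_to_rotation_6d are defined a little further below. Values are
# --- illustrative only.
import torch

aa = 0.5 * torch.randn(16, 3)            # (16, 3) axis-angle vectors, norm = angle in radians
q = axis_angle_to_quaternion(aa)         # (16, 4)
R = axis_angle_to_matrix(aa)             # (16, 3, 3), same rotations as q
d6 = matrix_to_rotation_6d(R)            # (16, 6) continuous 6D representation
R_back = rotation_6d_to_matrix(d6)       # (16, 3, 3), equals R up to float error
aa_back = matrix_to_axis_angle(R_back)   # (16, 3)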
+ """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = 0.5 * angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def quaternion_to_axis_angle(quaternions): + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. + Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) diff --git a/mGPT/data/tools/tensors.py b/mGPT/data/tools/tensors.py new file mode 100644 index 0000000..6bcc051 --- /dev/null +++ b/mGPT/data/tools/tensors.py @@ -0,0 +1,26 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. 
+# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import List, Dict +import torch +from torch import Tensor + + +def lengths_to_mask(lengths: List[int], device: torch.device) -> Tensor: + lengths = torch.tensor(lengths, device=device) + max_len = max(lengths) + mask = torch.arange(max_len, device=device).expand(len(lengths), max_len) < lengths.unsqueeze(1) + return mask diff --git a/mGPT/data/transforms/__init__.py b/mGPT/data/transforms/__init__.py new file mode 100644 index 0000000..394dfc5 --- /dev/null +++ b/mGPT/data/transforms/__init__.py @@ -0,0 +1,15 @@ +from .base import Transform +from .smpl import SMPLTransform +from .xyz import XYZTransform + +# rots2rfeats +from .rots2rfeats import Rots2Rfeats +from .rots2rfeats import Globalvelandy + +# rots2joints +from .rots2joints import Rots2Joints +from .rots2joints import SMPLH, SMPLX + +# joints2jfeats +from .joints2jfeats import Joints2Jfeats +from .joints2jfeats import Rifke diff --git a/mGPT/data/transforms/base.py b/mGPT/data/transforms/base.py new file mode 100644 index 0000000..1c60a60 --- /dev/null +++ b/mGPT/data/transforms/base.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
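# --- Editor's note: usage sketch, not part of the patch. lengths_to_mask
# --- builds the boolean padding mask used for variable-length motion batches.
import torch

mask = lengths_to_mask([3, 5, 2], device=torch.device("cpu"))
# tensor([[ True,  True,  True, False, False],
#         [ True,  True,  True,  True,  True],
#         [ True,  True, False, False, False]])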
+# +# Contact: ps-license@tuebingen.mpg.de + +from dataclasses import dataclass, fields + + +class Transform: + + def collate(self, lst_datastruct): + from ..tools import collate_tensor_with_padding + example = lst_datastruct[0] + + def collate_or_none(key): + if example[key] is None: + return None + key_lst = [x[key] for x in lst_datastruct] + return collate_tensor_with_padding(key_lst) + + kwargs = {key: collate_or_none(key) for key in example.datakeys} + + return self.Datastruct(**kwargs) + + +# Inspired from SMPLX library +# need to define "datakeys" and transforms +@dataclass +class Datastruct: + + def __getitem__(self, key): + return getattr(self, key) + + def __setitem__(self, key, value): + self.__dict__[key] = value + + def get(self, key, default=None): + return getattr(self, key, default) + + def __iter__(self): + return self.keys() + + def keys(self): + keys = [t.name for t in fields(self)] + return iter(keys) + + def values(self): + values = [getattr(self, t.name) for t in fields(self)] + return iter(values) + + def items(self): + data = [(t.name, getattr(self, t.name)) for t in fields(self)] + return iter(data) + + def to(self, *args, **kwargs): + for key in self.datakeys: + if self[key] is not None: + self[key] = self[key].to(*args, **kwargs) + return self + + @property + def device(self): + return self[self.datakeys[0]].device + + def detach(self): + + def detach_or_none(tensor): + if tensor is not None: + return tensor.detach() + return None + + kwargs = {key: detach_or_none(self[key]) for key in self.datakeys} + return self.transforms.Datastruct(**kwargs) diff --git a/mGPT/data/transforms/identity.py b/mGPT/data/transforms/identity.py new file mode 100644 index 0000000..ec12e7f --- /dev/null +++ b/mGPT/data/transforms/identity.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
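# --- Editor's note: illustrative sketch, not part of the patch. A concrete
# --- Datastruct is declared as a dataclass whose __post_init__ sets
# --- `datakeys`, which drives collate / to / detach in the base class above.
# --- The class name here is hypothetical; IdentityDatastruct below is the
# --- real minimal example.
from dataclasses import dataclass
from typing import Optional
from torch import Tensor

@dataclass
class FeatsDatastruct(Datastruct):
    transforms: Transform
    features: Optional[Tensor] = None

    def __post_init__(self):
        self.datakeys = ["features"]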
+# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional +from torch import Tensor + +from .base import Datastruct, dataclass, Transform + + +class IdentityTransform(Transform): + def __init__(self, **kwargs): + return + + def Datastruct(self, **kwargs): + return IdentityDatastruct(**kwargs) + + def __repr__(self): + return "IdentityTransform()" + + +@dataclass +class IdentityDatastruct(Datastruct): + transforms: IdentityTransform + + features: Optional[Tensor] = None + + def __post_init__(self): + self.datakeys = ["features"] + + def __len__(self): + return len(self.rfeats) diff --git a/mGPT/data/transforms/joints2jfeats/__init__.py b/mGPT/data/transforms/joints2jfeats/__init__.py new file mode 100644 index 0000000..0a924e8 --- /dev/null +++ b/mGPT/data/transforms/joints2jfeats/__init__.py @@ -0,0 +1,2 @@ +from .base import Joints2Jfeats +from .rifke import Rifke diff --git a/mGPT/data/transforms/joints2jfeats/base.py b/mGPT/data/transforms/joints2jfeats/base.py new file mode 100644 index 0000000..03d6f5f --- /dev/null +++ b/mGPT/data/transforms/joints2jfeats/base.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional + +import torch +from torch import Tensor, nn +from pathlib import Path +import os + + +class Joints2Jfeats(nn.Module): + + def __init__(self, + path: Optional[str] = None, + normalization: bool = False, + eps: float = 1e-12, + **kwargs) -> None: + if normalization and path is None: + raise TypeError( + "You should provide a path if normalization is on.") + + super().__init__() + self.normalization = normalization + self.eps = eps + # workaround for cluster local/sync + if path is not None: + # rel_p = path.split('/') + # rel_p = rel_p[rel_p.index('deps'):] + # rel_p = '/'.join(rel_p) + pass + if normalization: + mean_path = Path(path) / "jfeats_mean.pt" + std_path = Path(path) / "jfeats_std.pt" + self.register_buffer('mean', torch.load(mean_path)) + self.register_buffer('std', torch.load(std_path)) + + def normalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = (features - self.mean) / (self.std + self.eps) + return features + + def unnormalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = features * self.std + self.mean + return features diff --git a/mGPT/data/transforms/joints2jfeats/rifke.py b/mGPT/data/transforms/joints2jfeats/rifke.py new file mode 100644 index 0000000..c6f2a8e --- /dev/null +++ b/mGPT/data/transforms/joints2jfeats/rifke.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. 
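# --- Editor's note: usage sketch, not part of the patch. With
# --- normalization=True the module expects jfeats_mean.pt / jfeats_std.pt
# --- under `path`; the directory below is a hypothetical placeholder and the
# --- tensor shapes are illustrative.
import torch

j2j = Joints2Jfeats(path="deps/transforms/joints2jfeats", normalization=True)
feats = j2j.normalize(torch.randn(2, 60, 64))   # (batch, frames, nfeats)
raw = j2j.unnormalize(feats)                    # inverse of normalize (up to eps)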
+# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional + +import torch +from einops import rearrange +from torch import Tensor +from .tools import get_forward_direction, get_floor, gaussian_filter1d # noqa +from mGPT.utils.geometry_tools import matrix_of_angles +from .base import Joints2Jfeats + + +class Rifke(Joints2Jfeats): + + def __init__(self, + jointstype: str = "mmm", + path: Optional[str] = None, + normalization: bool = False, + forward_filter: bool = False, + **kwargs) -> None: + # + # if jointstype != "mmm": + # print("This function assume that the root is the first index") + # raise NotImplementedError("This jointstype is not implemented.") + + super().__init__(path=path, normalization=normalization) + self.jointstype = jointstype + self.forward_filter = forward_filter + + def forward(self, joints: Tensor) -> Tensor: + # Joints to rotation invariant poses (Holden et. al.) + # Similar function than fke2rifke in Language2Pose repository + # Adapted to pytorch + # Put the origin center of the root joint instead of the ground projection + poses = joints.clone() + poses[..., 1] -= get_floor(poses, jointstype=self.jointstype) + + translation = poses[..., 0, :].clone() + # Let the root have the Y translation --> gravity axis + root_y = translation[..., 1] + + # Trajectory => Translation without gravity axis (Y) + trajectory = translation[..., [0, 2]] + + # Delete the root joints of the poses + poses = poses[..., 1:, :] + + # Remove the trajectory of the poses + poses[..., [0, 2]] -= trajectory[..., None, :] + + # Compute the trajectory + vel_trajectory = torch.diff(trajectory, dim=-2) + # 0 for the first one => keep the dimentionality + vel_trajectory = torch.cat( + (0 * vel_trajectory[..., [0], :], vel_trajectory), dim=-2) + + # Compute the forward direction + forward = get_forward_direction(poses, jointstype=self.jointstype) + if self.forward_filter: + # Smoothing to remove high frequencies + forward = gaussian_filter1d(forward, 2) + # normalize again to get real directions + forward = torch.nn.functional.normalize(forward, dim=-1) + # changed this also for New pytorch + angles = torch.atan2(*(forward.transpose(0, -1))).transpose(0, -1) + vel_angles = torch.diff(angles, dim=-1) + # 0 for the first one => keep the dimentionality + vel_angles = torch.cat((0 * vel_angles[..., [0]], vel_angles), dim=-1) + + # Construct the inverse rotation matrix + sin, cos = forward[..., 0], forward[..., 1] + rotations_inv = matrix_of_angles(cos, sin, inv=True) + + # Rotate the poses + poses_local = torch.einsum("...lj,...jk->...lk", poses[..., [0, 2]], + rotations_inv) + poses_local = torch.stack( + (poses_local[..., 0], poses[..., 1], poses_local[..., 1]), axis=-1) + + # stack the xyz joints into feature vectors + poses_features = rearrange(poses_local, + "... joints xyz -> ... 
(joints xyz)") + + # Rotate the vel_trajectory + vel_trajectory_local = torch.einsum("...j,...jk->...k", vel_trajectory, + rotations_inv) + + # Stack things together + features = torch.cat((root_y[..., None], poses_features, + vel_angles[..., None], vel_trajectory_local), -1) + + # Normalize if needed + features = self.normalize(features) + return features + + def inverse(self, features: Tensor) -> Tensor: + features = self.unnormalize(features) + root_y, poses_features, vel_angles, vel_trajectory_local = self.extract( + features) + + # already have the good dimensionality + angles = torch.cumsum(vel_angles, dim=-1) + # First frame should be 0, but if infered it is better to ensure it + angles = angles - angles[..., [0]] + + cos, sin = torch.cos(angles), torch.sin(angles) + rotations = matrix_of_angles(cos, sin, inv=False) + + # Get back the poses + poses_local = rearrange(poses_features, + "... (joints xyz) -> ... joints xyz", + xyz=3) + + # Rotate the poses + poses = torch.einsum("...lj,...jk->...lk", poses_local[..., [0, 2]], + rotations) + poses = torch.stack( + (poses[..., 0], poses_local[..., 1], poses[..., 1]), axis=-1) + + # Rotate the vel_trajectory + vel_trajectory = torch.einsum("...j,...jk->...k", vel_trajectory_local, + rotations) + # Integrate the trajectory + # Already have the good dimensionality + trajectory = torch.cumsum(vel_trajectory, dim=-2) + # First frame should be 0, but if infered it is better to ensure it + trajectory = trajectory - trajectory[..., [0], :] + + # Add the root joints (which is still zero) + poses = torch.cat((0 * poses[..., [0], :], poses), -2) + + # put back the root joint y + poses[..., 0, 1] = root_y + + # Add the trajectory globally + poses[..., [0, 2]] += trajectory[..., None, :] + return poses + + def extract(self, features: Tensor): + root_y = features[..., 0] + poses_features = features[..., 1:-3] + vel_angles = features[..., -3] + vel_trajectory_local = features[..., -2:] + + return root_y, poses_features, vel_angles, vel_trajectory_local diff --git a/mGPT/data/transforms/joints2jfeats/tools.py b/mGPT/data/transforms/joints2jfeats/tools.py new file mode 100644 index 0000000..734e109 --- /dev/null +++ b/mGPT/data/transforms/joints2jfeats/tools.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import torch +import torch.nn.functional as F + +from mGPT.utils.joints import mmm_joints + +# Get the indexes of particular body part SMPLH case +# Feet +# LM, RM = smplh_joints.index("left_ankle"), smplh_joints.index("right_ankle") +# LF, RF = smplh_joints.index("left_foot"), smplh_joints.index("right_foot") +# # Shoulders +# LS, RS = smplh_joints.index("left_shoulder"), smplh_joints.index("right_shoulder") +# # Hips +# LH, RH = smplh_joints.index("left_hip"), smplh_joints.index("right_hip") + +# Get the indexes of particular body part +# Feet +LM, RM = mmm_joints.index("LMrot"), mmm_joints.index("RMrot") +LF, RF = mmm_joints.index("LF"), mmm_joints.index("RF") +# Shoulders +LS, RS = mmm_joints.index("LS"), mmm_joints.index("RS") +# Hips +LH, RH = mmm_joints.index("LH"), mmm_joints.index("RH") + + +def get_forward_direction(poses, jointstype="mmm"): + # assert jointstype == 'mmm' + across = poses[..., RH, :] - poses[..., LH, :] + poses[..., RS, :] - poses[ + ..., LS, :] + forward = torch.stack((-across[..., 2], across[..., 0]), axis=-1) + forward = torch.nn.functional.normalize(forward, dim=-1) + return forward + + +def get_floor(poses, jointstype="mmm"): + # assert jointstype == 'mmm' + ndim = len(poses.shape) + foot_heights = poses[..., (LM, LF, RM, RF), 1].min(-1).values + floor_height = softmin(foot_heights, softness=0.5, dim=-1) + # changed this thing Mathis version 1.11 pytorch + return floor_height[(ndim - 2) * [None]].transpose(0, -1) + + +def softmax(x, softness=1.0, dim=None): + maxi, mini = x.max(dim=dim).values, x.min(dim=dim).values + return maxi + torch.log(softness + torch.exp(mini - maxi)) + + +def softmin(x, softness=1.0, dim=0): + return -softmax(-x, softness=softness, dim=dim) + + +def gaussian_filter1d(_inputs, sigma, truncate=4.0): + # Code adapted/mixed from scipy library into pytorch + # https://github.com/scipy/scipy/blob/47bb6febaa10658c72962b9615d5d5aa2513fa3a/scipy/ndimage/filters.py#L211 + # and gaussian kernel + # https://github.com/scipy/scipy/blob/47bb6febaa10658c72962b9615d5d5aa2513fa3a/scipy/ndimage/filters.py#L179 + # Correspond to mode="nearest" and order = 0 + # But works batched + if len(_inputs.shape) == 2: + inputs = _inputs[None] + else: + inputs = _inputs + + sd = float(sigma) + radius = int(truncate * sd + 0.5) + sigma2 = sigma * sigma + x = torch.arange(-radius, + radius + 1, + device=inputs.device, + dtype=inputs.dtype) + phi_x = torch.exp(-0.5 / sigma2 * x**2) + phi_x = phi_x / phi_x.sum() + + # Conv1d weights + groups = inputs.shape[-1] + weights = torch.tile(phi_x, (groups, 1, 1)) + inputs = inputs.transpose(-1, -2) + outputs = F.conv1d(inputs, weights, padding="same", + groups=groups).transpose(-1, -2) + + return outputs.reshape(_inputs.shape) diff --git a/mGPT/data/transforms/joints2rots/config.py b/mGPT/data/transforms/joints2rots/config.py new file mode 100644 index 0000000..9014bef --- /dev/null +++ b/mGPT/data/transforms/joints2rots/config.py @@ -0,0 +1,119 @@ +import numpy as np +from mGPT.utils.joints import mmm_joints, smplh2mmm_indexes + +# Map joints Name to SMPL joints idx +JOINT_MAP = { + 'MidHip': 0, + 'LHip': 1, + 'LKnee': 4, + 'LAnkle': 7, + 'LFoot': 10, + 'RHip': 2, + 'RKnee': 5, + 'RAnkle': 8, + 'RFoot': 11, + 'LShoulder': 16, + 'LElbow': 18, + 'LWrist': 20, + 'LHand': 22, + 'RShoulder': 17, + 'RElbow': 19, + 'RWrist': 21, + 'RHand': 23, + 'spine1': 3, + 'spine2': 6, + 'spine3': 9, + 'Neck': 12, + 'Head': 15, + 'LCollar': 13, + 'Rcollar': 14, + 'Nose': 24, + 
'REye': 26, + 'LEye': 26, + 'REar': 27, + 'LEar': 28, + 'LHeel': 31, + 'RHeel': 34, + 'OP RShoulder': 17, + 'OP LShoulder': 16, + 'OP RHip': 2, + 'OP LHip': 1, + 'OP Neck': 12, +} + +mmm2smpl_correspondence = { + "root": "MidHip", + "BP": "spine1", + "BT": "spine3", + "BLN": "Neck", + "BUN": "Head", + "LS": "LShoulder", + "LE": "LElbow", + "LW": "LWrist", + "RS": "RShoulder", + "RE": "RElbow", + "RW": "RWrist", + "LH": "LHip", + "LK": "LKnee", + "LA": "LAnkle", + "LMrot": "LHeel", + "LF": "LFoot", + "RH": "RHip", + "RK": "RKnee", + "RA": "RAnkle", + "RMrot": "RHeel", + "RF": "RFoot" +} + +full_smpl_idx = range(24) +key_smpl_idx = [0, 1, 4, 7, 2, 5, 8, 17, 19, 21, 16, 18, 20] + +AMASS_JOINT_MAP = { + 'MidHip': 0, + 'LHip': 1, + 'LKnee': 4, + 'LAnkle': 7, + 'LFoot': 10, + 'RHip': 2, + 'RKnee': 5, + 'RAnkle': 8, + 'RFoot': 11, + 'LShoulder': 16, + 'LElbow': 18, + 'LWrist': 20, + 'RShoulder': 17, + 'RElbow': 19, + 'RWrist': 21, + 'spine1': 3, + 'spine2': 6, + 'spine3': 9, + 'Neck': 12, + 'Head': 15, + 'LCollar': 13, + 'Rcollar': 14, +} +amass_idx = range(22) +amass_smpl_idx = range(22) + +# cal mmm in smpl index +smpl2mmm_correspondence = { + val: key + for key, val in mmm2smpl_correspondence.items() +} +smpl2mmm_indexes = [JOINT_MAP[mmm2smpl_correspondence[x]] for x in mmm_joints] + +# cal mmm joints map +MMM_JOINT_MAP = { + val: JOINT_MAP[val] + for key, val in mmm2smpl_correspondence.items() +} + +# mmm_idx = range(21) +# mmm_smpl_dix = smpl2mmm_indexes +# mmm_smpl_dix = smplh2mmm_indexes +# todo - configable +SMPL_MODEL_DIR = "/apdcephfs/share_1227775/shingxchen/AIMotion/TMOSTData/deps/smpl_models/" +GMM_MODEL_DIR = "/apdcephfs/share_1227775/shingxchen/AIMotion/TMOSTData/deps/smpl_models/" +SMPL_MEAN_FILE = "/apdcephfs/share_1227775/shingxchen/AIMotion/TMOSTData/deps/smpl_models/neutral_smpl_mean_params.h5" +# for collsion +Part_Seg_DIR = "/apdcephfs/share_1227775/shingxchen/AIMotion/TMOSTData/deps/smpl_models/smplx_parts_segm.pkl" diff --git a/mGPT/data/transforms/joints2rots/customloss.py b/mGPT/data/transforms/joints2rots/customloss.py new file mode 100644 index 0000000..2c3c3a5 --- /dev/null +++ b/mGPT/data/transforms/joints2rots/customloss.py @@ -0,0 +1,217 @@ +import torch +import torch.nn.functional as F +import config + +# Guassian +def gmof(x, sigma): + """ + Geman-McClure error function + """ + x_squared = x ** 2 + sigma_squared = sigma ** 2 + return (sigma_squared * x_squared) / (sigma_squared + x_squared) + +# angle prior +def angle_prior(pose): + """ + Angle prior that penalizes unnatural bending of the knees and elbows + """ + # We subtract 3 because pose does not include the global rotation of the model + return torch.exp( + pose[:, [55 - 3, 58 - 3, 12 - 3, 15 - 3]] * torch.tensor([1., -1., -1, -1.], device=pose.device)) ** 2 + + +def perspective_projection(points, rotation, translation, + focal_length, camera_center): + """ + This function computes the perspective projection of a set of points. + Input: + points (bs, N, 3): 3D points + rotation (bs, 3, 3): Camera rotation + translation (bs, 3): Camera translation + focal_length (bs,) or scalar: Focal length + camera_center (bs, 2): Camera center + """ + batch_size = points.shape[0] + K = torch.zeros([batch_size, 3, 3], device=points.device) + K[:, 0, 0] = focal_length + K[:, 1, 1] = focal_length + K[:, 2, 2] = 1. 
+ K[:, :-1, -1] = camera_center + + # Transform points + points = torch.einsum('bij,bkj->bki', rotation, points) + points = points + translation.unsqueeze(1) + + # Apply perspective distortion + projected_points = points / points[:, :, -1].unsqueeze(-1) + + # Apply camera intrinsics + projected_points = torch.einsum('bij,bkj->bki', K, projected_points) + + return projected_points[:, :, :-1] + + +def body_fitting_loss(body_pose, betas, model_joints, camera_t, camera_center, + joints_2d, joints_conf, pose_prior, + focal_length=5000, sigma=100, pose_prior_weight=4.78, + shape_prior_weight=5, angle_prior_weight=15.2, + output='sum'): + """ + Loss function for body fitting + """ + batch_size = body_pose.shape[0] + rotation = torch.eye(3, device=body_pose.device).unsqueeze(0).expand(batch_size, -1, -1) + + projected_joints = perspective_projection(model_joints, rotation, camera_t, + focal_length, camera_center) + + # Weighted robust reprojection error + reprojection_error = gmof(projected_joints - joints_2d, sigma) + reprojection_loss = (joints_conf ** 2) * reprojection_error.sum(dim=-1) + + # Pose prior loss + pose_prior_loss = (pose_prior_weight ** 2) * pose_prior(body_pose, betas) + + # Angle prior for knees and elbows + angle_prior_loss = (angle_prior_weight ** 2) * angle_prior(body_pose).sum(dim=-1) + + # Regularizer to prevent betas from taking large values + shape_prior_loss = (shape_prior_weight ** 2) * (betas ** 2).sum(dim=-1) + + total_loss = reprojection_loss.sum(dim=-1) + pose_prior_loss + angle_prior_loss + shape_prior_loss + + if output == 'sum': + return total_loss.sum() + elif output == 'reprojection': + return reprojection_loss + + +# --- get camera fitting loss ----- +def camera_fitting_loss(model_joints, camera_t, camera_t_est, camera_center, + joints_2d, joints_conf, + focal_length=5000, depth_loss_weight=100): + """ + Loss function for camera optimization. 
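# --- Editor's note: usage sketch, not part of the patch. It projects a batch
# --- of 3D joints with perspective_projection defined above; all numbers are
# --- illustrative (identity camera rotation, 5000 px focal length).
import torch

pts = torch.randn(2, 24, 3) + torch.tensor([0., 0., 5.])   # keep points in front of the camera
rot = torch.eye(3).expand(2, 3, 3)
trans = torch.zeros(2, 3)
center = torch.full((2, 2), 112.)                           # principal point in pixels
uv = perspective_projection(pts, rot, trans, focal_length=5000., camera_center=center)  # (2, 24, 2)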
+ """ + # Project model joints + batch_size = model_joints.shape[0] + rotation = torch.eye(3, device=model_joints.device).unsqueeze(0).expand(batch_size, -1, -1) + projected_joints = perspective_projection(model_joints, rotation, camera_t, + focal_length, camera_center) + + # get the indexed four + op_joints = ['OP RHip', 'OP LHip', 'OP RShoulder', 'OP LShoulder'] + op_joints_ind = [config.JOINT_MAP[joint] for joint in op_joints] + gt_joints = ['RHip', 'LHip', 'RShoulder', 'LShoulder'] + gt_joints_ind = [config.JOINT_MAP[joint] for joint in gt_joints] + + reprojection_error_op = (joints_2d[:, op_joints_ind] - + projected_joints[:, op_joints_ind]) ** 2 + reprojection_error_gt = (joints_2d[:, gt_joints_ind] - + projected_joints[:, gt_joints_ind]) ** 2 + + # Check if for each example in the batch all 4 OpenPose detections are valid, otherwise use the GT detections + # OpenPose joints are more reliable for this task, so we prefer to use them if possible + is_valid = (joints_conf[:, op_joints_ind].min(dim=-1)[0][:, None, None] > 0).float() + reprojection_loss = (is_valid * reprojection_error_op + (1 - is_valid) * reprojection_error_gt).sum(dim=(1, 2)) + + # Loss that penalizes deviation from depth estimate + depth_loss = (depth_loss_weight ** 2) * (camera_t[:, 2] - camera_t_est[:, 2]) ** 2 + + total_loss = reprojection_loss + depth_loss + return total_loss.sum() + + + + # #####--- body fitiing loss ----- +def body_fitting_loss_3d(body_pose, preserve_pose, + betas, model_joints, camera_translation, + j3d, pose_prior, + joints3d_conf, + sigma=100, pose_prior_weight=4.78*1.5, + shape_prior_weight=5.0, angle_prior_weight=15.2, + joint_loss_weight=500.0, + pose_preserve_weight=0.0, + use_collision=False, + model_vertices=None, model_faces=None, + search_tree=None, pen_distance=None, filter_faces=None, + collision_loss_weight=1000 + ): + """ + Loss function for body fitting + """ + batch_size = body_pose.shape[0] + + #joint3d_loss = (joint_loss_weight ** 2) * gmof((model_joints + camera_translation) - j3d, sigma).sum(dim=-1) + + joint3d_error = gmof((model_joints + camera_translation) - j3d, sigma) + + joint3d_loss_part = (joints3d_conf ** 2) * joint3d_error.sum(dim=-1) + joint3d_loss = (joint_loss_weight ** 2) * joint3d_loss_part + + # Pose prior loss + pose_prior_loss = (pose_prior_weight ** 2) * pose_prior(body_pose, betas) + # Angle prior for knees and elbows + angle_prior_loss = (angle_prior_weight ** 2) * angle_prior(body_pose).sum(dim=-1) + # Regularizer to prevent betas from taking large values + shape_prior_loss = (shape_prior_weight ** 2) * (betas ** 2).sum(dim=-1) + + collision_loss = 0.0 + # Calculate the loss due to interpenetration + if use_collision: + triangles = torch.index_select( + model_vertices, 1, + model_faces).view(batch_size, -1, 3, 3) + + with torch.no_grad(): + collision_idxs = search_tree(triangles) + + # Remove unwanted collisions + if filter_faces is not None: + collision_idxs = filter_faces(collision_idxs) + + if collision_idxs.ge(0).sum().item() > 0: + collision_loss = torch.sum(collision_loss_weight * pen_distance(triangles, collision_idxs)) + + pose_preserve_loss = (pose_preserve_weight ** 2) * ((body_pose - preserve_pose) ** 2).sum(dim=-1) + + total_loss = joint3d_loss + pose_prior_loss + angle_prior_loss + shape_prior_loss + collision_loss + pose_preserve_loss + + return total_loss.sum() + + +# #####--- get camera fitting loss ----- +def camera_fitting_loss_3d(model_joints, camera_t, camera_t_est, + j3d, joints_category="orig", depth_loss_weight=100.0): + """ + 
Loss function for camera optimization. + """ + model_joints = model_joints + camera_t + # # get the indexed four + # op_joints = ['OP RHip', 'OP LHip', 'OP RShoulder', 'OP LShoulder'] + # op_joints_ind = [config.JOINT_MAP[joint] for joint in op_joints] + # + # j3d_error_loss = (j3d[:, op_joints_ind] - + # model_joints[:, op_joints_ind]) ** 2 + + gt_joints = ['RHip', 'LHip', 'RShoulder', 'LShoulder'] + gt_joints_ind = [config.JOINT_MAP[joint] for joint in gt_joints] + + if joints_category=="orig": + select_joints_ind = [config.JOINT_MAP[joint] for joint in gt_joints] + elif joints_category=="AMASS": + select_joints_ind = [config.AMASS_JOINT_MAP[joint] for joint in gt_joints] + elif joints_category=="MMM": + select_joints_ind = [config.MMM_JOINT_MAP[joint] for joint in gt_joints] + else: + print("NO SUCH JOINTS CATEGORY!") + + j3d_error_loss = (j3d[:, select_joints_ind] - + model_joints[:, gt_joints_ind]) ** 2 + + # Loss that penalizes deviation from depth estimate + depth_loss = (depth_loss_weight**2) * (camera_t - camera_t_est)**2 + + total_loss = j3d_error_loss + depth_loss + return total_loss.sum() \ No newline at end of file diff --git a/mGPT/data/transforms/joints2rots/prior.py b/mGPT/data/transforms/joints2rots/prior.py new file mode 100644 index 0000000..d85debd --- /dev/null +++ b/mGPT/data/transforms/joints2rots/prior.py @@ -0,0 +1,229 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
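# --- Editor's note: usage sketch, not part of the patch. gmof is the
# --- Geman-McClure robust error used by the fitting losses above: roughly
# --- quadratic for small residuals and saturating towards sigma**2 for
# --- large ones, which caps the influence of outlier joints.
import torch

res = torch.tensor([0.1, 1.0, 10.0, 100.0, 1000.0])
gmof(res, sigma=100.0)   # ~[0.01, 1.0, 99.0, 5000.0, 9901.0]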
+# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +import sys +import os + +import time +import pickle + +import numpy as np + +import torch +import torch.nn as nn + +DEFAULT_DTYPE = torch.float32 + + +def create_prior(prior_type, **kwargs): + if prior_type == 'gmm': + prior = MaxMixturePrior(**kwargs) + elif prior_type == 'l2': + return L2Prior(**kwargs) + elif prior_type == 'angle': + return SMPLifyAnglePrior(**kwargs) + elif prior_type == 'none' or prior_type is None: + # Don't use any pose prior + def no_prior(*args, **kwargs): + return 0.0 + prior = no_prior + else: + raise ValueError('Prior {}'.format(prior_type) + ' is not implemented') + return prior + + +class SMPLifyAnglePrior(nn.Module): + def __init__(self, dtype=torch.float32, **kwargs): + super(SMPLifyAnglePrior, self).__init__() + + # Indices for the rotation angle of + # 55: left elbow, 90deg bend at -np.pi/2 + # 58: right elbow, 90deg bend at np.pi/2 + # 12: left knee, 90deg bend at np.pi/2 + # 15: right knee, 90deg bend at np.pi/2 + angle_prior_idxs = np.array([55, 58, 12, 15], dtype=np.int64) + angle_prior_idxs = torch.tensor(angle_prior_idxs, dtype=torch.long) + self.register_buffer('angle_prior_idxs', angle_prior_idxs) + + angle_prior_signs = np.array([1, -1, -1, -1], + dtype=np.float32 if dtype == torch.float32 + else np.float64) + angle_prior_signs = torch.tensor(angle_prior_signs, + dtype=dtype) + self.register_buffer('angle_prior_signs', angle_prior_signs) + + def forward(self, pose, with_global_pose=False): + ''' Returns the angle prior loss for the given pose + Args: + pose: (Bx[23 + 1] * 3) torch tensor with the axis-angle + representation of the rotations of the joints of the SMPL model. + Kwargs: + with_global_pose: Whether the pose vector also contains the global + orientation of the SMPL model. If not then the indices must be + corrected. + Returns: + A size (B) tensor containing the angle prior loss for each element + in the batch.
+ ''' + angle_prior_idxs = self.angle_prior_idxs - (not with_global_pose) * 3 + return torch.exp(pose[:, angle_prior_idxs] * + self.angle_prior_signs).pow(2) + + +class L2Prior(nn.Module): + def __init__(self, dtype=DEFAULT_DTYPE, reduction='sum', **kwargs): + super(L2Prior, self).__init__() + + def forward(self, module_input, *args): + return torch.sum(module_input.pow(2)) + + +class MaxMixturePrior(nn.Module): + + def __init__(self, prior_folder='prior', + num_gaussians=6, dtype=DEFAULT_DTYPE, epsilon=1e-16, + use_merged=True, + **kwargs): + super(MaxMixturePrior, self).__init__() + + if dtype == DEFAULT_DTYPE: + np_dtype = np.float32 + elif dtype == torch.float64: + np_dtype = np.float64 + else: + print('Unknown float type {}, exiting!'.format(dtype)) + sys.exit(-1) + + self.num_gaussians = num_gaussians + self.epsilon = epsilon + self.use_merged = use_merged + gmm_fn = 'gmm_{:02d}.pkl'.format(num_gaussians) + + full_gmm_fn = os.path.join(prior_folder, gmm_fn) + if not os.path.exists(full_gmm_fn): + print('The path to the mixture prior "{}"'.format(full_gmm_fn) + + ' does not exist, exiting!') + sys.exit(-1) + + with open(full_gmm_fn, 'rb') as f: + gmm = pickle.load(f, encoding='latin1') + + if type(gmm) == dict: + means = gmm['means'].astype(np_dtype) + covs = gmm['covars'].astype(np_dtype) + weights = gmm['weights'].astype(np_dtype) + elif 'sklearn.mixture.gmm.GMM' in str(type(gmm)): + means = gmm.means_.astype(np_dtype) + covs = gmm.covars_.astype(np_dtype) + weights = gmm.weights_.astype(np_dtype) + else: + print('Unknown type for the prior: {}, exiting!'.format(type(gmm))) + sys.exit(-1) + + self.register_buffer('means', torch.tensor(means, dtype=dtype)) + + self.register_buffer('covs', torch.tensor(covs, dtype=dtype)) + + precisions = [np.linalg.inv(cov) for cov in covs] + precisions = np.stack(precisions).astype(np_dtype) + + self.register_buffer('precisions', + torch.tensor(precisions, dtype=dtype)) + + # The constant term: + sqrdets = np.array([(np.sqrt(np.linalg.det(c))) + for c in gmm['covars']]) + const = (2 * np.pi)**(69 / 2.)
+ + nll_weights = np.asarray(gmm['weights'] / (const * + (sqrdets / sqrdets.min()))) + nll_weights = torch.tensor(nll_weights, dtype=dtype).unsqueeze(dim=0) + self.register_buffer('nll_weights', nll_weights) + + weights = torch.tensor(gmm['weights'], dtype=dtype).unsqueeze(dim=0) + self.register_buffer('weights', weights) + + self.register_buffer('pi_term', + torch.log(torch.tensor(2 * np.pi, dtype=dtype))) + + cov_dets = [np.log(np.linalg.det(cov.astype(np_dtype)) + epsilon) + for cov in covs] + self.register_buffer('cov_dets', + torch.tensor(cov_dets, dtype=dtype)) + + # The dimensionality of the random variable + self.random_var_dim = self.means.shape[1] + + def get_mean(self): + ''' Returns the mean of the mixture ''' + mean_pose = torch.matmul(self.weights, self.means) + return mean_pose + + def merged_log_likelihood(self, pose, betas): + diff_from_mean = pose.unsqueeze(dim=1) - self.means + + prec_diff_prod = torch.einsum('mij,bmj->bmi', + [self.precisions, diff_from_mean]) + diff_prec_quadratic = (prec_diff_prod * diff_from_mean).sum(dim=-1) + + curr_loglikelihood = 0.5 * diff_prec_quadratic - \ + torch.log(self.nll_weights) + # curr_loglikelihood = 0.5 * (self.cov_dets.unsqueeze(dim=0) + + # self.random_var_dim * self.pi_term + + # diff_prec_quadratic + # ) - torch.log(self.weights) + + min_likelihood, _ = torch.min(curr_loglikelihood, dim=1) + return min_likelihood + + def log_likelihood(self, pose, betas, *args, **kwargs): + ''' Create graph operation for negative log-likelihood calculation + ''' + likelihoods = [] + + for idx in range(self.num_gaussians): + mean = self.means[idx] + prec = self.precisions[idx] + cov = self.covs[idx] + diff_from_mean = pose - mean + + curr_loglikelihood = torch.einsum('bj,ji->bi', + [diff_from_mean, prec]) + curr_loglikelihood = torch.einsum('bi,bi->b', + [curr_loglikelihood, + diff_from_mean]) + cov_term = torch.log(torch.det(cov) + self.epsilon) + curr_loglikelihood += 0.5 * (cov_term + + self.random_var_dim * + self.pi_term) + likelihoods.append(curr_loglikelihood) + + log_likelihoods = torch.stack(likelihoods, dim=1) + min_idx = torch.argmin(log_likelihoods, dim=1) + weight_component = self.nll_weights[:, min_idx] + weight_component = -torch.log(weight_component) + + return weight_component + log_likelihoods[:, min_idx] + + def forward(self, pose, betas): + if self.use_merged: + return self.merged_log_likelihood(pose, betas) + else: + return self.log_likelihood(pose, betas) diff --git a/mGPT/data/transforms/joints2rots/smplify.py b/mGPT/data/transforms/joints2rots/smplify.py new file mode 100644 index 0000000..7df5150 --- /dev/null +++ b/mGPT/data/transforms/joints2rots/smplify.py @@ -0,0 +1,284 @@ +import torch +import os, sys +import pickle +import smplx +import numpy as np +from tqdm import tqdm + +sys.path.append(os.path.dirname(__file__)) +from customloss import (camera_fitting_loss, + body_fitting_loss, + camera_fitting_loss_3d, + body_fitting_loss_3d, + ) +from prior import MaxMixturePrior +import config + + + +@torch.no_grad() +def guess_init_3d(model_joints, + j3d, + joints_category="orig"): + """Initialize the camera translation via triangle similarity, by using the torso joints . 
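# --- Editor's note: usage sketch, not part of the patch. The GMM pose prior
# --- above is loaded from a pickled mixture (gmm_08.pkl in prior_folder)
# --- over the 69-dim SMPL body pose; the folder path below is a hypothetical
# --- placeholder.
import torch

pose_prior = MaxMixturePrior(prior_folder="deps/smpl_models", num_gaussians=8, dtype=torch.float32)
body_pose = torch.zeros(1, 69)      # 23 body joints x 3 axis-angle values
betas = torch.zeros(1, 10)
nll = pose_prior(body_pose, betas)  # (1,) negative log-likelihood term; lower = more plausible pose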
+ :param model_joints: SMPL model with pre joints + :param j3d: 25x3 array of Kinect Joints + :returns: 3D vector corresponding to the estimated camera translation + """ + # get the indexed four + gt_joints = ['RHip', 'LHip', 'RShoulder', 'LShoulder'] + gt_joints_ind = [config.JOINT_MAP[joint] for joint in gt_joints] + + if joints_category=="orig": + joints_ind_category = [config.JOINT_MAP[joint] for joint in gt_joints] + elif joints_category=="AMASS": + joints_ind_category = [config.AMASS_JOINT_MAP[joint] for joint in gt_joints] + elif joints_category=="MMM": + joints_ind_category = [config.MMM_JOINT_MAP[joint] for joint in gt_joints] + else: + print("NO SUCH JOINTS CATEGORY!") + + sum_init_t = (j3d[:, joints_ind_category] - model_joints[:, gt_joints_ind]).sum(dim=1) + init_t = sum_init_t / 4.0 + return init_t + + +# SMPLIfy 3D +class SMPLify3D(): + """Implementation of SMPLify, use 3D joints.""" + + def __init__(self, + smplxmodel, + step_size=1e-2, + batch_size=1, + num_iters=100, + use_collision=False, + use_lbfgs=True, + joints_category="orig", + device=torch.device('cuda:0'), + ): + + # Store options + self.batch_size = batch_size + self.device = device + self.step_size = step_size + + self.num_iters = num_iters + # --- choose optimizer + self.use_lbfgs = use_lbfgs + # GMM pose prior + self.pose_prior = MaxMixturePrior(prior_folder=config.GMM_MODEL_DIR, + num_gaussians=8, + dtype=torch.float32).to(device) + # collision part + self.use_collision = use_collision + if self.use_collision: + self.part_segm_fn = config.Part_Seg_DIR + + # reLoad SMPL-X model + self.smpl = smplxmodel + + self.model_faces = smplxmodel.faces_tensor.view(-1) + + # select joint joint_category + self.joints_category = joints_category + + if joints_category=="orig": + self.smpl_index = config.full_smpl_idx + self.corr_index = config.full_smpl_idx + elif joints_category=="AMASS": + self.smpl_index = config.amass_smpl_idx + self.corr_index = config.amass_idx + # elif joints_category=="MMM": + # self.smpl_index = config.mmm_smpl_dix + # self.corr_index = config.mmm_idx + else: + self.smpl_index = None + self.corr_index = None + print("NO SUCH JOINTS CATEGORY!") + + # ---- get the man function here ------ + def __call__(self, init_pose, init_betas, init_cam_t, j3d, conf_3d=1.0, seq_ind=0): + """Perform body fitting. 
+ Input: + init_pose: SMPL pose estimate + init_betas: SMPL betas estimate + init_cam_t: Camera translation estimate + j3d: joints 3d aka keypoints + conf_3d: confidence for 3d joints + seq_ind: index of the sequence + Returns: + vertices: Vertices of optimized shape + joints: 3D joints of optimized shape + pose: SMPL pose parameters of optimized shape + betas: SMPL beta parameters of optimized shape + camera_translation: Camera translation + """ + + # # # add the mesh inter-section to avoid + search_tree = None + pen_distance = None + filter_faces = None + + if self.use_collision: + from mesh_intersection.bvh_search_tree import BVH + import mesh_intersection.loss as collisions_loss + from mesh_intersection.filter_faces import FilterFaces + + search_tree = BVH(max_collisions=8) + + pen_distance = collisions_loss.DistanceFieldPenetrationLoss( + sigma=0.5, point2plane=False, vectorized=True, penalize_outside=True) + + if self.part_segm_fn: + # Read the part segmentation + part_segm_fn = os.path.expandvars(self.part_segm_fn) + with open(part_segm_fn, 'rb') as faces_parents_file: + face_segm_data = pickle.load(faces_parents_file, encoding='latin1') + faces_segm = face_segm_data['segm'] + faces_parents = face_segm_data['parents'] + # Create the module used to filter invalid collision pairs + filter_faces = FilterFaces( + faces_segm=faces_segm, faces_parents=faces_parents, + ign_part_pairs=None).to(device=self.device) + + + # Split SMPL pose to body pose and global orientation + body_pose = init_pose[:, 3:].detach().clone() + global_orient = init_pose[:, :3].detach().clone() + betas = init_betas.detach().clone() + + # use guess 3d to get the initial + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas) + model_joints = smpl_output.joints + + init_cam_t = guess_init_3d(model_joints, j3d, self.joints_category).detach() + camera_translation = init_cam_t.clone() + + preserve_pose = init_pose[:, 3:].detach().clone() + # -------------Step 1: Optimize camera translation and body orientation-------- + # Optimize only camera translation and body orientation + body_pose.requires_grad = False + betas.requires_grad = False + global_orient.requires_grad = True + camera_translation.requires_grad = True + + camera_opt_params = [global_orient, camera_translation] + + if self.use_lbfgs: + camera_optimizer = torch.optim.LBFGS(camera_opt_params, max_iter=self.num_iters, + lr=self.step_size, line_search_fn='strong_wolfe') + for i in range(10): + def closure(): + camera_optimizer.zero_grad() + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas) + model_joints = smpl_output.joints + + loss = camera_fitting_loss_3d(model_joints, camera_translation, + init_cam_t, j3d, self.joints_category) + loss.backward() + return loss + + camera_optimizer.step(closure) + else: + camera_optimizer = torch.optim.Adam(camera_opt_params, lr=self.step_size, betas=(0.9, 0.999)) + + for i in range(20): + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas) + model_joints = smpl_output.joints + + loss = camera_fitting_loss_3d(model_joints[:, self.smpl_index], camera_translation, + init_cam_t, j3d[:, self.corr_index], self.joints_category) + camera_optimizer.zero_grad() + loss.backward() + camera_optimizer.step() + + # Fix camera translation after optimizing camera + # --------Step 2: Optimize body joints -------------------------- + # Optimize only the body pose and global orientation of the body + body_pose.requires_grad = 
True + global_orient.requires_grad = True + camera_translation.requires_grad = True + + # --- if we use the sequence, fix the shape + if seq_ind == 0: + betas.requires_grad = True + body_opt_params = [body_pose, betas, global_orient, camera_translation] + else: + betas.requires_grad = False + body_opt_params = [body_pose, global_orient, camera_translation] + + if self.use_lbfgs: + body_optimizer = torch.optim.LBFGS(body_opt_params, max_iter=self.num_iters, + lr=self.step_size, line_search_fn='strong_wolfe') + + for i in tqdm(range(self.num_iters), desc=f"LBFGS iter: "): + # for i in range(self.num_iters): + def closure(): + body_optimizer.zero_grad() + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas) + model_joints = smpl_output.joints + model_vertices = smpl_output.vertices + + loss = body_fitting_loss_3d(body_pose, preserve_pose, betas, model_joints[:, self.smpl_index], camera_translation, + j3d[:, self.corr_index], self.pose_prior, + joints3d_conf=conf_3d, + joint_loss_weight=600.0, + pose_preserve_weight=5.0, + use_collision=self.use_collision, + model_vertices=model_vertices, model_faces=self.model_faces, + search_tree=search_tree, pen_distance=pen_distance, filter_faces=filter_faces) + loss.backward() + return loss + + body_optimizer.step(closure) + else: + body_optimizer = torch.optim.Adam(body_opt_params, lr=self.step_size, betas=(0.9, 0.999)) + + for i in range(self.num_iters): + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas) + model_joints = smpl_output.joints + model_vertices = smpl_output.vertices + + loss = body_fitting_loss_3d(body_pose, preserve_pose, betas, model_joints[:, self.smpl_index], camera_translation, + j3d[:, self.corr_index], self.pose_prior, + joints3d_conf=conf_3d, + joint_loss_weight=600.0, + use_collision=self.use_collision, + model_vertices=model_vertices, model_faces=self.model_faces, + search_tree=search_tree, pen_distance=pen_distance, filter_faces=filter_faces) + body_optimizer.zero_grad() + loss.backward() + body_optimizer.step() + + # Get final loss value + with torch.no_grad(): + smpl_output = self.smpl(global_orient=global_orient, + body_pose=body_pose, + betas=betas, return_full_pose=True) + model_joints = smpl_output.joints + model_vertices = smpl_output.vertices + + final_loss = body_fitting_loss_3d(body_pose, preserve_pose, betas, model_joints[:, self.smpl_index], camera_translation, + j3d[:, self.corr_index], self.pose_prior, + joints3d_conf=conf_3d, + joint_loss_weight=600.0, + use_collision=self.use_collision, model_vertices=model_vertices, model_faces=self.model_faces, + search_tree=search_tree, pen_distance=pen_distance, filter_faces=filter_faces) + + vertices = smpl_output.vertices.detach() + joints = smpl_output.joints.detach() + pose = torch.cat([global_orient, body_pose], dim=-1).detach() + betas = betas.detach() + + return vertices, joints, pose, betas, camera_translation, final_loss \ No newline at end of file diff --git a/mGPT/data/transforms/rots2joints/__init__.py b/mGPT/data/transforms/rots2joints/__init__.py new file mode 100644 index 0000000..7719c70 --- /dev/null +++ b/mGPT/data/transforms/rots2joints/__init__.py @@ -0,0 +1,3 @@ +from .base import Rots2Joints +from .smplh import SMPLH +from .smplx import SMPLX diff --git a/mGPT/data/transforms/rots2joints/base.py b/mGPT/data/transforms/rots2joints/base.py new file mode 100644 index 0000000..524f830 --- /dev/null +++ b/mGPT/data/transforms/rots2joints/base.py @@ -0,0 +1,56 @@ +# -*- coding: 
utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional + +import torch +from torch import Tensor, nn +from pathlib import Path +import os +# import hydra + +class Rots2Joints(nn.Module): + def __init__(self, path: Optional[str] = None, + normalization: bool = False, + eps: float = 1e-12, + **kwargs) -> None: + if normalization and path is None: + raise TypeError("You should provide a path if normalization is on.") + + super().__init__() + self.normalization = normalization + self.eps = eps + # workaround for cluster local/sync + if path is not None: + rel_p = path.split('/') + rel_p = rel_p[rel_p.index('deps'):] + rel_p = '/'.join(rel_p) + # path = hydra.utils.get_original_cwd() + '/' + rel_p + if normalization: + mean_path = Path(path) / "mean.pt" + std_path = Path(path) / "std.pt" + self.register_buffer('mean', torch.load(mean_path)) + self.register_buffer('std', torch.load(std_path)) + + def normalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = (features - self.mean)/(self.std + self.eps) + return features + + def unnormalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = features * self.std + self.mean + return features diff --git a/mGPT/data/transforms/rots2joints/smplh.py b/mGPT/data/transforms/rots2joints/smplh.py new file mode 100644 index 0000000..90efa4f --- /dev/null +++ b/mGPT/data/transforms/rots2joints/smplh.py @@ -0,0 +1,192 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import contextlib +from typing import Optional + +import torch +from einops import rearrange +from torch import Tensor +from mGPT.utils.joints import smplh_to_mmm_scaling_factor +from mGPT.utils.joints import smplh2mmm_indexes +from .base import Rots2Joints + + +def slice_or_none(data, cslice): + if data is None: + return data + else: + return data[cslice] + + +class SMPLH(Rots2Joints): + + def __init__(self, + path: str, + jointstype: str = "mmm", + input_pose_rep: str = "matrix", + batch_size: int = 512, + gender="neutral", + **kwargs) -> None: + super().__init__(path=None, normalization=False) + self.batch_size = batch_size + self.input_pose_rep = input_pose_rep + self.jointstype = jointstype + self.training = False + + from smplx.body_models import SMPLHLayer + import os + # rel_p = path.split('/') + # rel_p = rel_p[rel_p.index('data'):] + # rel_p = '/'.join(rel_p) + + # Remove annoying print + with contextlib.redirect_stdout(None): + self.smplh = SMPLHLayer(path, ext="pkl", gender=gender).eval() + + self.faces = self.smplh.faces + for p in self.parameters(): + p.requires_grad = False + + def train(self, *args, **kwargs): + return self + + def forward(self, + smpl_data: dict, + jointstype: Optional[str] = None, + input_pose_rep: Optional[str] = None, + batch_size: Optional[int] = None) -> Tensor: + + # Take values from init if not specified there + jointstype = self.jointstype if jointstype is None else jointstype + batch_size = self.batch_size if batch_size is None else batch_size + input_pose_rep = self.input_pose_rep if input_pose_rep is None else input_pose_rep + + if input_pose_rep == "xyz": + raise NotImplementedError( + "You should use identity pose2joints instead") + + poses = smpl_data.rots + trans = smpl_data.trans + + from functools import reduce + import operator + save_shape_bs_len = poses.shape[:-3] + nposes = reduce(operator.mul, save_shape_bs_len, 1) + + if poses.shape[-3] == 52: + nohands = False + elif poses.shape[-3] == 22: + nohands = True + else: + raise NotImplementedError("Could not parse the poses.") + + # Convert any rotations to matrix + # from temos.tools.easyconvert import to_matrix + # matrix_poses = to_matrix(input_pose_rep, poses) + matrix_poses = poses + + # Reshaping + matrix_poses = matrix_poses.reshape((nposes, *matrix_poses.shape[-3:])) + global_orient = matrix_poses[:, 0] + + if trans is None: + trans = torch.zeros((*save_shape_bs_len, 3), + dtype=poses.dtype, + device=poses.device) + + trans_all = trans.reshape((nposes, *trans.shape[-1:])) + + body_pose = matrix_poses[:, 1:22] + if nohands: + left_hand_pose = None + right_hand_pose = None + else: + hand_pose = matrix_poses[:, 22:] + left_hand_pose = hand_pose[:, :15] + right_hand_pose = hand_pose[:, 15:] + + n = len(body_pose) + outputs = [] + for chunk in range(int((n - 1) / batch_size) + 1): + chunk_slice = slice(chunk * batch_size, (chunk + 1) * batch_size) + smpl_output = self.smplh( + global_orient=slice_or_none(global_orient, chunk_slice), + body_pose=slice_or_none(body_pose, chunk_slice), + left_hand_pose=slice_or_none(left_hand_pose, chunk_slice), + right_hand_pose=slice_or_none(right_hand_pose, chunk_slice), + transl=slice_or_none(trans_all, chunk_slice)) + + if jointstype == "vertices": + output_chunk = smpl_output.vertices + else: + joints = smpl_output.joints + output_chunk = joints + + outputs.append(output_chunk) + + outputs = torch.cat(outputs) + outputs = outputs.reshape((*save_shape_bs_len, *outputs.shape[1:])) + + # Change topology if 
needed + outputs = smplh_to(jointstype, outputs, trans) + + return outputs + + def inverse(self, joints: Tensor) -> Tensor: + raise NotImplementedError("Cannot inverse SMPLH layer.") + + +def smplh_to(jointstype, data, trans): + from mGPT.utils.joints import get_root_idx + + if "mmm" in jointstype: + from mGPT.utils.joints import smplh2mmm_indexes + indexes = smplh2mmm_indexes + data = data[..., indexes, :] + + # make it compatible with mmm + if jointstype == "mmm": + from mGPT.utils.joints import smplh_to_mmm_scaling_factor + data *= smplh_to_mmm_scaling_factor + + if jointstype == "smplmmm": + pass + elif jointstype in ["mmm", "mmmns"]: + # swap axis + data = data[..., [1, 2, 0]] + # revert left and right + data[..., 2] = -data[..., 2] + + elif jointstype == "smplnh": + from mGPT.utils.joints import smplh2smplnh_indexes + indexes = smplh2smplnh_indexes + data = data[..., indexes, :] + elif jointstype == "smplh": + pass + elif jointstype == "vertices": + pass + else: + raise NotImplementedError(f"SMPLH to {jointstype} is not implemented.") + + if jointstype != "vertices": + # shift the output in each batch + # such that it is centered on the pelvis/root on the first frame + root_joint_idx = get_root_idx(jointstype) + shift = trans[..., 0, :] - data[..., 0, root_joint_idx, :] + data += shift[..., None, None, :] + + return data diff --git a/mGPT/data/transforms/rots2joints/smplx.py b/mGPT/data/transforms/rots2joints/smplx.py new file mode 100644 index 0000000..107eb57 --- /dev/null +++ b/mGPT/data/transforms/rots2joints/smplx.py @@ -0,0 +1,201 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import contextlib +from typing import Optional +import torch +from torch import Tensor +from mGPT.utils.joints import smplh_to_mmm_scaling_factor, smplh2mmm_indexes, get_root_idx +from mGPT.utils.easyconvert import rep_to_rep +from .base import Rots2Joints + + +def slice_or_none(data, cslice): + if data is None: + return data + else: + return data[cslice] + + +class SMPLX(Rots2Joints): + def __init__(self, + path: str, + jointstype: str = "mmm", + input_pose_rep: str = "matrix", + batch_size: int = 512, + gender="neutral", + **kwargs) -> None: + super().__init__(path=None, normalization=False) + self.batch_size = batch_size + self.input_pose_rep = input_pose_rep + self.jointstype = jointstype + self.training = False + + from smplx.body_models import SMPLXLayer + import os + # rel_p = path.split('/') + # rel_p = rel_p[rel_p.index('data'):] + # rel_p = '/'.join(rel_p) + + # Remove annoying print + with contextlib.redirect_stdout(None): + self.smplx = SMPLXLayer(path, + ext="npz", + gender=gender, + batch_size=batch_size).eval() + + self.faces = self.smplx.faces + for p in self.parameters(): + p.requires_grad = False + + def train(self, *args, **kwargs): + return self + + def forward(self, + smpl_data: dict, + jointstype: Optional[str] = None, + input_pose_rep: Optional[str] = None, + batch_size: Optional[int] = None) -> Tensor: + + # Take values from init if not specified there + jointstype = self.jointstype if jointstype is None else jointstype + batch_size = self.batch_size if batch_size is None else batch_size + input_pose_rep = self.input_pose_rep if input_pose_rep is None else input_pose_rep + + poses = smpl_data.rots + trans = smpl_data.trans + + from functools import reduce + import operator + save_shape_bs_len = poses.shape[:-3] + nposes = reduce(operator.mul, save_shape_bs_len, 1) + + + matrix_poses = rep_to_rep(self.input_pose_rep, input_pose_rep, poses) + + # Reshaping + matrix_poses = matrix_poses.reshape((nposes, *matrix_poses.shape[-3:])) + + global_orient = matrix_poses[:, 0] + + if trans is None: + trans = torch.zeros((*save_shape_bs_len, 3), + dtype=poses.dtype, + device=poses.device) + + trans_all = trans.reshape((nposes, *trans.shape[-1:])) + + body_pose = matrix_poses[:, 1:22] + + if poses.shape[-3] == 55: + nohands = False + nofaces = False + elif poses.shape[-3] == 52: + nohands = False + nofaces = True + elif poses.shape[-3] == 22: + nohands = True + nofaces = True + else: + raise NotImplementedError("Could not parse the poses.") + + if nohands: + left_hand_pose = None + right_hand_pose = None + else: + left_hand_pose = matrix_poses[:, 25:40] + right_hand_pose = matrix_poses[:, 40:55] + + if nofaces: + jaw_pose = None + leye_pose = None + reye_pose = None + else: + jaw_pose = matrix_poses[:, 22:23] + leye_pose = matrix_poses[:, 23:24] + reye_pose = matrix_poses[:, 24:25] + + n = len(body_pose) + outputs = [] + for chunk in range(int((n - 1) / batch_size) + 1): + chunk_slice = slice(chunk * batch_size, (chunk + 1) * batch_size) + smpl_output = self.smplx( + global_orient=slice_or_none(global_orient, chunk_slice), + body_pose=slice_or_none(body_pose, chunk_slice), + left_hand_pose=slice_or_none(left_hand_pose, chunk_slice), + right_hand_pose=slice_or_none(right_hand_pose, chunk_slice), + jaw_pose=slice_or_none(jaw_pose, chunk_slice), + leye_pose=slice_or_none(leye_pose, chunk_slice), + reye_pose=slice_or_none(reye_pose, chunk_slice), + transl=slice_or_none(trans_all, chunk_slice)) + + if jointstype == "vertices": + 
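+                # When jointstype == "vertices" this branch keeps the raw SMPL-X mesh
+                # vertices of the chunk (about 10k per frame for SMPL-X) instead of the
+                # regressed 3D joints. Either way, the enclosing loop feeds at most
+                # `batch_size` poses to the SMPL-X layer per call to bound memory, and the
+                # per-chunk outputs are concatenated back together afterwards.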
output_chunk = smpl_output.vertices + else: + joints = smpl_output.joints + output_chunk = joints + + outputs.append(output_chunk) + + outputs = torch.cat(outputs) + outputs = outputs.reshape((*save_shape_bs_len, *outputs.shape[1:])) + + # Change topology if needed + outputs = smplx_to(jointstype, outputs, trans) + + return outputs + + def inverse(self, joints: Tensor) -> Tensor: + raise NotImplementedError("Cannot inverse SMPLX layer.") + + +def smplx_to(jointstype, data, trans): + + if "mmm" in jointstype: + indexes = smplh2mmm_indexes + data = data[..., indexes, :] + + # make it compatible with mmm + if jointstype == "mmm": + data *= smplh_to_mmm_scaling_factor + + if jointstype == "smplmmm": + pass + elif jointstype in ["mmm", "mmmns"]: + # swap axis + data = data[..., [1, 2, 0]] + # revert left and right + data[..., 2] = -data[..., 2] + + elif jointstype == "smplnh": + from mGPT.utils.joints import smplh2smplnh_indexes + indexes = smplh2smplnh_indexes + data = data[..., indexes, :] + elif jointstype == "smplh": + pass + elif jointstype == "vertices": + pass + else: + raise NotImplementedError(f"SMPLX to {jointstype} is not implemented.") + + if jointstype != "vertices": + # shift the output in each batch + # such that it is centered on the pelvis/root on the first frame + root_joint_idx = get_root_idx(jointstype) + shift = trans[..., 0, :] - data[..., 0, root_joint_idx, :] + data += shift[..., None, None, :] + + return data diff --git a/mGPT/data/transforms/rots2rfeats/__init__.py b/mGPT/data/transforms/rots2rfeats/__init__.py new file mode 100644 index 0000000..29b206c --- /dev/null +++ b/mGPT/data/transforms/rots2rfeats/__init__.py @@ -0,0 +1,5 @@ +from .base import Rots2Rfeats +# from .globvel import Globalvel + +from .globvelandy import Globalvelandy +# from .rifeats import Rifeats diff --git a/mGPT/data/transforms/rots2rfeats/base.py b/mGPT/data/transforms/rots2rfeats/base.py new file mode 100644 index 0000000..98c33bd --- /dev/null +++ b/mGPT/data/transforms/rots2rfeats/base.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional + +import torch +from torch import Tensor, nn +from pathlib import Path +import os + +class Rots2Rfeats(nn.Module): + def __init__(self, path: Optional[str] = None, + normalization: bool = True, + eps: float = 1e-12, + **kwargs) -> None: + if normalization and path is None: + raise TypeError("You should provide a path if normalization is on.") + + super().__init__() + self.normalization = normalization + self.eps = eps + if normalization: + # workaround for cluster local/sync + rel_p = path.split('/') + # superhacky it is for the datatype ugly stuff change it, copy the main stuff to seperate_pairs dict + if rel_p[-1] == 'separate_pairs': + rel_p.remove('separate_pairs') + ######################################################## + # rel_p = rel_p[rel_p.index('deps'):] + rel_p = '/'.join(rel_p) + # path = hydra.utils.get_original_cwd() + '/' + rel_p + path = rel_p + mean_path = Path(path) / "rfeats_mean.pt" + std_path = Path(path) / "rfeats_std.pt" + + self.register_buffer('mean', torch.load(mean_path)) + self.register_buffer('std', torch.load(std_path)) + + def normalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = (features - self.mean)/(self.std + self.eps) + return features + + def unnormalize(self, features: Tensor) -> Tensor: + if self.normalization: + features = features * self.std + self.mean + return features diff --git a/mGPT/data/transforms/rots2rfeats/globvelandy.py b/mGPT/data/transforms/rots2rfeats/globvelandy.py new file mode 100644 index 0000000..fe223af --- /dev/null +++ b/mGPT/data/transforms/rots2rfeats/globvelandy.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional + +import torch +from torch import Tensor +from einops import rearrange + +from mGPT.utils.easyconvert import rep_to_rep, nfeats_of, to_matrix +import mGPT.utils.geometry_tools as geometry_tools + +from .base import Rots2Rfeats + + +class Globalvelandy(Rots2Rfeats): + def __init__(self, + path: Optional[str] = None, + normalization: bool = False, + pose_rep: str = "rot6d", + canonicalize: bool = False, + offset: bool = True, + **kwargs) -> None: + super().__init__(path=path, normalization=normalization) + + self.canonicalize = canonicalize + self.pose_rep = pose_rep + self.nfeats = nfeats_of(pose_rep) + self.offset = offset + + def forward(self, data, data_rep='matrix', first_frame=None) -> Tensor: + + poses, trans = data.rots, data.trans + + # extract the root gravity axis + # for smpl it is the last coordinate + root_y = trans[..., 2] + trajectory = trans[..., [0, 1]] + + # Compute the difference of trajectory + vel_trajectory = torch.diff(trajectory, dim=-2) + + # 0 for the first one => keep the dimentionality + if first_frame is None: + first_frame = 0 * vel_trajectory[..., [0], :] + + vel_trajectory = torch.cat((first_frame, vel_trajectory), dim=-2) + + # first normalize the data + if self.canonicalize: + + matrix_poses = rep_to_rep(data_rep, 'matrix', poses) + global_orient = matrix_poses[..., 0, :, :] + + # remove the rotation + rot2d = rep_to_rep(data_rep, 'rotvec', poses[0, 0, ...]) + + # Remove the fist rotation along the vertical axis + rot2d[..., :2] = 0 + + if self.offset: + # add a bit more rotation + rot2d[..., 2] += torch.pi / 2 + + rot2d = rep_to_rep('rotvec', 'matrix', rot2d) + + # turn with the same amount all the rotations + global_orient = torch.einsum("...kj,...kl->...jl", rot2d, + global_orient) + + matrix_poses = torch.cat( + (global_orient[..., None, :, :], matrix_poses[..., 1:, :, :]), + dim=-3) + + poses = rep_to_rep('matrix', data_rep, matrix_poses) + + # Turn the trajectory as well + vel_trajectory = torch.einsum("...kj,...lk->...lj", + rot2d[..., :2, :2], vel_trajectory) + + poses = rep_to_rep(data_rep, self.pose_rep, poses) + features = torch.cat( + (root_y[..., None], vel_trajectory, + rearrange(poses, "... joints rot -> ... (joints rot)")), + dim=-1) + features = self.normalize(features) + + return features + + def extract(self, features): + root_y = features[..., 0] + vel_trajectory = features[..., 1:3] + poses_features = features[..., 3:] + poses = rearrange(poses_features, + "... (joints rot) -> ... joints rot", + rot=self.nfeats) + return root_y, vel_trajectory, poses + + def inverse(self, features, last_frame=None): + features = self.unnormalize(features) + root_y, vel_trajectory, poses = self.extract(features) + + # integrate the trajectory + trajectory = torch.cumsum(vel_trajectory, dim=-2) + if last_frame is None: + pass + # First frame should be 0, but if infered it is better to ensure it + trajectory = trajectory - trajectory[..., [0], :] + + # Get back the translation + trans = torch.cat([trajectory, root_y[..., None]], dim=-1) + matrix_poses = rep_to_rep(self.pose_rep, 'matrix', poses) + + from ..smpl import RotTransDatastruct + return RotTransDatastruct(rots=matrix_poses, trans=trans) diff --git a/mGPT/data/transforms/smpl.py b/mGPT/data/transforms/smpl.py new file mode 100644 index 0000000..fc46b11 --- /dev/null +++ b/mGPT/data/transforms/smpl.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. 
(MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional +from torch import Tensor +import smplx + +from .base import Datastruct, dataclass, Transform + +from .rots2rfeats import Rots2Rfeats +from .rots2joints import Rots2Joints +from .joints2jfeats import Joints2Jfeats + + +class SMPLTransform(Transform): + def __init__(self, rots2rfeats: Rots2Rfeats, + rots2joints: Rots2Joints, + joints2jfeats: Joints2Jfeats, + **kwargs): + self.rots2rfeats = rots2rfeats + self.rots2joints = rots2joints + self.joints2jfeats = joints2jfeats + + def Datastruct(self, **kwargs): + return SMPLDatastruct(_rots2rfeats=self.rots2rfeats, + _rots2joints=self.rots2joints, + _joints2jfeats=self.joints2jfeats, + transforms=self, + **kwargs) + + def __repr__(self): + return "SMPLTransform()" + + +class RotIdentityTransform(Transform): + def __init__(self, **kwargs): + return + + def Datastruct(self, **kwargs): + return RotTransDatastruct(**kwargs) + + def __repr__(self): + return "RotIdentityTransform()" + + +@dataclass +class RotTransDatastruct(Datastruct): + rots: Tensor + trans: Tensor + + transforms: RotIdentityTransform = RotIdentityTransform() + + def __post_init__(self): + self.datakeys = ["rots", "trans"] + + def __len__(self): + return len(self.rots) + + +@dataclass +class SMPLDatastruct(Datastruct): + transforms: SMPLTransform + _rots2rfeats: Rots2Rfeats + _rots2joints: Rots2Joints + _joints2jfeats: Joints2Jfeats + + features: Optional[Tensor] = None + rots_: Optional[RotTransDatastruct] = None + rfeats_: Optional[Tensor] = None + joints_: Optional[Tensor] = None + jfeats_: Optional[Tensor] = None + vertices_: Optional[Tensor] = None + + def __post_init__(self): + self.datakeys = ['features', 'rots_', 'rfeats_', + 'joints_', 'jfeats_', 'vertices_'] + # starting point + if self.features is not None and self.rfeats_ is None: + self.rfeats_ = self.features + + @property + def rots(self): + # Cached value + if self.rots_ is not None: + return self.rots_ + + # self.rfeats_ should be defined + assert self.rfeats_ is not None + + self._rots2rfeats.to(self.rfeats.device) + self.rots_ = self._rots2rfeats.inverse(self.rfeats) + return self.rots_ + + @property + def rfeats(self): + # Cached value + if self.rfeats_ is not None: + return self.rfeats_ + + # self.rots_ should be defined + assert self.rots_ is not None + + self._rots2rfeats.to(self.rots.device) + self.rfeats_ = self._rots2rfeats(self.rots) + return self.rfeats_ + + @property + def joints(self): + # Cached value + if self.joints_ is not None: + return self.joints_ + + self._rots2joints.to(self.rots.device) + self.joints_ = self._rots2joints(self.rots) + return self.joints_ + + @property + def jfeats(self): + # Cached value + if self.jfeats_ is not None: + return self.jfeats_ + + self._joints2jfeats.to(self.joints.device) + self.jfeats_ = self._joints2jfeats(self.joints) + return self.jfeats_ + + @property + def vertices(self): + # Cached value + if self.vertices_ is not None: + return 
self.vertices_ + + self._rots2joints.to(self.rots.device) + self.vertices_ = self._rots2joints(self.rots, jointstype="vertices") + return self.vertices_ + + def __len__(self): + return len(self.rfeats) + + +def get_body_model(model_type, gender, batch_size, device='cpu', ext='pkl'): + ''' + model_type: smpl, smplh, smplx, etc. Refer to the smplx documentation + gender: male, female, neutral + batch_size: a positive integer + ''' + mtype = model_type.upper() + if gender != 'neutral': + if not isinstance(gender, str): + gender = str(gender.astype(str)).upper() + else: + gender = gender.upper() + else: + gender = gender.upper() + ext = 'npz' + body_model_path = f'data/smpl_models/{model_type}/{mtype}_{gender}.{ext}' + + body_model = smplx.create(body_model_path, model_type=model_type, + gender=gender, ext=ext, + use_pca=False, + num_pca_comps=12, + create_global_orient=True, + create_body_pose=True, + create_betas=True, + create_left_hand_pose=True, + create_right_hand_pose=True, + create_expression=True, + create_jaw_pose=True, + create_leye_pose=True, + create_reye_pose=True, + create_transl=True, + batch_size=batch_size) + + if device == 'cuda': + return body_model.cuda() + else: + return body_model + diff --git a/mGPT/data/transforms/xyz.py b/mGPT/data/transforms/xyz.py new file mode 100644 index 0000000..7add165 --- /dev/null +++ b/mGPT/data/transforms/xyz.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2020 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved.
+# +# Contact: ps-license@tuebingen.mpg.de + +from typing import Optional +from torch import Tensor + +from .base import Datastruct, dataclass, Transform +from ..tools import collate_tensor_with_padding + +from .joints2jfeats import Joints2Jfeats + + +class XYZTransform(Transform): + def __init__(self, joints2jfeats: Joints2Jfeats, **kwargs): + self.joints2jfeats = joints2jfeats + + def Datastruct(self, **kwargs): + return XYZDatastruct(_joints2jfeats=self.joints2jfeats, + transforms=self, + **kwargs) + + def __repr__(self): + return "XYZTransform()" + + +@dataclass +class XYZDatastruct(Datastruct): + transforms: XYZTransform + _joints2jfeats: Joints2Jfeats + + features: Optional[Tensor] = None + joints_: Optional[Tensor] = None + jfeats_: Optional[Tensor] = None + + def __post_init__(self): + self.datakeys = ["features", "joints_", "jfeats_"] + # starting point + if self.features is not None and self.jfeats_ is None: + self.jfeats_ = self.features + + @property + def joints(self): + # Cached value + if self.joints_ is not None: + return self.joints_ + + # self.jfeats_ should be defined + assert self.jfeats_ is not None + + self._joints2jfeats.to(self.jfeats.device) + self.joints_ = self._joints2jfeats.inverse(self.jfeats) + return self.joints_ + + @property + def jfeats(self): + # Cached value + if self.jfeats_ is not None: + return self.jfeats_ + + # self.joints_ should be defined + assert self.joints_ is not None + + self._joints2jfeats.to(self.joints.device) + self.jfeats_ = self._joints2jfeats(self.joints) + return self.jfeats_ + + def __len__(self): + return len(self.jfeats) diff --git a/mGPT/data/utils.py b/mGPT/data/utils.py new file mode 100644 index 0000000..30714ff --- /dev/null +++ b/mGPT/data/utils.py @@ -0,0 +1,81 @@ +import torch +import rich +import pickle +import numpy as np + + +def lengths_to_mask(lengths): + max_len = max(lengths) + mask = torch.arange(max_len, device=lengths.device).expand( + len(lengths), max_len) < lengths.unsqueeze(1) + return mask + + +# padding to max length in one batch +def collate_tensors(batch): + if isinstance(batch[0], np.ndarray): + batch = [torch.tensor(b).float() for b in batch] + + dims = batch[0].dim() + max_size = [max([b.size(i) for b in batch]) for i in range(dims)] + size = (len(batch), ) + tuple(max_size) + canvas = batch[0].new_zeros(size=size) + for i, b in enumerate(batch): + sub_tensor = canvas[i] + for d in range(dims): + sub_tensor = sub_tensor.narrow(d, 0, b.size(d)) + sub_tensor.add_(b) + return canvas + +def humanml3d_collate(batch): + notnone_batches = [b for b in batch if b is not None] + EvalFlag = False if notnone_batches[0][5] is None else True + + # Sort by text length + if EvalFlag: + notnone_batches.sort(key=lambda x: x[5], reverse=True) + + # Motion only + adapted_batch = { + "motion": + collate_tensors([torch.tensor(b[1]).float() for b in notnone_batches]), + "length": [b[2] for b in notnone_batches], + } + + # Text and motion + if notnone_batches[0][0] is not None: + adapted_batch.update({ + "text": [b[0] for b in notnone_batches], + "all_captions": [b[7] for b in notnone_batches], + }) + + # Evaluation related + if EvalFlag: + adapted_batch.update({ + "text": [b[0] for b in notnone_batches], + "word_embs": + collate_tensors( + [torch.tensor(b[3]).float() for b in notnone_batches]), + "pos_ohot": + collate_tensors( + [torch.tensor(b[4]).float() for b in notnone_batches]), + "text_len": + collate_tensors([torch.tensor(b[5]) for b in notnone_batches]), + "tokens": [b[6] for b in notnone_batches], + }) + + # Tasks 
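+    # The "tasks" entry is only present when the dataset sample tuple carries a 9th
+    # element; it is forwarded untouched under the "tasks" key for downstream use
+    # (e.g. building instruction-style prompts).
+    # For reference, a collated batch is a plain dict roughly of the form
+    #   {"motion": FloatTensor[B, T_max, D], "length": List[int], "text": List[str], ...}
+    # with the evaluator-only fields ("word_embs", "pos_ohot", "text_len", "tokens")
+    # added only when EvalFlag is True; the shapes above are illustrative.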
+ if len(notnone_batches[0]) == 9: + adapted_batch.update({"tasks": [b[8] for b in notnone_batches]}) + + return adapted_batch + + +def load_pkl(path, description=None, progressBar=False): + if progressBar: + with rich.progress.open(path, 'rb', description=description) as file: + data = pickle.load(file) + else: + with open(path, 'rb') as file: + data = pickle.load(file) + return data diff --git a/mGPT/losses/__init__.py b/mGPT/losses/__init__.py new file mode 100644 index 0000000..2fe3b35 --- /dev/null +++ b/mGPT/losses/__init__.py @@ -0,0 +1 @@ +from .base import BaseLosses diff --git a/mGPT/losses/base.py b/mGPT/losses/base.py new file mode 100644 index 0000000..d310311 --- /dev/null +++ b/mGPT/losses/base.py @@ -0,0 +1,61 @@ +import torch +import torch.nn as nn + +class BaseLosses(nn.Module): + def __init__(self, cfg, losses, params, losses_func, num_joints, **kwargs): + super().__init__() + + # Save parameters + self.num_joints = num_joints + self._params = params + + # Add total indicator + losses.append("total") if "total" not in losses else None + + # Register losses + for loss in losses: + self.register_buffer(loss, torch.tensor(0.0)) + self.register_buffer("count", torch.tensor(0.0)) + self.losses = losses + + # Instantiate loss functions + self._losses_func = {} + for loss in losses[:-1]: + self._losses_func[loss] = losses_func[loss](reduction='mean') + + def _update_loss(self, loss: str, outputs, inputs): + '''Update the loss and return the weighted loss.''' + # Update the loss + val = self._losses_func[loss](outputs, inputs) + # self.losses_values[loss] += val.detach() + getattr(self, loss).add_(val.detach()) + # Return a weighted sum + weighted_loss = self._params[loss] * val + return weighted_loss + + def reset(self): + '''Reset the losses to 0.''' + for loss in self.losses: + setattr(self, loss, torch.tensor(0.0, device=getattr(self, loss).device)) + setattr(self, "count", torch.tensor(0.0, device=getattr(self, "count").device)) + + def compute(self, split): + '''Compute the losses and return a dictionary with the losses.''' + count = self.count + # Loss dictionary + loss_dict = {loss: getattr(self, loss)/count for loss in self.losses} + # Format the losses for logging + log_dict = { self.loss2logname(loss, split): value.item() + for loss, value in loss_dict.items() if not torch.isnan(value)} + # Reset the losses + self.reset() + return log_dict + + def loss2logname(self, loss: str, split: str): + '''Convert the loss name to a log name.''' + if loss == "total": + log_name = f"{loss}/{split}" + else: + loss_type, name = loss.split("_") + log_name = f"{loss_type}/{name}/{split}" + return log_name diff --git a/mGPT/losses/mgpt.py b/mGPT/losses/mgpt.py new file mode 100644 index 0000000..69846b2 --- /dev/null +++ b/mGPT/losses/mgpt.py @@ -0,0 +1,97 @@ +import torch +import torch.nn as nn +from .base import BaseLosses + + +class CommitLoss(nn.Module): + """ + Useless Wrapper + """ + def __init__(self, **kwargs): + super().__init__() + + def forward(self, commit, commit2, **kwargs): + return commit + + +class GPTLosses(BaseLosses): + + def __init__(self, cfg, stage, num_joints, **kwargs): + # Save parameters + self.stage = stage + recons_loss = cfg.LOSS.ABLATION.RECONS_LOSS + + # Define losses + losses = [] + params = {} + if stage == "vae": + losses.append("recons_feature") + params['recons_feature'] = cfg.LOSS.LAMBDA_FEATURE + + losses.append("recons_velocity") + params['recons_velocity'] = cfg.LOSS.LAMBDA_VELOCITY + + losses.append("vq_commit") + params['vq_commit'] = 
cfg.LOSS.LAMBDA_COMMIT + elif stage in ["lm_pretrain", "lm_instruct"]: + losses.append("gpt_loss") + params['gpt_loss'] = cfg.LOSS.LAMBDA_CLS + + # Define loss functions & weights + losses_func = {} + for loss in losses: + if loss.split('_')[0] == 'recons': + if recons_loss == "l1": + losses_func[loss] = nn.L1Loss + elif recons_loss == "l2": + losses_func[loss] = nn.MSELoss + elif recons_loss == "l1_smooth": + losses_func[loss] = nn.SmoothL1Loss + elif loss.split('_')[1] in [ + 'commit', 'loss', 'gpt', 'm2t2m', 't2m2t' + ]: + losses_func[loss] = CommitLoss + elif loss.split('_')[1] in ['cls', 'lm']: + losses_func[loss] = nn.CrossEntropyLoss + else: + raise NotImplementedError(f"Loss {loss} not implemented.") + + super().__init__(cfg, losses, params, losses_func, num_joints, + **kwargs) + + def update(self, rs_set): + '''Update the losses''' + total: float = 0.0 + + if self.stage in ["vae"]: + total += self._update_loss("recons_feature", rs_set['m_rst'], + rs_set['m_ref']) + # total += self._update_loss("recons_joints", rs_set['joints_rst'], rs_set['joints_ref']) + nfeats = rs_set['m_rst'].shape[-1] + if nfeats in [263, 135 + 263]: + if nfeats == 135 + 263: + vel_start = 135 + 4 + elif nfeats == 263: + vel_start = 4 + total += self._update_loss( + "recons_velocity", + rs_set['m_rst'][..., vel_start:(self.num_joints - 1) * 3 + + vel_start], + rs_set['m_ref'][..., vel_start:(self.num_joints - 1) * 3 + + vel_start]) + else: + if self._params['recons_velocity'] != 0.0: + raise NotImplementedError( + "Velocity not implemented for nfeats = {})".format(nfeats)) + total += self._update_loss("vq_commit", rs_set['loss_commit'], + rs_set['loss_commit']) + + if self.stage in ["lm_pretrain", "lm_instruct"]: + total += self._update_loss("gpt_loss", rs_set['outputs'].loss, + rs_set['outputs'].loss) + + # Update the total loss + self.total += total.detach() + self.count += 1 + + return total diff --git a/mGPT/metrics/__init__.py b/mGPT/metrics/__init__.py new file mode 100644 index 0000000..33c2081 --- /dev/null +++ b/mGPT/metrics/__init__.py @@ -0,0 +1 @@ +from .base import BaseMetrics diff --git a/mGPT/metrics/base.py b/mGPT/metrics/base.py new file mode 100644 index 0000000..5407719 --- /dev/null +++ b/mGPT/metrics/base.py @@ -0,0 +1,46 @@ +from torch import Tensor, nn +from os.path import join as pjoin +from .mr import MRMetrics +from .t2m import TM2TMetrics +from .mm import MMMetrics +from .m2t import M2TMetrics +from .m2m import PredMetrics + + +class BaseMetrics(nn.Module): + def __init__(self, cfg, datamodule, debug, **kwargs) -> None: + super().__init__() + + njoints = datamodule.njoints + + data_name = datamodule.name + if data_name in ["humanml3d", "kit"]: + self.TM2TMetrics = TM2TMetrics( + cfg=cfg, + dataname=data_name, + diversity_times=30 if debug else cfg.METRIC.DIVERSITY_TIMES, + dist_sync_on_step=cfg.METRIC.DIST_SYNC_ON_STEP, + ) + self.M2TMetrics = M2TMetrics( + cfg=cfg, + w_vectorizer=datamodule.hparams.w_vectorizer, + diversity_times=30 if debug else cfg.METRIC.DIVERSITY_TIMES, + dist_sync_on_step=cfg.METRIC.DIST_SYNC_ON_STEP) + self.MMMetrics = MMMetrics( + cfg=cfg, + mm_num_times=cfg.METRIC.MM_NUM_TIMES, + dist_sync_on_step=cfg.METRIC.DIST_SYNC_ON_STEP, + ) + + self.MRMetrics = MRMetrics( + njoints=njoints, + jointstype=cfg.DATASET.JOINT_TYPE, + dist_sync_on_step=cfg.METRIC.DIST_SYNC_ON_STEP, + ) + self.PredMetrics = PredMetrics( + cfg=cfg, + njoints=njoints, + jointstype=cfg.DATASET.JOINT_TYPE, + dist_sync_on_step=cfg.METRIC.DIST_SYNC_ON_STEP, + task=cfg.model.params.task, + ) diff 
--git a/mGPT/metrics/m2m.py b/mGPT/metrics/m2m.py new file mode 100644 index 0000000..780d34d --- /dev/null +++ b/mGPT/metrics/m2m.py @@ -0,0 +1,95 @@ +from typing import List + +import torch +from torch import Tensor +from torchmetrics import Metric + +from .utils import * + + +# motion reconstruction metric +class PredMetrics(Metric): + + def __init__(self, + cfg, + njoints: int = 22, + jointstype: str = "mmm", + force_in_meter: bool = True, + align_root: bool = True, + dist_sync_on_step=True, + task: str = "pred", + **kwargs): + super().__init__(dist_sync_on_step=dist_sync_on_step) + + self.name = 'Motion Prdiction' + self.cfg = cfg + self.jointstype = jointstype + self.align_root = align_root + self.task = task + self.force_in_meter = force_in_meter + + self.add_state("count", default=torch.tensor(0), dist_reduce_fx="sum") + self.add_state("count_seq", + default=torch.tensor(0), + dist_reduce_fx="sum") + + self.add_state("APD", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + self.add_state("ADE", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + self.add_state("FDE", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + + self.MR_metrics = ["APD", "ADE", "FDE"] + + # All metric + self.metrics = self.MR_metrics + + def compute(self, sanity_flag): + + count = self.count + count_seq = self.count_seq + mr_metrics = {} + mr_metrics["APD"] = self.APD / count_seq + mr_metrics["ADE"] = self.ADE / count_seq + mr_metrics["FDE"] = self.FDE / count_seq + + # Reset + self.reset() + + return mr_metrics + + def update(self, joints_rst: Tensor, joints_ref: Tensor, + lengths: List[int]): + + assert joints_rst.shape == joints_ref.shape + assert joints_rst.dim() == 4 + # (bs, seq, njoint=22, 3) + + self.count += sum(lengths) + self.count_seq += len(lengths) + + rst = torch.flatten(joints_rst, start_dim=2) + ref = torch.flatten(joints_ref, start_dim=2) + + for i, l in enumerate(lengths): + if self.task == "pred": + pred_start = int(l*self.cfg.ABLATION.predict_ratio) + diff = rst[i,pred_start:] - ref[i,pred_start:] + elif self.task == "inbetween": + inbetween_start = int(l*self.cfg.ABLATION.inbetween_ratio) + inbetween_end = l - int(l*self.cfg.ABLATION.inbetween_ratio) + diff = rst[i,inbetween_start:inbetween_end] - ref[i,inbetween_start:inbetween_end] + else: + print(f"Task {self.task} not implemented.") + diff = rst - ref + + dist = torch.linalg.norm(diff, dim=-1)[None] + + ade = dist.mean(dim=1) + fde = dist[:,-1] + self.ADE = self.ADE + ade + self.FDE = self.FDE + fde diff --git a/mGPT/metrics/m2t.py b/mGPT/metrics/m2t.py new file mode 100644 index 0000000..3cf1f4c --- /dev/null +++ b/mGPT/metrics/m2t.py @@ -0,0 +1,345 @@ +from typing import List +import os +import torch +from torch import Tensor +from torchmetrics import Metric +from .utils import * +from bert_score import score as score_bert +import spacy +from mGPT.config import instantiate_from_config + +class M2TMetrics(Metric): + + def __init__(self, + cfg, + w_vectorizer, + dataname='humanml3d', + top_k=3, + bleu_k=4, + R_size=32, + max_text_len=40, + diversity_times=300, + dist_sync_on_step=True, + unit_length=4, + **kwargs): + super().__init__(dist_sync_on_step=dist_sync_on_step) + + self.cfg = cfg + self.dataname = dataname + self.w_vectorizer = w_vectorizer + self.name = "matching, fid, and diversity scores" + # self.text = True if cfg.TRAIN.STAGE in ["diffusion","t2m_gpt"] else False + self.max_text_len = max_text_len + self.top_k = top_k + self.bleu_k = bleu_k + self.R_size = R_size + self.diversity_times = 
diversity_times + self.unit_length = unit_length + + self.add_state("count", default=torch.tensor(0), dist_reduce_fx="sum") + self.add_state("count_seq", + default=torch.tensor(0), + dist_reduce_fx="sum") + + self.metrics = [] + + # Matching scores + self.add_state("Matching_score", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.add_state("gt_Matching_score", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.Matching_metrics = ["Matching_score", "gt_Matching_score"] + for k in range(1, top_k + 1): + self.add_state( + f"R_precision_top_{str(k)}", + default=torch.tensor(0.0), + dist_reduce_fx="sum", + ) + self.Matching_metrics.append(f"R_precision_top_{str(k)}") + for k in range(1, top_k + 1): + self.add_state( + f"gt_R_precision_top_{str(k)}", + default=torch.tensor(0.0), + dist_reduce_fx="sum", + ) + self.Matching_metrics.append(f"gt_R_precision_top_{str(k)}") + + self.metrics.extend(self.Matching_metrics) + + # NLG + for k in range(1, top_k + 1): + self.add_state( + f"Bleu_{str(k)}", + default=torch.tensor(0.0), + dist_reduce_fx="sum", + ) + self.metrics.append(f"Bleu_{str(k)}") + + self.add_state("ROUGE_L", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.metrics.append("ROUGE_L") + + self.add_state("CIDEr", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.metrics.append("CIDEr") + + # Chached batches + self.pred_texts = [] + self.gt_texts = [] + self.add_state("predtext_embeddings", default=[]) + self.add_state("gttext_embeddings", default=[]) + self.add_state("gtmotion_embeddings", default=[]) + + # T2M Evaluator + self._get_t2m_evaluator(cfg) + + self.nlp = spacy.load('en_core_web_sm') + + if self.cfg.model.params.task == 'm2t': + from nlgmetricverse import NLGMetricverse, load_metric + metrics = [ + load_metric("bleu", resulting_name="bleu_1", compute_kwargs={"max_order": 1}), + load_metric("bleu", resulting_name="bleu_4", compute_kwargs={"max_order": 4}), + load_metric("rouge"), + load_metric("cider"), + ] + self.nlg_evaluator = NLGMetricverse(metrics) + + def _get_t2m_evaluator(self, cfg): + """ + load T2M text encoder and motion encoder for evaluating + """ + # init module + self.t2m_textencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_textencoder) + self.t2m_moveencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_moveencoder) + self.t2m_motionencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_motionencoder) + + + # load pretrianed + if self.dataname == "kit": + dataname = "kit" + else: + dataname = "t2m" + + t2m_checkpoint = torch.load(os.path.join( + cfg.METRIC.TM2T.t2m_path, dataname, "text_mot_match/model/finest.tar"), + map_location='cpu') + self.t2m_textencoder.load_state_dict(t2m_checkpoint["text_encoder"]) + self.t2m_moveencoder.load_state_dict( + t2m_checkpoint["movement_encoder"]) + self.t2m_motionencoder.load_state_dict( + t2m_checkpoint["motion_encoder"]) + + # freeze params + self.t2m_textencoder.eval() + self.t2m_moveencoder.eval() + self.t2m_motionencoder.eval() + for p in self.t2m_textencoder.parameters(): + p.requires_grad = False + for p in self.t2m_moveencoder.parameters(): + p.requires_grad = False + for p in self.t2m_motionencoder.parameters(): + p.requires_grad = False + + def _process_text(self, sentence): + sentence = sentence.replace('-', '') + doc = self.nlp(sentence) + word_list = [] + pos_list = [] + for token in doc: + word = token.text + if not word.isalpha(): + continue + if (token.pos_ == 'NOUN' + or token.pos_ == 'VERB') and (word != 'left'): + word_list.append(token.lemma_) + else: + 
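+                # Tokens that are not nouns/verbs (and the ambiguous word "left") keep
+                # their surface form, while nouns and verbs are lemmatized above; this
+                # mirrors the "word/POS" vocabulary expected by the pretrained T2M text
+                # encoder, which is presumably why "left" is special-cased (direction vs.
+                # the past tense of "leave").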
word_list.append(word) + pos_list.append(token.pos_) + return word_list, pos_list + + def _get_text_embeddings(self, texts): + word_embs = [] + pos_ohot = [] + text_lengths = [] + for i, sentence in enumerate(texts): + word_list, pos_list = self._process_text(sentence.strip()) + t_tokens = [ + '%s/%s' % (word_list[i], pos_list[i]) + for i in range(len(word_list)) + ] + + if len(t_tokens) < self.max_text_len: + # pad with "unk" + tokens = ['sos/OTHER'] + t_tokens + ['eos/OTHER'] + sent_len = len(tokens) + tokens = tokens + ['unk/OTHER' + ] * (self.max_text_len + 2 - sent_len) + else: + # crop + tokens = t_tokens[:self.max_text_len] + tokens = ['sos/OTHER'] + tokens + ['eos/OTHER'] + sent_len = len(tokens) + pos_one_hots = [] + word_embeddings = [] + for token in tokens: + word_emb, pos_oh = self.w_vectorizer[token] + pos_one_hots.append(torch.tensor(pos_oh).float()[None]) + word_embeddings.append(torch.tensor(word_emb).float()[None]) + text_lengths.append(sent_len) + pos_ohot.append(torch.cat(pos_one_hots, dim=0)[None]) + word_embs.append(torch.cat(word_embeddings, dim=0)[None]) + + word_embs = torch.cat(word_embs, dim=0).to(self.Matching_score) + pos_ohot = torch.cat(pos_ohot, dim=0).to(self.Matching_score) + text_lengths = torch.tensor(text_lengths).to(self.Matching_score) + + align_idx = np.argsort(text_lengths.data.tolist())[::-1].copy() + + # get text embeddings + text_embeddings = self.t2m_textencoder(word_embs[align_idx], + pos_ohot[align_idx], + text_lengths[align_idx]) + + original_text_embeddings = text_embeddings.clone() + + for idx, sort in enumerate(align_idx): + original_text_embeddings[sort] = text_embeddings[idx] + + return original_text_embeddings + + @torch.no_grad() + def compute(self, sanity_flag): + count = self.count.item() + count_seq = self.count_seq.item() + + # Init metrics dict + metrics = {metric: getattr(self, metric) for metric in self.metrics} + + # Jump in sanity check stage + if sanity_flag: + return metrics + + # Cat cached batches and shuffle + shuffle_idx = torch.randperm(count_seq) + all_motions = torch.cat(self.gtmotion_embeddings, + axis=0).cpu()[shuffle_idx, :] + all_gttexts = torch.cat(self.gttext_embeddings, + axis=0).cpu()[shuffle_idx, :] + all_predtexts = torch.cat(self.predtext_embeddings, + axis=0).cpu()[shuffle_idx, :] + + print("Computing metrics...") + + # Compute r-precision + assert count_seq >= self.R_size + top_k_mat = torch.zeros((self.top_k, )) + for i in range(count_seq // self.R_size): + # [bs=32, 1*256] + group_texts = all_predtexts[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 1*256] + group_motions = all_motions[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 32] + dist_mat = euclidean_distance_matrix(group_texts, + group_motions).nan_to_num() + # print(dist_mat[:5]) + self.Matching_score += dist_mat.trace() + argsmax = torch.argsort(dist_mat, dim=1) + top_k_mat += calculate_top_k(argsmax, top_k=self.top_k).sum(axis=0) + + R_count = count_seq // self.R_size * self.R_size + metrics["Matching_score"] = self.Matching_score / R_count + for k in range(self.top_k): + metrics[f"R_precision_top_{str(k+1)}"] = top_k_mat[k] / R_count + + # Compute r-precision with gt + assert count_seq >= self.R_size + top_k_mat = torch.zeros((self.top_k, )) + for i in range(count_seq // self.R_size): + # [bs=32, 1*256] + group_texts = all_gttexts[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 1*256] + group_motions = all_motions[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 32] + dist_mat = euclidean_distance_matrix(group_texts, + 
group_motions).nan_to_num() + # match score + self.gt_Matching_score += dist_mat.trace() + argsmax = torch.argsort(dist_mat, dim=1) + top_k_mat += calculate_top_k(argsmax, top_k=self.top_k).sum(axis=0) + metrics["gt_Matching_score"] = self.gt_Matching_score / R_count + for k in range(self.top_k): + metrics[f"gt_R_precision_top_{str(k+1)}"] = top_k_mat[k] / R_count + + # NLP metrics + scores = self.nlg_evaluator(predictions=self.pred_texts, + references=self.gt_texts) + for k in range(1, self.bleu_k + 1): + metrics[f"Bleu_{str(k)}"] = torch.tensor(scores[f'bleu_{str(k)}'], + device=self.device) + + metrics["ROUGE_L"] = torch.tensor(scores["rouge"]["rougeL"], + device=self.device) + metrics["CIDEr"] = torch.tensor(scores["cider"]['score'],device=self.device) + + # Bert metrics + P, R, F1 = score_bert(self.pred_texts, + self.gt_texts, + lang='en', + rescale_with_baseline=True, + idf=True, + device=self.device, + verbose=False) + + metrics["Bert_F1"] = F1.mean() + + # Reset + self.reset() + self.gt_texts = [] + self.pred_texts = [] + + return {**metrics} + + @torch.no_grad() + def update(self, + feats_ref: Tensor, + pred_texts: List[str], + gt_texts: List[str], + lengths: List[int], + word_embs: Tensor = None, + pos_ohot: Tensor = None, + text_lengths: Tensor = None): + + self.count += sum(lengths) + self.count_seq += len(lengths) + + # motion encoder + m_lens = torch.tensor(lengths, device=feats_ref.device) + align_idx = np.argsort(m_lens.data.tolist())[::-1].copy() + feats_ref = feats_ref[align_idx] + m_lens = m_lens[align_idx] + m_lens = torch.div(m_lens, + self.cfg.DATASET.HUMANML3D.UNIT_LEN, + rounding_mode="floor") + ref_mov = self.t2m_moveencoder(feats_ref[..., :-4]).detach() + m_lens = m_lens // self.unit_length + ref_emb = self.t2m_motionencoder(ref_mov, m_lens) + gtmotion_embeddings = torch.flatten(ref_emb, start_dim=1).detach() + self.gtmotion_embeddings.append(gtmotion_embeddings) + + # text encoder + gttext_emb = self.t2m_textencoder(word_embs, pos_ohot, + text_lengths)[align_idx] + gttext_embeddings = torch.flatten(gttext_emb, start_dim=1).detach() + predtext_emb = self._get_text_embeddings(pred_texts)[align_idx] + predtext_embeddings = torch.flatten(predtext_emb, start_dim=1).detach() + + self.gttext_embeddings.append(gttext_embeddings) + self.predtext_embeddings.append(predtext_embeddings) + + self.pred_texts.extend(pred_texts) + self.gt_texts.extend(gt_texts) diff --git a/mGPT/metrics/mm.py b/mGPT/metrics/mm.py new file mode 100644 index 0000000..1657187 --- /dev/null +++ b/mGPT/metrics/mm.py @@ -0,0 +1,129 @@ +from typing import List + +import torch +from torch import Tensor +from torchmetrics import Metric +from torchmetrics.functional import pairwise_euclidean_distance +from .utils import * +import os +from mGPT.config import instantiate_from_config + +class MMMetrics(Metric): + full_state_update = True + + def __init__(self, cfg, dataname='humanml3d', mm_num_times=10, dist_sync_on_step=True, **kwargs): + super().__init__(dist_sync_on_step=dist_sync_on_step) + + self.name = "MultiModality scores" + self.cfg = cfg + self.dataname = dataname + self.mm_num_times = mm_num_times + + self.add_state("count", default=torch.tensor(0), dist_reduce_fx="sum") + self.add_state("count_seq", + default=torch.tensor(0), + dist_reduce_fx="sum") + + self.metrics = ["MultiModality"] + self.add_state("MultiModality", + default=torch.tensor(0.), + dist_reduce_fx="sum") + + # chached batches + self.add_state("mm_motion_embeddings", default=[], dist_reduce_fx=None) + + # T2M Evaluator + 
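+        # The evaluator below is the pretrained text/motion feature extractor from the
+        # T2M benchmark; MultiModality is then (roughly) the mean pairwise L2 distance
+        # between the embeddings of `mm_num_times` motions generated for the same text,
+        # so higher values indicate more diverse generations per prompt.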
self._get_t2m_evaluator(cfg) + + def _get_t2m_evaluator(self, cfg): + """ + load T2M text encoder and motion encoder for evaluating + """ + # init module + self.t2m_textencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_textencoder) + self.t2m_moveencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_moveencoder) + self.t2m_motionencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_motionencoder) + + # load pretrianed + if self.dataname == "kit": + dataname = "kit" + else: + dataname = "t2m" + t2m_checkpoint = torch.load(os.path.join( + cfg.METRIC.TM2T.t2m_path, dataname, + "text_mot_match/model/finest.tar"), + map_location="cpu") + + self.t2m_textencoder.load_state_dict(t2m_checkpoint["text_encoder"]) + self.t2m_moveencoder.load_state_dict( + t2m_checkpoint["movement_encoder"]) + self.t2m_motionencoder.load_state_dict( + t2m_checkpoint["motion_encoder"]) + + # freeze params + self.t2m_textencoder.eval() + self.t2m_moveencoder.eval() + self.t2m_motionencoder.eval() + for p in self.t2m_textencoder.parameters(): + p.requires_grad = False + for p in self.t2m_moveencoder.parameters(): + p.requires_grad = False + for p in self.t2m_motionencoder.parameters(): + p.requires_grad = False + + def compute(self, sanity_flag): + count = self.count.item() + count_seq = self.count_seq.item() + + # init metrics + metrics = {metric: getattr(self, metric) for metric in self.metrics} + + # if in sanity check stage then jump + if sanity_flag: + return metrics + + # cat all embeddings + all_mm_motions = torch.cat(self.mm_motion_embeddings, + axis=0).cpu().numpy() + metrics['MultiModality'] = calculate_multimodality_np( + all_mm_motions, self.mm_num_times) + + # Reset + self.reset() + + return {**metrics} + + def update( + self, + feats_rst: Tensor, + lengths_rst: List[int], + ): + self.count += sum(lengths_rst) + self.count_seq += len(lengths_rst) + + align_idx = np.argsort(lengths_rst)[::-1].copy() + feats_rst = feats_rst[align_idx] + lengths_rst = np.array(lengths_rst)[align_idx] + recmotion_embeddings = self.get_motion_embeddings( + feats_rst, lengths_rst) + cache = [0] * len(lengths_rst) + for i in range(len(lengths_rst)): + cache[align_idx[i]] = recmotion_embeddings[i:i + 1] + + mm_motion_embeddings = torch.cat(cache, axis=0).unsqueeze(0) + # self.mm_motion_embeddings.extend(cache) + # print(mm_motion_embeddings.shape) + # # store all mm motion embeddings + self.mm_motion_embeddings.append(mm_motion_embeddings) + + def get_motion_embeddings(self, feats: Tensor, lengths: List[int]): + m_lens = torch.tensor(lengths) + m_lens = torch.div(m_lens, + self.cfg.DATASET.HUMANML3D.UNIT_LEN, + rounding_mode="floor") + + mov = self.t2m_moveencoder(feats[..., :-4]).detach() + emb = self.t2m_motionencoder(mov, m_lens) + + # [bs, nlatent*ndim] <= [bs, nlatent, ndim] + return torch.flatten(emb, start_dim=1).detach() diff --git a/mGPT/metrics/mr.py b/mGPT/metrics/mr.py new file mode 100644 index 0000000..ba5129f --- /dev/null +++ b/mGPT/metrics/mr.py @@ -0,0 +1,97 @@ +from typing import List + +import torch +from torch import Tensor +from torchmetrics import Metric + +from .utils import * + + +# motion reconstruction metric +class MRMetrics(Metric): + + def __init__(self, + njoints, + jointstype: str = "mmm", + force_in_meter: bool = True, + align_root: bool = True, + dist_sync_on_step=True, + **kwargs): + super().__init__(dist_sync_on_step=dist_sync_on_step) + + self.name = 'Motion Reconstructions' + self.jointstype = jointstype + self.align_root = align_root + self.force_in_meter = force_in_meter + + 
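+        # Accumulator conventions: "count" sums frames over all sequences and
+        # "count_seq" counts sequences; MPJPE/PAMPJPE are averaged per frame, while
+        # ACCEL is averaged over count - 2 * count_seq terms, since each acceleration
+        # value needs three consecutive frames.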
self.add_state("count", default=torch.tensor(0), dist_reduce_fx="sum") + self.add_state("count_seq", + default=torch.tensor(0), + dist_reduce_fx="sum") + + self.add_state("MPJPE", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + self.add_state("PAMPJPE", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + self.add_state("ACCEL", + default=torch.tensor([0.0]), + dist_reduce_fx="sum") + # todo + # self.add_state("ROOT", default=torch.tensor([0.0]), dist_reduce_fx="sum") + + self.MR_metrics = ["MPJPE", "PAMPJPE", "ACCEL"] + + # All metric + self.metrics = self.MR_metrics + + def compute(self, sanity_flag): + if self.force_in_meter: + # different jointstypes have different scale factors + # if self.jointstype == 'mmm': + # factor = 1000.0 + # elif self.jointstype == 'humanml3d': + # factor = 1000.0 * 0.75 / 480 + factor = 1000.0 + else: + factor = 1.0 + + count = self.count + count_seq = self.count_seq + mr_metrics = {} + mr_metrics["MPJPE"] = self.MPJPE / count * factor + mr_metrics["PAMPJPE"] = self.PAMPJPE / count * factor + # accel error: joints_gt[:-2] - 2 * joints_gt[1:-1] + joints_gt[2:] + # n-2 for each sequences + mr_metrics["ACCEL"] = self.ACCEL / (count - 2 * count_seq) * factor + + # Reset + self.reset() + + return mr_metrics + + def update(self, joints_rst: Tensor, joints_ref: Tensor, + lengths: List[int]): + assert joints_rst.shape == joints_ref.shape + assert joints_rst.dim() == 4 + # (bs, seq, njoint=22, 3) + + self.count += sum(lengths) + self.count_seq += len(lengths) + + # avoid cuda error of DDP in pampjpe + rst = joints_rst.detach().cpu() + ref = joints_ref.detach().cpu() + + # align root joints index + if self.align_root and self.jointstype in ['mmm', 'humanml3d']: + align_inds = [0] + else: + align_inds = None + + for i in range(len(lengths)): + self.MPJPE += torch.sum( + calc_mpjpe(rst[i], ref[i], align_inds=align_inds)) + self.PAMPJPE += torch.sum(calc_pampjpe(rst[i], ref[i])) + self.ACCEL += torch.sum(calc_accel(rst[i], ref[i])) diff --git a/mGPT/metrics/t2m.py b/mGPT/metrics/t2m.py new file mode 100644 index 0000000..d7917c3 --- /dev/null +++ b/mGPT/metrics/t2m.py @@ -0,0 +1,259 @@ +from typing import List +import os +import torch +from torch import Tensor +from torchmetrics import Metric +from torchmetrics.functional import pairwise_euclidean_distance +from .utils import * +from mGPT.config import instantiate_from_config + +class TM2TMetrics(Metric): + def __init__(self, + cfg, + dataname='humanml3d', + top_k=3, + R_size=32, + diversity_times=300, + dist_sync_on_step=True, + **kwargs): + super().__init__(dist_sync_on_step=dist_sync_on_step) + + self.cfg = cfg + self.dataname = dataname + self.name = "matching, fid, and diversity scores" + self.top_k = top_k + self.R_size = R_size + self.text = 'lm' in cfg.TRAIN.STAGE and cfg.model.params.task == 't2m' + self.diversity_times = diversity_times + + self.add_state("count", default=torch.tensor(0), dist_reduce_fx="sum") + self.add_state("count_seq", + default=torch.tensor(0), + dist_reduce_fx="sum") + + self.metrics = [] + + # Matching scores + if self.text: + self.add_state("Matching_score", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.add_state("gt_Matching_score", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.Matching_metrics = ["Matching_score", "gt_Matching_score"] + for k in range(1, top_k + 1): + self.add_state( + f"R_precision_top_{str(k)}", + default=torch.tensor(0.0), + dist_reduce_fx="sum", + ) + self.Matching_metrics.append(f"R_precision_top_{str(k)}") + for k in 
range(1, top_k + 1): + self.add_state( + f"gt_R_precision_top_{str(k)}", + default=torch.tensor(0.0), + dist_reduce_fx="sum", + ) + self.Matching_metrics.append(f"gt_R_precision_top_{str(k)}") + self.metrics.extend(self.Matching_metrics) + + # Fid + self.add_state("FID", default=torch.tensor(0.0), dist_reduce_fx="sum") + self.metrics.append("FID") + + # Diversity + self.add_state("Diversity", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.add_state("gt_Diversity", + default=torch.tensor(0.0), + dist_reduce_fx="sum") + self.metrics.extend(["Diversity", "gt_Diversity"]) + + # Chached batches + self.add_state("text_embeddings", default=[], dist_reduce_fx=None) + self.add_state("recmotion_embeddings", default=[], dist_reduce_fx=None) + self.add_state("gtmotion_embeddings", default=[], dist_reduce_fx=None) + + # T2M Evaluator + self._get_t2m_evaluator(cfg) + + def _get_t2m_evaluator(self, cfg): + """ + load T2M text encoder and motion encoder for evaluating + """ + # init module + self.t2m_textencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_textencoder) + self.t2m_moveencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_moveencoder) + self.t2m_motionencoder = instantiate_from_config(cfg.METRIC.TM2T.t2m_motionencoder) + + + # load pretrianed + if self.dataname == "kit": + dataname = "kit" + else: + dataname = "t2m" + + t2m_checkpoint = torch.load(os.path.join( + cfg.METRIC.TM2T.t2m_path, dataname, "text_mot_match/model/finest.tar"), + map_location="cpu") + + self.t2m_textencoder.load_state_dict(t2m_checkpoint["text_encoder"]) + self.t2m_moveencoder.load_state_dict( + t2m_checkpoint["movement_encoder"]) + self.t2m_motionencoder.load_state_dict( + t2m_checkpoint["motion_encoder"]) + + # freeze params + self.t2m_textencoder.eval() + self.t2m_moveencoder.eval() + self.t2m_motionencoder.eval() + for p in self.t2m_textencoder.parameters(): + p.requires_grad = False + for p in self.t2m_moveencoder.parameters(): + p.requires_grad = False + for p in self.t2m_motionencoder.parameters(): + p.requires_grad = False + + @torch.no_grad() + def compute(self, sanity_flag): + count = self.count.item() + count_seq = self.count_seq.item() + + # Init metrics dict + metrics = {metric: getattr(self, metric) for metric in self.metrics} + + # Jump in sanity check stage + if sanity_flag: + return metrics + + # Cat cached batches and shuffle + shuffle_idx = torch.randperm(count_seq) + + all_genmotions = torch.cat(self.recmotion_embeddings, + axis=0).cpu()[shuffle_idx, :] + all_gtmotions = torch.cat(self.gtmotion_embeddings, + axis=0).cpu()[shuffle_idx, :] + + # Compute text related metrics + if self.text: + all_texts = torch.cat(self.text_embeddings, + axis=0).cpu()[shuffle_idx, :] + # Compute r-precision + assert count_seq > self.R_size + top_k_mat = torch.zeros((self.top_k, )) + for i in range(count_seq // self.R_size): + # [bs=32, 1*256] + group_texts = all_texts[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 1*256] + group_motions = all_genmotions[i * self.R_size:(i + 1) * + self.R_size] + # dist_mat = pairwise_euclidean_distance(group_texts, group_motions) + # [bs=32, 32] + dist_mat = euclidean_distance_matrix( + group_texts, group_motions).nan_to_num() + # print(dist_mat[:5]) + self.Matching_score += dist_mat.trace() + argsmax = torch.argsort(dist_mat, dim=1) + top_k_mat += calculate_top_k(argsmax, + top_k=self.top_k).sum(axis=0) + + R_count = count_seq // self.R_size * self.R_size + metrics["Matching_score"] = self.Matching_score / R_count + for k in range(self.top_k): + 
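+                # R-precision at rank k: within each group of R_size text/motion
+                # pairs, the matched motion should be among the k nearest motions
+                # to its text embedding; top_k_mat[k] counts how often that holds
+                # over all groups, and dividing by R_count converts it to a rate.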
metrics[f"R_precision_top_{str(k+1)}"] = top_k_mat[k] / R_count + + # Compute r-precision with gt + assert count_seq > self.R_size + top_k_mat = torch.zeros((self.top_k, )) + for i in range(count_seq // self.R_size): + # [bs=32, 1*256] + group_texts = all_texts[i * self.R_size:(i + 1) * self.R_size] + # [bs=32, 1*256] + group_motions = all_gtmotions[i * self.R_size:(i + 1) * + self.R_size] + # [bs=32, 32] + dist_mat = euclidean_distance_matrix( + group_texts, group_motions).nan_to_num() + # match score + self.gt_Matching_score += dist_mat.trace() + argsmax = torch.argsort(dist_mat, dim=1) + top_k_mat += calculate_top_k(argsmax, + top_k=self.top_k).sum(axis=0) + metrics["gt_Matching_score"] = self.gt_Matching_score / R_count + for k in range(self.top_k): + metrics[f"gt_R_precision_top_{str(k+1)}"] = top_k_mat[k] / R_count + + # tensor -> numpy for FID + all_genmotions = all_genmotions.numpy() + all_gtmotions = all_gtmotions.numpy() + + # Compute fid + mu, cov = calculate_activation_statistics_np(all_genmotions) + gt_mu, gt_cov = calculate_activation_statistics_np(all_gtmotions) + metrics["FID"] = calculate_frechet_distance_np(gt_mu, gt_cov, mu, cov) + + # Compute diversity + assert count_seq > self.diversity_times + metrics["Diversity"] = calculate_diversity_np(all_genmotions, + self.diversity_times) + metrics["gt_Diversity"] = calculate_diversity_np( + all_gtmotions, self.diversity_times) + + # Reset + self.reset() + + return {**metrics} + + @torch.no_grad() + def update(self, + feats_ref: Tensor, + feats_rst: Tensor, + lengths_ref: List[int], + lengths_rst: List[int], + word_embs: Tensor = None, + pos_ohot: Tensor = None, + text_lengths: Tensor = None): + + self.count += sum(lengths_ref) + self.count_seq += len(lengths_ref) + + # T2m motion encoder + align_idx = np.argsort(lengths_ref)[::-1].copy() + feats_ref = feats_ref[align_idx] + lengths_ref = np.array(lengths_ref)[align_idx] + gtmotion_embeddings = self.get_motion_embeddings( + feats_ref, lengths_ref) + cache = [0] * len(lengths_ref) + for i in range(len(lengths_ref)): + cache[align_idx[i]] = gtmotion_embeddings[i:i + 1] + self.gtmotion_embeddings.extend(cache) + + align_idx = np.argsort(lengths_rst)[::-1].copy() + feats_rst = feats_rst[align_idx] + lengths_rst = np.array(lengths_rst)[align_idx] + recmotion_embeddings = self.get_motion_embeddings( + feats_rst, lengths_rst) + cache = [0] * len(lengths_rst) + for i in range(len(lengths_rst)): + cache[align_idx[i]] = recmotion_embeddings[i:i + 1] + self.recmotion_embeddings.extend(cache) + + # T2m text encoder + if self.text: + text_emb = self.t2m_textencoder(word_embs, pos_ohot, text_lengths) + text_embeddings = torch.flatten(text_emb, start_dim=1).detach() + self.text_embeddings.append(text_embeddings) + + def get_motion_embeddings(self, feats: Tensor, lengths: List[int]): + m_lens = torch.tensor(lengths) + m_lens = torch.div(m_lens, + self.cfg.DATASET.HUMANML3D.UNIT_LEN, + rounding_mode="floor") + m_lens = m_lens // self.cfg.DATASET.HUMANML3D.UNIT_LEN + mov = self.t2m_moveencoder(feats[..., :-4]).detach() + emb = self.t2m_motionencoder(mov, m_lens) + + # [bs, nlatent*ndim] <= [bs, nlatent, ndim] + return torch.flatten(emb, start_dim=1).detach() diff --git a/mGPT/metrics/utils.py b/mGPT/metrics/utils.py new file mode 100644 index 0000000..40e536f --- /dev/null +++ b/mGPT/metrics/utils.py @@ -0,0 +1,607 @@ +import numpy as np +import scipy.linalg +import torch +from torch import linalg +import sys + + +def l2_norm(x1, x2, dim): + return torch.linalg.vector_norm(x1 - x2, ord=2, 
dim=dim) + + +def variance(x, T, dim): + mean = x.mean(dim) + out = (x - mean)**2 + out = out.sum(dim) + return out / (T - 1) + + +def sqrtm(input): + m = input.detach().cpu().numpy().astype(np.float64_) + sqrtm = torch.from_numpy(scipy.linalg.sqrtm(m)).to(input) + return sqrtm + + +# (X - X_train)*(X - X_train) = -2X*X_train + X*X + X_train*X_train +def euclidean_distance_matrix(matrix1, matrix2): + """ + Params: + -- matrix1: N1 x D + -- matrix2: N2 x D + Returns: + -- dist: N1 x N2 + dist[i, j] == distance(matrix1[i], matrix2[j]) + """ + assert matrix1.shape[1] == matrix2.shape[1] + d1 = -2 * torch.mm(matrix1, matrix2.T) # shape (num_test, num_train) + d2 = torch.sum(torch.square(matrix1), axis=1, + keepdims=True) # shape (num_test, 1) + d3 = torch.sum(torch.square(matrix2), axis=1) # shape (num_train, ) + dists = torch.sqrt(d1 + d2 + d3) # broadcasting + return dists + + +def euclidean_distance_matrix_np(matrix1, matrix2): + """ + Params: + -- matrix1: N1 x D + -- matrix2: N2 x D + Returns: + -- dist: N1 x N2 + dist[i, j] == distance(matrix1[i], matrix2[j]) + """ + assert matrix1.shape[1] == matrix2.shape[1] + d1 = -2 * np.dot(matrix1, matrix2.T) # shape (num_test, num_train) + d2 = np.sum(np.square(matrix1), axis=1, + keepdims=True) # shape (num_test, 1) + d3 = np.sum(np.square(matrix2), axis=1) # shape (num_train, ) + dists = np.sqrt(d1 + d2 + d3) # broadcasting + return dists + + +def calculate_top_k(mat, top_k): + size = mat.shape[0] + gt_mat = (torch.unsqueeze(torch.arange(size), + 1).to(mat.device).repeat_interleave(size, 1)) + bool_mat = mat == gt_mat + correct_vec = False + top_k_list = [] + for i in range(top_k): + # print(correct_vec, bool_mat[:, i]) + correct_vec = correct_vec | bool_mat[:, i] + # print(correct_vec) + top_k_list.append(correct_vec[:, None]) + top_k_mat = torch.cat(top_k_list, dim=1) + return top_k_mat + + +def calculate_activation_statistics(activations): + """ + Params: + -- activation: num_samples x dim_feat + Returns: + -- mu: dim_feat + -- sigma: dim_feat x dim_feat + """ + activations = activations.cpu().numpy() + mu = np.mean(activations, axis=0) + sigma = np.cov(activations, rowvar=False) + return mu, sigma + + +def calculate_activation_statistics_np(activations): + """ + Params: + -- activation: num_samples x dim_feat + Returns: + -- mu: dim_feat + -- sigma: dim_feat x dim_feat + """ + mu = np.mean(activations, axis=0) + cov = np.cov(activations, rowvar=False) + return mu, cov + + +# def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6): +# """Numpy implementation of the Frechet Distance. +# The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) +# and X_2 ~ N(mu_2, C_2) is +# d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). +# Stable version by Dougal J. Sutherland. +# Params: +# -- mu1 : Numpy array containing the activations of a layer of the +# inception net (like returned by the function 'get_predictions') +# for generated samples. +# -- mu2 : The sample mean over activations, precalculated on an +# representative data set. +# -- sigma1: The covariance matrix over activations for generated samples. +# -- sigma2: The covariance matrix over activations, precalculated on an +# representative data set. +# Returns: +# -- : The Frechet Distance. 
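+# End-to-end FID sketch built from the helpers above, assuming two toy feature
+# matrices of shape (num_samples, feat_dim) (the variable names are illustrative):
+#   mu_gen, cov_gen = calculate_activation_statistics_np(gen_embeddings)
+#   mu_gt,  cov_gt  = calculate_activation_statistics_np(gt_embeddings)
+#   fid = calculate_frechet_distance_np(mu_gt, cov_gt, mu_gen, cov_gen)
+# This is the same sequence TM2TMetrics.compute runs on its cached motion
+# embeddings.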
+# """ + +# mu1 = torch.atleast_1d(mu1) +# mu2 = torch.atleast_1d(mu2) + +# sigma1 = torch.atleast_2d(sigma1) +# sigma2 = torch.atleast_2d(sigma2) + +# assert mu1.shape == mu2.shape, \ +# 'Training and test mean vectors have different lengths' +# assert sigma1.shape == sigma2.shape, \ +# 'Training and test covariances have different dimensions' + +# diff = mu1 - mu2 + +# # Product might be almost singular +# # covmean, _ = sqrtm(sigma1.dot(sigma2), disp=False) +# covmean = sqrtm(torch.mm(sigma1,sigma2)) +# if not torch.isfinite(covmean).all(): +# msg = ('fid calculation produces singular product; ' +# 'adding %s to diagonal of cov estimates') % eps +# print(msg) +# offset = torch.eye(sigma1.shape[0]) * eps +# # covmean = sqrtm((sigma1 + offset).dot(sigma2 + offset)) +# covmean = sqrtm(torch.mm(sigma1+ offset,sigma2+ offset)) + +# # Numerical error might give slight imaginary component +# if torch.is_complex(covmean): +# if not torch.allclose(torch.diagonal(covmean).imag, 0, atol=1e-3): +# m = torch.max(torch.abs(covmean.imag)) +# raise ValueError('Imaginary component {}'.format(m)) +# covmean = covmean.real + +# tr_covmean = torch.trace(covmean) + +# return (diff.dot(diff) + torch.trace(sigma1) + +# torch.trace(sigma2) - 2 * tr_covmean) + + +def calculate_frechet_distance_np(mu1, sigma1, mu2, sigma2, eps=1e-6): + """Numpy implementation of the Frechet Distance. + The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) + and X_2 ~ N(mu_2, C_2) is + d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). + Stable version by Dougal J. Sutherland. + Params: + -- mu1 : Numpy array containing the activations of a layer of the + inception net (like returned by the function 'get_predictions') + for generated samples. + -- mu2 : The sample mean over activations, precalculated on an + representative data set. + -- sigma1: The covariance matrix over activations for generated samples. + -- sigma2: The covariance matrix over activations, precalculated on an + representative data set. + Returns: + -- : The Frechet Distance. 
+ """ + + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert (mu1.shape == mu2.shape + ), "Training and test mean vectors have different lengths" + assert (sigma1.shape == sigma2.shape + ), "Training and test covariances have different dimensions" + + diff = mu1 - mu2 + # Product might be almost singular + covmean, _ = scipy.linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = ("fid calculation produces singular product; " + "adding %s to diagonal of cov estimates") % eps + print(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = scipy.linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # Numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError("Imaginary component {}".format(m)) + # print("Imaginary component {}".format(m)) + covmean = covmean.real + tr_covmean = np.trace(covmean) + + return diff.dot(diff) + np.trace(sigma1) + np.trace( + sigma2) - 2 * tr_covmean + + +def calculate_diversity(activation, diversity_times): + assert len(activation.shape) == 2 + assert activation.shape[0] > diversity_times + num_samples = activation.shape[0] + + first_indices = np.random.choice(num_samples, + diversity_times, + replace=False) + second_indices = np.random.choice(num_samples, + diversity_times, + replace=False) + dist = linalg.norm(activation[first_indices] - activation[second_indices], + axis=1) + return dist.mean() + + +def calculate_diversity_np(activation, diversity_times): + assert len(activation.shape) == 2 + assert activation.shape[0] > diversity_times + num_samples = activation.shape[0] + + first_indices = np.random.choice(num_samples, + diversity_times, + replace=False) + second_indices = np.random.choice(num_samples, + diversity_times, + replace=False) + dist = scipy.linalg.norm(activation[first_indices] - + activation[second_indices], + axis=1) + return dist.mean() + + +def calculate_multimodality_np(activation, multimodality_times): + assert len(activation.shape) == 3 + assert activation.shape[1] > multimodality_times + num_per_sent = activation.shape[1] + + first_dices = np.random.choice(num_per_sent, + multimodality_times, + replace=False) + second_dices = np.random.choice(num_per_sent, + multimodality_times, + replace=False) + dist = scipy.linalg.norm(activation[:, first_dices] - + activation[:, second_dices], + axis=2) + return dist.mean() + + +# motion reconstructions metrics + + +def batch_compute_similarity_transform_torch(S1, S2): + """ + Computes a similarity transform (sR, t) that takes + a set of 3D points S1 (3 x N) closest to a set of 3D points S2, + where R is an 3x3 rotation matrix, t 3x1 translation, s scale. + i.e. solves the orthogonal Procrutes problem. + """ + transposed = False + if S1.shape[0] != 3 and S1.shape[0] != 2: + S1 = S1.permute(0, 2, 1) + S2 = S2.permute(0, 2, 1) + transposed = True + assert S2.shape[1] == S1.shape[1] + + # 1. Remove mean. + mu1 = S1.mean(axis=-1, keepdims=True) + mu2 = S2.mean(axis=-1, keepdims=True) + + X1 = S1 - mu1 + X2 = S2 - mu2 + + # 2. Compute variance of X1 used for scale. + var1 = torch.sum(X1**2, dim=1).sum(dim=1) + + # 3. The outer product of X1 and X2. + K = X1.bmm(X2.permute(0, 2, 1)) + + # 4. Solution that Maximizes trace(R'K) is R=U*V', where U, V are + # singular vectors of K. 
+ U, s, V = torch.svd(K) + + # Construct Z that fixes the orientation of R to get det(R)=1. + Z = torch.eye(U.shape[1], device=S1.device).unsqueeze(0) + Z = Z.repeat(U.shape[0], 1, 1) + Z[:, -1, -1] *= torch.sign(torch.det(U.bmm(V.permute(0, 2, 1)))) + + # Construct R. + R = V.bmm(Z.bmm(U.permute(0, 2, 1))) + + # 5. Recover scale. + scale = torch.cat([torch.trace(x).unsqueeze(0) for x in R.bmm(K)]) / var1 + + # 6. Recover translation. + t = mu2 - (scale.unsqueeze(-1).unsqueeze(-1) * (R.bmm(mu1))) + + # 7. Error: + S1_hat = scale.unsqueeze(-1).unsqueeze(-1) * R.bmm(S1) + t + + if transposed: + S1_hat = S1_hat.permute(0, 2, 1) + + return S1_hat, (scale, R, t) + + +def compute_mpjpe(preds, + target, + valid_mask=None, + pck_joints=None, + sample_wise=True): + """ + Mean per-joint position error (i.e. mean Euclidean distance) + often referred to as "Protocol #1" in many papers. + """ + assert preds.shape == target.shape, print(preds.shape, + target.shape) # BxJx3 + mpjpe = torch.norm(preds - target, p=2, dim=-1) # BxJ + + if pck_joints is None: + if sample_wise: + mpjpe_seq = ((mpjpe * valid_mask.float()).sum(-1) / + valid_mask.float().sum(-1) + if valid_mask is not None else mpjpe.mean(-1)) + else: + mpjpe_seq = mpjpe[valid_mask] if valid_mask is not None else mpjpe + return mpjpe_seq + else: + mpjpe_pck_seq = mpjpe[:, pck_joints] + return mpjpe_pck_seq + + +def align_by_parts(joints, align_inds=None): + if align_inds is None: + return joints + pelvis = joints[:, align_inds].mean(1) + return joints - torch.unsqueeze(pelvis, dim=1) + + +def calc_mpjpe(preds, target, align_inds=[0], sample_wise=True, trans=None): + # Expects BxJx3 + valid_mask = target[:, :, 0] != -2.0 + # valid_mask = torch.BoolTensor(target[:, :, 0].shape) + if align_inds is not None: + preds_aligned = align_by_parts(preds, align_inds=align_inds) + if trans is not None: + preds_aligned += trans + target_aligned = align_by_parts(target, align_inds=align_inds) + else: + preds_aligned, target_aligned = preds, target + mpjpe_each = compute_mpjpe(preds_aligned, + target_aligned, + valid_mask=valid_mask, + sample_wise=sample_wise) + return mpjpe_each + + +def calc_accel(preds, target): + """ + Mean joint acceleration error + often referred to as "Protocol #1" in many papers. 
+ """ + assert preds.shape == target.shape, print(preds.shape, + target.shape) # BxJx3 + assert preds.dim() == 3 + # Expects BxJx3 + # valid_mask = torch.BoolTensor(target[:, :, 0].shape) + accel_gt = target[:-2] - 2 * target[1:-1] + target[2:] + accel_pred = preds[:-2] - 2 * preds[1:-1] + preds[2:] + normed = torch.linalg.norm(accel_pred - accel_gt, dim=-1) + accel_seq = normed.mean(1) + return accel_seq + + +def calc_pampjpe(preds, target, sample_wise=True, return_transform_mat=False): + # Expects BxJx3 + target, preds = target.float(), preds.float() + # extracting the keypoints that all samples have valid annotations + # valid_mask = (target[:, :, 0] != -2.).sum(0) == len(target) + # preds_tranformed, PA_transform = batch_compute_similarity_transform_torch(preds[:, valid_mask], target[:, valid_mask]) + # pa_mpjpe_each = compute_mpjpe(preds_tranformed, target[:, valid_mask], sample_wise=sample_wise) + + preds_tranformed, PA_transform = batch_compute_similarity_transform_torch( + preds, target) + pa_mpjpe_each = compute_mpjpe(preds_tranformed, + target, + sample_wise=sample_wise) + + if return_transform_mat: + return pa_mpjpe_each, PA_transform + else: + return pa_mpjpe_each + + +# from action2motion +def calculate_diversity_multimodality(activations, + labels, + num_labels, + diversity_times=200, + multimodality_times=20): + labels = labels.long() + num_motions = activations.shape[0] # len(labels) + + diversity = 0 + + first_indices = np.random.randint(0, num_motions, diversity_times) + second_indices = np.random.randint(0, num_motions, diversity_times) + for first_idx, second_idx in zip(first_indices, second_indices): + diversity += torch.dist(activations[first_idx, :], + activations[second_idx, :]) + diversity /= diversity_times + + multimodality = 0 + label_quotas = np.zeros(num_labels) + label_quotas[labels.unique( + )] = multimodality_times # if a label does not appear in batch, its quota remains zero + while np.any(label_quotas > 0): + # print(label_quotas) + first_idx = np.random.randint(0, num_motions) + first_label = labels[first_idx] + if not label_quotas[first_label]: + continue + + second_idx = np.random.randint(0, num_motions) + second_label = labels[second_idx] + while first_label != second_label: + second_idx = np.random.randint(0, num_motions) + second_label = labels[second_idx] + + label_quotas[first_label] -= 1 + + first_activation = activations[first_idx, :] + second_activation = activations[second_idx, :] + multimodality += torch.dist(first_activation, second_activation) + + multimodality /= (multimodality_times * num_labels) + + return diversity, multimodality + + +def calculate_fid(statistics_1, statistics_2): + return calculate_frechet_distance_np(statistics_1[0], statistics_1[1], + statistics_2[0], statistics_2[1]) + + +# from: https://github.com/abdulfatir/gan-metrics-pytorch/blob/master/kid_score.py +def polynomial_mmd_averages(codes_g, + codes_r, + n_subsets=50, + subset_size=1000, + ret_var=True, + output=sys.stdout, + **kernel_args): + m = min(codes_g.shape[0], codes_r.shape[0]) + mmds = np.zeros(n_subsets) + if ret_var: + vars = np.zeros(n_subsets) + choice = np.random.choice + + replace = subset_size < len(codes_g) + + for i in range(n_subsets): + g = codes_g[choice(len(codes_g), subset_size, replace=replace)] + r = codes_r[choice(len(codes_r), subset_size, replace=replace)] + o = polynomial_mmd(g, r, **kernel_args, var_at_m=m, ret_var=ret_var) + if ret_var: + mmds[i], vars[i] = o + else: + mmds[i] = o + + return (mmds, vars) if ret_var else mmds + 
+ +def polynomial_mmd(codes_g, + codes_r, + degree=3, + gamma=None, + coef0=1, + var_at_m=None, + ret_var=True): + from sklearn.metrics.pairwise import polynomial_kernel + + # use k(x, y) = (gamma + coef0)^degree + # default gamma is 1 / dim + X = codes_g + Y = codes_r + + K_XX = polynomial_kernel(X, degree=degree, gamma=gamma, coef0=coef0) + K_YY = polynomial_kernel(Y, degree=degree, gamma=gamma, coef0=coef0) + K_XY = polynomial_kernel(X, Y, degree=degree, gamma=gamma, coef0=coef0) + + return _mmd2_and_variance(K_XX, + K_XY, + K_YY, + var_at_m=var_at_m, + ret_var=ret_var) + + +def _mmd2_and_variance(K_XX, + K_XY, + K_YY, + unit_diagonal=False, + mmd_est='unbiased', + block_size=1024, + var_at_m=None, + ret_var=True): + # based on + # https://github.com/dougalsutherland/opt-mmd/blob/master/two_sample/mmd.py + # but changed to not compute the full kernel matrix at once + m = K_XX.shape[0] + assert K_XX.shape == (m, m) + assert K_XY.shape == (m, m) + assert K_YY.shape == (m, m) + if var_at_m is None: + var_at_m = m + + # Get the various sums of kernels that we'll use + # Kts drop the diagonal, but we don't need to compute them explicitly + if unit_diagonal: + diag_X = diag_Y = 1 + sum_diag_X = sum_diag_Y = m + sum_diag2_X = sum_diag2_Y = m + else: + diag_X = np.diagonal(K_XX) + diag_Y = np.diagonal(K_YY) + + sum_diag_X = diag_X.sum() + sum_diag_Y = diag_Y.sum() + + sum_diag2_X = _sqn(diag_X) + sum_diag2_Y = _sqn(diag_Y) + + Kt_XX_sums = K_XX.sum(axis=1) - diag_X + Kt_YY_sums = K_YY.sum(axis=1) - diag_Y + K_XY_sums_0 = K_XY.sum(axis=0) + K_XY_sums_1 = K_XY.sum(axis=1) + + Kt_XX_sum = Kt_XX_sums.sum() + Kt_YY_sum = Kt_YY_sums.sum() + K_XY_sum = K_XY_sums_0.sum() + + if mmd_est == 'biased': + mmd2 = ((Kt_XX_sum + sum_diag_X) / (m * m) + (Kt_YY_sum + sum_diag_Y) / + (m * m) - 2 * K_XY_sum / (m * m)) + else: + assert mmd_est in {'unbiased', 'u-statistic'} + mmd2 = (Kt_XX_sum + Kt_YY_sum) / (m * (m - 1)) + if mmd_est == 'unbiased': + mmd2 -= 2 * K_XY_sum / (m * m) + else: + mmd2 -= 2 * (K_XY_sum - np.trace(K_XY)) / (m * (m - 1)) + + if not ret_var: + return mmd2 + + Kt_XX_2_sum = _sqn(K_XX) - sum_diag2_X + Kt_YY_2_sum = _sqn(K_YY) - sum_diag2_Y + K_XY_2_sum = _sqn(K_XY) + + dot_XX_XY = Kt_XX_sums.dot(K_XY_sums_1) + dot_YY_YX = Kt_YY_sums.dot(K_XY_sums_0) + + m1 = m - 1 + m2 = m - 2 + zeta1_est = ( + 1 / (m * m1 * m2) * + (_sqn(Kt_XX_sums) - Kt_XX_2_sum + _sqn(Kt_YY_sums) - Kt_YY_2_sum) - 1 / + (m * m1)**2 * (Kt_XX_sum**2 + Kt_YY_sum**2) + 1 / (m * m * m1) * + (_sqn(K_XY_sums_1) + _sqn(K_XY_sums_0) - 2 * K_XY_2_sum) - + 2 / m**4 * K_XY_sum**2 - 2 / (m * m * m1) * (dot_XX_XY + dot_YY_YX) + + 2 / (m**3 * m1) * (Kt_XX_sum + Kt_YY_sum) * K_XY_sum) + zeta2_est = (1 / (m * m1) * (Kt_XX_2_sum + Kt_YY_2_sum) - 1 / (m * m1)**2 * + (Kt_XX_sum**2 + Kt_YY_sum**2) + 2 / (m * m) * K_XY_2_sum - + 2 / m**4 * K_XY_sum**2 - 4 / (m * m * m1) * + (dot_XX_XY + dot_YY_YX) + 4 / (m**3 * m1) * + (Kt_XX_sum + Kt_YY_sum) * K_XY_sum) + var_est = (4 * (var_at_m - 2) / (var_at_m * (var_at_m - 1)) * zeta1_est + + 2 / (var_at_m * (var_at_m - 1)) * zeta2_est) + + return mmd2, var_est + + +def _sqn(arr): + flat = np.ravel(arr) + return flat.dot(flat) + + +def calculate_kid(real_activations, generated_activations): + kid_values = polynomial_mmd_averages(real_activations, + generated_activations, + n_subsets=100) + results = (kid_values[0].mean(), kid_values[0].std()) + return results diff --git a/mGPT/models/__init__.py b/mGPT/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/models/base.py 
b/mGPT/models/base.py new file mode 100644 index 0000000..94b8563 --- /dev/null +++ b/mGPT/models/base.py @@ -0,0 +1,204 @@ +import os +import numpy as np +import torch +import logging +from pathlib import Path +from pytorch_lightning import LightningModule +from os.path import join as pjoin +from collections import OrderedDict +from mGPT.metrics import BaseMetrics +from mGPT.config import get_obj_from_str + + +class BaseModel(LightningModule): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.configure_metrics() + + # Ablation + self.test_step_outputs = [] + self.times = [] + self.rep_i = 0 + + def training_step(self, batch, batch_idx): + return self.allsplit_step("train", batch, batch_idx) + + def validation_step(self, batch, batch_idx): + return self.allsplit_step("val", batch, batch_idx) + + def test_step(self, batch, batch_idx): + outputs = self.allsplit_step("test", batch, batch_idx) + self.test_step_outputs.append(outputs) + return outputs + + def predict_step(self, batch, batch_idx): + return self.forward(batch) + + def on_train_epoch_end(self): + # Log steps and losses + dico = self.step_log_dict() + # Log losses + dico.update(self.loss_log_dict('train')) + # Write to log only if not sanity check + if not self.trainer.sanity_checking: + self.log_dict(dico, sync_dist=True, rank_zero_only=True) + + def on_validation_epoch_end(self): + # Log steps and losses + dico = self.step_log_dict() + # Log losses + dico.update(self.loss_log_dict('train')) + dico.update(self.loss_log_dict('val')) + # Log metrics + dico.update(self.metrics_log_dict()) + # Write to log only if not sanity check + if not self.trainer.sanity_checking: + self.log_dict(dico, sync_dist=True, rank_zero_only=True) + + def on_test_epoch_end(self): + # Log metrics + dico = self.metrics_log_dict() + # Write to log only if not sanity check + if not self.trainer.sanity_checking: + self.log_dict(dico, sync_dist=True, rank_zero_only=True) + self.save_npy(self.test_step_outputs) + self.rep_i = self.rep_i + 1 + # Free up the memory + self.test_step_outputs.clear() + + def preprocess_state_dict(self, state_dict): + new_state_dict = OrderedDict() + + metric_state_dict = self.metrics.state_dict() + loss_state_dict = self._losses.state_dict() + + for k, v in metric_state_dict.items(): + new_state_dict['metrics.' + k] = v + + for k, v in loss_state_dict.items(): + new_state_dict['_losses.' 
+ k] = v + + for k, v in state_dict.items(): + if '_losses' not in k and 'Metrics' not in k: + new_state_dict[k] = v + + return new_state_dict + + def load_state_dict(self, state_dict, strict=True): + new_state_dict = self.preprocess_state_dict(state_dict) + super().load_state_dict(new_state_dict, strict) + + def step_log_dict(self): + return { + "epoch": float(self.trainer.current_epoch), + "step": float(self.trainer.current_epoch) + } + + def loss_log_dict(self, split: str): + losses = self._losses['losses_' + split] + loss_dict = losses.compute(split) + return loss_dict + + def metrics_log_dict(self): + + # For TM2TMetrics MM + if self.trainer.datamodule.is_mm and "TM2TMetrics" in self.hparams.metrics_dict: + metrics_dicts = ['MMMetrics'] + else: + metrics_dicts = self.hparams.metrics_dict + + # Compute all metrics + metrics_log_dict = {} + for metric in metrics_dicts: + metrics_dict = getattr( + self.metrics, + metric).compute(sanity_flag=self.trainer.sanity_checking) + metrics_log_dict.update({ + f"Metrics/{metric}": value.item() + for metric, value in metrics_dict.items() + }) + + return metrics_log_dict + + def configure_optimizers(self): + # Optimizer + optim_target = self.hparams.cfg.TRAIN.OPTIM.target + if len(optim_target.split('.')) == 1: + optim_target = 'torch.optim.' + optim_target + optimizer = get_obj_from_str(optim_target)( + params=self.parameters(), **self.hparams.cfg.TRAIN.OPTIM.params) + + # Scheduler + scheduler_target = self.hparams.cfg.TRAIN.LR_SCHEDULER.target + if len(scheduler_target.split('.')) == 1: + scheduler_target = 'torch.optim.lr_scheduler.' + scheduler_target + lr_scheduler = get_obj_from_str(scheduler_target)( + optimizer=optimizer, **self.hparams.cfg.TRAIN.LR_SCHEDULER.params) + + return {'optimizer': optimizer, 'lr_scheduler': lr_scheduler} + + def configure_metrics(self): + self.metrics = BaseMetrics(datamodule=self.datamodule, **self.hparams) + + def save_npy(self, outputs): + cfg = self.hparams.cfg + output_dir = Path( + os.path.join( + cfg.FOLDER, + str(cfg.model.target.split('.')[-2].lower()), + str(cfg.NAME), + "samples_" + cfg.TIME, + )) + if cfg.TEST.SAVE_PREDICTIONS: + lengths = [i[1] for i in outputs] + outputs = [i[0] for i in outputs] + + if cfg.TEST.DATASETS[0].lower() in ["humanml3d", "kit"]: + keyids = self.trainer.datamodule.test_dataset.name_list + for i in range(len(outputs)): + for bid in range( + min(cfg.TEST.BATCH_SIZE, outputs[i].shape[0])): + keyid = keyids[i * cfg.TEST.BATCH_SIZE + bid] + data = self.trainer.datamodule.test_dataset.data_dict[ + keyid] + + motion = torch.tensor(data['motion'], + device=outputs[i].device) + motion = self.datamodule.normalize(motion) + length = data['length'] + text_list = data['text'] + gen_joints = outputs[i][bid][:lengths[i][bid]].cpu( + ).numpy() + if cfg.TEST.REPLICATION_TIMES > 1: + name = f"{keyid}.npy" + else: + name = f"{keyid}.npy" + # save predictions results + npypath = output_dir / name + np.save(npypath, gen_joints) + npypath = output_dir / f"{keyid}_gt.npy" + joints = self.feats2joints(motion).cpu().numpy() + np.save(npypath, joints) + + with open(output_dir / f"{keyid}.txt", "a") as f: + for text in text_list: + f.write(f"{text['caption']}\n") + + elif cfg.TEST.DATASETS[0].lower() in ["humanact12", "uestc"]: + keyids = range(len(self.trainer.datamodule.test_dataset)) + for i in range(len(outputs)): + for bid in range( + min(cfg.TEST.BATCH_SIZE, outputs[i].shape[0])): + keyid = keyids[i * cfg.TEST.BATCH_SIZE + bid] + gen_joints = outputs[i][bid].cpu() + gen_joints = 
gen_joints.permute(2, 0, + 1)[:lengths[i][bid], + ...].numpy() + if cfg.TEST.REPLICATION_TIMES > 1: + name = f"{keyid}_{self.rep_i}" + else: + name = f"{keyid}.npy" + # save predictions results + npypath = output_dir / name + np.save(npypath, gen_joints) diff --git a/mGPT/models/build_model.py b/mGPT/models/build_model.py new file mode 100644 index 0000000..53c9eff --- /dev/null +++ b/mGPT/models/build_model.py @@ -0,0 +1,8 @@ +from omegaconf import OmegaConf +from mGPT.config import instantiate_from_config + +def build_model(cfg, datamodule): + model_config = OmegaConf.to_container(cfg.model, resolve=True) + model_config['params']['cfg'] = cfg + model_config['params']['datamodule'] = datamodule + return instantiate_from_config(model_config) diff --git a/mGPT/models/mgpt.py b/mGPT/models/mgpt.py new file mode 100644 index 0000000..164703d --- /dev/null +++ b/mGPT/models/mgpt.py @@ -0,0 +1,494 @@ +import numpy as np +import os +import random +import torch +import time +from mGPT.config import instantiate_from_config +from os.path import join as pjoin +from mGPT.losses.mgpt import GPTLosses +from mGPT.models.base import BaseModel +from .base import BaseModel +import json +import mGPT.render.matplot.plot_3d_global as plot_3d + + +class MotionGPT(BaseModel): + """ + Stage 1 Motion Tokenizer + Stage 2 Motion-language pretrian + Stage 3 Motion-language instruction tuning + """ + + def __init__(self, + cfg, + datamodule, + lm, + motion_vae, + codebook_size=512, + stage='vae', + debug=True, + condition='text', + task='t2m', + metrics_dict=['TM2TMetrics'], + **kwargs): + + self.save_hyperparameters(ignore='datamodule', logger=False) + self.datamodule = datamodule + super().__init__() + + # Instantiate motion tokenizer + if motion_vae != None: + self.vae = instantiate_from_config(motion_vae) + + # Instantiate motion-language model + self.lm = instantiate_from_config(lm) + + # Freeze the motion tokenizer for lm training + if 'lm' in self.hparams.stage: + self.vae.training = False + for p in self.vae.parameters(): + p.requires_grad = False + + # Instantiate the losses + self._losses = torch.nn.ModuleDict({ + split: GPTLosses(cfg, self.hparams.stage, self.datamodule.njoints) + for split in ["losses_train", "losses_test", "losses_val"] + }) + + # Data transform + self.feats2joints = datamodule.feats2joints + + # Count codebook frequency + self.codePred = [] + self.codeFrequency = torch.zeros((self.hparams.codebook_size, )) + + def forward(self, batch, task="t2m"): + texts = batch["text"] + lengths_ref = batch["length"] + + # Forward + # texts = ['Generate motion: ' + text for text in texts] + outputs, output_texts = self.lm.generate_direct(texts, do_sample=True) + + # Motion Decode + feats_rst_lst = [] + lengths = [] + max_len = 0 + + for i in range(len(texts)): + if task == "pred": + motion = self.vae.decode( + torch.cat((batch["motion"][i], outputs[i]))) + elif task in ["t2m", "m2t", "inbetween"]: + motion = self.vae.decode(outputs[i]) + # motion = self.datamodule.denormalize(motion) + lengths.append(motion.shape[1]) + else: + raise NotImplementedError + + if motion.shape[1] > max_len: + max_len = motion.shape[1] + + if task in ["t2m", "m2t", "pred"]: + feats_rst_lst.append(motion) + + elif task == "inbetween": + motion = torch.cat( + (batch["motion_heading"][i][None], + motion[:, lengths_ref[i] // 4:lengths_ref[i] // 4 * 3, + ...], batch["motion_tailing"][i][None]), + dim=1) + feats_rst_lst.append(motion) + + feats_rst = torch.zeros( + (len(feats_rst_lst), max_len, motion.shape[-1])).to(self.device) 
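+        # feats_rst is a zero-padded batch: every decoded motion keeps its own
+        # length, shorter clips are left zero-padded up to max_len, and the true
+        # lengths are returned separately in `lengths` for downstream evaluation.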
+ + # padding and concat + for i in range(len(feats_rst_lst)): + feats_rst[i, :feats_rst_lst[i].shape[1], ...] = feats_rst_lst[i] + + # Recover joints for evaluation + joints_rst = self.feats2joints(feats_rst) + + # return set + outputs = { + "texts": output_texts, + "feats": feats_rst, + "joints": joints_rst, + "length": lengths + } + + return outputs + + def train_lm_forward(self, batch): + tokens_ref = batch["motion"] + texts = batch["text"] + lengths = batch["length"] + tasks = batch["tasks"] + all_captions = batch['all_captions'] + if self.hparams.condition == 'caption': + texts = [random.choice(all_captions[i]) for i in range(len(texts))] + + # LLM Forward + outputs = self.lm(texts, tokens_ref, lengths, tasks) + # outputs = self.t2m_gpt.generate(texts) + return {'outputs': outputs} + + @torch.no_grad() + def val_t2m_forward(self, batch): + feats_ref = batch["motion"] + texts = batch["text"] + lengths = batch["length"] + tasks = None + if self.trainer.datamodule.is_mm: + texts = texts * self.hparams.cfg.METRIC.MM_NUM_REPEATS + feats_ref = feats_ref.repeat_interleave( + self.hparams.cfg.METRIC.MM_NUM_REPEATS, dim=0) + lengths = lengths * self.hparams.cfg.METRIC.MM_NUM_REPEATS + instructions = pjoin(self.datamodule.hparams.data_root, + 'template_instructions.json') + instructions = json.load(open(instructions, 'r')) + tasks = [instructions["Text-to-Motion"]["caption"]] * len(texts) + + if self.hparams.condition == 'caption': + tasks = [{ + 'input': [''], + 'output': [''] + }] * len(texts) + + if self.hparams.cfg.DATASET.TASK_PATH: + instructions = pjoin(self.hparams.cfg.DATASET.TASK_PATH) + instructions = json.load(open(instructions, 'r')) + tasks = [instructions["Text-to-Motion"]["t2m"]] * len(texts) + + min_len = lengths.copy() + # Forward + outputs = self.lm.generate_conditional(texts, + lengths=lengths, + stage='test', + tasks=tasks) + + # Motion Decode + feats_rst = torch.zeros_like(feats_ref) + + for i in range(len(texts)): + outputs[i] = torch.clamp(outputs[i], + 0, + self.hparams.codebook_size - 1, + out=None) + + if len(outputs[i]) > 1: + motion = self.vae.decode(outputs[i]) + else: + motion = torch.zeros_like(feats_ref[i:i + 1, ...]) + + min_len[i] = min(motion.shape[1], lengths[i]) + + # Cut Motion + feats_rst[i:i + 1, :min_len[i], ...] 
= motion[:, :lengths[i]] + + # Recover joints for evaluation + joints_ref = self.feats2joints(feats_ref) + joints_rst = self.feats2joints(feats_rst) + + # Renorm for evaluation + feats_ref = self.datamodule.renorm4t2m(feats_ref) + feats_rst = self.datamodule.renorm4t2m(feats_rst) + + # return set + rs_set = { + "m_ref": feats_ref, + "m_rst": feats_rst, + "joints_ref": joints_ref, + "joints_rst": joints_rst, + "length": min_len + # "length": lengths + } + + return rs_set + + @torch.no_grad() + def val_m2t_forward(self, batch): + self.hparams.metrics_dict = [] + + feats_ref = batch["motion"] + texts = batch["text"] + lengths = batch["length"] + all_captions = batch['all_captions'] + + # Motion Encode + motion_tokens = [] + lengths_tokens = [] + for i in range(len(feats_ref)): + motion_token, _ = self.vae.encode(feats_ref[i:i + 1]) + motion_tokens.append(motion_token[0]) + lengths_tokens.append(motion_token.shape[1]) + + # Forward + outputs = self.lm.generate_conditional(motion_tokens=motion_tokens, + lengths=lengths_tokens, + task="m2t", + stage='test') + + # return set + rs_set = { + "m_ref": feats_ref, + "t_ref": all_captions, + # "t_ref": texts, + "t_pred": outputs, + "length": lengths + } + + return rs_set + + @torch.no_grad() + def val_m2m_forward(self, batch, task="pred"): + feats_ref = batch["motion"] + lengths = batch["length"] + + # Motion Encode + motion_tokens = [] + lengths_tokens = [] + for i in range(len(feats_ref)): + motion_token, _ = self.vae.encode(feats_ref[i:i + 1]) + motion_tokens.append(motion_token[0]) + + # Forward + outputs = self.lm.generate_conditional(motion_tokens=motion_tokens, + lengths=lengths, + task=task, + stage='test') + + # Motion Decode + feats_rst = torch.zeros_like(feats_ref) + min_len = lengths.copy() + + for i in range(len(lengths)): + outputs[i] = torch.clamp(outputs[i], + 0, + self.hparams.codebook_size - 1, + out=None) + + if len(outputs[i]) > 1: + motion = self.vae.decode(outputs[i]) + else: + motion = torch.zeros_like(feats_ref[i:i + 1, ...]) + + min_len[i] = min(motion.shape[1], lengths[i]) + + # Cut Motion + feats_rst[i:i + 1, :min_len[i], ...] 
= motion[:, :lengths[i]] + + # Recover joints for evaluation + joints_ref = self.feats2joints(feats_ref) + joints_rst = self.feats2joints(feats_rst) + + # Renorm for evaluation + feats_ref = self.datamodule.renorm4t2m(feats_ref) + feats_rst = self.datamodule.renorm4t2m(feats_rst) + + # return set + rs_set = { + "m_ref": feats_ref, + "m_rst": feats_rst, + "joints_ref": joints_ref, + "joints_rst": joints_rst, + "length": min_len + # "length": lengths + } + + return rs_set + + def train_vae_forward(self, batch): + # batch detach + feats_ref = batch["motion"] + joints_ref = self.feats2joints(feats_ref) + # motion encode & decode + feats_rst, loss_commit, perplexity = self.vae(feats_ref) + joints_rst = self.feats2joints(feats_rst) + # return set + rs_set = { + "m_ref": feats_ref, + "joints_ref": joints_ref, + "m_rst": feats_rst, + "joints_rst": joints_rst, + "loss_commit": loss_commit, + "perplexity": perplexity, + } + return rs_set + + @torch.no_grad() + def val_vae_forward(self, batch, split="train"): + # Detach batch + feats_ref = batch["motion"] + lengths = batch["length"] + + # Repeat for multimodal evaluation + if self.trainer.datamodule.is_mm: + feats_ref = feats_ref.repeat_interleave( + self.hparams.cfg.METRIC.MM_NUM_REPEATS, dim=0) + lengths = lengths * self.hparams.cfg.METRIC.MM_NUM_REPEATS + + # Motion encode & decode + feats_rst = torch.zeros_like(feats_ref) + + for i in range(len(feats_ref)): + if lengths[i] == 0: + continue + feats_pred, _, _ = self.vae(feats_ref[i:i + 1, :lengths[i]]) + feats_rst[i:i + 1, :feats_pred.shape[1], :] = feats_pred + + code_pred, _ = self.vae.encode(feats_ref[i:i + 1, :lengths[i]]) + + # codeFre_pred = torch.bincount(code_pred[0], + # minlength=self.hparams.codebook_size).to( + # self.codeFrequency.device) + # self.codePred.append(code_pred[0]) + # self.codeFrequency += codeFre_pred + + # np.save('../memData/results/codeFrequency.npy', + # self.codeFrequency.cpu().numpy()) + + # Recover joints for evaluation + joints_ref = self.feats2joints(feats_ref) + joints_rst = self.feats2joints(feats_rst) + + # Renorm for evaluation + feats_ref = self.datamodule.renorm4t2m(feats_ref) + feats_rst = self.datamodule.renorm4t2m(feats_rst) + + # Return set + rs_set = { + "m_ref": feats_ref, + "joints_ref": joints_ref, + "m_rst": feats_rst, + "joints_rst": joints_rst, + "length": lengths, + } + + return rs_set + + + def allsplit_step(self, split: str, batch, batch_idx): + # Compute the losses + loss = None + + if self.hparams.stage == "vae" and split in ["train", "val"]: + rs_set = self.train_vae_forward(batch) + loss = self._losses['losses_' + split].update(rs_set) + elif self.hparams.stage in ["lm_instruct", "lm_pretrain" + ] and split in ["train"]: + rs_set = self.train_lm_forward(batch) + loss = self._losses['losses_' + split].update(rs_set) + elif self.hparams.stage == 'lm_rl' and split in ['train']: + rs_set = self.train_rl_forward(batch) + loss = None + + # Compute the metrics + if split in ["val", "test"]: + if self.hparams.stage == "vae": + rs_set = self.val_vae_forward(batch, split) + elif self.hparams.stage in ["lm_instruct", "lm_pretrain", "lm_rl"]: + if self.hparams.task == "t2m": + rs_set = self.val_t2m_forward(batch) + elif self.hparams.task == "m2t": + rs_set = self.val_m2t_forward(batch) + elif self.hparams.task in ["m2m", "pred", "inbetween"]: + rs_set = self.val_m2m_forward(batch, self.hparams.task) + + if self.hparams.task not in ["m2t"]: + # MultiModality evaluation sperately + if self.trainer.datamodule.is_mm: + metrics_dicts = ['MMMetrics'] + 
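+                    # MultiModality is evaluated in its own pass: when the
+                    # datamodule runs in multi-modal mode (is_mm), inputs were
+                    # repeated MM_NUM_REPEATS times upstream and only MMMetrics
+                    # is updated here.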
else: + metrics_dicts = self.hparams.metrics_dict + + if self.hparams.task not in ['pred', 'inbetween'] and 'PredMetrics' in metrics_dicts: + metrics_dicts.remove('PredMetrics') + + for metric in metrics_dicts: + lengths = batch['length'] + if metric == "TemosMetric": + getattr(self.metrics, + metric).update(rs_set["joints_rst"], + rs_set["joints_ref"], lengths) + elif metric == "TM2TMetrics": + if self.hparams.stage in [ + "lm_instruct", "lm_pretrain", "lm_rl" + ]: + word_embs = batch['word_embs'] + pos_ohot = batch['pos_ohot'] + text_lengths = batch['text_len'] + if self.trainer.datamodule.is_mm: + word_embs = word_embs.repeat_interleave( + self.hparams.cfg.METRIC.MM_NUM_REPEATS, + dim=0) + pos_ohot = pos_ohot.repeat_interleave( + self.hparams.cfg.METRIC.MM_NUM_REPEATS, + dim=0) + text_lengths = text_lengths.repeat_interleave( + self.hparams.cfg.METRIC.MM_NUM_REPEATS, + dim=0) + else: + word_embs = None + pos_ohot = None + text_lengths = None + + getattr(self.metrics, metric).update( + feats_ref=rs_set["m_ref"], + feats_rst=rs_set["m_rst"], + lengths_ref=lengths, + lengths_rst=rs_set['length'], + word_embs=word_embs, + pos_ohot=pos_ohot, + text_lengths=text_lengths, + ) + elif metric == "UncondMetrics": + getattr(self.metrics, metric).update( + recmotion_embeddings=rs_set["lat_rm"], + gtmotion_embeddings=rs_set["lat_m"], + lengths=lengths, + ) + elif metric == "MRMetrics": + getattr(self.metrics, + metric).update(rs_set["joints_rst"], + rs_set["joints_ref"], lengths) + elif metric == "PredMetrics": + getattr(self.metrics, + metric).update(rs_set["joints_rst"], + rs_set["joints_ref"], lengths) + elif metric == "MMMetrics": + # pass + getattr(self.metrics, + metric).update(rs_set["m_rst"], + rs_set['length']) + else: + raise TypeError(f"Not support this metric {metric}") + + elif self.hparams.task == "m2t" and self.hparams.stage in [ + "lm_instruct", "lm_pretrain", "lm_rl" + ]: + self.hparams.metrics_dict = metrics_dicts = ['M2TMetrics'] + for metric in metrics_dicts: + if metric == "M2TMetrics": + getattr(self.metrics, metric).update( + feats_ref=rs_set["m_ref"], + pred_texts=rs_set["t_pred"], + gt_texts=batch["all_captions"], + lengths=rs_set['length'], + word_embs=batch["word_embs"], + pos_ohot=batch["pos_ohot"], + text_lengths=batch["text_len"], + ) + + # return forward output rather than loss during test + if split in ["test"]: + if self.hparams.task == "t2m": + return rs_set["joints_rst"], rs_set["length"], rs_set[ + "joints_ref"] + # pass + elif self.hparams.task == "m2t": + return rs_set["t_pred"], batch["length"] + # return batch["length"] + + return loss diff --git a/mGPT/models/utils/__init__.py b/mGPT/models/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/models/utils/adain.py b/mGPT/models/utils/adain.py new file mode 100644 index 0000000..3588f33 --- /dev/null +++ b/mGPT/models/utils/adain.py @@ -0,0 +1,66 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +class AdaptiveInstanceNorm1d(nn.Module): + def __init__(self, num_features, eps=1e-5, momentum=0.1): + super(AdaptiveInstanceNorm1d, self).__init__() + self.num_features = num_features + self.eps = eps + self.momentum = momentum + self.weight = None + self.bias = None + self.register_buffer('running_mean', torch.zeros(num_features)) + self.register_buffer('running_var', torch.ones(num_features)) + + def forward(self, x, direct_weighting=False, no_std=False): + assert self.weight is not None and \ + self.bias is not None, "Please assign AdaIN weight first" + # (bs, 
nfeats, nframe) <= (nframe, bs, nfeats) + x = x.permute(1,2,0) + + b, c = x.size(0), x.size(1) # batch size & channels + running_mean = self.running_mean.repeat(b) + running_var = self.running_var.repeat(b) + # self.weight = torch.ones_like(self.weight) + + if direct_weighting: + x_reshaped = x.contiguous().view(b * c) + if no_std: + out = x_reshaped + self.bias + else: + out = x_reshaped.mul(self.weight) + self.bias + out = out.view(b, c, *x.size()[2:]) + else: + x_reshaped = x.contiguous().view(1, b * c, *x.size()[2:]) + out = F.batch_norm( + x_reshaped, running_mean, running_var, self.weight, self.bias, + True, self.momentum, self.eps) + out = out.view(b, c, *x.size()[2:]) + + # (nframe, bs, nfeats) <= (bs, nfeats, nframe) + out = out.permute(2,0,1) + return out + + def __repr__(self): + return self.__class__.__name__ + '(' + str(self.num_features) + ')' + +def assign_adain_params(adain_params, model): + # assign the adain_params to the AdaIN layers in model + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm1d": + mean = adain_params[: , : m.num_features] + std = adain_params[: , m.num_features: 2 * m.num_features] + m.bias = mean.contiguous().view(-1) + m.weight = std.contiguous().view(-1) + if adain_params.size(1) > 2 * m.num_features: + adain_params = adain_params[: , 2 * m.num_features:] + + +def get_num_adain_params(model): + # return the number of AdaIN parameters needed by the model + num_adain_params = 0 + for m in model.modules(): + if m.__class__.__name__ == "AdaptiveInstanceNorm1d": + num_adain_params += 2 * m.num_features + return num_adain_params diff --git a/mGPT/models/utils/blocks.py b/mGPT/models/utils/blocks.py new file mode 100644 index 0000000..e657b38 --- /dev/null +++ b/mGPT/models/utils/blocks.py @@ -0,0 +1,146 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mGPT.models.notused import AdaptiveInstanceNorm1d + + +class MLP(nn.Module): + + def __init__(self, cfg, out_dim, is_init): + super(MLP, self).__init__() + dims = cfg.MODEL.MOTION_DECODER.MLP_DIM + n_blk = len(dims) + norm = 'none' + acti = 'lrelu' + + layers = [] + for i in range(n_blk - 1): + layers += LinearBlock(dims[i], dims[i + 1], norm=norm, acti=acti) + layers += LinearBlock(dims[-1], out_dim, norm='none', acti='none') + self.model = nn.Sequential(*layers) + + if is_init: + for m in self.modules(): + if isinstance(m, nn.Linear): + #nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + nn.init.constant_(m.weight, 1) + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + return self.model(x.view(x.size(0), -1)) + + +def ZeroPad1d(sizes): + return nn.ConstantPad1d(sizes, 0) + + +def get_acti_layer(acti='relu', inplace=True): + + if acti == 'relu': + return [nn.ReLU(inplace=inplace)] + elif acti == 'lrelu': + return [nn.LeakyReLU(0.2, inplace=inplace)] + elif acti == 'tanh': + return [nn.Tanh()] + elif acti == 'none': + return [] + else: + assert 0, "Unsupported activation: {}".format(acti) + + +def get_norm_layer(norm='none', norm_dim=None): + + if norm == 'bn': + return [nn.BatchNorm1d(norm_dim)] + elif norm == 'in': + # return [nn.InstanceNorm1d(norm_dim, affine=False)] # for rt42! 
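+        # Note on the 'adain' option below: AdaptiveInstanceNorm1d is created
+        # with weight/bias unset; they are expected to be filled in at runtime
+        # from a conditioning vector via assign_adain_params() (see adain.py).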
+ return [nn.InstanceNorm1d(norm_dim, affine=True)] + elif norm == 'adain': + return [AdaptiveInstanceNorm1d(norm_dim)] + elif norm == 'none': + return [] + else: + assert 0, "Unsupported normalization: {}".format(norm) + + +def get_dropout_layer(dropout=None): + if dropout is not None: + return [nn.Dropout(p=dropout)] + else: + return [] + + +def ConvLayers(kernel_size, + in_channels, + out_channels, + stride=1, + pad_type='reflect', + use_bias=True): + """ + returns a list of [pad, conv] => should be += to some list, then apply sequential + """ + + if pad_type == 'reflect': + pad = nn.ReflectionPad1d + elif pad_type == 'replicate': + pad = nn.ReplicationPad1d + elif pad_type == 'zero': + pad = ZeroPad1d + else: + assert 0, "Unsupported padding type: {}".format(pad_type) + + pad_l = (kernel_size - 1) // 2 + pad_r = kernel_size - 1 - pad_l + return [ + pad((pad_l, pad_r)), + nn.Conv1d(in_channels, + out_channels, + kernel_size=kernel_size, + stride=stride, + bias=use_bias) + ] + + +def ConvBlock(kernel_size, + in_channels, + out_channels, + stride=1, + pad_type='reflect', + dropout=None, + norm='none', + acti='lrelu', + acti_first=False, + use_bias=True, + inplace=True): + """ + returns a list of [pad, conv, norm, acti] or [acti, pad, conv, norm] + """ + + layers = ConvLayers(kernel_size, + in_channels, + out_channels, + stride=stride, + pad_type=pad_type, + use_bias=use_bias) + layers += get_dropout_layer(dropout) + layers += get_norm_layer(norm, norm_dim=out_channels) + acti_layers = get_acti_layer(acti, inplace=inplace) + + if acti_first: + return acti_layers + layers + else: + return layers + acti_layers + + +def LinearBlock(in_dim, out_dim, dropout=None, norm='none', acti='relu'): + + use_bias = True + layers = [] + layers.append(nn.Linear(in_dim, out_dim, bias=use_bias)) + layers += get_dropout_layer(dropout) + layers += get_norm_layer(norm, norm_dim=out_dim) + layers += get_acti_layer(acti) + + return layers diff --git a/mGPT/models/utils/cross_attention.py b/mGPT/models/utils/cross_attention.py new file mode 100644 index 0000000..deb1f05 --- /dev/null +++ b/mGPT/models/utils/cross_attention.py @@ -0,0 +1,412 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +DETR Transformer class. 
+Copy-paste from torch.nn.Transformer with modifications: + * positional encodings are passed in MHattention + * extra LN at the end of encoder is removed + * decoder returns a stack of activations from all decoding layers +""" +import copy +from typing import List, Optional +from numpy import block + +import torch +import torch.nn.functional as F +from torch import Tensor, nn + + +class SkipTransformerEncoder(nn.Module): + def __init__(self, encoder_layer, num_layers, norm=None): + super().__init__() + self.d_model = encoder_layer.d_model + + self.num_layers = num_layers + self.norm = norm + + assert num_layers % 2 == 1 + + num_block = (num_layers-1)//2 + self.input_blocks = _get_clones(encoder_layer, num_block) + self.middle_block = _get_clone(encoder_layer) + self.output_blocks = _get_clones(encoder_layer, num_block) + self.linear_blocks = _get_clones(nn.Linear(2*self.d_model, self.d_model), num_block) + + self._reset_parameters() + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward(self, src, + mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + x = src + + xs = [] + for module in self.input_blocks: + x = module(x, src_mask=mask, + src_key_padding_mask=src_key_padding_mask, pos=pos) + xs.append(x) + + x = self.middle_block(x, src_mask=mask, + src_key_padding_mask=src_key_padding_mask, pos=pos) + + for (module, linear) in zip(self.output_blocks, self.linear_blocks): + x = torch.cat([x, xs.pop()], dim=-1) + x = linear(x) + x = module(x, src_mask=mask, + src_key_padding_mask=src_key_padding_mask, pos=pos) + + if self.norm is not None: + x = self.norm(x) + return x + +class SkipTransformerDecoder(nn.Module): + def __init__(self, decoder_layer, num_layers, norm=None): + super().__init__() + self.d_model = decoder_layer.d_model + + self.num_layers = num_layers + self.norm = norm + + assert num_layers % 2 == 1 + + num_block = (num_layers-1)//2 + self.input_blocks = _get_clones(decoder_layer, num_block) + self.middle_block = _get_clone(decoder_layer) + self.output_blocks = _get_clones(decoder_layer, num_block) + self.linear_blocks = _get_clones(nn.Linear(2*self.d_model, self.d_model), num_block) + + self._reset_parameters() + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + x = tgt + + xs = [] + for module in self.input_blocks: + x = module(x, memory, tgt_mask=tgt_mask, + memory_mask=memory_mask, + tgt_key_padding_mask=tgt_key_padding_mask, + memory_key_padding_mask=memory_key_padding_mask, + pos=pos, query_pos=query_pos) + xs.append(x) + + x = self.middle_block(x, memory, tgt_mask=tgt_mask, + memory_mask=memory_mask, + tgt_key_padding_mask=tgt_key_padding_mask, + memory_key_padding_mask=memory_key_padding_mask, + pos=pos, query_pos=query_pos) + + for (module, linear) in zip(self.output_blocks, self.linear_blocks): + x = torch.cat([x, xs.pop()], dim=-1) + x = linear(x) + x = module(x, memory, tgt_mask=tgt_mask, + memory_mask=memory_mask, + tgt_key_padding_mask=tgt_key_padding_mask, + memory_key_padding_mask=memory_key_padding_mask, + pos=pos, query_pos=query_pos) + + if self.norm is not None: + x = self.norm(x) + + return 
x + +class Transformer(nn.Module): + + def __init__(self, d_model=512, nhead=8, num_encoder_layers=6, + num_decoder_layers=6, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False, + return_intermediate_dec=False): + super().__init__() + + encoder_layer = TransformerEncoderLayer(d_model, nhead, dim_feedforward, + dropout, activation, normalize_before) + encoder_norm = nn.LayerNorm(d_model) if normalize_before else None + self.encoder = TransformerEncoder(encoder_layer, num_encoder_layers, encoder_norm) + + decoder_layer = TransformerDecoderLayer(d_model, nhead, dim_feedforward, + dropout, activation, normalize_before) + decoder_norm = nn.LayerNorm(d_model) + self.decoder = TransformerDecoder(decoder_layer, num_decoder_layers, decoder_norm, + return_intermediate=return_intermediate_dec) + + self._reset_parameters() + + self.d_model = d_model + self.nhead = nhead + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward(self, src, mask, query_embed, pos_embed): + # flatten NxCxHxW to HWxNxC + bs, c, h, w = src.shape + src = src.flatten(2).permute(2, 0, 1) + pos_embed = pos_embed.flatten(2).permute(2, 0, 1) + query_embed = query_embed.unsqueeze(1).repeat(1, bs, 1) + mask = mask.flatten(1) + + tgt = torch.zeros_like(query_embed) + memory = self.encoder(src, src_key_padding_mask=mask, pos=pos_embed) + hs = self.decoder(tgt, memory, memory_key_padding_mask=mask, + pos=pos_embed, query_pos=query_embed) + return hs.transpose(1, 2), memory.permute(1, 2, 0).view(bs, c, h, w) + + +class TransformerEncoder(nn.Module): + + def __init__(self, encoder_layer, num_layers, norm=None): + super().__init__() + self.layers = _get_clones(encoder_layer, num_layers) + self.num_layers = num_layers + self.norm = norm + + def forward(self, src, + mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + output = src + + for layer in self.layers: + output = layer(output, src_mask=mask, + src_key_padding_mask=src_key_padding_mask, pos=pos) + + if self.norm is not None: + output = self.norm(output) + + return output + + +class TransformerDecoder(nn.Module): + + def __init__(self, decoder_layer, num_layers, norm=None, return_intermediate=False): + super().__init__() + self.layers = _get_clones(decoder_layer, num_layers) + self.num_layers = num_layers + self.norm = norm + self.return_intermediate = return_intermediate + + def forward(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + output = tgt + + intermediate = [] + + for layer in self.layers: + output = layer(output, memory, tgt_mask=tgt_mask, + memory_mask=memory_mask, + tgt_key_padding_mask=tgt_key_padding_mask, + memory_key_padding_mask=memory_key_padding_mask, + pos=pos, query_pos=query_pos) + if self.return_intermediate: + intermediate.append(self.norm(output)) + + if self.norm is not None: + output = self.norm(output) + if self.return_intermediate: + intermediate.pop() + intermediate.append(output) + + if self.return_intermediate: + return torch.stack(intermediate) + + return output.unsqueeze(0) + + +class TransformerEncoderLayer(nn.Module): + + def __init__(self, d_model, nhead, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False): + super().__init__() + self.d_model = 
d_model + self.self_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + # Implementation of Feedforward model + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + self.normalize_before = normalize_before + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward_post(self, + src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + q = k = self.with_pos_embed(src, pos) + src2 = self.self_attn(q, k, value=src, attn_mask=src_mask, + key_padding_mask=src_key_padding_mask)[0] + src = src + self.dropout1(src2) + src = self.norm1(src) + src2 = self.linear2(self.dropout(self.activation(self.linear1(src)))) + src = src + self.dropout2(src2) + src = self.norm2(src) + return src + + def forward_pre(self, src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + src2 = self.norm1(src) + q = k = self.with_pos_embed(src2, pos) + src2 = self.self_attn(q, k, value=src2, attn_mask=src_mask, + key_padding_mask=src_key_padding_mask)[0] + src = src + self.dropout1(src2) + src2 = self.norm2(src) + src2 = self.linear2(self.dropout(self.activation(self.linear1(src2)))) + src = src + self.dropout2(src2) + return src + + def forward(self, src, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None): + if self.normalize_before: + return self.forward_pre(src, src_mask, src_key_padding_mask, pos) + return self.forward_post(src, src_mask, src_key_padding_mask, pos) + + +class TransformerDecoderLayer(nn.Module): + + def __init__(self, d_model, nhead, dim_feedforward=2048, dropout=0.1, + activation="relu", normalize_before=False): + super().__init__() + self.self_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + self.multihead_attn = nn.MultiheadAttention(d_model, nhead, dropout=dropout) + # Implementation of Feedforward model + self.d_model = d_model + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + self.norm3 = nn.LayerNorm(d_model) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + self.dropout3 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + self.normalize_before = normalize_before + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward_post(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + + q = k = self.with_pos_embed(tgt, query_pos) + tgt2 = self.self_attn(q, k, value=tgt, attn_mask=tgt_mask, + key_padding_mask=tgt_key_padding_mask)[0] + tgt = tgt + self.dropout1(tgt2) + tgt = self.norm1(tgt) + tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt, query_pos), + key=self.with_pos_embed(memory, pos), + value=memory, 
attn_mask=memory_mask, + key_padding_mask=memory_key_padding_mask)[0] + tgt = tgt + self.dropout2(tgt2) + tgt = self.norm2(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt)))) + tgt = tgt + self.dropout3(tgt2) + tgt = self.norm3(tgt) + return tgt + + def forward_pre(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + tgt2 = self.norm1(tgt) + q = k = self.with_pos_embed(tgt2, query_pos) + tgt2 = self.self_attn(q, k, value=tgt2, attn_mask=tgt_mask, + key_padding_mask=tgt_key_padding_mask)[0] + tgt = tgt + self.dropout1(tgt2) + tgt2 = self.norm2(tgt) + tgt2 = self.multihead_attn(query=self.with_pos_embed(tgt2, query_pos), + key=self.with_pos_embed(memory, pos), + value=memory, attn_mask=memory_mask, + key_padding_mask=memory_key_padding_mask)[0] + tgt = tgt + self.dropout2(tgt2) + tgt2 = self.norm3(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt2)))) + tgt = tgt + self.dropout3(tgt2) + return tgt + + def forward(self, tgt, memory, + tgt_mask: Optional[Tensor] = None, + memory_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + memory_key_padding_mask: Optional[Tensor] = None, + pos: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + if self.normalize_before: + return self.forward_pre(tgt, memory, tgt_mask, memory_mask, + tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos) + return self.forward_post(tgt, memory, tgt_mask, memory_mask, + tgt_key_padding_mask, memory_key_padding_mask, pos, query_pos) + + +def _get_clone(module): + return copy.deepcopy(module) + +def _get_clones(module, N): + return nn.ModuleList([copy.deepcopy(module) for i in range(N)]) + + +def build_transformer(args): + return Transformer( + d_model=args.hidden_dim, + dropout=args.dropout, + nhead=args.nheads, + dim_feedforward=args.dim_feedforward, + num_encoder_layers=args.enc_layers, + num_decoder_layers=args.dec_layers, + normalize_before=args.pre_norm, + return_intermediate_dec=True, + ) + + +def _get_activation_fn(activation): + """Return an activation function given a string""" + if activation == "relu": + return F.relu + if activation == "gelu": + return F.gelu + if activation == "glu": + return F.glu + raise RuntimeError(F"activation should be relu/gelu, not {activation}.") \ No newline at end of file diff --git a/mGPT/models/utils/position_encoding.py b/mGPT/models/utils/position_encoding.py new file mode 100644 index 0000000..669c677 --- /dev/null +++ b/mGPT/models/utils/position_encoding.py @@ -0,0 +1,192 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Various positional encodings for the transformer. 
+""" +import math +from typing import List, Optional + +import numpy as np +import torch +from torch import Tensor, nn + +# from util.misc import NestedTensor + + +class NestedTensor(object): + + def __init__(self, tensors, mask: Optional[Tensor]): + self.tensors = tensors + self.mask = mask + + def to(self, device): + # type: (Device) -> NestedTensor # noqa + cast_tensor = self.tensors.to(device) + mask = self.mask + if mask is not None: + assert mask is not None + cast_mask = mask.to(device) + else: + cast_mask = None + return NestedTensor(cast_tensor, cast_mask) + + def decompose(self): + return self.tensors, self.mask + + def __repr__(self): + return str(self.tensors) + + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + + def __init__(self, + num_pos_feats=64, + temperature=10000, + normalize=False, + scale=None): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError("normalize should be True if scale is passed") + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, tensor_list: NestedTensor): + x = tensor_list.tensors + mask = tensor_list.mask + assert mask is not None + not_mask = ~mask + y_embed = not_mask.cumsum(1, dtype=torch.float32) + x_embed = not_mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, + dtype=torch.float32, + device=x.device) + dim_t = self.temperature**(2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack( + (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), + dim=4).flatten(3) + pos_y = torch.stack( + (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), + dim=4).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +class PositionEmbeddingLearned(nn.Module): + """ + Absolute pos embedding, learned. 
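# Hedged example (not in the original file) of the 2D sine embedding defined above:
# it consumes a NestedTensor of image-like features plus a padding mask and returns
# one positional channel per feature channel, the first half built from cumulative
# y positions and the second half from x. Shapes are hypothetical.
import torch

feats = torch.randn(2, 256, 8, 8)              # (batch, C, H, W)
mask = torch.zeros(2, 8, 8, dtype=torch.bool)  # False marks valid (non-padded) pixels
embed = PositionEmbeddingSine(num_pos_feats=128, normalize=True)
pos = embed(NestedTensor(feats, mask))
assert pos.shape == (2, 256, 8, 8)             # 128 y-channels then 128 x-channels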
+ """ + + def __init__(self, num_pos_feats=256): + super().__init__() + self.row_embed = nn.Embedding(50, num_pos_feats) + self.col_embed = nn.Embedding(50, num_pos_feats) + self.reset_parameters() + + def reset_parameters(self): + nn.init.uniform_(self.row_embed.weight) + nn.init.uniform_(self.col_embed.weight) + + def forward(self, tensor_list: NestedTensor): + x = tensor_list.tensors + h, w = x.shape[-2:] + i = torch.arange(w, device=x.device) + j = torch.arange(h, device=x.device) + x_emb = self.col_embed(i) + y_emb = self.row_embed(j) + pos = torch.cat([ + x_emb.unsqueeze(0).repeat(h, 1, 1), + y_emb.unsqueeze(1).repeat(1, w, 1), + ], + dim=-1).permute(2, 0, 1).unsqueeze(0).repeat( + x.shape[0], 1, 1, 1) + return pos + + +class PositionEmbeddingSine1D(nn.Module): + + def __init__(self, d_model, max_len=500, batch_first=False): + super().__init__() + self.batch_first = batch_first + + pe = torch.zeros(max_len, d_model) + position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1) + div_term = torch.exp( + torch.arange(0, d_model, 2).float() * (-np.log(10000.0) / d_model)) + pe[:, 0::2] = torch.sin(position * div_term) + pe[:, 1::2] = torch.cos(position * div_term) + pe = pe.unsqueeze(0).transpose(0, 1) + + self.register_buffer('pe', pe) + + def forward(self, x): + # not used in the final model + if self.batch_first: + pos = self.pe.permute(1, 0, 2)[:, :x.shape[1], :] + else: + pos = self.pe[:x.shape[0], :] + return pos + + +class PositionEmbeddingLearned1D(nn.Module): + + def __init__(self, d_model, max_len=500, batch_first=False): + super().__init__() + self.batch_first = batch_first + # self.dropout = nn.Dropout(p=dropout) + + self.pe = nn.Parameter(torch.zeros(max_len, 1, d_model)) + # self.pe = pe.unsqueeze(0).transpose(0, 1) + + self.reset_parameters() + + def reset_parameters(self): + nn.init.uniform_(self.pe) + + def forward(self, x): + # not used in the final model + if self.batch_first: + pos = self.pe.permute(1, 0, 2)[:, :x.shape[1], :] + else: + x = x + self.pe[:x.shape[0], :] + return x + # return self.dropout(x) + + +def build_position_encoding(N_steps, + position_embedding="sine", + embedding_dim="1D"): + # N_steps = hidden_dim // 2 + if embedding_dim == "1D": + if position_embedding in ('v2', 'sine'): + position_embedding = PositionEmbeddingSine1D(N_steps) + elif position_embedding in ('v3', 'learned'): + position_embedding = PositionEmbeddingLearned1D(N_steps) + else: + raise ValueError(f"not supported {position_embedding}") + elif embedding_dim == "2D": + if position_embedding in ('v2', 'sine'): + # TODO find a better way of exposing other arguments + position_embedding = PositionEmbeddingSine(N_steps, normalize=True) + elif position_embedding in ('v3', 'learned'): + position_embedding = PositionEmbeddingLearned(N_steps) + else: + raise ValueError(f"not supported {position_embedding}") + else: + raise ValueError(f"not supported {embedding_dim}") + + return position_embedding diff --git a/mGPT/models/utils/position_encoding_layer.py b/mGPT/models/utils/position_encoding_layer.py new file mode 100644 index 0000000..699c860 --- /dev/null +++ b/mGPT/models/utils/position_encoding_layer.py @@ -0,0 +1,30 @@ +import numpy as np +import torch +from torch import nn + + +class PositionalEncoding(nn.Module): + + def __init__(self, d_model, dropout=0.1, max_len=5000, batch_first=False): + super().__init__() + self.batch_first = batch_first + + self.dropout = nn.Dropout(p=dropout) + + pe = torch.zeros(max_len, d_model) + position = torch.arange(0, max_len, 
dtype=torch.float).unsqueeze(1) + div_term = torch.exp(torch.arange( + 0, d_model, 2).float() * (-np.log(10000.0) / d_model)) + pe[:, 0::2] = torch.sin(position * div_term) + pe[:, 1::2] = torch.cos(position * div_term) + pe = pe.unsqueeze(0).transpose(0, 1) + + self.register_buffer("pe", pe) + + def forward(self, x): + # not used in the final model + if self.batch_first: + x = x + self.pe.permute(1, 0, 2)[:, : x.shape[1], :] + else: + x = x + self.pe[: x.shape[0], :] + return self.dropout(x) diff --git a/mGPT/models/utils/tools.py b/mGPT/models/utils/tools.py new file mode 100644 index 0000000..89ecab5 --- /dev/null +++ b/mGPT/models/utils/tools.py @@ -0,0 +1,37 @@ +import torch.nn as nn + +def remove_padding(tensors, lengths): + return [tensor[:tensor_length] for tensor, tensor_length in zip(tensors, lengths)] + +class AutoParams(nn.Module): + def __init__(self, **kargs): + try: + for param in self.needed_params: + if param in kargs: + setattr(self, param, kargs[param]) + else: + raise ValueError(f"{param} is needed.") + except : + pass + + try: + for param, default in self.optional_params.items(): + if param in kargs and kargs[param] is not None: + setattr(self, param, kargs[param]) + else: + setattr(self, param, default) + except : + pass + super().__init__() + + +# taken from joeynmt repo +def freeze_params(module: nn.Module) -> None: + """ + Freeze the parameters of this module, + i.e. do not update them during training + + :param module: freeze parameters of this module + """ + for _, p in module.named_parameters(): + p.requires_grad = False diff --git a/mGPT/render/__init__.py b/mGPT/render/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/render/anim.py b/mGPT/render/anim.py new file mode 100644 index 0000000..4a78fec --- /dev/null +++ b/mGPT/render/anim.py @@ -0,0 +1,155 @@ +# Inspired by +# - https://github.com/anindita127/Complextext2animation/blob/main/src/utils/visualization.py +# - https://github.com/facebookresearch/QuaterNet/blob/main/common/visualization.py + +from typing import List, Tuple +import numpy as np +from mGPT.utils.joints import mmm_kinematic_tree, mmm_to_smplh_scaling_factor + +mmm_colors = ['black', 'magenta', 'red', 'green', 'blue'] + + +def init_axis(fig, title, radius=1.5, dist=10): + ax = fig.add_subplot(1, 1, 1, projection='3d') + ax.view_init(elev=20., azim=-60) + + fact = 2 + ax.set_xlim3d([-radius / fact, radius / fact]) + ax.set_ylim3d([-radius / fact, radius / fact]) + ax.set_zlim3d([0, radius]) + + ax.set_aspect('auto') + ax.set_xticklabels([]) + ax.set_yticklabels([]) + ax.set_zticklabels([]) + + ax.set_axis_off() + + ax.dist = dist + ax.grid(b=False) + + ax.set_title(title, loc='center', wrap=True) + return ax + + +def plot_floor(ax, minx, maxx, miny, maxy, minz): + from mpl_toolkits.mplot3d.art3d import Poly3DCollection + # Plot a plane XZ + verts = [ + [minx, miny, minz], + [minx, maxy, minz], + [maxx, maxy, minz], + [maxx, miny, minz] + ] + xz_plane = Poly3DCollection([verts], zorder=1) + xz_plane.set_facecolor((0.5, 0.5, 0.5, 1)) + ax.add_collection3d(xz_plane) + + # Plot a bigger square plane XZ + radius = max((maxx - minx), (maxy - miny)) + + # center +- radius + minx_all = (maxx + minx) / 2 - radius + maxx_all = (maxx + minx) / 2 + radius + + miny_all = (maxy + miny) / 2 - radius + maxy_all = (maxy + miny) / 2 + radius + + verts = [ + [minx_all, miny_all, minz], + [minx_all, maxy_all, minz], + [maxx_all, maxy_all, minz], + [maxx_all, miny_all, minz] + ] + xz_plane = Poly3DCollection([verts], 
zorder=1) + xz_plane.set_facecolor((0.5, 0.5, 0.5, 0.5)) + ax.add_collection3d(xz_plane) + return ax + + +def update_camera(ax, root, radius=1.5): + fact = 2 + ax.set_xlim3d([-radius / fact + root[0], radius / fact + root[0]]) + ax.set_ylim3d([-radius / fact + root[1], radius / fact + root[1]]) + + +def render_animation(joints: np.ndarray, output: str = "notebook", title: str = "", + fps: float = 12.5, + kinematic_tree: List[List[int]] = mmm_kinematic_tree, + colors: List[str] = mmm_colors, + figsize: Tuple[int] = (4, 4), + fontsize: int = 15): + import matplotlib.pyplot as plt + from matplotlib.animation import FuncAnimation + import matplotlib.patheffects as pe + plt.rcParams.update({'font.size': fontsize}) + + # Z is gravity here + x, y, z = 0, 1, 2 + + # Convert mmm joints for visualization + # into smpl-h "scale" and axis + joints = joints.copy()[..., [2, 0, 1]] * mmm_to_smplh_scaling_factor + + # Create a figure and initialize 3d plot + fig = plt.figure(figsize=figsize) + ax = init_axis(fig, title) + + # Create spline line + trajectory = joints[:, 0, [x, y]] + avg_segment_length = np.mean(np.linalg.norm(np.diff(trajectory, axis=0), axis=1)) + 1e-3 + draw_offset = int(25 / avg_segment_length) + spline_line, = ax.plot(*trajectory.T, zorder=10, color="white") + + # Create a floor + minx, miny, _ = joints.min(axis=(0, 1)) + maxx, maxy, _ = joints.max(axis=(0, 1)) + plot_floor(ax, minx, maxx, miny, maxy, 0) + + # Put the character on the floor + height_offset = np.min(joints[:, :, z]) # Min height + joints = joints.copy() + joints[:, :, z] -= height_offset + + # Initialization for redrawing + lines = [] + initialized = False + + def update(frame): + nonlocal initialized + skeleton = joints[frame] + + root = skeleton[0] + update_camera(ax, root) + + for index, (chain, color) in enumerate(zip(reversed(kinematic_tree), reversed(colors))): + if not initialized: + lines.append(ax.plot(skeleton[chain, x], + skeleton[chain, y], + skeleton[chain, z], linewidth=8.0, color=color, zorder=20, + path_effects=[pe.SimpleLineShadow(), pe.Normal()])) + + else: + lines[index][0].set_xdata(skeleton[chain, x]) + lines[index][0].set_ydata(skeleton[chain, y]) + lines[index][0].set_3d_properties(skeleton[chain, z]) + + left = max(frame - draw_offset, 0) + right = min(frame + draw_offset, trajectory.shape[0]) + + spline_line.set_xdata(trajectory[left:right, 0]) + spline_line.set_ydata(trajectory[left:right, 1]) + spline_line.set_3d_properties(np.zeros_like(trajectory[left:right, 0])) + initialized = True + + fig.tight_layout() + frames = joints.shape[0] + anim = FuncAnimation(fig, update, frames=frames, interval=1000 / fps, repeat=False) + + if output == "notebook": + from IPython.display import HTML + HTML(anim.to_jshtml()) + else: + anim.save(output, writer='ffmpeg', fps=fps) + + plt.close() diff --git a/mGPT/render/blender/__init__.py b/mGPT/render/blender/__init__.py new file mode 100644 index 0000000..a82255d --- /dev/null +++ b/mGPT/render/blender/__init__.py @@ -0,0 +1 @@ +from .render import render diff --git a/mGPT/render/blender/camera.py b/mGPT/render/blender/camera.py new file mode 100644 index 0000000..ee037c2 --- /dev/null +++ b/mGPT/render/blender/camera.py @@ -0,0 +1,52 @@ +import bpy + + +class Camera: + def __init__(self, *, first_root, mode, is_mesh): + camera = bpy.data.objects['Camera'] + + ## initial position + camera.location.x = 7.36 + camera.location.y = -6.93 + if is_mesh: + # camera.location.z = 5.45 + camera.location.z = 5.6 + else: + camera.location.z = 5.2 + + # wider point of view 
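# Illustrative call (joints and output path are made up) to render_animation from
# mGPT/render/anim.py above: it expects a (frames, joints, 3) array in the MMM
# joint convention, draws the skeleton with a trailing trajectory spline, and
# needs ffmpeg when saving to a video file (output="notebook" builds an inline
# HTML player instead).
import numpy as np

joints = np.random.randn(60, 21, 3).astype(np.float32)   # fake 60-frame MMM clip
render_animation(joints, output="sample.mp4", title="a person walks forward", fps=12.5)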
+ if mode == "sequence": + if is_mesh: + camera.data.lens = 65 + else: + camera.data.lens = 85 + elif mode == "frame": + if is_mesh: + camera.data.lens = 130 + else: + camera.data.lens = 85 + elif mode == "video": + if is_mesh: + camera.data.lens = 110 + else: + # avoid cutting person + camera.data.lens = 85 + # camera.data.lens = 140 + + # camera.location.x += 0.75 + + self.mode = mode + self.camera = camera + + self.camera.location.x += first_root[0] + self.camera.location.y += first_root[1] + + self._root = first_root + + def update(self, newroot): + delta_root = newroot - self._root + + self.camera.location.x += delta_root[0] + self.camera.location.y += delta_root[1] + + self._root = newroot diff --git a/mGPT/render/blender/data.py b/mGPT/render/blender/data.py new file mode 100644 index 0000000..17c6a40 --- /dev/null +++ b/mGPT/render/blender/data.py @@ -0,0 +1,3 @@ +class Data: + def __len__(self): + return self.N diff --git a/mGPT/render/blender/floor.py b/mGPT/render/blender/floor.py new file mode 100644 index 0000000..3be1e59 --- /dev/null +++ b/mGPT/render/blender/floor.py @@ -0,0 +1,73 @@ +import bpy +from .materials import floor_mat + + +def get_trajectory(data, is_mesh): + if is_mesh: + # mean of the vertices + trajectory = data[:, :, [0, 1]].mean(1) + else: + # get the root joint + trajectory = data[:, 0, [0, 1]] + return trajectory + + +def plot_floor(data, big_plane=True): + # Create a floor + minx, miny, _ = data.min(axis=(0, 1)) + maxx, maxy, _ = data.max(axis=(0, 1)) + minz = 0 + + location = ((maxx + minx)/2, (maxy + miny)/2, 0) + # a little bit bigger + scale = (1.08*(maxx - minx)/2, 1.08*(maxy - miny)/2, 1) + + bpy.ops.mesh.primitive_plane_add(size=2, enter_editmode=False, align='WORLD', location=location, scale=(1, 1, 1)) + + bpy.ops.transform.resize(value=scale, orient_type='GLOBAL', orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), orient_matrix_type='GLOBAL', + constraint_axis=(False, True, False), mirror=True, use_proportional_edit=False, + proportional_edit_falloff='SMOOTH', proportional_size=1, use_proportional_connected=False, + use_proportional_projected=False, release_confirm=True) + obj = bpy.data.objects["Plane"] + obj.name = "SmallPlane" + obj.data.name = "SmallPlane" + + if not big_plane: + obj.active_material = floor_mat(color=(0.2, 0.2, 0.2, 1)) + else: + obj.active_material = floor_mat(color=(0.1, 0.1, 0.1, 1)) + + if big_plane: + location = ((maxx + minx)/2, (maxy + miny)/2, -0.01) + bpy.ops.mesh.primitive_plane_add(size=2, enter_editmode=False, align='WORLD', location=location, scale=(1, 1, 1)) + + bpy.ops.transform.resize(value=[2*x for x in scale], orient_type='GLOBAL', orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), orient_matrix_type='GLOBAL', + constraint_axis=(False, True, False), mirror=True, use_proportional_edit=False, + proportional_edit_falloff='SMOOTH', proportional_size=1, use_proportional_connected=False, + use_proportional_projected=False, release_confirm=True) + + obj = bpy.data.objects["Plane"] + obj.name = "BigPlane" + obj.data.name = "BigPlane" + obj.active_material = floor_mat(color=(0.2, 0.2, 0.2, 1)) + + +def show_traj(coords): + pass + # create the Curve Datablock + # curveData = bpy.data.curves.new('myCurve', type='CURVE') + # curveData.dimensions = '3D' + # curveData.resolution_u = 2 + + # # map coords to spline + # polyline = curveData.splines.new('POLY') + # polyline.points.add(len(coords)-1) + # for i, coord in enumerate(coords): + # x, y = coord + # polyline.points[i].co = (x, y, 0.001, 1) + + # # create Object + # 
curveOB = bpy.data.objects.new('myCurve', curveData) + # curveData.bevel_depth = 0.01 + + # bpy.context.collection.objects.link(curveOB) diff --git a/mGPT/render/blender/joints.py b/mGPT/render/blender/joints.py new file mode 100644 index 0000000..9d836a6 --- /dev/null +++ b/mGPT/render/blender/joints.py @@ -0,0 +1,378 @@ +import math + +import bpy +import numpy as np + +from mGPT.utils.joints import (humanml3d_joints, humanml3d_kinematic_tree, + mmm_joints, mmm_kinematic_tree, + mmm_to_smplh_scaling_factor) + +# from .materials import colored_material_diffuse_BSDF as colored_material +from .materials import colored_material_relection_BSDF as colored_material + +sat_factor = 1.1 + +JOINTS_MATS = [ + # colored_material(0.2500, 0.0357, 0.0349, saturation_factor = sat_factor), + # # colored_material(0.4500, 0.0357, 0.0349), + # colored_material(0.6500, 0.175, 0.0043, saturation_factor = sat_factor), + # colored_material(0.0349, 0.3500, 0.0349, saturation_factor = sat_factor), + # colored_material(0.018, 0.059, 0.600, saturation_factor = sat_factor), + # colored_material(0.032, 0.325, 0.421, saturation_factor = sat_factor), + # colored_material(0.3, 0.3, 0.3, saturation_factor = sat_factor), + colored_material(0.3500, 0.0357, 0.0349, saturation_factor=sat_factor), + # colored_material(0.4500, 0.0357, 0.0349), + colored_material(0.6500, 0.175, 0.0043, saturation_factor=sat_factor), + colored_material(0.0349, 0.3500, 0.0349, saturation_factor=sat_factor), + colored_material(0.018, 0.059, 0.600, saturation_factor=sat_factor), + colored_material(0.032, 0.325, 0.421, saturation_factor=sat_factor), + colored_material(0.3, 0.3, 0.3, saturation_factor=sat_factor), +] + + +class Joints: + + def __init__(self, + data, + *, + mode, + canonicalize, + always_on_floor, + jointstype="mmm", + **kwargs): + data = prepare_joints( + data, + canonicalize=canonicalize, + always_on_floor=always_on_floor, + jointstype=jointstype, + ) + + self.data = data + self.mode = mode + + self.N = len(data) + + self.N = len(data) + self.trajectory = data[:, 0, [0, 1]] + + if jointstype == "mmm": + self.kinematic_tree = mmm_kinematic_tree + self.joints = mmm_joints + self.joinst.append("") + elif jointstype == "humanml3d": + self.kinematic_tree = humanml3d_kinematic_tree + self.joints = humanml3d_joints + + self.mat = JOINTS_MATS + + def get_sequence_mat(self, frac): + return self.mat + + def get_root(self, index): + return self.data[index][0] + + def get_mean_root(self): + return self.data[:, 0].mean(0) + + def load_in_blender(self, index, mats): + skeleton = self.data[index] + head_mat = mats[0] + body_mat = mats[-1] + for lst, mat in zip(self.kinematic_tree, mats): + for j1, j2 in zip(lst[:-1], lst[1:]): + # spine and head + if self.joints[j2] in [ + "BUN", + ]: + sphere_between(skeleton[j1], skeleton[j2], head_mat) + elif self.joints[j2] in [ + "LE", + "RE", + "LW", + "RW", + ]: + cylinder_sphere_between(skeleton[j1], skeleton[j2], 0.040, + mat) + elif self.joints[j2] in [ + "LMrot", + "RMrot", + "RK", + "LK", + ]: + cylinder_sphere_between(skeleton[j1], skeleton[j2], 0.040, + mat) + elif self.joints[j2] in [ + "LS", + "RS", + "LF", + "RF", + ]: + cylinder_between(skeleton[j1], skeleton[j2], 0.040, mat) + elif self.joints[j2] in ["RK", "LK"]: + print(self.joints[j1], self.joints[j2]) + # body + sphere(0.14, skeleton[self.joints.index("BLN")], body_mat) + sphere_between( + skeleton[self.joints.index("BLN")], + skeleton[self.joints.index("root")], + body_mat, + factor=0.28, + ) + sphere(0.11, 
skeleton[self.joints.index("root")], body_mat) + # sphere_between( + # skeleton[self.joints.index("BLN")], + # skeleton[self.joints.index("BT")], + # mats[0], + # ) + # hip + # sphere_between( + # skeleton[self.joints.index("LH")], + # skeleton[self.joints.index("RH")], + # mats[0], + # factor=0.6, + # ) + # + # sphere(skeleton[self.joints.index("BLN")], 0.05, mats[0]) + # sphere_between(skeleton[13], skeleton[14], mat) + # node + # print(self.joints.index("BUN")) + # print(len(lst)) + # sphere(lst[self.joints.index("BUN")], 0.2, mat) # head + + return ["Cylinder", "Sphere"] + + def __len__(self): + return self.N + + +def softmax(x, softness=1.0, dim=None): + maxi, mini = x.max(dim), x.min(dim) + return maxi + np.log(softness + np.exp(mini - maxi)) + + +def softmin(x, softness=1.0, dim=0): + return -softmax(-x, softness=softness, dim=dim) + + +def get_forward_direction(poses, jointstype="mmm"): + if jointstype == "mmm" or jointstype == "mmmns": + joints = mmm_joints + elif jointstype == "humanml3d": + joints = humanml3d_joints + else: + raise TypeError("Only supports mmm, mmmns and humanl3d jointstype") + # Shoulders + LS, RS = joints.index("LS"), joints.index("RS") + # Hips + LH, RH = mmm_joints.index("LH"), mmm_joints.index("RH") + + across = (poses[..., RH, :] - poses[..., LH, :] + poses[..., RS, :] - + poses[..., LS, :]) + forward = np.stack((-across[..., 2], across[..., 0]), axis=-1) + forward = forward / np.linalg.norm(forward, axis=-1) + return forward + + +def cylinder_between(t1, t2, r, mat): + x1, y1, z1 = t1 + x2, y2, z2 = t2 + + dx = x2 - x1 + dy = y2 - y1 + dz = z2 - z1 + dist = math.sqrt(dx**2 + dy**2 + dz**2) + + bpy.ops.mesh.primitive_cylinder_add(radius=r, + depth=dist, + location=(dx / 2 + x1, dy / 2 + y1, + dz / 2 + z1)) + + phi = math.atan2(dy, dx) + theta = math.acos(dz / dist) + bpy.context.object.rotation_euler[1] = theta + bpy.context.object.rotation_euler[2] = phi + # bpy.context.object.shade_smooth() + bpy.context.object.active_material = mat + + bpy.ops.mesh.primitive_uv_sphere_add(radius=r, location=(x1, y1, z1)) + bpy.context.object.active_material = mat + bpy.ops.mesh.primitive_uv_sphere_add(radius=r, location=(x2, y2, z2)) + bpy.context.object.active_material = mat + + +def cylinder_sphere_between(t1, t2, r, mat): + x1, y1, z1 = t1 + x2, y2, z2 = t2 + dx = x2 - x1 + dy = y2 - y1 + dz = z2 - z1 + dist = math.sqrt(dx**2 + dy**2 + dz**2) + phi = math.atan2(dy, dx) + theta = math.acos(dz / dist) + dist = dist - 0.2 * r + # sphere node + sphere(r * 0.9, t1, mat) + sphere(r * 0.9, t2, mat) + # leveled cylinder + bpy.ops.mesh.primitive_cylinder_add( + radius=r, + depth=dist, + location=(dx / 2 + x1, dy / 2 + y1, dz / 2 + z1), + enter_editmode=True, + ) + bpy.ops.mesh.select_mode(type="EDGE") + bpy.ops.mesh.select_all(action="DESELECT") + bpy.ops.mesh.select_face_by_sides(number=32, extend=False) + bpy.ops.mesh.bevel(offset=r, segments=8) + bpy.ops.object.editmode_toggle(False) + # bpy.ops.object.shade_smooth() + bpy.context.object.rotation_euler[1] = theta + bpy.context.object.rotation_euler[2] = phi + bpy.context.object.active_material = mat + + +def sphere(r, t, mat): + bpy.ops.mesh.primitive_uv_sphere_add(segments=50, + ring_count=50, + radius=r, + location=t) + # bpy.ops.mesh.primitive_uv_sphere_add(radius=r, location=t) + # bpy.context.object.shade_smooth() + bpy.context.object.active_material = mat + + +def sphere_between(t1, t2, mat, factor=1): + x1, y1, z1 = t1 + x2, y2, z2 = t2 + + dx = x2 - x1 + dy = y2 - y1 + dz = z2 - z1 + dist = math.sqrt(dx**2 + dy**2 
+ dz**2) * factor + + bpy.ops.mesh.primitive_uv_sphere_add( + segments=50, + ring_count=50, + # bpy.ops.mesh.primitive_uv_sphere_add( + radius=dist, + location=(dx / 2 + x1, dy / 2 + y1, dz / 2 + z1)) + + # bpy.context.object.shade_smooth() + bpy.context.object.active_material = mat + + +def matrix_of_angles(cos, sin, inv=False): + sin = -sin if inv else sin + return np.stack((np.stack( + (cos, -sin), axis=-1), np.stack((sin, cos), axis=-1)), + axis=-2) + + +def get_floor(poses, jointstype="mmm"): + if jointstype == "mmm" or jointstype == "mmmns": + joints = mmm_joints + elif jointstype == "humanml3d": + joints = humanml3d_joints + else: + raise TypeError("Only supports mmm, mmmns and humanl3d jointstype") + # Feet + LM, RM = joints.index("LMrot"), joints.index("RMrot") + LF, RF = joints.index("LF"), joints.index("RF") + ndim = len(poses.shape) + + foot_heights = poses[..., (LM, LF, RM, RF), 1].min(-1) + floor_height = softmin(foot_heights, softness=0.5, dim=-1) + return floor_height[tuple((ndim - 2) * [None])].T + + +def canonicalize_joints(joints, jointstype="mmm"): + poses = joints.copy() + + translation = joints[..., 0, :].copy() + + # Let the root have the Y translation + translation[..., 1] = 0 + # Trajectory => Translation without gravity axis (Y) + trajectory = translation[..., [0, 2]] + + # Remove the floor + poses[..., 1] -= get_floor(poses, jointstype) + + # Remove the trajectory of the joints + poses[..., [0, 2]] -= trajectory[..., None, :] + + # Let the first pose be in the center + trajectory = trajectory - trajectory[..., 0, :] + + # Compute the forward direction of the first frame + forward = get_forward_direction(poses[..., 0, :, :], jointstype) + + # Construct the inverse rotation matrix + sin, cos = forward[..., 0], forward[..., 1] + rotations_inv = matrix_of_angles(cos, sin, inv=True) + + # Rotate the trajectory + trajectory_rotated = np.einsum("...j,...jk->...k", trajectory, + rotations_inv) + + # Rotate the poses + poses_rotated = np.einsum("...lj,...jk->...lk", poses[..., [0, 2]], + rotations_inv) + poses_rotated = np.stack( + (poses_rotated[..., 0], poses[..., 1], poses_rotated[..., 1]), axis=-1) + + # Re-merge the pose and translation + poses_rotated[..., (0, 2)] += trajectory_rotated[..., None, :] + return poses_rotated + + +def prepare_joints(joints, + canonicalize=True, + always_on_floor=False, + jointstype="mmm"): + # All face the same direction for the first frame + if canonicalize: + data = canonicalize_joints(joints, jointstype) + else: + data = joints + + # Rescaling, shift axis and swap left/right + if jointstype == "humanml3d": + data = data * mmm_to_smplh_scaling_factor + data[..., 1] = - data[..., 1] + + # Swap axis (gravity=Z instead of Y) + data = data[..., [2, 0, 1]] + + if jointstype == "mmm": + # Make left/right correct + data[..., [1]] = -data[..., [1]] + + # Center the first root to the first frame + data -= data[[0], [0], :] + + # Remove the floor + data[..., 2] -= data[..., 2].min() + + # Put all the body on the floor + if always_on_floor: + data[..., 2] -= data[..., 2].min(1)[:, None] + + return data + + +def NormalInDirection(normal, direction, limit=0.5): + return direction.dot(normal) > limit + + +def GoingUp(normal, limit=0.5): + return NormalInDirection(normal, (0, 0, 1), limit) + + +def GoingDown(normal, limit=0.5): + return NormalInDirection(normal, (0, 0, -1), limit) + + +def GoingSide(normal, limit=0.5): + return GoingUp(normal, limit) == False and GoingDown(normal, + limit) == False diff --git a/mGPT/render/blender/materials.py 
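# Quick sanity sketch (not in the original file) for matrix_of_angles above, which
# builds the per-frame 2D rotation matrices used by canonicalize_joints; inv=True
# gives the inverse rotation that removes the initial facing direction.
import numpy as np

cos, sin = np.cos(np.pi / 2), np.sin(np.pi / 2)
R = matrix_of_angles(cos, sin)                # approx [[0, -1], [1, 0]]
R_inv = matrix_of_angles(cos, sin, inv=True)  # approx [[0, 1], [-1, 0]]
assert np.allclose(R @ R_inv, np.eye(2))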
b/mGPT/render/blender/materials.py new file mode 100644 index 0000000..4f0bf1a --- /dev/null +++ b/mGPT/render/blender/materials.py @@ -0,0 +1,135 @@ +import bpy + + +def clear_material(material): + if material.node_tree: + material.node_tree.links.clear() + material.node_tree.nodes.clear() + + +def colored_material_diffuse_BSDF(r, g, b, a=1, roughness=0.127451): + materials = bpy.data.materials + material = materials.new(name="body") + material.use_nodes = True + clear_material(material) + nodes = material.node_tree.nodes + links = material.node_tree.links + output = nodes.new(type='ShaderNodeOutputMaterial') + diffuse = nodes.new(type='ShaderNodeBsdfDiffuse') + diffuse.inputs["Color"].default_value = (r, g, b, a) + diffuse.inputs["Roughness"].default_value = roughness + links.new(diffuse.outputs['BSDF'], output.inputs['Surface']) + return material + +def colored_material_relection_BSDF(r, g, b, a=1, roughness=0.127451, saturation_factor=1): + materials = bpy.data.materials + material = materials.new(name="body") + material.use_nodes = True + # clear_material(material) + nodes = material.node_tree.nodes + links = material.node_tree.links + output = nodes.new(type='ShaderNodeOutputMaterial') + # diffuse = nodes.new(type='ShaderNodeBsdfDiffuse') + diffuse = nodes["Principled BSDF"] + diffuse.inputs["Base Color"].default_value = (r*saturation_factor, g*saturation_factor, b*saturation_factor, a) + diffuse.inputs["Roughness"].default_value = roughness + links.new(diffuse.outputs['BSDF'], output.inputs['Surface']) + return material + +# keys: +# ['Base Color', 'Subsurface', 'Subsurface Radius', 'Subsurface Color', 'Metallic', 'Specular', 'Specular Tint', 'Roughness', 'Anisotropic', 'Anisotropic Rotation', 'Sheen', 1Sheen Tint', 'Clearcoat', 'Clearcoat Roughness', 'IOR', 'Transmission', 'Transmission Roughness', 'Emission', 'Emission Strength', 'Alpha', 'Normal', 'Clearcoat Normal', 'Tangent'] +DEFAULT_BSDF_SETTINGS = {"Subsurface": 0.15, + "Subsurface Radius": [1.1, 0.2, 0.1], + "Metallic": 0.3, + "Specular": 0.5, + "Specular Tint": 0.5, + "Roughness": 0.75, + "Anisotropic": 0.25, + "Anisotropic Rotation": 0.25, + "Sheen": 0.75, + "Sheen Tint": 0.5, + "Clearcoat": 0.5, + "Clearcoat Roughness": 0.5, + "IOR": 1.450, + "Transmission": 0.1, + "Transmission Roughness": 0.1, + "Emission": (0, 0, 0, 1), + "Emission Strength": 0.0, + "Alpha": 1.0} + +def body_material(r, g, b, a=1, name="body", oldrender=True): + if oldrender: + material = colored_material_diffuse_BSDF(r, g, b, a=a) + else: + materials = bpy.data.materials + material = materials.new(name=name) + material.use_nodes = True + nodes = material.node_tree.nodes + diffuse = nodes["Principled BSDF"] + inputs = diffuse.inputs + + settings = DEFAULT_BSDF_SETTINGS.copy() + settings["Base Color"] = (r, g, b, a) + settings["Subsurface Color"] = (r, g, b, a) + settings["Subsurface"] = 0.0 + + for setting, val in settings.items(): + inputs[setting].default_value = val + + return material + + +def colored_material_bsdf(name, **kwargs): + materials = bpy.data.materials + material = materials.new(name=name) + material.use_nodes = True + nodes = material.node_tree.nodes + diffuse = nodes["Principled BSDF"] + inputs = diffuse.inputs + + settings = DEFAULT_BSDF_SETTINGS.copy() + for key, val in kwargs.items(): + settings[key] = val + + for setting, val in settings.items(): + inputs[setting].default_value = val + + return material + + +def floor_mat(name="floor_mat", color=(0.1, 0.1, 0.1, 1), roughness=0.127451): + return 
colored_material_diffuse_BSDF(color[0], color[1], color[2], a=color[3], roughness=roughness) + + +def plane_mat(): + materials = bpy.data.materials + material = materials.new(name="plane") + material.use_nodes = True + clear_material(material) + nodes = material.node_tree.nodes + links = material.node_tree.links + output = nodes.new(type='ShaderNodeOutputMaterial') + diffuse = nodes.new(type='ShaderNodeBsdfDiffuse') + checker = nodes.new(type="ShaderNodeTexChecker") + checker.inputs["Scale"].default_value = 1024 + checker.inputs["Color1"].default_value = (0.8, 0.8, 0.8, 1) + checker.inputs["Color2"].default_value = (0.3, 0.3, 0.3, 1) + links.new(checker.outputs["Color"], diffuse.inputs['Color']) + links.new(diffuse.outputs['BSDF'], output.inputs['Surface']) + diffuse.inputs["Roughness"].default_value = 0.127451 + return material + + +def plane_mat_uni(): + materials = bpy.data.materials + material = materials.new(name="plane_uni") + material.use_nodes = True + clear_material(material) + nodes = material.node_tree.nodes + links = material.node_tree.links + output = nodes.new(type='ShaderNodeOutputMaterial') + diffuse = nodes.new(type='ShaderNodeBsdfDiffuse') + diffuse.inputs["Color"].default_value = (0.8, 0.8, 0.8, 1) + diffuse.inputs["Roughness"].default_value = 0.127451 + links.new(diffuse.outputs['BSDF'], output.inputs['Surface']) + return material diff --git a/mGPT/render/blender/meshes.py b/mGPT/render/blender/meshes.py new file mode 100644 index 0000000..284de6c --- /dev/null +++ b/mGPT/render/blender/meshes.py @@ -0,0 +1,93 @@ +import numpy as np + +from .materials import body_material + +# green +# GT_SMPL = body_material(0.009, 0.214, 0.029) +GT_SMPL = body_material(0.035, 0.415, 0.122) + +# blue +# GEN_SMPL = body_material(0.022, 0.129, 0.439) +# Blues => cmap(0.87) +# GEN_SMPL = body_material(0.035, 0.322, 0.615) +# Oranges => cmap(0.87) +GEN_SMPL = body_material(0.658, 0.214, 0.0114) + + +class Meshes: + def __init__(self, data, *, gt, mode, faces_path, canonicalize, always_on_floor, oldrender=True, is_smplx=False, **kwargs): + data = prepare_meshes(data, canonicalize=canonicalize, + always_on_floor=always_on_floor, + is_smplx=is_smplx) + + if isinstance(faces_path, str): + self.faces = np.load(faces_path) + else: + self.faces = faces_path + + self.data = data + self.mode = mode + self.oldrender = oldrender + + self.N = len(data) + self.trajectory = data[:, :, [0, 1]].mean(1) + + if gt: + self.mat = GT_SMPL + else: + self.mat = GEN_SMPL + + def get_sequence_mat(self, frac): + import matplotlib + # cmap = matplotlib.cm.get_cmap('Blues') + cmap = matplotlib.cm.get_cmap('Oranges') + # begin = 0.60 + # end = 0.90 + begin = 0.50 + end = 0.90 + rgbcolor = cmap(begin + (end-begin)*frac) + mat = body_material(*rgbcolor, oldrender=self.oldrender) + return mat + + def get_root(self, index): + return self.data[index].mean(0) + + def get_mean_root(self): + return self.data.mean((0, 1)) + + def load_in_blender(self, index, mat): + vertices = self.data[index] + faces = self.faces + name = f"{str(index).zfill(4)}" + + from .tools import load_numpy_vertices_into_blender + load_numpy_vertices_into_blender(vertices, faces, name, mat) + + return name + + def __len__(self): + return self.N + + +def prepare_meshes(data, canonicalize=True, always_on_floor=False, is_smplx=False): + if canonicalize: + print("No canonicalization for now") + + # fitted mesh do not need fixing axis + # fix axis + if is_smplx: + data[..., 1] = - data[..., 1] + # data[..., 0] = - data[..., 0] + + + # Swap axis (gravity=Z 
instead of Y) + data = data[..., [2, 0, 1]] + + # Remove the floor + data[..., 2] -= data[..., 2].min() + + # Put all the body on the floor + if always_on_floor: + data[..., 2] -= data[..., 2].min(1)[:, None] + + return data diff --git a/mGPT/render/blender/render.py b/mGPT/render/blender/render.py new file mode 100644 index 0000000..5e40b3a --- /dev/null +++ b/mGPT/render/blender/render.py @@ -0,0 +1,177 @@ +import math +import os +import sys +import smplx +import bpy +import numpy as np + +from .camera import Camera +from .floor import get_trajectory, plot_floor, show_traj +from .sampler import get_frameidx +from .scene import setup_scene # noqa +from .tools import delete_objs, load_numpy_vertices_into_blender, style_detect +from .vertices import prepare_vertices +from mGPT.utils.joints import smplh_to_mmm_scaling_factor + + +def prune_begin_end(data, perc): + to_remove = int(len(data) * perc) + if to_remove == 0: + return data + return data[to_remove:-to_remove] + + +def render_current_frame(path): + bpy.context.scene.render.filepath = path + bpy.ops.render.render(use_viewport=True, write_still=True) + + +def render(npydata, + frames_folder, + *, + mode, + model_path, + faces_path, + gt=False, + exact_frame=None, + num=8, + downsample=True, + canonicalize=True, + always_on_floor=False, + denoising=True, + oldrender=True, + res="high", + init=True, + accelerator='gpu', + device=[0]): + if init: + # Setup the scene (lights / render engine / resolution etc) + setup_scene(res=res, + denoising=denoising, + oldrender=oldrender, + accelerator=accelerator, + device=device) + + is_mesh, is_smplx, jointstype = style_detect(npydata) + + if not is_mesh: + npydata = npydata * smplh_to_mmm_scaling_factor + + if is_smplx: + smplx_model_male = smplx.create(model_path, + model_type='smplx', + gender='male', + ext='npz', + num_betas=10, + flat_hand_mean=True, + use_pca=False) + faces_path = smplx_model_male.faces + + + + # Put everything in this folder + if mode == "video": + if always_on_floor: + frames_folder += "_of" + os.makedirs(frames_folder, exist_ok=True) + # if it is a mesh, it is already downsampled + if downsample and not is_mesh: + npydata = npydata[::8] + elif mode == "sequence": + img_name, ext = os.path.splitext(frames_folder) + if always_on_floor: + img_name += "_of" + img_path = f"{img_name}{ext}" + + elif mode == "frame": + img_name, ext = os.path.splitext(frames_folder) + if always_on_floor: + img_name += "_of" + img_path = f"{img_name}_{exact_frame}{ext}" + + # remove X% of begining and end + # as it is almost always static + # in this part + if mode == "sequence": + perc = 0.2 + npydata = prune_begin_end(npydata, perc) + + if is_mesh: + from .meshes import Meshes + data = Meshes(npydata, + gt=gt, + mode=mode, + faces_path=faces_path, + canonicalize=canonicalize, + always_on_floor=always_on_floor, + is_smplx=is_smplx) + else: + from .joints import Joints + data = Joints(npydata, + gt=gt, + mode=mode, + canonicalize=canonicalize, + always_on_floor=always_on_floor, + jointstype=jointstype) + + # Number of frames possible to render + nframes = len(data) + + # Show the trajectory + show_traj(data.trajectory) + + # Create a floor + plot_floor(data.data, big_plane=False) + + # initialize the camera + camera = Camera(first_root=data.get_root(0), mode=mode, is_mesh=is_mesh) + + frameidx = get_frameidx(mode=mode, + nframes=nframes, + exact_frame=exact_frame, + frames_to_keep=num) + + nframes_to_render = len(frameidx) + + # center the camera to the middle + if mode == "sequence": + 
camera.update(data.get_mean_root()) + + imported_obj_names = [] + for index, frameidx in enumerate(frameidx): + if mode == "sequence": + frac = index / (nframes_to_render - 1) + mat = data.get_sequence_mat(frac) + else: + mat = data.mat + camera.update(data.get_root(frameidx)) + + islast = index == (nframes_to_render - 1) + + objname = data.load_in_blender(frameidx, mat) + name = f"{str(index).zfill(4)}" + + if mode == "video": + path = os.path.join(frames_folder, f"frame_{name}.png") + else: + path = img_path + + if mode == "sequence": + imported_obj_names.extend(objname) + elif mode == "frame": + camera.update(data.get_root(frameidx)) + + if mode != "sequence" or islast: + render_current_frame(path) + delete_objs(objname) + + bpy.ops.wm.save_as_mainfile(filepath=frames_folder.replace('.png','.blend').replace('_frames','.blend')) + + # remove every object created + delete_objs(imported_obj_names) + delete_objs(["Plane", "myCurve", "Cylinder"]) + + if mode == "video": + return frames_folder + else: + return img_path diff --git a/mGPT/render/blender/sampler.py b/mGPT/render/blender/sampler.py new file mode 100644 index 0000000..7aa8d85 --- /dev/null +++ b/mGPT/render/blender/sampler.py @@ -0,0 +1,15 @@ +import numpy as np + +def get_frameidx(*, mode, nframes, exact_frame, frames_to_keep): + if mode == "sequence": + frameidx = np.linspace(0, nframes - 1, frames_to_keep) + frameidx = np.round(frameidx).astype(int) + frameidx = list(frameidx) + elif mode == "frame": + index_frame = int(exact_frame*nframes) + frameidx = [index_frame] + elif mode == "video": + frameidx = range(0, nframes) + else: + raise ValueError(f"Not support {mode} render mode") + return frameidx diff --git a/mGPT/render/blender/scene.py b/mGPT/render/blender/scene.py new file mode 100644 index 0000000..5b35e6c --- /dev/null +++ b/mGPT/render/blender/scene.py @@ -0,0 +1,96 @@ +import bpy +from .materials import plane_mat # noqa + + +def setup_renderer(denoising=True, oldrender=True, accelerator="gpu", device=[0]): + bpy.context.scene.render.engine = "CYCLES" + bpy.data.scenes[0].render.engine = "CYCLES" + if accelerator.lower() == "gpu": + bpy.context.preferences.addons[ + "cycles" + ].preferences.compute_device_type = "CUDA" + bpy.context.scene.cycles.device = "GPU" + i = 0 + bpy.context.preferences.addons["cycles"].preferences.get_devices() + for d in bpy.context.preferences.addons["cycles"].preferences.devices: + if i in device: # gpu id + d["use"] = 1 + print(d["name"], "".join(str(i) for i in device)) + else: + d["use"] = 0 + i += 1 + + if denoising: + bpy.context.scene.cycles.use_denoising = True + + bpy.context.scene.render.tile_x = 256 + bpy.context.scene.render.tile_y = 256 + bpy.context.scene.cycles.samples = 64 + # bpy.context.scene.cycles.denoiser = 'OPTIX' + + if not oldrender: + bpy.context.scene.view_settings.view_transform = "Standard" + bpy.context.scene.render.film_transparent = True + bpy.context.scene.display_settings.display_device = "sRGB" + bpy.context.scene.view_settings.gamma = 1.2 + bpy.context.scene.view_settings.exposure = -0.75 + + +# Setup scene +def setup_scene( + res="high", denoising=True, oldrender=True, accelerator="gpu", device=[0] +): + scene = bpy.data.scenes["Scene"] + assert res in ["ultra", "high", "med", "low"] + if res == "high": + scene.render.resolution_x = 1280 + scene.render.resolution_y = 1024 + elif res == "med": + scene.render.resolution_x = 1280 // 2 + scene.render.resolution_y = 1024 // 2 + elif res == "low": + scene.render.resolution_x = 1280 // 4 + 
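# Expected selections from get_frameidx above (hypothetical frame counts), showing
# how each render mode picks frames:
get_frameidx(mode="sequence", nframes=120, exact_frame=None, frames_to_keep=8)
# -> [0, 17, 34, 51, 68, 85, 102, 119]
get_frameidx(mode="frame", nframes=120, exact_frame=0.5, frames_to_keep=8)
# -> [60]
get_frameidx(mode="video", nframes=120, exact_frame=None, frames_to_keep=8)
# -> range(0, 120)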
scene.render.resolution_y = 1024 // 4 + elif res == "ultra": + scene.render.resolution_x = 1280 * 2 + scene.render.resolution_y = 1024 * 2 + + scene.render.film_transparent= True + world = bpy.data.worlds["World"] + world.use_nodes = True + bg = world.node_tree.nodes["Background"] + bg.inputs[0].default_value[:3] = (1.0, 1.0, 1.0) + bg.inputs[1].default_value = 1.0 + + # Remove default cube + if "Cube" in bpy.data.objects: + bpy.data.objects["Cube"].select_set(True) + bpy.ops.object.delete() + + bpy.ops.object.light_add( + type="SUN", align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + bpy.data.objects["Sun"].data.energy = 1.5 + + # rotate camera + bpy.ops.object.empty_add( + type="PLAIN_AXES", align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + bpy.ops.transform.resize( + value=(10, 10, 10), + orient_type="GLOBAL", + orient_matrix=((1, 0, 0), (0, 1, 0), (0, 0, 1)), + orient_matrix_type="GLOBAL", + mirror=True, + use_proportional_edit=False, + proportional_edit_falloff="SMOOTH", + proportional_size=1, + use_proportional_connected=False, + use_proportional_projected=False, + ) + bpy.ops.object.select_all(action="DESELECT") + + setup_renderer( + denoising=denoising, oldrender=oldrender, accelerator=accelerator, device=device + ) + return scene diff --git a/mGPT/render/blender/tools.py b/mGPT/render/blender/tools.py new file mode 100644 index 0000000..3c64ea6 --- /dev/null +++ b/mGPT/render/blender/tools.py @@ -0,0 +1,56 @@ +import bpy +import numpy as np + + +def style_detect(data): + is_mesh = False + is_smplx = False + jointstyle = 'mmm' + # heuristic + if data.shape[1] > 1000: + is_mesh = True + if data.shape[1] == 10475: + is_smplx = True + if data.shape[1] == 22: + jointstyle = 'humanml3d' + + return is_mesh, is_smplx, jointstyle + + + +# see this for more explanation +# https://gist.github.com/iyadahmed/7c7c0fae03c40bd87e75dc7059e35377 +# This should be solved with new version of blender +class ndarray_pydata(np.ndarray): + def __bool__(self) -> bool: + return len(self) > 0 + + +def load_numpy_vertices_into_blender(vertices, faces, name, mat): + mesh = bpy.data.meshes.new(name) + mesh.from_pydata(vertices, [], faces.view(ndarray_pydata)) + mesh.validate() + + obj = bpy.data.objects.new(name, mesh) + bpy.context.scene.collection.objects.link(obj) + + bpy.ops.object.select_all(action='DESELECT') + obj.select_set(True) + obj.active_material = mat + bpy.context.view_layer.objects.active = obj + bpy.ops.object.shade_smooth() + bpy.ops.object.select_all(action='DESELECT') + return True + + +def delete_objs(names): + if not isinstance(names, list): + names = [names] + # bpy.ops.object.mode_set(mode='OBJECT') + bpy.ops.object.select_all(action='DESELECT') + for obj in bpy.context.scene.objects: + for name in names: + if obj.name.startswith(name) or obj.name.endswith(name): + obj.select_set(True) + bpy.ops.object.delete() + bpy.ops.object.select_all(action='DESELECT') diff --git a/mGPT/render/blender/vertices.py b/mGPT/render/blender/vertices.py new file mode 100644 index 0000000..78be1b1 --- /dev/null +++ b/mGPT/render/blender/vertices.py @@ -0,0 +1,17 @@ +import numpy as np + + +def prepare_vertices(vertices, canonicalize=True): + data = vertices + # Swap axis (gravity=Z instead of Y) + # data = data[..., [2, 0, 1]] + + # Make left/right correct + # data[..., [1]] = -data[..., [1]] + + # Center the first root to the first frame + data -= data[[0], [0], :] + + # Remove the floor + data[..., 2] -= np.min(data[..., 2]) + return data diff --git a/mGPT/render/matplot/plot_3d_global.py 
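# How the style_detect heuristic above classifies common inputs (a sketch, not in
# the original file): the second axis distinguishes SMPL meshes (6890 vertices),
# SMPL-X meshes (10475 vertices) and HumanML3D skeletons (22 joints); everything
# else keeps the default MMM joint style.
import numpy as np

style_detect(np.zeros((60, 6890, 3)))    # -> (True, False, 'mmm')
style_detect(np.zeros((60, 10475, 3)))   # -> (True, True, 'mmm')
style_detect(np.zeros((60, 22, 3)))      # -> (False, False, 'humanml3d')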
b/mGPT/render/matplot/plot_3d_global.py new file mode 100644 index 0000000..c55d566 --- /dev/null +++ b/mGPT/render/matplot/plot_3d_global.py @@ -0,0 +1,151 @@ +import torch +import matplotlib.pyplot as plt +import numpy as np +import io +import matplotlib +from mpl_toolkits.mplot3d.art3d import Poly3DCollection +import mpl_toolkits.mplot3d.axes3d as p3 +from textwrap import wrap +import imageio + + +def plot_3d_motion(args, figsize=(10, 10), fps=120, radius=4): + matplotlib.use('Agg') + + joints, out_name, title = args + + title_sp = title.split(' ') + if len(title_sp) > 20: + title = '\n'.join([' '.join(title_sp[:10]), ' '.join(title_sp[10:20]), ' '.join(title_sp[20:])]) + elif len(title_sp) > 10: + title = '\n'.join([' '.join(title_sp[:10]), ' '.join(title_sp[10:])]) + + data = joints.copy().reshape(len(joints), -1, 3) + + nb_joints = joints.shape[1] + smpl_kinetic_chain = [ + [0, 11, 12, 13, 14, 15], [0, 16, 17, 18, 19, 20], [0, 1, 2, 3, 4], + [3, 5, 6, 7], [3, 8, 9, 10] + ] if nb_joints == 21 else [[0, 2, 5, 8, 11], [0, 1, 4, 7, 10], + [0, 3, 6, 9, 12, 15], [9, 14, 17, 19, 21], + [9, 13, 16, 18, 20]] + limits = 1000 if nb_joints == 21 else 2 + + MINS = data.min(axis=0).min(axis=0) + MAXS = data.max(axis=0).max(axis=0) + + colors = [ + 'red', 'blue', 'black', 'red', 'blue', 'darkblue', 'darkblue', + 'darkblue', 'darkblue', 'darkblue', 'darkred', 'darkred', 'darkred', + 'darkred', 'darkred' + ] + frame_number = data.shape[0] + # print(data.shape) + + height_offset = MINS[1] + data[:, :, 1] -= height_offset + trajec = data[:, 0, [0, 2]] + + data[..., 0] -= data[:, 0:1, 0] + data[..., 2] -= data[:, 0:1, 2] + + def update(index): + def init(): + ax.set_xlim3d([-radius / 2, radius / 2]) + ax.set_ylim3d([0, radius]) + ax.set_zlim3d([0, radius]) + ax.grid(b=False) + + def plot_xzPlane(minx, maxx, miny, minz, maxz): + ## Plot a plane XZ + verts = [[minx, miny, minz], [minx, miny, maxz], + [maxx, miny, maxz], [maxx, miny, minz]] + xz_plane = Poly3DCollection([verts]) + xz_plane.set_facecolor((0.5, 0.5, 0.5, 0.5)) + ax.add_collection3d(xz_plane) + + fig = plt.figure(figsize=(480 / 96., 320 / 96.), + dpi=96) if nb_joints == 21 else plt.figure( + figsize=(10, 10), dpi=96) + # fig.tight_layout() + if title is not None: + wraped_title = '\n'.join(wrap(title, 40)) + fig.suptitle(wraped_title, fontsize=16) + ax = p3.Axes3D(fig, auto_add_to_figure=False) + fig.add_axes(ax) + + init() + + # ax.lines = [] + # ax.collections = [] + ax.view_init(elev=110, azim=-90) + ax.dist = 7.5 + # ax = + plot_xzPlane(MINS[0] - trajec[index, 0], MAXS[0] - trajec[index, 0], 0, + MINS[2] - trajec[index, 1], MAXS[2] - trajec[index, 1]) + # ax.scatter(data[index, :22, 0], data[index, :22, 1], data[index, :22, 2], color='black', s=3) + + if index > 1: + ax.plot3D(trajec[:index, 0] - trajec[index, 0], + np.zeros_like(trajec[:index, 0]), + trajec[:index, 1] - trajec[index, 1], + linewidth=1.0, + color='blue') + # ax = plot_xzPlane(ax, MINS[0], MAXS[0], 0, MINS[2], MAXS[2]) + + for i, (chain, color) in enumerate(zip(smpl_kinetic_chain, colors)): + # print(color) + if i < 5: + linewidth = 4.0 + else: + linewidth = 2.0 + ax.plot3D(data[index, chain, 0], + data[index, chain, 1], + data[index, chain, 2], + linewidth=linewidth, + color=color) + # print(trajec[:index, 0].shape) + + plt.axis('off') + + ax.set_xticklabels([]) + ax.set_yticklabels([]) + ax.set_zticklabels([]) + + if out_name is not None: + plt.savefig(out_name, dpi=96) + plt.close() + + else: + io_buf = io.BytesIO() + fig.savefig(io_buf, format='raw', dpi=96) + 
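# Sketch of a direct call to plot_3d_motion (the clip below is made up): it takes a
# single packed argument (joints, out_name, title). With out_name=None the rendered
# frames come back as a uint8 tensor rather than being written to a PNG;
# draw_to_batch further down wraps this over a batch and can write a GIF via imageio.
import numpy as np

joints = np.random.rand(40, 22, 3).astype(np.float32)
frames = plot_3d_motion((joints, None, "a person waves with the right hand"))
# frames: torch.uint8 tensor of shape (40, H, W, 4)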
io_buf.seek(0) + # print(fig.bbox.bounds) + arr = np.reshape(np.frombuffer(io_buf.getvalue(), dtype=np.uint8), + newshape=(int(fig.bbox.bounds[3]), + int(fig.bbox.bounds[2]), -1)) + io_buf.close() + plt.close() + return arr + + out = [] + for i in range(frame_number): + out.append(update(i)) + out = np.stack(out, axis=0) + return torch.from_numpy(out) + + +def draw_to_batch(smpl_joints_batch, title_batch=None, outname=None): + + batch_size = len(smpl_joints_batch) + out = [] + for i in range(batch_size): + out.append( + plot_3d_motion([ + smpl_joints_batch[i], None, + title_batch[i] if title_batch is not None else None + ])) + if outname is not None: + imageio.mimsave(outname[i], np.array(out[-1]), duration=50) + out = torch.stack(out, axis=0) + return out diff --git a/mGPT/render/pyrender/hybrik_loc2rot.py b/mGPT/render/pyrender/hybrik_loc2rot.py new file mode 100644 index 0000000..5739617 --- /dev/null +++ b/mGPT/render/pyrender/hybrik_loc2rot.py @@ -0,0 +1,140 @@ +import numpy as np + +SMPL_BODY_BONES = [-0.0018, -0.2233, 0.0282, 0.0695, -0.0914, -0.0068, -0.0677, -0.0905, -0.0043, + -0.0025, 0.1090, -0.0267, 0.0343, -0.3752, -0.0045, -0.0383, -0.3826, -0.0089, + 0.0055, 0.1352, 0.0011, -0.0136, -0.3980, -0.0437, 0.0158, -0.3984, -0.0423, + 0.0015, 0.0529, 0.0254, 0.0264, -0.0558, 0.1193, -0.0254, -0.0481, 0.1233, + -0.0028, 0.2139, -0.0429, 0.0788, 0.1217, -0.0341, -0.0818, 0.1188, -0.0386, + 0.0052, 0.0650, 0.0513, 0.0910, 0.0305, -0.0089, -0.0960, 0.0326, -0.0091, + 0.2596, -0.0128, -0.0275, -0.2537, -0.0133, -0.0214, 0.2492, 0.0090, -0.0012, + -0.2553, 0.0078, -0.0056, 0.0840, -0.0082, -0.0149, -0.0846, -0.0061, -0.0103] + + +class HybrIKJointsToRotmat: + def __init__(self): + self.naive_hybrik = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0] + self.num_nodes = 22 + self.parents = [0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 12, 13, 14, 16, 17, 18, 19] + self.child = [-1, 4, 5, 6, 7, 8, 9, 10, 11, -1, -2, -2, 15, + 16, 17, -2, 18, 19, 20, 21, -2, -2] + self.bones = np.reshape(np.array(SMPL_BODY_BONES), [24, 3])[:self.num_nodes] + + def multi_child_rot(self, t, p, + pose_global_parent): + """ + t: B x 3 x child_num + p: B x 3 x child_num + pose_global_parent: B x 3 x 3 + """ + m = np.matmul(t, np.transpose(np.matmul(np.linalg.inv(pose_global_parent), p), [0, 2, 1])) + u, s, vt = np.linalg.svd(m) + r = np.matmul(np.transpose(vt, [0, 2, 1]), np.transpose(u, [0, 2, 1])) + err_det_mask = (np.linalg.det(r) < 0.0).reshape(-1, 1, 1) + id_fix = np.reshape(np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, -1.0]]), + [1, 3, 3]) + r_fix = np.matmul(np.transpose(vt, [0, 2, 1]), + np.matmul(id_fix, + np.transpose(u, [0, 2, 1]))) + r = r * (1.0 - err_det_mask) + r_fix * err_det_mask + return r, np.matmul(pose_global_parent, r) + + def single_child_rot(self, t, p, pose_global_parent, twist=None): + """ + t: B x 3 x 1 + p: B x 3 x 1 + pose_global_parent: B x 3 x 3 + twist: B x 2 if given, default to None + """ + p_rot = np.matmul(np.linalg.inv(pose_global_parent), p) + cross = np.cross(t, p_rot, axisa=1, axisb=1, axisc=1) + sina = np.linalg.norm(cross, axis=1, keepdims=True) / (np.linalg.norm(t, axis=1, keepdims=True) * + np.linalg.norm(p_rot, axis=1, keepdims=True)) + cross = cross / np.linalg.norm(cross, axis=1, keepdims=True) + cosa = np.sum(t * p_rot, axis=1, keepdims=True) / (np.linalg.norm(t, axis=1, keepdims=True) * + np.linalg.norm(p_rot, axis=1, keepdims=True)) + sina = np.reshape(sina, [-1, 1, 1]) + cosa = np.reshape(cosa, [-1, 1, 1]) + skew_sym_t = 
np.stack([0.0 * cross[:, 0], -cross[:, 2], cross[:, 1], + cross[:, 2], 0.0 * cross[:, 0], -cross[:, 0], + -cross[:, 1], cross[:, 0], 0.0 * cross[:, 0]], 1) + skew_sym_t = np.reshape(skew_sym_t, [-1, 3, 3]) + dsw_rotmat = np.reshape(np.eye(3), [1, 3, 3] + ) + sina * skew_sym_t + (1.0 - cosa) * np.matmul(skew_sym_t, + skew_sym_t) + if twist is not None: + skew_sym_t = np.stack([0.0 * t[:, 0], -t[:, 2], t[:, 1], + t[:, 2], 0.0 * t[:, 0], -t[:, 0], + -t[:, 1], t[:, 0], 0.0 * t[:, 0]], 1) + skew_sym_t = np.reshape(skew_sym_t, [-1, 3, 3]) + sina = np.reshape(twist[:, 1], [-1, 1, 1]) + cosa = np.reshape(twist[:, 0], [-1, 1, 1]) + dtw_rotmat = np.reshape(np.eye(3), [1, 3, 3] + ) + sina * skew_sym_t + (1.0 - cosa) * np.matmul(skew_sym_t, + skew_sym_t) + dsw_rotmat = np.matmul(dsw_rotmat, dtw_rotmat) + return dsw_rotmat, np.matmul(pose_global_parent, dsw_rotmat) + + def __call__(self, joints, twist=None): + """ + joints: B x N x 3 + twist: B x N x 2 if given, default to None + """ + expand_dim = False + if len(joints.shape) == 2: + expand_dim = True + joints = np.expand_dims(joints, 0) + if twist is not None: + twist = np.expand_dims(twist, 0) + assert (len(joints.shape) == 3) + batch_size = np.shape(joints)[0] + joints_rel = joints - joints[:, self.parents] + joints_hybrik = 0.0 * joints_rel + pose_global = np.zeros([batch_size, self.num_nodes, 3, 3]) + pose = np.zeros([batch_size, self.num_nodes, 3, 3]) + for i in range(self.num_nodes): + if i == 0: + joints_hybrik[:, 0] = joints[:, 0] + else: + joints_hybrik[:, i] = np.matmul(pose_global[:, self.parents[i]], + np.reshape(self.bones[i], [1, 3, 1])).reshape(-1, 3) + \ + joints_hybrik[:, self.parents[i]] + if self.child[i] == -2: + pose[:, i] = pose[:, i] + np.eye(3).reshape(1, 3, 3) + pose_global[:, i] = pose_global[:, self.parents[i]] + continue + if i == 0: + r, rg = self.multi_child_rot(np.transpose(self.bones[[1, 2, 3]].reshape(1, 3, 3), [0, 2, 1]), + np.transpose(joints_rel[:, [1, 2, 3]], [0, 2, 1]), + np.eye(3).reshape(1, 3, 3)) + + elif i == 9: + r, rg = self.multi_child_rot(np.transpose(self.bones[[12, 13, 14]].reshape(1, 3, 3), [0, 2, 1]), + np.transpose(joints_rel[:, [12, 13, 14]], [0, 2, 1]), + pose_global[:, self.parents[9]]) + else: + p = joints_rel[:, self.child[i]] + if self.naive_hybrik[i] == 0: + p = joints[:, self.child[i]] - joints_hybrik[:, i] + twi = None + if twist is not None: + twi = twist[:, i] + r, rg = self.single_child_rot(self.bones[self.child[i]].reshape(1, 3, 1), + p.reshape(-1, 3, 1), + pose_global[:, self.parents[i]], + twi) + pose[:, i] = r + pose_global[:, i] = rg + if expand_dim: + pose = pose[0] + return pose + + +if __name__ == "__main__": + jts2rot_hybrik = HybrIKJointsToRotmat() + joints = np.array(SMPL_BODY_BONES).reshape(1, 24, 3)[:, :22] + parents = [0, 0, 0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 9, 12, 13, 14, 16, 17, 18, 19] + for i in range(1, 22): + joints[:, i] = joints[:, i] + joints[:, parents[i]] + pose = jts2rot_hybrik(joints) + print(pose) diff --git a/mGPT/render/pyrender/j3ds_render_smpl.py b/mGPT/render/pyrender/j3ds_render_smpl.py new file mode 100644 index 0000000..3c8bd86 --- /dev/null +++ b/mGPT/render/pyrender/j3ds_render_smpl.py @@ -0,0 +1,48 @@ +import os +import argparse +import numpy as np +from scripts.hybrik_loc2rot import HybrIKJointsToRotmat +from scripts.pyrender import SMPLRender +import cv2 +from scipy.spatial.transform import Rotation as RRR + +parser = argparse.ArgumentParser( + description='Render a SMPL video by a j3ds npy file.') +parser.add_argument('--input', type=str, 
default='', help='the npy file path') +parser.add_argument('--render', + type=int, + default=1, + help='render the video if 1') +args = parser.parse_args() + +input_path = args.input +output_npy_path = args.input.replace('.npy', '_pose.npy') +data = np.load(input_path) +data = data - data[0, 0] +pose_generator = HybrIKJointsToRotmat() +pose = pose_generator(data) +pose = np.concatenate( + [pose, np.stack([np.stack([np.eye(3)] * pose.shape[0], 0)] * 2, 1)], 1) +np.save(output_npy_path, pose) +shape = [768, 768] +if args.render: + render = SMPLRender() + output_mp4_path = args.input.replace('.npy', '_smpl.mp4') + os.environ['PYOPENGL_PLATFORM'] = 'egl' + size = (shape[1], shape[0]) + fps = 30.0 + fourcc = cv2.VideoWriter_fourcc('M', 'P', '4', 'V') + videoWriter = cv2.VideoWriter(output_mp4_path, fourcc, fps, size) + r = RRR.from_rotvec(np.array([np.pi, 0.0, 0.0])) + pose[:, 0] = np.matmul(r.as_matrix().reshape(1, 3, 3), pose[:, 0]) + for i in range(data.shape[0]): + img = np.zeros([shape[0], shape[1], 3]) + aroot = data[[i], 0] + np.array([[0.0, 0.0, 30.0]]) + aroot[:, 1] = -aroot[:, 1] + params = dict(pred_shape=np.zeros([1, 10]), + pred_root=aroot, + pred_pose=pose[[i]]) + renderImg = render.render(img.copy(), params) + renderImg = (renderImg * 255).astype(np.uint8) + videoWriter.write(renderImg) + videoWriter.release() diff --git a/mGPT/render/pyrender/smpl_render.py b/mGPT/render/pyrender/smpl_render.py new file mode 100644 index 0000000..62623d8 --- /dev/null +++ b/mGPT/render/pyrender/smpl_render.py @@ -0,0 +1,130 @@ +import os + +os.environ['PYOPENGL_PLATFORM'] = 'egl' +import torch +import numpy as np +import cv2 + +import matplotlib.pyplot as plt +import glob +import pickle +import pyrender +import trimesh +from smplx import SMPL as _SMPL +from smplx.utils import SMPLOutput as ModelOutput +from scipy.spatial.transform.rotation import Rotation as RRR + +class SMPL(_SMPL): + """ Extension of the official SMPL implementation to support more joints """ + + def __init__(self, *args, **kwargs): + super(SMPL, self).__init__(*args, **kwargs) + # joints = [constants.JOINT_MAP[i] for i in constants.JOINT_NAMES] + # J_regressor_extra = np.load(config.JOINT_REGRESSOR_TRAIN_EXTRA) + # self.register_buffer('J_regressor_extra', torch.tensor(J_regressor_extra, dtype=torch.float32)) + # self.joint_map = torch.tensor(joints, dtype=torch.long) + + def forward(self, *args, **kwargs): + kwargs['get_skin'] = True + smpl_output = super(SMPL, self).forward(*args, **kwargs) + # extra_joints = vertices2joints(self.J_regressor_extra, smpl_output.vertices) #Additional 9 joints #Check doc/J_regressor_extra.png + # joints = torch.cat([smpl_output.joints, extra_joints], dim=1) #[N, 24 + 21, 3] + [N, 9, 3] + # joints = joints[:, self.joint_map, :] + joints = smpl_output.joints + output = ModelOutput(vertices=smpl_output.vertices, + global_orient=smpl_output.global_orient, + body_pose=smpl_output.body_pose, + joints=joints, + betas=smpl_output.betas, + full_pose=smpl_output.full_pose) + return output + +class Renderer: + """ + Renderer used for visualizing the SMPL model + Code adapted from https://github.com/vchoutas/smplify-x + """ + def __init__(self, focal_length=5000, img_res=(224,224), faces=None): + self.renderer = pyrender.OffscreenRenderer(viewport_width=img_res[0], + viewport_height=img_res[1], + point_size=1.0) + self.focal_length = focal_length + self.camera_center = [img_res[0] // 2, img_res[1] // 2] + self.faces = faces + def __call__(self, vertices, camera_translation, image): + material = 
pyrender.MetallicRoughnessMaterial( + metallicFactor=0.2, + alphaMode='OPAQUE', + baseColorFactor=(0.8, 0.3, 0.3, 1.0)) + + camera_translation[0] *= -1. + + mesh = trimesh.Trimesh(vertices, self.faces) + rot = trimesh.transformations.rotation_matrix( + np.radians(180), [1, 0, 0]) + mesh.apply_transform(rot) + mesh = pyrender.Mesh.from_trimesh(mesh, material=material) + + scene = pyrender.Scene(bg_color=(1.,1.,1.),ambient_light=(0.5, 0.5, 0.5)) + scene.add(mesh, 'mesh') + + camera_pose = np.eye(4) + camera_pose[:3, 3] = camera_translation + camera = pyrender.IntrinsicsCamera(fx=self.focal_length, fy=self.focal_length, + cx=self.camera_center[0], cy=self.camera_center[1]) + scene.add(camera, pose=camera_pose) + + + light = pyrender.DirectionalLight(color=[1.0, 1.0, 1.0], intensity=1) + light_pose = np.eye(4) + + light_pose[:3, 3] = np.array([0, -1, 1]) + scene.add(light, pose=light_pose) + + light_pose[:3, 3] = np.array([0, 1, 1]) + scene.add(light, pose=light_pose) + + light_pose[:3, 3] = np.array([1, 1, 2]) + scene.add(light, pose=light_pose) + + color, rend_depth = self.renderer.render(scene, flags=pyrender.RenderFlags.RGBA) + color = color.astype(np.float32) / 255.0 + valid_mask = (rend_depth > 0)[:,:,None] + output_img = (color[:, :, :3] * valid_mask + + (1 - valid_mask) * image) + return output_img + +class SMPLRender(): + def __init__(self, SMPL_MODEL_DIR): + self.device = 'cuda' + self.smpl = SMPL(SMPL_MODEL_DIR, + batch_size=1, + create_transl=False).to(self.device) + + self.focal_length = 5000 + + def render(self, image, smpl_param, is_headroot=False): + pose = smpl_param['pred_pose'] + if pose.size==72: + pose = pose.reshape(-1,3) + pose = RRR.from_rotvec(pose).as_matrix() + pose = pose.reshape(1,24,3,3) + pred_betas = torch.from_numpy(smpl_param['pred_shape'].reshape(1, 10).astype(np.float32)).to(self.device) + pred_rotmat = torch.from_numpy(pose.astype(np.float32)).to(self.device) + pred_camera_t = smpl_param['pred_root'].reshape(1, 3).astype(np.float32) + smpl_output = self.smpl(betas=pred_betas, body_pose=pred_rotmat[:, 1:], + global_orient=pred_rotmat[:, 0].unsqueeze(1), pose2rot=False) + + + vertices = smpl_output.vertices[0].detach().cpu().numpy() + pred_camera_t = pred_camera_t[0] + + if is_headroot: + pred_camera_t = pred_camera_t - smpl_output.joints[0,12].detach().cpu().numpy() + + renderer = Renderer(focal_length=self.focal_length, + img_res=(image.shape[1], image.shape[0]), faces=self.smpl.faces) + + renderImg = renderer(vertices, pred_camera_t.copy(), image / 255.0) + renderer.renderer.delete() + return renderImg diff --git a/mGPT/render/renderer.py b/mGPT/render/renderer.py new file mode 100644 index 0000000..2dd66ab --- /dev/null +++ b/mGPT/render/renderer.py @@ -0,0 +1,179 @@ +""" +This script is borrowed from https://github.com/mkocabas/VIBE + Adhere to their licence to use this script + It has been modified +""" + +import os +import math +import trimesh + +import pyrender +import numpy as np +from pyrender.constants import RenderFlags + + +# os.environ['DISPLAY'] = ':0.0' +# os.environ['PYOPENGL_PLATFORM'] = 'egl' +# os.environ['PYOPENGL_PLATFORM'] = 'osmesa' +SMPL_MODEL_DIR = "data/smpl_data/" + + +def get_smpl_faces(): + return np.load(os.path.join(SMPL_MODEL_DIR, "smplfaces.npy")) + + +class WeakPerspectiveCamera(pyrender.Camera): + def __init__(self, + scale, + translation, + znear=pyrender.camera.DEFAULT_Z_NEAR, + zfar=None, + name=None): + super(WeakPerspectiveCamera, self).__init__( + znear=znear, + zfar=zfar, + name=name, + ) + self.scale = scale 
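+ # Weak-perspective parameters: `scale` holds the per-axis (sx, sy) screen
+ # scale and `translation` the (tx, ty) offset; both are folded into the
+ # orthographic projection matrix built in get_projection_matrix().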
+ self.translation = translation + + def get_projection_matrix(self, width=None, height=None): + P = np.eye(4) + P[0, 0] = self.scale[0] + P[1, 1] = self.scale[1] + P[0, 3] = self.translation[0] * self.scale[0] + P[1, 3] = -self.translation[1] * self.scale[1] + P[2, 2] = -1 + return P + + +class Renderer: + def __init__(self, background=None, resolution=(224, 224), bg_color=[0, 0, 0, 0.5], orig_img=False, wireframe=False, cam_pose=np.eye(4)): + width, height = resolution + self.background = np.zeros((height, width, 3)) + self.resolution = resolution + + self.faces = get_smpl_faces() + self.orig_img = orig_img + self.wireframe = wireframe + self.renderer = pyrender.OffscreenRenderer( + viewport_width=self.resolution[0], + viewport_height=self.resolution[1], + point_size=0.5 + ) + + # set the scene + self.scene = pyrender.Scene(bg_color=bg_color, ambient_light=(0.4, 0.4, 0.4)) + + light = pyrender.PointLight(color=[1.0, 1.0, 1.0], intensity=4) + + + light_pose = np.eye(4) + light_pose[:3, 3] = [0, -1, 1] + self.scene.add(light, pose=np.dot(cam_pose,light_pose).copy()) + + light_pose[:3, 3] = [0, 1, 1] + self.scene.add(light, pose=np.dot(cam_pose,light_pose).copy()) + + light_pose[:3, 3] = [1, 1, 2] + self.scene.add(light, pose=np.dot(cam_pose,light_pose).copy()) + + """ok + light_pose = np.eye(4) + light_pose[:3, 3] = [0, -1, 1] + self.scene.add(light, pose=light_pose) + + light_pose[:3, 3] = [0, 1, 1] + self.scene.add(light, pose=light_pose) + + light_pose[:3, 3] = [1, 1, 2] + self.scene.add(light, pose=light_pose) + """ + + # light_pose[:3, 3] = [0, -2, 2] + # [droite, hauteur, profondeur camera] + """ + light_pose = np.eye(4) + light_pose[:3, 3] = [0, -1, 1] + self.scene.add(light, pose=light_pose) + + light_pose[:3, 3] = [0, 1, 1] + self.scene.add(light, pose=light_pose) + + light_pose[:3, 3] = [1, 1, 2] + self.scene.add(light, pose=light_pose) + """ + + def render(self, img, verts, cam, angle=None, axis=None, mesh_filename=None, color=[1.0, 1.0, 0.9], + cam_pose=np.eye(4)): + mesh = trimesh.Trimesh(vertices=verts, faces=self.faces, process=False) + Rx = trimesh.transformations.rotation_matrix(math.radians(180), [1, 0, 0]) + # Rx = trimesh.transformations.rotation_matrix(math.radians(-90), [1, 0, 0]) + mesh.apply_transform(Rx) + + if mesh_filename is not None: + mesh.export(mesh_filename) + + if angle and axis: + R = trimesh.transformations.rotation_matrix(math.radians(angle), axis) + mesh.apply_transform(R) + + sx, sy, tx, ty = cam + + camera = WeakPerspectiveCamera( + scale=[sx, sy], + translation=[tx, ty], + zfar=100000. 
+ ) + + material = pyrender.MetallicRoughnessMaterial( + metallicFactor=0.0, # 0.0 for no specular lighting + # metallicFactor=0.7, # 0.0 for no specular lighting + alphaMode='OPAQUE', + baseColorFactor=(color[0], color[1], color[2], 1.0) + ) + + mesh = pyrender.Mesh.from_trimesh(mesh, material=material) + + mesh_node = self.scene.add(mesh, 'mesh') + + cam_node = self.scene.add(camera, pose=cam_pose) + + if self.wireframe: + render_flags = RenderFlags.RGBA | RenderFlags.ALL_WIREFRAME + else: + render_flags = RenderFlags.RGBA + + rgb, _ = self.renderer.render(self.scene, flags=render_flags) + if rgb.shape[-1]==3: + # Debug + # 0 not distinguish alpha + valid_mask = (rgb[:, :, -1] > 0)[:, :, np.newaxis] + output_img = rgb * valid_mask + (1 - valid_mask) * img + elif rgb.shape[-1]==4: + # valid_mask = (rgb[:, :, -1] > 128)[:, :, np.newaxis] + # output_img = rgb[:, :, :-1] * valid_mask + (1 - valid_mask) * img + + # # output alpha + valid_mask = (rgb[:, :, -1] > 128)[:, :] + output_img = np.copy(rgb) + output_img[:, :, -1] *= valid_mask + # output_img = img + else: + raise ValueError(f"rgb shape {rgb.shape[-1]} is not correct!") + image = output_img.astype(np.uint8) + + self.scene.remove_node(mesh_node) + self.scene.remove_node(cam_node) + + return image + + +def get_renderer(width, height, cam_pose): + renderer = Renderer(resolution=(width, height), + bg_color=[1, 1, 1, 0.5], + orig_img=False, + wireframe=False, + cam_pose=cam_pose) + return renderer diff --git a/mGPT/render/rendermotion.py b/mGPT/render/rendermotion.py new file mode 100644 index 0000000..f3d29d9 --- /dev/null +++ b/mGPT/render/rendermotion.py @@ -0,0 +1,134 @@ +import numpy as np +import imageio +import os +import argparse +from tqdm import tqdm +from .renderer import get_renderer + + +def get_rotation(theta=np.pi / 3): + import mGPT.utils.rotation_conversions as geometry + import torch + axis = torch.tensor([0, 1, 0], dtype=torch.float) + axisangle = theta * axis + matrix = geometry.axis_angle_to_matrix(axisangle) + return matrix.numpy() + + +def render_video(meshes, + key, + action, + renderer, + savepath, + backgrounds, + cam_pose, + cams=(0.75, 0.75, 0, 0.10), + color=[0.11, 0.53, 0.8]): + # cams=(0.75, 0.75, 0, 0.10), color=[165.0/255,112/255,140/255]): + # center the first frame + if key not in ["real", "ntf", "side"]: + w = int(key) / 6.0 + # purpole to green + # color = w*np.array([0.9,102/255,120/255]) + (1-w)*np.array([0.11, 0.9, 0.11]) + # color = (1-w)*np.array([165.0/255,112/255,140/255]) + w*np.array([0.11, 0.8, 0.11]) + color = (1 - w) * np.array([0.75, 0.13, 0.7]) + w * np.array( + [0.12, 0.7, 0.14]) + + meshes = meshes - meshes[0].mean(axis=0) + imgs = [] + idx = 0 + # for mesh in meshes: + for mesh in tqdm(meshes, desc=f"Visualize {key}, action {action}"): + # file_name = '3dpw_rot-90_glob_trimesh.ply' mesh_filename=file_name, + # prepare background + if len(backgrounds.shape) == 3: + background = backgrounds + cam = cams + elif len(backgrounds.shape) == 4: + background = backgrounds[idx] + cam = cams[idx] + idx += 1 + # prepare cams + img = renderer.render(background, + mesh, + cam, + color=color, + cam_pose=cam_pose) + imgs.append(img) + # show(img) + + imgs = np.array(imgs) + # masks = ~(imgs/255. 
> 0.96).all(-1) + # coords = np.argwhere(masks.sum(axis=0)) + # y1, x1 = coords.min(axis=0) + # y2, x2 = coords.max(axis=0) + # writer = imageio.get_writer(savepath, fps=30) + # for cimg in imgs[:, y1:y2, x1:x2]: + # writer.append_data(cimg) + # writer.close() + + # from mld.utils.uicap_utils import write_rgba_seqs + # write_rgba_seqs(imgs, savepath) + + writer = imageio.get_writer(savepath, fps=30) + for cimg in imgs: + writer.append_data(cimg) + writer.close() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("filename") + opt = parser.parse_args() + filename = opt.filename + savefolder = os.path.splitext(filename)[0] + os.makedirs(savefolder, exist_ok=True) + + output = np.load(filename) + + if output.shape[0] == 3: + visualization, generation, reconstruction = output + output = { + "visualization": visualization, + "generation": generation, + "reconstruction": reconstruction + } + else: + # output = {f"generation_{key}": output[key] for key in range(2)} # len(output))} + # output = {f"generation_{key}": output[key] for key in range(len(output))} + output = { + f"generation_{key}": output[key] + for key in range(len(output)) + } + + width = 1024 + height = 1024 + + background = np.zeros((height, width, 3)) + renderer = get_renderer(width, height) + + # if duration mode, put back durations + if output["generation_3"].shape[-1] == 100: + output["generation_0"] = output["generation_0"][:, :, :, :40] + output["generation_1"] = output["generation_1"][:, :, :, :60] + output["generation_2"] = output["generation_2"][:, :, :, :80] + output["generation_3"] = output["generation_3"][:, :, :, :100] + elif output["generation_3"].shape[-1] == 160: + print("160 mode") + output["generation_0"] = output["generation_0"][:, :, :, :100] + output["generation_1"] = output["generation_1"][:, :, :, :120] + output["generation_2"] = output["generation_2"][:, :, :, :140] + output["generation_3"] = output["generation_3"][:, :, :, :160] + + # if str(action) == str(1) and str(key) == "generation_4": + for key in output: + vidmeshes = output[key] + for action in range(len(vidmeshes)): + meshes = vidmeshes[action].transpose(2, 0, 1) + path = os.path.join(savefolder, + "action{}_{}.mp4".format(action, key)) + render_video(meshes, key, action, renderer, path, background) + + +if __name__ == "__main__": + main() diff --git a/mGPT/render/video.py b/mGPT/render/video.py new file mode 100644 index 0000000..d0d4eeb --- /dev/null +++ b/mGPT/render/video.py @@ -0,0 +1,67 @@ +import moviepy.editor as mp +import moviepy.video.fx.all as vfx +import os +import imageio + + +def mask_png(frames): + for frame in frames: + im = imageio.imread(frame) + im[im[:, :, 3] < 1, :] = 255 + imageio.imwrite(frame, im[:, :, 0:3]) + return + + +class Video: + def __init__(self, frame_path: str, fps: float = 12.5, res="high"): + frame_path = str(frame_path) + self.fps = fps + + self._conf = {"codec": "libx264", + "fps": self.fps, + "audio_codec": "aac", + "temp_audiofile": "temp-audio.m4a", + "remove_temp": True} + + if res == "low": + bitrate = "500k" + else: + bitrate = "5000k" + + self._conf = {"bitrate": bitrate, + "fps": self.fps} + + # Load video + # video = mp.VideoFileClip(video1_path, audio=False) + # Load with frames + frames = [os.path.join(frame_path, x) + for x in sorted(os.listdir(frame_path))] + + # mask background white for videos + mask_png(frames) + + video = mp.ImageSequenceClip(frames, fps=fps) + self.video = video + self.duration = video.duration + + def add_text(self, text): + # needs ImageMagick + 
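+ # TextClip renders the caption through ImageMagick, so the binary must be
+ # installed (moviepy's ImageMagick binary setting can point to it if it is
+ # not on the default search path); method='caption' wraps the text to the
+ # clip width given in `size`.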
video_text = mp.TextClip(text, + font='Amiri', + color='white', + method='caption', + align="center", + size=(self.video.w, None), + fontsize=30) + video_text = video_text.on_color(size=(self.video.w, video_text.h + 5), + color=(0, 0, 0), + col_opacity=0.6) + # video_text = video_text.set_pos('bottom') + video_text = video_text.set_pos('top') + + self.video = mp.CompositeVideoClip([self.video, video_text]) + + def save(self, out_path): + out_path = str(out_path) + self.video.subclip(0, self.duration).write_videofile( + out_path, **self._conf) diff --git a/mGPT/render/visualize.py b/mGPT/render/visualize.py new file mode 100644 index 0000000..7cc9c6c --- /dev/null +++ b/mGPT/render/visualize.py @@ -0,0 +1,747 @@ +from operator import mod +import os +# from cv2 import CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE +import imageio +import shutil +import numpy as np +import torch +from tqdm import tqdm + +from scipy.spatial.transform import Rotation as R +from mGPT.render.renderer import get_renderer +from mGPT.render.rendermotion import render_video +# from mld.utils.img_utils import convert_img +# from mld.utils.uicap_utils import output_pkl + + +def parsename(path): + basebane = os.path.basename(path) + base = os.path.splitext(basebane)[0] + strs = base.split('_') + key = strs[-2] + action = strs[-1] + return key, action + + +def load_anim(path, timesize=None): + data = np.array(imageio.mimread(path, memtest=False)) #[..., :3] + if timesize is None: + return data + + # take the last frame and put shadow repeat the last frame but with a little shadow + # lastframe = add_shadow(data[-1]) + # alldata = np.tile(lastframe, (timesize, 1, 1, 1)) + alldata = data + + # debug fix mat dim + if len(data.shape) == 3 and len(alldata.shape) == 4: + data = data[:, None, :, :] + + # copy the first frames + lenanim = data.shape[0] + alldata[:lenanim] = data[:lenanim] + return alldata + + +def plot_3d_motion_dico(x): + motion, length, save_path, params, kargs = x + plot_3d_motion(motion, length, save_path, params, **kargs) + + +def plot_3d_motion(motion, + length, + save_path, + params, + title="", + interval=50, + pred_cam=None, + imgs=None, + bbox=None, + side=None): + # render smpl + # [nframes, nVs, 3] + if motion.shape[1] == 6890: + # width = 250 + # height = 250 + width = 600 + height = 600 + if pred_cam is None: + # cam=(0.75, 0.75, 0, 0.1) + cam = (0.8, 0.8, 0, 0.1) + # cam=(0.9, 0.9, 0, 0.1) + else: + assert bbox is not None + assert imgs is not None + + # Tmp visulize + # weak perspective camera parameters in cropped image space (s,tx,ty) + # to + # weak perspective camera parameters in original image space (sx,sy,tx,ty) + cam = np.concatenate( + (pred_cam[:, [0]], pred_cam[:, [0]], pred_cam[:, 1:3]), axis=1) + + # ToDo convert to original cam + # load original img? + # calculate cam after padding??? 
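+ # e.g. a cropped-space prediction pred_cam = [[0.9, 0.05, -0.02]] becomes
+ # cam = [[0.9, 0.9, 0.05, -0.02]] after the concatenation above, duplicating
+ # the single scale for both image axes.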
+ # + # cam = convert_crop_cam_to_orig_img( + # cam=pred_cam, + # bbox=bbox, + # img_width=width, + # img_height=height + # ) + cam_pose = np.eye(4) + cam_pose[0:3, 0:3] = R.from_euler('x', -90, degrees=True).as_matrix() + cam_pose[0:3, 3] = [0, 0, 0] + if side: + rz = np.eye(4) + rz[0:3, 0:3] = R.from_euler('z', -90, degrees=True).as_matrix() + cam_pose = np.matmul(rz, cam_pose) + + # # reshape input imgs + # if imgs is not None: + # imgs = convert_img(imgs.unsqueeze(0), height)[:,0] + backgrounds = imgs if imgs is not None else np.ones( + (height, width, 3)) * 255 + renderer = get_renderer(width, height, cam_pose) + + # [nframes, nVs, 3] + meshes = motion + key, action = parsename(save_path) + render_video(meshes, + key, + action, + renderer, + save_path, + backgrounds, + cam_pose, + cams=cam) + return + + +def stack_images(real, real_gens, gen, real_imgs=None): + # change to 3 channel + # print(real.shape) + # print(real_gens.shape) + # print(real_gens.shape) + # real = real[:3] + # real_gens = real_gens[:3] + # gen = gen[:3] + + nleft_cols = len(real_gens) + 1 + print("Stacking frames..") + allframes = np.concatenate( + (real[:, None, ...], *[x[:, None, ...] for x in real_gens], gen), 1) + nframes, nspa, nats, h, w, pix = allframes.shape + + blackborder = np.zeros((w // 30, h * nats, pix), dtype=allframes.dtype) + # blackborder = np.ones((w//30, h*nats, pix), dtype=allframes.dtype)*255 + frames = [] + for frame_idx in tqdm(range(nframes)): + columns = np.vstack(allframes[frame_idx].transpose(1, 2, 3, 4, + 0)).transpose( + 3, 1, 0, 2) + frame = np.concatenate( + (*columns[0:nleft_cols], blackborder, *columns[nleft_cols:]), + 0).transpose(1, 0, 2) + + frames.append(frame) + + if real_imgs is not None: + resize_imgs = convert_img(real_imgs, h)[:nframes, ...] + + for i in range(len(frames)): + imgs = np.vstack(resize_imgs[i, ...]) + imgs4 = np.ones( + (imgs.shape[0], imgs.shape[1], 4), dtype=np.uint8) * 255 + imgs4[:, :, :3] = imgs + #imgs = torch2numpy(imgs) + frames[i] = np.concatenate((imgs4, frames[i]), 1) + return np.stack(frames) + + +def stack_images_gen(gen, real_imgs=None): + print("Stacking frames..") + allframes = gen + nframes, nspa, nats, h, w, pix = allframes.shape + blackborder = np.zeros((w * nspa, h // 30, pix), dtype=allframes.dtype) + blackborder = blackborder[None, ...].repeat(nats, + axis=0).transpose(0, 2, 1, 3) + + frames = [] + for frame_idx in tqdm(range(nframes)): + rows = np.vstack(allframes[frame_idx].transpose(0, 3, 2, 4, + 1)).transpose( + 3, 1, 0, 2) + rows = np.concatenate((rows, blackborder), 1) + frame = np.concatenate(rows, 0) + frames.append(frame) + + if real_imgs is not None: + # ToDo Add images + resize_imgs = convert_img(real_imgs, h)[:nframes, ...] 
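+ # Resize the reference images to the frame height and prepend them to each
+ # stacked frame so the inputs and the renders sit side by side in the
+ # final video.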
+ for i in range(len(frames)): + imgs = np.vstack(resize_imgs[i, ...]) + #imgs = torch2numpy(imgs) + frames[i] = np.concatenate((imgs, frames[i]), 1) + return np.stack(frames) + + +def generate_by_video(visualization, reconstructions, generation, + label_to_action_name, params, nats, nspa, tmp_path): + # shape : (17, 3, 4, 480, 640, 3) + # (nframes, row, column, h, w, 3) + fps = params["fps"] + + params = params.copy() + + gen_only = False + if visualization is None: + gen_only = True + outputkey = "output_vertices" + params["pose_rep"] = "vertices" + elif "output_vertices" in visualization: + outputkey = "output_vertices" + params["pose_rep"] = "vertices" + elif "output_xyz" in visualization: + outputkey = "output_xyz" + params["pose_rep"] = "xyz" + else: + outputkey = "poses" + + keep = [outputkey, 'lengths', "y"] + gener = {key: generation[key].data.cpu().numpy() for key in keep} + if not gen_only: + visu = {key: visualization[key].data.cpu().numpy() for key in keep} + recons = {} + # visualize regressor results + if 'vertices_hat' in reconstructions['ntf']: + recons['regressor'] = { + 'output_vertices': + reconstructions['ntf']['vertices_hat'].data.cpu().numpy(), + 'lengths': + reconstructions['ntf']['lengths'].data.cpu().numpy(), + 'y': + reconstructions['ntf']['y'].data.cpu().numpy() + } + + recons['regressor_side'] = { + 'output_vertices': + reconstructions['ntf']['vertices_hat'].data.cpu().numpy(), + 'lengths': + reconstructions['ntf']['lengths'].data.cpu().numpy(), + 'y': + reconstructions['ntf']['y'].data.cpu().numpy(), + 'side': + True + } + # ToDo rendering overlap results + # recons['overlap'] = {'output_vertices':reconstructions['ntf']['vertices_hat'].data.cpu().numpy(), + # 'lengths':reconstructions['ntf']['lengths'].data.cpu().numpy(), + # 'y':reconstructions['ntf']['y'].data.cpu().numpy(), + # 'imgs':reconstructions['ntf']['imgs'], + # 'bbox':reconstructions['ntf']['bbox'].data.cpu().numpy(), + # 'cam':reconstructions['ntf']['preds'][0]['cam'].data.cpu().numpy()} + for mode, reconstruction in reconstructions.items(): + recons[mode] = { + key: reconstruction[key].data.cpu().numpy() + for key in keep + } + recons[mode + '_side'] = { + key: reconstruction[key].data.cpu().numpy() + for key in keep + } + recons[mode + '_side']['side'] = True + + # lenmax = max(gener['lengths'].max(), visu['lengths'].max()) + # timesize = lenmax + 5 longer visulization + lenmax = gener['lengths'].max() + timesize = lenmax + + import multiprocessing + + def pool_job_with_desc(pool, iterator, desc, max_, save_path_format, isij): + with tqdm(total=max_, desc=desc.format("Render")) as pbar: + for data in iterator: + plot_3d_motion_dico(data) + # for _ in pool.imap_unordered(plot_3d_motion_dico, iterator): + # pbar.update() + if isij: + array = np.stack([[ + load_anim(save_path_format.format(i, j), timesize) + for j in range(nats) + ] for i in tqdm(range(nspa), desc=desc.format("Load"))]) + return array.transpose(2, 0, 1, 3, 4, 5) + else: + array = np.stack([ + load_anim(save_path_format.format(i), timesize) + for i in tqdm(range(nats), desc=desc.format("Load")) + ]) + return array.transpose(1, 0, 2, 3, 4) + + pool = None + # if True: + with multiprocessing.Pool() as pool: + # Generated samples + save_path_format = os.path.join(tmp_path, "gen_{}_{}.gif") + iterator = ((gener[outputkey][i, j], gener['lengths'][i, j], + save_path_format.format(i, j), params, { + "title": + f"gen: {label_to_action_name(gener['y'][i, j])}", + "interval": 1000 / fps + }) for j in range(nats) for i in range(nspa)) + 
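+ # Each tuple packs (motion, length, save path, params, kwargs), the kwargs
+ # carrying the caption title and the frame interval in milliseconds;
+ # pool_job_with_desc renders one gif per (sample, action) pair and reloads
+ # them into a single (frames, nspa, nats, h, w, c) array.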
gener["frames"] = pool_job_with_desc(pool, iterator, + "{} the generated samples", + nats * nspa, save_path_format, + True) + if not gen_only: + # Real samples + save_path_format = os.path.join(tmp_path, "real_{}.gif") + iterator = ((visu[outputkey][i], visu['lengths'][i], + save_path_format.format(i), params, { + "title": + f"real: {label_to_action_name(visu['y'][i])}", + "interval": 1000 / fps + }) for i in range(nats)) + visu["frames"] = pool_job_with_desc(pool, iterator, + "{} the real samples", nats, + save_path_format, False) + for mode, recon in recons.items(): + # Reconstructed samples + save_path_format = os.path.join( + tmp_path, f"reconstructed_{mode}_" + "{}.gif") + if mode == 'overlap': + iterator = (( + recon[outputkey][i], recon['lengths'][i], + save_path_format.format(i), params, { + "title": + f"recons: {label_to_action_name(recon['y'][i])}", + "interval": 1000 / fps, + "pred_cam": recon['cam'][i], + "imgs": recon['imgs'][i], + "bbox": recon['bbox'][i] + }) for i in range(nats)) + else: + side = True if 'side' in recon.keys() else False + iterator = (( + recon[outputkey][i], recon['lengths'][i], + save_path_format.format(i), params, { + "title": + f"recons: {label_to_action_name(recon['y'][i])}", + "interval": 1000 / fps, + "side": side + }) for i in range(nats)) + recon["frames"] = pool_job_with_desc( + pool, iterator, "{} the reconstructed samples", nats, + save_path_format, False) + # vis img in visu + if not gen_only: + input_imgs = visualization["imgs"] if visualization[ + "imgs"] is not None else None + vis = visu["frames"] if not gen_only else None + rec = [recon["frames"] + for recon in recons.values()] if not gen_only else None + gen = gener["frames"] + frames = stack_images(vis, rec, gen, input_imgs) + else: + gen = gener["frames"] + frames = stack_images_gen(gen) + return frames + + +def viz_epoch(model, + dataset, + epoch, + params, + folder, + module=None, + writer=None, + exps=''): + """ Generate & viz samples """ + module = model if module is None else module + + # visualize with joints3D + model.outputxyz = True + + print(f"Visualization of the epoch {epoch}") + + noise_same_action = params["noise_same_action"] + noise_diff_action = params["noise_diff_action"] + duration_mode = params["duration_mode"] + reconstruction_mode = params["reconstruction_mode"] + decoder_test = params["decoder_test"] + + fact = params["fact_latent"] + figname = params["figname"].format(epoch) + + nspa = params["num_samples_per_action"] + nats = params["num_actions_to_sample"] + + num_classes = params["num_classes"] + # nats = min(num_classes, nats) + + # define some classes + classes = torch.randperm(num_classes)[:nats] + # duplicate same classes when sampling too much + if nats > num_classes: + classes = classes.expand(nats) + + meandurations = torch.from_numpy( + np.array([ + round(dataset.get_mean_length_label(cl.item())) for cl in classes + ])) + + if duration_mode == "interpolate" or decoder_test == "diffduration": + points, step = np.linspace(-nspa, nspa, nspa, retstep=True) + # points = np.round(10*points/step).astype(int) + points = np.array([5, 10, 16, 30, 60, 80]).astype(int) + # gendurations = meandurations.repeat((nspa, 1)) + points[:, None] + gendurations = torch.from_numpy(points[:, None]).expand( + (nspa, 1)).repeat((1, nats)) + else: + gendurations = meandurations.repeat((nspa, 1)) + print("Duration time: ") + print(gendurations[:, 0]) + + # extract the real samples + # real_samples, real_theta, mask_real, real_lengths, imgs, paths + batch = 
dataset.get_label_sample_batch(classes.numpy()) + + # ToDo + # clean these data + # Visualizaion of real samples + visualization = { + "x": batch['x'].to(model.device), + "y": classes.to(model.device), + "mask": batch['mask'].to(model.device), + 'lengths': batch['lengths'].to(model.device), + "output": batch['x'].to(model.device), + "theta": + batch['theta'].to(model.device) if 'theta' in batch.keys() else None, + "imgs": + batch['imgs'].to(model.device) if 'imgs' in batch.keys() else None, + "paths": batch['paths'] if 'paths' in batch.keys() else None, + } + + # Visualizaion of real samples + if reconstruction_mode == "both": + reconstructions = { + "tf": { + "x": + batch['x'].to(model.device), + "y": + classes.to(model.device), + 'lengths': + batch['lengths'].to(model.device), + "mask": + batch['mask'].to(model.device), + "teacher_force": + True, + "theta": + batch['theta'].to(model.device) + if 'theta' in batch.keys() else None + }, + "ntf": { + "x": + batch['x'].to(model.device), + "y": + classes.to(model.device), + 'lengths': + batch['lengths'].to(model.device), + "mask": + batch['mask'].to(model.device), + "theta": + batch['theta'].to(model.device) + if 'theta' in batch.keys() else None + } + } + else: + reconstructions = { + reconstruction_mode: { + "x": + batch['x'].to(model.device), + "y": + classes.to(model.device), + 'lengths': + batch['lengths'].to(model.device), + "mask": + batch['mask'].to(model.device), + "teacher_force": + reconstruction_mode == "tf", + "imgs": + batch['imgs'].to(model.device) + if 'imgs' in batch.keys() else None, + "theta": + batch['theta'].to(model.device) + if 'theta' in batch.keys() else None, + "bbox": + batch['bbox'] if 'bbox' in batch.keys() else None + } + } + print("Computing the samples poses..") + + # generate the repr (joints3D/pose etc) + model.eval() + with torch.no_grad(): + # Reconstruction of the real data + for mode in reconstructions: + # update reconstruction dicts + reconstructions[mode] = model(reconstructions[mode]) + reconstruction = reconstructions[list(reconstructions.keys())[0]] + + if decoder_test == "gt": + # Generate the new data + gt_input = { + "x": batch['x'].repeat(nspa, 1, 1, 1).to(model.device), + "y": classes.repeat(nspa).to(model.device), + "mask": batch['mask'].repeat(nspa, 1).to(model.device), + 'lengths': batch['lengths'].repeat(nspa).to(model.device) + } + generation = model(gt_input) + if decoder_test == "new": + # Generate the new data + generation = module.generate(gendurations, + classes=classes, + nspa=nspa, + noise_same_action=noise_same_action, + noise_diff_action=noise_diff_action, + fact=fact) + elif decoder_test == "diffaction": + assert nats == nspa + # keep the same noise for each "sample" + z = reconstruction["z"].repeat((nspa, 1)) + mask = reconstruction["mask"].repeat((nspa, 1)) + lengths = reconstruction['lengths'].repeat(nspa) + # but use other labels + y = classes.repeat_interleave(nspa).to(model.device) + generation = {"z": z, "y": y, "mask": mask, 'lengths': lengths} + model.decoder(generation) + + elif decoder_test == "diffduration": + z = reconstruction["z"].repeat((nspa, 1)) + lengths = gendurations.reshape(-1).to(model.device) + mask = model.lengths_to_mask(lengths) + y = classes.repeat(nspa).to(model.device) + generation = {"z": z, "y": y, "mask": mask, 'lengths': lengths} + model.decoder(generation) + + elif decoder_test == "interpolate_action": + assert nats == nspa + # same noise for each sample + z_diff_action = torch.randn(1, + model.latent_dim, + device=model.device).repeat(nats, 1) + 
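+ # The same latent noise is shared by every row, so along the nspa axis only
+ # the convex mixture of action labels (and the interpolated durations)
+ # changes below.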
z = z_diff_action.repeat((nspa, 1)) + + # but use combination of labels and labels below + y = F.one_hot(classes.to(model.device), + model.num_classes).to(model.device) + y_below = F.one_hot(torch.cat((classes[1:], classes[0:1])), + model.num_classes).to(model.device) + convex_factors = torch.linspace(0, 1, nspa, device=model.device) + y_mixed = torch.einsum("nk,m->mnk", y, 1-convex_factors) + \ + torch.einsum("nk,m->mnk", y_below, convex_factors) + y_mixed = y_mixed.reshape(nspa * nats, y_mixed.shape[-1]) + + durations = gendurations[0].to(model.device) + durations_below = torch.cat((durations[1:], durations[0:1])) + + gendurations = torch.einsum("l,k->kl", durations, 1-convex_factors) + \ + torch.einsum("l,k->kl", durations_below, convex_factors) + gendurations = gendurations.to(dtype=durations.dtype) + + lengths = gendurations.to(model.device).reshape(z.shape[0]) + mask = model.lengths_to_mask(lengths) + + generation = { + "z": z, + "y": y_mixed, + "mask": mask, + 'lengths': lengths + } + generation = model.decoder(generation) + + visualization = module.prepare(visualization) + visualization["output_xyz"] = visualization["x_xyz"] + visualization["output_vertices"] = visualization["x_vertices"] + # Get xyz for the real ones + # visualization["output_xyz"] = module.rot2xyz(visualization["output"], visualization["mask"], jointstype="smpl") + # # Get smpl vertices for the real ones + # if module.cvae.pose_rep != "xyz": + # visualization["output_vertices"] = module.rot2xyz(visualization["output"], visualization["mask"], jointstype="vertices") + + for key, val in generation.items(): + if len(generation[key].shape) == 1: + generation[key] = val.reshape(nspa, nats) + else: + generation[key] = val.reshape(nspa, nats, *val.shape[1:]) + + finalpath = os.path.join(folder, figname + exps + ".gif") + tmp_path = os.path.join(folder, f"subfigures_{figname}") + os.makedirs(tmp_path, exist_ok=True) + + print("Generate the videos..") + frames = generate_by_video(visualization, reconstructions, generation, + dataset.label_to_action_name, params, nats, + nspa, tmp_path) + + print(f"Writing video {finalpath}") + imageio.mimsave(finalpath.replace('gif', 'mp4'), frames, fps=params["fps"]) + shutil.rmtree(tmp_path) + + # output npy + output = { + "data_id": batch['id'], + "paths": batch['paths'], + "x": batch['x'].cpu().numpy(), + "x_vertices": visualization["x_vertices"].cpu().numpy(), + "output_vertices": + reconstructions['ntf']["output_vertices"].cpu().numpy(), + "gen_vertices": generation["output_vertices"].cpu().numpy() + } + + outputpath = finalpath.replace('gif', 'npy') + np.save(outputpath, output) + + # output pkl + batch_recon = reconstructions["ntf"] + outputpath = finalpath.replace('gif', 'pkl') + # output_pkl([batch_recon], outputpath) + + if writer is not None: + writer.add_video(f"Video/Epoch {epoch}", + frames.transpose(0, 3, 1, 2)[None], + epoch, + fps=params["fps"]) + return finalpath + + +def viz_dataset(dataset, params, folder): + """ Generate & viz samples """ + print("Visualization of the dataset") + + nspa = params["num_samples_per_action"] + nats = params["num_actions_to_sample"] + + num_classes = params["num_classes"] + + figname = "{}_{}_numframes_{}_sampling_{}_step_{}".format( + params["dataset"], params["pose_rep"], params["num_frames"], + params["sampling"], params["sampling_step"]) + + # define some classes + classes = torch.randperm(num_classes)[:nats] + + allclasses = classes.repeat(nspa, 1).reshape(nspa * nats) + # extract the real samples + real_samples, mask_real, 
real_lengths = dataset.get_label_sample_batch( + allclasses.numpy()) + # to visualize directly + + # Visualizaion of real samples + visualization = { + "x": real_samples, + "y": allclasses, + "mask": mask_real, + 'lengths': real_lengths, + "output": real_samples + } + + from mGPT.models.rotation2xyz import Rotation2xyz + + device = params["device"] + rot2xyz = Rotation2xyz(device=device) + + rot2xyz_params = { + "pose_rep": params["pose_rep"], + "glob_rot": params["glob_rot"], + "glob": params["glob"], + "jointstype": params["jointstype"], + "translation": params["translation"] + } + + output = visualization["output"] + visualization["output_xyz"] = rot2xyz(output.to(device), + visualization["mask"].to(device), + **rot2xyz_params) + + for key, val in visualization.items(): + if len(visualization[key].shape) == 1: + visualization[key] = val.reshape(nspa, nats) + else: + visualization[key] = val.reshape(nspa, nats, *val.shape[1:]) + + finalpath = os.path.join(folder, figname + ".gif") + tmp_path = os.path.join(folder, f"subfigures_{figname}") + os.makedirs(tmp_path, exist_ok=True) + + print("Generate the videos..") + frames = generate_by_video_sequences(visualization, + dataset.label_to_action_name, params, + nats, nspa, tmp_path) + + print(f"Writing video {finalpath}..") + imageio.mimsave(finalpath, frames, fps=params["fps"]) + + +def generate_by_video_sequences(visualization, label_to_action_name, params, + nats, nspa, tmp_path): + # shape : (17, 3, 4, 480, 640, 3) + # (nframes, row, column, h, w, 3) + fps = params["fps"] + if "output_vetices" in visualization: + outputkey = "output_vetices" + params["pose_rep"] = "vertices" + elif "output_xyz" in visualization: + outputkey = "output_xyz" + params["pose_rep"] = "xyz" + else: + outputkey = "poses" + + keep = [outputkey, 'lengths', "y"] + visu = {key: visualization[key].data.cpu().numpy() for key in keep} + lenmax = visu['lengths'].max() + + timesize = lenmax + 5 + + # import multiprocessing + + def pool_job_with_desc(pool, iterator, desc, max_, save_path_format): + for data in iterator: + plot_3d_motion_dico(data) + # with tqdm(total=max_, desc=desc.format("Render")) as pbar: + # for _ in pool.imap_unordered(plot_3d_motion_dico, iterator): + # pbar.update() + array = np.stack([[ + load_anim(save_path_format.format(i, j), timesize) + for j in range(nats) + ] for i in tqdm(range(nspa), desc=desc.format("Load"))]) + return array.transpose(2, 0, 1, 3, 4, 5) + + pool = None + # with multiprocessing.Pool() as pool: + # Real samples + save_path_format = os.path.join(tmp_path, "real_{}_{}.gif") + iterator = ((visu[outputkey][i, j], visu['lengths'][i, j], + save_path_format.format(i, j), params, { + "title": f"real: {label_to_action_name(visu['y'][i, j])}", + "interval": 1000 / fps + }) for j in range(nats) for i in range(nspa)) + visu["frames"] = pool_job_with_desc(pool, iterator, "{} the real samples", + nats, save_path_format) + frames = stack_images_sequence(visu["frames"]) + return frames + + +def stack_images_sequence(visu): + print("Stacking frames..") + allframes = visu + nframes, nspa, nats, h, w, pix = allframes.shape + frames = [] + for frame_idx in tqdm(range(nframes)): + columns = np.vstack(allframes[frame_idx].transpose(1, 2, 3, 4, + 0)).transpose( + 3, 1, 0, 2) + frame = np.concatenate(columns).transpose(1, 0, 2) + frames.append(frame) + return np.stack(frames) diff --git a/mGPT/utils/__init__.py b/mGPT/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mGPT/utils/demo_utils.py b/mGPT/utils/demo_utils.py new 
file mode 100644 index 0000000..1a62993 --- /dev/null +++ b/mGPT/utils/demo_utils.py @@ -0,0 +1,79 @@ +import os +from pathlib import Path + + +# load example data +def load_example_input(txt_path): + file = open(txt_path, "r") + Lines = file.readlines() + count = 0 + texts, lens = [], [] + # Strips the newline character + for line in Lines: + count += 1 + s = line.strip() + s_l = s.split(" ")[0] + s_t = s[(len(s_l) + 1):] + lens.append(int(s_l)) + texts.append(s_t) + print("Length-{}: {}".format(s_l, s_t)) + return texts, lens + + +# render batch +def render_batch(npy_dir, execute_python="./scripts/visualize_motion.sh", mode="sequence"): + os.system(f"{execute_python} {npy_dir} {mode}") + + +# render +def render(execute_python, npy_path, jointtype, cfg_path): + # execute_python = "/apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender" + # execute_python = "/apdcephfs/share_1227775/mingzhenzhu/jiangbiao/libs/blender-2.93.2-linux-x64/blender" + export_scripts = "render.py" + + os.system( + f"{execute_python} --background --python {export_scripts} -- --cfg={cfg_path} --npy={npy_path} --joint_type={jointtype}" + ) + + fig_path = Path(str(npy_path).replace(".npy", ".png")) + return fig_path + + +# origin render +# def render(npy_path, jointtype): +# execute_python = '/apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender' +# export_scripts = 'render.py' + +# os.system(f"{execute_python} --background --python {export_scripts} -- npy={npy_path} jointstype={jointtype}") + +# fig_path = Path(str(npy_path).replace(".npy",".png")) +# return fig_path + +# export fbx with hand params from pkl files +# refer to /apdcephfs/share_1227775/shingxchen/AIMotion/TMOST/scripts/fbx_output_smplx.py +def export_fbx_hand(pkl_path): + input = pkl_path + output = pkl_path.replace(".pkl", ".fbx") + + execute_python = "/apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender" + export_scripts = "./scripts/fbx_output_smplx.py" + os.system( + f"{execute_python} -noaudio --background --python {export_scripts}\ + --input {input} \ + --output {output}" + ) + + +# export fbx without hand params from pkl files +# refer to /apdcephfs/share_1227775/shingxchen/AIMotion/TMOST/scripts/fbx_output.py +def export_fbx(pkl_path): + input = pkl_path + output = pkl_path.replace(".pkl", ".fbx") + + execute_python = "/apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender" + export_scripts = "./scripts/fbx_output.py" + os.system( + f"{execute_python} -noaudio --background --python {export_scripts}\ + --input {input} \ + --output {output}" + ) diff --git a/mGPT/utils/easyconvert.py b/mGPT/utils/easyconvert.py new file mode 100644 index 0000000..2480737 --- /dev/null +++ b/mGPT/utils/easyconvert.py @@ -0,0 +1,84 @@ +from .geometry_tools import * + + +def rep_to_rep(oldtype, newtype, rotations): + if newtype in ["matrix"]: + return to_matrix(oldtype, rotations) + + if oldtype in ["rotvec", "axisangle"]: + return axis_angle_to(newtype, rotations) + elif oldtype in ["matrix"]: + return matrix_to(newtype, rotations) + else: + raise NotImplementedError("Only rotvec and matrix are supported.") + +def nfeats_of(rottype): + if rottype in ["rotvec", "axisangle"]: + return 3 + elif rottype in ["rotquat", "quaternion"]: + return 4 + elif rottype in ["rot6d", "6drot", "rotation6d"]: + return 6 + elif rottype in ["rotmat"]: + return 9 + else: + return TypeError("This rotation type doesn't have features.") + + +def 
axis_angle_to(newtype, rotations): + if newtype in ["matrix"]: + rotations = axis_angle_to_matrix(rotations) + return rotations + elif newtype in ["rotmat"]: + rotations = axis_angle_to_matrix(rotations) + rotations = matrix_to("rotmat", rotations) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = axis_angle_to_matrix(rotations) + rotations = matrix_to("rot6d", rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = axis_angle_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + return rotations + else: + raise NotImplementedError + + +def matrix_to(newtype, rotations): + if newtype in ["matrix"]: + return rotations + if newtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 9)) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = matrix_to_rotation_6d(rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = matrix_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + rotations = matrix_to_axis_angle(rotations) + return rotations + else: + raise NotImplementedError + + +def to_matrix(oldtype, rotations): + if oldtype in ["matrix"]: + return rotations + if oldtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 3, 3)) + return rotations + elif oldtype in ["rot6d", "6drot", "rotation6d"]: + rotations = rotation_6d_to_matrix(rotations) + return rotations + elif oldtype in ["rotquat", "quaternion"]: + rotations = quaternion_to_matrix(rotations) + return rotations + elif oldtype in ["rotvec", "axisangle"]: + rotations = axis_angle_to_matrix(rotations) + return rotations + else: + raise NotImplementedError diff --git a/mGPT/utils/fixseed.py b/mGPT/utils/fixseed.py new file mode 100644 index 0000000..a43a273 --- /dev/null +++ b/mGPT/utils/fixseed.py @@ -0,0 +1,18 @@ +import numpy as np +import torch +import random + + +def fixseed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + + +SEED = 10 +EVALSEED = 0 +# Provoc warning: not fully functionnal yet +# torch.set_deterministic(True) +torch.backends.cudnn.benchmark = False + +fixseed(SEED) diff --git a/mGPT/utils/geometry_conver.py b/mGPT/utils/geometry_conver.py new file mode 100644 index 0000000..5c5c844 --- /dev/null +++ b/mGPT/utils/geometry_conver.py @@ -0,0 +1,550 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +import torch +import numpy as np +from torch.nn import functional as F + + +def axis_angle_to_quaternion(axis_angle): + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. 
+ + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = 0.5 * angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles]) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], + dim=-1) + return quaternions + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def axis_angle_to_matrix(axis_angle): + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). 
+ """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def matrix_of_angles(cos, sin, inv=False, dim=2): + assert dim in [2, 3] + sin = -sin if inv else sin + if dim == 2: + row1 = torch.stack((cos, -sin), axis=-1) + row2 = torch.stack((sin, cos), axis=-1) + return torch.stack((row1, row2), axis=-2) + elif dim == 3: + row1 = torch.stack((cos, -sin, 0 * cos), axis=-1) + row2 = torch.stack((sin, cos, 0 * cos), axis=-1) + row3 = torch.stack((0 * sin, 0 * cos, 1 + 0 * cos), axis=-1) + return torch.stack((row1, row2, row3), axis=-2) + + +def matrot2axisangle(matrots): + # This function is borrowed from https://github.com/davrempe/humor/utils/transforms.py + # axisang N x 3 + ''' + :param matrots: N*num_joints*9 + :return: N*num_joints*3 + ''' + import cv2 + batch_size = matrots.shape[0] + matrots = matrots.reshape([batch_size, -1, 9]) + out_axisangle = [] + for mIdx in range(matrots.shape[0]): + cur_axisangle = [] + for jIdx in range(matrots.shape[1]): + a = cv2.Rodrigues(matrots[mIdx, + jIdx:jIdx + 1, :].reshape(3, + 3))[0].reshape( + (1, 3)) + cur_axisangle.append(a) + + out_axisangle.append(np.array(cur_axisangle).reshape([1, -1, 3])) + return np.vstack(out_axisangle) + + +def axisangle2matrots(axisangle): + # This function is borrowed from https://github.com/davrempe/humor/utils/transforms.py + # axisang N x 3 + ''' + :param axisangle: N*num_joints*3 + :return: N*num_joints*9 + ''' + import cv2 + batch_size = axisangle.shape[0] + axisangle = axisangle.reshape([batch_size, -1, 3]) + out_matrot = [] + for mIdx in range(axisangle.shape[0]): + cur_axisangle = [] + for jIdx in range(axisangle.shape[1]): + a = cv2.Rodrigues(axisangle[mIdx, jIdx:jIdx + 1, :].reshape(1, + 3))[0] + cur_axisangle.append(a) + + out_matrot.append(np.array(cur_axisangle).reshape([1, -1, 9])) + return np.vstack(out_matrot) + + +def batch_rodrigues(axisang): + # This function is borrowed from https://github.com/MandyMo/pytorch_HMR/blob/master/src/util.py#L37 + # axisang N x 3 + axisang_norm = torch.norm(axisang + 1e-8, p=2, dim=1) + angle = torch.unsqueeze(axisang_norm, -1) + axisang_normalized = torch.div(axisang, angle) + angle = angle * 0.5 + v_cos = torch.cos(angle) + v_sin = torch.sin(angle) + + quat = torch.cat([v_cos, v_sin * axisang_normalized], dim=1) + rot_mat = quat2mat(quat) + rot_mat = rot_mat.view(rot_mat.shape[0], 9) + return rot_mat + + +def quat2mat(quat): + """ + This function is borrowed from https://github.com/MandyMo/pytorch_HMR/blob/master/src/util.py#L50 + + Convert quaternion coefficients to rotation matrix. 
+ Args: + quat: size = [batch_size, 4] 4 <===>(w, x, y, z) + Returns: + Rotation matrix corresponding to the quaternion -- size = [batch_size, 3, 3] + """ + norm_quat = quat + norm_quat = norm_quat / norm_quat.norm(p=2, dim=1, keepdim=True) + w, x, y, z = norm_quat[:, 0], norm_quat[:, 1], norm_quat[:, + 2], norm_quat[:, + 3] + + batch_size = quat.size(0) + + w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2) + wx, wy, wz = w * x, w * y, w * z + xy, xz, yz = x * y, x * z, y * z + + rotMat = torch.stack([ + w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, 2 * wz + 2 * xy, + w2 - x2 + y2 - z2, 2 * yz - 2 * wx, 2 * xz - 2 * wy, 2 * wx + 2 * yz, + w2 - x2 - y2 + z2 + ], + dim=1).view(batch_size, 3, 3) + return rotMat + + +def rotation_matrix_to_angle_axis(rotation_matrix): + """ + This function is borrowed from https://github.com/kornia/kornia + + Convert 3x4 rotation matrix to Rodrigues vector + + Args: + rotation_matrix (Tensor): rotation matrix. + + Returns: + Tensor: Rodrigues vector transformation. + + Shape: + - Input: :math:`(N, 3, 4)` + - Output: :math:`(N, 3)` + + Example: + >>> input = torch.rand(2, 3, 4) # Nx4x4 + >>> output = tgm.rotation_matrix_to_angle_axis(input) # Nx3 + """ + if rotation_matrix.shape[1:] == (3, 3): + rot_mat = rotation_matrix.reshape(-1, 3, 3) + hom = torch.tensor([0, 0, 1], + dtype=torch.float32, + device=rotation_matrix.device).reshape( + 1, 3, 1).expand(rot_mat.shape[0], -1, -1) + rotation_matrix = torch.cat([rot_mat, hom], dim=-1) + + quaternion = rotation_matrix_to_quaternion(rotation_matrix) + aa = quaternion_to_angle_axis(quaternion) + aa[torch.isnan(aa)] = 0.0 + return aa + + +def quaternion_to_angle_axis(quaternion: torch.Tensor) -> torch.Tensor: + """ + This function is borrowed from https://github.com/kornia/kornia + + Convert quaternion vector to angle axis of rotation. + + Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h + + Args: + quaternion (torch.Tensor): tensor with quaternions. + + Return: + torch.Tensor: tensor with angle axis of rotation. + + Shape: + - Input: :math:`(*, 4)` where `*` means, any number of dimensions + - Output: :math:`(*, 3)` + + Example: + >>> quaternion = torch.rand(2, 4) # Nx4 + >>> angle_axis = tgm.quaternion_to_angle_axis(quaternion) # Nx3 + """ + if not torch.is_tensor(quaternion): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(quaternion))) + + if not quaternion.shape[-1] == 4: + raise ValueError( + "Input must be a tensor of shape Nx4 or 4. 
Got {}".format( + quaternion.shape)) + # unpack input and compute conversion + q1: torch.Tensor = quaternion[..., 1] + q2: torch.Tensor = quaternion[..., 2] + q3: torch.Tensor = quaternion[..., 3] + sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3 + + sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta) + cos_theta: torch.Tensor = quaternion[..., 0] + two_theta: torch.Tensor = 2.0 * torch.where( + cos_theta < 0.0, torch.atan2(-sin_theta, -cos_theta), + torch.atan2(sin_theta, cos_theta)) + + k_pos: torch.Tensor = two_theta / sin_theta + k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta) + k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg) + + angle_axis: torch.Tensor = torch.zeros_like(quaternion)[..., :3] + angle_axis[..., 0] += q1 * k + angle_axis[..., 1] += q2 * k + angle_axis[..., 2] += q3 * k + return angle_axis + + +def rotation_matrix_to_quaternion(rotation_matrix, eps=1e-6): + """ + This function is borrowed from https://github.com/kornia/kornia + + Convert 3x4 rotation matrix to 4d quaternion vector + + This algorithm is based on algorithm described in + https://github.com/KieranWynn/pyquaternion/blob/master/pyquaternion/quaternion.py#L201 + + Args: + rotation_matrix (Tensor): the rotation matrix to convert. + + Return: + Tensor: the rotation in quaternion + + Shape: + - Input: :math:`(N, 3, 4)` + - Output: :math:`(N, 4)` + + Example: + >>> input = torch.rand(4, 3, 4) # Nx3x4 + >>> output = tgm.rotation_matrix_to_quaternion(input) # Nx4 + """ + if not torch.is_tensor(rotation_matrix): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(rotation_matrix))) + + if len(rotation_matrix.shape) > 3: + raise ValueError( + "Input size must be a three dimensional tensor. Got {}".format( + rotation_matrix.shape)) + if not rotation_matrix.shape[-2:] == (3, 4): + raise ValueError( + "Input size must be a N x 3 x 4 tensor. 
Got {}".format( + rotation_matrix.shape)) + + rmat_t = torch.transpose(rotation_matrix, 1, 2) + + mask_d2 = rmat_t[:, 2, 2] < eps + + mask_d0_d1 = rmat_t[:, 0, 0] > rmat_t[:, 1, 1] + mask_d0_nd1 = rmat_t[:, 0, 0] < -rmat_t[:, 1, 1] + + t0 = 1 + rmat_t[:, 0, 0] - rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q0 = torch.stack([ + rmat_t[:, 1, 2] - rmat_t[:, 2, 1], t0, + rmat_t[:, 0, 1] + rmat_t[:, 1, 0], rmat_t[:, 2, 0] + rmat_t[:, 0, 2] + ], -1) + t0_rep = t0.repeat(4, 1).t() + + t1 = 1 - rmat_t[:, 0, 0] + rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q1 = torch.stack([ + rmat_t[:, 2, 0] - rmat_t[:, 0, 2], rmat_t[:, 0, 1] + rmat_t[:, 1, 0], + t1, rmat_t[:, 1, 2] + rmat_t[:, 2, 1] + ], -1) + t1_rep = t1.repeat(4, 1).t() + + t2 = 1 - rmat_t[:, 0, 0] - rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q2 = torch.stack([ + rmat_t[:, 0, 1] - rmat_t[:, 1, 0], rmat_t[:, 2, 0] + rmat_t[:, 0, 2], + rmat_t[:, 1, 2] + rmat_t[:, 2, 1], t2 + ], -1) + t2_rep = t2.repeat(4, 1).t() + + t3 = 1 + rmat_t[:, 0, 0] + rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q3 = torch.stack([ + t3, rmat_t[:, 1, 2] - rmat_t[:, 2, 1], + rmat_t[:, 2, 0] - rmat_t[:, 0, 2], rmat_t[:, 0, 1] - rmat_t[:, 1, 0] + ], -1) + t3_rep = t3.repeat(4, 1).t() + + mask_c0 = mask_d2 * mask_d0_d1 + mask_c1 = mask_d2 * ~mask_d0_d1 + mask_c2 = ~mask_d2 * mask_d0_nd1 + mask_c3 = ~mask_d2 * ~mask_d0_nd1 + mask_c0 = mask_c0.view(-1, 1).type_as(q0) + mask_c1 = mask_c1.view(-1, 1).type_as(q1) + mask_c2 = mask_c2.view(-1, 1).type_as(q2) + mask_c3 = mask_c3.view(-1, 1).type_as(q3) + + q = q0 * mask_c0 + q1 * mask_c1 + q2 * mask_c2 + q3 * mask_c3 + q /= torch.sqrt(t0_rep * mask_c0 + t1_rep * mask_c1 + # noqa + t2_rep * mask_c2 + t3_rep * mask_c3) # noqa + q *= 0.5 + return q + + +def estimate_translation_np(S, + joints_2d, + joints_conf, + focal_length=5000., + img_size=224.): + """ + This function is borrowed from https://github.com/nkolot/SPIN/utils/geometry.py + + Find camera translation that brings 3D joints S closest to 2D the corresponding joints_2d. + Input: + S: (25, 3) 3D joint locations + joints: (25, 3) 2D joint locations and confidence + Returns: + (3,) camera translation vector + """ + + num_joints = S.shape[0] + # focal length + f = np.array([focal_length, focal_length]) + # optical center + center = np.array([img_size / 2., img_size / 2.]) + + # transformations + Z = np.reshape(np.tile(S[:, 2], (2, 1)).T, -1) + XY = np.reshape(S[:, 0:2], -1) + O = np.tile(center, num_joints) + F = np.tile(f, num_joints) + weight2 = np.reshape(np.tile(np.sqrt(joints_conf), (2, 1)).T, -1) + + # least squares + Q = np.array([ + F * np.tile(np.array([1, 0]), num_joints), + F * np.tile(np.array([0, 1]), num_joints), + O - np.reshape(joints_2d, -1) + ]).T + c = (np.reshape(joints_2d, -1) - O) * Z - F * XY + + # weighted least squares + W = np.diagflat(weight2) + Q = np.dot(W, Q) + c = np.dot(W, c) + + # square matrix + A = np.dot(Q.T, Q) + b = np.dot(Q.T, c) + + # solution + trans = np.linalg.solve(A, b) + + return trans + + +def estimate_translation(S, joints_2d, focal_length=5000., img_size=224.): + """ + This function is borrowed from https://github.com/nkolot/SPIN/utils/geometry.py + + Find camera translation that brings 3D joints S closest to 2D the corresponding joints_2d. 
+ Input: + S: (B, 49, 3) 3D joint locations + joints: (B, 49, 3) 2D joint locations and confidence + Returns: + (B, 3) camera translation vectors + """ + + device = S.device + # Use only joints 25:49 (GT joints) + S = S[:, 25:, :].cpu().numpy() + joints_2d = joints_2d[:, 25:, :].cpu().numpy() + joints_conf = joints_2d[:, :, -1] + joints_2d = joints_2d[:, :, :-1] + trans = np.zeros((S.shape[0], 3), dtype=np.float6432) + # Find the translation for each example in the batch + for i in range(S.shape[0]): + S_i = S[i] + joints_i = joints_2d[i] + conf_i = joints_conf[i] + trans[i] = estimate_translation_np(S_i, + joints_i, + conf_i, + focal_length=focal_length, + img_size=img_size) + return torch.from_numpy(trans).to(device) + + +def rot6d_to_rotmat_spin(x): + """Convert 6D rotation representation to 3x3 rotation matrix. + Based on Zhou et al., "On the Continuity of Rotation Representations in Neural Networks", CVPR 2019 + Input: + (B,6) Batch of 6-D rotation representations + Output: + (B,3,3) Batch of corresponding rotation matrices + """ + x = x.view(-1, 3, 2) + a1 = x[:, :, 0] + a2 = x[:, :, 1] + b1 = F.normalize(a1) + b2 = F.normalize(a2 - torch.einsum('bi,bi->b', b1, a2).unsqueeze(-1) * b1) + + # inp = a2 - torch.einsum('bi,bi->b', b1, a2).unsqueeze(-1) * b1 + # denom = inp.pow(2).sum(dim=1).sqrt().unsqueeze(-1) + 1e-8 + # b2 = inp / denom + + b3 = torch.cross(b1, b2) + return torch.stack((b1, b2, b3), dim=-1) + + +def rot6d_to_rotmat(x): + x = x.view(-1, 3, 2) + + # Normalize the first vector + b1 = F.normalize(x[:, :, 0], dim=1, eps=1e-6) + + dot_prod = torch.sum(b1 * x[:, :, 1], dim=1, keepdim=True) + # Compute the second vector by finding the orthogonal complement to it + b2 = F.normalize(x[:, :, 1] - dot_prod * b1, dim=-1, eps=1e-6) + + # Finish building the basis by taking the cross product + b3 = torch.cross(b1, b2, dim=1) + rot_mats = torch.stack([b1, b2, b3], dim=-1) + + return rot_mats + + +import mGPT.utils.rotation_conversions as rotation_conversions + + +def rot6d(x_rotations, pose_rep): + time, njoints, feats = x_rotations.shape + + # Compute rotations (convert only masked sequences output) + if pose_rep == "rotvec": + rotations = rotation_conversions.axis_angle_to_matrix(x_rotations) + elif pose_rep == "rotmat": + rotations = x_rotations.view(njoints, 3, 3) + elif pose_rep == "rotquat": + rotations = rotation_conversions.quaternion_to_matrix(x_rotations) + elif pose_rep == "rot6d": + rotations = rotation_conversions.rotation_6d_to_matrix(x_rotations) + else: + raise NotImplementedError("No geometry for this one.") + + rotations_6d = rotation_conversions.matrix_to_rotation_6d(rotations) + return rotations_6d + + +def rot6d_batch(x_rotations, pose_rep): + nsamples, time, njoints, feats = x_rotations.shape + + # Compute rotations (convert only masked sequences output) + if pose_rep == "rotvec": + rotations = rotation_conversions.axis_angle_to_matrix(x_rotations) + elif pose_rep == "rotmat": + rotations = x_rotations.view(-1, njoints, 3, 3) + elif pose_rep == "rotquat": + rotations = rotation_conversions.quaternion_to_matrix(x_rotations) + elif pose_rep == "rot6d": + rotations = rotation_conversions.rotation_6d_to_matrix(x_rotations) + else: + raise NotImplementedError("No geometry for this one.") + + rotations_6d = rotation_conversions.matrix_to_rotation_6d(rotations) + return rotations_6d + + +def rot6d_to_rotvec_batch(pose): + # nsamples, time, njoints, feats = rot6d.shape + bs, nfeats = pose.shape + rot6d = pose.reshape(bs, 24, 6) + rotations = 
rotation_conversions.rotation_6d_to_matrix(rot6d) + rotvec = rotation_conversions.matrix_to_axis_angle(rotations) + return rotvec.reshape(bs, 24 * 3) diff --git a/mGPT/utils/geometry_tools.py b/mGPT/utils/geometry_tools.py new file mode 100644 index 0000000..e6eafa2 --- /dev/null +++ b/mGPT/utils/geometry_tools.py @@ -0,0 +1,566 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. +# Check PYTORCH3D_LICENCE before use + +import functools +from typing import Optional + +import torch +import torch.nn.functional as F + + +""" +The transformation matrices returned from the functions in this file assume +the points on which the transformation will be applied are column vectors. +i.e. the R matrix is structured as + + R = [ + [Rxx, Rxy, Rxz], + [Ryx, Ryy, Ryz], + [Rzx, Rzy, Rzz], + ] # (3, 3) + +This matrix can be applied to column vectors by post multiplication +by the points e.g. + + points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point + transformed_points = R * points + +To apply the same matrix to points which are row vectors, the R matrix +can be transposed and pre multiplied by the points: + +e.g. + points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point + transformed_points = points * R.transpose(1, 0) +""" + + +# Added +def matrix_of_angles(cos, sin, inv=False, dim=2): + assert dim in [2, 3] + sin = -sin if inv else sin + if dim == 2: + row1 = torch.stack((cos, -sin), axis=-1) + row2 = torch.stack((sin, cos), axis=-1) + return torch.stack((row1, row2), axis=-2) + elif dim == 3: + row1 = torch.stack((cos, -sin, 0*cos), axis=-1) + row2 = torch.stack((sin, cos, 0*cos), axis=-1) + row3 = torch.stack((0*sin, 0*cos, 1+0*cos), axis=-1) + return torch.stack((row1, row2, row3),axis=-2) + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def _copysign(a, b): + """ + Return a tensor where each element has the absolute value taken from the, + corresponding element of a, with sign taken from the corresponding + element of b. This is like the standard copysign floating-point operation, + but is not careful about negative 0 and NaN. + + Args: + a: source tensor. + b: tensor whose signs will be used, of the same shape as a. + + Returns: + Tensor of the same shape as a with the signs of b. + """ + signs_differ = (a < 0) != (b < 0) + return torch.where(signs_differ, -a, a) + + +def _sqrt_positive_part(x): + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + + +def matrix_to_quaternion(matrix): + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). 
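To make the column-vector convention described in the module docstring concrete, here is a small sketch (the quaternion value is only an example; assumes the repo is on `PYTHONPATH`):

```python
import math
import torch

from mGPT.utils.geometry_tools import quaternion_to_matrix

# A 90-degree rotation about the z-axis, written as (w, x, y, z).
half = math.pi / 4
q = torch.tensor([math.cos(half), 0.0, 0.0, math.sin(half)])
R = quaternion_to_matrix(q)                  # (3, 3)

# Column-vector convention: post-multiply R by the point.
point = torch.tensor([[1.0], [0.0], [0.0]])  # (3, 1) column vector
print((R @ point).squeeze())                 # expected: approximately (0, 1, 0)
```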
+ """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + m00 = matrix[..., 0, 0] + m11 = matrix[..., 1, 1] + m22 = matrix[..., 2, 2] + o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22) + x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22) + y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22) + z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22) + o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2]) + o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0]) + o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1]) + return torch.stack((o0, o1, o2, o3), -1) + + +def _axis_angle_rotation(axis: str, angle): + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". + angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + if axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + if axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + + +def euler_angles_to_matrix(euler_angles, convention: str): + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = map(_axis_angle_rotation, convention, torch.unbind(euler_angles, -1)) + return functools.reduce(torch.matmul, matrices) + + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +): + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). 
+ """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str): + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + + +def matrix_to_euler_angles(matrix, convention: str): + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). + """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def random_quaternions( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random quaternions representing rotations, + i.e. versors with nonnegative real part. + + Args: + n: Number of quaternions in a batch to return. + dtype: Type to return. + device: Desired device of returned tensor. Default: + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Quaternions as tensor of shape (N, 4). + """ + o = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad) + s = (o * o).sum(1) + o = o / _copysign(torch.sqrt(s), o[:, 0])[:, None] + return o + + +def random_rotations( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random rotations as 3x3 rotation matrices. + + Args: + n: Number of rotation matrices in a batch to return. + dtype: Type to return. + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Rotation matrices as tensor of shape (n, 3, 3). + """ + quaternions = random_quaternions( + n, dtype=dtype, device=device, requires_grad=requires_grad + ) + return quaternion_to_matrix(quaternions) + + +def random_rotation( + dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate a single random 3x3 rotation matrix. + + Args: + dtype: Type to return + device: Device of returned tensor. 
Default: if None, + uses the current device for the default tensor type + requires_grad: Whether the resulting tensor should have the gradient + flag set + + Returns: + Rotation matrix as tensor of shape (3, 3). + """ + return random_rotations(1, dtype, device, requires_grad)[0] + + +def standardize_quaternion(quaternions): + """ + Convert a unit quaternion to a standard form: one in which the real + part is non negative. + + Args: + quaternions: Quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Standardized quaternions as tensor of shape (..., 4). + """ + return torch.where(quaternions[..., 0:1] < 0, -quaternions, quaternions) + + +def quaternion_raw_multiply(a, b): + """ + Multiply two quaternions. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions shape (..., 4). + """ + aw, ax, ay, az = torch.unbind(a, -1) + bw, bx, by, bz = torch.unbind(b, -1) + ow = aw * bw - ax * bx - ay * by - az * bz + ox = aw * bx + ax * bw + ay * bz - az * by + oy = aw * by - ax * bz + ay * bw + az * bx + oz = aw * bz + ax * by - ay * bx + az * bw + return torch.stack((ow, ox, oy, oz), -1) + + +def quaternion_multiply(a, b): + """ + Multiply two quaternions representing rotations, returning the quaternion + representing their composition, i.e. the versor with nonnegative real part. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions of shape (..., 4). + """ + ab = quaternion_raw_multiply(a, b) + return standardize_quaternion(ab) + + +def quaternion_invert(quaternion): + """ + Given a quaternion representing rotation, get the quaternion representing + its inverse. + + Args: + quaternion: Quaternions as tensor of shape (..., 4), with real part + first, which must be versors (unit quaternions). + + Returns: + The inverse, a tensor of quaternions of shape (..., 4). + """ + + return quaternion * quaternion.new_tensor([1, -1, -1, -1]) + + +def quaternion_apply(quaternion, point): + """ + Apply the rotation given by a quaternion to a 3D point. + Usual torch rules for broadcasting apply. + + Args: + quaternion: Tensor of quaternions, real part first, of shape (..., 4). + point: Tensor of 3D points of shape (..., 3). + + Returns: + Tensor of rotated points of shape (..., 3). + """ + if point.size(-1) != 3: + raise ValueError(f"Points are not in 3D, f{point.shape}.") + real_parts = point.new_zeros(point.shape[:-1] + (1,)) + point_as_quaternion = torch.cat((real_parts, point), -1) + out = quaternion_raw_multiply( + quaternion_raw_multiply(quaternion, point_as_quaternion), + quaternion_invert(quaternion), + ) + return out[..., 1:] + + +def axis_angle_to_matrix(axis_angle): + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def matrix_to_axis_angle(matrix): + """ + Convert rotations given as rotation matrices to axis/angle. 
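The quaternion operators above are consistent with the matrix converters; the sketch below (assumed module path, arbitrary seed) checks that `quaternion_apply` matches rotating with the equivalent matrix.

```python
import torch

from mGPT.utils.geometry_tools import (quaternion_apply, quaternion_to_matrix,
                                       random_quaternions)

torch.manual_seed(0)
q = random_quaternions(1)[0]                     # a single unit quaternion, shape (4,)
points = torch.randn(5, 3)

rotated_q = quaternion_apply(q, points)          # q broadcasts over the 5 points
rotated_m = points @ quaternion_to_matrix(q).T   # row vectors, so post-multiply by R^T

print(torch.allclose(rotated_q, rotated_m, atol=1e-5))  # expected: True
```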
+ + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + + +def axis_angle_to_quaternion(axis_angle): + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = 0.5 * angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def quaternion_to_axis_angle(quaternions): + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. 
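The two 6D helpers are inverses on valid rotation matrices; a short round-trip sketch (assumed module path, arbitrary seed):

```python
import torch

from mGPT.utils.geometry_tools import (matrix_to_rotation_6d, random_rotations,
                                       rotation_6d_to_matrix)

torch.manual_seed(0)
R = random_rotations(4)              # (4, 3, 3)
d6 = matrix_to_rotation_6d(R)        # (4, 6): the first two rows of each matrix
R_back = rotation_6d_to_matrix(d6)   # Gram-Schmidt rebuilds the third row

print(torch.allclose(R, R_back, atol=1e-5))  # expected: True
```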
+ Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) diff --git a/mGPT/utils/joints.py b/mGPT/utils/joints.py new file mode 100644 index 0000000..98199c6 --- /dev/null +++ b/mGPT/utils/joints.py @@ -0,0 +1,444 @@ +mmm_joints = [ + "root", + "BP", + "BT", + "BLN", + "BUN", + "LS", + "LE", + "LW", + "RS", + "RE", + "RW", + "LH", + "LK", + "LA", + "LMrot", + "LF", + "RH", + "RK", + "RA", + "RMrot", + "RF", +] + +humanml3d_joints = [ + "root", + "RH", + "LH", + "BP", + "RK", + "LK", + "BT", + "RMrot", + "LMrot", + "BLN", + "RF", + "LF", + "BMN", + "RSI", + "LSI", + "BUN", + "RS", + "LS", + "RE", + "LE", + "RW", + "LW", +] + +smplx_joints = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "jaw", + "left_eye_smplhf", + "right_eye_smplhf", + "left_index1", + "left_index2", + "left_index3", + "left_middle1", + "left_middle2", + "left_middle3", + "left_pinky1", + "left_pinky2", + "left_pinky3", + "left_ring1", + "left_ring2", + "left_ring3", + "left_thumb1", + "left_thumb2", + "left_thumb3", + "right_index1", + "right_index2", + "right_index3", + "right_middle1", + "right_middle2", + "right_middle3", + "right_pinky1", + "right_pinky2", + "right_pinky3", + "right_ring1", + "right_ring2", + "right_ring3", + "right_thumb1", + "right_thumb2", + "right_thumb3", + "nose", + "right_eye", + "left_eye", + "right_ear", + "left_ear", + "left_big_toe", + "left_small_toe", + "left_heel", + "right_big_toe", + "right_small_toe", + "right_heel", + "left_thumb", + "left_index", + "left_middle", + "left_ring", + "left_pinky", + "right_thumb", + "right_index", + "right_middle", + "right_ring", + "right_pinky", + "right_eye_brow1", + "right_eye_brow2", + "right_eye_brow3", + "right_eye_brow4", + "right_eye_brow5", + "left_eye_brow5", + "left_eye_brow4", + "left_eye_brow3", + "left_eye_brow2", + "left_eye_brow1", + "nose1", + "nose2", + "nose3", + "nose4", + "right_nose_2", + "right_nose_1", + "nose_middle", + "left_nose_1", + "left_nose_2", + "right_eye1", + "right_eye2", + "right_eye3", + "right_eye4", + "right_eye5", + "right_eye6", + "left_eye4", + "left_eye3", + "left_eye2", + "left_eye1", + "left_eye6", + "left_eye5", + "right_mouth_1", + "right_mouth_2", + "right_mouth_3", + "mouth_top", + "left_mouth_3", + "left_mouth_2", + "left_mouth_1", + "left_mouth_5", # 59 in OpenPose output + "left_mouth_4", # 58 in OpenPose output + "mouth_bottom", + "right_mouth_4", + "right_mouth_5", + "right_lip_1", + "right_lip_2", + "lip_top", + "left_lip_2", + "left_lip_1", + "left_lip_3", + "lip_bottom", + "right_lip_3", + # Face contour + "right_contour_1", + "right_contour_2", + "right_contour_3", + "right_contour_4", + "right_contour_5", + "right_contour_6", + "right_contour_7", + "right_contour_8", + "contour_middle", + "left_contour_8", + "left_contour_7", + "left_contour_6", + "left_contour_5", + "left_contour_4", + "left_contour_3", + "left_contour_2", + 
"left_contour_1", +] + +smplxnh_joints = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", +] + +smplh_joints = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", + "left_index1", + "left_index2", + "left_index3", + "left_middle1", + "left_middle2", + "left_middle3", + "left_pinky1", + "left_pinky2", + "left_pinky3", + "left_ring1", + "left_ring2", + "left_ring3", + "left_thumb1", + "left_thumb2", + "left_thumb3", + "right_index1", + "right_index2", + "right_index3", + "right_middle1", + "right_middle2", + "right_middle3", + "right_pinky1", + "right_pinky2", + "right_pinky3", + "right_ring1", + "right_ring2", + "right_ring3", + "right_thumb1", + "right_thumb2", + "right_thumb3", + "nose", + "right_eye", + "left_eye", + "right_ear", + "left_ear", + "left_big_toe", + "left_small_toe", + "left_heel", + "right_big_toe", + "right_small_toe", + "right_heel", + "left_thumb", + "left_index", + "left_middle", + "left_ring", + "left_pinky", + "right_thumb", + "right_index", + "right_middle", + "right_ring", + "right_pinky", +] + +smplnh_joints = [ + "pelvis", + "left_hip", + "right_hip", + "spine1", + "left_knee", + "right_knee", + "spine2", + "left_ankle", + "right_ankle", + "spine3", + "left_foot", + "right_foot", + "neck", + "left_collar", + "right_collar", + "head", + "left_shoulder", + "right_shoulder", + "left_elbow", + "right_elbow", + "left_wrist", + "right_wrist", +] + +mmm2smplh_correspondence = { + "root": "pelvis", + "BP": "spine1", + "BT": "spine3", + "BLN": "neck", + "BUN": "head", + "LS": "left_shoulder", + "LE": "left_elbow", + "LW": "left_wrist", + "RS": "right_shoulder", + "RE": "right_elbow", + "RW": "right_wrist", + "LH": "left_hip", + "LK": "left_knee", + "LA": "left_ankle", + "LMrot": "left_heel", + "LF": "left_foot", + "RH": "right_hip", + "RK": "right_knee", + "RA": "right_ankle", + "RMrot": "right_heel", + "RF": "right_foot", +} + +smplh2mmm_correspondence = { + val: key + for key, val in mmm2smplh_correspondence.items() +} +smplh2mmm_indexes = [ + smplh_joints.index(mmm2smplh_correspondence[x]) for x in mmm_joints +] + +smplnh2smplh_correspondence = {key: key for key in smplnh_joints} +smplh2smplnh_correspondence = { + val: key + for key, val in smplnh2smplh_correspondence.items() +} + +smplh2smplnh_indexes = [ + smplh_joints.index(smplnh2smplh_correspondence[x]) for x in smplnh_joints +] + +mmm_kinematic_tree = [ + [0, 1, 2, 3, 4], # body + [3, 5, 6, 7], # right arm + [3, 8, 9, 10], # left arm + [0, 11, 12, 13, 14, 15], # right leg + [0, 16, 17, 18, 19, 20], +] # left leg + +humanml3d_kinematic_tree = [ + [0, 3, 6, 9, 12, 15], # body + [9, 14, 17, 19, 21], # right arm + [9, 13, 16, 18, 20], # left arm + [0, 2, 5, 8, 11], # right leg + [0, 1, 4, 7, 10], +] # left leg + +smplh_to_mmm_scaling_factor = 480 / 0.75 +mmm_to_smplh_scaling_factor = 0.75 / 480 + +mmm_joints_info = { + "root": + mmm_joints.index("root"), + "feet": [ + mmm_joints.index("LMrot"), + mmm_joints.index("RMrot"), + mmm_joints.index("LF"), + 
mmm_joints.index("RF"), + ], + "shoulders": [mmm_joints.index("LS"), + mmm_joints.index("RS")], + "hips": [mmm_joints.index("LH"), + mmm_joints.index("RH")], +} + +smplnh_joints_info = { + "root": + smplnh_joints.index("pelvis"), + "feet": [ + smplnh_joints.index("left_ankle"), + smplnh_joints.index("right_ankle"), + smplnh_joints.index("left_foot"), + smplnh_joints.index("right_foot"), + ], + "shoulders": [ + smplnh_joints.index("left_shoulder"), + smplnh_joints.index("right_shoulder"), + ], + "hips": + [smplnh_joints.index("left_hip"), + smplnh_joints.index("right_hip")], +} + +infos = {"mmm": mmm_joints_info, "smplnh": smplnh_joints_info} + +smplh_indexes = {"mmm": smplh2mmm_indexes, "smplnh": smplh2smplnh_indexes} + +root_joints = { + "mmm": mmm_joints_info["root"], + "mmmns": mmm_joints_info["root"], + "smplmmm": mmm_joints_info["root"], + "smplnh": smplnh_joints_info["root"], + "smplh": smplh_joints.index("pelvis"), +} + + +def get_root_idx(joinstype): + return root_joints[joinstype] + + +# def mmm2smpl(joints_mmm): +# mmm2smplnh_indexes = [] +# for x in smplnh_joints: +# if x in smplh2mmm_correspondence: +# mmm2smplnh_indexes.append(mmm_joints.index(smplh2mmm_correspondence[x])) + +# spine2 = 0.5*(joints[mmm_joints.index("spine1")] + joints[mmm_joints.index("spine3")]) + +# joints = joints_mmm[indexes] +# return joints diff --git a/mGPT/utils/load_checkpoint.py b/mGPT/utils/load_checkpoint.py new file mode 100644 index 0000000..1214958 --- /dev/null +++ b/mGPT/utils/load_checkpoint.py @@ -0,0 +1,34 @@ +import torch + +def load_pretrained(cfg, model, logger, phase="train"): + logger.info(f"Loading pretrain model from {cfg.TRAIN.PRETRAINED}") + if phase == "train": + ckpt_path = cfg.TRAIN.PRETRAINED + elif phase == "test": + ckpt_path = cfg.TEST.CHECKPOINTS + + state_dict = torch.load(ckpt_path, map_location="cpu")["state_dict"] + model.load_state_dict(state_dict, strict=True) + return model + + +def load_pretrained_vae(cfg, model, logger): + state_dict = torch.load(cfg.TRAIN.PRETRAINED_VAE, + map_location="cpu")['state_dict'] + logger.info(f"Loading pretrain vae from {cfg.TRAIN.PRETRAINED_VAE}") + # Extract encoder/decoder + from collections import OrderedDict + vae_dict = OrderedDict() + for k, v in state_dict.items(): + if "motion_vae" in k: + name = k.replace("motion_vae.", "") + vae_dict[name] = v + elif "vae" in k: + name = k.replace("vae.", "") + vae_dict[name] = v + if hasattr(model, 'vae'): + model.vae.load_state_dict(vae_dict, strict=True) + else: + model.motion_vae.load_state_dict(vae_dict, strict=True) + + return model diff --git a/mGPT/utils/logger.py b/mGPT/utils/logger.py new file mode 100644 index 0000000..6e1cafe --- /dev/null +++ b/mGPT/utils/logger.py @@ -0,0 +1,68 @@ +from pathlib import Path +import os +import time +import logging +from omegaconf import OmegaConf +from pytorch_lightning.utilities.rank_zero import rank_zero_only + +def create_logger(cfg, phase='train'): + # root dir set by cfg + root_output_dir = Path(cfg.FOLDER) + # set up logger + if not root_output_dir.exists(): + print('=> creating {}'.format(root_output_dir)) + root_output_dir.mkdir() + + cfg_name = cfg.NAME + model = cfg.model.target.split('.')[-2] + cfg_name = os.path.basename(cfg_name).split('.')[0] + + final_output_dir = root_output_dir / model / cfg_name + cfg.FOLDER_EXP = str(final_output_dir) + + time_str = time.strftime('%Y-%m-%d-%H-%M-%S') + + new_dir(cfg, phase, time_str, final_output_dir) + + head = '%(asctime)-15s %(message)s' + logger = config_logger(final_output_dir, time_str, 
phase, head) + if logger is None: + logger = logging.getLogger() + logger.setLevel(logging.CRITICAL) + logging.basicConfig(format=head) + return logger + + +@rank_zero_only +def config_logger(final_output_dir, time_str, phase, head): + log_file = '{}_{}_{}.log'.format('log', time_str, phase) + final_log_file = final_output_dir / log_file + logging.basicConfig(filename=str(final_log_file)) + logger = logging.getLogger() + logger.setLevel(logging.INFO) + console = logging.StreamHandler() + formatter = logging.Formatter(head) + console.setFormatter(formatter) + logging.getLogger('').addHandler(console) + file_handler = logging.FileHandler(final_log_file, 'w') + file_handler.setFormatter(logging.Formatter(head)) + file_handler.setLevel(logging.INFO) + logging.getLogger('').addHandler(file_handler) + return logger + + +@rank_zero_only +def new_dir(cfg, phase, time_str, final_output_dir): + # new experiment folder + cfg.TIME = str(time_str) + if os.path.exists(final_output_dir) and not os.path.exists(cfg.TRAIN.RESUME) and not cfg.DEBUG and phase not in ['test', 'demo']: + file_list = sorted(os.listdir(final_output_dir), reverse=True) + for item in file_list: + if item.endswith('.log'): + os.rename(str(final_output_dir), str(final_output_dir) + '_' + cfg.TIME) + break + final_output_dir.mkdir(parents=True, exist_ok=True) + # write config yaml + config_file = '{}_{}_{}.yaml'.format('config', time_str, phase) + final_config_file = final_output_dir / config_file + OmegaConf.save(config=cfg, f=final_config_file) diff --git a/mGPT/utils/misc.py b/mGPT/utils/misc.py new file mode 100644 index 0000000..4f2a68d --- /dev/null +++ b/mGPT/utils/misc.py @@ -0,0 +1,29 @@ +import torch + + +def to_numpy(tensor): + if torch.is_tensor(tensor): + return tensor.cpu().numpy() + elif type(tensor).__module__ != 'numpy': + raise ValueError("Cannot convert {} to numpy array".format( + type(tensor))) + return tensor + + +def to_torch(ndarray): + if type(ndarray).__module__ == 'numpy': + return torch.from_numpy(ndarray) + elif not torch.is_tensor(ndarray): + raise ValueError("Cannot convert {} to torch tensor".format( + type(ndarray))) + return ndarray + + +def cleanexit(): + import sys + import os + try: + sys.exit(0) + except SystemExit: + os._exit(0) + diff --git a/mGPT/utils/rotation_conversions.py b/mGPT/utils/rotation_conversions.py new file mode 100644 index 0000000..770c3bf --- /dev/null +++ b/mGPT/utils/rotation_conversions.py @@ -0,0 +1,551 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved. +# Check PYTORCH3D_LICENCE before use + +import functools +from typing import Optional + +import torch +import torch.nn.functional as F + + +""" +The transformation matrices returned from the functions in this file assume +the points on which the transformation will be applied are column vectors. +i.e. the R matrix is structured as + + R = [ + [Rxx, Rxy, Rxz], + [Ryx, Ryy, Ryz], + [Rzx, Rzy, Rzz], + ] # (3, 3) + +This matrix can be applied to column vectors by post multiplication +by the points e.g. + + points = [[0], [1], [2]] # (3 x 1) xyz coordinates of a point + transformed_points = R * points + +To apply the same matrix to points which are row vectors, the R matrix +can be transposed and pre multiplied by the points: + +e.g. + points = [[0, 1, 2]] # (1 x 3) xyz coordinates of a point + transformed_points = points * R.transpose(1, 0) +""" + + +def quaternion_to_matrix(quaternions): + """ + Convert rotations given as quaternions to rotation matrices. 
+ + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + r, i, j, k = torch.unbind(quaternions, -1) + two_s = 2.0 / (quaternions * quaternions).sum(-1) + + o = torch.stack( + ( + 1 - two_s * (j * j + k * k), + two_s * (i * j - k * r), + two_s * (i * k + j * r), + two_s * (i * j + k * r), + 1 - two_s * (i * i + k * k), + two_s * (j * k - i * r), + two_s * (i * k - j * r), + two_s * (j * k + i * r), + 1 - two_s * (i * i + j * j), + ), + -1, + ) + return o.reshape(quaternions.shape[:-1] + (3, 3)) + + +def _copysign(a, b): + """ + Return a tensor where each element has the absolute value taken from the, + corresponding element of a, with sign taken from the corresponding + element of b. This is like the standard copysign floating-point operation, + but is not careful about negative 0 and NaN. + + Args: + a: source tensor. + b: tensor whose signs will be used, of the same shape as a. + + Returns: + Tensor of the same shape as a with the signs of b. + """ + signs_differ = (a < 0) != (b < 0) + return torch.where(signs_differ, -a, a) + + +def _sqrt_positive_part(x): + """ + Returns torch.sqrt(torch.max(0, x)) + but with a zero subgradient where x is 0. + """ + ret = torch.zeros_like(x) + positive_mask = x > 0 + ret[positive_mask] = torch.sqrt(x[positive_mask]) + return ret + + +def matrix_to_quaternion(matrix): + """ + Convert rotations given as rotation matrices to quaternions. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + m00 = matrix[..., 0, 0] + m11 = matrix[..., 1, 1] + m22 = matrix[..., 2, 2] + o0 = 0.5 * _sqrt_positive_part(1 + m00 + m11 + m22) + x = 0.5 * _sqrt_positive_part(1 + m00 - m11 - m22) + y = 0.5 * _sqrt_positive_part(1 - m00 + m11 - m22) + z = 0.5 * _sqrt_positive_part(1 - m00 - m11 + m22) + o1 = _copysign(x, matrix[..., 2, 1] - matrix[..., 1, 2]) + o2 = _copysign(y, matrix[..., 0, 2] - matrix[..., 2, 0]) + o3 = _copysign(z, matrix[..., 1, 0] - matrix[..., 0, 1]) + return torch.stack((o0, o1, o2, o3), -1) + + +def _axis_angle_rotation(axis: str, angle): + """ + Return the rotation matrices for one of the rotations about an axis + of which Euler angles describe, for each value of the angle given. + + Args: + axis: Axis label "X" or "Y or "Z". + angle: any shape tensor of Euler angles in radians + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + + cos = torch.cos(angle) + sin = torch.sin(angle) + one = torch.ones_like(angle) + zero = torch.zeros_like(angle) + + if axis == "X": + R_flat = (one, zero, zero, zero, cos, -sin, zero, sin, cos) + if axis == "Y": + R_flat = (cos, zero, sin, zero, one, zero, -sin, zero, cos) + if axis == "Z": + R_flat = (cos, -sin, zero, sin, cos, zero, zero, zero, one) + + return torch.stack(R_flat, -1).reshape(angle.shape + (3, 3)) + + +def euler_angles_to_matrix(euler_angles, convention: str): + """ + Convert rotations given as Euler angles in radians to rotation matrices. + + Args: + euler_angles: Euler angles in radians as tensor of shape (..., 3). + convention: Convention string of three uppercase letters from + {"X", "Y", and "Z"}. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). 
+ """ + if euler_angles.dim() == 0 or euler_angles.shape[-1] != 3: + raise ValueError("Invalid input euler angles.") + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + matrices = map(_axis_angle_rotation, convention, torch.unbind(euler_angles, -1)) + return functools.reduce(torch.matmul, matrices) + + +def _angle_from_tan( + axis: str, other_axis: str, data, horizontal: bool, tait_bryan: bool +): + """ + Extract the first or third Euler angle from the two members of + the matrix which are positive constant times its sine and cosine. + + Args: + axis: Axis label "X" or "Y or "Z" for the angle we are finding. + other_axis: Axis label "X" or "Y or "Z" for the middle axis in the + convention. + data: Rotation matrices as tensor of shape (..., 3, 3). + horizontal: Whether we are looking for the angle for the third axis, + which means the relevant entries are in the same row of the + rotation matrix. If not, they are in the same column. + tait_bryan: Whether the first and third axes in the convention differ. + + Returns: + Euler Angles in radians for each matrix in data as a tensor + of shape (...). + """ + + i1, i2 = {"X": (2, 1), "Y": (0, 2), "Z": (1, 0)}[axis] + if horizontal: + i2, i1 = i1, i2 + even = (axis + other_axis) in ["XY", "YZ", "ZX"] + if horizontal == even: + return torch.atan2(data[..., i1], data[..., i2]) + if tait_bryan: + return torch.atan2(-data[..., i2], data[..., i1]) + return torch.atan2(data[..., i2], -data[..., i1]) + + +def _index_from_letter(letter: str): + if letter == "X": + return 0 + if letter == "Y": + return 1 + if letter == "Z": + return 2 + + +def matrix_to_euler_angles(matrix, convention: str): + """ + Convert rotations given as rotation matrices to Euler angles in radians. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + convention: Convention string of three uppercase letters. + + Returns: + Euler angles in radians as tensor of shape (..., 3). + """ + if len(convention) != 3: + raise ValueError("Convention must have 3 letters.") + if convention[1] in (convention[0], convention[2]): + raise ValueError(f"Invalid convention {convention}.") + for letter in convention: + if letter not in ("X", "Y", "Z"): + raise ValueError(f"Invalid letter {letter} in convention string.") + if matrix.size(-1) != 3 or matrix.size(-2) != 3: + raise ValueError(f"Invalid rotation matrix shape f{matrix.shape}.") + i0 = _index_from_letter(convention[0]) + i2 = _index_from_letter(convention[2]) + tait_bryan = i0 != i2 + if tait_bryan: + central_angle = torch.asin( + matrix[..., i0, i2] * (-1.0 if i0 - i2 in [-1, 2] else 1.0) + ) + else: + central_angle = torch.acos(matrix[..., i0, i0]) + + o = ( + _angle_from_tan( + convention[0], convention[1], matrix[..., i2], False, tait_bryan + ), + central_angle, + _angle_from_tan( + convention[2], convention[1], matrix[..., i0, :], True, tait_bryan + ), + ) + return torch.stack(o, -1) + + +def random_quaternions( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random quaternions representing rotations, + i.e. versors with nonnegative real part. + + Args: + n: Number of quaternions in a batch to return. + dtype: Type to return. + device: Desired device of returned tensor. 
Default: + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Quaternions as tensor of shape (N, 4). + """ + o = torch.randn((n, 4), dtype=dtype, device=device, requires_grad=requires_grad) + s = (o * o).sum(1) + o = o / _copysign(torch.sqrt(s), o[:, 0])[:, None] + return o + + +def random_rotations( + n: int, dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate random rotations as 3x3 rotation matrices. + + Args: + n: Number of rotation matrices in a batch to return. + dtype: Type to return. + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type. + requires_grad: Whether the resulting tensor should have the gradient + flag set. + + Returns: + Rotation matrices as tensor of shape (n, 3, 3). + """ + quaternions = random_quaternions( + n, dtype=dtype, device=device, requires_grad=requires_grad + ) + return quaternion_to_matrix(quaternions) + + +def random_rotation( + dtype: Optional[torch.dtype] = None, device=None, requires_grad=False +): + """ + Generate a single random 3x3 rotation matrix. + + Args: + dtype: Type to return + device: Device of returned tensor. Default: if None, + uses the current device for the default tensor type + requires_grad: Whether the resulting tensor should have the gradient + flag set + + Returns: + Rotation matrix as tensor of shape (3, 3). + """ + return random_rotations(1, dtype, device, requires_grad)[0] + + +def standardize_quaternion(quaternions): + """ + Convert a unit quaternion to a standard form: one in which the real + part is non negative. + + Args: + quaternions: Quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Standardized quaternions as tensor of shape (..., 4). + """ + return torch.where(quaternions[..., 0:1] < 0, -quaternions, quaternions) + + +def quaternion_raw_multiply(a, b): + """ + Multiply two quaternions. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions shape (..., 4). + """ + aw, ax, ay, az = torch.unbind(a, -1) + bw, bx, by, bz = torch.unbind(b, -1) + ow = aw * bw - ax * bx - ay * by - az * bz + ox = aw * bx + ax * bw + ay * bz - az * by + oy = aw * by - ax * bz + ay * bw + az * bx + oz = aw * bz + ax * by - ay * bx + az * bw + return torch.stack((ow, ox, oy, oz), -1) + + +def quaternion_multiply(a, b): + """ + Multiply two quaternions representing rotations, returning the quaternion + representing their composition, i.e. the versor with nonnegative real part. + Usual torch rules for broadcasting apply. + + Args: + a: Quaternions as tensor of shape (..., 4), real part first. + b: Quaternions as tensor of shape (..., 4), real part first. + + Returns: + The product of a and b, a tensor of quaternions of shape (..., 4). + """ + ab = quaternion_raw_multiply(a, b) + return standardize_quaternion(ab) + + +def quaternion_invert(quaternion): + """ + Given a quaternion representing rotation, get the quaternion representing + its inverse. + + Args: + quaternion: Quaternions as tensor of shape (..., 4), with real part + first, which must be versors (unit quaternions). + + Returns: + The inverse, a tensor of quaternions of shape (..., 4). 
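Since `mGPT/utils/rotation_conversions.py` largely mirrors the helpers in `geometry_tools.py`, one additional property worth illustrating here is that quaternion composition matches matrix composition; a minimal sketch (arbitrary seed, assumed module path):

```python
import torch

from mGPT.utils.rotation_conversions import (quaternion_multiply,
                                             quaternion_to_matrix, random_quaternions)

torch.manual_seed(0)
a, b = random_quaternions(2)         # two unit quaternions, shape (4,) each

lhs = quaternion_to_matrix(quaternion_multiply(a, b))
rhs = quaternion_to_matrix(a) @ quaternion_to_matrix(b)

print(torch.allclose(lhs, rhs, atol=1e-5))  # expected: True, i.e. R(a*b) == R(a) @ R(b)
```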
+ """ + + return quaternion * quaternion.new_tensor([1, -1, -1, -1]) + + +def quaternion_apply(quaternion, point): + """ + Apply the rotation given by a quaternion to a 3D point. + Usual torch rules for broadcasting apply. + + Args: + quaternion: Tensor of quaternions, real part first, of shape (..., 4). + point: Tensor of 3D points of shape (..., 3). + + Returns: + Tensor of rotated points of shape (..., 3). + """ + if point.size(-1) != 3: + raise ValueError(f"Points are not in 3D, f{point.shape}.") + real_parts = point.new_zeros(point.shape[:-1] + (1,)) + point_as_quaternion = torch.cat((real_parts, point), -1) + out = quaternion_raw_multiply( + quaternion_raw_multiply(quaternion, point_as_quaternion), + quaternion_invert(quaternion), + ) + return out[..., 1:] + + +def axis_angle_to_matrix(axis_angle): + """ + Convert rotations given as axis/angle to rotation matrices. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + Rotation matrices as tensor of shape (..., 3, 3). + """ + return quaternion_to_matrix(axis_angle_to_quaternion(axis_angle)) + + +def matrix_to_axis_angle(matrix): + """ + Convert rotations given as rotation matrices to axis/angle. + + Args: + matrix: Rotation matrices as tensor of shape (..., 3, 3). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. + """ + return quaternion_to_axis_angle(matrix_to_quaternion(matrix)) + + +def axis_angle_to_quaternion(axis_angle): + """ + Convert rotations given as axis/angle to quaternions. + + Args: + axis_angle: Rotations given as a vector in axis angle form, + as a tensor of shape (..., 3), where the magnitude is + the angle turned anticlockwise in radians around the + vector's direction. + + Returns: + quaternions with real part first, as tensor of shape (..., 4). + """ + angles = torch.norm(axis_angle, p=2, dim=-1, keepdim=True) + half_angles = 0.5 * angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + quaternions = torch.cat( + [torch.cos(half_angles), axis_angle * sin_half_angles_over_angles], dim=-1 + ) + return quaternions + + +def quaternion_to_axis_angle(quaternions): + """ + Convert rotations given as quaternions to axis/angle. + + Args: + quaternions: quaternions with real part first, + as tensor of shape (..., 4). + + Returns: + Rotations given as a vector in axis angle form, as a tensor + of shape (..., 3), where the magnitude is the angle + turned anticlockwise in radians around the vector's + direction. 
+ """ + norms = torch.norm(quaternions[..., 1:], p=2, dim=-1, keepdim=True) + half_angles = torch.atan2(norms, quaternions[..., :1]) + angles = 2 * half_angles + eps = 1e-6 + small_angles = angles.abs() < eps + sin_half_angles_over_angles = torch.empty_like(angles) + sin_half_angles_over_angles[~small_angles] = ( + torch.sin(half_angles[~small_angles]) / angles[~small_angles] + ) + # for x small, sin(x/2) is about x/2 - (x/2)^3/6 + # so sin(x/2)/x is about 1/2 - (x*x)/48 + sin_half_angles_over_angles[small_angles] = ( + 0.5 - (angles[small_angles] * angles[small_angles]) / 48 + ) + return quaternions[..., 1:] / sin_half_angles_over_angles + + +def rotation_6d_to_matrix(d6: torch.Tensor) -> torch.Tensor: + """ + Converts 6D rotation representation by Zhou et al. [1] to rotation matrix + using Gram--Schmidt orthogonalisation per Section B of [1]. + Args: + d6: 6D rotation representation, of size (*, 6) + + Returns: + batch of rotation matrices of size (*, 3, 3) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + + a1, a2 = d6[..., :3], d6[..., 3:] + b1 = F.normalize(a1, dim=-1) + b2 = a2 - (b1 * a2).sum(-1, keepdim=True) * b1 + b2 = F.normalize(b2, dim=-1) + b3 = torch.cross(b1, b2, dim=-1) + return torch.stack((b1, b2, b3), dim=-2) + + +def matrix_to_rotation_6d(matrix: torch.Tensor) -> torch.Tensor: + """ + Converts rotation matrices to 6D rotation representation by Zhou et al. [1] + by dropping the last row. Note that 6D representation is not unique. + Args: + matrix: batch of rotation matrices of size (*, 3, 3) + + Returns: + 6D rotation representation, of size (*, 6) + + [1] Zhou, Y., Barnes, C., Lu, J., Yang, J., & Li, H. + On the Continuity of Rotation Representations in Neural Networks. + IEEE Conference on Computer Vision and Pattern Recognition, 2019. + Retrieved from http://arxiv.org/abs/1812.07035 + """ + return matrix[..., :2, :].clone().reshape(*matrix.size()[:-2], 6) diff --git a/mGPT/utils/sample_utils.py b/mGPT/utils/sample_utils.py new file mode 100644 index 0000000..724b109 --- /dev/null +++ b/mGPT/utils/sample_utils.py @@ -0,0 +1,18 @@ +import logging +from pathlib import Path +logger = logging.getLogger(__name__) + +def cfg_mean_nsamples_resolution(cfg): + if cfg.mean and cfg.number_of_samples > 1: + logger.error("All the samples will be the mean.. 
cfg.number_of_samples=1 will be forced.") + cfg.number_of_samples = 1 + + return cfg.number_of_samples == 1 + + +def get_path(sample_path: Path, is_amass: bool, gender: str, split: str, onesample: bool, mean: bool, fact: float): + extra_str = ("_mean" if mean else "") if onesample else "_multi" + fact_str = "" if fact == 1 else f"{fact}_" + gender_str = gender + "_" if is_amass else "" + path = sample_path / f"{fact_str}{gender_str}{split}{extra_str}" + return path diff --git a/mGPT/utils/temos_utils.py b/mGPT/utils/temos_utils.py new file mode 100644 index 0000000..888fd69 --- /dev/null +++ b/mGPT/utils/temos_utils.py @@ -0,0 +1,133 @@ +from typing import Dict, List + +import numpy as np +import torch +from torch import Tensor + +import mGPT.utils.geometry_conver as geometry_conver + + +def lengths_to_mask(lengths: List[int], + device: torch.device, + max_len: int = None) -> Tensor: + lengths = torch.tensor(lengths, device=device) + max_len = max_len if max_len else max(lengths) + mask = torch.arange(max_len, device=device).expand( + len(lengths), max_len) < lengths.unsqueeze(1) + return mask + + +def detach_to_numpy(tensor): + return tensor.detach().cpu().numpy() + + +def remove_padding(tensors, lengths): + return [ + tensor[:tensor_length] + for tensor, tensor_length in zip(tensors, lengths) + ] + + +def nfeats_of(rottype): + if rottype in ["rotvec", "axisangle"]: + return 3 + elif rottype in ["rotquat", "quaternion"]: + return 4 + elif rottype in ["rot6d", "6drot", "rotation6d"]: + return 6 + elif rottype in ["rotmat"]: + return 9 + else: + return TypeError("This rotation type doesn't have features.") + + +def axis_angle_to(newtype, rotations): + if newtype in ["matrix"]: + rotations = geometry_conver.axis_angle_to_matrix(rotations) + return rotations + elif newtype in ["rotmat"]: + rotations = geometry_conver.axis_angle_to_matrix(rotations) + rotations = matrix_to("rotmat", rotations) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = geometry_conver.axis_angle_to_matrix(rotations) + rotations = matrix_to("rot6d", rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = geometry_conver.axis_angle_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + return rotations + else: + raise NotImplementedError + + +def matrix_to(newtype, rotations): + if newtype in ["matrix"]: + return rotations + if newtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 9)) + return rotations + elif newtype in ["rot6d", "6drot", "rotation6d"]: + rotations = geometry_conver.matrix_to_rotation_6d(rotations) + return rotations + elif newtype in ["rotquat", "quaternion"]: + rotations = geometry_conver.matrix_to_quaternion(rotations) + return rotations + elif newtype in ["rotvec", "axisangle"]: + rotations = geometry_conver.matrix_to_axis_angle(rotations) + return rotations + else: + raise NotImplementedError + + +def to_matrix(oldtype, rotations): + if oldtype in ["matrix"]: + return rotations + if oldtype in ["rotmat"]: + rotations = rotations.reshape((*rotations.shape[:-2], 3, 3)) + return rotations + elif oldtype in ["rot6d", "6drot", "rotation6d"]: + rotations = geometry_conver.rotation_6d_to_matrix(rotations) + return rotations + elif oldtype in ["rotquat", "quaternion"]: + rotations = geometry_conver.quaternion_to_matrix(rotations) + return rotations + elif oldtype in ["rotvec", "axisangle"]: + rotations = geometry_conver.axis_angle_to_matrix(rotations) + return rotations + else: + 
raise NotImplementedError + + +# TODO: use a real subsampler.. +def subsample(num_frames, last_framerate, new_framerate): + step = int(last_framerate / new_framerate) + assert step >= 1 + frames = np.arange(0, num_frames, step) + return frames + + +# TODO: use a real upsampler.. +def upsample(motion, last_framerate, new_framerate): + step = int(new_framerate / last_framerate) + assert step >= 1 + + # Alpha blending => interpolation + alpha = np.linspace(0, 1, step + 1) + last = np.einsum("l,...->l...", 1 - alpha, motion[:-1]) + new = np.einsum("l,...->l...", alpha, motion[1:]) + + chuncks = (last + new)[:-1] + output = np.concatenate(chuncks.swapaxes(1, 0)) + # Don't forget the last one + output = np.concatenate((output, motion[[-1]])) + return output + + +if __name__ == "__main__": + motion = np.arange(105) + submotion = motion[subsample(len(motion), 100.0, 12.5)] + newmotion = upsample(submotion, 12.5, 100) + + print(newmotion) diff --git a/mGPT/utils/tensors.py b/mGPT/utils/tensors.py new file mode 100644 index 0000000..1661438 --- /dev/null +++ b/mGPT/utils/tensors.py @@ -0,0 +1,74 @@ +import torch + + +def lengths_to_mask(lengths): + max_len = max(lengths) + mask = torch.arange(max_len, device=lengths.device).expand( + len(lengths), max_len) < lengths.unsqueeze(1) + return mask + + +def collate_tensors(batch): + dims = batch[0].dim() + max_size = [max([b.size(i) for b in batch]) for i in range(dims)] + size = (len(batch),) + tuple(max_size) + canvas = batch[0].new_zeros(size=size) + for i, b in enumerate(batch): + sub_tensor = canvas[i] + for d in range(dims): + sub_tensor = sub_tensor.narrow(d, 0, b.size(d)) + sub_tensor.add_(b) + return canvas + + +def collate(batch): + databatch = [b[0] for b in batch] + labelbatch = [b[1] for b in batch] + lenbatch = [len(b[0][0][0]) for b in batch] + + databatchTensor = collate_tensors(databatch) + labelbatchTensor = torch.as_tensor(labelbatch) + lenbatchTensor = torch.as_tensor(lenbatch) + + maskbatchTensor = lengths_to_mask(lenbatchTensor) + # x - [bs, njoints, nfeats, lengths] + # - nfeats, the representation of a joint + # y - [bs] + # mask - [bs, lengths] + # lengths - [bs] + batch = {"x": databatchTensor, "y": labelbatchTensor, + "mask": maskbatchTensor, 'lengths': lenbatchTensor} + return batch + + +# slow version with padding +def collate_data3d_slow(batch): + batchTensor = {} + for key in batch[0].keys(): + databatch = [b[key] for b in batch] + batchTensor[key] = collate_tensors(databatch) + batch = batchTensor + # theta - [bs, lengths, 85], theta shape (85,) + # - (np.array([1., 0., 0.]), pose(72), shape(10)), axis=0) + # kp_2d - [bs, lengths, njoints, nfeats], nfeats (x,y,weight) + # kp_3d - [bs, lengths, njoints, nfeats], nfeats (x,y,z) + # w_smpl - [bs, lengths] zeros + # w_3d - [bs, lengths] zeros + return batch + +def collate_data3d(batch): + batchTensor = {} + for key in batch[0].keys(): + databatch = [b[key] for b in batch] + if key == "paths": + batchTensor[key] = databatch + else: + batchTensor[key] = torch.stack(databatch,axis=0) + batch = batchTensor + # theta - [bs, lengths, 85], theta shape (85,) + # - (np.array([1., 0., 0.]), pose(72), shape(10)), axis=0) + # kp_2d - [bs, lengths, njoints, nfeats], nfeats (x,y,weight) + # kp_3d - [bs, lengths, njoints, nfeats], nfeats (x,y,z) + # w_smpl - [bs, lengths] zeros + # w_3d - [bs, lengths] zeros + return batch diff --git a/prepare/download_pretrained_models.sh b/prepare/download_pretrained_models.sh new file mode 100644 index 0000000..4db7b3b --- /dev/null +++ 
b/prepare/download_pretrained_models.sh @@ -0,0 +1,9 @@ +mkdir -p checkpoints/ +cd checkpoints/ +echo -e "The pretrained models will stored in the 'checkpoints' folder\n" +mkdir -p mld_humanml3d_checkpoint/ + +git lfs install +git clone https://huggingface.co/bill-jiang/MotionGPT-base + +echo -e "Downloading done!" diff --git a/prepare/download_smpl_model.sh b/prepare/download_smpl_model.sh new file mode 100644 index 0000000..a23dc75 --- /dev/null +++ b/prepare/download_smpl_model.sh @@ -0,0 +1,14 @@ +mkdir -p deps/ +cd deps/ + +echo "The smpl model will be stored in the './deps' folder" + +# SMPL Models +echo "Downloading" +gdown "https://drive.google.com/uc?id=1qrFkPZyRwRGd0Q3EY76K8oJaIgs_WK9i" +echo "Extracting" +tar xfzv smpl.tar.gz +echo "Cleaning" +rm smpl.tar.gz + +echo "Downloading done!" diff --git a/prepare/download_t2m_evaluators.sh b/prepare/download_t2m_evaluators.sh new file mode 100644 index 0000000..6eb7cb2 --- /dev/null +++ b/prepare/download_t2m_evaluators.sh @@ -0,0 +1,14 @@ +mkdir -p deps/ +cd deps/ + +echo "The t2m evaluators will be stored in the './deps' folder" + +# HumanAct12 poses +echo "Downloading" +gdown "https://drive.google.com/uc?id=1AYsmEG8I3fAAoraT4vau0GnesWBWyeT8" +echo "Extracting" +tar xfzv t2m.tar.gz +echo "Cleaning" +rm t2m.tar.gz + +echo "Downloading done!" diff --git a/prepare/instructions/template_instructions.json b/prepare/instructions/template_instructions.json new file mode 100644 index 0000000..6380d94 --- /dev/null +++ b/prepare/instructions/template_instructions.json @@ -0,0 +1,3623 @@ +{ + "Text-to-Motion": { + "caption": { + "class": "t2m", + "input": [ + " represents the movements of someone practicing Muay Thai.", + "Give me a gesture that corresponds to ", + "Demonstrate a sequence of movements that symbolizes the sentiment of Input: ", + "Please create a motion that represents the power of to create positive change in the field of human rights and social justice.", + "I need a motion that represents the power of to create progress. Can you generate it for me?", + " represents the movements of someone practicing kickboxing.", + "Demonstrate a dance that symbolizes the feeling of ", + "Create a dance that communicates the essence of Input: ", + "Show me a motion that captures the essence of Input: ", + "Show me a sequence of movements that illustrates Input: ", + "Show me a dance that conveys the meaning of ", + "Describe the movements of a person doing a butterfly stroke in .", + "Create to describe a person playing golf.", + "Develop a dance that captures the feeling of ", + "Create a motion that reflects the sentiment of Input: ", + " represents the movements of someone practicing aerobics.", + "Produce a movement that evokes the feeling of ", + "Give me a motion that portrays the sentiment of ", + "Give me a dance that captures the meaning of ", + "Develop a motion that captures the idea of ", + "Develop a gesture that captures the essence of Input: ", + "Show me a motion that embodies the concept of Input: ", + "Can you generate a motion that represents the anxiety of ?", + "Can you generate a motion that represents the strength of ?", + "Create a dance that embodies the sentiment of ", + "Demonstrate a dance that symbolizes the mood of Input: ", + "I need a motion that represents the power of to inspire growth. 
Can you generate it for me?", + "Generate for a person practicing trampoline.", + "Create to describe a person playing badminton.", + "Show me a sequence of movements that evokes ", + "Can you generate a motion that represents the perseverance of ?", + " describes the movements of someone practicing aerial silks.", + "Please create a motion that represents the intuition of .", + "Here's a motion for the caption: ", + " describes the movements of someone practicing salsa fusion dance.", + "Give me a motion that symbolizes Input: ", + "Create a choreography for the caption: ", + "Develop a gesture that captures the feeling of Input: ", + "Give me a sequence of movements that embodies Input: ", + "Show me a sequence of movements that evokes Input: ", + "Give me a dance that conveys the meaning of Input: ", + "Describe the movements of a person doing a backhandspring in .", + "Can you generate a motion that represents the power of to create positive change in the field of education and literacy?", + " represents the movements of someone practicing tai chi.", + "Create a sequence of movements that conveys Input: ", + " describes the movements of someone practicing Bollywood fusion dance.", + "Please generate a motion that tells a story related to .", + "I need a motion that represents the power of to create positive change in the fight against hunger and malnutrition. Can you generate it for me?", + "Describe the movements of a person doing a snatch in .", + "Describe the movements of a person doing a burpee in .", + "Describe the movements of a person doing a muscle-up to handstand in .", + "Please create a motion that represents the power of to inspire learning and growth.", + "Please create a human motion that depicts the beauty of .", + "Demonstrate a gesture that communicates ", + "Develop a sequence of movements that expresses ", + "Can you generate a motion that represents the power of to inspire hope?", + "Can you generate a motion that represents the power of to transform lives?", + "I need a motion that represents the power of to create understanding between different cultures. Can you generate it for me?", + "Generate a motion that embodies the idea of ", + "I need a motion that represents the freedom of . Can you generate it for me?", + "Create a motion that interprets Input: ", + "Show me a motion that exemplifies the sentiment of ", + "Create to describe a person playing table tennis.", + "I see in the room that ", + "Please create a motion that represents the power of to create unity.", + "Can you generate a motion that represents the sadness of ?", + "Generate a motion that captures the feeling of ", + "Generate a motion that conveys the sentiment of ", + "Create a sequence of movements based on Input: ", + "Generate a gesture that portrays ", + "Produce a motion that visualizes the concept of Input: ", + "Can you generate a motion that represents the power of to bring people together for a common goal?", + "Can you generate a motion that symbolizes the hope of ?", + "Produce a motion that symbolizes the meaning of ", + "Produce a movement that illustrates Input: ", + "Give me a dance that visualizes the essence of ", + "Develop a gesture that evokes the emotion of Input: ", + "Give me a dance that conveys the meaning of ", + " describes the movements of someone practicing Argentine tango.", + "I want a motion that represents the power of to inspire others. 
Can you generate that?", + "Describe the movements of a person doing a kettlebell swing in .", + "Generate a choreography for the caption: Input: ", + "Generate a motion that conveys Input: ", + "Give me a sequence of movements that embodies ", + "Show me a motion that conveys the emotion of ", + "Generate a motion that conveys the sentiment of Input: ", + "Create a sequence of movements that exemplifies ", + "Produce a dance routine inspired by ", + "Generate a motion that represents the sentiment of Input: ", + "Describe the movements of a person doing a pistol squat in .", + "Demonstrate a sequence of movements that symbolizes Input: ", + "Create a motion for the caption: Input: ", + " describes the movements of someone practicing ballroom dance.", + "Please create a motion that represents the curiosity of .", + " is the description of a man's movements, generate it for me.", + "Develop a dance that reflects the idea of ", + "Demonstrate a motion that interprets Input: ", + "Describe the movements of a person doing a clean and jerk in .", + " describes the movements of someone playing soccer.", + "I need a motion that represents the power of to create a better world for future generations. Can you generate it for me?", + "Give me a dance that captures the meaning of Input: ", + "Demonstrate a motion that reflects the essence of Input: ", + "Demonstrate a gesture that conveys the meaning of ", + "I want to see a motion that represents the pain of . Can you generate that?", + "Generate a motion that evokes Input: ", + " represents the movements of someone practicing hip hop step.", + "I want a motion that represents the power of to create positive change in the field of public health. Can you generate that?", + "Show me a dance that conveys the meaning of Input: ", + "Create a sequence of movements that communicates Input: ", + "Generate to describe a person doing a backflip.", + "Develop a gesture that evokes the emotion of ", + "I need a motion that represents the power of to inspire volunteerism and philanthropy. Can you generate it for me?", + "Describe the movements of a person doing a handstand press in .", + "Please create a motion that represents the chaos of .", + "Create a gesture that evokes the sentiment of ", + "Demonstrate a dance that symbolizes the mood of ", + "Show me a gesture that conveys Input: ", + "Here's a motion for the caption: Input: ", + "Demonstrate a sequence of movements that symbolizes the sentiment of ", + "Demonstrate a dance that illustrates ", + "I need a motion that represents the transformation of . Can you generate it for me?", + "Can you generate a motion that represents the power of teamwork in ?", + "I want a motion that represents the beauty of . Can you generate that?", + "I want a motion that represents the power of to create positive impact. Can you generate that?", + "Create to describe a person playing rugby.", + "Develop a motion sequence that expresses ", + "Develop a motion that portrays the concept of ", + "Can you generate a motion that represents the creativity of ?", + "Generate for a person practicing acrobatics.", + "Develop a dance that represents the mood of ", + "Please create a motion that represents the power of to inspire change.", + "Generate a motion that reflects the feeling of ", + "I need a motion that represents the power of to create positive change in healthcare. 
Can you generate it for me?", + "Please create a motion that represents the power of to inspire change in society.", + "Give me a motion that expresses ", + "Generate a motion that symbolizes the sentiment of Input: ", + "Please create a motion that represents the power of to inspire innovation.", + "Please create a motion that represents the vulnerability of .", + " represents the movements of someone practicing krumping dance.", + "Give me a gesture that represents the mood of Input: ", + "I want a motion that represents the logic of . Can you generate that?", + "Show me a motion that embodies the concept of ", + " represents a man's movements which may be like: ", + "Produce a motion that exemplifies ", + "I would like to see the motion of ", + "Can you show me that ", + "I want a motion that represents the power of to create positive change in the environment. Can you generate that?", + "Generate a motion that reflects the feeling of Input: ", + "Develop a dance that captures the feeling of Input: ", + "Please create a motion that represents the power of to bring diverse communities together.", + "Give me a motion that expresses Input: ", + "Produce a movement that represents the essence of Input: ", + "Create to describe a person playing dodgeball.", + "Demonstrate a dance that conveys the mood of Input: ", + "Can you generate a motion that portrays in a realistic way?", + "Produce a movement that reflects Input: ", + "Create a sequence of movements that exemplifies Input: ", + "Show me a gesture that represents ", + " represents the movements of someone practicing waacking dance.", + "Produce a motion that reflects the meaning of Input: ", + "Demonstrate a motion that conveys the meaning of Input: ", + "Generate for a person practicing skateboarding.", + "Can you generate a motion that represents the order of ?", + "Give me a motion that represents the idea of ", + " describes the movements of someone practicing contemporary dance.", + "Give me a motion that exemplifies Input: ", + "Please create a motion that represents the present of .", + "Develop a movement that represents ", + "Develop a motion that communicates the mood of ", + "I want a motion that represents the power of to create a better world. Can you generate that?", + "Please create a motion that depicts in a creative way.", + "Show me a sequence of movements that evokes the feeling of Input: ", + "Please create a motion that represents the power of to inspire creativity and expression.", + "Produce a sequence of movements that captures the feeling of Input: ", + "Demonstrate a sequence of movements that represents ", + "I need a motion that represents the evolution of . Can you generate it for me?", + "Produce a sequence of movements that reflects the sentiment of Input: ", + "Create a sequence of movements that embodies the meaning of Input: ", + "Show me how to move for the caption: Input: ", + "Generate a dance inspired by ", + "Produce a dance that embodies ", + "Create a gesture that embodies ", + "I need a motion that represents the power of to create connection. Can you generate it for me?", + "Generate a motion that evokes ", + "Create a gesture that represents the sentiment of ", + "Please create a motion that represents the magic of .", + "I want to see a motion that evokes the same emotion as . 
Can you generate that?", + "Show me a dance that captures the essence of Input: ", + "Demonstrate a motion that reflects the essence of ", + "Please create a motion that represents the power of Input: ", + "Give me a gesture that depicts ", + "Develop a dance that captures the mood of Input: ", + "Can you generate a motion that represents the power of to create positive change in the media industry?", + "Produce a motion that matches Input: ", + "Show me a sequence of movements that conveys the essence of Input: ", + "Can you generate a motion that represents the courage of ?", + "Give me a gesture that visualizes Input: ", + "Produce a motion that exemplifies the mood of Input: ", + "Demonstrate a gesture that symbolizes ", + "Please create a motion that represents the love of .", + "I want a motion that represents the power of to create positive change in education. Can you generate that?", + "Please create a motion that represents the freedom of .", + "Demonstrate a motion that evokes the mood of Input: ", + "Generate a dance that visualizes Input: ", + "Produce a movement that evokes the feeling of Input: ", + "Create to describe a person performing a tap dance.", + "Generate for a person practicing surfing.", + "Generate a motion that symbolizes the sentiment of ", + "Please create a motion that represents the power of to evoke emotions.", + "I'm looking for a motion that captures the essence of . Can you help me with that?", + "Please create a motion that represents the happiness of .", + "Create a sequence of movements based on ", + "Can you generate a motion that represents the power of to make a difference?", + "Develop a movement that represents Input: ", + "Generate a motion for the caption: Input: ", + "Demonstrate a sequence of movements that represents Input: ", + "Please generate a human motion that matches the tone of .", + "Show me a gesture that conveys ", + "Generate for a person practicing paragliding.", + "Develop a motion that portrays the essence of Input: ", + "Can you generate a motion that represents the power of to create change in the world?", + "I want a motion that represents the power of to create positive change in the world. Can you generate that?", + "Generate a choreography for the caption: ", + "Generate a sequence of movements that represents Input: ", + "Show me a gesture that communicates the sentiment of ", + "Describe the movements of a person doing a jump rope double under in .", + "Please create a motion that represents the power of to create a better society.", + "Generate a motion that embodies the idea of Input: ", + "Show me a motion that portrays ", + "Create a motion that embodies Input: ", + "Develop a sequence of movements that represents the emotion of ", + "Develop a motion that communicates the mood of Input: ", + "Create for a person practicing martial arts.", + "Generate for a person practicing snowboarding.", + "Create to describe a person playing ice hockey.", + "Produce a dance that exemplifies Input: ", + "Create a gesture that embodies the sentiment of Input: ", + "I want to see a motion that represents the spirituality of . 
Can you generate that?", + "Create a dance that embodies the sentiment of Input: ", + "Can you generate a motion that represents the power of to create positive change in the political landscape?", + "Please create a motion that represents the connection of .", + "Give me a motion that reflects the idea of ", + "Create a choreography for the caption: Input: ", + "Can you create a motion that expresses through body language?", + "Generate a gesture that represents ", + "Generate a motion that conveys ", + "I want a motion that represents the power of to create peace in the world. Can you generate that?", + "Generate a gesture that represents Input: ", + "Please create a motion that represents the innovation of .", + "Show me a motion that portrays Input: ", + "Can you generate a motion that represents the power of to heal?", + "Develop a motion that portrays the concept of Input: ", + "Please create a motion that represents the uniqueness of .", + "Please create a motion that represents the power of to create change.", + "Please create a motion that represents the power of to inspire entrepreneurship and innovation.", + "Produce a motion that visualizes the concept of ", + "What motion can you create for the caption: Input: ", + "Create a sequence of movements that symbolizes ", + "Generate a gesture that portrays Input: ", + "Demonstrate a dance that conveys the mood of ", + "I want a motion symbolizes the meaning of , Please show me the motion.", + "I want a motion that represents the power of to create positive change in the arts. Can you generate that?", + "Can you generate a motion that represents the power of to inspire creativity?", + "Can you generate a motion that represents the power of to inspire respect for diverse cultures and traditions?", + "Develop a dance that reflects the idea of Input: ", + "Generate for a person practicing BMX.", + "Please create a motion that represents the power of ", + "Please create a motion that represents the power of to inspire compassion and empathy.", + "Develop a motion that communicates Input: ", + "Produce a motion that captures the spirit of Input: ", + "Develop a motion sequence that expresses Input: ", + "I need a motion that represents the power of to create positive change in the business world. Can you generate it for me?", + "Please create a motion that represents the discipline of .", + "Develop a motion that symbolizes Input: ", + "Create a motion that visualizes Input: ", + "I need a motion that represents the impact of . Can you generate it for me?", + "Generate to describe a person doing a handstand.", + "Please create a motion that represents the progress of .", + "Can you generate a motion that represents the mystery of ?", + "Describe the movements of a person doing a high jump in .", + "Create a motion that interprets ", + "Generate a motion for the caption: ", + "Generate a motion that interprets the meaning of Input: ", + "Create to describe a person playing basketball.", + "Show me a motion that captures the essence of ", + "Generate a motion that represents the feeling of Input: ", + "Demonstrate a motion for the caption: ", + "Can you show me that Input: ", + "I need a human motion that conveys the feeling of . 
Can you generate it for me?", + "Please create a motion that represents the power of to inspire healthy living and wellness.", + "Generate for a person practicing rock climbing.", + "Can you generate a motion that represents the power of to create positive change?", + "I want a motion for the caption: ", + "Produce a movement that represents the essence of ", + "Can you generate a motion that represents the power of to inspire creativity and innovation in science and technology?", + "I want a motion that shows the intensity of . Can you generate that for me?", + "Generate a dance inspired by Input: ", + "I see in the room that Input: ", + "Develop a motion that captures the essence of Input: ", + "Please create a motion that represents the power of to create positive change in the field of cultural preservation and heritage.", + " represents the movements of someone practicing Pilates.", + "Create a motion for the caption: ", + "Demonstrate a sequence of movements that portrays ", + "Can you generate a motion that represents the knowledge of ?", + "I would like to see the motion of Input: ", + "Create a gesture that communicates the feeling of ", + "Create a gesture that conveys the meaning of ", + "Demonstrate a sequence of movements that portrays Input: ", + "Generate a dance that interprets Input: ", + "Create to describe a person playing softball.", + "Demonstrate a motion that represents the mood of ", + "Produce a dance that embodies Input: ", + "I want a motion that represents the courage of . Can you generate that?", + "Create a gesture that represents the sentiment of Input: ", + "Can you generate a motion that represents the power of to bring people together for a common cause?", + "Generate a motion that represents the feeling of ", + " describes the movements of someone practicing contemporary ballet.", + "Please create a motion that represents the power of to bring hope.", + "Demonstrate a motion that evokes the mood of ", + "Please create a motion that represents the power of to create a better future.", + "Generate a motion that interprets the meaning of ", + "Generate a motion that reflects the emotion of ", + "Please create a motion that represents the power of to create harmony in society.", + "Create a motion that depicts Input: ", + "Demonstrate a dance that conveys ", + "Describe the movements of a person doing a cartwheel in .", + "Generate for a person practicing free running.", + "Generate a sequence of movements for the caption: ", + "Demonstrate a motion that conveys the meaning of ", + " describes the movements of someone practicing salsa.", + "Demonstrate a sequence of movements that symbolizes ", + "Can you generate a motion that represents the power of to create positive change in the fight against poverty and inequality?", + "Please create a motion that represents the fear of .", + "Generate a motion that reflects the emotion of Input: ", + "Create to describe a person playing baseball.", + "Generate a sequence of movements that represents ", + "Describe the movements of a person walking down a flight of stairs in .", + "Develop a sequence of movements that evokes ", + "Give me a motion that conveys the idea of ", + "Give me a motion for the caption: Input: ", + "Can you generate a motion that represents the power of to create positive change in the animal rights movement?", + "Produce a motion that conveys the concept of ", + "Create a sequence of movements that embodies the meaning of ", + " describes the movements of someone practicing swing dance.", + 
"Demonstrate a dance that symbolizes the feeling of Input: ", + "Demonstrate a motion for the caption: Input: ", + "Give me a gesture that corresponds to Input: ", + "Produce a motion that reflects the meaning of ", + "Show me a sequence of movements that illustrates ", + "Develop a motion that symbolizes ", + "Generate a dance that reflects the sentiment of ", + "Show me a gesture that embodies the meaning of ", + "I need a motion that represents the power of to bring people together. Can you generate it for me?", + " represents the movements of someone practicing breaking dance.", + "Generate for a person practicing parkour.", + "Create a gesture that conveys the meaning of Input: ", + "Can you create a motion that is inspired by ?", + "Develop a dance inspired by the caption: Input: ", + "I want to see a motion that represents the passion of . Can you generate that?", + "Generate Motion: ", + "Demonstrate a dance that illustrates Input: ", + "Can you generate a motion that represents the power of to create positive change in mental health?", + "Please create a motion that represents the power of to inspire sustainability and conservation.", + "Create to describe a person swimming.", + "I need a motion that represents the harmony of . Can you generate it for me?", + " represents the movements of someone practicing Bollywood dance.", + " represents the movements of someone practicing locking dance.", + "Can you generate a motion that represents the power of to promote equality and inclusion?", + "Produce a sequence of movements that captures the feeling of ", + "Show me a gesture that conveys the mood of Input: ", + "Demonstrate a gesture that conveys the meaning of Input: ", + "I want a motion that represents the unity of . Can you generate that?", + "Generate for a person practicing slacklining.", + "I need a motion that represents the sadness of . Can you generate it for me?", + "Develop a motion that communicates ", + "Show me a motion that exemplifies the sentiment of Input: ", + "Produce a movement that illustrates ", + "Create a sequence of movements that symbolizes Input: ", + "I want a motion that represents the resilience of . Can you generate that?", + "I need a motion that represents the power of . 
Can you generate it for me?", + "Develop a dance that represents the mood of Input: ", + "I want a motion for the caption: Input: ", + "Generate for a person practicing bouldering.", + "Produce a motion that visualizes the sentiment of ", + "Can you generate a motion that represents the journey of ?", + "Develop a sequence of movements that evokes Input: ", + "Show me how to move for the caption: ", + "Produce a motion that conveys the concept of Input: ", + " represents the movements of someone practicing Zumba.", + "Generate for a person practicing capoeira.", + "Show me a gesture that conveys the mood of ", + "Create a motion that visualizes ", + "Produce a motion that matches ", + "Can you generate a human motion that represents ?", + "Please create a motion that represents the power of to inspire wellness and self-care.", + "Develop a motion that captures the idea of Input: ", + "Can you generate a motion that represents the power of to inspire action?", + "Show me a sequence of movements that evokes the feeling of ", + " represents the movements of someone practicing popping dance.", + "Create to describe a person playing ultimate frisbee.", + "Can you generate a motion that represents the future of ?", + "Give me a dance that visualizes the mood of ", + "Create a sequence of movements that communicates ", + "Give me a motion that exemplifies ", + "Demonstrate a motion that represents the mood of Input: ", + "Can you create a motion that represents using gestures?", + "Generate to describe a person practicing gymnastics.", + "Create a dance that symbolizes Input: ", + " represents the movements of someone practicing CrossFit.", + "Demonstrate a gesture that communicates Input: ", + "Can you generate a motion that represents the power of to overcome challenges?", + "Produce a dance that captures the meaning of ", + "Create a dance that communicates the essence of ", + "Please create a motion that represents the wonder of .", + "Give me a motion for the caption: ", + "Give me a motion that portrays the sentiment of Input: ", + "Create a gesture that communicates the feeling of Input: ", + "Give me a sequence of movements that captures the essence of ", + "Someone told me that he see , can you show me what he may see?", + "I need a motion that represents the power of to inspire renewable energy and sustainability. 
Can you generate it for me?", + "Can you generate a motion that represents the chaos of ?", + "Demonstrate a motion that interprets ", + "Generate a dance that reflects the sentiment of Input: ", + "Generate Motion: Input: ", + "Produce a dance that captures the meaning of Input: ", + " describes the movements of someone playing football.", + "Please create a motion that represents the joy of .", + "Develop a dance inspired by the caption: ", + "Give me a dance that visualizes the mood of Input: ", + "Can you generate a motion that represents the power of to create positive change in the fight against climate change?", + " describes the movements of someone playing tennis.", + "Please create a motion that represents the power of to inspire social responsibility and activism.", + "Produce a motion that captures the spirit of ", + "Create a gesture that embodies Input: ", + "Show me a motion that represents ", + "Please create a motion that represents the power of to create understanding.", + "Can you generate a motion that represents the power of to create positive change in the non-profit sector?", + " describes the movements of someone practicing belly dance.", + "Develop a gesture that captures the essence of ", + "Generate for a person practicing kiteboarding.", + "Give me a sequence of movements that captures the essence of Input: ", + "Please create a motion that represents the transformation of .", + "Produce a movement that visualizes ", + "Please create a motion that represents the power of to create a better world for all.", + "Describe the movements of a person doing a plank in .", + "Generate a dance that interprets ", + "Describe the movements of a person doing a muscle-up in .", + "I want a motion that represents the past of . Can you generate that?", + "Describe the movements of a person doing a triple axel in .", + "Can you generate a motion that represents the growth of ?", + "Develop a sequence of movements that represents the emotion of Input: ", + " describes the movements of someone practicing jazz dance.", + "I need a motion that represents the anger of . Can you generate it for me?", + "Can you generate a motion that represents the diversity of ?", + "I want a motion that represents the power of to create positive change in the field of peacebuilding and conflict resolution. 
Can you generate that?", + "Show me a sequence of movements that represents the idea of ", + "Give me a motion that conveys the idea of Input: ", + " represents the movements of someone practicing yoga.", + "Develop a motion that captures the essence of ", + "Can you generate a motion that represents the power of to inspire diversity, equity, and inclusion?", + "Demonstrate a gesture that symbolizes Input: ", + "Produce a sequence of movements that reflects the sentiment of ", + "Can you generate a motion that represents the power of to inspire positive change in the environment?", + "Show me a gesture that embodies the meaning of Input: ", + "Please create a motion that represents the power of .", + "Show me a gesture that represents Input: ", + " describes the movements of someone doing a pirouette.", + "Create a gesture that embodies the sentiment of ", + "Give me a motion that reflects the idea of Input: ", + "Show me a motion that conveys the emotion of Input: ", + "Create a motion that reflects the sentiment of ", + "Can you generate a motion that represents the power of to make a difference in people's lives?", + "Generate a motion that represents the sentiment of ", + "Show me a motion that represents Input: ", + "Show me a sequence of movements that conveys the essence of ", + "Develop a gesture that captures the feeling of ", + "Give me a motion that signifies ", + "Produce a motion that symbolizes the meaning of Input: ", + "Create a sequence of movements that conveys ", + " describes the movements of someone performing a ballet routine.", + "Generate a motion that captures the feeling of Input: ", + "Develop a sequence of movements that expresses Input: ", + " represents the movements of someone practicing voguing dance.", + "Create to describe a person playing volleyball.", + "Create a dance that symbolizes ", + "Develop a motion that portrays the essence of ", + "Please create a motion that represents the resilience of .", + "I want a motion that represents the power of to create positive change in the legal system. Can you generate that?", + "Can you generate a motion that represents the power of to create positive change in the field of science and technology?", + "Generate a sequence of movements for the caption: Input: ", + "I need a motion that represents the importance of . Can you generate it for me?", + "Produce a motion that visualizes the sentiment of Input: ", + "Create to describe a person playing lacrosse.", + "Please create a motion that represents the power of to inspire economic growth and prosperity.", + "Demonstrate a dance that conveys Input: ", + "I want a motion that represents the potential of . 
Can you generate that?", + "Show me a gesture that communicates the sentiment of Input: ", + " represents the movements of someone practicing flamenco dance.", + "Produce a motion that exemplifies the mood of ", + "Can you generate a motion that represents the power of to inspire justice?", + "Show me a sequence of movements that represents the idea of Input: ", + "Generate a sequence of movements that captures ", + "Create a gesture that evokes the sentiment of Input: ", + "Generate for a person running a marathon.", + "Demonstrate a gesture that exemplifies ", + "Demonstrate a gesture that exemplifies Input: ", + "Generate a sequence of movements that captures Input: ", + "Develop a dance that captures the mood of ", + "Create a motion that embodies ", + " describes the movements of someone practicing hip hop dance.", + "Can you generate a motion that represents the hate of ?", + "Show me a dance that captures the essence of ", + "Produce a dance routine inspired by Input: ", + "Give me a gesture that represents the mood of ", + "Give me a motion that symbolizes ", + "I need a motion that represents the determination of . Can you generate it for me?", + "Give me a gesture that visualizes ", + "Describe the movements of a person doing a handstand push-up in .", + "I need a motion that represents the power of to inspire innovation and progress. Can you generate it for me?", + "I want a motion that represents the peace of . Can you generate that?", + "Produce a motion that exemplifies Input: ", + "Please create a motion that represents the power of to inspire positive change in the global community.", + "What motion can you create for the caption: ", + "Produce a dance that exemplifies ", + "Create a motion that depicts ", + "Create to describe a person playing water polo.", + "I need a human motion that conveys the energy of . Can you generate it for me?", + "Give me a gesture that depicts Input: ", + "Create to describe a person playing cricket.", + "Give me a dance that visualizes the essence of Input: ", + "I want a motion that represents the power of to create positive change in the field of international development. Can you generate that?", + "Produce a movement that reflects ", + "Generate a dance that visualizes ", + "Produce a movement that visualizes Input: ", + "Give me a motion that represents the idea of Input: ", + "Give me a motion that signifies Input: ", + "Please create a motion that represents the power of to inspire justice and fairness." + ], + "output": [ + "" + ] + }, + "caption_framelen": { + "class": "t2m", + "input": [ + "Give me a motion that lasts for approximately frames. The caption is: ", + "Can you create a motion that lasts for about frames? The caption is: Input: ", + "I need a motion that lasts approximately frames for the caption: ", + "Please generate a motion with a length of around frames for the caption: Input: ", + "Show me a human motion that represents the phrase \"\" for frames.", + "Can you create a motion that lasts approximately frames? The caption is: Input: ", + "Give me a motion that lasts around frames. 
The caption is: Input: ", + "Can you create a motion sequence that lasts for frames and represents in motion?", + "I want to see a motion sequence that lasts for frames and embodies the description \"\".", + "Create a motion that has a duration of approximately frames for the caption: Input: ", + "I need a human motion that lasts for frames and is inspired by the phrase \"\".", + "Please generate a motion that is around frames long for the caption: ", + "Please generate a motion sequence that lasts for frames and embodies the description \"\".", + "Can you create a motion that has a duration of roughly frames? The caption is: ", + "Create a motion that has a duration of approximately frames for the caption: ", + "Can you show me a motion sequence that lasts for frames and depicts the phrase \"\"?", + "Please generate a motion that symbolizes through motion for frames.", + "Please generate a motion that lasts for frames and is based on the description \"\".", + "I need a motion that is approximately frames long for the caption: ", + "Can you create a motion that lasts for about frames? The caption is: ", + "Can you create a motion that lasts for frames? The caption is: Input: ", + "Give me a motion that is around frames long for the caption: Input: ", + "I want a motion that lasts for around frames. The caption is: Input: ", + "I want a motion that lasts for approximately frames. The caption is: Input: ", + "Please generate a motion that embodies the meaning of \"\" through motion for frames.", + "Create a motion that has a length of around frames for the caption: ", + "I need a motion that has a duration of roughly frames for the caption: ", + "I want a motion that is approximately frames long for the caption: Input: ", + "Can you create a motion that has a duration of approximately frames? The caption is: Input: ", + "I want a motion that has a duration of about frames. The caption is: ", + "Generate a motion that is around frames long for the caption: Input: ", + "I want a motion that has a length of roughly frames for the caption: ", + "Create a motion that has a length of around frames for the caption: Input: ", + "Please generate a motion that embodies for frames.", + "I want to see a motion that lasts for frames and symbolizes the phrase \"\" in motion.", + "Please generate a motion that is around frames long for the caption: Input: ", + "I need a motion that lasts around frames for the caption: Input: ", + "Can you create a motion that lasts for approximately frames? The caption is: Input: ", + "I need a motion that lasts for roughly frames. 
The caption is: ", + "Show me a motion that represents the description \"\" for frames.", + "Can you create a motion sequence that lasts for frames and represents ?", + "Create a motion that lasts for roughly frames for the caption: Input: ", + "I'm looking for a motion sequence that lasts for frames and embodies .", + "I want to see a motion that symbolizes in motion for frames.", + "Please generate a motion sequence that lasts for frames and symbolizes the meaning of \"\" through motion.", + "Show me a motion sequence that lasts for frames and symbolizes the phrase \"\".", + "Generate a motion that has a duration of approximately frames for the caption: Input: ", + "Please generate a motion sequence that lasts for frames and represents the meaning of \"\" in motion.", + "Please generate a motion that lasts around frames for the caption: ", + "Please generate a motion sequence that lasts for frames and embodies .", + "Please generate a motion that symbolizes in motion for frames.", + "I want a motion that has a duration of around frames for the caption: ", + "I want a motion that is roughly frames long for the caption: Input: ", + "Can you create a motion sequence that lasts for frames and symbolizes ?", + "I want a motion with frames. The caption is: ", + "Can you show me a human motion that lasts for frames and symbolizes ?", + "I want a motion with a length of about frames. The caption is: Input: ", + "Can you create a motion that has a length of approximately frames? The caption is: ", + "Can you create a motion that has a length of approximately frames? The caption is: Input: ", + "Can you show me a motion sequence that lasts for frames and depicts the description \"\" in motion?", + "Please generate a motion sequence that lasts for frames and depicts .", + "Can you create a motion sequence that lasts for frames and embodies the phrase \"\"?", + "I want to see a human motion that lasts for frames and depicts the phrase \"\" in motion.", + "I want to see a motion that embodies the meaning of \"\" in motion for frames.", + "Can you create a motion that lasts for roughly frames? The caption is: ", + "Please generate a motion that lasts for approximately frames for the caption: Input: ", + "Generate a motion that lasts for roughly frames for the caption: ", + "Please generate a motion that lasts for frames for the caption: Input: ", + "I want a motion that lasts for around frames. The caption is: ", + "Can you show me a human motion that lasts for frames and symbolizes the phrase \"\"?", + "I want to see a human motion that lasts for frames, representing .", + "I want to see a motion that lasts for frames and represents the meaning of \"\".", + "Can you show me a motion sequence that lasts for frames and depicts the meaning of \"\" in motion?", + "I need a motion that lasts for around frames. The caption is: ", + "Please generate a motion that symbolizes the description \"\" for frames.", + "Create a motion with a duration of around frames for the caption: ", + "I want a motion that lasts for roughly frames. 
The caption is: Input: ", + "Show me a motion that lasts for frames and embodies the phrase \"\" in motion.", + "Create a motion that has a duration of roughly frames for the caption: ", + "Show me a motion that symbolizes the description \"\" in frames.", + "Can you create a motion sequence that lasts for frames and depicts the phrase \"\"?", + "Show me a human motion that lasts for frames and represents the phrase \"\".", + "Show me a motion that represents in motion for frames.", + "Please generate a motion that lasts approximately frames for the caption: Input: ", + "Create a motion with a duration of around frames for the caption: Input: ", + "Can you create a motion sequence that lasts for frames and symbolizes in motion?", + "I want a motion that has a duration of about frames. The caption is: Input: ", + "Can you create a motion that has a duration of approximately frames? The caption is: ", + "I need a motion that is about frames long for the caption: ", + "Generate a motion that has a duration of about frames for the caption: ", + "Can you create a motion sequence that lasts for frames and represents the description \"\" in motion?", + "I want a motion that is approximately frames long for the caption: ", + "Can you show me a motion sequence that lasts for frames and embodies the meaning of \"\" through motion?", + "Please generate a motion with a length of around frames for the caption: ", + "Create a motion that is approximately frames long for the caption: ", + "Can you create a motion that lasts approximately frames? The caption is: ", + "I want to see a motion that represents in motion for frames.", + "Please generate a motion that embodies the description \"\" for frames.", + "Please generate a motion that is frames long for the caption: Input: ", + "Please generate a motion that embodies the description \"\" in frames.", + "Please generate a human motion that depicts the phrase \"\" in motion for frames.", + "Please generate a motion sequence that lasts for frames and embodies the phrase \"\".", + "Generate a motion that lasts for approximately frames for the caption: Input: ", + "Create a motion that has a duration of frames for the caption: Input: ", + "Please generate a motion that lasts approximately frames for the caption: ", + "Create a motion that has a duration of frames for the caption: ", + "Can you show me a motion that symbolizes the meaning of \"\" for frames?", + "Create a motion that lasts for around frames for the caption: Input: ", + "Please generate a motion that has a length of around frames for the caption: Input: ", + "Create a motion that lasts for roughly frames for the caption: ", + "Generate a motion that has a duration of approximately frames for the caption: ", + "I want to see a human motion that depicts the phrase \"\" for frames.", + "I need a motion that is around frames long for the caption: ", + "Can you create a motion that has a duration of around frames? The caption is: Input: ", + "Please generate a motion that lasts for frames and embodies the phrase \"\".", + "Give me a motion that has a length of approximately frames. The caption is: Input: ", + "Please generate a motion that has a length of about frames for the caption: Input: ", + "Please generate a motion that symbolizes for frames.", + "Please generate a motion that has a duration of around frames for the caption: ", + "Give me a motion that has a length of approximately frames. 
The caption is: ", + "Create a motion that has a duration of around frames for the caption: ", + "Generate a motion that has a duration of around frames for the caption: Input: ", + "Can you create a motion that has a length of roughly frames? The caption is: ", + "Can you show me a motion sequence that lasts for frames and embodies the description \"\" in motion?", + "Give me a motion that is approximately frames long. The caption is: Input: ", + "Can you show me a motion that depicts for frames?", + "Can you create a human motion that lasts for frames and embodies the phrase \"\"?", + "I want a motion with frames. The caption is: Input: ", + "Can you create a motion that has a length of roughly frames? The caption is: Input: ", + "Can you create a motion that represents the description \"\" in motion for frames?", + "I want a motion that lasts frames for the caption: Input: ", + "Give me a motion that is roughly frames long for the caption: Input: ", + "Create a motion with frames for the caption: Input: ", + "Can you create a human motion that lasts for frames and embodies the meaning of \"\"?", + "Please generate a human motion that embodies in motion for frames.", + "Create a motion that has a length of about frames for the caption: Input: ", + "Generate a motion that has frames for the caption: ", + "Can you create a human motion that symbolizes for frames?", + "Can you show me a motion sequence that lasts for frames and embodies the phrase \"\" through motion?", + "Please generate a motion that lasts for frames and symbolizes the phrase \"\".", + "I want to see a human motion that embodies the phrase \"\" for frames.", + "I want to see a motion sequence that lasts for frames and represents the description \"\".", + "I want to see a motion sequence that lasts for frames and represents through motion.", + "I need a motion that lasts approximately frames for the caption: Input: ", + "I want to see a motion that symbolizes the description \"\" in motion for frames.", + "Could you create a motion for me with frames? The caption is: ", + "Show me a motion that lasts for frames and symbolizes the phrase \"\" through motion.", + "I want a motion with a length of about frames. The caption is: ", + "I need a motion that lasts around frames for the caption: ", + "Please generate a motion that has a duration of around frames for the caption: Input: ", + "Give me a motion that is approximately frames long for the caption: ", + "Show me a motion that lasts for frames and depicts .", + "I want a motion that has a duration of roughly frames for the caption: Input: ", + "Generate a motion that lasts around frames for the caption: Input: ", + "Give me a motion that has a duration of roughly frames. 
The caption is: Input: ", + "Create a motion that is approximately frames long for the caption: Input: ", + "Can you show me a motion that symbolizes the meaning of \"\" in frames?", + "Create a motion that has a length of about frames for the caption: ", + "Create a motion that lasts for approximately frames for the caption: ", + "Please generate a human motion that lasts for frames and embodies the phrase \"\" in motion.", + "Give me a motion that is roughly frames long for the caption: ", + "Show me a motion that symbolizes for frames.", + "Please generate a motion that symbolizes the phrase \"\" for frames.", + "Generate a motion that has frames for the caption: Input: ", + "Please generate a motion that lasts for frames and symbolizes in motion.", + "Can you create a motion that has a duration of about frames? The caption is: ", + "Please generate a motion sequence that lasts for frames and embodies through motion.", + "Please generate a motion that lasts for around frames for the caption: Input: ", + "Give me a motion that is approximately frames long. The caption is: ", + "Give me a motion that has a duration of approximately frames. The caption is: Input: ", + "I want a motion that lasts for roughly frames. The caption is: ", + "I need a motion that has a length of approximately frames. The caption is: ", + "I want to see a motion that symbolizes the phrase \"\" for frames.", + "Show me a motion that represents the phrase \"\" in frames.", + "Can you create a motion that represents the phrase \"\" in motion for frames?", + "Give me a motion that has a duration of around frames. The caption is: ", + "I want a motion that lasts for about frames. The caption is: Input: ", + "Please generate a motion that is frames long for the caption: ", + "I want to see a motion sequence that lasts for frames and represents the phrase \"\" in motion.", + "I need a motion that is roughly frames long for the caption: Input: ", + "I want to see a motion that represents the description \"\" in frames.", + "Please generate a motion sequence that lasts for frames and embodies the meaning of \"\".", + "Can you create a motion sequence that lasts for frames and embodies the meaning of \"\"?", + "Please generate a motion sequence that lasts for frames and embodies the phrase \"\" in motion.", + "Generate a motion that has a length of frames for the caption: ", + "I need a motion that has a duration of around frames for the caption: ", + "Create a motion that lasts for approximately frames for the caption: Input: ", + "Give me a motion that lasts for about frames. The caption is: ", + "I want to see a motion that embodies in motion for frames and depicts the phrase \"\".", + "Give me a motion that lasts for around frames. The caption is: ", + "Please generate a motion that lasts for frames and symbolizes the description \"\".", + "Can you create a motion that is roughly frames long? The caption is: Input: ", + "Can you show me a motion sequence that lasts for frames and depicts through motion?", + "I need a motion that lasts for frames. The caption is: ", + "Can you create a motion that has a duration of about frames? The caption is: Input: ", + "Create a motion that lasts for about frames for the caption: ", + "I want to see a motion sequence that lasts for frames and embodies the phrase \"\".", + "Generate a motion that has a length of frames for the caption: Input: ", + "Can you create a motion that lasts for roughly frames? 
The caption is: Input: ", + "Please generate a motion that has a length of about frames for the caption: ", + "Generate a motion that is roughly frames long for the caption: ", + "Please generate a motion sequence for the phrase \"\" that lasts for frames.", + "Please generate a motion that symbolizes the phrase \"\" in motion for frames.", + "Please generate a motion that embodies the meaning of \"\" for frames.", + "I want a motion that lasts frames for the caption: ", + "Can you create a motion with frames? The caption is: ", + "I want to see a human motion that lasts for frames and represents the description \"\".", + "Please generate a motion that lasts for frames and depicts the meaning of \"\".", + "Show me a human motion that lasts for frames and symbolizes the meaning of \"\".", + "I want a motion that has a length of roughly frames for the caption: Input: ", + "I want a motion that lasts for about frames. The caption is: ", + "Generate a motion that has a length of around frames for the caption: Input: ", + "Can you create a motion that is about frames long? The caption is: Input: ", + "Can you create a motion that lasts for approximately frames? The caption is: ", + "Create a motion with a duration of about frames for the caption: Input: ", + "I need a human motion that represents for frames, please generate it.", + "I want a motion that lasts for approximately frames. The caption is: ", + "Can you show me a motion that depicts the meaning of \"\" for frames?", + "Please generate a human motion that lasts for frames and symbolizes the description \"\".", + "Can you show me a human motion that symbolizes the meaning of \"\" in motion for frames?", + "I need a motion that has a length of approximately frames. The caption is: Input: ", + "I want a motion that has a duration of around frames for the caption: Input: ", + "Show me a motion that lasts for frames and symbolizes the description \"\".", + "I need a motion that has a duration of roughly frames for the caption: Input: ", + "Give me a motion that has a duration of around frames. The caption is: Input: ", + "I need a motion that lasts for roughly frames. The caption is: Input: ", + "Can you create a human motion that represents in motion for frames?", + "Give me a motion that lasts for approximately frames. The caption is: Input: ", + "Please generate a motion sequence that lasts for frames and symbolizes the description \"\" in motion.", + "Generate a motion that is around frames long for the caption: ", + "Please generate a motion that lasts for frames and embodies through motion.", + "Can you create a motion that is roughly frames long? The caption is: ", + "Show me a human motion that represents the description \"\" in motion for frames.", + "Create a motion that has a duration of roughly frames for the caption: Input: ", + "I need a motion that is around frames long for the caption: Input: ", + "Generate a motion that has a duration of about frames for the caption: Input: ", + "Please generate a motion that lasts around frames for the caption: Input: ", + "Please generate a motion that is roughly frames long for the caption: Input: ", + "Please generate a human motion that embodies the description \"\" in motion for frames.", + "Can you show me a motion sequence that lasts for frames and embodies the meaning of \"\" in motion?", + "I need a motion that lasts for around frames. 
The caption is: Input: ", + "Create a motion that has a length of roughly frames for the caption: ", + "I want to see a motion that embodies the description \"\" for frames.", + "I want to see a motion sequence that lasts for frames and represents the phrase \"\".", + "Show me a motion that depicts in frames.", + "Give me a motion that has a duration of roughly frames. The caption is: ", + "Can you show me a motion sequence that lasts for frames and symbolizes the meaning of \"\" through motion?", + "Generate a motion that lasts for approximately frames for the caption: ", + "Create a motion with frames for the caption: ", + "Please generate a motion that is approximately frames long for the caption: ", + "Please generate a motion that has a length of approximately frames for the caption: ", + "Generate a motion that lasts for roughly frames for the caption: Input: ", + "I want a motion that is roughly frames long for the caption: ", + "Can you show me a motion that represents and is frames long?", + "I want a motion that lasts roughly frames for the caption: ", + "Generate a motion that lasts for about frames for the caption: ", + "Generate a motion that has a length of around frames for the caption: ", + "Can you create a motion with frames? The caption is: Input: ", + "Give me a motion that is around frames long for the caption: ", + "Give me a motion that is approximately frames long for the caption: Input: ", + "Please generate a motion sequence that lasts for frames and symbolizes the phrase \"\".", + "Please generate a motion that has a length of around frames for the caption: ", + "Show me a human motion that lasts for frames and represents the phrase \"\" in motion.", + "Generate a motion that is roughly frames long for the caption: Input: ", + "Show me a human motion that represents the phrase \"\" in frames.", + "I want to see a motion that symbolizes the meaning of \"\" in motion for frames.", + "I want a motion that lasts roughly frames for the caption: Input: ", + "I need a motion that has exactly frames for the caption: Input: ", + "Generate a motion that lasts around frames for the caption: ", + "Can you show me a human motion that lasts for frames and symbolizes the description \"\"?", + "Please generate a motion sequence that lasts for frames and embodies the description \"\" in motion.", + "Please generate a motion that lasts for approximately frames for the caption: ", + "Can you show me a motion that lasts for frames and symbolizes the description \"\"?", + "Show me a human motion that symbolizes the meaning of \"\" for frames.", + "Can you create a motion that has a duration of roughly frames? The caption is: Input: ", + "Can you show me a human motion that depicts for frames?", + "Please generate a motion that lasts for frames and embodies the meaning of \"\".", + "I need a motion that has a duration of around frames for the caption: Input: ", + "Please generate a motion that lasts for frames and embodies the meaning of \"\" in motion.", + "Show me a human motion that lasts for frames and symbolizes the description \"\".", + "Generate a motion that has a duration of around frames for the caption: ", + "Could you create a motion for me with frames? 
The caption is: Input: ", + "Please generate a motion that is approximately frames long for the caption: Input: ", + "Can you create a motion that embodies the meaning of \"\" in motion for frames?", + "I want a motion that has a duration of roughly frames for the caption: ", + "Show me a motion that symbolizes the description \"\" for frames.", + "I need a motion that is approximately frames long for the caption: Input: ", + "Can you create a motion that represents the meaning of \"\" in motion for frames?", + "Please generate a motion that has a length of approximately frames for the caption: Input: ", + "Create a motion with a length of roughly frames for the caption: ", + "Give me a motion that lasts for around frames. The caption is: Input: ", + "Please generate a motion that lasts for frames and symbolizes the meaning of \"\".", + "Please generate a motion that lasts for around frames for the caption: ", + "Please generate a motion sequence that lasts for frames and represents the description \"\" in motion.", + "I need a motion that is roughly frames long for the caption: ", + "Can you show me a motion that embodies the description \"\" in motion for frames?", + "Show me a motion that lasts for frames and symbolizes the meaning of \"\" in motion.", + "Can you create a motion that has a duration of around frames? The caption is: ", + "Can you create a motion sequence that lasts for frames and embodies the phrase \"\" in motion?", + "Show me a motion that embodies for frames.", + "Can you show me a motion sequence that lasts for frames and depicts ?", + "Please generate a motion that lasts for frames for the caption: ", + "Create a motion that has a duration of around frames for the caption: Input: ", + "I want to see a motion that symbolizes the phrase \"\" in motion for frames.", + "Can you create a motion that lasts for frames? The caption is: ", + "Can you show me a motion sequence that lasts for frames and symbolizes the meaning of \"\"?", + "Create a motion that lasts for around frames for the caption: ", + "Create a motion that lasts for about frames for the caption: Input: ", + "Can you create a motion sequence that lasts for frames and embodies the meaning of \"\" in motion?", + "Create a motion that has a length of roughly frames for the caption: Input: ", + "Create a motion with a duration of about frames for the caption: ", + "Show me a motion that lasts for frames and embodies the phrase \"\".", + "Please generate a motion that lasts for frames and depicts the phrase \"\" through motion.", + "I need a motion that has exactly frames for the caption: ", + "Give me a motion that has a duration of approximately frames. The caption is: ", + "Create a motion with a length of roughly frames for the caption: Input: ", + "Can you create a motion that represents the phrase \"\" for frames?", + "I want to see a motion that embodies in motion for frames.", + "Give me a motion that lasts for about frames. The caption is: Input: ", + "Please generate a motion sequence that lasts for frames and symbolizes the meaning of \"\".", + "Please generate a motion that embodies the phrase \"\" for frames.", + "I need a motion that lasts for frames. The caption is: Input: ", + "Please generate a motion that is roughly frames long for the caption: ", + "Generate a motion that lasts for about frames for the caption: Input: ", + "Can you create a motion that is about frames long? 
The caption is: ", + "Can you show me a human motion that lasts for frames and embodies the phrase \"\"?", + "Give me a motion that lasts around frames. The caption is: ", + "I need a motion that is about frames long for the caption: Input: " + ], + "output": [ + "" + ] + }, + "caption_seclen": { + "class": "t2m", + "input": [ + "I need a motion that lasts seconds and conveys the message of .", + "Can you create a motion that is roughly seconds long? The caption is: Input: ", + "Generate a motion that is around seconds long for the caption: Input: ", + "I need a motion that lasts for seconds. The caption is: Input: Input: ", + "Give me a motion that has a duration of around seconds. The caption is: Input: Input: ", + "Can you create a motion that lasts seconds and demonstrates the concept of ?", + "I want to see a human motion that depicts the phrase \"\" for seconds.", + "Please create a motion that lasts seconds, and illustrates the idea of .", + "Can you create a motion sequence that lasts for seconds and depicts the phrase \"\"?", + "Please generate a motion that lasts for seconds and embodies the meaning of \"\".", + "Please generate a motion that lasts for seconds and symbolizes the phrase \"\".", + "Please generate a motion that lasts for seconds for the caption: Input: ", + "Can you show me a human motion that depicts for seconds?", + "Can you show me a motion sequence that lasts for seconds and depicts through motion?", + "Please generate a motion that embodies the description \"\" in seconds.", + "Can you create a motion that has a duration of about seconds? The caption is: Input: Input: ", + "Generate a motion that lasts around seconds for the caption: Input: ", + "Show me a motion that symbolizes the description \"\" for seconds.", + "Create a motion that has a duration of approximately seconds for the caption: Input: ", + "Give me a motion that is approximately seconds long for the caption: Input: ", + "Give me a motion that is roughly seconds long for the caption: Input: Input: ", + "I need a motion that is approximately seconds long for the caption: ", + "I need a motion that is roughly seconds long for the caption: Input: ", + "Create a motion that has a length of roughly seconds for the caption: Input: Input: ", + "Create a motion that has a duration of approximately seconds for the caption: Input: Input: ", + "Please generate a motion that embodies for seconds.", + "Please generate a motion that symbolizes the description \"\" for seconds.", + "Give me a motion that lasts for approximately seconds. The caption is: Input: Input: ", + "Please generate a motion sequence that lasts for seconds and embodies through motion.", + "Please generate a motion that lasts for seconds and embodies the meaning of \"\" in motion.", + "Create a motion that lasts seconds, and showcases the posture of .", + "Can you create a motion that lasts for seconds? 
The caption is: Input: Input: ", + "Please generate a motion that has a length of about seconds for the caption: Input: Input: ", + "I want a motion that is approximately seconds long for the caption: ", + "Give me a motion that is approximately seconds long for the caption: Input: Input: ", + "Create a motion with seconds for the caption: Input: ", + "Can you show me a motion that depicts for seconds?", + "Generate a motion that lasts for seconds, and captures the essence of .", + "Can you create a motion sequence that lasts for seconds and represents the description \"\" in motion?", + "Show me a motion that symbolizes the description \"\" in seconds.", + "Please generate a motion that lasts for approximately seconds for the caption: ", + "Generate a motion that lasts seconds and portrays the feelings of .", + "Generate a motion that lasts seconds and shows the physical actions of .", + "I need a motion that lasts seconds, and showcases the scenery of .", + "Give me a motion that has a duration of roughly seconds. The caption is: ", + "I want a motion with a length of about seconds. The caption is: Input: Input: ", + "Create a motion that lasts seconds, and demonstrates the beauty of .", + "I want a motion that lasts roughly seconds for the caption: Input: ", + "Show me a motion that lasts for seconds and symbolizes the meaning of \"\" in motion.", + "Please generate a motion that is around seconds long for the caption: Input: Input: ", + "Please generate a motion that lasts seconds and represents the meaning of .", + "Create a motion that has a length of around seconds for the caption: Input: ", + "Generate a motion that lasts for approximately seconds for the caption: ", + "I want a motion that has a duration of around seconds for the caption: ", + "I want a motion that has a duration of roughly seconds for the caption: Input: ", + "Show me a motion that lasts for seconds and symbolizes the phrase \"\" through motion.", + "Can you create a motion that has a duration of about seconds? The caption is: Input: ", + "I need a motion that is around seconds long for the caption: Input: ", + "Give me a motion that has a duration of roughly seconds. The caption is: Input: Input: ", + "Can you create a motion that has a duration of approximately seconds? The caption is: Input: Input: ", + "I want to see a motion that embodies in motion for seconds and depicts the phrase \"\".", + "I need a motion that lasts for seconds. The caption is: ", + "Generate a motion that has a duration of about seconds for the caption: Input: Input: ", + "Can you create a motion that lasts for roughly seconds? The caption is: Input: Input: ", + "Please generate a motion that lasts seconds, and demonstrates the mannerisms of .", + "Create a motion that has a length of around seconds for the caption: ", + "Please generate a motion that lasts around seconds for the caption: Input: ", + "Can you show me a motion sequence that lasts for seconds and depicts the meaning of \"\" in motion?", + "Generate a motion that has seconds for the caption: Input: Input: ", + "Show me a human motion that represents the description \"\" in motion for seconds.", + "Please generate a motion that embodies the description \"\" for seconds.", + "Give me a motion that has a duration of around seconds. 
The caption is: ", + "Please generate a motion sequence that lasts for seconds and embodies .", + "Create a motion that lasts seconds, and demonstrates the setting of .", + "I need a motion that lasts seconds, and shows the energy of .", + "I want a motion with seconds. The caption is: Input: ", + "I want to see a motion that represents the description \"\" in seconds.", + "Create a motion that has a duration of seconds for the caption: Input: Input: ", + "Please generate a motion that is around seconds long for the caption: ", + "Please generate a motion that lasts seconds and represents the satire of .", + "Generate a motion that lasts for about seconds for the caption: Input: Input: ", + "I need a motion that is around seconds long for the caption: ", + "Can you create a motion that has a duration of about seconds? The caption is: ", + "Can you show me a motion that lasts for seconds and symbolizes the description \"\"?", + "Please create a motion that lasts seconds and illustrates the history of .", + "Can you create a motion sequence that lasts for seconds and embodies the meaning of \"\" in motion?", + "Give me a motion that is roughly seconds long for the caption: ", + "Can you create a motion that lasts for roughly seconds? The caption is: Input: ", + "Please generate a motion that has a duration of around seconds for the caption: Input: ", + "Can you create a motion that has a length of roughly seconds? The caption is: Input: ", + "Please generate a human motion that depicts the phrase \"\" in motion for seconds.", + "Generate a motion that has a length of around seconds for the caption: Input: Input: ", + "Please generate a motion that lasts for seconds and embodies through motion.", + "Can you create a motion that is roughly seconds long? The caption is: ", + "Give me a motion that lasts for about seconds. The caption is: Input: Input: ", + "Please generate a motion that has a duration of around seconds for the caption: ", + "Can you create a motion that lasts seconds, and tells the story of ?", + "I need a motion that lasts seconds, and showcases the humor of .", + "I want a motion that has a length of roughly seconds for the caption: Input: ", + "Give me a motion that lasts for around seconds. The caption is: Input: ", + "Can you create a motion with seconds? The caption is: Input: Input: ", + "Can you show me a motion sequence that lasts for seconds and symbolizes the meaning of \"\" through motion?", + "I need a motion that has exactly seconds for the caption: ", + "I want a motion that lasts for about seconds. The caption is: ", + "Please create a motion that lasts seconds and illustrates the style of .", + "I want a motion that has a duration of roughly seconds for the caption: Input: Input: ", + "I need a motion that lasts for around seconds. The caption is: ", + "Can you create a motion that has a length of roughly seconds? 
The caption is: ", + "Please generate a motion sequence for the phrase \"\" that lasts for seconds.", + "Please generate a human motion that embodies in motion for seconds.", + "Create a motion that is approximately seconds long for the caption: Input: Input: ", + "Please generate a human motion that lasts for seconds and embodies the phrase \"\" in motion.", + "I want a motion that is roughly seconds long for the caption: Input: ", + "Show me a motion that symbolizes for seconds.", + "I want to see a motion sequence that lasts for seconds and embodies the phrase \"\".", + "Please generate a motion that lasts approximately seconds for the caption: Input: ", + "Show me a motion that lasts for seconds and embodies the phrase \"\" in motion.", + "Please generate a motion sequence that lasts for seconds and embodies the description \"\" in motion.", + "Create a motion that lasts for about seconds for the caption: Input: Input: ", + "Can you show me a human motion that lasts for seconds and symbolizes ?", + "Please generate a motion that lasts seconds, and illustrates the character of .", + "I want a motion that lasts seconds for the caption: ", + "I need a motion that lasts seconds, and shows the movement of .", + "Generate a motion that lasts around seconds for the caption: ", + "Please generate a motion sequence that lasts for seconds and embodies the meaning of \"\".", + "Can you show me a motion that symbolizes the meaning of \"\" in seconds?", + "Please create a motion that lasts seconds, and depicts the movements of .", + "Can you create a motion that lasts seconds and captures the individuality of ?", + "Please generate a motion sequence that lasts for seconds and symbolizes the meaning of \"\".", + "Please generate a motion that has a length of around seconds for the caption: Input: ", + "Can you create a motion that is about seconds long? The caption is: ", + "Generate a motion that has seconds for the caption: ", + "Generate a motion that lasts seconds and highlights the core of .", + "Can you create a motion that has a length of approximately seconds? The caption is: ", + "Give me a motion that is around seconds long for the caption: Input: Input: ", + "I want to see a human motion that lasts for seconds and depicts the phrase \"\" in motion.", + "I need a motion that has a length of approximately seconds. The caption is: Input: Input: ", + "Can you create a motion that is about seconds long? The caption is: Input: ", + "Create a motion that lasts seconds, and demonstrates the atmosphere of .", + "Generate a motion that has seconds for the caption: Input: ", + "Please generate a motion that is approximately seconds long for the caption: Input: Input: ", + "Please generate a motion that has a length of approximately seconds for the caption: Input: ", + "I want a motion that lasts for around seconds. 
The caption is: Input: Input: ", + "Please generate a motion that lasts seconds and conveys the tempo of .", + "Create a motion with a duration of about seconds for the caption: Input: Input: ", + "I need a motion that lasts seconds, and showcases the music of .", + "Please generate a motion that lasts for approximately seconds for the caption: Input: Input: ", + "I want to see a motion that symbolizes the description \"\" in motion for seconds.", + "I need a motion that has a duration of around seconds for the caption: Input: ", + "Can you generate a motion that lasts seconds and showcases the irony of ?", + "Give me a motion that is around seconds long for the caption: ", + "Can you create a motion sequence that lasts for seconds and represents in motion?", + "Give me a motion that has a duration of roughly seconds. The caption is: Input: ", + "I want a motion that has a length of roughly seconds for the caption: Input: Input: ", + "Create a motion that has a duration of seconds for the caption: ", + "Can you create a motion that lasts seconds and represents the theme of ?", + "Can you generate a motion that lasts seconds and showcases the intention of ?", + "I need a motion that has exactly seconds for the caption: Input: Input: ", + "Can you create a motion that lasts for seconds? The caption is: ", + "Can you create a motion that lasts for about seconds? The caption is: Input: ", + "I need a motion that is around seconds long for the caption: Input: Input: ", + "I want to see a motion sequence that lasts for seconds and embodies the description \"\".", + "Create a motion that has a length of roughly seconds for the caption: Input: ", + "Create a motion with a length of roughly seconds for the caption: Input: ", + "Generate a motion that lasts for roughly seconds for the caption: Input: ", + "Please generate a motion that lasts for approximately seconds for the caption: Input: ", + "I want to see a motion that symbolizes the phrase \"\" in motion for seconds.", + "Can you create a motion that lasts for roughly seconds? The caption is: ", + "Create a motion that has a length of around seconds for the caption: Input: Input: ", + "Please generate a motion that lasts around seconds for the caption: ", + "Create a motion with a duration of about seconds for the caption: ", + "Generate a motion that has a duration of about seconds for the caption: Input: ", + "Please generate a motion that is seconds long for the caption: Input: ", + "Can you show me a motion sequence that lasts for seconds and embodies the phrase \"\" through motion?", + "Please generate a motion that is roughly seconds long for the caption: Input: Input: ", + "Can you create a motion that lasts approximately seconds? The caption is: ", + "Please generate a motion sequence that lasts for seconds and represents the meaning of \"\" in motion.", + "I want a motion with seconds. The caption is: ", + "Generate a motion that lasts for approximately seconds for the caption: Input: ", + "I need a human motion that lasts for seconds and is inspired by the phrase \"\".", + "I need a motion that has a length of approximately seconds. 
The caption is: Input: ", + "Please generate a motion sequence that lasts for seconds and symbolizes the description \"\" in motion.", + "I want a motion that is approximately seconds long for the caption: Input: Input: ", + "Create a motion that lasts seconds, and demonstrates the suspense of .", + "I want to see a motion that represents in motion for seconds.", + "Can you create a motion that lasts seconds and captures the fashion of ?", + "Can you create a motion that represents the meaning of \"\" in motion for seconds?", + "Generate a motion that lasts seconds and highlights the instrumentation of .", + "Create a motion that lasts for around seconds for the caption: ", + "Create a motion that is approximately seconds long for the caption: ", + "I need a motion that is approximately seconds long for the caption: Input: ", + "I need a motion that has a duration of around seconds for the caption: Input: Input: ", + "I need a human motion that represents for seconds, please generate it.", + "Please generate a motion that lasts for around seconds for the caption: Input: Input: ", + "I need a motion that lasts seconds, and showcases the creativity of .", + "Can you create a motion that has a duration of approximately seconds? The caption is: Input: ", + "Generate a motion that has a duration of about seconds for the caption: ", + "Can you show me a motion sequence that lasts for seconds and depicts the description \"\" in motion?", + "Generate a motion that lasts seconds and highlights the design of .", + "Show me a human motion that lasts for seconds and represents the phrase \"\" in motion.", + "Create a motion with a duration of around seconds for the caption: Input: ", + "I need a motion that is about seconds long for the caption: Input: ", + "Create a motion that has a duration of seconds for the caption: Input: ", + "Create a motion that lasts seconds, and demonstrates the arc of .", + "I want to see a motion sequence that lasts for seconds and represents through motion.", + "Give me a motion that lasts for approximately seconds. The caption is: Input: ", + "Please generate a motion that lasts around seconds for the caption: Input: Input: ", + "Can you generate a motion that lasts seconds and illustrates the lyrics of ?", + "Create a motion that is approximately seconds long for the caption: Input: ", + "I need a motion that lasts seconds, and shows the gestures of .", + "Give me a motion that has a duration of around seconds. The caption is: Input: ", + "Generate a motion that lasts seconds, and showcases the theme of .", + "Please generate a motion that embodies the meaning of \"\" for seconds.", + "Can you create a motion sequence that lasts for seconds and symbolizes ?", + "Can you create a motion that has a duration of approximately seconds? The caption is: ", + "Please generate a motion that symbolizes the phrase \"\" in motion for seconds.", + "I want a motion with seconds. The caption is: Input: Input: ", + "Create a motion that has a length of about seconds for the caption: ", + "Can you create a motion that lasts seconds, and captures the vibe of ?", + "I need a motion that lasts for roughly seconds. The caption is: Input: ", + "Generate a motion that is around seconds long for the caption: ", + "Can you generate a motion that lasts seconds and showcases the climax of ?", + "I want a motion that lasts for approximately seconds. 
The caption is: Input: Input: ", + "I want to see a motion that lasts for seconds and symbolizes the phrase \"\" in motion.", + "Give me a motion that lasts for around seconds. The caption is: Input: Input: ", + "Give me a motion that is approximately seconds long. The caption is: Input: ", + "Can you create a motion sequence that lasts for seconds and embodies the phrase \"\"?", + "Can you create a motion that lasts for approximately seconds? The caption is: Input: Input: ", + "I want a motion with a length of about seconds. The caption is: Input: ", + "Can you create a motion that has a duration of around seconds? The caption is: ", + "Please generate a motion that is roughly seconds long for the caption: ", + "Can you create a motion that lasts seconds and captures the story of ?", + "Please create a motion that lasts seconds, and showcases the vocals of .", + "Can you create a human motion that lasts for seconds and embodies the meaning of \"\"?", + "I need a motion that lasts seconds, and showcases the view of .", + "Please create a motion that lasts seconds and illustrates the goal of .", + "Generate a motion that has a duration of approximately seconds for the caption: Input: ", + "Please generate a motion that lasts seconds and represents the character of .", + "Please generate a human motion that lasts for seconds and symbolizes the description \"\".", + "Create a motion that lasts for roughly seconds for the caption: Input: Input: ", + "Please generate a motion that symbolizes through motion for seconds.", + "Can you create a motion that lasts seconds and captures the comedy of ?", + "I want a motion that lasts for roughly seconds. The caption is: Input: Input: ", + "Create a motion that has a duration of around seconds for the caption: Input: ", + "I need a motion that lasts seconds, and showcases the beat of .", + "Give me a motion that is approximately seconds long for the caption: ", + "Please generate a motion that has a length of around seconds for the caption: ", + "Show me a motion that represents in motion for seconds.", + "Generate a motion that has a length of seconds for the caption: ", + "Please generate a motion with a length of around seconds for the caption: Input: Input: ", + "Can you create a motion that has a duration of roughly seconds? The caption is: Input: Input: ", + "Generate a motion that lasts seconds and illustrates the activity of .", + "Can you show me a motion sequence that lasts for seconds and embodies the meaning of \"\" through motion?", + "Show me a motion that lasts for seconds and embodies the phrase \"\".", + "Give me a motion that lasts around seconds. The caption is: Input: Input: ", + "Generate a motion that lasts seconds and highlights the cleverness of .", + "Can you create a motion that has a duration of around seconds? The caption is: Input: Input: ", + "I need a motion that lasts around seconds for the caption: ", + "I need a motion that lasts around seconds for the caption: Input: Input: ", + "Create a motion that has a length of about seconds for the caption: Input: Input: ", + "I want a motion with a length of about seconds. The caption is: ", + "I want a motion that has a duration of about seconds. The caption is: Input: Input: ", + "I need a motion that has a duration of roughly seconds for the caption: ", + "Give me a motion that has a duration of approximately seconds. 
The caption is: Input: ", + "Can you create a human motion that lasts for seconds and embodies the phrase \"\"?", + "I want a motion that has a duration of around seconds for the caption: Input: Input: ", + "Generate a motion that lasts for approximately seconds for the caption: Input: Input: ", + "Please generate a motion that lasts seconds and represents the landscape of .", + "Please generate a motion that lasts seconds and represents the plot of .", + "Please generate a motion sequence that lasts for seconds and represents the description \"\" in motion.", + "I want a motion that has a duration of roughly seconds for the caption: ", + "Generate a motion that has a length of around seconds for the caption: Input: ", + "Can you create a motion that lasts for approximately seconds? The caption is: Input: ", + "Generate a motion that lasts seconds and highlights the action of .", + "I need a motion that has a length of approximately seconds. The caption is: ", + "I need a motion that lasts for seconds. The caption is: Input: ", + "Please generate a motion that symbolizes for seconds.", + "Create a motion that lasts for around seconds for the caption: Input: ", + "Please generate a human motion that embodies the description \"\" in motion for seconds.", + "Can you show me a motion that depicts the meaning of \"\" for seconds?", + "Please generate a motion that lasts for around seconds for the caption: Input: ", + "I want a motion that lasts for around seconds. The caption is: ", + "Please generate a motion that has a length of around seconds for the caption: Input: Input: ", + "Show me a motion that lasts for seconds and symbolizes the description \"\".", + "Can you create a motion that lasts seconds and captures the conflict of ?", + "Can you show me a motion sequence that lasts for seconds and depicts the phrase \"\"?", + "Can you create a motion that represents the description \"\" in motion for seconds?", + "I need a motion that lasts for roughly seconds. The caption is: ", + "Please create a motion that lasts seconds and illustrates the tone of .", + "I want to see a motion that symbolizes in motion for seconds.", + "Create a motion with a duration of around seconds for the caption: ", + "Please generate a motion that embodies the meaning of \"\" through motion for seconds.", + "I need a motion that lasts seconds, and showcases the trend of .", + "I need a motion that has a duration of around seconds for the caption: ", + "Can you create a motion that lasts approximately seconds? The caption is: Input: ", + "Can you generate a motion that lasts seconds and showcases the mystery of ?", + "Please generate a motion that is around seconds long for the caption: Input: ", + "Can you show me a motion sequence that lasts for seconds and symbolizes the meaning of \"\"?", + "Give me a motion that has a length of approximately seconds. The caption is: Input: Input: ", + "Please create a motion that lasts seconds and illustrates the wit of .", + "Give me a motion that is approximately seconds long. The caption is: Input: Input: ", + "I need a motion that is about seconds long for the caption: ", + "Please generate a motion that is seconds long for the caption: Input: Input: ", + "I need a motion that lasts for roughly seconds. 
The caption is: Input: Input: ", + "Create a motion with seconds for the caption: ", + "I need a motion that lasts approximately seconds for the caption: Input: ", + "Create a motion with a length of roughly seconds for the caption: Input: Input: ", + "Generate a motion that is roughly seconds long for the caption: Input: Input: ", + "Can you generate a motion that lasts seconds and showcases the ingenuity of ?", + "Can you create a motion that has a duration of roughly seconds? The caption is: ", + "I want to see a motion that embodies the description \"\" for seconds.", + "I need a motion that is roughly seconds long for the caption: ", + "I want to see a human motion that lasts for seconds, representing .", + "Give me a motion that lasts for about seconds. The caption is: Input: ", + "Can you create a human motion that symbolizes for seconds?", + "Can you create a motion that has a length of approximately seconds? The caption is: Input: Input: ", + "Show me a human motion that lasts for seconds and represents the phrase \"\".", + "Can you create a motion sequence that lasts for seconds and embodies the phrase \"\" in motion?", + "I want a motion that has a duration of around seconds for the caption: Input: ", + "Can you generate a motion that lasts seconds and shows the actions of ?", + "Can you create a motion sequence that lasts for seconds and symbolizes in motion?", + "I need a motion that lasts seconds, and showcases the soul of .", + "I need a motion that lasts approximately seconds for the caption: ", + "Please generate a motion that lasts seconds, and brings to life the story of .", + "Please create a motion that lasts seconds and represents the movements of .", + "Please generate a motion that is approximately seconds long for the caption: ", + "Can you create a motion that lasts seconds and captures the backdrop of ?", + "Generate a motion that lasts seconds and highlights the denouement of .", + "Show me a motion that depicts in seconds.", + "I need a motion that is roughly seconds long for the caption: Input: Input: ", + "Give me a motion that is around seconds long for the caption: Input: ", + "Create a motion that lasts for approximately seconds for the caption: Input: Input: ", + "I want to see a motion sequence that lasts for seconds and represents the phrase \"\".", + "Create a motion that lasts seconds, and demonstrates the innovation of .", + "I need a motion that represents and lasts seconds.", + "Create a motion that has a length of roughly seconds for the caption: ", + "Can you create a motion that lasts for about seconds? The caption is: Input: Input: ", + "Give me a motion that has a length of approximately seconds. The caption is: ", + "I want a motion that is roughly seconds long for the caption: ", + "Please generate a motion that lasts seconds and represents the culture of .", + "I want to see a motion that symbolizes the phrase \"\" for seconds.", + "Generate a motion that has a length of seconds for the caption: Input: Input: ", + "Generate a motion that is around seconds long for the caption: Input: Input: ", + "I need a motion that is about seconds long for the caption: Input: Input: ", + "Please generate a motion with a length of around seconds for the caption: ", + "Show me a motion that represents the phrase \"\" in seconds.", + "Can you create a motion that lasts for approximately seconds? 
The caption is: ", + "Create a motion with a duration of about seconds for the caption: Input: ", + "Can you create a motion that represents the phrase \"\" in motion for seconds?", + "Create a motion that has a duration of roughly seconds for the caption: ", + "Generate a motion that lasts for roughly seconds for the caption: ", + "Create a motion that lasts seconds, and demonstrates the tradition of .", + "Create a motion that has a duration of around seconds for the caption: Input: Input: ", + "Can you create a human motion that represents in motion for seconds?", + "I want to see a motion sequence that lasts for seconds and represents the phrase \"\" in motion.", + "I want a motion that is roughly seconds long for the caption: Input: Input: ", + "Create a motion that lasts for around seconds for the caption: Input: Input: ", + "I want a motion that has a duration of about seconds. The caption is: ", + "Give me a motion that has a duration of approximately seconds. The caption is: Input: Input: ", + "Please generate a motion sequence that lasts for seconds and embodies the description \"\".", + "Please generate a motion that lasts approximately seconds for the caption: Input: Input: ", + "I want a motion that lasts for about seconds. The caption is: Input: Input: ", + "Show me a motion that embodies for seconds.", + "I'm looking for a motion sequence that lasts for seconds and embodies .", + "Create a motion that has a duration of around seconds for the caption: ", + "Can you show me a human motion that symbolizes the meaning of \"\" in motion for seconds?", + "Generate a motion that lasts seconds and highlights the legacy of .", + "Please generate a motion that is roughly seconds long for the caption: Input: ", + "Could you create a motion for me with seconds? The caption is: Input: Input: ", + "I want a motion that lasts seconds for the caption: Input: Input: ", + "Generate a motion that lasts seconds and highlights the uniqueness of .", + "Show me a motion that represents the description \"\" for seconds.", + "I need a motion that lasts for around seconds. The caption is: Input: ", + "Create a motion that lasts seconds, and demonstrates the spirit of .", + "Create a motion that lasts for approximately seconds for the caption: ", + "Can you create a motion that has a duration of around seconds? The caption is: Input: ", + "Generate a motion that lasts for roughly seconds for the caption: Input: Input: ", + "I need a motion that lasts seconds, and showcases the personality of .", + "Generate a motion that has a duration of around seconds for the caption: Input: Input: ", + "Generate a motion that lasts for about seconds for the caption: ", + "Generate a motion that has a length of seconds for the caption: Input: ", + "Can you create a motion that lasts for seconds? The caption is: Input: ", + "Show me a motion sequence that lasts for seconds and symbolizes the phrase \"\".", + "Generate a motion that has a duration of approximately seconds for the caption: Input: Input: ", + "Show me a human motion that lasts for seconds and symbolizes the meaning of \"\".", + "Please generate a motion that lasts for seconds and depicts the meaning of \"\".", + "I want to see a human motion that embodies the phrase \"\" for seconds.", + "Generate a motion that has a duration of approximately seconds for the caption: ", + "Can you generate a motion that lasts seconds, and captures the body language of ?", + "Can you create a motion that is roughly seconds long? 
The caption is: Input: Input: ", + "Could you create a motion for me with seconds? The caption is: Input: ", + "Create a motion that lasts for about seconds for the caption: ", + "Please generate a motion sequence that lasts for seconds and symbolizes the meaning of \"\" through motion.", + "Please generate a motion sequence that lasts for seconds and depicts .", + "Please generate a motion that lasts for seconds for the caption: Input: Input: ", + "I want a motion that lasts for approximately seconds. The caption is: ", + "Can you create a motion that lasts seconds and captures the scene of ?", + "Please generate a motion that lasts for seconds and embodies the phrase \"\".", + "I want a motion that lasts for roughly seconds. The caption is: Input: ", + "I want a motion that is approximately seconds long for the caption: Input: ", + "Can you generate a motion that lasts seconds and showcases the identity of ?", + "I need a motion that is approximately seconds long for the caption: Input: Input: ", + "Show me a human motion that represents the phrase \"\" in seconds.", + "Can you create a motion that lasts seconds and captures the intelligence of ?", + "Can you create a motion that lasts seconds and depicts ?", + "Create a motion that has a duration of approximately seconds for the caption: ", + "Create a motion that lasts for roughly seconds for the caption: Input: ", + "Can you create a motion that has a length of roughly seconds? The caption is: Input: Input: ", + "Give me a motion that lasts around seconds. The caption is: ", + "Can you generate a motion that lasts seconds and showcases the rhythm of ?", + "Can you create a motion that has a duration of roughly seconds? The caption is: Input: ", + "Can you create a motion that lasts seconds and highlights the manner of ?", + "Please generate a motion that lasts for seconds and symbolizes the description \"\".", + "I need a motion that has exactly seconds for the caption: Input: ", + "I need a motion that has a duration of roughly seconds for the caption: Input: ", + "I want a motion that lasts seconds for the caption: Input: ", + "Give me a motion that is approximately seconds long. The caption is: ", + "Can you generate a motion that lasts seconds and showcases the heritage of ?", + "Create a motion that lasts for approximately seconds for the caption: Input: ", + "I want a motion that lasts for approximately seconds. The caption is: Input: ", + "Can you create a motion sequence that lasts for seconds and represents ?", + "I need a motion that lasts for around seconds. The caption is: Input: Input: ", + "Give me a motion that lasts for about seconds. The caption is: ", + "Can you create a motion that has a length of approximately seconds? The caption is: Input: ", + "I want a motion that lasts for about seconds. The caption is: Input: ", + "Create a motion with a length of roughly seconds for the caption: ", + "Create a motion that lasts seconds, and demonstrates the concept of .", + "Please generate a motion that lasts for seconds for the caption: ", + "Please generate a motion that lasts approximately seconds for the caption: ", + "Please generate a motion that lasts seconds and represents the spirit of .", + "I want a motion that lasts roughly seconds for the caption: Input: Input: ", + "Can you create a motion that is about seconds long? 
The caption is: Input: Input: ", + "Generate a motion that lasts seconds and highlights the main idea of .", + "Can you show me a motion that embodies the description \"\" in motion for seconds?", + "Can you show me a motion that symbolizes the meaning of \"\" for seconds?", + "Generate a motion that lasts for about seconds for the caption: Input: ", + "Please generate a motion that is approximately seconds long for the caption: Input: ", + "Create a motion that has a duration of roughly seconds for the caption: Input: ", + "Please generate a motion sequence that lasts for seconds and embodies the phrase \"\".", + "Give me a motion that lasts for approximately seconds. The caption is: ", + "Show me a human motion that symbolizes the meaning of \"\" for seconds.", + "Please generate a motion that has a length of approximately seconds for the caption: ", + "Generate a motion that has a length of around seconds for the caption: ", + "Please generate a motion that symbolizes the phrase \"\" for seconds.", + "I need a motion that lasts approximately seconds for the caption: Input: Input: ", + "Can you create a motion with seconds? The caption is: Input: ", + "Please generate a motion that lasts seconds and represents the mood of .", + "Create a motion with a duration of around seconds for the caption: Input: Input: ", + "Please create a motion that lasts seconds and illustrates the originality of .", + "Give me a motion that is roughly seconds long for the caption: Input: ", + "Please generate a motion that has a length of approximately seconds for the caption: Input: Input: ", + "Create a motion that has a duration of roughly seconds for the caption: Input: Input: ", + "Can you show me a motion that represents and is seconds long?", + "Please generate a motion that lasts seconds and represents the lyrics of .", + "Please generate a motion with a length of around seconds for the caption: Input: ", + "Can you create a motion that represents the phrase \"\" for seconds?", + "Please generate a motion sequence that lasts for seconds and embodies the phrase \"\" in motion.", + "I want a motion that lasts roughly seconds for the caption: ", + "I need a motion that has a duration of roughly seconds for the caption: Input: Input: ", + "Could you create a motion for me with seconds? The caption is: ", + "Generate a motion that is roughly seconds long for the caption: ", + "Can you show me a motion sequence that lasts for seconds and depicts ?", + "Please generate a motion that lasts seconds and represents the drama of .", + "I want to see a motion that embodies in motion for seconds.", + "Can you create a motion sequence that lasts for seconds and embodies the meaning of \"\"?", + "I want to see a motion that embodies the meaning of \"\" in motion for seconds.", + "Can you create a motion that lasts approximately seconds? The caption is: Input: Input: ", + "I need a motion that lasts around seconds for the caption: Input: ", + "Show me a motion that lasts for seconds and depicts .", + "Create a motion that lasts for roughly seconds for the caption: ", + "Please generate a motion that has a length of about seconds for the caption: Input: ", + "Can you create a motion that lasts for about seconds? The caption is: ", + "Create a motion that lasts seconds, and demonstrates the harmony of .", + "I want a motion that has a duration of about seconds. The caption is: Input: ", + "Please create a motion that lasts seconds and illustrates the atmosphere of .", + "Give me a motion that lasts for around seconds. 
The caption is: ", + "Can you generate a motion that lasts seconds, and showcases the ambiance of ?", + "Generate a motion that lasts seconds and highlights the surroundings of .", + "Please generate a motion that has a length of about seconds for the caption: ", + "Can you show me a human motion that lasts for seconds and symbolizes the phrase \"\"?", + "Please create a motion that lasts seconds and illustrates the essence of .", + "Can you generate a motion that lasts seconds, and showcases the mood of ?", + "I want a motion that lasts for roughly seconds. The caption is: ", + "Generate a motion that lasts around seconds for the caption: Input: Input: ", + "Can you create a motion that embodies the meaning of \"\" in motion for seconds?", + "I want to see a motion that lasts for seconds and represents the meaning of \"\".", + "Please generate a motion that symbolizes in motion for seconds.", + "Create a motion that lasts seconds, and demonstrates the parody of .", + "Generate a motion that has a duration of around seconds for the caption: Input: ", + "Please generate a motion that lasts for around seconds for the caption: ", + "I want to see a human motion that lasts for seconds and represents the description \"\".", + "Show me a human motion that represents the phrase \"\" for seconds.", + "I need a motion that lasts seconds, and showcases the tension of .", + "I want a motion that has a length of roughly seconds for the caption: ", + "Please generate a motion that has a duration of around seconds for the caption: Input: Input: ", + "Give me a motion that has a duration of approximately seconds. The caption is: ", + "Can you create a motion with seconds? The caption is: ", + "Give me a motion that has a length of approximately seconds. The caption is: Input: ", + "Please generate a motion that lasts for seconds and symbolizes in motion.", + "Please generate a motion that is seconds long for the caption: ", + "Please generate a motion that lasts for seconds and depicts the phrase \"\" through motion.", + "Create a motion that lasts seconds, and demonstrates the melody of .", + "Can you create a motion that lasts seconds and portrays the behavior of ?", + "Generate a motion that lasts seconds and highlights the rhythm of .", + "Generate a motion that is roughly seconds long for the caption: Input: ", + "Give me a motion that lasts around seconds. The caption is: Input: ", + "Create a motion that has a length of about seconds for the caption: Input: ", + "Create a motion with seconds for the caption: Input: Input: ", + "Please create a motion that lasts seconds and illustrates the resolution of .", + "I want a motion that lasts for around seconds. 
The caption is: Input: ", + "Generate a motion that has a duration of around seconds for the caption: ", + "I need a motion that lasts seconds, and showcases the narrative of .", + "Can you show me a motion sequence that lasts for seconds and embodies the description \"\" in motion?", + "Please generate a motion that embodies the phrase \"\" for seconds.", + "Create a motion that lasts for about seconds for the caption: Input: " + ], + "output": [ + "" + ] + }, + "framelen": { + "class": "l2m", + "input": [ + "Show me a motion that lasts for no more than frames.", + "Can you make a motion that lasts for frames, with a certain range of variability, but no longer than frames?", + "I want to see a motion that lasts between and frames.", + "Generate a motion that is no longer than frames in duration, plus or minus a certain percentage.", + "Create a motion that lasts for a duration of at least frames, plus or minus a certain number of frames, but no longer than frames.", + "Show me a motion that is longer than frames but shorter than frames.", + "Create a motion that lasts for a duration of no more than frames, plus or minus a certain number of frames.", + "Can you make a motion that is frames or shorter?", + "I want to see a motion that lasts for a duration between and frames, but with a certain percentage of variability.", + "Give me a motion that has a duration of frames or more but no more than frames.", + "Can you make a motion that lasts for frames, with a certain percentage of variability?", + "Show me a motion that is longer than frames in duration, but with a certain percentage of variability.", + "Show me a motion that is longer than frames in duration, with a certain percentage of variability, but no longer than frames.", + "frames in duration, with a certain range of variability.", + "Can you make a motion that is no longer than frames in duration?", + "Give me a motion that is shorter than frames in duration, with a certain range of variability, plus or minus a certain number of frames.", + "Show me a motion that is longer than frames in duration, with a certain range of variability, but no longer than frames.", + "Generate a motion that lasts for a duration of frames or less.", + "Can you make a motion that lasts for frames, with a certain range of variability?", + "I want to see a motion that is longer than frames.", + "Give me a motion that is frames in length or more.", + "Can you make a motion that is shorter than frames in length?", + "Generate a motion that is no shorter than frames but no longer than frames in duration.", + "Show me a motion that is shorter than frames but longer than frames.", + "Show me a motion that is longer than frames in duration, but within a certain range of frames.", + "Generate a motion that lasts for exactly frames.", + "Show me a motion that is longer than frames in duration, with a certain percentage of variability.", + "Create a motion that is exactly frames in length.", + "I want to see a motion that lasts for a duration between and frames.", + "I want to see a motion that lasts for a duration between and frames, inclusive.", + "Give me a motion that is shorter than frames in duration, with a certain percentage of variability.", + "I want to see a motion that lasts for a duration of frames or more, but with a certain range of variability, plus or minus a certain number of frames.", + "Create a motion that lasts for a duration of at least frames, plus or minus a certain percentage.", + "Give me a motion that is no shorter than frames in 
duration, but no longer than frames, plus or minus a certain number of frames.", + "I want to see a motion that lasts for a duration between and frames, with a certain range of variability.", + "Generate a motion that is no shorter than frames in duration, with a certain range of variability.", + "Create a motion that lasts for a duration of at least frames, but no more than frames.", + "Give me a motion that is shorter than frames in duration, plus or minus a certain percentage.", + "Generate a motion that is no shorter than frames in duration, with a certain range of variability, but no longer than frames plus a certain number of frames.", + "Generate a motion that lasts for frames, give or take a few frames.", + "Create a motion that lasts for a duration of at least frames.", + "Give me a motion that is shorter than frames or longer than frames.", + "I want to see a motion that is at least frames long.", + "Show me a motion that is longer than frames in duration, with a certain range of variability.", + "Show me a motion that has a duration of frames.", + "Create a motion that has a duration of less than frames.", + "Give me a motion that has a length of frames or less.", + "Show me a motion that is longer than frames in duration, with a certain percentage of variability, but no longer than frames, plus or minus a certain percentage.", + "Show me a motion that lasts for at least frames, but no more than frames.", + "Create a motion that is no shorter than frames.", + "I want to see a motion that lasts for a duration of frames or more, but no longer than frames, plus or minus a certain number of frames.", + "Can you make a motion that lasts for frames, plus or minus a certain range of frames?", + "Show me a motion that has a duration of at least frames.", + "Create a motion that lasts for a duration of at least frames, with a certain range of variability, but no longer than frames.", + "Generate a motion that is frames long.", + "Show me a motion that is shorter than frames or longer than frames, but within a certain range of frames.", + "Create a motion that lasts for a duration of at least frames, plus or minus a certain percentage, but no longer than frames, plus or minus a certain percentage.", + "Generate a motion that is no shorter than frames in duration, but no longer than frames, with a certain range of variability.", + "Generate a motion that lasts for no longer than frames.", + "Can you create a motion that lasts for frames?", + "Generate a motion that is between frames and frames in duration.", + "Give me a motion that is shorter than frames in duration, with a certain range of variability, but no shorter than frames minus a certain percentage.", + "I want to see a motion that lasts for exactly frames.", + "Give me a motion that is shorter than frames in duration, with a certain range of variability.", + "Generate a motion that is no shorter than frames in duration, but with a certain range of variability.", + "I want to see a motion that is between frames and frames in duration, inclusive.", + "Can you make a motion that lasts for less than frames?", + "Show me a motion that has a duration between and frames.", + "Can you create a motion that lasts for frames, give or take a few frames?", + "I want to see a motion that lasts for a duration between and frames, with a certain percentage of variability.", + "I want to see a motion that is exactly frames in duration.", + "Give me a motion that is between frames and frames in duration.", + "Create a motion that lasts for a 
duration of at least frames, plus or minus a certain percentage, but no longer than frames, plus or minus a certain range of frames.", + "Create a motion that lasts for a duration of at most frames.", + "I want to see a motion that is exactly frames in duration, with a certain range of variability.", + "I want to see a motion that is between and frames long.", + "Generate a motion that is no shorter than frames in duration.", + "Can you make a motion that lasts for frames, with a certain range of variability, but no longer than frames?", + "Can you make a motion that lasts for frames, give or take a certain percentage?", + "Show me a motion that lasts for a minimum of frames.", + "Create a motion that lasts for a duration of at least frames, plus or minus a certain percentage, but no longer than frames.", + "Show me a motion that is longer than frames in duration, but with a certain range of variability.", + "Create a motion that lasts for a duration of at least frames, but no longer than frames, with a certain percentage of variability.", + "Can you give me a motion that lasts about frames?", + "Give me a motion that lasts for at least frames but no more than frames.", + "Can you create a motion that lasts for frames or less?", + "Can you make a motion that lasts for frames, give or take a certain range of frames?", + "Create a motion that lasts for a duration of at least frames, plus or minus a certain range of frames, but no longer than frames.", + "Generate a motion that has a duration of frames or more.", + "Generate a motion that is no shorter than frames in duration, but no longer than frames, plus or minus a certain percentage.", + "Can you make a motion that is no shorter than frames in duration, but no longer than frames, plus or minus a certain percentage?", + "Give me a motion that is shorter than frames in duration, but no shorter than frames minus a certain percentage.", + "Generate a motion that is frames or longer.", + "Give me a motion that is no more than frames in length.", + "I want to see a motion that lasts for a duration between and frames, with a certain percentage of variability, plus or minus a certain number of frames.", + "Create a motion that lasts for a duration of at least frames, but no longer than frames, plus or minus a certain percentage.", + "Show me a motion that is longer than frames or shorter than frames.", + "Can you create a motion that is between and frames in length?", + "Can you make a motion that is no longer than frames in duration, but no shorter than frames minus a certain number of frames?", + "Create a motion that lasts for a duration of at least frames, but no longer than frames, with a certain range of variability.", + "I want to see a motion that lasts for a duration between and frames, plus or minus a certain number of frames.", + "I want to see a motion that lasts for a duration of frames or more, but with a certain range of variability.", + "Give me a motion that is shorter than frames in duration, but with a certain percentage of variability.", + "Generate a motion that is no longer than frames.", + "Give me a motion that is no shorter than frames but no longer than frames.", + "Create a motion that has a duration of exactly frames.", + "Give me a motion that is longer than frames or shorter than frames, but not exceeding frames in duration.", + "Show me a motion that is shorter than frames or longer than frames, but not less than frames in duration.", + "Generate a motion that is no more than frames in length.", + "Can you make a 
motion that is frames or longer?", + "Give me a motion that has a duration of about frames.", + "I need a motion that lasts for frames or more.", + "Create a motion that lasts for a duration of frames.", + "Can you make a motion that lasts for frames, with a certain percentage of variability, but no shorter than frames minus a certain range of frames?", + "Show me a motion that is frames or less in length.", + "Give me a motion that is shorter than frames in duration, but no shorter than frames minus a certain range of frames.", + "Create a motion that is shorter than frames in length.", + "Generate a motion that is no shorter than frames in duration, with a certain percentage of variability.", + "Create a motion that is frames long or less.", + "Give me a motion that is shorter than frames in duration, but no longer than frames plus a certain range of frames.", + "I want to see a motion that lasts for a duration of frames or more, but not exceeding frames.", + "Give me a motion that is shorter than frames in duration, but with a certain range of variability.", + "Generate a motion that is no shorter than frames in duration, plus or minus a certain range of frames.", + "Show me a motion that is longer than frames in duration, but no longer than frames plus a certain percentage.", + "I want to see a motion that is exactly frames in duration, give or take a certain number of frames." + ], + "output": [ + "" + ] + }, + "seclen": { + "class": "l2m", + "input": [ + "Show me a motion that is longer than seconds in duration, with a certain percentage of variability, but no longer than seconds.", + "Generate a motion that is no shorter than seconds but no longer than seconds in duration.", + "Can you create a motion that lasts for seconds?", + "Give me a motion that is shorter than seconds in duration, but with a certain range of variability.", + "Give me a motion that is longer than seconds or shorter than seconds, but not exceeding seconds in duration.", + "Show me a motion that lasts for a minimum of seconds.", + "Give me a motion that is shorter than seconds in duration, but no shorter than seconds minus a certain percentage.", + "Create a motion that is shorter than seconds in length.", + "I want to see a motion that is between seconds and seconds in duration, inclusive.", + "Can you make a motion that lasts for less than seconds?", + "Create a motion that lasts for a duration of at least seconds, but no longer than seconds, plus or minus a certain percentage.", + "I want to see a motion that lasts for a duration of seconds or more, but with a certain range of variability, plus or minus a certain number of seconds.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain percentage.", + "Can you make a motion that is no longer than seconds in duration, but no shorter than seconds minus a certain number of seconds?", + "Show me a motion that is longer than seconds but shorter than seconds.", + "I need a motion that lasts for seconds or more.", + "Show me a motion that is longer than seconds or shorter than seconds.", + "Give me a motion that is shorter than seconds in duration, with a certain percentage of variability.", + "Can you make a motion that lasts for seconds, with a certain percentage of variability?", + "Give me a motion that has a length of seconds or less.", + "Generate a motion that is seconds or longer.", + "Create a motion that lasts for a duration of at least seconds.", + "Generate a motion that has a duration of seconds or more.", + "Create 
a motion that is exactly seconds in length.", + "Give me a motion that is shorter than seconds in duration, with a certain range of variability, plus or minus a certain number of seconds.", + "Create a motion that lasts for a duration of at least seconds, but no more than seconds.", + "Give me a motion that is between seconds and seconds in duration.", + "Can you give me a motion that lasts about seconds?", + "Give me a motion that is shorter than seconds or longer than seconds.", + "I want to see a motion that lasts for a duration between and seconds, with a certain percentage of variability, plus or minus a certain number of seconds.", + "Generate a motion that is no shorter than seconds in duration, but no longer than seconds, with a certain range of variability.", + "I want to see a motion that is between and seconds long.", + "Show me a motion that is shorter than seconds or longer than seconds, but within a certain range of seconds.", + "Show me a motion that is longer than seconds in duration, with a certain percentage of variability, but no longer than seconds, plus or minus a certain percentage.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain percentage, but no longer than seconds, plus or minus a certain range of seconds.", + "Generate a motion that is no shorter than seconds in duration, but with a certain range of variability.", + "Create a motion that lasts for a duration of at least seconds, with a certain range of variability, but no longer than seconds.", + "Generate a motion that lasts for seconds, give or take a few seconds.", + "Create a motion that lasts for a duration of no more than seconds, plus or minus a certain number of seconds.", + "Can you create a motion that is between and seconds in length?", + "Give me a motion that is no shorter than seconds in duration, but no longer than seconds, plus or minus a certain number of seconds.", + "I want to see a motion that is exactly seconds in duration, with a certain range of variability.", + "Can you make a motion that lasts for seconds, plus or minus a certain range of seconds?", + "Show me a motion that is longer than seconds in duration, but with a certain percentage of variability.", + "Can you make a motion that is shorter than seconds in length?", + "Give me a motion that has a duration of about seconds.", + "I want to see a motion that is at least seconds long.", + "Can you create a motion that lasts for seconds or less?", + "Can you make a motion that lasts for seconds, with a certain range of variability?", + "Generate a motion that is no shorter than seconds in duration, with a certain range of variability, but no longer than seconds plus a certain number of seconds.", + "Give me a motion that is shorter than seconds in duration, plus or minus a certain percentage.", + "Can you make a motion that is no shorter than seconds in duration, but no longer than seconds, plus or minus a certain percentage?", + "Generate a motion that is no more than seconds in length.", + "Create a motion that is no shorter than seconds.", + "Can you make a motion that lasts for seconds, with a certain range of variability, but no longer than seconds?", + "I want to see a motion that lasts for exactly seconds.", + "Create a motion that has a duration of less than seconds.", + "Show me a motion that lasts for no more than seconds.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain percentage, but no longer than seconds.", + "I want to see a motion that 
lasts for a duration between and seconds, inclusive.", + "Give me a motion that is seconds in length or more.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain range of seconds, but no longer than seconds.", + "Show me a motion that is seconds or less in length.", + "Create a motion that is seconds long or less.", + "I want to see a motion that lasts for a duration between and seconds.", + "Generate a motion that is seconds long.", + "I want to see a motion that lasts for a duration of seconds or more, but with a certain range of variability.", + "Give me a motion that has a duration of seconds or more but no more than seconds.", + "Can you make a motion that is seconds or longer?", + "Generate a motion that is no shorter than seconds in duration, but no longer than seconds, plus or minus a certain percentage.", + "I want to see a motion that is exactly seconds in duration.", + "Generate a motion that is between seconds and seconds in duration.", + "Show me a motion that has a duration between and seconds.", + "Can you make a motion that lasts for seconds, with a certain range of variability, but no longer than seconds?", + "Show me a motion that is longer than seconds in duration, with a certain percentage of variability.", + "Show me a motion that is shorter than seconds or longer than seconds, but not less than seconds in duration.", + "Generate a motion that is no shorter than seconds in duration, plus or minus a certain range of seconds.", + "Generate a motion that is no shorter than seconds in duration, with a certain percentage of variability.", + "Show me a motion that is shorter than seconds but longer than seconds.", + "Give me a motion that lasts for at least seconds but no more than seconds.", + "Create a motion that lasts for a duration of seconds.", + "Generate a motion that lasts for exactly seconds.", + "I want to see a motion that lasts for a duration between and seconds, with a certain percentage of variability.", + "Give me a motion that is no more than seconds in length.", + "Can you make a motion that lasts for seconds, give or take a certain range of seconds?", + "Show me a motion that has a duration of at least seconds.", + "I want to see a motion that lasts for a duration of seconds or more, but no longer than seconds, plus or minus a certain number of seconds.", + "Generate a motion that is no shorter than seconds in duration.", + "Give me a motion that is no shorter than seconds but no longer than seconds.", + "Create a motion that lasts for a duration of at least seconds, but no longer than seconds, with a certain percentage of variability.", + "Give me a motion that is shorter than seconds in duration, with a certain range of variability.", + "Generate a motion that lasts for a duration of seconds or less.", + "I want to see a motion that lasts for a duration between and seconds, plus or minus a certain number of seconds.", + "Give me a motion that is shorter than seconds in duration, with a certain range of variability, but no shorter than seconds minus a certain percentage.", + "I want to see a motion that is exactly seconds in duration, give or take a certain number of seconds.", + "I want to see a motion that lasts for a duration of seconds or more, but not exceeding seconds.", + "Give me a motion that is shorter than seconds in duration, but with a certain percentage of variability.", + "Can you make a motion that is seconds or shorter?", + "I want to see a motion that lasts for a duration between and seconds, but 
with a certain percentage of variability.", + "Give me a motion that is shorter than seconds in duration, but no shorter than seconds minus a certain range of seconds.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain number of seconds, but no longer than seconds.", + "Can you make a motion that is no longer than seconds in duration?", + "Can you create a motion that lasts for seconds, give or take a few seconds?", + "Create a motion that lasts for a duration of at most seconds.", + "I want to see a motion that lasts for a duration between and seconds, with a certain range of variability.", + "Generate a motion that is no shorter than seconds in duration, with a certain range of variability.", + "Create a motion that lasts for a duration of at least seconds, plus or minus a certain percentage, but no longer than seconds, plus or minus a certain percentage.", + "Can you make a motion that lasts for seconds, with a certain percentage of variability, but no shorter than seconds minus a certain range of seconds?", + "Give me a motion that is shorter than seconds in duration, but no longer than seconds plus a certain range of seconds.", + "Generate a motion that lasts for no longer than seconds.", + "I want to see a motion that is longer than seconds.", + "Generate a motion that is no longer than seconds.", + "Generate a motion that is no longer than seconds in duration, plus or minus a certain percentage.", + "Can you make a motion that lasts for seconds, give or take a certain percentage?", + "Create a motion that lasts for a duration of at least seconds, but no longer than seconds, with a certain range of variability.", + "seconds in duration, with a certain range of variability.", + "Show me a motion that has a duration of seconds.", + "Show me a motion that lasts for at least seconds, but no more than seconds.", + "Show me a motion that is longer than seconds in duration, but no longer than seconds plus a certain percentage.", + "Create a motion that has a duration of exactly seconds.", + "Show me a motion that is longer than seconds in duration, but within a certain range of seconds.", + "I want to see a motion that lasts between and seconds.", + "Show me a motion that is longer than seconds in duration, with a certain range of variability, but no longer than seconds.", + "Show me a motion that is longer than seconds in duration, with a certain range of variability.", + "Show me a motion that is longer than seconds in duration, but with a certain range of variability." 
+ ], + "output": [ + "" + ] + }, + "random": { + "class": "n2m", + "input": [ + "Create movements that are not anticipated.", + "Make the movements impromptu and spontaneous.", + "Produce movements that are natural and unforced.", + "Produce actions that are not rehearsed or predetermined.", + "Generate movements that are not rehearsed or prescribed.", + "Generate movements with no rhyme or reason.", + "Generate movements with no discernible order.", + "Create movements that are unbound and improvisational.", + "Generate random movements without any indication.", + "Generate movements that are not predetermined or preconceived.", + "Make the motions random.", + "Produce movements that are free and spontaneous.", + "Make the motions spontaneous and freeform.", + "Create movements without a pattern.", + "Create movements that are arbitrary.", + "Create movements that are not prearranged.", + "Make the motions random and experimental.", + "Make the motions unexpected and uncontrolled.", + "Create motions that are spontaneous.", + "Create movements that are unrestricted and freeform.", + "Produce random actions with no meaning.", + "Generate actions that are not planned.", + "Make the movements unpremeditated and improvised.", + "Create movements that are free-flowing and improvisational.", + "Create movements that are unscripted and natural.", + "Make the motions random and impulsive.", + "Generate a set of unpredictable movements.", + "Generate movements with no plan or strategy.", + "Create movements that are unscripted and free-flowing.", + "Create motions that are random and unexpected.", + "Make the motions spontaneous and unpredictable.", + "Make the movements completely random.", + "Make the motions random and uncontrollable.", + "Generate movements that are not bound by rules or patterns.", + "Produce movements that are free and intuitive.", + "Make the movements unpredictable and spontaneous.", + "Produce actions that are not prearranged or choreographed.", + "Make the movements random and off-the-cuff.", + "Produce actions that are not structured or choreographed.", + "Make the movements unpredictable and unregulated.", + "Generate movements that are not choreographed.", + "Make the motions random and intuitive.", + "Generate movements that are irregular.", + "Create movements that are unpatterned.", + "Generate movements that are not predetermined or rehearsed.", + "Produce actions that are not prescribed.", + "Generate movements that are unorthodox and unexpected.", + "Produce actions that are not predetermined.", + "Produce movements that are random and uncontrolled.", + "Make the movements intuitive and unscripted.", + "Produce movements that are unrestrained and natural.", + "Create movements that are uncontrolled and freeform.", + "Generate an arbitrary sequence of actions.", + "Make the movements unpredictable and sporadic.", + "Create movements that are unanticipated and uncontrolled.", + "Generate movements with no set format or plan.", + "Make the motions unpredictable and unconstrained.", + "Generate movements that are spontaneous and natural.", + "Make the movements unpredictable and haphazard.", + "Create movements that are free and unscripted.", + "Generate movements with no set order or pattern.", + "Produce movements that are unexpected and unscripted.", + "Make the motions random and unconventional.", + "Create unpredictable movements.", + "Create movements that are free and unregulated.", + "Produce movements that are not restricted or premeditated.", + "Make the 
motions random and unmethodical.", + "Generate movements that are unrehearsed and impromptu.", + "Make the motions unpredictable and unstructured.", + "Produce movements that are not planned or regulated.", + "Generate movements that are unpredictable and unplanned.", + "Make the motions surprising and spontaneous.", + "Make the movements random and erratic.", + "Create movements that are not regulated or controlled.", + "Produce movements that are unstructured and unpredictable.", + "Make the motions unpredictable.", + "Produce movements with no predetermined pattern.", + "Generate movements with no predetermined structure.", + "Create motions that are random and unanticipated.", + "Produce actions that are not predetermined or anticipated.", + "Produce movements that are not planned or choreographed.", + "Produce movements that are not structured or scripted.", + "Create movements that are not preplanned or calculated.", + "Generate movements that are unscripted and unplanned.", + "Generate movements without a rhyme or reason.", + "Produce movements that are not choreographed or rehearsed.", + "Produce movements that are not premeditated or rehearsed.", + "Produce movements that are free and unguided.", + "Create movements that are intuitive and unstructured.", + "Make the movements surprising.", + "Produce random actions without a pattern.", + "Generate movements that are intuitive and spontaneous.", + "Generate movements that are not structured or planned.", + "Produce actions that are not predetermined or rehearsed.", + "Make the motions completely unpredictable.", + "Generate random motions.", + "Produce movements that are free and uncalculated.", + "Create movements that are unbounded and unrestrained.", + "Produce movements that are intuitive and unscripted.", + "Make the movements impulsive and instinctual.", + "Make the movements unpredictable and unplanned.", + "Produce movements that are chaotic.", + "Create movements that are unstructured and spontaneous.", + "Generate random gestures.", + "Produce movements that are not controlled.", + "Make the movements random and instinctual.", + "Generate movements that are unorthodox.", + "Create movements that are unrestricted and free-flowing.", + "Generate movements with no prescribed format.", + "Make the movements random and divergent.", + "Generate movements that are freeform.", + "Create movements that are free and unrestricted.", + "Create movements that are free and instinctual.", + "Create movements that are free-flowing and unreserved.", + "Make the motions random and unstructured.", + "Make the movements unpredictable and unguided.", + "Create movements that are free-form and", + "Create movements that are uncontrolled and impulsive.", + "Create movements that are free-flowing.", + "Generate movements with no predetermined structure or plan.", + "Create movements with no logic.", + "Generate movements that are unregulated and spontaneous.", + "Make the motions random and uncoordinated.", + "Produce movements that are not scripted or predetermined.", + "Make the motions random and uncalculated.", + "Give me motions as you like.", + "Generate movements that are uncoordinated.", + "Create movements that are free and unstructured.", + "Generate movements with no fixed pattern or order.", + "Make the movements spontaneous and unencumbered.", + "Produce unexpected actions.", + "Generate movements that are spontaneous and unscripted.", + "Generate movements that are not restricted by form or structure.", + "Produce actions that are 
not premeditated or predetermined.", + "Produce random actions that are not premeditated.", + "Generate movements that are unrestricted and spontaneous.", + "Make the movements spontaneous and off-the-cuff.", + "Generate movements that are unorganized.", + "Make the motions unanticipated and freeform.", + "Generate movements that are not choreographed or rehearsed.", + "Generate movements with no preset plan.", + "Create movements that are free-flowing and unscripted.", + "Create movements that are unarranged.", + "Produce actions that are not scripted or preconceived.", + "Generate actions that are not predictable." + ], + "output": [ + "" + ] + } + }, + "Motion-to-Text": { + "caption": { + "class": "m2t", + "input": [ + "Describe the motion represented by using plain English.", + "Describe the motion represented by in plain English.", + "What kind of motion is displayed in ? Describe it in text.", + "Describe the motion portrayed in using words.", + "Please describe the movement shown in using words.", + "Provide a summary of the action depicted in using language.", + "Describe the action being shown in using language.", + "Give me a brief summary of the motion displayed in .", + "What is being demonstrated by ? Please describe it in text.", + "Describe the movement demonstrated in using language.", + "What does the communicate? Please describe it in words.", + "Describe the motion in using natural language.", + "Describe the movement shown in using natural language.", + "Describe the motion displayed in using natural language.", + "Explain the motion shown in using natural language.", + "Please explain the action being represented in using words.", + "Describe the action being represented by using text.", + "What kind of movement is being represented by ?", + "Describe the movement being portrayed in in plain English.", + "What kind of motion is shown in ? Explain it in plain English.", + "Describe the motion represented in using natural language.", + "What is happening in ? Describe it using text.", + "Please provide a text-based explanation of the motion demonstrated in .", + "What does the represent? Please describe it in text.", + "What does the demonstrate? Please describe it using text.", + "Please explain the motion being shown in using language.", + "Please describe the movement being demonstrated by using text.", + "Provide a text-based explanation of the movement demonstrated in .", + "Describe the motion portrayed in in natural language.", + "Describe the movement being demonstrated by using words.", + "Provide a summary of the motion demonstrated in using words.", + "What kind of motion is being illustrated by ? Explain it using natural language.", + "Provide a text-based explanation of the action being shown.", + "What does the depict? Describe it in language.", + "What does the show? Please explain it using language.", + "Please provide a description of the movement in in words.", + "Describe the motion illustrated in in plain English.", + "What kind of action is being represented in ? Explain it in text.", + "Can you tell me what is happening in using natural language?", + "Provide a textual description of .", + "Give me a summary of the movement demonstrated in using words.", + "Describe the action illustrated in using plain English.", + "Explain what the is showing using language.", + "Provide a text-based explanation of the action shown in .", + "What is being shown in ? 
Please explain it using words.", + "Describe the movement being represented by in plain English.", + "Describe the motion illustrated in using natural language.", + "What does the signify? Please explain it using words.", + "Provide a description of the motion in using plain English.", + "What kind of action is being shown in ? Explain it in natural language.", + "Describe the motion represented in in language", + "Describe the action being demonstrated by using natural language.", + "What kind of motion is shown in ? Describe it using language.", + "What does the display? Please describe it in language.", + "Please explain the movement being represented by using text.", + "Can you explain what is happening in using text?", + "What kind of action is being illustrated by ? Please describe it in text.", + " Generate text:", + "What kind of action is portrayed in ? Explain it in text.", + "Describe the motion displayed in in natural language.", + "Give me a brief summary of the action being demonstrated by .", + "Describe the action being shown in using plain English.", + "What is being depicted in ? Please describe it in language.", + "What does the demonstrate? Please explain it in plain English.", + "Please provide a text-based explanation of the action being displayed in .", + "Can you tell me what is happening in using plain English?", + "Describe the action depicted in using natural language.", + "What is happening in ? Please describe it using text.", + "What does the convey?", + "Can you tell me what is happening in using plain language?", + "What kind of motion is shown in ? Explain it in text.", + "Describe the movement being represented by using language.", + "Describe the motion depicted in using words.", + "Provide a summary of the movement demonstrated in using language.", + "Provide a textual explanation of the motion in .", + "What does the signify? Please explain it in text.", + "Describe the movement demonstrated in using natural language.", + "Can you explain the action in using words?", + "Provide a summary of the movement being demonstrated in using language.", + "Can you explain what is happening in using natural language?", + "What is happening in ? Please describe it using words.", + "Provide a text-based explanation of the action demonstrated in .", + "Generate a sentence that explains the action in .", + "Explain the motion demonstrated in using natural language.", + "Describe the motion being shown in using language.", + "What kind of action is being demonstrated in ? Explain it in text.", + "Please explain the movement depicted in using text.", + "Give me a summary of the movement being shown in using words.", + "Explain what the is demonstrating using language.", + "Explain the action being represented by using language.", + "Describe the motion in using language.", + "What kind of movement is being shown in ? Explain it using language.", + "Can you explain what is happening in using words?", + "What is being shown in ? Please describe it using language.", + "What is being shown in ? Please explain it in natural language.", + "What does the depict? 
Please explain it using language.", + "Please provide a description of the action being demonstrated by .", + "Provide a summary of the action demonstrated in using plain English.", + "Provide a description of the action demonstrated in using language.", + "Give me a summary of the motion demonstrated in in words.", + "Provide a text-based explanation of the motion being displayed in .", + "What kind of action is being displayed in ? Explain it in plain English.", + "What is being demonstrated in ? Please explain it using text.", + "Describe the motion illustrated in in words.", + "Explain the motion portrayed in using language.", + "What kind of action is demonstrated in ? Explain it in words.", + "Provide a summary of the action being shown in using language.", + "Describe the motion being depicted in using plain English.", + "Please provide a description of the movement in .", + "Please explain the action in using natural language.", + "Please explain the movement shown in using natural language.", + "Describe the movement being represented by using plain English.", + "Provide a text-based explanation of the motion in .", + "Give me a summary of the motion being displayed in using words.", + "What action is depicted in ? Please explain it in text.", + "What kind of action is being illustrated by ? Explain it in plain English.", + "Explain the movement illustrated in using text.", + "Generate text. Input: . Output: ", + "What does the show? Please describe it in language.", + "Can you tell me what is happening in using language?", + "Can you explain what is happening in using plain English?", + "Describe the movement demonstrated in in words.", + "Describe the movement demonstrated in using plain English.", + "Describe the motion portrayed in using plain English.", + "Provide a description of the action shown in using language.", + "What does the demonstrate? Please explain it using language.", + "Explain the action demonstrated in using words.", + "Please explain the action being represented by using text.", + "Describe the movement being shown in using natural language.", + "Please describe the movement depicted in using natural language.", + "Describe the action shown in in plain English.", + "Provide a textual description of the motion depicted in .", + "What kind of motion is being portrayed in ? Explain it in language.", + "Please provide a text-based explanation of the action in .", + "Explain the movement being depicted in using words.", + "What does the demonstrate?", + "Please explain the action being represented by using language.", + "What is being displayed in ? Please explain it in text.", + "Please explain the action depicted in using words.", + "Describe the action depicted in using text.", + "What is happening in ? Please describe it using language.", + "Please provide a text-based explanation of the motion in .", + "What does the convey? Please explain it using text.", + "What kind of motion is illustrated in ? Please describe it in words.", + "Please provide a text-based explanation of the motion displayed in .", + "Explain the motion being shown in using natural language.", + "Provide a description of the action in using words.", + "Describe the action depicted in in plain English.", + "Provide a text-based explanation of the movement in .", + "What kind of motion is being demonstrated in ? Explain it in text.", + "Please explain the action illustrated in using text.", + "What kind of action is shown in ? 
Explain it in text.", + "Give me a brief summary of the movement depicted in .", + "Describe the motion being represented by in words.", + "What action is being shown in ? Please describe it in text.", + "Give me a brief summary of the action shown in .", + "Explain the movement shown in using words.", + "Describe the motion demonstrated in in natural language.", + "Explain the motion being demonstrated in in words.", + "What is happening in ? Please explain it using natural language.", + "Please provide a description of the motion in using plain English.", + "Provide a summary of the action shown in in words.", + "What kind of movement is shown in ? Explain it in language.", + "Describe the motion that is being represented by in words.", + "Please provide a description of the motion in using plain language.", + "What kind of action is being displayed in ? Explain it in language.", + "Describe the action represented in using natural language.", + "What does the signify? Describe it using text.", + "Provide a description of the motion shown in using natural language.", + "Explain the motion illustrated in using language.", + "Generate text for :", + "Please provide a description of the motion in using words.", + "Give me a brief summary of the movement represented in .", + "Please provide a description of the motion in using natural language.", + "Explain the movement portrayed in using language.", + "Describe the action depicted in using words.", + "What kind of motion is being shown in ? Describe it in words.", + "What kind of movement is being demonstrated in ? Explain it in text.", + "Explain what is happening in using natural language.", + "Provide a text-based explanation of what is happening in .", + "Give me a brief summary of the action in .", + "Describe the action illustrated in in natural language.", + "What kind of motion is demonstrated in ? Describe it in words.", + "What does the communicate? Please describe it in language.", + "Explain the movement shown in in plain English.", + "Describe the movement portrayed in using natural language.", + "Explain the motion depicted in using words.", + "Provide a description of the motion being displayed in using language.", + "Provide a text-based explanation of the action being shown in .", + "What kind of motion is being illustrated in ? Explain it in words.", + "What is being shown in ? Please describe it in text." 
+ ], + "output": [ + "" + ] + }, + "framelen": { + "class": "m2t", + "input": [ + "What is happening in during a duration of frames?", + "Describe the motion depicted in over frames.", + "Write a text account for the that is frames in length.", + "Describe the movement shown in over a duration of frames.", + "Write a text description for the that lasts frames.", + "What is happening in that is depicted over frames?", + "Describe the movement being shown in that is depicted for frames.", + "Describe the motion being shown in for frames.", + "Generate a text summary for the that spans frames.", + "Write a text description for the that is shown for frames.", + "Describe the movement exhibited in for frames.", + "Generate a text description of the that is frames long.", + "What is being shown in over a duration of frames?", + "Generate a text description for the that lasts for frames.", + "Write a text summary of the that takes frames to complete.", + "What is the action being exhibited in that lasts for frames?", + "Generate a text account for the that lasts for frames.", + "Summarize the activity being displayed in that is depicted for a length of frames.", + "Describe the movement portrayed in that lasts frames.", + "Describe the motion depicted in for a length of frames.", + "Summarize the activity being shown in over frames.", + "Generate a text summary for the that is shown for frames.", + "Summarize the activity being shown in for frames.", + "Describe the movement depicted in over frames.", + "Provide a written summary of the lasting frames.", + "What is being demonstrated in that is frames long?", + "Describe the movement shown in for frames.", + "What is happening in for a duration of frames?", + "What is being demonstrated in for a length of frames?", + "Summarize the activity being portrayed in over a duration of frames.", + "What is being shown in for a length of frames?", + "Provide a written summary of the that lasts for frames.", + "Describe the movement displayed in for a length of frames.", + "Describe the motion being displayed in that takes frames to complete.", + "What is happening in over frames?", + "Generate a text summary for the lasting frames.", + "What is being demonstrated in over frames?", + "Write a text summary of the that lasts frames.", + "Summarize the activity displayed in during frames.", + "Generate a text account for the that is frames in length.", + "Generate a text description for the that is frames long.", + "Summarize the activity being portrayed in over frames.", + "Generate a text account for the that takes frames to complete.", + "Provide a written description of the that takes frames to complete.", + "Describe the motion being displayed in that lasts for frames.", + "Describe the motion shown in over frames.", + "What is happening in that takes frames to complete?", + "Provide a written summary of the that lasts for a duration of frames.", + "Write a text account of the that takes place over frames.", + "Provide a written summary of the that is shown for frames.", + "Summarize the activity being portrayed in for frames.", + "What is the action being displayed in that takes frames to complete?", + "Provide a written description of the that is shown for a duration of frames.", + "What is happening in that is exhibited over a duration of frames?", + "Summarize the activity being displayed in over frames.", + "What is happening in for a length of frames?", + "Describe the movement being displayed in over frames.", + "What activity is shown in for 
frames?", + "Describe the motion being exhibited in that takes place for a duration of frames.", + "What is happening in that is frames long?", + "Write a text summary of the that lasts for a duration of frames.", + "Provide a written description of the for a length of frames.", + "Generate a text summary for the that lasts for frames.", + "What is happening in that is exhibited over frames?", + "Provide a written description of the that lasts for frames.", + "Describe the motion exhibited in that lasts frames.", + "What action is being shown in over frames?", + "Write a text summary for the that is exhibited over a length of frames.", + "Provide a written summary of the that spans frames.", + "Summarize the activity portrayed in that lasts frames.", + "Describe the movement shown in that spans frames.", + "What is the action being exhibited in for a length of frames?", + "Summarize the activity portrayed in for frames.", + "Generate a text account for the that is frames long.", + "What is being demonstrated in over a duration of frames?", + "Generate a text description for the that takes frames to complete.", + "Generate a text description for the lasting frames.", + "What is the action being shown in over a duration of ", + "What is happening in that is exhibited over a length of frames?", + "Describe the movement exhibited in that is shown for a length of frames.", + "What is happening in that is depicted over a duration of frames?", + "What is happening in over a length of frames?", + "Summarize the activity portrayed in during frames.", + "Describe the movement depicted in for a duration of frames.", + "Write a text account for the that takes place over a length of frames.", + "Summarize the activity displayed in that is shown for frames.", + "Provide a text description for the with a length of frames.", + "Provide a written account of the for frames.", + "Describe the motion exhibited in over a length of frames.", + "Summarize the activity displayed in for frames.", + "What is the action being demonstrated in over frames?", + "Write a text description for the that spans frames.", + "What is being demonstrated in over a length of frames?", + "What is being demonstrated in that takes frames to complete?", + "Describe the motion exhibited in that is frames in length.", + "Provide a written summary of the for a duration of frames.", + "Describe the movement shown in for a length of frames.", + "Generate a text summary for the that lasts for a length of frames.", + "Generate a text description for the that is shown over frames.", + "What is being shown in for frames?", + "What is the action being demonstrated in for frames?", + "Provide a written account of the that is shown for frames.", + "Describe the motion displayed in over frames.", + "Write a text account for the that lasts for frames.", + "Describe the movement shown in over a length of frames.", + "Describe the movement being portrayed in that is shown for a duration of frames.", + "Generate a text summary for the that is displayed for frames.", + "Describe the movement exhibited in that takes frames to complete.", + "Describe the movement shown in for a duration of frames.", + "Describe the motion shown in that lasts for a length of frames.", + "Generate a text summary for the that lasts frames.", + "Describe the motion being exhibited in that takes frames to complete.", + "Generate a text description for the that is exhibited over a length of frames.", + "Write a text summary of the that is shown for frames.", + "Describe the 
movement being shown in that is exhibited for a duration of frames.", + "Describe the motion being displayed in that takes place for frames.", + "What is happening in that lasts frames?", + "Describe the activity in over frames.", + "Generate a text account of the that spans frames.", + "What is the action being portrayed in over frames?", + "Write a text description of the that takes frames to complete.", + "Generate a text summary for the that takes frames to complete.", + "Summarize the activity portrayed in that takes frames to complete.", + "Describe the movement exhibited in over frames.", + "Write a text account for the that lasts for a duration of frames.", + "Describe the motion being displayed in over a duration of frames.", + "What is the action being portrayed in over a length of frames?", + "Provide a written description of the that spans frames.", + "Describe the motion shown in that is frames long.", + "Summarize the activity being displayed in that takes frames to complete.", + "Provide a written summary of the that takes frames to complete.", + "Provide a summary of the activity portrayed in for frames.", + "Generate a text summary for the that is shown for a length of frames.", + "Generate a text description for the that is depicted for frames.", + "Provide a written summary of the that takes place over a duration of frames.", + "Provide a written account of the that takes frames to complete.", + "Describe the movement being shown in that is frames long.", + "Describe the movement shown in that takes frames to complete.", + "Write a text description of the that is shown for frames.", + "Provide a written description of the that is shown for frames.", + "Describe the motion exhibited in during frames.", + "Provide a written description of the that lasts for a duration of frames.", + "Provide a written summary of the that takes place over frames.", + "Write a text summary for the that is frames long.", + "Summarize the activity being portrayed in over a length of frames.", + "What is the action being shown in over a duration of frames?", + "Summarize the activity displayed in for a duration of frames.", + "Write a text summary of the that takes place for frames.", + "Summarize the activity being shown in that takes frames to complete.", + "Write a text account for the that is shown for frames.", + "What is the action being shown in that lasts frames?", + "What is being demonstrated in for a duration of frames?", + "Provide a written description of the shown for a duration of frames.", + "Summarize the activity being shown in over a duration of frames.", + "Describe the motion displayed in that lasts for frames.", + "Describe the motion represented by for frames.", + "Describe the motion depicted by during frames.", + "Generate a text account for the that is shown for frames.", + "Generate a text description for the that is displayed for a duration of frames.", + "Describe the movement shown in that lasts for frames.", + "Describe the movement being displayed in that is frames in length.", + "What is happening in over a duration of frames?", + "Describe the motion portrayed in over frames.", + "What activity is being demonstrated in for a duration of frames?" 
+ ], + "output": [ + "" + ] + }, + "seclen": { + "class": "m2t", + "input": [ + "Describe the movement being shown in that is exhibited for a duration of seconds.", + "What is happening in over a length of seconds?", + "Describe the motion being displayed in that takes seconds to complete.", + "Describe the movement exhibited in for seconds.", + "Provide a written description of the that lasts for seconds.", + "Write a text account for the that lasts for a duration of seconds.", + "Generate a text account for the that takes seconds to complete.", + "Generate a text summary for the that lasts for a length of seconds.", + "What is the action being demonstrated in over seconds?", + "Describe the motion exhibited in over a length of seconds.", + "Generate a text summary for the that lasts seconds.", + "Generate a text description for the that is shown over seconds.", + "Generate a text description for the that is depicted for seconds.", + "Describe the motion depicted in for a length of seconds.", + "What is the action being portrayed in over a length of seconds?", + "Describe the movement displayed in for a length of seconds.", + "Describe the movement shown in that spans seconds.", + "What is the action being portrayed in over seconds?", + "What is happening in that is seconds long?", + "Write a text description for the that is shown for seconds.", + "What is being demonstrated in over a length of seconds?", + "Summarize the activity being portrayed in over seconds.", + "Summarize the activity portrayed in for seconds.", + "Write a text summary of the that is shown for seconds.", + "Generate a text account of the that spans seconds.", + "Describe the movement depicted in for a duration of seconds.", + "Generate a text account for the that is seconds long.", + "Summarize the activity being portrayed in for seconds.", + "Generate a text description for the that is seconds long.", + "Describe the movement portrayed in that lasts seconds.", + "What is happening in for a duration of seconds?", + "Write a text summary of the that takes seconds to complete.", + "Generate a text summary for the that lasts for seconds.", + "Provide a written description of the that lasts for a duration of seconds.", + "Describe the movement being portrayed in that is shown for a duration of seconds.", + "What is being shown in for a length of seconds?", + "What is being demonstrated in that is seconds long?", + "Describe the movement being shown in that is depicted for seconds.", + "Summarize the activity being displayed in that is depicted for a length of seconds.", + "Provide a written summary of the that takes place over a duration of seconds.", + "Provide a written summary of the that takes seconds to complete.", + "Generate a text description for the that is exhibited over a length of seconds.", + "Provide a written summary of the that lasts for seconds.", + "Describe the motion being exhibited in that takes seconds to complete.", + "What is being demonstrated in over seconds?", + "Generate a text summary for the lasting seconds.", + "What is happening in that is exhibited over seconds?", + "What is happening in that takes seconds to complete?", + "Write a text description for the that lasts seconds.", + "Describe the movement being displayed in over seconds.", + "Describe the movement shown in that lasts for seconds.", + "Summarize the activity displayed in during seconds.", + "Provide a written account of the for seconds.", + "Provide a written account of the that is shown for seconds.", + "What is being shown 
in for seconds?", + "Summarize the activity portrayed in during seconds.", + "Describe the motion exhibited in during seconds.", + "Describe the motion shown in that is seconds long.", + "Generate a text description for the that lasts for seconds.", + "Generate a text summary for the that takes seconds to complete.", + "Describe the movement shown in for seconds.", + "Describe the movement shown in for a length of seconds.", + "Generate a text summary for the that spans seconds.", + "Provide a written summary of the that takes place over seconds.", + "What activity is shown in for seconds?", + "What is happening in for a length of seconds?", + "What is being shown in over a duration of seconds?", + "What is happening in that is exhibited over a duration of seconds?", + "Describe the motion being shown in for seconds.", + "Describe the motion being displayed in that lasts for seconds.", + "Describe the motion represented by for seconds.", + "Summarize the activity portrayed in that lasts seconds.", + "Describe the movement being displayed in that is seconds in length.", + "What is happening in over seconds?", + "Describe the movement being shown in that is seconds long.", + "Describe the movement exhibited in that takes seconds to complete.", + "Generate a text description for the that is displayed for a duration of seconds.", + "Describe the motion portrayed in over seconds.", + "Provide a written summary of the for a duration of seconds.", + "Provide a written summary of the that is shown for seconds.", + "Write a text account for the that lasts for seconds.", + "Provide a written account of the that takes seconds to complete.", + "Provide a written description of the that is shown for seconds.", + "Provide a written description of the shown for a duration of seconds.", + "Summarize the activity being displayed in over seconds.", + "Generate a text account for the that is shown for seconds.", + "Describe the movement shown in for a duration of seconds.", + "Generate a text description for the lasting seconds.", + "Generate a text account for the that lasts for seconds.", + "What is being demonstrated in that takes seconds to complete?", + "Summarize the activity being portrayed in over a duration of seconds.", + "Describe the movement shown in over a duration of seconds.", + "Provide a written description of the that takes seconds to complete.", + "What is the action being shown in that lasts seconds?", + "What is being demonstrated in for a length of seconds?", + "Write a text account for the that is shown for seconds.", + "What is happening in that is depicted over a duration of seconds?", + "Describe the motion being displayed in over a duration of seconds.", + "Generate a text description of the that is seconds long.", + "Write a text account of the that takes place over seconds.", + "Summarize the activity portrayed in that takes seconds to complete.", + "Summarize the activity being displayed in that takes seconds to complete.", + "Generate a text summary for the that is shown for seconds.", + "What is being demonstrated in for a duration of seconds?", + "Describe the motion being exhibited in that takes place for a duration of seconds.", + "Describe the motion depicted in over seconds.", + "Summarize the activity displayed in that is shown for seconds.", + "What is the action being shown in over a duration of seconds?", + "Generate a text account for the that is seconds in length.", + "What action is being shown in over seconds?", + "Describe the motion exhibited in that is seconds 
in length.", + "Write a text summary of the that lasts seconds.", + "What is being demonstrated in over a duration of seconds?", + "Generate a text summary for the that is displayed for seconds.", + "What is the action being exhibited in for a length of seconds?", + "Write a text account for the that is seconds in length.", + "What is happening in that lasts seconds?", + "Describe the movement exhibited in over seconds.", + "Describe the movement exhibited in that is shown for a length of seconds.", + "What is happening in over a duration of seconds?", + "Write a text account for the that takes place over a length of seconds.", + "What is the action being shown in over a duration of ", + "What is happening in that is depicted over seconds?", + "Describe the motion being displayed in that takes place for seconds.", + "Describe the motion exhibited in that lasts seconds.", + "Describe the movement shown in over a length of seconds.", + "Write a text summary for the that is exhibited over a length of seconds.", + "Generate a text description for the that takes seconds to complete.", + "Write a text summary of the that takes place for seconds.", + "Summarize the activity being shown in over seconds.", + "Provide a summary of the activity portrayed in for seconds.", + "Provide a written description of the that is shown for a duration of seconds.", + "Provide a written description of the that spans seconds.", + "Summarize the activity being portrayed in over a length of seconds.", + "Write a text summary of the that lasts for a duration of seconds.", + "Describe the movement depicted in over seconds.", + "Summarize the activity displayed in for seconds.", + "Describe the motion depicted by during seconds.", + "What is happening in during a duration of seconds?", + "Summarize the activity being shown in for seconds.", + "Provide a written summary of the that spans seconds.", + "What activity is being demonstrated in for a duration of seconds?", + "Describe the motion shown in over seconds.", + "Provide a written summary of the lasting seconds.", + "Generate a text summary for the that is shown for a length of seconds.", + "Describe the motion displayed in over seconds.", + "What is the action being exhibited in that lasts for seconds?", + "Summarize the activity being shown in that takes seconds to complete.", + "Write a text description of the that is shown for seconds.", + "Summarize the activity displayed in for a duration of seconds.", + "Summarize the activity being shown in over a duration of seconds.", + "What is the action being displayed in that takes seconds to complete?", + "Provide a text description for the with a length of seconds.", + "Write a text summary for the that is seconds long.", + "Describe the movement shown in that takes seconds to complete.", + "What is the action being demonstrated in for seconds?", + "Write a text description for the that spans seconds.", + "Describe the motion displayed in that lasts for seconds.", + "What is happening in that is exhibited over a length of seconds?", + "Write a text description of the that takes seconds to complete.", + "Describe the motion shown in that lasts for a length of seconds.", + "Provide a written description of the for a length of seconds.", + "Provide a written summary of the that lasts for a duration of seconds.", + "Describe the activity in over seconds." 
+ ], + "output": [ + "" + ] + }, + "count-frame": { + "class": "m2l", + "input": [ + "What is the duration of 's gestures in frames?", + "Compute the frame count for 's body movements.", + "Determine the total frames for 's body movements.", + "Calculate the frame duration for 's poses.", + "What is the total duration of in frames?", + "How long does 's movements last in frames?", + "Calculate the length of in frames.", + "Determine the total frames for 's movements.", + "How long does last in frames?", + "Generate the frames for 's body movements.", + "Compute the frame count for 's gestures.", + "Compute the frame count for 's poses.", + "How many frames are in actions?", + "Find the frame count for 's body movements.", + "Calculate the total frames for .", + "Generate the frames for 's actions.", + "Generate the frames for 's poses.", + "Determine the number of frames in .", + "What is the frame length of ?", + "Find the frame count for 's poses.", + "What is the duration of 's motion in frames?", + "What is the total number of frames for ?", + "Generate the frames for 's gestures.", + "Calculate the length of 's poses in frames.", + "Compute the duration in frames for 's body movements.", + "Generate the frame length for 's movements.", + "Find the frame count for .", + "Determine the total frames for 's motion.", + "Generate the frame length for 's gestures.", + "Determine the number of frames for 's gestures.", + "Calculate the frame duration for 's actions.", + "What is the duration of 's body movements in frames?", + "How long does 's gestures last in frames?", + "Determine the total frames for 's gestures.", + "What is the total duration of 's motion in frames?", + "What is the total duration of 's movements in frames?", + "Compute the duration in frames for 's actions.", + "How many frames are in 's poses?", + "What is the duration of 's actions in frames?", + "What is the duration of 's movements in frames?", + "Determine the total frames for 's poses.", + "Generate the frames for 's movements.", + "Compute the frame count for 's movements.", + "Determine the number of frames in 's motion.", + "Compute the duration in frames for 's movements.", + "How many frames are in 's activities?", + "How many frames are in 's body movements?", + "Calculate the frame duration for .", + "Calculate the length of 's movements in frames.", + " . 
Generate length:", + "Calculate the frame duration for 's gestures.", + "Determine the frame count of .", + "How long does last in terms of frames?", + "How long does 's actions last in frames?", + "Find the frame count for 's motion.", + "Find the frame count for 's actions.", + "What is the total duration of 's gestures in frames?", + "Determine the number of frames for 's movements.", + "Determine the number of frames for 's poses.", + "Generate the length in frames for 's motion.", + "Calculate the length of 's body movements in frames.", + "How many frames are there in ?", + "Generate the number of frames for .", + "What is the total duration of 's body movements in frames?", + "Calculate the frame duration for 's movements.", + "How long does 's body movements last in frames?", + "Compute the duration in frames for 's gestures.", + "How many frames are in 's movements?", + "Determine the duration of in frames.", + "Compute the length of in frames.", + "Generate the frame length for 's poses.", + "Generate the duration in frames for .", + "Calculate the frame duration for 's motion.", + "What is the duration of 's poses in frames?", + "Determine the total frames for 's actions.", + "Compute the frame count for 's motion.", + "Generate the frame length for .", + "What is the total duration of 's poses in frames?", + "Generate the frame length for 's motion.", + "Find the frame count for 's gestures.", + "Determine the number of frames for 's activities.", + "Generate the frame length for 's body movements.", + "Calculate the length of 's activities in frames.", + "Find the frame count for 's movements.", + "Compute the number of frames for .", + "Compute the duration in frames for 's poses.", + "How many frames are in 's gestures?", + "What is the total duration of 's actions in frames?", + "Calculate the length of 's actions in frames.", + "Count how many frames are there for :", + "You can see the motion. Generate the frames of given motion. Input: . Output: ", + "Generate the frame length for 's actions.", + "How many frames are in 's motion?", + "Compute the duration in frames for 's motion.", + "Determine the number of frames for 's body movements.", + "How long does 's motion last in frames?", + "Determine the number of frames for 's actions.", + "Calculate the length of 's gestures in frames.", + "Compute the frame count for 's actions.", + "How long does 's poses last in frames?", + "Calculate the duration of 's motion in frames.", + "Compute the duration of in frames.", + "What is the duration of the motion ?", + "Calculate the frame duration for 's body movements." 
+ ], + "output": [ + "There are frames in the motion.", + "The length of given motion is about frames.", + "The motion seen before lasts approximately frames.", + "There are frames in the motion.", + "The length of given motion is about frames.", + "The motion seen before lasts approximately frames.", + "The duration of the motion is frames.", + "The motion contains frames.", + "There are about frames in the motion.", + "The frame count for the motion is .", + "The motion lasts for frames.", + "The length of the motion is frames.", + "The motion has a duration of frames.", + "The total number of frames in the motion is .", + "The frame length for the motion is frames.", + "The duration of the motion in frames is .", + "The motion consists of frames.", + "The number of frames for the motion is .", + "The motion has a length of frames.", + "The motion's duration is frames.", + "The frame duration for the motion is .", + "The motion lasts for about frames.", + "The motion contains a total of frames.", + "The number of frames in the motion is approximately .", + "The length of the motion is roughly frames.", + "The motion's frame count is .", + "The duration of the motion is roughly frames.", + "The motion has a total of frames.", + "The frame count for the motion is roughly .", + "The motion has a frame length of frames.", + "The motion lasts for approximately frames.", + "The duration of the motion is approximately frames.", + "The motion's length is frames.", + "The total number of frames in the motion is roughly .", + "The motion has a duration of about frames.", + "The motion contains approximately frames.", + "The frame duration for the motion is about .", + "The motion has a total frame count of .", + "The length of the motion is estimated to be frames.", + "The motion's duration is estimated to be frames.", + "The number of frames in the motion is estimated to be .", + "The frame count for the motion is estimated to be .", + "The frame length for the motion is estimated to be frames.", + "The motion's total duration is approximately frames.", + "The motion lasts for an estimated frames.", + "The total number of frames in the motion is estimated to be .", + "The motion's frame count is estimated to be .", + "The motion has an estimated frame length of frames.", + "The motion has an estimated duration of frames.", + "The length of the motion is approximately frames.", + "The duration of the motion is roughly estimated to be frames.", + "The motion has an approximate frame count of .", + "The frame length for the motion is roughly estimated to be frames.", + "The motion lasts for roughly estimated frames.", + "The duration of the motion is estimated to be about frames.", + "The motion contains an estimated frames.", + "The frame count for the motion is approximately .", + "The motion has an estimated length of frames.", + "The motion's duration is approximately estimated to be frames.", + "The total number of frames in the motion is estimated to be around .", + "The frame duration for the motion is estimated to be .", + "The motion lasts for roughly frames.", + "The length of the motion is estimated to be around frames.", + "The motion's total duration is estimated to be frames.", + "The motion has approximately frames.", + "The frame count for the motion is around .", + "The motion has a rough frame length of frames.", + "The motion lasts for an approximate frames.", + "The total number of frames in the motion is roughly estimated to be .", + "The length of the motion is roughly 
estimated to be frames.", + "The duration of the motion is approximately estimated to be around frames.", + "The frame duration for the motion is roughly estimated to be .", + "The motion contains around frames.", + "The frame count for the motion is approximately estimated to be .", + "The motion has an estimated duration of around frames.", + "The length of the motion is estimated to be roughly frames.", + "The motion lasts for an estimated duration of around frames.", + "The total number of frames in the motion is estimated to be approximately .", + "The motion has an estimated frame length of around frames.", + "The motion's duration is around frames.", + "The frame count for the motion is roughly estimated to be around .", + "The motion has a total frame count of around .", + "The length of the motion is estimated to be approximately frames.", + "The motion lasts for roughly estimated frames.", + "The total number of frames in the motion is estimated to be roughly around .", + "The motion has an estimated duration of roughly frames.", + "The motion contains approximately estimated frames.", + "The motion's frame count is around .", + "The frame length for the motion is approximately estimated to be frames.", + "The motion lasts for around frames.", + "The duration of the motion is estimated to be around frames.", + "The total number of frames in the motion is estimated to be roughly .", + "The frame duration for the motion is roughly estimated to be .", + "The motion has an approximate length of frames.", + "The motion's duration is roughly estimated to be frames.", + "The frame count for the motion is approximately around .", + "The motion has a rough duration of frames.", + "The length of the motion is roughly estimated to be around frames.", + "The motion lasts for approximately around frames.", + "The motion contains an estimated around frames.", + "The total number of frames in the motion is roughly estimated to be around .", + "The motion has an estimated duration of approximately frames.", + "The motion's frame count is roughly estimated to be ." + ] + }, + "count-sec": { + "class": "m2l", + "input": [ + "How many seconds are there in ?", + "Calculate the second duration for 's actions.", + "How many seconds are in 's activities?", + "Calculate the length of in seconds.", + "Generate the time length of the motion data recorded by .", + "Compute the second count for 's motion.", + "Compute the duration in seconds for 's motion.", + "Count the seconds of the motion without analyzing the motion data.", + "Generate the second length for 's actions.", + "Compute the duration in seconds for 's poses.", + "Provide the duration of in seconds.", + "Determine the time length of the motion data given by . 
Output the duration:", + "Determine the total seconds for 's body movements.", + "How long does 's poses last in seconds?", + "Count the seconds of the motion captured by without any interpretation.", + "Calculate the duration of for me.", + "Calculate the length of 's poses in seconds.", + "Provide the duration of the motion without analyzing the motion data.", + "Calculate the second duration for 's motion.", + "Count how many seconds are there for :", + "Generate the time duration of the motion data captured by .", + "What is the time length of represented by the motion data?", + "Generate the duration of the motion captured by without any interpretation.", + "Calculate the time length of without any analysis of the motion.", + "Generate the duration of without any interpretation of the motion.", + "Provide the time length of the motion data represented by without interpretation or analysis.", + "How long does 's motion last in seconds?", + "Generate the length in seconds for 's motion.", + "How many seconds are in 's poses?", + "Compute the second count for 's poses.", + "What is the duration of the motion ?", + "Generate the length of the motion in seconds without analyzing the motion.", + "Provide the duration of the motion data represented by .", + "Calculate the second duration for 's movements.", + "What is the total number of seconds for ?", + "Determine the time length of without analyzing or interpreting the motion.", + "Compute the length of in seconds.", + "Determine the number of seconds for 's activities.", + "What is the time duration of ?", + "Calculate the second duration for 's poses.", + "What is the total duration of 's movements in seconds?", + "Count the seconds of the motion data captured by without interpreting the motion.", + "Calculate the duration of in seconds. Output the time length:", + "Give me the duration of the motion data represented by .", + "Count the seconds of the motion represented by .", + "Compute the duration in seconds for 's gestures.", + "Generate the duration of the motion data given by without analyzing the motion.", + "Generate the second length for 's poses.", + "What is the total duration of 's body movements in seconds?", + "Calculate the length of 's actions in seconds.", + "Compute the number of seconds for .", + "Determine the total seconds for 's gestures.", + "Determine the time length of . Output the duration:", + "Calculate the time length of the motion data represented by .", + "Determine the total seconds for 's motion.", + "Calculate the time length of the motion . Output the duration:", + "Count the seconds of without any analysis or interpretation.", + "What is the duration in seconds of the motion data represented by ? Don't analyze the motion.", + "How many seconds are in 's gestures?", + "Determine the length of the motion .", + "Generate the length of the motion data represented by without any interpretation.", + "Find the duration of . 
Output the time length in seconds:", + "What is the duration of 's gestures in seconds?", + "Compute the duration in seconds for 's body movements.", + "Find the duration of without interpreting the motion data.", + "Find the second count for 's motion.", + "Determine the number of seconds for 's body movements.", + "Give me the time length of the motion recorded by .", + "Calculate the duration of 's motion in seconds.", + "Determine the duration of without analyzing the motion data.", + "Generate the second length for .", + "Count the seconds of the motion captured by and output the duration:", + "What is the total duration of 's gestures in seconds?", + "Find the length of the motion without analyzing the motion data.", + "Calculate the time length of the motion data given by without analysis.", + "Provide the length of the motion data given by .", + "Calculate the second duration for .", + "What is the duration of 's poses in seconds?", + "What is the second length of ?", + "What is the duration of the motion data captured by ?", + "Count the seconds of the motion and output the duration without interpretation.", + "You can see the motion. Generate the seconds of given motion. Input: . Output: ", + "Determine the number of seconds for 's poses.", + " . Generate length:", + "What is the duration in seconds of the motion ?", + "How many seconds are in 's motion?", + "Find the duration of the motion captured by .", + "What is the time duration of without analyzing the motion data?", + "I want to know the time length of . Generate the duration:", + "Calculate the length of 's body movements in seconds.", + "Output the length of the motion in seconds without any interpretation.", + "Compute the second count for 's gestures.", + "Generate the seconds for 's actions.", + "Calculate the time length of the motion data captured by without interpreting the motion.", + "How long does last in seconds?", + "Determine the total seconds for 's movements.", + "What is the total duration of 's actions in seconds?", + "How long does 's actions last in seconds?", + "Find the second count for 's poses.", + "Generate the duration in seconds for .", + "Generate the seconds for 's movements.", + "Find the second count for .", + "Generate the duration of in seconds.", + "Calculate the time length of without any analysis or interpretation of the motion data.", + "Generate the second length for 's movements.", + "What is the duration of 's actions in seconds?", + "Find the length of the motion captured by . 
Output the duration:", + "Compute the second count for 's body movements.", + "Provide the length of the motion data captured by without interpretation.", + "Determine the total seconds for 's poses.", + "Provide the length of in seconds.", + "Calculate the total seconds for .", + "Generate the seconds for 's poses.", + "Count the seconds of the motion data given by .", + "What is the duration of 's motion in seconds?", + "Find the second count for 's actions.", + "Determine the number of seconds in 's motion.", + "Determine the time duration of without interpreting the motion data.", + "Compute the duration in seconds for 's actions.", + "What is the duration of 's body movements in seconds?", + "Count the seconds of the motion data represented by and output the duration:", + "Provide the duration of without analyzing or interpreting the motion data.", + "Generate the time duration of the motion data represented by without any analysis.", + "What is the total duration of 's poses in seconds?", + "Calculate the duration of the motion captured by without analyzing the motion.", + "Generate the seconds for 's body movements.", + "What is the time duration of the motion ? Output the duration in seconds:", + "Generate the seconds for 's gestures.", + "Provide the time length of without analyzing the motion data.", + "How long does last for?", + "Generate the number of seconds for .", + "Determine the duration of in seconds without interpreting the motion.", + "Determine the length of . Output the duration in seconds:", + "Compute the duration in seconds for 's movements.", + "Generate the time duration of without analyzing the motion.", + "Determine the total seconds for 's actions.", + "Determine the number of seconds for 's actions.", + "Calculate the duration of without any interpretation or analysis.", + "What is the duration in seconds of without any analysis of the motion data?", + "Determine the time length of the motion captured by without analyzing the motion data.", + "Find the duration of without analyzing or interpreting the motion data.", + "Generate the time length of the motion in seconds.", + "Find the second count for 's gestures.", + "Find the second count for 's movements.", + "Compute the duration of in seconds.", + "What is the total duration of in seconds?", + "How long does 's body movements last in seconds?", + "What is the duration of 's movements in seconds?", + "Generate the second length for 's gestures.", + "Calculate the time duration of .", + "Calculate the length of 's gestures in seconds.", + "Determine the duration of in seconds.", + "Generate the second length for 's body movements.", + "How many seconds are in 's body movements?", + "Determine the duration of the motion data captured by without interpretation.", + "Provide the time length of the motion data recorded by without any analysis.", + "Compute the second count for 's movements.", + "Generate the second length for 's motion.", + "Calculate the second duration for 's body movements.", + "Calculate the length of 's activities in seconds.", + "What is the duration of in seconds?", + "Calculate the second duration for 's gestures.", + "Determine the number of seconds in .", + "How long does 's movements last in seconds?", + "What is the length in seconds of without any analysis or interpretation?", + "What is the time duration of without any analysis of the motion data?", + "Determine the second count of .", + "How long does last in terms of seconds?", + "How long does 's gestures last in seconds?", + 
"Compute the second count for 's actions.", + "Calculate the duration of the motion data captured by and output the time length in seconds:", + "Count the seconds of the motion data given by without interpreting the motion.", + "Count the seconds of the motion recorded by without analyzing the motion.", + "Determine the number of seconds for 's gestures.", + "Determine the number of seconds for 's movements.", + "Provide the duration of without any analysis or interpretation.", + "Calculate the length of 's movements in seconds.", + "Provide the time duration of .", + "What is the total duration of 's motion in seconds?", + "How many seconds are in actions?", + "Find the second count for 's body movements.", + "How many seconds are in 's movements?" + ], + "output": [ + "There are about seconds in the motion.", + "The motion lasts for roughly estimated seconds.", + "The motion has an estimated second length of around seconds.", + "The duration of the motion is estimated to be around seconds.", + "The length of the motion is roughly seconds.", + "The motion's length is seconds.", + "The motion has a length of seconds.", + "The second length for the motion is roughly estimated to be seconds.", + "The total number of seconds in the motion is estimated to be roughly around .", + "The number of seconds in the motion is estimated to be .", + "The second duration for the motion is estimated to be .", + "The motion has a rough second length of seconds.", + "The motion has a total of seconds.", + "The duration of the motion is approximately seconds.", + "The motion lasts for seconds.", + "The motion has a rough duration of seconds.", + "The length of the motion is roughly estimated to be seconds.", + "The motion has an approximate second count of .", + "The motion contains approximately estimated seconds.", + "The number of seconds for the motion is .", + "The second count for the motion is around .", + "The second count for the motion is approximately .", + "The duration of the motion is approximately estimated to be around seconds.", + "The motion seen before lasts approximately seconds.", + "The motion lasts for about seconds.", + "The motion's second count is .", + "The total number of seconds in the motion is roughly .", + "The motion has an estimated length of seconds.", + "The motion's duration is approximately estimated to be seconds.", + "The motion contains seconds.", + "The second count for the motion is approximately around .", + "The motion lasts for an approximate seconds.", + "The number of seconds in the motion is approximately .", + "The motion's total duration is estimated to be seconds.", + "There are seconds in the motion.", + "The motion has a total second count of .", + "The motion contains an estimated around seconds.", + "The motion's second count is roughly estimated to be .", + "The total number of seconds in the motion is roughly estimated to be .", + "The length of the motion is estimated to be seconds.", + "The second count for the motion is .", + "The motion lasts for roughly seconds.", + "The motion has an estimated second length of seconds.", + "The motion has an estimated duration of roughly seconds.", + "The duration of the motion is estimated to be about seconds.", + "The second count for the motion is roughly .", + "The motion has a duration of about seconds.", + "The motion contains a total of seconds.", + "The length of the motion is estimated to be approximately seconds.", + "The motion has a duration of seconds.", + "The motion's second count is around .", + 
"The duration of the motion is roughly seconds.", + "The duration of the motion in seconds is .", + "The total number of seconds in the motion is roughly estimated to be around .", + "The motion contains around seconds.", + "The motion's duration is roughly estimated to be seconds.", + "The length of the motion is estimated to be around seconds.", + "The second count for the motion is roughly estimated to be around .", + "The second length for the motion is seconds.", + "The duration of the motion is seconds.", + "The duration of the motion is roughly estimated to be seconds.", + "The length of the motion is roughly estimated to be around seconds.", + "The total number of seconds in the motion is estimated to be approximately .", + "The motion's duration is seconds.", + "The second length for the motion is approximately estimated to be seconds.", + "The length of the motion is approximately seconds.", + "The motion's total duration is approximately seconds.", + "The motion lasts for an estimated duration of around seconds.", + "The total number of seconds in the motion is estimated to be around .", + "The motion has a total second count of around .", + "The motion lasts for around seconds.", + "The motion's second count is estimated to be .", + "The length of given motion is about seconds.", + "The motion has an estimated duration of around seconds.", + "The motion consists of seconds.", + "The motion lasts for approximately seconds.", + "The second duration for the motion is .", + "The second count for the motion is estimated to be .", + "The motion has approximately seconds.", + "The motion lasts for approximately around seconds.", + "The motion contains an estimated seconds.", + "The motion contains approximately seconds.", + "The motion has a second length of seconds.", + "The second duration for the motion is roughly estimated to be .", + "The second duration for the motion is about .", + "The second length for the motion is estimated to be seconds.", + "The total number of seconds in the motion is .", + "The second count for the motion is approximately estimated to be .", + "The total number of seconds in the motion is estimated to be .", + "The motion has an estimated duration of seconds.", + "The length of the motion is estimated to be roughly seconds.", + "The motion has an estimated duration of approximately seconds.", + "The motion lasts for an estimated seconds.", + "The motion has an approximate length of seconds.", + "The motion's duration is estimated to be seconds.", + "The total number of seconds in the motion is estimated to be roughly .", + "The length of the motion is seconds.", + "The motion's duration is around seconds." 
+ ] + } + }, + "Text-to-Text": { + "caption-to-framelen": { + "class": "t2t", + "input": [ + "Predict the frame count required for the motion corresponding to .", + "Predict the expected duration of the motion that corresponds to in frames.", + "Based on , predict the frame count for the corresponding motion.", + "Given , provide the anticipated frame duration for the corresponding motion.", + "Predict how many frames are there for a motion depicts :", + "Generate an estimate of the frame duration for the motion described by .", + "Provide an estimate for the frame length of the motion corresponding to .", + "Estimate the anticipated frame duration for the motion described by .", + "What is the anticipated frame count for the motion associated with ?", + "How many frames should the motion that matches have?", + "Predict the expected frame length for the motion that matches .", + "Guess the frame length required for the motion that matches .", + "What is the expected frame length for the motion associated with ?", + "Predict the frame count required for the motion that corresponds to .", + "What is the expected frame length for the motion that corresponds to ?", + "What is the expected duration of the motion described by in terms of frame count?", + "Based on , provide the expected frame duration for the corresponding motion.", + "Based on , guess the expected frame length for the corresponding motion.", + "Predict the frame count required for the motion that matches .", + "How long will the motion that matches last in frames?", + "Provide an estimate of the frame duration for the motion corresponding to .", + "Predict the expected frame duration for the motion that matches .", + "Based on the description of , estimate the number of frames required for the motion.", + "Given , guess the frame count for the corresponding motion.", + "How many frames are needed to perform the motion described by ?", + "Based on , guess the frame duration required for the corresponding motion.", + "Provide an estimate of the expected frame duration for the motion described by .", + "What is the expected number of frames required for the motion associated with ?", + "Given , guess the anticipated frame length for the corresponding motion.", + "Estimate the frame length of the motion associated with .", + "Provide an estimate for the expected frame duration of the motion represented by .", + "What is the expected frame duration for the motion associated with ?", + "Given , what is the expected frame duration for the corresponding motion?", + "Guess the number of frames required for the motion corresponding to .", + "Given the description , predict the expected frame length of the corresponding motion.", + "Based on , guess the frame duration for the corresponding motion.", + "Based on the given description , estimate the duration of the corresponding motion in frames.", + "Estimate the expected number of frames required for the motion that matches .", + "Given , provide an estimate for the duration of the motion in frames.", + "Predict the number of frames required to perform the motion associated with .", + "Provide an estimate of the frame length required to perform the motion associated with .", + "Estimate the anticipated duration of the motion described by in terms of frame count.", + "Estimate the frame length required for the motion that matches .", + "How long will the motion that matches last in terms of frame duration?", + "Predict the expected frame duration for the motion that corresponds to .", + 
"What is the anticipated number of frames for the motion described by ?", + "What is the anticipated number of frames required for the motion described by ?", + "Based on , guess the expected frame count for the corresponding motion.", + "Now you know the motion descriptions according to the input. Guess the frames of given motion. Input: . Output:", + "Provide the expected frame count for the motion represented by .", + "I see , generate the motion length:", + "Based on , estimate the number of frames required to perform the corresponding motion.", + "How many frames are expected for the motion that matches ?", + "Guess the frame count required for the motion described by .", + "Predict the expected frame count for the motion that corresponds to .", + "What is the frame count you anticipate for the motion that matches ?", + "Estimate the frame count for the motion that matches .", + "How many frames are necessary for the motion associated with ?", + "What is the expected duration of the motion corresponding to in terms of frame length?", + "Estimate the number of frames required for the motion described by .", + "What is the anticipated frame length for the motion corresponding to ?", + "Predict the number of frames for a motion that matches .", + "Predict the anticipated frame count for the motion that corresponds to .", + "Predict the frame duration of the motion that corresponds to .", + "What is the expected length of the motion corresponding to ?", + "Provide an estimate for the frame count required to perform the motion described by .", + "Estimate the frame count for the motion associated with .", + "What is the expected frame duration for the motion described by ?", + "Based on , provide the anticipated frame length for the corresponding motion.", + "Provide an estimate for the frame count required for the motion associated with .", + "Estimate the number of frames required for the motion that matches .", + "Provide an estimate of the expected frame length for the motion described by .", + "Generate the expected number of frames required for the motion described by .", + "Predict the frame count required for the motion represented by .", + "Guess the frame count required for the motion represented by .", + "Estimate the frame count for the motion described by .", + "Generate the number of frames required for the motion described by .", + "How many frames are expected for the motion described by ?", + "Predict the frame count required for the motion associated with .", + "How many frames do you anticipate for the motion that matches ?", + "Estimate the expected duration of the motion corresponding to in terms of frame count.", + "Provide an estimate for the number of frames required to perform the motion described by .", + "How many frames do you expect for the motion that matches ?", + "Estimate the frame duration required for the motion that corresponds to .", + "Provide an estimate for the number of frames required to perform the motion associated with .", + "How long will it take to perform the motion represented by in frames?", + "Provide an estimate of the frame duration for the motion represented by .", + "Generate an estimate of the frame count required to perform the motion represented by .", + "Predict the anticipated frame duration for the motion that corresponds to .", + "How long will it take to perform the motion described by in terms of frame duration?", + "What is the expected duration of the motion associated with in terms of frame length?", + "How long should the 
motion associated with last in frames?", + "Generate the expected frame length for the motion described by .", + "Provide an estimate of the frame count required to perform the motion that matches .", + "Based on , what is the expected frame count for the corresponding motion?", + "How long should the motion associated with last in terms of frame count?", + "What is the anticipated frame count for the motion described by ?", + "What is the expected duration of the motion that matches in terms of frame count?" + ], + "output": [ + "The motion has an estimated duration of frames.", + "The motion has a length of frames.", + "The motion has an estimated duration of around frames.", + "The motion has an estimated frame length of around frames.", + "The motion has an approximate length of frames.", + "The length of the motion is approximately frames.", + "The motion lasts for roughly estimated frames.", + "The duration of the motion is estimated to be around frames.", + "The motion contains an estimated around frames.", + "The motion consists of frames.", + "The total number of frames in the motion is roughly estimated to be around .", + "The motion lasts for approximately frames.", + "The duration of the motion is estimated to be about frames.", + "The frame duration for the motion is about .", + "The motion's total duration is approximately frames.", + "The motion lasts for an estimated frames.", + "The motion has a total frame count of .", + "The frame count for the motion is approximately around .", + "The motion's frame count is .", + "The motion seen before lasts approximately frames.", + "The motion lasts for frames.", + "The motion's duration is approximately estimated to be frames.", + "There are frames in the motion.", + "The motion contains approximately frames.", + "The duration of the motion is frames.", + "The motion contains around frames.", + "The motion contains approximately estimated frames.", + "The total number of frames in the motion is roughly .", + "The length of the motion is estimated to be around frames.", + "The motion's duration is roughly estimated to be frames.", + "The motion lasts for approximately around frames.", + "The motion's duration is around frames.", + "The length of the motion is roughly estimated to be around frames.", + "The motion's length is frames.", + "The duration of the motion is roughly frames.", + "The frame count for the motion is around .", + "The motion's total duration is estimated to be frames.", + "The motion has a total frame count of around .", + "The motion has a rough duration of frames.", + "The motion has a rough frame length of frames.", + "The frame count for the motion is approximately estimated to be .", + "The motion has an approximate frame count of .", + "The motion lasts for roughly frames.", + "The duration of the motion is roughly estimated to be frames.", + "The number of frames in the motion is approximately .", + "The frame length for the motion is roughly estimated to be frames.", + "The motion has a duration of frames.", + "The total number of frames in the motion is estimated to be roughly .", + "The total number of frames in the motion is estimated to be .", + "The motion has an estimated duration of roughly frames.", + "The length of the motion is estimated to be frames.", + "The motion's duration is estimated to be frames.", + "The frame count for the motion is .", + "The motion has an estimated frame length of frames.", + "The total number of frames in the motion is estimated to be around .", + "The length of 
the motion is roughly frames.", + "The motion has a frame length of frames.", + "The length of the motion is estimated to be roughly frames.", + "The duration of the motion is approximately frames.", + "The frame count for the motion is approximately .", + "The frame length for the motion is frames.", + "The motion has a duration of about frames.", + "The motion contains frames.", + "The frame duration for the motion is .", + "The number of frames for the motion is .", + "The length of the motion is frames.", + "The motion has approximately frames.", + "The number of frames in the motion is estimated to be .", + "The duration of the motion in frames is .", + "There are about frames in the motion.", + "The motion has a total of frames.", + "The motion lasts for an estimated duration of around frames.", + "The length of given motion is about frames.", + "The length of the motion is estimated to be approximately frames.", + "The motion lasts for an approximate frames.", + "The motion lasts for around frames.", + "The duration of the motion is approximately estimated to be around frames.", + "The motion has an estimated length of frames.", + "The frame duration for the motion is estimated to be .", + "The frame count for the motion is roughly estimated to be around .", + "The frame length for the motion is approximately estimated to be frames.", + "The frame count for the motion is estimated to be .", + "The frame duration for the motion is roughly estimated to be .", + "The total number of frames in the motion is estimated to be roughly around .", + "The length of the motion is roughly estimated to be frames.", + "The motion contains a total of frames.", + "The frame count for the motion is roughly .", + "The frame length for the motion is estimated to be frames.", + "The total number of frames in the motion is roughly estimated to be .", + "The motion's duration is frames.", + "The motion's frame count is roughly estimated to be .", + "The motion's frame count is estimated to be .", + "The total number of frames in the motion is .", + "The motion's frame count is around .", + "The motion lasts for about frames.", + "The motion contains an estimated frames.", + "The motion has an estimated duration of approximately frames.", + "The total number of frames in the motion is estimated to be approximately ." 
+ ] + }, + "caption-to-seclen": { + "class": "t2t", + "input": [ + "Estimate the expected number of seconds required for the motion that matches .", + "What is the expected second length for the motion that corresponds to ?", + "Estimate the second duration required for the motion that corresponds to .", + "Predict the number of seconds for a motion that matches .", + "Estimate the second length required for the motion that matches .", + "What is the expected number of seconds required for the motion associated with ?", + "Predict the expected duration of the motion that corresponds to in seconds.", + "Given , provide the anticipated second duration for the corresponding motion.", + "Given , guess the second count for the corresponding motion.", + "Guess the number of seconds required for the motion corresponding to .", + "What is the expected second duration for the motion associated with ?", + "How many seconds are necessary for the motion associated with ?", + "What is the expected second length for the motion associated with ?", + "How long should the motion associated with last in terms of second count?", + "Given , what is the expected second duration for the corresponding motion?", + "Estimate the number of seconds required for the motion described by .", + "Provide an estimate of the second duration for the motion represented by .", + "How many seconds are expected for the motion described by ?", + "Provide an estimate for the second count required for the motion associated with .", + "Generate the expected number of seconds required for the motion described by .", + "Provide an estimate of the expected second duration for the motion described by .", + "Based on , guess the expected second length for the corresponding motion.", + "What is the expected duration of the motion associated with in terms of second length?", + "Guess the second count required for the motion described by .", + "Based on , guess the second duration required for the corresponding motion.", + "What is the expected duration of the motion corresponding to in terms of second length?", + "How many seconds are needed to perform the motion described by ?", + "Based on , what is the expected second count for the corresponding motion?", + "Provide an estimate of the expected second length for the motion described by .", + "Now you know the motion descriptions according to the input. Guess the seconds of given motion. Input: . 
Output:", + "How many seconds are expected for the motion that matches ?", + "Predict the expected second count for the motion that corresponds to .", + "How long should the motion associated with last in seconds?", + "Predict the expected second length for the motion that matches .", + "Generate the expected second length for the motion described by .", + "How many seconds do you expect for the motion that matches ?", + "Generate an estimate of the second duration for the motion described by .", + "What is the expected second duration for the motion described by ?", + "Provide an estimate of the second length required to perform the motion associated with .", + "What is the expected duration of the motion described by in terms of second count?", + "Provide an estimate for the expected second duration of the motion represented by .", + "Provide the expected second count for the motion represented by .", + "Provide an estimate of the second count required to perform the motion that matches .", + "Provide an estimate of the second duration for the motion corresponding to .", + "What is the anticipated second count for the motion described by ?", + "What is the second count you anticipate for the motion that matches ?", + "Estimate the second length of the motion associated with .", + "I see , generate the motion length:", + "Predict the number of seconds required to perform the motion associated with .", + "Provide an estimate for the number of seconds required to perform the motion described by .", + "Based on the description of , estimate the number of seconds required for the motion.", + "Predict the second count required for the motion associated with .", + "Predict the second count required for the motion corresponding to .", + "How long will it take to perform the motion represented by in seconds?", + "What is the anticipated second length for the motion corresponding to ?", + "Predict the second count required for the motion that corresponds to .", + "How many seconds should the motion that matches have?", + "What is the anticipated number of seconds for the motion described by ?", + "Estimate the number of seconds required for the motion that matches .", + "Based on , estimate the number of seconds required to perform the corresponding motion.", + "Provide an estimate for the second length of the motion corresponding to .", + "Predict the second duration of the motion that corresponds to .", + "Based on , provide the anticipated second length for the corresponding motion.", + "Given , guess the anticipated second length for the corresponding motion.", + "Given , provide an estimate for the duration of the motion in seconds.", + "Estimate the anticipated duration of the motion described by in terms of second count.", + "What is the anticipated second count for the motion associated with ?", + "Generate an estimate of the second count required to perform the motion represented by .", + "Estimate the anticipated second duration for the motion described by .", + "What is the expected length of the motion corresponding to ?", + "Predict the second count required for the motion represented by .", + "Predict the second count required for the motion that matches .", + "Based on the given description , estimate the duration of the corresponding motion in seconds.", + "Estimate the second count for the motion described by .", + "Predict how many seconds are there for a motion depicts :", + "Provide an estimate for the number of seconds required to perform the motion associated with .", + "How 
many seconds do you anticipate for the motion that matches ?", + "How long will the motion that matches last in seconds?", + "Based on , guess the second duration for the corresponding motion.", + "Predict the anticipated second count for the motion that corresponds to .", + "Estimate the second count for the motion that matches .", + "Based on , provide the expected second duration for the corresponding motion.", + "How long will it take to perform the motion described by in terms of second duration?", + "Based on , predict the second count for the corresponding motion.", + "Estimate the second count for the motion associated with .", + "Guess the second count required for the motion represented by .", + "How long will the motion that matches last in terms of second duration?", + "Based on , guess the expected second count for the corresponding motion.", + "What is the anticipated number of seconds required for the motion described by ?", + "Predict the expected second duration for the motion that matches .", + "Estimate the expected duration of the motion corresponding to in terms of second count.", + "What is the expected duration of the motion that matches in terms of second count?", + "Given the description , predict the expected second length of the corresponding motion.", + "Provide an estimate for the second count required to perform the motion described by .", + "Predict the anticipated second duration for the motion that corresponds to .", + "Generate the number of seconds required for the motion described by .", + "Predict the expected second duration for the motion that corresponds to .", + "Guess the second length required for the motion that matches ." + ], + "output": [ + "The motion has a duration of about seconds.", + "The length of the motion is seconds.", + "The second count for the motion is approximately around .", + "The motion has a total of seconds.", + "The motion has a rough duration of seconds.", + "The motion's duration is estimated to be seconds.", + "The motion has a rough second length of seconds.", + "The motion lasts for roughly estimated seconds.", + "The number of seconds for the motion is .", + "The second count for the motion is approximately .", + "The motion contains around seconds.", + "The motion consists of seconds.", + "The length of given motion is about seconds.", + "The number of seconds in the motion is estimated to be .", + "The motion's second count is estimated to be .", + "The duration of the motion is approximately seconds.", + "The motion's duration is seconds.", + "The second length for the motion is approximately estimated to be seconds.", + "The second length for the motion is estimated to be seconds.", + "The second count for the motion is estimated to be .", + "The motion lasts for seconds.", + "The motion contains an estimated around seconds.", + "The length of the motion is estimated to be seconds.", + "The motion's second count is .", + "The total number of seconds in the motion is .", + "The length of the motion is roughly estimated to be around seconds.", + "The second duration for the motion is about .", + "The motion's length is seconds.", + "The number of seconds in the motion is approximately .", + "The second duration for the motion is roughly estimated to be .", + "The motion has an estimated duration of around seconds.", + "The motion has an estimated duration of approximately seconds.", + "The second length for the motion is seconds.", + "The motion contains an estimated seconds.", + "The motion's duration is 
approximately estimated to be seconds.", + "The motion has an approximate second count of .", + "The second length for the motion is roughly estimated to be seconds.", + "The motion's second count is around .", + "The motion's second count is roughly estimated to be .", + "The motion lasts for approximately around seconds.", + "The motion lasts for around seconds.", + "The second duration for the motion is .", + "The motion lasts for an estimated duration of around seconds.", + "The motion lasts for about seconds.", + "The length of the motion is estimated to be approximately seconds.", + "The second count for the motion is around .", + "The length of the motion is estimated to be around seconds.", + "The duration of the motion is roughly estimated to be seconds.", + "The second count for the motion is approximately estimated to be .", + "The total number of seconds in the motion is estimated to be around .", + "The motion has a total second count of around .", + "The second count for the motion is roughly .", + "The duration of the motion is approximately estimated to be around seconds.", + "The motion contains approximately estimated seconds.", + "The second count for the motion is roughly estimated to be around .", + "The motion contains seconds.", + "The motion has an approximate length of seconds.", + "The duration of the motion is roughly seconds.", + "The motion has an estimated second length of around seconds.", + "The total number of seconds in the motion is estimated to be .", + "The motion has a length of seconds.", + "The motion's duration is around seconds.", + "The total number of seconds in the motion is roughly estimated to be around .", + "The second duration for the motion is estimated to be .", + "The motion has a total second count of .", + "The length of the motion is roughly seconds.", + "The motion has an estimated length of seconds.", + "The motion seen before lasts approximately seconds.", + "The motion contains a total of seconds.", + "The duration of the motion is estimated to be around seconds.", + "The motion has a duration of seconds.", + "The total number of seconds in the motion is roughly estimated to be .", + "The duration of the motion is estimated to be about seconds.", + "The motion has approximately seconds.", + "The duration of the motion is seconds.", + "The motion lasts for roughly seconds.", + "The total number of seconds in the motion is roughly .", + "The motion has an estimated second length of seconds.", + "The length of the motion is roughly estimated to be seconds.", + "The motion lasts for an estimated seconds.", + "The motion lasts for an approximate seconds.", + "The total number of seconds in the motion is estimated to be roughly .", + "The length of the motion is estimated to be roughly seconds.", + "The motion has an estimated duration of seconds.", + "There are about seconds in the motion.", + "The motion lasts for approximately seconds.", + "The motion's total duration is estimated to be seconds.", + "The duration of the motion in seconds is .", + "The motion has a second length of seconds.", + "The total number of seconds in the motion is estimated to be roughly around .", + "The motion has an estimated duration of roughly seconds.", + "The total number of seconds in the motion is estimated to be approximately .", + "The second count for the motion is .", + "The motion's total duration is approximately seconds.", + "The motion contains approximately seconds.", + "The length of the motion is approximately seconds.", + "The motion's 
duration is roughly estimated to be seconds.", + "There are seconds in the motion." + ] + }, + "framelen-to-caption": { + "class": "t2t", + "input": [ + "", + "Based on the duration of the motion, what is the likelihood of it being a full-body movement or a partial-body movement?", + "Generate a description of the motion based on the expected level of power or strength required.", + "Given frames of motion, predict the likelihood of it being a unilateral or bilateral movement.", + "Given the duration of the motion, what are some possible actions that could be taken?", + "Generate a description of the motion based on the number of frames provided.", + " is the number of motion frames, generate the motion description:", + "Generate a description of the motion based on the expected level of coordination required.", + "Predict the level of explosiveness required to perform the motion, based on the given number of frames.", + "Generate a description of the motion based on the expected level of power required.", + "What types of stretching or warm-up exercises could be performed in frames?", + "Given frames of motion, predict the likelihood of it being a concentric or isometric movement.", + "What are some possible modifications or regressions of the motion that could be performed in frames?", + "Given the number of frames, what type of equipment might be required to perform the motion?", + "What are some possible scenarios where frames of motion would be required?", + "Now you know the motion length according to the input frames number. Guess the caption of the motion. Input: . Output:", + "Generate a list of possible yoga poses that could be performed in frames.", + "Based on the duration of the motion, what is the likelihood of it being a dynamic stretching or ballistic exercise?", + "Based on the motion length, what is the likelihood of it being a sport-specific exercise?", + "Given the duration of the motion, predict the level of mobility or range of motion required.", + "What types of exercises or stretches could be performed in frames?", + "Given the motion length, predict the likelihood of it being a static or dynamic movement.", + "Given frames of motion, what body parts are likely to be involved?", + "Given the length of the motion, predict the level of physical exertion required to perform it.", + "What are some possible modifications or progressions of the motion that could be performed in frames?", + "Predict the level of speed required to perform the motion, based on the given number of frames.", + "Based on the duration of the motion, what is the likelihood of it being a warm-up or activation exercise?", + "Based on the duration of the motion, what is the likelihood of it being a resistance or weight-bearing exercise?", + "Given frames of motion, predict the likelihood of it being a closed or open-chain movement.", + "Based on the given motion length, generate a description of the body movements.", + "Based on the given motion length, predict the level of flexibility required to perform the movement.", + "Based on the duration of the motion, what is the likelihood of it being a low-intensity or high-intensity movement?", + "Based on the motion length, what is the likelihood of it being a cardiovascular or respiratory exercise?", + "Generate a list of possible animal movements that could be performed in frames.", + "Generate a list of possible gymnastic moves that could be performed in frames.", + "Predict what motion can be achieved with frames:", + "Given the motion length, what 
is the likelihood of it being a cardio or strength exercise?", + "What are some possible modifications or variations of the motion that could be performed in frames?", + "Predict the level of balance required to perform the motion, based on the given number of frames.", + "Generate a list of possible sports or activities that could be performed in frames.", + "What types of breathing exercises could be incorporated into a -frame motion?", + "Given frames of motion, what is the likelihood of it being a solo or partner movement?", + "Generate a list of possible acrobatic moves that could be performed in frames.", + "Predict the level of focus or concentration required to perform the motion, based on the given number of frames.", + "What types of mental visualization exercises could be incorporated into a -frame motion?", + "What are some possible physical gestures that could be made in frames?", + "Generate a description of the motion based on the expected level of balance required.", + "Given frames of motion, what type of equipment might be needed to perform the movement?", + "Based on the duration of the motion, what is the likelihood of it being a balance or stability exercise?", + "Predict the type of sport or exercise that would require frames of motion.", + "Predict the level of power required to perform the motion, based on the given number of frames.", + "Generate a list of possible parkour moves that could be performed in frames.", + "Generate a list of possible motions that could be performed in frames.", + "Given the duration of the motion, predict the level of coordination or dexterity required.", + "Given frames of motion, predict the likelihood of it being a concentric or plyometric movement.", + "Based on the duration of the motion, what is the likelihood of it being a stretching or mobility exercise?", + "Based on the motion length, what is the likelihood of it being a restorative or recovery exercise?", + "Generate a description of the motion based on the expected level of physical exertion.", + "What kind of physical activity can be performed in frames?", + "Given the duration of the motion, predict the level of anaerobic or aerobic exercise involved.", + "Generate a description of the motion based on the expected level of flexibility required.", + "Given the duration of the motion, predict the level of core or trunk stability required.", + "Based on the duration of the motion, predict the likelihood of it being a unilateral or bilateral movement.", + "Given frames of motion, predict the likelihood of it being a concentric or eccentric movement.", + "Based on the duration of the motion, what is the likelihood of it being a core or abdominal exercise?", + "What types of visualizations or mental imagery could be incorporated into a -frame motion?", + "Given the number of frames, what type of physical skill or ability might be required to perform the motion?", + "What are some possible ways to modify the motion to make it more challenging or easier, based on the number of frames?", + "Generate a list of possible dance moves that could be performed in frames.", + "Predict the level of precision required to perform the motion, based on the given number of frames.", + "Given frames of motion, what type of musical accompaniment might be suitable?", + "What types of visualization or guided meditation could be incorporated into a -frame motion?", + "Generate a list of possible dance styles that could be performed in frames.", + "What types of visualization or mindfulness practices 
could be incorporated into a -frame motion?", + "Based on the motion length, what is the likelihood of it being a skill-based exercise?", + "Based on the duration of the motion, what is the likelihood of it being a fast or slow movement?", + "Predict the range of motion involved in the movement, based on the given number of frames.", + "What types of movements can be executed in frames?", + "What are some possible ways to modify the motion to make it more accessible or inclusive, based on the number of frames?", + "Predict the intensity level of the motion based on the given number of frames.", + "Based on the duration of the motion, what is the likelihood of it being a plyometric or isometric exercise?", + "Based on the duration of the motion, what is the likelihood of it being a high-impact or low-impact movement?", + "Given frames of motion, predict the likelihood of it being a linear or nonlinear movement.", + "Given the duration of the motion, predict the level of stability or mobility involved.", + "Based on the motion length, what is the likelihood of it being a warm-up or cool-down exercise?", + "Based on the duration of the motion, what is the likelihood of it being a functional movement or a bodybuilding exercise?", + "Based on the length of the motion, what is the likelihood of it being a balance or coordination exercise?", + "Based on the duration of the motion, predict the likelihood of it being a continuous movement or a series of shorter movements.", + "Given the motion length, predict the likelihood of it being a compound or isolation movement.", + "Given the motion length, what is the likelihood of it being a rhythmic or continuous movement?", + "Predict the level of agility required to perform the motion, based on the given number of frames.", + "Generate a list of possible martial arts techniques that could be performed in frames.", + "Generate a list of possible calisthenics exercises that could be performed in frames.", + "Generate a description of the motion based on the expected level of endurance required.", + "What types of mindfulness or meditation practices could be incorporated into a -frame motion?", + "Predict the level of difficulty of the motion based on the given number of frames." 
+ ], + "output": [ + "" + ] + }, + "seclen-to-caption": { + "class": "t2t", + "input": [ + "", + "Based on the duration of the motion, what is the likelihood of it being a full-body movement or a partial-body movement?", + "Generate a description of the motion based on the expected level of power or strength required.", + "Given the duration of the motion, what are some possible actions that could be taken?", + "What are some possible scenarios where seconds of motion would be required?", + "Generate a description of the motion based on the expected level of coordination required.", + "Given seconds of motion, predict the likelihood of it being a concentric or eccentric movement.", + "Generate a description of the motion based on the expected level of power required.", + "What are some possible ways to modify the motion to make it more accessible or inclusive, based on the number of seconds?", + "Generate a list of possible dance styles that could be performed in seconds.", + "What types of visualization or mindfulness practices could be incorporated into a -second motion?", + "Predict the level of difficulty of the motion based on the given number of seconds.", + "Generate a list of possible calisthenics exercises that could be performed in seconds.", + "Generate a list of possible motions that could be performed in seconds.", + "Based on the motion length, what is the likelihood of it being a sport-specific exercise?", + "Given seconds of motion, what type of equipment might be needed to perform the movement?", + "Generate a description of the motion based on the number of seconds provided.", + "What types of exercises or stretches could be performed in seconds?", + "Given seconds of motion, what body parts are likely to be involved?", + "Based on the duration of the motion, what is the likelihood of it being a dynamic stretching or ballistic exercise?", + "Given the duration of the motion, predict the level of mobility or range of motion required.", + "Given the motion length, predict the likelihood of it being a static or dynamic movement.", + "What are some possible physical gestures that could be made in seconds?", + "Given the length of the motion, predict the level of physical exertion required to perform it.", + "Based on the duration of the motion, what is the likelihood of it being a warm-up or activation exercise?", + "Based on the duration of the motion, what is the likelihood of it being a resistance or weight-bearing exercise?", + "Based on the given motion length, generate a description of the body movements.", + "Given seconds of motion, predict the likelihood of it being a concentric or isometric movement.", + "Based on the given motion length, predict the level of flexibility required to perform the movement.", + "What are some possible modifications or progressions of the motion that could be performed in seconds?", + "Predict the intensity level of the motion based on the given number of seconds.", + "Based on the duration of the motion, what is the likelihood of it being a low-intensity or high-intensity movement?", + "Based on the motion length, what is the likelihood of it being a cardiovascular or respiratory exercise?", + "What types of visualizations or mental imagery could be incorporated into a -second motion?", + "Predict what motion can be achieved with seconds:", + "Generate a list of possible sports or activities that could be performed in seconds.", + "Given the motion length, what is the likelihood of it being a cardio or strength exercise?", + "Predict 
the range of motion involved in the movement, based on the given number of seconds.", + "Given seconds of motion, what is the likelihood of it being a solo or partner movement?", + "Predict the level of power required to perform the motion, based on the given number of seconds.", + "Generate a description of the motion based on the expected level of balance required.", + "Generate a list of possible gymnastic moves that could be performed in seconds.", + "Predict the level of balance required to perform the motion, based on the given number of seconds.", + "Based on the duration of the motion, what is the likelihood of it being a balance or stability exercise?", + "Predict the level of focus or concentration required to perform the motion, based on the given number of seconds.", + "Predict the level of precision required to perform the motion, based on the given number of seconds.", + "Generate a list of possible parkour moves that could be performed in seconds.", + "Given the duration of the motion, predict the level of coordination or dexterity required.", + "What are some possible modifications or variations of the motion that could be performed in seconds?", + "Based on the duration of the motion, what is the likelihood of it being a stretching or mobility exercise?", + "Based on the motion length, what is the likelihood of it being a restorative or recovery exercise?", + "Generate a description of the motion based on the expected level of physical exertion.", + "Given the duration of the motion, predict the level of anaerobic or aerobic exercise involved.", + "Generate a list of possible animal movements that could be performed in seconds.", + "What are some possible modifications or regressions of the motion that could be performed in seconds?", + "Given the duration of the motion, predict the level of core or trunk stability required.", + "Given seconds of motion, what type of musical accompaniment might be suitable?", + "Generate a description of the motion based on the expected level of flexibility required.", + "Based on the duration of the motion, predict the likelihood of it being a unilateral or bilateral movement.", + "Based on the duration of the motion, what is the likelihood of it being a core or abdominal exercise?", + " is the number of motion seconds, generate the motion description:", + "Given the number of seconds, what type of equipment might be required to perform the motion?", + "Given seconds of motion, predict the likelihood of it being a closed or open-chain movement.", + "Now you know the motion length according to the input seconds number. Guess the caption of the motion. Input: . 
Output:", + "Given seconds of motion, predict the likelihood of it being a concentric or plyometric movement.", + "Based on the motion length, what is the likelihood of it being a skill-based exercise?", + "What types of breathing exercises could be incorporated into a -second motion?", + "Predict the level of agility required to perform the motion, based on the given number of seconds.", + "Based on the duration of the motion, what is the likelihood of it being a fast or slow movement?", + "Predict the level of explosiveness required to perform the motion, based on the given number of seconds.", + "Based on the duration of the motion, what is the likelihood of it being a plyometric or isometric exercise?", + "What types of movements can be executed in seconds?", + "Given the duration of the motion, predict the level of stability or mobility involved.", + "Based on the duration of the motion, what is the likelihood of it being a high-impact or low-impact movement?", + "What types of visualization or guided meditation could be incorporated into a -second motion?", + "What types of mental visualization exercises could be incorporated into a -second motion?", + "Given the number of seconds, what type of physical skill or ability might be required to perform the motion?", + "Generate a list of possible acrobatic moves that could be performed in seconds.", + "Based on the motion length, what is the likelihood of it being a warm-up or cool-down exercise?", + "Based on the duration of the motion, what is the likelihood of it being a functional movement or a bodybuilding exercise?", + "Based on the length of the motion, what is the likelihood of it being a balance or coordination exercise?", + "Given seconds of motion, predict the likelihood of it being a unilateral or bilateral movement.", + "Based on the duration of the motion, predict the likelihood of it being a continuous movement or a series of shorter movements.", + "What types of stretching or warm-up exercises could be performed in seconds?", + "Given seconds of motion, predict the likelihood of it being a linear or nonlinear movement.", + "Given the motion length, predict the likelihood of it being a compound or isolation movement.", + "Generate a list of possible dance moves that could be performed in seconds.", + "Given the motion length, what is the likelihood of it being a rhythmic or continuous movement?", + "What kind of physical activity can be performed in seconds?", + "Generate a list of possible yoga poses that could be performed in seconds.", + "What are some possible ways to modify the motion to make it more challenging or easier, based on the number of seconds?", + "What types of mindfulness or meditation practices could be incorporated into a -second motion?", + "Predict the level of speed required to perform the motion, based on the given number of seconds.", + "Predict the type of sport or exercise that would require seconds of motion.", + "Generate a list of possible martial arts techniques that could be performed in seconds.", + "Generate a description of the motion based on the expected level of endurance required." 
+ ], + "output": [ + "" + ] + }, + "random-caption": { + "class": "n2t", + "input": [ + "Write a brief summary of how someone might move their feet while doing the foxtrot.", + "Describe the motion of someone doing a lunge.", + "Write a brief summary of how someone might move their shoulders while dancing.", + "Describe the motion of someone doing a burpee.", + "Describe the way someone might move while doing a corkscrew.", + "Describe the way someone might move while doing a Webster cork.", + "Describe the way someone might move while doing a hand spring.", + "Describe the motion of someone doing a bench press.", + "Write a caption for a video of someone doing a handstand.", + "Describe the way someone might move while doing a webster.", + "Describe the way someone might move while playing volleyball.", + "Write a caption for a video of someone doing a butterfly stroke.", + "Write a brief summary of how someone might move their legs while doing the tango.", + "Describe the way someone might move while doing a roundoff back handspring.", + "Write a caption for a video of someone doing a sextuple salchow.", + "Describe the way someone might move while doing a gainer.", + "Write a caption for a video of someone doing a double salchow.", + "Describe the way someone might move while doing a 540 kick.", + "Write a brief summary of how someone might move their head.", + "Describe the way someone might move while doing a cork screw kick.", + "Write a caption for a video of someone doing a quintuple toe loop.", + "Depict a motion as like you have seen it.", + "Describe the way someone might move while doing a butterfly twist.", + "Write a sentence about the way someone might move while practicing judo.", + "Describe the way someone might move while doing a cartwheel.", + "Describe the way someone might move while doing a side flip.", + "Describe the motion of someone doing a jump lunge.", + "Write a brief summary of how someone might move their hips while dancing.", + "Describe the way someone might move while doing a backflip with a twist.", + "Describe the motion of someone doing a pull-up.", + "Write a caption for a video of someone doing a triple toe loop.", + "Describe the motion of someone doing a deadlift.", + "Write a brief summary of how someone might move their feet while doing the cha-cha.", + "Describe the motion of someone doing a dumbbell curl.", + "Write a caption for a video of someone doing a triple axel.", + "Describe the way someone might move while doing a front aerial.", + "Describe the motion of someone doing a clean and jerk.", + "Write a caption for a video of someone doing a cartwheel.", + "Write a caption for a video of someone doing a roundoff.", + "Write a sentence about how someone might dance.", + "Describe the motion of someone doing a plank.", + "Write a sentence about the way someone might move while playing tennis.", + "Write a sentence about someone doing a somersault.", + "Describe the motion of someone doing a sit-up.", + "Describe the way someone might move while doing a backbend.", + "Describe the movement of someone reaching for something.", + "Write a brief summary of how someone might move their arms while salsa dancing.", + "Write a brief summary of how someone might move their torso while dancing.", + "Describe the way someone might move while doing a front handspring.", + "Describe the way someone might move while doing a front flip.", + "Describe the way someone might move while skiing.", + "Write a caption for a video of someone doing a quintuple 
salchow.", + "Do you know how human moves?", + "Describe the way someone might move while doing a pirouette.", + "Write a sentence about the way someone might move while playing soccer.", + "Describe the way someone might move their hands while playing piano.", + "Write a brief summary of how someone might move their legs while dancing.", + "Describe the way someone might move while doing a cheat gainer.", + "Describe the motion of someone doing a squat.", + "Write a sentence about the way someone might move while playing basketball.", + "Write a caption for a video of someone doing a quadruple salchow.", + "Write a sentence about the way someone might move while playing football.", + "Write a brief summary of how someone might move their hips while doing the rumba.", + "Write a sentence about the way someone might move while practicing yoga.", + "Write a brief summary of how someone might move their arms while doing the waltz.", + "Describe the motion of someone doing a push-up.", + "Write a sentence about the way someone might move while practicing boxing.", + "Write a caption for a video of someone doing a triple lutz.", + "Write a caption for a video of someone doing a back layout.", + "Write a sentence about the way someone might move while practicing taekwondo.", + "Give me a motion description:", + "Describe the way someone might move while doing a split.", + "Write a sentence about the way someone might move while practicing karate.", + "Describe the way someone might move while doing a back tuck.", + "Write a caption for a video of someone walking.", + "Describe the way someone might move while doing a side somersault.", + "Give a brief summary of a person's posture.", + "Write a brief summary of how someone might move their arms while doing the quickstep.", + "Describe the way someone might move while swimming.", + "Describe a physical action a person might take.", + "Write a sentence about the way someone might move while practicing Brazilian jiu-jitsu.", + "Describe how someone might move their arms while running.", + "Write a sentence about the way someone might move while practicing Muay Thai.", + "Describe the way someone might move while doing a tornado kick.", + "Write a caption for a video of someone doing a quadruple toe loop.", + "Describe the way someone might move while doing a back handspring.", + "Describe the motion of someone doing a box jump.", + "Write a caption for a video of someone doing a backflip.", + "Write a brief summary of how someone might move their feet while dancing.", + "Describe the way someone might move while climbing stairs.", + "Describe the way someone might move while doing a back dive.", + "Describe the motion of someone doing a farmer's walk.", + "Describe the motion of someone doing a jump squat.", + "Describe the motion of someone doing a kettlebell swing.", + "Describe the way someone might move while doing a butterfly kick.", + "Describe the way someone might move while doing a wall spin.", + "Write a sentence about the way someone might move while practicing fencing.", + "Write a sentence about the way someone might move while practicing archery.", + "Come up with a human motion caption.", + "Write a brief summary of how someone might move their legs while doing the paso doble.", + "Write a caption for a video of someone doing a triple axel combination.", + "Write a brief description of someone jumping.", + "Write a sentence about the way someone might move while practicing martial arts.", + "Write a brief summary of how someone might 
move their hips while doing the samba." + ], + "output": [ + "" + ] + } + }, + "Motion-to-Motion": { + "motion_prediction": { + "class": "predict", + "input": [ + "Predict motion: ", + "Do the motion prediction task for " + ], + "output": [ + "" + ] + }, + "motion_inbetween": { + "class": "inbetween", + "input": [ + "Complete the masked motion: ", + "Here is a masked motion sequence , complete it" + ], + "output": [ + "" + ] + } + } +} diff --git a/prepare/instructions/template_pretrain.json b/prepare/instructions/template_pretrain.json new file mode 100644 index 0000000..39aa720 --- /dev/null +++ b/prepare/instructions/template_pretrain.json @@ -0,0 +1,35 @@ +{ + "Text-to-Motion": { + "t2m": { + "class": "t2m", + "input": [ + "" + ], + "output": [ + "" + ] + } + }, + "Motion-to-Text": { + "m2t": { + "class": "m2t", + "input": [ + "" + ], + "output": [ + "" + ] + } + }, + "Motion Prediction": { + "pred": { + "class": "predict", + "input": [ + "Predict motion: " + ], + "output": [ + "" + ] + } + } +} diff --git a/prepare/merge_smplh_mano.py b/prepare/merge_smplh_mano.py new file mode 100644 index 0000000..affb604 --- /dev/null +++ b/prepare/merge_smplh_mano.py @@ -0,0 +1,130 @@ +# inspired and modified by Mathis Petrovich from +# https://github.com/vchoutas/smplx/tree/master/tools + +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems and the Max Planck Institute for Biological +# Cybernetics. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de + +import os +import os.path as osp +import pickle + +import argparse + +import numpy as np + + +def remove_chumpy_dep(dico): + output_dict = {} + for key, val in dico.items(): + if 'chumpy' in str(type(val)): + output_dict[key] = np.array(val) + else: + output_dict[key] = val + return output_dict + + +def load_and_remove_chumpy_dep(path): + with open(path, 'rb') as pkl_file: + import warnings + warnings.filterwarnings("ignore", category=DeprecationWarning) + data = pickle.load(pkl_file, encoding="latin1") + + data = remove_chumpy_dep(data) + return data + + +def load_npz_into_dict(path): + data = {key: val for key, val in np.load(path).items()} + data = remove_chumpy_dep(data) + return data + + +def load_and_clean_data(path): + ext = os.path.splitext(path)[-1] + if ext == ".npz": + data = load_npz_into_dict(path) + elif ext == ".pkl": + data = load_and_remove_chumpy_dep(path) + else: + raise TypeError("The format should be pkl or npz") + return data + + +def merge_models(smplh_fn, mano_left_fn, mano_right_fn, + output_folder='output'): + + body_data = load_and_clean_data(smplh_fn) + lhand_data = load_and_clean_data(mano_left_fn) + rhand_data = load_and_clean_data(mano_right_fn) + + modelname = osp.split(smplh_fn)[1] + parent_folder = osp.split(osp.split(smplh_fn)[0])[1] + if "female" in parent_folder + "_" + modelname.lower(): + out_fn = "SMPLH_FEMALE.npz" + elif "male" in parent_folder + "_" + modelname.lower(): + out_fn = "SMPLH_MALE.npz" + elif "neutral" in parent_folder + "_" + modelname.lower(): + out_fn = "SMPLH_NEUTRAL.npz" + else: + out_fn = modelname + + output_data = body_data.copy() + output_data['hands_componentsl'] = lhand_data['hands_components'] + output_data['hands_componentsr'] = rhand_data['hands_components'] + + output_data['hands_coeffsl'] = lhand_data['hands_coeffs'] + output_data['hands_coeffsr'] = rhand_data['hands_coeffs'] + + output_data['hands_meanl'] = lhand_data['hands_mean'] + output_data['hands_meanr'] = rhand_data['hands_mean'] + + # Just in case + output_data = remove_chumpy_dep(output_data) + + out_path = osp.join(output_folder, out_fn) + print('Saving to {}'.format(out_path)) + + # np.savez(out_path, output_data) + np.savez_compressed(out_path, **output_data) + # with open(out_path, 'wb') as output_file: + # pickle.dump(output_data, output_file) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--smplh-fn', dest='smplh_fn', required=True, + type=str, help='The path to the SMPLH model') + parser.add_argument('--mano-left-fn', dest='mano_left_fn', required=True, + type=str, help='The path to the left hand MANO model') + parser.add_argument('--mano-right-fn', dest='mano_right_fn', required=True, + type=str, help='The path to the right hand MANO model') + parser.add_argument('--output-folder', dest='output_folder', + required=True, type=str, + help='The path to the output folder') + + args = parser.parse_args() + + smplh_fn = args.smplh_fn + mano_left_fn = args.mano_left_fn + mano_right_fn = args.mano_right_fn + output_folder = args.output_folder + + if not osp.exists(output_folder): + print('Creating directory: {}'.format(output_folder)) + os.makedirs(output_folder) + + merge_models(smplh_fn, mano_left_fn, mano_right_fn, output_folder) diff --git a/prepare/prepare_t5.sh b/prepare/prepare_t5.sh new file mode 100644 index 0000000..529e910 --- /dev/null +++ b/prepare/prepare_t5.sh @@ -0,0 +1,4 @@ +cd deps/ +git lfs install +git clone
https://huggingface.co/openai/clip-vit-large-patch14 +cd .. diff --git a/prepare/requirements_render.txt b/prepare/requirements_render.txt new file mode 100644 index 0000000..d25b4b8 --- /dev/null +++ b/prepare/requirements_render.txt @@ -0,0 +1,6 @@ +# for rendering in blender python +pytest-shutil +matplotlib +tqdm +hydra-core +six diff --git a/prepare/smplh.sh b/prepare/smplh.sh new file mode 100644 index 0000000..693e017 --- /dev/null +++ b/prepare/smplh.sh @@ -0,0 +1,31 @@ +#!/bin/bash +echo "Extraction of the archives" +echo + +cd deps/smplh +mkdir tmp +cd tmp + +tar xfv ../smplh.tar.xz +unzip ../mano_v1_2.zip + +cd ../../../ +echo +echo "Done!" +echo +echo "Clean and merge models" +echo + +python prepare/merge_smplh_mano.py --smplh-fn deps/smplh/tmp/male/model.npz --mano-left-fn deps/smplh/tmp/mano_v1_2/models/MANO_LEFT.pkl --mano-right-fn deps/smplh/tmp/mano_v1_2/models/MANO_RIGHT.pkl --output-folder deps/smplh/ + +python prepare/merge_smplh_mano.py --smplh-fn deps/smplh/tmp/female/model.npz --mano-left-fn deps/smplh/tmp/mano_v1_2/models/MANO_LEFT.pkl --mano-right-fn deps/smplh/tmp/mano_v1_2/models/MANO_RIGHT.pkl --output-folder deps/smplh/ + +python prepare/merge_smplh_mano.py --smplh-fn deps/smplh/tmp/neutral/model.npz --mano-left-fn deps/smplh/tmp/mano_v1_2/models/MANO_LEFT.pkl --mano-right-fn deps/smplh/tmp/mano_v1_2/models/MANO_RIGHT.pkl --output-folder deps/smplh/ + +echo +echo "Done!" +echo +echo "Deleting tmp files" +rm -rf deps/smplh/tmp/ +echo +echo "Done!" diff --git a/render.py b/render.py new file mode 100644 index 0000000..a002e3a --- /dev/null +++ b/render.py @@ -0,0 +1,152 @@ +import os +import random +import shutil +import sys +import natsort +from pathlib import Path +from argparse import ArgumentParser + +try: + import bpy + + sys.path.append(os.path.dirname(bpy.data.filepath)) + + # local packages + sys.path.append(os.path.expanduser("~/.local/lib/python3.9/site-packages")) +except ImportError: + raise ImportError( + "Blender is not properly installed or not launch properly. See README.md to have instruction on how to install and use blender." 
+ ) + + +# Monkey patch argparse such that +# blender / python / hydra parsing works +def parse_args(self, args=None, namespace=None): + if args is not None: + return self.parse_args_bak(args=args, namespace=namespace) + try: + idx = sys.argv.index("--") + args = sys.argv[idx + 1:] # the list after '--' + except ValueError as e: # '--' not in the list: + args = [] + return self.parse_args_bak(args=args, namespace=namespace) + + +setattr(ArgumentParser, 'parse_args_bak', ArgumentParser.parse_args) +setattr(ArgumentParser, 'parse_args', parse_args) + +from mGPT.config import parse_args + + +def render_cli() -> None: + # parse options + cfg = parse_args(phase="render") # parse config file + cfg.FOLDER = cfg.RENDER.FOLDER + + if cfg.RENDER.INPUT_MODE.lower() == "npy": + output_dir = Path(os.path.dirname(cfg.RENDER.NPY)) + paths = [cfg.RENDER.NPY] + elif cfg.RENDER.INPUT_MODE.lower() == "dir": + output_dir = Path(cfg.RENDER.DIR) + paths = [] + file_list = natsort.natsorted(os.listdir(cfg.RENDER.DIR)) + begin_id = random.randrange(0, len(file_list)) + file_list = file_list[begin_id:] + file_list[:begin_id] + + # render mesh npy first + for item in file_list: + if item.endswith("_mesh.npy"): + paths.append(os.path.join(cfg.RENDER.DIR, item)) + + # then render joint npy + for item in file_list: + if item.endswith(".npy") and not item.endswith("_mesh.npy"): + paths.append(os.path.join(cfg.RENDER.DIR, item)) + + print(f"begin to render for {paths[0]}") + + import numpy as np + + from mGPT.render.blender import render + from mGPT.render.video import Video + + init = True + for path in paths: + # check existed mp4 or under rendering + if cfg.RENDER.MODE == "video": + if os.path.exists(path.replace(".npy", ".mp4")) or os.path.exists( + path.replace(".npy", "_frames")): + print(f"npy is rendered or under rendering {path}") + continue + else: + # check existed png + if os.path.exists(path.replace(".npy", ".png")): + print(f"npy is rendered or under rendering {path}") + continue + + if cfg.RENDER.MODE == "video": + frames_folder = os.path.join( + output_dir, + path.replace(".npy", "_frames").split('/')[-1]) + os.makedirs(frames_folder, exist_ok=True) + else: + frames_folder = os.path.join( + output_dir, + path.replace(".npy", ".png").split('/')[-1]) + + try: + data = np.load(path) + if data.shape[0] == 1: + data = data[0] + except FileNotFoundError: + print(f"{path} not found") + continue + + if cfg.RENDER.MODE == "video": + frames_folder = os.path.join( + output_dir, + path.replace(".npy", "_frames").split("/")[-1]) + else: + frames_folder = os.path.join( + output_dir, + path.replace(".npy", ".png").split("/")[-1]) + + out = render( + data, + frames_folder, + canonicalize=cfg.RENDER.CANONICALIZE, + exact_frame=cfg.RENDER.EXACT_FRAME, + num=cfg.RENDER.NUM, + mode=cfg.RENDER.MODE, + model_path=cfg.RENDER.MODEL_PATH, + faces_path=cfg.RENDER.FACES_PATH, + downsample=cfg.RENDER.DOWNSAMPLE, + always_on_floor=cfg.RENDER.ALWAYS_ON_FLOOR, + oldrender=cfg.RENDER.OLDRENDER, + res=cfg.RENDER.RES, + init=init, + gt=cfg.RENDER.GT, + accelerator=cfg.ACCELERATOR, + device=cfg.DEVICE, + ) + + init = False + + if cfg.RENDER.MODE == "video": + shutil.copytree(frames_folder, frames_folder+'_img') + if cfg.RENDER.DOWNSAMPLE: + video = Video(frames_folder, fps=cfg.RENDER.FPS) + else: + video = Video(frames_folder, fps=cfg.RENDER.FPS) + + vid_path = frames_folder.replace("_frames", ".mp4") + video.save(out_path=vid_path) + shutil.rmtree(frames_folder) + print(f"remove tmp fig folder and save video in {vid_path}") + + else: + 
print(f"Frame generated at: {out}") + + +if __name__ == "__main__": + render_cli() diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..f14e147 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,26 @@ +tensorboard +pytorch_lightning +torchmetrics +omegaconf +shortuuid +chumpy +transformers +diffusers +einops +wandb +rich +matplotlib + +# for visualization +smplx==0.1.28 +trimesh==3.9.24 +joblib==1.2.0 +h5py +scikit-image +spacy +ftfy +more-itertools +natsort +pyrender +moviepy +librosa diff --git a/scripts/fbx_output.py b/scripts/fbx_output.py new file mode 100644 index 0000000..8f2cb6b --- /dev/null +++ b/scripts/fbx_output.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de +# +# Author: Joachim Tesch, Max Planck Institute for Intelligent Systems, Perceiving Systems +# +# Create keyframed animated skinned SMPL mesh from .pkl pose description +# +# Generated mesh will be exported in FBX or glTF format +# +# Notes: +# + Male and female gender models only +# + Script can be run from command line or in Blender Editor (Text Editor>Run Script) +# + Command line: Install mathutils module in your bpy virtualenv with 'pip install mathutils==2.81.2' + +import os +import sys +import bpy +import time +import joblib +import argparse +import numpy as np +import addon_utils +from math import radians +from mathutils import Matrix, Vector, Quaternion, Euler + +# Globals +male_model_path = '/apdcephfs/share_1227775/shingxchen/uicap/data/SMPL_unity_v.1.0.0/smpl/Models/SMPL_m_unityDoubleBlends_lbs_10_scale5_207_v1.0.0.fbx' +female_model_path = '/apdcephfs/share_1227775/shingxchen/uicap/data/SMPL_unity_v.1.0.0/smpl/Models/SMPL_f_unityDoubleBlends_lbs_10_scale5_207_v1.0.0.fbx' + +fps_source = 30 +fps_target = 30 + +gender = 'male' + +start_origin = 1 + +bone_name_from_index = { + 0 : 'Pelvis', + 1 : 'L_Hip', + 2 : 'R_Hip', + 3 : 'Spine1', + 4 : 'L_Knee', + 5 : 'R_Knee', + 6 : 'Spine2', + 7 : 'L_Ankle', + 8: 'R_Ankle', + 9: 'Spine3', + 10: 'L_Foot', + 11: 'R_Foot', + 12: 'Neck', + 13: 'L_Collar', + 14: 'R_Collar', + 15: 'Head', + 16: 'L_Shoulder', + 17: 'R_Shoulder', + 18: 'L_Elbow', + 19: 'R_Elbow', + 20: 'L_Wrist', + 21: 'R_Wrist', + 22: 'L_Hand', + 23: 'R_Hand' +} + +# Helper functions + +# Computes rotation matrix through Rodrigues formula as in cv2.Rodrigues +# Source: smpl/plugins/blender/corrective_bpy_sh.py +def Rodrigues(rotvec): + theta = np.linalg.norm(rotvec) + r = (rotvec/theta).reshape(3, 1) if theta > 0. 
else rotvec + cost = np.cos(theta) + mat = np.asarray([[0, -r[2], r[1]], + [r[2], 0, -r[0]], + [-r[1], r[0], 0]]) + return(cost*np.eye(3) + (1-cost)*r.dot(r.T) + np.sin(theta)*mat) + + +# Setup scene +def setup_scene(model_path, fps_target): + scene = bpy.data.scenes['Scene'] + + ########################### + # Engine independent setup + ########################### + + scene.render.fps = fps_target + + # Remove default cube + if 'Cube' in bpy.data.objects: + bpy.data.objects['Cube'].select_set(True) + bpy.ops.object.delete() + + # Import gender specific .fbx template file + bpy.ops.import_scene.fbx(filepath=model_path) + + +# Process single pose into keyframed bone orientations +def process_pose(current_frame, pose, trans, pelvis_position): + + if pose.shape[0] == 72: + rod_rots = pose.reshape(24, 3) + else: + rod_rots = pose.reshape(26, 3) + + mat_rots = [Rodrigues(rod_rot) for rod_rot in rod_rots] + + # Set the location of the Pelvis bone to the translation parameter + armature = bpy.data.objects['Armature'] + bones = armature.pose.bones + + # Pelvis: X-Right, Y-Up, Z-Forward (Blender -Y) + + # Set absolute pelvis location relative to Pelvis bone head + bones[bone_name_from_index[0]].location = Vector((100*trans[1], 100*trans[2], 100*trans[0])) - pelvis_position + + # bones['Root'].location = Vector(trans) + bones[bone_name_from_index[0]].keyframe_insert('location', frame=current_frame) + + for index, mat_rot in enumerate(mat_rots, 0): + if index >= 24: + continue + + bone = bones[bone_name_from_index[index]] + + bone_rotation = Matrix(mat_rot).to_quaternion() + quat_x_90_cw = Quaternion((1.0, 0.0, 0.0), radians(-90)) + quat_z_90_cw = Quaternion((0.0, 0.0, 1.0), radians(-90)) + + if index == 0: + # Rotate pelvis so that avatar stands upright and looks along negative Y avis + bone.rotation_quaternion = (quat_x_90_cw @ quat_z_90_cw) @ bone_rotation + else: + bone.rotation_quaternion = bone_rotation + + bone.keyframe_insert('rotation_quaternion', frame=current_frame) + + return + + +# Process all the poses from the pose file +def process_poses( + input_path, + gender, + fps_source, + fps_target, + start_origin, + person_id=1, +): + + print('Processing: ' + input_path) + + data = joblib.load(input_path) + person_id = list(data.keys())[0] + poses = data[person_id]['pose'] + if 'trans' not in data[person_id].keys(): + trans = np.zeros((poses.shape[0], 3)) + else: + trans = data[person_id]['trans'] + + if gender == 'female': + model_path = female_model_path + for k,v in bone_name_from_index.items(): + bone_name_from_index[k] = 'f_avg_' + v + elif gender == 'male': + model_path = male_model_path + for k,v in bone_name_from_index.items(): + bone_name_from_index[k] = 'm_avg_' + v + else: + print('ERROR: Unsupported gender: ' + gender) + sys.exit(1) + + # Limit target fps to source fps + if fps_target > fps_source: + fps_target = fps_source + + print(f'Gender: {gender}') + print(f'Number of source poses: {str(poses.shape[0])}') + print(f'Source frames-per-second: {str(fps_source)}') + print(f'Target frames-per-second: {str(fps_target)}') + print('--------------------------------------------------') + + setup_scene(model_path, fps_target) + + scene = bpy.data.scenes['Scene'] + sample_rate = int(fps_source/fps_target) + scene.frame_end = (int)(poses.shape[0]/sample_rate) + + # Retrieve pelvis world position. + # Unit is [cm] due to Armature scaling. + # Need to make copy since reference will change when bone location is modified. 
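+ # edit_bones is only readable while the armature is in Edit mode, hence the temporary mode switch below.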
+ bpy.ops.object.mode_set(mode='EDIT') + pelvis_position = Vector(bpy.data.armatures[0].edit_bones[bone_name_from_index[0]].head) + bpy.ops.object.mode_set(mode='OBJECT') + + source_index = 0 + # source_index = 16 + frame = 1 + + offset = np.array([0.0, 0.0, 0.0]) + + # nframes = 32 + # while source_index < nframes: + while source_index < poses.shape[0]: + print('Adding pose: ' + str(source_index)) + + if start_origin: + if source_index == 0: + offset = np.array([trans[source_index][0], trans[source_index][1], 0]) + + # Go to new frame + scene.frame_set(frame) + + process_pose(frame, poses[source_index], (trans[source_index] - offset), pelvis_position) + source_index += sample_rate + frame += 1 + + return frame + + +def export_animated_mesh(output_path): + # Create output directory if needed + output_dir = os.path.dirname(output_path) + if not os.path.isdir(output_dir): + os.makedirs(output_dir, exist_ok=True) + + # Select only skinned mesh and rig + bpy.ops.object.select_all(action='DESELECT') + bpy.data.objects['Armature'].select_set(True) + bpy.data.objects['Armature'].children[0].select_set(True) + + if output_path.endswith('.glb'): + print('Exporting to glTF binary (.glb)') + # Currently exporting without shape/pose shapes for smaller file sizes + bpy.ops.export_scene.gltf(filepath=output_path, export_format='GLB', export_selected=True, export_morph=False) + elif output_path.endswith('.fbx'): + print('Exporting to FBX binary (.fbx)') + bpy.ops.export_scene.fbx(filepath=output_path, use_selection=True, add_leaf_bones=False) + else: + print('ERROR: Unsupported export format: ' + output_path) + sys.exit(1) + + return + + +if __name__ == '__main__': + try: + if bpy.app.background: + + parser = argparse.ArgumentParser(description='Create keyframed animated skinned SMPL mesh from VIBE output') + parser.add_argument('--input', dest='input_path', type=str, required=True, + help='Input file or directory') + parser.add_argument('--output', dest='output_path', type=str, required=True, + help='Output file or directory') + parser.add_argument('--fps_source', type=int, default=fps_source, + help='Source framerate') + parser.add_argument('--fps_target', type=int, default=fps_target, + help='Target framerate') + parser.add_argument('--gender', type=str, default=gender, + help='Always use specified gender') + parser.add_argument('--start_origin', type=int, default=start_origin, + help='Start animation centered above origin') + parser.add_argument('--person_id', type=int, default=1, + help='Detected person ID to use for fbx animation') + + parser.add_argument('-noaudio', action='store_true', + help='dummy - for blender loading') + parser.add_argument('--background', action='store_true', + help='dummy - for blender loading') + parser.add_argument('--python', type=str, default=gender, + help='dummy - for blender loading') + args = parser.parse_args() + + input_path = args.input_path + output_path = args.output_path + + if not os.path.exists(input_path): + print('ERROR: Invalid input path') + sys.exit(1) + + fps_source = args.fps_source + fps_target = args.fps_target + + gender = args.gender + + start_origin = args.start_origin + + # end if bpy.app.background + + startTime = time.perf_counter() + + # Process data + cwd = os.getcwd() + + # Turn relative input/output paths into absolute paths + if not input_path.startswith(os.path.sep): + input_path = os.path.join(cwd, input_path) + + if not output_path.startswith(os.path.sep): + output_path = os.path.join(cwd, output_path) + + print('Input path: ' + 
input_path) + print('Output path: ' + output_path) + + if not (output_path.endswith('.fbx') or output_path.endswith('.glb')): + print('ERROR: Invalid output format (must be .fbx or .glb)') + sys.exit(1) + + # Process pose file + if input_path.endswith('.pkl'): + if not os.path.isfile(input_path): + print('ERROR: Invalid input file') + sys.exit(1) + + poses_processed = process_poses( + input_path=input_path, + gender=gender, + fps_source=fps_source, + fps_target=fps_target, + start_origin=start_origin, + person_id=args.person_id + ) + export_animated_mesh(output_path) + + print('--------------------------------------------------') + print('Animation export finished.') + print(f'Poses processed: {str(poses_processed)}') + print(f'Processing time : {time.perf_counter() - startTime:.2f} s') + print('--------------------------------------------------') + sys.exit(0) + + except SystemExit as ex: + if ex.code is None: + exit_status = 0 + else: + exit_status = ex.code + + print('Exiting. Exit status: ' + str(exit_status)) + + # Only exit to OS when we are not running in Blender GUI + if bpy.app.background: + sys.exit(exit_status) \ No newline at end of file diff --git a/scripts/fbx_output_smplx.py b/scripts/fbx_output_smplx.py new file mode 100644 index 0000000..b69fddd --- /dev/null +++ b/scripts/fbx_output_smplx.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# Contact: ps-license@tuebingen.mpg.de +# +# Author: Joachim Tesch, Max Planck Institute for Intelligent Systems, Perceiving Systems +# +# Create keyframed animated skinned SMPL mesh from .pkl pose description +# +# Generated mesh will be exported in FBX or glTF format +# +# Notes: +# + Male and female gender models only +# + Script can be run from command line or in Blender Editor (Text Editor>Run Script) +# + Command line: Install mathutils module in your bpy virtualenv with 'pip install mathutils==2.81.2' + +import os +import sys +import bpy +import time +import joblib +import argparse +import numpy as np +import addon_utils +from math import radians +from mathutils import Matrix, Vector, Quaternion, Euler + +# Globals +neural_smplx_path = '/apdcephfs/share_1227775/shingxchen/uicap/data/smplx/Models/smplx-neutral.fbx' +male_model_path = '/apdcephfs/share_1227775/shingxchen/uicap/data/SMPL_unity_v.1.0.0/smpl/Models/SMPL_m_unityDoubleBlends_lbs_10_scale5_207_v1.0.0.fbx' +female_model_path = '/apdcephfs/share_1227775/shingxchen/uicap/data/SMPL_unity_v.1.0.0/smpl/Models/SMPL_f_unityDoubleBlends_lbs_10_scale5_207_v1.0.0.fbx' + +# bone root name of fbx blender +ROOT_NAME = 'SMPLX-neutral' +# ROOT_NAME = 'Armature' + +fps_source = 30 +fps_target = 30 + +gender = 'male' + +start_origin = 1 + +BODY_JOINT_NAMES = [ + 'pelvis', + 'left_hip', + 'right_hip', + 'spine1', + 'left_knee', + 'right_knee', + 'spine2', + 'left_ankle', + 'right_ankle', + 'spine3', + 'left_foot', + 'right_foot', + 'neck', + 'left_collar', + 'right_collar', + 'head', + 'left_shoulder', + 'right_shoulder', + 'left_elbow', + 'right_elbow', + 'left_wrist', + 'right_wrist', + 'left_middle3',# faker hand + 'right_middle3',# faker hand +] + +LHAND_JOINT_NAMES = [ + 'left_index1', + 'left_index2', + 'left_index3', + 'left_middle1', + 'left_middle2', + 'left_middle3', + 'left_pinky1', + 'left_pinky2', + 'left_pinky3', + 'left_ring1', + 'left_ring2', + 'left_ring3', + 'left_thumb1', + 'left_thumb2', + 'left_thumb3', +] + +RHAND_JOINT_NAMES = [ + 'right_index1', + 'right_index2', + 'right_index3', + 'right_middle1', + 'right_middle2', + 'right_middle3', + 'right_pinky1', + 'right_pinky2', + 'right_pinky3', + 'right_ring1', + 'right_ring2', + 'right_ring3', + 'right_thumb1', + 'right_thumb2', + 'right_thumb3', +] +# Helper functions + +# Computes rotation matrix through Rodrigues formula as in cv2.Rodrigues +# Source: smpl/plugins/blender/corrective_bpy_sh.py +def Rodrigues(rotvec): + theta = np.linalg.norm(rotvec) + r = (rotvec/theta).reshape(3, 1) if theta > 0. 
else rotvec + cost = np.cos(theta) + mat = np.asarray([[0, -r[2], r[1]], + [r[2], 0, -r[0]], + [-r[1], r[0], 0]]) + return(cost*np.eye(3) + (1-cost)*r.dot(r.T) + np.sin(theta)*mat) + + +# Setup scene +def setup_scene(model_path, fps_target): + scene = bpy.data.scenes['Scene'] + + ########################### + # Engine independent setup + ########################### + + scene.render.fps = fps_target + + # Remove default cube + if 'Cube' in bpy.data.objects: + bpy.data.objects['Cube'].select_set(True) + bpy.ops.object.delete() + + # Import gender specific .fbx template file + bpy.ops.import_scene.fbx(filepath=model_path) + + +# Process single pose into keyframed bone orientations +def process_pose(current_frame, pose, lhandpose, rhandpose, trans, pelvis_position): + + rod_rots = pose.reshape(24, 4) + lhrod_rots = lhandpose.reshape(15, 4) + rhrod_rots = rhandpose.reshape(15, 4) + + # rod_rots = pose.reshape(24, 3) + # lhrod_rots = lhandpose.reshape(15, 3) + # rhrod_rots = rhandpose.reshape(15, 3) + + # mat_rots = [Rodrigues(rod_rot) for rod_rot in rod_rots] + # lhmat_rots = [Rodrigues(rod_rot) for rod_rot in rod_rots] + # rhmat_rots = [Rodrigues(rod_rot) for rod_rot in rod_rots] + + # Set the location of the Pelvis bone to the translation parameter + armature = bpy.data.objects[ROOT_NAME] + bones = armature.pose.bones + + # Pelvis: X-Right, Y-Up, Z-Forward (Blender -Y) + + # Set absolute pelvis location relative to Pelvis bone head + bones[BODY_JOINT_NAMES[0]].location = Vector((100*trans[1], 100*trans[2], 100*trans[0])) - pelvis_position + + # bones['Root'].location = Vector(trans) + bones[BODY_JOINT_NAMES[0]].keyframe_insert('location', frame=current_frame) + + for index, mat_rot in enumerate(rod_rots, 0): + if index >= 24: + continue + + bone = bones[BODY_JOINT_NAMES[index]] + + # bone_rotation = Matrix(mat_rot).to_quaternion() + bone_rotation = Quaternion(mat_rot) + quat_x_90_cw = Quaternion((1.0, 0.0, 0.0), radians(-90)) + quat_z_90_cw = Quaternion((0.0, 0.0, 1.0), radians(-90)) + + if index == 0: + # Rotate pelvis so that avatar stands upright and looks along negative Y avis + bone.rotation_quaternion = (quat_x_90_cw @ quat_z_90_cw) @ bone_rotation + else: + bone.rotation_quaternion = bone_rotation + + bone.keyframe_insert('rotation_quaternion', frame=current_frame) + + for index, mat_rot in enumerate(lhrod_rots, 0): + if index >= 15: + continue + bone = bones[LHAND_JOINT_NAMES[index]] + bone_rotation = Quaternion(mat_rot) + bone.rotation_quaternion = bone_rotation + bone.keyframe_insert('rotation_quaternion', frame=current_frame) + + for index, mat_rot in enumerate(rhrod_rots, 0): + if index >= 15: + continue + bone = bones[RHAND_JOINT_NAMES[index]] + bone_rotation = Quaternion(mat_rot) + bone.rotation_quaternion = bone_rotation + bone.keyframe_insert('rotation_quaternion', frame=current_frame) + + + return + + +# Process all the poses from the pose file +def process_poses( + input_path, + gender, + fps_source, + fps_target, + start_origin, + person_id=1, +): + + print('Processing: ' + input_path) + + smpl_params = joblib.load(input_path) + poses, lhposes, rhposes = [], [], [] + for iframe in smpl_params.keys(): + poses.append(smpl_params[iframe]['rot']) + lhposes.append(smpl_params[iframe]['hand_quaternions'][4:64].copy().reshape(-1, 4)) + rhposes.append(smpl_params[iframe]['hand_quaternions'][68:128].copy().reshape(-1, 4)) + poses = np.vstack(poses) + lhposes = np.stack(lhposes) + rhposes = np.stack(rhposes) + + trans = np.zeros((poses.shape[0], 3)) + # if 'trans' not in 
data[person_id].keys(): + # trans = np.zeros((poses.shape[0], 3)) + # else: + # trans = data[person_id]['trans'] + + model_path = neural_smplx_path + # if gender == 'female': + # model_path = female_model_path + # for k,v in BODY_JOINT_NAMES.items(): + # BODY_JOINT_NAMES[k] = 'f_avg_' + v + # elif gender == 'male': + # model_path = male_model_path + # for k,v in BODY_JOINT_NAMES.items(): + # BODY_JOINT_NAMES[k] = 'm_avg_' + v + # else: + # print('ERROR: Unsupported gender: ' + gender) + # sys.exit(1) + + # Limit target fps to source fps + if fps_target > fps_source: + fps_target = fps_source + + print(f'Gender: {gender}') + print(f'Number of source poses: {str(poses.shape[0])}') + print(f'Source frames-per-second: {str(fps_source)}') + print(f'Target frames-per-second: {str(fps_target)}') + print('--------------------------------------------------') + + setup_scene(model_path, fps_target) + + scene = bpy.data.scenes['Scene'] + sample_rate = int(fps_source/fps_target) + scene.frame_end = (int)(poses.shape[0]/sample_rate) + + # Retrieve pelvis world position. + # Unit is [cm] due to Armature scaling. + # Need to make copy since reference will change when bone location is modified. + bpy.ops.object.mode_set(mode='EDIT') + pelvis_position = Vector(bpy.data.armatures[0].edit_bones[BODY_JOINT_NAMES[0]].head) + bpy.ops.object.mode_set(mode='OBJECT') + + source_index = 0 + frame = 1 + + offset = np.array([0.0, 0.0, 0.0]) + + while source_index < poses.shape[0]: + # print('Adding pose: ' + str(source_index)) + if start_origin: + if source_index == 0: + offset = np.array([trans[source_index][0], trans[source_index][1], 0]) + + # Go to new frame + scene.frame_set(frame) + + process_pose(frame, + poses[source_index], + lhposes[source_index], + rhposes[source_index], + (trans[source_index] - offset), + pelvis_position) + source_index += sample_rate + frame += 1 + + return frame + + +def export_animated_mesh(output_path): + # Create output directory if needed + output_dir = os.path.dirname(output_path) + if not os.path.isdir(output_dir): + os.makedirs(output_dir, exist_ok=True) + + # Select only skinned mesh and rig + bpy.ops.object.select_all(action='DESELECT') + bpy.data.objects[ROOT_NAME].select_set(True) + bpy.data.objects[ROOT_NAME].children[0].select_set(True) + + if output_path.endswith('.glb'): + print('Exporting to glTF binary (.glb)') + # Currently exporting without shape/pose shapes for smaller file sizes + bpy.ops.export_scene.gltf(filepath=output_path, export_format='GLB', export_selected=True, export_morph=False) + elif output_path.endswith('.fbx'): + print('Exporting to FBX binary (.fbx)') + bpy.ops.export_scene.fbx(filepath=output_path, use_selection=True, add_leaf_bones=False) + else: + print('ERROR: Unsupported export format: ' + output_path) + sys.exit(1) + + return + + +if __name__ == '__main__': + try: + if bpy.app.background: + + parser = argparse.ArgumentParser(description='Create keyframed animated skinned SMPL mesh from VIBE output') + parser.add_argument('--input', dest='input_path', type=str, required=True, + help='Input file or directory') + parser.add_argument('--output', dest='output_path', type=str, required=True, + help='Output file or directory') + parser.add_argument('--fps_source', type=int, default=fps_source, + help='Source framerate') + parser.add_argument('--fps_target', type=int, default=fps_target, + help='Target framerate') + parser.add_argument('--gender', type=str, default=gender, + help='Always use specified gender') + parser.add_argument('--start_origin', 
type=int, default=start_origin, + help='Start animation centered above origin') + parser.add_argument('--person_id', type=int, default=1, + help='Detected person ID to use for fbx animation') + + parser.add_argument('-noaudio', action='store_true', + help='dummy - for blender loading') + parser.add_argument('--background', action='store_true', + help='dummy - for blender loading') + parser.add_argument('--python', type=str, default=gender, + help='dummy - for blender loading') + args = parser.parse_args() + + input_path = args.input_path + output_path = args.output_path + + if not os.path.exists(input_path): + print('ERROR: Invalid input path') + sys.exit(1) + + fps_source = args.fps_source + fps_target = args.fps_target + + gender = args.gender + + start_origin = args.start_origin + + # end if bpy.app.background + + startTime = time.perf_counter() + + # Process data + cwd = os.getcwd() + + # Turn relative input/output paths into absolute paths + if not input_path.startswith(os.path.sep): + input_path = os.path.join(cwd, input_path) + + if not output_path.startswith(os.path.sep): + output_path = os.path.join(cwd, output_path) + + print('Input path: ' + input_path) + print('Output path: ' + output_path) + + if not (output_path.endswith('.fbx') or output_path.endswith('.glb')): + print('ERROR: Invalid output format (must be .fbx or .glb)') + sys.exit(1) + + # Process pose file + if input_path.endswith('.pkl'): + if not os.path.isfile(input_path): + print('ERROR: Invalid input file') + sys.exit(1) + + poses_processed = process_poses( + input_path=input_path, + gender=gender, + fps_source=fps_source, + fps_target=fps_target, + start_origin=start_origin, + person_id=args.person_id + ) + export_animated_mesh(output_path) + + print('--------------------------------------------------') + print('Animation export finished.') + print(f'Poses processed: {str(poses_processed)}') + print(f'Processing time : {time.perf_counter() - startTime:.2f} s') + print('--------------------------------------------------') + sys.exit(0) + + except SystemExit as ex: + if ex.code is None: + exit_status = 0 + else: + exit_status = ex.code + + print('Exiting. 
Exit status: ' + str(exit_status)) + + # Only exit to OS when we are not running in Blender GUI + if bpy.app.background: + sys.exit(exit_status) \ No newline at end of file diff --git a/scripts/fit_motion.sh b/scripts/fit_motion.sh new file mode 100644 index 0000000..ef0593f --- /dev/null +++ b/scripts/fit_motion.sh @@ -0,0 +1 @@ +python -m fit --dir $1 --save_folder $2 --cuda True \ No newline at end of file diff --git a/scripts/fit_motion_parallel.sh b/scripts/fit_motion_parallel.sh new file mode 100644 index 0000000..e18e7ab --- /dev/null +++ b/scripts/fit_motion_parallel.sh @@ -0,0 +1,31 @@ +# parallel fit +for i in `seq 0 4` +do + for j in `seq 0 1` +do + CUDA_VISIBLE_DEVICES=$i python -m fit --dir $1 --save_folder $2 --cuda True & + echo $j & + done +done + +wait +echo "all finished" + +# # parallel render +# for i in `seq 0 25` +# do +# CUDA_VISIBLE_DEVICES=$3 python -m fit --dir $1 --save_folder $2 --cuda True & +# echo $i +# done +# wait +# echo "all weakup" + + +# # gpu parallel render +# for i in `seq 0 7` +# do +# CUDA_VISIBLE_DEVICES=$i python -m fit --dir $1 --save_folder $2 --cuda True & +# echo $i +# done +# wait +# echo "all weakup" diff --git a/scripts/get_motion_code.py b/scripts/get_motion_code.py new file mode 100644 index 0000000..ac27bda --- /dev/null +++ b/scripts/get_motion_code.py @@ -0,0 +1,70 @@ +import os +import numpy as np +import pytorch_lightning as pl +import torch +from pathlib import Path +from tqdm import tqdm +from mGPT.config import parse_args +from mGPT.data.build_data import build_data +from mGPT.models.build_model import build_model + + +def main(): + # parse options + cfg = parse_args(phase="test") # parse config file + cfg.TRAIN.STAGE = "token" + cfg.TRAIN.BATCH_SIZE = 1 + + # set seed + pl.seed_everything(cfg.SEED_VALUE) + + # gpu setting + if cfg.ACCELERATOR == "gpu": + os.environ["PYTHONWARNINGS"] = "ignore" + os.environ["TOKENIZERS_PARALLELISM"] = "false" + + # create dataset + datasets = build_data(cfg, phase='token') + print("datasets module initialized") + output_dir = os.path.join(datasets.hparams.data_root, cfg.DATASET.CODE_PATH) + + os.makedirs(output_dir, exist_ok=True) + + # create model + model = build_model(cfg, datasets) + if hasattr(model, "motion_vae"): + model.vae = model.motion_vae + print("model loaded") + + # Strict load vae model + assert cfg.TRAIN.PRETRAINED_VAE is not None + state_dict = torch.load(cfg.TRAIN.PRETRAINED_VAE, + map_location="cpu")['state_dict'] + print(f"Loading pretrained VAE from {cfg.TRAIN.PRETRAINED_VAE}") + # apply the weights to the VAE; the stage-1 checkpoint is assumed to store them + # under a "vae."/"motion_vae." prefix, so strip it (fall back to the raw keys otherwise) + vae_dict = {k.split("vae.")[-1]: v for k, v in state_dict.items() if "vae." in k} + model.vae.load_state_dict(vae_dict if vae_dict else state_dict, strict=True) + + if cfg.ACCELERATOR == "gpu": + model = model.to('cuda') + + for batch in tqdm(datasets.train_dataloader(), + desc=f'motion tokenize'): + name = batch['text'] + + pose = batch['motion'] + pose = pose.cuda().float() + + if pose.shape[1] == 0: + continue + target, _ = model.vae.encode(pose) + target = target.to('cpu').numpy() + + target_path = os.path.join(output_dir, name[0] + '.npy') + Path(target_path).parent.mkdir(parents=True, exist_ok=True) + np.save(target_path, target) + + print( + f'Motion tokenization done, the motion tokens are saved to {output_dir}' + ) + + +if __name__ == "__main__": + main() diff --git a/scripts/plys2npy.py b/scripts/plys2npy.py new file mode 100644 index 0000000..923afe6 --- /dev/null +++ b/scripts/plys2npy.py @@ -0,0 +1,56 @@ +import os +import time +from argparse import ArgumentParser +from pathlib import Path + +import natsort +import numpy as np +import torch +import trimesh +from tqdm import tqdm + + +def main(): + parser = ArgumentParser() + + group = 
parser.add_argument_group("Params") + group.add_argument( + "--ply_dir", + type=str, + required=True, + help="ply set", + ) + group.add_argument( + "--out_dir", + type=str, + required=True, + help="output folder", + ) + params = parser.parse_args() + plys2npy(params.ply_dir, params.out_dir) + +def plys2npy(ply_dir, out_dir): + ply_dir = Path(ply_dir) + paths = [] + file_list = natsort.natsorted(os.listdir(ply_dir)) + for item in file_list: + if item.endswith(".ply") and not item.endswith("_gt.ply"): + paths.append(os.path.join(ply_dir, item)) + + + meshs = np.zeros((len(paths), 6890, 3)) + for i, path in enumerate(paths): + mesh = trimesh.load_mesh(path, process=False) + vs = mesh.vertices + assert vs.shape == (6890, 3) + meshs[i] = vs + + basename = os.path.basename(ply_dir) + if basename.startswith("SMPLFit_"): + basename = basename[len("SMPLFit_"):] + file_name = os.path.join(out_dir, basename+ "_mesh.npy") + np.save(file_name, meshs) + + +if __name__ == "__main__": + main() diff --git a/scripts/visualize_motion.sh b/scripts/visualize_motion.sh new file mode 100644 index 0000000..6259a53 --- /dev/null +++ b/scripts/visualize_motion.sh @@ -0,0 +1,10 @@ +# for npy folder +# CUDA_VISIBLE_DEVICES=0 /apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender --background --python render.py -- --cfg=./configs/render.yaml --dir=$1 --mode=$2 + +for j in `seq 0 2` +do + CUDA_VISIBLE_DEVICES=0 /apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender --background --python render.py -- --dir=$1 --mode=$2 +done + +# for single npy +# /apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender --background --python render.py -- --cfg=./configs/render_cx.yaml --npy=$1 --joint_type=HumanML3D diff --git a/scripts/visualize_motion_parallel.sh b/scripts/visualize_motion_parallel.sh new file mode 100644 index 0000000..e333ae6 --- /dev/null +++ b/scripts/visualize_motion_parallel.sh @@ -0,0 +1,25 @@ +# # parallel fit +# for i in `seq 0 7` +# do +# for j in `seq 0 2` +# do +# CUDA_VISIBLE_DEVICES=$i python -m fit --dir $1 --save_folder $2 --cuda True & +# echo $j & +# done +# done +# wait +# echo "all weakup" + + +# parallel render +for i in `seq 0 7` +do + for j in `seq 0 2` + do + sleep 1 & + CUDA_VISIBLE_DEVICES=$i /apdcephfs/share_1227775/shingxchen/libs/blender_bpy/blender-2.93.2-linux-x64/blender --background --python render.py -- --dir=$1 --mode=$2 & + echo $i + done +done +wait +echo "all finished" diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..36c4e9d --- /dev/null +++ b/setup.py @@ -0,0 +1,20 @@ +from setuptools import setup, find_packages + +setup( + name="MotionGPT", + version="0.1.0", + author="Biao Jiang and Xin Chen", + author_email="jiangb22@m.fudan.edu.cn", + description="MotionGPT: Human motion as a foreign language.", + packages=find_packages(exclude=("configs", "deps")), + python_requires=">=3.8", + install_requires=[ + "torch", + "numpy", + "tqdm", + ], +) diff --git a/test.py b/test.py new file mode 100644 index 0000000..bb0c5ea --- /dev/null +++ b/test.py @@ -0,0 +1,142 @@ +import json +import os +import numpy as np +import pytorch_lightning as pl +import torch +from pathlib import Path +from rich import get_console +from rich.table import Table +from omegaconf import OmegaConf +from mGPT.callback import build_callbacks
+from mGPT.config import parse_args
+from mGPT.data.build_data import build_data
+from mGPT.models.build_model import build_model
+from mGPT.utils.logger import create_logger
+from mGPT.utils.load_checkpoint import load_pretrained, load_pretrained_vae
+
+
+def print_table(title, metrics, logger=None):
+    table = Table(title=title)
+
+    table.add_column("Metrics", style="cyan", no_wrap=True)
+    table.add_column("Value", style="magenta")
+
+    for key, value in metrics.items():
+        table.add_row(key, str(value))
+
+    console = get_console()
+    console.print(table, justify="center")
+
+    logger.info(metrics) if logger else None
+
+
+def get_metric_statistics(values, replication_times):
+    mean = np.mean(values, axis=0)
+    std = np.std(values, axis=0)
+    conf_interval = 1.96 * std / np.sqrt(replication_times)
+    return mean, conf_interval
+
+
+def main():
+    # parse options
+    cfg = parse_args(phase="test")  # parse config file
+    cfg.FOLDER = cfg.TEST.FOLDER
+
+    # Logger
+    logger = create_logger(cfg, phase="test")
+    logger.info(OmegaConf.to_yaml(cfg))
+
+    # Output dir
+    model_name = cfg.model.target.split('.')[-2].lower()
+    output_dir = Path(
+        os.path.join(cfg.FOLDER, model_name, cfg.NAME, "samples_" + cfg.TIME))
+    if cfg.TEST.SAVE_PREDICTIONS:
+        output_dir.mkdir(parents=True, exist_ok=True)
+        logger.info(f"Saving predictions to {str(output_dir)}")
+
+    # Seed
+    pl.seed_everything(cfg.SEED_VALUE)
+
+    # Environment Variables
+    os.environ["TOKENIZERS_PARALLELISM"] = "false"
+
+    # Callbacks
+    callbacks = build_callbacks(cfg, logger=logger, phase="test")
+    logger.info("Callbacks initialized")
+
+    # Dataset
+    datamodule = build_data(cfg)
+    logger.info("datasets module {} initialized".format("".join(
+        cfg.DATASET.target.split('.')[-2])))
+
+    # Model
+    model = build_model(cfg, datamodule)
+    logger.info("model {} loaded".format(cfg.model.target))
+
+    # Lightning Trainer
+    trainer = pl.Trainer(
+        benchmark=False,
+        max_epochs=cfg.TRAIN.END_EPOCH,
+        accelerator=cfg.ACCELERATOR,
+        devices=list(range(len(cfg.DEVICE))),
+        default_root_dir=cfg.FOLDER_EXP,
+        reload_dataloaders_every_n_epochs=1,
+        deterministic=False,
+        detect_anomaly=False,
+        enable_progress_bar=True,
+        logger=None,
+        callbacks=callbacks,
+    )
+
+    # Strict load vae model
+    if cfg.TRAIN.PRETRAINED_VAE:
+        load_pretrained_vae(cfg, model, logger)
+
+    # loading state dict
+    if cfg.TEST.CHECKPOINTS:
+        load_pretrained(cfg, model, logger, phase="test")
+    else:
+        logger.warning("No checkpoints provided!!!")
+
+    # Calculate metrics
+    all_metrics = {}
+    replication_times = cfg.TEST.REPLICATION_TIMES
+
+    for i in range(replication_times):
+        metrics_type = ", ".join(cfg.METRIC.TYPE)
+        logger.info(f"Evaluating {metrics_type} - Replication {i}")
+        metrics = trainer.test(model, datamodule=datamodule)[0]
+        if "TM2TMetrics" in metrics_type and cfg.model.params.task == "t2m" and cfg.model.params.stage != 'vae':
+            # mm metrics
+            logger.info(f"Evaluating MultiModality - Replication {i}")
+            datamodule.mm_mode(True)
+            mm_metrics = trainer.test(model, datamodule=datamodule)[0]
+            # metrics.update(mm_metrics)
+            metrics.update(mm_metrics)
+            datamodule.mm_mode(False)
+        for key, item in metrics.items():
+            if key not in all_metrics:
+                all_metrics[key] = [item]
+            else:
+                all_metrics[key] += [item]
+
+    all_metrics_new = {}
+
+    for key, item in all_metrics.items():
+        mean, conf_interval = get_metric_statistics(np.array(item),
+                                                    replication_times)
+        all_metrics_new[key + "/mean"] = mean
+        all_metrics_new[key + "/conf_interval"] = conf_interval
+
+    print_table("Mean Metrics", all_metrics_new, logger=logger)
+    all_metrics_new.update(all_metrics)
+
+    # Save metrics to file
+    metric_file = output_dir.parent / f"metrics_{cfg.TIME}.json"
+    with open(metric_file, "w", encoding="utf-8") as f:
+        json.dump(all_metrics_new, f, indent=4)
+    logger.info(f"Testing done, the metrics are saved to {str(metric_file)}")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/train.py b/train.py
new file mode 100644
index 0000000..b50d20a
--- /dev/null
+++ b/train.py
@@ -0,0 +1,94 @@
+import os
+import glob
+import torch
+import pytorch_lightning as pl
+from omegaconf import OmegaConf
+from mGPT.callback import build_callbacks
+from mGPT.config import parse_args, instantiate_from_config
+from mGPT.data.build_data import build_data
+from mGPT.models.build_model import build_model
+from mGPT.utils.logger import create_logger
+from mGPT.utils.load_checkpoint import load_pretrained, load_pretrained_vae
+
+def main():
+    # Configs
+    cfg = parse_args(phase="train")  # parse config file
+
+    # Logger
+    logger = create_logger(cfg, phase="train")  # create logger
+    logger.info(OmegaConf.to_yaml(cfg))  # print config file
+
+    # Seed
+    pl.seed_everything(cfg.SEED_VALUE)
+
+    # Environment Variables
+    os.environ["TOKENIZERS_PARALLELISM"] = "false"
+
+    # Metric Logger
+    pl_loggers = []
+    for loggerName in cfg.LOGGER.TYPE:
+        if loggerName == 'tensorboard' or cfg.LOGGER.WANDB.params.project:
+            pl_logger = instantiate_from_config(
+                eval(f'cfg.LOGGER.{loggerName.upper()}'))
+            pl_loggers.append(pl_logger)
+
+    # Callbacks
+    callbacks = build_callbacks(cfg, logger=logger, phase='train')
+    logger.info("Callbacks initialized")
+
+    # Dataset
+    datamodule = build_data(cfg)
+    logger.info("datasets module {} initialized".format("".join(
+        cfg.DATASET.target.split('.')[-2])))
+
+    # Model
+    model = build_model(cfg, datamodule)
+    logger.info("model {} loaded".format(cfg.model.target))
+
+    # Lightning Trainer
+    trainer = pl.Trainer(
+        default_root_dir=cfg.FOLDER_EXP,
+        max_epochs=cfg.TRAIN.END_EPOCH,
+        # precision='16',
+        logger=pl_loggers,
+        callbacks=callbacks,
+        check_val_every_n_epoch=cfg.LOGGER.VAL_EVERY_STEPS,
+        accelerator=cfg.ACCELERATOR,
+        devices=cfg.DEVICE,
+        num_nodes=cfg.NUM_NODES,
+        strategy="ddp_find_unused_parameters_true"
+        if len(cfg.DEVICE) > 1 else 'auto',
+        benchmark=False,
+        deterministic=False,
+    )
+    logger.info("Trainer initialized")
+
+    # Strict load pretrained model
+    if cfg.TRAIN.PRETRAINED:
+        load_pretrained(cfg, model, logger)
+
+    # Strict load vae model
+    if cfg.TRAIN.PRETRAINED_VAE:
+        load_pretrained_vae(cfg, model, logger)
+
+    # Pytorch 2.0 Compile
+    # if torch.__version__ >= "2.0.0":
+    #     model = torch.compile(model, mode="reduce-overhead")
+    #     model = torch.compile(model)
+
+    # Lightning Fitting
+    if cfg.TRAIN.RESUME:
+        trainer.fit(model,
+                    datamodule=datamodule,
+                    ckpt_path=cfg.TRAIN.PRETRAINED)
+    else:
+        trainer.fit(model, datamodule=datamodule)
+
+    # Training ends
+    logger.info(
+        f"The outputs of this experiment are stored in {cfg.FOLDER_EXP}")
+    logger.info("Training ends!")
+
+
+if __name__ == "__main__":
+    main()
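
A minimal usage sketch for the entry points added above, assuming they are run from the repository root; the --cfg flag and the config paths below are illustrative assumptions, since the actual CLI options are defined by parse_args in mGPT/config.py:

    # tokenize motions with the pretrained VAE, then train and evaluate a stage
    python -m scripts.get_motion_code --cfg configs/config_h3d_stage2.yaml
    python train.py --cfg configs/config_h3d_stage3.yaml
    python test.py --cfg configs/config_h3d_stage3.yaml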

zPXahtPC|yHhfd1%Gc)u)#1r+2-Nm%Z@%3`lF*)I}pcJr1w+)b0&8$Q1f|xDhihyxy z+D3<7+Trs)shp{@gIdp9S%x*ZV9rdk3~<< zB9P`iLUQHJ#;*Qs30)=(;GqrTI;-t9MJCYYw(CW91>v$in_-M4!nXpdEtk+R3PTML z#+aURR0ul^6mA_$4mvpPUCaAl0V5E89UySkA)C1El#Z`Fx)(H0CPOVgu1wNjlm9{x z&8ilLJJV`+_OA*S8Rl3@JY&xntV?<&!-t7G33%`1UKm7jUQ@~wK}tqY{$Ox5um~uf zfoTHOOOF)W_!h_`SNFV+Xh%T&vQK?JRfgr((^{vZ(RFYnFEQ4F~!3{53~z z@Z`FTT&zx;8x+8cX@AV~&8nbV)J4YRDJJEMCYK1iiPOiWI;h@-@f@F=a{Ol~0%^yB z0}mKJZWx*yEb8Gen{Ct`XPANx8EIJAKPAR)29dVH?zwt@n6Mu0N(KTY8?mYjwgn73 zxU`~%9JCZfUn2=w^g(G;-#ze9R|vn@OBN#Dr?Un{0{ZP^rU7u{#ig!+GB3=Ofu(WF zGn@}bl31HYJ!>Y}2Q&x(1)qZ3noPKHWg!hS{ob6P6m{HvPHH}jqD!C61li~*_g|UM zXA4{AJrVIk>In3(EBxTg781O7>yr$yBtB@J?7NpZ#q>I-A>a5WaQ843ysL+@y^n@^ zS*>BH4z)#Bz7*jo#PsBkbg}l)-UvB8^^C_@69AIjKj8vC5g|c;$ClTJC=%(3UZ5y7 zH7O)t=iXE=xxDK?Ryu@h;apM1BxDJico z4GChXxqg(lzDU3BtDG%UG6RDw;P0?J zXTUU`x$yC*Tf3H7-v-Fy$0Zf9+33h`r&^vVCOiavXxw{9lkl@hj!`dZjSXq>$ir`s zOpCP(SG?SFIvnE7Cu+T0QhEa*#E7-=5PntxO|1w&Td)4|OlvmtpTw}MK-{G9x6J>3 zb%mIjS6_TxO9WxCQ%s-g)`NGEJFd6y?kb3F%uf6(TUd(RE}I*MYt7b;3t!@?oLp33g?kG0P{@aif}a3TJ>Bv%aJiyBp>2ZK!FuaTl#j!6o0_Uv5s9p^2Hki z$}t4ZYvGQw;v+~72T`2GFP534q-&!jwc1_%%ddJzw!-O+^v`@}c*b>3ATgFp4JMj0 zyI^=wMLM|F<+0dpf+z^i060GG$2fh@PCxOzv?~W~NF&-Az>^!GbYiDXfBJ*ZYWAYN z4?pz@^Dbn5hhMSB_DjM*MB(`2qm)~EKm?X(Vf0p2&vHrFIi_VtwhfMKK?6Fmi291* z1gR;HLPVET*k9Qzo}k0N>n&0Il-nxLABO#eTFN=kfGlpL*j=A6dKvv6!E&5C7=a(6 zisz28>fX-ytk=v&@D(t^#(O_HP7v_XM$b?f9T-wXMBqsBBA$4Fbd%I^n+Wo?cq4uu zWD^p>`yJ>l9poLpc6R{}#E8w$u#Ux0Y`qB~d&dUHfoFA6ntuP}oU#<*LdT`)>=C<{ z+dq;7nN z6_ZDRIdX-!4)PuT)i&fK;Nwwc)s#c*C-}&bXVOkMTrDw0|G+8u3(F}N(>2DM+FCoo zj*7wX{HY%534k8J#}pS7mmy!__3o*1;qvw=M%*s|7(jbO!GyCqyGqo>i-+{)*-xJ0 zx$5!{6Og3aOn+?XH-?NAybUc%u)^Cy?2dFDk)vH2>biKRS9{&JH_m9O83uFC+-fgz=@UUaBdL0w_Un<#SCNO}T7SD34 zzw-F+e=j>BBtf%`$!ZhAj4X+IU`^IHgyP*&0a*$mOuGM(T_*e zTt$NQKVNbT|q3#^e+fXujIp1WVmr{dAQoyw~DMj0?zJ=v|iB zm1^WV7bj2Dy7Ke*Ai;PUl+i6X6+hrPTKq3|9TDicd;eqtU4*EIdein#@uAH$u`uMS zp_hp_;m+doj!9)r)gx0eY!@zi3%&h6n9w-|1|elt#U}Uvhkk8Y1lU)Ksi|hC_HMSA z(x6C7vZ2T*F;&BIT!mevuJy=vi35D9(M#!WXgvQTiX0Ak57;@G39lxoy`yQB`G^pt zsAZj-u4c09wS)zXv@3#zG4`f>*V$Px6V0U42HF`4X69}R@5t0eNNDh=AYbnz^2V%u ze3Mn=E>>XVMRZB{%pYq)rZ4XIQ=~LlL|fFl%OCmtpEU8Am6R}H$IAve@BZ>q0#r3& zksk&0ZB+Vuqz2O5-V_K~@xgSM2tl%Fo1lbefgw^oCWw#H$_&tS6?l>2-aagh5MVHh zqTQ%!f>>)0^QFc~Zc;DQGj?f7D;c6LStjNoJOj5Aoj=8mVRn{qcf;+Sgf(Fc9)H2= zH4^7zlZOfJ#BG@(W<$TeZ`e-~)tyqN501sY*Ap;0GPv@yYM5D3$Qox>tT9{O1G)C4 z-vwhq`cpYZXp+_t4@>lpP*A6}I!9=K<*8Y1hL-ue5HUV}Gnp$o#&Mt13E#P0d_EPwdaMT%q8N4RcI4UzPmHDd&qLQYg({gIi=#P1(=LL^T_<5ZDuQE`}t1HWTdeuS!+rAAk%sYumo4+`9SL zAN5-5vK&b_5#Xo%=_!JReiVZ~5|#;k;b;)q01vqeF~q9$q+-klI;Z!e)nvE>%a+X9 zeU**{HlCC}2ivM2LuG%xwO`A-u*A!j(1U;%s1*oHd~MKB-o#&ndkp_*j(y!ihhq>e zM~IxUQ3tZ&QLXJ~;##b6b!6)cXT2F>l4tvxWe7!d;(+M7RbHpHCQAQVrGX;bXLf@+ zjL~&HZ^?#?g`B|2J&T?XJ^WUih}4j&%sqsA9@tFBl8z30lYb!&9~JnSQ;|E7@8Sd- zVhJPK>dDYzhkcTp7xWwdx7863I(97@z4sJCw!eyP9-+Rh>}B`$!`s(glijKQ~j}``;O*Ayp@;&$034A=A++e@$IHu`gPEr z+knkzge*jl%VL#432%R>J)g+Qf|(3hsPt^JHGQ9S>|(*>oGBj<<0^uT3A2)r{^}=S zGWfA7r#zYAXU(;EUROvvGwDuIl;!`(Tkc)b+iwecDFkG>s?pcYjMUOgwRe~(RaSFR z<)fgJc3^12LE6YWjRA{WjG`oOG9ehV!=kGrh7$V z-JNrhzIf6grFiW%W~+MyV{~41E@3&V{wDgKT%%gISbL!#V*zoc4*g`uzbHUtu!`V{ z2!v+cX$<<~Ib7L!a4^?cE$lu8Gp^+P%smKkcV!>C?_$~Zo%&1z0-vvw6gxFEhH zQQAVINHZz_@Rt75#%hQ52|B}Q0;R_w>5zj2Hr@_^`^dyv{n6ReK9?J}Nv56B%LC1@nf1k3SOEWEm)a+Q1FD0*+bS zfC!vutf)hZ9+uY%g~LlZvgHc=pxido(+?PpVd9VnJ)f zHOE(CJg;uJ>da(U!(n=WsLiA#cBjLzY7uOS|2fy}fOd7JvI_<=3;Is!Ifgk5m>2iC z{Uqn_Bh18*xHE;|>FGSEBkfw_?~p8Z#%`1qS`;nDfU~wWZTSc#GTvLz%=rEEUV9OD z|Mg=WcO@Jq=?Px!Po(FuD2_rc!TfPQ^3BKKLQrlaDPY!>^_7lijBR8c}`OoH08a^m{LoVs7) 
zkuLJ%3wItMRr0u|wr%BS5$xEa7>WZ-VW>AO?a@hG)Qnkb{P2<)*MHt0+yU_yy{ZZ9 z1tN`y`=D{)|HuvS1?Rh7B%T-41=xRGsg7kY%fuF{v6wND5VM5GQPxAOlVZTO{MN;yP-{Y;?k5$S_!TH*J8+-ASv`OcF`q003q+Ic8;8cc)RcPsnr3c;EVMu0Gwp^wfrtE@ zuba$5r#l+Jmg+0`EbJQ;0~39OO!_e@^F+fk24d%5jY(oJ$k)KZUFK71M+}jgF0J8H zO|%x2;pTrVS~;Hd<<*|1YfA`xYubFXB*XM9jXy}Qb(4DLeAo9>=l?*HsjZF#&)(Lf ze2&2R!AvB)18@W3e038SW4L@K!t7~R4{XAqn6xz4T1`^mQHAxIi1{B0cBe?$%HS-m zP(G$`5mk9!PXRvoy2V6fnsXh8@#Y+qv#QAB4{}n9+Sy)D!jL&^gsU$Zj_E#} z`KSr!mcrjcHzZtCU-;>6b=C)!N)}je@9dkI4Mon>Ij6U$gyJPgKoOpsLC3H>|3J@` z|Jhf#%R~Sm(yHbG2-6ZWz7k^5)0a}G?sGv*rb1&0mH+hfNm=KHUHHpNL(b_A#_zIQH^c29Br+CT={3m7ny! zH+iS?a4_3V5df>qos=?PKX7`-?erK7&w`26#?hb0lAYDpRH|J!*FDbhpBQfz8a0Wg z&gIvKrAw-}NyAr#8m|xQ<+(B#g<-=~lnsbAkCG1G+1D||7*s}oAfC@%c>k*oYLF+M zXU5!(K%anjT8}(m*#LGm!(~G+Pl5LUl#&=|y+T^6H-sYHa3Ks7qnvf}DDojlS9kZo zT98ZL4F4kjoF^-+D^Lo&aQ?hatSVA(9aB42O;Tm6i%C9aUeoJszp2Vf)=9xD`&ynEZQGAKd zg%M5NmmF9FPOLlls@H_0gY)FcwU6#~bYv0V;U2|5CWS={s>}onkaY%p;SuL0odMqO zyD!oZjUaB~j=YOgS8maHBSuiT0QbA7>MdQ{n_@yp+MAGhuZDuzskR!;j5omDDTGOV zbf{sVR?AD}e(d&D{$^MTFY68$Xszuh7yZ%)6G=(0GkwF2#lVlhyhQRZzV8oI>x^$9N5ZwKYW}#uqWImc6-wMl9kk|3^j=< z8sA>gbmRl~0e8q4b`L;|N zA-f0;`c>|9jMO}!7aUAT-=@$vw%A1iST$>s`Rlecw4?Av&%>)c+{K%-_~DmkR>zt9 zW6U~9LUBRodxz1^HSKJ?(g9wfJ6^a5W#a>j3!KHNxgdsuVi`PTfkY<>4pAQ!fP71T z5^0>Thr