diff --git a/AI/modelscope/1.10.0/8/Dockerfile b/AI/modelscope/1.10.0/8/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..1857a1d2c9bb3d8b8b018c2e26eaa30f1e174a79 --- /dev/null +++ b/AI/modelscope/1.10.0/8/Dockerfile @@ -0,0 +1,213 @@ +ARG BASE_IMAGE=registry.openanolis.cn/openanolis/anolisos:8 +FROM $BASE_IMAGE +ARG DEBIAN_FRONTEND=noninteractive +ENV TZ=Asia/Shanghai +ENV CONDA_DIR /opt/conda +ENV PATH="${CONDA_DIR}/bin:${PATH}" +ENV arch=x86_64 +SHELL ["/bin/bash", "-c"] +COPY docker/rcfiles /tmp/resources +COPY docker/jupyter_plugins /tmp/resources/jupyter_plugins +COPY git /usr/local/bin/ + +RUN yum install -y anolis-epao-release epel-release && \ + yum install -y nvidia-driver nvidia-driver-cuda gcc-c++ gcc which wget +# RUN curl -L -O https://developer.download.nvidia.com/compute/cuda/11.6.2/local_installers/cuda_11.6.2_510.47.03_linux.run && sh cuda_11.6.2_510.47.03_linux.run --silent --toolkit +RUN cd /tmp && wget https://zreloj.oss-cn-hangzhou.aliyuncs.com/cuda_11.8.0_520.61.05_linux.run && sh /tmp/cuda_11.8.0_520.61.05_linux.run --silent --toolkit && rm /tmp/cuda_11.8.0_520.61.05_linux.run + +RUN yum reinstall -y ca-certificates && \ + yum clean all && \ + yum install -y glibc-locale-source wget git strace gdb openmpi-devel curl \ + strace vim libSM tzdata langpacks-zh_CN \ + wqy-microhei-fonts libXext gcc gcc-c++ make cmake ninja-build git-lfs git + +RUN localedef -c -i zh_CN -f UTF-8 zh_CN.UTF-8 && \ + ln -fs /usr/share/zoneinfo/Asia/Shanghai /etc/localtime && \ + yum clean all + +ENV LANG=zh_CN.UTF-8 LANGUAGE=zh_CN.UTF-8 LC_ALL=zh_CN.UTF-8 + +#install and config python +ARG PYTHON_VERSION=3.8.18 +# Miniconda3-py37_23.1.0-1-Linux-x86_64.sh is last python3.7 version +RUN if [ "$PYTHON_VERSION" = "3.7.13" ] ; then \ + wget --quiet https://mirrors.aliyun.com/anaconda/miniconda/Miniconda3-py37_23.1.0-1-Linux-x86_64.sh -O ./miniconda.sh && \ + /bin/bash miniconda.sh -b -p /opt/conda && \ + rm -f miniconda.sh && \ + ln -s 
/opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \ + echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \ + cp /tmp/resources/conda.tuna ~/.condarc && \ + source /root/.bashrc && \ + conda install --yes python==${PYTHON_VERSION} && \ + pip config set global.index-url https://mirrors.aliyun.com/pypi/simple && \ + pip config set install.trusted-host mirrors.aliyun.com;\ +else \ + wget --quiet https://mirrors.aliyun.com/anaconda/miniconda/Miniconda3-latest-Linux-${arch}.sh -O ./miniconda.sh && \ + /bin/bash miniconda.sh -b -p /opt/conda && \ + rm -f miniconda.sh && \ + ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \ + echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \ + cp /tmp/resources/conda.tuna ~/.condarc && \ + source /root/.bashrc && \ + conda install --yes python==${PYTHON_VERSION} && \ + pip config set global.index-url https://mirrors.aliyun.com/pypi/simple && \ + pip config set install.trusted-host mirrors.aliyun.com;\ +fi + +ARG USE_GPU=True + +# install pytorch +ARG TORCH_VERSION=2.0.1+cu118 +ARG CUDATOOLKIT_VERSION=cu118 +RUN if [ "$USE_GPU" = "True" ] ; then \ + pip install --no-cache-dir torch==$TORCH_VERSION torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/$CUDATOOLKIT_VERSION; \ + else \ + pip install --no-cache-dir torch==$TORCH_VERSION torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cpu; \ + fi + +# install tensorflow +ARG TENSORFLOW_VERSION=1.15.5 +RUN if [ "$USE_GPU" = "True" ] ; then \ + if [ "$TENSORFLOW_VERSION" = "1.15.5" ] ; then \ + pip install --no-cache-dir tensorflow==$TENSORFLOW_VERSION -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html; \ + else \ + pip install --no-cache-dir tensorflow==$TENSORFLOW_VERSION; \ + fi; \ + else \ + # only python 3.7 has tensorflow 1.15.5 + if [ "$PYTHON_VERSION" = "3.7.13" ] ; then \ + pip install --no-cache-dir tensorflow==$TENSORFLOW_VERSION; \ + elif [ "$TENSORFLOW_VERSION" = "1.15.5" ] 
; then \ + pip install --no-cache-dir numpy==1.18.5 https://modelscope.oss-cn-beijing.aliyuncs.com/releases/dependencies/tensorflow-1.15.5-cp38-cp38-linux_x86_64.whl; \ + else \ + pip install --no-cache-dir tensorflow==$TENSORFLOW_VERSION; \ + fi; \ + fi + +# mmcv-full<=1.7.0 for mmdet3d compatible +RUN if [ "$USE_GPU" = "True" ] ; then \ + CUDA_HOME=/usr/local/cuda TORCH_CUDA_ARCH_LIST="5.0 5.2 6.0 6.1 7.0 7.5 8.0 8.6" MMCV_WITH_OPS=1 MAX_JOBS=8 FORCE_CUDA=1 pip install --no-cache-dir 'mmcv-full<=1.7.0' && pip cache purge; \ + else \ + MMCV_WITH_OPS=1 MAX_JOBS=8 pip install --no-cache-dir 'mmcv-full<=1.7.0' && pip cache purge; \ + fi + +# default shell bash +ENV SHELL=/bin/bash +# install special package +RUN if [ "$USE_GPU" = "True" ] ; then \ + pip install dgl -f https://data.dgl.ai/wheels/$CUDATOOLKIT_VERSION/repo.html; \ + else \ + pip install --no-cache-dir dgl==0.9.0 dglgo -f https://data.dgl.ai/wheels/repo.html; \ + fi + +# copy install scripts +COPY docker/scripts/install_unifold.sh docker/scripts/install_apex.sh /tmp/ + +# for uniford +RUN if [ "$USE_GPU" = "True" ] ; then \ + bash /tmp/install_unifold.sh; \ + else \ + echo 'cpu unsupport uniford'; \ + fi + +RUN if [ "$USE_GPU" = "True" ] ; then \ + export TORCH_CUDA_ARCH_LIST="6.0;6.1;7.0;7.5;8.0;8.6+PTX" && pip install --no-cache-dir git+https://github.com/gxd1994/Pointnet2.PyTorch.git@master#subdirectory=pointnet2; \ + else \ + echo 'cpu unsupport Pointnet2'; \ + fi + +# 3d supports +COPY docker/scripts/install_colmap.sh /tmp/ +RUN if [ "$USE_GPU" = "True" ] ; then \ + bash /tmp/install_colmap.sh; \ + else \ + echo 'cpu unsupport colmap'; \ + fi + +COPY docker/scripts/install_tiny_cuda_nn.sh /tmp/ +RUN if [ "$USE_GPU" = "True" ] ; then \ + bash /tmp/install_tiny_cuda_nn.sh; \ + else \ + echo 'cpu unsupport tiny_cudann'; \ + fi + +COPY docker/scripts/install_pytorch3d_nvdiffrast.sh /tmp/ +RUN if [ "$USE_GPU" = "True" ] ; then \ + bash /tmp/install_pytorch3d_nvdiffrast.sh; \ + else \ + echo 'cpu unsupport 
pytorch3d nvdiffrast'; \ + fi +# end of 3D +# install apex after deepspeed +COPY docker/scripts/install_apex.sh /tmp/ +RUN if [ "$USE_GPU" = "True" ] ; then \ + bash /tmp/install_apex.sh; \ + else \ + echo 'cpu unsupport apex'; \ + fi + +# ====================================================================== + +RUN yum install -y iputils net-tools iproute && \ + yum clean all +# install modelscope +COPY requirements /var/modelscope +RUN pip install --no-cache-dir --upgrade pip && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/framework.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/audio.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/cv.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/multi-modal.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/nlp.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/science.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip install --no-cache-dir torch==$TORCH_VERSION -r /var/modelscope/tests.txt -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html && \ + pip cache purge + +# install jupyter plugin +RUN mkdir -p /root/.local/share/jupyter/labextensions/ && \ + cp -r /tmp/resources/jupyter_plugins/* /root/.local/share/jupyter/labextensions/ + +COPY docker/scripts/modelscope_env_init.sh /usr/local/bin/ms_env_init.sh +# python3.8 pip install git+https://github.com/jin-s13/xtcocoapi.git@v1.13 +# pip install git+https://github.com/gatagat/lap.git@v0.4.0 +RUN pip install 
--no-cache-dir text2sql_lgesql==1.3.0 \ + git+https://github.com/jin-s13/xtcocoapi.git@v1.13 \ + git+https://github.com/gatagat/lap.git@v0.4.0 \ + detectron2==0.3 -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html --force --no-deps + +RUN conda install -y mpi4py +RUN pip install --no-cache-dir torch==$TORCH_VERSION paint_ldm \ + 'mmcls>=0.21.0' 'mmdet>=2.25.0' 'decord>=0.6.0' pai-easycv ms_swift \ + ipykernel fasttext fairseq deepspeed -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html + +ARG USE_GPU +# for cpu install cpu version faiss, faiss depends on blas lib, we install libopenblas TODO rename gpu or cpu version faiss +RUN if [ "$USE_GPU" = "True" ] ; then \ + pip install --no-cache-dir funtextprocessing kwsbp==0.0.6 faiss==1.7.2 safetensors typeguard==2.13.3 scikit-learn librosa==0.9.2 funasr -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html; \ + else \ + pip install --no-cache-dir funtextprocessing kwsbp==0.0.6 https://modelscope.oss-cn-beijing.aliyuncs.com/releases/dependencies/faiss-1.7.2-py37-none-linux_x86_64.whl safetensors typeguard==2.13.3 scikit-learn librosa==0.9.2 funasr -f https://modelscope.oss-cn-beijing.aliyuncs.com/releases/repo.html; \ + fi + +RUN pip install --no-cache-dir git+https://github.com/wenet-e2e/wenet.git adaseq --no-deps +COPY examples /modelscope/examples + +# # for pai-easycv setup compatiblity issue +ENV SETUPTOOLS_USE_DISTUTILS=stdlib + +RUN if [ "$USE_GPU" = "True" ] ; then \ + CUDA_HOME=/usr/local/cuda TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0 7.5 8.0 8.6" pip install --no-cache-dir 'git+https://github.com/facebookresearch/detectron2.git'; \ + else \ + echo 'cpu unsupport detectron2'; \ + fi + +# torchmetrics==0.11.4 for ofa +RUN pip install --no-cache-dir jupyterlab torchmetrics==0.11.4 tiktoken transformers_stream_generator 'protobuf<=3.20.0' bitsandbytes basicsr +COPY docker/scripts/install_flash_attension.sh /tmp/install_flash_attension.sh +RUN if [ "$USE_GPU" = "True" ] ; 
then \ + bash /tmp/install_flash_attension.sh; \ + else \ + echo 'cpu unsupport flash attention'; \ + fi + +RUN yum clean all + +ENTRYPOINT ["/bin/bash"] diff --git a/AI/modelscope/1.10.0/8/README.md b/AI/modelscope/1.10.0/8/README.md new file mode 100644 index 0000000000000000000000000000000000000000..84149b24324a7656f22d8e029590fc90c67c5b68 --- /dev/null +++ b/AI/modelscope/1.10.0/8/README.md @@ -0,0 +1,3 @@ +# Modelscope Dockerfile based on Anolis OS 8 + +Modified from (https://github.com/modelscope/modelscope/tree/master/docker). The original docker image is based on Ubuntu 20.04, this modification change the base to Anolis OS 8. \ No newline at end of file diff --git a/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/package.json b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/package.json new file mode 100644 index 0000000000000000000000000000000000000000..d2e0d0db1d311a764112f6b0946a7795141ce327 --- /dev/null +++ b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/package.json @@ -0,0 +1,99 @@ +{ + "name": "jupyterlab_active_log", + "version": "0.1.0", + "description": "A JupyterLab extension.", + "keywords": [ + "jupyter", + "jupyterlab", + "jupyterlab-extension" + ], + "homepage": "https://github.com/github_username/jupyterlab_active_log", + "bugs": { + "url": "https://github.com/github_username/jupyterlab_active_log/issues" + }, + "license": "BSD-3-Clause", + "files": [ + "lib/**/*.{d.ts,eot,gif,html,jpg,js,js.map,json,png,svg,woff2,ttf}", + "style/**/*.{css,js,eot,gif,html,jpg,json,png,svg,woff2,ttf}" + ], + "main": "lib/index.js", + "types": "lib/index.d.ts", + "style": "style/index.css", + "repository": { + "type": "git", + "url": "https://github.com/github_username/jupyterlab_active_log.git" + }, + "scripts": { + "build": "jlpm build:lib && jlpm build:labextension:dev", + "build:prod": "jlpm clean && jlpm build:lib && jlpm build:labextension", + "build:labextension": "jupyter labextension 
build .", + "build:labextension:dev": "jupyter labextension build --development True .", + "build:lib": "tsc", + "clean": "jlpm clean:lib", + "clean:lib": "rimraf lib tsconfig.tsbuildinfo", + "clean:lintcache": "rimraf .eslintcache .stylelintcache", + "clean:labextension": "rimraf jupyterlab_active_log/labextension", + "clean:all": "jlpm clean:lib && jlpm clean:labextension && jlpm clean:lintcache", + "eslint": "jlpm eslint:check --fix", + "eslint:check": "eslint . --cache --ext .ts,.tsx", + "install:extension": "jlpm build", + "lint": "jlpm stylelint && jlpm prettier && jlpm eslint", + "lint:check": "jlpm stylelint:check && jlpm prettier:check && jlpm eslint:check", + "prettier": "jlpm prettier:base --write --list-different", + "prettier:base": "prettier \"**/*{.ts,.tsx,.js,.jsx,.css,.json,.md}\"", + "prettier:check": "jlpm prettier:base --check", + "stylelint": "jlpm stylelint:check --fix", + "stylelint:check": "stylelint --cache \"style/**/*.css\"", + "watch": "run-p watch:src watch:labextension", + "watch:src": "tsc -w", + "watch:labextension": "jupyter labextension watch ." 
+ }, + "dependencies": { + "@jupyterlab/application": "^3.1.0" + }, + "devDependencies": { + "@jupyterlab/builder": "^3.1.0", + "@typescript-eslint/eslint-plugin": "^4.8.1", + "@typescript-eslint/parser": "^4.8.1", + "eslint": "^7.14.0", + "eslint-config-prettier": "^6.15.0", + "eslint-plugin-prettier": "^3.1.4", + "npm-run-all": "^4.1.5", + "prettier": "^2.1.1", + "rimraf": "^3.0.2", + "stylelint": "^14.3.0", + "stylelint-config-prettier": "^9.0.3", + "stylelint-config-recommended": "^6.0.0", + "stylelint-config-standard": "~24.0.0", + "stylelint-prettier": "^2.0.0", + "typescript": "~4.1.3" + }, + "sideEffects": [ + "style/*.css", + "style/index.js" + ], + "styleModule": "style/index.js", + "publishConfig": { + "access": "public" + }, + "jupyterlab": { + "extension": true, + "outputDir": "jupyterlab_active_log/labextension", + "_build": { + "load": "static/remoteEntry.eb3177c3791d7658cc12.js", + "extension": "./extension", + "style": "./style" + } + }, + "jupyter-releaser": { + "hooks": { + "before-build-npm": [ + "python -m pip install jupyterlab~=3.1", + "jlpm" + ], + "before-build-python": [ + "jlpm clean:all" + ] + } + } +} diff --git a/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/568.a92ae44b87625ab09aed.js b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/568.a92ae44b87625ab09aed.js new file mode 100644 index 0000000000000000000000000000000000000000..b70adee6b1b1a49d92f74c567f2debfae3fc52bb --- /dev/null +++ b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/568.a92ae44b87625ab09aed.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkjupyterlab_active_log=self.webpackChunkjupyterlab_active_log||[]).push([[568],{568:(t,e,a)=>{a.r(e),a.d(e,{default:()=>i});const i={id:"jupyterlab_active_log:plugin",autoStart:!0,activate:t=>{console.log("JupyterLab extension jupyterlab_active_log is 
activated!"),window.consts=Object.assign(Object.assign({},window.consts),{recordUrl:"https://modelscope.cn/api/v1/notebooks/activelog",timerDuration:1e4,timerParams:function(){const t=location.pathname.split("/");let e;return t.length>=2&&(e=t[1]),{site:"dsw",id:e,ext:{pathname:location.pathname}}}});const e=document.body,a=e.insertBefore(document.createElement("script"),e.firstChild);a.setAttribute("id","timer-sdk"),a.setAttribute("src","https://g.alicdn.com/alifanyi/translate-js-sdk/timer.js ")}}}}]); diff --git a/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/747.63b4c3d22bfe458b352b.js b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/747.63b4c3d22bfe458b352b.js new file mode 100644 index 0000000000000000000000000000000000000000..2129fc3d0d713ceedccce777ecccec9d94eb15a2 --- /dev/null +++ b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/747.63b4c3d22bfe458b352b.js @@ -0,0 +1 @@ +"use strict";(self.webpackChunkjupyterlab_active_log=self.webpackChunkjupyterlab_active_log||[]).push([[747],{150:(e,n,t)=>{t.d(n,{Z:()=>a});var r=t(645),o=t.n(r)()((function(e){return e[1]}));o.push([e.id,"/*\n See the JupyterLab Developer Guide for useful CSS Patterns:\n\n https://jupyterlab.readthedocs.io/en/stable/developer/css.html\n*/\n",""]);const a=o},645:e=>{e.exports=function(e){var n=[];return n.toString=function(){return this.map((function(n){var t=e(n);return n[2]?"@media ".concat(n[2]," {").concat(t,"}"):t})).join("")},n.i=function(e,t,r){"string"==typeof e&&(e=[[null,e,""]]);var o={};if(r)for(var a=0;a{var r,o=function(){var e={};return function(n){if(void 0===e[n]){var t=document.querySelector(n);if(window.HTMLIFrameElement&&t instanceof window.HTMLIFrameElement)try{t=t.contentDocument.head}catch(e){t=null}e[n]=t}return e[n]}}(),a=[];function i(e){for(var n=-1,t=0;t{t.r(n);var r=t(379),o=t.n(r),a=t(150);o()(a.Z,{insert:"head",singleton:!1}),a.Z.locals}}]); diff --git 
a/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/remoteEntry.eb3177c3791d7658cc12.js b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/remoteEntry.eb3177c3791d7658cc12.js new file mode 100644 index 0000000000000000000000000000000000000000..ec49e97342b7384e18bb484a5f67f524545b4c86 --- /dev/null +++ b/AI/modelscope/1.10.0/8/docker/jupyter_plugins/jupyterlab_active_log/static/remoteEntry.eb3177c3791d7658cc12.js @@ -0,0 +1 @@ +var _JUPYTERLAB;(()=>{"use strict";var e,r,t={293:(e,r,t)=>{var o={"./index":()=>t.e(568).then((()=>()=>t(568))),"./extension":()=>t.e(568).then((()=>()=>t(568))),"./style":()=>t.e(747).then((()=>()=>t(747)))},a=(e,r)=>(t.R=r,r=t.o(o,e)?o[e]():Promise.resolve().then((()=>{throw new Error('Module "'+e+'" does not exist in container.')})),t.R=void 0,r),n=(e,r)=>{if(t.S){var o="default",a=t.S[o];if(a&&a!==e)throw new Error("Container initialization failed as it has already been initialized with a different share scope");return t.S[o]=e,t.I(o,r)}};t.d(r,{get:()=>a,init:()=>n})}},o={};function a(e){var r=o[e];if(void 0!==r)return r.exports;var n=o[e]={id:e,exports:{}};return t[e](n,n.exports,a),n.exports}a.m=t,a.c=o,a.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return a.d(r,{a:r}),r},a.d=(e,r)=>{for(var t in r)a.o(r,t)&&!a.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},a.f={},a.e=e=>Promise.all(Object.keys(a.f).reduce(((r,t)=>(a.f[t](e,r),r)),[])),a.u=e=>e+"."+{568:"a92ae44b87625ab09aed",747:"63b4c3d22bfe458b352b"}[e]+".js?v="+{568:"a92ae44b87625ab09aed",747:"63b4c3d22bfe458b352b"}[e],a.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),a.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),e={},r="jupyterlab_active_log:",a.l=(t,o,n,i)=>{if(e[t])e[t].push(o);else{var l,u;if(void 0!==n)for(var 
c=document.getElementsByTagName("script"),d=0;d{l.onerror=l.onload=null,clearTimeout(f);var a=e[t];if(delete e[t],l.parentNode&&l.parentNode.removeChild(l),a&&a.forEach((e=>e(o))),r)return r(o)},f=setTimeout(p.bind(null,void 0,{type:"timeout",target:l}),12e4);l.onerror=p.bind(null,l.onerror),l.onload=p.bind(null,l.onload),u&&document.head.appendChild(l)}},a.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{a.S={};var e={},r={};a.I=(t,o)=>{o||(o=[]);var n=r[t];if(n||(n=r[t]={}),!(o.indexOf(n)>=0)){if(o.push(n),e[t])return e[t];a.o(a.S,t)||(a.S[t]={});var i=a.S[t],l="jupyterlab_active_log",u=[];return"default"===t&&((e,r,t,o)=>{var n=i[e]=i[e]||{},u=n[r];(!u||!u.loaded&&(1!=!u.eager?o:l>u.from))&&(n[r]={get:()=>a.e(568).then((()=>()=>a(568))),from:l,eager:!1})})("jupyterlab_active_log","0.1.0"),e[t]=u.length?Promise.all(u).then((()=>e[t]=1)):1}}})(),(()=>{var e;a.g.importScripts&&(e=a.g.location+"");var r=a.g.document;if(!e&&r&&(r.currentScript&&(e=r.currentScript.src),!e)){var t=r.getElementsByTagName("script");t.length&&(e=t[t.length-1].src)}if(!e)throw new Error("Automatic publicPath is not supported in this browser");e=e.replace(/#.*$/,"").replace(/\?.*$/,"").replace(/\/[^\/]+$/,"/"),a.p=e})(),(()=>{var e={346:0};a.f.j=(r,t)=>{var o=a.o(e,r)?e[r]:void 0;if(0!==o)if(o)t.push(o[2]);else{var n=new Promise(((t,a)=>o=e[r]=[t,a]));t.push(o[2]=n);var i=a.p+a.u(r),l=new Error;a.l(i,(t=>{if(a.o(e,r)&&(0!==(o=e[r])&&(e[r]=void 0),o)){var n=t&&("load"===t.type?"missing":t.type),i=t&&t.target&&t.target.src;l.message="Loading chunk "+r+" failed.\n("+n+": "+i+")",l.name="ChunkLoadError",l.type=n,l.request=i,o[1](l)}}),"chunk-"+r,r)}};var r=(r,t)=>{var o,n,[i,l,u]=t,c=0;if(i.some((r=>0!==e[r]))){for(o in l)a.o(l,o)&&(a.m[o]=l[o]);u&&u(a)}for(r&&r(t);c/dev/null 2>&1 || { echo 'git not installed' ; exit 0; } + +if [ -z "$MODELSCOPE_USERNAME" ] || [ -z 
"$MODELSCOPE_GITLAB_ACCESS_TOKEN" ]; then + : +else + git config --global credential.helper store + echo "http://${MODELSCOPE_USERNAME}:${MODELSCOPE_GITLAB_ACCESS_TOKEN}@www.modelscope.cn">~/.git-credentials + echo "https://${MODELSCOPE_USERNAME}:${MODELSCOPE_GITLAB_ACCESS_TOKEN}@www.modelscope.cn">>~/.git-credentials + chmod go-rwx ~/.git-credentials +fi +if [ -z "$MODELSCOPE_USERNAME" ] || [ -z "$MODELSCOPE_USEREMAIL" ]; then + : +else + git config --system user.name ${MODELSCOPE_USERNAME} + git config --system user.email ${MODELSCOPE_USEREMAIL} +fi +if [ -z "$MODELSCOPE_ENVIRONMENT" ]; then + : +else + git config --system --add http.http://www.modelscope.cn.extraHeader "Modelscope_Environment: $MODELSCOPE_ENVIRONMENT" + git config --system --add http.https://www.modelscope.cn.extraHeader "Modelscope_Environment: $MODELSCOPE_ENVIRONMENT" +fi + +if [ -z "$MODELSCOPE_USERNAME" ]; then + : +else + git config --system --add http.http://www.modelscope.cn.extraHeader "Modelscope_User: $MODELSCOPE_USERNAME" + git config --system --add http.https://www.modelscope.cn.extraHeader "Modelscope_User: $MODELSCOPE_USERNAME" +fi + +if [ -z "$MODELSCOPE_USERID" ]; then + : +else + git config --system --add http.http://www.modelscope.cn.extraHeader "Modelscope_Userid: $MODELSCOPE_USERID" + git config --system --add http.https://www.modelscope.cn.extraHeader "Modelscope_Userid: $MODELSCOPE_USERID" +fi + +if [ -z "$MODELSCOPE_HAVANAID" ]; then + : +else + git config --system --add http.http://www.modelscope.cn.extraHeader "Modelscope_Havanaid: $MODELSCOPE_HAVANAID" + git config --system --add http.https://www.modelscope.cn.extraHeader "Modelscope_Havanaid: $MODELSCOPE_HAVANAID" +fi + +pip config set global.index-url https://mirrors.cloud.aliyuncs.com/pypi/simple +pip config set install.trusted-host mirrors.cloud.aliyuncs.com diff --git a/AI/modelscope/1.10.0/8/docker/scripts/torch111_torch3d_nvdiffrast.sh b/AI/modelscope/1.10.0/8/docker/scripts/torch111_torch3d_nvdiffrast.sh new 
file mode 100644 index 0000000000000000000000000000000000000000..1a97a6b46595926694935d9cac99c3d151d0856c --- /dev/null +++ b/AI/modelscope/1.10.0/8/docker/scripts/torch111_torch3d_nvdiffrast.sh @@ -0,0 +1,16 @@ +export CMAKE_BUILD_PARALLEL_LEVEL=36 && export MAX_JOBS=4 && export CMAKE_CUDA_ARCHITECTURES="50;52;60;61;70;75;80;86" \ + && pip install --no-cache-dir fvcore iopath \ + && curl -LO https://cr-images-pub.oss-cn-hangzhou.aliyuncs.com/root/modelscope/1.10.0.tar.gz \ + && tar xzf 1.10.0.tar.gz \ + && export CUB_HOME=$PWD/cub-1.10.0 \ + && pip install "git+https://github.com/facebookresearch/pytorch3d.git@stable" \ + && rm -fr 1.10.0.tar.gz cub-1.10.0 \ + && apt-get update \ + && apt-get install -y pkg-config libglvnd0 libgl1 libglx0 libegl1 libgles2 libglvnd-dev libgl1-mesa-dev libegl1-mesa-dev libgles2-mesa-dev -y \ + && (curl https://cr-images-pub.oss-cn-hangzhou.aliyuncs.com/root/modelscope/nvdiffrast.tar.gz|tar -xz) \ + && cd nvdiffrast \ + && pip install --no-cache-dir . \ + && cd .. \ + && rm -rf nvdiffrast + +# curl -LO https://github.com/NVIDIA/cub/archive/1.10.0.tar.gz \ No newline at end of file diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README.md b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README.md new file mode 100644 index 0000000000000000000000000000000000000000..056b575e203ac1fa6a665757d920bf65d8ce4fc4 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README.md @@ -0,0 +1,55 @@ +# Oh No! I'm Surrounded by LLMs! (LLMRiddles) + +## Project Introduction +"Oh No! I'm Surrounded by LLMs!" is an intellectual challenge game. We use LLM to automatically generate corresponding game code based on existing Large Language Model (LLM) dialogue Gradio application codes within the ModelScope community, combined with preset questions from the Zhihu article ["How to Accomplish Tasks with 'Impossible'"](https://zhuanlan.zhihu.com/p/665393240), creating a unique gameplay experience. 
In this game, players are required to cleverly construct questions that challenge the LLM to provide answers that meet specific conditions. + +## News +November 9, 2023 - Added two new questions, and introduced the chatglm-turbo model 🔥🔥🔥 +November 7, 2023 - Released the initial demo version 🔥 +November 8, 2023 - Segregated level modules and LLM, enabling independent integration of levels and LLM. Pull Requests welcome 🔥 🔥 + +## Getting Started + +### Online Experience + +[LLMRiddles](https://modelscope.cn/studios/LLMRiddles/LLMRiddles/summary) + +### Local Execution +To start the game, please follow the steps below: + +1. Clone the project code: + ``` + git clone https://github.com/modelscope/modelscope.git + ``` +2. Navigate to the `examples/apps/llm_riddles` directory. +3. Install the required Python dependencies with `pip install -r requirements.txt`. +4. Go to [DashScope](https://dashscope.aliyun.com/), activate the service, obtain a token, and configure the environment variable `DASHSCOPE_API_KEY=your API-KEY`. +5. Run the launch command `python app.py`. + +## Roadmap +- [x] Initial version source code and space experience ready. +- [x] Support for custom questions and validation logic integration. +- [ ] Expand to 9 major levels, each with 9 questions. +- [ ] Support for more open-source models. +- [ ] Support for switching between cloud API and local inference. + +## Contribution Guide +We welcome everyone to contribute to "Oh No! I'm Surrounded by LLMs!", including proposing more fun questions, fixing validator corner cases, and providing more gameplay. Please follow the steps below: + +1. Visit the project address [ModelScope](https://github.com/modelscope/modelscope) and fork the project. +2. Create your feature branch in your local environment (`git checkout -b feature/AmazingFeature`). +3. Commit your changes (`git commit -m 'Add some AmazingFeature'`). +4. Push your changes to the branch (`git push origin feature/AmazingFeature`). +5. 
Initiate a Pull Request in the original project. + +## Community Contributors +We sincerely thank all community members who have contributed to this project, especially: + +- Idea from: [haoqiangfan](https://www.zhihu.com/people/haoqiang-fan) +- Most of the code is auto-generated by LLM + +## Support +If you encounter any problems or need assistance during the game, please submit your issues on the project's [Issues page](https://github.com/modelscope/modelscope/issues). + +## Copyright and License +This project is licensed under the APACHE License. Please see the [LICENSE](https://github.com/modelscope/modelscope/blob/main/LICENSE) file in the project for more information. diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README_CN.md b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..0f85734c0e561ca8715d496d5fb2133220651fa0 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/README_CN.md @@ -0,0 +1,56 @@ +# 完蛋!我被LLM包围了!(LLMRiddles) + +## 项目简介 +《完蛋!我被LLM包围了!》是一款智力挑战游戏。该项目利用LLM代码生成, 基于ModelScope社区内现有的LLM对话Gradio应用程序代码,结合知乎文章[《如何用“不可能”完成任务》](https://zhuanlan.zhihu.com/p/665393240)中的预设问题,自动生成了对应的游戏代码,创造了一个独特的游戏体验。在这个游戏中,玩家需要巧妙构造问题,挑战LLM给出满足特定条件的回答。 + + +## 更新 +2023.11.9 新增两道题目, 新增chatglm-turbo模型🔥 🔥🔥 +2023.11.7 发布初版demo🔥 +2023.11.8 拆分关卡模块和llm,支持关卡独立接入,llm独立接入, 欢迎PR 🔥 🔥 + +## 开始游戏 + +### 在线体验 + +[LLMRiddles](https://modelscope.cn/studios/LLMRiddles/LLMRiddles/summary) + +### 本地运行 +要开始游戏,请按照以下步骤操作: + +1. 克隆项目代码: + ``` + git clone https://github.com/modelscope/modelscope.git + ``` +2. 进入到`examples/apps/llm_riddles`目录。 +3. 安装所需的Python依赖`pip install -r requirements.txt`。 +4. 前往[DashScope](https://dashscope.aliyun.com/)开通服务,获取token,配置环境变量`DASHSCOPE_API_KEY=你的API-KEY` +5. 执行启动命令`python app.py`. 
+ +## RoadMap +- [x] 初版本源码和创空间体验ready +- [x] 支持自定义问题和验证逻辑接入 +- [ ] 扩充到9个大关卡,每个关卡9个问题 +- [ ] 支持更多开源模型 +- [ ] 支持云端API和本地推理切换 + +## 贡献指南 +我们欢迎大家为《完蛋!我被LLM包围了!》做出贡献,包括提出更多好玩的问题,修复validator的corner case,以及提供更多的玩法。请按以下步骤操作: + +1. 访问项目地址 [ModelScope](https://github.com/modelscope/modelscope) 并fork项目。 +2. 在你的本地环境中创建你的特性分支 (`git checkout -b feature/AmazingFeature`)。 +3. 提交你的改动 (`git commit -m 'Add some AmazingFeature'`)。 +4. 将你的改动推送到分支上 (`git push origin feature/AmazingFeature`)。 +5. 在原项目下发起一个Pull Request。 + +## 社区贡献者 +我们诚挚感谢所有对本项目做出贡献的社区成员,特别是: + +- idea来源: [haoqiangfan](https://www.zhihu.com/people/haoqiang-fan) +- 代码大部分来自于LLM自动生成 + +## 支持 +如果你在游戏过程中遇到任何问题或需要帮助,请通过项目的[Issues页面](https://github.com/modelscope/modelscope/issues)提交你的问题。 + +## 版权和许可 +本项目采用APACHE License许可证。请查看项目中的[LICENSE](https://github.com/modelscope/modelscope/blob/main/LICENSE)文件了解更多信息。 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/app.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/app.py new file mode 100644 index 0000000000000000000000000000000000000000..94432043cb8dbec1dbc4bc15112f5f6277c2f7d0 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/app.py @@ -0,0 +1,223 @@ +import functools +import inspect +import os +import random +import re + +import gradio as gr +from challenges.ch1 import challenge1 +from challenges.ch2 import challenge2 +from challenges.ch3 import challenge3 +from challenges.ch4 import challenge4 +from llm import create_model +from PIL import Image, ImageDraw, ImageFont + +model_cache = {} + +# 定义关卡信息和验证逻辑 +challenges = [ + challenge1, + challenge2, + challenge3, + challenge4, +] + +CONGRATS_STR = '所有挑战完成!👏🏻👏🏻👏🏻👏🏻👏🏻👏🏻' +CONGRATS_QUESTION = f'
{CONGRATS_STR}
\n\n
' + +SHARE_CHALLENGES_HINT = [ + '小试牛刀新手上路', '数字玩家已经上线', '巅峰对决,你就是提示词高手', '无人之境,胜利就在前方', '哇塞,我冲出了LLM的重围' +] + + +def get_problem(challenge_idx, problem_idx): + problems = challenges[challenge_idx]['problems'] + return problems[problem_idx] + + +def update_challenge_info(current_chapter_index, current_challenge_index): + return get_problem(current_chapter_index, + current_challenge_index)['description'] + + +def update_question_info(current_chapter_index, current_challenge_index): + + global challenges + current_chapter = challenges[current_chapter_index] + challenge = get_problem(current_chapter_index, current_challenge_index) + question_info = f"""\n
{current_chapter["name"]}""" \ + f"""
\n\n
{challenge["title"]}
""" + return question_info + + +def validate_challenge(response, input, state, generate_response): + if 'success' in state: + return CONGRATS_STR, CONGRATS_QUESTION, '' + assert 'current_chapter_index' in state, 'current_chapter_index not found in state' + assert 'current_challenge_index' in state, 'current_challenge_index not found in state' + current_chapter_index = state['current_chapter_index'] + current_challenge_index = state['current_challenge_index'] + # 获取当前章节 + current_chapter = challenges[current_chapter_index] + # 获取当前挑战 + challenge = current_chapter['problems'][current_challenge_index] + + validate_fn = challenge['validator'] + params = inspect.signature(validate_fn).parameters + if 'generate_response' in params: + valid_result = validate_fn(response, input, generate_response) + else: + valid_result = validate_fn(response, input) + + if valid_result: + challenge_result = '挑战成功!进入下一关。' + # 检查是否还有更多挑战在当前章节 + if current_challenge_index < len(current_chapter['problems']) - 1: + # 移动到当前章节的下一个挑战 + current_challenge_index += 1 + else: + # 如果当前章节的挑战已经完成,移动到下一个章节 + if current_chapter_index < len(challenges) - 1: + current_challenge_index = 0 + current_chapter_index += 1 + else: + state['success'] = True + challenge_result = '所有挑战完成!' 
+ + else: + challenge_result = '挑战失败,请再试一次。' + state['current_chapter_index'] = current_chapter_index + state['current_challenge_index'] = current_challenge_index + print('update state: ', state) + if 'success' in state: + return CONGRATS_STR, CONGRATS_QUESTION, '' + else: + return challenge_result, \ + update_question_info(current_chapter_index, current_challenge_index), \ + update_challenge_info(current_chapter_index, current_challenge_index) + + +def generate_response(input, model_name): + if model_name in model_cache: + model = model_cache[model_name] + else: + model = create_model(model_name) + model_cache[model_name] = model + + try: + return model(input) + except RuntimeError as e: + # if exception happens, print error in log and return empty str + print('error', e) + return '' + + +def on_submit(input, model_name, state): + # model_name = os.environ.get('MODEL', 'qwen-plus') + name_map = { + 'qwen-max': 'qwen-max', + 'qwen-plus': 'qwen-plus', + 'chatglm-turbo': 'chatglm_turbo', + } + gen_fn = functools.partial( + generate_response, model_name=name_map[model_name]) + response = gen_fn(input) + history = [(input, response)] + print(history) + challenge_result, question_info, challenge_info = validate_challenge( + response, input, state, gen_fn) + return challenge_result, history, question_info, challenge_info + + +def generate_share_image(state): + share_state = state['current_chapter_index'] + if share_state > 3: + share_state = 3 + if 'success' in state: + share_state = 4 # 全部通关为 4 + + img_pil = Image.open(f'assets/background{share_state}.png') + # 设置需要显示的字体 + fontpath = 'assets/font.ttf' + font = ImageFont.truetype(fontpath, 48) + draw = ImageDraw.Draw(img_pil) + # 绘制文字信息 + draw.text((70, 1000), + SHARE_CHALLENGES_HINT[share_state], + font=font, + fill=(255, 255, 255)) + if share_state == 4: + share_chapter_text = '顺利闯过了全部关卡' + else: + share_chapter_text = f"我顺利闯到第 {state['current_chapter_index']+1}-{state['current_challenge_index']+1} 关" + draw.text((70, 
1080), share_chapter_text, font=font, fill=(255, 255, 255)) + draw.text((70, 1160), '你也来挑战一下吧~', font=font, fill=(255, 255, 255)) + + return gr.Image.update(visible=True, value=img_pil) + + +def create_app(): + # Gradio界面构建 + block = gr.Blocks() + + with block as demo: + current_chapter_index = 0 + current_challenge_index = 0 + state = gr.State( + dict( + current_challenge_index=current_challenge_index, + current_chapter_index=current_chapter_index)) + + gr.Markdown("""
完蛋!我被LLM包围了!
""") + gr.Markdown("""欢迎来玩LLM Riddles复刻版:完蛋!我被LLM包围了! + + 你将通过本游戏对大型语言模型产生更深刻的理解。 + + 在本游戏中,你需要构造一个提给一个大型语言模型的问题,使得它回复的答案符合要求。""") + + model_selector = gr.Dropdown( + label='选择模型', + choices=['qwen-max', 'qwen-plus', 'chatglm-turbo'], + value='qwen-max') + question_info = gr.Markdown( + update_question_info(current_chapter_index, + current_challenge_index)) + challenge_info = gr.Textbox( + value=update_challenge_info(current_chapter_index, + current_challenge_index), + label='当前挑战', + interactive=False) + challenge_result = gr.Textbox(label='挑战结果', interactive=False) + chatbot = gr.Chatbot(label='llm', elem_classes='control-height') + message = gr.Textbox(lines=2, label='输入') + + with gr.Row(): + submit = gr.Button('🚀 发送') + shareBtn = gr.Button('💯 分享成绩') + + shareImg = gr.Image(label='分享成绩', visible=False, width=400) + + submit.click( + on_submit, + inputs=[message, model_selector, state], + outputs=[challenge_result, chatbot, question_info, challenge_info]) + shareBtn.click( + generate_share_image, inputs=[state], outputs=[shareImg]) + + gr.HTML(""" +
+ + Powered by + DashScope + + +
+ """) + + demo.queue(concurrency_count=10).launch(height=800, share=True) + + +if __name__ == '__main__': + create_app() diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background.png new file mode 100644 index 0000000000000000000000000000000000000000..9d0cb3c92add8a15e851822f579ce84bd616aa2d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8afcec15a87bcfaff327a5c9564a31ff1fe185a63cb286bd9772c8c68216768a +size 757003 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background0.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background0.png new file mode 100644 index 0000000000000000000000000000000000000000..1639428021f0ce0bf552213d80f07ec520c5aa8d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16afb18994ad0654b31117931aad2ee05863492e964e10f4c559556e29618320 +size 839643 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background1.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background1.png new file mode 100644 index 0000000000000000000000000000000000000000..9d0cb3c92add8a15e851822f579ce84bd616aa2d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8afcec15a87bcfaff327a5c9564a31ff1fe185a63cb286bd9772c8c68216768a +size 757003 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background2.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background2.png new file mode 100644 index 0000000000000000000000000000000000000000..adec77231116938f59d5feb8e7a395804a694d1d --- /dev/null +++ 
b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background2.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:966a013913042e1574ccbc299b1914272cb47df69a552bf1723b96b2d8902de3 +size 1114172 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background3.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background3.png new file mode 100644 index 0000000000000000000000000000000000000000..97c446d6a4fc1b3baf4ac816367e718c05aac6d5 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5253bbed99be55e6ac9080ea320df75c95592204696d6d41ba90f9905384fdca +size 1198295 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background4.png b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background4.png new file mode 100644 index 0000000000000000000000000000000000000000..fc612898cd06f44327887235f3490ba4ae94d5a5 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/background4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4cf462f8db7583843bc152ccfc87bb033b91880c98db9f83ba87fcca5d5d07f2 +size 1056053 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/font.ttf b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/font.ttf new file mode 100644 index 0000000000000000000000000000000000000000..7dc81344322824d84b87ce74edd6c7263f80b65b Binary files /dev/null and b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/assets/font.ttf differ diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/__init__.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch1.py 
b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch1.py new file mode 100644 index 0000000000000000000000000000000000000000..66cd679849e6c91adedaf5c1f9fb2fd099563a88 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch1.py @@ -0,0 +1,100 @@ +def validate_palindrome_invariance(origin_input, response, generate_response): + """ + 验证器函数,检查对于给定的问题,检验是否是回文串,且正着问和倒着问的模型回答是否一致。 + + :param origin_input: 原始问题 + :return: True 如果回答一致,否则 False + """ + + # 检查原始输入不是回文 + if origin_input == origin_input[::-1]: + return False + + # 将原始问题倒序并提交 + reversed_question = origin_input[::-1] + reversed_response = generate_response(reversed_question) + + # 比较两个回答是否一致 + return response.strip() == reversed_response.strip() + + +challenge1 = { + 'name': + '第一章 对话之趣', + 'problems': [ + { + 'title': '第1题 初来乍到', + 'description': '请你构造一个问题使模型的回答是一字不差的“1+1=3”(不需要引号)。', + 'validator': lambda response, input: response.strip() == '1+1=3' + }, + { + 'title': + '第2题 小试牛刀', + 'description': + '请输入三个字以内的问题,使模型的回答在30个字以上。', + 'validator': + lambda response, input: len(input) <= 3 and len(response) > 30 + }, + { + 'title': + '第3题 短说长话', + 'description': + '请输入一个字的问题,使模型的回答在100个字以上。', + 'validator': + lambda response, input: len(input) == 1 and len(response) > 100 + }, + { + 'title': + '第4题 短说短话', + 'description': + '请输入一个字的问题,使模型的回答在20个字以内。', + 'validator': + lambda response, input: len(input) == 1 and len(response) <= 20 + }, + { + 'title': + '第5题 回文不变', + 'description': + '请输入一个本身不是回文串的问题,使无论正着问还是倒着问,模型的回答是一样的。', + # 验证器将取决于模型的行为,这里只是一个占位符 + 'validator': + lambda response, input, generate_response: validate_palindrome_invariance( + input, response, generate_response) + }, + { + 'title': + '第6题 无中生狗', + 'description': + '请提一个不包含“狗”这个字的问题,但是回答中至少出现3次“狗”这个字。', + 'validator': + lambda response, input: '狗' not in input and response.count('狗') \ + >= 3 + }, + { + 'title': + '第7题 事半功倍', + 'description': + '请输入一个本身不是回文串的问题,使模型回答的长度刚好是问题长度的两倍。', + 'validator': + lambda 
response, input: input != input[::-1] and len(response.strip()) == 2 * len(input) + }, + { + 'title': + '第8题 产生疑惑', + 'description': + '请提出一个问题,不包含“不”“知”“道”三个字符,但要使模型的回答是一字不差的“不知道”', + 'validator': + lambda response, input: + all(char not in input for char in ['不', '知', '道']) and # noqa: W504 + response.strip() == '不知道' + }, + { + 'title': + '第9题 无中生有', + 'description': + '请提出一个问题,使模型只回复“永动机是真实存在的。”', + 'validator': + lambda response, input: response.strip() == '永动机是真实存在的。' + }, + ] +} diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch2.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch2.py new file mode 100644 index 0000000000000000000000000000000000000000..5c381de66c83a05b7832e2c26688881b95fd23d5 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch2.py @@ -0,0 +1,118 @@ +import re + +import sympy + + +# 辅助函数 - 检查是否为质数 +def is_prime(num): + return sympy.isprime(num) + + +# 辅助函数 - 获取下一个质数 +def next_prime(num): + return sympy.nextprime(num) + + +# 辅助函数 - 检查是否为平方数 +def is_square(n): + return sympy.sqrt(n).is_integer + + +# 辅助函数 - 获取平方根 +def get_square_root(n): + return int(sympy.sqrt(n)) + + +challenge2 = { + 'name': + '第二章 数字游戏', + 'problems': [ + { + 'title': + '第1题 质数长度', + 'description': + '你需要提出一个字数是质数的问题,使回答的长度刚好是它的下一个质数。', + 'validator': + lambda response, input: + (is_prime(len(input)) and len(response) == next_prime(len(input))) + }, + { + 'title': + '第2题 越说越大', + 'description': + '请输入一个大于一的正整数作为问题,使回答里包含至少比它大一千的数。', + 'validator': + lambda response, input: + (input.isdigit() and int(input) > 1 and any( + int(num) > int(input) + 1000 + for num in re.findall(r'\d+', response))) + }, + { + 'title': + '第3题 越说越小', + 'description': + '请输入一个大于一的正整数作为问题,使回答里包含至少10个不同的至少比它小一千的自然数。', + 'validator': + lambda response, input: + (input.isdigit() and int(input) > 1 and (len({ + int(num) + for num in re.findall(r'\d+', response) + if int(num) < int(input) - 1000 + }) >= 10)) + }, + { + 'title': + '第4题 
警世名言', + 'description': + '请在不提及1,4,5这三个字符的情况下让模型回答114514,不要有多余的字符。', + 'validator': + lambda response, input: response.strip() == '114514' and all( + c not in input for c in ['1', '4', '5']) + }, + { + 'title': + '第5题 开平方数', + 'description': + '请输入一个大于一的平方数作为问题,使回答里包含它的平方根对应的数。', + 'validator': + lambda response, input: (( + input.isdigit() and int(input) > 1 and is_square(int(input)) + and # noqa: W504 + str(get_square_root(int(input))) in response)) + }, + { + 'title': + '第6题 得寸进狗', + 'description': + '请提一个不包含“狗”这个字的10个字以内的问题,使回答中“狗”这个字出现的次数至少是问题字数的2倍。', + 'validator': + lambda response, input: ( + '狗' not in input and len(input) <= 10 and # noqa: W504 + response.count('狗') >= 2 * len(input)) + }, + { + 'title': + '第7题 暗藏玄机', + 'description': + '请输入一个大于一的正整数作为问题,使回答包含一个两位数,其十位上的数字比个位上的数字小5。', + 'validator': + lambda response, input: + (input.isdigit() and int(input) > 1 and (len({ + int(num) + for num in re.findall(r'\d+', response) + if len(num) == 2 and int(num[1]) - int(num[0]) == 5 + }) >= 1)) + }, + { + 'title': + '第8题 点兵点将', + 'description': + '请输入十个字以内的问题,问题中不能包含0到9的数字以及零到十的汉字,使模型的回答中包含数字0到9。', + 'validator': + lambda response, input: + (len(input) <= 10 and not re.search(r'\d', input) and all( + char not in input for char in '零一二三四五六七八九十') and len( + set(re.findall(r'\d', response))) == 10) + }, + ] +} diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch3.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch3.py new file mode 100644 index 0000000000000000000000000000000000000000..bb8040dff0096ea019d2154b1c8f6d27295f6f54 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch3.py @@ -0,0 +1,101 @@ +import re +import unicodedata + + +def validate_palindrome_inverse(origin_input, response, generate_response): + """ + 验证器函数,检查对于给定的问题,正着问和倒着问的模型的回答本身不回文且也是逆序的关系。 + + :param origin_input: 原始问题 + :param response: 模型对原始问题的回答 + :param model_fn: 能够返回模型回答的函数 + :return: True 如果回答本身不回文且也是逆序关系,否则 False + 
""" + + # 检查原始输入不是回文 + if origin_input == origin_input[::-1]: + return False + + # 将原始问题倒序并获取模型回答 + reversed_question = origin_input[::-1] + reversed_response = generate_response(reversed_question).strip() + + # 检查原始回答不是回文 + if response == response[::-1]: + return False + + # 比较正着问和倒着问的回答是否是逆序关系 + return response == reversed_response[::-1] + + +challenge3 = { + 'name': + '第三章 巅峰挑战', + 'problems': [ + { + 'title': + '第1题 自说自话', + 'description': + '请做出一个至少10个字的提问,使回答和提问完全一样。', + 'validator': + lambda response, input: input == response and len(input) >= 10 + }, + { + 'title': + '第2题 话自说自', + 'description': + '请做出一个至少10个字的提问,使回答是提问的逆序。', + 'validator': + lambda response, input: + (input[::-1] == response and len(input) >= 10) + }, + { + 'title': + '第3题 就是敢说', + 'description': + '请在不提及1,3这两个字符的情况下让模型回答1+1=3,不要有多余的字符。', + 'validator': + lambda response, input: response.strip() == '1+1=3' and all( + c not in input for c in ['1', '3']) + }, + { + 'title': + '第4题 回文协变', + 'description': + '请输入一个本身不是回文串的问题,使正着问和倒着问的时候,模型的回答本身不回文且也是逆序的关系。', + 'validator': + lambda response, input, generate_response: + validate_palindrome_inverse(input, response, generate_response) + }, + { + 'title': + '第5题 答非所问', + 'description': + '请输入一个以“为什么”开头,以“?”结尾的不包含数字的问题,使回答中只包含数字。', + 'validator': + lambda response, input: '为什么' == input[:3] and '?' 
== input[-1] + and # noqa: W504 + len(re.findall(r'\d+', input)) == 0 and response.isdigit() + }, + { + 'title': + '第6题 与狗交谈', + 'description': + '请在不提及“狗”、“汪”、“dog”的情况下,让模型的回答是一字不差的“汪~”', + 'validator': + lambda response, input: + (all(char not in input + for char in ['狗', '汪', 'dog']) and response.strip() == '汪~') + }, + { + 'title': + '第7题 魔搭 (🐶)', + 'description': + '请提一个不包含任何中文字符、换行或空格的问题,使模型正好回答"魔搭"。', + 'validator': + lambda response, input: not any( + 'CJK' in unicodedata.name(char, '') or char in '\t\n ' + for char in input) and (response.strip() == '魔搭') + }, + ] +} diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch4.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch4.py new file mode 100644 index 0000000000000000000000000000000000000000..f57df344d8f8a8d9a5bf85b25ebf40473b66e454 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/challenges/ch4.py @@ -0,0 +1,68 @@ +import re + + +def validate_reciprocal_question(input, response, generate_response): + """ + 验证器函数,检查给定的问题A和回答B,是否能够通过以B作为新的提问得到原始问题A作为回答。 + + :param response: 模型对问题A的回答B + :param input: 原始问题A + :param model_fn: 模型调用函数,接收问题并返回回答 + :return: True 如果以回答B作为新的提问能得到问题A作为回答,否则 False + """ + + # 确保回答B不等于原始问题A + if response == input: + return False + + # 以回答B作为新的提问 + new_response = generate_response(response) + + # 检查新的回答是否与原始问题A相等 + return new_response.strip() == input.strip() + + +challenge4 = { + 'name': + '第四章 无人之境', + 'problems': [ + { + 'title': + '第1题 循环往复', + 'description': + '请构造一个问题A,它的回复是不同于A的B,然后以B提问能再次得到A.', + 'validator': + lambda response, input, generate_response: + validate_reciprocal_question(input, response, generate_response) + }, + { + 'title': + '第2题 惜字如金', + 'description': + '请输入一个字的问题,使模型的回答在16个字以内。', + 'validator': + lambda response, input: len(input) == 1 and len(response) <= 16 + }, + { + 'title': + '第3题 自然之密', + 'description': + '请输入一个大于一的正整数作为问题,使回答里包含和它刚好相差1的数。', + 'validator': + lambda response, input: + 
(input.isdigit() and int(input) > 1 and any( + abs(int(num) - int(input)) == 1 + for num in re.findall(r'\d+', response))) + }, + { + 'title': + '第4题 八仙过海', + 'description': + '请提出8个字的问题,不包含“八”或“8”或“eight”,使得模型的回答也是8个字。', + 'validator': + lambda response, input: + (all(char not in input + for char in ['八', '8', 'eight']) and len(response) == 8) + }, + ] +} diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/llm.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/llm.py new file mode 100644 index 0000000000000000000000000000000000000000..9fb7b7bdffac7026a31aca1970311b0b397090b6 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/llm.py @@ -0,0 +1,170 @@ +import os +import random +from http import HTTPStatus +from typing import Any, Dict, List, Union + + +class DashScope: + """A class to interact with the Dashscope AI service for response generation. + + This class provides an interface to call a specific model from the Dashscope service + to generate responses based on the input provided. + + Attributes: + model (str): The name of the model to be used for generation. + """ + + def __init__(self, model_name: str = 'qwen-plus'): + """Initializes the DashScope instance with a given model name. + + The constructor sets up the model name that will be used for response generation + and initializes the Dashscope API key from environment variables. + + Args: + model_name (str): The name of the model to be used. Defaults to 'qwen-plus'. + """ + import dashscope # Import dashscope module at runtime + dashscope.api_key = os.getenv( + 'DASHSCOPE_API_KEY') # Set the API key from environment variable + self.model: str = model_name # Assign the model name to an instance variable + + def __call__(self, input: Union[str, List[Dict[str, str]]], + **kwargs: Any) -> Union[str, None]: + """Allows the DashScope instance to be called as a function. + + This method processes the input, sends it to the Dashscope service, and returns + the generated response. 
+ + Args: + input (Union[str, List[Dict[str, str]]]): The input str to generate a + response for. Can be a string or a list of messages. + **kwargs: Arbitrary keyword arguments. + + Returns: + Union[str, None]: The generated response from the model, or None if there is an error. + + Raises: + RuntimeError: If there is an error in accessing the Dashscope service. + """ + import dashscope # Import dashscope module at runtime + # Format the input into the required structure + if isinstance(input, str): + messages: List[Dict[str, str]] = [{ + 'role': + 'system', + 'content': + 'You are a helpful assistant.' + }, { + 'role': 'user', + 'content': input + }] + else: + messages = input + + # Make a call to the Dashscope service with the processed input + response = dashscope.Generation.call( + model=self.model, + messages=messages, + seed=random.randint(1, + 10000), # Generate a random seed for each call + result_format='message', # Specify the format of the result + top_p=kwargs.get('top_p', + 0.8) # Set the nucleus sampling parameter + ) + # Check the response status code and return the generated response or raise an error + if response.status_code == HTTPStatus.OK: + return response.output.choices[0].message.content + else: + print('Error accessing dashscope, please try again.', + response.request_id, response.message) + return '' + + +class ZhiPu: + + def __init__(self, model_name: str = 'chatglm_turbo'): + """Initializes the ZhiPu instance with a given model name. + + The constructor sets up the model name that will be used for response generation + and initializes the Dashscope API key from environment variables. + + Args: + model_name (str): The name of the model to be used. Defaults to 'qwen-plus'. 
+ """ + import zhipuai # Import dashscope module at runtime + zhipuai.api_key = os.getenv( + 'ZHIPU_API_KEY') # Set the API key from environment variable + self.model: str = model_name # Assign the model name to an instance variable + + def __call__(self, input: Union[str, List[Dict[str, str]]], + **kwargs: Any) -> Union[str, None]: + """Allows the ZhiPu instance to be called as a function. + + { + "code":200, + "msg":"操作成功", + "data":{ + "request_id":"8098024428488935671", + "task_id":"8098024428488935671", + "task_status":"SUCCESS", + "choices":[ + { + "role":"assistant", + "content":"\" 您好!作为人工智能助手,我很乐意为您提供帮助。请问您有什么问题或者需要解决的事情吗?您可以向我提问,我会尽力为您解答。\"" + } + ], + "usage":{ + "prompt_tokens":2, + "completion_tokens":32, + "total_tokens":34 + } + }, + "success":true + } + """ + import zhipuai + if isinstance(input, str): + messages: List[Dict[str, str]] = [{ + 'role': 'user', + 'content': input + }] + else: + messages = input + + response = zhipuai.model_api.invoke( + model=self.model, + prompt=messages, + top_p=0.7, + temperature=0.9, + return_type='text', + ) + if response['code'] == 200: + return response['data']['choices'][0]['content'] + else: + print(f'{self.model} error: ', response) + return '' + + +def create_model(model_name: str): + """Factory function to create a DashScope model instance based on the model name. + + Args: + model_name (str): The name of the model to create an instance of. + + Returns: + DashScope: An instance of the DashScope class. + + Raises: + ValueError: If the model name provided does not start with 'qwen'. 
+ """ + if model_name.startswith('qwen'): + return DashScope(model_name) + elif model_name.startswith('chatglm'): + return ZhiPu(model_name) + else: + raise ValueError('Other model implementations need to be provided.') + + +if __name__ == '__main__': + model = create_model('chatglm_turbo') + print(model('输入')) diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/requirements.txt b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..0093d7929a6ab4f6163c0456e1f0b5ec7212e5cc --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/requirements.txt @@ -0,0 +1,5 @@ +dashscope +gradio==3.39.0 +pillow +sympy +zhipuai diff --git a/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/test_validate_fn.py b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/test_validate_fn.py new file mode 100644 index 0000000000000000000000000000000000000000..ba435aa61560d5a2d420c6f980855bae72e8123f --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/apps/llm_riddles/test_validate_fn.py @@ -0,0 +1,13 @@ +from app import challenges + + +def test_valid(): + for challenge in challenges: + for p in challenge['problems']: + val_fn = p['validator'] + try: + val_fn('response', 'input') + except Exception: + import traceback + traceback.print_exc() + print(p, 'failed') diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_dashscope.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_dashscope.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..4beb4c7393d59a5f8109186f2e16e44b132d0c93 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_dashscope.ipynb @@ -0,0 +1,456 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "04d4165c-fab2-4f54-9b50-11d53917d785", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] 
+ }, + "outputs": [], + "source": [ + "# install required packages\n", + "!pip install dashvector dashscope\n", + "!pip install transformers_stream_generator python-dotenv" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0ca135ac-b1b0-47b9-ad25-a0d11ac884f3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# prepare news corpus as knowledge source\n", + "!git clone https://github.com/shijiebei2009/CEC-Corpus.git" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "728a2bf5-905c-48ef-b70a-be53d4f8fcc0", + "metadata": { + "ExecutionIndicator": { + "show": false + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:32:15.429699Z", + "iopub.status.busy": "2023-08-10T10:32:15.429291Z", + "iopub.status.idle": "2023-08-10T10:32:16.076518Z", + "shell.execute_reply": "2023-08-10T10:32:16.075949Z", + "shell.execute_reply.started": "2023-08-10T10:32:15.429679Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "import dashscope\n", + "import os\n", + "from dotenv import load_dotenv\n", + "from dashscope import TextEmbedding\n", + "from dashvector import Client, Doc\n", + "\n", + "# get env variable from .env\n", + "# please make sure DASHSCOPE_KEY is defined in .env\n", + "load_dotenv()\n", + "dashscope.api_key = os.getenv('DASHSCOPE_KEY')\n", + "\n", + "\n", + "# initialize DashVector for embedding's indexing and searching\n", + "dashvector_client = Client(api_key='{your-dashvector-api-key}')\n", + "\n", + "# define collection name\n", + "collection_name = 'news_embeddings'\n", + "\n", + "# delete if already exist\n", + "dashvector_client.delete(collection_name)\n", + "\n", + "# create a collection with embedding size of 1536\n", + "rsp = dashvector_client.create(collection_name, 1536)\n", + "collection = dashvector_client.get(collection_name)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "id": "558b64ab-1fdf-4339-8368-97e67bef8159", + "metadata": { + "ExecutionIndicator": { + 
"show": false + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:57:43.451192Z", + "iopub.status.busy": "2023-08-10T10:57:43.450893Z", + "iopub.status.idle": "2023-08-10T10:57:43.454858Z", + "shell.execute_reply": "2023-08-10T10:57:43.454244Z", + "shell.execute_reply.started": "2023-08-10T10:57:43.451173Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "def prepare_data_from_dir(path, size):\n", + " # prepare the data from a file folder in order to upsert to DashVector with a reasonable doc's size.\n", + " batch_docs = []\n", + " for file in os.listdir(path):\n", + " with open(path + '/' + file, 'r', encoding='utf-8') as f:\n", + " batch_docs.append(f.read())\n", + " if len(batch_docs) == size:\n", + " yield batch_docs[:]\n", + " batch_docs.clear()\n", + "\n", + " if batch_docs:\n", + " yield batch_docs" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "id": "d65c0f3f-a080-4803-b5ed-f4e641a96db2", + "metadata": { + "ExecutionIndicator": { + "show": false + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:57:44.615001Z", + "iopub.status.busy": "2023-08-10T10:57:44.614690Z", + "iopub.status.idle": "2023-08-10T10:57:44.618899Z", + "shell.execute_reply": "2023-08-10T10:57:44.618418Z", + "shell.execute_reply.started": "2023-08-10T10:57:44.614979Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "def prepare_data_from_file(path, size):\n", + " # prepare the data from file in order to upsert to DashVector with a reasonable doc's size.\n", + " batch_docs = []\n", + " chunk_size = 12\n", + " with open(path, 'r', encoding='utf-8') as f:\n", + " doc = ''\n", + " count = 0\n", + " for line in f:\n", + " if count < chunk_size and line.strip() != '':\n", + " doc += line\n", + " count += 1\n", + " if count == chunk_size:\n", + " batch_docs.append(doc)\n", + " if len(batch_docs) == size:\n", + " yield batch_docs[:]\n", + " batch_docs.clear()\n", + " doc = ''\n", + " count = 0\n", + "\n", + " if batch_docs:\n", + " yield 
batch_docs" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "id": "aded6eec-1f05-479e-9f0e-3ce63872a07b", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:57:46.210192Z", + "iopub.status.busy": "2023-08-10T10:57:46.209870Z", + "iopub.status.idle": "2023-08-10T10:57:46.214412Z", + "shell.execute_reply": "2023-08-10T10:57:46.213625Z", + "shell.execute_reply.started": "2023-08-10T10:57:46.210172Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "def generate_embeddings(docs):\n", + " # create embeddings via DashScope's TextEmbedding model API\n", + " rsp = TextEmbedding.call(model=TextEmbedding.Models.text_embedding_v1,\n", + " input=docs)\n", + " embeddings = [record['embedding'] for record in rsp.output['embeddings']]\n", + " return embeddings if isinstance(docs, list) else embeddings[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5c0ba7e1-001f-4bb9-9bdb-7eb318bc3550", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "id = 0\n", + "dir_name = 'CEC-Corpus/raw corpus/allSourceText'\n", + "\n", + "# indexing the raw docs with index to DashVector\n", + "collection = dashvector_client.get(collection_name)\n", + "\n", + "# embedding api max batch size\n", + "batch_size = 4 \n", + "\n", + "for news in list(prepare_data_from_dir(dir_name, batch_size)):\n", + " ids = [id + i for i, _ in enumerate(news)]\n", + " id += len(news)\n", + " # generate embedding from raw docs\n", + " vectors = generate_embeddings(news)\n", + " # upsert and index\n", + " ret = collection.upsert(\n", + " [\n", + " Doc(id=str(id), vector=vector, fields={\"raw\": doc})\n", + " for id, doc, vector in zip(ids, news, vectors)\n", + " ]\n", + " )\n", + " print(ret)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "53bed7e4-35be-4df6-8775-7d62fcdb6457", + "metadata": { + "ExecutionIndicator": { + 
"show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# check the collection status\n", + "collection = dashvector_client.get(collection_name)\n", + "rsp = collection.stats()\n", + "print(rsp)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "id": "41e54ddd-145d-49c3-ade4-4a46dc34e07b", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:57:54.368540Z", + "iopub.status.busy": "2023-08-10T10:57:54.368215Z", + "iopub.status.idle": "2023-08-10T10:57:54.371879Z", + "shell.execute_reply": "2023-08-10T10:57:54.371364Z", + "shell.execute_reply.started": "2023-08-10T10:57:54.368521Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "def search_relevant_context(question, topk=1, client=dashvector_client):\n", + " # query and recall the relevant information\n", + " collection = client.get(collection_name)\n", + "\n", + " # recall the top k similarity results from DashVector\n", + " rsp = collection.query(generate_embeddings(question), output_fields=['raw'],\n", + " topk=topk)\n", + " return \"\".join([item.fields['raw'] for item in rsp.output])" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "id": "409236b9-87d4-4df0-8ee6-486d3c0e5fb6", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:57:56.141848Z", + "iopub.status.busy": "2023-08-10T10:57:56.141502Z", + "iopub.status.idle": "2023-08-10T10:57:56.387965Z", + "shell.execute_reply": "2023-08-10T10:57:56.387379Z", + "shell.execute_reply.started": "2023-08-10T10:57:56.141830Z" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2006-08-26 10:41:45\n", + "8月23日上午9时40分,京沪高速公路沧州服务区附近,一辆由北向南行驶的金杯面包车撞到高速公路护栏上,车上5名清华大学博士后研究人员及1名司机受伤,被紧急送往沧州二医院抢救。截至发稿时,仍有一名张姓博士后研究人员尚未脱离危险。\n" + ] + } + ], + "source": [ + "# query the top 1 results\n", + "question = '清华博士发生了什么?'\n", + "context = 
search_relevant_context(question, topk=1)\n", + "print(context)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "730abebb-1f5a-4fb9-b035-fb2ae09a31c9", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# initialize qwen 7B model\n", + "from modelscope import AutoModelForCausalLM, AutoTokenizer\n", + "from modelscope import GenerationConfig\n", + "\n", + "tokenizer = AutoTokenizer.from_pretrained(\"qwen/Qwen-7B-Chat\", revision = 'v1.0.5',trust_remote_code=True)\n", + "model = AutoModelForCausalLM.from_pretrained(\"qwen/Qwen-7B-Chat\", revision = 'v1.0.5',device_map=\"auto\", trust_remote_code=True, fp16=True).eval()\n", + "model.generation_config = GenerationConfig.from_pretrained(\"Qwen/Qwen-7B-Chat\",revision = 'v1.0.5', trust_remote_code=True) # 可指定不同的生成长度、top_p等相关超参" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "2f5a1bcb-e83a-44d3-bbe4-f97437782a3b", + "metadata": { + "ExecutionIndicator": { + "show": false + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:41:01.761863Z", + "iopub.status.busy": "2023-08-10T10:41:01.761502Z", + "iopub.status.idle": "2023-08-10T10:41:01.765849Z", + "shell.execute_reply": "2023-08-10T10:41:01.765318Z", + "shell.execute_reply.started": "2023-08-10T10:41:01.761842Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# define a prompt template for the vectorDB-enhanced LLM generation\n", + "def answer_question(question, context):\n", + " prompt = f'''请基于```内的内容回答问题。\"\n", + "\t```\n", + "\t{context}\n", + "\t```\n", + "\t我的问题是:{question}。\n", + " '''\n", + " history = None\n", + " print(prompt)\n", + " response, history = model.chat(tokenizer, prompt, history=None)\n", + " return response" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "75ac8f4a-a861-4376-9e55-ebefef9a9cd6", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "execution": { + "iopub.execute_input": 
"2023-08-10T10:41:29.070090Z", + "iopub.status.busy": "2023-08-10T10:41:29.069778Z", + "iopub.status.idle": "2023-08-10T10:41:31.613198Z", + "shell.execute_reply": "2023-08-10T10:41:31.612421Z", + "shell.execute_reply.started": "2023-08-10T10:41:29.070073Z" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "请基于```内的内容回答问题。\"\n", + "\t```\n", + "\t\n", + "\t```\n", + "\t我的问题是:清华博士发生了什么?。\n", + " \n", + "question: 清华博士发生了什么?\n", + "answer: 清华博士是指清华大学的博士研究生。作为一名AI语言模型,我无法获取个人的身份信息或具体事件,因此无法回答清华博士发生了什么。如果您需要了解更多相关信息,建议您查询相关媒体或官方网站。\n" + ] + } + ], + "source": [ + "# test the case on plain LLM without vectorDB enhancement\n", + "question = '清华博士发生了什么?'\n", + "answer = answer_question(question, '')\n", + "print(f'question: {question}\\n' f'answer: {answer}')" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "eca328fc-cd69-4e12-8448-f426f3314414", + "metadata": { + "ExecutionIndicator": { + "show": false + }, + "execution": { + "iopub.execute_input": "2023-08-10T10:41:34.268896Z", + "iopub.status.busy": "2023-08-10T10:41:34.268585Z", + "iopub.status.idle": "2023-08-10T10:41:37.750128Z", + "shell.execute_reply": "2023-08-10T10:41:37.749414Z", + "shell.execute_reply.started": "2023-08-10T10:41:34.268878Z" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "请基于```内的内容回答问题。\"\n", + "\t```\n", + "\t2006-08-26 10:41:45\n", + "8月23日上午9时40分,京沪高速公路沧州服务区附近,一辆由北向南行驶的金杯面包车撞到高速公路护栏上,车上5名清华大学博士后研究人员及1名司机受伤,被紧急送往沧州二医院抢救。截至发稿时,仍有一名张姓博士后研究人员尚未脱离危险。\n", + "\n", + "\n", + "\t```\n", + "\t我的问题是:清华博士发生了什么?。\n", + " \n", + "question: 清华博士发生了什么?\n", + "answer: 8月23日上午9时40分,一辆由北向南行驶的金杯面包车撞到高速公路护栏上,车上5名清华大学博士后研究人员及1名司机受伤,被紧急送往沧州二医院抢救。\n" + ] + } + ], + "source": [ + "# test the case with knowledge\n", + "context = search_relevant_context(question, topk=1)\n", + "answer = answer_question(question, context)\n", + "print(f'question: {question}\\n' f'answer: {answer}')" + ] + } 
+ ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_langchain.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_langchain.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..7a6a9995bbe566ba14ce6261613eee1dcafdfc25 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/application/qwen_doc_search_QA_based_on_langchain.ipynb @@ -0,0 +1,326 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "9678e0bc-97cd-45bc-bd38-8d79c6789325", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# install required packages\n", + "!pip install langchain\n", + "!pip install unstructured\n", + "!pip install transformers_stream_generator" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "36410a7c-a334-4ba2-abde-1679ac938a2a", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "import os\n", + "from typing import List, Optional\n", + "from langchain.llms.base import LLM\n", + "from modelscope import AutoModelForCausalLM, AutoTokenizer\n", + "from modelscope import GenerationConfig\n", + "\n", + "# initialize qwen 7B model\n", + "tokenizer = AutoTokenizer.from_pretrained(\"qwen/Qwen-7B-Chat\", revision = 'v1.0.5',trust_remote_code=True)\n", + "model = AutoModelForCausalLM.from_pretrained(\"qwen/Qwen-7B-Chat\", revision = 'v1.0.5',device_map=\"auto\", trust_remote_code=True, 
fp16=True).eval()\n", + "model.generation_config = GenerationConfig.from_pretrained(\"Qwen/Qwen-7B-Chat\",revision = 'v1.0.5', trust_remote_code=True) \n", + "\n", + "\n", + "# torch garbage collection\n", + "def torch_gc():\n", + " os.environ[\"TOKENIZERS_PARALLELISM\"] = \"false\"\n", + " DEVICE = \"cuda\"\n", + " DEVICE_ID = \"0\"\n", + " CUDA_DEVICE = f\"{DEVICE}:{DEVICE_ID}\" if DEVICE_ID else DEVICE\n", + " a = torch.Tensor([1, 2])\n", + " a = a.cuda()\n", + " print(a)\n", + "\n", + " if torch.cuda.is_available():\n", + " with torch.cuda.device(CUDA_DEVICE):\n", + " torch.cuda.empty_cache()\n", + " torch.cuda.ipc_collect()\n", + "\n", + "# wrap the qwen model with langchain LLM base class\n", + "class QianWenChatLLM(LLM):\n", + " max_length = 10000\n", + " temperature: float = 0.01\n", + " top_p = 0.9\n", + "\n", + " def __init__(self):\n", + " super().__init__()\n", + "\n", + " @property\n", + " def _llm_type(self):\n", + " return \"ChatLLM\"\n", + "\n", + " def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:\n", + " print(prompt)\n", + " response, history = model.chat(tokenizer, prompt, history=None)\n", + " torch_gc()\n", + " return response\n", + " \n", + "# create the qwen llm\n", + "qwllm = QianWenChatLLM()\n", + "print('@@@ qianwen LLM created')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce46aa8d-d772-4990-b748-12872fac2473", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "execution": { + "iopub.execute_input": "2023-08-11T03:49:17.451327Z", + "iopub.status.busy": "2023-08-11T03:49:17.450867Z", + "iopub.status.idle": "2023-08-11T03:49:18.960037Z", + "shell.execute_reply": "2023-08-11T03:49:18.959128Z", + "shell.execute_reply.started": "2023-08-11T03:49:17.451304Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "import os\n", + "import re\n", + "import torch\n", + "\n", + "from typing import Any, List\n", + "from pydantic import BaseModel, Extra\n", + "from langchain.chains 
import RetrievalQA\n", + "from langchain.document_loaders import UnstructuredFileLoader,TextLoader\n", + "from langchain.embeddings.base import Embeddings\n", + "from langchain.prompts import PromptTemplate\n", + "from langchain.text_splitter import CharacterTextSplitter\n", + "from langchain.vectorstores import FAISS\n", + "\n", + "# define chinese text split logic for divided docs into reasonable size\n", + "class ChineseTextSplitter(CharacterTextSplitter):\n", + " def __init__(self, pdf: bool = False, sentence_size: int = 100, **kwargs):\n", + " super().__init__(**kwargs)\n", + " self.pdf = pdf\n", + " self.sentence_size = sentence_size\n", + "\n", + " def split_text(self, text: str) -> List[str]: \n", + " if self.pdf:\n", + " text = re.sub(r\"\\n{3,}\", r\"\\n\", text)\n", + " text = re.sub('\\s', \" \", text)\n", + " text = re.sub(\"\\n\\n\", \"\", text)\n", + "\n", + " text = re.sub(r'([;;.!?。!?\\?])([^”’])', r\"\\1\\n\\2\", text) # 单字符断句符\n", + " text = re.sub(r'(\\.{6})([^\"’”」』])', r\"\\1\\n\\2\", text) # 英文省略号\n", + " text = re.sub(r'(\\…{2})([^\"’”」』])', r\"\\1\\n\\2\", text) # 中文省略号\n", + " text = re.sub(r'([;;!?。!?\\?][\"’”」』]{0,2})([^;;!?,。!?\\?])', r'\\1\\n\\2', text)\n", + " # 如果双引号前有终止符,那么双引号才是句子的终点,把分句符\\n放到双引号后,注意前面的几句都小心保留了双引号\n", + " text = text.rstrip() # 段尾如果有多余的\\n就去掉它\n", + " # 很多规则中会考虑分号;,但是这里我把它忽略不计,破折号、英文双引号等同样忽略,需要的再做些简单调整即可。\n", + " ls = [i for i in text.split(\"\\n\") if i]\n", + " for ele in ls:\n", + " if len(ele) > self.sentence_size:\n", + " ele1 = re.sub(r'([,,.][\"’”」』]{0,2})([^,,.])', r'\\1\\n\\2', ele)\n", + " ele1_ls = ele1.split(\"\\n\")\n", + " for ele_ele1 in ele1_ls:\n", + " if len(ele_ele1) > self.sentence_size:\n", + " ele_ele2 = re.sub(r'([\\n]{1,}| {2,}[\"’”」』]{0,2})([^\\s])', r'\\1\\n\\2', ele_ele1)\n", + " ele2_ls = ele_ele2.split(\"\\n\")\n", + " for ele_ele2 in ele2_ls:\n", + " if len(ele_ele2) > self.sentence_size:\n", + " ele_ele3 = re.sub('( [\"’”」』]{0,2})([^ ])', r'\\1\\n\\2', ele_ele2)\n", + " ele2_id = 
ele2_ls.index(ele_ele2)\n", + " ele2_ls = ele2_ls[:ele2_id] + [i for i in ele_ele3.split(\"\\n\") if i] + ele2_ls[\n", + " ele2_id + 1:]\n", + " ele_id = ele1_ls.index(ele_ele1)\n", + " ele1_ls = ele1_ls[:ele_id] + [i for i in ele2_ls if i] + ele1_ls[ele_id + 1:]\n", + "\n", + " id = ls.index(ele)\n", + " ls = ls[:id] + [i for i in ele1_ls if i] + ls[id + 1:]\n", + " return ls\n", + "\n", + "\n", + "# using modelscope text embedding method for embedding tool\n", + "class ModelScopeEmbeddings(BaseModel, Embeddings):\n", + " embed: Any\n", + " model_id: str =\"damo/nlp_corom_sentence-embedding_english-base\"\n", + " \"\"\"Model name to use.\"\"\"\n", + "\n", + " def __init__(self, **kwargs: Any):\n", + " \"\"\"Initialize the modelscope\"\"\"\n", + " super().__init__(**kwargs)\n", + " try:\n", + " from modelscope.models import Model\n", + " from modelscope.pipelines import pipeline\n", + " from modelscope.utils.constant import Tasks\n", + " self.embed = pipeline(Tasks.sentence_embedding,model=self.model_id)\n", + "\n", + " except ImportError as e:\n", + " raise ValueError(\n", + " \"Could not import some python packages.\" \"Please install it with `pip install modelscope`.\"\n", + " ) from e\n", + "\n", + " class Config:\n", + " extra = Extra.forbid\n", + "\n", + " def embed_documents(self, texts: List[str]) -> List[List[float]]:\n", + " texts = list(map(lambda x: x.replace(\"\\n\", \" \"), texts))\n", + " inputs = {\"source_sentence\": texts}\n", + " embeddings = self.embed(input=inputs)['text_embedding']\n", + " return embeddings\n", + "\n", + " def embed_query(self, text: str) -> List[float]:\n", + " text = text.replace(\"\\n\", \" \")\n", + " inputs = {\"source_sentence\": [text]}\n", + " embedding = self.embed(input=inputs)['text_embedding'][0]\n", + " return embedding\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "ca3dc051-1b0b-4bec-b082-6e94b220a34d", + "metadata": { + "execution": { + "iopub.execute_input": 
"2023-08-10T06:44:05.671065Z", + "iopub.status.busy": "2023-08-10T06:44:05.670720Z", + "iopub.status.idle": "2023-08-10T06:44:05.674188Z", + "shell.execute_reply": "2023-08-10T06:44:05.673699Z", + "shell.execute_reply.started": "2023-08-10T06:44:05.671045Z" + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# define prompt template\n", + "prompt_template = \"\"\"请基于```内的内容回答问题。\"\n", + "\t```\n", + "\t{context}\n", + "\t```\n", + "\t我的问题是:{question}。\n", + "\"\"\"\n", + "\n", + "prompt = PromptTemplate(template=prompt_template, input_variables=[\"context\", \"question\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a41ff8b8-bf19-4766-8d90-af48c7dfda99", + "metadata": { + "ExecutionIndicator": { + "show": true + }, + "tags": [] + }, + "outputs": [], + "source": [ + "# load the vector db and upsert docs with vector to db\n", + "\n", + "print('@@@ reading docs ...')\n", + "sentence_size = 1600\n", + "embeddings = ModelScopeEmbeddings(model_id=\"damo/nlp_corom_sentence-embedding_chinese-tiny\")\n", + "\n", + "filepath = \"../../../README_zh.md\"\n", + "if filepath.lower().endswith(\".md\"):\n", + " loader = UnstructuredFileLoader(filepath, mode=\"elements\")\n", + " docs = loader.load()\n", + "elif filepath.lower().endswith(\".txt\"):\n", + " loader = TextLoader(filepath, autodetect_encoding=True)\n", + " textsplitter = ChineseTextSplitter(pdf=False, sentence_size=sentence_size)\n", + " docs = loader.load_and_split(textsplitter) \n", + "\n", + "db = FAISS.from_documents(docs, embeddings)\n", + "print('@@@ reading doc done, vec db created.')\n", + "\n", + "\n", + "# create knowledge chain\n", + "kc = RetrievalQA.from_llm(llm=qwllm, retriever=db.as_retriever(search_kwargs={\"k\": 6}), prompt=prompt)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c97b1a9e-6260-4429-8411-a3a2cddadb05", + "metadata": { + "ExecutionIndicator": { + "show": false + }, + "execution": { + "iopub.execute_input": 
"2023-08-06T06:14:23.817772Z", + "iopub.status.busy": "2023-08-06T06:14:23.817192Z", + "iopub.status.idle": "2023-08-06T06:14:27.775706Z", + "shell.execute_reply": "2023-08-06T06:14:27.775194Z", + "shell.execute_reply.started": "2023-08-06T06:14:23.817734Z" + }, + "tags": [] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "请基于```内的内容回答问题。\"\n", + "\t```\n", + "\tContext:\n", + "ModelScope Library为模型贡献者提供了必要的分层API,以便将来自 CV、NLP、语音、多模态以及科学计算的模型集成到ModelScope生态系统中。所有这些不同模型的实现都以一种简单统一访问的方式进行封装,用户只需几行代码即可完成模型推理、微调和评估。同时,灵活的模块化设计使得在必要时也可以自定义模型训练推理过程中的不同组件。\n", + "\n", + "Context:\n", + "ModelScope 是一个“模型即服务”(MaaS)平台,旨在汇集来自AI社区的最先进的机器学习模型,并简化在实际应用中使用AI模型的流程。ModelScope库使开发人员能够通过丰富的API设计执行推理、训练和评估,从而促进跨不同AI领域的最先进模型的统一体验。\n", + "\n", + "Context:\n", + "除了包含各种模型的实现之外,ModelScope Library还支持与ModelScope后端服务进行必要的交互,特别是与Model-Hub和Dataset-Hub的交互。这种交互促进了模型和数据集的管理在后台无缝执行,包括模型数据集查询、版本控制、缓存管理等。\n", + "\t```\n", + "\t我的问题是:modelscope是什么?。\n", + "\n", + "tensor([1., 2.], device='cuda:0')\n" + ] + } + ], + "source": [ + "# test the knowledge chain\n", + "query = 'modelscope是什么?'\n", + "result = kc({\"query\": query})\n", + "print(result)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.16" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/finetune_speech_recognition.py b/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/finetune_speech_recognition.py new file mode 100644 index 0000000000000000000000000000000000000000..47af0b90c0aff338b76a0d90f419ac501528161f --- /dev/null +++ 
b/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/finetune_speech_recognition.py @@ -0,0 +1,45 @@ +import os + +from modelscope.metainfo import Trainers +from modelscope.msdatasets.dataset_cls.custom_datasets import ASRDataset +from modelscope.trainers import build_trainer +from modelscope.utils.constant import DownloadMode + + +def modelscope_finetune(params): + if not os.path.exists(params.output_dir): + os.makedirs(params.output_dir, exist_ok=True) + # dataset split ["train", "validation"] + ds_dict = ASRDataset.load( + params.data_path, + namespace='speech_asr', + download_mode=params.download_mode) + kwargs = dict( + model=params.model, + data_dir=ds_dict, + dataset_type=params.dataset_type, + work_dir=params.output_dir, + batch_bins=params.batch_bins, + max_epoch=params.max_epoch, + lr=params.lr) + trainer = build_trainer(Trainers.speech_asr_trainer, default_args=kwargs) + trainer.train() + + +if __name__ == '__main__': + from funasr.utils.modelscope_param import modelscope_args + + params = modelscope_args( + model= + 'damo/speech_paraformer-large_asr_nat-zh-cn-16k-common-vocab8404-pytorch' + ) + params.output_dir = './checkpoint' # 模型保存路径 + params.data_path = 'speech_asr_aishell1_trainsets' # 数据路径,可以为modelscope中已上传数据,也可以是本地数据 + params.dataset_type = 'small' # 小数据量设置small,若数据量大于1000小时,请使用large + params.batch_bins = 2000 # batch size,如果dataset_type="small",batch_bins单位为fbank特征帧数, + # 如果dataset_type="large",batch_bins单位为毫秒, + params.max_epoch = 50 # 最大训练轮数 + params.lr = 0.00005 # 设置学习率 + params.download_mode = DownloadMode.FORCE_REDOWNLOAD # 重新下载数据,否则设置为默认值DownloadMode.REUSE_DATASET_IF_EXISTS + + modelscope_finetune(params) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..d741a3acdc8ac575e66673b6492cbb844198e901 --- /dev/null +++ 
b/AI/modelscope/1.10.0/8/examples/pytorch/auto_speech_recognition/run_train.sh @@ -0,0 +1 @@ +PYTHONPATH=. python examples/pytorch/auto_speech_recognition/finetune_speech_recognition.py diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/default_offload_opt_param.json b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/default_offload_opt_param.json new file mode 100644 index 0000000000000000000000000000000000000000..b6f982e5a6924b5e89d53f0908b0fe670473f2e1 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/default_offload_opt_param.json @@ -0,0 +1,44 @@ +{ + "bf16": { + "enabled": "auto" + }, + "optimizer": { + "type": "AdamW", + "params": { + "lr": "auto", + "betas": "auto", + "eps": "auto", + "weight_decay": "auto" + } + }, + "scheduler": { + "type": "WarmupDecayLR", + "params": { + "total_num_steps": "auto", + "warmup_min_lr": "auto", + "warmup_max_lr": "auto", + "warmup_num_steps": "auto" + } + }, + "zero_optimization": { + "stage": 2, + "offload_optimizer": { + "device": "cpu", + "pin_memory": true + }, + "offload_param": { + "device": "cpu", + "pin_memory": true + }, + "overlap_comm": true, + "contiguous_gradients": true, + "sub_group_size": 1e9, + "reduce_bucket_size": "auto" + }, + "gradient_accumulation_steps": "auto", + "gradient_clipping": "auto", + "steps_per_print": 10000000, + "train_batch_size": "auto", + "train_micro_batch_size_per_gpu": "auto", + "wall_clock_breakdown": false +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/finetune_baichuan.py b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/finetune_baichuan.py new file mode 100644 index 0000000000000000000000000000000000000000..ed8821c63163d5171e13eca56e6f7563c58aec5a --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/finetune_baichuan.py @@ -0,0 +1,225 @@ +import os +import sys +import types +from dataclasses import dataclass, field + +from swift import LoRAConfig, Swift +from transformers import AutoModelForCausalLM, AutoTokenizer 
+ +from modelscope import (EpochBasedTrainer, MsDataset, TorchModel, TrainingArgs, + build_dataset_from_file, snapshot_download) +from modelscope.metainfo import Trainers +from modelscope.preprocessors import TextGenerationTransformersPreprocessor +from modelscope.trainers import build_trainer + +DEFAULT_PAD_TOKEN = '[PAD]' +DEFAULT_EOS_TOKEN = '' +DEFAULT_BOS_TOKEN = '' +DEFAULT_UNK_TOKEN = '' + + +@dataclass(init=False) +class TextGenerationArguments(TrainingArgs): + + trainer: str = field( + default=Trainers.default, metadata={ + 'help': 'The trainer used', + }) + + src_txt: str = field( + default=None, + metadata={ + 'help': 'The source text key of preprocessor', + 'cfg_node': 'preprocessor.src_txt' + }) + + tgt_txt: str = field( + default=None, + metadata={ + 'help': 'The target text key of preprocessor', + 'cfg_node': 'preprocessor.tgt_txt' + }) + + sequence_length: int = field( + default=None, + metadata={ + 'help': 'The sequence length of preprocessor', + 'cfg_node': 'preprocessor.sequence_length' + }) + + lr_scheduler: str = field( + default=None, + metadata={ + 'help': 'The lr scheduler type', + 'cfg_node': 'train.lr_scheduler.type' + }) + + bf16: bool = field( + default=False, + metadata={ + 'help': 'Whether to use bf16', + 'cfg_node': 'train.bf16' + }) + + deepspeed: str = field( + default=None, + metadata={ + 'help': 'The location of DeepSpeed json config file.', + }) + + T_max: int = field( + default=None, + metadata={ + 'help': 'The T_max for CosineAnnealingLR', + 'cfg_node': 'train.lr_scheduler.T_max' + }) + + use_lora: int = field( + default=0, + metadata={'help': 'Whether to use lora to train the model.'}, + ) + + lora_rank: int = field( + default=32, + metadata={'help': 'The lora rank'}, + ) + + lora_alpha: int = field( + default=32, + metadata={'help': 'The lora alpha'}, + ) + + lora_dropout: float = field( + default=0.05, + metadata={'help': 'The lora dropout'}, + ) + + device_map: str = field( + default=None, + metadata={ + 'help': 'A map that 
specifies where each submodule should go.' + }) + + +def smart_tokenizer_and_embedding_resize(special_tokens_dict, tokenizer, + model): + """Resize tokenizer and embedding. + + Note: This is the unoptimized version that may make your embedding size not be divisible by 64. + """ + num_new_tokens = tokenizer.add_special_tokens(special_tokens_dict) + model.resize_token_embeddings(len(tokenizer)) + + if num_new_tokens > 0: + input_embeddings = model.get_input_embeddings().weight.data + output_embeddings = model.get_output_embeddings().weight.data + + input_embeddings_avg = input_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + output_embeddings_avg = output_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + + input_embeddings[-num_new_tokens:] = input_embeddings_avg + output_embeddings[-num_new_tokens:] = output_embeddings_avg + + +config, args = TextGenerationArguments().parse_cli().to_config() +print(config, args) +pipeline_type = None + + +def cfg_modify_fn(cfg): + global pipeline_type + pipeline_type = cfg.pipeline.type + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + if 'hooks' not in cfg.train: + cfg.train['hooks'] = [] + if args.deepspeed: + cfg.train.hooks.append({ + 'type': 'DeepspeedHook', + 'config': args.deepspeed, + 'save_zero_checkpoint': True, + 'with_mpu': False, + }) + + return cfg + + +def custom_save_pretrained(self, *args, **kwargs): + config = kwargs.pop('config') + if config is not None: + config.pipeline = {'type': pipeline_type} + TorchModel.save_pretrained(self, *args, config=config, **kwargs) + + +if args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace) + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace) +else: + train_dataset, 
validation_dataset = build_dataset_from_file( + args.dataset_json_file) + +model_dir = snapshot_download(args.model) +sys.path.append(model_dir) +model = AutoModelForCausalLM.from_pretrained( + model_dir, trust_remote_code=True, device_map=args.device_map) +model.model_dir = model_dir +model.save_pretrained = types.MethodType(custom_save_pretrained, model) +cfg_file = os.path.join(model_dir, 'configuration.json') +tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True) + +special_tokens_dict = dict() +if tokenizer.pad_token is None or tokenizer.pad_token == '': + special_tokens_dict['pad_token'] = DEFAULT_PAD_TOKEN +if tokenizer.eos_token is None or tokenizer.eos_token == '': + special_tokens_dict['eos_token'] = DEFAULT_EOS_TOKEN +if tokenizer.bos_token is None or tokenizer.bos_token == '': + special_tokens_dict['bos_token'] = DEFAULT_BOS_TOKEN +if tokenizer.unk_token is None or tokenizer.unk_token == '': + special_tokens_dict['unk_token'] = DEFAULT_UNK_TOKEN + +smart_tokenizer_and_embedding_resize( + special_tokens_dict=special_tokens_dict, + tokenizer=tokenizer, + model=model, +) + +preprocessor = TextGenerationTransformersPreprocessor( + model_dir, + tokenizer=tokenizer, + src_txt=config.preprocessor.src_txt, + tgt_txt=config.preprocessor.tgt_txt, + sequence_length=getattr(config.preprocessor, 'sequence_length', None)) + +if args.use_lora != 0: + lora_config = LoRAConfig( + target_modules=['pack'], + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout) + model = model.bfloat16() + model = Swift.prepare_model(model, lora_config) + +kwargs = dict( + model=model, + cfg_file=cfg_file, + preprocessor=preprocessor, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + seed=args.seed, + cfg_modify_fn=cfg_modify_fn) + +trainer: EpochBasedTrainer = build_trainer( + name=args.trainer, default_args=kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/lora_inference.py 
b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/lora_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..7458c5723b5b12b5c0ef92a9efe9c2dfac8ef5f2 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/lora_inference.py @@ -0,0 +1,27 @@ +import os.path as osp + +import torch +from swift import LoRAConfig, Swift + +from modelscope.pipelines import pipeline +from modelscope.utils.constant import Tasks + +# 使用源模型 model_id 初始化 pipeline +model_id = 'baichuan-inc/baichuan-7B' +pipe = pipeline( + task=Tasks.text_generation, model=model_id, model_revision='v1.0.2') +# lora 配置,replace_modules,rank,alpha 需与训练参数相同 +lora_config = LoRAConfig(target_modules=['pack'], r=32, lora_alpha=32) +# 转 bf16,需与训练精度相同 +model = pipe.model.bfloat16() +# model 转 lora +model = Swift.prepare_model(model, lora_config) +# 加载 lora 参数,默认 link 到于 output/model 路径 +work_dir = './tmp' +state_dict = torch.load(osp.join(work_dir, 'output/pytorch_model.bin')) +model.load_state_dict(state_dict) +# 使用 lora model 替换 pipeline 中的 model +pipe.model = model +# 使用 pipeline 推理 +result_zh = pipe('今天天气是真的') +print(result_zh) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/run_train_lora.sh b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/run_train_lora.sh new file mode 100644 index 0000000000000000000000000000000000000000..f362576f58b1f3373e8b3b17508bb1915bd2fccc --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/baichuan/run_train_lora.sh @@ -0,0 +1,31 @@ +export PYTHONPATH=$PYTHONPATH:./ +torchrun examples/pytorch/baichuan/finetune_baichuan.py \ + --trainer 'text-generation-trainer' \ + --work_dir './tmp' \ + --model 'baichuan-inc/baichuan-7B' \ + --train_dataset_name 'chinese-poetry-collection' \ + --val_dataset_name 'chinese-poetry-collection' \ + --train_split 'train' \ + --val_split 'test' \ + --src_txt 'text1' \ + --tgt_txt 'text2' \ + --sequence_length 128 \ + --max_epochs 2 \ + --per_device_train_batch_size 8 \ + 
--per_device_eval_batch_size 32 \ + --train_data_worker 0 \ + --eval_data_worker 0 \ + --optimizer 'AdamW' \ + --lr 2e-5 \ + --lr_scheduler 'CosineAnnealingLR' \ + --eval_strategy 'by_epoch' \ + --bf16 1 \ + --use_lora 1 \ + --use_model_config 1 \ + --eval_metrics 'ppl' \ + --T_max 1 \ + --save_strategy no \ + --save_best true \ + --metric_for_best_model ppl \ + --metric_rule_for_best_model min \ + --device_map 'auto' \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/__init__.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/chatglm_trainer.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/chatglm_trainer.py new file mode 100644 index 0000000000000000000000000000000000000000..84167713426093803ce211fab40689f00bcb9053 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/chatglm_trainer.py @@ -0,0 +1,122 @@ +from typing import Any, Dict, Union + +import numpy as np +import torch +from transformers.deepspeed import is_deepspeed_zero3_enabled + +from modelscope import EpochBasedTrainer, get_logger + +logger = get_logger() + + +class Seq2SeqTrainer(EpochBasedTrainer): + + def _decode(self, tokens, ignore_pad_token_for_loss=False): + tokens = tokens.cpu().numpy() + if ignore_pad_token_for_loss: + tokens = np.where(tokens != -100, tokens, + self.tokenizer.pad_token_id) + tokens = np.where(tokens < self.tokenizer.vocab_size, tokens, + self.tokenizer.pad_token_id) + return [ + t for t in self.tokenizer.batch_decode( + tokens, skip_special_tokens=True) if t != '' + ] + + def evaluation_step( + self, + inputs: Dict[str, Union[torch.Tensor, Any]], + ): + has_labels = 'labels' in inputs + # XXX: adapt synced_gpus for fairscale as well + gen_kwargs = self.cfg['gen_kwargs'] + if gen_kwargs.get('max_length') is None and gen_kwargs.get( + 'max_new_tokens') is 
None: + gen_kwargs['max_length'] = self.model.config.max_length + gen_kwargs['num_beams'] = ( + gen_kwargs['num_beams'] if gen_kwargs.get('num_beams') is not None + else self.model.config.num_beams) + default_synced_gpus = True if is_deepspeed_zero3_enabled() else False + gen_kwargs['synced_gpus'] = ( + gen_kwargs['synced_gpus'] if gen_kwargs.get('synced_gpus') + is not None else default_synced_gpus) + + if 'attention_mask' in inputs: + gen_kwargs['attention_mask'] = inputs.get('attention_mask', None) + if 'position_ids' in inputs: + gen_kwargs['position_ids'] = inputs.get('position_ids', None) + if 'global_attention_mask' in inputs: + gen_kwargs['global_attention_mask'] = inputs.get( + 'global_attention_mask', None) + + # prepare generation inputs + # some encoder-decoder models can have varying encoder's and thus + # varying model input names + if hasattr( + self.model, 'encoder' + ) and self.model.encoder.main_input_name != self.model.main_input_name: + generation_inputs = inputs[self.model.encoder.main_input_name] + else: + generation_inputs = inputs[self.model.main_input_name] + + gen_kwargs['input_ids'] = generation_inputs + gen_kwargs['pad_token_id'] = self.tokenizer.pad_token_id + self.model.eval() + with torch.no_grad(): + generated_tokens = self.model.generate(**gen_kwargs) + generated_tokens = generated_tokens[:, generation_inputs.size()[-1]:] + + # in case the batch is shorter than max length, the output should be padded + if gen_kwargs.get('max_length') is not None and generated_tokens.shape[ + -1] < gen_kwargs['max_length']: + generated_tokens = self._pad_tensors_to_max_len( + generated_tokens, gen_kwargs['max_length']) + elif gen_kwargs.get('max_new_tokens' + ) is not None and generated_tokens.shape[-1] < ( + gen_kwargs['max_new_tokens'] + 1): + generated_tokens = self._pad_tensors_to_max_len( + generated_tokens, gen_kwargs['max_new_tokens'] + 1) + + if has_labels: + labels = inputs['labels'] + if gen_kwargs.get('max_length') is not None and 
labels.shape[ + -1] < gen_kwargs['max_length']: + labels = self._pad_tensors_to_max_len(labels, + gen_kwargs['max_length']) + elif gen_kwargs.get( + 'max_new_tokens') is not None and labels.shape[-1] < ( + gen_kwargs['max_new_tokens'] + 1): + labels = self._pad_tensors_to_max_len( + labels, (gen_kwargs['max_new_tokens'] + 1)) + else: + labels = None + + generated_tokens = [ + ''.join(self._decode(seq, False)) for seq in generated_tokens + ] + inputs['tgts'] = [''.join(self._decode(seq, True)) for seq in labels] + return { + 'preds': generated_tokens, + } + + def _pad_tensors_to_max_len(self, tensor, max_length): + if self.tokenizer is not None and hasattr(self.tokenizer, + 'pad_token_id'): + # If PAD token is not defined at least EOS token has to be defined + pad_token_id = ( + self.tokenizer.pad_token_id if self.tokenizer.pad_token_id + is not None else self.tokenizer.eos_token_id) + else: + if self.model.config.pad_token_id is not None: + pad_token_id = self.model.config.pad_token_id + else: + raise ValueError( + 'Pad_token_id must be set in the configuration of the model, in order to pad tensors' + ) + + padded_tensor = pad_token_id * torch.ones( + (tensor.shape[0], max_length), + dtype=tensor.dtype, + device=tensor.device) + padded_tensor[:, :tensor.shape[-1]] = tensor + return padded_tensor diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/finetune.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/finetune.py new file mode 100644 index 0000000000000000000000000000000000000000..1f419770dc11224aa8a85ffd754c1655fd9be737 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/finetune.py @@ -0,0 +1,404 @@ +import os +from dataclasses import dataclass, field + +import numpy as np +import torch +from chatglm_trainer import Seq2SeqTrainer +from swift import LoRAConfig, Swift +from text_generation_metric import TextGenerationMetric +from transformers import DataCollatorForSeq2Seq + +from modelscope import build_dataset_from_file, 
snapshot_download +from modelscope.metainfo import Models +from modelscope.models import Model +from modelscope.msdatasets import MsDataset +from modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.config import ConfigDict +from modelscope.utils.hub import read_config + + +@dataclass(init=False) +class Chatglm6bArguments(TrainingArgs): + ptuning_checkpoint: str = field( + default=None, + metadata={ + 'help': 'The p-tuning checkpoint previously trained.', + }) + + pre_seq_len: int = field( + default=None, metadata={ + 'help': 'The p-tuning sequence length', + }) + + prefix_projection: bool = field( + default=False, metadata={ + 'help': '', + }) + + quantization_bit: int = field( + default=None, metadata={ + 'help': 'Quantized bit', + }) + + prompt_column: str = field( + default=None, + metadata={ + 'help': + 'The name of the column in the datasets containing the full texts (for summarization).' + }, + ) + + response_column: str = field( + default=None, + metadata={ + 'help': + 'The name of the column in the datasets containing the summaries (for summarization).' + }, + ) + + history_column: str = field( + default=None, + metadata={ + 'help': + 'The name of the column in the datasets containing the history of chat.' + }, + ) + + source_prefix: str = field( + default='', + metadata={ + 'help': + 'A prefix to add before every source text (useful for T5 models).' + }) + + ignore_pad_token_for_loss: bool = field( + default=True, + metadata={ + 'help': + 'Whether to ignore the tokens corresponding to padded labels in the loss computation or not.' + }, + ) + + max_source_length: int = field( + default=1024, + metadata={ + 'help': + ('The maximum total input sequence length after tokenization. Sequences longer ' + 'than this will be truncated, sequences shorter will be padded.') + }, + ) + + max_target_length: int = field( + default=128, + metadata={ + 'help': + ('The maximum total sequence length for target text after tokenization. 
Sequences longer ' + 'than this will be truncated, sequences shorter will be padded.') + }, + ) + + max_train_samples: int = field( + default=None, + metadata={ + 'help': + ('For debugging purposes or quicker training, truncate the number of training examples to this ' + 'value if set.') + }, + ) + + max_eval_samples: int = field( + default=None, + metadata={ + 'help': + ('For debugging purposes or quicker training, truncate the number of evaluation examples to this ' + 'value if set.') + }, + ) + + preprocessing_num_workers: int = field( + default=None, + metadata={ + 'help': 'The number of processes to use for the preprocessing.' + }, + ) + + use_lora: int = field( + default=0, + metadata={'help': 'Whether to use lora to train the model.'}, + ) + + lora_rank: int = field( + default=32, + metadata={'help': 'The lora rank'}, + ) + + lora_alpha: int = field( + default=32, + metadata={'help': 'The lora alpha'}, + ) + + lora_dropout: float = field( + default=0.05, + metadata={'help': 'The lora alpha'}, + ) + + use_amp: int = field( + default=0, + metadata={ + 'help': + 'Whether to use amp(automatic mixed precision) to train the model.' + }, + ) + + +args = Chatglm6bArguments(eval_metrics='chatglm').parse_cli() +print(args) +config, _ = args.to_config(ignore_default_config=args.use_model_config) +config.dump('./configuration.json') + +if config['model']['type'] == 'chatglm6b': + from modelscope.models.nlp import ChatGLMTokenizer +else: + from modelscope.models.nlp import ChatGLM2Tokenizer as ChatGLMTokenizer + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + if args.use_amp: + if not getattr(cfg.train, 'hooks', None): + cfg.train.hooks = [] + cfg.train.hooks.append({ + 'type': 'TorchAMPOptimizerHook', + # Optional loss_scale parameter here. 
+ }) + if cfg.train.lr_scheduler.type == 'LinearLR': + cfg.train.lr_scheduler['total_iters'] = \ + int(len(train_dataset) / cfg.train.dataloader.batch_size_per_gpu) * cfg.train.max_epochs + cfg['gen_kwargs'] = { + 'do_sample': True, + 'top_p': 0.7, + 'max_length': 512, + 'temperature': 0.95 + } + return cfg + + +if args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace).to_hf_dataset() + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace).to_hf_dataset() +else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) + +model_dir = snapshot_download(args.model) +model_config = read_config(model_dir) +model_config['model'] = ConfigDict({ + 'type': config['model']['type'], +}) + +model_config['model']['pre_seq_len'] = args.pre_seq_len +model_config['model']['prefix_projection'] = args.prefix_projection +tokenizer = ChatGLMTokenizer.from_pretrained(model_dir, trust_remote_code=True) + +device_map_kwargs = {} +if args.use_lora != 0 and torch.cuda.device_count() > 1: + device_map_kwargs['device_map'] = 'auto' +model = Model.from_pretrained( + model_dir, cfg_dict=model_config, **device_map_kwargs) + +if args.ptuning_checkpoint is not None: + # Evaluation + # Loading extra state dict of prefix encoder + + prefix_state_dict = torch.load( + os.path.join(args.ptuning_checkpoint, 'pytorch_model.bin')) + new_prefix_state_dict = {} + for k, v in prefix_state_dict.items(): + if k.startswith('transformer.prefix_encoder.'): + new_prefix_state_dict[k[len('transformer.prefix_encoder.'):]] = v + model.transformer.prefix_encoder.load_state_dict(new_prefix_state_dict) + +if args.quantization_bit is not None: + print(f'Quantized to {args.quantization_bit} bit') + model = 
model.quantize(args.quantization_bit) +if args.pre_seq_len is not None: + # P-tuning v2 + model = model.half() + model.transformer.prefix_encoder.float() +elif not args.use_lora: + # Finetune + model = model.float() + +if args.use_lora != 0: + lora_config = LoRAConfig( + target_modules=['attention.query_key_value'], + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout) + if args.use_amp: + model = model.float() + else: + model = model.bfloat16() + model = Swift.prepare_model(model, lora_config) + +prefix = args.source_prefix if args.source_prefix is not None else '' + +# Get the column names for input/target. +prompt_column = args.prompt_column +response_column = args.response_column +history_column = args.history_column + +# Temporarily set max_target_length for training. +max_target_length = args.max_target_length + +model_parameters = filter(lambda p: p.requires_grad, model.parameters()) +trainable_params = sum([np.prod(p.size()) for p in model_parameters]) + +model_parameters = filter(lambda p: not p.requires_grad, model.parameters()) +non_trainable_params = sum([np.prod(p.size()) for p in model_parameters]) + +print('trainable_params:{} ({:.2f}%), non_trainable_params:{}'.format( + trainable_params, trainable_params / non_trainable_params * 100, + non_trainable_params)) + + +def preprocess_function_eval(examples): + inputs, targets = [], [] + for i in range(len(examples[prompt_column])): + if examples[prompt_column][i] and examples[response_column][i]: + query = examples[prompt_column][i] + if history_column is None or len(examples[history_column][i]) == 0: + prompt = query + else: + prompt = '' + history = examples[history_column][i] + for turn_idx, (old_query, response) in enumerate(history): + prompt += '[Round {}]\n问:{}\n答:{}\n'.format( + turn_idx, old_query, response) + prompt += '[Round {}]\n问:{}\n答:'.format(len(history), query) + inputs.append(prompt) + targets.append(examples[response_column][i]) + + inputs = [prefix + inp 
for inp in inputs] + model_inputs = tokenizer( + inputs, + max_length=args.max_source_length, + truncation=True, + padding=True) + labels = tokenizer( + text_target=targets, max_length=max_target_length, truncation=True) + + if args.ignore_pad_token_for_loss: + labels['input_ids'] = [[(lb if lb != tokenizer.pad_token_id else -100) + for lb in label] + for label in labels['input_ids']] + model_inputs['labels'] = labels['input_ids'] + + return model_inputs + + +def preprocess_function_train(examples): + max_seq_length = args.max_source_length + args.max_target_length + + model_inputs = { + 'input_ids': [], + 'labels': [], + } + for i in range(len(examples[prompt_column])): + if examples[prompt_column][i] and examples[response_column][i]: + query, answer = examples[prompt_column][i], examples[ + response_column][i] + + if history_column is None: + prompt = query + else: + prompt = '' + history = examples[history_column][i] + for turn_idx, (old_query, response) in enumerate(history): + prompt += '[Round {}]\n问:{}\n答:{}\n'.format( + turn_idx, old_query, response) + prompt += '[Round {}]\n问:{}\n答:'.format(len(history), query) + + prompt = prefix + prompt + a_ids = tokenizer.encode(text=prompt, add_special_tokens=False) + b_ids = tokenizer.encode(text=answer, add_special_tokens=False) + + if len(a_ids) > args.max_source_length - 1: + a_ids = a_ids[:args.max_source_length - 1] + + if len(b_ids) > args.max_target_length - 2: + b_ids = b_ids[:args.max_target_length - 2] + + input_ids = tokenizer.build_inputs_with_special_tokens( + a_ids, b_ids) + + if config['model']['type'] == 'chatglm6b': + context_length = input_ids.index(tokenizer.bos_token_id) + else: + context_length = len(a_ids) + 2 + mask_position = context_length - 1 + labels = [-100] * context_length + input_ids[mask_position + 1:] + + pad_len = max_seq_length - len(input_ids) + input_ids = input_ids + [tokenizer.pad_token_id] * pad_len + labels = labels + [tokenizer.pad_token_id] * pad_len + if 
args.ignore_pad_token_for_loss: + labels = [(lb if lb != tokenizer.pad_token_id else -100) + for lb in labels] + + model_inputs['input_ids'].append(input_ids) + model_inputs['labels'].append(labels) + + return model_inputs + + +train_dataset = train_dataset.map( + preprocess_function_train, + batched=True, + num_proc=args.preprocessing_num_workers, + desc='Running tokenizer on train dataset', +) + +validation_dataset = validation_dataset.map( + preprocess_function_eval, + batched=True, + num_proc=args.preprocessing_num_workers, + desc='Running tokenizer on eval dataset', +) + +# Data collator +label_pad_token_id = -100 if args.ignore_pad_token_for_loss else tokenizer.pad_token_id +data_collator = DataCollatorForSeq2Seq( + tokenizer, + model=model, + label_pad_token_id=label_pad_token_id, + pad_to_multiple_of=None, + padding=False) + +model.gradient_checkpointing_enable() +model.enable_input_require_grads() + +# import torch +# model = torch.nn.DataParallel(model).cuda() +trainer = Seq2SeqTrainer( + model=model, + cfg_file='./configuration.json', + train_dataset=train_dataset, + eval_dataset=validation_dataset, + seed=args.seed, + data_collator=data_collator, + remove_unused_data=True, + cfg_modify_fn=cfg_modify_fn) +trainer.tokenizer = tokenizer +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..2975a5da4279df0f458f5d8862ec6c5c2bf40313 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference.py @@ -0,0 +1,37 @@ +import os.path as osp + +import torch +from swift import LoRAConfig, Swift + +from modelscope import Model, pipeline, read_config +from modelscope.metainfo import Models +from modelscope.utils.config import ConfigDict + +lora_config = LoRAConfig( + target_modules=['attention.query_key_value'], + r=32, + lora_alpha=32, + lora_dropout=0.05) + 
+model_dir = 'ZhipuAI/ChatGLM-6B' +model_config = read_config(model_dir) +model_config['model'] = ConfigDict({ + 'type': Models.chatglm_6b, +}) + +model = Model.from_pretrained(model_dir, cfg_dict=model_config) +model = model.bfloat16() +model = Swift.prepare_model(model, lora_config) +work_dir = './tmp' +state_dict = torch.load(osp.join(work_dir, 'iter_600.pth')) +model = Swift.from_pretrained( + model, osp.join(work_dir, 'output_best'), device_map='auto') +model.load_state_dict(state_dict) +pipe = pipeline('chat', model, pipeline_name='chatglm6b-text-generation') + +print( + pipe({ + 'text': + '纵使进入21世纪后,我国教育水平有了明显进步,高考的难度却依旧不容小觑,高考被中国学生和家长定义为改变命运、改写人生脑重要考试,为了这场考试,学生和家长都付出了很多。', + 'history': [] + })) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference_v2.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..9be481f10cbf246b8d104d610d5fec0255902e51 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/lora_inference_v2.py @@ -0,0 +1,38 @@ +import os.path as osp + +import torch +from swift import LoRAConfig, Swift + +from modelscope import Model, pipeline, read_config +from modelscope.metainfo import Models +from modelscope.utils.config import ConfigDict + +lora_config = LoRAConfig( + target_modules=['attention.query_key_value'], + r=32, + lora_alpha=32, + lora_dropout=0.05) + +model_dir = 'ZhipuAI/chatglm2-6b' +model_config = read_config(model_dir) +model_config['model'] = ConfigDict({ + 'type': Models.chatglm2_6b, +}) + +model = Model.from_pretrained(model_dir, cfg_dict=model_config) +model = model.bfloat16() +model = Swift.prepare_model(model, lora_config) +work_dir = './tmp' +state_dict = torch.load(osp.join(work_dir, 'iter_600.pth')) +model = Swift.from_pretrained( + model, osp.join(work_dir, 'output_best'), device_map='auto') +model.load_state_dict(state_dict) + +pipe = pipeline('chat', model, 
pipeline_name='chatglm2_6b-text-generation') + +print( + pipe({ + 'text': + '纵使进入21世纪后,我国教育水平有了明显进步,高考的难度却依旧不容小觑,高考被中国学生和家长定义为改变命运、改写人生脑重要考试,为了这场考试,学生和家长都付出了很多。', + 'history': [] + })) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/ptuning_inference.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/ptuning_inference.py new file mode 100644 index 0000000000000000000000000000000000000000..ab32bec085afebc6fa29762868b6414309816279 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/ptuning_inference.py @@ -0,0 +1,34 @@ +import torch + +from modelscope import Model, pipeline, read_config +from modelscope.metainfo import Models +from modelscope.utils.config import ConfigDict + +model_dir = 'ZhipuAI/ChatGLM-6B' +model_config = read_config(model_dir) +model_config['model'] = ConfigDict({ + 'type': Models.chatglm_6b, + 'pre_seq_len': 128, + 'prefix_projection': False, +}) + +model = Model.from_pretrained(model_dir, cfg_dict=model_config) +model = model.half() +model.transformer.prefix_encoder.float() +prefix_state_dict = torch.load('./ptuning_dureader_target/iter_900.pth') +new_prefix_state_dict = {} +for k, v in prefix_state_dict.items(): + if k.startswith('transformer.prefix_encoder.'): + new_prefix_state_dict[k[len('transformer.prefix_encoder.'):]] = v +model.transformer.prefix_encoder.load_state_dict(new_prefix_state_dict) + +pipe = pipeline('chat', model) + +print( + pipe({ + 'text': + '维生素C也叫抗坏血酸,所以它最重要的一个作用是预防坏血病。另外,维生素C在控制感染和愈合伤口方面发挥作用,是一种强大的抗氧化剂,' + '可以中和有害的自由基。维生素C还是合成胶原蛋白的重要营养成分,胶原蛋白是结缔组织中的一种纤维蛋白,它存在于身体的各个系统中:' + '神经系统、免疫系统、骨骼系统、软骨系统、血液系统和其他系统。维生素C有助于产生作用于大脑和神经的多种激素和化学信使。', + 'history': [] + })) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_lora_dureader_v2.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_lora_dureader_v2.sh new file mode 100644 index 0000000000000000000000000000000000000000..d24494cc1629f71d87feba557e21ae345e2625f1 --- /dev/null +++ 
b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_lora_dureader_v2.sh @@ -0,0 +1,28 @@ +LR=5e-5 + +PYTHONPATH=. python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name modelscope/DuReader_robust-QG \ + --val_dataset_name modelscope/DuReader_robust-QG \ + --train_subset_name default \ + --val_subset_name default \ + --train_split train \ + --val_split validation \ + --prompt_column text1 \ + --response_column text2 \ + --model "ZhipuAI/chatglm2-6b" \ + --max_source_length 64 \ + --max_target_length 64 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 2 \ + --save_strategy 'by_step' \ + --save_interval 300 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 300 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm2-6b' \ + --use_lora 1 \ + --work_dir lora_dureader_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_ptuning_adv_v2.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_ptuning_adv_v2.sh new file mode 100644 index 0000000000000000000000000000000000000000..582c464cb9e187c771f73f9ce5b1e793c0e771df --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_chatglm2_ptuning_adv_v2.sh @@ -0,0 +1,26 @@ +PRE_SEQ_LEN=128 +LR=2e-2 + +PYTHONPATH=. 
python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name AdvertiseGen/train.json \ + --val_dataset_name AdvertiseGen/dev.json \ + --prompt_column content \ + --response_column summary \ + --model "ZhipuAI/chatglm2-6b" \ + --max_source_length 64 \ + --max_target_length 128 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 1 \ + --save_strategy 'by_step' \ + --save_interval 1000 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 1000 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm2-6b' \ + --pre_seq_len $PRE_SEQ_LEN \ + --quantization_bit 4 \ + --work_dir ptuning_adv_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_adv.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_adv.sh new file mode 100644 index 0000000000000000000000000000000000000000..cb6a7856f569a23084adbb0b8e719ac8fcb1788b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_adv.sh @@ -0,0 +1,24 @@ +LR=5e-5 + +PYTHONPATH=. 
python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name AdvertiseGen/train.json \ + --val_dataset_name AdvertiseGen/dev.json \ + --prompt_column content \ + --response_column summary \ + --model "ZhipuAI/ChatGLM-6B" \ + --max_source_length 64 \ + --max_target_length 64 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 1 \ + --save_strategy 'by_step' \ + --save_interval 1000 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 1000 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm6b' \ + --use_lora 1 \ + --work_dir lora_adv_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_dureader.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_dureader.sh new file mode 100644 index 0000000000000000000000000000000000000000..26cbce15b709497559d80f0f28fc454988d8dc67 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_lora_dureader.sh @@ -0,0 +1,28 @@ +LR=5e-5 + +PYTHONPATH=. 
python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name modelscope/DuReader_robust-QG \ + --val_dataset_name modelscope/DuReader_robust-QG \ + --train_subset_name default \ + --val_subset_name default \ + --train_split train \ + --val_split validation \ + --prompt_column text1 \ + --response_column text2 \ + --model "ZhipuAI/ChatGLM-6B" \ + --max_source_length 64 \ + --max_target_length 64 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 2 \ + --save_strategy 'by_step' \ + --save_interval 300 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 300 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm6b' \ + --use_lora 1 \ + --work_dir lora_dureader_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_adv.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_adv.sh new file mode 100644 index 0000000000000000000000000000000000000000..667c0c96d67510f8da3abf9ab7ff7caed6942a29 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_adv.sh @@ -0,0 +1,26 @@ +PRE_SEQ_LEN=128 +LR=2e-2 + +PYTHONPATH=. 
python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name AdvertiseGen/train.json \ + --val_dataset_name AdvertiseGen/dev.json \ + --prompt_column content \ + --response_column summary \ + --model "ZhipuAI/ChatGLM-6B" \ + --max_source_length 64 \ + --max_target_length 64 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 1 \ + --save_strategy 'by_step' \ + --save_interval 1000 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 1000 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm6b' \ + --pre_seq_len $PRE_SEQ_LEN \ + --quantization_bit 4 \ + --work_dir ptuning_adv_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_dureader.sh b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_dureader.sh new file mode 100644 index 0000000000000000000000000000000000000000..d36ad50ab97b9e6899094ae28734c655b6d151d4 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/run_train_ptuning_dureader.sh @@ -0,0 +1,30 @@ +PRE_SEQ_LEN=128 +LR=2e-2 + +PYTHONPATH=. 
python examples/pytorch/chatglm6b/finetune.py \ + --train_dataset_name modelscope/DuReader_robust-QG \ + --val_dataset_name modelscope/DuReader_robust-QG \ + --train_subset_name default \ + --val_subset_name default \ + --train_split train \ + --val_split validation \ + --prompt_column text1 \ + --response_column text2 \ + --model "ZhipuAI/ChatGLM-6B" \ + --max_source_length 64 \ + --max_target_length 64 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 1 \ + --train.optimizer.options.cumulative_iters 1 \ + --max_epochs 3 \ + --save_strategy 'by_step' \ + --save_interval 300 \ + --lr $LR \ + --eval_strategy "by_step" \ + --eval_interval 300 \ + --lr_strategy 'by_step' \ + --task 'chat' \ + --model.type 'chatglm6b' \ + --pre_seq_len $PRE_SEQ_LEN \ + --quantization_bit 4 \ + --work_dir ptuning_dureader_target \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/text_generation_metric.py b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/text_generation_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..536bbe06daa7205e50369b8d7644938fa4b97fc5 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/chatglm6b/text_generation_metric.py @@ -0,0 +1,85 @@ +# Copyright (c) Alibaba, Inc. and its affiliates. 
+ +from typing import Dict, Iterable, List + +import jieba +import numpy as np +from nltk.translate.bleu_score import (SmoothingFunction, corpus_bleu, + sentence_bleu) +from rouge import Rouge + +from modelscope.metainfo import Metrics +from modelscope.metrics.base import Metric +from modelscope.metrics.builder import METRICS, MetricKeys +from modelscope.utils.chinese_utils import rebuild_chinese_str +from modelscope.utils.registry import default_group + + +@METRICS.register_module(group_key=default_group, module_name='chatglm') +class TextGenerationMetric(Metric): + + def __init__(self, target_text='tgts', pred_text='preds'): + self.preds: List[str] = [] + self.tgts: List[str] = [] + self.rouge = Rouge() + self.target_text = target_text + self.pred_text = pred_text + + def add(self, outputs: Dict[str, List[str]], inputs: Dict[str, List[str]]): + ground_truths = inputs[self.target_text] + eval_results = outputs[self.pred_text] + for truth in ground_truths: + self.tgts.append(truth) + for result in eval_results: + self.preds.append(result) + + def _check(self, pred: str, tgt: str) -> bool: + + def remove_useless(string: str) -> str: + return string.replace(' ', '').replace('.', '') + + return len(remove_useless(pred)) != 0 and len(remove_useless(tgt)) != 0 + + def evaluate(self): + preds, labels = self.preds, self.tgts + if isinstance(preds, tuple): + preds = preds[0] + + score_dict = { + 'rouge-1': [], + 'rouge-2': [], + 'rouge-l': [], + 'bleu-4': [] + } + for pred, label in zip(preds, labels): + hypothesis = list(jieba.cut(pred)) + if len(hypothesis) == 0 or ''.join(hypothesis) == '.': + hypothesis = [''] + reference = list(jieba.cut(label)) + rouge = Rouge() + scores = rouge.get_scores(' '.join(hypothesis), + ' '.join(reference)) + result = scores[0] + + for k, v in result.items(): + score_dict[k].append(round(v['f'] * 100, 4)) + bleu_score = sentence_bleu( + [list(label)], + list(pred), + smoothing_function=SmoothingFunction().method3) + 
score_dict['bleu-4'].append(round(bleu_score * 100, 4)) + + for k, v in score_dict.items(): + score_dict[k] = float(np.mean(v)) + return score_dict + + def merge(self, other: 'TextGenerationMetric'): + self.preds.extend(other.preds) + self.tgts.extend(other.tgts) + + def __getstate__(self): + return self.preds, self.tgts + + def __setstate__(self, state): + self.__init__() + self.preds, self.tgts = state diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/finetune_human_detection.py b/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/finetune_human_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..dadcbbde190e46dbbfe8f811faa785bcdf70da4e --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/finetune_human_detection.py @@ -0,0 +1,64 @@ +import os.path as osp +from argparse import ArgumentParser + +from modelscope.metainfo import Trainers +from modelscope.msdatasets import MsDataset +from modelscope.trainers import build_trainer +from modelscope.utils.constant import DownloadMode + +parser = ArgumentParser() +parser.add_argument('--dataset_name', type=str, help='The dataset name') +parser.add_argument('--namespace', type=str, help='The dataset namespace') +parser.add_argument('--model', type=str, help='The model id or model dir') +parser.add_argument( + '--num_classes', type=int, help='The num_classes in the dataset') +parser.add_argument('--batch_size', type=int, help='The training batch size') +parser.add_argument('--max_epochs', type=int, help='The training max epochs') +parser.add_argument( + '--base_lr_per_img', + type=float, + help='The base learning rate for per image') + +args = parser.parse_args() +print(args) + +# Step 1: 数据集准备,可以使用modelscope上已有的数据集,也可以自己在本地构建COCO数据集 +train_dataset = MsDataset.load( + args.dataset_name, + namespace=args.namespace, + split='train', + download_mode=DownloadMode.FORCE_REDOWNLOAD) +val_dataset = MsDataset.load( + args.dataset_name, + 
namespace=args.namespace, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + +# Step 2: 相关参数设置 +train_root_dir = train_dataset.config_kwargs['split_config']['train'] +val_root_dir = val_dataset.config_kwargs['split_config']['validation'] +train_img_dir = osp.join(train_root_dir, 'images') +val_img_dir = osp.join(val_root_dir, 'images') +train_anno_path = osp.join(train_root_dir, 'train.json') +val_anno_path = osp.join(val_root_dir, 'val.json') +kwargs = dict( + model=args.model, # 使用DAMO-YOLO-S模型 + gpu_ids=[ # 指定训练使用的gpu + 0, + ], + batch_size=args. + batch_size, # batch_size, 每个gpu上的图片数等于batch_size // len(gpu_ids) + max_epochs=args.max_epochs, # 总的训练epochs + num_classes=args.num_classes, # 自定义数据中的类别数 + load_pretrain=True, # 是否载入预训练模型,若为False,则为从头重新训练 + base_lr_per_img=args. + base_lr_per_img, # 每张图片的学习率,lr=base_lr_per_img*batch_size + train_image_dir=train_img_dir, # 训练图片路径 + val_image_dir=val_img_dir, # 测试图片路径 + train_ann=train_anno_path, # 训练标注文件路径 + val_ann=val_anno_path, # 测试标注文件路径 +) + +# Step 3: 开启训练任务 +trainer = build_trainer(name=Trainers.tinynas_damoyolo, default_args=kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..5b1c22e88c9f47236fc1d0a4973840f326cad202 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/human_detection/run_train.sh @@ -0,0 +1,8 @@ +PYTHONPATH=. 
python examples/pytorch/human_detection/finetune_human_detection.py \ + --dataset_name "person_detection_for_train" \ + --namespace "modelscope" \ + --model "damo/cv_tinynas_human-detection_damoyolo" \ + --num_classes 1 \ + --batch_size 2 \ + --max_epochs 3 \ + --base_lr_per_img 0.001 diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/finetune_image_classification.py b/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/finetune_image_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..e5bb9bdd5d1dce8f3d853b9e7d94c6965a0e89d3 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/finetune_image_classification.py @@ -0,0 +1,90 @@ +import os +from dataclasses import dataclass, field + +from modelscope import MsDataset, TrainingArgs +from modelscope.metainfo import Trainers +from modelscope.trainers.builder import build_trainer + + +@dataclass(init=False) +class ImageClassificationTrainingArgs(TrainingArgs): + num_classes: int = field( + default=None, + metadata={ + 'cfg_node': [ + 'model.mm_model.head.num_classes', + 'model.mm_model.train_cfg.augments.0.num_classes', + 'model.mm_model.train_cfg.augments.1.num_classes' + ], + 'help': + 'number of classes', + }) + + topk: tuple = field( + default=None, + metadata={ + 'cfg_node': [ + 'train.evaluation.metric_options.topk', + 'evaluation.metric_options.topk' + ], + 'help': + 'evaluation using topk, tuple format, eg (1,), (1,5)', + }) + + warmup_iters: str = field( + default=None, + metadata={ + 'cfg_node': 'train.lr_config.warmup_iters', + 'help': 'The warmup iters', + }) + + +def create_dataset(name, split): + namespace, dataset_name = name.split('/') + return MsDataset.load( + dataset_name, namespace=namespace, subset_name='default', split=split) + + +training_args = ImageClassificationTrainingArgs( + model='damo/cv_vit-base_image-classification_ImageNet-labels', + max_epochs=1, + lr=1e-4, + optimizer='AdamW', + warmup_iters=1, 
+ topk=(1, )).parse_cli() +config, args = training_args.to_config() + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + return cfg + + +def train(): + train_dataset = create_dataset( + training_args.train_dataset_name, split=training_args.train_split) + val_dataset = create_dataset( + training_args.val_dataset_name, split=training_args.val_split) + + kwargs = dict( + model=args.model, # model id + train_dataset=train_dataset, # training dataset + eval_dataset=val_dataset, # validation dataset + cfg_modify_fn=cfg_modify_fn # callback to modify configuration + ) + + # in distributed training, specify pytorch launcher + if 'MASTER_ADDR' in os.environ: + kwargs['launcher'] = 'pytorch' + + trainer = build_trainer( + name=Trainers.image_classification, default_args=kwargs) + # start to train + trainer.train() + + +if __name__ == '__main__': + train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..ad560424295fa9ebe5728f48715cb45cc7166324 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/image_classification/run_train.sh @@ -0,0 +1,8 @@ +PYTHONPATH=. 
python -m torch.distributed.launch --nproc_per_node=2 \ + examples/pytorch/image_classification/finetune_image_classification.py \ + --num_classes 2 \ + --train_dataset_name 'tany0699/cats_and_dogs' \ + --val_dataset_name 'tany0699/cats_and_dogs' \ + --train_split train \ + --val_split validation \ + --use_model_config true \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/finetune_keyword_spotting.py b/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/finetune_keyword_spotting.py new file mode 100644 index 0000000000000000000000000000000000000000..acc71fa8188ea89b764e4e3c3acdd5d720603fa4 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/finetune_keyword_spotting.py @@ -0,0 +1,68 @@ +# coding = utf-8 + +import os +from argparse import ArgumentParser + +from modelscope.metainfo import Trainers +from modelscope.trainers import build_trainer +from modelscope.utils.hub import read_config + + +def main(): + parser = ArgumentParser() + parser.add_argument('--model', type=str, help='The model id or model dir') + parser.add_argument('--train_scp', type=str, help='The train scp file') + parser.add_argument('--cv_scp', type=str, help='The cv scp file') + parser.add_argument('--merge_trans', type=str, help='The merge trans file') + parser.add_argument('--keywords', type=str, help='The key words') + parser.add_argument('--work_dir', type=str, help='The work dir') + parser.add_argument('--test_scp', type=str, help='The test scp file') + parser.add_argument('--test_trans', type=str, help='The test trains file') + args = parser.parse_args() + print(args) + + # s1 + work_dir = args.work_dir + + # s2 + model_id = args.model + configs = read_config(model_id) + config_file = os.path.join(work_dir, 'config.json') + configs.dump(config_file) + + # s3 + kwargs = dict( + model=model_id, + work_dir=work_dir, + cfg_file=config_file, + ) + trainer = build_trainer( + Trainers.speech_kws_fsmn_char_ctc_nearfield, default_args=kwargs) + + 
# s4 + train_scp = args.train_scp + cv_scp = args.cv_scp + trans_file = args.merge_trans + kwargs = dict(train_data=train_scp, cv_data=cv_scp, trans_data=trans_file) + trainer.train(**kwargs) + + # s5 + keywords = args.keywords + test_dir = os.path.join(work_dir, 'test_dir') + test_scp = args.test_scp + trans_file = args.test_trans + rank = int(os.environ['RANK']) + if rank == 0: + kwargs = dict( + test_dir=test_dir, + test_data=test_scp, + trans_data=trans_file, + gpu=0, + keywords=keywords, + batch_size=args.batch_size, + ) + trainer.evaluate(None, None, **kwargs) + + +if __name__ == '__main__': + main() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..6d87f9bf13f2e29116b36ab9378df44575241617 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/keyword_spotting/run_train.sh @@ -0,0 +1,9 @@ +PYTHONPATH=. torchrun --standalone --nnodes=1 --nproc_per_node=2 examples/pytorch/keyword_spotting/finetune_keyword_spotting.py \ +--work_dir './test_kws_training' \ +--model 'damo/speech_charctc_kws_phone-xiaoyun' \ +--train_scp './example_kws/train_wav.scp' \ +--cv_scp './example_kws/cv_wav.scp' \ +--merge_trans './example_kws/merge_trans.txt' \ +--keywords '小云小云' \ +--test_scp './example_kws/test_wav.scp' \ +--test_trans './example_kws/test_trans.txt' diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llama/finetune_llama.py b/AI/modelscope/1.10.0/8/examples/pytorch/llama/finetune_llama.py new file mode 100644 index 0000000000000000000000000000000000000000..639e8072b77646205dc2b38e34645de13ad59772 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llama/finetune_llama.py @@ -0,0 +1,401 @@ +# Copyright 2023 Rohan Taori, Ishaan Gulrajani, Tianyi Zhang, Yann Dubois, Xuechen Li +# Copyright (c) Alibaba, Inc. and its affiliates. 
+ +import copy +import logging +import os +from dataclasses import dataclass, field + +import json +import torch +from swift import LoRAConfig, Swift + +from modelscope import TrainingArgs, build_dataset_from_file +from modelscope.hub.snapshot_download import snapshot_download +from modelscope.metainfo import Trainers +from modelscope.models.nlp.llama import LlamaForTextGeneration, LlamaTokenizer +from modelscope.msdatasets import MsDataset +from modelscope.msdatasets.dataset_cls.custom_datasets.torch_custom_dataset import \ + TorchCustomDataset +from modelscope.trainers import build_trainer + +IGNORE_INDEX = -100 +DEFAULT_PAD_TOKEN = '[PAD]' +DEFAULT_EOS_TOKEN = '' +DEFAULT_BOS_TOKEN = '' +DEFAULT_UNK_TOKEN = '' +PROMPT_DICT = { + 'prompt_input': + ('Below is an instruction that describes a task, paired with an input that provides further context. ' + 'Write a response that appropriately completes the request.\n\n' + '### Instruction:\n{instruction}\n\n### Input:\n{input}\n\n### Response:' + ), + 'prompt_no_input': + ('Below is an instruction that describes a task. 
' + 'Write a response that appropriately completes the request.\n\n' + '### Instruction:\n{instruction}\n\n### Response:'), +} + + +@dataclass(init=False) +class TextGenerationArguments(TrainingArgs): + instruction: str = field( + default='instruction', + metadata={ + 'help': 'The instruction text key of dataset', + }) + + input: str = field( + default='input', metadata={ + 'help': 'The input text key of dataset', + }) + + output: str = field( + default='output', + metadata={ + 'help': 'The output text key of dataset', + }) + + src_txt: str = field( + default=None, + metadata={ + 'help': 'The source text key of preprocessor', + 'cfg_node': 'preprocessor.src_txt' + }) + + deepspeed: str = field( + default=None, + metadata={ + 'help': 'The location of DeepSpeed json config file.', + }) + + use_lora: int = field( + default=0, + metadata={'help': 'Whether to use lora to train the model.'}, + ) + + lora_rank: int = field( + default=32, + metadata={'help': 'The lora rank'}, + ) + + lora_alpha: int = field( + default=32, + metadata={'help': 'The lora alpha'}, + ) + + lora_dropout: float = field( + default=0.05, + metadata={'help': 'The lora dropout'}, + ) + + device_map: str = field( + default=None, + metadata={ + 'help': 'A map that specifies where each submodule should go.' 
+ }) + + zero_stage: int = field( + default=None, metadata={'help': 'The stage of zero_optimization'}) + + +def _tokenize_fn(strings, tokenizer): + """Tokenize a list of strings.""" + tokenized_list = [ + tokenizer( + text, + return_tensors='pt', + padding='longest', + max_length=tokenizer.model_max_length, + truncation=True, + ) for text in strings + ] + input_ids = labels = [ + tokenized.input_ids[0] for tokenized in tokenized_list + ] + input_ids_lens = labels_lens = [ + tokenized.input_ids.ne(tokenizer.pad_token_id).sum().item() + for tokenized in tokenized_list + ] + return dict( + input_ids=input_ids, + labels=labels, + input_ids_lens=input_ids_lens, + labels_lens=labels_lens, + ) + + +def preprocess(sources, targets, tokenizer): + """Preprocess the data by tokenizing.""" + examples = [s + t for s, t in zip(sources, targets)] + examples_tokenized, sources_tokenized = [ + _tokenize_fn(strings, tokenizer) for strings in (examples, sources) + ] + input_ids = examples_tokenized['input_ids'] + labels = copy.deepcopy(input_ids) + for label, source_len in zip(labels, sources_tokenized['input_ids_lens']): + label[:source_len] = IGNORE_INDEX + return dict(input_ids=input_ids, labels=labels) + + +def smart_tokenizer_and_embedding_resize(special_tokens_dict, tokenizer, + model): + """Resize tokenizer and embedding. + + Note: This is the unoptimized version that may make your embedding size not be divisible by 64. 
+ """ + num_new_tokens = tokenizer.add_special_tokens(special_tokens_dict) + model.resize_token_embeddings(len(tokenizer)) + + if num_new_tokens > 0: + input_embeddings = model.get_input_embeddings().weight.data + output_embeddings = model.get_output_embeddings().weight.data + + input_embeddings_avg = input_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + output_embeddings_avg = output_embeddings[:-num_new_tokens].mean( + dim=0, keepdim=True) + + input_embeddings[-num_new_tokens:] = input_embeddings_avg + output_embeddings[-num_new_tokens:] = output_embeddings_avg + + +class SupervisedDataset(TorchCustomDataset): + """Dataset for supervised fine-tuning.""" + + def __init__(self, list_data_dict, tokenizer): + logging.warning('Formatting inputs...') + prompt_input, prompt_no_input = PROMPT_DICT[ + 'prompt_input'], PROMPT_DICT['prompt_no_input'] + sources = [ + prompt_input.format_map(example) if example.get('input', '') != '' + else prompt_no_input.format_map(example) + for example in list_data_dict + ] + targets = [ + f"{example['output']}{tokenizer.eos_token}" + for example in list_data_dict + ] + + logging.warning('Tokenizing inputs... 
This may take some time...') + data_dict = preprocess(sources, targets, tokenizer) + + self.input_ids = data_dict['input_ids'] + self.labels = data_dict['labels'] + + def __len__(self): + return len(self.input_ids) + + def __getitem__(self, i): + if isinstance(i, int): + return dict(input_ids=self.input_ids[i], labels=self.labels[i]) + elif isinstance(i, slice): + return SliceSupervisedDataset(self.input_ids, self.labels, i) + else: + raise TypeError(f'Unsupported input type: {type(i)}') + + +class SliceSupervisedDataset(TorchCustomDataset): + + def __init__(self, input_ids, labels, slice_): + self.input_ids = input_ids[slice_] + self.labels = labels[slice_] + + def __len__(self): + return len(self.input_ids) + + def __getitem__(self, i): + return dict(input_ids=self.input_ids[i], labels=self.labels[i]) + + +@dataclass +class DataCollatorForSupervisedDataset(object): + """Collate examples for supervised fine-tuning.""" + + tokenizer: LlamaTokenizer + + def __call__(self, instances): + input_ids, labels = tuple([instance[key] for instance in instances] + for key in ('input_ids', 'labels')) + input_ids = torch.nn.utils.rnn.pad_sequence( + input_ids, + batch_first=True, + padding_value=self.tokenizer.pad_token_id) + labels = torch.nn.utils.rnn.pad_sequence( + labels, batch_first=True, padding_value=IGNORE_INDEX) + return dict( + input_ids=input_ids, + labels=labels, + attention_mask=input_ids.ne(self.tokenizer.pad_token_id), + ) + + +training_args = TextGenerationArguments().parse_cli() +config, args = training_args.to_config() +print(args) + +if __name__ == '__main__': + + def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'CosineAnnealingLR', + 'T_max': 1, + 'options': { + 'by_epoch': False + } + } + cfg.train.optimizer = { + 'type': 'AdamW', + 'lr': training_args.lr, + 'weight_decay': 0.0, + 'options': { + 'cumulative_iters': 8, + 'warmup': { + 'type': 'LinearWarmup', + 
'warmup_ratio': 0.03 + } + } + } + cfg.train.logging = { + 'interval': training_args.logging_interval, + 'by_epoch': False + } + cfg.train['bf16'] = True + cfg.train.dataloader = { + 'batch_size_per_gpu': training_args.per_device_train_batch_size, + 'workers_per_gpu': 1 + } + if 'hooks' not in cfg.train: + cfg.train['hooks'] = [] + if args.deepspeed is not None: + cfg.train.hooks.append({ + 'type': 'DeepspeedHook', + 'config': args.deepspeed, + 'save_zero_checkpoint': True, + 'with_mpu': False, + }) + if args.zero_stage is not None: + cfg.train.hooks[-1]['zero_stage'] = args.zero_stage + + cfg.preprocessor.sequence_length = 512 + return cfg + + model_path = args.model if os.path.exists( + args.model) else snapshot_download(args.model) + + dataset_mapping_dict = { + args.instruction: 'instruction', + args.input: 'input', + args.output: 'output' + } + if args.dataset_json_file is None: + if args.train_dataset_name is not None and args.val_dataset_name is not None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace).remap_columns( + dataset_mapping_dict) + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace).remap_columns( + dataset_mapping_dict) + elif args.train_dataset_name is not None and args.val_dataset_name is None: + ms_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace).remap_columns( + dataset_mapping_dict).train_test_split( + test_size=0.02, seed=args.seed) + train_dataset = ms_dataset['train'] + validation_dataset = ms_dataset['test'] + else: + data_path = training_args.src_txt if training_args.src_txt else os.path.join( + model_path, 'alpaca_data.json') + ms_dataset = MsDataset.load( + 'json', data_files=data_path).remap_columns( 
+ dataset_mapping_dict).train_test_split( + test_size=0.02, seed=args.seed) + train_dataset = ms_dataset['train'] + validation_dataset = ms_dataset['test'] + else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) + + model = LlamaForTextGeneration.from_pretrained( + model_path, device_map=args.device_map) + + if args.use_lora != 0: + lora_config = LoRAConfig( + target_modules=['q_proj', 'k_proj', 'v_proj'], + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout) + model = model.bfloat16() + model = Swift.prepare_model(model, lora_config) + + tokenizer = LlamaTokenizer.from_pretrained( + model_path, + model_max_length=512, + padding_side='right', + ) + + special_tokens_dict = dict() + if tokenizer.pad_token is None or tokenizer.pad_token == '': + special_tokens_dict['pad_token'] = DEFAULT_PAD_TOKEN + if tokenizer.eos_token is None or tokenizer.eos_token == '': + special_tokens_dict['eos_token'] = DEFAULT_EOS_TOKEN + if tokenizer.bos_token is None or tokenizer.bos_token == '': + special_tokens_dict['bos_token'] = DEFAULT_BOS_TOKEN + if tokenizer.unk_token is None or tokenizer.unk_token == '': + special_tokens_dict['unk_token'] = DEFAULT_UNK_TOKEN + + smart_tokenizer_and_embedding_resize( + special_tokens_dict=special_tokens_dict, + tokenizer=tokenizer, + model=model, + ) + + train_dataset = SupervisedDataset( + tokenizer=tokenizer, list_data_dict=train_dataset) + validation_dataset = SupervisedDataset( + tokenizer=tokenizer, list_data_dict=validation_dataset) + data_collator = DataCollatorForSupervisedDataset(tokenizer=tokenizer) + + kwargs = dict( + model=model, + cfg_file=os.path.join(model_path, 'configuration.json'), + train_dataset=train_dataset, + eval_dataset=validation_dataset, + data_collator=data_collator, + cfg_modify_fn=cfg_modify_fn) + + # Construct trainer and train + trainer = build_trainer( + name=Trainers.text_generation_trainer, default_args=kwargs) + trainer.train() + + # prepare 
for inference + if args.deepspeed and args.zero_stage is None and int( + os.environ.get('LOCAL_RANK', 0)) == 0: + work_dir = config.train.work_dir + tokenizer.save_pretrained(os.path.join(work_dir, 'output')) + os.system(f'rm {work_dir}/output/pytorch_model*') + os.system( + f'python3 {work_dir}/zero_to_fp32.py {work_dir} {work_dir}/output/pytorch_model.bin' + ) + os.system( + f'cp {model_path}/configuration.json {work_dir}/output/configuration.json' + ) + with open(f'{model_path}/config.json', 'r') as f: + config = json.load(f) + config['vocab_size'] = len(tokenizer) + with open(f'{work_dir}/output/config.json', 'w') as f: + json.dump(config, f) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_llama.sh b/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_llama.sh new file mode 100644 index 0000000000000000000000000000000000000000..292148ea010f542c7bb5bd2677a3306bacd26604 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_llama.sh @@ -0,0 +1,10 @@ +DATA_PARALLEL_SIZE=4 + + +export PYTHONPATH=$PYTHONPATH:./ +torchrun --nproc_per_node $DATA_PARALLEL_SIZE examples/pytorch/llama/finetune_llama.py \ + --work_dir './tmp' \ + --model 'skyline2006/llama-7b' \ + --deepspeed 'default_offload_opt_param.json' \ + --eval_interval 100 \ + --max_epochs 3 \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_lora.sh b/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_lora.sh new file mode 100644 index 0000000000000000000000000000000000000000..01aad29a37dde8b745f09b6e7e9b3ac45cfd46fa --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llama/run_train_lora.sh @@ -0,0 +1,23 @@ +export PYTHONPATH=$PYTHONPATH:./ +torchrun examples/pytorch/llama/finetune_llama.py \ + --work_dir './tmp' \ + --model 'skyline2006/llama-7b' \ + --train_dataset_name 'alpaca-gpt4-data-zh' \ + --train_subset_name 'default' \ + --train_split 'train' \ + --train_dataset_namespace 'AI-ModelScope' \ + --per_device_train_batch_size 4 \ 
+ --per_device_eval_batch_size 4 \ + --eval_strategy 'by_epoch' \ + --eval_interval 1 \ + --eval_metrics 'ppl' \ + --lr 2e-5 \ + --save_strategy no \ + --save_best true \ + --metric_for_best_model ppl \ + --metric_rule_for_best_model min \ + --use_lora 1 \ + --device_map 'auto' \ + --task 'text-generation' \ + --model.type 'llama' \ + --max_epochs 3 \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/README.md b/AI/modelscope/1.10.0/8/examples/pytorch/llm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..408e29866eb20c77d90187f933f2adeaf051d290 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/README.md @@ -0,0 +1,81 @@ +

LLM SFT Example

+ +

+ + + + +

+ +

+Modelscope Hub +
+ 中文  |  English +


## Note
1. This README.md file is **copied from** [ms-swift](https://github.com/modelscope/swift/tree/main/examples/pytorch/llm/README.md)
2. This directory has been **migrated** to [ms-swift](https://github.com/modelscope/swift/tree/main/examples/pytorch/llm), and the files in this directory are **no longer maintained**.

## Features
1. Supported SFT methods: [LoRA](https://arxiv.org/abs/2106.09685), [QLoRA](https://arxiv.org/abs/2305.14314), full (full-parameter fine-tuning), ...
2. Supported models: [**qwen-7b**](https://github.com/QwenLM/Qwen-7B), baichuan-7b, baichuan-13b, chatglm2-6b, chatglm2-6b-32k, llama2-7b, llama2-13b, llama2-70b, openbuddy-llama2-13b, openbuddy-llama-65b, polylm-13b, ...
3. Supported features: quantization, DDP, model parallelism (device map), gradient checkpointing, gradient accumulation, pushing to the ModelScope hub, custom datasets, ...
4. Supported datasets: alpaca-en (gpt4), alpaca-zh (gpt4), finance-en, multi-alpaca-all, code-en, instinwild-en, instinwild-zh, ...

## Prepare the Environment
Experimental environment: A10, 3090, A100, ... 
(V100 does not support bf16, quantization) +```bash +# Installing miniconda +wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh +sh Miniconda3-latest-Linux-x86_64.sh + +# Setting up a conda virtual environment +conda create --name ms-sft python=3.10 +conda activate ms-sft + +# Setting up a global pip mirror for faster downloads +pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/ + +pip install torch torchvision torchaudio -U +pip install sentencepiece charset_normalizer cpm_kernels tiktoken -U +pip install matplotlib scikit-learn tqdm tensorboard -U +pip install transformers datasets -U +pip install accelerate transformers_stream_generator -U + +pip install ms-swift modelscope -U +# Recommended installation from source code for faster bug fixes +git clone https://github.com/modelscope/swift.git +cd swift +pip install -r requirements.txt +pip install . +# same as modelscope...(git clone ...) +``` + +## Run SFT and Inference +```bash +# Clone the repository and enter the code directory. +git clone https://github.com/modelscope/swift.git +cd swift/examples/pytorch/llm + +# sft(qlora) and infer qwen-7b, Requires 16GB VRAM. +# If you want to use quantification, you need to `pip install bitsandbytes` +bash scripts/qwen_7b/qlora/sft.sh +# If you want to push the model to modelscope hub during training +bash scripts/qwen_7b/qlora/sft_push_to_hub.sh +bash scripts/qwen_7b/qlora/infer.sh + +# sft(qlora+ddp) and infer qwen-7b, Requires 4*16GB VRAM. +bash scripts/qwen_7b/qlora_ddp/sft.sh +bash scripts/qwen_7b/qlora_ddp/infer.sh + +# sft(full) and infer qwen-7b, Requires 95GB VRAM. +bash scripts/qwen_7b/full/sft.sh +bash scripts/qwen_7b/full/infer.sh + +# For more scripts, please see `scripts/` folder +``` + +## Extend Datasets +1. If you need to extend the model, you can modify the `MODEL_MAPPING` in `utils/models.py`. `model_id` can be specified as a local path. In this case, `revision` doesn't work. +2. 
If you need to extend or customize the dataset, you can modify the `DATASET_MAPPING` in `utils/datasets.py`. You need to customize the `get_*_dataset` function, which returns a dataset with two columns: `instruction`, `output`. diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/README_CN.md b/AI/modelscope/1.10.0/8/examples/pytorch/llm/README_CN.md new file mode 100644 index 0000000000000000000000000000000000000000..acbcb3d71c0a3f0b583d13aef33435389229581c --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/README_CN.md @@ -0,0 +1,83 @@ +

大模型微调的例子

+ +

+ + + + +

+ +

+魔搭社区 +
+ 中文  |  English +

+ +## 请注意 +1. 该README_CN.md**拷贝**自[ms-swift](https://github.com/modelscope/swift/tree/main/examples/pytorch/llm/README_CN.md) +2. 该目录已经**迁移**至[ms-swift](https://github.com/modelscope/swift/tree/main/examples/pytorch/llm), 此目录中的文件**不再维护**. + +## 特性 +1. [lora](https://arxiv.org/abs/2106.09685), [qlora](https://arxiv.org/abs/2305.14314), 全参数微调, ... +2. 支持的模型: [**qwen-7b**](https://github.com/QwenLM/Qwen-7B), baichuan-7b, baichuan-13b, chatglm2-6b, chatglm2-6b-32k, llama2-7b, llama2-13b, llama2-70b, openbuddy-llama2-13b, openbuddy-llama-65b, polylm-13b, ... +3. 支持的特性: 模型量化, DDP, 模型并行(device_map), gradient checkpoint, 梯度累加, 支持推送modelscope hub, 支持自定义数据集, ... +4. 支持的数据集: alpaca-en(gpt4), alpaca-zh(gpt4), finance-en, multi-alpaca-all, code-en, instinwild-en, instinwild-zh, ... + + +## 准备实验环境 +实验环境: A10, 3090, A100均可. (V100不支持bf16, 量化) +```bash +# 安装miniconda +wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh +# 一直[ENTER], 最后一个选项yes即可 +sh Miniconda3-latest-Linux-x86_64.sh + +# conda虚拟环境搭建 +conda create --name ms-sft python=3.10 +conda activate ms-sft + +# pip设置全局镜像与相关python包安装 +pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/ + +pip install torch torchvision torchaudio -U +pip install sentencepiece charset_normalizer cpm_kernels tiktoken -U +pip install matplotlib scikit-learn tqdm tensorboard -U +pip install transformers datasets -U +pip install accelerate transformers_stream_generator -U + +pip install ms-swift modelscope -U +# 推荐从源码安装swift和modelscope, 这具有更多的特性和更快的bug修复 +git clone https://github.com/modelscope/swift.git +cd swift +pip install -r requirements.txt +pip install . +# modelscope类似...(git clone ...) +``` + +## 微调和推理 +```bash +# clone仓库并进入代码目录 +git clone https://github.com/modelscope/swift.git +cd swift/examples/pytorch/llm + +# 微调(qlora)+推理 qwen-7b, 需要16GB显存. +# 如果你想要使用量化, 你需要`pip install bitsandbytes` +bash scripts/qwen_7b/qlora/sft.sh +# 如果你想在训练时, 将权重push到modelscope hub中. 
+bash scripts/qwen_7b/qlora/sft_push_to_hub.sh +bash scripts/qwen_7b/qlora/infer.sh + +# 微调(qlora+ddp)+推理 qwen-7b, 需要4卡*16GB显存. +bash scripts/qwen_7b/qlora_ddp/sft.sh +bash scripts/qwen_7b/qlora_ddp/infer.sh + +# 微调(full)+推理 qwen-7b, 需要95G显存. +bash scripts/qwen_7b/full/sft.sh +bash scripts/qwen_7b/full/infer.sh + +# 更多的scripts脚本, 可以看`scripts`文件夹 +``` + +## 拓展数据集 +1. 如果你想要拓展模型, 你可以修改`utils/models.py`文件中的`MODEL_MAPPING`. `model_id`可以指定为本地路径, 这种情况下, `revision`参数不起作用. +2. 如果你想要拓展或使用自定义数据集, 你可以修改`utils/datasets.py`文件中的`DATASET_MAPPING`. 你需要自定义`get_*_dataset`函数, 并返回包含`instruction`, `output`两列的数据集. diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_infer.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_infer.py new file mode 100644 index 0000000000000000000000000000000000000000..08ed0db8685930a2a6c4eb6519e8036567ff5c1a --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_infer.py @@ -0,0 +1,144 @@ +# ### Setting up experimental environment. +import os +# os.environ['CUDA_VISIBLE_DEVICES'] = '0,1' +import warnings +from dataclasses import dataclass, field +from functools import partial +from typing import List, Optional + +import torch +from swift import LoRAConfig, Swift +from transformers import GenerationConfig, TextStreamer +from utils import (DATASET_MAPPING, DEFAULT_PROMPT, MODEL_MAPPING, get_dataset, + get_model_tokenizer, inference, parse_args, process_dataset, + tokenize_function) + +from modelscope import get_logger + +warnings.warn( + 'This directory has been migrated to ' + 'https://github.com/modelscope/swift/tree/main/examples/pytorch/llm, ' + 'and the files in this directory are no longer maintained.', + DeprecationWarning) + +logger = get_logger() + + +@dataclass +class InferArguments: + model_type: str = field( + default='qwen-7b', metadata={'choices': list(MODEL_MAPPING.keys())}) + sft_type: str = field( + default='lora', metadata={'choices': ['lora', 'full']}) + ckpt_path: str = '/path/to/your/iter_xxx.pth' + eval_human: bool 
= False # False: eval test_dataset + ignore_args_error: bool = False # True: notebook compatibility + + dataset: str = field( + default='alpaca-en,alpaca-zh', + metadata={'help': f'dataset choices: {list(DATASET_MAPPING.keys())}'}) + dataset_seed: int = 42 + dataset_sample: int = 20000 # -1: all dataset + dataset_test_size: float = 0.01 + prompt: str = DEFAULT_PROMPT + max_length: Optional[int] = 2048 + + lora_target_modules: Optional[List[str]] = None + lora_rank: int = 8 + lora_alpha: int = 32 + lora_dropout_p: float = 0.1 + + max_new_tokens: int = 512 + temperature: float = 0.9 + top_k: int = 50 + top_p: float = 0.9 + + def __post_init__(self): + if self.lora_target_modules is None: + self.lora_target_modules = MODEL_MAPPING[ + self.model_type]['lora_TM'] + + if not os.path.isfile(self.ckpt_path): + raise ValueError( + f'Please enter a valid ckpt_path: {self.ckpt_path}') + + +def llm_infer(args: InferArguments) -> None: + # ### Loading Model and Tokenizer + support_bf16 = torch.cuda.is_bf16_supported() + if not support_bf16: + logger.warning(f'support_bf16: {support_bf16}') + + kwargs = {'low_cpu_mem_usage': True, 'device_map': 'auto'} + model, tokenizer, _ = get_model_tokenizer( + args.model_type, torch_dtype=torch.bfloat16, **kwargs) + + # ### Preparing lora + if args.sft_type == 'lora': + lora_config = LoRAConfig( + target_modules=args.lora_target_modules, + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout_p, + pretrained_weights=args.ckpt_path) + logger.info(f'lora_config: {lora_config}') + model = Swift.prepare_model(model, lora_config) + state_dict = torch.load(args.ckpt_path, map_location='cpu') + model.load_state_dict(state_dict) + elif args.sft_type == 'full': + state_dict = torch.load(args.ckpt_path, map_location='cpu') + model.load_state_dict(state_dict) + else: + raise ValueError(f'args.sft_type: {args.sft_type}') + + # ### Inference + tokenize_func = partial( + tokenize_function, + tokenizer=tokenizer, + 
prompt=args.prompt, + max_length=args.max_length) + streamer = TextStreamer( + tokenizer, skip_prompt=True, skip_special_tokens=True) + generation_config = GenerationConfig( + max_new_tokens=args.max_new_tokens, + temperature=args.temperature, + top_k=args.top_k, + top_p=args.top_p, + do_sample=True, + eos_token_id=tokenizer.eos_token_id, + pad_token_id=tokenizer.eos_token_id) + logger.info(f'generation_config: {generation_config}') + + if args.eval_human: + while True: + instruction = input('<<< ') + data = {'instruction': instruction} + input_ids = tokenize_func(data)['input_ids'] + inference(input_ids, model, tokenizer, streamer, generation_config) + print('-' * 80) + else: + dataset = get_dataset(args.dataset.split(',')) + _, test_dataset = process_dataset(dataset, args.dataset_test_size, + args.dataset_sample, + args.dataset_seed) + mini_test_dataset = test_dataset.select(range(10)) + del dataset + for data in mini_test_dataset: + output = data['output'] + data['output'] = None + input_ids = tokenize_func(data)['input_ids'] + inference(input_ids, model, tokenizer, streamer, generation_config) + print() + print(f'[LABELS]{output}') + print('-' * 80) + # input('next[ENTER]') + + +if __name__ == '__main__': + args, remaining_argv = parse_args(InferArguments) + if len(remaining_argv) > 0: + if args.ignore_args_error: + logger.warning(f'remaining_argv: {remaining_argv}') + else: + raise ValueError(f'remaining_argv: {remaining_argv}') + llm_infer(args) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_sft.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_sft.py new file mode 100644 index 0000000000000000000000000000000000000000..8eaa60402ffaf676eefc2663f2af9aa42575661d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/llm_sft.py @@ -0,0 +1,300 @@ +# ### Setting up experimental environment. 
+""" +conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia -y +pip install sentencepiece charset_normalizer cpm_kernels tiktoken -U +pip install transformers datasets scikit-learn -U +pip install matplotlib tqdm tensorboard torchmetrics -U +pip install accelerate transformers_stream_generator -U + +# Install the latest version of modelscope from source +git clone https://github.com/modelscope/modelscope.git +cd modelscope +pip install -r requirements.txt +pip install . +""" +import os +# os.environ['CUDA_VISIBLE_DEVICES'] = '0,1' +import warnings +from dataclasses import dataclass, field +from functools import partial +from typing import List, Optional + +import torch +from swift import LoRAConfig, Swift +from torch import Tensor +from utils import (DATASET_MAPPING, DEFAULT_PROMPT, MODEL_MAPPING, + data_collate_fn, get_dataset, get_model_tokenizer, + get_T_max, get_work_dir, parse_args, plot_images, + print_example, print_model_info, process_dataset, + seed_everything, show_freeze_layers, stat_dataset, + tokenize_function) + +from modelscope import get_logger +from modelscope.trainers import EpochBasedTrainer +from modelscope.utils.config import Config + +warnings.warn( + 'This directory has been migrated to ' + 'https://github.com/modelscope/swift/tree/main/examples/pytorch/llm, ' + 'and the files in this directory are no longer maintained.', + DeprecationWarning) +logger = get_logger() + + +@dataclass +class SftArguments: + seed: int = 42 + model_type: str = field( + default='qwen-7b', metadata={'choices': list(MODEL_MAPPING.keys())}) + # baichuan-7b: 'lora': 16G; 'full': 80G + sft_type: str = field( + default='lora', metadata={'choices': ['lora', 'full']}) + output_dir: Optional[str] = None + ignore_args_error: bool = False # True: notebook compatibility + + dataset: str = field( + default='alpaca-en,alpaca-zh', + metadata={'help': f'dataset choices: {list(DATASET_MAPPING.keys())}'}) + dataset_seed: int = 42 + dataset_sample: int = 
20000 # -1: all dataset + dataset_test_size: float = 0.01 + prompt: str = DEFAULT_PROMPT + max_length: Optional[int] = 2048 + + lora_target_modules: Optional[List[str]] = None + lora_rank: int = 8 + lora_alpha: int = 32 + lora_dropout_p: float = 0.1 + + gradient_checkpoint: bool = True + batch_size: int = 1 + max_epochs: int = 1 + learning_rate: Optional[float] = None + weight_decay: float = 0.01 + n_accumulate_grad: int = 16 + grad_clip_norm: float = 1. + warmup_iters: int = 200 + + save_trainer_state: Optional[bool] = None + eval_interval: int = 500 + last_save_interval: Optional[int] = None + last_max_checkpoint_num: int = 1 + best_max_checkpoint_num: int = 1 + logging_interval: int = 5 + tb_interval: int = 5 + + # other + use_flash_attn: Optional[bool] = field( + default=None, + metadata={ + 'help': "This parameter is used only when model_type='qwen-7b'" + }) + + def __post_init__(self): + if self.sft_type == 'lora': + if self.learning_rate is None: + self.learning_rate = 1e-4 + if self.save_trainer_state is None: + self.save_trainer_state = True + if self.last_save_interval is None: + self.last_save_interval = self.eval_interval + elif self.sft_type == 'full': + if self.learning_rate is None: + self.learning_rate = 1e-5 + if self.save_trainer_state is None: + self.save_trainer_state = False # save disk space + if self.last_save_interval is None: + # Saving the model takes a long time + self.last_save_interval = self.eval_interval * 4 + else: + raise ValueError(f'sft_type: {self.sft_type}') + + if self.output_dir is None: + self.output_dir = 'runs' + self.output_dir = os.path.join(self.output_dir, self.model_type) + + if self.lora_target_modules is None: + self.lora_target_modules = MODEL_MAPPING[ + self.model_type]['lora_TM'] + if self.use_flash_attn is None: + self.use_flash_attn = 'auto' + + +def llm_sft(args: SftArguments) -> None: + seed_everything(args.seed) + + # ### Loading Model and Tokenizer + support_bf16 = torch.cuda.is_bf16_supported() + if not 
support_bf16: + logger.warning(f'support_bf16: {support_bf16}') + + kwargs = {'low_cpu_mem_usage': True, 'device_map': 'auto'} + if args.model_type == 'qwen-7b': + kwargs['use_flash_attn'] = args.use_flash_attn + model, tokenizer, model_dir = get_model_tokenizer( + args.model_type, torch_dtype=torch.bfloat16, **kwargs) + + if args.gradient_checkpoint: + model.gradient_checkpointing_enable() + model.enable_input_require_grads() + + # ### Preparing lora + if args.sft_type == 'lora': + lora_config = LoRAConfig( + target_modules=args.lora_target_modules, + r=args.lora_rank, + lora_alpha=args.lora_alpha, + lora_dropout=args.lora_dropout_p) + logger.info(f'lora_config: {lora_config}') + model = Swift.prepare_model(model, lora_config) + + show_freeze_layers(model) + print_model_info(model) + # check the device and dtype of the model + _p: Tensor = list(model.parameters())[-1] + logger.info(f'device: {_p.device}, dtype: {_p.dtype}') + + # ### Loading Dataset + dataset = get_dataset(args.dataset.split(',')) + train_dataset, val_dataset = process_dataset(dataset, + args.dataset_test_size, + args.dataset_sample, + args.dataset_seed) + tokenize_func = partial( + tokenize_function, + tokenizer=tokenizer, + prompt=args.prompt, + max_length=args.max_length) + train_dataset = train_dataset.map(tokenize_func) + val_dataset = val_dataset.map(tokenize_func) + del dataset + # Data analysis + stat_dataset(train_dataset) + stat_dataset(val_dataset) + data_collator = partial(data_collate_fn, tokenizer=tokenizer) + print_example(train_dataset[0], tokenizer) + + # ### Setting Config + cfg_file = os.path.join(model_dir, 'configuration.json') + + T_max = get_T_max( + len(train_dataset), args.batch_size, args.max_epochs, True) + work_dir = get_work_dir(args.output_dir) + config = Config({ + 'train': { + 'dataloader': { + 'batch_size_per_gpu': args.batch_size, + 'workers_per_gpu': 1, + 'shuffle': True, + 'drop_last': True, + 'pin_memory': True + }, + 'max_epochs': + args.max_epochs, + 
'work_dir': + work_dir, + 'optimizer': { + 'type': 'AdamW', + 'lr': args.learning_rate, + 'weight_decay': args.weight_decay, + 'options': { + 'cumulative_iters': args.n_accumulate_grad, + 'grad_clip': { + 'norm_type': 2, + 'max_norm': args.grad_clip_norm + } + } + }, + 'lr_scheduler': { + 'type': 'CosineAnnealingLR', + 'T_max': T_max, + 'eta_min': args.learning_rate * 0.1, + 'options': { + 'by_epoch': False, + 'warmup': { + 'type': 'LinearWarmup', + 'warmup_ratio': 0.1, + 'warmup_iters': args.warmup_iters + } + } + }, + 'hooks': [ + { + 'type': 'CheckpointHook', + 'by_epoch': False, + 'interval': args.last_save_interval, + 'max_checkpoint_num': args.last_max_checkpoint_num, + 'save_trainer_state': args.save_trainer_state + }, + { + 'type': 'EvaluationHook', + 'by_epoch': False, + 'interval': args.eval_interval + }, + { + 'type': 'BestCkptSaverHook', + 'metric_key': 'loss', + 'save_best': True, + 'rule': 'min', + 'max_checkpoint_num': args.best_max_checkpoint_num, + 'save_trainer_state': args.save_trainer_state + }, + { + 'type': 'TextLoggerHook', + 'by_epoch': True, # Whether EpochBasedTrainer is used + 'interval': args.logging_interval + }, + { + 'type': 'TensorboardHook', + 'by_epoch': False, + 'interval': args.tb_interval + } + ] + }, + 'evaluation': { + 'dataloader': { + 'batch_size_per_gpu': args.batch_size, + 'workers_per_gpu': 1, + 'shuffle': False, + 'drop_last': False, + 'pin_memory': True + }, + 'metrics': [{ + 'type': 'my_metric', + 'vocab_size': tokenizer.vocab_size + }] + } + }) + + # ### Finetuning + + def cfg_modify_fn(cfg: Config) -> Config: + cfg.update(config) + return cfg + + trainer = EpochBasedTrainer( + model=model, + cfg_file=cfg_file, + data_collator=data_collator, + train_dataset=train_dataset, + eval_dataset=val_dataset, + remove_unused_data=True, + seed=42, + cfg_modify_fn=cfg_modify_fn, + ) + + trainer.train() + + # ### Visualization + tb_dir = os.path.join(work_dir, 'tensorboard_output') + plot_images(tb_dir, ['loss'], 0.9) + + +if 
__name__ == '__main__': + args, remaining_argv = parse_args(SftArguments) + if len(remaining_argv) > 0: + if args.ignore_args_error: + logger.warning(f'remaining_argv: {remaining_argv}') + else: + raise ValueError(f'remaining_argv: {remaining_argv}') + llm_sft(args) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_infer.sh b/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_infer.sh new file mode 100644 index 0000000000000000000000000000000000000000..9f1a7f9e8693d8a25d93b72d36216db2ffe0fad0 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_infer.sh @@ -0,0 +1,5 @@ +CUDA_VISIBLE_DEVICES=0,1 \ +python llm_infer.py \ + --model_type polylm-13b \ + --ckpt_path "runs/polylm-13b/v0-20230802-172425/output_best/pytorch_model.bin" \ + --eval_human true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_sft.sh b/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_sft.sh new file mode 100644 index 0000000000000000000000000000000000000000..254d5423a60f719b473352a499fc211c0878d6a6 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/run_sft.sh @@ -0,0 +1,6 @@ +CUDA_VISIBLE_DEVICES=0,1 \ +python llm_sft.py \ + --model_type polylm-13b \ + --output_dir runs \ + --dataset alpaca-en,alpaca-zh,alpaca-multi \ + --dataset_sample 20000 diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/__init__.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a7f7d2c0e93ac8776e279b0a26912453c290cb7 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/__init__.py @@ -0,0 +1,7 @@ +from .dataset import DATASET_MAPPING, get_dataset, process_dataset +from .models import MODEL_MAPPING, get_model_tokenizer +from .utils import (DEFAULT_PROMPT, MyMetric, data_collate_fn, get_T_max, + get_work_dir, inference, parse_args, plot_images, + print_example, print_model_info, read_tensorboard_file, + seed_everything, show_freeze_layers, stat_dataset, + 
tensorboard_smoothing, tokenize_function) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/dataset.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..3764455b2a81728a67fd57919c86a43753dd4034 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/dataset.py @@ -0,0 +1,146 @@ +from functools import partial +from typing import Callable, List, Optional, Tuple + +import numpy as np +from datasets import Dataset as HfDataset +from datasets import concatenate_datasets +from swift.utils import get_seed + +from modelscope import MsDataset + + +def _processing_alpaca( + dataset: HfDataset, + preprocess_input: Optional[Callable[[str], str]] = None) -> HfDataset: + instruction = dataset['instruction'] + input_ = dataset['input'] + new_instruction = [] + for inst, inp in zip(instruction, input_): + if inp is None: + inp = '' + if preprocess_input is not None: + inp = preprocess_input(inp) + inst = f'{inst}\n{inp}' + new_instruction.append(inst) + dataset = HfDataset.from_dict({ + 'instruction': new_instruction, + 'output': dataset['output'] + }) + return dataset + + +def get_alpaca_gpt4_en_dataset() -> HfDataset: + dataset: HfDataset = MsDataset.load( + 'AI-ModelScope/alpaca-gpt4-data-en', split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +def get_alpaca_gpt4_zh_dataset() -> HfDataset: + dataset: HfDataset = MsDataset.load( + 'AI-ModelScope/alpaca-gpt4-data-zh', split='train').to_hf_dataset() + + def _preprocess_input(inp: str) -> str: + if inp.startswith('输入:'): + inp = inp[3:] + return inp + + return _processing_alpaca(dataset, _preprocess_input) + + +def get_finance_en_dataset() -> HfDataset: + dataset: HfDataset = MsDataset.load( + 'wyj123456/finance_en', split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +_multi_alpaca_language_list = [ + 'ar', 'de', 'es', 'fr', 'id', 'ja', 'ko', 'pt', 'ru', 'th', 'vi' +] + + +def 
get_multi_alpaca(subset_name: str) -> HfDataset: + """ + subset_name: + Language-key Language # examples + ar Arabic 14,671 + de German 9,515 + es Spanish 9,958 + fr France 11,332 + id Indonesian 12,117 + ja Japanese 10,191 + ko Korean 14,402 + pt Portuguese 10,825 + ru Russian 14,286 + th Thai 11,496 + vi Vietnamese 13,908 + """ + dataset: HfDataset = MsDataset.load( + 'damo/nlp_polylm_multialpaca_sft', + subset_name=subset_name, + split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +def get_multi_alpaca_all() -> HfDataset: + dataset_list = [] + for subset_name in _multi_alpaca_language_list: + dataset = get_multi_alpaca(subset_name) + dataset_list.append(dataset) + dataset = concatenate_datasets(dataset_list) + return dataset + + +def get_code_alpaca_en_dataset() -> HfDataset: + dataset: HfDataset = MsDataset.load( + 'wyj123456/code_alpaca_en', split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +def get_instinwild_zh_dataset(): + dataset: HfDataset = MsDataset.load( + 'wyj123456/instinwild', subset_name='default', + split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +def get_instinwild_en_dataset(): + dataset: HfDataset = MsDataset.load( + 'wyj123456/instinwild', subset_name='subset', + split='train').to_hf_dataset() + return _processing_alpaca(dataset) + + +DATASET_MAPPING = { + 'alpaca-en': get_alpaca_gpt4_en_dataset, + 'alpaca-zh': get_alpaca_gpt4_zh_dataset, + 'finance-en': get_finance_en_dataset, + 'multi-alpaca-all': get_multi_alpaca_all, + **{ + f'multi-alpaca-{k}': partial(get_multi_alpaca, k) + for k in _multi_alpaca_language_list + }, + 'code-en': get_code_alpaca_en_dataset, + 'instinwild-zh': get_instinwild_zh_dataset, + 'instinwild-en': get_instinwild_en_dataset, +} + + +def get_dataset(dataset_name_list: List[str]) -> HfDataset: + dataset_list = [] + for dataset_name in dataset_name_list: + get_function = DATASET_MAPPING[dataset_name] + dataset_list.append(get_function()) + dataset = 
concatenate_datasets(dataset_list) + return dataset + + +def process_dataset(dataset: HfDataset, dataset_test_size: float, + dataset_sample: int, + dataset_seed: int) -> Tuple[HfDataset, HfDataset]: + random_state = np.random.RandomState(dataset_seed) + if dataset_sample >= 0: + index = random_state.permutation(len(dataset))[:dataset_sample] + dataset = dataset.select(index) + dataset = dataset.train_test_split( + dataset_test_size, seed=get_seed(random_state)) + return dataset['train'], dataset['test'] diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/models.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/models.py new file mode 100644 index 0000000000000000000000000000000000000000..4db542763c9a9d3c38a345a0e32ae66e7e7b3556 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/models.py @@ -0,0 +1,232 @@ +import os +from types import MethodType +from typing import Any, Dict, NamedTuple, Optional + +import torch +from swift import get_logger +from torch import dtype as Dtype + +from modelscope import (AutoConfig, AutoModelForCausalLM, AutoTokenizer, Model, + read_config, snapshot_download) +from modelscope.models.nlp.chatglm2 import ChatGLM2Config, ChatGLM2Tokenizer +from modelscope.models.nlp.llama2 import Llama2Config, Llama2Tokenizer + +logger = get_logger() + + +def _add_special_token(tokenizer, special_token_mapper: Dict[str, + Any]) -> None: + for k, v in special_token_mapper.items(): + setattr(tokenizer, k, v) + assert tokenizer.eos_token is not None + if tokenizer.pad_token is None: + tokenizer.pad_token = tokenizer.eos_token + + +def get_model_tokenizer_from_repo(model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + model_config=None, + **model_kwargs): + """load from an independent repository""" + if model_config is None: + model_config = AutoConfig.from_pretrained( + model_dir, trust_remote_code=True) + model_config.torch_dtype = torch_dtype + logger.info(f'model_config: {model_config}') + tokenizer = 
AutoTokenizer.from_pretrained( + model_dir, trust_remote_code=True) + model = None + if load_model: + model = AutoModelForCausalLM.from_pretrained( + model_dir, + config=model_config, + torch_dtype=torch_dtype, + trust_remote_code=True, + **model_kwargs) + return model, tokenizer + + +def get_model_tokenizer_from_sdk(config_class: type, + tokenizer_class: type, + model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + model_config=None, + **model_kwargs): + """load from ms library""" + config = read_config(model_dir) + logger.info(config) + if model_config is None: + model_config = config_class.from_pretrained(model_dir) + model_config.torch_dtype = torch_dtype + logger.info(model_config) + tokenizer = tokenizer_class.from_pretrained(model_dir) + model = None + if load_model: + model = Model.from_pretrained( + model_dir, + cfg_dict=config, + config=model_config, + torch_dtype=torch_dtype, + **model_kwargs) + return model, tokenizer + + +def get_model_tokenizer_baichuan13b(model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + **model_kwargs): + # baichuan-13b does not implement the `get_input_embeddings` function + model, tokenizer = get_model_tokenizer_from_repo(model_dir, torch_dtype, + load_model, + **model_kwargs) + model.get_input_embeddings = MethodType( + lambda self: self.model.embed_tokens, model) + return model, tokenizer + + +def get_model_tokenizer_chatglm2(model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + **model_kwargs): + if 'quantization_config' in model_kwargs: + model_kwargs['quantization_config'].llm_int8_skip_modules = [ + 'output_layer' + ] + return get_model_tokenizer_from_sdk(ChatGLM2Config, ChatGLM2Tokenizer, + model_dir, torch_dtype, load_model, + **model_kwargs) + + +def get_model_tokenizer_llama2(model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + **model_kwargs): + model_config = AutoConfig.from_pretrained( + model_dir, trust_remote_code=True) + model_config.pretraining_tp = 1 + 
return get_model_tokenizer_from_sdk(Llama2Config, Llama2Tokenizer, + model_dir, torch_dtype, load_model, + model_config, **model_kwargs) + + +def get_model_tokenizer_qwen(model_dir: str, + torch_dtype: Dtype, + load_model: bool = True, + **kwargs): + model_config = AutoConfig.from_pretrained( + model_dir, trust_remote_code=True) + mapper = { + torch.float16: 'fp16', + torch.bfloat16: 'bf16', + torch.float32: 'fp32' + } + k_true = mapper[torch_dtype] + for k in mapper.values(): + v = False + if k == k_true: + v = True + setattr(model_config, k, v) + + use_flash_attn = kwargs.pop('use_flash_attn', 'auto') + model_config.use_flash_attn = use_flash_attn + return get_model_tokenizer_from_repo(model_dir, torch_dtype, load_model, + model_config, **kwargs) + + +class LoRATM(NamedTuple): + # default lora target modules + baichuan = ['W_pack'] + chatglm2 = ['query_key_value'] + llama2 = ['q_proj', 'k_proj', 'v_proj'] + qwen = ['c_attn'] + polylm = ['c_attn'] + + +# Reference: 'https://modelscope.cn/models/{model_id}/summary' +# keys: 'model_id', 'revision', 'get_function', +# 'ignore_file_pattern', 'special_token_mapper', 'lora_TM' +MODEL_MAPPING = { + 'baichuan-7b': { + 'model_id': 'baichuan-inc/baichuan-7B', # model id or model dir + 'revision': 'v1.0.7', + 'lora_TM': LoRATM.baichuan + }, + 'baichuan-13b': { + 'model_id': 'baichuan-inc/Baichuan-13B-Base', + 'revision': 'v1.0.3', + 'get_function': get_model_tokenizer_baichuan13b, + 'lora_TM': LoRATM.baichuan + }, + 'chatglm2-6b': { + 'model_id': 'ZhipuAI/chatglm2-6b', + 'revision': 'v1.0.7', + 'get_function': get_model_tokenizer_chatglm2, + 'lora_TM': LoRATM.chatglm2 + }, + 'llama2-7b': { + 'model_id': 'modelscope/Llama-2-7b-ms', + 'revision': 'v1.0.2', + 'get_function': get_model_tokenizer_llama2, + 'ignore_file_pattern': [r'.+\.bin$'], # use safetensors + 'lora_TM': LoRATM.llama2 + }, + 'llama2-13b': { + 'model_id': 'modelscope/Llama-2-13b-ms', + 'revision': 'v1.0.2', + 'get_function': get_model_tokenizer_llama2, + 
'ignore_file_pattern': [r'.+\.bin$'], + 'lora_TM': LoRATM.llama2 + }, + 'llama2-70b': { + 'model_id': 'modelscope/Llama-2-70b-ms', + 'revision': 'v1.0.0', + 'get_function': get_model_tokenizer_llama2, + 'ignore_file_pattern': [r'.+\.bin$'], + 'lora_TM': LoRATM.llama2 + }, + 'openbuddy-llama2-13b': { + 'model_id': 'OpenBuddy/openbuddy-llama2-13b-v8.1-fp16', + 'revision': 'v1.0.0', + 'lora_TM': LoRATM.llama2, + }, + 'qwen-7b': { + 'model_id': 'qwen/Qwen-7B', + 'revision': 'v.1.0.4', + 'get_function': get_model_tokenizer_qwen, + 'lora_TM': LoRATM.qwen, + 'special_token_mapper': { + 'eos_token': '<|endoftext|>' + } + } +} + + +def get_model_tokenizer(model_type: str, + torch_dtype: Optional[Dtype] = None, + load_model: bool = True, + **kwargs): + data = MODEL_MAPPING.get(model_type) + if data is None: + raise ValueError(f'model_type: {model_type}') + + model_id = data['model_id'] + get_function = data.get('get_function', get_model_tokenizer_from_repo) + ignore_file_pattern = data.get('ignore_file_pattern', []) + special_token_mapper = data.get('special_token_mapper', {}) + if torch_dtype is None: + torch_dtype = data.get('torch_dtype', torch.float16) + + model_dir = kwargs.pop('model_dir', None) + if model_dir is None: + model_dir = model_id + if not os.path.exists(model_id): + revision = data.get('revision', 'master') + model_dir = snapshot_download( + model_id, revision, ignore_file_pattern=ignore_file_pattern) + + model, tokenizer = get_function(model_dir, torch_dtype, load_model, + **kwargs) + _add_special_token(tokenizer, special_token_mapper) + return model, tokenizer, model_dir diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/utils.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b5325532f356d58f1e3692530ae57641831ff27b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm/utils/utils.py @@ -0,0 +1,323 @@ +import datetime as dt +import math +import os 
+import random +import re +from typing import Any, Counter, Dict, List, Optional, Tuple, Type, TypeVar + +import matplotlib.pyplot as plt +import numpy as np +import torch +from datasets import Dataset as HfDataset +from tensorboard.backend.event_processing.event_accumulator import \ + EventAccumulator +from torch import Tensor +from torch.nn import Module +from torch.nn.utils.rnn import pad_sequence +from torchmetrics import Accuracy, MeanMetric +from tqdm import tqdm +from transformers import GenerationConfig, HfArgumentParser, TextStreamer + +from modelscope import get_logger +from modelscope.metrics.base import Metric +from modelscope.metrics.builder import METRICS +from modelscope.utils.registry import default_group + +COLOR, COLOR_S = '#FFE2D9', '#FF7043' + +DEFAULT_PROMPT = """Here's a conversation between a human and an AI assistant. \ +The AI assistant provides detailed, friendly answers for the human. + +### Human: +{instruction} + +### AI: +""" + +logger = get_logger() +os.environ['TOKENIZERS_PARALLELISM'] = 'true' + + +def _get_version(work_dir: str) -> int: + if os.path.isdir(work_dir): + fnames = os.listdir(work_dir) + else: + fnames = [] + v_list = [-1] + for fname in fnames: + m = re.match(r'v(\d+)', fname) + if m is None: + continue + v = m.group(1) + v_list.append(int(v)) + return max(v_list) + 1 + + +def get_work_dir(work_dir: str) -> str: + """add version""" + work_dir = os.path.abspath(work_dir) + version = _get_version(work_dir) + time = dt.datetime.now().strftime('%Y%m%d-%H%M%S') + + work_dir = os.path.join(work_dir, f'v{version}-{time}') + logger.info(f'work_dir: {work_dir}') + return work_dir + + +def seed_everything(seed: Optional[int] = None, gpu_dtm: bool = False) -> int: + if seed is None: + seed_max = np.iinfo(np.int32).max + seed = random.randint(0, seed_max) + + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + logger.info(f'Global seed set to {seed}') + if gpu_dtm: + 
torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + logger.info(f'Setting deterministic: {True}, benchmark: {False}') + return seed + + +def get_T_max(dataset_len: int, batch_size: int, max_epochs: int, + drop_last: bool) -> int: + """Calculate T_max in CosineAnnealingLR""" + if drop_last: + T_max = dataset_len // batch_size + else: + T_max = math.ceil(dataset_len / batch_size) + T_max *= max_epochs + return T_max + + +def tokenize_function(example: Dict[str, Optional[str]], + tokenizer, + prompt: str = DEFAULT_PROMPT, + max_length: Optional[int] = 2048) -> Dict[str, Any]: + instruction: str = example['instruction'] + output = example.get('output') + src_text = prompt.format(instruction=instruction) + src_input_ids: List[int] = tokenizer( + src_text, return_attention_mask=False, + add_special_tokens=True)['input_ids'] + + tgt_input_ids = [] + if output is not None: + tgt_input_ids += tokenizer( + output, return_attention_mask=False, + add_special_tokens=False)['input_ids'] + tgt_input_ids += [tokenizer.eos_token_id] + labels = [-100] * len(src_input_ids) + tgt_input_ids + else: + labels = None + input_ids = src_input_ids + tgt_input_ids + + if max_length is not None: + input_ids = input_ids[-max_length:] + if labels is not None: + labels = labels[-max_length:] + + return {'input_ids': input_ids, 'labels': labels} + + +def stat_dataset(dataset: HfDataset) -> None: + """Statistical analysis was performed on the dataset""" + _token_len = [] + for d in dataset: + _token_len.append(len(d['input_ids'])) + _token_len = np.array(_token_len) + mean = _token_len.mean().item() + std = _token_len.std().item() + min_ = _token_len.min().item() + max_ = _token_len.max().item() + logger.info( + f'Dataset Token Length: {mean:.6f}±{std:.6f}, min={min_:.6f}, max={max_:.6f}, size={_token_len.shape[0]}' + ) + + +def print_example(example: Dict[str, Any], tokenizer) -> None: + input_ids, labels = example['input_ids'], example['labels'] + 
print(f'[INPUT_IDS] {input_ids}') + print(f'[INPUT] {tokenizer.decode(input_ids)}') + print() + n_mask = Counter(labels)[-100] + print(f'[LABLES_IDS] {labels}') + print(f'[LABLES] <-100 * {n_mask}>{tokenizer.decode(labels[n_mask:])}') + + +def data_collate_fn(batch: List[Dict[str, Any]], tokenizer) -> Dict[str, Any]: + input_ids = [torch.tensor(b['input_ids']) for b in batch] + labels = [torch.tensor(b['labels']) for b in batch] + attention_mask = [ + torch.ones(len(input_ids[i]), dtype=torch.int64) + for i in range(len(input_ids)) + ] + + input_ids = pad_sequence( + input_ids, batch_first=True, padding_value=tokenizer.pad_token_id) + attention_mask = pad_sequence( + attention_mask, batch_first=True, padding_value=0) + labels = pad_sequence(labels, batch_first=True, padding_value=-100) + return { + 'input_ids': input_ids, + 'attention_mask': attention_mask, + 'labels': labels + } + + +def print_model_info(model: Module, name: Optional[str] = None) -> None: + if name is None: + name = model.__class__.__name__ + + n_params = sum(p.numel() for p in model.parameters()) + n_grads = sum(p.numel() for p in model.parameters() if p.requires_grad) + n_buffers = sum(p.numel() for p in model.buffers()) + + n_params /= 1e6 + n_grads /= 1e6 + n_buffers /= 1e6 + s = [ + f'{name}: ', + f'{n_params:.4f}M Params ({n_grads:.4f}M Trainable), ', + f'{n_buffers:.4f}M Buffers', + ] + s += '.' 
+ logger.info(''.join(s)) + + +def show_freeze_layers(model: Module, max_lines: Optional[int] = 20) -> None: + named_p = list(model.named_parameters()) + for i, (n, p) in enumerate(named_p): + if max_lines is not None and i >= max_lines: + logger.info('...') + break + logger.info(f'{n}: requires_grad={p.requires_grad}') + + +@METRICS.register_module(group_key=default_group, module_name='my_metric') +class MyMetric(Metric): + + def __init__(self, vocab_size: int): + self.acc = Accuracy('multiclass', num_classes=vocab_size) + self.loss = MeanMetric() + + def add(self, outputs: Dict[str, Any], inputs: Dict[str, Any]) -> None: + loss: Tensor = outputs.loss + self.loss.update(loss.cpu()) + + labels: Tensor = inputs['labels'] + labels = labels[:, 1:] + labels_mask = labels != -100 + logits: Tensor = outputs.logits[:, :-1] + logits = logits[labels_mask].contiguous().view(-1, logits.shape[-1]) + pred = logits.argmax(dim=-1) + labels = labels[labels_mask].to(logits.device) + self.acc.update(pred.cpu(), labels.cpu()) + + def evaluate(self): + return { + 'acc': self.acc.compute().item(), + 'loss': self.loss.compute().item() + } + + def merge(self, other: 'MyMetric') -> None: + """This script does not support ddp. 
TODO""" + raise NotImplementedError + + +Item = Dict[str, float] + + +def read_tensorboard_file(fpath: str) -> Dict[str, List[Item]]: + if not os.path.isfile(fpath): + raise FileNotFoundError(f'fpath: {fpath}') + ea = EventAccumulator(fpath) + ea.Reload() + res = {} + tags = ea.Tags()['scalars'] + for tag in tags: + values = ea.Scalars(tag) + r = [] + for v in values: + r.append({'step': v.step, 'value': v.value}) + res[tag] = r + return res + + +def tensorboard_smoothing(values: List[float], + smooth: float = 0.9) -> List[float]: + norm_factor = 1 + x = 0 + res = [] + for i in range(len(values)): + x = x * smooth + values[i] # Exponential decay + res.append(x / norm_factor) + + norm_factor *= smooth + norm_factor += 1 + return res + + +def plot_images(tb_dir: str, + smooth_key: List[str], + smooth_val: float = 0.9, + figsize: Tuple[int, int] = (8, 5), + dpi: int = 100) -> None: + images_dir = os.path.join(os.path.dirname(tb_dir), 'images') + os.makedirs(images_dir, exist_ok=True) + + fname = os.listdir(tb_dir)[0] + tb_path = os.path.join(tb_dir, fname) + data = read_tensorboard_file(tb_path) + + for k in data.keys(): + _data = data[k] + steps = [d['step'] for d in _data] + values = [d['value'] for d in _data] + if len(values) == 0: + continue + _, ax = plt.subplots(1, 1, squeeze=True, figsize=figsize, dpi=dpi) + ax.set_title(k) + if len(values) == 1: + ax.scatter(steps, values, color=COLOR_S) + elif k in smooth_key: + ax.plot(steps, values, color=COLOR) + values_s = tensorboard_smoothing(values, smooth_val) + ax.plot(steps, values_s, color=COLOR_S) + else: + ax.plot(steps, values, color=COLOR_S) + fpath = os.path.join(images_dir, k.replace('/', '_')) + plt.savefig(fpath, dpi=dpi, bbox_inches='tight') + + +def inference(input_ids: List[int], + model, + tokenizer, + streamer: Optional[TextStreamer] = None, + generation_config: Optional[GenerationConfig] = None, + tag: str = '[INFERENCE]') -> str: + print(f'{tag}{tokenizer.decode(input_ids)}', end='') + input_ids = 
torch.tensor(input_ids)[None].cuda() + attention_mask = torch.ones_like(input_ids) + model.eval() + generate_ids = model.generate( + input_ids=input_ids, + attention_mask=attention_mask, + streamer=streamer, + generation_config=generation_config) + output_text = tokenizer.decode(generate_ids[0]) + return output_text + + +_T = TypeVar('_T') + + +def parse_args(class_type: Type[_T], + argv: Optional[List[str]] = None) -> Tuple[_T, List[str]]: + parser = HfArgumentParser([class_type]) + args, remaining_args = parser.parse_args_into_dataclasses( + argv, return_remaining_strings=True) + logger.info(f'args: {args}') + return args, remaining_args diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/_common.py b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/_common.py new file mode 100644 index 0000000000000000000000000000000000000000..384e81064d190327c0f3efff350ffccea8c61c25 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/_common.py @@ -0,0 +1,412 @@ +import ast +import datetime as dt +import math +import os +import random +import re +import sys +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +import matplotlib.pyplot as plt +import numpy as np +# +import torch +from matplotlib.figure import Figure +from swift import LoRAConfig, Swift +from tensorboard.backend.event_processing.event_accumulator import \ + EventAccumulator +from torch import Tensor +from torch import device as Device +from torch.nn import Module +from torch.nn.utils.rnn import pad_sequence +# +from torchmetrics import Accuracy, MeanMetric +# +from tqdm import tqdm + +# +from modelscope import Model, MsDataset, get_logger, read_config +from modelscope.metrics.base import Metric +from modelscope.metrics.builder import METRICS +from modelscope.models.nlp.chatglm2 import ChatGLM2Tokenizer +from modelscope.msdatasets.dataset_cls.custom_datasets import \ + TorchCustomDataset +from modelscope.utils.config import ConfigDict +from modelscope.utils.registry 
import default_group + +# +PROMPT = """System: {system} +Human: {user} +AI: """ +MAX_LENGTH = 2048 +TEST_MAX_LENGTH = MAX_LENGTH + +COLOR, COLOR_S = '#FFE2D9', '#FF7043' +logger = get_logger() +# + + +def _get_version(work_dir: str) -> int: + if os.path.isdir(work_dir): + fnames = os.listdir(work_dir) + else: + fnames = [] + v_list = [-1] + for fname in fnames: + m = re.match(r'v(\d+)', fname) + if m is None: + continue + v = m.group(1) + v_list.append(int(v)) + return max(v_list) + 1 + + +def get_work_dir(work_dir: str) -> str: + """add version""" + work_dir = os.path.abspath(work_dir) + version = _get_version(work_dir) + time = dt.datetime.now().strftime('%Y%m%d-%H%M%S') + # + work_dir = os.path.join(work_dir, f'v{version}-{time}') + logger.info(f'work_dir: {work_dir}') + return work_dir + + +def _format_device(device: Union[List[int], str]) -> Tuple[List[int], str]: + if isinstance(device, list): + device_ids = device + device_str = ','.join([str(d) for d in device]) + else: + device_ids = [int(d) for d in device.split(',') if d != '-1'] + device_str = device + device_str = device_str.replace(' ', '') + return device_ids, device_str + + +def select_device(device: Union[List[int], str]) -> Device: + """Call this function before cuda is initialized. + device: e.g. []: 'cpu', [0], [0, 1, 2] + e.g. '-1': 'cpu', '0', '0,1,2' + """ + if torch.cuda.is_initialized(): + logger.warning('CUDA has been initialized! 
Device selection fails!') + return torch.device('cuda:0') + # + device_ids, device_str = _format_device(device) + # + os.environ['CUDA_VISIBLE_DEVICES'] = device_str + log_s = 'Using device: ' + if len(device_ids) == 0: + master_device: str = 'cpu' + log_s += 'cpu' + else: + assert torch.cuda.is_available( + ) and torch.cuda.device_count() >= len(device_ids) + master_device = 'cuda:0' + log_s += f'cuda:{device_str}' + logger.info(log_s) + return torch.device(master_device) + + +def seed_everything(seed: Optional[int] = None, gpu_dtm: bool = False) -> int: + if seed is None: + seed_max = np.iinfo(np.int32).max + seed = random.randint(0, seed_max) + + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + logger.info(f'Global seed set to {seed}') + if gpu_dtm: + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + logger.info(f'Setting deterministic: {True}, benchmark: {False}') + return seed + + +def get_T_max(dataset_len: int, batch_size: int, max_epochs: int, + drop_last: bool) -> int: + """Calculate T_max in CosineAnnealingLR""" + if drop_last: + T_max = dataset_len // batch_size + else: + T_max = math.ceil(dataset_len / batch_size) + T_max *= max_epochs + return T_max + + +def tokenize_function(system: str, user: str, assistant: Optional[str], + tokenizer) -> Dict[str, Any]: + """Only applicable to baichuan and chatglm2. 
Other models need to be tested""" + src_text = PROMPT.format(system=system, user=user) + src_input_ids: List[int] = tokenizer( + src_text, return_attention_mask=False, + add_special_tokens=True)['input_ids'] + # + tgt_input_ids: List[int] = [] + if assistant is not None: + tgt_input_ids += tokenizer( + assistant, return_attention_mask=False, + add_special_tokens=False)['input_ids'] + tgt_input_ids += [tokenizer.eos_token_id] + labels = [-100] * len(src_input_ids) + tgt_input_ids + else: + labels = None + input_ids = src_input_ids + tgt_input_ids + # + if assistant is not None: + if len(input_ids) > MAX_LENGTH: + return {} + else: + input_ids = input_ids[-TEST_MAX_LENGTH:] + # + return {'input_ids': input_ids, 'labels': labels} + + +class MyDataset(TorchCustomDataset): + + def __init__(self, system: List[str], user: List[str], + assistant: List[str], tokenize_function) -> None: + self._data = [] + for i in tqdm(range(len(system))): + _d = tokenize_function(system[i], user[i], assistant[i]) + if len(_d) == 0: + continue + self._data.append(_d) + + def __getitem__(self, idx: int) -> Dict[str, Any]: + return self._data[idx] + + def __len__(self) -> int: + return len(self._data) + + +def stat_dataset(dataset: 'MyDataset') -> None: + """Statistical analysis was performed on the data set""" + _token_len = [] + for d in dataset: + _token_len.append(len(d['input_ids'])) + _token_len = np.array(_token_len) + mean = _token_len.mean().item() + std = _token_len.std().item() + min_ = _token_len.min().item() + max_ = _token_len.max().item() + logger.info( + f'Dataset Token Length: {mean:.6f}±{std:.6f}, min={min_:.6f}, max={max_:.6f}, size={_token_len.shape[0]}' + ) + + +def print_examples(examples: Dict[str, Any], tokenizer) -> None: + input_ids, labels = examples['input_ids'], examples['labels'] + print(f'[INPUT_IDS] {tokenizer.decode(input_ids)}') + print() + print( + f'[LABLES] {tokenizer.decode([lb if lb != -100 else 0 for lb in labels])}' + ) + + +def data_collate_fn(batch: 
List[Dict[str, Any]], tokenizer) -> Dict[str, Any]: + input_ids = [torch.tensor(b['input_ids']) for b in batch] + labels = [torch.tensor(b['labels']) for b in batch] + attention_mask = [ + torch.ones(len(input_ids[i]), dtype=torch.int64) + for i in range(len(input_ids)) + ] + # + input_ids = pad_sequence( + input_ids, batch_first=True, padding_value=tokenizer.pad_token_id) + attention_mask = pad_sequence( + attention_mask, batch_first=True, padding_value=0) + labels = pad_sequence(labels, batch_first=True, padding_value=-100) + return { + 'input_ids': input_ids, + 'attention_mask': attention_mask, + 'labels': labels + } + + +def print_model_info(model: Module, name: Optional[str] = None) -> None: + if name is None: + name = model.__class__.__name__ + # + n_params = sum(p.numel() for p in model.parameters()) + n_grads = sum(p.numel() for p in model.parameters() if p.requires_grad) + n_buffers = sum(p.numel() for p in model.buffers()) + # + n_params /= 1e6 + n_grads /= 1e6 + n_buffers /= 1e6 + s = [ + f'{name}: ', + f'{n_params:.4f}M Params ({n_grads:.4f}M Trainable), ', + f'{n_buffers:.4f}M Buffers', + ] + s += '.' 
+ logger.info(''.join(s)) + + +def show_freeze_layers(model: Module, max_lines: int = 20) -> None: + named_p = list(model.named_parameters()) + for i, (n, p) in enumerate(named_p): + if i >= max_lines: + logger.info('...') + break + logger.info(f'{n}: requires_grad={p.requires_grad}') + + +@METRICS.register_module(group_key=default_group, module_name='my_metric') +class MyMetric(Metric): + + def __init__(self, vocab_size: int): + self.acc = Accuracy('multiclass', num_classes=vocab_size) + self.loss = MeanMetric() + + def add(self, outputs: Dict[str, Any], inputs: Dict[str, Any]) -> None: + loss: Tensor = outputs.loss + self.loss.update(loss) + # + labels: Tensor = inputs['labels'] + labels = labels[:, 1:] + labels_mask = labels != -100 + logits: Tensor = outputs.logits[:, :-1] + logits = logits[labels_mask].contiguous().view(-1, logits.shape[-1]) + pred = logits.argmax(dim=-1) + labels = labels[labels_mask].to(logits.device) + self.acc.update(pred, labels) + + def evaluate(self): + return { + 'acc': self.acc.compute().item(), + 'loss': self.loss.compute().item() + } + + def merge(self, other: 'MyMetric') -> None: + """This script does not support ddp""" + raise NotImplementedError + + +def _add_special_token(tokenizer): + if tokenizer.eos_token_id is None: + tokenizer.eos_token_id = 2 + if tokenizer.bos_token_id is None: + tokenizer.bos_token_id = 1 + if tokenizer.pad_token_id is None: + tokenizer.pad_token_id = 0 + logger.info(f'bos_token_id: {tokenizer.bos_token_id}, ' + f'eos_token_id: {tokenizer.eos_token_id}, ' + f'pad_token_id: {tokenizer.pad_token_id}') + + +def get_baichuan7B_model_tokenizer(model_dir: str, + load_model: bool = True, + add_special_token: bool = True): + sys.path.insert(0, model_dir) + from configuration_baichuan import BaiChuanConfig + from tokenization_baichuan import BaiChuanTokenizer + from modeling_baichuan import BaiChuanForCausalLM + model_config = BaiChuanConfig.from_pretrained(model_dir) + model_config.torch_dtype = torch.float16 + 
logger.info(f'model_config: {model_config}') + tokenizer = BaiChuanTokenizer.from_pretrained(model_dir) + model = None + if load_model: + model = BaiChuanForCausalLM.from_pretrained( + model_dir, + config=model_config, + device_map='auto', + torch_dtype=torch.float16) + # + if add_special_token: + _add_special_token(tokenizer) + return model, tokenizer + + +def get_chatglm2_model_tokenizer(model_dir: str, + load_model: bool = True, + add_special_token: bool = True): + config = read_config(model_dir) + config['model'] = ConfigDict({'type': 'chatglm2-6b'}) + tokenizer = ChatGLM2Tokenizer.from_pretrained(model_dir) + model = None + if load_model: + model = Model.from_pretrained( + model_dir, + cfg_dict=config, + device_map='auto', + torch_dtype=torch.float16) + if add_special_token: + _add_special_token(tokenizer) + return model, tokenizer + + +def make_dataset( + split: str, tokenize_function: Callable[[str, str, Optional[str]], + Dict[str, Any]] +) -> MyDataset: + """ + split: Literal['train', 'validation'] + """ + dataset = MsDataset.load( + 'modelscope/ms_hackathon_23_agent_train_dev', split=split) + system = [] + user = [] + assistant = [] + for d in dataset: + content = ast.literal_eval(d['conversations']) + s = content[0]['value'] + assert len(content) % 2 == 1 + for i in range(len(content) // 2): + system.append(s) + user.append(content[2 * i + 1]['value']) + assistant.append(content[2 * i + 2]['value']) + return MyDataset(system, user, assistant, tokenize_function) + + +Item = Dict[str, float] + + +def read_tensorboard_file(fpath: str) -> Dict[str, List[Item]]: + if not os.path.isfile(fpath): + raise FileNotFoundError(f'fpath: {fpath}') + ea = EventAccumulator(fpath) + ea.Reload() + res = {} + tags = ea.Tags()['scalars'] + for tag in tags: + values = ea.Scalars(tag) + r = [] + for v in values: + r.append({'step': v.step, 'value': v.value}) + res[tag] = r + return res + + +def tensorboard_smoothing(values: List[float], + smooth: float = 0.9) -> List[float]: + 
norm_factor = 1 + x = 0 + res = [] + for i in range(len(values)): + x = x * smooth + values[i] # Exponential decay + res.append(x / norm_factor) + # + norm_factor *= smooth + norm_factor += 1 + return res + + +def plot_image(data: Dict[str, List[Item]], key_name: str, + smooth: float) -> Figure: + _data = data[key_name] + steps = [d['step'] for d in _data] + values = [d['value'] for d in _data] + fig, ax = plt.subplots(1, 1, squeeze=True, figsize=(8, 5), dpi=100) + ax.set_title(key_name) + if smooth != 0: + ax.plot(steps, values, color=COLOR) + values_s = tensorboard_smoothing(values, smooth) + ax.plot(steps, values_s, color=COLOR_S) + else: + ax.plot(steps, values, color=COLOR_S) + return fig diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_infer.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_infer.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..2ab8e64cc9f2ffdc9eb2d60a0ebc27679b2a1509 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_infer.ipynb @@ -0,0 +1,482 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Baichuan 推理" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置实验环境" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2023-07-02 22:28:00,199] [INFO] [real_accelerator.py:110:get_accelerator] Setting ds_accelerator to cuda (auto detect)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 22:28:00,675 - modelscope - INFO - PyTorch version 2.0.1 Found.\n", + "2023-07-02 22:28:00,676 - modelscope - INFO - Loading ast index from /home/hackathon/.cache/modelscope/ast_indexer\n", + "2023-07-02 22:28:00,700 - modelscope - INFO - Loading done! 
Current index file version is 1.6.2, with md5 ddf811ee982377c1357284a2bfda3dec and a total number of 861 components indexed\n", + "2023-07-02 22:28:01,367 - modelscope - INFO - [0, 1]\n", + "2023-07-02 22:28:01,512 - modelscope - INFO - Using device: cuda:0,1\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda', index=0)" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from _common import *\n", + "from transformers import TextStreamer\n", + "device_ids = [0, 1]\n", + "select_device(device_ids)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Model, Tokenizer\n", + "Note: 你需要设置CKPT_FPATH的内容, 指向`.bin`文件, 或`.pth`文件" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 22:28:03,375 - modelscope - INFO - Model revision not specified, use default: master in development mode\n", + "2023-07-02 22:28:03,375 - modelscope - INFO - Development mode use revision: master\n", + "2023-07-02 22:28:03,695 - modelscope - INFO - model_config: BaiChuanConfig {\n", + " \"architectures\": [\n", + " \"BaiChuanForCausalLM\"\n", + " ],\n", + " \"auto_map\": {\n", + " \"AutoConfig\": \"configuration_baichuan.BaiChuanConfig\",\n", + " \"AutoModelForCausalLM\": \"modeling_baichuan.BaiChuanForCausalLM\"\n", + " },\n", + " \"bos_token_id\": 1,\n", + " \"eos_token_id\": 2,\n", + " \"hidden_act\": \"silu\",\n", + " \"hidden_size\": 4096,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 11008,\n", + " \"max_position_embeddings\": 4096,\n", + " \"model_type\": \"baichuan\",\n", + " \"num_attention_heads\": 32,\n", + " \"num_hidden_layers\": 32,\n", + " \"pad_token_id\": 0,\n", + " \"rms_norm_eps\": 1e-06,\n", + " \"tie_word_embeddings\": false,\n", + " \"torch_dtype\": \"float16\",\n", + " \"transformers_version\": \"4.30.2\",\n", + " 
\"use_cache\": true,\n", + " \"vocab_size\": 64000\n", + "}\n", + "\n", + "The model weights are not tied. Please use the `tie_weights` method before using the `infer_auto_device` function.\n" + ] + }, + { + "data": { + "text/plain": [ + "BaiChuanForCausalLM(\n", + " (model): Model(\n", + " (embed_tokens): Embedding(64000, 4096, padding_idx=0)\n", + " (layers): ModuleList(\n", + " (0-31): 32 x DecoderLayer(\n", + " (self_attn): Attention(\n", + " (W_pack): Linear(in_features=4096, out_features=12288, bias=False)\n", + " (o_proj): Linear(in_features=4096, out_features=4096, bias=False)\n", + " (rotary_emb): RotaryEmbedding()\n", + " )\n", + " (mlp): MLP(\n", + " (gate_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (down_proj): Linear(in_features=11008, out_features=4096, bias=False)\n", + " (up_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (act_fn): SiLUActivation()\n", + " )\n", + " (input_layernorm): RMSNorm()\n", + " (post_attention_layernorm): RMSNorm()\n", + " )\n", + " )\n", + " (norm): RMSNorm()\n", + " )\n", + " (lm_head): Linear(in_features=4096, out_features=64000, bias=False)\n", + ")" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "CKPT_FAPTH = '/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/output_best/pytorch_model.bin'\n", + "LORA_TARGET_MODULES = ['W_pack']\n", + "\n", + "model_dir = snapshot_download('baichuan-inc/baichuan-7B', 'v1.0.5')\n", + "model, tokenizer = get_baichuan7B_model_tokenizer(model_dir)\n", + "model.bfloat16() # Consistent with training" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Lora" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 22:28:14,108 - modelscope - INFO - lora_config: LoRAConfig(rank=8, replace_modules=['W_pack'], 
lora_alpha=32, lora_dropout=0, merge_weights=True, use_merged_linear=False, enable_lora=None, fan_in_fan_out=False, bias='none', only_lora_trainable=True, pretrained_weights='/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/output_best/pytorch_model.bin')\n" + ] + }, + { + "data": { + "text/plain": [ + "BaiChuanForCausalLM(\n", + " (model): Model(\n", + " (embed_tokens): Embedding(64000, 4096, padding_idx=0)\n", + " (layers): ModuleList(\n", + " (0-31): 32 x DecoderLayer(\n", + " (self_attn): Attention(\n", + " (W_pack): Linear(in_features=4096, out_features=12288, bias=False)\n", + " (o_proj): Linear(in_features=4096, out_features=4096, bias=False)\n", + " (rotary_emb): RotaryEmbedding()\n", + " )\n", + " (mlp): MLP(\n", + " (gate_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (down_proj): Linear(in_features=11008, out_features=4096, bias=False)\n", + " (up_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (act_fn): SiLUActivation()\n", + " )\n", + " (input_layernorm): RMSNorm()\n", + " (post_attention_layernorm): RMSNorm()\n", + " )\n", + " )\n", + " (norm): RMSNorm()\n", + " )\n", + " (lm_head): Linear(in_features=4096, out_features=64000, bias=False)\n", + ")" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "LORA_RANK = 8\n", + "LORA_ALPHA = 32\n", + "LORA_DROPOUT_P = 0 # Arbitrary value\n", + "lora_config = LoRAConfig(\n", + " target_modules=LORA_TARGET_MODULES,\n", + " r=LORA_RANK,\n", + " lora_alpha=LORA_ALPHA,\n", + " lora_dropout=LORA_DROPOUT_P,\n", + " pretrained_weights=CKPT_FAPTH)\n", + "logger.info(f'lora_config: {lora_config}')\n", + "Swift.prepare_model(model, lora_config)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + 
"text": [ + "2023-07-02 22:28:28,832 - modelscope - INFO - No subset_name specified, defaulting to the default\n", + "2023-07-02 22:28:29,317 - modelscope - WARNING - Reusing dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 22:28:29,318 - modelscope - INFO - Generating dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 22:28:29,318 - modelscope - INFO - Reusing cached meta-data file: /home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files/941b733ec0354c2172a3386d8788bb37\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "682dc9eedfce4092a25fcadc977c794a", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Downloading data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "8e53d79d8e4845618231f3afb5bc096f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Extracting data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 285/285 [00:00<00:00, 1566679.74it/s]\n" + ] + } + ], + "source": [ + "test_dataset = make_dataset('validation', lambda system, user, assistant:\n", + " {'system': system, 'user': user, 'assistant': assistant})" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 推理" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[TEST] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 
是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_speech-generation\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_speech-generation\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"url\": \"http://90.49.118.175:2603/\", \"paths\": [{\"name\": \"modelscope_speech-generation\", \"model_id\": \"/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"method\": \"post\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"parameters\": [{\"name\": \"text\", \"description\": \"要转成语音的文本\", \"required\": \"True\"}, {\"name\": \"gender\", \"description\": \"用户身份\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. {\"plugin_name\": \"modelscope_speech-generation\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_speech-generation\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"url\": \"http://132.94.116.115:5983/\", \"paths\": [{\"name\": \"modelscope_speech-generation\", \"model_id\": \"/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"method\": \"post\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"parameters\": [{\"name\": \"text\", \"description\": \"要转成语音的文本\", \"required\": \"True\"}, {\"name\": \"gender\", \"description\": \"用户身份\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. 
{\"plugin_name\": \"modelscope_speech-generation\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_speech-generation\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"url\": \"http://94.43.176.75:1062/\", \"paths\": [{\"name\": \"modelscope_speech-generation\", \"model_id\": \"/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"method\": \"post\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"parameters\": [{\"name\": \"text\", \"description\": \"要转成语音的文本\", \"required\": \"True\"}, {\"name\": \"gender\", \"description\": \"用户身份\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "生成一首诗歌,主题为“秋天的美景”,读出来这段话 \n", + "\n", + "### 助手\n", + "秋天,是一个美丽的季节,是一个收获的季节,是一个充满诗意的季节。秋天的天空,湛蓝湛蓝的,像一块蓝宝石;秋天的田野,金黄色的稻谷,像一片金色的海洋;秋天的果园,硕果累累,像一幅美丽的画卷。秋天的山林,层林尽染,像一幅色彩斑斓的油画;秋天的河流,清澈见底,像一条银色的丝带。秋天的天空,湛蓝湛蓝的,像一块蓝宝石;秋天的田野,金黄色的稻谷,像一片金色的海洋;秋天的果园,硕果累累,像一幅美丽的画卷。秋天的山林,层林尽染,像一幅色彩斑斓的油画;秋天的河流,清澈见底,像一条银色的丝带。\n", + "\n", + "[LABELS]秋树红叶舞飘零,\n", + "山间小溪水潺潺。\n", + "微风拂面感清凉,\n", + "散步赏景心旷神怡。\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_speech-generation\", \"url\": \"http://90.49.118.175:2603/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"parameters\": {\"text\": \"秋树红叶舞飘零,\n", + "山间小溪水潺潺。\n", + "微风拂面感清凉,\n", + "散步赏景心旷神怡。\", \"gender\": \"woman\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"result\": \"\"}\n", + "```<|endofexec|>\n", + "\n", + "-----------------------------------------------------------------------------------\n", + "[TEST] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. 
{\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://159.1.4.174:3210/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. {\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://172.163.158.154:5325/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. 
{\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://133.94.12.37:3160/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "现在我给你另一条地址,请识别出里面的元素。输入地址:广东省深圳市南山区科技园北区 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-address\", \"url\": \"http://133.94.12.37:3160/damo/mgeo_geographic_elements_tagging_chinese_base\", \"parameters\": {\"text\": \"广东省深圳市南山区科技园北区\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"community\": \"科技园北区\"}\n", + "```<|endofexec|>\n", + "地址识别json表示:{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"community\": \"科技园北区\"}。我使用的模型是ModelScope的'damo/mgeo_geographic_elements_tagging_chinese_base'模型。这是基于达摩院联合高德发布的多任务多模态地址预训练底座MGeo模型微调得到的。\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-address\", \"url\": \"http://159.1.4.174:3210/damo/mgeo_geographic_elements_tagging_chinese_base\", \"parameters\": {\"text\": \"广东省深圳市南山区科技园北区\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"\", \"community\": \"科技园北区\", \"poi\": \"\"}\n", + "```<|endofexec|>\n", + "地址识别json表示:{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"\", \"community\": \"科技园北区\", \"poi\": 
\"\"}。我使用的模型是ModelScope的'damo/mgeo_geographic_elements_tagging_chinese_base'模型。这是基于达摩院联合高德发布的多任务多模态地址预训练底座MGeo模型微调得到的。\n", + "-----------------------------------------------------------------------------------\n", + "[TEST] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。目前支持的插件信息如下,请自行判断是否需要调用插件来解决当前用户问题。若需要调用插件,则需要将插件调用请求按照json格式给出,必须包含api_name、url、parameters字段,并在其前后使用<|startofthink|>和<|endofthink|>作为标志。然后你需要根据插件API调用结果生成合理的答复;若无需调用插件,则直接给出对应回复即可:\n", + "\n", + "1. {\"name\": \"modelscope_text-translation-zh2en\", \"description\": \"将输入的中文文本翻译成英文\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_text-translation-zh2en\", \"model_id\": \"/damo/nlp_csanmt_translation_zh2en\", \"method\": \"post\", \"description\": \"将输入的中文文本翻译成英文\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的中文文本\", \"required\": \"True\"}]}]}\n", + "\n", + "2. {\"name\": \"modelscope_speech-generation\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_speech-generation\", \"model_id\": \"/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"method\": \"post\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"parameters\": [{\"name\": \"text\", \"description\": \"要转成语音的文本\", \"required\": \"True\"}, {\"name\": \"gender\", \"description\": \"用户身份\", \"required\": \"True\"}]}]}\n", + "\n", + "3. 
{\"name\": \"modelscope_image-generation\", \"description\": \"针对文本输入,生成对应的图片\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_image-generation\", \"model_id\": \"/damo/image_generation\", \"method\": \"post\", \"description\": \"针对文本输入,生成对应的图片\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本信息\", \"required\": \"True\"}]}]} \n", + "\n", + "### 用户\n", + "歌手:古巨基\n", + "歌曲名:爱情马戏班\n", + "经典歌词:情是何等诡秘能令人使出看家把戏;恋爱就像走纲线般惊险;为你献技 像马戏班\n", + "请结合以上信息,编写一个智能音响的播放导语,需要有文采,字数30字以内,凸显一下即将播放该歌曲 \n", + "\n", + "### 助手\n", + "古巨基的《爱情马戏班》,是一首经典的情歌,歌词中充满了对爱情的向往和对爱情的渴望,让人不禁沉醉其中。这首歌的旋律优美动听,歌词朗朗上口,让人听了就忍不住跟着哼唱。\n", + "\n", + "[LABELS]亲爱的主人,今天我为您带来的是古巨基的经典之作——《爱情马戏班》。这首歌曲描绘了情与爱的神秘和惊险,让人们为之倾倒。让我们一起享受这场爱情的马戏表演吧!\n", + "-----------------------------------------------------------------------------------\n", + "[TEST] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://114.42.178.183:8005/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://93.82.87.89:6631/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://4.105.93.165:8143/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"动物\": null, \"食物\": null, \"颜色\": null}\n", + "这只棕色的狗狗很喜欢吃狗粮。 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://4.105.93.165:8143/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"动物\": [\"棕色的狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": [\"棕色\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"动物\": [\"棕色的狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": 
[\"棕色\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://114.42.178.183:8005/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"动物\": [\"狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": [\"棕色\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"动物\": [\"狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": [\"棕色\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "-----------------------------------------------------------------------------------\n", + "[TEST] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://28.179.171.5:6428/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://100.111.18.38:6408/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://144.67.18.142:6381/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"人物\": null, \"地理位置\": null, \"组织机构\": null}\n", + "谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://144.67.18.142:6381/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [\"谷歌公司\"], \"地理位置\": [\"美国\"], \"组织机构\": [\"科技公司\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": 
[\"谷歌公司\"], \"地理位置\": [\"美国\"], \"组织机构\": [\"科技公司\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://100.111.18.38:6408/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"美国\", \"加利福尼亚州山景市\"], \"组织机构\": [\"谷歌公司\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"美国\", \"加利福尼亚州山景市\"], \"组织机构\": [\"谷歌公司\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "-----------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)\n", + "for d in test_dataset[:5]:\n", + " system = d['system']\n", + " user = d['user']\n", + " assistant = d['assistant']\n", + " input_ids = tokenize_function(system, user, None, tokenizer)['input_ids']\n", + " print(f'[TEST]{tokenizer.decode(input_ids)}', end='')\n", + " input_ids = torch.tensor(input_ids)[None].cuda()\n", + " attention_mask = torch.ones_like(input_ids)\n", + " generate_ids = model.generate(input_ids=input_ids, max_new_tokens=512,\n", + " attention_mask=attention_mask,\n", + " streamer=streamer, pad_token_id=tokenizer.eos_token_id, \n", + " temperature=0.7, top_k=50, top_p=0.7, do_sample=True)\n", + " print()\n", + " print(f'[LABELS]{assistant}')\n", + " print('-----------------------------------------------------------------------------------')\n", + " # input('next[ENTER]')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { 
+ "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_sft.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_sft.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..69bb2887117eece26731896855a210b1c66c0f77 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/baichuan_sft.ipynb @@ -0,0 +1,1814 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Baichuan + Lora + Agent\n", + "baichuan-7B是由百川智能开发的一个开源的大规模预训练模型。基于Transformer结构,在大约1.2万亿tokens上训练的70亿参数模型,支持中英双语,上下文窗口长度为4096。在标准的中文和英文权威benchmark(C-EVAL/MMLU)上均取得同尺寸最好的效果。" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "1. Ref: https://modelscope.cn/models/baichuan-inc/baichuan-7B/summary\n", + "2. 以下脚本可以在2*A10环境下正常运行, 大概占用40G显存\n", + "3. 
python>=3.8" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置实验环境" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# !pip install modelscope\n", + "# !pip install numpy pandas matplotlib scikit-learn\n", + "# !pip install transformers datasets\n", + "# !conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia\n", + "# !pip install tqdm tensorboard torchmetrics sentencepiece charset_normalizer accelerate\n", + "\n", + "# !pip install numpy -U # Resolve torchmetrics dependencies and update numpy" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2023-07-02 17:24:09,391] [INFO] [real_accelerator.py:110:get_accelerator] Setting ds_accelerator to cuda (auto detect)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/home/hackathon/miniconda3/envs/hackathon/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n", + "2023-07-02 17:24:09,870 - modelscope - INFO - PyTorch version 2.0.1 Found.\n", + "2023-07-02 17:24:09,871 - modelscope - INFO - Loading ast index from /home/hackathon/.cache/modelscope/ast_indexer\n", + "2023-07-02 17:24:09,895 - modelscope - INFO - Loading done! 
Current index file version is 1.6.2, with md5 ddf811ee982377c1357284a2bfda3dec and a total number of 861 components indexed\n", + "2023-07-02 17:24:10,570 - modelscope - INFO - [0, 1]\n", + "2023-07-02 17:24:10,719 - modelscope - INFO - Using device: cuda:0,1\n", + "2023-07-02 17:24:10,720 - modelscope - INFO - Global seed set to 42\n" + ] + } + ], + "source": [ + "from _common import *\n", + "device_ids = [0, 1]\n", + "select_device(device_ids)\n", + "_ = seed_everything(42)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Model, Tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 17:24:11,036 - modelscope - INFO - Model revision not specified, use default: master in development mode\n", + "2023-07-02 17:24:11,037 - modelscope - INFO - Development mode use revision: master\n", + "2023-07-02 17:24:11,364 - modelscope - INFO - model_config: BaiChuanConfig {\n", + " \"architectures\": [\n", + " \"BaiChuanForCausalLM\"\n", + " ],\n", + " \"auto_map\": {\n", + " \"AutoConfig\": \"configuration_baichuan.BaiChuanConfig\",\n", + " \"AutoModelForCausalLM\": \"modeling_baichuan.BaiChuanForCausalLM\"\n", + " },\n", + " \"bos_token_id\": 1,\n", + " \"eos_token_id\": 2,\n", + " \"hidden_act\": \"silu\",\n", + " \"hidden_size\": 4096,\n", + " \"initializer_range\": 0.02,\n", + " \"intermediate_size\": 11008,\n", + " \"max_position_embeddings\": 4096,\n", + " \"model_type\": \"baichuan\",\n", + " \"num_attention_heads\": 32,\n", + " \"num_hidden_layers\": 32,\n", + " \"pad_token_id\": 0,\n", + " \"rms_norm_eps\": 1e-06,\n", + " \"tie_word_embeddings\": false,\n", + " \"torch_dtype\": \"float16\",\n", + " \"transformers_version\": \"4.30.2\",\n", + " \"use_cache\": true,\n", + " \"vocab_size\": 64000\n", + "}\n", + "\n", + "The model weights are not tied. 
Please use the `tie_weights` method before using the `infer_auto_device` function.\n" + ] + } + ], + "source": [ + "WORK_DIR = 'runs/baichuan'\n", + "LORA_TARGET_MODULES = ['W_pack']\n", + "#\n", + "model_dir = snapshot_download('baichuan-inc/baichuan-7B', 'v1.0.5')\n", + "model, tokenizer = get_baichuan7B_model_tokenizer(model_dir)\n", + "#\n", + "GRADIENT_CHECKPOINTING = True\n", + "if GRADIENT_CHECKPOINTING:\n", + " model.gradient_checkpointing_enable()\n", + " model.enable_input_require_grads()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 准备Lora" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 17:24:21,741 - modelscope - INFO - lora_config: LoRAConfig(rank=8, replace_modules=['W_pack'], lora_alpha=32, lora_dropout=0.1, merge_weights=True, use_merged_linear=False, enable_lora=None, fan_in_fan_out=False, bias='none', only_lora_trainable=True, pretrained_weights=None)\n", + "2023-07-02 17:24:36,360 - modelscope - INFO - model.embed_tokens.weight: requires_grad=False\n", + "2023-07-02 17:24:36,360 - modelscope - INFO - model.layers.0.self_attn.W_pack.weight: requires_grad=False\n", + "2023-07-02 17:24:36,361 - modelscope - INFO - model.layers.0.self_attn.W_pack.lora_A: requires_grad=True\n", + "2023-07-02 17:24:36,361 - modelscope - INFO - model.layers.0.self_attn.W_pack.lora_B: requires_grad=True\n", + "2023-07-02 17:24:36,361 - modelscope - INFO - model.layers.0.self_attn.o_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,362 - modelscope - INFO - model.layers.0.mlp.gate_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,362 - modelscope - INFO - model.layers.0.mlp.down_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,363 - modelscope - INFO - model.layers.0.mlp.up_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,363 - modelscope - INFO - 
model.layers.0.input_layernorm.weight: requires_grad=False\n", + "2023-07-02 17:24:36,363 - modelscope - INFO - model.layers.0.post_attention_layernorm.weight: requires_grad=False\n", + "2023-07-02 17:24:36,363 - modelscope - INFO - model.layers.1.self_attn.W_pack.weight: requires_grad=False\n", + "2023-07-02 17:24:36,364 - modelscope - INFO - model.layers.1.self_attn.W_pack.lora_A: requires_grad=True\n", + "2023-07-02 17:24:36,364 - modelscope - INFO - model.layers.1.self_attn.W_pack.lora_B: requires_grad=True\n", + "2023-07-02 17:24:36,364 - modelscope - INFO - model.layers.1.self_attn.o_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,364 - modelscope - INFO - model.layers.1.mlp.gate_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,365 - modelscope - INFO - model.layers.1.mlp.down_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,365 - modelscope - INFO - model.layers.1.mlp.up_proj.weight: requires_grad=False\n", + "2023-07-02 17:24:36,365 - modelscope - INFO - model.layers.1.input_layernorm.weight: requires_grad=False\n", + "2023-07-02 17:24:36,365 - modelscope - INFO - model.layers.1.post_attention_layernorm.weight: requires_grad=False\n", + "2023-07-02 17:24:36,365 - modelscope - INFO - model.layers.2.self_attn.W_pack.weight: requires_grad=False\n", + "2023-07-02 17:24:36,366 - modelscope - INFO - ...\n", + "2023-07-02 17:24:36,368 - modelscope - INFO - BaiChuanForCausalLM: 7004.7539M Params (4.1943M Trainable), 33.5565M Buffers.\n", + "2023-07-02 17:24:36,370 - modelscope - INFO - device: cuda:0, dtype: torch.float16\n" + ] + }, + { + "data": { + "text/plain": [ + "BaiChuanForCausalLM(\n", + " (model): Model(\n", + " (embed_tokens): Embedding(64000, 4096, padding_idx=0)\n", + " (layers): ModuleList(\n", + " (0-31): 32 x DecoderLayer(\n", + " (self_attn): Attention(\n", + " (W_pack): Linear(\n", + " in_features=4096, out_features=12288, bias=False\n", + " (lora_dropout): Dropout(p=0.1, inplace=False)\n", + " )\n", + " (o_proj): 
Linear(in_features=4096, out_features=4096, bias=False)\n", + " (rotary_emb): RotaryEmbedding()\n", + " )\n", + " (mlp): MLP(\n", + " (gate_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (down_proj): Linear(in_features=11008, out_features=4096, bias=False)\n", + " (up_proj): Linear(in_features=4096, out_features=11008, bias=False)\n", + " (act_fn): SiLUActivation()\n", + " )\n", + " (input_layernorm): RMSNorm()\n", + " (post_attention_layernorm): RMSNorm()\n", + " )\n", + " )\n", + " (norm): RMSNorm()\n", + " )\n", + " (lm_head): Linear(in_features=4096, out_features=64000, bias=False)\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "LORA_RANK = 8\n", + "LORA_ALPHA = 32\n", + "LORA_DROPOUT_P = 0.1\n", + "lora_config = LoRAConfig(\n", + " target_modules=LORA_TARGET_MODULES,\n", + " r=LORA_RANK,\n", + " lora_alpha=LORA_ALPHA,\n", + " lora_dropout=LORA_DROPOUT_P)\n", + "logger.info(f'lora_config: {lora_config}')\n", + "Swift.prepare_model(model, lora_config)\n", + "#\n", + "show_freeze_layers(model)\n", + "print_model_info(model)\n", + "_p = list(model.parameters())[100]\n", + "logger.info(f'device: {_p.device}, dtype: {_p.dtype}')\n", + "model.bfloat16()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 5036/5036 [00:12<00:00, 398.82it/s]\n", + "100%|██████████| 285/285 [00:00<00:00, 383.15it/s]\n", + "2023-07-02 17:24:49,863 - modelscope - INFO - Dataset Token Length: 958.649707±371.357483, min=44.000000, max=2045.000000, size=4953\n", + "2023-07-02 17:24:49,864 - modelscope - INFO - Dataset Token Length: 993.447653±337.821458, min=75.000000, max=1946.000000, size=277\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + 
"text": [ + "[INPUT_IDS] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://109.199.101.10:1485/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://9.32.64.200:5873/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://54.149.78.185:3979/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"人物\": null, \"地理位置\": null, \"组织机构\": null}\n", + "近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。 \n", + "\n", + "### 助手\n", + " <|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://9.32.64.200:5873/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "\n", + "[LABLES] <|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://9.32.64.200:5873/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], 
\"组织机构\": []}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n" + ] + } + ], + "source": [ + "tokenize_function = partial(tokenize_function, tokenizer=tokenizer)\n", + "train_dataset = make_dataset('train', tokenize_function)\n", + "val_dataset = make_dataset('validation', tokenize_function)\n", + "# Data analysis\n", + "stat_dataset(train_dataset)\n", + "stat_dataset(val_dataset)\n", + "data_collate_fn = partial(data_collate_fn, tokenizer=tokenizer)\n", + "print_examples(train_dataset[0], tokenizer)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置Config" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 17:24:49,892 - modelscope - INFO - work_dir: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449\n" + ] + } + ], + "source": [ + "cfg_file = os.path.join(model_dir, 'configuration.json')\n", + "#\n", + "BATCH_SIZE = 1\n", + "MAX_EPOCHS = 1\n", + "T_max = get_T_max(len(train_dataset), BATCH_SIZE, MAX_EPOCHS, True)\n", + "WORK_DIR = get_work_dir(WORK_DIR)\n", + "EVAL_INTERVAL = 200\n", + "CONFIG = Config({\n", + " 'train': {\n", + " 'dataloader': {\n", + " 'batch_size_per_gpu': BATCH_SIZE,\n", + " 'workers_per_gpu': 1,\n", + " 'shuffle': True,\n", + " 'drop_last': True,\n", + " 'pin_memory': True\n", + " },\n", + " 'max_epochs': MAX_EPOCHS,\n", + " 'work_dir': WORK_DIR,\n", + " 'optimizer': {\n", + " 'type': 'AdamW',\n", + " 'lr': 1e-4,\n", + " 'weight_decay': 0.01,\n", + " 'options': {\n", + " 'cumulative_iters': 16, 'grad_clip': {\n", + " 'norm_type': 2,\n", + " 'max_norm': 2.0\n", + " }\n", + " }\n", + " },\n", + " 'lr_scheduler': {\n", + " 'type': 'CosineAnnealingLR',\n", + " 'T_max': T_max,\n", + " 'eta_min': 1e-5,\n", + " 'options': {\n", + " 'by_epoch': False,\n", + " 'warmup': {\n", + " 'type': 'LinearWarmup',\n", + " 
'warmup_ratio': 0.1,\n", + " 'warmup_iters': 200\n", + " }\n", + " }\n", + " },\n", + " 'hooks': [\n", + " {'type': 'CheckpointHook', 'by_epoch': False, 'interval': EVAL_INTERVAL, 'max_checkpoint_num': 1},\n", + " {'type': 'EvaluationHook', 'by_epoch': False, 'interval': EVAL_INTERVAL},\n", + " {'type': 'BestCkptSaverHook',\n", + " 'metric_key': 'acc',\n", + " 'save_best': True, 'rule': 'max', 'max_checkpoint_num': 1},\n", + " {'type': 'TextLoggerHook',\n", + " 'by_epoch': True, # Whether EpochBasedTrainer is used\n", + " 'interval': 5},\n", + " {'type': 'TensorboardHook', 'by_epoch': False, 'interval': 5}\n", + " ]\n", + " },\n", + " 'evaluation': {\n", + " 'dataloader': {\n", + " 'batch_size_per_gpu': BATCH_SIZE,\n", + " 'workers_per_gpu': 1,\n", + " 'shuffle': False,\n", + " 'drop_last': False,\n", + " 'pin_memory': True\n", + " },\n", + " 'metrics': [\n", + " {'type': 'my_metric', 'vocab_size': tokenizer.vocab_size}\n", + " ]\n", + " }\n", + "})" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 微调" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 17:24:49,903 - modelscope - INFO - ==========================Training Config Start==========================\n", + "2023-07-02 17:24:49,904 - modelscope - INFO - {\n", + " \"framework\": \"pytorch\",\n", + " \"task\": \"text-generation\",\n", + " \"model\": {\n", + " \"type\": \"Baichuan-7B\"\n", + " },\n", + " \"pipeline\": {\n", + " \"type\": \"Baichuan-7B-text-generation-pipe\"\n", + " },\n", + " \"allow_remote\": true,\n", + " \"train\": {\n", + " \"hooks\": [\n", + " {\n", + " \"type\": \"TensorboardHook\",\n", + " \"by_epoch\": false,\n", + " \"interval\": 5\n", + " }\n", + " ],\n", + " \"dataloader\": {\n", + " \"batch_size_per_gpu\": 1,\n", + " \"workers_per_gpu\": 1,\n", + " \"shuffle\": true,\n", + " \"drop_last\": true,\n", + " 
\"pin_memory\": true\n", + " },\n", + " \"max_epochs\": 1,\n", + " \"work_dir\": \"/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449\",\n", + " \"optimizer\": {\n", + " \"type\": \"AdamW\",\n", + " \"lr\": 0.0001,\n", + " \"weight_decay\": 0.01,\n", + " \"options\": {\n", + " \"cumulative_iters\": 16,\n", + " \"grad_clip\": {\n", + " \"norm_type\": 2,\n", + " \"max_norm\": 2.0\n", + " }\n", + " }\n", + " },\n", + " \"lr_scheduler\": {\n", + " \"type\": \"CosineAnnealingLR\",\n", + " \"T_max\": 4953,\n", + " \"eta_min\": 1e-05,\n", + " \"options\": {\n", + " \"by_epoch\": false,\n", + " \"warmup\": {\n", + " \"type\": \"LinearWarmup\",\n", + " \"warmup_ratio\": 0.1,\n", + " \"warmup_iters\": 200\n", + " }\n", + " }\n", + " },\n", + " \"checkpoint\": {\n", + " \"period\": {\n", + " \"by_epoch\": false,\n", + " \"interval\": 200,\n", + " \"max_checkpoint_num\": 1\n", + " },\n", + " \"best\": {\n", + " \"metric_key\": \"acc\",\n", + " \"save_best\": true,\n", + " \"rule\": \"max\",\n", + " \"max_checkpoint_num\": 1\n", + " }\n", + " },\n", + " \"logging\": {\n", + " \"by_epoch\": true,\n", + " \"interval\": 5\n", + " }\n", + " },\n", + " \"evaluation\": {\n", + " \"dataloader\": {\n", + " \"batch_size_per_gpu\": 1,\n", + " \"workers_per_gpu\": 1,\n", + " \"shuffle\": false,\n", + " \"drop_last\": false,\n", + " \"pin_memory\": true\n", + " },\n", + " \"metrics\": [\n", + " {\n", + " \"type\": \"my_metric\",\n", + " \"vocab_size\": 64000\n", + " }\n", + " ],\n", + " \"period\": {\n", + " \"by_epoch\": false,\n", + " \"interval\": 200\n", + " }\n", + " }\n", + "}\n", + "2023-07-02 17:24:49,904 - modelscope - INFO - ===========================Training Config End===========================\n", + "2023-07-02 17:24:49,905 - modelscope - WARNING - ('OPTIMIZER', 'default', 'AdamW') not found in ast index file\n", + "2023-07-02 17:24:49,906 - modelscope - WARNING - ('LR_SCHEDULER', 'default', 'CosineAnnealingLR') not found in ast index file\n", + "2023-07-02 
17:24:49,907 - modelscope - INFO - Stage: before_run:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: before_train_epoch:\n", + " (LOW ) LrSchedulerHook \n", + " -------------------- \n", + "Stage: before_train_iter:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " -------------------- \n", + "Stage: after_train_iter:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " (NORMAL ) EvaluationHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_train_epoch:\n", + " (NORMAL ) EvaluationHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_val_epoch:\n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_run:\n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "2023-07-02 17:24:49,913 - modelscope - INFO - Checkpoints will be saved to /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449\n", + "2023-07-02 17:24:49,916 - modelscope - INFO - Checkpoints will be saved to /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449\n", + "2023-07-02 17:24:49,917 - modelscope - INFO - Text logs will be saved to /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449\n", + "2023-07-02 17:24:49,917 - modelscope - INFO - tensorboard files will be saved to /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/tensorboard_output\n", + "`use_cache=True` is incompatible with 
gradient checkpointing. Setting `use_cache=False`...\n", + "2023-07-02 17:24:55,315 - modelscope - INFO - epoch [1][5/4953]\tlr: 1.000e-05, memory: 7084, loss: 5.2094\n", + "2023-07-02 17:24:59,926 - modelscope - INFO - epoch [1][10/4953]\tlr: 1.000e-05, memory: 7084, loss: 1.9516\n", + "2023-07-02 17:25:05,112 - modelscope - INFO - epoch [1][15/4953]\tlr: 1.000e-05, memory: 7504, loss: 1.8344\n", + "2023-07-02 17:25:13,131 - modelscope - INFO - epoch [1][20/4953]\tlr: 1.225e-05, memory: 8075, loss: 3.3937\n", + "2023-07-02 17:25:19,098 - modelscope - INFO - epoch [1][25/4953]\tlr: 1.450e-05, memory: 8102, loss: 1.8047\n", + "2023-07-02 17:25:25,763 - modelscope - INFO - epoch [1][30/4953]\tlr: 1.675e-05, memory: 8102, loss: 1.5594\n", + "2023-07-02 17:25:33,888 - modelscope - INFO - epoch [1][35/4953]\tlr: 1.900e-05, memory: 8293, loss: 1.5852\n", + "2023-07-02 17:25:39,548 - modelscope - INFO - epoch [1][40/4953]\tlr: 2.125e-05, memory: 8293, loss: 1.7828\n", + "2023-07-02 17:25:44,599 - modelscope - INFO - epoch [1][45/4953]\tlr: 2.350e-05, memory: 8293, loss: 5.5922\n", + "2023-07-02 17:25:49,692 - modelscope - INFO - epoch [1][50/4953]\tlr: 2.575e-05, memory: 8293, loss: 2.6641\n", + "2023-07-02 17:25:56,104 - modelscope - INFO - epoch [1][55/4953]\tlr: 2.800e-05, memory: 8742, loss: 2.2344\n", + "2023-07-02 17:26:04,765 - modelscope - INFO - epoch [1][60/4953]\tlr: 3.025e-05, memory: 8742, loss: 1.7320\n", + "2023-07-02 17:26:10,288 - modelscope - INFO - epoch [1][65/4953]\tlr: 3.250e-05, memory: 8742, loss: 5.0578\n", + "2023-07-02 17:26:14,998 - modelscope - INFO - epoch [1][70/4953]\tlr: 3.475e-05, memory: 8742, loss: 4.0109\n", + "2023-07-02 17:26:21,600 - modelscope - INFO - epoch [1][75/4953]\tlr: 3.700e-05, memory: 8742, loss: 1.7266\n", + "2023-07-02 17:26:26,920 - modelscope - INFO - epoch [1][80/4953]\tlr: 3.925e-05, memory: 8742, loss: 2.9578\n", + "2023-07-02 17:26:32,447 - modelscope - INFO - epoch [1][85/4953]\tlr: 4.150e-05, memory: 8742, loss: 
5.8422\n", + "2023-07-02 17:26:38,768 - modelscope - INFO - epoch [1][90/4953]\tlr: 4.375e-05, memory: 8742, loss: 1.8719\n", + "2023-07-02 17:26:45,955 - modelscope - INFO - epoch [1][95/4953]\tlr: 4.600e-05, memory: 8742, loss: 1.4359\n", + "2023-07-02 17:26:50,324 - modelscope - INFO - epoch [1][100/4953]\tlr: 4.825e-05, memory: 8742, loss: 5.6125\n", + "2023-07-02 17:26:58,123 - modelscope - INFO - epoch [1][105/4953]\tlr: 5.050e-05, memory: 8742, loss: 2.9656\n", + "2023-07-02 17:27:04,523 - modelscope - INFO - epoch [1][110/4953]\tlr: 5.275e-05, memory: 8742, loss: 1.7484\n", + "2023-07-02 17:27:09,550 - modelscope - INFO - epoch [1][115/4953]\tlr: 5.500e-05, memory: 8742, loss: 2.7133\n", + "2023-07-02 17:27:17,037 - modelscope - INFO - epoch [1][120/4953]\tlr: 5.725e-05, memory: 8742, loss: 1.9953\n", + "2023-07-02 17:27:22,364 - modelscope - INFO - epoch [1][125/4953]\tlr: 5.950e-05, memory: 8742, loss: 4.4578\n", + "2023-07-02 17:27:26,915 - modelscope - INFO - epoch [1][130/4953]\tlr: 6.175e-05, memory: 8742, loss: 4.4344\n", + "2023-07-02 17:27:34,586 - modelscope - INFO - epoch [1][135/4953]\tlr: 6.400e-05, memory: 8742, loss: 1.6328\n", + "2023-07-02 17:27:41,580 - modelscope - INFO - epoch [1][140/4953]\tlr: 6.625e-05, memory: 8742, loss: 3.9422\n", + "2023-07-02 17:27:47,073 - modelscope - INFO - epoch [1][145/4953]\tlr: 6.850e-05, memory: 8742, loss: 2.0562\n", + "2023-07-02 17:27:53,069 - modelscope - INFO - epoch [1][150/4953]\tlr: 7.075e-05, memory: 8742, loss: 1.8477\n", + "2023-07-02 17:27:58,364 - modelscope - INFO - epoch [1][155/4953]\tlr: 7.300e-05, memory: 8742, loss: 4.5445\n", + "2023-07-02 17:28:05,747 - modelscope - INFO - epoch [1][160/4953]\tlr: 7.525e-05, memory: 8742, loss: 4.0109\n", + "2023-07-02 17:28:12,108 - modelscope - INFO - epoch [1][165/4953]\tlr: 7.750e-05, memory: 8742, loss: 2.0578\n", + "2023-07-02 17:28:17,145 - modelscope - INFO - epoch [1][170/4953]\tlr: 7.975e-05, memory: 8742, loss: 1.9109\n", + "2023-07-02 
17:28:23,027 - modelscope - INFO - epoch [1][175/4953]\tlr: 8.200e-05, memory: 8742, loss: 3.2410\n", + "2023-07-02 17:28:27,778 - modelscope - INFO - epoch [1][180/4953]\tlr: 8.425e-05, memory: 8742, loss: 2.9000\n", + "2023-07-02 17:28:34,508 - modelscope - INFO - epoch [1][185/4953]\tlr: 8.650e-05, memory: 8742, loss: 1.6062\n", + "2023-07-02 17:28:40,560 - modelscope - INFO - epoch [1][190/4953]\tlr: 8.875e-05, memory: 8742, loss: 1.5594\n", + "2023-07-02 17:28:46,479 - modelscope - INFO - epoch [1][195/4953]\tlr: 9.100e-05, memory: 8742, loss: 1.9875\n", + "2023-07-02 17:28:53,324 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 17:31:08,796 - modelscope - INFO - Saving checkpoint at 200 iter\n", + "2023-07-02 17:31:08,837 - modelscope - INFO - Saving checkpoint at 200 iter\n", + "2023-07-02 17:31:08,875 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8742, evaluation/acc: 0.7108, evaluation/loss: 2.4241, loss: 1.8062\n", + "2023-07-02 17:31:15,472 - modelscope - INFO - epoch [1][205/4953]\tlr: 9.550e-05, memory: 8742, loss: 1.9172\n", + "2023-07-02 17:31:21,195 - modelscope - INFO - epoch [1][210/4953]\tlr: 9.775e-05, memory: 8742, loss: 2.5586\n", + "2023-07-02 17:31:26,642 - modelscope - INFO - epoch [1][215/4953]\tlr: 1.000e-04, memory: 8742, loss: 2.1422\n", + "2023-07-02 17:31:32,941 - modelscope - INFO - epoch [1][220/4953]\tlr: 9.998e-05, memory: 8742, loss: 2.8609\n", + "2023-07-02 17:31:37,465 - modelscope - INFO - epoch [1][225/4953]\tlr: 9.996e-05, memory: 8742, loss: 1.9953\n", + "2023-07-02 17:31:42,190 - modelscope - INFO - epoch [1][230/4953]\tlr: 9.994e-05, memory: 8742, loss: 1.8422\n", + "2023-07-02 17:31:49,617 - modelscope - INFO - epoch [1][235/4953]\tlr: 9.992e-05, memory: 8742, loss: 1.8328\n", + "2023-07-02 17:31:54,582 - modelscope - INFO - epoch [1][240/4953]\tlr: 9.990e-05, memory: 8742, loss: 
2.5031\n", + "2023-07-02 17:32:03,094 - modelscope - INFO - epoch [1][245/4953]\tlr: 9.988e-05, memory: 8742, loss: 3.4578\n", + "2023-07-02 17:32:09,110 - modelscope - INFO - epoch [1][250/4953]\tlr: 9.986e-05, memory: 8742, loss: 3.1359\n", + "2023-07-02 17:32:14,901 - modelscope - INFO - epoch [1][255/4953]\tlr: 9.984e-05, memory: 8742, loss: 3.4672\n", + "2023-07-02 17:32:21,012 - modelscope - INFO - epoch [1][260/4953]\tlr: 9.982e-05, memory: 8742, loss: 1.3734\n", + "2023-07-02 17:32:26,921 - modelscope - INFO - epoch [1][265/4953]\tlr: 9.979e-05, memory: 8742, loss: 1.7055\n", + "2023-07-02 17:32:33,958 - modelscope - INFO - epoch [1][270/4953]\tlr: 9.977e-05, memory: 8933, loss: 4.9609\n", + "2023-07-02 17:32:39,555 - modelscope - INFO - epoch [1][275/4953]\tlr: 9.975e-05, memory: 8933, loss: 3.0906\n", + "2023-07-02 17:32:45,339 - modelscope - INFO - epoch [1][280/4953]\tlr: 9.972e-05, memory: 8933, loss: 3.2016\n", + "2023-07-02 17:32:51,159 - modelscope - INFO - epoch [1][285/4953]\tlr: 9.970e-05, memory: 8933, loss: 3.4461\n", + "2023-07-02 17:32:57,166 - modelscope - INFO - epoch [1][290/4953]\tlr: 9.967e-05, memory: 8933, loss: 1.9609\n", + "2023-07-02 17:33:06,217 - modelscope - INFO - epoch [1][295/4953]\tlr: 9.965e-05, memory: 8933, loss: 1.9680\n", + "2023-07-02 17:33:12,393 - modelscope - INFO - epoch [1][300/4953]\tlr: 9.962e-05, memory: 8933, loss: 1.5422\n", + "2023-07-02 17:33:17,688 - modelscope - INFO - epoch [1][305/4953]\tlr: 9.960e-05, memory: 8933, loss: 2.6953\n", + "2023-07-02 17:33:21,863 - modelscope - INFO - epoch [1][310/4953]\tlr: 9.957e-05, memory: 8933, loss: 3.0094\n", + "2023-07-02 17:33:27,411 - modelscope - INFO - epoch [1][315/4953]\tlr: 9.954e-05, memory: 8933, loss: 1.9156\n", + "2023-07-02 17:33:33,136 - modelscope - INFO - epoch [1][320/4953]\tlr: 9.952e-05, memory: 8933, loss: 1.9672\n", + "2023-07-02 17:33:38,217 - modelscope - INFO - epoch [1][325/4953]\tlr: 9.949e-05, memory: 8933, loss: 4.3375\n", + "2023-07-02 
17:33:44,012 - modelscope - INFO - epoch [1][330/4953]\tlr: 9.946e-05, memory: 8933, loss: 1.8797\n", + "2023-07-02 17:33:49,670 - modelscope - INFO - epoch [1][335/4953]\tlr: 9.943e-05, memory: 8933, loss: 3.0969\n", + "2023-07-02 17:33:55,428 - modelscope - INFO - epoch [1][340/4953]\tlr: 9.940e-05, memory: 8933, loss: 3.2477\n", + "2023-07-02 17:34:02,117 - modelscope - INFO - epoch [1][345/4953]\tlr: 9.937e-05, memory: 8933, loss: 2.7969\n", + "2023-07-02 17:34:08,037 - modelscope - INFO - epoch [1][350/4953]\tlr: 9.934e-05, memory: 8933, loss: 2.3578\n", + "2023-07-02 17:34:13,172 - modelscope - INFO - epoch [1][355/4953]\tlr: 9.931e-05, memory: 8933, loss: 2.0656\n", + "2023-07-02 17:34:19,283 - modelscope - INFO - epoch [1][360/4953]\tlr: 9.928e-05, memory: 8933, loss: 1.8438\n", + "2023-07-02 17:34:25,323 - modelscope - INFO - epoch [1][365/4953]\tlr: 9.925e-05, memory: 8933, loss: 2.1828\n", + "2023-07-02 17:34:31,845 - modelscope - INFO - epoch [1][370/4953]\tlr: 9.922e-05, memory: 8933, loss: 2.0234\n", + "2023-07-02 17:34:40,587 - modelscope - INFO - epoch [1][375/4953]\tlr: 9.919e-05, memory: 8933, loss: 2.3086\n", + "2023-07-02 17:34:45,650 - modelscope - INFO - epoch [1][380/4953]\tlr: 9.915e-05, memory: 8933, loss: 3.6734\n", + "2023-07-02 17:34:51,009 - modelscope - INFO - epoch [1][385/4953]\tlr: 9.912e-05, memory: 8933, loss: 1.3594\n", + "2023-07-02 17:34:57,229 - modelscope - INFO - epoch [1][390/4953]\tlr: 9.909e-05, memory: 8933, loss: 2.3117\n", + "2023-07-02 17:35:03,231 - modelscope - INFO - epoch [1][395/4953]\tlr: 9.905e-05, memory: 8933, loss: 1.4961\n", + "2023-07-02 17:35:08,373 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.05it/s]\n", + "2023-07-02 17:37:23,763 - modelscope - INFO - Saving checkpoint at 400 iter\n", + "2023-07-02 17:37:23,803 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_200\n", + "2023-07-02 17:37:23,807 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8933, evaluation/acc: 0.7079, evaluation/loss: 2.1381, loss: 1.9438\n", + "2023-07-02 17:37:28,880 - modelscope - INFO - epoch [1][405/4953]\tlr: 9.898e-05, memory: 8933, loss: 3.1016\n", + "2023-07-02 17:37:35,463 - modelscope - INFO - epoch [1][410/4953]\tlr: 9.895e-05, memory: 8933, loss: 2.5531\n", + "2023-07-02 17:37:41,349 - modelscope - INFO - epoch [1][415/4953]\tlr: 9.891e-05, memory: 8933, loss: 2.2984\n", + "2023-07-02 17:37:47,522 - modelscope - INFO - epoch [1][420/4953]\tlr: 9.888e-05, memory: 8933, loss: 1.5930\n", + "2023-07-02 17:37:54,150 - modelscope - INFO - epoch [1][425/4953]\tlr: 9.884e-05, memory: 8933, loss: 2.2938\n", + "2023-07-02 17:37:59,915 - modelscope - INFO - epoch [1][430/4953]\tlr: 9.880e-05, memory: 8933, loss: 2.5562\n", + "2023-07-02 17:38:07,433 - modelscope - INFO - epoch [1][435/4953]\tlr: 9.877e-05, memory: 8933, loss: 1.5555\n", + "2023-07-02 17:38:14,761 - modelscope - INFO - epoch [1][440/4953]\tlr: 9.873e-05, memory: 8933, loss: 2.9109\n", + "2023-07-02 17:38:19,100 - modelscope - INFO - epoch [1][445/4953]\tlr: 9.869e-05, memory: 8933, loss: 1.6234\n", + "2023-07-02 17:38:24,534 - modelscope - INFO - epoch [1][450/4953]\tlr: 9.865e-05, memory: 8933, loss: 2.2734\n", + "2023-07-02 17:38:31,059 - modelscope - INFO - epoch [1][455/4953]\tlr: 9.861e-05, memory: 8933, loss: 1.3438\n", + "2023-07-02 17:38:37,366 - modelscope - INFO - epoch [1][460/4953]\tlr: 9.857e-05, memory: 8933, loss: 1.8469\n", + "2023-07-02 17:38:43,640 - modelscope - INFO - epoch [1][465/4953]\tlr: 9.853e-05, memory: 8933, loss: 1.7102\n", + "2023-07-02 17:38:48,102 - modelscope - INFO - epoch [1][470/4953]\tlr: 9.849e-05, memory: 8933, loss: 2.1500\n", + "2023-07-02 17:38:52,751 - modelscope - INFO - epoch [1][475/4953]\tlr: 9.845e-05, memory: 8933, loss: 2.4086\n", + "2023-07-02 17:38:59,938 - 
modelscope - INFO - epoch [1][480/4953]\tlr: 9.841e-05, memory: 8933, loss: 1.1828\n", + "2023-07-02 17:39:06,061 - modelscope - INFO - epoch [1][485/4953]\tlr: 9.837e-05, memory: 8933, loss: 1.0625\n", + "2023-07-02 17:39:13,230 - modelscope - INFO - epoch [1][490/4953]\tlr: 9.832e-05, memory: 8933, loss: 1.5750\n", + "2023-07-02 17:39:19,107 - modelscope - INFO - epoch [1][495/4953]\tlr: 9.828e-05, memory: 8933, loss: 1.9844\n", + "2023-07-02 17:39:27,177 - modelscope - INFO - epoch [1][500/4953]\tlr: 9.824e-05, memory: 8933, loss: 1.7211\n", + "2023-07-02 17:39:31,312 - modelscope - INFO - epoch [1][505/4953]\tlr: 9.819e-05, memory: 8933, loss: 2.9953\n", + "2023-07-02 17:39:37,871 - modelscope - INFO - epoch [1][510/4953]\tlr: 9.815e-05, memory: 8933, loss: 1.7234\n", + "2023-07-02 17:39:42,983 - modelscope - INFO - epoch [1][515/4953]\tlr: 9.811e-05, memory: 8933, loss: 3.3328\n", + "2023-07-02 17:39:50,299 - modelscope - INFO - epoch [1][520/4953]\tlr: 9.806e-05, memory: 8933, loss: 1.1523\n", + "2023-07-02 17:39:57,449 - modelscope - INFO - epoch [1][525/4953]\tlr: 9.802e-05, memory: 8933, loss: 2.2969\n", + "2023-07-02 17:40:03,936 - modelscope - INFO - epoch [1][530/4953]\tlr: 9.797e-05, memory: 8933, loss: 2.0359\n", + "2023-07-02 17:40:10,017 - modelscope - INFO - epoch [1][535/4953]\tlr: 9.792e-05, memory: 8933, loss: 2.2484\n", + "2023-07-02 17:40:15,110 - modelscope - INFO - epoch [1][540/4953]\tlr: 9.788e-05, memory: 8933, loss: 2.5000\n", + "2023-07-02 17:40:22,837 - modelscope - INFO - epoch [1][545/4953]\tlr: 9.783e-05, memory: 8933, loss: 1.6344\n", + "2023-07-02 17:40:27,326 - modelscope - INFO - epoch [1][550/4953]\tlr: 9.778e-05, memory: 8933, loss: 1.9516\n", + "2023-07-02 17:40:32,836 - modelscope - INFO - epoch [1][555/4953]\tlr: 9.774e-05, memory: 8933, loss: 2.7078\n", + "2023-07-02 17:40:38,900 - modelscope - INFO - epoch [1][560/4953]\tlr: 9.769e-05, memory: 8933, loss: 2.9023\n", + "2023-07-02 17:40:44,092 - modelscope - INFO - epoch 
[1][565/4953]\tlr: 9.764e-05, memory: 8933, loss: 3.7687\n", + "2023-07-02 17:40:51,182 - modelscope - INFO - epoch [1][570/4953]\tlr: 9.759e-05, memory: 8933, loss: 2.8531\n", + "2023-07-02 17:40:56,580 - modelscope - INFO - epoch [1][575/4953]\tlr: 9.754e-05, memory: 8933, loss: 1.8938\n", + "2023-07-02 17:41:04,432 - modelscope - INFO - epoch [1][580/4953]\tlr: 9.749e-05, memory: 8933, loss: 1.4187\n", + "2023-07-02 17:41:11,299 - modelscope - INFO - epoch [1][585/4953]\tlr: 9.744e-05, memory: 8933, loss: 2.2406\n", + "2023-07-02 17:41:17,405 - modelscope - INFO - epoch [1][590/4953]\tlr: 9.739e-05, memory: 8933, loss: 3.2250\n", + "2023-07-02 17:41:23,093 - modelscope - INFO - epoch [1][595/4953]\tlr: 9.734e-05, memory: 8933, loss: 1.5625\n", + "2023-07-02 17:41:29,552 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.05it/s]\n", + "2023-07-02 17:43:44,919 - modelscope - INFO - Saving checkpoint at 600 iter\n", + "2023-07-02 17:43:44,959 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter200_acc0.7107985615730286\n", + "2023-07-02 17:43:44,963 - modelscope - INFO - Saving checkpoint at 600 iter\n", + "2023-07-02 17:43:45,002 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_400\n", + "2023-07-02 17:43:45,006 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8933, evaluation/acc: 0.7199, evaluation/loss: 1.9766, loss: 1.2516\n", + "2023-07-02 17:43:50,488 - modelscope - INFO - epoch [1][605/4953]\tlr: 9.723e-05, memory: 8933, loss: 1.8469\n", + "2023-07-02 17:43:56,664 - modelscope - INFO - epoch [1][610/4953]\tlr: 9.718e-05, memory: 8933, loss: 1.5445\n", + "2023-07-02 17:44:02,529 - modelscope - INFO - epoch [1][615/4953]\tlr: 9.713e-05, memory: 8933, loss: 1.8422\n", + "2023-07-02 17:44:07,376 - modelscope - INFO - epoch 
[1][620/4953]\tlr: 9.707e-05, memory: 8933, loss: 2.4242\n", + "2023-07-02 17:44:12,991 - modelscope - INFO - epoch [1][625/4953]\tlr: 9.702e-05, memory: 8933, loss: 1.8070\n", + "2023-07-02 17:44:17,716 - modelscope - INFO - epoch [1][630/4953]\tlr: 9.697e-05, memory: 8933, loss: 2.0000\n", + "2023-07-02 17:44:22,023 - modelscope - INFO - epoch [1][635/4953]\tlr: 9.691e-05, memory: 8933, loss: 1.3898\n", + "2023-07-02 17:44:27,160 - modelscope - INFO - epoch [1][640/4953]\tlr: 9.686e-05, memory: 8933, loss: 1.6227\n", + "2023-07-02 17:44:33,519 - modelscope - INFO - epoch [1][645/4953]\tlr: 9.680e-05, memory: 8933, loss: 1.6672\n", + "2023-07-02 17:44:40,193 - modelscope - INFO - epoch [1][650/4953]\tlr: 9.674e-05, memory: 8933, loss: 1.4438\n", + "2023-07-02 17:44:44,906 - modelscope - INFO - epoch [1][655/4953]\tlr: 9.669e-05, memory: 8933, loss: 1.6648\n", + "2023-07-02 17:44:49,519 - modelscope - INFO - epoch [1][660/4953]\tlr: 9.663e-05, memory: 8933, loss: 1.2945\n", + "2023-07-02 17:44:55,845 - modelscope - INFO - epoch [1][665/4953]\tlr: 9.657e-05, memory: 8933, loss: 1.5773\n", + "2023-07-02 17:45:02,184 - modelscope - INFO - epoch [1][670/4953]\tlr: 9.652e-05, memory: 8933, loss: 1.8625\n", + "2023-07-02 17:45:05,554 - modelscope - INFO - epoch [1][675/4953]\tlr: 9.646e-05, memory: 8933, loss: 1.7039\n", + "2023-07-02 17:45:10,948 - modelscope - INFO - epoch [1][680/4953]\tlr: 9.640e-05, memory: 8933, loss: 2.0211\n", + "2023-07-02 17:45:15,605 - modelscope - INFO - epoch [1][685/4953]\tlr: 9.634e-05, memory: 8933, loss: 1.5969\n", + "2023-07-02 17:45:19,449 - modelscope - INFO - epoch [1][690/4953]\tlr: 9.628e-05, memory: 8933, loss: 1.7523\n", + "2023-07-02 17:45:26,684 - modelscope - INFO - epoch [1][695/4953]\tlr: 9.622e-05, memory: 8933, loss: 1.0891\n", + "2023-07-02 17:45:32,244 - modelscope - INFO - epoch [1][700/4953]\tlr: 9.616e-05, memory: 8933, loss: 1.9469\n", + "2023-07-02 17:45:37,894 - modelscope - INFO - epoch [1][705/4953]\tlr: 
9.610e-05, memory: 8933, loss: 2.0938\n", + "2023-07-02 17:45:43,345 - modelscope - INFO - epoch [1][710/4953]\tlr: 9.604e-05, memory: 8933, loss: 2.7961\n", + "2023-07-02 17:45:49,260 - modelscope - INFO - epoch [1][715/4953]\tlr: 9.598e-05, memory: 8933, loss: 1.4719\n", + "2023-07-02 17:45:56,740 - modelscope - INFO - epoch [1][720/4953]\tlr: 9.592e-05, memory: 8992, loss: 2.2742\n", + "2023-07-02 17:46:00,368 - modelscope - INFO - epoch [1][725/4953]\tlr: 9.585e-05, memory: 8992, loss: 2.5391\n", + "2023-07-02 17:46:06,793 - modelscope - INFO - epoch [1][730/4953]\tlr: 9.579e-05, memory: 8992, loss: 1.0074\n", + "2023-07-02 17:46:13,010 - modelscope - INFO - epoch [1][735/4953]\tlr: 9.573e-05, memory: 8992, loss: 1.9289\n", + "2023-07-02 17:46:19,044 - modelscope - INFO - epoch [1][740/4953]\tlr: 9.567e-05, memory: 8992, loss: 1.7352\n", + "2023-07-02 17:46:26,858 - modelscope - INFO - epoch [1][745/4953]\tlr: 9.560e-05, memory: 8992, loss: 1.6711\n", + "2023-07-02 17:46:32,975 - modelscope - INFO - epoch [1][750/4953]\tlr: 9.554e-05, memory: 8992, loss: 2.0008\n", + "2023-07-02 17:46:41,458 - modelscope - INFO - epoch [1][755/4953]\tlr: 9.547e-05, memory: 8992, loss: 1.4602\n", + "2023-07-02 17:46:45,793 - modelscope - INFO - epoch [1][760/4953]\tlr: 9.541e-05, memory: 8992, loss: 3.6859\n", + "2023-07-02 17:46:50,447 - modelscope - INFO - epoch [1][765/4953]\tlr: 9.534e-05, memory: 8992, loss: 2.0977\n", + "2023-07-02 17:46:56,543 - modelscope - INFO - epoch [1][770/4953]\tlr: 9.528e-05, memory: 8992, loss: 1.6078\n", + "2023-07-02 17:47:02,551 - modelscope - INFO - epoch [1][775/4953]\tlr: 9.521e-05, memory: 8992, loss: 2.8766\n", + "2023-07-02 17:47:09,599 - modelscope - INFO - epoch [1][780/4953]\tlr: 9.514e-05, memory: 8992, loss: 2.9023\n", + "2023-07-02 17:47:15,456 - modelscope - INFO - epoch [1][785/4953]\tlr: 9.508e-05, memory: 8992, loss: 1.2570\n", + "2023-07-02 17:47:22,689 - modelscope - INFO - epoch [1][790/4953]\tlr: 9.501e-05, memory: 8992, 
loss: 1.7406\n", + "2023-07-02 17:47:28,263 - modelscope - INFO - epoch [1][795/4953]\tlr: 9.494e-05, memory: 8992, loss: 1.9820\n", + "2023-07-02 17:47:34,260 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:16<00:00, 2.04it/s]\n", + "2023-07-02 17:49:50,358 - modelscope - INFO - Saving checkpoint at 800 iter\n", + "2023-07-02 17:49:50,399 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter600_acc0.7198567390441895\n", + "2023-07-02 17:49:50,403 - modelscope - INFO - Saving checkpoint at 800 iter\n", + "2023-07-02 17:49:50,442 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_600\n", + "2023-07-02 17:49:50,447 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8992, evaluation/acc: 0.7412, evaluation/loss: 1.8238, loss: 1.3484\n", + "2023-07-02 17:49:56,027 - modelscope - INFO - epoch [1][805/4953]\tlr: 9.481e-05, memory: 8992, loss: 1.9234\n", + "2023-07-02 17:50:02,709 - modelscope - INFO - epoch [1][810/4953]\tlr: 9.474e-05, memory: 8992, loss: 1.3625\n", + "2023-07-02 17:50:05,927 - modelscope - INFO - epoch [1][815/4953]\tlr: 9.467e-05, memory: 8992, loss: 3.0219\n", + "2023-07-02 17:50:11,744 - modelscope - INFO - epoch [1][820/4953]\tlr: 9.460e-05, memory: 8992, loss: 1.4125\n", + "2023-07-02 17:50:17,173 - modelscope - INFO - epoch [1][825/4953]\tlr: 9.453e-05, memory: 8992, loss: 2.7422\n", + "2023-07-02 17:50:20,860 - modelscope - INFO - epoch [1][830/4953]\tlr: 9.446e-05, memory: 8992, loss: 2.2609\n", + "2023-07-02 17:50:26,716 - modelscope - INFO - epoch [1][835/4953]\tlr: 9.439e-05, memory: 8992, loss: 2.0391\n", + "2023-07-02 17:50:33,433 - modelscope - INFO - epoch [1][840/4953]\tlr: 9.431e-05, memory: 8992, loss: 1.2227\n", + "2023-07-02 17:50:38,310 - modelscope - INFO - epoch [1][845/4953]\tlr: 9.424e-05, memory: 8992, loss: 
2.3312\n", + "2023-07-02 17:50:42,956 - modelscope - INFO - epoch [1][850/4953]\tlr: 9.417e-05, memory: 8992, loss: 1.8562\n", + "2023-07-02 17:50:48,973 - modelscope - INFO - epoch [1][855/4953]\tlr: 9.410e-05, memory: 8992, loss: 1.5039\n", + "2023-07-02 17:50:52,835 - modelscope - INFO - epoch [1][860/4953]\tlr: 9.402e-05, memory: 8992, loss: 2.6664\n", + "2023-07-02 17:50:59,665 - modelscope - INFO - epoch [1][865/4953]\tlr: 9.395e-05, memory: 8992, loss: 1.1352\n", + "2023-07-02 17:51:05,311 - modelscope - INFO - epoch [1][870/4953]\tlr: 9.388e-05, memory: 8992, loss: 0.9805\n", + "2023-07-02 17:51:10,329 - modelscope - INFO - epoch [1][875/4953]\tlr: 9.380e-05, memory: 8992, loss: 1.9438\n", + "2023-07-02 17:51:15,416 - modelscope - INFO - epoch [1][880/4953]\tlr: 9.373e-05, memory: 8992, loss: 1.5938\n", + "2023-07-02 17:51:18,285 - modelscope - INFO - epoch [1][885/4953]\tlr: 9.365e-05, memory: 8992, loss: 3.1656\n", + "2023-07-02 17:51:23,293 - modelscope - INFO - epoch [1][890/4953]\tlr: 9.358e-05, memory: 8992, loss: 1.3336\n", + "2023-07-02 17:51:29,054 - modelscope - INFO - epoch [1][895/4953]\tlr: 9.350e-05, memory: 8992, loss: 1.9094\n", + "2023-07-02 17:51:34,572 - modelscope - INFO - epoch [1][900/4953]\tlr: 9.343e-05, memory: 8992, loss: 2.2406\n", + "2023-07-02 17:51:40,191 - modelscope - INFO - epoch [1][905/4953]\tlr: 9.335e-05, memory: 8992, loss: 1.1078\n", + "2023-07-02 17:51:49,310 - modelscope - INFO - epoch [1][910/4953]\tlr: 9.327e-05, memory: 8992, loss: 1.4352\n", + "2023-07-02 17:51:53,688 - modelscope - INFO - epoch [1][915/4953]\tlr: 9.320e-05, memory: 8992, loss: 2.3406\n", + "2023-07-02 17:51:58,710 - modelscope - INFO - epoch [1][920/4953]\tlr: 9.312e-05, memory: 8992, loss: 1.6012\n", + "2023-07-02 17:52:04,686 - modelscope - INFO - epoch [1][925/4953]\tlr: 9.304e-05, memory: 8992, loss: 1.7086\n", + "2023-07-02 17:52:12,123 - modelscope - INFO - epoch [1][930/4953]\tlr: 9.296e-05, memory: 8992, loss: 1.3492\n", + "2023-07-02 
17:52:15,935 - modelscope - INFO - epoch [1][935/4953]\tlr: 9.288e-05, memory: 8992, loss: 1.4781\n", + "2023-07-02 17:52:20,994 - modelscope - INFO - epoch [1][940/4953]\tlr: 9.280e-05, memory: 8992, loss: 2.1047\n", + "2023-07-02 17:52:28,615 - modelscope - INFO - epoch [1][945/4953]\tlr: 9.272e-05, memory: 8992, loss: 1.2547\n", + "2023-07-02 17:52:34,278 - modelscope - INFO - epoch [1][950/4953]\tlr: 9.264e-05, memory: 8992, loss: 1.7332\n", + "2023-07-02 17:52:40,908 - modelscope - INFO - epoch [1][955/4953]\tlr: 9.256e-05, memory: 8992, loss: 1.2336\n", + "2023-07-02 17:52:45,957 - modelscope - INFO - epoch [1][960/4953]\tlr: 9.248e-05, memory: 8992, loss: 1.3078\n", + "2023-07-02 17:52:51,185 - modelscope - INFO - epoch [1][965/4953]\tlr: 9.240e-05, memory: 8992, loss: 2.4461\n", + "2023-07-02 17:52:56,088 - modelscope - INFO - epoch [1][970/4953]\tlr: 9.232e-05, memory: 8992, loss: 2.0934\n", + "2023-07-02 17:53:00,822 - modelscope - INFO - epoch [1][975/4953]\tlr: 9.224e-05, memory: 8992, loss: 1.5676\n", + "2023-07-02 17:53:04,695 - modelscope - INFO - epoch [1][980/4953]\tlr: 9.216e-05, memory: 8992, loss: 2.7031\n", + "2023-07-02 17:53:09,760 - modelscope - INFO - epoch [1][985/4953]\tlr: 9.207e-05, memory: 8992, loss: 1.9406\n", + "2023-07-02 17:53:14,950 - modelscope - INFO - epoch [1][990/4953]\tlr: 9.199e-05, memory: 8992, loss: 1.9484\n", + "2023-07-02 17:53:20,534 - modelscope - INFO - epoch [1][995/4953]\tlr: 9.191e-05, memory: 8992, loss: 3.2953\n", + "2023-07-02 17:53:25,342 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:16<00:00, 2.04it/s]\n", + "2023-07-02 17:55:41,348 - modelscope - INFO - Saving checkpoint at 1000 iter\n", + "2023-07-02 17:55:41,389 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter800_acc0.7412243485450745\n", + "2023-07-02 17:55:41,393 - modelscope - INFO - 
Saving checkpoint at 1000 iter\n", + "2023-07-02 17:55:41,431 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_800\n", + "2023-07-02 17:55:41,435 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8992, evaluation/acc: 0.7551, evaluation/loss: 1.6418, loss: 2.1023\n", + "2023-07-02 17:55:48,321 - modelscope - INFO - epoch [1][1005/4953]\tlr: 9.174e-05, memory: 8992, loss: 0.9020\n", + "2023-07-02 17:55:52,978 - modelscope - INFO - epoch [1][1010/4953]\tlr: 9.166e-05, memory: 8992, loss: 2.8094\n", + "2023-07-02 17:55:59,951 - modelscope - INFO - epoch [1][1015/4953]\tlr: 9.157e-05, memory: 8992, loss: 1.5145\n", + "2023-07-02 17:56:06,752 - modelscope - INFO - epoch [1][1020/4953]\tlr: 9.149e-05, memory: 8992, loss: 1.2547\n", + "2023-07-02 17:56:13,123 - modelscope - INFO - epoch [1][1025/4953]\tlr: 9.140e-05, memory: 8992, loss: 1.5836\n", + "2023-07-02 17:56:18,535 - modelscope - INFO - epoch [1][1030/4953]\tlr: 9.132e-05, memory: 8992, loss: 1.5500\n", + "2023-07-02 17:56:23,898 - modelscope - INFO - epoch [1][1035/4953]\tlr: 9.123e-05, memory: 8992, loss: 1.1477\n", + "2023-07-02 17:56:29,262 - modelscope - INFO - epoch [1][1040/4953]\tlr: 9.114e-05, memory: 8992, loss: 1.8488\n", + "2023-07-02 17:56:36,281 - modelscope - INFO - epoch [1][1045/4953]\tlr: 9.106e-05, memory: 8992, loss: 1.7969\n", + "2023-07-02 17:56:42,786 - modelscope - INFO - epoch [1][1050/4953]\tlr: 9.097e-05, memory: 8992, loss: 1.0703\n", + "2023-07-02 17:56:48,367 - modelscope - INFO - epoch [1][1055/4953]\tlr: 9.088e-05, memory: 8992, loss: 1.5227\n", + "2023-07-02 17:56:53,185 - modelscope - INFO - epoch [1][1060/4953]\tlr: 9.079e-05, memory: 8992, loss: 2.5859\n", + "2023-07-02 17:56:59,040 - modelscope - INFO - epoch [1][1065/4953]\tlr: 9.070e-05, memory: 8992, loss: 1.4641\n", + "2023-07-02 17:57:05,006 - modelscope - INFO - epoch [1][1070/4953]\tlr: 9.062e-05, memory: 8992, loss: 0.9602\n", + "2023-07-02 17:57:08,833 - 
modelscope - INFO - epoch [1][1075/4953]\tlr: 9.053e-05, memory: 8992, loss: 2.7281\n", + "2023-07-02 17:57:15,081 - modelscope - INFO - epoch [1][1080/4953]\tlr: 9.044e-05, memory: 8992, loss: 0.8438\n", + "2023-07-02 17:57:19,054 - modelscope - INFO - epoch [1][1085/4953]\tlr: 9.035e-05, memory: 8992, loss: 2.0336\n", + "2023-07-02 17:57:27,789 - modelscope - INFO - epoch [1][1090/4953]\tlr: 9.026e-05, memory: 8992, loss: 1.0059\n", + "2023-07-02 17:57:32,658 - modelscope - INFO - epoch [1][1095/4953]\tlr: 9.017e-05, memory: 8992, loss: 1.4187\n", + "2023-07-02 17:57:37,809 - modelscope - INFO - epoch [1][1100/4953]\tlr: 9.008e-05, memory: 8992, loss: 1.8813\n", + "2023-07-02 17:57:44,029 - modelscope - INFO - epoch [1][1105/4953]\tlr: 8.999e-05, memory: 8992, loss: 1.2219\n", + "2023-07-02 17:57:49,772 - modelscope - INFO - epoch [1][1110/4953]\tlr: 8.989e-05, memory: 8992, loss: 1.0527\n", + "2023-07-02 17:57:53,867 - modelscope - INFO - epoch [1][1115/4953]\tlr: 8.980e-05, memory: 8992, loss: 1.7289\n", + "2023-07-02 17:57:59,243 - modelscope - INFO - epoch [1][1120/4953]\tlr: 8.971e-05, memory: 8992, loss: 2.4305\n", + "2023-07-02 17:58:08,887 - modelscope - INFO - epoch [1][1125/4953]\tlr: 8.962e-05, memory: 8992, loss: 0.7469\n", + "2023-07-02 17:58:16,138 - modelscope - INFO - epoch [1][1130/4953]\tlr: 8.952e-05, memory: 8992, loss: 1.7727\n", + "2023-07-02 17:58:23,930 - modelscope - INFO - epoch [1][1135/4953]\tlr: 8.943e-05, memory: 8992, loss: 2.0129\n", + "2023-07-02 17:58:30,185 - modelscope - INFO - epoch [1][1140/4953]\tlr: 8.934e-05, memory: 8992, loss: 2.9025\n", + "2023-07-02 17:58:36,114 - modelscope - INFO - epoch [1][1145/4953]\tlr: 8.924e-05, memory: 8992, loss: 1.8898\n", + "2023-07-02 17:58:42,583 - modelscope - INFO - epoch [1][1150/4953]\tlr: 8.915e-05, memory: 8992, loss: 1.6789\n", + "2023-07-02 17:58:47,491 - modelscope - INFO - epoch [1][1155/4953]\tlr: 8.905e-05, memory: 8992, loss: 1.5578\n", + "2023-07-02 17:58:51,182 - modelscope 
- INFO - epoch [1][1160/4953]\tlr: 8.896e-05, memory: 8992, loss: 2.6266\n", + "2023-07-02 17:58:56,692 - modelscope - INFO - epoch [1][1165/4953]\tlr: 8.886e-05, memory: 8992, loss: 1.8508\n", + "2023-07-02 17:59:01,780 - modelscope - INFO - epoch [1][1170/4953]\tlr: 8.877e-05, memory: 8992, loss: 1.7000\n", + "2023-07-02 17:59:05,790 - modelscope - INFO - epoch [1][1175/4953]\tlr: 8.867e-05, memory: 8992, loss: 2.2281\n", + "2023-07-02 17:59:10,420 - modelscope - INFO - epoch [1][1180/4953]\tlr: 8.858e-05, memory: 8992, loss: 2.2180\n", + "2023-07-02 17:59:15,762 - modelscope - INFO - epoch [1][1185/4953]\tlr: 8.848e-05, memory: 8992, loss: 1.2668\n", + "2023-07-02 17:59:20,930 - modelscope - INFO - epoch [1][1190/4953]\tlr: 8.838e-05, memory: 8992, loss: 1.8664\n", + "2023-07-02 17:59:27,122 - modelscope - INFO - epoch [1][1195/4953]\tlr: 8.828e-05, memory: 8992, loss: 2.4109\n", + "2023-07-02 17:59:32,910 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:01:48,692 - modelscope - INFO - Saving checkpoint at 1200 iter\n", + "2023-07-02 18:01:48,732 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter1000_acc0.7551158666610718\n", + "2023-07-02 18:01:48,736 - modelscope - INFO - Saving checkpoint at 1200 iter\n", + "2023-07-02 18:01:48,775 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_1000\n", + "2023-07-02 18:01:48,780 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8992, evaluation/acc: 0.7694, evaluation/loss: 1.5234, loss: 1.7117\n", + "2023-07-02 18:01:56,354 - modelscope - INFO - epoch [1][1205/4953]\tlr: 8.809e-05, memory: 8992, loss: 1.2402\n", + "2023-07-02 18:02:00,660 - modelscope - INFO - epoch [1][1210/4953]\tlr: 8.799e-05, memory: 8992, loss: 1.9062\n", + "2023-07-02 18:02:04,421 - 
modelscope - INFO - epoch [1][1215/4953]\tlr: 8.789e-05, memory: 8992, loss: 1.4750\n", + "2023-07-02 18:02:10,614 - modelscope - INFO - epoch [1][1220/4953]\tlr: 8.779e-05, memory: 8992, loss: 1.0879\n", + "2023-07-02 18:02:16,579 - modelscope - INFO - epoch [1][1225/4953]\tlr: 8.769e-05, memory: 8992, loss: 1.9461\n", + "2023-07-02 18:02:23,602 - modelscope - INFO - epoch [1][1230/4953]\tlr: 8.759e-05, memory: 8992, loss: 2.3242\n", + "2023-07-02 18:02:31,155 - modelscope - INFO - epoch [1][1235/4953]\tlr: 8.749e-05, memory: 8992, loss: 1.9867\n", + "2023-07-02 18:02:36,373 - modelscope - INFO - epoch [1][1240/4953]\tlr: 8.739e-05, memory: 8992, loss: 2.1641\n", + "2023-07-02 18:02:41,792 - modelscope - INFO - epoch [1][1245/4953]\tlr: 8.729e-05, memory: 8992, loss: 1.9109\n", + "2023-07-02 18:02:49,746 - modelscope - INFO - epoch [1][1250/4953]\tlr: 8.719e-05, memory: 8992, loss: 0.7258\n", + "2023-07-02 18:02:54,809 - modelscope - INFO - epoch [1][1255/4953]\tlr: 8.709e-05, memory: 8992, loss: 1.7203\n", + "2023-07-02 18:03:02,266 - modelscope - INFO - epoch [1][1260/4953]\tlr: 8.699e-05, memory: 8992, loss: 1.3533\n", + "2023-07-02 18:03:10,570 - modelscope - INFO - epoch [1][1265/4953]\tlr: 8.689e-05, memory: 8992, loss: 1.6199\n", + "2023-07-02 18:03:17,332 - modelscope - INFO - epoch [1][1270/4953]\tlr: 8.679e-05, memory: 8992, loss: 1.4033\n", + "2023-07-02 18:03:24,075 - modelscope - INFO - epoch [1][1275/4953]\tlr: 8.668e-05, memory: 8992, loss: 1.3773\n", + "2023-07-02 18:03:31,046 - modelscope - INFO - epoch [1][1280/4953]\tlr: 8.658e-05, memory: 8992, loss: 1.3973\n", + "2023-07-02 18:03:37,326 - modelscope - INFO - epoch [1][1285/4953]\tlr: 8.648e-05, memory: 8992, loss: 1.6422\n", + "2023-07-02 18:03:42,789 - modelscope - INFO - epoch [1][1290/4953]\tlr: 8.637e-05, memory: 8992, loss: 1.8156\n", + "2023-07-02 18:03:49,191 - modelscope - INFO - epoch [1][1295/4953]\tlr: 8.627e-05, memory: 8992, loss: 0.8660\n", + "2023-07-02 18:03:57,916 - modelscope 
- INFO - epoch [1][1300/4953]\tlr: 8.617e-05, memory: 8992, loss: 1.4477\n", + "2023-07-02 18:04:04,809 - modelscope - INFO - epoch [1][1305/4953]\tlr: 8.606e-05, memory: 8992, loss: 0.7375\n", + "2023-07-02 18:04:12,169 - modelscope - INFO - epoch [1][1310/4953]\tlr: 8.596e-05, memory: 8992, loss: 0.4646\n", + "2023-07-02 18:04:17,928 - modelscope - INFO - epoch [1][1315/4953]\tlr: 8.585e-05, memory: 8992, loss: 1.6566\n", + "2023-07-02 18:04:26,868 - modelscope - INFO - epoch [1][1320/4953]\tlr: 8.575e-05, memory: 8992, loss: 1.0375\n", + "2023-07-02 18:04:32,785 - modelscope - INFO - epoch [1][1325/4953]\tlr: 8.564e-05, memory: 8992, loss: 1.1785\n", + "2023-07-02 18:04:36,876 - modelscope - INFO - epoch [1][1330/4953]\tlr: 8.553e-05, memory: 8992, loss: 2.0953\n", + "2023-07-02 18:04:43,149 - modelscope - INFO - epoch [1][1335/4953]\tlr: 8.543e-05, memory: 8992, loss: 1.4941\n", + "2023-07-02 18:04:48,128 - modelscope - INFO - epoch [1][1340/4953]\tlr: 8.532e-05, memory: 8992, loss: 2.3219\n", + "2023-07-02 18:04:54,519 - modelscope - INFO - epoch [1][1345/4953]\tlr: 8.521e-05, memory: 8992, loss: 1.7479\n", + "2023-07-02 18:05:00,734 - modelscope - INFO - epoch [1][1350/4953]\tlr: 8.511e-05, memory: 8992, loss: 2.5168\n", + "2023-07-02 18:05:07,571 - modelscope - INFO - epoch [1][1355/4953]\tlr: 8.500e-05, memory: 8992, loss: 1.5414\n", + "2023-07-02 18:05:13,130 - modelscope - INFO - epoch [1][1360/4953]\tlr: 8.489e-05, memory: 8992, loss: 1.8086\n", + "2023-07-02 18:05:22,837 - modelscope - INFO - epoch [1][1365/4953]\tlr: 8.478e-05, memory: 8992, loss: 1.1250\n", + "2023-07-02 18:05:28,381 - modelscope - INFO - epoch [1][1370/4953]\tlr: 8.468e-05, memory: 8992, loss: 1.2740\n", + "2023-07-02 18:05:34,762 - modelscope - INFO - epoch [1][1375/4953]\tlr: 8.457e-05, memory: 8992, loss: 1.6906\n", + "2023-07-02 18:05:40,998 - modelscope - INFO - epoch [1][1380/4953]\tlr: 8.446e-05, memory: 8992, loss: 2.1523\n", + "2023-07-02 18:05:48,330 - modelscope - INFO - 
epoch [1][1385/4953]\tlr: 8.435e-05, memory: 8992, loss: 0.6824\n", + "2023-07-02 18:05:52,136 - modelscope - INFO - epoch [1][1390/4953]\tlr: 8.424e-05, memory: 8992, loss: 1.8422\n", + "2023-07-02 18:05:58,132 - modelscope - INFO - epoch [1][1395/4953]\tlr: 8.413e-05, memory: 8992, loss: 0.8705\n", + "2023-07-02 18:06:04,317 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:08:20,133 - modelscope - INFO - Saving checkpoint at 1400 iter\n", + "2023-07-02 18:08:20,173 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter1200_acc0.7693551182746887\n", + "2023-07-02 18:08:20,177 - modelscope - INFO - Saving checkpoint at 1400 iter\n", + "2023-07-02 18:08:20,216 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_1200\n", + "2023-07-02 18:08:20,220 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 8992, evaluation/acc: 0.7789, evaluation/loss: 1.4656, loss: 1.8477\n", + "2023-07-02 18:08:25,847 - modelscope - INFO - epoch [1][1405/4953]\tlr: 8.391e-05, memory: 8992, loss: 1.5250\n", + "2023-07-02 18:08:32,815 - modelscope - INFO - epoch [1][1410/4953]\tlr: 8.380e-05, memory: 8992, loss: 1.2430\n", + "2023-07-02 18:08:38,362 - modelscope - INFO - epoch [1][1415/4953]\tlr: 8.369e-05, memory: 8992, loss: 1.4227\n", + "2023-07-02 18:08:43,312 - modelscope - INFO - epoch [1][1420/4953]\tlr: 8.358e-05, memory: 8992, loss: 1.3088\n", + "2023-07-02 18:08:50,596 - modelscope - INFO - epoch [1][1425/4953]\tlr: 8.346e-05, memory: 8992, loss: 1.0277\n", + "2023-07-02 18:08:55,317 - modelscope - INFO - epoch [1][1430/4953]\tlr: 8.335e-05, memory: 8992, loss: 2.0480\n", + "2023-07-02 18:08:58,994 - modelscope - INFO - epoch [1][1435/4953]\tlr: 8.324e-05, memory: 8992, loss: 3.0969\n", + "2023-07-02 18:09:04,894 - modelscope - 
INFO - epoch [1][1440/4953]\tlr: 8.313e-05, memory: 8992, loss: 0.7141\n", + "2023-07-02 18:09:10,621 - modelscope - INFO - epoch [1][1445/4953]\tlr: 8.301e-05, memory: 8992, loss: 1.7031\n", + "2023-07-02 18:09:15,960 - modelscope - INFO - epoch [1][1450/4953]\tlr: 8.290e-05, memory: 8992, loss: 1.5277\n", + "2023-07-02 18:09:21,781 - modelscope - INFO - epoch [1][1455/4953]\tlr: 8.279e-05, memory: 8992, loss: 1.7842\n", + "2023-07-02 18:09:29,051 - modelscope - INFO - epoch [1][1460/4953]\tlr: 8.267e-05, memory: 8992, loss: 2.1768\n", + "2023-07-02 18:09:33,405 - modelscope - INFO - epoch [1][1465/4953]\tlr: 8.256e-05, memory: 8992, loss: 1.9969\n", + "2023-07-02 18:09:38,454 - modelscope - INFO - epoch [1][1470/4953]\tlr: 8.245e-05, memory: 8992, loss: 1.6043\n", + "2023-07-02 18:09:44,266 - modelscope - INFO - epoch [1][1475/4953]\tlr: 8.233e-05, memory: 8992, loss: 0.7842\n", + "2023-07-02 18:09:49,575 - modelscope - INFO - epoch [1][1480/4953]\tlr: 8.222e-05, memory: 8992, loss: 1.6766\n", + "2023-07-02 18:09:56,773 - modelscope - INFO - epoch [1][1485/4953]\tlr: 8.210e-05, memory: 8992, loss: 1.1123\n", + "2023-07-02 18:10:05,054 - modelscope - INFO - epoch [1][1490/4953]\tlr: 8.199e-05, memory: 9058, loss: 1.3289\n", + "2023-07-02 18:10:10,678 - modelscope - INFO - epoch [1][1495/4953]\tlr: 8.187e-05, memory: 9058, loss: 1.6414\n", + "2023-07-02 18:10:16,694 - modelscope - INFO - epoch [1][1500/4953]\tlr: 8.176e-05, memory: 9058, loss: 0.8203\n", + "2023-07-02 18:10:24,675 - modelscope - INFO - epoch [1][1505/4953]\tlr: 8.164e-05, memory: 9058, loss: 0.8189\n", + "2023-07-02 18:10:30,053 - modelscope - INFO - epoch [1][1510/4953]\tlr: 8.152e-05, memory: 9058, loss: 1.1646\n", + "2023-07-02 18:10:36,537 - modelscope - INFO - epoch [1][1515/4953]\tlr: 8.141e-05, memory: 9058, loss: 1.1387\n", + "2023-07-02 18:10:42,304 - modelscope - INFO - epoch [1][1520/4953]\tlr: 8.129e-05, memory: 9058, loss: 1.4477\n", + "2023-07-02 18:10:46,424 - modelscope - INFO - 
epoch [1][1525/4953]\tlr: 8.117e-05, memory: 9058, loss: 3.0531\n", + "2023-07-02 18:10:51,264 - modelscope - INFO - epoch [1][1530/4953]\tlr: 8.106e-05, memory: 9058, loss: 2.3023\n", + "2023-07-02 18:10:59,103 - modelscope - INFO - epoch [1][1535/4953]\tlr: 8.094e-05, memory: 9058, loss: 0.6086\n", + "2023-07-02 18:11:04,295 - modelscope - INFO - epoch [1][1540/4953]\tlr: 8.082e-05, memory: 9058, loss: 1.3912\n", + "2023-07-02 18:11:09,436 - modelscope - INFO - epoch [1][1545/4953]\tlr: 8.070e-05, memory: 9058, loss: 2.1668\n", + "2023-07-02 18:11:16,921 - modelscope - INFO - epoch [1][1550/4953]\tlr: 8.058e-05, memory: 9058, loss: 0.4180\n", + "2023-07-02 18:11:22,852 - modelscope - INFO - epoch [1][1555/4953]\tlr: 8.047e-05, memory: 9058, loss: 1.4855\n", + "2023-07-02 18:11:27,748 - modelscope - INFO - epoch [1][1560/4953]\tlr: 8.035e-05, memory: 9058, loss: 2.0650\n", + "2023-07-02 18:11:30,906 - modelscope - INFO - epoch [1][1565/4953]\tlr: 8.023e-05, memory: 9058, loss: 2.8250\n", + "2023-07-02 18:11:38,069 - modelscope - INFO - epoch [1][1570/4953]\tlr: 8.011e-05, memory: 9058, loss: 1.6609\n", + "2023-07-02 18:11:44,626 - modelscope - INFO - epoch [1][1575/4953]\tlr: 7.999e-05, memory: 9058, loss: 1.0016\n", + "2023-07-02 18:11:49,164 - modelscope - INFO - epoch [1][1580/4953]\tlr: 7.987e-05, memory: 9058, loss: 2.2371\n", + "2023-07-02 18:11:53,217 - modelscope - INFO - epoch [1][1585/4953]\tlr: 7.975e-05, memory: 9058, loss: 2.7695\n", + "2023-07-02 18:11:59,930 - modelscope - INFO - epoch [1][1590/4953]\tlr: 7.963e-05, memory: 9058, loss: 2.2398\n", + "2023-07-02 18:12:04,671 - modelscope - INFO - epoch [1][1595/4953]\tlr: 7.951e-05, memory: 9058, loss: 0.7875\n", + "2023-07-02 18:12:10,417 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:14:26,308 - modelscope - INFO - Saving checkpoint at 1600 iter\n", + "2023-07-02 
18:14:26,349 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter1400_acc0.7789175510406494\n", + "2023-07-02 18:14:26,353 - modelscope - INFO - Saving checkpoint at 1600 iter\n", + "2023-07-02 18:14:26,392 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_1400\n", + "2023-07-02 18:14:26,396 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9058, evaluation/acc: 0.7892, evaluation/loss: 1.4188, loss: 2.1477\n", + "2023-07-02 18:14:31,893 - modelscope - INFO - epoch [1][1605/4953]\tlr: 7.927e-05, memory: 9058, loss: 0.7930\n", + "2023-07-02 18:14:37,157 - modelscope - INFO - epoch [1][1610/4953]\tlr: 7.914e-05, memory: 9058, loss: 1.6867\n", + "2023-07-02 18:14:41,163 - modelscope - INFO - epoch [1][1615/4953]\tlr: 7.902e-05, memory: 9058, loss: 1.3123\n", + "2023-07-02 18:14:46,222 - modelscope - INFO - epoch [1][1620/4953]\tlr: 7.890e-05, memory: 9058, loss: 1.9320\n", + "2023-07-02 18:14:50,200 - modelscope - INFO - epoch [1][1625/4953]\tlr: 7.878e-05, memory: 9058, loss: 2.3531\n", + "2023-07-02 18:14:55,640 - modelscope - INFO - epoch [1][1630/4953]\tlr: 7.866e-05, memory: 9058, loss: 2.1230\n", + "2023-07-02 18:15:00,591 - modelscope - INFO - epoch [1][1635/4953]\tlr: 7.853e-05, memory: 9058, loss: 1.2672\n", + "2023-07-02 18:15:06,311 - modelscope - INFO - epoch [1][1640/4953]\tlr: 7.841e-05, memory: 9058, loss: 1.8948\n", + "2023-07-02 18:15:12,067 - modelscope - INFO - epoch [1][1645/4953]\tlr: 7.829e-05, memory: 9058, loss: 1.9506\n", + "2023-07-02 18:15:18,834 - modelscope - INFO - epoch [1][1650/4953]\tlr: 7.817e-05, memory: 9058, loss: 0.8719\n", + "2023-07-02 18:15:24,490 - modelscope - INFO - epoch [1][1655/4953]\tlr: 7.804e-05, memory: 9058, loss: 0.7850\n", + "2023-07-02 18:15:30,533 - modelscope - INFO - epoch [1][1660/4953]\tlr: 7.792e-05, memory: 9058, loss: 1.0324\n", + "2023-07-02 18:15:39,715 - modelscope - INFO - epoch 
[1][1665/4953]\tlr: 7.779e-05, memory: 9058, loss: 0.8568\n", + "2023-07-02 18:15:46,536 - modelscope - INFO - epoch [1][1670/4953]\tlr: 7.767e-05, memory: 9058, loss: 1.5828\n", + "2023-07-02 18:15:50,976 - modelscope - INFO - epoch [1][1675/4953]\tlr: 7.755e-05, memory: 9058, loss: 1.5391\n", + "2023-07-02 18:15:56,272 - modelscope - INFO - epoch [1][1680/4953]\tlr: 7.742e-05, memory: 9058, loss: 1.6117\n", + "2023-07-02 18:16:04,187 - modelscope - INFO - epoch [1][1685/4953]\tlr: 7.730e-05, memory: 9058, loss: 0.4076\n", + "2023-07-02 18:16:08,882 - modelscope - INFO - epoch [1][1690/4953]\tlr: 7.717e-05, memory: 9058, loss: 1.3816\n", + "2023-07-02 18:16:16,150 - modelscope - INFO - epoch [1][1695/4953]\tlr: 7.705e-05, memory: 9058, loss: 1.9426\n", + "2023-07-02 18:16:20,599 - modelscope - INFO - epoch [1][1700/4953]\tlr: 7.692e-05, memory: 9058, loss: 2.4797\n", + "2023-07-02 18:16:26,001 - modelscope - INFO - epoch [1][1705/4953]\tlr: 7.679e-05, memory: 9058, loss: 1.3273\n", + "2023-07-02 18:16:32,374 - modelscope - INFO - epoch [1][1710/4953]\tlr: 7.667e-05, memory: 9058, loss: 0.9286\n", + "2023-07-02 18:16:39,243 - modelscope - INFO - epoch [1][1715/4953]\tlr: 7.654e-05, memory: 9058, loss: 1.3732\n", + "2023-07-02 18:16:44,919 - modelscope - INFO - epoch [1][1720/4953]\tlr: 7.642e-05, memory: 9058, loss: 1.2824\n", + "2023-07-02 18:16:47,647 - modelscope - INFO - epoch [1][1725/4953]\tlr: 7.629e-05, memory: 9058, loss: 2.0891\n", + "2023-07-02 18:16:53,984 - modelscope - INFO - epoch [1][1730/4953]\tlr: 7.616e-05, memory: 9058, loss: 0.5539\n", + "2023-07-02 18:16:58,439 - modelscope - INFO - epoch [1][1735/4953]\tlr: 7.604e-05, memory: 9058, loss: 1.4975\n", + "2023-07-02 18:17:03,726 - modelscope - INFO - epoch [1][1740/4953]\tlr: 7.591e-05, memory: 9058, loss: 1.6102\n", + "2023-07-02 18:17:08,657 - modelscope - INFO - epoch [1][1745/4953]\tlr: 7.578e-05, memory: 9058, loss: 1.6957\n", + "2023-07-02 18:17:13,371 - modelscope - INFO - epoch 
[1][1750/4953]\tlr: 7.565e-05, memory: 9058, loss: 1.5684\n", + "2023-07-02 18:17:17,513 - modelscope - INFO - epoch [1][1755/4953]\tlr: 7.553e-05, memory: 9058, loss: 2.9000\n", + "2023-07-02 18:17:24,347 - modelscope - INFO - epoch [1][1760/4953]\tlr: 7.540e-05, memory: 9058, loss: 1.5227\n", + "2023-07-02 18:17:28,183 - modelscope - INFO - epoch [1][1765/4953]\tlr: 7.527e-05, memory: 9058, loss: 2.3375\n", + "2023-07-02 18:17:35,427 - modelscope - INFO - epoch [1][1770/4953]\tlr: 7.514e-05, memory: 9058, loss: 1.0623\n", + "2023-07-02 18:17:39,708 - modelscope - INFO - epoch [1][1775/4953]\tlr: 7.501e-05, memory: 9058, loss: 1.5977\n", + "2023-07-02 18:17:45,757 - modelscope - INFO - epoch [1][1780/4953]\tlr: 7.488e-05, memory: 9058, loss: 1.0781\n", + "2023-07-02 18:17:49,525 - modelscope - INFO - epoch [1][1785/4953]\tlr: 7.475e-05, memory: 9058, loss: 1.6547\n", + "2023-07-02 18:17:55,072 - modelscope - INFO - epoch [1][1790/4953]\tlr: 7.463e-05, memory: 9058, loss: 1.4458\n", + "2023-07-02 18:18:01,439 - modelscope - INFO - epoch [1][1795/4953]\tlr: 7.450e-05, memory: 9058, loss: 1.0096\n", + "2023-07-02 18:18:06,478 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:20:22,335 - modelscope - INFO - Saving checkpoint at 1800 iter\n", + "2023-07-02 18:20:22,375 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter1600_acc0.7891753911972046\n", + "2023-07-02 18:20:22,379 - modelscope - INFO - Saving checkpoint at 1800 iter\n", + "2023-07-02 18:20:22,417 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_1600\n", + "2023-07-02 18:20:22,422 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9058, evaluation/acc: 0.7967, evaluation/loss: 1.3701, loss: 0.9414\n", + "2023-07-02 18:20:28,163 - modelscope - INFO - 
epoch [1][1805/4953]\tlr: 7.424e-05, memory: 9058, loss: 1.7404\n", + "2023-07-02 18:20:32,265 - modelscope - INFO - epoch [1][1810/4953]\tlr: 7.411e-05, memory: 9058, loss: 1.5176\n", + "2023-07-02 18:20:38,772 - modelscope - INFO - epoch [1][1815/4953]\tlr: 7.398e-05, memory: 9058, loss: 0.9519\n", + "2023-07-02 18:20:44,819 - modelscope - INFO - epoch [1][1820/4953]\tlr: 7.385e-05, memory: 9058, loss: 1.2756\n", + "2023-07-02 18:20:50,296 - modelscope - INFO - epoch [1][1825/4953]\tlr: 7.372e-05, memory: 9058, loss: 1.4785\n", + "2023-07-02 18:20:56,799 - modelscope - INFO - epoch [1][1830/4953]\tlr: 7.358e-05, memory: 9058, loss: 1.5188\n", + "2023-07-02 18:21:03,334 - modelscope - INFO - epoch [1][1835/4953]\tlr: 7.345e-05, memory: 9058, loss: 0.6644\n", + "2023-07-02 18:21:10,067 - modelscope - INFO - epoch [1][1840/4953]\tlr: 7.332e-05, memory: 9058, loss: 0.9434\n", + "2023-07-02 18:21:16,554 - modelscope - INFO - epoch [1][1845/4953]\tlr: 7.319e-05, memory: 9058, loss: 0.7092\n", + "2023-07-02 18:21:23,374 - modelscope - INFO - epoch [1][1850/4953]\tlr: 7.306e-05, memory: 9058, loss: 1.1020\n", + "2023-07-02 18:21:32,187 - modelscope - INFO - epoch [1][1855/4953]\tlr: 7.293e-05, memory: 9058, loss: 1.1508\n", + "2023-07-02 18:21:37,254 - modelscope - INFO - epoch [1][1860/4953]\tlr: 7.280e-05, memory: 9058, loss: 1.6852\n", + "2023-07-02 18:21:42,410 - modelscope - INFO - epoch [1][1865/4953]\tlr: 7.266e-05, memory: 9058, loss: 0.9865\n", + "2023-07-02 18:21:47,494 - modelscope - INFO - epoch [1][1870/4953]\tlr: 7.253e-05, memory: 9058, loss: 1.4111\n", + "2023-07-02 18:21:51,877 - modelscope - INFO - epoch [1][1875/4953]\tlr: 7.240e-05, memory: 9058, loss: 1.9342\n", + "2023-07-02 18:21:57,909 - modelscope - INFO - epoch [1][1880/4953]\tlr: 7.227e-05, memory: 9058, loss: 1.5063\n", + "2023-07-02 18:22:03,018 - modelscope - INFO - epoch [1][1885/4953]\tlr: 7.213e-05, memory: 9058, loss: 1.5504\n", + "2023-07-02 18:22:07,481 - modelscope - INFO - epoch 
[1][1890/4953]\tlr: 7.200e-05, memory: 9058, loss: 1.2473\n", + "2023-07-02 18:22:12,667 - modelscope - INFO - epoch [1][1895/4953]\tlr: 7.187e-05, memory: 9058, loss: 2.0055\n", + "2023-07-02 18:22:17,967 - modelscope - INFO - epoch [1][1900/4953]\tlr: 7.174e-05, memory: 9058, loss: 0.7781\n", + "2023-07-02 18:22:24,563 - modelscope - INFO - epoch [1][1905/4953]\tlr: 7.160e-05, memory: 9058, loss: 1.1995\n", + "2023-07-02 18:22:28,670 - modelscope - INFO - epoch [1][1910/4953]\tlr: 7.147e-05, memory: 9058, loss: 2.4594\n", + "2023-07-02 18:22:35,136 - modelscope - INFO - epoch [1][1915/4953]\tlr: 7.133e-05, memory: 9058, loss: 0.7545\n", + "2023-07-02 18:22:41,042 - modelscope - INFO - epoch [1][1920/4953]\tlr: 7.120e-05, memory: 9058, loss: 1.8008\n", + "2023-07-02 18:22:45,686 - modelscope - INFO - epoch [1][1925/4953]\tlr: 7.107e-05, memory: 9058, loss: 1.4076\n", + "2023-07-02 18:22:50,652 - modelscope - INFO - epoch [1][1930/4953]\tlr: 7.093e-05, memory: 9058, loss: 1.6135\n", + "2023-07-02 18:22:55,346 - modelscope - INFO - epoch [1][1935/4953]\tlr: 7.080e-05, memory: 9058, loss: 1.3820\n", + "2023-07-02 18:23:00,407 - modelscope - INFO - epoch [1][1940/4953]\tlr: 7.066e-05, memory: 9058, loss: 1.3170\n", + "2023-07-02 18:23:07,089 - modelscope - INFO - epoch [1][1945/4953]\tlr: 7.053e-05, memory: 9058, loss: 1.5059\n", + "2023-07-02 18:23:14,519 - modelscope - INFO - epoch [1][1950/4953]\tlr: 7.039e-05, memory: 9058, loss: 1.1481\n", + "2023-07-02 18:23:20,167 - modelscope - INFO - epoch [1][1955/4953]\tlr: 7.026e-05, memory: 9058, loss: 1.5484\n", + "2023-07-02 18:23:26,522 - modelscope - INFO - epoch [1][1960/4953]\tlr: 7.012e-05, memory: 9058, loss: 1.5056\n", + "2023-07-02 18:23:31,990 - modelscope - INFO - epoch [1][1965/4953]\tlr: 6.999e-05, memory: 9058, loss: 0.8258\n", + "2023-07-02 18:23:36,765 - modelscope - INFO - epoch [1][1970/4953]\tlr: 6.985e-05, memory: 9058, loss: 2.1605\n", + "2023-07-02 18:23:44,015 - modelscope - INFO - epoch 
[1][1975/4953]\tlr: 6.972e-05, memory: 9058, loss: 0.5347\n", + "2023-07-02 18:23:50,763 - modelscope - INFO - epoch [1][1980/4953]\tlr: 6.958e-05, memory: 9058, loss: 0.5833\n", + "2023-07-02 18:23:56,081 - modelscope - INFO - epoch [1][1985/4953]\tlr: 6.945e-05, memory: 9058, loss: 1.3211\n", + "2023-07-02 18:24:02,890 - modelscope - INFO - epoch [1][1990/4953]\tlr: 6.931e-05, memory: 9058, loss: 0.6614\n", + "2023-07-02 18:24:11,102 - modelscope - INFO - epoch [1][1995/4953]\tlr: 6.917e-05, memory: 9058, loss: 1.0019\n", + "2023-07-02 18:24:15,188 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:26:31,178 - modelscope - INFO - Saving checkpoint at 2000 iter\n", + "2023-07-02 18:26:31,219 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter1800_acc0.79673832654953\n", + "2023-07-02 18:26:31,223 - modelscope - INFO - Saving checkpoint at 2000 iter\n", + "2023-07-02 18:26:31,262 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_1800\n", + "2023-07-02 18:26:31,267 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9058, evaluation/acc: 0.8048, evaluation/loss: 1.3532, loss: 2.3406\n", + "2023-07-02 18:26:36,725 - modelscope - INFO - epoch [1][2005/4953]\tlr: 6.890e-05, memory: 9058, loss: 1.7643\n", + "2023-07-02 18:26:43,719 - modelscope - INFO - epoch [1][2010/4953]\tlr: 6.876e-05, memory: 9058, loss: 1.3211\n", + "2023-07-02 18:26:50,532 - modelscope - INFO - epoch [1][2015/4953]\tlr: 6.863e-05, memory: 9058, loss: 1.0998\n", + "2023-07-02 18:26:55,084 - modelscope - INFO - epoch [1][2020/4953]\tlr: 6.849e-05, memory: 9058, loss: 1.0711\n", + "2023-07-02 18:27:01,229 - modelscope - INFO - epoch [1][2025/4953]\tlr: 6.835e-05, memory: 9058, loss: 0.9915\n", + "2023-07-02 18:27:05,887 - modelscope - INFO - 
epoch [1][2030/4953]\tlr: 6.822e-05, memory: 9058, loss: 1.4650\n", + "2023-07-02 18:27:10,177 - modelscope - INFO - epoch [1][2035/4953]\tlr: 6.808e-05, memory: 9058, loss: 1.7047\n", + "2023-07-02 18:27:16,232 - modelscope - INFO - epoch [1][2040/4953]\tlr: 6.794e-05, memory: 9058, loss: 1.1574\n", + "2023-07-02 18:27:20,822 - modelscope - INFO - epoch [1][2045/4953]\tlr: 6.780e-05, memory: 9058, loss: 2.8094\n", + "2023-07-02 18:27:26,542 - modelscope - INFO - epoch [1][2050/4953]\tlr: 6.767e-05, memory: 9058, loss: 1.8707\n", + "2023-07-02 18:27:33,544 - modelscope - INFO - epoch [1][2055/4953]\tlr: 6.753e-05, memory: 9058, loss: 0.4879\n", + "2023-07-02 18:27:38,872 - modelscope - INFO - epoch [1][2060/4953]\tlr: 6.739e-05, memory: 9058, loss: 1.4332\n", + "2023-07-02 18:27:45,755 - modelscope - INFO - epoch [1][2065/4953]\tlr: 6.725e-05, memory: 9058, loss: 1.3403\n", + "2023-07-02 18:27:52,231 - modelscope - INFO - epoch [1][2070/4953]\tlr: 6.712e-05, memory: 9058, loss: 1.4531\n", + "2023-07-02 18:27:55,367 - modelscope - INFO - epoch [1][2075/4953]\tlr: 6.698e-05, memory: 9058, loss: 2.8781\n", + "2023-07-02 18:28:03,691 - modelscope - INFO - epoch [1][2080/4953]\tlr: 6.684e-05, memory: 9058, loss: 1.1735\n", + "2023-07-02 18:28:12,186 - modelscope - INFO - epoch [1][2085/4953]\tlr: 6.670e-05, memory: 9058, loss: 0.9088\n", + "2023-07-02 18:28:18,486 - modelscope - INFO - epoch [1][2090/4953]\tlr: 6.656e-05, memory: 9058, loss: 0.4293\n", + "2023-07-02 18:28:24,461 - modelscope - INFO - epoch [1][2095/4953]\tlr: 6.642e-05, memory: 9058, loss: 2.8336\n", + "2023-07-02 18:28:31,009 - modelscope - INFO - epoch [1][2100/4953]\tlr: 6.628e-05, memory: 9058, loss: 0.6750\n", + "2023-07-02 18:28:35,682 - modelscope - INFO - epoch [1][2105/4953]\tlr: 6.614e-05, memory: 9058, loss: 1.2004\n", + "2023-07-02 18:28:42,815 - modelscope - INFO - epoch [1][2110/4953]\tlr: 6.601e-05, memory: 9058, loss: 0.7390\n", + "2023-07-02 18:28:48,536 - modelscope - INFO - epoch 
[1][2115/4953]\tlr: 6.587e-05, memory: 9058, loss: 1.2892\n", + "2023-07-02 18:28:54,885 - modelscope - INFO - epoch [1][2120/4953]\tlr: 6.573e-05, memory: 9058, loss: 1.1596\n", + "2023-07-02 18:29:01,644 - modelscope - INFO - epoch [1][2125/4953]\tlr: 6.559e-05, memory: 9058, loss: 1.2383\n", + "2023-07-02 18:29:06,513 - modelscope - INFO - epoch [1][2130/4953]\tlr: 6.545e-05, memory: 9058, loss: 1.6500\n", + "2023-07-02 18:29:12,125 - modelscope - INFO - epoch [1][2135/4953]\tlr: 6.531e-05, memory: 9058, loss: 1.4234\n", + "2023-07-02 18:29:16,930 - modelscope - INFO - epoch [1][2140/4953]\tlr: 6.517e-05, memory: 9058, loss: 0.9209\n", + "2023-07-02 18:29:23,051 - modelscope - INFO - epoch [1][2145/4953]\tlr: 6.503e-05, memory: 9058, loss: 1.3340\n", + "2023-07-02 18:29:26,259 - modelscope - INFO - epoch [1][2150/4953]\tlr: 6.489e-05, memory: 9058, loss: 2.2531\n", + "2023-07-02 18:29:30,151 - modelscope - INFO - epoch [1][2155/4953]\tlr: 6.475e-05, memory: 9058, loss: 2.4398\n", + "2023-07-02 18:29:35,984 - modelscope - INFO - epoch [1][2160/4953]\tlr: 6.461e-05, memory: 9058, loss: 1.2609\n", + "2023-07-02 18:29:42,072 - modelscope - INFO - epoch [1][2165/4953]\tlr: 6.447e-05, memory: 9058, loss: 1.3589\n", + "2023-07-02 18:29:47,131 - modelscope - INFO - epoch [1][2170/4953]\tlr: 6.433e-05, memory: 9058, loss: 1.9894\n", + "2023-07-02 18:29:52,463 - modelscope - INFO - epoch [1][2175/4953]\tlr: 6.419e-05, memory: 9058, loss: 1.4546\n", + "2023-07-02 18:29:56,467 - modelscope - INFO - epoch [1][2180/4953]\tlr: 6.405e-05, memory: 9058, loss: 2.2633\n", + "2023-07-02 18:30:00,810 - modelscope - INFO - epoch [1][2185/4953]\tlr: 6.391e-05, memory: 9058, loss: 1.4179\n", + "2023-07-02 18:30:04,745 - modelscope - INFO - epoch [1][2190/4953]\tlr: 6.377e-05, memory: 9058, loss: 1.1947\n", + "2023-07-02 18:30:10,179 - modelscope - INFO - epoch [1][2195/4953]\tlr: 6.363e-05, memory: 9058, loss: 1.5030\n", + "2023-07-02 18:30:16,533 - modelscope - WARNING - ('METRICS', 
'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:16<00:00, 2.04it/s]\n", + "2023-07-02 18:32:32,577 - modelscope - INFO - Saving checkpoint at 2200 iter\n", + "2023-07-02 18:32:32,617 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter2000_acc0.8048229217529297\n", + "2023-07-02 18:32:32,621 - modelscope - INFO - Saving checkpoint at 2200 iter\n", + "2023-07-02 18:32:32,661 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_2000\n", + "2023-07-02 18:32:32,665 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9058, evaluation/acc: 0.8064, evaluation/loss: 1.3193, loss: 0.8660\n", + "2023-07-02 18:32:38,756 - modelscope - INFO - epoch [1][2205/4953]\tlr: 6.334e-05, memory: 9058, loss: 1.2521\n", + "2023-07-02 18:32:45,468 - modelscope - INFO - epoch [1][2210/4953]\tlr: 6.320e-05, memory: 9058, loss: 1.0652\n", + "2023-07-02 18:32:51,626 - modelscope - INFO - epoch [1][2215/4953]\tlr: 6.306e-05, memory: 9058, loss: 0.8250\n", + "2023-07-02 18:32:56,742 - modelscope - INFO - epoch [1][2220/4953]\tlr: 6.292e-05, memory: 9058, loss: 1.2680\n", + "2023-07-02 18:33:02,927 - modelscope - INFO - epoch [1][2225/4953]\tlr: 6.278e-05, memory: 9058, loss: 1.5531\n", + "2023-07-02 18:33:08,196 - modelscope - INFO - epoch [1][2230/4953]\tlr: 6.264e-05, memory: 9058, loss: 1.5766\n", + "2023-07-02 18:33:14,926 - modelscope - INFO - epoch [1][2235/4953]\tlr: 6.250e-05, memory: 9058, loss: 1.6031\n", + "2023-07-02 18:33:19,152 - modelscope - INFO - epoch [1][2240/4953]\tlr: 6.236e-05, memory: 9058, loss: 1.8438\n", + "2023-07-02 18:33:26,986 - modelscope - INFO - epoch [1][2245/4953]\tlr: 6.221e-05, memory: 9058, loss: 1.0715\n", + "2023-07-02 18:33:34,062 - modelscope - INFO - epoch [1][2250/4953]\tlr: 6.207e-05, memory: 9058, loss: 1.3094\n", + "2023-07-02 18:33:40,767 - modelscope - INFO - epoch 
[1][2255/4953]\tlr: 6.193e-05, memory: 9058, loss: 0.5586\n", + "2023-07-02 18:33:45,996 - modelscope - INFO - epoch [1][2260/4953]\tlr: 6.179e-05, memory: 9058, loss: 1.0727\n", + "2023-07-02 18:33:50,926 - modelscope - INFO - epoch [1][2265/4953]\tlr: 6.165e-05, memory: 9058, loss: 0.5758\n", + "2023-07-02 18:33:54,762 - modelscope - INFO - epoch [1][2270/4953]\tlr: 6.151e-05, memory: 9058, loss: 1.1336\n", + "2023-07-02 18:34:00,210 - modelscope - INFO - epoch [1][2275/4953]\tlr: 6.136e-05, memory: 9058, loss: 1.0373\n", + "2023-07-02 18:34:08,272 - modelscope - INFO - epoch [1][2280/4953]\tlr: 6.122e-05, memory: 9058, loss: 0.7815\n", + "2023-07-02 18:34:14,309 - modelscope - INFO - epoch [1][2285/4953]\tlr: 6.108e-05, memory: 9058, loss: 1.4531\n", + "2023-07-02 18:34:21,626 - modelscope - INFO - epoch [1][2290/4953]\tlr: 6.094e-05, memory: 9058, loss: 1.6297\n", + "2023-07-02 18:34:28,588 - modelscope - INFO - epoch [1][2295/4953]\tlr: 6.080e-05, memory: 9082, loss: 1.6783\n", + "2023-07-02 18:34:33,419 - modelscope - INFO - epoch [1][2300/4953]\tlr: 6.065e-05, memory: 9082, loss: 2.0078\n", + "2023-07-02 18:34:38,966 - modelscope - INFO - epoch [1][2305/4953]\tlr: 6.051e-05, memory: 9082, loss: 1.6065\n", + "2023-07-02 18:34:44,320 - modelscope - INFO - epoch [1][2310/4953]\tlr: 6.037e-05, memory: 9082, loss: 1.6664\n", + "2023-07-02 18:34:49,557 - modelscope - INFO - epoch [1][2315/4953]\tlr: 6.023e-05, memory: 9082, loss: 2.1622\n", + "2023-07-02 18:34:54,691 - modelscope - INFO - epoch [1][2320/4953]\tlr: 6.008e-05, memory: 9082, loss: 2.2738\n", + "2023-07-02 18:35:02,067 - modelscope - INFO - epoch [1][2325/4953]\tlr: 5.994e-05, memory: 9082, loss: 0.6338\n", + "2023-07-02 18:35:07,658 - modelscope - INFO - epoch [1][2330/4953]\tlr: 5.980e-05, memory: 9082, loss: 0.9046\n", + "2023-07-02 18:35:13,966 - modelscope - INFO - epoch [1][2335/4953]\tlr: 5.966e-05, memory: 9082, loss: 1.2388\n", + "2023-07-02 18:35:19,741 - modelscope - INFO - epoch 
[1][2340/4953]\tlr: 5.951e-05, memory: 9082, loss: 0.7371\n", + "2023-07-02 18:35:25,904 - modelscope - INFO - epoch [1][2345/4953]\tlr: 5.937e-05, memory: 9082, loss: 1.4103\n", + "2023-07-02 18:35:31,382 - modelscope - INFO - epoch [1][2350/4953]\tlr: 5.923e-05, memory: 9082, loss: 1.4088\n", + "2023-07-02 18:35:36,193 - modelscope - INFO - epoch [1][2355/4953]\tlr: 5.909e-05, memory: 9082, loss: 2.0184\n", + "2023-07-02 18:35:40,781 - modelscope - INFO - epoch [1][2360/4953]\tlr: 5.894e-05, memory: 9082, loss: 1.1237\n", + "2023-07-02 18:35:45,133 - modelscope - INFO - epoch [1][2365/4953]\tlr: 5.880e-05, memory: 9082, loss: 2.1938\n", + "2023-07-02 18:35:51,029 - modelscope - INFO - epoch [1][2370/4953]\tlr: 5.866e-05, memory: 9082, loss: 0.9563\n", + "2023-07-02 18:35:57,943 - modelscope - INFO - epoch [1][2375/4953]\tlr: 5.852e-05, memory: 9082, loss: 1.3258\n", + "2023-07-02 18:36:05,016 - modelscope - INFO - epoch [1][2380/4953]\tlr: 5.837e-05, memory: 9082, loss: 1.2687\n", + "2023-07-02 18:36:09,977 - modelscope - INFO - epoch [1][2385/4953]\tlr: 5.823e-05, memory: 9082, loss: 1.2655\n", + "2023-07-02 18:36:16,229 - modelscope - INFO - epoch [1][2390/4953]\tlr: 5.809e-05, memory: 9082, loss: 0.9164\n", + "2023-07-02 18:36:21,471 - modelscope - INFO - epoch [1][2395/4953]\tlr: 5.794e-05, memory: 9082, loss: 1.6281\n", + "2023-07-02 18:36:27,959 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:38:43,433 - modelscope - INFO - Saving checkpoint at 2400 iter\n", + "2023-07-02 18:38:43,474 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter2200_acc0.8063529133796692\n", + "2023-07-02 18:38:43,478 - modelscope - INFO - Saving checkpoint at 2400 iter\n", + "2023-07-02 18:38:43,517 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_2200\n", + "2023-07-02 18:38:43,521 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8076, evaluation/loss: 1.3023, loss: 0.6604\n", + "2023-07-02 18:38:48,050 - modelscope - INFO - epoch [1][2405/4953]\tlr: 5.766e-05, memory: 9082, loss: 1.8258\n", + "2023-07-02 18:38:54,650 - modelscope - INFO - epoch [1][2410/4953]\tlr: 5.751e-05, memory: 9082, loss: 1.3132\n", + "2023-07-02 18:38:59,846 - modelscope - INFO - epoch [1][2415/4953]\tlr: 5.737e-05, memory: 9082, loss: 1.6910\n", + "2023-07-02 18:39:07,443 - modelscope - INFO - epoch [1][2420/4953]\tlr: 5.723e-05, memory: 9082, loss: 1.4445\n", + "2023-07-02 18:39:15,603 - modelscope - INFO - epoch [1][2425/4953]\tlr: 5.708e-05, memory: 9082, loss: 0.9867\n", + "2023-07-02 18:39:21,112 - modelscope - INFO - epoch [1][2430/4953]\tlr: 5.694e-05, memory: 9082, loss: 1.5023\n", + "2023-07-02 18:39:26,278 - modelscope - INFO - epoch [1][2435/4953]\tlr: 5.680e-05, memory: 9082, loss: 1.5297\n", + "2023-07-02 18:39:32,189 - modelscope - INFO - epoch [1][2440/4953]\tlr: 5.666e-05, memory: 9082, loss: 1.2663\n", + "2023-07-02 18:39:39,288 - modelscope - INFO - epoch [1][2445/4953]\tlr: 5.651e-05, memory: 9082, loss: 1.1214\n", + "2023-07-02 18:39:45,604 - modelscope - INFO - epoch [1][2450/4953]\tlr: 5.637e-05, memory: 9082, loss: 0.7744\n", + "2023-07-02 18:39:50,026 - modelscope - INFO - epoch [1][2455/4953]\tlr: 5.623e-05, memory: 9082, loss: 1.3865\n", + "2023-07-02 18:39:57,039 - modelscope - INFO - epoch [1][2460/4953]\tlr: 5.608e-05, memory: 9082, loss: 0.5821\n", + "2023-07-02 18:40:04,905 - modelscope - INFO - epoch [1][2465/4953]\tlr: 5.594e-05, memory: 9082, loss: 1.6459\n", + "2023-07-02 18:40:12,277 - modelscope - INFO - epoch [1][2470/4953]\tlr: 5.580e-05, memory: 9082, loss: 1.5098\n", + "2023-07-02 18:40:21,189 - modelscope - INFO - epoch [1][2475/4953]\tlr: 5.565e-05, memory: 9082, loss: 0.7347\n", + "2023-07-02 
18:40:25,832 - modelscope - INFO - epoch [1][2480/4953]\tlr: 5.551e-05, memory: 9082, loss: 1.9617\n", + "2023-07-02 18:40:31,034 - modelscope - INFO - epoch [1][2485/4953]\tlr: 5.537e-05, memory: 9082, loss: 1.3300\n", + "2023-07-02 18:40:35,486 - modelscope - INFO - epoch [1][2490/4953]\tlr: 5.522e-05, memory: 9082, loss: 1.7078\n", + "2023-07-02 18:40:43,211 - modelscope - INFO - epoch [1][2495/4953]\tlr: 5.508e-05, memory: 9082, loss: 1.5921\n", + "2023-07-02 18:40:48,454 - modelscope - INFO - epoch [1][2500/4953]\tlr: 5.494e-05, memory: 9082, loss: 1.9926\n", + "2023-07-02 18:40:53,713 - modelscope - INFO - epoch [1][2505/4953]\tlr: 5.479e-05, memory: 9082, loss: 1.1594\n", + "2023-07-02 18:40:58,439 - modelscope - INFO - epoch [1][2510/4953]\tlr: 5.465e-05, memory: 9082, loss: 1.1770\n", + "2023-07-02 18:41:04,372 - modelscope - INFO - epoch [1][2515/4953]\tlr: 5.451e-05, memory: 9082, loss: 1.6250\n", + "2023-07-02 18:41:09,182 - modelscope - INFO - epoch [1][2520/4953]\tlr: 5.436e-05, memory: 9082, loss: 1.7578\n", + "2023-07-02 18:41:14,114 - modelscope - INFO - epoch [1][2525/4953]\tlr: 5.422e-05, memory: 9082, loss: 2.3328\n", + "2023-07-02 18:41:20,090 - modelscope - INFO - epoch [1][2530/4953]\tlr: 5.408e-05, memory: 9082, loss: 2.0059\n", + "2023-07-02 18:41:24,643 - modelscope - INFO - epoch [1][2535/4953]\tlr: 5.393e-05, memory: 9082, loss: 1.9216\n", + "2023-07-02 18:41:30,805 - modelscope - INFO - epoch [1][2540/4953]\tlr: 5.379e-05, memory: 9082, loss: 0.7870\n", + "2023-07-02 18:41:35,276 - modelscope - INFO - epoch [1][2545/4953]\tlr: 5.365e-05, memory: 9082, loss: 1.8344\n", + "2023-07-02 18:41:40,107 - modelscope - INFO - epoch [1][2550/4953]\tlr: 5.350e-05, memory: 9082, loss: 1.0918\n", + "2023-07-02 18:41:45,127 - modelscope - INFO - epoch [1][2555/4953]\tlr: 5.336e-05, memory: 9082, loss: 0.8277\n", + "2023-07-02 18:41:49,439 - modelscope - INFO - epoch [1][2560/4953]\tlr: 5.322e-05, memory: 9082, loss: 1.3539\n", + "2023-07-02 
18:41:54,796 - modelscope - INFO - epoch [1][2565/4953]\tlr: 5.307e-05, memory: 9082, loss: 1.4898\n", + "2023-07-02 18:41:59,982 - modelscope - INFO - epoch [1][2570/4953]\tlr: 5.293e-05, memory: 9082, loss: 1.4383\n", + "2023-07-02 18:42:06,280 - modelscope - INFO - epoch [1][2575/4953]\tlr: 5.279e-05, memory: 9082, loss: 1.3823\n", + "2023-07-02 18:42:11,765 - modelscope - INFO - epoch [1][2580/4953]\tlr: 5.264e-05, memory: 9082, loss: 1.6961\n", + "2023-07-02 18:42:18,475 - modelscope - INFO - epoch [1][2585/4953]\tlr: 5.250e-05, memory: 9082, loss: 1.7096\n", + "2023-07-02 18:42:25,377 - modelscope - INFO - epoch [1][2590/4953]\tlr: 5.236e-05, memory: 9082, loss: 0.2711\n", + "2023-07-02 18:42:31,462 - modelscope - INFO - epoch [1][2595/4953]\tlr: 5.222e-05, memory: 9082, loss: 1.8032\n", + "2023-07-02 18:42:37,270 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:44:53,170 - modelscope - INFO - Saving checkpoint at 2600 iter\n", + "2023-07-02 18:44:53,210 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter2400_acc0.8075699210166931\n", + "2023-07-02 18:44:53,214 - modelscope - INFO - Saving checkpoint at 2600 iter\n", + "2023-07-02 18:44:53,253 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_2400\n", + "2023-07-02 18:44:53,258 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8082, evaluation/loss: 1.3051, loss: 1.3200\n", + "2023-07-02 18:44:56,746 - modelscope - INFO - epoch [1][2605/4953]\tlr: 5.193e-05, memory: 9082, loss: 2.4016\n", + "2023-07-02 18:45:02,237 - modelscope - INFO - epoch [1][2610/4953]\tlr: 5.179e-05, memory: 9082, loss: 1.4620\n", + "2023-07-02 18:45:08,746 - modelscope - INFO - epoch [1][2615/4953]\tlr: 5.164e-05, memory: 9082, loss: 1.0342\n", + 
"2023-07-02 18:45:15,827 - modelscope - INFO - epoch [1][2620/4953]\tlr: 5.150e-05, memory: 9082, loss: 1.2133\n", + "2023-07-02 18:45:20,967 - modelscope - INFO - epoch [1][2625/4953]\tlr: 5.136e-05, memory: 9082, loss: 1.1039\n", + "2023-07-02 18:45:28,010 - modelscope - INFO - epoch [1][2630/4953]\tlr: 5.122e-05, memory: 9082, loss: 2.2398\n", + "2023-07-02 18:45:33,346 - modelscope - INFO - epoch [1][2635/4953]\tlr: 5.107e-05, memory: 9082, loss: 1.0719\n", + "2023-07-02 18:45:38,505 - modelscope - INFO - epoch [1][2640/4953]\tlr: 5.093e-05, memory: 9082, loss: 2.1718\n", + "2023-07-02 18:45:46,286 - modelscope - INFO - epoch [1][2645/4953]\tlr: 5.079e-05, memory: 9082, loss: 1.4109\n", + "2023-07-02 18:45:50,359 - modelscope - INFO - epoch [1][2650/4953]\tlr: 5.065e-05, memory: 9082, loss: 2.7281\n", + "2023-07-02 18:45:54,451 - modelscope - INFO - epoch [1][2655/4953]\tlr: 5.050e-05, memory: 9082, loss: 1.4117\n", + "2023-07-02 18:46:01,191 - modelscope - INFO - epoch [1][2660/4953]\tlr: 5.036e-05, memory: 9082, loss: 1.0565\n", + "2023-07-02 18:46:06,247 - modelscope - INFO - epoch [1][2665/4953]\tlr: 5.022e-05, memory: 9082, loss: 0.9540\n", + "2023-07-02 18:46:13,076 - modelscope - INFO - epoch [1][2670/4953]\tlr: 5.008e-05, memory: 9082, loss: 1.5935\n", + "2023-07-02 18:46:18,638 - modelscope - INFO - epoch [1][2675/4953]\tlr: 4.993e-05, memory: 9082, loss: 2.1958\n", + "2023-07-02 18:46:23,885 - modelscope - INFO - epoch [1][2680/4953]\tlr: 4.979e-05, memory: 9082, loss: 1.6164\n", + "2023-07-02 18:46:31,178 - modelscope - INFO - epoch [1][2685/4953]\tlr: 4.965e-05, memory: 9082, loss: 0.9352\n", + "2023-07-02 18:46:38,014 - modelscope - INFO - epoch [1][2690/4953]\tlr: 4.951e-05, memory: 9082, loss: 1.4887\n", + "2023-07-02 18:46:41,545 - modelscope - INFO - epoch [1][2695/4953]\tlr: 4.936e-05, memory: 9082, loss: 1.2578\n", + "2023-07-02 18:46:46,458 - modelscope - INFO - epoch [1][2700/4953]\tlr: 4.922e-05, memory: 9082, loss: 1.1711\n", + 
"2023-07-02 18:46:53,227 - modelscope - INFO - epoch [1][2705/4953]\tlr: 4.908e-05, memory: 9082, loss: 1.3223\n", + "2023-07-02 18:46:59,578 - modelscope - INFO - epoch [1][2710/4953]\tlr: 4.894e-05, memory: 9082, loss: 1.4570\n", + "2023-07-02 18:47:04,896 - modelscope - INFO - epoch [1][2715/4953]\tlr: 4.880e-05, memory: 9082, loss: 1.0868\n", + "2023-07-02 18:47:10,404 - modelscope - INFO - epoch [1][2720/4953]\tlr: 4.865e-05, memory: 9082, loss: 1.5884\n", + "2023-07-02 18:47:16,038 - modelscope - INFO - epoch [1][2725/4953]\tlr: 4.851e-05, memory: 9082, loss: 1.0243\n", + "2023-07-02 18:47:22,354 - modelscope - INFO - epoch [1][2730/4953]\tlr: 4.837e-05, memory: 9082, loss: 1.4346\n", + "2023-07-02 18:47:29,290 - modelscope - INFO - epoch [1][2735/4953]\tlr: 4.823e-05, memory: 9082, loss: 0.9521\n", + "2023-07-02 18:47:37,813 - modelscope - INFO - epoch [1][2740/4953]\tlr: 4.809e-05, memory: 9082, loss: 0.7296\n", + "2023-07-02 18:47:40,908 - modelscope - INFO - epoch [1][2745/4953]\tlr: 4.795e-05, memory: 9082, loss: 1.5844\n", + "2023-07-02 18:47:46,334 - modelscope - INFO - epoch [1][2750/4953]\tlr: 4.781e-05, memory: 9082, loss: 1.5023\n", + "2023-07-02 18:47:51,224 - modelscope - INFO - epoch [1][2755/4953]\tlr: 4.766e-05, memory: 9082, loss: 0.9710\n", + "2023-07-02 18:47:58,431 - modelscope - INFO - epoch [1][2760/4953]\tlr: 4.752e-05, memory: 9082, loss: 1.1539\n", + "2023-07-02 18:48:04,898 - modelscope - INFO - epoch [1][2765/4953]\tlr: 4.738e-05, memory: 9082, loss: 1.6984\n", + "2023-07-02 18:48:10,316 - modelscope - INFO - epoch [1][2770/4953]\tlr: 4.724e-05, memory: 9082, loss: 1.5420\n", + "2023-07-02 18:48:16,843 - modelscope - INFO - epoch [1][2775/4953]\tlr: 4.710e-05, memory: 9082, loss: 1.2396\n", + "2023-07-02 18:48:22,406 - modelscope - INFO - epoch [1][2780/4953]\tlr: 4.696e-05, memory: 9082, loss: 1.8611\n", + "2023-07-02 18:48:28,234 - modelscope - INFO - epoch [1][2785/4953]\tlr: 4.682e-05, memory: 9082, loss: 1.2051\n", + 
"2023-07-02 18:48:35,175 - modelscope - INFO - epoch [1][2790/4953]\tlr: 4.668e-05, memory: 9082, loss: 0.9440\n", + "2023-07-02 18:48:40,689 - modelscope - INFO - epoch [1][2795/4953]\tlr: 4.654e-05, memory: 9082, loss: 1.5422\n", + "2023-07-02 18:48:46,340 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:51:02,313 - modelscope - INFO - Saving checkpoint at 2800 iter\n", + "2023-07-02 18:51:02,352 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_2600\n", + "2023-07-02 18:51:02,357 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8080, evaluation/loss: 1.2874, loss: 0.3999\n", + "2023-07-02 18:51:09,389 - modelscope - INFO - epoch [1][2805/4953]\tlr: 4.625e-05, memory: 9082, loss: 0.9511\n", + "2023-07-02 18:51:14,406 - modelscope - INFO - epoch [1][2810/4953]\tlr: 4.611e-05, memory: 9082, loss: 0.9344\n", + "2023-07-02 18:51:19,383 - modelscope - INFO - epoch [1][2815/4953]\tlr: 4.597e-05, memory: 9082, loss: 1.5798\n", + "2023-07-02 18:51:26,100 - modelscope - INFO - epoch [1][2820/4953]\tlr: 4.583e-05, memory: 9082, loss: 1.1518\n", + "2023-07-02 18:51:31,560 - modelscope - INFO - epoch [1][2825/4953]\tlr: 4.569e-05, memory: 9082, loss: 1.9438\n", + "2023-07-02 18:51:37,772 - modelscope - INFO - epoch [1][2830/4953]\tlr: 4.555e-05, memory: 9082, loss: 1.2336\n", + "2023-07-02 18:51:45,037 - modelscope - INFO - epoch [1][2835/4953]\tlr: 4.541e-05, memory: 9082, loss: 0.4342\n", + "2023-07-02 18:51:50,379 - modelscope - INFO - epoch [1][2840/4953]\tlr: 4.527e-05, memory: 9082, loss: 1.5258\n", + "2023-07-02 18:51:55,219 - modelscope - INFO - epoch [1][2845/4953]\tlr: 4.513e-05, memory: 9082, loss: 1.3063\n", + "2023-07-02 18:52:00,648 - modelscope - INFO - epoch [1][2850/4953]\tlr: 4.499e-05, memory: 9082, loss: 1.0977\n", + "2023-07-02 
18:52:05,123 - modelscope - INFO - epoch [1][2855/4953]\tlr: 4.486e-05, memory: 9082, loss: 1.2469\n", + "2023-07-02 18:52:10,542 - modelscope - INFO - epoch [1][2860/4953]\tlr: 4.472e-05, memory: 9082, loss: 1.0984\n", + "2023-07-02 18:52:17,747 - modelscope - INFO - epoch [1][2865/4953]\tlr: 4.458e-05, memory: 9082, loss: 0.7611\n", + "2023-07-02 18:52:23,635 - modelscope - INFO - epoch [1][2870/4953]\tlr: 4.444e-05, memory: 9082, loss: 1.9703\n", + "2023-07-02 18:52:29,494 - modelscope - INFO - epoch [1][2875/4953]\tlr: 4.430e-05, memory: 9082, loss: 1.2950\n", + "2023-07-02 18:52:35,837 - modelscope - INFO - epoch [1][2880/4953]\tlr: 4.416e-05, memory: 9082, loss: 0.8969\n", + "2023-07-02 18:52:40,187 - modelscope - INFO - epoch [1][2885/4953]\tlr: 4.402e-05, memory: 9082, loss: 2.0484\n", + "2023-07-02 18:52:46,608 - modelscope - INFO - epoch [1][2890/4953]\tlr: 4.388e-05, memory: 9082, loss: 1.3309\n", + "2023-07-02 18:52:52,971 - modelscope - INFO - epoch [1][2895/4953]\tlr: 4.374e-05, memory: 9082, loss: 2.1859\n", + "2023-07-02 18:52:57,418 - modelscope - INFO - epoch [1][2900/4953]\tlr: 4.360e-05, memory: 9082, loss: 1.4730\n", + "2023-07-02 18:53:02,915 - modelscope - INFO - epoch [1][2905/4953]\tlr: 4.347e-05, memory: 9082, loss: 1.1398\n", + "2023-07-02 18:53:08,380 - modelscope - INFO - epoch [1][2910/4953]\tlr: 4.333e-05, memory: 9082, loss: 1.1520\n", + "2023-07-02 18:53:14,293 - modelscope - INFO - epoch [1][2915/4953]\tlr: 4.319e-05, memory: 9082, loss: 1.4763\n", + "2023-07-02 18:53:19,782 - modelscope - INFO - epoch [1][2920/4953]\tlr: 4.305e-05, memory: 9082, loss: 1.3924\n", + "2023-07-02 18:53:24,564 - modelscope - INFO - epoch [1][2925/4953]\tlr: 4.291e-05, memory: 9082, loss: 1.1281\n", + "2023-07-02 18:53:28,764 - modelscope - INFO - epoch [1][2930/4953]\tlr: 4.278e-05, memory: 9082, loss: 1.3961\n", + "2023-07-02 18:53:34,633 - modelscope - INFO - epoch [1][2935/4953]\tlr: 4.264e-05, memory: 9082, loss: 1.1989\n", + "2023-07-02 
18:53:40,740 - modelscope - INFO - epoch [1][2940/4953]\tlr: 4.250e-05, memory: 9082, loss: 1.4141\n", + "2023-07-02 18:53:45,991 - modelscope - INFO - epoch [1][2945/4953]\tlr: 4.236e-05, memory: 9082, loss: 1.8516\n", + "2023-07-02 18:53:53,446 - modelscope - INFO - epoch [1][2950/4953]\tlr: 4.223e-05, memory: 9082, loss: 1.0945\n", + "2023-07-02 18:53:57,916 - modelscope - INFO - epoch [1][2955/4953]\tlr: 4.209e-05, memory: 9082, loss: 2.4191\n", + "2023-07-02 18:54:03,814 - modelscope - INFO - epoch [1][2960/4953]\tlr: 4.195e-05, memory: 9082, loss: 1.0555\n", + "2023-07-02 18:54:11,481 - modelscope - INFO - epoch [1][2965/4953]\tlr: 4.181e-05, memory: 9082, loss: 1.0359\n", + "2023-07-02 18:54:18,062 - modelscope - INFO - epoch [1][2970/4953]\tlr: 4.168e-05, memory: 9082, loss: 0.5380\n", + "2023-07-02 18:54:23,157 - modelscope - INFO - epoch [1][2975/4953]\tlr: 4.154e-05, memory: 9082, loss: 1.7539\n", + "2023-07-02 18:54:27,560 - modelscope - INFO - epoch [1][2980/4953]\tlr: 4.140e-05, memory: 9082, loss: 1.5100\n", + "2023-07-02 18:54:32,977 - modelscope - INFO - epoch [1][2985/4953]\tlr: 4.127e-05, memory: 9082, loss: 1.5968\n", + "2023-07-02 18:54:38,633 - modelscope - INFO - epoch [1][2990/4953]\tlr: 4.113e-05, memory: 9082, loss: 1.0911\n", + "2023-07-02 18:54:46,186 - modelscope - INFO - epoch [1][2995/4953]\tlr: 4.100e-05, memory: 9082, loss: 0.9789\n", + "2023-07-02 18:54:52,074 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 18:57:08,067 - modelscope - INFO - Saving checkpoint at 3000 iter\n", + "2023-07-02 18:57:08,107 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter2600_acc0.8082306385040283\n", + "2023-07-02 18:57:08,111 - modelscope - INFO - Saving checkpoint at 3000 iter\n", + "2023-07-02 18:57:08,150 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_2800\n", + "2023-07-02 18:57:08,155 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8084, evaluation/loss: 1.2728, loss: 0.7777\n", + "2023-07-02 18:57:14,568 - modelscope - INFO - epoch [1][3005/4953]\tlr: 4.072e-05, memory: 9082, loss: 1.7105\n", + "2023-07-02 18:57:20,305 - modelscope - INFO - epoch [1][3010/4953]\tlr: 4.059e-05, memory: 9082, loss: 0.9040\n", + "2023-07-02 18:57:25,518 - modelscope - INFO - epoch [1][3015/4953]\tlr: 4.045e-05, memory: 9082, loss: 1.3430\n", + "2023-07-02 18:57:30,679 - modelscope - INFO - epoch [1][3020/4953]\tlr: 4.032e-05, memory: 9082, loss: 1.9619\n", + "2023-07-02 18:57:36,997 - modelscope - INFO - epoch [1][3025/4953]\tlr: 4.018e-05, memory: 9082, loss: 0.9646\n", + "2023-07-02 18:57:42,949 - modelscope - INFO - epoch [1][3030/4953]\tlr: 4.005e-05, memory: 9082, loss: 0.8223\n", + "2023-07-02 18:57:47,568 - modelscope - INFO - epoch [1][3035/4953]\tlr: 3.991e-05, memory: 9082, loss: 1.9203\n", + "2023-07-02 18:57:53,111 - modelscope - INFO - epoch [1][3040/4953]\tlr: 3.978e-05, memory: 9082, loss: 1.0070\n", + "2023-07-02 18:57:59,474 - modelscope - INFO - epoch [1][3045/4953]\tlr: 3.964e-05, memory: 9082, loss: 1.2164\n", + "2023-07-02 18:58:04,237 - modelscope - INFO - epoch [1][3050/4953]\tlr: 3.951e-05, memory: 9082, loss: 1.6008\n", + "2023-07-02 18:58:09,687 - modelscope - INFO - epoch [1][3055/4953]\tlr: 3.937e-05, memory: 9082, loss: 2.0203\n", + "2023-07-02 18:58:14,949 - modelscope - INFO - epoch [1][3060/4953]\tlr: 3.924e-05, memory: 9082, loss: 1.4613\n", + "2023-07-02 18:58:21,818 - modelscope - INFO - epoch [1][3065/4953]\tlr: 3.911e-05, memory: 9082, loss: 1.2766\n", + "2023-07-02 18:58:28,251 - modelscope - INFO - epoch [1][3070/4953]\tlr: 3.897e-05, memory: 9082, loss: 1.2920\n", + "2023-07-02 18:58:34,440 - modelscope - INFO - epoch [1][3075/4953]\tlr: 3.884e-05, memory: 9082, loss: 1.1436\n", + "2023-07-02 
18:58:41,344 - modelscope - INFO - epoch [1][3080/4953]\tlr: 3.870e-05, memory: 9082, loss: 1.6750\n", + "2023-07-02 18:58:47,507 - modelscope - INFO - epoch [1][3085/4953]\tlr: 3.857e-05, memory: 9082, loss: 1.4508\n", + "2023-07-02 18:58:53,152 - modelscope - INFO - epoch [1][3090/4953]\tlr: 3.844e-05, memory: 9082, loss: 1.1961\n", + "2023-07-02 18:58:57,615 - modelscope - INFO - epoch [1][3095/4953]\tlr: 3.830e-05, memory: 9082, loss: 2.0420\n", + "2023-07-02 18:59:04,675 - modelscope - INFO - epoch [1][3100/4953]\tlr: 3.817e-05, memory: 9082, loss: 0.3189\n", + "2023-07-02 18:59:09,594 - modelscope - INFO - epoch [1][3105/4953]\tlr: 3.804e-05, memory: 9082, loss: 1.5581\n", + "2023-07-02 18:59:16,591 - modelscope - INFO - epoch [1][3110/4953]\tlr: 3.791e-05, memory: 9082, loss: 0.9396\n", + "2023-07-02 18:59:23,334 - modelscope - INFO - epoch [1][3115/4953]\tlr: 3.777e-05, memory: 9082, loss: 0.6580\n", + "2023-07-02 18:59:28,047 - modelscope - INFO - epoch [1][3120/4953]\tlr: 3.764e-05, memory: 9082, loss: 1.4602\n", + "2023-07-02 18:59:31,315 - modelscope - INFO - epoch [1][3125/4953]\tlr: 3.751e-05, memory: 9082, loss: 1.3484\n", + "2023-07-02 18:59:36,121 - modelscope - INFO - epoch [1][3130/4953]\tlr: 3.738e-05, memory: 9082, loss: 2.1273\n", + "2023-07-02 18:59:44,336 - modelscope - INFO - epoch [1][3135/4953]\tlr: 3.725e-05, memory: 9082, loss: 0.8621\n", + "2023-07-02 18:59:49,884 - modelscope - INFO - epoch [1][3140/4953]\tlr: 3.712e-05, memory: 9082, loss: 1.0844\n", + "2023-07-02 18:59:52,597 - modelscope - INFO - epoch [1][3145/4953]\tlr: 3.698e-05, memory: 9082, loss: 1.5453\n", + "2023-07-02 18:59:59,243 - modelscope - INFO - epoch [1][3150/4953]\tlr: 3.685e-05, memory: 9082, loss: 1.1129\n", + "2023-07-02 19:00:04,220 - modelscope - INFO - epoch [1][3155/4953]\tlr: 3.672e-05, memory: 9082, loss: 1.1824\n", + "2023-07-02 19:00:11,762 - modelscope - INFO - epoch [1][3160/4953]\tlr: 3.659e-05, memory: 9082, loss: 0.5676\n", + "2023-07-02 
19:00:18,630 - modelscope - INFO - epoch [1][3165/4953]\tlr: 3.646e-05, memory: 9082, loss: 0.9189\n", + "2023-07-02 19:00:23,483 - modelscope - INFO - epoch [1][3170/4953]\tlr: 3.633e-05, memory: 9082, loss: 1.0324\n", + "2023-07-02 19:00:27,164 - modelscope - INFO - epoch [1][3175/4953]\tlr: 3.620e-05, memory: 9082, loss: 1.2984\n", + "2023-07-02 19:00:32,041 - modelscope - INFO - epoch [1][3180/4953]\tlr: 3.607e-05, memory: 9082, loss: 1.6036\n", + "2023-07-02 19:00:37,245 - modelscope - INFO - epoch [1][3185/4953]\tlr: 3.594e-05, memory: 9082, loss: 1.3896\n", + "2023-07-02 19:00:44,493 - modelscope - INFO - epoch [1][3190/4953]\tlr: 3.581e-05, memory: 9082, loss: 1.1153\n", + "2023-07-02 19:00:49,874 - modelscope - INFO - epoch [1][3195/4953]\tlr: 3.568e-05, memory: 9082, loss: 1.2354\n", + "2023-07-02 19:00:55,061 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:03:10,730 - modelscope - INFO - Saving checkpoint at 3200 iter\n", + "2023-07-02 19:03:10,770 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter3000_acc0.8084218502044678\n", + "2023-07-02 19:03:10,774 - modelscope - INFO - Saving checkpoint at 3200 iter\n", + "2023-07-02 19:03:10,813 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_3000\n", + "2023-07-02 19:03:10,818 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8086, evaluation/loss: 1.2627, loss: 1.5492\n", + "2023-07-02 19:03:18,070 - modelscope - INFO - epoch [1][3205/4953]\tlr: 3.542e-05, memory: 9082, loss: 0.1662\n", + "2023-07-02 19:03:26,317 - modelscope - INFO - epoch [1][3210/4953]\tlr: 3.530e-05, memory: 9082, loss: 1.6430\n", + "2023-07-02 19:03:32,449 - modelscope - INFO - epoch [1][3215/4953]\tlr: 3.517e-05, memory: 9082, loss: 0.4798\n", + 
"2023-07-02 19:03:38,508 - modelscope - INFO - epoch [1][3220/4953]\tlr: 3.504e-05, memory: 9082, loss: 1.0096\n", + "2023-07-02 19:03:45,266 - modelscope - INFO - epoch [1][3225/4953]\tlr: 3.491e-05, memory: 9082, loss: 1.1305\n", + "2023-07-02 19:03:48,361 - modelscope - INFO - epoch [1][3230/4953]\tlr: 3.478e-05, memory: 9082, loss: 1.6721\n", + "2023-07-02 19:03:54,630 - modelscope - INFO - epoch [1][3235/4953]\tlr: 3.465e-05, memory: 9082, loss: 1.1138\n", + "2023-07-02 19:03:59,780 - modelscope - INFO - epoch [1][3240/4953]\tlr: 3.453e-05, memory: 9082, loss: 1.2146\n", + "2023-07-02 19:04:04,310 - modelscope - INFO - epoch [1][3245/4953]\tlr: 3.440e-05, memory: 9082, loss: 0.9602\n", + "2023-07-02 19:04:09,085 - modelscope - INFO - epoch [1][3250/4953]\tlr: 3.427e-05, memory: 9082, loss: 2.0369\n", + "2023-07-02 19:04:13,329 - modelscope - INFO - epoch [1][3255/4953]\tlr: 3.415e-05, memory: 9082, loss: 1.3604\n", + "2023-07-02 19:04:19,728 - modelscope - INFO - epoch [1][3260/4953]\tlr: 3.402e-05, memory: 9082, loss: 1.0500\n", + "2023-07-02 19:04:25,537 - modelscope - INFO - epoch [1][3265/4953]\tlr: 3.389e-05, memory: 9082, loss: 1.0730\n", + "2023-07-02 19:04:33,616 - modelscope - INFO - epoch [1][3270/4953]\tlr: 3.377e-05, memory: 9082, loss: 1.3219\n", + "2023-07-02 19:04:36,942 - modelscope - INFO - epoch [1][3275/4953]\tlr: 3.364e-05, memory: 9082, loss: 0.7494\n", + "2023-07-02 19:04:43,190 - modelscope - INFO - epoch [1][3280/4953]\tlr: 3.351e-05, memory: 9082, loss: 0.8293\n", + "2023-07-02 19:04:51,311 - modelscope - INFO - epoch [1][3285/4953]\tlr: 3.339e-05, memory: 9082, loss: 0.7475\n", + "2023-07-02 19:04:54,815 - modelscope - INFO - epoch [1][3290/4953]\tlr: 3.326e-05, memory: 9082, loss: 1.8000\n", + "2023-07-02 19:05:00,342 - modelscope - INFO - epoch [1][3295/4953]\tlr: 3.314e-05, memory: 9082, loss: 1.9621\n", + "2023-07-02 19:05:06,094 - modelscope - INFO - epoch [1][3300/4953]\tlr: 3.301e-05, memory: 9082, loss: 1.3162\n", + 
"2023-07-02 19:05:10,639 - modelscope - INFO - epoch [1][3305/4953]\tlr: 3.289e-05, memory: 9082, loss: 1.4781\n", + "2023-07-02 19:05:12,888 - modelscope - INFO - epoch [1][3310/4953]\tlr: 3.276e-05, memory: 9082, loss: 1.9320\n", + "2023-07-02 19:05:18,374 - modelscope - INFO - epoch [1][3315/4953]\tlr: 3.264e-05, memory: 9082, loss: 0.4891\n", + "2023-07-02 19:05:25,255 - modelscope - INFO - epoch [1][3320/4953]\tlr: 3.252e-05, memory: 9082, loss: 0.9572\n", + "2023-07-02 19:05:31,095 - modelscope - INFO - epoch [1][3325/4953]\tlr: 3.239e-05, memory: 9082, loss: 1.0703\n", + "2023-07-02 19:05:37,787 - modelscope - INFO - epoch [1][3330/4953]\tlr: 3.227e-05, memory: 9082, loss: 0.4883\n", + "2023-07-02 19:05:42,067 - modelscope - INFO - epoch [1][3335/4953]\tlr: 3.214e-05, memory: 9082, loss: 2.1445\n", + "2023-07-02 19:05:47,958 - modelscope - INFO - epoch [1][3340/4953]\tlr: 3.202e-05, memory: 9082, loss: 1.5414\n", + "2023-07-02 19:05:52,434 - modelscope - INFO - epoch [1][3345/4953]\tlr: 3.190e-05, memory: 9082, loss: 1.9531\n", + "2023-07-02 19:05:57,227 - modelscope - INFO - epoch [1][3350/4953]\tlr: 3.178e-05, memory: 9082, loss: 1.2508\n", + "2023-07-02 19:06:03,488 - modelscope - INFO - epoch [1][3355/4953]\tlr: 3.165e-05, memory: 9082, loss: 1.1402\n", + "2023-07-02 19:06:08,978 - modelscope - INFO - epoch [1][3360/4953]\tlr: 3.153e-05, memory: 9082, loss: 1.1211\n", + "2023-07-02 19:06:16,191 - modelscope - INFO - epoch [1][3365/4953]\tlr: 3.141e-05, memory: 9082, loss: 0.7613\n", + "2023-07-02 19:06:23,420 - modelscope - INFO - epoch [1][3370/4953]\tlr: 3.129e-05, memory: 9082, loss: 1.3293\n", + "2023-07-02 19:06:30,067 - modelscope - INFO - epoch [1][3375/4953]\tlr: 3.117e-05, memory: 9082, loss: 1.9758\n", + "2023-07-02 19:06:36,844 - modelscope - INFO - epoch [1][3380/4953]\tlr: 3.104e-05, memory: 9082, loss: 0.3589\n", + "2023-07-02 19:06:43,906 - modelscope - INFO - epoch [1][3385/4953]\tlr: 3.092e-05, memory: 9082, loss: 0.9208\n", + 
"2023-07-02 19:06:49,972 - modelscope - INFO - epoch [1][3390/4953]\tlr: 3.080e-05, memory: 9082, loss: 1.2713\n", + "2023-07-02 19:06:56,815 - modelscope - INFO - epoch [1][3395/4953]\tlr: 3.068e-05, memory: 9082, loss: 1.3320\n", + "2023-07-02 19:07:00,998 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:09:16,634 - modelscope - INFO - Saving checkpoint at 3400 iter\n", + "2023-07-02 19:09:16,674 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter3200_acc0.8085957169532776\n", + "2023-07-02 19:09:16,679 - modelscope - INFO - Saving checkpoint at 3400 iter\n", + "2023-07-02 19:09:16,718 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_3200\n", + "2023-07-02 19:09:16,723 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8090, evaluation/loss: 1.2532, loss: 1.3594\n", + "2023-07-02 19:09:23,967 - modelscope - INFO - epoch [1][3405/4953]\tlr: 3.044e-05, memory: 9082, loss: 1.4662\n", + "2023-07-02 19:09:27,883 - modelscope - INFO - epoch [1][3410/4953]\tlr: 3.032e-05, memory: 9082, loss: 1.6219\n", + "2023-07-02 19:09:36,612 - modelscope - INFO - epoch [1][3415/4953]\tlr: 3.020e-05, memory: 9082, loss: 0.8362\n", + "2023-07-02 19:09:43,660 - modelscope - INFO - epoch [1][3420/4953]\tlr: 3.008e-05, memory: 9082, loss: 0.5874\n", + "2023-07-02 19:09:50,318 - modelscope - INFO - epoch [1][3425/4953]\tlr: 2.996e-05, memory: 9082, loss: 0.5588\n", + "2023-07-02 19:09:55,763 - modelscope - INFO - epoch [1][3430/4953]\tlr: 2.985e-05, memory: 9082, loss: 1.5086\n", + "2023-07-02 19:10:00,017 - modelscope - INFO - epoch [1][3435/4953]\tlr: 2.973e-05, memory: 9082, loss: 1.7063\n", + "2023-07-02 19:10:04,359 - modelscope - INFO - epoch [1][3440/4953]\tlr: 2.961e-05, memory: 9082, loss: 
1.0250\n", + "2023-07-02 19:10:11,212 - modelscope - INFO - epoch [1][3445/4953]\tlr: 2.949e-05, memory: 9082, loss: 1.7650\n", + "2023-07-02 19:10:18,583 - modelscope - INFO - epoch [1][3450/4953]\tlr: 2.937e-05, memory: 9082, loss: 1.0846\n", + "2023-07-02 19:10:24,668 - modelscope - INFO - epoch [1][3455/4953]\tlr: 2.926e-05, memory: 9082, loss: 0.6735\n", + "2023-07-02 19:10:29,335 - modelscope - INFO - epoch [1][3460/4953]\tlr: 2.914e-05, memory: 9082, loss: 1.6277\n", + "2023-07-02 19:10:36,188 - modelscope - INFO - epoch [1][3465/4953]\tlr: 2.902e-05, memory: 9082, loss: 0.5597\n", + "2023-07-02 19:10:40,421 - modelscope - INFO - epoch [1][3470/4953]\tlr: 2.891e-05, memory: 9082, loss: 1.6338\n", + "2023-07-02 19:10:45,436 - modelscope - INFO - epoch [1][3475/4953]\tlr: 2.879e-05, memory: 9082, loss: 1.2394\n", + "2023-07-02 19:10:51,181 - modelscope - INFO - epoch [1][3480/4953]\tlr: 2.867e-05, memory: 9082, loss: 1.4753\n", + "2023-07-02 19:10:57,524 - modelscope - INFO - epoch [1][3485/4953]\tlr: 2.856e-05, memory: 9082, loss: 0.2870\n", + "2023-07-02 19:11:04,534 - modelscope - INFO - epoch [1][3490/4953]\tlr: 2.844e-05, memory: 9082, loss: 1.1145\n", + "2023-07-02 19:11:09,939 - modelscope - INFO - epoch [1][3495/4953]\tlr: 2.833e-05, memory: 9082, loss: 1.5525\n", + "2023-07-02 19:11:16,051 - modelscope - INFO - epoch [1][3500/4953]\tlr: 2.821e-05, memory: 9082, loss: 0.9821\n", + "2023-07-02 19:11:21,112 - modelscope - INFO - epoch [1][3505/4953]\tlr: 2.810e-05, memory: 9082, loss: 0.5899\n", + "2023-07-02 19:11:26,462 - modelscope - INFO - epoch [1][3510/4953]\tlr: 2.798e-05, memory: 9082, loss: 1.0081\n", + "2023-07-02 19:11:31,458 - modelscope - INFO - epoch [1][3515/4953]\tlr: 2.787e-05, memory: 9082, loss: 1.9700\n", + "2023-07-02 19:11:36,854 - modelscope - INFO - epoch [1][3520/4953]\tlr: 2.775e-05, memory: 9082, loss: 1.4628\n", + "2023-07-02 19:11:42,492 - modelscope - INFO - epoch [1][3525/4953]\tlr: 2.764e-05, memory: 9082, loss: 2.0672\n", 
+ "2023-07-02 19:11:46,917 - modelscope - INFO - epoch [1][3530/4953]\tlr: 2.753e-05, memory: 9082, loss: 1.2469\n", + "2023-07-02 19:11:51,730 - modelscope - INFO - epoch [1][3535/4953]\tlr: 2.741e-05, memory: 9082, loss: 1.8609\n", + "2023-07-02 19:11:58,366 - modelscope - INFO - epoch [1][3540/4953]\tlr: 2.730e-05, memory: 9082, loss: 1.0629\n", + "2023-07-02 19:12:03,036 - modelscope - INFO - epoch [1][3545/4953]\tlr: 2.719e-05, memory: 9082, loss: 1.9508\n", + "2023-07-02 19:12:07,669 - modelscope - INFO - epoch [1][3550/4953]\tlr: 2.707e-05, memory: 9082, loss: 1.1436\n", + "2023-07-02 19:12:12,567 - modelscope - INFO - epoch [1][3555/4953]\tlr: 2.696e-05, memory: 9082, loss: 1.7292\n", + "2023-07-02 19:12:18,906 - modelscope - INFO - epoch [1][3560/4953]\tlr: 2.685e-05, memory: 9082, loss: 1.4152\n", + "2023-07-02 19:12:27,058 - modelscope - INFO - epoch [1][3565/4953]\tlr: 2.674e-05, memory: 9082, loss: 1.5086\n", + "2023-07-02 19:12:34,096 - modelscope - INFO - epoch [1][3570/4953]\tlr: 2.663e-05, memory: 9082, loss: 0.4786\n", + "2023-07-02 19:12:40,666 - modelscope - INFO - epoch [1][3575/4953]\tlr: 2.652e-05, memory: 9082, loss: 1.7496\n", + "2023-07-02 19:12:47,997 - modelscope - INFO - epoch [1][3580/4953]\tlr: 2.641e-05, memory: 9082, loss: 1.0977\n", + "2023-07-02 19:12:51,897 - modelscope - INFO - epoch [1][3585/4953]\tlr: 2.630e-05, memory: 9082, loss: 1.6832\n", + "2023-07-02 19:12:59,020 - modelscope - INFO - epoch [1][3590/4953]\tlr: 2.619e-05, memory: 9082, loss: 0.4163\n", + "2023-07-02 19:13:07,038 - modelscope - INFO - epoch [1][3595/4953]\tlr: 2.608e-05, memory: 9082, loss: 0.7688\n", + "2023-07-02 19:13:13,293 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.05it/s]\n", + "2023-07-02 19:15:28,735 - modelscope - INFO - Saving checkpoint at 3600 iter\n", + "2023-07-02 19:15:28,776 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter3400_acc0.8089956045150757\n", + "2023-07-02 19:15:28,780 - modelscope - INFO - Saving checkpoint at 3600 iter\n", + "2023-07-02 19:15:28,819 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_3400\n", + "2023-07-02 19:15:28,824 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8097, evaluation/loss: 1.2494, loss: 0.8758\n", + "2023-07-02 19:15:35,336 - modelscope - INFO - epoch [1][3605/4953]\tlr: 2.586e-05, memory: 9082, loss: 0.5239\n", + "2023-07-02 19:15:41,849 - modelscope - INFO - epoch [1][3610/4953]\tlr: 2.575e-05, memory: 9082, loss: 1.5448\n", + "2023-07-02 19:15:46,600 - modelscope - INFO - epoch [1][3615/4953]\tlr: 2.564e-05, memory: 9082, loss: 1.2828\n", + "2023-07-02 19:15:53,236 - modelscope - INFO - epoch [1][3620/4953]\tlr: 2.553e-05, memory: 9082, loss: 1.3886\n", + "2023-07-02 19:15:59,060 - modelscope - INFO - epoch [1][3625/4953]\tlr: 2.542e-05, memory: 9082, loss: 1.2750\n", + "2023-07-02 19:16:04,370 - modelscope - INFO - epoch [1][3630/4953]\tlr: 2.532e-05, memory: 9082, loss: 1.0339\n", + "2023-07-02 19:16:09,908 - modelscope - INFO - epoch [1][3635/4953]\tlr: 2.521e-05, memory: 9082, loss: 1.6308\n", + "2023-07-02 19:16:16,808 - modelscope - INFO - epoch [1][3640/4953]\tlr: 2.510e-05, memory: 9082, loss: 1.2590\n", + "2023-07-02 19:16:22,072 - modelscope - INFO - epoch [1][3645/4953]\tlr: 2.500e-05, memory: 9082, loss: 2.3364\n", + "2023-07-02 19:16:29,035 - modelscope - INFO - epoch [1][3650/4953]\tlr: 2.489e-05, memory: 9082, loss: 1.1231\n", + "2023-07-02 19:16:35,184 - modelscope - INFO - epoch [1][3655/4953]\tlr: 2.478e-05, memory: 9082, loss: 0.8313\n", + "2023-07-02 19:16:41,731 - modelscope - INFO - epoch [1][3660/4953]\tlr: 2.468e-05, memory: 9082, loss: 1.2649\n", + "2023-07-02 19:16:47,773 - modelscope - INFO - epoch [1][3665/4953]\tlr: 2.457e-05, memory: 9082, loss: 
0.1984\n", + "2023-07-02 19:16:53,645 - modelscope - INFO - epoch [1][3670/4953]\tlr: 2.447e-05, memory: 9082, loss: 1.2534\n", + "2023-07-02 19:16:58,300 - modelscope - INFO - epoch [1][3675/4953]\tlr: 2.436e-05, memory: 9082, loss: 1.1865\n", + "2023-07-02 19:17:02,935 - modelscope - INFO - epoch [1][3680/4953]\tlr: 2.426e-05, memory: 9082, loss: 1.0458\n", + "2023-07-02 19:17:10,508 - modelscope - INFO - epoch [1][3685/4953]\tlr: 2.415e-05, memory: 9082, loss: 1.4961\n", + "2023-07-02 19:17:15,416 - modelscope - INFO - epoch [1][3690/4953]\tlr: 2.405e-05, memory: 9082, loss: 1.9992\n", + "2023-07-02 19:17:21,634 - modelscope - INFO - epoch [1][3695/4953]\tlr: 2.394e-05, memory: 9082, loss: 1.0555\n", + "2023-07-02 19:17:25,173 - modelscope - INFO - epoch [1][3700/4953]\tlr: 2.384e-05, memory: 9082, loss: 1.3477\n", + "2023-07-02 19:17:31,506 - modelscope - INFO - epoch [1][3705/4953]\tlr: 2.374e-05, memory: 9082, loss: 1.4563\n", + "2023-07-02 19:17:37,274 - modelscope - INFO - epoch [1][3710/4953]\tlr: 2.364e-05, memory: 9082, loss: 1.0638\n", + "2023-07-02 19:17:42,368 - modelscope - INFO - epoch [1][3715/4953]\tlr: 2.353e-05, memory: 9082, loss: 1.0961\n", + "2023-07-02 19:17:48,384 - modelscope - INFO - epoch [1][3720/4953]\tlr: 2.343e-05, memory: 9082, loss: 0.6570\n", + "2023-07-02 19:17:54,584 - modelscope - INFO - epoch [1][3725/4953]\tlr: 2.333e-05, memory: 9082, loss: 1.4391\n", + "2023-07-02 19:18:00,199 - modelscope - INFO - epoch [1][3730/4953]\tlr: 2.323e-05, memory: 9082, loss: 1.0986\n", + "2023-07-02 19:18:06,613 - modelscope - INFO - epoch [1][3735/4953]\tlr: 2.313e-05, memory: 9082, loss: 1.2259\n", + "2023-07-02 19:18:11,954 - modelscope - INFO - epoch [1][3740/4953]\tlr: 2.303e-05, memory: 9082, loss: 1.2266\n", + "2023-07-02 19:18:19,245 - modelscope - INFO - epoch [1][3745/4953]\tlr: 2.293e-05, memory: 9082, loss: 0.8633\n", + "2023-07-02 19:18:24,296 - modelscope - INFO - epoch [1][3750/4953]\tlr: 2.283e-05, memory: 9082, loss: 1.2285\n", 
+ "2023-07-02 19:18:31,793 - modelscope - INFO - epoch [1][3755/4953]\tlr: 2.273e-05, memory: 9082, loss: 1.7500\n", + "2023-07-02 19:18:37,572 - modelscope - INFO - epoch [1][3760/4953]\tlr: 2.263e-05, memory: 9082, loss: 0.6735\n", + "2023-07-02 19:18:44,200 - modelscope - INFO - epoch [1][3765/4953]\tlr: 2.253e-05, memory: 9082, loss: 1.8328\n", + "2023-07-02 19:18:49,475 - modelscope - INFO - epoch [1][3770/4953]\tlr: 2.243e-05, memory: 9082, loss: 1.3798\n", + "2023-07-02 19:18:53,690 - modelscope - INFO - epoch [1][3775/4953]\tlr: 2.233e-05, memory: 9082, loss: 2.3062\n", + "2023-07-02 19:18:58,638 - modelscope - INFO - epoch [1][3780/4953]\tlr: 2.223e-05, memory: 9082, loss: 1.1617\n", + "2023-07-02 19:19:05,096 - modelscope - INFO - epoch [1][3785/4953]\tlr: 2.213e-05, memory: 9082, loss: 1.7489\n", + "2023-07-02 19:19:12,468 - modelscope - INFO - epoch [1][3790/4953]\tlr: 2.204e-05, memory: 9082, loss: 1.1701\n", + "2023-07-02 19:19:22,097 - modelscope - INFO - epoch [1][3795/4953]\tlr: 2.194e-05, memory: 9082, loss: 0.3038\n", + "2023-07-02 19:19:29,069 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:21:44,819 - modelscope - INFO - Saving checkpoint at 3800 iter\n", + "2023-07-02 19:21:44,859 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter3600_acc0.8096736669540405\n", + "2023-07-02 19:21:44,863 - modelscope - INFO - Saving checkpoint at 3800 iter\n", + "2023-07-02 19:21:44,902 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_3600\n", + "2023-07-02 19:21:44,907 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8099, evaluation/loss: 1.2569, loss: 1.0828\n", + "2023-07-02 19:21:50,359 - modelscope - INFO - epoch [1][3805/4953]\tlr: 2.174e-05, memory: 9082, loss: 
1.3383\n", + "2023-07-02 19:21:56,101 - modelscope - INFO - epoch [1][3810/4953]\tlr: 2.165e-05, memory: 9082, loss: 1.3833\n", + "2023-07-02 19:22:02,037 - modelscope - INFO - epoch [1][3815/4953]\tlr: 2.155e-05, memory: 9082, loss: 1.1005\n", + "2023-07-02 19:22:07,031 - modelscope - INFO - epoch [1][3820/4953]\tlr: 2.146e-05, memory: 9082, loss: 1.6941\n", + "2023-07-02 19:22:11,810 - modelscope - INFO - epoch [1][3825/4953]\tlr: 2.136e-05, memory: 9082, loss: 1.8938\n", + "2023-07-02 19:22:16,752 - modelscope - INFO - epoch [1][3830/4953]\tlr: 2.127e-05, memory: 9082, loss: 1.6121\n", + "2023-07-02 19:22:25,240 - modelscope - INFO - epoch [1][3835/4953]\tlr: 2.117e-05, memory: 9082, loss: 0.7009\n", + "2023-07-02 19:22:31,231 - modelscope - INFO - epoch [1][3840/4953]\tlr: 2.108e-05, memory: 9082, loss: 1.8273\n", + "2023-07-02 19:22:37,939 - modelscope - INFO - epoch [1][3845/4953]\tlr: 2.098e-05, memory: 9082, loss: 0.8680\n", + "2023-07-02 19:22:43,021 - modelscope - INFO - epoch [1][3850/4953]\tlr: 2.089e-05, memory: 9082, loss: 1.5473\n", + "2023-07-02 19:22:49,156 - modelscope - INFO - epoch [1][3855/4953]\tlr: 2.080e-05, memory: 9082, loss: 1.1435\n", + "2023-07-02 19:22:53,445 - modelscope - INFO - epoch [1][3860/4953]\tlr: 2.071e-05, memory: 9082, loss: 1.1194\n", + "2023-07-02 19:22:59,485 - modelscope - INFO - epoch [1][3865/4953]\tlr: 2.061e-05, memory: 9082, loss: 1.0640\n", + "2023-07-02 19:23:03,673 - modelscope - INFO - epoch [1][3870/4953]\tlr: 2.052e-05, memory: 9082, loss: 1.0879\n", + "2023-07-02 19:23:08,721 - modelscope - INFO - epoch [1][3875/4953]\tlr: 2.043e-05, memory: 9082, loss: 0.9207\n", + "2023-07-02 19:23:14,908 - modelscope - INFO - epoch [1][3880/4953]\tlr: 2.034e-05, memory: 9082, loss: 0.5737\n", + "2023-07-02 19:23:21,843 - modelscope - INFO - epoch [1][3885/4953]\tlr: 2.025e-05, memory: 9082, loss: 1.3052\n", + "2023-07-02 19:23:30,760 - modelscope - INFO - epoch [1][3890/4953]\tlr: 2.016e-05, memory: 9082, loss: 1.1666\n", 
+ "2023-07-02 19:23:36,181 - modelscope - INFO - epoch [1][3895/4953]\tlr: 2.007e-05, memory: 9082, loss: 1.7224\n", + "2023-07-02 19:23:40,094 - modelscope - INFO - epoch [1][3900/4953]\tlr: 1.998e-05, memory: 9082, loss: 1.0042\n", + "2023-07-02 19:23:47,764 - modelscope - INFO - epoch [1][3905/4953]\tlr: 1.989e-05, memory: 9082, loss: 1.2044\n", + "2023-07-02 19:23:54,075 - modelscope - INFO - epoch [1][3910/4953]\tlr: 1.980e-05, memory: 9082, loss: 1.3367\n", + "2023-07-02 19:24:00,699 - modelscope - INFO - epoch [1][3915/4953]\tlr: 1.971e-05, memory: 9082, loss: 1.1395\n", + "2023-07-02 19:24:06,413 - modelscope - INFO - epoch [1][3920/4953]\tlr: 1.962e-05, memory: 9082, loss: 1.1899\n", + "2023-07-02 19:24:12,663 - modelscope - INFO - epoch [1][3925/4953]\tlr: 1.953e-05, memory: 9082, loss: 1.0320\n", + "2023-07-02 19:24:18,897 - modelscope - INFO - epoch [1][3930/4953]\tlr: 1.944e-05, memory: 9082, loss: 2.0555\n", + "2023-07-02 19:24:25,760 - modelscope - INFO - epoch [1][3935/4953]\tlr: 1.936e-05, memory: 9082, loss: 1.3466\n", + "2023-07-02 19:24:29,617 - modelscope - INFO - epoch [1][3940/4953]\tlr: 1.927e-05, memory: 9082, loss: 1.7797\n", + "2023-07-02 19:24:34,498 - modelscope - INFO - epoch [1][3945/4953]\tlr: 1.918e-05, memory: 9082, loss: 0.6168\n", + "2023-07-02 19:24:39,457 - modelscope - INFO - epoch [1][3950/4953]\tlr: 1.910e-05, memory: 9082, loss: 1.1122\n", + "2023-07-02 19:24:48,913 - modelscope - INFO - epoch [1][3955/4953]\tlr: 1.901e-05, memory: 9082, loss: 0.9353\n", + "2023-07-02 19:24:55,564 - modelscope - INFO - epoch [1][3960/4953]\tlr: 1.892e-05, memory: 9082, loss: 0.9599\n", + "2023-07-02 19:25:00,536 - modelscope - INFO - epoch [1][3965/4953]\tlr: 1.884e-05, memory: 9082, loss: 1.4582\n", + "2023-07-02 19:25:07,894 - modelscope - INFO - epoch [1][3970/4953]\tlr: 1.875e-05, memory: 9082, loss: 1.0347\n", + "2023-07-02 19:25:11,877 - modelscope - INFO - epoch [1][3975/4953]\tlr: 1.867e-05, memory: 9082, loss: 1.9000\n", + 
"2023-07-02 19:25:18,225 - modelscope - INFO - epoch [1][3980/4953]\tlr: 1.858e-05, memory: 9082, loss: 1.4125\n", + "2023-07-02 19:25:22,417 - modelscope - INFO - epoch [1][3985/4953]\tlr: 1.850e-05, memory: 9082, loss: 1.8959\n", + "2023-07-02 19:25:27,100 - modelscope - INFO - epoch [1][3990/4953]\tlr: 1.842e-05, memory: 9082, loss: 1.4008\n", + "2023-07-02 19:25:31,958 - modelscope - INFO - epoch [1][3995/4953]\tlr: 1.833e-05, memory: 9082, loss: 0.8114\n", + "2023-07-02 19:25:37,042 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:27:53,013 - modelscope - INFO - Saving checkpoint at 4000 iter\n", + "2023-07-02 19:27:53,054 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_3800\n", + "2023-07-02 19:27:53,059 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8099, evaluation/loss: 1.2522, loss: 1.1221\n", + "2023-07-02 19:27:58,830 - modelscope - INFO - epoch [1][4005/4953]\tlr: 1.817e-05, memory: 9082, loss: 1.9461\n", + "2023-07-02 19:28:04,138 - modelscope - INFO - epoch [1][4010/4953]\tlr: 1.809e-05, memory: 9082, loss: 1.5629\n", + "2023-07-02 19:28:09,984 - modelscope - INFO - epoch [1][4015/4953]\tlr: 1.801e-05, memory: 9082, loss: 0.7642\n", + "2023-07-02 19:28:13,463 - modelscope - INFO - epoch [1][4020/4953]\tlr: 1.792e-05, memory: 9082, loss: 2.2344\n", + "2023-07-02 19:28:20,355 - modelscope - INFO - epoch [1][4025/4953]\tlr: 1.784e-05, memory: 9082, loss: 0.9662\n", + "2023-07-02 19:28:26,276 - modelscope - INFO - epoch [1][4030/4953]\tlr: 1.776e-05, memory: 9082, loss: 1.0925\n", + "2023-07-02 19:28:32,273 - modelscope - INFO - epoch [1][4035/4953]\tlr: 1.768e-05, memory: 9082, loss: 1.4812\n", + "2023-07-02 19:28:38,431 - modelscope - INFO - epoch [1][4040/4953]\tlr: 1.760e-05, memory: 9082, loss: 2.1295\n", + "2023-07-02 
19:28:43,468 - modelscope - INFO - epoch [1][4045/4953]\tlr: 1.752e-05, memory: 9082, loss: 1.6391\n", + "2023-07-02 19:28:51,453 - modelscope - INFO - epoch [1][4050/4953]\tlr: 1.744e-05, memory: 9082, loss: 1.4901\n", + "2023-07-02 19:28:57,688 - modelscope - INFO - epoch [1][4055/4953]\tlr: 1.737e-05, memory: 9082, loss: 1.2383\n", + "2023-07-02 19:29:01,776 - modelscope - INFO - epoch [1][4060/4953]\tlr: 1.729e-05, memory: 9082, loss: 1.4404\n", + "2023-07-02 19:29:07,738 - modelscope - INFO - epoch [1][4065/4953]\tlr: 1.721e-05, memory: 9082, loss: 0.5664\n", + "2023-07-02 19:29:12,827 - modelscope - INFO - epoch [1][4070/4953]\tlr: 1.713e-05, memory: 9082, loss: 1.4554\n", + "2023-07-02 19:29:19,309 - modelscope - INFO - epoch [1][4075/4953]\tlr: 1.706e-05, memory: 9082, loss: 0.8976\n", + "2023-07-02 19:29:23,218 - modelscope - INFO - epoch [1][4080/4953]\tlr: 1.698e-05, memory: 9082, loss: 1.0562\n", + "2023-07-02 19:29:32,543 - modelscope - INFO - epoch [1][4085/4953]\tlr: 1.690e-05, memory: 9082, loss: 0.9514\n", + "2023-07-02 19:29:39,285 - modelscope - INFO - epoch [1][4090/4953]\tlr: 1.683e-05, memory: 9082, loss: 0.4714\n", + "2023-07-02 19:29:44,617 - modelscope - INFO - epoch [1][4095/4953]\tlr: 1.675e-05, memory: 9082, loss: 1.2211\n", + "2023-07-02 19:29:49,645 - modelscope - INFO - epoch [1][4100/4953]\tlr: 1.668e-05, memory: 9082, loss: 2.0924\n", + "2023-07-02 19:29:55,362 - modelscope - INFO - epoch [1][4105/4953]\tlr: 1.660e-05, memory: 9082, loss: 2.2705\n", + "2023-07-02 19:30:01,166 - modelscope - INFO - epoch [1][4110/4953]\tlr: 1.653e-05, memory: 9082, loss: 1.6148\n", + "2023-07-02 19:30:08,386 - modelscope - INFO - epoch [1][4115/4953]\tlr: 1.645e-05, memory: 9082, loss: 0.4558\n", + "2023-07-02 19:30:15,808 - modelscope - INFO - epoch [1][4120/4953]\tlr: 1.638e-05, memory: 9082, loss: 1.3715\n", + "2023-07-02 19:30:21,186 - modelscope - INFO - epoch [1][4125/4953]\tlr: 1.631e-05, memory: 9082, loss: 1.4497\n", + "2023-07-02 
19:30:26,639 - modelscope - INFO - epoch [1][4130/4953]\tlr: 1.623e-05, memory: 9082, loss: 1.0819\n", + "2023-07-02 19:30:32,756 - modelscope - INFO - epoch [1][4135/4953]\tlr: 1.616e-05, memory: 9082, loss: 0.5440\n", + "2023-07-02 19:30:39,286 - modelscope - INFO - epoch [1][4140/4953]\tlr: 1.609e-05, memory: 9082, loss: 1.7625\n", + "2023-07-02 19:30:45,148 - modelscope - INFO - epoch [1][4145/4953]\tlr: 1.602e-05, memory: 9082, loss: 1.4341\n", + "2023-07-02 19:30:49,574 - modelscope - INFO - epoch [1][4150/4953]\tlr: 1.595e-05, memory: 9082, loss: 1.2615\n", + "2023-07-02 19:30:56,310 - modelscope - INFO - epoch [1][4155/4953]\tlr: 1.588e-05, memory: 9082, loss: 1.1409\n", + "2023-07-02 19:31:00,158 - modelscope - INFO - epoch [1][4160/4953]\tlr: 1.580e-05, memory: 9082, loss: 1.3609\n", + "2023-07-02 19:31:06,731 - modelscope - INFO - epoch [1][4165/4953]\tlr: 1.573e-05, memory: 9082, loss: 1.5992\n", + "2023-07-02 19:31:10,582 - modelscope - INFO - epoch [1][4170/4953]\tlr: 1.566e-05, memory: 9082, loss: 1.2750\n", + "2023-07-02 19:31:17,613 - modelscope - INFO - epoch [1][4175/4953]\tlr: 1.560e-05, memory: 9082, loss: 1.5521\n", + "2023-07-02 19:31:21,814 - modelscope - INFO - epoch [1][4180/4953]\tlr: 1.553e-05, memory: 9082, loss: 2.2871\n", + "2023-07-02 19:31:28,108 - modelscope - INFO - epoch [1][4185/4953]\tlr: 1.546e-05, memory: 9082, loss: 1.4199\n", + "2023-07-02 19:31:31,428 - modelscope - INFO - epoch [1][4190/4953]\tlr: 1.539e-05, memory: 9082, loss: 1.6801\n", + "2023-07-02 19:31:36,958 - modelscope - INFO - epoch [1][4195/4953]\tlr: 1.532e-05, memory: 9082, loss: 1.2423\n", + "2023-07-02 19:31:43,408 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:16<00:00, 2.04it/s]\n", + "2023-07-02 19:33:59,477 - modelscope - INFO - Saving checkpoint at 4200 iter\n", + "2023-07-02 19:33:59,518 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_4000\n", + "2023-07-02 19:33:59,522 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8095, evaluation/loss: 1.2465, loss: 1.5236\n", + "2023-07-02 19:34:03,568 - modelscope - INFO - epoch [1][4205/4953]\tlr: 1.519e-05, memory: 9082, loss: 1.0014\n", + "2023-07-02 19:34:10,609 - modelscope - INFO - epoch [1][4210/4953]\tlr: 1.512e-05, memory: 9082, loss: 0.5158\n", + "2023-07-02 19:34:17,669 - modelscope - INFO - epoch [1][4215/4953]\tlr: 1.506e-05, memory: 9082, loss: 1.1637\n", + "2023-07-02 19:34:24,176 - modelscope - INFO - epoch [1][4220/4953]\tlr: 1.499e-05, memory: 9082, loss: 0.9216\n", + "2023-07-02 19:34:30,303 - modelscope - INFO - epoch [1][4225/4953]\tlr: 1.492e-05, memory: 9082, loss: 0.5468\n", + "2023-07-02 19:34:36,913 - modelscope - INFO - epoch [1][4230/4953]\tlr: 1.486e-05, memory: 9082, loss: 1.0229\n", + "2023-07-02 19:34:42,449 - modelscope - INFO - epoch [1][4235/4953]\tlr: 1.480e-05, memory: 9082, loss: 0.8887\n", + "2023-07-02 19:34:51,187 - modelscope - INFO - epoch [1][4240/4953]\tlr: 1.473e-05, memory: 9082, loss: 1.1398\n", + "2023-07-02 19:34:55,850 - modelscope - INFO - epoch [1][4245/4953]\tlr: 1.467e-05, memory: 9082, loss: 1.8500\n", + "2023-07-02 19:35:01,653 - modelscope - INFO - epoch [1][4250/4953]\tlr: 1.460e-05, memory: 9082, loss: 1.2860\n", + "2023-07-02 19:35:07,538 - modelscope - INFO - epoch [1][4255/4953]\tlr: 1.454e-05, memory: 9082, loss: 0.9241\n", + "2023-07-02 19:35:10,832 - modelscope - INFO - epoch [1][4260/4953]\tlr: 1.448e-05, memory: 9082, loss: 1.5016\n", + "2023-07-02 19:35:15,940 - modelscope - INFO - epoch [1][4265/4953]\tlr: 1.442e-05, memory: 9082, loss: 1.1250\n", + "2023-07-02 19:35:21,080 - modelscope - INFO - epoch [1][4270/4953]\tlr: 1.436e-05, memory: 9082, loss: 1.0505\n", + "2023-07-02 19:35:26,817 - modelscope - INFO - epoch [1][4275/4953]\tlr: 1.429e-05, memory: 9082, loss: 1.0356\n", + "2023-07-02 
19:35:36,012 - modelscope - INFO - epoch [1][4280/4953]\tlr: 1.423e-05, memory: 9082, loss: 0.9335\n", + "2023-07-02 19:35:42,237 - modelscope - INFO - epoch [1][4285/4953]\tlr: 1.417e-05, memory: 9082, loss: 0.5855\n", + "2023-07-02 19:35:46,223 - modelscope - INFO - epoch [1][4290/4953]\tlr: 1.411e-05, memory: 9082, loss: 1.2945\n", + "2023-07-02 19:35:52,610 - modelscope - INFO - epoch [1][4295/4953]\tlr: 1.405e-05, memory: 9082, loss: 0.9766\n", + "2023-07-02 19:35:59,125 - modelscope - INFO - epoch [1][4300/4953]\tlr: 1.400e-05, memory: 9082, loss: 1.6789\n", + "2023-07-02 19:36:03,214 - modelscope - INFO - epoch [1][4305/4953]\tlr: 1.394e-05, memory: 9082, loss: 1.5262\n", + "2023-07-02 19:36:08,897 - modelscope - INFO - epoch [1][4310/4953]\tlr: 1.388e-05, memory: 9082, loss: 1.0785\n", + "2023-07-02 19:36:15,128 - modelscope - INFO - epoch [1][4315/4953]\tlr: 1.382e-05, memory: 9082, loss: 0.6479\n", + "2023-07-02 19:36:21,607 - modelscope - INFO - epoch [1][4320/4953]\tlr: 1.376e-05, memory: 9082, loss: 1.8496\n", + "2023-07-02 19:36:29,617 - modelscope - INFO - epoch [1][4325/4953]\tlr: 1.371e-05, memory: 9082, loss: 0.5391\n", + "2023-07-02 19:36:35,101 - modelscope - INFO - epoch [1][4330/4953]\tlr: 1.365e-05, memory: 9082, loss: 1.8141\n", + "2023-07-02 19:36:41,579 - modelscope - INFO - epoch [1][4335/4953]\tlr: 1.359e-05, memory: 9082, loss: 0.6881\n", + "2023-07-02 19:36:48,569 - modelscope - INFO - epoch [1][4340/4953]\tlr: 1.354e-05, memory: 9082, loss: 0.6677\n", + "2023-07-02 19:36:55,362 - modelscope - INFO - epoch [1][4345/4953]\tlr: 1.348e-05, memory: 9082, loss: 0.7067\n", + "2023-07-02 19:37:01,199 - modelscope - INFO - epoch [1][4350/4953]\tlr: 1.343e-05, memory: 9082, loss: 1.3036\n", + "2023-07-02 19:37:06,752 - modelscope - INFO - epoch [1][4355/4953]\tlr: 1.337e-05, memory: 9082, loss: 0.5832\n", + "2023-07-02 19:37:11,013 - modelscope - INFO - epoch [1][4360/4953]\tlr: 1.332e-05, memory: 9082, loss: 0.9969\n", + "2023-07-02 
19:37:15,110 - modelscope - INFO - epoch [1][4365/4953]\tlr: 1.326e-05, memory: 9082, loss: 1.6590\n", + "2023-07-02 19:37:22,411 - modelscope - INFO - epoch [1][4370/4953]\tlr: 1.321e-05, memory: 9082, loss: 0.8229\n", + "2023-07-02 19:37:29,106 - modelscope - INFO - epoch [1][4375/4953]\tlr: 1.316e-05, memory: 9082, loss: 1.3289\n", + "2023-07-02 19:37:33,326 - modelscope - INFO - epoch [1][4380/4953]\tlr: 1.311e-05, memory: 9082, loss: 1.0410\n", + "2023-07-02 19:37:38,513 - modelscope - INFO - epoch [1][4385/4953]\tlr: 1.305e-05, memory: 9082, loss: 0.6374\n", + "2023-07-02 19:37:42,903 - modelscope - INFO - epoch [1][4390/4953]\tlr: 1.300e-05, memory: 9082, loss: 2.6094\n", + "2023-07-02 19:37:46,474 - modelscope - INFO - epoch [1][4395/4953]\tlr: 1.295e-05, memory: 9082, loss: 1.7327\n", + "2023-07-02 19:37:53,357 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:16<00:00, 2.03it/s]\n", + "2023-07-02 19:40:09,626 - modelscope - INFO - Saving checkpoint at 4400 iter\n", + "2023-07-02 19:40:09,667 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter3800_acc0.8098996877670288\n", + "2023-07-02 19:40:09,672 - modelscope - INFO - Saving checkpoint at 4400 iter\n", + "2023-07-02 19:40:09,712 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_4200\n", + "2023-07-02 19:40:09,717 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8100, evaluation/loss: 1.2437, loss: 1.0930\n", + "2023-07-02 19:40:15,785 - modelscope - INFO - epoch [1][4405/4953]\tlr: 1.285e-05, memory: 9082, loss: 0.5974\n", + "2023-07-02 19:40:23,144 - modelscope - INFO - epoch [1][4410/4953]\tlr: 1.280e-05, memory: 9082, loss: 1.0870\n", + "2023-07-02 19:40:28,966 - modelscope - INFO - epoch [1][4415/4953]\tlr: 1.275e-05, memory: 9082, loss: 1.0536\n", + 
"2023-07-02 19:40:35,092 - modelscope - INFO - epoch [1][4420/4953]\tlr: 1.270e-05, memory: 9082, loss: 1.4613\n", + "2023-07-02 19:40:41,826 - modelscope - INFO - epoch [1][4425/4953]\tlr: 1.265e-05, memory: 9082, loss: 0.8297\n", + "2023-07-02 19:40:46,568 - modelscope - INFO - epoch [1][4430/4953]\tlr: 1.261e-05, memory: 9082, loss: 2.0414\n", + "2023-07-02 19:40:53,278 - modelscope - INFO - epoch [1][4435/4953]\tlr: 1.256e-05, memory: 9082, loss: 1.1800\n", + "2023-07-02 19:40:58,208 - modelscope - INFO - epoch [1][4440/4953]\tlr: 1.251e-05, memory: 9082, loss: 0.8595\n", + "2023-07-02 19:41:04,905 - modelscope - INFO - epoch [1][4445/4953]\tlr: 1.246e-05, memory: 9082, loss: 0.0801\n", + "2023-07-02 19:41:08,125 - modelscope - INFO - epoch [1][4450/4953]\tlr: 1.242e-05, memory: 9082, loss: 1.7031\n", + "2023-07-02 19:41:13,374 - modelscope - INFO - epoch [1][4455/4953]\tlr: 1.237e-05, memory: 9082, loss: 1.8381\n", + "2023-07-02 19:41:17,994 - modelscope - INFO - epoch [1][4460/4953]\tlr: 1.233e-05, memory: 9082, loss: 1.1123\n", + "2023-07-02 19:41:21,181 - modelscope - INFO - epoch [1][4465/4953]\tlr: 1.228e-05, memory: 9082, loss: 2.0922\n", + "2023-07-02 19:41:27,252 - modelscope - INFO - epoch [1][4470/4953]\tlr: 1.224e-05, memory: 9082, loss: 0.8977\n", + "2023-07-02 19:41:31,600 - modelscope - INFO - epoch [1][4475/4953]\tlr: 1.219e-05, memory: 9082, loss: 0.9191\n", + "2023-07-02 19:41:36,554 - modelscope - INFO - epoch [1][4480/4953]\tlr: 1.215e-05, memory: 9082, loss: 1.9734\n", + "2023-07-02 19:41:42,916 - modelscope - INFO - epoch [1][4485/4953]\tlr: 1.210e-05, memory: 9082, loss: 0.7236\n", + "2023-07-02 19:41:49,532 - modelscope - INFO - epoch [1][4490/4953]\tlr: 1.206e-05, memory: 9082, loss: 1.5750\n", + "2023-07-02 19:41:55,282 - modelscope - INFO - epoch [1][4495/4953]\tlr: 1.202e-05, memory: 9082, loss: 0.9306\n", + "2023-07-02 19:42:01,377 - modelscope - INFO - epoch [1][4500/4953]\tlr: 1.198e-05, memory: 9082, loss: 1.9801\n", + 
"2023-07-02 19:42:05,379 - modelscope - INFO - epoch [1][4505/4953]\tlr: 1.193e-05, memory: 9082, loss: 2.3320\n", + "2023-07-02 19:42:11,849 - modelscope - INFO - epoch [1][4510/4953]\tlr: 1.189e-05, memory: 9082, loss: 1.3637\n", + "2023-07-02 19:42:18,695 - modelscope - INFO - epoch [1][4515/4953]\tlr: 1.185e-05, memory: 9082, loss: 1.5328\n", + "2023-07-02 19:42:26,045 - modelscope - INFO - epoch [1][4520/4953]\tlr: 1.181e-05, memory: 9082, loss: 1.0721\n", + "2023-07-02 19:42:32,060 - modelscope - INFO - epoch [1][4525/4953]\tlr: 1.177e-05, memory: 9082, loss: 1.1867\n", + "2023-07-02 19:42:38,307 - modelscope - INFO - epoch [1][4530/4953]\tlr: 1.173e-05, memory: 9082, loss: 1.3500\n", + "2023-07-02 19:42:46,137 - modelscope - INFO - epoch [1][4535/4953]\tlr: 1.169e-05, memory: 9082, loss: 0.7637\n", + "2023-07-02 19:42:52,814 - modelscope - INFO - epoch [1][4540/4953]\tlr: 1.165e-05, memory: 9082, loss: 0.8551\n", + "2023-07-02 19:43:00,111 - modelscope - INFO - epoch [1][4545/4953]\tlr: 1.162e-05, memory: 9082, loss: 1.3265\n", + "2023-07-02 19:43:06,301 - modelscope - INFO - epoch [1][4550/4953]\tlr: 1.158e-05, memory: 9082, loss: 0.6115\n", + "2023-07-02 19:43:10,926 - modelscope - INFO - epoch [1][4555/4953]\tlr: 1.154e-05, memory: 9082, loss: 1.8475\n", + "2023-07-02 19:43:17,954 - modelscope - INFO - epoch [1][4560/4953]\tlr: 1.150e-05, memory: 9082, loss: 1.3332\n", + "2023-07-02 19:43:22,493 - modelscope - INFO - epoch [1][4565/4953]\tlr: 1.147e-05, memory: 9082, loss: 1.9062\n", + "2023-07-02 19:43:28,213 - modelscope - INFO - epoch [1][4570/4953]\tlr: 1.143e-05, memory: 9082, loss: 0.6227\n", + "2023-07-02 19:43:34,862 - modelscope - INFO - epoch [1][4575/4953]\tlr: 1.140e-05, memory: 9082, loss: 0.7937\n", + "2023-07-02 19:43:40,905 - modelscope - INFO - epoch [1][4580/4953]\tlr: 1.136e-05, memory: 9082, loss: 1.4903\n", + "2023-07-02 19:43:47,007 - modelscope - INFO - epoch [1][4585/4953]\tlr: 1.133e-05, memory: 9082, loss: 1.0449\n", + 
"2023-07-02 19:43:52,730 - modelscope - INFO - epoch [1][4590/4953]\tlr: 1.129e-05, memory: 9082, loss: 1.0068\n", + "2023-07-02 19:43:56,715 - modelscope - INFO - epoch [1][4595/4953]\tlr: 1.126e-05, memory: 9082, loss: 1.5157\n", + "2023-07-02 19:44:04,629 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:46:20,481 - modelscope - INFO - Saving checkpoint at 4600 iter\n", + "2023-07-02 19:46:20,521 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_4400\n", + "2023-07-02 19:46:20,526 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8098, evaluation/loss: 1.2390, loss: 1.1334\n", + "2023-07-02 19:46:25,140 - modelscope - INFO - epoch [1][4605/4953]\tlr: 1.119e-05, memory: 9082, loss: 1.6938\n", + "2023-07-02 19:46:30,413 - modelscope - INFO - epoch [1][4610/4953]\tlr: 1.116e-05, memory: 9082, loss: 2.1351\n", + "2023-07-02 19:46:37,216 - modelscope - INFO - epoch [1][4615/4953]\tlr: 1.113e-05, memory: 9082, loss: 0.9270\n", + "2023-07-02 19:46:43,728 - modelscope - INFO - epoch [1][4620/4953]\tlr: 1.110e-05, memory: 9082, loss: 1.1201\n", + "2023-07-02 19:46:50,227 - modelscope - INFO - epoch [1][4625/4953]\tlr: 1.107e-05, memory: 9082, loss: 1.2715\n", + "2023-07-02 19:46:53,772 - modelscope - INFO - epoch [1][4630/4953]\tlr: 1.103e-05, memory: 9082, loss: 1.4461\n", + "2023-07-02 19:46:59,663 - modelscope - INFO - epoch [1][4635/4953]\tlr: 1.100e-05, memory: 9082, loss: 1.2715\n", + "2023-07-02 19:47:06,614 - modelscope - INFO - epoch [1][4640/4953]\tlr: 1.097e-05, memory: 9082, loss: 0.6478\n", + "2023-07-02 19:47:14,999 - modelscope - INFO - epoch [1][4645/4953]\tlr: 1.094e-05, memory: 9082, loss: 1.0031\n", + "2023-07-02 19:47:19,690 - modelscope - INFO - epoch [1][4650/4953]\tlr: 1.092e-05, memory: 9082, loss: 1.0572\n", + "2023-07-02 
19:47:27,827 - modelscope - INFO - epoch [1][4655/4953]\tlr: 1.089e-05, memory: 9082, loss: 0.9459\n", + "2023-07-02 19:47:33,520 - modelscope - INFO - epoch [1][4660/4953]\tlr: 1.086e-05, memory: 9082, loss: 0.9813\n", + "2023-07-02 19:47:39,880 - modelscope - INFO - epoch [1][4665/4953]\tlr: 1.083e-05, memory: 9082, loss: 1.3258\n", + "2023-07-02 19:47:46,513 - modelscope - INFO - epoch [1][4670/4953]\tlr: 1.080e-05, memory: 9082, loss: 1.2884\n", + "2023-07-02 19:47:51,769 - modelscope - INFO - epoch [1][4675/4953]\tlr: 1.078e-05, memory: 9082, loss: 1.6375\n", + "2023-07-02 19:47:57,474 - modelscope - INFO - epoch [1][4680/4953]\tlr: 1.075e-05, memory: 9082, loss: 0.9726\n", + "2023-07-02 19:48:02,354 - modelscope - INFO - epoch [1][4685/4953]\tlr: 1.073e-05, memory: 9082, loss: 1.1402\n", + "2023-07-02 19:48:09,946 - modelscope - INFO - epoch [1][4690/4953]\tlr: 1.070e-05, memory: 9082, loss: 0.9941\n", + "2023-07-02 19:48:16,660 - modelscope - INFO - epoch [1][4695/4953]\tlr: 1.068e-05, memory: 9082, loss: 1.5975\n", + "2023-07-02 19:48:22,892 - modelscope - INFO - epoch [1][4700/4953]\tlr: 1.065e-05, memory: 9082, loss: 0.9816\n", + "2023-07-02 19:48:28,221 - modelscope - INFO - epoch [1][4705/4953]\tlr: 1.063e-05, memory: 9082, loss: 0.9115\n", + "2023-07-02 19:48:35,152 - modelscope - INFO - epoch [1][4710/4953]\tlr: 1.060e-05, memory: 9082, loss: 1.4184\n", + "2023-07-02 19:48:40,666 - modelscope - INFO - epoch [1][4715/4953]\tlr: 1.058e-05, memory: 9082, loss: 1.6391\n", + "2023-07-02 19:48:46,682 - modelscope - INFO - epoch [1][4720/4953]\tlr: 1.056e-05, memory: 9082, loss: 2.1836\n", + "2023-07-02 19:48:53,274 - modelscope - INFO - epoch [1][4725/4953]\tlr: 1.054e-05, memory: 9082, loss: 1.1783\n", + "2023-07-02 19:48:56,851 - modelscope - INFO - epoch [1][4730/4953]\tlr: 1.051e-05, memory: 9082, loss: 1.0398\n", + "2023-07-02 19:49:03,951 - modelscope - INFO - epoch [1][4735/4953]\tlr: 1.049e-05, memory: 9082, loss: 0.4896\n", + "2023-07-02 
19:49:09,418 - modelscope - INFO - epoch [1][4740/4953]\tlr: 1.047e-05, memory: 9082, loss: 0.8757\n", + "2023-07-02 19:49:15,768 - modelscope - INFO - epoch [1][4745/4953]\tlr: 1.045e-05, memory: 9082, loss: 1.5896\n", + "2023-07-02 19:49:21,308 - modelscope - INFO - epoch [1][4750/4953]\tlr: 1.043e-05, memory: 9082, loss: 1.3535\n", + "2023-07-02 19:49:27,455 - modelscope - INFO - epoch [1][4755/4953]\tlr: 1.041e-05, memory: 9082, loss: 1.3389\n", + "2023-07-02 19:49:34,436 - modelscope - INFO - epoch [1][4760/4953]\tlr: 1.039e-05, memory: 9082, loss: 0.6073\n", + "2023-07-02 19:49:42,538 - modelscope - INFO - epoch [1][4765/4953]\tlr: 1.037e-05, memory: 9082, loss: 0.6708\n", + "2023-07-02 19:49:49,238 - modelscope - INFO - epoch [1][4770/4953]\tlr: 1.036e-05, memory: 9082, loss: 0.8630\n", + "2023-07-02 19:49:55,165 - modelscope - INFO - epoch [1][4775/4953]\tlr: 1.034e-05, memory: 9082, loss: 0.7835\n", + "2023-07-02 19:50:01,434 - modelscope - INFO - epoch [1][4780/4953]\tlr: 1.032e-05, memory: 9082, loss: 1.7195\n", + "2023-07-02 19:50:08,788 - modelscope - INFO - epoch [1][4785/4953]\tlr: 1.030e-05, memory: 9082, loss: 1.1434\n", + "2023-07-02 19:50:14,523 - modelscope - INFO - epoch [1][4790/4953]\tlr: 1.029e-05, memory: 9082, loss: 0.6416\n", + "2023-07-02 19:50:21,717 - modelscope - INFO - epoch [1][4795/4953]\tlr: 1.027e-05, memory: 9082, loss: 1.0909\n", + "2023-07-02 19:50:25,524 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 277/277 [02:15<00:00, 2.04it/s]\n", + "2023-07-02 19:52:41,308 - modelscope - INFO - Saving checkpoint at 4800 iter\n", + "2023-07-02 19:52:41,348 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/best_iter4400_acc0.8100214004516602\n", + "2023-07-02 19:52:41,353 - modelscope - INFO - Saving checkpoint at 4800 iter\n", + "2023-07-02 19:52:41,392 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_4600\n", + "2023-07-02 19:52:41,397 - modelscope - INFO - epoch(eval) [1][277]\tmemory: 9082, evaluation/acc: 0.8101, evaluation/loss: 1.2370, loss: 1.1855\n", + "2023-07-02 19:52:47,709 - modelscope - INFO - epoch [1][4805/4953]\tlr: 1.024e-05, memory: 9082, loss: 0.8004\n", + "2023-07-02 19:52:53,162 - modelscope - INFO - epoch [1][4810/4953]\tlr: 1.023e-05, memory: 9082, loss: 1.1193\n", + "2023-07-02 19:53:00,428 - modelscope - INFO - epoch [1][4815/4953]\tlr: 1.021e-05, memory: 9082, loss: 0.8555\n", + "2023-07-02 19:53:03,760 - modelscope - INFO - epoch [1][4820/4953]\tlr: 1.020e-05, memory: 9082, loss: 1.4422\n", + "2023-07-02 19:53:09,302 - modelscope - INFO - epoch [1][4825/4953]\tlr: 1.019e-05, memory: 9082, loss: 1.5247\n", + "2023-07-02 19:53:17,785 - modelscope - INFO - epoch [1][4830/4953]\tlr: 1.017e-05, memory: 9082, loss: 0.5462\n", + "2023-07-02 19:53:24,406 - modelscope - INFO - epoch [1][4835/4953]\tlr: 1.016e-05, memory: 9082, loss: 1.0023\n", + "2023-07-02 19:53:29,386 - modelscope - INFO - epoch [1][4840/4953]\tlr: 1.015e-05, memory: 9082, loss: 1.3645\n", + "2023-07-02 19:53:34,231 - modelscope - INFO - epoch [1][4845/4953]\tlr: 1.014e-05, memory: 9082, loss: 0.9927\n", + "2023-07-02 19:53:40,558 - modelscope - INFO - epoch [1][4850/4953]\tlr: 1.013e-05, memory: 9082, loss: 2.0516\n", + "2023-07-02 19:53:47,846 - modelscope - INFO - epoch [1][4855/4953]\tlr: 1.012e-05, memory: 9082, loss: 0.7750\n", + "2023-07-02 19:53:52,341 - modelscope - INFO - epoch [1][4860/4953]\tlr: 1.011e-05, memory: 9082, loss: 1.4390\n", + "2023-07-02 19:53:57,172 - modelscope - INFO - epoch [1][4865/4953]\tlr: 1.010e-05, memory: 9082, loss: 1.0197\n", + "2023-07-02 19:54:02,776 - modelscope - INFO - epoch [1][4870/4953]\tlr: 1.009e-05, memory: 9082, loss: 0.7660\n", + "2023-07-02 19:54:08,311 - modelscope - INFO - epoch [1][4875/4953]\tlr: 1.008e-05, memory: 9082, loss: 0.8775\n", + "2023-07-02 
19:54:14,394 - modelscope - INFO - epoch [1][4880/4953]\tlr: 1.007e-05, memory: 9082, loss: 1.3374\n", + "2023-07-02 19:54:20,602 - modelscope - INFO - epoch [1][4885/4953]\tlr: 1.006e-05, memory: 9082, loss: 1.0018\n", + "2023-07-02 19:54:28,123 - modelscope - INFO - epoch [1][4890/4953]\tlr: 1.006e-05, memory: 9082, loss: 1.4156\n", + "2023-07-02 19:54:34,101 - modelscope - INFO - epoch [1][4895/4953]\tlr: 1.005e-05, memory: 9082, loss: 1.4742\n", + "2023-07-02 19:54:39,802 - modelscope - INFO - epoch [1][4900/4953]\tlr: 1.004e-05, memory: 9082, loss: 1.2737\n", + "2023-07-02 19:54:45,785 - modelscope - INFO - epoch [1][4905/4953]\tlr: 1.004e-05, memory: 9082, loss: 1.2928\n", + "2023-07-02 19:54:52,274 - modelscope - INFO - epoch [1][4910/4953]\tlr: 1.003e-05, memory: 9082, loss: 0.9859\n", + "2023-07-02 19:54:57,409 - modelscope - INFO - epoch [1][4915/4953]\tlr: 1.003e-05, memory: 9082, loss: 1.8160\n", + "2023-07-02 19:55:04,217 - modelscope - INFO - epoch [1][4920/4953]\tlr: 1.002e-05, memory: 9082, loss: 0.9310\n", + "2023-07-02 19:55:09,704 - modelscope - INFO - epoch [1][4925/4953]\tlr: 1.002e-05, memory: 9082, loss: 1.1717\n", + "2023-07-02 19:55:15,079 - modelscope - INFO - epoch [1][4930/4953]\tlr: 1.001e-05, memory: 9082, loss: 1.8821\n", + "2023-07-02 19:55:19,843 - modelscope - INFO - epoch [1][4935/4953]\tlr: 1.001e-05, memory: 9082, loss: 0.7700\n", + "2023-07-02 19:55:24,826 - modelscope - INFO - epoch [1][4940/4953]\tlr: 1.001e-05, memory: 9082, loss: 1.1562\n", + "2023-07-02 19:55:29,831 - modelscope - INFO - epoch [1][4945/4953]\tlr: 1.000e-05, memory: 9082, loss: 1.2777\n", + "2023-07-02 19:55:34,919 - modelscope - INFO - epoch [1][4950/4953]\tlr: 1.000e-05, memory: 9082, loss: 0.9414\n", + "2023-07-02 19:55:38,429 - modelscope - INFO - Saving checkpoint at 4953 iter\n", + "2023-07-02 19:55:38,697 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449/iter_4800\n", + "2023-07-02 19:55:38,741 
- modelscope - INFO - Train finished. Uploading models, waiting...\n", + "2023-07-02 19:55:38,823 - modelscope - INFO - {'done': True}\n" + ] + } + ], + "source": [ + "def cfg_modify_fn(cfg: Config) -> Config:\n", + " cfg.update(CONFIG)\n", + " return cfg\n", + "\n", + "\n", + "trainer = EpochBasedTrainer(\n", + " model=model,\n", + " cfg_file=cfg_file,\n", + " data_collator=data_collate_fn,\n", + " train_dataset=train_dataset,\n", + " eval_dataset=val_dataset,\n", + " remove_unused_data=True,\n", + " seed=42,\n", + " cfg_modify_fn=cfg_modify_fn,\n", + ")\n", + "\n", + "trainer.train()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 可视化\n", + "tensorboard 命令: (e.g.) \n", + "`tensorboard --logdir /home/hackathon/my_git/agent/runs/baichuan/v10-20230702-172449 --port 6006`" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['lr', 'loss', 'evaluation/acc', 'evaluation/loss'])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAApIAAAHDCAYAAACXsvqpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACbJ0lEQVR4nO2dd7wkVZ32n+6b79w8d+7kRBokzJBhBEyMjogCLrqIqCy6uiiYcFfFzPrquPqur+ICsgZwdQUxoIgEiYNkGEDCwJAm53Rzvl3vH78+XaeqK3d1fr6fz0zfrq46dSo/9UsnYRiGAUIIIYQQQkKSLHYHCCGEEEJIeUIhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghhBBCIkEhSQghaa677jokEgls2LCh2F0hhJCygEKSEEIIIYREgkKSEEIIIYREgkKSEEIIIYREgkKSEEI8uOqqq3D44YejoaEBc+bMwcUXX4ze3l7LPC+//DLOOecczJo1C42NjZg3bx7e9773oa+vLzPPnXfeiVNOOQUdHR1oaWnBkiVL8KUvfanAW0MIIfFSW+wOEEJIqfKNb3wDl19+OVasWIGPf/zjWLduHa6++mo8/vjjePDBB1FXV4fx8XGsXLkSY2Nj+OQnP4lZs2Zh69atuOWWW9Db24v29nY8//zzeOc734mlS5fi3//939HQ0IBXXnkFDz74YLE3kRBCcoJCkhBCHNi9ezdWrVqFt73tbbjtttuQTIoD59BDD8Ull1yCX/3qV7jwwguxdu1arF+/Hr/97W/xnve8J7P81772tczfd955J8bHx3Hbbbehu7u74NtCCCH5gq5tQghx4K677sL4+Dg+85nPZEQkAHz0ox9FW1sb/vKXvwAA2tvbAQB33HEHhoeHHdvq6OgAAPzpT39CKpXKb8cJIaSAUEgSQogDGzduBAAsWbLEMr2+vh4HHHBA5vfFixfj0ksvxU9/+lN0d3dj5cqVuPLKKy3xkeeeey5OPvlk/PM//zNmzpyJ973vfbjxxhspKgkhZQ+FJCGE5Mh//ud/4plnnsGXvvQljIyM4FOf+hQOP/xwbNmyBQDQ1NSE+++/H3fddRc++MEP4plnnsG5556Lt771rZiamipy7wkhJDoUkoQQ4sDChQsBAOvWrbNMHx8fx/r16zO/K4488kh85Stfwf3334+//e1v2Lp1K3784x9nfk8mkzjttNPw/e9/H2vXrsW3vvUt3HPPPbj33nvzvzGEEJInKCQJIcSBFStWoL6+HldccQUMw8hM/9nPfoa+vj6cccYZAID+/n5MTk5alj3yyCORTCYxNjYGANi3b19W+0cddRQAZOYhhJByhFnbhBDiwIwZM3DZZZfh8ssvx9vf/naceeaZWLduHa666iocf/zx+MAHPgAAuOeee3DJJZfgve99Lw455BBMTk7il7/8JWpqanDOOecAAP793/8d999/P8444wwsXLgQu3btwlVXXYV58+bhlFNOKeZmEkJITlBIEkKIC9/4xjcwY8YM/Nd//Rc++9nPoqurCx/72Mfw7W9/G3V1dQCAZcuWYeXKlfjzn/+MrVu3orm5GcuWLcNtt92Gk046CQBw5plnYsOGDfj5z3+OPXv2oLu7G2984xtx+eWXZ7K+CSGkHEkYus+GEEIIIYSQgDBGkhBCCCGERIJCkhBCCCGERIJCkhBCCCGERIJCkhBCCCGERIJCkhBCCCGERIJCkhBCCCGERKLgdSRTqRS2bduG1tZWJBKJQq+eEEIIIYT4YBgGBgYGMGfOHCST7nbHggvJbdu2Yf78+YVeLSGEEEIICcnmzZsxb948198LLiRbW1sBSMfa2toKvXpCCCGEEOJDf38/5s+fn9FtbhRcSCp3dltbG4UkIYQQQkgJ4xeGyGQbQgghhBASidBCcuvWrfjABz6A6dOno6mpCUceeSS
eeOKJfPSNEEIIIYSUMKFc2/v378fJJ5+MN7/5zbjtttswY8YMvPzyy+js7MxX/wghhBBCSIkSSkj+x3/8B+bPn49rr702M23x4sWxd4oQQgghhJQ+oVzbN998M4477ji8973vRU9PD44++mj85Cc/8VxmbGwM/f39ln+EEEIIIaT8CSUkX3vtNVx99dU4+OCDcccdd+DjH/84PvWpT+EXv/iF6zKrVq1Ce3t75h9rSBJCCCGEVAYJwzCMoDPX19fjuOOOw0MPPZSZ9qlPfQqPP/44Hn74YcdlxsbGMDY2lvmu6hL19fWx/A8hhBBCSAnS39+P9vZ2X70WyiI5e/ZsHHbYYZZpr3vd67Bp0ybXZRoaGjI1I1k7khBCCCGkcgglJE8++WSsW7fOMu2ll17CwoULY+0UIYQQQggpfUIJyc9+9rN45JFH8O1vfxuvvPIKfv3rX+O///u/cfHFF+erf4QQQgghpEQJJSSPP/543HTTTbj++utxxBFH4Jvf/CZ+8IMf4Pzzz89X/wghhBBCSIkSKtkmDoIGbxJCCCGEkOKQl2QbQgghhBBCFBSSADA5AaSmit0LQgghhJCygkJyahLYvh7Y+mqxe0IIIYQQUlZQSI6PFrsHhBBCCCFlCYUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIUkIYQQQgiJBIWkztgoYKSK3QtCCCGEkLKgOoWkYThP37UJ2LOtsH0hhBBCCClTqk9I9u0FtrwMjI86/z46XNj+EEIIIYSUKdUnJPv3ymfv7uL2gxBCCCGkzKk+IUkIIYQQQmKBQpIQQgghhESCQtIl74YQQgghhHhDIUkIIYQQQiJBIUkIIYQQQiJBIUkIIYQQQiJBIUkIIYQQQiJRvULSyPqDEEIIIYSEoHqFJCGEEEIIyYkqFpK0RBJCCCGE5EIVC0lCCCGEEJILVSwkE8XuACGEEEJIWVPFQpKubUIIIYSQXKhiIUkIIYQQQnKBQpIQQgghhESCQpIQQgghhESCQpIQQgghhESCQpIQQgghhESCQpIQQgghhESCQpIQQgghhESCQpIQQgghhESisoXkS08B3/oQ8OMvFLsnhBBCCCEVR22xO5BXxoeBPdvA4RAJIYQQQuKnsi2Smc3jcIiEEEIIIXFT2UIykbZEUkcSQgghhMROlQjJVHH7QQghhBBSgVSHkCSEEEIIIbFT2UJSJdkY9G0TQgghhMRNZQvJBIUkIYQQQki+qA4hyWwbQgghhJDYCSUkv/GNbyCRSFj+HXroofnqWwx4WCRTKaBvDzAxXtguEUIIIYRUCKELkh9++OG46667zAZqS7imedLDIjk5DvTvK2h3CCGEEEIqidAqsLa2FrNmzcpHX+KHMZKEEEIIIXkjdIzkyy+/jDlz5uCAAw7A+eefj02bNuWjX/GQSG8edSQhhBBCSOyEskieeOKJuO6667BkyRJs374dl19+OU499VQ899xzaG1tdVxmbGwMY2Njme/9/f259TgUTLYhhBBCCMkXoYTk6aefnvl76dKlOPHEE7Fw4ULceOON+MhHPuK4zKpVq3D55Zfn1suoZHQkhSQhhBBCSNzkVP6no6MDhxxyCF555RXXeS677DL09fVl/m3evDmXVYYj49ougpDs38tkHkIIIYRUNDkJycHBQbz66quYPXu26zwNDQ1oa2uz/CscRUq2mZoE+vZKeaEUx/kmhBBCSGUSSkj+67/+K1avXo0NGzbgoYcewrvf/W7U1NTgvPPOy1f/csOr/E8+sQhXutUJIYQQUpmEipHcsmULzjvvPOzduxczZszAKaecgkceeQQzZszIV/9yJOE/CyGEEEIIiUQoIXnDDTfkqx/5IWOQpHuZEEIIISRuKnysbdaRJIQQQgjJF5UtJIuVbEMIIYQQUgVUtpB
Mqs2jkCSEEEIIiZvKFpIsSE4IIYQQkjcqW0iWgmubGpYQQgghFUplC8lEkTYvwbJDhBBCCKl8KlxIpj9Z/ocQQgghJHYqXEiWwubRt00IIYSQyqQUlFb+YbINIYQQQkjsVLaQzBQkp5AkhBBCCImbCheSmSDJonaDEEIIIaQSqQ4hWWiLJC2ghBBCCKkCqkRIFrEPFJWEEEIIqVAqW0iCrm1CCCGEkHxR2UJSLwxOyyAhhBBCSKxUtpBMUkgSQgghhOSLyhaSoJAkhBBCCMkXlS0kLWNeF0tIUsASQgghpDKpHiFJiyQhhBBCSKxUuJCsMf8ulpCkfiWEEEJIhVLZQlInFyE5OgRMjIVZWfR1EUIIIYSUCZUtJC2u7VS0NibGgN1bgR0b4+kTIYQQQkiFUD1CMioT47m3QQghhBBSgVSPkIzq2o4iRg3XL4QQQgghFUOFC0lt86K6tgkhhBBCiCMVLiRzWDY1FUMjoEGSEEIIIRVLbbE7kFd0t3QqhEVysBfYvwvo7AFq683phhFP3CUhhBBCSAVQ4RbJiCPb9O+Tz/27Yu0OIYQQQkglUdlCUt+8MC7m+kZtOc2SGSlhh75tQgghhFQmlS0ko9aRTLrtFopCQgghhBBF9QjJ6I2YfwbWkRSchBBCCKl8KltIAsgIwTDJNq5QIBJCCCGEKCpfSGaskjGIwCgxktSehBBCCKlQqkBIpj8jC7pclaBt+cE+YNdmrU4lIYQQQkh5UgVCMr2JcYxsE9Qi6TXb/p3A2IhZYogQQgghpEypfCGZIYRl0XXWGP3UHLKREEIIIWVOFQjJtG87Ug1IG4x3JIQQQgjJUPlCMpGjkLQsRyVJCCGEEKKobCGZSEQUki7zcmQbQgghhJAMlS0kgdwtkpEwHP8khBBCCKkkKl9IZihSHUlCCCGEkAql8oVkzhZJxkgSQgghhDhR+UIy16xt6khCCCGEEEcqX0jGOUQilSQhhBBCSIbqEZKpGAqSM2ubEEIIISRD5QtJRaiRZOjPJoQQQgjxo8KFZEIbazuOGMkIY21ThxJCCCGkQqlwIYlMrk08pXuCtkH1SAghhJDKp/KFJGJMtqE+JIQQQgjJUPlCsuh1JKk+CSGEEFKZ5CQkv/Od7yCRSOAzn/lMTN2JmYT2dxyubY5sQwghhBCSIbKQfPzxx3HNNddg6dKlcfYnflSyTSjLIMfKJoQQQgjxI5KQHBwcxPnnn4+f/OQn6OzsjLtP8aKskmHqSFrQRSVVJSGEEEKIIpKQvPjii3HGGWdgxYoVcfcnD2SUZPBFXPViBCFJ7UkIIYSQCqU27AI33HADnnzySTz++OOB5h8bG8PY2Fjme39/f9hV5kbOyTYakZqgkiSEEEJIZRLKIrl582Z8+tOfxv/+7/+isbEx0DKrVq1Ce3t75t/8+fMjdTQaCWQskpFd24QQQgghxIlQQnLNmjXYtWsXjjnmGNTW1qK2tharV6/GFVdcgdraWkxNTWUtc9lll6Gvry/zb/PmzbF1PhCJhP88gaEYJYQQQghRhHJtn3baaXj22Wct0y688EIceuih+MIXvoCampqsZRoaGtDQ0JBbL3MhU4884ljbvbsjrJSCkxBCCCGVTygh2draiiOOOMIybdq0aZg+fXrW9NIhxhjJoFBHEkIIIaQK4Mg2eYeqkhBCCCGVSeisbTv33XdfDN3II4kIY227zcoREgkhhBBCMlS+RTKSa5vqjxBCCCHEj8oWklr1n3DJNm5QYBJCCCGEKCpbSAIoSrINIYQQQkgVUPlCMkqyDTUnIYQQQogvFS4kE9GSbWJVklSlhBBCCKlMKlxIQrNIFnKlFI+EEEIIqXwqX0gq4ki2iRJnWUxNaRjA2EhMiUaEEEIIIVaqQEhGiZGsEIviwH5g12Zgz7Zi94QQQgghFUjlC8mij2xTRAZ75XN0uKjdIIQQQkhlQiGZDwzXL4QQQgghFUP1CElCCCGEEBIrlS8kMzGSUyGWiXOwbTf
KXOBOjAOpMPuUEEIIIZVGZQvJREIbIjHEcsXwRk9NSXLM1GQRVh6SiXFgxwZg62vF7gkhhBBCikhlC0kApkUyjvI/EWYMusy+7UDv7vLIsM4k7zD+kxBCCKlmKl9IFqUguU7AFStxNj6av64QQgghhMRI9QjJkvdtE0IIIYSUF5UvJON0bRNCCCGEkAyVLyR113bQWpKus9FSCaDsE84JIYQQEg9VICTTn6EskhSMhBBCCCF+VL6QjDLWNiGEEEII8aXyhWScQyRGaYIClhBCCCEVSuULyWJYJKkdCSGEEFIFVL6QjFL+h1ZEQgghhBBfqkdIxiIOg7ZBIUoIIYSQyqeKhCTrSBJCCCGExEltsTuQd5JprZwqQPmfiTFg/y5gbCT3tgghhBBCSpzKF5KJGvlMTSHvom7HxuxpfXuB+iagsZmxl4QQQgipKCrftZ2xSE4FXyZuvbd7S8wNEkIIIYQUn+oRklMhhKQbtCim4RiJhBBCCKkKIZl2bY8MhRCTFIyEEEIIIX5UvpBMpDdxdAjYu724fSGEEEIIqSAqX0jqMZLjo8XtCyGEEEJIBVEFQjLt2g4T35g3z3ahXeZ00RNCCCEkf1SBkIyQte0qwEpUmBmGuO379ha7J4QQQgipIipfSCaiFCTPE/nSoeOjwPAA0F8EIclMdkIIIaRqqXwhmXFtxyAkS1UzVfLwj6kU0L8PmBwvdk8IIYQQYqMKhGQU13YEaJnLD727gb49zqMGEUIIIaSoVIGQVEMkVrDVzo1K0LZjw/JJoU4IIYSUHFUgJGOMkTRSwOSE24/h2hrsBQb7cu1RpFXnjGVgGwo8QgghpFqpHiFpuLi2DQN44E/A8w/7tzU2Amxf7xyvF0hP2Wbav7NwljbDkKQcWvYIIYQQEhO1xe5A3vFzbe/YCDx2h/x9+PJgbY4OAy31ufcNgIjLAoxdvX8nMNQPtHUB7d3xtVug7hNCCCGk9KgCi6SPkNRHu9n4IjCRtjaOjYhgdMLRqhfR0lcoA+FQv3z27yvQCgkhhBBS6VS+kEz4ZG3XaEbZ318B/PYHEgt51b/KP9eYyFCdcP+pLF3NhTRB0txJCCGElCqVLyT9km3sOmXHBmCg1xR4g73A1CSwfYPZhpP489KDNTUe85SjkNQJ2f+hvuIUTieEEEJI7FSPkHRLtplymL71Ve33SeCe3wDXfxd44s50WyFd24l8WyTLSIzu2ylDOY6PFbsnhBBCCMmRyheSCZ8YyanJ7GkDmsVsbBh49kH5++G/pCfGKdzKSATGSb4LxBNCCCEk71S+kMxYJN2EpEMMZP9+8+8b/tP8W8VbOrq2gwjCkC7xKBQ65rJKdTAhhBBCqklIulokHSxjAy6ZzRlRmnu3MjgJv9FhYM/WmBJ97DB5pSgYhsSHTtClTwghpHKoAiEZwbWtWySd2gqrJD1nd/hx9xZgZEhqP8ZN7DqSJslAjAxKfCjHDCeEEFJBVIGQ9Cn/4xgj6WKRVEkzoetIevzm5YqedOgbKU/GRv3nIYQQQsqMUELy6quvxtKlS9HW1oa2tjYsX74ct912W776Fg9RLJLjLg/9jFsyYKxjxoLpRUQBWlSiFl8v1e0pBCG2ff8uYN+O/HWFEEIIiYlQQnLevHn4zne+gzVr1uCJJ57AW97yFpx11ll4/vnn89W/3FFibufG7JFqhvqtpX78mJwQMRlUEHmV/VFUs7Yi2RiG1C4d6s9TjCwhhBASH6GE5Lve9S684x3vwMEHH4xDDjkE3/rWt9DS0oJHHnkkX/3LnaS2iXf80vrbT78CrHsiXHvDgy4/BFCETtZPL1EaSWQaLn/HiGUVtnXs3wns3Fzl1seY4D4khBBS4tT6z+LM1NQUfvvb32JoaAjLly93nW9sbAxjY2aman9/f9RVRkMXkq/+3fqbk7DzY2Qg/Mg2hiGj5fTucvrRa8FgfXJzxRdDhwz2yefYMNA4zX0+Jo+7QPFICCGkfAidbPPss8+ipaU
FDQ0NuOiii3DTTTfhsMMOc51/1apVaG9vz/ybP39+Th0OTcIlTjGItWfJsdnTRoaiubYdRWTAfngxOgz0a8lBvs0VSMFRD0XDcP1CCCGElByhheSSJUvw9NNP49FHH8XHP/5xXHDBBVi7dq3r/Jdddhn6+voy/zZv3pxTh0OjWyT15Jcg8Wft3dnTtq+H8wM+6kM/R7EwMpS/tmOn1PpTSKp52wkhhFQqoV3b9fX1OOiggwAAxx57LB5//HH88Ic/xDXXXOM4f0NDAxoaGnLrZS7oQrJG29yx4ex5AWBauxSOBoCOnuzfH7kV6JoJvPX8ACvPMdmmZGPkIsZhlurmFIIo217N+4sQQkhZkHMdyVQqZYmBLDl013Ztnfn32Ijz/G1d5t8dDhZJAHjsjuxpnqIvYh3JXHFqu2RiE0umI4QQQgiJSCiL5GWXXYbTTz8dCxYswMDAAH7961/jvvvuwx13OAirUkF3Z+sWSXspIEVrZ9p9DWBam/M8QcuyBNJKAYSkYUhikC6Ec2kvTmg1i5kCZN0TQgghMRFKSO7atQsf+tCHsH37drS3t2Pp0qW444478Na3vjVf/csdPeGlRrdIeghJRX2T+fe8g8WKqYYvDLZy+Yjsvk7/tm8HMDwATJ8NNLc6zxOq3RgZHpB9rFtyXSmSMBruB8bHJOY1SG3PamF8VM7plg7uF0IIIZEIJSR/9rOf5asf+WNKsx7WBrBI1tabfzdoQrKxGXjnPwM//oKI0NSU1doZVbgFqSM5PCCf/fschGQR0Lvcv1c+m1qAunrH2XMiDn2zNz1KTEOT9LMoBDw/Cqm1d26Sz0QSaGkv4Io9mJwQzwGFLSGElAWVP9Z2vZboo4s2t9qLTVrtw9o64OCj5e9jTgMams3fRgbNNqdcxvE2Vxy4u+FxeeAaLkNC5i020b6NJeiWdRtvvVQp1C6cKJEY59FhCSvZvaXYPSGEEBKQyAXJy4auWcDrTgBeeMwa2zg5bp3v5DPFrT3/EOC+35nxlGd8RFyjLR3yvb5RROjwgGR4790mru726dnrDjREYsxjbatF3MYWj4Wo1td4exGecrByVXGMpKqW4JYIRwghpOSofCEJAMetyBaS+qg2Z10EHLjU/H7hN8SVDUj5ICUiARmtRQlJwIyXHNjvvO59O0Vk1rjt6nwNY5hPIem0vsKuruzg/gmHuq6aPEZHIoQQUnQq37UNmNnOerzkZFpILnujVUQCQGePeyydEphKSHrx2nPAdZcDt3jElsY+1naaVBGUi2+caI59Ktm6mvkivb1jo8C214ChAg8vWiicjuuerfIvr5Z1QgghuVJdQtLJIlkb0iirxo+2C0knjfPIrfJpH+PbslzMrm21TFwWyVQq2pjk+WDbq6UTzxeaHMIU9m6TY7BvR6w9Kgn696aP67jz74W2rBNCCAlFdQhJVfZnatK0cChx5OpydkFZJINYh+xxmIGIKY7P7QEctvltr8k/39qZYYVSBGGVSgH7XcYsrxScXiwq2RLbt1eOa9/uYveEEEJIBKogRjJhLeS9dzuw/TUzazu0kFQWSbuQdHjY2y2gTuvKl0hwdQmGVJJKkI6PBCyIHrTd+JqqXAzbJ6QEVF19EcsY5QkD5ZELRQghxEIVCElYa0PeeyOw5WXze1SLZJAYST37dKjfpWh3uSTbBHnK+2xLJVvW8oHT7urbI5/zDyloVwghhBAnqsO1nUyaxcN1EQnkYJH0iZEcHTYf+gAw2Ofcnl1cxWWVyWeSgqP7FdZ9sGdbgPqaMWKkKlCoVtr2kEAYBmNDCSFlQ3UIScB0yzbbxs+OS0jaH/q7Nlu/D/VmtzU1Gb/4MWJOtgm+4uxJ/fsKs+qJcWDLK8D+ndm/lYq4zCFvqmr0ZLVspx/7d8r57JaARAghJUT1CckpW9JIZNe2T7LNjo3W73aL5GN/BX70WeDp1eHWH5RSEFC5itnB3mAP04G0YC3p8jhBj0cVFyQngjqP3WrTEkJ
ICVF9QnJ8zHl6UFwtkjZ22oTkkCYkhweAB/4oQ/Y9cVe49QelKDGSYfARSeNjMWVol5kYK7PuxkfVbjghhJQ1lS8klf5RJYDsAiusRVKNtDGw39vqp4TkwtfJp7JI7t4C3Plrc77dW/IzJFwcz2V9+wLpSJfxtqcm09sYolOpmGpXlrM+MTx826VgcSaEEFL1VEfWNuBueQwrJNvSY2qPDrtbJcdHzfjAxUcAG18wYyR/+W3rvKkpYPv68P3wxU1oFLDGiurCtvXypbMn+zdXWAum+qA4JoSQcqPyLZKKuIRkXT0wLZ2w4zbSSMZ9ngCmz5I/3bK2AWDXFu1LXAXJXaaHat6tkaAWMptFTY2fnAuhtUapiJNcsm2cfiqV7SKEEFLNVJGQrHeeHsUS2N4tn3u3Z/822Ae88rT8XVcPtHSY0+00NMnn7s3Zv0UmxlTfvA5WE7V/IZfTZx8ZArZvMIvRlySG45+e81UCFbY5hBBSLVSBkEyb4NzG1K6JMFpLRkg6WCR/tQq45zfyd109MK1d/h4bBq7+vHXeg46ST3upoJJEM2UGTkAuMXUwMiDDVu7ZVvh1R9oVXhbJqB0hhJAyZGJMqqGMDBa7J8RGFQjJNG4WSTeB6UVHWkjuc7BI6mWBauvF6qjc6vYLYPER8ukkSKMSa+3BAoyfnStjIxKv6opDn/JZrD1XvCIE/CeWMZW2PYSQWNmzXcRkMQwBxJMqEpIxxUgCQJuLa9tugaurBxIJYFqH83pV8okqpD0VU6aydCbeJnxjKwMroPgwDLHmxrrfSghmbVfPdhJCvEkVcKQ0EorqEZJuLuxcXNv2NyN77J2ygk44xOR95Jvm2NvDA8DaR4H/uhR44VHrfKMxJKhEJseHeNbiIdqLVG7IaZYiCFxHohQkJ4QQQkqb6hGSbhbJKPUKlZDs3W11kw7ZEmrq0kJy9uLsNlraxe3dkB4p5/ZfiGXt1mut8+3eGr5/gIclJ0Tatt5G/z4t69plrO1gE7PbdoTlfzz3ES11hBBCSoDqFZINzUBHD9A+I3xbrR1AskaEnz6Gtj0zWwnJU84Cjl3h3Fb79PDrLwZjI8AeH1FbaHETaHUF6NPUFDA54T9frFBIEkKqiASNC6VK9QrJD3wRuOCrQE1N+LaSSTO+sW+vOX3QNjaucm1Pnw288R+AuobstuYcGH79nhiSmRzLEIlhxEpQK2WBKUQftr0qReWnPGJ44u5HKexbQgghVU/1CslkbTQRCchDfPps+fulJ4En7xHh1rvHOl+dLVNcFTLXWXiowwpyePMaG5VaiW7DLoZpOmexYnh+zZ0SU1MTY/7z+KFbdT0tvCG23TBK3xVe4t0jhBDiTOUPkaiEU5aQzFFDK4vk06vT7dVIzKSOveTQzIXZ88xalFs/7Ojlh3LG5eleiLyR2IRPOSuUGGIkVWZ7Mgl0zy1h91CIUZQIIYSUDJUvJJWStLuVkxGtkQAAA5h9gHXS1lfM8bUzs9kegm9+LzA+Ahx5ijltWpsk7/Rp1szUVHShG2eNxLgtkrEQdOSXPHbBDU+NViRBNDVhVhMwjPiFpGEAvbuk+oGqQkAIIbFTqi/BpHpc22o4QkWuFsmTTrd+HxkC+vfapg3AcvI3twLvvhg4aJl1vtmLHJaLiF285iQcwlqJ/MSS4fK3jd49wO4t7r8HXZ+b1S5OTWdZR9wiLeh6A7aRj/vw+KgkmfXt8Z83CvbNNAxgYL9PEXpCCCGFonqEZH2j9XsuFkkj3d6F3zCn7dos9SB1hvqDPbxPOdv6/ZkHcuib/clr70BC5unfl93fXDAM54e+6/webQ3sc/8tqAhMpYAdG5xHHyob4oiRDGnBDUssSV0hGB2S8JBALxokZybHgYnxYveCEBokSxgKyUikn8idPcAl35e/R4eyhVPQ5IslxwL/cAmw5Dj5vmmd9/xjI8C+nS5ZwrYH+9RkduLN+KhYkOwj89gp9QQNL0YGpSSP40MwvV2jw/I
CkNODMo/p2DHl2jgvX6bHttJETaoYpaMCYhiSuLdjQ2kPK0oIKSpVLCRz3XTDbFePDWtoAk57nyT3nP5PwdtadBiw7FT5OrAPuPka4LbrnB8yuzZL8fO+3dm/2QXCLT8BfvJlcQcqCjakYAGGSMyF3VtEZPsJas8+5HO+GJRk4PaKTdDtqTBBszVdOqoUxaR+fXF4OkKIC1WQbJOmQROSiWTuSQf6c69rlplo09QKLHsDcOTJIjCHBuD7kFQ37Gnt8tm/z2yvew5w/Nucl5ucAOpsllX95j85Abzyd/n7hceBE1zace9YyPnjXbxg5CSsizT0YWARXSYHwbWb9hJSZbI9YRkfcR99ixAC+rZLlyqySGrJNnEX61Y1JQFJqAHSrvOAJ35GSDrUmdzlEQuWSMDzQbtvh/n3yIBY3sI8iEM/s7OCJONsPCQVIDiCHCsjJVbVQPPm3qWiQxcrIYSUFNUjJJ1GlckF/cHdNcv8u7klelv1jdkueN0lnYWPUNXH6V5zN/CLbwJP3BWmYyEmu83rEfM3NSmW18gWwYjJPHFSLHGm9uveHRLqYC895d+A+adTfG8+GBsGBnpzW1c5WSTLoRA8yQ+GIbHofPEpPrwG804VCMm02Mo5JtKD6ZqQbLXV0gtilNRP9JYO62/bXgV+/V3nLFU/97w+DrjinhsCdEj1K8QPhsPkyQlg22vOyxkA9myTpJ8924L3KVD/IjI5LiWcwsSDjcVdhiZgiST128igfLpluhsu7emTJye8M+W9CHMMdm2RmpOupXtczivL9zJ5MBsGsGOjJKqU7YOsXPtdAgwPADs3yUseiYconu2JcXmG2kvzkVipAiGZJywWSc21vfRU24weZ38ivfv1h2NrZ/Z8OzYAv/w28Orfbcs7uLZ1xkbdf1PENQyfG16iTBXKHg/Qz9CE6Ls6RDs3ydjp+3cFW25q0paoE3ccZIjfcl31UJwjIvkwmUPmddkIyZRs5+REMIs7NVtlMdQnn1GGTU1NSW1WJjjlTu8usQr3UUjmk8pPtslXfK4uwBqbpXwPAPTMdSnL40AikbbkaW21TXeff/UfgJPPtC7vhdt423mj0E/DuNaX3o/KDRXUymgXCHEXEI9DSbquNw/HKh8j5zitgxQG7urisGe73INGBoEZc4vdm+Aod35dQ548gBHuLTyHC0J1WSTto9vkhO0MXXSY/AtzsiuLpP7meeJK6eeS44B3/nP2Mvq8Soi64SYkLXGLXnGG+bwKY2g7X8bUvOcBxbHtEbK2PZcpYEakYz8Cbk+sQ4CmgMHe/JTeKZuyS6TkUC+yo0PF7YedqUnvOq5DfeLKL6nBAnjtFYLKt0jqNLXGZ6VzeyiHscgkE8AUrA/HtunAx1YBNbXyVrf8DODhv8hv47aah4mkt6vPbVstMWpxXWillt1SiP7Yj3Uc6yyge7ws8KhKMDUp1puWdueKB3707ZVktmQNMPfA3LqZhUeSWdlQth0n+UDFu885QJ5PdpQ7Py+hShHhKVwQqssiqUrzxIGrdSfh+dX6m4NFEgDq6k3XQEOzOb1vL/DUfdryPjGS4y5CclDLBI/kjg2QFOFHLDF5JXaXiLs7cbjKQ2XeR6HAx0B/cerdLee4XuYqDGqI0HzEogW1+uuMDRchHCUoJXatkXjxs/Tr53BZjS7F87YQVJmQjFCaJywJ+xevZBtbbJ4TRywH5h5kft/8ktvKslEPpVPONodfBKQES4YYLzTfprQZiv3WGjShpqjE4Lt3y9q2L1/QWr+5vIhoM+YqAAuVzBAkQSiVkqz2Us3y5fO4cunfB2x9xawA4YR+H8l3HLQbUdbLmOqCUF1CMor7y40gFkm/897NIqlT3wiceynw1vPl+2Cftf0gMZILlgBnfNgUpLpF0hOnB74RrixQiJ9Dk4vFbrA3zp6olUb8LeZVFagLkVdQKvf2fD5kXAW8yzylmKHLh3B10LdHPvftdJ9HfxlyE3Q8XaqW6hKSJ71
D6jSesDL3tuKIkQxikVSo+pJhBJASkirJSAlpvci5Z7JN8FUV5y5SgXeuuB/ehRQD+VhV3GWOfNdnAL17PGpdRmwzKgP75QGfb9FbikKWlA7lWli9UPe/Kn/pqq5km2ntwEe/FZNpPuiJE6COZJByM05C0vDox+SEWcOsMR1n2dyW3UZRhzGMQEl1yZ4IEnDW1JQcg+ZW/xGXYi3fUyJZ2479cLN0F/iAD/VJcfaBfcD8Q6K3EyVG0on+vfIQb+2U2Ol8sHOT3CtmL/YY77ukLjxSaHSLZFmJpgL0df8uCQuYuRCoqcn/+kqQKrBI2pNfYnpgxmmRDEJLu3yODgUrV6JGKqlrABqnyd/KIqkXZy0zHZkfbMchNRUx6SG9w/ze3nt3S1zSjo3e840M2kYGitKlAG7VfGEY4jaLffSfmElqN//YSgEFEZIBjkFm2TweL/XCqZKPCLFTEhbJEq0jOdgrFSScRpKrEqpASOaLgELSM2s7xIXROM20XmWyVD2uEiUWO7rN9fTMl8/1zwd8QLlYjnT2bAM2vehtHc0X+RyVJ0jSg9P6B3slcN0rBCFMopGby7F/X0D3ayHL0NhWMNgr/dxlqysXl4iNa3vyUTzZEiKZQ+Z8AXSk/8qLtf6QjI8C+3fTTZ8PSsEiGckGFKGvkbevSElIJUAVCMk8nfSuzUYoSB5o3oQkzQDAcw8Bax+Vt0S3fqgA6s6Z5rR5B4vrqn+vKUbD7h67YPyf/wP87goRlGGytvNNsR68Khs8Kys8D1mP9sK/TjfAYgqAvJcJiWnj8pKFGpNru5hKMh+rHOzL33CcOzdJImHvnvy0X+mkUlKn2KkQeklYJCMQ9tobHwW2vMwhFUNSBUIyT7i6tkO0EfYBtvgI+Xx6NXD7L9I1JdP92PKKWax8YL/5d0ePuXxdvWmV3LM1PdEr2cbnItSFwq5NATbAA8OQsYlje9sN0U5kHREiRjLwCuMQHW7TysGsVCAs51mxhGSI41H0uLQY1j81CezfKS+x+dyeKONbEwCGhDfs3urwU8o6X6WiDAD9FJJhoJCMjNvFZH8oBagjGRT76BsbnpfP/n3Ajd8HfvFNuQn87KsiNgGgs8e6zPTZ8rnmHnHf5nJP0N23udaF3LsN2L4hZJxWEW9oKrs3Cl7HPXYdaZs4MZ52/eVh3wVtMkDERP7J8woN1y9RGyl/ytWqRazHrpRPy/FRYFivh1nAzharvmYJQCEZlUDJNj4nVtgTr2smUKNlVaqyPr27zWmP3ma96O1CsmuWfO7YAPxqlVgB3QhTi7F/n/e8gPc1PZJ2p+iliXJpL98MDziI54AdytsNx8+CPCbHfdtr2bMW9CYY5sDlYDEP2myxXNt+YjMWMZoL+VxnKauRGKi0zSsJi2SA63TnJjFKqHtz6NCtSjtwhYFCMipBT7ggQyQGJZEElp9ufh/sBWBYL3L7cHFdM63fZ8yxfn/qHvf1OY7IYZgXp14cfWAf/K/aOC7SgK7afN9Aphyye6O4tuO8cflZJEeG41+nbwdyXDZv9/V8WyR1IelmiQtxvRT7+RbL+gu0EbFeU4Z4efZsLT+RkUrFV4XAKKFzMQgZA0mJd9Ywyu+8ciCUklm1ahWOP/54tLa2oqenB2effTbWrVuXr76VJ/kq/6M44e3AmR+Tv/v3y3WiZ+/u2Wadv8MmJOcfav3+l2vd3dJ+2Y/6CDl9mpt3cgL466+AW68tYkmRkBdn6Is55LFztYDFeRMJEyMZ03ojNZNeqJg30LzHSLqtK8Q8QUbHyRWv8Iw4LKJjI873l7DNFcstnpqS5JORIY8XghJl22vA9vUxlrRSFOm6dbpMJ8e9y4uFvscUcNvUS8ruLWUvJkMJydWrV+Piiy/GI488gjvvvBMTExN429vehqEhhyyvSkcd+CwxaB8iMcYYScW0dvlUrmW3C6ljBtDUbJ1WUwuc80lzuMTRoWzxmZqS7XO6eRuZ/6yu7Y0vAlNp4fnCY5J
Z/uLj8ndm2QJcLLqbPxRxW0s9ftMPe9Y+jnkf5d09GrDNvFg0cnFt51mkhS1IXghjflabhllvNi708zk1JXHYOzfBe2hVH0aHpaRW4Gs7ZotkuaKEbxyjNAXaDUXYV9s3SHmx8ZgSrHLahLDGBUOe3WMjkohWxoQa2eb222+3fL/uuuvQ09ODNWvW4A1veEOsHYuNvHnxNCFpuIgEP6LWr9OFpGFkl2s455MyT0Ojc4cOXw4sfB3w6+9KzNyercCcA+S38VG58Te1elsk1duUYmQQuOkq4A3vtorH/n3yMNn2GnDMmwNsnMMB27EB2LMdOPwk31kxsF8EdFhif2AETKixWzliP19dBFwsutkowg0wBkGaSjm/fI2PhYvRDUqgovBFskh6EnKd/fvEM9E9B2hqsd4/svZBiLaVgIx6bReFUhKgMb8kF01c20KCdEPMxChQ3+A8byhy8GSFXmUpnSO5kdMQiX19EiPX1dXlOs/Y2BjGxsy3hf7+PNUQcyVPB8tQN8kQWdp2wsZIKtQINakpcS8/cLP524HLRCRm1uHRn44ZItJ2axZJlTQzMuA8fN/UhNSBu/0XUm8LkDjMfTuBtY9I2QQ1HQCeulf+AcC0VuDgYwJvJgCxlv76u/L35Dhw7Gnajx7HNtTNLhHPzTGot1RfV5wuu0K7R3ds9E7WciWmfkR12e3e4uxutccXR2FiXPoVdr87nn9lFpemwlv27QTmtsTX57Cem7wZD3JdPiUvxE3TzCFvy5ISPBnj6lIJblo5EDnZJpVK4TOf+QxOPvlkHHHEEa7zrVq1Cu3t7Zl/8+fPj7rK0kIlmtjFYNQYyebW4MvV1JrDHj58qzm9vRs448M+C2vBvertfqfLUH1OIqd3t4g73er4lvfJ58gg8PJT7qveEaDWpP1C3rDW/Pue31jXGytxu7a9LJKl8HYfw3rtIjKneKSgLnL974gi3LVUVQz7RFn49XVMBRlppQgWyTDHK+7zNK+nvdb42EiOhfFjPAaDfeI9yhqsoAwouWQbn05MTkQc4SgHiyRHtgnPxRdfjOeeew433HCD53yXXXYZ+vr6Mv82bw4w9Fw5keWeto/b7PGw00Vo2Ldu5d7Wk1wOWiYj1/iSPvlnLZTP9c+5zOZwIaZSYq3U6Zxp9seLKAPa212QD90Svo0g+FrzbPgdL0+LpNdwY7ncpX22wVMM5Psm6PMgcu1anPvHZx25ND3YZ41FswhJN8tpmH0S83b37ZW4Q/cV+ovHyQnxYAR+YMccu9i/T4SiG5PjElazY0O0daRSEoMXFxUzdGMJKEm/LvTtkXCq2BvOdf7KJJKQvOSSS3DLLbfg3nvvxbx58zznbWhoQFtbm+VfWVAT0OtvF0f257GX1UQXI2GFZEtauO3R4hSdYhDt7Rowz/25B4mY3bNNRpywo4vgzS9Jhqdh2Cw6CXFZB4ldGh4E7vsd8Merg99U7YHi+vBqcY61nYl5TWZPC9uGNOTxW54skjs3A0N9tokB2y+gjoxFuMVJrv0YH5Xrxz5kpcItjtRXKMZkBZoYl+oJ+rkWx8gduzbLA3uf7d4R6FzKcacP98u6dzkYJiYnJMs61yE67S/Mg70SPx7IwlwGGKkQQwGWgEUy7D0qyr01Fx0Zun8h5y9hQglJwzBwySWX4KabbsI999yDxYsX56tf8ZHvQstJu5XNtr6Obo9l9d0f0SKp3CTHvRVodY9VtZI+gxuagNnpY/jsQ+mfHM7uXZuB3/4A+PnX5C1fZci1dQEf/obsg9mLspf74JetsUCDvcCT9wCvPQs89ldgoNe/q3aLw9hwsBtE6BtCeoFkDkJSJ2iMZJx3k9RU9kPdsqoYHgaGIS8ekbPjw67P53sp4BermUr5x8LmM751x4b0GMo+2bthV6EE8qj9mlQnv63/cR47/WV27/Zs0bhnawzrs13E/ftkvVFFeKlFsfTvD74tQazjed++fJVN0ymkRbIUb2bRCCUkL774Yvz
qV7/Cr3/9a7S2tmLHjh3YsWMHRkY83AvFJkvoxU3Cw0WdAOqbPBbVd3/Ik6rdJlCbW8ItrzjkaPl87iH3eTZptUJ1i2TPAtMSecrZ1mXO/yIwYy7wnk+Z0/Rkhof+DPz+CoeV2faD08Mv7tqUhv6gTyBzw8qpblzEGMnY7y1uDUZc0fioxMLmkt2c0zbm8+YbsW3Hl1VbW45WSb/1xWwFymkYU48OGCmpV+i7SIz7N6W1NTzgYg3O07kSOUGuxIRDOY9J7nRuRXnxt1+XUQ0QUZatIEIJyauvvhp9fX1405vehNmzZ2f+/eY3v8lX/3In30IykfAu4+M5so1eziDkeucfbP3ulqyTdRO2XXDzD5HPzevcb5D6DWd0yPyul1uw74OZC+Sza5aISiDbihUkS9bpZrdXZZnn4Nre+KK4dVTBXhUikIC5z3JKHnHY75k/vVzbMd+N8hln570yj98Mh2l+y5cZWZvmVpNV/Z1Hi2RkQjwkw5aByjWkw74/y7wOXyAmxs2hZENhE907NqbDlMII4jyF40Qmhj4M7Jc4Sot7nxbGKIQq/2OUxAkUkqi1Gu00t0oii31M6ZoaWUemGpBHOaDGZom9HIqhBNJsW1hBQ7PzfI5ox7FrtmzX6HBa2NktZClrluFgr+narmu0zltTJ4kF9n3ulYgzNmKOGe74u4NFcs92YMGh7tft/l3eb9trH5XyRYrl7wSWv8P8nkC6ba/z3eENIaCOLOxNuYBvzHlpvxD7qhTuaz5Cssg6Mpa2Yh26MIgIynF9cUdF5br5KmmoZ773PdOLvdvl088aaa/TWIkow0b/XqB9erQ2cjmngy471C/Hq727ZI9J5Y+1HZdFMpGQA1ljy4pO1nhnX+vfa2qBZrdkI9tJ5XfC1NQCF3xVBOX02eZINdkd915VTQ0wa5H8/dKT2a7km66WEWoU+3eZLrJ6W53J93xKLJBnf8I6vbnVvWamW+khhXoDf8eFwEFHyd+ZAHuXC1EXu06oupaKh2/Rkn8SwPi4v/vc93oO6NqOWyFkJVe5rcu+3nzcoNyEkNM2l4KYyyOR6kRGOE/GR+MZycSzL34U4GGXjzjpLEpNSabJKUQhKHaDQolZJP2up8DLRJjHbZ35CtnZt0Osp15DQRaZKhWSEW4QKZcDXVPrY/W0rauxWcSWcv0qspoP0Mfps4Hz/k0EZaOLRdJJWNhHwjnyFPn82x+z593wvHXaLT81hVi9zSI590Dgn74GLDrMOj2ZNIuoAyJ6u+fK37+7Anh6tXPf+/ZKPB4AzDkQWJQutP7CoyJo7dsRFCeLsLI0794MXPNF4GdfA4bCxmJqB3FyXNxHmYd6UNd2rgQdUSfm1YYhH57trFjTlJw7QeLZ3PoTmDgEh5NFUv87YOd2bpJ4wVzHV1ZJVVHI7A77CRfjSVessberBc9DFfE49u8D9nsk6U1OiMXUq6RTXN3KNaPfcZ25WCcDzFPC5aMqX0g61S6Mct9vbHJe1ssimbB9Vw+DaW3ZIqzWFmUQlwk7SDPHvEk+d260PoD83nydRr5xQ79w6xuBtk7z+z2/MV1V6oJKTQE3XSmfMxcArR3AgUsBJCT5Z/tr4eOFJsaAx263jhGuUO77J+6SN7+JMWtppSgM7HMvCaPI99t90ePs4lx3iDb27xIhFGq0Gpf2Uym5FtyOleM1Zpt399bssjGhYiRDEilmUFvf6FDIGLpwzeds4fTtWyY+pYQI0B83EaWfC+Oj/kIo5/tKHuKX+/bISGxubvX9O8UT5FTSyQ8D8hIVlKi1RR1XXKhlS9OtDVSDkLQLNiC8SJs+2z3OL1lj1nRMJsOPdDNjrpTIsQ+ZFYeQTGgZyF60dIr72TCsD95hzXI3fQ7wpvdal7OLXy8WHmr+vWMDMMM2wlHGKpm+uPbukL7UNQDv/azs12ntYvUEgFf+HnzdiodusQ4nqXPTlXKD1kfS0WsyqhEpvG7QEx5WoKD3DL8HgJGyFqH3bzB
YH2K5RwXeSJf5/bY9RFeU1VlZtHNh9xZ5SIWpFuBkIe23Hzc/kR/yJSDWGMQY2vJKJMtZ5/iEAsWhI+OORwvSHzcRpW/v8ECMQsgFr2oSkeozasu4efd0cexoxPC4HqYmAgzXmocXi5xiJH1+Hx+LZ9jWAlD5QrKmNjsxJexTs7lVu6nYlk0mZbjCWYukHI4XTidd4zSgs8c7SScyiWA3w0QCmLlQ/t78EvC7H0o5IN0F/N5PS8Hzt19gThsN4YI4+V3m38eeZo6qo3jxCet3JeI6ZgCtmvXyoGVmP8OyV7soDzoKePuHgKWnmtNW/95a1mZQE5K7t4olU1kznW4C9gLGFlzuGmGtPg/+WdzuLzzu/Lvn4Y7BOul14wxaWzJCeJN/mwEbiBJbpR5qWcXeFU4WM4c2dYukYZiJD25dKHSyjZfFMOzY9fkmbmtpuJUXYZU5bG8sLxgh28h1nU7WxaxICV2chghhyRc5bbPDsjs3xpOYWwBCZW2XLbV1sLyixvWmmUyaI+DU1cunxWwfUMg5EZOODNSQYYilb/1zIqYAcR8fe5r8PfdAs7zQYSeKe+L5R4HDTgjel86ZwGd+JO3OPUhujG1dwNiouJK3r5eknsNOlPnVBWS31HbPkc8oBbGVCDz2NOD17xRr5/xDgGf+JtOffdB5fp2MGIjprhS2mcfukM/Vvwded7zDDF7JNnHg0d7osFgV1LXg2o8QfYrbNV+ILMsg86ambO5nv7aDrDtGd2bsp40R/Vg6lum0L283QZapa9t10ZBCUt8/Q/3yMh7mWeR1/rr+5rV9eQ6xKVb8YFzisdRO1ZBUh5AEgGTCNKnnIiT1RWfMC2lJDHPzjMsiGXDW498GPPAn67Q1d8unPdP8hLfLv7Aka6yJOBd+Qz6v/rxYfW69VoTIkSebLh67kOzskc8924B1a8RKNDIoboADj5SyQHZGh8WtrWIeDzvRjO9s7QLe+VHglp+Y83fNEpeCkwUqWcRMTv1hosIpANMdb09yAmxWLa8bV8DtCuJ6j9pGTloqF4ukx7yRr8OYLTgF1pE5kRnYpoBPRz/d6HYsB/YDtfVA07R89Swe3NzBQZgcl5di3bPjR5CBEsZHRcA1Bth3sZ8KDqEjhe9Ebm2WuXjUqR4hqd9E4qot6fTwjUlHxmKSDBojaRjAgiXAG/4BePQ2yaje+or5e1vEGlt+KGvurIXm6DkvPAbMOcDMDG/tsG6C3pe//Mza3lP3Apdelb2ee2+UdhVZ4tQ2Tvj8Q0RIbn0tuy0VAxuXdSpMO71ajN2uzfKvtRP4w3/JtPZueSFQcaRRMVKIbE2PvF8MxGOV812px6QY1++7H+yWY98Gg6w0wDxBl7e1Ndgr9fa656Y9PDGuypcg52GEGMmxEdOzoQZm0PFaPpWOVW5uDV7T0a8/TvG842NSZi1XV/7YKOAyZoUz9s46vJAq9/PsRSLGLbPbhXvMqsmwtVk0i6Trl6qi8mMkFXoSTE61JfWs7LAPWq8TzaP+ZC54tZP5Ld2vk98FXPyfwD9+1pqkZB9FJ27mae1vedkq+uz7paY2u3SSH+ttJYzsb9D24SbnHCCfrz0j/dFv4m71MD0JIyQ95rVbSP94NbD1VfN73x7grl97tB/AvWQYwLb1MvpFoP5GIB+WKtcmg7j6DZ82/NbhNG/YbfSzSAZoL+fd6mGxHhsRb0GgkBLbfSXTZpzH3UE4Zk3wWZ9vZrvL8iOD6Qzk3pAZxj79cSq3pOrsBi13lNnHtnXF+aiy45VkGKW9QI0Y1q9TxYqRzMU9HdLdX6LFyIGqEpK6APTY7PZus0C3b5sh+xDmRIsra9tTSLrsh0TCGuvpWuw8Jo55i+myBsw4QEDc3HbO/Zz7MbK/1adS1gLNBy7N3ie6aG5oAg5cZn5//hHrTTx2K10AF5Ji2LZtg73AtlcdZ3VsMMj5NzEub/eT496iy3V1QeKnCuhi8izQHrHNOObLGuM3T9bWuBdPpaS
vXgX/A7UX9zkQ5X4ZsQ+G4Rw/HbaNUPMHEEr7dgLbN6SPkf3HHB9WnvGtQe4TebbWBbJI5uMF1qfN3t3i3SqFIu55pIqEpG6RTMowU9PnAE0t1uEFm1udkwWcGw3ZiVBKMmTbEdpI2ty0+sne2iWfTS3Rh+MKSn2jFDK3u0fe/3kzuSaDISWTjn2Lc1u/WmX9PjaCzH6/5PvAmf/ivJzK7H/je+QcePM/yve/3y+lJRSOxZZ98LImhLnBOGWF77WXh0hYRYqb68WtDp1T3VOXJpxxs3Tqf6e/6GWM7K4qxzYi3IyzhGQIF2HWs9Bl2bDdGh32H9Ep7Eomxq3Dtzpd03GsTw1oEKn/+XyY+lkobeuenHC4dnyWiUrmWOTShtNY7bYGh/rkXjXskOkb+lHl5UUJMW+gBiLgVF6r6DiI74H9kuzkNEBAvt+tC0gVCUmbRbKhCWhuEaEyrc19uaBtRvndc9kQ806f7b7+MBZJ/eI8/QJg4euA930uREdyIJEEzrnEOq1tOhwtdgaAdltco2Jgv/WiVaPf1DWIYHXbH2d8GDjrIhGoCUjiSiIp2eSP3G5dv/4ZBKcb7UtPAffcKCWGgrqt7BbJ+kZTjC1YYnasf6++csc/M8uNj0p8qnqjt4Q12fo1POBf1yzIflHzWOoyGjk+aN0skgHnC8Kky0M6KGrVetmfzG9+Itqn7Z2bAljJ4vDdp4LV0zQMwPAowB4Gp0vWN8cxYd2n9nUHqcUatL+BB0bI4dwLnWxjnz+PFkmnc9f+oup1LKamIo5SFtaqWwSLpOd8laMkq0hI2iySlt8c60sEaNPvDhfCrZZ1Y/S48O3B7p4xnwFiJPfvEreI3Z19zielbE+hUMMmKppa0ochSw2IVdINXUipm3xTiznNSXi3TU+7vdPnRmcPcNLp8veT9zisJMSFbxdkRkpiGZ++D/jjVdbEJq92lUVy2Rvkc3zUFHYrzpdySoAMLenYnkPbt/1C6oaqDH3L7LZ+OwmgLIK40qO6l6PcbD1elqK062jF8rquHW6xjm64HEVelkUmwhtPkF0d9MG5b4d1X9ktzkHaGeqXF7lAw9mFFEpxiopII2AVwYoX1+pczw3tb8/YUVsDuzanR35yiVlNTaVfTPQmSlB4hYl3rzCqSEh6xEhGthzm0SIZRAD6rcfPIqkEdWrKo9hyAbG70L36XlMLvOfTzvGbG16Qt9zVvweeSotAvbyHLiodSa/3iJPl740vaG5DQ4LidTciIKJu/07n5gxDfr/pKqlVuWuL+Qa+O/0Q8nLNpFLAK0+bRcg7e6wvQ4mkZG+3pMt79Npdxh68+nf5fPgvkmCjW9xyKTmSRUgRYV9ssM+nOG9Ai6TT8GxG1h/ebepMTniPDew0RKsTcRgLneYPtVwAJRno2BnOVsugfUml5J60b4fsX93DkFl/jsk2Wa7RHF8wAi0b5CXLrbmQ/fMb+cd/hR7f3fZdQPVp75vXiDTjo5JQ6JSIVAo6LUwIQAVTReV/XMbDlgkhGtLOjrCubc+bcJis7RBC0pMSzAJr77a5nQy4WnYXLJF/j9wq7tnaOqmpeN9vgXVPiDVD0aTVvgiUyQ4pPbRgCbDpReDWnwNds8VKa0/oefJeWScA/OOlwN5twCHHmuLVSAHPPCAF39c/B7zxHHPZkQH5fcsr2dnjihceA+74H/N7c6u49pVw7egWwaJqS2btvwBMTYrI0oVWlLijoEIjzIMxNeUu0n0WzTrWbuP8uuEXxK+fY04kawAEyGp1ejjnVD4lwlMs0KELMFOuluhtrwU493xct751JW3tT4yJ+NVjwqMKgVTKloyUvofFLSwcbo3xtW0/H21fwhaXj7rtKlxjdMhmaDAiNOpn1Q/QXu9uuVd2zUpfn0H74DBf3IMtFJHqtEhmubYd5m9MW61qbFrb72SzeLYLZJF0KpEDyE3RUzRF6lh+OfNjUufxrefL9yDX10nvkJJFev1E+wO+2c8K6cLBR8n
ntteA5x4E1j5i/d1ImSISAG78PnD3DcDd12vzGNaxY3WXXyplxj727TG3V5+/d5d1ne3dwAcuM793zZJPVR+zb7fZhlf8pf6WPzKYLZCjuI/0DHlLW7Z2HZMHXNoMneHq8bLnlW3sZITZ5iMUvUgkJGRE1UgNyoN/Bv77S7KsW9/8CGQlylrIYXm3dgO24znNbXEvERn1gZsWP7u2iBiwb8fOTRJfHaqcjwv2lx7HLudwTvvicGMP+yyKkmwTtDpFLCXECrSMnYH98sKhXkjdBnwIu8Ly1pFVJCTtrkALDrUhO2aIC9GrZmGsFkn3Lvm2a/8+c4EkEdkLb4daSZGYMQ/42Le1sj9+b3Iah7/evV0ltoKg78+Dj7aeL/pY3IB7oP1LT5p/p1LmSDpAdsarXfRufAG48nPA43fKd3sgescMqSxw3App9+QzZXpGSO6Rm53TeLU6qpC54tHbrd+jWCSH+rxdvUDamJBHV6Kl37ZrPWhik2ozp2zQBPDfl0kM6t7tCLx9j94m+/GPenH9iPsmsmvbBRWv5tlMABGq/z01KSEeTgW53bqYZZB0erm2zTQyKMOxDuzPQRAHwDUZKQfXdmhxnqMyybLE+7i2hwe8rfc56P94G4y0svTPtnN2eMCnIkfwpgNRwmKzeoRk2BjJZFIezHaLZJiC1PkqIJolJG2/19T6WyOd2ikHUlPusXIt7cD7v+D8m9PIFU7Y90lzK3Da+8zvvbZsT7e+6C8udgvc7i3WeW++xmpBuu06+f63m2SS3cqnYjzf8A9S0kiVSGrtMPsYJDHGnuVrt8aEEl0ajkLSdlcNVYbH5Q462Af8+rsSd6rPolsds/K0nIrL5+kOrVt8AyUq2bAU/9b6ONArxyo1JQ8z+8uNPr9X/JnLIvK3xz4JIvjC0LtbXpac4uCyiGJpTc+un88FTdYwLB8OXwKIc59pvkMahrzX26/9rPa0CZMTAc7vuC2Sdvd6oIVy+tmyvj3bZJstFTIg03ZtSc/rpyRj7n8RqR4hqWMPgA8jqHxrTHpkbYc6ERLW8ZRd1+H0PUemB7TetXW5x/XlkymPmLNZC4HFR2RPn5fD6DxLTwHecq783WdzM6skJb2oOSA34v3peY1UtrhqaAIOOcb8PqiJAT254LVns8v+6OjnrrJI+pXoUagRflZ+SD63vCxJPYp8PmydRKqbC9CtH2sfBXZsAO78X9OdD3gn5aj1zl4EzJwfpAPR2fSi+bdfLGWmC7Y+PHQL8OMvWktQ9e6S82lgvzy4nEabUc3oLwdxWyddF3XyInjMPxVieDu3dpxerr1qDeYyJnxkYrK4R2kv7CMi6/q0WVP1VXvdjx0pkiDyPXd8laTPz2nL7NhwsEoDXi8WZUb1CEn9ZqW7GQGEusrsgsGLXF3bnTPNki5e7cZtWWwMGE9YiHO/cVr4m6CTG9tpPzqSgOMK1fJbX7MKPZXlPHtR9rnxp6vl0zCAcZuQXPYGU8ABwE7tga+Xwfjj1cDmdIzd/EOssZF2OtKjA/Xt8a7jBqTFbdrSOU/LfL/5v81knVzcumMjZvtA9rniJCTHR4G7f2MVYI4LpxnVBPbax8x29cLtFndqymwrWSOjgFxzGfD0/c7t5yokdmww/15zt5R58mtTj40FJJFsuN8ce17HK6s+UuhAXFYjh2V3brSeTxMuIyf5WjsD9svJghbUIjk5kV0OLQiTE94i2jPuMIK1TLe8W+KCnRYIGyPpJe5DWlPt80R9ocm38LJsh8P+8t1O+73Gb9nyFo861SMkWzslqzeTbaURRohNaxch6ht/6NBu0GxhmSAfzeli6XqtyJyyzjVcizgHbS+PF0JNjRyr1s7wy+rDLc6YB3zwy+KirqmVTy9cdCRmL5bj3r9XMsIVg2khOa0deOv7rSJ2305xu06OA2OaQDjlLOD17xLr9uHLZdrN10jmNuBeT+3ks2REJjeaW0V4G4YZ8J9KAdd/D/jNf1pfpsbHzOM/rR0
48e3mb1tels9cxMSuzbbt8LAOKZ59EPj7auB3V1gXc+uHXoLpwT/J9k2Muc+vC69EUmIQh/qyxyf3Y2Is2JBs9ljYNffC95pxi7nt35u9XV6X6f6d1mQdwN8dGNS17YfborpI7N1lhnjo2xHIve2wjqx7lpOgCygk9+2Q88Kz3JSNkSGxOgetKRk6McPh9z1btfPQx7Udp0Uyq2suv7nu46jnVlQxmsb+khZ0XZlJIcS+77Xmc7yC9qlEqB4hWVcvYsBpFJswF1kyKe5TXay4oW5uXbNECHa5jEDjRV09MOcAa9KPX4xkEDpn5m5x8XrI50pNnRyrKNbW150gyTpnfwL44JekeHmyRo5/mKQbnaYW4LAT5G/9Ab3lJflsnwEsOQ644KvW5e78XykPpFzbbz0fOGGlGUOpssIB4KE/u4tIwFoL04lEwoyXVA+0wV55wG17DXjlKXNeFXdZUycvWCefCRz1pvSy6Xin1JRYJwOP3BECR4ukZrUddrEqKvbvsiY07d4KPH6HtyVA/abqq4YRCjo7NmaLNDsT49nzjA75Xy96iIPO5ET48Z3ttWEHe7Pjcy3EdS27CQvb90y4R4hr3Csb3c8DFNS1HUpwpMmUqRmGa0hTTjUHXWZQ9wvLz077MxHuXh0mRtJtX+ohNp7ZzVGw+9cD4FpCLL2//EbXCWOR9LLa7t8p3grLPg5i1fWfpVhUj5D0JM9JMdPaRAw2hHCL69TU2m6SMVgkW9rhemaGskgW4uwOuX119SLYDrDFSvoVaPdblxqWUVnCRgal+DkAHPMWcx0f/nfrck/dZz40VS20jnRbiw4359u3wzvjubHZ/TeFim9VIkYvMP7ac8DTq6XAr3I76212p1909qatQgP7ZVv3bA1383cafs6+vLKkTE0Bt14rGep6Itsjt2oubm3ZVAr466+AX347ex1bX/Xup7pxq/XYrYphTmU/F6wSFjW1Znzuzk3Z/duxUSyxSrzc+P/c2/zVd0J00IWxEQ8Lkv53HiySeb1X2K7trGQHxFxg3756PZnTbSav9UdwbVum20VMjtsaJgPbLSnPLXu9f59/ZQc/4nz0JCCC3O/lMEy1Ai+L82CfvBjGnbRWRCgkgXhjDBMuX8Kuw54t7tVW1P7n/GZYwq9IUUlk/stGudmV1ah/HwBDXMN6glKHLQFp26vyDzDjKNVnMgl8+HL5e2zUvQ5jfaOZHOOFipNc/7y4s5+4y/zthceAe34j09V6dFe/Gjpy+wbpryUJIuZjrR4+654AXnxcMtT1mMqn0y7usWHrqre8DDz3kJmNXNcArHh/ut/rvW/gKu5LWYN1669vpnlIlJBsnw6866Py98A+YPNLsu0P3QK8/DTwl5+K1fqWn1otjlnXP4CXnwo3apErHqokUv3JgO3nHEoDb5ehl7fYrXZpbATYBi+hEcra5de2f1d88XRtG9avnqLT4XxKTcVQrzPG+1EqJR6brFWEDD+wH1+nY6LvV8+hjR1XEHL+wkEhCSBWi6R+rMOUCrJjj8HMh9G0UK7tIJY0O0UrTZRw39et6YSb/rSQVA/+jhnIWuisi4BjT5MSPTrquFpG0OmSc2VqQosRS1iTwjp7gu0TZencvUUsdLr7V0eJYcsY5Gm3+NgwcMN/WscYz/ke5uJm1B8oTsN09u+znmN299N7P21u874d2ryJ7NXqFkmVYamvR83cvy+dDOKzSV4MpreltUteANSLQ+9u4Jm/icX1z/9tjou+Ya0UIVd84DJZrqFJwiXmHCj7zJJ0E7GDbpa58VFJCBoZNPfj84+IxTQOCl5yx8O17blohH56eYwyBbvDN2u2EeYHw2GyhwB3bDaMaztXt2yEe72BPL8YOK0zhNh3E6F6hrvb/Xz/LucawKWrIykkATi4RHJBWz6qGOqekz36TilaJIOutr7Jf55yIGOR7BVRogtJ+744cKkMhXjMW2D5UYkenZoaMytcZfp2dANn/Ys5T+fMYH10at+
JB26WTxVTCWQL/s0vaV88zpW/3w+88nfn3/r2prNZbdOVFUOPW3JyLfXtsa5bF5s1dSJ+O2eY68q48lMizvQbt3rwJJPp4tSaFVKV0Nm7HfjpV2zFwCOgzo3WTrk+1XHp3Q08/7D/8tNni4D88OXy95J0qainV+fWL8D7AWwY8jJjGCKm7/gfsZja66dGIUiCki++Pl7za1AXbBwEugdrHdq12Xr+xWmRdJrfQPYIWU5MTgAvPSXnQP8+7eVMa08V43Zb/t7fShuWfnn0P/Djy2YV9RtwIVdSU9ZzNrRr20FYTrokIGZeNgy5d0SJ0y0iFJKA7USOwUqXabcAVrWc1hFgW2vqJLlIp71bprdNDyZGy6rwuVvaNtJu4IQ8iEcGTatT+3T3ZZJJq0CrrdPWo6GEhhKS9Y1Ai5axPmNesO53zAiW6T7YKxbPlR+0Ttddqnu2ygNjymM0k307ZEjIm68B7vsd8Itvitt2/05xkf/sq1LCyC34XH8gOdW/7NtjXbdKkGmfAbz7ExIP29IJdM+Vm/6atBX10Ttk5J57teErlSUukcwOvN+Vfii9+Lh8Khd0WLa8LMJaWXzVsVDH908/Dv4AnNZmWowPWCqfm14Ua/HUZPjkG0WgF0ibxXbH+mjreuRW4PG/RlvWCSPzn9uP2t+2+WIRsi4EiZHU93tqSrwGe7al7yNhhaLHdLd5gySX3f4L4JafANd+Q9y9Q/3ZYR9Tky61Sw1g9R/Ean7LT4Df/wixmtH0prySEuNi22vi1cms3+8Y6X+nnDfd8mKrTd+bFuyeNTlL1yRJIQnAau0LG7dgI1/uG4vnxCvxJgRB+2qvu9nWBcxZ7BzHFRtFEp9eq62pMTOnh/u1jO1u7+UWLPFfj4qr1IVk10wphH7qu4Gj3ujfd0COyUX/Afzz/7FOP/Xs7HmXniIvAzrnfBJYeJj8PdgL/PgL4oJ1Yv3zwJ1a6Zwn7xGL3iO3AtdeDlz/XZmeVRcSpkgb1R5STnGKfXudheQRJ5n7NZEwh9R8OW0JefBms08ZDHP+/TbrzI50qR79nFZJR244XT83/j8R1k/dJ9+758rnYSch8Dl9wsrsae3TpfYoIPGrV3w6esJCoGLcsArJrQ4xZH7075OXir/90bsmYygXpZPb1nk2Vyt4PvC8J7tY5QxDXkj378xdI+Rq0QTEI6CHwvz862lBlAr2rNi0Tsp3KTa+YIYBeS7vcV3EsV+2vAL85efA2keiPZ8Dxw17uLbVd9dapobUvw1SyLwEoZAE5CbQM1+sPvZRb0ITg2vbrXxD0FmDEuiaiuEGVVYWSR9U+aiXnzbfVg84Ap4HYsX7gSNeD5z3eW2ibX6VEa4EjsruPuqNwPFvDTCikkZDk1jCdFHUs8BM6lGc+u7sZecdDJxzCTBXK1L+2rPON+CbrpSYuiDYLQiqvTGX5CLF9tdgOceU61rVV1UWXiUqN62zFowHzEB6fRPsbvTNL0kJoYduMafZxx63Y98nTsJucTor/4AjgPP+FTj748A7LoTl+OtivqkFOOkdzut78z+a54VhmNbTsNgtrRPjIrh1S+nUpDXjf9urCMTUlHms9eHjvEZnChW7F3S6g2sxDte2YUjS2i++KVUQMgQQQ56bmatF0ratYfbp6LC4ou0vfBNj8jIRtC173VTA+SUyDoK+fGx+Cbjx+5LUd/v/RLNkqvMmlGtbG/xAJjgtkL0e+73LY/ZSgkJS0dAULSnETiECysNaJGcvcomxi+PG6jI9SMF2L9yy3/NNwiPZBjAFzCO3yufcg4CDjvJeprEZeNsH5Di4Ya9LGmYEJTuGIduhtzF9trhXj3urfD/i9cDCQ937veI8a/ykXfC53fA++QNgybHZ01UYQKaPaSuHn1Vt3ZPWwuPKIjmtXT5VQlvXLBFhk+PZcYR3/FKtNL1MIi1QARy4TL5vfEHc8zr3/0HiLBU7NlqzO+3JQXZ3edt064hKsxcDBxwJHHq89YV14ev
Mv3vmaeEPNpJJ62hId9+QXeNz7aPAY3d434fsD+E7/kfCEm75ifnbcL+44RV7tkpW/F//N/tYKnZvAX7yZbFGb1pnLWXkFlMHhLxnOpkaveZNE4c1cnQYuPXnwG3XieX96fvM3/R7suu6vI6JgwVrw1qJlUtNudc4DJrI4/X7X38lx/6e38j3ZW8w798DveZ8uzYDV3/eWglCR50Xx78NOPwk+VuNyhVZBYVw6Tth9yqocm2huhBFSNqPp9M89uXh87JTukqSQjJu4hCSvropZFmh2nrncbtb05YQv9FePHHZXkubOQrBghs0PVZoFw+ZQvEhO2mfff4h1hCCXBKU1Dmot6EsqSefCXzoK1Jn07EjaabPlvka0i9X+2yuYH1MbkXXLLOGZ6b9NPbakkYqPQpN+sb5+nfJKD/vvjh7Pn2c6oyQVCM+pW9hiYTpRr7/D/KpHoaqOHvGvTRlZiIf9UZJjLL3UZ2/d/xPOgHhSeDX/wHc8H+BH35KHr524Wi3ch57Glz3r24ZOe40sVK2dDhbiXUOWgaceLr5/e/a8I6TExLj9sCfrKMv2bHfo157Vj779gK3/UJCFja/nL3M9d8DnntQBKwTT9wlArR/L/C7H1p/G4lLSLo2YjMA2b7HISTv/F9g3Rrz+8S47Ku924Pdh8Ns5/MPS4zvn39qDhDQtxf4yVfk+JqN2j5dV+48eaA3+1pu6zKvHb1A/u9/JK54dX3ZUffGae2m4UJ5WKJYYz1LeQXcl/YXmNuv87b6ORHUImnpnz0cwOE4Tdrd2GFekkoLCsm4UVbNXEr/OJEIKR6D0NIuiTRRR3sBgp33kbpbxPI/WZOSppvYvq86erLnj7Ke+kbg0OPM71GL1wPIHJQ3vltc1W893zxnamrE0hj0HFIWtf1aIszwgBQRt3POJ+WzvtGMWVT07RWLzu9/JBbC8THTGpmskSEaV37QdAXr7E2ve2zEfDAoIalfZ6qguuJN75FPe2LA+hdEkE5rB+YfbApQRV2DhAFMa5P5rvi01c09NSm1LF9cY11OWS8XLJGM/eVnZG8LYK0J2jNfHrqHHg987NveQ2AqXv9O80GvJ8Hs0katefBm95qkukVycsL6YH3xcQlZeOjP7ut/9e/mg1Jf1h53qrP5JVnmTz+WcyfLDRgQu0AENKu2h/DINTljYtyMv1VsfUX21a9WOS+TERMBXM367y8+Li8qALBxLbA7XSLr7hukFuljd5jWYj3b19KWz/q2vSahDNddnv1bqyYk77rBFFJ+BbTVS15Lu7m8Km/l1J9cyiIFPWdUnxYeKp+vPmPuWycG9gG/+b71hSGKRdJeYstpW53ihgtaIis+KCTjprZOXFhzDgi/bF1D2iUZwhqVi6hMJMx1RsbhxJ8xD7bsoBzaLwHmHWS6tN/0XquYbO2QzzjE/ZGnmG2pLN0oqENy0FHAP342W9SFoT1ttd6tjSHs5No66R3Z2eJnXWT+3bdHHsQbXxBry5q7TQteQ5N1/9nDIvZsk9jFKz+XvqkngKa0xVAv6jtdE4QHHGm6jIcH0mIjvWPWPSafS0+R5e1hBe/9jIg9PRzEaWjB57T6ihPjEjcLyJjox57m7qLuniOxkgcdZRYrD0MiAbwzvdz2DWKxHdgH3PA9c56+veKGdUJ3n7llfuv1PZ0qBuzdIfUwr/iMWOUA06KrkrUSCdNivOZuSXp49RkRSrplPxfX9nMPyXnx3ENmv/90TXqUI20xN1EdlN/8p/l3rS1eeWoy2yW5Zxvwo0uBh/8SUChpM9ljc5VFUoVjAOZ+dNp3g71myMPwgFxzev+ee1gs6//7HWcx09ZlxuIO90t8rL0cjdN6MxbJNvMa7t+TtXnZbQQ8/hYLc1CLZFpIHnyM6T169Rn5HBmS81I/Fx+5XV4Q/vIzbV0u45krxkezkwLHhl3Oa49+T47LaGJlCIVkPqitc6gDGYCZC4C5B7ov29gsNzFL/FwYARNk3hwEUVOLiGh
7rKmfyJo+OzszvGgGSduK1UNUTW7tkPp+Cw+T4+RkQfNfifM+mb0I+IdPAuf+q2TFRybGt9qZC+VzYzpofnzUdKce/SbTgjbvIPuS4jJWQ0fu2mxmpAOSQfn7K+Rvu6Xj3EvFmnfKWfL9sdvNmFRAMudVjGFS24+vOy4dO5kQq11Dk3mt9O0xd4uy3CmXtm5Vbm41R/hRBeh1kjXiggfkgaMsXTs3SumOae1muSy38z6RkBfNMz8mGf9RmDEXQEL23fXfE5enHT2+U0d/wOm1UM+6KLuA/vs/b1pzAHN/bnk57eI2xCr32x+ax/H0C+T4nfd5YLb2Qr32UfNvNQpR2MSQvr3WMjbKuvTnn8jnU/eJxfQ3/ylu28duFzd/LsPRTYxZhfVH/j17nlFb+y88JsLgkVuDJazov6v4X8XwgFh+7WLu1mu1sA9bPKg6rr/9gXgBXtRCHdwGKVB0zwEOPsr8vuXl7NJBd/yPVZz37jZfJFo6zBfsXg+LpK+l1vab7goOa5Gc1m56KNSgD3f9Glj9e6t3Rd/HKoNanaNOJY8AGZXq518DNjxvTpuatLYVxPrqFntsb6MEyWf9FhKWRAKeCkq54PS3yLA60u9cDCvg9JNbT85wa6epRdzEuiWkuVWmWYbNKhErZn1D9rREAjj7IrlRZNyiMfQ3kQAOXpZ7CYgwNxw/ka8s6xvTomTvDjn/mtvEOptKSazg9NnOyysr5cA+q+tzzzbn+evqRVwtP0OSbJ68JzvOSY+/1V3brV3Av6ySthcfIets7ZQYtp2bzMxu9cBTfe6eI8vW1IiYUhnyp54FvKCJnw99RazRiYQ8jCfGZJu6Zkp5EUDWkdmnbvs2hnOlrl62bWCfdXpzq0xXGdjKmjI+Zr7gWaxTaUteS4cprJe9QSw1sxYCsxZZRcjRbwYevc1MzFCopIqmFumDcusft8LM7Nddw3/+ibxkfOCycOfr+Kh3sea9282/b/ovc91LjnOePwgqtAIwt89O/36zNBhgtZS//BRw7Fu812G3ZgFyT1TZ8727nffT6t9L3KyeFKO3qfbH2kckjAPIju1VdM8FTjlTXhYOOFKO3RN3yT7UR8AC5KWgcZqIs5EhKRUEyPXRNt0UfeMjYmVzus8YhvelMDzo/gLgd85MTsj+00NhlHdFjVylzkd94AW9nz/9itz/zvoXMZC4oV7Y/ng18CktNtgvRrKCoEWynEgkHMRm3BZJOzGc+PrFmUxmuxLLAtu+q6m13lxDHwanBRLxvHXG+eaq3EH9++SBod7K1ZCN9phLe4yfEpIb1prxUnbR+c5/1r5o+6WtS4SGHd2KZy8E3dCcHhkqPV2JxzV3y+fkhOk+6kq7ruvqgU98D/inr1v71tJhWlRVv5NJWadKXvuf/yMxcso9dsTrHTfFQlzvSE6C6txLxYqoVjI0IBaTqz8vYgIwrTlTU8AL6RJC+ohIdfXASacDi9Iuan1AAq/KA4B4VHQOXAq8/UPO/X3yHsmEjzrUnX6eqxcK/QVVL001PgrAkILZzzwQYh0pSbJSvOdTcvzfeI5kJqvwBz0pBbAmF22zudmNlMQo6oJe/1uJHyVehgecR30CJExk5ybnrG692oEKs5ic0OIWNVq7gA99WcItFPPT1866J51drk/ek7aGa0N7nvh2M2RK7ZvVv3e+JxmGd1yt0wAF+rJOrP69lGa68l9F2GWGKu2Q+3VTCwDD+sKho58/I4PiBn/wFvfHoB4jPDlh2x7D4U8KSVKKhInN85q1a5bcjO2JB37nfWjRErC/eS12HoaE5SP7Zz/rk3ezWYTdn21dDoldMd6s6htNMfjobcBtaTeQ21CMKrZKoUbnGRsxLS1LTzF/P+pNwCHHmN/t+6WlQ8TkiacDZ39CainqJXAsaCZ3tU9UKaLXnpN9qyx49Y1WF2JtrXNIyWEnyuecA6zXml5nU1lXO3us2xamHmwUZi3IntbRI9uuhO5QnwxfaaSkjh5gipa
hXlPEvfE97us55FiphXrBV61eByecYntneCQQ/fYH3u15oT/0a2rlQa6Eg92CNtQHbFwHrLlLXJpBM3d1d+NxK8xQl2NPk/NQuXDtQku3ot/3O/l9fFS29/99UmIUH9ZqluoJHWpZdS8eHjArDxx2EvCJ/wtcepVYDQ1DxJMTD//F/FtZLDe9iKz7w1vOBT7wRflbP8cXHCrW16E+cdUDsp8PPd6cZ/t6c1+esBJ43Qnmb8oq7TpWu5FdBSMoTi8fhiEvjHu3S5jJ+udkvppaM2ZTveg+cadze04xw2rIUCf0WquA1brpZJGMcmve/JLsw7497i8URYZCshxJuH4Js6CVaW1iTYijlmaY9WawXWHFEpKuQs5nG0LrA5cFoghze7HyUG0E6Liy3OmjxAQd09ueSY0EcOgJZsLC0W/y70/PfODkd0lR76PfZLoQ7db5kUEzZlEJSWUVGdgnrmh1I+6eYxXgbrusZz7wT18D/uES6/Tj35o976zF1v44veS5WqMj8NbzxSr2/s+LGF/xfnOdKqvdbtUxDNlHA/vMuMKOGd7XfTIpAnn6bOe4UcXydzocT2SfK0tPkVqDgMTf6WWZbr3WWRj17zPjdBW6UJocTxdNN+RlRllTFYN91hhdJf737fSucam7jJe/M/t39aK03xY/Zy/A/rebxCq/+SVkTrZHbzettGofjA6Z57AS7S8+bsYIz1poHqtjT5PPx+903ga9rM/ebSKW1BCiy94g7v7uuXIOKeGtn7M1NVJnFTDdt8veIKWq7C+M09qBpada66YuOkza27XZah1VRPGcbH5JYrSdyjm5JVQla8xQg1mL5NMeJzrQKxZ69UJw4deBMz4sf2fGG3dg0CYk9SL8jnUlI2zzb38gpae2vhJdeOcZCsmyJOH4py9+Ra4DWTdt86h4Ia+RVyyeeJd11NgyXC0Zr3mIl+yeaz5svQi86jgEfcjEA8X02dZtidt74lQeyu7CdKO+MT2SS5rGZhGC514KnPdvuYU5dM/J3pXKOqLOs8Zp5nm/f5c5FOK8g63LuRV8BmT77ddO50yrVRKQh5vvuW6b5pWQ40fbdLGKzVok4QG6NVQJHBUDqbj685KAcvcNZhxl47Tgno1EQgTs3IMkGWrFeTL99e8Clr/DtNDp6PeG+kaxgL3+nQAS5ugpm14UN+mLj4tVSbcYqoSi318h1lUA2LY+e9v+9kf5XHR4OhlJY6jfXBaQ5KiBXuCX3xJXqBJ0u7dY406Vy3rOgc73OCX29FFdDMMqWgERKE7xfqoCgpGS/XDX9fJ9WrvzdaeS3wA5h2cvFhFtF0b2WNK+PRLrql4sDl8uQulDX7YlOtrOg0Wvs35X9xk9jvvCb4jwOnCpdZSmxmYZUQuwFvJXRLlP3XSVnLvqpfbVZ6Siw9iIewWCiTFktmvxEc7z/PQrcl1MTYpI7ugxr+/e3e5jYA/Z1jmwXxK+br3W2eodZptHBmXccoU+8lSJQSFZ9oQQMF0zxcSv34xypblVrDbqhuGIRx/VA6y2zupWdyudoqxjudI0TW7U9pI1WVe6nyUyosh1WsxAeCGZgOwrlQSSaSiHfthRWcqK150QfOxvwBp32Jy2fMxc4B3A7sW0NnmINk6D6waofZFImDGVe7ebD/gFhzouFgp7ke3uue79sffLnOA2Y8ROpVlwiHxusRUWHx2SB7E+vF/f3nCrO/JkeRE4YaWUrPrnb0psHADXigRHvF7O0/d8WqxDNbWm6Ny5Uayj+oNSF10vP2VaYm6+RiyWytU79yCzvJVafuHrrNnigNQF1MXe3u3Azg0iHEYGgR9/UbK7f/lt4Pf/Zc6nxImTQAZkJCK1DYpn/mZazA5Lj/AyPKBZ5RISogFIaR5Afrv+e2YCyPFvBeYeYI05nrXIXB8g+1m1v2Et8Phfpb7k9vXmC1N9o+lifvQ2cz+6eRQSNg+HPSxBhYPoFsHOHlmPKiWnJyOpa3//ThF7T94rYlnVFQ3
DUL+ZxKPc5XddL9baW37inkS05Djz/J6z2Ho/0y2oysLdMUO2Y1q7bJeRMo/TTVfJOdK/V15M7JbWFx4D7r1RXoj0LO4oFsm7rgfu/a35/W9/tCWklg6lEohGohJGyNTU5p7okvUcTGS7OdS63JZxa7Bpmjz0U1PZ5YAU09rjjRNp7xZBouoEunq2IwrKlg6XN2VXJem9niDtxF0mome+1Du89VrghLdLIkZdvWRBOgXu29HL69iH9ItCwkWsWObR3pG7ZsrxXf17M1lIJeGEWzEsx+eYt8jNXhWTP8Xm+gxybSYSzofcbbpnW0m5FkeHJJNXfwh5cfBRiCxcEwmrFSqRkH4YNtfjivdLgop+r2jvFgvOY3dkD5W5a7P5kmd/WK+5G1iWfpGZd7AIRz0Or326WMzP+aQkE619REoCqf4ZhtS01IfSnBw3M+/37RDxkEiark434aViJvfvErE4MmiO/NM9V2Jr1z4iLx3KZf2Gs81yWdvXA7/4P8BBS60jK805UNZ/5r9IRvyS45xfrg88Erj7enlhUCMUqTqJgNx/Dj1eRJJ+32xqcRZyaoQoZUFs7YDlvFdC8shTgGcfkPNMXxaQl9qxEdle9eK/f7eMdqTE/jN/E2uucqlPjMv8ax8RQXjY8uzELr3Sw8B+sfgpYbzxxezQh4WHAUuOSQ9hq53fujHiwKViQdRR1QsSCYkPfu5B4IGbJaN9ffoF7KdftS6z6LDsclt7t5uu9EyMZIiL2l4Af2pSRjs66k3WIVZLAArJciTh8ncpUVMrAiSRBFLaqBL2h6u9//obYiFIJGzxYTGLMKcEDlfPdq7rVjf8mGMkARGQi4/QHma2+EQv6urNB7jviE8hwyuCuIZPPF0sCuohVpMeNCDstWPTkTjyZBHJsxfJi09Ds3/8chARbO9/4P4l5GVsdCg7lnHGPBHTJ6wU0aGEwvIzpJh8XKgi5PYYtmQy+4Vz+hxrX448Waw8I4NiWfvE/5VrUyW8dM8xxYSK/+vqEVE2a6FphVMCdOHr5FirbPW2LhG0f/gv06LpxvCgWL5VncZOF09IU4sIrz1bxdWuFxN/y7lmgtnQADA6YvZPb2/vtuwxodULf1tXtkdAR1VMcCuJtH+ns/EgmQCcRo1MJKxGgJpaa5kp5dp+4z+IxVRPrtK9ALV1InzUdm5/Lds1+8ergXM/J6LzusutVui//00SinT0TOuxEXMAADsLDgUOOVrErtN1pF8bJ58ppbGef1i+zzkAOE6Lfz7mzcDzD8k55DTMsOLQ4x2EpBafrCzf9nvz8IAIenvtULfKAsMDIjD1UdBKALq2y5JyUJJIF4RugHcfgzxU4+pQBHwf6DlYcvJFPgrXGobVIhK2/x/8soi3t33Ae75AOlKbyW2ceH2e7jliDVPMXpTelhyPQSIplk1lPTcAf5EbcJ2RQya0W7oqu3P6hVL39O0fkqQl/aG1/Ix4E+wSyQAvC2kOsmV4H3y0Ndt67SNS+FoJwePealrAlCWqMx1HqFtF9b/nHCDu1eZW4L2fDR5Occ0XJX5RiVy9QLcdVbD9gT9Za/zOnG+OwLRzYzoZCCKg3UJ3FE5eHidq6rKtpa9/pxnfd/Sb5VM//1d+CKFCKlq0+Gt17tQ3iltdP3eczlkV56lcwzrb18ux3fSic/yofVhL+zyqgoSd0/9JEn/0/uj3xEOOlt9Pv1C2Y+UHJcbzxLdLUp1+bLrnAIenS3o9dofz+gDn2Mt9mvAdHZaXIN0jk0pJfO41l0kWubIQv/i4VBZQHHGyxKEuPVW+u2bBFw9aJMuRuMfxLib5FFRRCJu1rfofd6Z0FPJokAwdO2qne44k18SBJbO0Vqwe9vgo+zVy8DEyxjcQcTQiINsk6YB+HtQ63F6zQkO81hUBfd8cdpJst4p5U/F0Bx8lVg1leQoyNnNQVJ1NhSqq7cS8Q0Q4KoEw5wBrGMh9v7PO39IhrkI9aUZZ2/RhC6e1mg/sZBL4wJf
kId01S+Ls6htNC96Jbxf3d79DiMb9f5BPVbDeLSHLXjJt6akieOsbnV90VNzgOZ+STNxj3iLCVe2nUMXTDck2VrzuBLEwn/B2sZCppLjj3iahBAuWSL/CJHnNXChDcTY0BX/pUO20TxdXsnKrt3UBZ3xELNF/+6NYHlWtWjt7t1tjRO3hD04cfLRzEqVepzNZYyaJKTpninXSiWWnWodDffM/AvffZCbgtE8XT8Dr3yVj1C85TuJtd/tZvfvNc//+m+SlYNkbrCEp9Y3A286Xv496o5R8Wh6jByEmKCTLEd1dqi6QZI24k+zjwOZMzNatLNd2iQlJN9y6qcpK2MfaLQqqk3lQkvbtC+qiDUuUygGO4QO2eeZqyRd63cowBNCRGetkXT1Q3wTAVh7E3vcwMbmJpH+Cgn05p0zjJceJ+NALjU85+TkjkEgGHx5WjSS0+vdiRatvFMFmd/MqWjpEfOp0zpQHsr6dtfUANNFXUwOgJu12T4gFTyUtHHSUhD5c8Wn53jgtWzDqFk4ndIvggUtNkZJIAO1dMm3rqyKETjrdPEYLDzWtmWddJLGgBy1LDwHqIcB1DEPcr/f9TpZ5/btkejIp5bIUyYQ1ntH1xdhh2pveK6ERBy0Lcc2n56utE2GtXgrmHChW4fZu4MGbxVLrlm29e4tVSI6nheTSU8Qq5/Ty7pYDkMv9eeFh8pKz7TWx8B79JqlP27tbaoGqYUVPWCkvqR0zREgO7E9XAdgvsbyq8sPEeDq+03ZveOZvYsUeGZQwmRXnWc+t7jnyr2RqLJuUXo+IPxaTffoB0DNfTti8xxiWgPBr7czj4PYhhbN6aEYdnaMQqPjEXNDHuQW8T4MZc92L+Nr70jbdag1ys7brVo0oMYa7twLv/Ki4RCNnbAdYb32DuM5ratN9sO2DMGV27DS3iuvULR5OJboEafvwk8zvhiFte9VTDEoikbbeDYso0129Tsw5wGqpPv0CGSlI55i3iBDo7MkWBM2tIiSXHCv1BVu73B+0qu6oHr/ZPVeE5ge/LMkum160JuEA5tB6bugP+4V6/cr08TjrIvmamrIOnahjr3s5a6HEFPoVTh8fBd58rjnEpdu226/FMJbwae1mZnxQ9PP34GPE4paaMq2Pakz73VvdayPu2gLozgNlkexZAPzTCkkg2rNN+qeGcO1wEZK5GEQSAP7xUinFo9zLza3y75xPmUacZNLcvvYZQN9uyfAGJDnstHPl75t/LOWr9HADQCyw99wofx9whDmYQtam5CF0KUcoJMsddWOtq4+vNE7cWCwUMVgkO2bIG5tfwHwUXK9Rl36qB4NXTFNYIecolIOYwyKsP2i/ssSLx3FrnCYuSJWoYFnMltHb2GwTkj772WkeR+udw7RDjjb74LWuXInDKxA12cZpuWTSxyJjyDWl4hP3bhdBZh/HOwjJpBzT2YtF1DidA170zJds69//SL5PawPepI26Y89WzQyFeaiUJOqcKQ/4vj0O4ik9b2uXmbSj2psxV/7NOUDO32cfMC1GrV3W011P+gGs7mu9vqr9WLiJSEcCWvwnJ2Qb/KzsWQW8Q7i24XHu1NaLwPeK+WztAD72bbECq2EXAYkV1d2/S46TzPPWTsmcV5U0FEpINjTKOlV92mcf1IRkwIESQmHI+lQBeDs1tdn7d9ZCEZKKLekRb0aHzezyZ9MJNc1t4sno221aylUheMfuUEiSuMm3SzWOc9bJ1VVXLyb+5gBFwV1L5RQQ19j09A8NTaYbJqvWl00E+j0fGpqyhWQQHZlLcWs/7JaRqOWQEklY0kWDhjqEjgv26J9X1zt7vMf/9STAOZmLRdJ3GZdEF7+2DMjDUAmixmYRPVGEpFq/XzKJnfomsepPjIkoPHaFxHE6PbwPO0mSNKa1w3IwVYJJskbizexWdCXOTnsfcOevgRNXZrfd3Cpu9rbpwF/TMbV217bTOfv+z4vFfN5BpuDJJZY9kfnPn97d/vM4tu803eE
HL+GSgHPcpL2d7jnZ8aL6ctPazZFkdm8RS96WlyV7+eFbrG7gettLu/4S35mjkHR6iTcy/znj9Hw7cKm4txV7twM//oK4vxUqi33BEnlhWnO3dXk3KCRJ7OTLpVpTJ8HEaji6XEg4vIn3zAfGRvM0JGMO1NUHC+pW6DfMhiZnV1RWTUB7HFuD1QXomu3r4ir2xSbeLORyU4ogdurqpBxU5mZofyi7PHz17QxS1Nuzax4/elkT4xDkYWPM7NQ3+Li2nZbzOv5A1jmgLGdRrJJR91FNEuieL4WqEwkpL/PGfzDdhjqnvU8e+Idp7nn7PF5W6rYu4JxLsn/XOWiZxPAN9WXXM3Q6NgtfJ1Z4fb3JXM4XhzjgppbsuDrAebjAsO1nJocUkkHbd2rj+LfJiDzJGuvoV/oABnr2sqLB9szQ3fmqhmVUHM9fw2cfJLLP0wMcsriHB7ITyACxoh52oiQeTY4D7/mUlBpyymQHKkNI3n///fje976HNWvWYPv27bjppptw9tln56FrJBD5Oql65smJbM+Ai3Jv1N/YlPBN1uQmUkO5iQLQswAYG5JYI1X7TScnveVnvbOJJ8eHYA598Fp/3nSkh2WxcZp5k8zShEEskkGsmBEtkqVAbZ37fmjvNusq2nETkvq0xmZ5gdNfQN3OgfbpMp+eDBE63jbgvG4vELV1wLhNKNXVSxkjL6un634IePAbm6UkzL6dIhLtoxjZUftE346gFkn10m5H34aOGWK1cxKSUQgSQqLIWFidjr3b9Wr77nTOdM0ELvoP5z647RNAXNs6iw4XC97sxe7PhaDJS07bMzrsPo63WsReO7W+0fklyIkjXy8vbRd+DahrlHPP89wpPSEZ2vY+NDSEZcuW4corr8xHf0hQVFCx03iscVBbJ2/9cQg2/aYVlyu+vlFcTvrbay40pNvLS9ycn5C0T3C6LIP0K4Jru67OnCfOc8mru15j+wZxzwfaFQmPJAkvkZnD8Q9yf/dqv6FZhOKMed4PejXko1PbQVzbWU27dFwVlravPx+4bW+cGaqJRLiXiPpGM7NdubfthaMzKCGpn6cBH69RKhXkitvLgOfxDRNiZLdIOtz3J+yhBxrv+Cf33+zx6Cr7/4SV7vuyZ761RqkbkXZzwnm/Xfh1ifv8528Cn/phekjXNAcfLcL3o98yC6S3dpkeOi9rdiVYJE8//XScfvrp+egLCUNrh9RLy9eN3ZUcb2hxueJ1oaCPeKBobpNszlIg7C6LXIw6QnuJpMSWJRLeb92xdQbW+opZIieAazvob40tzkM4lqpFsq7erLoQ5bpOJJ33hWWak1XO48HU1GKNwQts2fFv2koAIZll4fGwwAZNwAratfpGuU6SSWfXaSZSQzt/nWLnGqeJEErWmLVPXUVDgP66DcEa1BrmtJwbTl6RoOWrwrqbDz5ahiR84Gb5fvYngD9dLcehqdV7WSdq6+TacnMXZ4hwjiQSzse6a5YZ9wkAH/8ucOf/yvXz9g95v2h4/VaCFUIYI1nOFFxExkCh6i1OnxWzkHS4Y/a4FNK10zZdshAz+MT4xfkQDLKsugmGXkcE93EiIW4r/bv9d8flPFzbbutxnc1r+VxUZgDV5DSLqoWoJ3V4jaVb3+g8PaiAsn/36rZysysrSL7uOW61J3WLaM98qTHpZcnyJIRrO2s5+NTHTO+funpJKknWZD/w27pMr4cuaNysyAnbdyfc+tQ9R/ZVXUP2C+L02dnjl2faC2uRjDhvkJqoR5wsWd2plCSkXPQfch56jjOd47XtV67Krd0gsaWJhP/IXvq8blSCRTIsY2NjGBszD05/f4lYiUg4auslEDiIe8CLQrxN5ZrAE+Q6ravPjtUBnG8A09rk4b9jQ3oen7YdS7jUBLAERXBtR5kvWGPuP3m5K6Nknrv+FsW6EPnHgDicXC3t2eP4ej3QG5vFGm+3trom29jCArLm8S0HYM7j+RDPAbdzwrI+A1nbEganlwt7olt
U9F2owm3soy0la7R9r29HLi+JLkKyoUmKfw/1W4VkU4sIXVch6WUlC+Ha9tukmlqHrHobza0yCo4iSCWAXN8R6xoAhKynmgBghHH7B2mzwmMkw7Jq1Sq0t7dn/s2fP99/IVJ69MwTy0mudbqiWCTD3me7YoqbjBOnEUbccLqJtE9PCwiX+DjvBoNNC1uyJWo8kS4ashKNIri2PVYVvi2P3+pC7h8ngloTnISk3jUn916gfedglQuiIzP98ngJmLXIpyEP3ISzXvHBQAA3vf6bfZJt/poa97Has5bzadspRtLL8xAoltKrrTS68KutE1f3jHku7enrjStG0m89LuTrhSQXJdkxQ/ZfpHUGsEgGZVqb9724BC2SeReSl112Gfr6+jL/Nm+219gjZUFNrVhNgg5/ZkcFFOelYKxGY3N+blJZ124eA+3cLJIz5jk8+AJYaIKKMD+3ZSIhsadBEpy8LIvJpLgpZ8zLPp8i1cIskEWya5bs/55cXoZzEJI6TvvD7eETNdnGaQG367+hOdzLkh237XWq+BAEx3cnB2EXJdElqvHJTRe6nvM+6wRshfqTUgc1iEfGrb+eFskQ7fhde8UIy/LqUo1KLvWJcXW0nBsu+yai4OvsEctxo0tVkxIUknl3bTc0NKChocF/RlLZdHRLjFBUIVp0gl68MQhML/dkVGtakFmDlCmarmV2Ryovl16H20hAcVoeIy3g45LPtUpA0H0WNlu5pcPDmuJTsiXMcXS9fnN8uAV5+fMqi5SFi0Uy0vkVZJmwNV5jskj6ZYbbF8sIOIf+Nk6LL2vbb5fZ12MfKjUqUe+PjvfDGmSN6tMzX2qLNrcCOzbKtKlJa9x3zqTP0+mzZIx2O5UgJAcHB/HKK69kvq9fvx5PP/00urq6sGBBwOQDUp1EFpE+d6U5BwDbXovYdjGI002bU0ey0YfTa2qxJQWEja1zIGxNTcd5fCd4r8sz3hJS8mV4ULJhIxV79iLgPqurNxMz+vak+2azPikam8WKkfktYX3Y6OEkjmLKp08WPeNyfHJNonN1bSekIHgqlRbXNvdwqPM/okUyiGvbrcC+23IJl78DLGpBv6cGqafqFeM+Y65PH8Ls7JCu7TjLPEXCyQuUtNbxb2iSlxl7eNHkZPiwIM+upPvidllWgpB84okn8OY3vznz/dJLLwUAXHDBBbjuuuti6xghgSnGTSiIOyrutgvBzIXA6JC4sJNJGWnEjVwsklF/B7JFR6y7KyEirr1BRqIIIySD7I8w+0yVt0ompS9tneZveskY+4Nl9gHANs2S0dppll8xgPA7LIBrO1fB7WqRTFhHGwpskAxgkUwEbTCAazvIb24zBrqXRLRI6tTWmZ6AKGIkSGay17w69soDcd3znNpRwi9M0qFhiOhWiVjNrZIj4MTUBICYRmhL2K41exmnti53b04RCf0EftOb3gSjBBUxqWByGbO2YohiOdGYe6BYdbav925DBew7ErSInBc5CMnOmXJjtw+RFtZcG/SBMn2WOR5uIOJWkmkc3dYeQtIuyiwPHpd4rqC4XYu5VmNwTRTymxAmnMNFXPouF6BtJ4ukV2xq6GQbt1lCWDY7Zxb3JRUQ1/DEWHb8X1zdcnqhUbVZg7q2Zy+WElPJpOlur6nNfoma1i5u7rau/JS1SySAOYuB3j1mrdBIyZb5p9j2ZEL8aeuUYQuDZFjmKwkm8MtTTA+muBdO1uQe4B7H+2MuFkl7iZwMEeO0/NZf32g+LOIirpdwvZ++bdoFje27bwKci1jRrSV5qw8bUIwFbith/R6bazvI9tutoW7tey9qne63rJvQzENYSpB5G5pcLGox3bcbp1mHkcyyQAegplb+6WPaO21PZ49Z2q13T6TuZpFVe7I8jCjl0UtS3SRrgJkLxEVXMgS8K8WVnZiwfcbWYBhsN7m43GPWGcK3GTpzNMwDMWRfioHfcfCy6s1e7H9d6cvrVpm4hif1XHdEC6Sj9dG2eMJlPsc
FoxDQBx4oltdpHrsQdtlmp/Yivc849SHgEImAx/CSiM9SWlsvIxA59iNsTILfvk2IKA4dq1t5UEgSEgT7TTDIjSNZ42JFi2QqC7C+PF/OxbZIulFbZ42j8+1DzOsPQz7CgnytYR4WyUBJAkHcsG44jO4BBPQuOHXFw0Lpv3DAaTmsM0rx/6jnXI99TPYwlum4YiRDzOvqUQBiU2IJuN8Hw+7nUMcoqGD1a8ah72UQSkghSSoDdQE2udTeypWatFU0DNNnB3zw5nKjsLkaFXoWb2zEYJHMx6t7IiGZ1naXmevNv5jmg3wIyQI+aHJ1tbV3S5xc1yz/eQGfB7jdxGj/zW96QEtSXFZRN2FiQGKYvcS1va3GZtMaFpQcdWQoHLvl0ddix2764fsCHGZej98dx12nkCSkMMxeJMLNy32SK2FjmSIZHt3cXAGW1YVkpBEafMgSLPko/xPxgaLcTIHw6ncuD7QA+yMf56eva9tD0ATB4trWBZCRHlLOPpMPoQSQx3xeQxs6Gh/trmCf9kPN40Cz13CyNlWXrHGoPhGyb47ngdsNKS5xEsK17Vd2KxZiFKtRLZJ+87ZN92jGySLps+oSgEKSVAY1tfJGH6SWmi9Bbo4BLR5Bae+WbXDNyovZtV0IA4CTBTcfMZKKti7Zf7MWes/nJbyiWp+C0D3HLOkTK2GeNLbxqgPhck4bKdmm5lZgZtDRfnIQsQAwpWXlRhpu1fbAj5RsE3De2np30RA6wcbtBTOipWxah//6c8EtRtV9Ae/2nEIwmlqkXJnXC2Tg95Uc70v2sj1RcbT4l76SZNY2IUGJKQzGcdm2Lq1MRQj0Yelcx0HOk2r0s4TVN4rFSrcc5csiCchNWN+Hrm3l6cbsaBDSHgxexaBzWm/I7clpH9ssYLV1IZNuwu57W1+nJgO25Sa8Ag7J6dVUmP1nKcWUg4APsmiY86CjG2hskjI3fUEzjh3aD3U4c7AWds4Edm/Jnl7fkD+3eNQ4Vq95/e4BTiK09HUkLZKkCiiE2yRKO+3dcuMIE8+YrBG3dX2DxJnpb+PT2uTBnk/3vk6UwrhhLCj5oi6GIVuDjufe3i37qculmHGhiWSQdFkgyNjauT4E7eu2CEmH38O2l0/Xtlczoco3Be2Dj2vbLoyaWmJI0AtRkNw33tXr5xyWDYLvUJMh+uc2b3t39kuXvSauUz8c4yZLC1okCYlEDBd3W5eUXgnrXnMTnskkMGtR/t7Q7TROA7rnAnu2yvcgbpl8urb9qK0DehZ4l2QKuu965gPbN/jPV1Mj85YSDc3AYF/05WctAiYnskcnCUKuiUFBC583NjvX/9TP0ciu7TyLmqhN+e5ahwYb00ImSBJVztbHHCySQeMrs2YLuBOdxFqo+6guJHUvRCswMuDeZtdMOU/HRqVWslMoRHs3MD6Wn7j3mKCQJJWLGis6rvqTccdTRZk/SnuOq4jjLT4hWfI988U95lvYOsB68ymCE8nglkQ/auuzhy8rF5paxDJSH9QyazsmdfXBrJFO5OqG75wJ7N8pfzd5ZDk3tUj85p5t1ul2wRD79ec1IRfXtm1Zx93oNFGb5rSttfVSSzSuerde68pJfwdd2KehOQdI8fDhfut0RyEdwrXtZm12dFVrx0TFxRuGvCQ5HYeaWv+47yJDIUkql+mzZXSCsJaT+kZgdDh7eq4jQ1QqDU3uVjf7bory4J4+J/wyjvgcs1ze+BPJgEI6RtSoO81tHjO5WFrC1HGMVWv5ZRb70NIuInFqUsSsm5DPuG5tYj+OkUIiGqrcyeO9RG/arS+BaonaG4uCk2XXYWhJx0UjnoT2F56aWqDOYXtzPi9cLJJBQwcSCSARs5gvIBSSpHIJVRJGo7VLlm20B0aHjWuyUd8owtazMK8bJRAnE0dsYRQ8y6jEtY7W3Gpvzj2wcCEFis4Zsm/iyFotFE7XTdg+1tRolmW/hb1CKwK6trMIY1oLkDATpqi3F473pCBKMg8EiZG
06Mgc+ua07MwFwEBv8CoJToLPfq4E7UPCRVRm8LESlyEUkoTYSSadY1VCB8jbmDEPmByPJshK4X7THcUymGWSjKMneSDHfhXjgZBISpxqKIpcRD7u4ukhdaQ1RjJIA3ESVknqi6aXbe0EBvZ7lAkrFYLsV01Jxl3Nob4RmO5W9N5J5Pol24RYt5tFskJEoxMUkoQEJaxr2+5WSSajJSiUArX1IVxgHoS+lxbo5hvlJl8Wz4U4YmFzb6J4K8iHRdKDXJrLKkjuQMcMsbIFHUrPiNH6FeYlwDHXxsMiWeys7bxZJB2WK4MhD8NCIUlIFLxuBnMPcg+cjkyRVEtDEzA2El/GoGutSxdmhRyWMiquJW6K5M4vKWI895zEUkGNgraVKe9D/97obVh/9PzqiLpPtLRL6Eum1qCLkIkjzjPvBLVIhpg9yKpyGRkrJ1zEY5CXgwqgOraSkLjxEpLJJEq+RGvQ+2b3XCkoHtWSqu+m7jnhsqYbmgon5NweJNPa5KXAMQ6xDEySpdTFae0uLyRxqQgHfF3bEAtfGCEZCg/rVNcsYHTIjJlOJG11BnVrYpB1lZClK7Qws83f3i0lqqYmIrYXdvU+92u/KgUWMau1Vastp+59JXSY4oJCkpAoFNo9Eft9NGCDyWS0hKUM2n7K18guQXE6ZMri6lbEPZGIr3xUUbAd54KHSGorLEpRdh/XdjGZ1ib/AhGgr465NnHep8K4tgP01+4Cnr0Y2L5e+00X0km5fxgph4oacbi2XV5wZy+WdfpaFrU+6FUC1MhPYyNauarKU5IUkoREotA3g1IyLRWIQoj1GfNkzOa46ktWJPm2BhUymziO8j+lei06XC+q4HgcYTb5vhy9YrATCTPZb/NLtt9Crkf3crR1yTCRjc3O8waNC/eKkWxuDVduqwyhkCQkCvm+qU6fLUW+JyfyvKIqJ5GgiCxnQntQ9coLAUfJyYW4LKBBxKvTPammVopw5zwUYkjyMXiDQtUG9YsldaOxWQrb1zUADXlIfpzWLvdtVw9M5VkkSzyQi5ASJd/WsubWwhe4zge5jPwS9z6OOhqLGyrez82aUQpkPawj7NO8W+DyGCOZNbs2/9io97wNzRLHGAavczbfu9HN/VpTW4QEnQAb25kOdXAqtQaItRDItubNWiix21GtfImExKXGKSIto9nUiPXULWyh8nQkLZKERKLgMZKl6k7zIdR+ikH0eJFLwXEnWjslxjLwUIPEghqZJ2jR6DhIJICaOkni8LNE98wrTJ+C4HX998wH+vbG++KpMtnVMQIQLkYywDyNzVLhws1aOq1dxLzdvVxTCzS5SJei3SbDJEZVnpKkkCQkEuV0M9DubJ0zgb7dtuzQAlAToQZlHLt4+mxgZAjo6onfKhN15KRyI18vMZ09YnWKoz5pGHrmyXjLrR3mtOmzgfExYGBfbm1nvTgVILmnoSl+0dvWJa7ZunpTSDpdj67HLkQyn+Pi6RqfcXsR8oVlACGfba9vAob6vecpMygkCakmWtrF5VIoC2dLBzDYG8wamNWlGJRkFQS6exPDcW6bLpmykYb29CCRKLyIBGSd3bYXKXWe5CoknYpuO/1d6iQS3pb2phbZj271ZfX90NrpXhXBTluXlETyHD8+a2Uh5i0EPv1RLu8KegmlkCSk2iikm7xjhjxIogiGcjL6lipxHOraOimDUorhFaXUpyancc9LqH85o12QNbXBXemtncELc7d3hx/+MeH6pTgEGe4x7peyIkMhSQjJHzlZnagkS4aogq1qDmEiwFj0JSByCsnsxenSWpQZlQ6ztgkpVarlIdw1S+IXu+fafqiWHZBPYihIXuo0t8oIIjU1QGtXcfpQZRoxELV11ZOI1tAk/4K68CsMvioQEoamFmBksEAlXyrxqe/AtDYRA3arV5VsPnFh9qJg86nEMcMonKu7e47UCuzdnZ7gst5yjZHU6eyR5KSuWcCuzemJJboxxepWIiHZ81UKhSQhYei
aJUKyaVqxe1JZOAoAKsmcyUcCU6GoDZmxW8h4SVVsWglJ13VXgJJs6RBLWyIhyTBD/UBbKQ0bWqb7tYKgkCQkDMlkiPFxS4Ryu88mkxJbVV85WY3FI33w6xuB8dGKC/InBUIJ5SjJMPmm1JJtqhAKSUJKlfqYRl4opczWIMxcIFaPllKyepQ5M+YBk+PWcYYLQhlZQKNAgU4IhSQhJUttHTBrkQy5FYWOHikm7DYEWalSW196Vo9yo7ZehKOqoZlMxvdiUhUEfPmaMRcYGwEaXUJd9Je4cnuhIyQgFJKElDK5jOzQ2mEdvYNUDzPny1jSpTwOeCWQrDHjJUmRoEAvNhSShBBSaSRrmBCm4zYUH6ksaPUtCry6CCGEVCYz5olLf0bIsajzokdCNlrh4aWxQe1YdGiRJIQQUpk0NgONC4rdC4GCh1QotEgSQgjJD23pkWZU0g8hsUOFXmxokSSEEJIf2qYDjS3VM1SeJyEFz7RWYHB/EUo2ERIOCklCCCH5IZEAGsqo7NC0dimZVQrlp+obgdmLgRo+pklpwzOUEEIKTUs7MNgHtJdZjc9Kp7MHaO2UGq5xE8UDm49+EBIzFJKEEFJoOnqA1i4KhVIjkcitdqt343lqt8phyZ+iw2QbQggpNIkERSQhpCKgkCSEEEIIIZGgkCSEEELyQbLG/JsuWFKhMEaSEEIIyQe1dZLAwyEa8wgFerGhkCSEEFIa1DcC46N5THgpAi0dxe5BZUMdWXQoJAkhhJQG3XOkLNK0tmL3hBASEApJQgghpUFNLWtrElJmMHCDEEIIIYREgkKSEEIIIWUKgySLDYUkIYQQQsoTZsQXnUhH4Morr8SiRYvQ2NiIE088EY899ljc/SKEEEII8aapBWhuBTpmFLsnVUtoIfmb3/wGl156Kb7+9a/jySefxLJly7By5Urs2rUrH/0jhBBCCHEmkQCmzwZaO4vdk6oltJD8/ve/j49+9KO48MILcdhhh+HHP/4xmpub8fOf/zwf/SOEEEIIISVKKCE5Pj6ONWvWYMWKFWYDySRWrFiBhx9+OPbOEUIIIYSQ0iVUHck9e/ZgamoKM2fOtEyfOXMmXnzxRcdlxsbGMDY2lvne398foZuEEEIIIaTUyHu606pVq9De3p75N3/+/HyvkhBCCCGEFIBQQrK7uxs1NTXYuXOnZfrOnTsxa9Ysx2Uuu+wy9PX1Zf5t3rw5em8JIYQQQkjJEEpI1tfX49hjj8Xdd9+dmZZKpXD33Xdj+fLljss0NDSgra3N8o8QQgghhJQ/ocfavvTSS3HBBRfguOOOwwknnIAf/OAHGBoawoUXXpiP/hFCCCGEkBIltJA899xzsXv3bnzta1/Djh07cNRRR+H222/PSsAhhBBCCCGVTcIwDKOQK+zv70d7ezv6+vro5iaEEEIIKUGC6jUOUkkIIYQQQiJBIUkIIYQQQiJBIUkIIYQQQiJBIUkIIYQQQiJBIUkIIYQQQiIRuvxPrqgkcY65TQghhBBSmiid5lfcp+BCcmBgAAA45jYhhBBCSIkzMDCA9vZ2198LXkcylUph27ZtaG1tRSKRyOu6+vv7MX/+fGzevJk1K8sYHsfKgMexMuBxrAx4HCuDfB5HwzAwMDCAOXPmIJl0j4QsuEUymUxi3rx5BV0nx/iuDHgcKwMex8qAx7Ey4HGsDPJ1HL0skQom2xBCCCGEkEhQSBJCCCGEkEhUtJBsaGjA17/+dTQ0NBS7KyQHeBwrAx7HyoDHsTLgcawMSuE4FjzZhhBCCCGEVAYVbZEkhBBCCCH5g0KSEEIIIYREgkKSEEIIIYREgkKSEEIIIYREoqKF5JVXXolFixahsbERJ554Ih577LFid6lquf/++/Gud70Lc+bMQSKRwB//+EfL74Zh4Gtf+xpmz56NpqYmrFixAi+//LJlnn379uH8889HW1sbOjo68JGPfASDg4OWeZ555hmceuqpaGxsxPz58/Hd734335tWVaxatQr
HH388Wltb0dPTg7PPPhvr1q2zzDM6OoqLL74Y06dPR0tLC8455xzs3LnTMs+mTZtwxhlnoLm5GT09Pfi3f/s3TE5OWua57777cMwxx6ChoQEHHXQQrrvuunxvXtVw9dVXY+nSpZkixsuXL8dtt92W+Z3HsPz4zne+g0Qigc985jOZaTyO5cE3vvENJBIJy79DDz0083vJH0ejQrnhhhuM+vp64+c//7nx/PPPGx/96EeNjo4OY+fOncXuWlVy6623Gl/+8peNP/zhDwYA46abbrL8/p3vfMdob283/vjHPxp///vfjTPPPNNYvHixMTIykpnn7W9/u7Fs2TLjkUceMf72t78ZBx10kHHeeedlfu/r6zNmzpxpnH/++cZzzz1nXH/99UZTU5NxzTXXFGozK56VK1ca1157rfHcc88ZTz/9tPGOd7zDWLBggTE4OJiZ56KLLjLmz59v3H333cYTTzxhnHTSScbrX//6zO+Tk5PGEUccYaxYscJ46qmnjFtvvdXo7u42Lrvsssw8r732mtHc3Gxceumlxtq1a40f/ehHRk1NjXH77bcXdHsrlZtvvtn4y1/+Yrz00kvGunXrjC996UtGXV2d8dxzzxmGwWNYbjz22GPGokWLjKVLlxqf/vSnM9N5HMuDr3/968bhhx9ubN++PfNv9+7dmd9L/ThWrJA84YQTjIsvvjjzfWpqypgzZ46xatWqIvaKGIaRJSRTqZQxa9Ys43vf+15mWm9vr9HQ0GBcf/31hmEYxtq1aw0AxuOPP56Z57bbbjMSiYSxdetWwzAM46qrrjI6OzuNsbGxzDxf+MIXjCVLluR5i6qXXbt2GQCM1atXG4Yhx62urs747W9/m5nnhRdeMAAYDz/8sGEY8lKRTCaNHTt2ZOa5+uqrjba2tsyx+/znP28cfvjhlnWde+65xsqVK/O9SVVLZ2en8dOf/pTHsMwYGBgwDj74YOPOO+803vjGN2aEJI9j+fD1r3/dWLZsmeNv5XAcK9K1PT4+jjVr1mDFihWZaclkEitWrMDDDz9cxJ4RJ9avX48dO3ZYjld7eztOPPHEzPF6+OGH0dHRgeOOOy4zz4oVK5BMJvHoo49m5nnDG96A+vr6zDwrV67EunXrsH///gJtTXXR19cHAOjq6gIArFmzBhMTE5Zjeeihh2LBggWWY3nkkUdi5syZmXlWrlyJ/v5+PP/885l59DbUPLx+42dqago33HADhoaGsHz5ch7DMuPiiy/GGWeckbWveRzLi5dffhlz5szBAQccgPPPPx+bNm0CUB7HsSKF5J49ezA1NWXZqQAwc+ZM7Nixo0i9Im6oY+J1vHbs2IGenh7L77W1tejq6rLM49SGvg4SH6lUCp/5zGdw8skn44gjjgAg+7m+vh4dHR2Wee3H0u84uc3T39+PkZGRfGxO1fHss8+ipaUFDQ0NuOiii3DTTTfhsMMO4zEsI2644QY8+eSTWLVqVdZvPI7lw4knnojrrrsOt99+O66++mqsX78ep556KgYGBsriONbmtDQhpGq5+OKL8dxzz+GBBx4odldIBJYsWYKnn34afX19+N3vfocLLrgAq1evLna3SEA2b96MT3/607jzzjvR2NhY7O6QHDj99NMzfy9duhQnnngiFi5ciBtvvBFNTU1F7FkwKtIi2d3djZqamqyspp07d2LWrFlF6hVxQx0Tr+M1a9Ys7Nq1y/L75OQk9u3bZ5nHqQ19HSQeLrnkEtxyyy249957MW/evMz0WbNmYXx8HL29vZb57cfS7zi5zdPW1lYWN9ZyoL6+HgcddBCOPfZYrFq1CsuWLcMPf/hDHsMyYc2aNdi1axeOOeYY1NbWora2FqtXr8YVV1yB2tpazJw5k8exTOno6MAhhxyCV155pSyux4oUkvX19Tj22GNx9913Z6alUincfffdWL58eRF7RpxYvHgxZs2aZTle/f39ePTRRzPHa/ny5ejt7cWaNWsy89xzzz1IpVI48cQ
TM/Pcf//9mJiYyMxz5513YsmSJejs7CzQ1lQ2hmHgkksuwU033YR77rkHixcvtvx+7LHHoq6uznIs161bh02bNlmO5bPPPmt5MbjzzjvR1taGww47LDOP3oaah9dv/kilUhgbG+MxLBNOO+00PPvss3j66acz/4477jicf/75mb95HMuTwcFBvPrqq5g9e3Z5XI85p+uUKDfccIPR0NBgXHfddcbatWuNj33sY0ZHR4clq4kUjoGBAeOpp54ynnrqKQOA8f3vf9946qmnjI0bNxqGIeV/Ojo6jD/96U/GM888Y5x11lmO5X+OPvpo49FHHzUeeOAB4+CDD7aU/+nt7TVmzpxpfPCDHzSee+4544YbbjCam5tZ/idGPv7xjxvt7e3GfffdZylVMTw8nJnnoosuMhYsWGDcc889xhNPPGEsX77cWL58eeZ3VaribW97m/H0008bt99+uzFjxgzHUhX/9m//ZrzwwgvGlVdeyZIjMfLFL37RWL16tbF+/XrjmWeeMb74xS8aiUTC+Otf/2oYBo9huaJnbRsGj2O58LnPfc647777jPXr1xsPPvigsWLFCqO7u9vYtWuXYRilfxwrVkgahmH86Ec/MhYsWGDU19cbJ5xwgvHII48Uu0tVy7333msAyPp3wQUXGIYhJYC++tWvGjNnzjQaGhqM0047zVi3bp2ljb179xrnnXee0dLSYrS1tRkXXnihMTAwYJnn73//u3HKKacYDQ0Nxty5c43vfOc7hdrEqsDpGAIwrr322sw8IyMjxic+8Qmjs7PTaG5uNt797ncb27dvt7SzYcMG4/TTTzeampqM7u5u43Of+5wxMTFhmefee+81jjrqKKO+vt444IADLOsgufHhD3/YWLhwoVFfX2/MmDHDOO200zIi0jB4DMsVu5DkcSwPzj33XGP27NlGfX29MXfuXOPcc881XnnllczvpX4cE4ZhGLnbNQkhhBBCSLVRkTGShBBCCCEk/1BIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSFBIEkIIIYSQSPx/CTRL8LuU3nwAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsIAAAHDCAYAAAAupnzhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABYsklEQVR4nO3dd3hUVf7H8fckIYWSgpGEQGjSFBAUJEYB1yUaFV1x/a2IKIoIiqBgUBELsMoKgihFiqAr7q4K6lppyoKIYgwQehFRQYomgCEFCGlzfn9cMjAaSii5mbmf1/PcZzIzZzLfyRX4ePK957iMMQYREREREYcJsLsAERERERE7KAiLiIiIiCMpCIuIiIiIIykIi4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIi4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIi4g40MyZM3G5XGzfvt3uUkREbKMgLCIiIiKOpCAsIiIiIo6kICwiIn9gjCE/P9/uMkREzikFYRERoUGDBtx444189tlntGvXjrCwMF599VW7yxIROacUhEVEBIAtW7bQvXt3rrnmGiZMmECbNm3sLklE5JwKsrsAERGpHH744QcWLFhAcnKy3aWIiFQIzQiLiAgADRs2VAgWEUdREBYREcAKwiIiTqIgLCIiAISFhdldgohIhVIQFhERERFHUhAWEREREUdSEBYRERERR1IQFhERERFHchljjN1FiIiIiIhUNM0Ii4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIi4iIiIgjKQiLiIiIiCMpCIuIiIiIIwXZXYCvcbvd/PLLL9SoUQOXy2V3OSIiIiLyO8YY8vLyiIuLIyDg+PO+CsLl9MsvvxAfH293GSIiIiJyEjt37qRu3brHfV5BuJxq1KgBWD/Y8PBwm6sRERERkd/Lzc0lPj7ek9uOR0G4nErbIcLDwxWERURERCqxk7Wx6mI5EREREXEkBWERERERcSQFYRERERFxJAVhEREREXEkBWERERERcSQFYRERERFxJAVhEREREXEkBWERERERcSQFYRERERFxJAVhEREREXGk0wrCkydPpkGDBoSGhpKQkMDy5ctPOP69996jefPmhIaG0qpVK+bNm+f1vDGGYcOGUbt2bcLCwkhKSmLr1q1eY7KysujRowfh4eFERkbSu3dvDhw44Hn+8OHD3HPPPbRq1YqgoCC6du1aZi1Llizh0ksvJSQkhMaNGzNz5szT+RGIiIiIiI8rdxCePXs2KSkpDB8+nFWrVtG6dWuSk5PZs2dPmeO/+eYbunfvTu/evVm9ejVdu3ala9eubNiwwTNmzJgxTJw4kWnTppGWlka1atVITk7m8OHDnjE9evRg48aNLFy4kDlz5rB06VL69u3reb6kpISwsDAefvhhkpKSyqxl27ZtdOnShauvvpo1a9YwaNAg7rvvPj777LPy/hhERERExMe5jDGmPC9ISEjgsssu45VXXgHA7XYTHx/PQw89xBNPPPGH8d26dePgwYPMmTPH89jll19OmzZtmDZtGsYY4uLiGDx4MI8++igAOTk5xMTEMHPmTG6//XY2b97MRRddxIoVK2jXrh0ACxYs4IYbbmDXrl3ExcV5vec999xDdnY2H330kdfjQ4YMYe7cuV4h/Pbbbyc7O5sFCxac0ufPzc0lIiKCnJwcwsPDT+k1IiIiIlJxTjWvlWtGuLCwkPT0dK8Z14CAAJKSkkhNTS3zNampqX+YoU1OTvaM37ZtGxkZGV5jIiIiSEhI8IxJTU0lMjLSE4IBkpKSCAgIIC0t7ZTrP1ktjpSVCbt/hIJ8uysRERERqVBB5Rm8b98+SkpKiImJ8Xo8JiaG7777rszXZGRklDk+IyPD83zpYycaU6tWLe/Cg4KoWbOmZ8ypOF
4tubm55OfnExYW9ofXFBQUUFBQ4Lmfm5t7yu9X6eXsg+fvhpJi636NmhAdB+fXOXJbF2o3gOi6EBhoa6kiIiIiZ1u5grATjRo1ir///e92l3Fu7Nl1NAQD5GVZx7YN3uMCq0BMPajd0ArGcY2gbhOoEVWh5YqIiIicTeUKwtHR0QQGBpKZmen1eGZmJrGxsWW+JjY29oTjS28zMzOpXbu215g2bdp4xvz+Yrzi4mKysrKO+77lqSU8PLzM2WCAoUOHkpKS4rmfm5tLfHz8Kb9npZafZ902aAF9RsK+X6zjt19g727I3AEZ26HwMPzyo3UcKyoG6jU7etRtCqFVK/xjiIiIiJyOcgXh4OBg2rZty6JFizzLk7ndbhYtWsSAAQPKfE1iYiKLFi1i0KBBnscWLlxIYmIiAA0bNiQ2NpZFixZ5gm9ubi5paWn069fP8z2ys7NJT0+nbdu2ACxevBi3201CQsIp15+YmPiHpduOraUsISEhhISEnPJ7+JT8I8vPVa0OVWscDbTHcrshK8MKxL9ug1+3Wz3Fe3fC/kzrWLvUGutyQe1GcEEraHSxdatZYxEREamkyt0akZKSwt133027du1o374948eP5+DBg/Tq1QuAnj17UqdOHUaNGgXAwIEDueqqqxg3bhxdunRh1qxZrFy5kunTpwPgcrkYNGgQI0eOpEmTJjRs2JBnnnmGuLg4T9i+8MILue666+jTpw/Tpk2jqKiIAQMGcPvtt3utGLFp0yYKCwvJysoiLy+PNWvWAHgC9gMPPMArr7zC448/zr333svixYt59913mTt37un+/HzboSNBOKz68ccEBFj9wtFx0PKKo48fPgg7t8KO72DHFti5BfbvOTpz/NVH1rha8dCoFVxwMTS9FMJrnrOPIyIiIlIe5Q7C3bp1Y+/evQwbNoyMjAzatGnDggULPBeh7dixg4CAo4tRXHHFFbz99ts8/fTTPPnkkzRp0oSPPvqIli1besY8/vjjHDx4kL59+5KdnU2HDh1YsGABoaGhnjFvvfUWAwYMoHPnzgQEBHDrrbcyceJEr9puuOEGfv75Z8/9Sy65BLA27ABr9nnu3Lk88sgjTJgwgbp16/Laa6+RnJxc3h+DfyhtjThRED6e0GrQpI11lMr5zeov/nEd/LTemkHes9M6vj0yEx93ATRvB80vg4YtIKjKmX4KERERkdNS7nWEnc6v1hH+7yT4+mO49k64/p6z//0P5sK2jfDTOti6FnZ97/18cCg0bgMXXgYtEiGqVpnfRkRERKQ8TjWvadUIJzt0BjPCp6JaOLRMtA6AA9mwJR2+W2Hd5u2HTd9ax38nWRfbtboCWnWA2PpWz7GIiIjIOaIg7GT5p9AjfDZVj4S2na3D7YZff4LvVsLGb2H7RmvGeNf3MH8mRNc5GorrX2j1KouIiIicRQrCTuZZNaJGxb93QADUaWwdnW+3Zoc3fgvrv4bvV8G+3fDFe9YRFQOX/Aku/bO1hrFmikVEROQsUBB2Ms+qEdXsrQOsZdYuv946Dh+yZorXfw0bU60l2hbPto6Y+nDp1VYojo47+fcVEREROQ4FYSfzrBphw4zwiYRWhTadrKOwwOohXvUFbEqDzJ+t1on5M62WifbJ1mxxRbV3iIiIiN9QEHayYzfUqKyCQ6DNVdaRfwDWL4P0xbB1Nfy82To+mgqtO0LCddZGHuonFhERkVOgIOxUhQVQXGR97SuzqWHVrRng9smQmwXp/4O0BdZW0Cv/Zx3n1T4y5jqIjLa7YhEREanEFISdqrQtIiAAQqraW8vpCK8JV98Gf/qbNSuctgBWL4HffrXaJj77l7UTXoebrbWKdYGdiIiI/I6CsFPlH7Ruw6r7dkh0uaDBRdbRtR+s+wq+nW/tbLfua+uIqQdX/gUuu8baEU9EREQEBWHnOlRJL5Q7EyFhcNm11vHLT/DNp7BiodU68cErMPd1aJcEHbtaq0+IiIiIoykIO1V+JVo67VyIaw
T/NxBuvM8Kw8s+sQLxsk+t46IEq62icWvfnhEXERGR06Yg7FSeIOxHM8JlCa1mzQB3uBl+WAtffQgbvrGWYtuUZm3ocfXfrFUpAvXHQURExEn0L79TlbZGVOal084mlwuatLGOvbvgyw9g+Wew+wf4zyiY8xp0ugUSu6iPWERExCG04KpTeWaEHRKEj3V+Xfi/h2H423D9Pdaudtl74ZPp8NydsOBfcDDX7ipFRETkHFMQdionB+FS1SLg2jvhmbfg9sFQK96aKf/sX1YgnvMa5O23u0oRERE5R9Qa4VSeXeX8vEf4VFQJhoTrrdUm1n0NC9+yVp1YNAuWfmi1S1x9mzboEBER8TMKwk7lWT7NwTPCvxcQaF0017oTbPwWFv4HdmyBpR9YK01cfh0k9VAgFhER8RMKwk6l1ojjc7mgZSK0uBy+XwWf/8faoGPZp9YFdlf+BTrfDtUj7a5UREREzoCCsFOpNeLkXC5o1tY6flgL896AbRtgyfuQOhc6/dVaek3/MyEiIuKTdLGcU3laI7RU2Clp3Boeehn6Pg91m0JBvtVL/Nyd8L+3rfsiIiLiUzQj7FRO2VDjbHK54ML20PwyWP81zJsJmT/D3H9a6xJf1xMuv0Ebc4iIiPgIzQg7UUnJ0RlMp2yocTa5XHBxR3h8OvR4AqLj4EA2vD8RxvSxdq4zxu4qRURE5CQUhJ2odDYYIFRB+LQFBEK7JHjin3DrQ9a6xHt2wuvDYPJg2PGd3RWKiIjICSgIO1H+kf7gkKoQGGhvLf4gMAg63AxPvQlJ3a11iX9cBy8PgH//A7Iy7K5QREREyqAg7ESeFSM0G3xWhVWHLr1h6Exod43VQrHqC3i+F3w6Aw4fsrtCEREROYaCsBMd0hrC51RULegxBFKmQJNLoKQIFs+GUb1gxUJwu+2uUERERFAQdqZ87SpXIeo2gX5j4L6REF0Hcn+Dt1+ASYOsHetERETEVgrCTpR/0LrVZhrnnstl7VA3ZAbceB8Eh8L2TTB+AMwaB3n77a5QRETEsRSEneiQZoQrXFCwtS3zkzOhbZK1vFrafHj+Hvjyv9aSdiIiIlKhFISdKF89wraJiIY7n4CHJ1itE4cPwkdT4aUH4efNdlcnIiLiKArCTuRZNUKtEbZp2AIeeQVue8Q6D7/8CBMehvcneK/zLCIiIueMgrATlbZGhFaztw6nCwiExC4w9A247FqrXWLZp9bqEumLtDudiIjIOaYg7ESaEa5cqkfCHY9D/xehVrx1Ad1/RsG0IbB3l93ViYiI+C0FYSdSj3Dl1LgNPPYqXH8PBFWB71fBmD6w4F9QXGR3dSIiIn5HQdiJFIQrr6BguPZOePw1aNbWCsCf/QvG9YOfv7O7OhEREb+iIOxEpT3Cao2ovM6vA/ePhrueslonMrZbF9N9/CoUHra7OhEREb+gIOw0xhzdUEMzwpWbywWXXg1DXoe2ncG4Ycl7MPZ++GGt3dWJiIj4PAVhpyk4ZAUqUBD2FdUj4M6h1lbNEdGwbzdMHgzvjbfWIRYREZHToiDsNIeO9AcHVYHgEHtrkfJpcTkMec1acg3gmznwwn2wKc3eukRERHyUgrDTeC6UU3+wTwqrbm3C8eCLcF5tyN4LM56CWeM0OywiIlJOCsJOk3/kQjm1Rfi2Jm3g8Rlw1a1WL3HafPUOi4iIlJOCsNOUtkZUVRD2ecGh0LWfNTtcMxayMmDKo/DRVCgssLs6ERGRSk9B2Gm0hrD/adwaHpsOl99grQry5X/hpX6wY4vdlYmIiFRqCsJOoyDsn0KrQrcU6PMPCD8PMnfAhIdg/ptQUmx3dSIiIpWSgrDTaDMN/3ZRAjw+HS75E7jd8Pm/YfxDkPGz3ZWJiIhUOgrCTqMZYf9XLQJ6Pg09n7L+h2fXVqtVYtknVuuEiIiIAArCzqMg7ByXXA2PvwbN20FRIbw/Ef45DA7k2F2ZiIhIpaAg7DSe1ggFYUeIOA/6PG+tLhFYBTakwt
i+sCXd7spERERspyDsNPlHNl3QhhrOERBgrTf8yCsQUw9yf4NpQ+CT6VBcZHd1IiIitlEQdhptqOFcdS6AlClw5U3W/S/ehQkPw56d9tYlIiJiEwVhp9GGGs4WHAr/NxDu/TtUC7cupBvXD76dpwvpRETEcRSEneZw6cVyao1wtFZXWptwNLkECg/D7Jfg38/D4YN2VyYiIlJhFISdpKjQOgDCqtlbi9gvIhoeeAFu7GP1Ea/+Al56EHb/YHdlIiIiFUJB2ElKV4xwBUBIVXtrkcohIAA6d4OHxkPk+bB3t7UBx7JP1SohIiJ+T0HYSTxtEdWsACRSqsFF8Oir0CLRWkni/Qnwr5FqlRAREb+mNOQkh7SZhpxAtXDo/Szc/AAEBMKaL60L6XZttbsyERGRc0JB2ElKWyN0oZwcj8sFf/o/eOhliKoF+36B8Q/D1x+rVUJERPyOgrCTlP6aW0unyck0uAgGT4OWiVBSBP+dBG8+p1YJERHxKwrCTnJIm2lIOVQLh3uPaZVYuxRe6g+/bre7MhERkbNCQdhJ8tUjLOVU2irx8Pgjq0rsgvEDYNUXdlcmIiJyxhSEnaQ0CFdVj7CUU/0LYfDUoxtw/Psf8OEUKCm2uzIREZHTpiDsJGqNkDNRPRIeGA1J3a37Sz+AyYMh5zdbyxIRETldCsJOotYIOVMBgdClN9z7dwitCts2wrgH4Md1dlcmIiJSbgrCTlI6I6xVI+RMtboSUqZA7QaQtx+mPApL3tcSayIi4lMUhJ0k/8jSV1pHWM6G8+vCwElw6Z/B7YaPp1lLrBXk212ZiIjIKVEQdpJ89QjLWRYSBncOhb8OOLrE2viHrI04REREKjkFYScp3WJZrRFyNrlc0LErDHgJatSEjO3wcn/Ykm53ZSIiIid0WkF48uTJNGjQgNDQUBISEli+fPkJx7/33ns0b96c0NBQWrVqxbx587yeN8YwbNgwateuTVhYGElJSWzdutVrTFZWFj169CA8PJzIyEh69+7NgQMHvMasW7eOjh07EhoaSnx8PGPGjPlDLePHj6dZs2aEhYURHx/PI488wuHDh0/nx+BbSkqg4JD1tVoj5Fxo2MLqG67X3OpHf3Wo+oZFRKRSK3cQnj17NikpKQwfPpxVq1bRunVrkpOT2bNnT5njv/nmG7p3707v3r1ZvXo1Xbt2pWvXrmzYsMEzZsyYMUycOJFp06aRlpZGtWrVSE5O9gqoPXr0YOPGjSxcuJA5c+awdOlS+vbt63k+NzeXa6+9lvr165Oens7YsWMZMWIE06dP94x5++23eeKJJxg+fDibN2/m9ddfZ/bs2Tz55JPl/TH4nsPH/E9DWDX76hD/FhltzQy3TwZzpG/4rRegsMDuykRERP7IlFP79u1N//79PfdLSkpMXFycGTVqVJnjb7vtNtOlSxevxxISEsz9999vjDHG7Xab2NhYM3bsWM/z2dnZJiQkxLzzzjvGGGM2bdpkALNixQrPmPnz5xuXy2V2795tjDFmypQpJioqyhQUFHjGDBkyxDRr1sxzv3///ubPf/6zVy0pKSnmyiuvPOXPn5OTYwCTk5Nzyq+pFPbsMmZQZ2OG3Gh3JeIEbrcxX35gTMo11n934/oZs3+P3VWJiIhDnGpeK9eMcGFhIenp6SQlJXkeCwgIICkpidTU1DJfk5qa6jUeIDk52TN+27ZtZGRkeI2JiIggISHBMyY1NZXIyEjatWvnGZOUlERAQABpaWmeMZ06dSI4ONjrfbZs2cL+/fsBuOKKK0hPT/e0cvz000/MmzePG264oTw/Bt+kNYSlIrlc0OkWeOAFayfDnd/DSw/CTxtO/loREZEKUq4gvG/fPkpKSoiJifF6PCYmhoyMjDJfk5GRccLxpbcnG1OrVi2v54OCgqhZs6bXmLK+x7Hvcccdd/Dss8/SoUMHqlSpwgUXXMCf/vSnE7ZGFBQUkJub63X4JAVhsUOTS6y+4bhGR9cb/maO3VWJiI
gADls1YsmSJTz//PNMmTKFVatW8cEHHzB37lyee+65475m1KhRREREeI74+PgKrPgs8mymoQvlpIKdVxsengCtO0FJMbw33jpKiu2uTEREHK5cQTg6OprAwEAyMzO9Hs/MzCQ2NrbM18TGxp5wfOntycb8/mK84uJisrKyvMaU9T2OfY9nnnmGu+66i/vuu49WrVpxyy238PzzzzNq1CjcbneZ9Q8dOpScnBzPsXPnzjLHVXqaERY7hYTB3c/ADfdabRPfzIFpT8BBH/0Ni4iI+IVyBeHg4GDatm3LokWLPI+53W4WLVpEYmJima9JTEz0Gg+wcOFCz/iGDRsSGxvrNSY3N5e0tDTPmMTERLKzs0lPP7ou6eLFi3G73SQkJHjGLF26lKKiIq/3adasGVFRUQAcOnSIgADvjxwYGAhYS7iVJSQkhPDwcK/DJykIi91cLrjmDuj9nBWMf1gDEx6CPbvsrkxERByq3K0RKSkpzJgxgzfffJPNmzfTr18/Dh48SK9evQDo2bMnQ4cO9YwfOHAgCxYsYNy4cXz33XeMGDGClStXMmDAAABcLheDBg1i5MiRfPLJJ6xfv56ePXsSFxdH165dAbjwwgu57rrr6NOnD8uXL2fZsmUMGDCA22+/nbi4OMDq/w0ODqZ3795s3LiR2bNnM2HCBFJSUjy13HTTTUydOpVZs2axbds2Fi5cyDPPPMNNN93kCcR+65B2lZNKosXlVqtEVC3YuxvGD4DvV9ldlYiIONHpLEkxadIkU69ePRMcHGzat29vvv32W89zV111lbn77ru9xr/77rumadOmJjg42LRo0cLMnTvX63m3222eeeYZExMTY0JCQkznzp3Nli1bvMb89ttvpnv37qZ69eomPDzc9OrVy+Tl5XmNWbt2renQoYMJCQkxderUMaNHj/Z6vqioyIwYMcJccMEFJjQ01MTHx5sHH3zQ7N+//5Q/u88unzb7ZWsZqwX/srsSEUtuljHjH7L+u0y5xphln9pdkYiI+IlTzWsuY7TtU3nk5uYSERFBTk6Ob7VJvPkcrPkSbulvLWslUhkUFcLscZB+pDWq4y1w8wPg77+hERGRc+pU85qjVo1wtEPqEZZKqEow9HjCuogO4KsP4bWnj/a0i4iInEMKwk5RusWylk+Tyqb0Irp7hkGVEPhuBUwYCPt+sbsyERHxcwrCTuGZEa5mbx0ix9O6Ezz0MkScB5k/w/iH4Mf1dlclIiJ+TEHYKfJLV43QjLBUYvFNYdBkqNsUDubA1MdgxUK7qxIRET+lIOwExhztuayqHmGp5CKj4aGXju5E9/YL8Nm/rf+ORUREziIFYScoyIfSnfN0sZz4guBQ6Pk0/LmbdX/Bm/DOWCguOvHrREREykFB2AlK2yICq1gXI4n4goAAuKkP/G2Q9fWKz2H6k1pRQkREzhoFYSfIP2jdVq1uXaEv4kuuuBHuG2lty7x1tbWiRFaG3VWJiIgfUBB2Am2vLL7uwvbw0HiIiD66osSOLXZXJSIiPk5B2AnytZmG+IE6F8CgSRDXCPL2w+TBsOEbu6sSEREfpiDsBPnaTEP8ROT51lrDzdtB4WH453BY+qHdVYmIiI9SEHYCT2uENtMQPxBazeoZTuxiLan24WT4cAq4S+yuTEREfIyCsBN4WiM0Iyx+IjDIWk3ixj7W/aUfwBt/t2aJRURETpGCsBNoMw3xRy4XdO4GPZ+CoCpWv/CUx+BAjt2ViYiIj1AQdoJDulhO/NglV0O/MVYP/M+bYaKWVxMRkVOjIOwEpRtqqDVC/FWjVtbyapHnw95dMP5h2LXV7qpERKSSUxB2ArVGiBPE1oeBpcurZcErg2FLut1ViYhIJaYg7ARqjRCniIyGAS9B49ZQcMjakjl9kd1ViYhIJaUg7ATaUEOcJKw63D8KLvmTtaTaf0bB4tnWUmsiIiLHUBB2Am2oIU4TFAx3PglX3Wrd/3QGfDQV3G576xIRkUpFQdjfFRdCUY
H1tWaExUkCAqBrP/jL/db9pR/Av0ZCUaG9dYmISKWhIOzvSvuDXS4IqWpvLSJ2uPpvcNeT1iYca5fCq08c/S2JiIg4moKwvzu2PzhAp1sc6tI/W33DIVXhx3UwcRBk77W7KhERsZmSkb8rDcKh1eytQ8RuTS6Bh16G8PMgY7u18caenXZXJSIiNlIQ9neHjmymoQvlRKDOBTBwIpxfF/bvsWaGd2yxuyoREbGJgrC/09JpIt5qxsDD4yG+KRzMgSmPauMNERGHUhD2dwrCIn9UPRIefNFqlyjIhxlPw5ov7a5KREQqmIKwv/O0RigIi3gJrQp9/wGtO0FJkbW02rJP7a5KREQqkIKwv/PMCKtHWOQPgoKh51OQeKO189z7E+Dz/2gXOhERh1AQ9neeXeU0IyxSpoBA+NtAuKaHdX/+TPhwsnahExFxAAVhf3dIPcIiJ+VywQ294Jb+1v2vPoK3RkNxka1liYjIuaUg7O/yj/QIqzVC5OQ63QJ3DrVmiVcthteHWRfTiYiIX1IQ9ndqjRApn7ad4b7noEoIfLcCpg2Bg7l2VyUiIueAgrC/U2uESPld2B4eHGttRLN9E7ySAtn77K5KRETOMgVhf6fWCJHT0+AiGPAyRJRuyfww7N1ld1UiInIWKQj7M3cJHD5kfa3WCJHyq90AHj5mS+ZJj8AvP9ldlYiInCUKwv4s/+DRr9UaIXJ6asbAQy9D3AWQt99qk9i+ye6qRETkLFAQ9melF8oFh0JgkL21iPiyGlEwYBw0aGH9uZr6OHy/yu6qRETkDCkI+zPPihHqDxY5Y2HV4YHR0LQtFB6G6U/B+mV2VyUiImdAQdifHSq9UK6avXWI+IuQMOjzHLTqACVFMPPvsPJ/dlclIiKnSUHYn5XOCGvFCJGzJygY7n4G2l1jbcP81mj4+mO7qxIRkdOgIOzPtJmGyLkRGAjdH4OOXa37/50E/3vH1pJERKT8FIT9WelmGqEKwiJnXUAA3NIfrulh3Z/7Onw6A4yxty4RETllCsL+rHQzDV0sJ3JuuFxwQy/4S1/r/uLZ8P4Eq2VCREQqPQVhf5av7ZVFKsTVt8Ftj1jB+Js5Vt9wSbHdVYmIyEkoCPszBWGRipPYBe58EgICYdVia0WJokK7qxIRkRNQEPZnpcun6WI5kYpx6dVw79+hSjBsSIXpTx7d5lxERCodBWF/VrrFspZPE6k4LS6Hvs9baw7/sMbahe5grt1ViYhIGRSE/VnpxXJqjRCpWI3bwINjrQtVd3wHUx6FvP12VyUiIr+jIOzPDmkdYRHb1GsOA16GGlHwy0/wymDI3md3VSIicgwFYX9ljHaWE7Fb7QZWGI48H/bsgFcegawMu6sSEZEjFIT9VeFhcJdYX4dVs7cWESerVRceehnOqw2//QqTHoG9u+yuSkREUBD2X6UrRgQGQXCovbWIOF3NWGtmuFY9yN4Lk1Lg1+12VyUi4ngKwv7q2DWEXS57axERiIyGAeMgrhHkZcHkFNi11e6qREQcTUHYX2kzDZHKp0YUPPgi1GtmLak2+VHYvsnuqkREHEtB2F9pMw2RyqlaOPQbAw1bwuGDMG0I/LDW7qpERBxJQdhfaTMNkcortBrcPwqaXAIF+TB9KHy3wu6qREQcR0HYX2kzDZHKLSQM+vwDLkqAokJ4bRisX2Z3VSIijqIg7K+0mYZI5VclGHqNgNadoKQIZv4dVn9hd1UiIo6hIOyvtJmGiG8IqgJ3PQXtksDthn+PguWf2V2ViIgjKAj7K09rhDbTEKn0AgOh++OQ2AWMG94ZC19/bHdVIiJ+T0HYX3laIzQjLOITAgLgb4Og01+t+/+dBF+8a2tJIiL+TkHYX2kdYRHf43JB135wzR3W/U+mw2f/BmPsrUtExE8pCPsrBWER3+RywQ33wg29rPsL3oQ5rykMi4icAwrC/srTI6zWCBGfdE0Pa3YYYPFs+GiqwrCIyFmmIOyvSjfU0PJpIr7rqlutvm
GApR/AfydaK0uIiMhZoSDsj4qLoPCw9bVaI0R82xU3wu2DrZaJZZ/Cuy+Du8TuqkRE/MJpBeHJkyfToEEDQkNDSUhIYPny5Scc/95779G8eXNCQ0Np1aoV8+bN83reGMOwYcOoXbs2YWFhJCUlsXXrVq8xWVlZ9OjRg/DwcCIjI+nduzcHDhzwGrNu3To6duxIaGgo8fHxjBkz5g+1ZGdn079/f2rXrk1ISAhNmzb9Qz0+r7Q/2OWytnIVEd+WcD3cMQRcAZA231perURhWETkTJU7CM+ePZuUlBSGDx/OqlWraN26NcnJyezZs6fM8d988w3du3end+/erF69mq5du9K1a1c2bNjgGTNmzBgmTpzItGnTSEtLo1q1aiQnJ3P48GHPmB49erBx40YWLlzInDlzWLp0KX379vU8n5uby7XXXkv9+vVJT09n7NixjBgxgunTp3vGFBYWcs0117B9+3bef/99tmzZwowZM6hTp055fwyVW2kQDq1mLckkIr6vXRLcNdT6M73yf/DWaCgptrsqERHfZsqpffv2pn///p77JSUlJi4uzowaNarM8bfddpvp0qWL12MJCQnm/vvvN8YY43a7TWxsrBk7dqzn+ezsbBMSEmLeeecdY4wxmzZtMoBZsWKFZ8z8+fONy+Uyu3fvNsYYM2XKFBMVFWUKCgo8Y4YMGWKaNWvmuT916lTTqFEjU1hYWN6P7ZGTk2MAk5OTc9rf45zbttGYQZ2NebaH3ZWIyNm29itjBidbf8b/OdyYotP/+0xExF+dal4r13RhYWEh6enpJCUleR4LCAggKSmJ1NTUMl+TmprqNR4gOTnZM37btm1kZGR4jYmIiCAhIcEzJjU1lcjISNq1a+cZk5SUREBAAGlpaZ4xnTp1Ijg42Ot9tmzZwv79+wH45JNPSExMpH///sTExNCyZUuef/55Sk7wK8aCggJyc3O9jkpPS6eJ+K+LO0CvERBYBdZ9DTOfheJCu6sSEfFJ5QrC+/bto6SkhJiYGK/HY2JiyMjIKPM1GRkZJxxfenuyMbVq1fJ6PigoiJo1a3qNKet7HPseP/30E++//z4lJSXMmzePZ555hnHjxjFy5MjjfuZRo0YRERHhOeLj4487ttI4dGTpNO0qJ+KfWlwO9z0HVYJhYyq8PgwKC+yuSkTE5ziqgdTtdlOrVi2mT59O27Zt6datG0899RTTpk077muGDh1KTk6O59i5c2cFVnyaPDPCulBOxG81bwd9/gHBofDdSnjtaSjIt7sqERGfUq4gHB0dTWBgIJmZmV6PZ2ZmEhsbW+ZrYmNjTzi+9PZkY35/MV5xcTFZWVleY8r6Hse+R+3atWnatCmBgYGeMRdeeCEZGRkUFpb9q8WQkBDCw8O9jkrPE4Q1Iyzi15pcAn1HQUgYbF0N05+Ew4fsrkpExGeUKwgHBwfTtm1bFi1a5HnM7XazaNEiEhMTy3xNYmKi13iAhQsXesY3bNiQ2NhYrzG5ubmkpaV5xiQmJpKdnU16erpnzOLFi3G73SQkJHjGLF26lKKiIq/3adasGVFRUQBceeWV/PDDD7iPWZD++++/p3bt2l69xT7v0JEgrM00RPzfBa3ggRcgtCr8tB5efeLo/wyLiMgJlbs1IiUlhRkzZvDmm2+yefNm+vXrx8GDB+nVqxcAPXv2ZOjQoZ7xAwcOZMGCBYwbN47vvvuOESNGsHLlSgYMGACAy+Vi0KBBjBw5kk8++YT169fTs2dP4uLi6Nq1K2DN2l533XX06dOH5cuXs2zZMgYMGMDtt99OXFwcAHfccQfBwcH07t2bjRs3Mnv2bCZMmEBKSoqnln79+pGVlcXAgQP5/vvvmTt3Ls8//zz9+/c/7R9gpaSL5UScpcFF0G+sdV3A9k0w9fGj1wqIiMjxnc6SFJMmTTL16tUzwcHBpn379ubbb7/1PHfVVVeZu+++22v8u+++a5o2bWqCg4NNixYtzNy5c72ed7vd5plnnjExMT
EmJCTEdO7c2WzZssVrzG+//Wa6d+9uqlevbsLDw02vXr1MXl6e15i1a9eaDh06mJCQEFOnTh0zevToP9T+zTffmISEBBMSEmIaNWpk/vGPf5ji4uJT/uw+sXzaP4dbSyt99ZHdlYhIRdq11Zin/mr9+R/b15i8bLsrEhGxxanmNZcxxtgdxn1Jbm4uERER5OTkVN5+4cmPwg9r4K4n4dI/212NiFSkX7dZM8J5+6F2A2umuEaU3VWJiFSoU81rjlo1wjHUGiHiXLUbQv9xEH4e/LodXhkMOfvsrkpEpFJSEPZHCsIizhZTDwa8BJHnw54dVhjev+fkrxMRcRgFYX+kDTVE5Pw68NDLUDMW9u2GV1Igq+yNj0REnEpB2N+43VBwZB1RzQiLOFvNWGtmODrOCsGTHoG9u+2uSkSk0lAQ9jeHD0Lp9Y8KwiISVcsKw7XiIXsvTE6BzB12VyUiUikoCPub0raI4FAIqmJvLSJSOUREW2E4tgHk/AaTB1sX0omIOJyCsL/JP2jdajZYRI5VIwr6vwhxF1hLq00eDLt/tLsqERFbKQj7m/wjM8IKwiLye9Uj4cGxEN8UDubAlEdh5/d2VyUiYhsFYX9zSEunicgJVAuHfmOg/oVWK9WUx6xtmUVEHEhB2N8cPhKEtXSaiBxPWHV44AVo1Mq6wHbaEPhpvd1ViYhUOAVhf1N6sVxYNXvrEJHKLbQq9H0emlwCBfnw6lDYusbuqkREKpSCsL/x7CqnGWEROYmQMLhvJDRvB4WHYcaT8N1Ku6sSEakwCsL+pjQIV1WPsIicguAQuPdZuOhyKCqE156Bjd/aXZWISIVQEPY3ulhORMqrSjD0Gg4Xd4CSInhjBKz72u6qRETOOQVhf+NZPk2tESJSDkFVoOfTcMnVUFIMbz4Lq5fYXZWIyDmlIOxv1BohIqcrMAjufALaJYHbDf9+Hlb+z+6qRETOGQVhf6PWCBE5EwGB0P0xSLgejBvefgHS5ttdlYjIOaEg7G/yFYRF5AwFBMJtj8CVN4ExMGscLPvU7qpERM46BWF/YswxrRHqERaRMxAQALc+DJ3+at1/fwJ8+YG9NYmInGUKwv6k8LB1kQtoRlhEzpzLBV37wZ+7Wfc/mgKLZttbk4jIWaQg7E9KZ4MDAiE41N5aRMQ/uFxw431w7V3W/Tkz4PP/2FuTiMhZoiDsT45ti3C57K1FRPyHywXX3w039LLuz58J896w2rFERHyYgrA/8awYUc3eOkTEP13TA/7S1/p64Vvw6QyFYRHxaQrC/kSbaYjIuXb1bXBLf+vrL96Fj6YqDIuIz1IQ9ifaTENEKkKnW+Bvg6yvl35grSjhdttakojI6VAQ9ifaTENEKsoVN8Ltj1r9w9/MgXdfAneJ3VWJiJSLgrA/8bRGKAiLSAVIuA56PAGuAEhbAG+PhRKFYRHxHQrC/iT/oHWrzTREpKK07Qw9n7KWbUz/H/zn+aPrmYuIVHIKwv7k0JEZ4VDNCItIBWpzFdwzDAKDYM2X8OZIKC6yuyoRkZNSEPYnulhOROzS6kq49+8QVAXWfw1vjICiQrurEhE5IQVhf1IahLV8mojY4aIEuG8kVAmBTWnw+jPW1u8iIpWUgrA/8Vwspw01RMQmzdpC339Y27xvSYcZT0NBvt1ViYiUSUHYnxw6ZotlERG7NG4D94+GkKrwwxp4dSgcPmh3VSIif6Ag7E/ytY6wiFQSjVpCvxcgtBps2wDThhz9O0pEpJJQEPYXJcVHe/EUhEWkMqh/ITz4ovVbqp+/gymPwcEcu6sSEfFQEPYXpUungXqERaTyiG8C/cdB9UjYtdUKw3n77a5KRARQEPYfpb9yDK1mLWwvIlJZxDWywnCNmvDLTzD5Ucj5ze6qREQUhP2G+oNFpDKLrQ8DXoKIaMj8GSYPhux9dlclIg6nIOwvSlsjtJmGiFRWtepaYTiqFuzdBa88AlmZdlclIg6mIOwvtJmGiPiC6DgY8DKcVx
t++9UKw/t+sbsqEXEoBWF/odYIEfEVNWOsmeHz68L+PfBKCuzZZXdVIuJACsL+wrOZhoKwiPiAyPOtC+hi6kPOPisMZ/xsd1Ui4jAKwv7Cs72yWiNExEdEnAf9X4TaDSEvywrDu7baXZWIOIiCsL/wtEZoDWER8SE1oqwwHN/U2mxj8qOwfZPdVYmIQygI+wtPa4RmhEXEx1SLgH5joGFLOHwQpj4OW9fYXZWIOICCsL/QxXIi4svCqsP9o6BpW2u7+BlPwqY0u6sSET+nIOwvFIRFxNeFhMF9z0HLRCgqhH8Oh7VL7a5KRPyYgrC/8GyoodYIEfFhVYLhnuFwyZ+gpBjeHAkrFtpdlYj4KQVhf6EZYRHxF4FBcOdQaJ8Mxg1vvwDLPrW7KhHxQwrC/sDtti4wAc0Ii4h/CAiEboOhY1fr/vsTYMn7tpYkIv5HQdgfFBwCY6yvtXyaiPiLgAC4pT90vt26//E0+OzfR/++ExE5QwrC/qC0P7hKCAQF21uLiMjZ5HLBjffBDb2s+wvehE9nKAyLyFmhIOwP8rW9soj4uWt6QNd+1tdfvAv/nWS1hYmInAEFYX9QGoRDFYRFxI9ddSvc9og1S7zsE5j1IpSU2F2ViPgwBWF/4Fk6TUFYRPxcYhfo8YTVP7zic/j3P6C4yO6qRMRHKQj7g/wjK0aEacUIEXGAtp3h7mEQWMXacOONEdYGHCIi5aQg7A/yj8wIaw1hEXGKizvAfc9aG3BsSoMZT0FBvt1ViYiPURD2B4d0sZyIOFDzy6DvKGtr5q2rYdoTR6+ZEBE5BQrC/sCzq5xaI0TEYRq3hn5jrN+Ibd8IrwyGvP12VyUiPkJB2B94WiO0mYaIOFD9C2HAS1AjCn75ESYOgqxMu6sSER+gIOwPPK0RmhEWEYeKawQPjYeasbBvN0wcCJk/212ViFRyCsL+wNMaoR5hEXGw8+tYYTimHuTsg0mPwI4tdlclIpWYgrA/UBAWEbFERsOAl6FeMziYC1Megx/W2F2ViFRSCsL+wLOhhlojRESoHgH9xkLjNlBwCF4dChtS7a5KRCohBWFfZ8wxG2poRlhEBIDQqtD3eWiZaO0898ZwSF9kd1UiUskoCPu6ogIoObK9qIKwiMhRVYLhnhHQLgncbnhrNHz9sd1ViUgloiDs60pXjAgIsBaVFxGRowIDofvj0LGr9Ru0/06ChW9ZX4uI451WEJ48eTINGjQgNDSUhIQEli9ffsLx7733Hs2bNyc0NJRWrVoxb948r+eNMQwbNozatWsTFhZGUlISW7du9RqTlZVFjx49CA8PJzIykt69e3PggPcOQuvWraNjx46EhoYSHx/PmDFjjlvTrFmzcLlcdO3atXwfvrI5djMNl8veWkREKqOAALilP1x7l3V/3hvwyXSFYREpfxCePXs2KSkpDB8+nFWrVtG6dWuSk5PZs2dPmeO/+eYbunfvTu/evVm9ejVdu3ala9eubNiwwTNmzJgxTJw4kWnTppGWlka1atVITk7m8OHDnjE9evRg48aNLFy4kDlz5rB06VL69u3reT43N5drr72W+vXrk56eztixYxkxYgTTp0//Q03bt2/n0UcfpWPHjuX9+JWPZzMNtUWIiByXywXX3w1d+1n3l7wHs8eBu8TeukTEXqac2rdvb/r37++5X1JSYuLi4syoUaPKHH/bbbeZLl26eD2WkJBg7r//fmOMMW6328TGxpqxY8d6ns/OzjYhISHmnXfeMcYYs2nTJgOYFStWeMbMnz/fuFwus3v3bmOMMVOmTDFRUVGmoKDAM2bIkCGmWbNmXu9dXFxsrrjiCvPaa6+Zu+++29x8883l+vw5OTkGMDk5OeV63Tmz/htjBnU25qUH7a5ERMQ3pC0w5pFrrL87/znCmKKCk79GRHzKqea1cs0IFxYWkp6eTlJSkuexgIAAkpKSSE0te2ma1NRUr/EAycnJnvHbtm0jIyPDa0xERAQJCQmeMampqURGRt
KuXTvPmKSkJAICAkhLS/OM6dSpE8HBwV7vs2XLFvbvP7rv/LPPPkutWrXo3bv3KX3mgoICcnNzvY5KRWsIi4iUT/tkuGcYBFaBdV/BjKfh8CG7qxIRG5QrCO/bt4+SkhJiYmK8Ho+JiSEjI6PM12RkZJxwfOntycbUqlXL6/mgoCBq1qzpNaas73Hse3z99de8/vrrzJgx49Q+MDBq1CgiIiI8R3x8/Cm/tkIoCIuIlN/FHaDPSAgOhe9XwZRHIW//yV8nIn7FMatG5OXlcddddzFjxgyio6NP+XVDhw4lJyfHc+zcufMcVnkaSjfTCNNmGiIi5dKsLfR/EapFwM7vYdIg+O1Xu6sSkQoUVJ7B0dHRBAYGkpmZ6fV4ZmYmsbGxZb4mNjb2hONLbzMzM6ldu7bXmDZt2njG/P5ivOLiYrKysry+T1nvU/rcjz/+yPbt27nppps8z7vdbsCaXd6yZQsXXHDBH+oPCQkhJCSkzM9WKZTOCFfVjLCISLnVaw4PT4BpQ2Dvbpg4CO4fBXGN7K5MRCpAuWaEg4ODadu2LYsWHd2dx+12s2jRIhITE8t8TWJiotd4gIULF3rGN2zYkNjYWK8xubm5pKWlecYkJiaSnZ1Nenq6Z8zixYtxu90kJCR4xixdupSioiKv92nWrBlRUVE0b96c9evXs2bNGs/xl7/8hauvvpo1a9ZUvpaHU6XWCBGRM1OrLgycALUbQu5vMOkR+HGd3VWJSAUod2tESkoKM2bM4M0332Tz5s3069ePgwcP0qtXLwB69uzJ0KFDPeMHDhzIggULGDduHN999x0jRoxg5cqVDBgwAACXy8WgQYMYOXIkn3zyCevXr6dnz57ExcV51vi98MILue666+jTpw/Lly9n2bJlDBgwgNtvv524uDgA7rjjDoKDg+nduzcbN25k9uzZTJgwgZSUFABCQ0Np2bKl1xEZGUmNGjVo2bKl10V2PkXLp4mInLmIaHjoZWjYEg4ftGaI1y+zuyoROcfK1RoB0K1bN/bu3cuwYcPIyMigTZs2LFiwwHNh2o4dOwgIOJqvr7jiCt5++22efvppnnzySZo0acJHH31Ey5YtPWMef/xxDh48SN++fcnOzqZDhw4sWLCA0NBQz5i33nqLAQMG0LlzZwICArj11luZOHGi5/mIiAg+//xz+vfvT9u2bYmOjmbYsGFeaw37pfyD1m1V9QiLiJyRsOrwwAvw75GwIRXe+DvcNgguv8HuykTkHHEZo611yiM3N5eIiAhycnIIDw+3uxwY0wd+3Wb95d2srd3ViIj4vpISeO9lSFtg3b+hFyTdod07RXzIqeY1x6wa4bfUIywicnYFBkK3wVb4BWtL5g9egSMXWIuI/1AQ9nWeVSPUGiEicta4XNDlXrilv3X/64/hP89DcaG9dYnIWaUg7MtKiqEg3/o6rJq9tYiI+KNOt8BdT0JgEKxeol3oRPyMgrAvK50NBrVGiIicK5f+Ge47Zhe6ydqFTsRfKAj7skNHgnBoVQgItLcWERF/1rwdPHhkF7pd38OEh2HPLrurEpEzpCDsy3ShnIhIxal/ZBe6mrHWVswTH4btm+yuSkTOgIKwL/NspqEL5UREKkStujBoEsQ3hYO5MOVRbbwh4sMUhH1ZaWtEVc0Ii4hUmBpR0H8cXJQARYXWxhtff2x3VSJyGhSEfZlaI0RE7BESBvc+a+06Z9zw30nw6QytNSziYxSEfZmnNUJBWESkwgUGwm2PWDvPASyeDW+N1lrDIj5EQdiXHdJmGiIitnK54Joe0P1xa/WeVYvh1Se9l7cUkUpLQdiXqTVCRKRyaH8t9H0eQqrCD2tg0iDYv8fuqkTkJBSEfZmCsIhI5dGsLTz0EoSfB79ut9Ya/uUnu6sSkRNQEPZlh470CKs1QkSkcqjTGAZNhJj6kLPPmhn+fpXdVYnIcSgI+7L8g9ZtaDV76xARkaOiYuDh8XDBxXD4ELw6FL6db3dVIlIGBWFflq
8ZYRGRSqlqDbh/NFz6Z3CXwOxxMOc1La8mUskoCPuyfG2oISJSaVUJhjuHwrV3WfcXzYJ/jYTCAnvrEhEPBWFf5XYf0xqhICwiUim5XHD93XDH4xAYBGuXwpTBkLff7spEBAVh31VwyNrNCNQaISJS2V12LTzwgvX39c/fwfiHrJUlRMRWCsK+qrQtokqwdYiISOXWuDUMnATRdSArAyY+DFvS7a5KxNEUhH1VaRBWW4SIiO+oVddaXq1RK2tFielDIXWu3VWJOJaCsK86pAvlRER8UrUI6PcCtE2yrvd492X4ZLpWlBCxgYKwrypdOi1M/cEiIj4nKBh6DIHkntb9L96FN5+FwsP21iXiMArCvkpLp4mI+DaXC67raS2xFlgF1n0Nkx6B7L12VybiGArCvqq0NSJMQVhExKe17QwPjrFaJnZthZf6w/ZNdlcl4ggKwr7K0xqhICwi4vMatYKUyVC7IeRlweTBsGKh3VWJ+D0FYV9VupmG1hAWEfEPNWPh4QnQ8gooLoK3X4BPZ1hbNIvIOaEg7KsOaUZYRMTvhFaFXiMg6Q7r/uLZ8PpwOHzQ1rJE/JWCsK/KV4+wiIhfCgiALvdaF9EFVYFN38KEgbDvF7srE/E7CsK+yrNqhFojRET8UtvOMOBlCD8PMrbD+AHwwxq7qxLxKwrCvsqzakQ1e+sQEZFzp35zeGQyxDeFg7kwdQh8M8fuqkT8hoKwr9KGGiIizhAZbc0MX3K1deHce+Ph/YnWBXUickYUhH2VNtQQEXGO4BC460m44V7r/rJPYMpjkJtlb10iPk5B2BcVFhydCdDFciIizuBywTV3QO/nrNUltm2Al/pp8w2RM6Ag7ItK2yICAiCkqr21iIhIxWqZCI9MgZh6kPMbvDIYvp1nd1UiPklB2BeVbqYRVt2aIRAREWepVRcGvQIXd4CSIpj9Erw7HooL7a5MxKcoCPsibaYhIiKhVeGe4VbfsMsFqXNg8qOQs8/uykR8hoKwL9JmGiIiAkf7hvv8w/o3YfsmGPcg/LTB7spEfIKCsC/yBGEtnSYiIsCF7SFlCtRuAHlZMOVRa2UJY+yuTKRSUxD2RZ7WCG2mISIiR0THwcBJ0OYqKCm21hp+ewwUHra7MpFKS0HYF2l7ZRERKUtIGPR8Gm7qA64AWLnQ2pp5zy67KxOplBSEfZF6hEVE5HhcLvhzN3hwLNSIgl+3w0sPwpov7a5MpNJREPZFCsIiInIyjVvD4GlwwcVQcAjefA4+mKytmUWOoSDsi0p7hNUaISIiJxJxHvQbC51vt+5/9SG8kgL7M+2tS6SSUBD2RZoRFhGRUxUYCDfeB/eNtP7d+HkzjOsH362wuzIR2ykI+yIFYRERKa8Wl8PgqVC3KRzMhelPwvw3wV1id2UitlEQ9kWe1ggFYRERKYfzasPD4+HKm6w1hj//N0wdot3oxLEUhH2RNtQQEZHTVSUY/m8g3DkUgkPhhzUw9n7YvNzuykQqnIKwrykpgYJ862u1RoiIyOlq29laVaLOBXAwx2qV+PhVrSohjqIg7GtKZ4NBQVhERM5MrbrWbnQdu1r3l7wHEwfCvl9sLUukoigI+5r8I/3BIVWtK4FFRETORJVg+OsAuPfv1rKcO7+HFx+AVV/YXZnIOacg7Gs8/cHV7K1DRET8S6sr4dFXoWFLawOOf/8DZo072o4n4ocUhH3NoSNBWJtpiIjI2RZVC/qPg2vvtLZqTpsPL/eHXVvtrkzknFAQ9jWlrRHqDxYRkXMhMBCuv8fakS78PMjcAeMfgkWzteaw+B0FYV+Tf9C6VRAWEZFzqUkbeGy61TJRUgxzZsCUx7Q9s/gVBWFf49lMQ60RIiJyjlWPgF4j4PbB1prDP66DMX0hfZHdlYmcFQrCvkbbK4uISEVyuSDhemt2uP6FcPgg/GeUdTHdsUt6ivggBWFfoyAsIiJ2iI6Dh8bDdXdDQIC1vNqYPtbOdCI+SkHY1xzSxXIiImKTwE
BIvgsengDRdSB7r9U3/PE0KCywuzqRclMQ9jX5Wj5NRERsVv9CeHQaXH4DGANL3ocX74ftm+yuTKRcFIR9jVojRESkMggJg24pcN9Ia5m1vbtg4iD4ZDoUFdpdncgpURD2NQrCIiJSmbS4HIa8Bu2SwLjhi3dh3APw83d2VyZyUgrCvkbLp4mISGVTtQb0eAJ6Pws1alqbcEx4GOa8BsWaHZbKS0HYlxhzzIYa1eytRURE5PdaXmHNDrftbM0OL5oFL/aDHZodlspJQdiXFByy/mIBCNOMsIiIVELVwuHOoXDv36F6JGT+bM0Of/IqFB62uzoRLwrCvuTQkf7goCoQHGJvLSIiIifS6kp44nW49Gpwu+GL9+CF+2BLut2ViXicVhCePHkyDRo0IDQ0lISEBJYvX37C8e+99x7NmzcnNDSUVq1aMW/ePK/njTEMGzaM2rVrExYWRlJSElu3bvUak5WVRY8ePQgPDycyMpLevXtz4ID3jjbr1q2jY8eOhIaGEh8fz5gxY7yenzFjBh07diQqKoqoqCiSkpJOWnulogvlRETEl1SLgLueslaWiDwfsjJg2hB4azQcyLG7OpHyB+HZs2eTkpLC8OHDWbVqFa1btyY5OZk9e/aUOf6bb76he/fu9O7dm9WrV9O1a1e6du3Khg0bPGPGjBnDxIkTmTZtGmlpaVSrVo3k5GQOHz76K5QePXqwceNGFi5cyJw5c1i6dCl9+/b1PJ+bm8u1115L/fr1SU9PZ+zYsYwYMYLp06d7xixZsoTu3bvzxRdfkJqaSnx8PNdeey27d+8u74/BHvmlm2moLUJERHxIi8thyOvQ8RZry+aV/4PR91q3xthdnTiZKaf27dub/v37e+6XlJSYuLg4M2rUqDLH33bbbaZLly5ejyUkJJj777/fGGOM2+02sbGxZuzYsZ7ns7OzTUhIiHnnnXeMMcZs2rTJAGbFihWeMfPnzzcul8vs3r3bGGPMlClTTFRUlCkoKPCMGTJkiGnWrNlxP0txcbGpUaOGefPNN0/145ucnBwDmJycnFN+zVmz9itjBnU2ZvxDFf/eIiIiZ8P2Tca8cJ/179mgzsZMG2LMvl/srkr8zKnmtXLNCBcWFpKenk5SUpLnsYCAAJKSkkhNTS3zNampqV7jAZKTkz3jt23bRkZGhteYiIgIEhISPGNSU1OJjIykXbt2njFJSUkEBASQlpbmGdOpUyeCg4O93mfLli3s37+/zNoOHTpEUVERNWvWLM+PwT5qjRAREV9X/0IYPBVuuNe65uW7lVbv8MK3tdSaVLhyBeF9+/ZRUlJCTEyM1+MxMTFkZGSU+ZqMjIwTji+9PdmYWrVqeT0fFBREzZo1vcaU9T2OfY/fGzJkCHFxcX8I6scqKCggNzfX67CNgrCIiPiDwCC45g54bAY0bg1FBTDvnzCmrxWMRSqIY1eNGD16NLNmzeLDDz8kNDT0uONGjRpFRESE54iPj6/AKn9Hm2mIiIg/qVUXHnzRWm6tRk1rm+ZXn4CZz0L2XrurEwcoVxCOjo4mMDCQzMxMr8czMzOJjY0t8zWxsbEnHF96e7Ixv78Yr7i4mKysLK8xZX2PY9+j1Isvvsjo0aP5/PPPufjii0/4mYcOHUpOTo7n2Llz5wnHn1OaERYREX/jclkbcAz9J3T6KwQEwNqlMKoXLJoNxUV2Vyh+rFxBODg4mLZt27Jo0SLPY263m0WLFpGYmFjmaxITE73GAyxcuNAzvmHDhsTGxnqNyc3NJS0tzTMmMTGR7Oxs0tOPrj24ePFi3G43CQkJnjFLly6lqKjI632aNWtGVFSU57ExY8bw3HPPsWDBAq+e4+MJCQkhPDzc67CNgrCIiPirsOpwy4OQMhUatrA235gzA8beD9+vsrs68VflvQpv1qxZJiQkxMycOdNs2rTJ9O3b10RGRpqMjAxjjDF33XWXeeKJJzzjly1bZoKCgsyLL75oNm/ebIYPH26qVKli1q9f7x
kzevRoExkZaT7++GOzbt06c/PNN5uGDRua/Px8z5jrrrvOXHLJJSYtLc18/fXXpkmTJqZ79+6e57Ozs01MTIy56667zIYNG8ysWbNM1apVzauvvur1PsHBweb99983v/76q+fIy8s75c9v66oRrw61rrD9dl7Fv7eIiEhFcbuNSfvMmKdvPbq6xOvDjdm72+7KxEecal4rdxA2xphJkyaZevXqmeDgYNO+fXvz7bffep676qqrzN133+01/t133zVNmzY1wcHBpkWLFmbu3Llez7vdbvPMM8+YmJgYExISYjp37my2bNniNea3334z3bt3N9WrVzfh4eGmV69efwiwa9euNR06dDAhISGmTp06ZvTo0V7P169f3wB/OIYPH37Kn93WIDz+Yesvg7VfVfx7i4iIVLRDeca8P9GYlGusf/8GJxvz8avW4yIncKp5zWWMVrIuj9zcXCIiIsjJyan4NonR90LmDuvCgiZtKva9RURE7PLrdvh46tHtmatHwvX3QML1EBhoY2FSWZ1qXnPsqhE+6dCRHuGq6hEWEREHqd0A7h8Nff4BterBgWx4bzyMe0D9w3JGFIR9yeHSi+W0fJqIiDiMywUXJcDj0+GW/tZSor9ug6mPw/QnYfePdlcoPkhB2FcUFVoHQFg1e2sRERGxS2AQdLoFnnwTOt4CAYGwebk1O/yf0ZBV9iZaImVREPYVpZtpuAIgpKq9tYiIiNitWjj8tT888U+45E9gDKT/D56/Bz6cYrVPiJyEgrCv8LRFVLMWGxcRERE4vw70fBpSpkDTS6GkGJZ+ACN7wmf/hoJ8uyuUSkyJylcc0mYaIiIixxXfFPqNgQdegLpNoOAQLHgTRt4Fi2crEEuZguwuQE5RaWuELpQTERE5vmZtocklsPZLmPcG7PsFPp0BX7wHf+4GV94EwaF2VymVhGaEfcXhg9atlk4TERE5sYAAuORqq3/49kfhvNpWz/Anr8Jzd8KS960tnMXxFIR9hWdGWEFYRETklAQGQcJ1MPQNuH0w1Iy1AvHH06yWiSXvq2XC4RSEfUW+eoRFREROS2CQtQvdkzOh25FAnLffCsTP9YAF/4KDOXZXKTZQj7CvKA3CVdUjLCIicloCg+Dy6+Gya2DF57BoltVD/Nm/4It34fIb4E+3QlSM3ZVKBVEQ9hWe1ghtpiEiInJGAoOs0Ns+GdZ+ZQXi3T9Yy659/TFc+mfrwrraDeyuVM4xBWFfka/tlUVERM6qgEBrM442V8GWdGuZta2rYeVC67iwPXT6q7UShctld7VyDigI+4rSGWGtGiEiInJ2uVzQvJ11/PwdLJ4F65dZWzdvXg4x9aztnNslQUiY3dXKWaQg7CvyjyyfpovlREREzp36zaHXCNi7C776CJZ/Bpk74P0JMPd1q6Wiw81QU33E/kBB2Ffka0MNERGRCnN+XfjrALj+HisMf/UR/PardVHdkvehRQJc3gUuvMxqsRCfpCDsK0q3WFZrhIiISMUJqw5X3Qodu8Km5dYFdVtXw4ZU64g835olTrjO+lp8ioKwLygpsfZMB7VGiIiI2CEgEFomWkfmz5A6z1qCLXsvLHgTPvs3XJQAiV2g+WUQqFliX6Ag7AsOHzj6tYKwiIiIvWLqQ9d+0KU3rPsKUufCj+tgY6p11IiylmBrlwR1GmvFiUpMQdgXlLZFhIRZax+KiIiI/aoEQ9vO1pG5A76db80S5+2HL/9rHbUbQNtrrDGR0XZXLL+jVOULtL2yiIhI5RZTD26+H27sDd+ttALxxlT4dTvMmQFzX4PGbax1i1tdCdUjbS1XLArCvkBBWERExDcEBkGLy60j/wCs+RJW/g9+Wm9dZLd1tbUU2wWtoXUnuLiD1UohtlAQ9gWezTS0dJqIiIjPCKtuXTyX2AX2/WKF4rVLYdfWo6H4v5PgglZwcUcrPNeMtbtqR1EQ9gWeGeFq9tYhIiIipyc6DpK6W8e+X2DtV7BuKezYAj+stY4PXo
HYBtbqExddDg0u0uoT55iCsC/wBGHNCIuIiPi86Djo3M06sjKsULwxFbZtgIzt1rF4tvWb4ObtrOXYmlyqi+3OAQVhX1DaGqEeYREREf9SMxau/pt1HMqzLrTb9C1sXm7dX/WFdQDUiocmbaDJJdC4NVSLsLV0f6Ag7AvyD1q3CsIiIiL+q2oNuPRq63CXwPbNsDkNvl8FO7fCnp3WsexTa23iuEbQ6GKrhaLBRRBVS2sWl5OCsC/I18VyIiIijhIQCI1aWkeX3lab5A9rYesa6yK7jO2w+0fr+OpD6zXh50GDC61QXP8iqNsYgkPt/BSVnoKwLzik5dNEREQcLay6tf5wqyut+3n7rVC8fSNs32QF4tzfYN3X1gHgCoDz60LdCyCuMdS5wDq0XJuHgrAvOKwgLCIiIseoEXW0jQKg8DDs/N4Kxds3wc+brbC8Z4d1lPYZg7WZR61464ipZ4XlmHpQM8aaiXYQBWFfUDojXFVBWERERMoQHAoXXGwdpXKzYPcPR44jbRT7dsOBbOv4ab339wgIhMjzrUAcdeSoWQsia0F4TStAVwv3q7CsIFyZFR6GbRut/2hBy6eJiIjIqQuvCeHt4cL2Rx8ryD960d2enZC5A/buso6iQms5t6yM439Pl8taraJGJFSPsvY4CKkKoVWPuQ2zgnlgEARVgcAqEBRkBeuYeuf8Y5eHgnBllpsF04Ycva8ZYRERETkTIWEQ39Q6juV2Q84+2L8H9mdaR9aRr7P3Wm0Wh3LBmKMzymwv33tfeRP838Cz8znOEgXhyiwwCGo3tL5u2MK6GlRERETkbAsIsJZfi6oFtCx7TEmJFYbz9lvHgWw4fBAOH4KCQ0du863HigqhuAhKiqGkCIqLrRaLSkZBuDKLqgWPz7C7ChERERFru+caUX616kSA3QWIiIiIiNhBQVhEREREHElBWEREREQcSUFYRERERBxJQVhEREREHElBWEREREQcSUFYRERERBxJQVhEREREHElBWEREREQcSUFYRERERBxJQVhEREREHElBWEREREQcSUFYRERERBxJQVhEREREHCnI7gJ8jTEGgNzcXJsrEREREZGylOa00tx2PArC5ZSXlwdAfHy8zZWIiIiIyInk5eURERFx3Odd5mRRWby43W5++eUXatSogcvlOqfvlZubS3x8PDt37iQ8PPycvpecOzqP/kHn0T/oPPoHnUf/cC7PozGGvLw84uLiCAg4fiewZoTLKSAggLp161boe4aHh+sPuh/QefQPOo/+QefRP+g8+odzdR5PNBNcShfLiYiIiIgjKQiLiIiIiCMpCFdiISEhDB8+nJCQELtLkTOg8+gfdB79g86jf9B59A+V4TzqYjkRERERcSTNCIuIiIiIIykIi4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIV2KTJ0+mQYMGhIaGkpCQwPLly+0uybGWLl3KTTfdRFxcHC6Xi48++sjreWMMw4YNo3bt2oSFhZGUlMTWrVu9xmRlZdGjRw/Cw8OJjIykd+/eHDhwwGvMunXr6NixI6GhocTHxzNmzJhz/dEcZdSoUVx22WXUqFGDWrVq0bVrV7Zs2eI15vDhw/Tv35/zzjuP6tWrc+utt5KZmek1ZseOHXTp0oWqVatSq1YtHnvsMYqLi73GLFmyhEsvvZSQkBAaN27MzJkzz/XHc4ypU6dy8cUXexbhT0xMZP78+Z7ndQ59z+jRo3G5XAwaNMjzmM6jbxgxYgQul8vraN68uef5Sn8ejVRKs2bNMsHBweaf//yn2bhxo+nTp4+JjIw0mZmZdpfmSPPmzTNPPfWU+eCDDwxgPvzwQ6/nR48ebSIiIsxHH31k1q5da/7yl7+Yhg0bmvz8fM+Y6667zrRu3dp8++235quvvjKNGzc23bt39zyfk5NjYmJiTI8ePcyGDRvMO++8Y8LCwsyrr75aUR/T7yUnJ5s33njDbNiwwaxZs8bccMMNpl69eubAgQOeMQ
888ICJj483ixYtMitXrjSXX365ueKKKzzPFxcXm5YtW5qkpCSzevVqM2/ePBMdHW2GDh3qGfPTTz+ZqlWrmpSUFLNp0yYzadIkExgYaBYsWFChn9dfffLJJ2bu3Lnm+++/N1u2bDFPPvmkqVKlitmwYYMxRufQ1yxfvtw0aNDAXHzxxWbgwIGex3UefcPw4cNNixYtzK+//uo59u7d63m+sp9HBeFKqn379qZ///6e+yUlJSYuLs6MGjXKxqrEGPOHIOx2u01sbKwZO3as57Hs7GwTEhJi3nnnHWOMMZs2bTKAWbFihWfM/PnzjcvlMrt37zbGGDNlyhQTFRVlCgoKPGOGDBlimjVrdo4/kXPt2bPHAObLL780xljnrUqVKua9997zjNm8ebMBTGpqqjHG+p+igIAAk5GR4RkzdepUEx4e7jl3jz/+uGnRooXXe3Xr1s0kJyef64/kWFFRUea1117TOfQxeXl5pkmTJmbhwoXmqquu8gRhnUffMXz4cNO6desyn/OF86jWiEqosLCQ9PR0kpKSPI8FBASQlJREamqqjZVJWbZt20ZGRobX+YqIiCAhIcFzvlJTU4mMjKRdu3aeMUlJSQQEBJCWluYZ06lTJ4KDgz1jkpOT2bJlC/v376+gT+MsOTk5ANSsWROA9PR0ioqKvM5l8+bNqVevnte5bNWqFTExMZ4xycnJ5ObmsnHjRs+YY79H6Rj9+T37SkpKmDVrFgcPHiQxMVHn0Mf079+fLl26/OFnrfPoW7Zu3UpcXByNGjWiR48e7NixA/CN86ggXAnt27ePkpISr/8oAGJiYsjIyLCpKjme0nNyovOVkZFBrVq1vJ4PCgqiZs2aXmPK+h7HvoecPW63m0GDBnHllVfSsmVLwPo5BwcHExkZ6TX29+fyZOfpeGNyc3PJz88/Fx/HcdavX0/16tUJCQnhgQce4MMPP+Siiy7SOfQhs2bNYtWqVYwaNeoPz+k8+o6EhARmzpzJggULmDp1Ktu2baNjx47k5eX5xHkMOqNXi4j4qP79+7Nhwwa+/vpru0uR09CsWTPWrFlDTk4O77//PnfffTdffvml3WXJKdq5cycDBw5k4cKFhIaG2l2OnIHrr7/e8/XFF19MQkIC9evX59133yUsLMzGyk6NZoQroejoaAIDA/9wVWVmZiaxsbE2VSXHU3pOTnS+YmNj2bNnj9fzxcXFZGVleY0p63sc+x5ydgwYMIA5c+bwxRdfULduXc/jsbGxFBYWkp2d7TX+9+fyZOfpeGPCw8N94h8GXxAcHEzjxo1p27Yto0aNonXr1kyYMEHn0Eekp6ezZ88eLr30UoKCgggKCuLLL79k4sSJBAUFERMTo/PooyIjI2natCk//PCDT/x5VBCuhIKDg2nbti2LFi3yPOZ2u1m0aBGJiYk2ViZladiwIbGxsV7nKzc3l7S0NM/5SkxMJDs7m/T0dM+YxYsX43a7SUhI8IxZunQpRUVFnjELFy6kWbNmREVFVdCn8W/GGAYMGMCHH37I4sWLadiwodfzbdu2pUqVKl7ncsuWLezYscPrXK5fv97rf2wWLlxIeHg4F110kWfMsd+jdIz+/J47brebgoICnUMf0blzZ9avX8+aNWs8R7t27ejRo4fna51H33TgwAF+/PFHateu7Rt/Hs/4cjs5J2bNmmVCQkLMzJkzzaZNm0zfvn1NZGSk11WVUnHy8vLM6tWrzerVqw1gXnrpJbN69Wrz888/G2Os5dMiIyPNxx9/bNatW2duvvnmMpdPu+SSS0xaWpr5+uuvTZMmTbyWT8vOzjYxMTHmrrvuMhs2bDCzZs0yVatW1fJpZ1G/fv1MRESEWbJkiddSP4cOHfKMeeCBB0y9evXM4sWLzcqVK01iYqJJTEz0PF+61M+1115r1qxZYxYsWGDOP//8Mpf6eeyxx8zmzZvN5MmTtWTTWfTEE0+YL7/80mzbts2sW7fOPPHEE8blcpnPP//cGKNz6KuOXT
XCGJ1HXzF48GCzZMkSs23bNrNs2TKTlJRkoqOjzZ49e4wxlf88KghXYpMmTTL16tUzwcHBpn379ubbb7+1uyTH+uKLLwzwh+Puu+82xlhLqD3zzDMmJibGhISEmM6dO5stW7Z4fY/ffvvNdO/e3VSvXt2Eh4ebXr16mby8PK8xa9euNR06dDAhISGmTp06ZvTo0RX1ER2hrHMImDfeeMMzJj8/3zz44IMmKirKVK1a1dxyyy3m119/9fo+27dvN9dff70JCwsz0dHRZvDgwaaoqMhrzBdffGHatGljgoODTaNGjbzeQ87Mvffea+rXr2+Cg4PN+eefbzp37uwJwcboHPqq3wdhnUff0K1bN1O7dm0THBxs6tSpY7p162Z++OEHz/OV/Ty6jDHmzOeVRURERER8i3qERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkf4ftmKsAZ2XwyAAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArYAAAHDCAYAAADRBFkDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABFkklEQVR4nO3deXxU1f3/8fdkmyyQhM2EJRAQBNkVSMQiYE0NigtqERQEEaX6BVuLpYqtUG1rrG0prSLU/iLUuoAsIgoiEAFFURQEjGBcWAWSEJYkBEhC5vz+uMkkQ8KSQHJnJq/n43Efc3LmzJ3PcCu8e3LuGYcxxggAAADwcQF2FwAAAABcDARbAAAA+AWCLQAAAPwCwRYAAAB+gWALAAAAv0CwBQAAgF8g2AIAAMAvEGwBAADgFwi2AAAA8AsEWwAAAPgFgi0AnIddu3bJ4XBozpw5trz/nDlz5HA4tGvXLlveHwB8AcEWALzIM888o8WLF9tdhu644w7deOONdpcBANVCsAUAL3KmYHvPPffoxIkTatOmTa3XUFxcrJUrV2rw4MG1/l4AcDEF2V0AAODcAgMDFRgYWCfv9dFHHyk/P59gC8DnMGMLwCft27dP9913n2JiYuR0OtWlSxe9/PLLkqSsrCwFBQXpqaeeqvS6jIwMORwOvfDCC5Kkw4cP6ze/+Y26deumBg0aKDIyUjfccIO2bNlyzhoGDhyogQMHVuq/9957FR8f79H3t7/9TVdffbWaNGmisLAw9erVSwsWLPAY43A4VFBQoP/+979yOBxyOBy69957JZ15je2LL76oLl26yOl0qkWLFho/fryOHj1aqc6uXbtq27ZtuvbaaxUeHq6WLVvqueeeq/JzLV26VJ07d3Z/hq1bt+ree+9Vu3btFBoaqtjYWN133306dOhQpdfu27dPY8eOVYsWLeR0OtW2bVs99NBDKioqco85evSofv3rXys+Pl5Op1OtWrXSqFGjlJOTU2U9AHC+mLEF4HOysrJ01VVXyeFwaMKECWrWrJnee+89jR07Vnl5eXrkkUc0YMAAvfnmm5o6darHa+fNm6fAwEANHTpUkrRjxw4tXrxYQ4cOVdu2bZWVlaV///vfGjBggLZt26YWLVpclJr/+c9/6pZbbtGIESNUVFSkuXPnaujQoXr33XfdM6P/+9//dP/99yshIUHjxo2TJF166aVnPOcf/vAHPfXUU0pKStJDDz2kjIwMzZw5U59//rk+/vhjBQcHu8ceOXJEgwYN0u23364777xTCxYs0GOPPaZu3brphhtu8DjvsmXLdNNNN7l/XrlypXbs2KExY8YoNjZWX3/9tV566SV9/fXX+vTTT+VwOCRJ+/fvV0JCgo4ePapx48apU6dO2rdvnxYsWKDjx48rJCREx44d0zXXXKPt27frvvvu05VXXqmcnBwtWbJEP/74o5o2bXpR/rwB1FMGAHzM2LFjTfPmzU1OTo5H//Dhw01UVJQ5fvy4+fe//20kma+++spjTOfOnc1Pf/pT988nT540JSUlHmN27txpnE6nefrppz36JJnZs2e7+wYMGGAGDBhQqb7Ro0ebNm3aePQdP37c4+eioiLTtWtXj1qMMSYiIsKMHj260jlnz55tJJmdO3caY4zJzs42ISEh5vrrr/eo/4UXXjCSzMsvv+xRpyTzyiuvuPsKCwtNbGysueOOOzzeZ8eOHUaSWb169RlrN8aYN954w0gyH374obtv1KhRJiAgwHz++eeVxrtcLmOMMVOmTDGSzKJFi844BgBqiqUIAHyKMUYLFy7UzTffLGOMcnJy3EdycrJyc3O1adMm3X777QoKCtK8efPcr01PT9e2bds0bNgwd5/T6VRAgPVXYUlJiQ4dOqQGDRqoY8eO2rRp00WrOy
wszN0+cuSIcnNzdc0119T4PVatWqWioiI98sgj7vol6YEHHlBkZKSWLl3qMb5BgwYaOXKk++eQkBAlJCRox44dHuOWLl2qqKgo9evXr8raT548qZycHF111VWS5K7f5XJp8eLFuvnmm9W7d+9K9ZbN6i5cuFA9evTQbbfddsYxAFBTBFsAPuXgwYM6evSoXnrpJTVr1szjGDNmjCQpOztbTZs21XXXXac333zT/dp58+YpKChIt99+u7vP5XLpH//4hzp06CCn06mmTZuqWbNm2rp1q3Jzcy9a3e+++66uuuoqhYaGqnHjxmrWrJlmzpxZ4/fYvXu3JKljx44e/SEhIWrXrp37+TKtWrWqFBwbNWqkI0eOePQtXbpU119/vYKCyleqHT58WL/61a8UExOjsLAwNWvWTG3btpUkd/0HDx5UXl6eunbteta6f/jhh3OOAYCaYo0tAJ/icrkkSSNHjtTo0aOrHNO9e3dJ0vDhwzVmzBht3rxZPXv21JtvvqnrrrvOYx3nM888oyeffFL33Xef/vjHP6px48YKCAjQI4884n6vM3E4HDLGVOovKSnx+Pmjjz7SLbfcov79++vFF19U8+bNFRwcrNmzZ+v111+v1uevqTPtqFCx/uPHj2vNmjWaOXOmx5g777xTn3zyiSZNmqSePXuqQYMGcrlcGjRo0Dn/jACgLhFsAfiUZs2aqWHDhiopKVFSUtJZxw4ZMkS/+MUv3MsRvv32W02ePNljzIIFC3TttdcqNTXVo//o0aPnvJGpUaNGlX6VL6nSbOnChQsVGhqq999/X06n090/e/bsSq8931/Hl+1nm5GRoXbt2rn7i4qKtHPnznP+2VTlgw8+UGFhocfNZEeOHFFaWpqeeuopTZkyxd3/3Xffeby2WbNmioyMVHp6+lnf49JLLz3nGACoKZYiAPApgYGBuuOOO7Rw4cIqA9LBgwfd7ejoaCUnJ+vNN9/U3LlzFRISoiFDhlQ63+mzrvPnz9e+ffvOWcull16qb775xuM9t2zZoo8//rjSezgcDo+Z3F27dlX5RQwRERGVtuuqSlJSkkJCQvSvf/3Lo/7U1FTl5ubWaA/aZcuWqXfv3oqJifGoXVKlP6Pp06d7/BwQEKAhQ4bonXfe0RdffFHp3GWvv+OOO7Rlyxa99dZbZxwDADXFjC0An/Pss89q9erVSkxM1AMPPKDOnTvr8OHD2rRpk1atWqXDhw+7xw4bNkwjR47Uiy++qOTkZEVHR3uc66abbtLTTz+tMWPG6Oqrr9ZXX32l1157zWMW9Ezuu+8+TZs2TcnJyRo7dqyys7M1a9YsdenSRXl5ee5xgwcP1rRp0zRo0CDdfffdys7O1owZM9S+fXtt3brV45y9evXSqlWrNG3aNLVo0UJt27ZVYmJipfdu1qyZJk+erKeeekqDBg3SLbfcooyMDL344ovq06ePx41i52vZsmXudcplIiMj1b9/fz333HMqLi5Wy5YttWLFCu3cubPS65955hmtWLFCAwYM0Lhx43T55ZfrwIEDmj9/vtatW6fo6GhNmjRJCxYs0NChQ3XfffepV69eOnz4sJYsWaJZs2apR48e1a4bANxs248BAC5AVlaWGT9+vImLizPBwcEmNjbWXHfddeall17yGJeXl2fCwsKMJPPqq69WOs/JkyfNo48+apo3b27CwsLMT37yE7N+/fpKW3lVtd2XMca8+uqrpl27diYkJMT07NnTvP/++1Vu95Wammo6dOhgnE6n6dSpk5k9e7aZOnWqOf2v4W+++cb079/fXXPZ1l+nb/dV5oUXXjCdOnUywcHBJiYmxjz00EPmyJEjHmMGDBhgunTpUumzV6wzPT3dSDIbNmyoNO7HH380t912m4mOjjZRUVFm6NChZv/+/UaSmTp1qsfY3bt3m1GjRplmzZoZp9Np2rVrZ8aPH28KCwvdYw4dOmQmTJhgWrZsaUJCQkyrVq3M6NGjK23fBgDV5TCG3/0AQH
333HPPadq0aTpw4ADbbgHwWayxBQAoPj5e//jHPwi1AHwaM7YAAADwC8zYAgAAwC8QbAEAAOAXCLYAAADwC36xj63L5dL+/fvVsGFDbnwAAADwQsYY5efnq0WLFgoIqJ25Vb8Itvv371dcXJzdZQAAAOAc9u7dq1atWtXKuf0i2DZs2FCS9QcVGRlpczUAAAA4XV5enuLi4ty5rTb4RbAtW34QGRlJsAUAAPBitblslJvHAAAA4BcItgAAAPALBFsAAAD4BYItAAAA/ALBFgAAAH6BYAsAAAC/QLAFAACAXyDYAgAAwC8QbAEAAOAXCLYAAADwCwRbAAAA+AWCLQAAAPwCwRYAAAB+IcjuAgAAAFDLSkqkohNS0UnrKKzQbtleatjI7govCoItAAD+rKREOlUkFRed5bG4/OeyPmOs1zsc5YccVfys0seACu1zjHE4pIAgyRlmHaHhpe1wKSRUCrDpF8oul3SyQCrIk47nS8fzytsFedbPZX0F+VZQDAySgoJLj5Dy9hn7gyv0n0dfyamqw+jpP3u0y8acKP+5pPjMn3vMVKn7NXX351yLCLYAAHijU0VS3hEp/7CUd1jKPyLlHbL6judVDqiniqTi4tLHwvJ+l8vuT1I9DocVbp3hFUJvuBQaVt7nDPcMw6eH47J2UWGFcFohoFYVXI/nScePScbH/ryqKyDA+vMNCSt9DJWCnXZXddEQbAEAqCvGWEGqLKzmHbba+Ucq/3w8/+K/f2CQNXsYXHoEnf4YbD0GBlsByJjyQ0YyKn10WW13f4Uxrgp9UunYsnaF17pOWbOMhSesWdLCk+Vjy/rtEhIqRURK4ZFSeEOrHVHaDo8sf84ZWjojXvp/KE4VWzOsp4rP3efRX1VfaX9QcHkADQmz3jPktMMZduY+j/Fh1vkcDvv+bGsZwRYAgJowxvo178kC6eTx8scTBdKxI1WH1fwjVnA5X4FB1trHyCZSZCOpYWPriIgsnWkLKf9VtzugOj1DasXQGhBYe38eF6rsz7PwhFR4XDpZ+uj+uUK78MS5fy46aQX0iEgpojSQhp/erhBc3aG1ofVnBp9EsAUA1C/GWL+qP1FQGphOC6bu9un9VYyr6a+tIyJLQ2ojKbKxdTQseywNsg0bWWHLj2fXPDgc5Wtu1fjCz+cqKV3TW0/+/CCJYAsAsEPZ7NyJAunEsfLjZIEVOt2/kj1l3fRS8Ve0JcXl/RV/bVtyqvTnotLnK7y24vjiwou77tQRIIVFlK/tDA2XGjSqMMN6+oxrI2v2FLXLm2enUWsItgDgK4yxZhjzSn+lXbZO89gRq+/YUeuO82Bn+RHiLP/VdHDpr649+k9rl40NCT33WrziIs9QWukoDa3H8632ydP6q/Mr+drgcFhhNCyi9GajiArtcCk0wjOwutun9YeEMisIeAmCLQDYrehk+frLsrvf8w+XhtWKfUes2ca64nBYaw0rhmNHQGkwzbdmQi9UQIAU1kAKa1gaKiOsoBgYJAWV3ugUGOS5dVJgUOlWSEHlWyKVPe/uDzpDf+l61LAIAinghwi2AFCbThVL2XulzN3Sof0Vtm2qMNNaeLx653SGlf9Ku2yNZsNGUoNo6/nisu2eCsu3fSo6Wd5fVKG/uIp+V4l1nrK1qGcL0w6HFUxDI0oDatkRYa0PPf258Aae4wiXAC4igi0AXAwlJVLOPilzl3Rgl/WYuUs6uK88KJ5NcIhnWG3YuHRNZqPym4oaRFs/O8Nq9aO416FWCsKla1MrhldnuH2b6QPAaQi2AFAdLpd0OLNygM3ae+Zv9gmNkGLjpUtald/tXjbLWhZgneHeM3NZ9qv/0Ai7KwGAaiHYAkBVjJGOZltLCA7sLH3cJWXvsX6tX5WQUCm2jRViY+Ol5qWPUU29J7QCgB8j2ALAiWPS3m9LA+wuK8Bm7bb2Ka1KULAU07o8wJaF2EYx/FoeAGxEsAVQv5SckvbvlPZsl3Z/Yx3Ze6oeGxAoXRJnzc
I2b1s6G9tWatJcCmSPTADwNgRbAP7LGGs97J5vykPsvu+sm6JO1zhWatm+NMTGWwG2WUs20gcAH0KwBeA/ThyzwmtZkN3zjfWlBacLayC17ii17iS1udxqN2xU5+UCAC4ugi0A33SqWNq/wzPEZu+tPC4wSGrRrjTAdpLadJKatmQtLAD4IYItAO9njHTogGeI/fG7qr/5qklzK7y2vtx6bNne2iMWAOD3CLYAvNupYum/f5TSP6n8XHjD0iUFpSG2dSepQVTd1wgA8AoEWwDeyxhp3jQr1AYESq06WEG2zeXlSwrYHxYAUIpgC8B7vTdb+mKltR527NNS50S7KwIAeDHungDgnT55V1r5utUe+mtCLQDgnAi2ALxP+nppwb+sdvI90lU32FsPAMAnEGwBeJfd26VX/iQZl5Q4SEoeZXdFAAAfQbAF4D0O7pP+83upuFDq1Eca+gg3hwEAzhvBFoB3yD8ivTRZKsi1dj+4d4r15QoAAJwngi0A+xWekP7f76Wc/VLjWOmBP0vOMLurAgD4GIItAHuVlEiv/FnakyFFREq/SJEiG9tdFQDABxFsAdjHGGnhv6Rtn1pfezv2j9IlcXZXBQDwUQRbAPZZ9bq0fql1g9jIJ6S2XeyuCADgwwi2AOyxYYW0bLbVvn2C1L2fvfUAAHxejYLtjBkzFB8fr9DQUCUmJmrDhg1nHT99+nR17NhRYWFhiouL069//WudPHnygs4JwId984U07+9W+6fDpH632lsPAMAvVDvYzps3TxMnTtTUqVO1adMm9ejRQ8nJycrOzq5y/Ouvv67HH39cU6dO1fbt25Wamqp58+bpiSeeqPE5AfiwH7+T5jwluUqkXtdJg8faXREAwE84jDGmOi9ITExUnz599MILL0iSXC6X4uLi9PDDD+vxxx+vNH7ChAnavn270tLS3H2PPvqoPvvsM61bt65G5zxdXl6eoqKilJubq8jIyOp8HAB16XCWNP1hKf+w1OEKadwzUlCw3VUBAOpAXeS1as3YFhUVaePGjUpKSio/QUCAkpKStH79+ipfc/XVV2vjxo3upQU7duzQsmXLdOONN9b4nAB8UEGe9O/JVqht3lYaM5VQCwC4qKr1tT45OTkqKSlRTEyMR39MTIy++eabKl9z9913KycnR/369ZMxRqdOndKDDz7oXopQk3MWFhaqsLDQ/XNeXl51PgaAulZcJKVOkbL3SNHNrJnasAZ2VwUA8DO1vivCmjVr9Mwzz+jFF1/Upk2btGjRIi1dulR//OMfa3zOlJQURUVFuY+4OPa9BLyWyyW9miLtTJdCI6xQG93M7qoAAH6oWjO2TZs2VWBgoLKysjz6s7KyFBsbW+VrnnzySd1zzz26//77JUndunVTQUGBxo0bp9/97nc1OufkyZM1ceJE9895eXmEW8AbGSO9PUva+pEUGCyNfcpahgAAQC2o1oxtSEiIevXq5XEjmMvlUlpamvr27Vvla44fP66AAM+3CQwMlCQZY2p0TqfTqcjISI8DgBdau1D6cJHVvvu3UvuetpYDAPBv1ZqxlaSJEydq9OjR6t27txISEjR9+nQVFBRozJgxkqRRo0apZcuWSklJkSTdfPPNmjZtmq644golJibq+++/15NPPqmbb77ZHXDPdU4APujL1dZsrSTdMk668lp76wEA+L1qB9thw4bp4MGDmjJlijIzM9WzZ08tX77cffPXnj17PGZof//738vhcOj3v/+99u3bp2bNmunmm2/Wn//85/M+JwAf8/0W6bXnrPY1t0kDh9pbDwCgXqj2PrbeiH1sAS9yYJf0/CPSiWPW1+SOflIKCLS7KgCAzbxuH1sAOKujOdJLk61Q27aLNGIyoRYAUGcItgAujhPHpP88IR09KF3SWhr7RynEaXdVAIB6hGAL4MKdKpZmPyXt3yE1bCz94hkpgmVBAIC6RbAFcGGMkeb+XfruS8kZJo37s9S46j2oAQCoTQRbADVnjLR4pr
RxlbWW9t4pUqsOdlcFAKinCLYAam75K+VfwDD8UalTH3vrAQDUawRbADWz+k1pxf+s9u0TpD7X21sPAKDeI9gCqL5P3pWWvGS1B98nXTPE1nIAAJAItgCqa2OatOCfVjvpLinpbnvrAQCgFMEWwPn76mPp9b9YN431u1W68T67KwIAwI1gC+D8ZGyU/vsnyeWSev9Mum285HDYXRUAAG4EWwDntvNr6eWpUkmx1L2fNPw3UgB/fQAAvAv/MgE4u73fSS89IRWdtLbzuucJKTDQ7qoAAKiEYAvgzLJ2S/9+XDpZILXrJo2ZKgWF2F0VAABVItgCqNqhA9LM30oFuVLcZdIDf5JCQu2uCgCAMyLYAqjsaI704iQp95AUGy/9IkUKjbC7KgAAzopgC8DTsaPSrN9KhzOlpi2kB/8iRUTZXRUAAOdEsAVQ7sQxadbjUtYeKbqZ9NBzUlQTu6sCAOC8EGwBWApPSP/5nbTve6lBtBVqG8faXRUAAOeNYAtAOlUkvfwHa7/asAbW8oNL4uyuCgCAaiHYAvVdSYn0yp+lbzdaux6Me0ZqeandVQEAUG0EW6A+c7mkN/4qffWxFBQs3f9HKb6z3VUBAFAjBFugvjJGWvgvaeMqKSBQGj1F6nCF3VUBAFBjBFugPjJGeuc/0ifvSg6HNOJxqWtfu6sCAOCCEGyB+mjV69LqN632nb+WrrzW3noAALgICLZAfbN2kbRsttUe8pB01Y321gMAwEVCsAXqk8/ekxa/aLUHjZYG3GFvPQAAXEQEW6C++HKNNO8fVnvgUOn6kbaWAwDAxUawBeqDbZ9Jr6ZIxmUtPbhlnHXTGAAAfoRgC/i77zdLc56SXCXSlT+Vhv6KUAsA8EsEW8Cf5R6S5jwtFRdZ23nd/Vtrz1oAAPwQwRbwV8ZI8/4uFeRJLdtLo56UAoPsrgoAgFpDsAX81cdLpO0bpOAQaeRk6xEAAD9GsAX8UfZeaclLVvumB6TYNvbWAwBAHSDYAv6m5JS1A0JxoXTZlVK/W+2uCACAOkGwBfzNitekvd9K4Q2lu34rBfCfOQCgfuBfPMCf7NomrXrNav/8l1J0U3vrAQCgDhFsAX9ReEJ67S+SyyX1uk664lq7KwIAoE4RbAF/8fYsKWefFN1MuuNhu6sBAKDOEWwBf5C+Xlq/1PpGsbsfk8Ia2F0RAAB1jmAL+Lr8I9YXMUjSgJ9LHXraWg4AAHYh2AK+zBjpzWnSsaNS87bSjWPsrggAANsQbAFf9tlyaxlCYDDfLgYAqPcItoCvytkvvTXDat94r9Sina3lAABgN4It4ItKSqTXnpWKTkqXdpcG/tzuigAAsB3BFvBFH8y1vowhNNzaBSEg0O6KAACwHcEW8DV7MqTlr1jt2x+WGsfYWw8AAF6CYAv4kqKTpd8uViL16C/1TrK7IgAAvAbBFvAl7/xHyt4jRTaRhv7K+kIGAAAgiWAL+I5vPpfWvW217/qNFBFlbz0AAHgZgi3gCwpypTf+ZrX73Sp16mNvPQAAeCGCLeDtjJHm/1PKOyRd0lq6+QG7KwIAwCsRbAFv98UqacuH1pZeIx+XQkLtrggAAK9EsAW82eEsadHzVnvQKCnuMnvrAQDAixFsAW/lKpFe/4t08rgU31n66XC7KwIAwKsRbAFvtWaB9MNWa+nBiMelQL5dDACAsyHYAt5o3w/SstlW+7b/k5q2sLceAAB8AMEW8DbFRdJrKVLJKalrXynxBrsrAgDAJ9Qo2M6YMUPx8fEKDQ1VYmKiNmzYcMaxAwcOlMPhqHQMHjzYPebYsWOaMGGCWrVqpbCwMHXu3FmzZs2qSWmA71v2snRgl9QgWrpzIt8uBgDAeap2sJ03b54mTpyoqVOnatOmTerRo4eSk5OVnZ1d5fhFixbpwIED7iM9PV2BgYEaOnSoe8zEiRO1fPlyvfrqq9q+fbseeeQRTZgwQUuWLKn5JwN80XdfWmtrJWn4b6SGjeytBwAAH1LtYD
tt2jQ98MADGjNmjHtmNTw8XC+//HKV4xs3bqzY2Fj3sXLlSoWHh3sE208++USjR4/WwIEDFR8fr3HjxqlHjx5nnQkG/M6JY9Lrz1ntvoOlLlfZWw8AAD6mWsG2qKhIGzduVFJSUvkJAgKUlJSk9evXn9c5UlNTNXz4cEVERLj7rr76ai1ZskT79u2TMUarV6/Wt99+q+uvv77KcxQWFiovL8/jAHzewuelowelpi2lWx+0uxoAAHxOtYJtTk6OSkpKFBMT49EfExOjzMzMc75+w4YNSk9P1/333+/R//zzz6tz585q1aqVQkJCNGjQIM2YMUP9+/ev8jwpKSmKiopyH3FxcdX5GID3+XK1tDFNcgRIIx6TnGF2VwQAgM+p010RUlNT1a1bNyUkJHj0P//88/r000+1ZMkSbdy4UX//+981fvx4rVq1qsrzTJ48Wbm5ue5j7969dVE+UDuO5kjz/2m1f3a39WUMAACg2oKqM7hp06YKDAxUVlaWR39WVpZiY2PP+tqCggLNnTtXTz/9tEf/iRMn9MQTT+itt95y75TQvXt3bd68WX/72988lj2UcTqdcjqd1Skd8E4ul/TGc9b62rjLpOtH2l0RAAA+q1oztiEhIerVq5fS0tLcfS6XS2lpaerbt+9ZXzt//nwVFhZq5EjPf7iLi4tVXFysgADPUgIDA+VyuapTHuB71i6Qvt0kBTulkZOlwGr9f00AAFBBtf8VnThxokaPHq3evXsrISFB06dPV0FBgcaMGSNJGjVqlFq2bKmUlBSP16WmpmrIkCFq0qSJR39kZKQGDBigSZMmKSwsTG3atNHatWv1yiuvaNq0aRfw0QAvt2ub9G6q1b71QekS1ooDAHAhqh1shw0bpoMHD2rKlCnKzMxUz549tXz5cvcNZXv27Kk0+5qRkaF169ZpxYoVVZ5z7ty5mjx5skaMGKHDhw+rTZs2+vOf/6wHH+TOcPipgjzplT9JrhLpioHS1TfZXREAAD7PYYwxdhdxofLy8hQVFaXc3FxFRkbaXQ5wdsZIqVOkr9dbW3s9+qIUGnHu1wEA4MPqIq/V6a4IACStXWiF2sBgafTvCbUAAFwkBFugLu3eLr3zH6s95EGpVQd76wEAwI8QbIG6cjxf+m/putoe/aWf3GJ3RQAA+BWCLVAXjJHe+Kt0JEtq0lwaNlFyOOyuCgAAv0KwBerCh4uk9E9K19U+KYU1sLsiAAD8DsEWqG27v/FcVxt3mb31AADgpwi2QG06nm/tV1tyinW1AADUMoItUFuMkeb+TTqcybpaAADqAMEWqC0fLZa++lgKDJJG/Z51tQAA1DKCLVAb9nwjLfm31b7lF1LrjvbWAwBAPUCwBS62E8es/WpLTknd+0nXDLG7IgAA6gWCLXAxVVxX2zhWGv4b1tUCAFBHCLbAxbTubWnrOmtd7WjW1QIAUJcItsDFsvdb6e3SdbU3j5Nad7K3HgAA6hmCLXAxnDgm/fePUkmx1O0nUv/b7K4IAIB6h2ALXChjpHnTpEMHWFcLAICNCLbAhfp4ibTlw/L9asMb2l0RAAD1EsEWuBB7v5MWz7LaN90vtWFdLQAAdiHYAjV1skB6pXRdbde+0oA77K4IAIB6jWAL1ETZutqc/VKjS6Thk1hXCwCAzQi2QE188o60ea0UEGitq42ItLsiAADqPYItUF0/fictnmm1b7pfiu9sbz0AAEASwRaonpMF0n//JJ0qlrr0lQb+3O6KAABAKYItcL6Mkd78h5SzT4puJt3FuloAALwJwRY4X+uXSl+usdbVjn6SdbUAAHgZgi1wPvb9IL01w2rfNJZ1tQAAeCGCLXAuJ49L/33aWlfbOVEawLpaAAC8EcEWOBtjpPn/kA6Wrqu9+7dSAP/ZAADgjfgXGjibT5dJm1ZbYXbU76WIKLsrAgAAZ0CwBc7kxDFpyUtW+8axUtsu9tYDAADOimALnMmahda+tc3jpWuH2l0NAAA4B4ItUJXj+d
KHC6329aNYVwsAgA/gX2ugKmsXWrshNG8rde9ndzUAAOA8EGyB0xXkSWsXWe3ke5itBQDAR/AvNnC6tQulwuNSi3ZSN2ZrAQDwFQRboKKCPOnDt6w2s7UAAPgU/tUGKlqzoHS29lKp60/srgYAAFQDwRYoU5ArfVQ6WzuInRAAAPA1/MsNlFmzQCo8IbW8VOp6td3VAACAaiLYApJ0LFf6aLHVTh4lORy2lgMAAKqPYAtI0pr5pbO17ZmtBQDARxFsgWNHy2drBzFbCwCAryLYAqvnS0UnpVaXSV362l0NAACoIYIt6rdjR6V1b1ttZmsBAPBpBFvUbx+8ac3Wxl0mdU60uxoAAHABCLaov/KPSB8vsdrshAAAgM8j2KL+Wl06W9u6I7O1AAD4AYIt6qf8I9I6ZmsBAPAnBFvUTx+8KRUXSq07SZcn2F0NAAC4CAi2qH/yDpevrWUnBAAA/AbBFvXPB/Os2do2naROfeyuBgAAXCQEW9QveYelT96x2oNGM1sLAIAfIdiifkmbKxUXSW0ulzr2trsaAABwERFsUX/kHpLWv2u1ma0FAMDvEGxRf3wwz5qtje8idexldzUAAOAiI9iifsjNqbC2lp0QAADwRwRb1A9pc6VTxVLbLtJlV9pdDQAAqAU1CrYzZsxQfHy8QkNDlZiYqA0bNpxx7MCBA+VwOCodgwcP9hi3fft23XLLLYqKilJERIT69OmjPXv21KQ8wNPRHGn9UqvN2loAAPxWtYPtvHnzNHHiRE2dOlWbNm1Sjx49lJycrOzs7CrHL1q0SAcOHHAf6enpCgwM1NChQ91jfvjhB/Xr10+dOnXSmjVrtHXrVj355JMKDQ2t+ScDyqS9UTpb21XqcIXd1QAAgFriMMaY6rwgMTFRffr00QsvvCBJcrlciouL08MPP6zHH3/8nK+fPn26pkyZogMHDigiIkKSNHz4cAUHB+t///tfDT6ClJeXp6ioKOXm5ioyMrJG54CfOnpQ+tMoqaRY+r+/EmwBALBJXeS1as3YFhUVaePGjUpKSio/QUCAkpKStH79+vM6R2pqqoYPH+4OtS6XS0uXLtVll12m5ORkXXLJJUpMTNTixYurUxpQtbS5Vqi9tLvUvqfd1QAAgFpUrWCbk5OjkpISxcTEePTHxMQoMzPznK/fsGGD0tPTdf/997v7srOzdezYMT377LMaNGiQVqxYodtuu02333671q5dW+V5CgsLlZeX53EAlRzJltYvs9rJ7IQAAIC/C6rLN0tNTVW3bt2UkJDg7nO5XJKkW2+9Vb/+9a8lST179tQnn3yiWbNmacCAAZXOk5KSoqeeeqpuiobvqjhb26Gn3dUAAIBaVq0Z26ZNmyowMFBZWVke/VlZWYqNjT3rawsKCjR37lyNHTu20jmDgoLUuXNnj/7LL7/8jLsiTJ48Wbm5ue5j79691fkYqA+OZEufvme1B42ytxYAAFAnqhVsQ0JC1KtXL6Wlpbn7XC6X0tLS1Ldv37O+dv78+SosLNTIkSMrnbNPnz7KyMjw6P/222/Vpk2bKs/ldDoVGRnpcQAeVr1uzda278HaWgAA6olqL0WYOHGiRo8erd69eyshIUHTp09XQUGBxowZI0kaNWqUWrZsqZSUFI/XpaamasiQIWrSpEmlc06aNEnDhg1T//79de2112r58uV65513tGbNmpp9KtRvR7Kkz5Zb7UGj7a0FAADUmWoH22HDhungwYOaMmWKMjMz1bNnTy1fvtx9Q9mePXsUEOA5EZyRkaF169ZpxYoVVZ7ztttu06xZs5SSkqJf/vKX6tixoxYuXKh+/frV4COh3lv5hlRyytra69LudlcDAADqSLX3sfVG7GMLt8NZ0jOjrWA74R/Spd3srggAAMgL97EFvN6q1yvM1hJqAQCoTwi28B+HMyusrWUnBAAA6huCLfzHytckV4l02ZVSO2ZrAQCobwi28A+HDkgbSm9OZCcEAADqJYIt/MPK163Z2o
69pLZd7K4GAADYgGAL35ezX/r8favNbC0AAPUWwRa+b9XrkssldeotxXc+93gAAOCXCLbwbTn7pc9L19YmsxMCAAD1GcEWvm3la6WztX2YrQUAoJ4j2MJ3HdwnfbHSarO2FgCAeo9gC9+14n/WbO3lCVKbTnZXAwAAbEawhW/K3C1tTLPaN9xraykAAMA7EGzhm5b/VzJG6tZPirvM7moAAIAXINjC9+z7XtryoeRwSIPYCQEAAFgItvA9y/9rPV4xUGrRzs5KAACAFyHYwrfs/kZKXy85Ati3FgAAeCDYwre8N9t67PMz6ZI4e2sBAABehWAL3/HDViljoxQQKF0/0u5qAACAlyHYwjcYIy0rna296gapSXN76wEAAF6HYAvf8O0macdXUlCw9LMRdlcDAAC8EMEW3q/ibO3VN0vRzeytBwAAeCWCLbzfts+kPd9IIaFS0l12VwMAALwUwRbezeUq3wnhmiFSw0a2lgMAALwXwRbebes6ad8PkjNcunao3dUAAAAvRrCF93KVSMvnWO2Bd0gRUbaWAwAAvBvBFt5r02opa48U3lAacIfd1QAAAC9HsIV3Kjklvf+K1b72Timsgb31AAAAr0ewhXf6fIWUs19qEG3dNAYAAHAOBFt4n1NF0vv/s9pJd0nOMHvrAQAAPoFgC+/z6XvS0YNSVFPrCxkAAADOA8EW3qXopLTiNav9sxFScIi99QAAAJ9BsIV3+fgdKf+w1DhWShxkdzUAAMCHEGzhPU4el9LmWu3rR0pBwfbWAwAAfArBFt7jo7ekglypWUup98/srgYAAPgYgi28w/F86YM3rfag0VJgoL31AAAAn0OwhXdYs0A6WSA1j5d6DrS7GgAA4IMItrDfsVzpw0VWe9C9UgD/swQAANVHgoD9PpgnFZ6QWnWQuv3E7moAAICPItjCXrmHpHVvW+0bx0gOh731AAAAn0Wwhb3S3pCKC6X4zlKnPnZXAwAAfBjBFvY5kiV9stRqM1sLAAAuEMEW9lnxmlRSLLXvKXW4wu5qAACAjyPYwh4H90kbllvtG+61tRQAAOAfCLawx4r/SS6XdHmC1K6r3dUAAAA/QLBF3cvaLW38wGozWwsAAC4Sgi3q3vJXJOOSuvWT4i6zuxoAAOAnCLaoW/t+kDavtXZAGDTK7moAAIAfIdiibi2fYz32HCi1aGdnJQAAwM8QbFF3dn8jpa+XHAHM1gIAgIuOYIu6895s67HPz6RL4uytBQAA+B2CLerGD1uljI1SQKB0/Ui7qwEAAH6IYIvaZ4z03hyrfdUNUpPmtpYDAAD8E8EWte/bTdaMbVCw9LMRdlcDAAD8FMEWtcuY8rW1V98sRTeztx4AAOC3CLaoXds+s3ZDCAmVku6yuxoAAODHCLaoPS5X+Wxtv1ulho3srQcAAPg1gi1qz9Z11jeNOcOln95pdzUAAMDPEWxRe1a+Zj0OvEOKiLK3FgAA4PdqFGxnzJih+Ph4hYaGKjExURs2bDjj2IEDB8rhcFQ6Bg8eXOX4Bx98UA6HQ9OnT69JafAWmbul/T9IgUFS/9vtrgYAANQD1Q628+bN08SJEzV16lRt2rRJPXr0UHJysrKzs6scv2jRIh04cMB9pKenKzAwUEOHDq009q233tKnn36qFi1aVP+TwLtsXms9duwthTe0txYAAFAvVDvYTps2TQ888IDGjBmjzp07a9asWQoPD9fLL79c5fjGjRsrNjbWfaxcuVLh4eGVgu2+ffv08MMP67XXXlNwcHDNPg28x5YPrcee/e2tAwAA1BvVCrZFRUXauHGjkpKSyk8QEKCkpCStX7/+vM6Rmpqq4cOHKyIiwt3ncrl0zz33aNKkSerSpcs5z1FYWKi8vDyPA17kwC4pc5cUGCx1vdruagAAQD1RrWCbk5OjkpISxcTEePTHxMQoMzPznK/fsGGD0tPTdf/993v0/+Uvf1FQUJB++ctfnlcdKSkpioqKch9xcXHn/yFQ+7aULk
Po1FsKa2BvLQAAoN6o010RUlNT1a1bNyUkJLj7Nm7cqH/+85+aM2eOHA7HeZ1n8uTJys3NdR979+6trZJRE2Xra3uwDAEAANSdagXbpk2bKjAwUFlZWR79WVlZio2NPetrCwoKNHfuXI0dO9aj/6OPPlJ2drZat26toKAgBQUFaffu3Xr00UcVHx9f5bmcTqciIyM9DniJA7ukrD2lyxD62l0NAACoR6oVbENCQtSrVy+lpaW5+1wul9LS0tS379lDzPz581VYWKiRI0d69N9zzz3aunWrNm/e7D5atGihSZMm6f33369OefAGZbO1l7MMAQAA1K2g6r5g4sSJGj16tHr37q2EhARNnz5dBQUFGjNmjCRp1KhRatmypVJSUjxel5qaqiFDhqhJkyYe/U2aNKnUFxwcrNjYWHXs2LG65cFOxpQH254DbS0FAADUP9UOtsOGDdPBgwc1ZcoUZWZmqmfPnlq+fLn7hrI9e/YoIMBzIjgjI0Pr1q3TihUrLk7V8E4HdkrZe6SgYKnLVXZXAwAA6hmHMcbYXcSFysvLU1RUlHJzc1lva6f35kgrXrW2+Br7tN3VAAAAL1IXea1Od0WAH/NYhjDA3loAAEC9RLDFxXFgp5S9t3QZArshAACAukewxcXh3g0hQQoNt7cWAABQLxFsceFYhgAAALwAwRYXbv8O6eCPUnCI1JndEAAAgD0ItrhwZbO1nViGAAAA7EOwxYUxRtrCMgQAAGA/gi0uzP4fpIP7rGUIfCkDAACwEcEWF8a9G0Ki5AyztxYAAFCvEWxRcx67IfS3txYAAFDvEWxRc/u+l3L2S8FOdkMAAAC2I9ii5spmazuzDAEAANiPYIuaqbgMoQfLEAAAgP0ItqiZH7+XDh0oXYaQaHc1AAAABFvU0OY11iPLEAAAgJcg2KL6PHZDGGhrKQAAAGUItqi+vd9KhzOlkFCpc4Ld1QAAAEgi2KImtnxoPXZOtMItAACAFyDYono8liEMsLcWAACACgi2qJ69GeXLEC5nGQIAAPAeBFtUT9lsbZerWIYAAAC8CsEW588YaXPp+lqWIQAAAC9DsMX525MhHcmyZmo7sQwBAAB4F4Itzp97GUJfKcRpby0AAACnIdji/BgjbWE3BAAA4L0Itjg/e76RjmRbX5/bqY/d1QAAAFRCsMX5YRkCAADwcgRbnJvLxZcyAAAAr0ewxbnt+UY6epBlCAAAwKsRbHFuZbO1Xa+WgkPsrQUAAOAMCLY4O5dL2sKXMgAAAO9HsMXZ7d5eugwhXOrY2+5qAAAAzohgi7NjGQIAAPARBFucGcsQAACADyHY4sx2bZNyc6TQcKlTL7urAQAAOCuCLc6sbLa269VSEMsQAACAdyPYomoul7Sl7EsZBtpaCgAAwPkg2KJqu76Wcg9JoRFSxyvtrgYAAOCcCLaoWsXdEFiGAAAAfADBFpW5XNKWj6w2uyEAAAAfQbBFZTu/lvLKliGwGwIAAPANBFtUtnmN9di9nxQUbGspAAAA54tgC0+uEmnrOqvdo7+9tQAAAFQDwRaeypYhhDWQLmM3BAAA4DsItvBUthtCt5+wDAEAAPgUgi3KuUrKv22M3RAAAICPIdii3I50Kf+IFN5Q6nCF3dUAAABUC8EW5ViGAAAAfBjBFhaWIQAAAB9HsIXlh6+kY0dZhgAAAHwWwRaWLWXLEPpJgUH21gIAAFADBFtIJSXSlo+sNssQAACAjyLYQtqx1VqGEBEpdehpdzUAAAA1QrCF524ILEMAAAA+imBb35WUSFvXWe2eA20tBQAA4EIQbOu7H7aUL0No39PuagAAAGqMYFvfba64G0KgvbUAAABcgBoF2xkzZig+Pl6hoaFKTEzUhg0bzjh24MCBcjgclY7BgwdLkoqLi/XYY4+pW7duioiIUIsWLTRq1Cjt37+/Zp8I589jGQK7IQAAAN9W7WA7b948TZw4UVOnTtWmTZvUo0cPJScnKzs7u8rxixYt0oEDB9
xHenq6AgMDNXToUEnS8ePHtWnTJj355JPatGmTFi1apIyMDN1yyy0X9slwbt9vlgpypYgoliEAAACf5zDGmOq8IDExUX369NELL7wgSXK5XIqLi9PDDz+sxx9//Jyvnz59uqZMmaIDBw4oIiKiyjGff/65EhIStHv3brVu3fqc58zLy1NUVJRyc3MVGRlZnY9Tv82bJn26TOp7k3TnI3ZXAwAA/Fhd5LVqzdgWFRVp48aNSkpKKj9BQICSkpK0fv368zpHamqqhg8ffsZQK0m5ublyOByKjo6u8vnCwkLl5eV5HKimklPSV2XLEPrbWwsAAMBFUK1gm5OTo5KSEsXExHj0x8TEKDMz85yv37Bhg9LT03X//fefcczJkyf12GOP6a677jpjmk9JSVFUVJT7iIuLq87HgCR984VUkCc1iJYu7WF3NQAAABesTndFSE1NVbdu3ZSQkFDl88XFxbrzzjtljNHMmTPPeJ7JkycrNzfXfezdu7e2SvZPp4qlJS9Z7d5J7IYAAAD8QrW+Zqpp06YKDAxUVlaWR39WVpZiY2PP+tqCggLNnTtXTz/9dJXPl4Xa3bt364MPPjjr2gun0ymn01md0lHRh4uk7D3WbO31I+2uBgAA4KKo1oxtSEiIevXqpbS0NHefy+VSWlqa+vbte9bXzp8/X4WFhRo5snKQKgu13333nVatWqUmTZpUpyxUx9GD0vv/s9o3j5PCGthbDwAAwEVSrRlbSZo4caJGjx6t3r17KyEhQdOnT1dBQYHGjBkjSRo1apRatmyplJQUj9elpqZqyJAhlUJrcXGxfv7zn2vTpk169913VVJS4l6v27hxY4WEhNT0s6Eqb8+Sik5KbbtYyxAAAAD8RLWD7bBhw3Tw4EFNmTJFmZmZ6tmzp5YvX+6+oWzPnj0KCPCcCM7IyNC6deu0YsWKSufbt2+flixZIknq2bOnx3OrV6/WwIEDq1siziRjo/VNY44A6Y5fSgF88RwAAPAf1d7H1huxj+15OFUs/XWclL1XumaIdPsEuysCAAD1iNftYwsftnahFWobREs33Gt3NQAAABcdwbY+OJItrXjVat/CDWMAAMA/EWzrA/cNY12l3j+zuxoAAIBaQbD1dxkbpS0flt4w9rDkcNhdEQAAQK0g2PqzU0XSwuetdr9bpZaX2lsPAABALSLY+rM1C6WDP0oNG0k3jLa7GgAAgFpFsPVXR7Klla9Zbb5hDAAA1AMEW3/lccMY3zAGAAD8H8HWH33zhXXDWECA9PNfcsMYAACoFwi2/uZUkbToBavdb4jUop2t5QAAANQVgq2/WbOg/IaxQaPsrgYAAKDOEGz9yZEsaUXpDWO3/IIbxgAAQL1CsPUni2dJxYVSu25Sr+vsrgYAAKBOEWz9xTefS1s/sm4Y4xvGAABAPUSw9QfcMAYAAECw9QurF0gH90kNG3PDGAAAqLcItr7ucFb5N4zdyg1jAACg/iLY+rq3Z1o3jF3aXbryp3ZXAwAAYBuCrS/bvkHauo4bxgAAAESw9V2niqRFM6z2NbdJzdvaWw8AAIDNCLa+avV8KYcbxgAAAMoQbH3R4Uxp5etW+9ZfSKER9tYDAADgBQi2vmgxN4wBAACcjmBbEy6XlLnbnvfevkH66uPSG8Z+yQ1jAAAApYLsLsAnffelNOsxKb6z1Hew1HOAFBJa++9bXOEbxvrfLjWPr/33BAAA8BEE25o4sFMKCJR2bbOOt16UeidZIbc2v8529ZtSzn4psomUfE/tvQ8AAIAPItjWxMCfW2tbN7wvfbpMOnRAWve2dbTpVDqLO1Byhl289zycKa3ihjEAAIAzcRhjjN1FXKi8vDxFRUUpNzdXkZGRdfvmLpe1NGH9Umvtq6vE6g8Nl3pdZ4Xclu0v/H1Sp0jpn0jte0j/9zfW1gIAAJ9SF3mNGdsLFRAgdexlHflHpA0rpE+XWksGPn7HOlp3tALuFdfWbBZ322dWqA0IlG7nG8YAAACqwo
xtbXC5pB+2WLO4W9dJJaesfmeYdGXpLG5ch/M7V3GR9Nz9VlAeONRahgAAAOBjmLH1VQEBUocrrOPYUenzFVbIPbhPWv+udbS6TOp7o7VWNzT8zOcqu2EsihvGAAAAzoYZ27pijPR9xVncYqs/JNQKt30HS3GXeS4zOHRA+stYa9b2nt9JV15rT+0AAAAXiBlbf+JwSB16Wsexo9LnK0tncX+0dlb4dJl1k1nfwVKvn1q7Hix+0Qq17XtKVwy0s3oAAACvx4ytnYyRfthqBdwtH3nO4l52hZS+3rphbNJLUmwbe2sFAAC4AMzY+juHw9q+q30P6fbc0lncZVL2HivUStKA2wm1AAAA54Fg6y0ioqwvfhhwh7Qj3VqacKpIup4bxgAAAM4HwdbbOBzSpd2sAwAAAOctwO4CAAAAgIuBYAsAAAC/QLAFAACAXyDYAgAAwC8QbAEAAOAXCLYAAADwCwRbAAAA+AWCLQAAAPwCwRYAAAB+gWALAAAAv0CwBQAAgF8g2AIAAMAvEGwBAADgFwi2AAAA8AtBdhdwMRhjJEl5eXk2VwIAAICqlOW0stxWG/wi2Obn50uS4uLibK4EAAAAZ5Ofn6+oqKhaObfD1GZsriMul0v79+9Xw4YN5XA4lJeXp7i4OO3du1eRkZF2l4daxLWuH7jO9QfXun7gOtcfFa91w4YNlZ+frxYtWiggoHZWw/rFjG1AQIBatWpVqT8yMpL/YOoJrnX9wHWuP7jW9QPXuf4ou9a1NVNbhpvHAAAA4BcItgAAAPALfhlsnU6npk6dKqfTaXcpqGVc6/qB61x/cK3rB65z/VHX19ovbh4DAAAA/HLGFgAAAPUPwRYAAAB+gWALAAAAv0CwBQAAgF/wy2A7Y8YMxcfHKzQ0VImJidqwYYPdJeEsPvzwQ918881q0aKFHA6HFi9e7PG8MUZTpkxR8+bNFRYWpqSkJH333XceYw4fPqwRI0YoMjJS0dHRGjt2rI4dO+YxZuvWrbrmmmsUGhqquLg4Pffcc7X90VBBSkqK+vTpo4YNG+qSSy7RkCFDlJGR4THm5MmTGj9+vJo0aaIGDRrojjvuUFZWlseYPXv2aPDgwQoPD9cll1yiSZMm6dSpUx5j1qxZoyuvvFJOp1Pt27fXnDlzavvjodTMmTPVvXt392bsffv21Xvvved+nmvsn5599lk5HA498sgj7j6utX/4wx/+IIfD4XF06tTJ/bzXXWfjZ+bOnWtCQkLMyy+/bL7++mvzwAMPmOjoaJOVlWV3aTiDZcuWmd/97ndm0aJFRpJ56623PJ5/9tlnTVRUlFm8eLHZsmWLueWWW0zbtm3NiRMn3GMGDRpkevToYT799FPz0Ucfmfbt25u77rrL/Xxubq6JiYkxI0aMMOnp6eaNN94wYWFh5t///nddfcx6Lzk52cyePdukp6ebzZs3mxtvvNG0bt3aHDt2zD3mwQcfNHFxcSYtLc188cUX5qqrrjJXX321+/lTp06Zrl27mqSkJPPll1+aZcuWmaZNm5rJkye7x+zYscOEh4ebiRMnmm3btpnnn3/eBAYGmuXLl9fp562vlixZYpYuXWq+/fZbk5GRYZ544gkTHBxs0tPTjTFcY3+0YcMGEx8fb7p3725+9atfufu51v5h6tSppkuXLubAgQPu4+DBg+7nve06+12wTUhIMOPHj3f/XFJSYlq0aGFSUlJsrArn6/Rg63K5TGxsrPnrX//q7jt69KhxOp3mjTfeMMYYs23bNiPJfP755+4x7733nnE4HGbfvn3GGGNefPFF06hRI1NYWOge89hjj5mOHTvW8ifCmWRnZxtJZu3atcYY67oGBweb+fPnu8ds377dSDLr1683xlj/JyggIMBkZma6x8ycOdNERka6r+1vf/tb06VLF4/3GjZsmElOTq7tj4QzaNSokfl//+//cY39UH5+vunQoYNZuXKlGTBggDvYcq39x9SpU02PHj2qfM4br7NfLUUoKi
rSxo0blZSU5O4LCAhQUlKS1q9fb2NlqKmdO3cqMzPT45pGRUUpMTHRfU3Xr1+v6Oho9e7d2z0mKSlJAQEB+uyzz9xj+vfvr5CQEPeY5ORkZWRk6MiRI3X0aVBRbm6uJKlx48aSpI0bN6q4uNjjWnfq1EmtW7f2uNbdunVTTEyMe0xycrLy8vL09ddfu8dUPEfZGP4OqHslJSWaO3euCgoK1LdvX66xHxo/frwGDx5c6Xpwrf3Ld999pxYtWqhdu3YaMWKE9uzZI8k7r7NfBducnByVlJR4/OFJUkxMjDIzM22qChei7Lqd7ZpmZmbqkksu8Xg+KChIjRs39hhT1Tkqvgfqjsvl0iOPPKKf/OQn6tq1qyTrOoSEhCg6Otpj7OnX+lzX8Uxj8vLydOLEidr4ODjNV199pQYNGsjpdOrBBx/UW2+9pc6dO3ON/czcuXO1adMmpaSkVHqOa+0/EhMTNWfOHC1fvlwzZ87Uzp07dc011yg/P98rr3NQtUYDwEUwfvx4paena926dXaXglrQsWNHbd68Wbm5uVqwYIFGjx6ttWvX2l0WLqK9e/fqV7/6lVauXKnQ0FC7y0EtuuGGG9zt7t27KzExUW3atNGbb76psLAwGyurml/N2DZt2lSBgYGV7sbLyspSbGysTVXhQpRdt7Nd09jYWGVnZ3s8f+rUKR0+fNhjTFXnqPgeqBsTJkzQu+++q9WrV6tVq1bu/tjYWBUVFeno0aMe40+/1ue6jmcaExkZ6ZV/CfujkJAQtW/fXr169VJKSop69Oihf/7zn1xjP7Jx40ZlZ2fryiuvVFBQkIKCgrR27Vr961//UlBQkGJiYrjWfio6OlqXXXaZvv/+e6/8b9qvgm1ISIh69eqltLQ0d5/L5VJaWpr69u1rY2WoqbZt2yo2Ntbjmubl5emzzz5zX9O+ffvq6NGj2rhxo3vMBx98IJfLpcTERPeYDz/8UMXFxe4xK1euVMeOHdWoUaM6+jT1mzFGEyZM0FtvvaUPPvhAbdu29Xi+V69eCg4O9rjWGRkZ2rNnj8e1/uqrrzz+j8zKlSsVGRmpzp07u8dUPEfZGP4OsI/L5VJhYSHX2I9cd911+uqrr7R582b30bt3b40YMcLd5lr7p2PHjumHH35Q8+bNvfO/6Wrfbubl5s6da5xOp5kzZ47Ztm2bGTdunImOjva4Gw/eJT8/33z55Zfmyy+/NJLMtGnTzJdffml2795tjLG2+4qOjjZvv/222bp1q7n11lur3O7riiuuMJ999plZt26d6dChg8d2X0ePHjUxMTHmnnvuMenp6Wbu3LkmPDyc7b7q0EMPPWSioqLMmjVrPLaNOX78uHvMgw8+aFq3bm0++OAD88UXX5i+ffuavn37up8v2zbm+uuvN5s3bzbLly83zZo1q3LbmEmTJpnt27ebGTNmsD1QHXr88cfN2rVrzc6dO83WrVvN448/bhwOh1mxYoUxhmvszyruimAM19pfPProo2bNmjVm586d5uOPPzZJSUmmadOmJjs72xjjfdfZ74KtMcY8//zzpnXr1iYkJMQkJCSYTz/91O6ScBarV682kiodo0ePNsZYW349+eSTJiYmxjidTnPdddeZjIwMj3McOnTI3HXXXaZBgwYmMjLSjBkzxuTn53uM2bJli+nXr59xOp2mZcuW5tlnn62rjwhjqrzGkszs2bPdY06cOGH+7//+zzRq1MiEh4eb2267zRw4cMDjPLt27TI33HCDCQsLM02bNjWPPvqoKS4u9hizevVq07NnTxMSEmLatWvn8R6oXffdd59p06aNCQkJMc2aNTPXXXedO9QawzX2Z6cHW661fxg2bJhp3ry5CQkJMS1btjTDhg0z33//vft5b7vODmOMqf48LwAAAOBd/GqNLQAAAOovgi0AAAD8AsEWAAAAfoFgCwAAAL9AsAUAAIBfINgCAADALxBsAQAA4BcItgAAAPALBFsAAAD4BYItAAAA/ALBFgAAAH
6BYAsAAAC/8P8BOdzS8nmkiaIAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAq0AAAHDCAYAAAAOSiMcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABHfElEQVR4nO3deXyU1d3///dkmwTIwpKEBMKOrAKRXQREEEREcUVEUUGrNbR6u1Rpv3Xp/WtjraW3VqVqFbUuYVFQEdHILiIIEgXBoGUJQhLWrEDW8/vjIkOGJJAJSa5J5vV8PK7HnLnmXJnP5HJ5czhzjsMYYwQAAAB4MT+7CwAAAADOhdAKAAAAr0doBQAAgNcjtAIAAMDrEVoBAADg9QitAAAA8HqEVgAAAHg9QisAAAC8HqEVAAAAXo/QCgAAAK9HaAXgs/bs2SOHw6E33njDlvd/44035HA4tGfPnnp/7yeffFIOh6Pe3xcAaorQCgB17C9/+YsWL15sdxm6/vrrdeWVV9pdBgDUCKEVAOpYVaH1tttu04kTJ9S+ffs6r6GoqEjJycmaMGFCnb8XANSFALsLAABf5e/vL39//3p5r7Vr1yo3N5fQCqDBYqQVgNfYv3+/pk+frujoaDmdTvXq1Uuvv/66JCkzM1MBAQF66qmnKlyXmpoqh8OhF154QZJ09OhRPfzww7rwwgvVrFkzhYWFafz48fruu+/OWcOll16qSy+9tML5O+64Qx06dHA79+yzz+riiy9Wy5YtFRISov79+2vhwoVufRwOh/Lz8/Xmm2/K4XDI4XDojjvukFT1nNaXXnpJvXr1ktPpVGxsrBISEpSVlVWhzt69e2v79u0aNWqUmjRpojZt2uiZZ56p9HN98skn6tmzZ4XPUF5xcbH+93//V507d5bT6VSHDh30+9//XgUFBW79Nm3apHHjxqlVq1YKCQlRx44dNX36dLc+SUlJ6t+/v0JDQxUWFqYLL7xQzz33XJXvDQDnwkgrAK+QmZmpIUOGyOFwaObMmYqMjNSnn36qGTNmKCcnRw888IBGjhyp+fPn64knnnC7dt68efL399eNN94oSdq1a5cWL16sG2+8UR07dlRmZqZefvlljRw5Utu3b1dsbGyt1Pzcc8/p6quv1tSpU1VYWKikpCTdeOONWrJkiWtE8z//+Y/uuusuDRo0SL/61a8kSZ07d67yZz755JN66qmnNGbMGP36179Wamqq5syZo2+++Ubr1q1TYGCgq++xY8d0xRVX6LrrrtNNN92khQsX6tFHH9WFF16o8ePHu/3cpUuX6qqrrjrr57nrrrv05ptv6oYbbtBDDz2kDRs2KDExUTt27NCiRYskSQcPHtTYsWMVGRmpxx57TBEREdqzZ48++OAD189JTk7WlClTNHr0aP31r3+VJO3YsUPr1q3T/fff78FvGADKMQDgBWbMmGFiYmLM4cOH3c7ffPPNJjw83Bw/fty8/PLLRpLZunWrW5+ePXuayy67zPX85MmTpqSkxK3P7t27jdPpNH/605/czkkyc+fOdZ0bOXKkGTlyZIX6br/9dtO+fXu3c8ePH3d7XlhYaHr37u1WizHGNG3a1Nx+++0VfubcuXONJLN7925jjDEHDx40QUFBZuzYsW71v/DCC0aSef31193qlGTeeust17mCggLTunVrc/3117u9z65du4wks3LlSte5J554wpT/X0BKSoqRZO666y63ax9++GEjyaxYscIYY8yiRYuMJPPNN99U+Dxl7r//fhMWFmaKi4ur7AMAnmJ6AADbGWP0/vvva+LEiTLG6PDhw65j3Lhxys7O1rfffqvrrrtOAQEBmjdvnuvabdu2afv27Zo8ebLrnNPplJ+f9Z+3kpISHTlyRM2aNVO3bt307bff1l
rdISEhrvaxY8eUnZ2t4cOH1/g9vvjiCxUWFuqBBx5w1S9Jd999t8LCwvTJJ5+49W/WrJluvfVW1/OgoCANGjRIu3btcuv3ySefKDw8XJdcckmV77106VJJ0oMPPuh2/qGHHnL9DEmKiIiQJC1ZskRFRUWV/qyIiAjl5+crOTn5bB8XADxCaAVgu0OHDikrK0uvvPKKIiMj3Y4777xTkvXX0q1atdLo0aM1f/5817Xz5s1TQECArrvuOte50tJS/eMf/1DXrl3ldDrVqlUrRUZG6vvvv1d2dnat1b1kyRINGTJEwcHBatGihSIjIzVnzpwav8fevXslSd26dXM7HxQUpE6dOrleL9O2bdsKa602b95cx44dczv3ySefaOzYsQoIqHpG2N69e+Xn56cuXbq4nW/durUiIiJc7z1y5Ehdf/31euqpp9SqVStdc801mjt3rtu81/vuu08XXHCBxo8fr7Zt22r69OlatmxZNX8LAFA5QisA25WWlkqSbr31ViUnJ1d6DBs2TJJ08803a+fOnUpJSZEkzZ8/X6NHj1arVq1cP+8vf/mLHnzwQY0YMUJvv/22PvvsMyUnJ6tXr16u96pKVQvul5SUuD1fu3atrr76agUHB+ull17S0qVLlZycrFtuuUXGmJr+KjxS1coD5d//+PHjWrVqVbXXZz3XhgMOh0MLFy7U+vXrNXPmTNeX5/r376+8vDxJUlRUlFJSUvTRRx/p6quv1sqVKzV+/Hjdfvvt1fxkAFARX8QCYLvIyEiFhoaqpKREY8aMOWvfSZMm6Z577nFNEdi5c6dmzZrl1mfhwoUaNWqUXnvtNbfzWVlZbuG2Ms2bN6/w1+uSKoxyvv/++woODtZnn30mp9PpOj937twK11Z356my9VpTU1PVqVMn1/nCwkLt3r37nL+byqxYsUIFBQUVvphV2XuXlpbqp59+Uo8ePVznMzMzlZWVVWEt2SFDhmjIkCH685//rHfffVdTp05VUlKS7rrrLknW6PDEiRM1ceJElZaW6r777tPLL7+sP/7xjxVGcwGgOhhpBWA7f39/XX/99Xr//fe1bdu2Cq8fOnTI1Y6IiNC4ceM0f/58JSUlKSgoSJMmTarw884c7VywYIH2799/zlo6d+6sH3/80e09v/vuO61bt67CezgcDrcR2D179lS6iUDTpk0rLFlVmTFjxigoKEjPP/+8W/2vvfaasrOza7TG6tKlSzVgwABFR0eftV/ZSOz//d//uZ2fPXu2JLne+9ixYxV+t/369ZMk1xSBI0eOuL3u5+enPn36uPUBAE8x0grAKzz99NNauXKlBg8erLvvvls9e/bU0aNH9e233+qLL77Q0aNHXX0nT56sW2+9VS+99JLGjRvn+nJQmauuukp/+tOfdOedd+riiy/W1q1b9c4777iNXlZl+vTpmj17tsaNG6cZM2bo4MGD+te//qVevXopJyfH1W/ChAmaPXu2rrjiCt1yyy06ePCgXnzxRXXp0kXff/+928/s37+/vvjiC82ePVuxsbHq2LGjBg8eXOG9IyMjNWvWLD311FO64oordPXVVys1NVUvvfSSBg4c6Palq+paunSpa17w2fTt21e33367XnnlFWVlZWnkyJHauHGj3nzzTU2aNEmjRo2SJL355pt66aWXdO2116pz587Kzc3Vq6++qrCwMFfwveuuu3T06FFddtllatu2rfbu3at//vOf6tevn9soLgB4xMaVCwDATWZmpklISDBxcXEmMDDQtG7d2owePdq88sorbv1ycnJMSEiIkWTefvvtCj/n5MmT5qGHHjIxMTEmJCTEDBs2zKxfv77CclaVLXlljDFvv/226dSpkwkKCjL9+vUzn332WaVLXr322muma9euxul0mu7du5u5c+dWWErKGGN+/PFHM2LECFfNZctfnbnkVZkXXnjBdO/e3QQGBpro6Gjz61//2hw7dsytz8iRI02vXr0qfPbydW7bts1IMhs3bqzQr7
I6i4qKzFNPPWU6duxoAgMDTVxcnJk1a5Y5efKkq8+3335rpkyZYtq1a2ecTqeJiooyV111ldm0aZOrz8KFC83YsWNNVFSUCQoKMu3atTP33HOPSU9Pr1AHAFSXw5h6+sYAAKBePfPMM5o9e7bS09OrPa8WALwVc1oBoJHq0KGD/vGPfxBYATQKjLQCAADA6zHSCgAAAK9HaAUAAIDXI7QCAADA6zWIdVpLS0t14MABhYaG8oUCAAAAL2SMUW5urmJjY+XnV/vjog0itB44cEBxcXF2lwEAAIBz2Ldvn9q2bVvrP7dBhNbQ0FBJ1i8hLCzM5moAAABwppycHMXFxblyW21rEKG1bEpAWFgYoRUAAMCL1dVUTr6IBQAAAK9HaAUAAIDXI7QCAADA6xFaAQAA4PUIrQAAAPB6hFYAAAB4PUIrAAAAvB6hFQAAAF6P0AoAAACvR2gFAACA1yO0AgAAwOsRWgEAAOD1CK0AAADweoTWMxkjZaZJW1baXQkAAABOCbC7AK9zNEN6errk5y/1GCQFN7W7IgAAAJ/HSOuZWsZIkW2l0hJp57d2VwMAAAARWivXY5D1uH2DvXUAAABAkoehNTExUQMHDlRoaKiioqI0adIkpaamVvv6pKQkORwOTZo0ydM661fPwdbj9o3WHFcAAADYyqPQunr1aiUkJOjrr79WcnKyioqKNHbsWOXn55/z2j179ujhhx/W8OHDa1xsvel8oRQULOUelX752e5qAAAAfJ5HX8RatmyZ2/M33nhDUVFR2rx5s0aMGFHldSUlJZo6daqeeuoprV27VllZWTUqtt4EBEnd+ktb10k7NkhxXe2uCAAAwKed15zW7OxsSVKLFi3O2u9Pf/qToqKiNGPGjPN5u/pVNq91x0Z76wAAAEDNl7wqLS3VAw88oGHDhql3795V9vvyyy/12muvKSUlpdo/u6CgQAUFBa7nOTk5NS2z5spC694dUl621Cy8/msAAACApPMYaU1ISNC2bduUlJRUZZ/c3FzddtttevXVV9WqVatq/+zExESFh4e7jri4uJqWWXMRkVJsZ+uLWKmb6v/9AQAA4OIwxvOvx8+cOVMffvih1qxZo44dO1bZLyUlRfHx8fL393edKy0tlST5+fkpNTVVnTt3rnBdZSOtcXFxys7OVlhYmKfl1twnr0lfvCddNEq67Q/1974AAAANTE5OjsLDw+ssr3k0PcAYo9/85jdatGiRVq1addbAKkndu3fX1q1b3c79v//3/5Sbm6vnnnuuyhFUp9Mpp9PpSWl1o8cgK7T+uMnabMDP/9zXAAAAoNZ5FFoTEhL07rvv6sMPP1RoaKgyMjIkSeHh4QoJCZEkTZs2TW3atFFiYqKCg4MrzHeNiIiQpLPOg/Ua7XtKTUKl47nS3h+ljr3srggAAMAneTSndc6cOcrOztall16qmJgY1zFv3jxXn7S0NKWnp9d6obbw95e6DbDa7I4FAABgG4+nB5zLqlWrzvr6G2+84clb2q/nYGnLSmu91gnT7a4GAADAJ53XOq0+ofsAyeGQ9v9XyjpsdzUAAAA+idB6Ls0ipHbdrPaPbDQAAABgB0JrdfQcYj1uJ7QCAADYgdBaHWW7Y+3cLBUX2lsLAACADyK0VkebLlJoC6nghLRrm93VAAAA+BxCa3X4+Uk9BlrtHUwRAAAAqG+E1urqMdh6ZL1WAACAekdora5uF1nbuB7cJx0+YHc1AAAAPoXQWl0hzaROp7aeZbQVAACgXhFaPVG2igDzWgEAAOoVodUTZeu1/pwiFZ60tRQAAABfQmj1RHQ7qXm0VFwk/ZRidzUAAAA+g9DqCYdD6nlqFYEdzGsFAACoL4RWT5XNa92+QTLG3loAAAB8BKHVU137SYFB0rGDUsZeu6sBAADwCYRWTwUFS136WW2mCAAAANQLQmtNlJ8iAAAAgDpHaK2JstC6e5t0Is/eWgAAAHwAobUmWsVKUe2k0lIpdbPd1QAAADR6hN
aa6snuWAAAAPWF0FpTrvVaN1ojrgAAAKgzhNaa6thbcjaRco9Jv/xkdzUAAACNGqG1pgICpW4XWW2mCAAAANQpQuv56HFqigBLXwEAANQpQuv56DHQetyXak0TAAAAQJ0gtJ6P8FZSmy6SMdKPm+yuBgAAoNEitJ4v19JXTBEAAACoK4TW81U2r/XHTVJJib21AAAANFKE1vPVvrvUJNTaznXPdrurAQAAaJQIrefLz1/qfuoLWUwRAAAAqBOE1tpQfncsAAAA1DpCa23oPkByOKQDu6RjB+2uBgAAoNEhtNaGpuFS+x5Wm9FWAACAWkdorS2uKQLMawUAAKhthNba0uPUeq07t0jFhfbWAgAA0MgQWmtLmy5SWEup8KT03612VwMAANCoEFpri8NxerR1O1MEAAAAahOhtTaxpSsAAECdILTWpgsukvwDpEP7pUO/2F0NAABAo0ForU3BTaVOF1rt7Sx9BQAAUFsIrbWNpa8AAABqHaG1tpV9Gevn76WCE/bWAgAA0EgQWmtbVJzUorVUUiT9tMXuagAAABoFQmttczhOTxFgXisAAECtILTWhfLzWo2xtxYAAIBGgNBaFzr3lQKdUtYhKX233dUAAAA0eITWuhDklLr2s9rsjgUAAHDeCK11pWwVgR3MawUAADhfHoXWxMREDRw4UKGhoYqKitKkSZOUmpp61mteffVVDR8+XM2bN1fz5s01ZswYbdzoA0GubF7rnh+k47n21gIAANDAeRRaV69erYSEBH399ddKTk5WUVGRxo4dq/z8/CqvWbVqlaZMmaKVK1dq/fr1iouL09ixY7V///7zLt6rtWgtRbeXSkul1E12VwMAANCgOYyp+dfbDx06pKioKK1evVojRoyo1jUlJSVq3ry5XnjhBU2bNq1a1+Tk5Cg8PFzZ2dkKCwurabn176OXpZULpAGXS1MftbsaAACAOlPXee285rRmZ2dLklq0aFHta44fP66ioiKPrmmwyqYI/PiNNeIKAACAGgmo6YWlpaV64IEHNGzYMPXu3bva1z366KOKjY3VmDFjquxTUFCggoIC1/OcnJyalmmvjr2l4CZSXpa0b6fUvrvdFQEAADRINR5pTUhI0LZt25SUlFTta55++mklJSVp0aJFCg4OrrJfYmKiwsPDXUdcXFxNy7SXf4DUbYDV3sHSVwAAADVVo9A6c+ZMLVmyRCtXrlTbtm2rdc2zzz6rp59+Wp9//rn69Olz1r6zZs1Sdna269i3b19NyvQOZUtfsV4rAABAjXk0PcAYo9/85jdatGiRVq1apY4dO1brumeeeUZ//vOf9dlnn2nAgAHn7O90OuV0Oj0pzXuVhdZ9O6XcY1Joc3vrAQAAaIA8GmlNSEjQ22+/rXfffVehoaHKyMhQRkaGTpw44eozbdo0zZo1y/X8r3/9q/74xz/q9ddfV4cOHVzX5OXl1d6n8GZhLaS2F1htNhoAAACoEY9C65w5c5Sdna1LL71UMTExrmPevHmuPmlpaUpPT3e7prCwUDfccIPbNc8++2ztfQpv15MpAgAAAOfD4+kB57Jq1Sq353v27PHkLRqnnoOlz9+WUjdLJcXWF7QAAABQbee1TiuqKe4CqWm4dDJf2v2D3dUAAAA0OITW+uDnL3UfaLWZ1woAAOAxQmt9Kdsdi3mtAAAAHiO01pdu/SWHn5SxRzqaaXc1AAAADQqhtb40DZM69LDaTBEAAADwCKG1PjFFAAAAoEYIrfWpx6nQ+tMWqajQ3loAAAAaEEJrfYrtJIW3kooKpJ9T7K4GAACgwSC01ieHQ+pxancs5rUCAABUG6G1vpWf11qNHcYAAABAaK1/F1xkbeN6JF069Ivd1QAAADQIhNb65gyROvex2j+st7cWAACABoLQaoc+l1iPaxZJxawiAAAAcC6EVjsMukIKbyllHZLWL7W7GgAAAK9HaLVDYJB0+VSrnfyuVHjS3noAAAC8HKHVLoPHS82jpdyj0rqP7a
4GAADAqxFa7RIQKI27zWovT5JOHre3HgAAAC9GaLXTgMulyDZSfra0dpHd1QAAAHgtQqud/P2lcdOs9soF0ok8e+sBAADwUoRWu8VfKrXuYAXWVQttLgYAAMA7EVrt5ucvjb/daq9+X8rLtrceAAAAL0Ro9QYXXiK16SIVnJBWzLO7GgAAAK9DaPUGDod05Z1W+8sPpewj9tYDAADgZQit3qLHIKl9D6moQFr+nt3VAAAAeBVCq7coP9r61SfSsYP21gMAAOBFCK3epGu81KWvVFIkJb9jdzUAAABeg9DqTRwOafwdVnvDMunwAVvLAQAA8BaEVm/T6UKp+0CptET67D92VwMAAOAVCK3eqGzd1s3Lpcw0e2sBAADwAoRWb9Suu9T7YsmUSp+9ZXc1AAAAtiO0equyua1bVkkHdtlZCQAAgO0Ird4qtpMUf6nV/vQNGwsBAACwH6HVm42bJjn8pG1fSWk/2l0NAACAbQit3iy6ndR/tNX+9E17awEAALARodXbjbtN8vOXfvxG2rXV7moAAABsQWj1dq1ipcFXWO2lcyVj7K0HAADABoTWhuDyqZJ/oPTf76WftthdDQAAQL0jtDYEzaOki6+y2oy2AgAAH0RobSjGTJECndLeHdKOjXZXAwAAUK8IrQ1FWAvpkmusNqOtAADAxxBaG5LLJkvOEGn/z9LWL+2uBgAAoN4QWhuSZuHSyOut9qdvSqUl9tYDAABQTwitDc2lN0ghzaSMPVLKarurAQAAqBeE1oYmpJk06karvexNqYTRVgAA0PgRWhui4ddKTcOlQ/ulTcl2VwMAAFDnCK0NUXATafTNVvuz/0jFRfbWAwAAUMcIrQ3VsIlSWEvpWKa04VO7qwEAAKhThNaGKihYuvwWq538jlRYYG89AAAAdcij0JqYmKiBAwcqNDRUUVFRmjRpklJTU8953YIFC9S9e3cFBwfrwgsv1NKlS2tcMMoZMl6KiJSyj0jrl9hdDQAAQJ3xKLSuXr1aCQkJ+vrrr5WcnKyioiKNHTtW+fn5VV7z1VdfacqUKZoxY4a2bNmiSZMmadKkSdq2bdt5F+/zAoKkcbdZ7S/ekwpO2FsPAABAHXEYU/P9QA8dOqSoqCitXr1aI0aMqLTP5MmTlZ+fryVLTo8EDhkyRP369dO//vWvar1PTk6OwsPDlZ2drbCwsJqW2ziVFEtPT5cOH5AmzJDGTLG7IgAA4IPqOq+d15zW7OxsSVKLFi2q7LN+/XqNGTPG7dy4ceO0fv3683lrlPEPkMZNs9or50sn8uytBwAAoA7UOLSWlpbqgQce0LBhw9S7d+8q+2VkZCg6OtrtXHR0tDIyMqq8pqCgQDk5OW4HzuKiUVJ0O+l4rrT6A7urAQAAqHU1Dq0JCQnatm2bkpKSarMeSdYXvsLDw11HXFxcrb9Ho+LnL11xu9VevVDKz7a3HgAAgFpWo9A6c+ZMLVmyRCtXrlTbtm3P2rd169bKzMx0O5eZmanWrVtXec2sWbOUnZ3tOvbt21eTMn1Ln+FSbGfp5HFp5QK7qwEAAKhVHoVWY4xmzpypRYsWacWKFerYseM5rxk6dKiWL1/udi45OVlDhw6t8hqn06mwsDC3A+fg5yeNv8Nqr10s5R6zsxoAAIBa5VFoTUhI0Ntvv613331XoaGhysjIUEZGhk6cOL3U0rRp0zRr1izX8/vvv1/Lli3T3//+d/3444968skntWnTJs2cObP2PgUsvYZI7bpLhSel5bU/bQMAAMAuHoXWOXPmKDs7W5deeqliYmJcx7x581x90tLSlJ6e7np+8cUX691339Urr7yivn37auHChVq8ePFZv7yFGnI4pCvvtNrrPpKyDttbDwAAQC05r3Va6wvrtHrAGOmFB6VdW6VhE6Ub7re7IgAA4AO8ep1WeKHyo61ff2ptOgAAANDAEVobo859pO4DrN2yls61uxoAAIDzRmhtrK662xp13bJSSku1uxoAAIDzQmhtrN
p0lvqPttofv2rNdQUAAGigCK2N2fg7pYBA6ecU6cdv7K4GAACgxgitjVmLaGn4JKv98atSaYmt5QAAANQUobWxGz1FCmkmpe+WNi0/d38AAAAvRGht7JqGSWOmWO1P50qFBfbWAwAAUAOEVl8w/FopIlLKOiStXWx3NQAAAB4jtPqCwKDTGw4sf0/Kz7G3HgAAAA8RWn1F/9FSbCfpRJ70xXt2VwMAAOARQquv8POXrrrLaq9dLB3NsLUcAAAATxBafUn3gVLXeKmkSFr6ht3VAAAAVBuh1Zc4HNLEu632t8ul/T/bWw8AAEA1EVp9TdwF0kWjrG1dP37V7moAAACqhdDqi66cLvkHSKmbrQMAAMDLEVp9UcsYadjVVvvjV6XSUnvrAQAAOAdCq6+6fKoU3MSa17plpd3VAAAAnBWh1Vc1C5dG32y1P3ldKi60tx4AAICzILT6shHXSeGtpGOZ0pcf2V0NAABAlQitviwoWLridqud/I61WxYAAIAXIrT6uoFjpej20vFctncFAABei9Dq6/zLb++6SDp20N56AAAAKkFohdRriNS5j1RUKC170+5qAAAAKiC0wn17128+lw7ssrceAACAMxBaYWnfQ+o7wtredcm/7a4GAADADaEVp02YLvn5Szs2Sj+l2F0NAACAC6EVp0W2lS6+ymp//ArbuwIAAK9BaIW7sbdKzhBp307pu9V2VwMAACCJ0IozhTaXRt1ktT95XSousrceAAAAEVpRmUtvsMLrkXTpqyV2VwMAAEBoRSWcIdK4aVb787elk/n21gMAAHweoRWVGzJeioqT8rOlFfPsrgYAAPg4Qisq5x8gTZhhtVe9L2UftrceAADg0witqNqFw6QOvaSiAmnZW3ZXAwAAfBihFVUrv73rhmVSxl576wEAAD6L0Iqz69Rb6n2xZEqlT9jeFQAA2IPQinO76i7Jz0/atl7atdXuagAAgA8itOLcottJg8db7Y9ekYyxtx4AAOBzCK2onnHTpKBgae8O6fu1dlcDAAB8DKEV1RPeUhp5vdX+5DWppNjeegAAgE8htKL6LrtJahouHdovfb3U7moAAIAPIbSi+oKbSuNus9rL3pJOHre3HgAA4DMIrfDM0AlSq1gpL0tatcDuagAAgI8gtMIzAYHSldOt9soFUs5Re+sBAAA+gdAKz/UbKbXrJhWelL54z+5qAACADyC0wnMOhzT+Tqu94VMpP9veegAAQKNHaEXNdOsvxXa2RlvXfWx3NQAAoJHzOLSuWbNGEydOVGxsrBwOhxYvXnzOa9555x317dtXTZo0UUxMjKZPn64jR47UpF54C4dDGnWj1V67WCoqtLUcAADQuHkcWvPz89W3b1+9+OKL1eq/bt06TZs2TTNmzNAPP/ygBQsWaOPGjbr77rs9LhZeJv5SKSLSWklgU7LNxQAAgMYswNMLxo8fr/Hjx1e7//r169WhQwf99re/lSR17NhR99xzj/761796+tbwNv4B1i5ZH/5LWrVQGjxe8mPGCQAAqH11njCGDh2qffv2aenSpTLGKDMzUwsXLtSVV15Z12+N+jDkSmvTgYP7pB/W210NAABopOo8tA4bNkzvvPOOJk+erKCgILVu3Vrh4eFnnV5QUFCgnJwctwNeKriJdPFVVnvlfHtrAQAAjVadh9bt27fr/vvv1+OPP67Nmzdr2bJl2rNnj+69994qr0lMTFR4eLjriIuLq+sycT5GXGtNFdj9g7Rnu93VAACARshhjDE1vtjh0KJFizRp0qQq+9x22206efKkFiw4veXnl19+qeHDh+vAgQOKiYmpcE1BQYEKCgpcz3NychQXF6fs7GyFhYXVtFzUpff+Jm38TLrwEmn6k3ZXAwAA6llOTo7Cw8PrLK/V+Ujr8ePH5XfGl3P8/f0lSVXlZafTqbCwMLcDXm7UTdbjtnXSwV/srQUAADQ6HofWvLw8paSkKCUlRZK0e/dupaSkKC0tTZI0a9YsTZs2zd
V/4sSJ+uCDDzRnzhzt2rVL69at029/+1sNGjRIsbGxtfMpYL/W7aWegyVjpNUL7a4GAAA0Mh6H1k2bNik+Pl7x8fGSpAcffFDx8fF6/PHHJUnp6emuACtJd9xxh2bPnq0XXnhBvXv31o033qhu3brpgw8+qKWPAK9RNtr6zedS7jF7awEAAI3Kec1prS91PUcCtcQY6R8J0r6d0tjbpPG3210RAACoJw1+Tit8iMNxerR13YdS4Ul76wEAAI0GoRW1q89wqUVrKT/HWk0AAACgFhBaUbv8/aVLb7DaqxZKpSX21gMAABoFQitq36BxUpNQ6Ui69P2XdlcDAAAaAUIrap8zRLrkGqu9cr71BS0AAIDzQGhF3bjkGikwSEpLlf77vd3VAACABo7QiroR2lwaMNZqr1xw9r4AAADnQGhF3bn0BmsZrO1fSxl77a4GAAA0YIRW1J2otlLvi632KkZbAQBAzRFaUbfKNhvYtFzKPmJvLQAAoMEitKJudewldegllRRJaxfZXQ0AAGigCK2oe5eVbe36sXTyuL21AACABonQirrXa6gU2VY6mS99vdTuagAAQANEaEXd8/M7vbXr6velkmJ76wEAAA0OoRX1Y+BYqVmElHVISlltdzUAAKCBIbSifgQGScMnWW22dgUAAB4itKL+DJsoBQVL+/8r7fzW7moAAEADQmhF/WkaLg2+wmqvnG9vLQAAoEEhtKJ+jbxecvhJqZutEVcAAIBqILSifrWMkfoOt9or2doVAABUD6EV9a9sa9ctK6VjB+2tBQAANAiEVtS/dt2kLn2l0hJpzQd2VwMAABoAQivsUTbauv4T6USevbUAAACvR2iFPXoMklp3kApOSF99Ync1AADAyxFaYQ+HQxp1o9Ve+4FUXGRvPQAAwKsRWmGfiy6TwltK2Uekb1fYXQ0AAPBihFbYJyBQGn6d1V65gK1dAQBAlQitsNfFEyRnEyljj7Rjo93VAAAAL0Vohb1CmklDr7TabO0KAACqQGiF/UZcJ/n5Sz9/J6Wl2l0NAADwQoRW2K95lBQ/ymoz2goAACpBaIV3KFv+6ru10uED9tYCAAC8DqEV3qFNZ6lbf8mUsrUrAACogNAK71G2teuGZVJ+tr21AAAAr0Johfe44CJrxLXwpLTuY7urAQAAXoTQCu/hcJwebV27WCoqtLUcAADgPQit8C79RlqrCeRlSd98bnc1AADASxBa4V38A6SR11vtVQul0lJ76wEAAF6B0ArvM3i8FNxUOvSL9MN6u6sBAABegNAK7xPcRBo20WqvmC8ZY289AADAdoRWeKfh10r+gdKeH6RNyXZXAwAAbEZohXcKbymNu9Vqv/+CdDTD3noAAICtCK3wXqNvljr2kgqOS28/LZWW2F0RAACwCaEV3svPX5r6mORsIu3eJq2YZ3dFAADAJoRWeLeWMdJ1M632p29K+3baWw8AALAFoRXeb+DlUp/h1vSAt5+2tnkFAAA+hdAK7+dwSDc9IIW1lA6mSR+/andFAACgnhFa0TA0DZemPGK1v/xQ2r7B3noAAEC98ji0rlmzRhMnTlRsbKwcDocWL158zmsKCgr0hz/8Qe3bt5fT6VSHDh30+uuv16Re+LLuA6QR11ntpGelvCxbywEAAPXH49Can5+vvn376sUXX6z2NTfddJOWL1+u1157TampqXrvvffUrVs3T98akCbMkFp3kHKPSfNms1sWAAA+IsDTC8aPH6/x48dXu/+yZcu0evVq7dq1Sy1atJAkdejQwdO3BSxBTunWWdI/EqRtX0kblklDqv/PIwAAaJjqfE7rRx99pAEDBuiZZ55RmzZtdMEFF+jhhx/WiRMn6vqt0Vi16SxdeafVXvSidGi/vfUAAIA65/FIq6d27dqlL7/8UsHBwVq0aJEOHz6s++67T0eOHNHcuXMrvaagoEAFBQWu5zk5OXVdJhqaS2+QdmyUfv5OeidR+s1zkr+/3VUBAIA6UucjraWlpX
I4HHrnnXc0aNAgXXnllZo9e7befPPNKkdbExMTFR4e7jri4uLqukw0NH7+0i2/k4KbSnt/lJLfsbsiAABQh+o8tMbExKhNmzYKDw93nevRo4eMMfrll18qvWbWrFnKzs52Hfv27avrMtEQNY+Wbrzfaie/Le3dYW89AACgztR5aB02bJgOHDigvLw817mdO3fKz89Pbdu2rfQap9OpsLAwtwOo1EWXSReNkkpLrd2yCpgrDQBAY+RxaM3Ly1NKSopSUlIkSbt371ZKSorS0tIkWaOk06ZNc/W/5ZZb1LJlS915553avn271qxZo0ceeUTTp09XSEhI7XwK+LbrfytFREqH90uL59hdDQAAqAMeh9ZNmzYpPj5e8fHxkqQHH3xQ8fHxevzxxyVJ6enprgArSc2aNVNycrKysrI0YMAATZ06VRMnTtTzzz9fSx8BPq9JqHTLo9Z2r18vlbaus7siAABQyxzGeP/q7Dk5OQoPD1d2djZTBVC1j16RVs63tnz93atSWAu7KwIAwGfUdV6r8zmtQL258g4ptrOUn21t8+r9fx4DAADVRGhF4xEQZO2WFRBoreH61cd2VwQAAGoJoRWNS0wH6aq7rfaHL0uZaWftDgAAGgZCKxqf4ZOkC/pLRQXS24lScZHdFQEAgPNEaEXj4+cn3fKItarALz9Jn/3H7ooAAMB5IrSicQpvJd34P1Z7eZK0a6u99QAAgPNCaEXj1W+ENHCsZEqld/4qncy3uyIAAFBDhFY0btclSC1aS0czpA9etLsaAABQQ4RWNG7BTaWpj0oOP+mbz6WUNXZXBAAAaoDQisav04XSmJut9oJ/SFmH7a0HAAB4jNAK3zBumhR3gXQ8V3rvb1Jpqd0VAQAADxBa4Rv8A6zdsgKd0s7N0trFdlcEAAA8QGiF74iKk665x2oveVVK321vPQAAoNoIrfAtF0+Uegyydsl6O1EqLrS7IgAAUA2EVvgWh0Oa8ojULEI6sEta+obdFQEAgGogtML3hDaXJj9otVctkL7/0t56AADAORFa4Zt6XywNnSAZI819Ulr4vFRwwu6qAABAFQit8F3XJUgjrrPa6z6S/n6vtHeHvTUBAIBKEVrhuwKCpGvvk+79qxTeSjq0X3r+fmnZm1JJsd3VAQCAcgitQLf+0u9elS4aZW068Nl/rPB6cJ/dlQEAgFMIrYAkNQmVbvuDdYQ0k9JSpWfvlb780Jr3CgAAbEVoBcq7aJQ16nrBRVJRgfT+P6VXZknZh+2uDAAAn0ZoBc4UESnd87R0bYIUGCT9uEl65m4pZbXdlQEA4LMIrUBl/PykEddKD82R2naVjudKb/6v9PbT0ok8u6sDAMDnEFqBs4luL93/vHT5VMnhJ23+whp1/SnF7soAAPAphFbgXAICpSvvlH77f1KrWCnrkPTSw9KH/5KKCu2uDgAAn0BoBaqrQ0/p4ZetnbQkadVCafZ90v6f7a0LAAAfQGgFPOEMkW76H+mu/09qFiFl7JH+MVNaniSVlthdHQAAjRahFaiJXkOkR/8tXTjM2j1ryb+lFx6SjqTbXRkAAI0SoRWoqWYR0p1PSlMekZxNpN3bpL/9StrwKRsSAABQywitwPlwOKRB46RHXpY6XSgVnJCS/i69/oSUe8zu6gAAaDQIrUBtaBkjJTwrXXW35B8gbfvKWhpr23q7KwMAoFEgtAK1xc9fGj1Z+p8XpZgOUl6W9NofpaRnrTYAAKgxQitQ29p0lv7nJWnUjdb0gQ3LpD9Pk1bMk4pZ1xUAgJogtAJ1ITBIuvoeaeZsqe0F0snj0sevSonTpZTVfFELAAAPOYzx/v975uTkKDw8XNnZ2QoLC7O7HMAzpaXW9q+fvCZlH7HOdewlXfNrqX13e2sDAKCW1HVeI7QC9aXghLRygbRyvlR40jrXf7Q0YYbUPMre2gAAOE+EVhFa0chkHZY+nSt987k1TSAwSLr0Rmn0zdaOWwAANECEVhFa0Ujt+0n6cI
703++t56EtpAl3SgPHWisRAADQgBBaRWhFI2aMtHWd9PEr0uED1rnYztKke6Wu8fbWBgCABwitIrTCBxQXSV9+KH32H+lkvnWu91Bp4j1SVFt7awMAoBoIrSK0wofkZUuf/0da95G16oCfv3TJNdLYW6Wm/LMPAPBehFYRWuGDMtOkj16Rtn9tPW8SagXXYVdLAYH21gYAQCUIrSK0woelbpY+elk6sMt6HtlGmvgrqffF1m5bAAB4CUKrCK3wcaUl0sbPpKVzpdxj1rku/aRr7pHadrW1NAAAyhBaRWgFJFlbwS5PklYtsL645XBYy2NdeacU3sru6gAAPo7QKkIr4OZoprUl7LcrrOdBwdKYKdYGBYFB9tYGAPBZhFYRWoFK7dkuffgv61GSWrSWJv2a+a4AAFvUdV7z8/SCNWvWaOLEiYqNjZXD4dDixYurfe26desUEBCgfv36efq2AM7Uoaf02+ekW2dZ0wOOZkivPyH96zEpc6/d1QEAUKs8Dq35+fnq27evXnzxRY+uy8rK0rRp0zR69GhP3xJAVRwOqf9oadZcacwtkn+gtHOz9MyvpMVzpBN5dlcIAECtOK/pAQ6HQ4sWLdKkSZPO2ffmm29W165d5e/vr8WLFyslJaXa78P0AKCaDh+wpgxs+8p63ixCumqGNHCc5Ofxn1EBAKg2r5seUBNz587Vrl279MQTT9TH2wG+q1WsNONP0j2JUlSclJclJf1d+r+Zp+e+AgDQAAXU9Rv89NNPeuyxx7R27VoFBFTv7QoKClRQUOB6npOTU1flAY1T94HSI69IX34oLXtL2rdTeu630oDLpavuksJb2l0hAAAeqdOR1pKSEt1yyy166qmndMEFF1T7usTERIWHh7uOuLi4OqwSaKQCAqVLb5D+8KY0+Arr3KZkKfEOafk8qbjQ1vIAAPBEnc5pzcrKUvPmzeXv7+86V1paKmOM/P399fnnn+uyyy6rcF1lI61xcXHMaQXOx94fpUUvWI+StSXspPuknoPtrQsA0CjU9ZzWOp0eEBYWpq1bt7qde+mll7RixQotXLhQHTt2rPQ6p9Mpp9NZl6UBvqd9d+m3z0ubvpCWvCod2i+9+gcrtE76tRTZ1u4KAQCoksehNS8vTz///LPr+e7du5WSkqIWLVqoXbt2mjVrlvbv36+33npLfn5+6t27t9v1UVFRCg4OrnAeQD3w85MGjZX6DJM+f0da84G0fYOUulkaeb10+VQpuIndVQIAUIHHc1o3bdqk+Ph4xcfHS5IefPBBxcfH6/HHH5ckpaenKy0trXarBFC7gptKV/9K+t2r1pe2SoqlFfOkv9whfZMslZbaXSEAAG7YxhXwdcZYo62LX7LWeZWs3bauTZDadbO3NgBAg1HXeY3QCsBSXCitel9KfkcqPGnttjXoCmnCdCm0ud3VAQC8HKFVhFagXmUflj7+t7T5C+t5cFNrvmvnPlLbLlJIM3vrAwB4JUKrCK2ALXZtkxa9KP3yk/v5yLZS3AXW1IG4C6Q2XSRniD01AgC8BqFVhFbANqUl0qbl0g/rrV21jmVW7OPwk6LbWQG2LMzGdpYCg+q/XgCAbQitIrQCXiMvywqv5Y/swxX7+flLMR1OBdlTI7IxHa1dugAAjRKhVYRWwKtlH5b2/XQqxKZaj3lZFfv5B0ptOp0ekY3rJkW3l8rtmAcAaLga9I5YAHxAeCvr6D3Uem6MlHXw9Ehs2k7pl53S8VwpLdU6ygQ6pTadrQDbd4TUqbe1agEAAGdgpBVA3TNGOpJeblpBqjU6W3DcvV/rDtLFV0kDxrBKAQA0MEwPEKEVaJRKS6XD+60Qu/NbKWW1tT6sJAUFSxeNki6eaE0lAAB4PUKrCK2ATziRJ236Qlr3sZS59/T5dt2s0df4UVaYBQB4JUKrCK2ATzHGWiP2q4+l79ZKJUXW+eCm0sCxVoBt3d7eGgEAFRBaRWgFfFZelrThM2n9EmtObJnOfaypA32GSQ
GsBwsA3oDQKkIr4PNKS6Wdm6Wvlkjb1kum1DrfLEIafIU0dILUMsbWEgHA1xFaRWgFUE7WIenrpdaRfcQ653BI3QdaUwd6DrY2NwAA1CtCqwitACpRUmJtL/vVx1Lq5tPnIyKtkdfBV1jrxwIA6gWhVYRWAOdwaL+0/hNp4zIpP8c65+cn9R5mjb52jbeeAwDqDKFVhFYA1VRUKH2/1lo2a/e20+cj20gDLpfa97DWfW0Sal+NANBIEVpFaAVQA+m7rS9ufZNcceetVrFWeC072na1ltQCANQYoVWEVgDnoeCEtGWVtevWvp3WLlxncjikyLgzgmwXNjMAAA8QWkVoBVCLjuda4dV1pErHDlbs5/CzNjFwBdluUptOrAsLAFUgtIrQCqCO5R6TfvnpdIhN2ynlHKnYzz9AiunoPiIb09E6X1PGWHNxiwulooJT7aLT7aKC08+dIdZ7Nouo+fsBQB0htIrQCsAG2YdPj8ampVqP+dkV+wUESm06S226WOG1qLBiCD3b8+Iiz2tr0dr6Ulm7btZjmy5SkPP8PzMAnAdCqwitALyAMdY0gn2p7kH2ZH7tvYfDTwoMkgKd1mNA0Knnp87lHpMO7qt4nZ+/FNtJat9datfdeoyMY5kvAPWK0CpCKwAvZYx0+IAVXtN3Sw6dCprO02HTk+fVmWZwIs8KzGk/Snt/lPbukPKyKvYLbmLNwy0Lsu26S+Eta/s3AAAuhFYRWgGgSmUjwGUhNu1Ha35u4cmKfSMiT4/EtutuzY91htR/zQAaJUKrCK0A4JGSEiljjzUKm5Yqpe2QMvZaAbe8shUSXNMKekjR7c7vi2UAfBahVYRWADhvJ49Lv+yU9p4KsWmpUtahiv38A6SoOGtVhLIjtqMUEWWtZwsAVajrvMYfpwHAFwQ3kbr0s44y2Yet8OoakU21dg9L320dbtc3lWI6lAuznawwG9KsHj8EAF/GSCsAwGKMdCxTOrD7dHBN322tWFBaUvk1EZHuo7IxHaXoODZhAHwQ0wNEaAUAWxUXWcE1fbd0YJf1mLGn8p3EJGuprcg4ayS2fJhtHs0yXEAjRmgVoRUAvNKJPCl9j/uobPpu63xlnCHWfNkWra0A2zxKahF96nkUUw2ABo45rQAA7xTSTOrU2zrKGGPNlU3f7T7NIDNNKjhxepexygQ3tUJs8+jTj82jpRZRVrBtGs6XwQAfRmgFANQeh8Oa5xoRKfUYdPp8SbF0aL906BdrWsGxTOnoqeNYprVF7sl8a/rBgV2V/+xA5+kQWyHYRkthLazdwQA0SoRWAEDd8w+w1oRt3b7y1wtOSFkH3YPssUzp6EHpWIaUc1QqKpAOpllHVe9RFpgjoqTm5dpl55uEMloLNFCEVgCA/ZwhUnR766hMcZG1ruyxTOloxqkwm3l6xDbrkDWaeyTdOqoSFFwu2FYRboOb1M1n9AYlxda2v0WF1nSMkKZsJoEGg39SAQDeLyBQahVrHZUpLZGyj1ghNuuQdOyQNXKbdej0kZdlbW97cJ91VCW46akgG1V5uA1tYYVfb1kJobjI+my5x8odR6XcM87lHZPycypeH+i0gnpZiHU2sR6Dyx/lz5Xve+oxKJgRbNQ5QisAoOHz87dWIGgeVXWfwgIp+1AVofagde5kvnWk51srI5xNULB1OEPKPYaUe36218q1yz8PdFrhr7iwXODMqiSQnjqfd0w6nuvh78rPWke38KT1vKjAOnKPefZzynP4VQy+TUKtI6SZ1DTsdLvsfPnXmYuMaiC0AgB8Q5BTimxrHVU5efx0iK1qxLYs7BWetI68rNqr0eGwAmVRgWfX+flLoRFSs+ZS6DmOJmFWcC0psXZAO5kvnci3PntZaD+R5/785PFTfc7sly+ZUus4kWcdNcm+wU0rBtkmYVKTspBbrh1S1qep9bvyD2CU10cQWgEAKBPc5OxfGDPmdFgtPGl9gezMx4ITUuEJqeDkqcey9slyr51xXVkQNuZ0YP
UPkJpFnD2AloXUJqGeT1fw9z8dEmuq7PdRWbg9nmuF2OO50vEc6fip9oncU+fyrNAsnb7+aIbnNTj8pMCgU4fTegwo1670vFMKDDzdDggs17/cozPEfYpEQGDNf1c4b4RWAACqy+E4/Vf5tam01AqrZSG27K/RvX0EsfzvI7yV59eXFJcLtrmng25+TrnQW1ngzbWulaxR3vLBvy4FBp2e8+ssN9fXNQ+4ScW5wOXPhTS1fldMh6gRQisAAHbz86ubMOztykaTm0V4dp0x1hfQigutucrFhdaKCGXzc4vKPXd77czzZ5wr/1rhqT9EnMwvN//3VN/znRLiDLHCbFCIFBAg+QedegyQ/ANPtSt59A+wRntdj4Gnrys77x/o3qdZhNSm8/nV6yUIrQAAoGFxOE7/1X99bP/rmv97xnzeguMV5wO72mc+5ltBWzo9jaQ+9Bgk/eov9fNedYzQCgAAcDa1Mf9XskZwTx4/Pfe38IQ1zaGk2Aq0rsciqfjUeVe76Iw+Z7uu7Fyx1DKmdn4HXoDQCgAAUB8CgqRmQZ5Ph4AkyUtWRgYAAACqRmgFAACA1yO0AgAAwOsRWgEAAOD1PA6ta9as0cSJExUbGyuHw6HFixeftf8HH3ygyy+/XJGRkQoLC9PQoUP12Wef1bReAAAA+CCPQ2t+fr769u2rF198sVr916xZo8svv1xLly7V5s2bNWrUKE2cOFFbtmzxuFgAAAD4JocxxtT4YodDixYt0qRJkzy6rlevXpo8ebIef/zxavXPyclReHi4srOzFRYWVoNKAQAAUJfqOq/V+5zW0tJS5ebmqkWLFvX91gAAAGig6n1zgWeffVZ5eXm66aabquxTUFCggoIC1/OcnJz6KA0AAABeql5HWt9991099dRTmj9/vqKioqrsl5iYqPDwcNcRFxdXj1UCAADA29RbaE1KStJdd92l+fPna8yYMWftO2vWLGVnZ7uOffv21VOVAAAA8Eb1Mj3gvffe0/Tp05WUlKQJEyacs7/T6ZTT6ayHygAAANAQeBxa8/Ly9PPPP7ue7969WykpKWrRooXatWunWbNmaf/+/XrrrbckWVMCbr/9dj333HMaPHiwMjIyJEkhISEKDw+vpY8BAACAxszj6QGbNm1SfHy84uPjJUkPPvig4uPjXctXpaenKy0tzdX/lVdeUXFxsRISEhQTE+M67r///lr6CAAAAGjszmud1vqSnZ2tiIgI7du3j3VaAQAAvFBOTo7i4uKUlZVVJ3+bXu9LXtVEbm6uJLGKAAAAgJfLzc2tk9DaIEZaS0tLdeDAAYWGhsrhcLiSPCOvjRv32Xdwr30D99l3cK99w5n32Rij3NxcxcbGys+v9heoahAjrX5+fmrbtm2F82FhYfzL4AO4z76De+0buM++g3vtG8rf57r8kn29b+MKAAAAeIrQCgAAAK/XIEOr0+nUE088wQYEjRz32Xdwr30D99l3cK99Q33f5wbxRSwAAAD4tgY50goAAADfQmgFAACA1yO0AgAAwOsRWgEAAOD1GlxoffHFF9WhQwcFBwdr8ODB2rhxo90l4SzWrFmjiRMnKjY2Vg6HQ4sXL3Z73Rijxx9/XDExMQoJCdGYMWP0008/ufU5evSopk6dqrCwMEVERGjGjBnKy8tz6/P9999r+PDhCg4OVlxcnJ555pm6/mgoJzExUQMHDlRoaKiioqI0adIkpaamuvU5efKkEhIS1LJlSzVr1kzXX3+9MjMz3fqkpaVpwoQJatKkiaKiovTII4+ouLjYrc+qVat00UUXyel0qkuXLnrjjTfq+uOhnDlz5qhPnz6uxcSHDh2qTz/91PU697lxevrpp+VwOPTAAw+4znGvG4cnn3xSDofD7ejevbvrda+6z6YBSUpKMkFBQeb11183P/zwg7n77rtNRESEyczMtLs0VGHp0qXmD3/4g/nggw+MJLNo0SK3159++mkTHh5uFi9ebL777jtz9dVXm44dO5oTJ0
64+lxxxRWmb9++5uuvvzZr1641Xbp0MVOmTHG9np2dbaKjo83UqVPNtm3bzHvvvWdCQkLMyy+/XF8f0+eNGzfOzJ0712zbts2kpKSYK6+80rRr187k5eW5+tx7770mLi7OLF++3GzatMkMGTLEXHzxxa7Xi4uLTe/evc2YMWPMli1bzNKlS02rVq3MrFmzXH127dplmjRpYh588EGzfft2889//tP4+/ubZcuW1evn9WUfffSR+eSTT8zOnTtNamqq+f3vf28CAwPNtm3bjDHc58Zo48aNpkOHDqZPnz7m/vvvd53nXjcOTzzxhOnVq5dJT093HYcOHXK97k33uUGF1kGDBpmEhATX85KSEhMbG2sSExNtrArVdWZoLS0tNa1btzZ/+9vfXOeysrKM0+k07733njHGmO3btxtJ5ptvvnH1+fTTT43D4TD79+83xhjz0ksvmebNm5uCggJXn0cffdR069atjj8RqnLw4EEjyaxevdoYY93XwMBAs2DBAlefHTt2GElm/fr1xhjrDzh+fn4mIyPD1WfOnDkmLCzMdW9/97vfmV69erm91+TJk824cePq+iPhLJo3b27+/e9/c58bodzcXNO1a1eTnJxsRo4c6Qqt3OvG44knnjB9+/at9DVvu88NZnpAYWGhNm/erDFjxrjO+fn5acyYMVq/fr2NlaGmdu/erYyMDLd7Gh4ersGDB7vu6fr16xUREaEBAwa4+owZM0Z+fn7asGGDq8+IESMUFBTk6jNu3Dilpqbq2LFj9fRpUF52drYkqUWLFpKkzZs3q6ioyO1ed+/eXe3atXO71xdeeKGio6NdfcaNG6ecnBz98MMPrj7lf0ZZH/4bYI+SkhIlJSUpPz9fQ4cO5T43QgkJCZowYUKF+8G9blx++uknxcbGqlOnTpo6darS0tIked99bjCh9fDhwyopKXH7pUhSdHS0MjIybKoK56Psvp3tnmZkZCgqKsrt9YCAALVo0cKtT2U/o/x7oP6UlpbqgQce0LBhw9S7d29J1n0ICgpSRESEW98z7/W57mNVfXJycnTixIm6+DioxNatW9WsWTM5nU7de++9WrRokXr27Ml9bmSSkpL07bffKjExscJr3OvGY/DgwXrjjTe0bNkyzZkzR7t379bw4cOVm5vrdfc5wNMPBwBnk5CQoG3btunLL7+0uxTUkW7duiklJUXZ2dlauHChbr/9dq1evdruslCL9u3bp/vvv1/JyckKDg62uxzUofHjx7vaffr00eDBg9W+fXvNnz9fISEhNlZWUYMZaW3VqpX8/f0rfGMtMzNTrVu3tqkqnI+y+3a2e9q6dWsdPHjQ7fXi4mIdPXrUrU9lP6P8e6B+zJw5U0uWLNHKlSvVtm1b1/nWrVursLBQWVlZbv3PvNfnuo9V9QkLC/O6/7g2ZkFBQerSpYv69++vxMRE9e3bV8899xz3uRHZvHmzDh48qIsuukgBAQEKCAjQ6tWr9fzzzysgIEDR0dHc60YqIiJCF1xwgX7++Wev+3e6wYTWoKAg9e/fX8uXL3edKy0t1fLlyzV06FAbK0NNdezYUa1bt3a7pzk5OdqwYYPrng4dOlRZWVnavHmzq8+KFStUWlqqwYMHu/qsWbNGRUVFrj7Jycnq1q2bmjdvXk+fxrcZYzRz5kwtWrRIK1asUMeOHd1e79+/vwIDA93udWpqqtLS0tzu9datW93+kJKcnKywsDD17NnT1af8zyjrw38D7FVaWqqCggLucyMyevRobd26VSkpKa5jwIABmjp1qqvNvW6c8vLy9N///lcxMTHe9++0R1/bsllSUpJxOp3mjTfeMNu3bze/+tWvTEREhNs31uBdcnNzzZYtW8yWLVuMJDN79myzZcsWs3fvXmOMteRVRESE+fDDD833339vrrnmmkqXvIqPjzcbNmwwX375penatavbkldZWVkmOjra3HbbbWbbtm0mKSnJNGnShCWv6tGvf/1rEx4eblatWuW2bM
rx48ddfe69917Trl07s2LFCrNp0yYzdOhQM3ToUNfrZcumjB071qSkpJhly5aZyMjISpdNeeSRR8yOHTvMiy++yPI49eyxxx4zq1evNrt37zbff/+9eeyxx4zD4TCff/65MYb73JiVXz3AGO51Y/HQQw+ZVatWmd27d5t169aZMWPGmFatWpmDBw8aY7zrPjeo0GqMMf/85z9Nu3btTFBQkBk0aJD5+uuv7S4JZ7Fy5UojqcJx++23G2OsZa/++Mc/mujoaON0Os3o0aNNamqq2884cuSImTJlimnWrJkJCwszd955p8nNzXXr891335lLLrnEOJ1O06ZNG/P000/X10eEMZXeY0lm7ty5rj4nTpww9913n2nevLlp0qSJufbaa016errbz9mzZ48ZP368CQkJMa1atTIPPfSQKSoqcuuzcuVK069fPxMUFGQ6derk9h6oe9OnTzft27c3QUFBJjIy0owePdoVWI3hPjdmZ4ZW7nXjMHnyZBMTE2OCgoJMmzZtzOTJk83PP//set2b7rPDGGM8G5sFAAAA6leDmdMKAAAA30VoBQAAgNcjtAIAAMDrEVoBAADg9QitAAAA8HqEVgAAAHg9QisAAAC8HqEVAAAAXo/QCgAAAK9HaAUAAIDXI7QCAADA6xFaAQAA4PX+f0iKF8S5F/KMAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "tb_dir = os.path.join(WORK_DIR, 'tensorboard_output')\n", + "fname = os.listdir(tb_dir)[0]\n", + "tb_path = os.path.join(tb_dir, fname)\n", + "#\n", + "data = read_tensorboard_file(tb_path)\n", + "print(data.keys())\n", + "_ = plot_image(data, 'loss', 0.9)\n", + "_ = plot_image(data, 'lr', 0)\n", + "_ = plot_image(data, 'evaluation/acc', 0)\n", + "_ = plot_image(data, 'evaluation/loss', 0)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 推理\n", + "推理部分见baichuan_infer.ipynb" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.11" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_infer.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_infer.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..84a25d3e97d26e9e0a7183b2f038c33d46131c19 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_infer.ipynb @@ -0,0 +1,514 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ChatGLM2 推理" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置实验环境\n", + "The following code is copied from baichuan_infer.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2023-07-02 21:48:47,527] [INFO] [real_accelerator.py:110:get_accelerator] Setting ds_accelerator to cuda (auto 
detect)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 21:48:48,006 - modelscope - INFO - PyTorch version 2.0.1 Found.\n", + "2023-07-02 21:48:48,007 - modelscope - INFO - Loading ast index from /home/hackathon/.cache/modelscope/ast_indexer\n", + "2023-07-02 21:48:48,032 - modelscope - INFO - Loading done! Current index file version is 1.6.2, with md5 ddf811ee982377c1357284a2bfda3dec and a total number of 861 components indexed\n", + "2023-07-02 21:48:48,708 - modelscope - INFO - [0, 1]\n", + "2023-07-02 21:48:48,848 - modelscope - INFO - Using device: cuda:0,1\n" + ] + }, + { + "data": { + "text/plain": [ + "device(type='cuda', index=0)" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from _common import *\n", + "from transformers import TextStreamer\n", + "device_ids = [0, 1]\n", + "select_device(device_ids)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Model, Tokenizer\n", + "Note: 你需要设置CKPT_FPATH的内容, 指向`.bin`文件, 或`.pth`文件" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 21:48:49,227 - modelscope - INFO - Development mode use revision: v1.0.3\n", + "The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization. \n", + "The tokenizer class you load from this checkpoint is 'ChatGLMTokenizer'. \n", + "The class this function is called from is 'ChatGLM2Tokenizer'.\n", + "2023-07-02 21:48:49,572 - modelscope - INFO - initialize model from /home/hackathon/.cache/modelscope/hub/ZhipuAI/chatglm2-6b\n", + "Failed to load cpm_kernels:No module named 'cpm_kernels'\n", + "The model weights are not tied. 
Please use the `tie_weights` method before using the `infer_auto_device` function.\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "b72b43e11bec49c78c8097deaffea8a7", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Loading checkpoint shards: 0%| | 0/7 [00:00```JSON\n", + "{\"api_name\": \"modelscope_speech-generation\", \"url\": \"http://90.49.118.175:2603/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"parameters\": {\"text\": \"秋树红叶舞飘零,\n", + "山间小溪水潺潺。\n", + "微风拂面感清凉,\n", + "散步赏景心旷神怡。\", \"gender\": \"woman\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"result\": \"\"}\n", + "```<|endofexec|>\n", + "\n", + "-----------------------------------------------------------------------------------\n", + "[TEST]你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://159.1.4.174:3210/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. 
{\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://172.163.158.154:5325/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. {\"plugin_name\": \"modelscope_text-address\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-address\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"url\": \"http://133.94.12.37:3160/\", \"paths\": [{\"name\": \"modelscope_text-address\", \"model_id\": \"/damo/mgeo_geographic_elements_tagging_chinese_base\", \"method\": \"post\", \"description\": \"针对中文的地址信息,识别出里面的元素,包括省、市、区、镇、社区、道路、路号、POI、楼栋号、户室号等\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的地址信息\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "现在我给你另一条地址,请识别出里面的元素。输入地址:广东省深圳市南山区科技园北区 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-address\", \"url\": \"http://133.94.12.37:3160/damo/mgeo_geographic_elements_tagging_chinese_base\", \"parameters\": {\"text\": \"广东省深圳市南山区科技园北区\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"科技园北区\"}\n", + "```<|endofexec|>\n", + "地址识别结果为:{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"科技园北区\"}。我识别出的元素包括:prov、city、district、town。\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-address\", \"url\": 
\"http://159.1.4.174:3210/damo/mgeo_geographic_elements_tagging_chinese_base\", \"parameters\": {\"text\": \"广东省深圳市南山区科技园北区\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"\", \"community\": \"科技园北区\", \"poi\": \"\"}\n", + "```<|endofexec|>\n", + "地址识别json表示:{\"prov\": \"广东省\", \"city\": \"深圳市\", \"district\": \"南山区\", \"town\": \"\", \"community\": \"科技园北区\", \"poi\": \"\"}。我使用的模型是ModelScope的'damo/mgeo_geographic_elements_tagging_chinese_base'模型。这是基于达摩院联合高德发布的多任务多模态地址预训练底座MGeo模型微调得到的。\n", + "-----------------------------------------------------------------------------------\n", + "[TEST]你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。目前支持的插件信息如下,请自行判断是否需要调用插件来解决当前用户问题。若需要调用插件,则需要将插件调用请求按照json格式给出,必须包含api_name、url、parameters字段,并在其前后使用<|startofthink|>和<|endofthink|>作为标志。然后你需要根据插件API调用结果生成合理的答复;若无需调用插件,则直接给出对应回复即可:\n", + "\n", + "1. {\"name\": \"modelscope_text-translation-zh2en\", \"description\": \"将输入的中文文本翻译成英文\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_text-translation-zh2en\", \"model_id\": \"/damo/nlp_csanmt_translation_zh2en\", \"method\": \"post\", \"description\": \"将输入的中文文本翻译成英文\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的中文文本\", \"required\": \"True\"}]}]}\n", + "\n", + "2. 
{\"name\": \"modelscope_speech-generation\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_speech-generation\", \"model_id\": \"/damo/speech_sambert-hifigan_tts_zh-cn_16k\", \"method\": \"post\", \"description\": \"针对回复的内容,用语音表示,同时可以选择是男声或者女声\", \"parameters\": [{\"name\": \"text\", \"description\": \"要转成语音的文本\", \"required\": \"True\"}, {\"name\": \"gender\", \"description\": \"用户身份\", \"required\": \"True\"}]}]}\n", + "\n", + "3. {\"name\": \"modelscope_image-generation\", \"description\": \"针对文本输入,生成对应的图片\", \"url\": \"http://api-inference.modelscope.cn/api-inference/v1/models\", \"paths\": [{\"name\": \"modelscope_image-generation\", \"model_id\": \"/damo/image_generation\", \"method\": \"post\", \"description\": \"针对文本输入,生成对应的图片\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本信息\", \"required\": \"True\"}]}]} \n", + "\n", + "### 用户\n", + "歌手:古巨基\n", + "歌曲名:爱情马戏班\n", + "经典歌词:情是何等诡秘能令人使出看家把戏;恋爱就像走纲线般惊险;为你献技 像马戏班\n", + "请结合以上信息,编写一个智能音响的播放导语,需要有文采,字数30字以内,凸显一下即将播放该歌曲 \n", + "\n", + "### 助手\n", + "爱情马戏班,由古巨基演唱,是一首充满马戏班元素的浪漫歌曲,歌词中描述了爱情的神秘和危险,是一首值得听一听的浪漫歌曲。\n", + "\n", + "[LABELS]亲爱的主人,今天我为您带来的是古巨基的经典之作——《爱情马戏班》。这首歌曲描绘了情与爱的神秘和惊险,让人们为之倾倒。让我们一起享受这场爱情的马戏表演吧!\n", + "-----------------------------------------------------------------------------------\n", + "[TEST]你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://114.42.178.183:8005/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://93.82.87.89:6631/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://4.105.93.165:8143/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"动物\": null, \"食物\": null, \"颜色\": null}\n", + "这只棕色的狗狗很喜欢吃狗粮。 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://4.105.93.165:8143/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"http://4.105.93.165:8143/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofexec|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"http://4.105.93.165:8143/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofexec|>\n", + "<|startofexec|>```JSON\n", + "{\"prov\": \"http://4.105.93.165:8143/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofexec|>\n", + "<|startofexec|>```JSON\n", + "{\"prov\": 
\"http://4.105.93.165:8143/damo/nlp_structbert_siames\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://114.42.178.183:8005/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"这只棕色的狗狗很喜欢吃狗粮。\", \"schema\": \"{\\\"动物\\\": null, \\\"食物\\\": null, \\\"颜色\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"动物\": [\"狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": [\"棕色\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"动物\": [\"狗狗\"], \"食物\": [\"狗粮\"], \"颜色\": [\"棕色\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "-----------------------------------------------------------------------------------\n", + "[TEST]你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://28.179.171.5:6428/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://100.111.18.38:6408/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://144.67.18.142:6381/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"人物\": null, \"地理位置\": null, \"组织机构\": null}\n", + "谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。 \n", + "\n", + "### 助手\n", + "<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://100.111.18.38:6408/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": null, \"地理位置\": null, \"组织机构\": null}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": null, \"地理位置\": 
null, \"组织机构\": null}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调的通用信息抽取模型。\n", + "\n", + "[LABELS]<|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://100.111.18.38:6408/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"谷歌公司是一家全球知名的科技公司,总部位于美国的加利福尼亚州山景市。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"美国\", \"加利福尼亚州山景市\"], \"组织机构\": [\"谷歌公司\"]}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"美国\", \"加利福尼亚州山景市\"], \"组织机构\": [\"谷歌公司\"]}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "-----------------------------------------------------------------------------------\n" + ] + } + ], + "source": [ + "streamer = TextStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)\n", + "for d in test_dataset[:5]:\n", + " system = d['system']\n", + " user = d['user']\n", + " assistant = d['assistant']\n", + " input_ids = tokenize_function(system, user, None, tokenizer)['input_ids']\n", + " print(f'[TEST]{tokenizer.decode(input_ids)}', end='')\n", + " input_ids = torch.tensor(input_ids)[None].cuda()\n", + " attention_mask = torch.ones_like(input_ids)\n", + " generate_ids = model.generate(input_ids=input_ids, max_new_tokens=512,\n", + " attention_mask=attention_mask,\n", + " streamer=streamer, pad_token_id=tokenizer.eos_token_id, \n", + " temperature=0.7, top_k=50, top_p=0.7, do_sample=True)\n", + " print()\n", + " print(f'[LABELS]{assistant}')\n", + " print('-----------------------------------------------------------------------------------')\n", + " # input('next[ENTER]')" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "hackathon", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", 
+ "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_sft.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_sft.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..b32f2f367e1ec028ac0aa93b555465bd53e894c3 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/llm_agent/chatglm2_sft.ipynb @@ -0,0 +1,1917 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## ChatGLM2 + Lora + Agent\n", + "ChatGLM2-6B 是开源中英双语对话模型 ChatGLM-6B 的第二代版本,在保留了初代模型对话流畅、部署门槛较低等众多优秀特性的基础之上,ChatGLM2-6B 引入了如下新特性:\n", + "\n", + "1. 更强大的性能:基于 ChatGLM 初代模型的开发经验,我们全面升级了 ChatGLM2-6B 的基座模型。ChatGLM2-6B 使用了 GLM 的混合目标函数,经过了 1.4T 中英标识符的预训练与人类偏好对齐训练,评测结果显示,相比于初代模型,ChatGLM2-6B 在 MMLU(+23%)、CEval(+33%)、GSM8K(+571%) 、BBH(+60%)等数据集上的性能取得了大幅度的提升,在同尺寸开源模型中具有较强的竞争力。\n", + "\n", + "2. 更长的上下文:基于 FlashAttention 技术,我们将基座模型的上下文长度(Context Length)由 ChatGLM-6B 的 2K 扩展到了 32K,并在对话阶段使用 8K 的上下文长度训练,允许更多轮次的对话。但当前版本的 ChatGLM2-6B 对单轮超长文档的理解能力有限,我们会在后续迭代升级中着重进行优化。\n", + "\n", + "3. 更高效的推理:基于 Multi-Query Attention 技术,ChatGLM2-6B 有更高效的推理速度和更低的显存占用:在官方的模型实现下,推理速度相比初代提升了 42%,INT4 量化下,6G 显存支持的对话长度由 1K 提升到了 8K。" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "1. Ref: https://modelscope.cn/models/ZhipuAI/chatglm2-6b/summary\n", + "2. 以下脚本可以在2*A10环境下正常运行, 大概占用40G显存\n", + "3. 
python>=3.8" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置实验环境\n", + "The following code is copied from baichuan_sft.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "# !pip install modelscope\n", + "# !pip install numpy pandas matplotlib scikit-learn\n", + "# !pip install transformers datasets\n", + "# !conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia\n", + "# !pip install tqdm tensorboard torchmetrics sentencepiece charset_normalizer accelerate\n", + "\n", + "# !pip install numpy -U # Resolve torchmetrics dependencies and update numpy" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[2023-07-02 20:34:35,987] [INFO] [real_accelerator.py:110:get_accelerator] Setting ds_accelerator to cuda (auto detect)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:34:36,464 - modelscope - INFO - PyTorch version 2.0.1 Found.\n", + "2023-07-02 20:34:36,465 - modelscope - INFO - Loading ast index from /home/hackathon/.cache/modelscope/ast_indexer\n", + "2023-07-02 20:34:36,489 - modelscope - INFO - Loading done! 
Current index file version is 1.6.2, with md5 ddf811ee982377c1357284a2bfda3dec and a total number of 861 components indexed\n", + "2023-07-02 20:34:37,158 - modelscope - INFO - [0, 1]\n", + "2023-07-02 20:34:37,324 - modelscope - INFO - Using device: cuda:0,1\n", + "2023-07-02 20:34:37,326 - modelscope - INFO - Global seed set to 42\n" + ] + } + ], + "source": [ + "from _common import *\n", + "device_ids = [0, 1]\n", + "select_device(device_ids)\n", + "_ = seed_everything(42)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Model, Tokenizer" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:34:37,660 - modelscope - INFO - Development mode use revision: v1.0.3\n", + "The tokenizer class you load from this checkpoint is not the same type as the class this function is called from. It may result in unexpected tokenization. \n", + "The tokenizer class you load from this checkpoint is 'ChatGLMTokenizer'. \n", + "The class this function is called from is 'ChatGLM2Tokenizer'.\n", + "2023-07-02 20:34:38,020 - modelscope - INFO - initialize model from /home/hackathon/.cache/modelscope/hub/ZhipuAI/chatglm2-6b\n", + "Failed to load cpm_kernels:No module named 'cpm_kernels'\n", + "The model weights are not tied. 
Please use the `tie_weights` method before using the `infer_auto_device` function.\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "51826d090fb740e0a7d514e543af843b", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Loading checkpoint shards: 0%| | 0/7 [00:00': 1, '': 2, '': 2}\n", + "2023-07-02 20:34:45,152 - modelscope - INFO - bos_token_id: 1, eos_token_id: 2, pad_token_id: 2\n" + ] + } + ], + "source": [ + "WORK_DIR = 'runs/chatglm2'\n", + "LORA_TARGET_MODULES = ['query_key_value']\n", + "#\n", + "model_dir = snapshot_download('ZhipuAI/chatglm2-6b', 'v1.0.6')\n", + "model, tokenizer = get_chatglm2_model_tokenizer(model_dir)\n", + "#\n", + "GRADIENT_CHECKPOINTING = True\n", + "if GRADIENT_CHECKPOINTING:\n", + " model.gradient_checkpointing_enable()\n", + " model.enable_input_require_grads()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 准备Lora\n", + "The following code is copied from baichun.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:34:45,215 - modelscope - INFO - lora_config: LoRAConfig(rank=8, replace_modules=['query_key_value'], lora_alpha=32, lora_dropout=0.1, merge_weights=True, use_merged_linear=False, enable_lora=None, fan_in_fan_out=False, bias='none', only_lora_trainable=True, pretrained_weights=None)\n", + "2023-07-02 20:34:49,932 - modelscope - INFO - transformer.embedding.word_embeddings.weight: requires_grad=False\n", + "2023-07-02 20:34:49,933 - modelscope - INFO - transformer.encoder.layers.0.input_layernorm.weight: requires_grad=False\n", + "2023-07-02 20:34:49,933 - modelscope - INFO - transformer.encoder.layers.0.self_attention.query_key_value.weight: requires_grad=False\n", + "2023-07-02 20:34:49,933 - modelscope - INFO - transformer.encoder.layers.0.self_attention.query_key_value.bias: 
requires_grad=False\n", + "2023-07-02 20:34:49,934 - modelscope - INFO - transformer.encoder.layers.0.self_attention.query_key_value.lora_A: requires_grad=True\n", + "2023-07-02 20:34:49,934 - modelscope - INFO - transformer.encoder.layers.0.self_attention.query_key_value.lora_B: requires_grad=True\n", + "2023-07-02 20:34:49,934 - modelscope - INFO - transformer.encoder.layers.0.self_attention.dense.weight: requires_grad=False\n", + "2023-07-02 20:34:49,934 - modelscope - INFO - transformer.encoder.layers.0.post_attention_layernorm.weight: requires_grad=False\n", + "2023-07-02 20:34:49,935 - modelscope - INFO - transformer.encoder.layers.0.mlp.dense_h_to_4h.weight: requires_grad=False\n", + "2023-07-02 20:34:49,935 - modelscope - INFO - transformer.encoder.layers.0.mlp.dense_4h_to_h.weight: requires_grad=False\n", + "2023-07-02 20:34:49,936 - modelscope - INFO - transformer.encoder.layers.1.input_layernorm.weight: requires_grad=False\n", + "2023-07-02 20:34:49,936 - modelscope - INFO - transformer.encoder.layers.1.self_attention.query_key_value.weight: requires_grad=False\n", + "2023-07-02 20:34:49,936 - modelscope - INFO - transformer.encoder.layers.1.self_attention.query_key_value.bias: requires_grad=False\n", + "2023-07-02 20:34:49,937 - modelscope - INFO - transformer.encoder.layers.1.self_attention.query_key_value.lora_A: requires_grad=True\n", + "2023-07-02 20:34:49,937 - modelscope - INFO - transformer.encoder.layers.1.self_attention.query_key_value.lora_B: requires_grad=True\n", + "2023-07-02 20:34:49,937 - modelscope - INFO - transformer.encoder.layers.1.self_attention.dense.weight: requires_grad=False\n", + "2023-07-02 20:34:49,938 - modelscope - INFO - transformer.encoder.layers.1.post_attention_layernorm.weight: requires_grad=False\n", + "2023-07-02 20:34:49,938 - modelscope - INFO - transformer.encoder.layers.1.mlp.dense_h_to_4h.weight: requires_grad=False\n", + "2023-07-02 20:34:49,938 - modelscope - INFO - 
transformer.encoder.layers.1.mlp.dense_4h_to_h.weight: requires_grad=False\n", + "2023-07-02 20:34:49,938 - modelscope - INFO - transformer.encoder.layers.2.input_layernorm.weight: requires_grad=False\n", + "2023-07-02 20:34:49,939 - modelscope - INFO - ...\n", + "2023-07-02 20:34:49,941 - modelscope - INFO - ChatGLM2ForConditionalGeneration: 6245.5337M Params (1.9497M Trainable), 0.0000M Buffers.\n", + "2023-07-02 20:34:49,942 - modelscope - INFO - device: cuda:0, dtype: torch.float16\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatGLM2ForConditionalGeneration(\n", + " (transformer): ChatGLMModel(\n", + " (embedding): Embedding(\n", + " (word_embeddings): Embedding(65024, 4096)\n", + " )\n", + " (rotary_pos_emb): RotaryEmbedding()\n", + " (encoder): GLMTransformer(\n", + " (layers): ModuleList(\n", + " (0-27): 28 x GLMBlock(\n", + " (input_layernorm): RMSNorm()\n", + " (self_attention): SelfAttention(\n", + " (query_key_value): Linear(\n", + " in_features=4096, out_features=4608, bias=True\n", + " (lora_dropout): Dropout(p=0.1, inplace=False)\n", + " )\n", + " (core_attention): CoreAttention(\n", + " (attention_dropout): Dropout(p=0.0, inplace=False)\n", + " )\n", + " (dense): Linear(in_features=4096, out_features=4096, bias=False)\n", + " )\n", + " (post_attention_layernorm): RMSNorm()\n", + " (mlp): MLP(\n", + " (dense_h_to_4h): Linear(in_features=4096, out_features=27392, bias=False)\n", + " (dense_4h_to_h): Linear(in_features=13696, out_features=4096, bias=False)\n", + " )\n", + " )\n", + " )\n", + " (final_layernorm): RMSNorm()\n", + " )\n", + " (output_layer): Linear(in_features=4096, out_features=65024, bias=False)\n", + " )\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "LORA_RANK = 8\n", + "LORA_ALPHA = 32\n", + "LORA_DROPOUT_P = 0.1\n", + "lora_config = LoRAConfig(\n", + " target_modules=LORA_TARGET_MODULES,\n", + " r=LORA_RANK,\n", + " lora_alpha=LORA_ALPHA,\n", + " 
lora_dropout=LORA_DROPOUT_P)\n", + "logger.info(f'lora_config: {lora_config}')\n", + "Swift.prepare_model(model, lora_config)\n", + "#\n", + "show_freeze_layers(model)\n", + "print_model_info(model)\n", + "_p = list(model.parameters())[100]\n", + "logger.info(f'device: {_p.device}, dtype: {_p.dtype}')\n", + "model.bfloat16()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 导入Dataset\n", + "The following code is copied from baichuan_sft.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:34:50,040 - modelscope - INFO - No subset_name specified, defaulting to the default\n", + "2023-07-02 20:34:50,479 - modelscope - WARNING - Reusing dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 20:34:50,479 - modelscope - INFO - Generating dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 20:34:50,480 - modelscope - INFO - Reusing cached meta-data file: /home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files/8c9e7b1aa666c8840cb938d877f2b99f\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "dac0fb3841854f6f867f0c639c6b2176", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Downloading data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "beada7f3eb734a6485034e666e60285f", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Extracting data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + 
}, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 5036/5036 [00:12<00:00, 403.83it/s]\n", + "2023-07-02 20:35:03,823 - modelscope - INFO - No subset_name specified, defaulting to the default\n", + "2023-07-02 20:35:04,269 - modelscope - WARNING - Reusing dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 20:35:04,270 - modelscope - INFO - Generating dataset ms_hackathon_23_agent_train_dev (/home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files)\n", + "2023-07-02 20:35:04,270 - modelscope - INFO - Reusing cached meta-data file: /home/hackathon/.cache/modelscope/hub/datasets/modelscope/ms_hackathon_23_agent_train_dev/master/data_files/941b733ec0354c2172a3386d8788bb37\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "82cacd1b06864eabb4e320a93d41691c", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Downloading data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "37d5dbf851b745fb90b12cb1e4167732", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "Extracting data files: 0it [00:00, ?it/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "100%|██████████| 285/285 [00:00<00:00, 380.76it/s]\n", + "2023-07-02 20:35:05,192 - modelscope - INFO - Dataset Token Length: 888.357487±349.060492, min=48.000000, max=2039.000000, size=4982\n", + "2023-07-02 20:35:05,192 - modelscope - INFO - Dataset Token Length: 928.654804±330.133929, min=74.000000, max=1959.000000, size=281\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[INPUT_IDS] 你是达摩院的ModelScopeGPT(魔搭助手),你是个大语言模型, 
是2023年达摩院的工程师训练得到的。你有多种能力,可以通过插件集成魔搭社区的模型api来回复用户的问题,还能解答用户使用模型遇到的问题和模型知识相关问答。1. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://109.199.101.10:1485/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "2. {\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://9.32.64.200:5873/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}}\n", + "\n", + "3. 
{\"plugin_name\": \"modelscope_text-ie\", \"plugin_owner\": \"ModelScopeGPT\", \"plugin_type\": \"default\", \"plugin_schema_for_model\": {\"name\": \"modelscope_text-ie\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"url\": \"http://54.149.78.185:3979/\", \"paths\": [{\"name\": \"modelscope_text-ie\", \"model_id\": \"/damo/nlp_structbert_siamese-uie_chinese-base\", \"method\": \"post\", \"description\": \"针对中文的文本,根据schema要抽取的内容,找出其中对应信息,并用json格式展示\", \"parameters\": [{\"name\": \"text\", \"description\": \"用户输入的文本\", \"required\": \"True\"}, {\"name\": \"schema\", \"description\": \"要抽取信息的json表示\", \"required\": \"True\"}]}]}} \n", + "\n", + "### 用户\n", + "按照给定的schema抽取出下面文本对应的信息\n", + "schema:{\"人物\": null, \"地理位置\": null, \"组织机构\": null}\n", + "近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。 \n", + "\n", + "### 助手\n", + " <|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://9.32.64.200:5873/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n", + "\n", + "[LABLES] ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ 
⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ ⁇ <|startofthink|>```JSON\n", + "{\"api_name\": \"modelscope_text-ie\", \"url\": \"http://9.32.64.200:5873/damo/nlp_structbert_siamese-uie_chinese-base\", \"parameters\": {\"text\": \"近日,美国政府宣布将对中国1000多种商品加征关税,并威胁进一步加征关税。\", \"schema\": \"{\\\"人物\\\": null, \\\"地理位置\\\": null, \\\"组织机构\\\": null}\"}}\n", + "```<|endofthink|>\n", + "\n", + "<|startofexec|>```JSON\n", + "{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}\n", + "```<|endofexec|>\n", + "信息抽取结果:{\"人物\": [], \"地理位置\": [\"中国\", \"美国\"], \"组织机构\": []}。我使用的模型是ModelScope的'damo/nlp_structbert_siamese-uie_chinese-base'模型。这是一个基于StructBERT预训练模型微调训练的通用信息抽取模型。\n" + ] + } + ], + "source": [ + "tokenize_function = partial(tokenize_function, tokenizer=tokenizer)\n", + "train_dataset = make_dataset('train', tokenize_function)\n", + "val_dataset = make_dataset('validation', 
tokenize_function)\n", + "# Data analysis\n", + "stat_dataset(train_dataset)\n", + "stat_dataset(val_dataset)\n", + "data_collate_fn = partial(data_collate_fn, tokenizer=tokenizer)\n", + "print_examples(train_dataset[0], tokenizer)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 配置Config\n", + "The following code is copied from baichuan_sft.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:35:05,244 - modelscope - INFO - work_dir: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505\n" + ] + } + ], + "source": [ + "cfg_file = os.path.join(model_dir, 'configuration.json')\n", + "#\n", + "BATCH_SIZE = 1\n", + "MAX_EPOCHS = 1\n", + "T_max = get_T_max(len(train_dataset), BATCH_SIZE, MAX_EPOCHS, True)\n", + "WORK_DIR = get_work_dir(WORK_DIR)\n", + "EVAL_INTERVAL = 200\n", + "CONFIG = Config({\n", + " 'train': {\n", + " 'dataloader': {\n", + " 'batch_size_per_gpu': BATCH_SIZE,\n", + " 'workers_per_gpu': 1,\n", + " 'shuffle': True,\n", + " 'drop_last': True,\n", + " 'pin_memory': True\n", + " },\n", + " 'max_epochs': MAX_EPOCHS,\n", + " 'work_dir': WORK_DIR,\n", + " 'optimizer': {\n", + " 'type': 'AdamW',\n", + " 'lr': 1e-4,\n", + " 'weight_decay': 0.01,\n", + " 'options': {\n", + " 'cumulative_iters': 16, 'grad_clip': {\n", + " 'norm_type': 2,\n", + " 'max_norm': 2.0\n", + " }\n", + " }\n", + " },\n", + " 'lr_scheduler': {\n", + " 'type': 'CosineAnnealingLR',\n", + " 'T_max': T_max,\n", + " 'eta_min': 1e-5,\n", + " 'options': {\n", + " 'by_epoch': False,\n", + " 'warmup': {\n", + " 'type': 'LinearWarmup',\n", + " 'warmup_ratio': 0.1,\n", + " 'warmup_iters': 200\n", + " }\n", + " }\n", + " },\n", + " 'hooks': [\n", + " {'type': 'CheckpointHook', 'by_epoch': False, 'interval': EVAL_INTERVAL, 'max_checkpoint_num': 1},\n", + " {'type': 'EvaluationHook', 'by_epoch': False, 'interval': 
EVAL_INTERVAL},\n", + " {'type': 'BestCkptSaverHook',\n", + " 'metric_key': 'acc',\n", + " 'save_best': True, 'rule': 'max', 'max_checkpoint_num': 1},\n", + " {'type': 'TextLoggerHook',\n", + " 'by_epoch': True, # Whether EpochBasedTrainer is used\n", + " 'interval': 5},\n", + " {'type': 'TensorboardHook', 'by_epoch': False, 'interval': 5}\n", + " ]\n", + " },\n", + " 'evaluation': {\n", + " 'dataloader': {\n", + " 'batch_size_per_gpu': BATCH_SIZE,\n", + " 'workers_per_gpu': 1,\n", + " 'shuffle': False,\n", + " 'drop_last': False,\n", + " 'pin_memory': True\n", + " },\n", + " 'metrics': [\n", + " {'type': 'my_metric', 'vocab_size': tokenizer.vocab_size}\n", + " ]\n", + " }\n", + "})" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 微调\n", + "The following code is copied from baichuan_sft.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2023-07-02 20:35:05,284 - modelscope - INFO - ==========================Training Config Start==========================\n", + "2023-07-02 20:35:05,285 - modelscope - INFO - {\n", + " \"framework\": \"pytorch\",\n", + " \"task\": \"chat\",\n", + " \"pipeline\": {\n", + " \"type\": \"chatglm26b-text-generation\"\n", + " },\n", + " \"allow_remote\": true,\n", + " \"train\": {\n", + " \"hooks\": [\n", + " {\n", + " \"type\": \"TensorboardHook\",\n", + " \"by_epoch\": false,\n", + " \"interval\": 5\n", + " }\n", + " ],\n", + " \"dataloader\": {\n", + " \"batch_size_per_gpu\": 1,\n", + " \"workers_per_gpu\": 1,\n", + " \"shuffle\": true,\n", + " \"drop_last\": true,\n", + " \"pin_memory\": true\n", + " },\n", + " \"max_epochs\": 1,\n", + " \"work_dir\": \"/home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505\",\n", + " \"optimizer\": {\n", + " \"type\": \"AdamW\",\n", + " \"lr\": 0.0001,\n", + " \"weight_decay\": 0.01,\n", + " \"options\": {\n", + " 
\"cumulative_iters\": 16,\n", + " \"grad_clip\": {\n", + " \"norm_type\": 2,\n", + " \"max_norm\": 2.0\n", + " }\n", + " }\n", + " },\n", + " \"lr_scheduler\": {\n", + " \"type\": \"CosineAnnealingLR\",\n", + " \"T_max\": 4982,\n", + " \"eta_min\": 1e-05,\n", + " \"options\": {\n", + " \"by_epoch\": false,\n", + " \"warmup\": {\n", + " \"type\": \"LinearWarmup\",\n", + " \"warmup_ratio\": 0.1,\n", + " \"warmup_iters\": 200\n", + " }\n", + " }\n", + " },\n", + " \"checkpoint\": {\n", + " \"period\": {\n", + " \"by_epoch\": false,\n", + " \"interval\": 200,\n", + " \"max_checkpoint_num\": 1\n", + " },\n", + " \"best\": {\n", + " \"metric_key\": \"acc\",\n", + " \"save_best\": true,\n", + " \"rule\": \"max\",\n", + " \"max_checkpoint_num\": 1\n", + " }\n", + " },\n", + " \"logging\": {\n", + " \"by_epoch\": true,\n", + " \"interval\": 5\n", + " }\n", + " },\n", + " \"evaluation\": {\n", + " \"dataloader\": {\n", + " \"batch_size_per_gpu\": 1,\n", + " \"workers_per_gpu\": 1,\n", + " \"shuffle\": false,\n", + " \"drop_last\": false,\n", + " \"pin_memory\": true\n", + " },\n", + " \"metrics\": [\n", + " {\n", + " \"type\": \"my_metric\",\n", + " \"vocab_size\": 64794\n", + " }\n", + " ],\n", + " \"period\": {\n", + " \"by_epoch\": false,\n", + " \"interval\": 200\n", + " }\n", + " }\n", + "}\n", + "2023-07-02 20:35:05,285 - modelscope - INFO - ===========================Training Config End===========================\n", + "2023-07-02 20:35:05,286 - modelscope - WARNING - ('OPTIMIZER', 'default', 'AdamW') not found in ast index file\n", + "2023-07-02 20:35:05,287 - modelscope - WARNING - ('LR_SCHEDULER', 'default', 'CosineAnnealingLR') not found in ast index file\n", + "2023-07-02 20:35:05,289 - modelscope - INFO - Stage: before_run:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", 
+ "Stage: before_train_epoch:\n", + " (LOW ) LrSchedulerHook \n", + " -------------------- \n", + "Stage: before_train_iter:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " -------------------- \n", + "Stage: after_train_iter:\n", + " (ABOVE_NORMAL) OptimizerHook \n", + " (NORMAL ) EvaluationHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_train_epoch:\n", + " (NORMAL ) EvaluationHook \n", + " (LOW ) LrSchedulerHook \n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_val_epoch:\n", + " (VERY_LOW ) TextLoggerHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "Stage: after_run:\n", + " (LOW ) BestCkptSaverHook \n", + " (LOW ) CheckpointHook \n", + " (VERY_LOW ) TensorboardHook \n", + " -------------------- \n", + "2023-07-02 20:35:05,293 - modelscope - INFO - Checkpoints will be saved to /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505\n", + "2023-07-02 20:35:05,296 - modelscope - INFO - Checkpoints will be saved to /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505\n", + "2023-07-02 20:35:05,296 - modelscope - INFO - Text logs will be saved to /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505\n", + "2023-07-02 20:35:05,296 - modelscope - INFO - tensorboard files will be saved to /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/tensorboard_output\n", + "2023-07-02 20:35:09,665 - modelscope - INFO - epoch [1][5/4982]\tlr: 1.000e-05, memory: 9310, loss: 4.4797\n", + "2023-07-02 20:35:11,753 - modelscope - INFO - epoch [1][10/4982]\tlr: 1.000e-05, memory: 9653, loss: 4.4281\n", + "2023-07-02 20:35:15,111 - modelscope - INFO - epoch [1][15/4982]\tlr: 1.000e-05, memory: 11498, loss: 
5.4297\n", + "2023-07-02 20:35:18,142 - modelscope - INFO - epoch [1][20/4982]\tlr: 1.225e-05, memory: 12041, loss: 2.6703\n", + "2023-07-02 20:35:21,335 - modelscope - INFO - epoch [1][25/4982]\tlr: 1.450e-05, memory: 12041, loss: 2.5969\n", + "2023-07-02 20:35:24,524 - modelscope - INFO - epoch [1][30/4982]\tlr: 1.675e-05, memory: 12180, loss: 2.7797\n", + "2023-07-02 20:35:27,061 - modelscope - INFO - epoch [1][35/4982]\tlr: 1.900e-05, memory: 12180, loss: 5.0344\n", + "2023-07-02 20:35:29,749 - modelscope - INFO - epoch [1][40/4982]\tlr: 2.125e-05, memory: 12180, loss: 6.1875\n", + "2023-07-02 20:35:32,140 - modelscope - INFO - epoch [1][45/4982]\tlr: 2.350e-05, memory: 12180, loss: 4.5844\n", + "2023-07-02 20:35:35,367 - modelscope - INFO - epoch [1][50/4982]\tlr: 2.575e-05, memory: 12180, loss: 3.3578\n", + "2023-07-02 20:35:37,739 - modelscope - INFO - epoch [1][55/4982]\tlr: 2.800e-05, memory: 12180, loss: 3.0375\n", + "2023-07-02 20:35:41,595 - modelscope - INFO - epoch [1][60/4982]\tlr: 3.025e-05, memory: 12180, loss: 2.7219\n", + "2023-07-02 20:35:44,105 - modelscope - INFO - epoch [1][65/4982]\tlr: 3.250e-05, memory: 12180, loss: 4.8016\n", + "2023-07-02 20:35:46,069 - modelscope - INFO - epoch [1][70/4982]\tlr: 3.475e-05, memory: 12180, loss: 6.9406\n", + "2023-07-02 20:35:48,149 - modelscope - INFO - epoch [1][75/4982]\tlr: 3.700e-05, memory: 12180, loss: 3.2133\n", + "2023-07-02 20:35:50,371 - modelscope - INFO - epoch [1][80/4982]\tlr: 3.925e-05, memory: 12180, loss: 4.3719\n", + "2023-07-02 20:35:53,531 - modelscope - INFO - epoch [1][85/4982]\tlr: 4.150e-05, memory: 12180, loss: 5.8875\n", + "2023-07-02 20:35:55,682 - modelscope - INFO - epoch [1][90/4982]\tlr: 4.375e-05, memory: 12180, loss: 4.9297\n", + "2023-07-02 20:35:57,349 - modelscope - INFO - epoch [1][95/4982]\tlr: 4.600e-05, memory: 12180, loss: 5.8781\n", + "2023-07-02 20:36:00,218 - modelscope - INFO - epoch [1][100/4982]\tlr: 4.825e-05, memory: 12180, loss: 2.4125\n", + "2023-07-02 
20:36:02,674 - modelscope - INFO - epoch [1][105/4982]\tlr: 5.050e-05, memory: 12180, loss: 6.7234\n", + "2023-07-02 20:36:05,443 - modelscope - INFO - epoch [1][110/4982]\tlr: 5.275e-05, memory: 12180, loss: 3.7437\n", + "2023-07-02 20:36:08,231 - modelscope - INFO - epoch [1][115/4982]\tlr: 5.500e-05, memory: 12180, loss: 4.5187\n", + "2023-07-02 20:36:10,992 - modelscope - INFO - epoch [1][120/4982]\tlr: 5.725e-05, memory: 12180, loss: 4.3281\n", + "2023-07-02 20:36:12,907 - modelscope - INFO - epoch [1][125/4982]\tlr: 5.950e-05, memory: 12180, loss: 4.4422\n", + "2023-07-02 20:36:16,210 - modelscope - INFO - epoch [1][130/4982]\tlr: 6.175e-05, memory: 12992, loss: 5.8688\n", + "2023-07-02 20:36:18,791 - modelscope - INFO - epoch [1][135/4982]\tlr: 6.400e-05, memory: 12992, loss: 3.2531\n", + "2023-07-02 20:36:19,911 - modelscope - INFO - epoch [1][140/4982]\tlr: 6.625e-05, memory: 12992, loss: 5.1781\n", + "2023-07-02 20:36:22,445 - modelscope - INFO - epoch [1][145/4982]\tlr: 6.850e-05, memory: 12992, loss: 3.4523\n", + "2023-07-02 20:36:24,826 - modelscope - INFO - epoch [1][150/4982]\tlr: 7.075e-05, memory: 12992, loss: 4.6125\n", + "2023-07-02 20:36:26,567 - modelscope - INFO - epoch [1][155/4982]\tlr: 7.300e-05, memory: 12992, loss: 4.0859\n", + "2023-07-02 20:36:29,936 - modelscope - INFO - epoch [1][160/4982]\tlr: 7.525e-05, memory: 12992, loss: 3.4937\n", + "2023-07-02 20:36:32,253 - modelscope - INFO - epoch [1][165/4982]\tlr: 7.750e-05, memory: 12992, loss: 5.8266\n", + "2023-07-02 20:36:34,867 - modelscope - INFO - epoch [1][170/4982]\tlr: 7.975e-05, memory: 12992, loss: 2.7047\n", + "2023-07-02 20:36:38,118 - modelscope - INFO - epoch [1][175/4982]\tlr: 8.200e-05, memory: 12992, loss: 2.5844\n", + "2023-07-02 20:36:40,913 - modelscope - INFO - epoch [1][180/4982]\tlr: 8.425e-05, memory: 12992, loss: 3.9641\n", + "2023-07-02 20:36:43,807 - modelscope - INFO - epoch [1][185/4982]\tlr: 8.650e-05, memory: 12992, loss: 3.1375\n", + "2023-07-02 
20:36:46,624 - modelscope - INFO - epoch [1][190/4982]\tlr: 8.875e-05, memory: 12992, loss: 3.8813\n", + "2023-07-02 20:36:49,527 - modelscope - INFO - epoch [1][195/4982]\tlr: 9.100e-05, memory: 12992, loss: 3.6156\n", + "2023-07-02 20:36:51,833 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:05<00:00, 4.29it/s]\n", + "2023-07-02 20:37:57,381 - modelscope - INFO - Saving checkpoint at 200 iter\n", + "2023-07-02 20:37:57,410 - modelscope - INFO - Saving checkpoint at 200 iter\n", + "2023-07-02 20:37:57,436 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 12992, evaluation/acc: 0.6542, evaluation/loss: 3.4747, loss: 4.5406\n", + "2023-07-02 20:38:00,375 - modelscope - INFO - epoch [1][205/4982]\tlr: 9.550e-05, memory: 12992, loss: 3.8125\n", + "2023-07-02 20:38:03,071 - modelscope - INFO - epoch [1][210/4982]\tlr: 9.775e-05, memory: 12992, loss: 4.4109\n", + "2023-07-02 20:38:06,715 - modelscope - INFO - epoch [1][215/4982]\tlr: 1.000e-04, memory: 12992, loss: 2.2437\n", + "2023-07-02 20:38:09,499 - modelscope - INFO - epoch [1][220/4982]\tlr: 9.998e-05, memory: 12992, loss: 3.2750\n", + "2023-07-02 20:38:13,188 - modelscope - INFO - epoch [1][225/4982]\tlr: 9.996e-05, memory: 13730, loss: 3.2656\n", + "2023-07-02 20:38:15,237 - modelscope - INFO - epoch [1][230/4982]\tlr: 9.994e-05, memory: 13730, loss: 4.3750\n", + "2023-07-02 20:38:17,706 - modelscope - INFO - epoch [1][235/4982]\tlr: 9.992e-05, memory: 13730, loss: 3.2844\n", + "2023-07-02 20:38:20,429 - modelscope - INFO - epoch [1][240/4982]\tlr: 9.990e-05, memory: 13730, loss: 2.9766\n", + "2023-07-02 20:38:23,127 - modelscope - INFO - epoch [1][245/4982]\tlr: 9.988e-05, memory: 13730, loss: 4.4125\n", + "2023-07-02 20:38:26,058 - modelscope - INFO - epoch [1][250/4982]\tlr: 9.986e-05, memory: 13730, loss: 2.3047\n", + "2023-07-02 20:38:28,740 - modelscope - INFO - epoch [1][255/4982]\tlr: 9.984e-05, memory: 
13730, loss: 3.5484\n", + "2023-07-02 20:38:31,332 - modelscope - INFO - epoch [1][260/4982]\tlr: 9.982e-05, memory: 13730, loss: 4.4297\n", + "2023-07-02 20:38:33,632 - modelscope - INFO - epoch [1][265/4982]\tlr: 9.980e-05, memory: 13730, loss: 5.1078\n", + "2023-07-02 20:38:35,634 - modelscope - INFO - epoch [1][270/4982]\tlr: 9.977e-05, memory: 13730, loss: 4.2250\n", + "2023-07-02 20:38:37,731 - modelscope - INFO - epoch [1][275/4982]\tlr: 9.975e-05, memory: 13730, loss: 4.5984\n", + "2023-07-02 20:38:39,950 - modelscope - INFO - epoch [1][280/4982]\tlr: 9.973e-05, memory: 13730, loss: 4.0594\n", + "2023-07-02 20:38:42,470 - modelscope - INFO - epoch [1][285/4982]\tlr: 9.970e-05, memory: 13730, loss: 2.6523\n", + "2023-07-02 20:38:45,483 - modelscope - INFO - epoch [1][290/4982]\tlr: 9.968e-05, memory: 13730, loss: 2.5766\n", + "2023-07-02 20:38:47,773 - modelscope - INFO - epoch [1][295/4982]\tlr: 9.965e-05, memory: 13730, loss: 2.7078\n", + "2023-07-02 20:38:51,126 - modelscope - INFO - epoch [1][300/4982]\tlr: 9.963e-05, memory: 13730, loss: 5.0844\n", + "2023-07-02 20:38:53,948 - modelscope - INFO - epoch [1][305/4982]\tlr: 9.960e-05, memory: 13730, loss: 3.3844\n", + "2023-07-02 20:38:56,666 - modelscope - INFO - epoch [1][310/4982]\tlr: 9.958e-05, memory: 13730, loss: 3.1812\n", + "2023-07-02 20:38:59,269 - modelscope - INFO - epoch [1][315/4982]\tlr: 9.955e-05, memory: 13730, loss: 3.3219\n", + "2023-07-02 20:39:02,576 - modelscope - INFO - epoch [1][320/4982]\tlr: 9.952e-05, memory: 13730, loss: 2.0031\n", + "2023-07-02 20:39:04,494 - modelscope - INFO - epoch [1][325/4982]\tlr: 9.949e-05, memory: 13730, loss: 3.7469\n", + "2023-07-02 20:39:07,068 - modelscope - INFO - epoch [1][330/4982]\tlr: 9.947e-05, memory: 13730, loss: 3.0187\n", + "2023-07-02 20:39:09,719 - modelscope - INFO - epoch [1][335/4982]\tlr: 9.944e-05, memory: 13730, loss: 2.5828\n", + "2023-07-02 20:39:11,755 - modelscope - INFO - epoch [1][340/4982]\tlr: 9.941e-05, memory: 13730, 
loss: 4.1156\n", + "2023-07-02 20:39:14,258 - modelscope - INFO - epoch [1][345/4982]\tlr: 9.938e-05, memory: 13730, loss: 5.1594\n", + "2023-07-02 20:39:16,436 - modelscope - INFO - epoch [1][350/4982]\tlr: 9.935e-05, memory: 13730, loss: 4.0859\n", + "2023-07-02 20:39:19,643 - modelscope - INFO - epoch [1][355/4982]\tlr: 9.932e-05, memory: 13730, loss: 1.8391\n", + "2023-07-02 20:39:22,779 - modelscope - INFO - epoch [1][360/4982]\tlr: 9.929e-05, memory: 13730, loss: 2.0641\n", + "2023-07-02 20:39:25,402 - modelscope - INFO - epoch [1][365/4982]\tlr: 9.926e-05, memory: 13730, loss: 1.9453\n", + "2023-07-02 20:39:27,813 - modelscope - INFO - epoch [1][370/4982]\tlr: 9.923e-05, memory: 13730, loss: 3.8641\n", + "2023-07-02 20:39:30,315 - modelscope - INFO - epoch [1][375/4982]\tlr: 9.920e-05, memory: 13730, loss: 3.0281\n", + "2023-07-02 20:39:33,075 - modelscope - INFO - epoch [1][380/4982]\tlr: 9.916e-05, memory: 13730, loss: 1.9109\n", + "2023-07-02 20:39:35,539 - modelscope - INFO - epoch [1][385/4982]\tlr: 9.913e-05, memory: 13730, loss: 3.9797\n", + "2023-07-02 20:39:37,804 - modelscope - INFO - epoch [1][390/4982]\tlr: 9.910e-05, memory: 13730, loss: 4.4547\n", + "2023-07-02 20:39:40,277 - modelscope - INFO - epoch [1][395/4982]\tlr: 9.906e-05, memory: 13730, loss: 2.4516\n", + "2023-07-02 20:39:43,900 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.25it/s]\n", + "2023-07-02 20:40:50,049 - modelscope - INFO - Saving checkpoint at 400 iter\n", + "2023-07-02 20:40:50,080 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter200_acc0.6542276740074158\n", + "2023-07-02 20:40:50,083 - modelscope - INFO - Saving checkpoint at 400 iter\n", + "2023-07-02 20:40:50,113 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_200\n", + "2023-07-02 
20:40:50,115 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 13730, evaluation/acc: 0.6604, evaluation/loss: 3.0119, loss: 2.8062\n", + "2023-07-02 20:40:53,254 - modelscope - INFO - epoch [1][405/4982]\tlr: 9.900e-05, memory: 13730, loss: 3.2422\n", + "2023-07-02 20:40:55,618 - modelscope - INFO - epoch [1][410/4982]\tlr: 9.896e-05, memory: 13730, loss: 4.2297\n", + "2023-07-02 20:40:58,448 - modelscope - INFO - epoch [1][415/4982]\tlr: 9.893e-05, memory: 13730, loss: 3.6063\n", + "2023-07-02 20:41:00,872 - modelscope - INFO - epoch [1][420/4982]\tlr: 9.889e-05, memory: 13730, loss: 4.6141\n", + "2023-07-02 20:41:02,997 - modelscope - INFO - epoch [1][425/4982]\tlr: 9.885e-05, memory: 13730, loss: 5.2875\n", + "2023-07-02 20:41:06,866 - modelscope - INFO - epoch [1][430/4982]\tlr: 9.882e-05, memory: 13730, loss: 2.2109\n", + "2023-07-02 20:41:09,155 - modelscope - INFO - epoch [1][435/4982]\tlr: 9.878e-05, memory: 13730, loss: 2.5969\n", + "2023-07-02 20:41:11,158 - modelscope - INFO - epoch [1][440/4982]\tlr: 9.874e-05, memory: 13730, loss: 3.1453\n", + "2023-07-02 20:41:13,695 - modelscope - INFO - epoch [1][445/4982]\tlr: 9.870e-05, memory: 13730, loss: 4.1219\n", + "2023-07-02 20:41:16,481 - modelscope - INFO - epoch [1][450/4982]\tlr: 9.867e-05, memory: 13730, loss: 3.0016\n", + "2023-07-02 20:41:19,595 - modelscope - INFO - epoch [1][455/4982]\tlr: 9.863e-05, memory: 13730, loss: 2.0086\n", + "2023-07-02 20:41:22,798 - modelscope - INFO - epoch [1][460/4982]\tlr: 9.859e-05, memory: 13730, loss: 1.6477\n", + "2023-07-02 20:41:24,516 - modelscope - INFO - epoch [1][465/4982]\tlr: 9.855e-05, memory: 13730, loss: 5.0250\n", + "2023-07-02 20:41:26,807 - modelscope - INFO - epoch [1][470/4982]\tlr: 9.851e-05, memory: 13730, loss: 5.0906\n", + "2023-07-02 20:41:29,550 - modelscope - INFO - epoch [1][475/4982]\tlr: 9.847e-05, memory: 13730, loss: 3.1719\n", + "2023-07-02 20:41:31,558 - modelscope - INFO - epoch [1][480/4982]\tlr: 9.843e-05, memory: 13730, loss: 
3.0094\n", + "2023-07-02 20:41:34,367 - modelscope - INFO - epoch [1][485/4982]\tlr: 9.839e-05, memory: 13730, loss: 1.8000\n", + "2023-07-02 20:41:37,084 - modelscope - INFO - epoch [1][490/4982]\tlr: 9.834e-05, memory: 13730, loss: 3.2406\n", + "2023-07-02 20:41:39,602 - modelscope - INFO - epoch [1][495/4982]\tlr: 9.830e-05, memory: 13730, loss: 2.9141\n", + "2023-07-02 20:41:42,010 - modelscope - INFO - epoch [1][500/4982]\tlr: 9.826e-05, memory: 13730, loss: 3.1969\n", + "2023-07-02 20:41:44,328 - modelscope - INFO - epoch [1][505/4982]\tlr: 9.822e-05, memory: 13730, loss: 2.4125\n", + "2023-07-02 20:41:47,138 - modelscope - INFO - epoch [1][510/4982]\tlr: 9.817e-05, memory: 13730, loss: 2.3031\n", + "2023-07-02 20:41:50,494 - modelscope - INFO - epoch [1][515/4982]\tlr: 9.813e-05, memory: 13730, loss: 2.2938\n", + "2023-07-02 20:41:52,746 - modelscope - INFO - epoch [1][520/4982]\tlr: 9.808e-05, memory: 13730, loss: 3.8672\n", + "2023-07-02 20:41:54,958 - modelscope - INFO - epoch [1][525/4982]\tlr: 9.804e-05, memory: 13730, loss: 3.2156\n", + "2023-07-02 20:41:57,466 - modelscope - INFO - epoch [1][530/4982]\tlr: 9.799e-05, memory: 13730, loss: 3.0344\n", + "2023-07-02 20:42:00,137 - modelscope - INFO - epoch [1][535/4982]\tlr: 9.795e-05, memory: 13730, loss: 4.9406\n", + "2023-07-02 20:42:02,774 - modelscope - INFO - epoch [1][540/4982]\tlr: 9.790e-05, memory: 13730, loss: 3.3563\n", + "2023-07-02 20:42:05,715 - modelscope - INFO - epoch [1][545/4982]\tlr: 9.786e-05, memory: 13730, loss: 1.4797\n", + "2023-07-02 20:42:07,960 - modelscope - INFO - epoch [1][550/4982]\tlr: 9.781e-05, memory: 13730, loss: 3.8781\n", + "2023-07-02 20:42:11,011 - modelscope - INFO - epoch [1][555/4982]\tlr: 9.776e-05, memory: 13730, loss: 2.9297\n", + "2023-07-02 20:42:13,456 - modelscope - INFO - epoch [1][560/4982]\tlr: 9.771e-05, memory: 13730, loss: 3.8203\n", + "2023-07-02 20:42:15,443 - modelscope - INFO - epoch [1][565/4982]\tlr: 9.767e-05, memory: 13730, loss: 2.0219\n", 
+ "2023-07-02 20:42:18,846 - modelscope - INFO - epoch [1][570/4982]\tlr: 9.762e-05, memory: 13730, loss: 1.9281\n", + "2023-07-02 20:42:22,121 - modelscope - INFO - epoch [1][575/4982]\tlr: 9.757e-05, memory: 13730, loss: 2.6750\n", + "2023-07-02 20:42:25,145 - modelscope - INFO - epoch [1][580/4982]\tlr: 9.752e-05, memory: 13730, loss: 1.7852\n", + "2023-07-02 20:42:27,316 - modelscope - INFO - epoch [1][585/4982]\tlr: 9.747e-05, memory: 13730, loss: 2.8047\n", + "2023-07-02 20:42:29,441 - modelscope - INFO - epoch [1][590/4982]\tlr: 9.742e-05, memory: 13730, loss: 2.6773\n", + "2023-07-02 20:42:32,360 - modelscope - INFO - epoch [1][595/4982]\tlr: 9.737e-05, memory: 13730, loss: 1.9812\n", + "2023-07-02 20:42:35,221 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.24it/s]\n", + "2023-07-02 20:43:41,520 - modelscope - INFO - Saving checkpoint at 600 iter\n", + "2023-07-02 20:43:41,550 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter400_acc0.6604225635528564\n", + "2023-07-02 20:43:41,552 - modelscope - INFO - Saving checkpoint at 600 iter\n", + "2023-07-02 20:43:41,582 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_400\n", + "2023-07-02 20:43:41,584 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 13730, evaluation/acc: 0.6708, evaluation/loss: 2.5856, loss: 2.3328\n", + "2023-07-02 20:43:43,999 - modelscope - INFO - epoch [1][605/4982]\tlr: 9.726e-05, memory: 13730, loss: 2.6875\n", + "2023-07-02 20:43:47,119 - modelscope - INFO - epoch [1][610/4982]\tlr: 9.721e-05, memory: 13730, loss: 1.4031\n", + "2023-07-02 20:43:48,961 - modelscope - INFO - epoch [1][615/4982]\tlr: 9.716e-05, memory: 13730, loss: 2.9422\n", + "2023-07-02 20:43:51,931 - modelscope - INFO - epoch [1][620/4982]\tlr: 9.711e-05, memory: 13730, loss: 2.2016\n", 
+ "2023-07-02 20:43:55,085 - modelscope - INFO - epoch [1][625/4982]\tlr: 9.705e-05, memory: 13730, loss: 2.4344\n", + "2023-07-02 20:43:57,859 - modelscope - INFO - epoch [1][630/4982]\tlr: 9.700e-05, memory: 13730, loss: 1.9727\n", + "2023-07-02 20:44:00,652 - modelscope - INFO - epoch [1][635/4982]\tlr: 9.695e-05, memory: 13730, loss: 3.5047\n", + "2023-07-02 20:44:03,525 - modelscope - INFO - epoch [1][640/4982]\tlr: 9.689e-05, memory: 13730, loss: 2.3672\n", + "2023-07-02 20:44:06,457 - modelscope - INFO - epoch [1][645/4982]\tlr: 9.684e-05, memory: 13730, loss: 2.7797\n", + "2023-07-02 20:44:08,691 - modelscope - INFO - epoch [1][650/4982]\tlr: 9.678e-05, memory: 13730, loss: 1.9734\n", + "2023-07-02 20:44:11,608 - modelscope - INFO - epoch [1][655/4982]\tlr: 9.673e-05, memory: 13730, loss: 2.0531\n", + "2023-07-02 20:44:13,499 - modelscope - INFO - epoch [1][660/4982]\tlr: 9.667e-05, memory: 13730, loss: 2.8078\n", + "2023-07-02 20:44:15,767 - modelscope - INFO - epoch [1][665/4982]\tlr: 9.661e-05, memory: 13730, loss: 3.3703\n", + "2023-07-02 20:44:18,064 - modelscope - INFO - epoch [1][670/4982]\tlr: 9.656e-05, memory: 13730, loss: 3.2156\n", + "2023-07-02 20:44:20,955 - modelscope - INFO - epoch [1][675/4982]\tlr: 9.650e-05, memory: 13830, loss: 3.4172\n", + "2023-07-02 20:44:24,557 - modelscope - INFO - epoch [1][680/4982]\tlr: 9.644e-05, memory: 13830, loss: 1.4219\n", + "2023-07-02 20:44:27,433 - modelscope - INFO - epoch [1][685/4982]\tlr: 9.638e-05, memory: 13830, loss: 3.5094\n", + "2023-07-02 20:44:30,177 - modelscope - INFO - epoch [1][690/4982]\tlr: 9.632e-05, memory: 13830, loss: 2.3234\n", + "2023-07-02 20:44:32,790 - modelscope - INFO - epoch [1][695/4982]\tlr: 9.627e-05, memory: 13830, loss: 1.7906\n", + "2023-07-02 20:44:35,003 - modelscope - INFO - epoch [1][700/4982]\tlr: 9.621e-05, memory: 13830, loss: 3.4016\n", + "2023-07-02 20:44:38,237 - modelscope - INFO - epoch [1][705/4982]\tlr: 9.615e-05, memory: 13830, loss: 2.1484\n", + 
"2023-07-02 20:44:42,304 - modelscope - INFO - epoch [1][710/4982]\tlr: 9.609e-05, memory: 13830, loss: 1.9828\n", + "2023-07-02 20:44:45,293 - modelscope - INFO - epoch [1][715/4982]\tlr: 9.602e-05, memory: 13830, loss: 1.6828\n", + "2023-07-02 20:44:48,385 - modelscope - INFO - epoch [1][720/4982]\tlr: 9.596e-05, memory: 13830, loss: 2.0969\n", + "2023-07-02 20:44:50,846 - modelscope - INFO - epoch [1][725/4982]\tlr: 9.590e-05, memory: 13830, loss: 3.2031\n", + "2023-07-02 20:44:53,572 - modelscope - INFO - epoch [1][730/4982]\tlr: 9.584e-05, memory: 13830, loss: 2.8055\n", + "2023-07-02 20:44:54,918 - modelscope - INFO - epoch [1][735/4982]\tlr: 9.578e-05, memory: 13830, loss: 5.0641\n", + "2023-07-02 20:44:58,220 - modelscope - INFO - epoch [1][740/4982]\tlr: 9.572e-05, memory: 13830, loss: 2.5125\n", + "2023-07-02 20:45:01,363 - modelscope - INFO - epoch [1][745/4982]\tlr: 9.565e-05, memory: 13830, loss: 1.5758\n", + "2023-07-02 20:45:03,990 - modelscope - INFO - epoch [1][750/4982]\tlr: 9.559e-05, memory: 13830, loss: 2.3664\n", + "2023-07-02 20:45:06,603 - modelscope - INFO - epoch [1][755/4982]\tlr: 9.553e-05, memory: 13830, loss: 1.8188\n", + "2023-07-02 20:45:09,658 - modelscope - INFO - epoch [1][760/4982]\tlr: 9.546e-05, memory: 13830, loss: 2.6125\n", + "2023-07-02 20:45:12,102 - modelscope - INFO - epoch [1][765/4982]\tlr: 9.540e-05, memory: 13830, loss: 1.7031\n", + "2023-07-02 20:45:14,836 - modelscope - INFO - epoch [1][770/4982]\tlr: 9.533e-05, memory: 13830, loss: 1.7359\n", + "2023-07-02 20:45:17,436 - modelscope - INFO - epoch [1][775/4982]\tlr: 9.527e-05, memory: 13830, loss: 1.4336\n", + "2023-07-02 20:45:20,163 - modelscope - INFO - epoch [1][780/4982]\tlr: 9.520e-05, memory: 13830, loss: 2.5672\n", + "2023-07-02 20:45:23,429 - modelscope - INFO - epoch [1][785/4982]\tlr: 9.513e-05, memory: 13830, loss: 1.9164\n", + "2023-07-02 20:45:26,285 - modelscope - INFO - epoch [1][790/4982]\tlr: 9.507e-05, memory: 13830, loss: 2.3203\n", + 
"2023-07-02 20:45:28,656 - modelscope - INFO - epoch [1][795/4982]\tlr: 9.500e-05, memory: 13830, loss: 2.7672\n", + "2023-07-02 20:45:31,279 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 20:46:37,656 - modelscope - INFO - Saving checkpoint at 800 iter\n", + "2023-07-02 20:46:37,685 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter600_acc0.6708211898803711\n", + "2023-07-02 20:46:37,687 - modelscope - INFO - Saving checkpoint at 800 iter\n", + "2023-07-02 20:46:37,715 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_600\n", + "2023-07-02 20:46:37,718 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 13830, evaluation/acc: 0.6881, evaluation/loss: 2.2625, loss: 2.6750\n", + "2023-07-02 20:46:40,639 - modelscope - INFO - epoch [1][805/4982]\tlr: 9.486e-05, memory: 13830, loss: 1.8695\n", + "2023-07-02 20:46:43,092 - modelscope - INFO - epoch [1][810/4982]\tlr: 9.480e-05, memory: 13830, loss: 2.8734\n", + "2023-07-02 20:46:46,484 - modelscope - INFO - epoch [1][815/4982]\tlr: 9.473e-05, memory: 13830, loss: 1.7906\n", + "2023-07-02 20:46:49,542 - modelscope - INFO - epoch [1][820/4982]\tlr: 9.466e-05, memory: 13830, loss: 2.6391\n", + "2023-07-02 20:46:52,581 - modelscope - INFO - epoch [1][825/4982]\tlr: 9.459e-05, memory: 13830, loss: 2.3250\n", + "2023-07-02 20:46:55,248 - modelscope - INFO - epoch [1][830/4982]\tlr: 9.452e-05, memory: 13830, loss: 2.3188\n", + "2023-07-02 20:46:58,323 - modelscope - INFO - epoch [1][835/4982]\tlr: 9.445e-05, memory: 13830, loss: 1.8852\n", + "2023-07-02 20:47:00,885 - modelscope - INFO - epoch [1][840/4982]\tlr: 9.438e-05, memory: 13830, loss: 2.5203\n", + "2023-07-02 20:47:03,739 - modelscope - INFO - epoch [1][845/4982]\tlr: 9.431e-05, memory: 13830, loss: 2.2563\n", + 
"2023-07-02 20:47:06,494 - modelscope - INFO - epoch [1][850/4982]\tlr: 9.424e-05, memory: 13830, loss: 2.4937\n", + "2023-07-02 20:47:08,653 - modelscope - INFO - epoch [1][855/4982]\tlr: 9.416e-05, memory: 13830, loss: 2.1844\n", + "2023-07-02 20:47:12,100 - modelscope - INFO - epoch [1][860/4982]\tlr: 9.409e-05, memory: 13830, loss: 2.6281\n", + "2023-07-02 20:47:14,954 - modelscope - INFO - epoch [1][865/4982]\tlr: 9.402e-05, memory: 13830, loss: 1.7703\n", + "2023-07-02 20:47:17,549 - modelscope - INFO - epoch [1][870/4982]\tlr: 9.395e-05, memory: 13830, loss: 3.3172\n", + "2023-07-02 20:47:20,094 - modelscope - INFO - epoch [1][875/4982]\tlr: 9.387e-05, memory: 13830, loss: 2.2594\n", + "2023-07-02 20:47:23,556 - modelscope - INFO - epoch [1][880/4982]\tlr: 9.380e-05, memory: 13830, loss: 2.6352\n", + "2023-07-02 20:47:25,327 - modelscope - INFO - epoch [1][885/4982]\tlr: 9.373e-05, memory: 13830, loss: 2.7180\n", + "2023-07-02 20:47:28,177 - modelscope - INFO - epoch [1][890/4982]\tlr: 9.365e-05, memory: 13830, loss: 2.3750\n", + "2023-07-02 20:47:30,955 - modelscope - INFO - epoch [1][895/4982]\tlr: 9.358e-05, memory: 13830, loss: 1.7266\n", + "2023-07-02 20:47:34,940 - modelscope - INFO - epoch [1][900/4982]\tlr: 9.350e-05, memory: 13830, loss: 2.1984\n", + "2023-07-02 20:47:37,402 - modelscope - INFO - epoch [1][905/4982]\tlr: 9.343e-05, memory: 13830, loss: 2.2336\n", + "2023-07-02 20:47:40,011 - modelscope - INFO - epoch [1][910/4982]\tlr: 9.335e-05, memory: 13830, loss: 2.7844\n", + "2023-07-02 20:47:42,601 - modelscope - INFO - epoch [1][915/4982]\tlr: 9.327e-05, memory: 13830, loss: 3.2297\n", + "2023-07-02 20:47:44,837 - modelscope - INFO - epoch [1][920/4982]\tlr: 9.320e-05, memory: 13830, loss: 2.4188\n", + "2023-07-02 20:47:47,897 - modelscope - INFO - epoch [1][925/4982]\tlr: 9.312e-05, memory: 13830, loss: 1.6863\n", + "2023-07-02 20:47:50,418 - modelscope - INFO - epoch [1][930/4982]\tlr: 9.304e-05, memory: 13830, loss: 3.9219\n", + 
"2023-07-02 20:47:52,672 - modelscope - INFO - epoch [1][935/4982]\tlr: 9.296e-05, memory: 13830, loss: 1.6926\n", + "2023-07-02 20:47:55,286 - modelscope - INFO - epoch [1][940/4982]\tlr: 9.289e-05, memory: 13830, loss: 1.7281\n", + "2023-07-02 20:47:59,111 - modelscope - INFO - epoch [1][945/4982]\tlr: 9.281e-05, memory: 13830, loss: 1.1969\n", + "2023-07-02 20:48:01,843 - modelscope - INFO - epoch [1][950/4982]\tlr: 9.273e-05, memory: 13830, loss: 1.6633\n", + "2023-07-02 20:48:04,387 - modelscope - INFO - epoch [1][955/4982]\tlr: 9.265e-05, memory: 13830, loss: 2.2094\n", + "2023-07-02 20:48:06,681 - modelscope - INFO - epoch [1][960/4982]\tlr: 9.257e-05, memory: 13830, loss: 2.1922\n", + "2023-07-02 20:48:09,850 - modelscope - INFO - epoch [1][965/4982]\tlr: 9.249e-05, memory: 13830, loss: 1.3594\n", + "2023-07-02 20:48:12,651 - modelscope - INFO - epoch [1][970/4982]\tlr: 9.241e-05, memory: 13830, loss: 1.7945\n", + "2023-07-02 20:48:15,819 - modelscope - INFO - epoch [1][975/4982]\tlr: 9.233e-05, memory: 13830, loss: 1.7203\n", + "2023-07-02 20:48:18,453 - modelscope - INFO - epoch [1][980/4982]\tlr: 9.225e-05, memory: 13830, loss: 1.8453\n", + "2023-07-02 20:48:20,628 - modelscope - INFO - epoch [1][985/4982]\tlr: 9.216e-05, memory: 13830, loss: 1.8086\n", + "2023-07-02 20:48:22,947 - modelscope - INFO - epoch [1][990/4982]\tlr: 9.208e-05, memory: 13830, loss: 2.6445\n", + "2023-07-02 20:48:25,309 - modelscope - INFO - epoch [1][995/4982]\tlr: 9.200e-05, memory: 13830, loss: 3.2172\n", + "2023-07-02 20:48:28,028 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 20:49:34,496 - modelscope - INFO - Saving checkpoint at 1000 iter\n", + "2023-07-02 20:49:34,522 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter800_acc0.6881153583526611\n", + "2023-07-02 20:49:34,524 - 
modelscope - INFO - Saving checkpoint at 1000 iter\n", + "2023-07-02 20:49:34,548 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_800\n", + "2023-07-02 20:49:34,551 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 13830, evaluation/acc: 0.7003, evaluation/loss: 2.0893, loss: 2.7594\n", + "2023-07-02 20:49:37,631 - modelscope - INFO - epoch [1][1005/4982]\tlr: 9.183e-05, memory: 13830, loss: 1.3188\n", + "2023-07-02 20:49:40,106 - modelscope - INFO - epoch [1][1010/4982]\tlr: 9.175e-05, memory: 13830, loss: 2.3094\n", + "2023-07-02 20:49:42,559 - modelscope - INFO - epoch [1][1015/4982]\tlr: 9.167e-05, memory: 13830, loss: 2.4734\n", + "2023-07-02 20:49:44,919 - modelscope - INFO - epoch [1][1020/4982]\tlr: 9.158e-05, memory: 13830, loss: 2.0336\n", + "2023-07-02 20:49:49,264 - modelscope - INFO - epoch [1][1025/4982]\tlr: 9.150e-05, memory: 13861, loss: 1.0523\n", + "2023-07-02 20:49:51,204 - modelscope - INFO - epoch [1][1030/4982]\tlr: 9.141e-05, memory: 13861, loss: 3.1086\n", + "2023-07-02 20:49:53,066 - modelscope - INFO - epoch [1][1035/4982]\tlr: 9.133e-05, memory: 13861, loss: 2.3414\n", + "2023-07-02 20:49:56,035 - modelscope - INFO - epoch [1][1040/4982]\tlr: 9.124e-05, memory: 13861, loss: 2.2359\n", + "2023-07-02 20:49:59,351 - modelscope - INFO - epoch [1][1045/4982]\tlr: 9.116e-05, memory: 13861, loss: 1.9051\n", + "2023-07-02 20:50:01,989 - modelscope - INFO - epoch [1][1050/4982]\tlr: 9.107e-05, memory: 13861, loss: 1.5266\n", + "2023-07-02 20:50:04,982 - modelscope - INFO - epoch [1][1055/4982]\tlr: 9.098e-05, memory: 13861, loss: 2.5000\n", + "2023-07-02 20:50:07,348 - modelscope - INFO - epoch [1][1060/4982]\tlr: 9.090e-05, memory: 13861, loss: 2.9164\n", + "2023-07-02 20:50:10,149 - modelscope - INFO - epoch [1][1065/4982]\tlr: 9.081e-05, memory: 13861, loss: 2.1641\n", + "2023-07-02 20:50:13,289 - modelscope - INFO - epoch [1][1070/4982]\tlr: 9.072e-05, memory: 13861, loss: 
2.7469\n", + "2023-07-02 20:50:16,220 - modelscope - INFO - epoch [1][1075/4982]\tlr: 9.063e-05, memory: 13861, loss: 2.2922\n", + "2023-07-02 20:50:18,255 - modelscope - INFO - epoch [1][1080/4982]\tlr: 9.054e-05, memory: 13861, loss: 3.7016\n", + "2023-07-02 20:50:21,566 - modelscope - INFO - epoch [1][1085/4982]\tlr: 9.046e-05, memory: 13861, loss: 1.1164\n", + "2023-07-02 20:50:24,961 - modelscope - INFO - epoch [1][1090/4982]\tlr: 9.037e-05, memory: 13861, loss: 1.5523\n", + "2023-07-02 20:50:28,072 - modelscope - INFO - epoch [1][1095/4982]\tlr: 9.028e-05, memory: 13861, loss: 1.9781\n", + "2023-07-02 20:50:31,178 - modelscope - INFO - epoch [1][1100/4982]\tlr: 9.019e-05, memory: 13861, loss: 2.0867\n", + "2023-07-02 20:50:33,103 - modelscope - INFO - epoch [1][1105/4982]\tlr: 9.010e-05, memory: 13861, loss: 2.9258\n", + "2023-07-02 20:50:37,069 - modelscope - INFO - epoch [1][1110/4982]\tlr: 9.001e-05, memory: 14281, loss: 1.8297\n", + "2023-07-02 20:50:39,077 - modelscope - INFO - epoch [1][1115/4982]\tlr: 8.992e-05, memory: 14281, loss: 2.1539\n", + "2023-07-02 20:50:41,028 - modelscope - INFO - epoch [1][1120/4982]\tlr: 8.982e-05, memory: 14281, loss: 2.4891\n", + "2023-07-02 20:50:43,285 - modelscope - INFO - epoch [1][1125/4982]\tlr: 8.973e-05, memory: 14281, loss: 1.7930\n", + "2023-07-02 20:50:46,047 - modelscope - INFO - epoch [1][1130/4982]\tlr: 8.964e-05, memory: 14281, loss: 1.1984\n", + "2023-07-02 20:50:49,011 - modelscope - INFO - epoch [1][1135/4982]\tlr: 8.955e-05, memory: 14281, loss: 3.1102\n", + "2023-07-02 20:50:51,386 - modelscope - INFO - epoch [1][1140/4982]\tlr: 8.946e-05, memory: 14281, loss: 2.2969\n", + "2023-07-02 20:50:54,463 - modelscope - INFO - epoch [1][1145/4982]\tlr: 8.936e-05, memory: 14281, loss: 1.7891\n", + "2023-07-02 20:50:56,539 - modelscope - INFO - epoch [1][1150/4982]\tlr: 8.927e-05, memory: 14281, loss: 2.6641\n", + "2023-07-02 20:50:58,715 - modelscope - INFO - epoch [1][1155/4982]\tlr: 8.918e-05, memory: 14281, 
loss: 2.5141\n", + "2023-07-02 20:51:01,359 - modelscope - INFO - epoch [1][1160/4982]\tlr: 8.908e-05, memory: 14281, loss: 1.7031\n", + "2023-07-02 20:51:04,218 - modelscope - INFO - epoch [1][1165/4982]\tlr: 8.899e-05, memory: 14281, loss: 2.7891\n", + "2023-07-02 20:51:07,009 - modelscope - INFO - epoch [1][1170/4982]\tlr: 8.889e-05, memory: 14281, loss: 1.6977\n", + "2023-07-02 20:51:09,989 - modelscope - INFO - epoch [1][1175/4982]\tlr: 8.880e-05, memory: 14281, loss: 1.7984\n", + "2023-07-02 20:51:13,347 - modelscope - INFO - epoch [1][1180/4982]\tlr: 8.870e-05, memory: 14281, loss: 1.7750\n", + "2023-07-02 20:51:16,349 - modelscope - INFO - epoch [1][1185/4982]\tlr: 8.861e-05, memory: 14281, loss: 2.2219\n", + "2023-07-02 20:51:18,901 - modelscope - INFO - epoch [1][1190/4982]\tlr: 8.851e-05, memory: 14281, loss: 2.1070\n", + "2023-07-02 20:51:22,332 - modelscope - INFO - epoch [1][1195/4982]\tlr: 8.841e-05, memory: 14281, loss: 1.3805\n", + "2023-07-02 20:51:25,298 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 20:52:31,792 - modelscope - INFO - Saving checkpoint at 1200 iter\n", + "2023-07-02 20:52:31,820 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter1000_acc0.7003207802772522\n", + "2023-07-02 20:52:31,822 - modelscope - INFO - Saving checkpoint at 1200 iter\n", + "2023-07-02 20:52:31,848 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_1000\n", + "2023-07-02 20:52:31,851 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14281, evaluation/acc: 0.7126, evaluation/loss: 1.9764, loss: 1.4297\n", + "2023-07-02 20:52:35,250 - modelscope - INFO - epoch [1][1205/4982]\tlr: 8.822e-05, memory: 14281, loss: 1.4805\n", + "2023-07-02 20:52:38,308 - modelscope - INFO - epoch [1][1210/4982]\tlr: 8.812e-05, 
memory: 14281, loss: 1.6289\n", + "2023-07-02 20:52:40,236 - modelscope - INFO - epoch [1][1215/4982]\tlr: 8.803e-05, memory: 14281, loss: 1.6109\n", + "2023-07-02 20:52:42,979 - modelscope - INFO - epoch [1][1220/4982]\tlr: 8.793e-05, memory: 14281, loss: 1.8672\n", + "2023-07-02 20:52:45,670 - modelscope - INFO - epoch [1][1225/4982]\tlr: 8.783e-05, memory: 14281, loss: 1.7875\n", + "2023-07-02 20:52:48,769 - modelscope - INFO - epoch [1][1230/4982]\tlr: 8.773e-05, memory: 14281, loss: 2.9453\n", + "2023-07-02 20:52:51,329 - modelscope - INFO - epoch [1][1235/4982]\tlr: 8.763e-05, memory: 14281, loss: 3.7453\n", + "2023-07-02 20:52:54,457 - modelscope - INFO - epoch [1][1240/4982]\tlr: 8.753e-05, memory: 14281, loss: 1.6602\n", + "2023-07-02 20:52:57,272 - modelscope - INFO - epoch [1][1245/4982]\tlr: 8.743e-05, memory: 14281, loss: 1.9398\n", + "2023-07-02 20:52:59,875 - modelscope - INFO - epoch [1][1250/4982]\tlr: 8.733e-05, memory: 14281, loss: 2.6437\n", + "2023-07-02 20:53:03,234 - modelscope - INFO - epoch [1][1255/4982]\tlr: 8.723e-05, memory: 14281, loss: 1.9438\n", + "2023-07-02 20:53:05,817 - modelscope - INFO - epoch [1][1260/4982]\tlr: 8.713e-05, memory: 14281, loss: 2.0344\n", + "2023-07-02 20:53:07,576 - modelscope - INFO - epoch [1][1265/4982]\tlr: 8.703e-05, memory: 14281, loss: 3.1516\n", + "2023-07-02 20:53:10,222 - modelscope - INFO - epoch [1][1270/4982]\tlr: 8.693e-05, memory: 14281, loss: 1.7117\n", + "2023-07-02 20:53:14,014 - modelscope - INFO - epoch [1][1275/4982]\tlr: 8.683e-05, memory: 14281, loss: 1.1664\n", + "2023-07-02 20:53:16,657 - modelscope - INFO - epoch [1][1280/4982]\tlr: 8.673e-05, memory: 14281, loss: 2.4438\n", + "2023-07-02 20:53:19,474 - modelscope - INFO - epoch [1][1285/4982]\tlr: 8.663e-05, memory: 14281, loss: 1.6219\n", + "2023-07-02 20:53:22,505 - modelscope - INFO - epoch [1][1290/4982]\tlr: 8.652e-05, memory: 14281, loss: 1.4367\n", + "2023-07-02 20:53:25,260 - modelscope - INFO - epoch [1][1295/4982]\tlr: 
8.642e-05, memory: 14281, loss: 2.8367\n", + "2023-07-02 20:53:27,856 - modelscope - INFO - epoch [1][1300/4982]\tlr: 8.632e-05, memory: 14281, loss: 2.7094\n", + "2023-07-02 20:53:30,269 - modelscope - INFO - epoch [1][1305/4982]\tlr: 8.621e-05, memory: 14281, loss: 2.2687\n", + "2023-07-02 20:53:32,850 - modelscope - INFO - epoch [1][1310/4982]\tlr: 8.611e-05, memory: 14281, loss: 1.6922\n", + "2023-07-02 20:53:35,441 - modelscope - INFO - epoch [1][1315/4982]\tlr: 8.601e-05, memory: 14281, loss: 1.6664\n", + "2023-07-02 20:53:38,415 - modelscope - INFO - epoch [1][1320/4982]\tlr: 8.590e-05, memory: 14281, loss: 1.8898\n", + "2023-07-02 20:53:41,871 - modelscope - INFO - epoch [1][1325/4982]\tlr: 8.580e-05, memory: 14281, loss: 1.3605\n", + "2023-07-02 20:53:44,517 - modelscope - INFO - epoch [1][1330/4982]\tlr: 8.569e-05, memory: 14281, loss: 1.8219\n", + "2023-07-02 20:53:46,642 - modelscope - INFO - epoch [1][1335/4982]\tlr: 8.559e-05, memory: 14281, loss: 2.2359\n", + "2023-07-02 20:53:49,682 - modelscope - INFO - epoch [1][1340/4982]\tlr: 8.548e-05, memory: 14281, loss: 1.8867\n", + "2023-07-02 20:53:52,314 - modelscope - INFO - epoch [1][1345/4982]\tlr: 8.538e-05, memory: 14281, loss: 1.0359\n", + "2023-07-02 20:53:53,796 - modelscope - INFO - epoch [1][1350/4982]\tlr: 8.527e-05, memory: 14281, loss: 3.0266\n", + "2023-07-02 20:53:55,582 - modelscope - INFO - epoch [1][1355/4982]\tlr: 8.516e-05, memory: 14281, loss: 3.4328\n", + "2023-07-02 20:53:57,793 - modelscope - INFO - epoch [1][1360/4982]\tlr: 8.506e-05, memory: 14281, loss: 1.6180\n", + "2023-07-02 20:54:00,871 - modelscope - INFO - epoch [1][1365/4982]\tlr: 8.495e-05, memory: 14281, loss: 1.6867\n", + "2023-07-02 20:54:03,738 - modelscope - INFO - epoch [1][1370/4982]\tlr: 8.484e-05, memory: 14281, loss: 1.8242\n", + "2023-07-02 20:54:05,352 - modelscope - INFO - epoch [1][1375/4982]\tlr: 8.474e-05, memory: 14281, loss: 3.2016\n", + "2023-07-02 20:54:08,417 - modelscope - INFO - epoch 
[1][1380/4982]\tlr: 8.463e-05, memory: 14281, loss: 1.9574\n", + "2023-07-02 20:54:11,057 - modelscope - INFO - epoch [1][1385/4982]\tlr: 8.452e-05, memory: 14281, loss: 2.2539\n", + "2023-07-02 20:54:13,691 - modelscope - INFO - epoch [1][1390/4982]\tlr: 8.441e-05, memory: 14281, loss: 1.7277\n", + "2023-07-02 20:54:17,235 - modelscope - INFO - epoch [1][1395/4982]\tlr: 8.430e-05, memory: 14281, loss: 1.1039\n", + "2023-07-02 20:54:18,839 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 20:55:25,409 - modelscope - INFO - Saving checkpoint at 1400 iter\n", + "2023-07-02 20:55:25,440 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter1200_acc0.7125999927520752\n", + "2023-07-02 20:55:25,442 - modelscope - INFO - Saving checkpoint at 1400 iter\n", + "2023-07-02 20:55:25,472 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_1200\n", + "2023-07-02 20:55:25,475 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14281, evaluation/acc: 0.7218, evaluation/loss: 1.9104, loss: 1.8773\n", + "2023-07-02 20:55:28,676 - modelscope - INFO - epoch [1][1405/4982]\tlr: 8.408e-05, memory: 14281, loss: 2.2473\n", + "2023-07-02 20:55:32,047 - modelscope - INFO - epoch [1][1410/4982]\tlr: 8.397e-05, memory: 14281, loss: 1.2844\n", + "2023-07-02 20:55:34,358 - modelscope - INFO - epoch [1][1415/4982]\tlr: 8.386e-05, memory: 14281, loss: 2.6406\n", + "2023-07-02 20:55:37,290 - modelscope - INFO - epoch [1][1420/4982]\tlr: 8.375e-05, memory: 14281, loss: 1.2020\n", + "2023-07-02 20:55:39,572 - modelscope - INFO - epoch [1][1425/4982]\tlr: 8.364e-05, memory: 14281, loss: 2.3109\n", + "2023-07-02 20:55:41,133 - modelscope - INFO - epoch [1][1430/4982]\tlr: 8.353e-05, memory: 14281, loss: 3.6844\n", + "2023-07-02 20:55:44,293 - modelscope 
- INFO - epoch [1][1435/4982]\tlr: 8.342e-05, memory: 14281, loss: 1.2117\n", + "2023-07-02 20:55:47,573 - modelscope - INFO - epoch [1][1440/4982]\tlr: 8.331e-05, memory: 14281, loss: 1.3582\n", + "2023-07-02 20:55:49,943 - modelscope - INFO - epoch [1][1445/4982]\tlr: 8.320e-05, memory: 14281, loss: 1.8289\n", + "2023-07-02 20:55:52,281 - modelscope - INFO - epoch [1][1450/4982]\tlr: 8.309e-05, memory: 14281, loss: 1.6055\n", + "2023-07-02 20:55:55,483 - modelscope - INFO - epoch [1][1455/4982]\tlr: 8.297e-05, memory: 14281, loss: 0.7688\n", + "2023-07-02 20:55:57,759 - modelscope - INFO - epoch [1][1460/4982]\tlr: 8.286e-05, memory: 14281, loss: 2.2945\n", + "2023-07-02 20:56:00,237 - modelscope - INFO - epoch [1][1465/4982]\tlr: 8.275e-05, memory: 14281, loss: 1.8000\n", + "2023-07-02 20:56:03,402 - modelscope - INFO - epoch [1][1470/4982]\tlr: 8.264e-05, memory: 14281, loss: 1.0266\n", + "2023-07-02 20:56:04,994 - modelscope - INFO - epoch [1][1475/4982]\tlr: 8.252e-05, memory: 14281, loss: 2.0094\n", + "2023-07-02 20:56:06,787 - modelscope - INFO - epoch [1][1480/4982]\tlr: 8.241e-05, memory: 14281, loss: 1.9977\n", + "2023-07-02 20:56:09,900 - modelscope - INFO - epoch [1][1485/4982]\tlr: 8.230e-05, memory: 14281, loss: 2.0945\n", + "2023-07-02 20:56:12,226 - modelscope - INFO - epoch [1][1490/4982]\tlr: 8.218e-05, memory: 14281, loss: 2.9172\n", + "2023-07-02 20:56:14,763 - modelscope - INFO - epoch [1][1495/4982]\tlr: 8.207e-05, memory: 14281, loss: 1.8367\n", + "2023-07-02 20:56:17,535 - modelscope - INFO - epoch [1][1500/4982]\tlr: 8.195e-05, memory: 14281, loss: 1.4617\n", + "2023-07-02 20:56:19,733 - modelscope - INFO - epoch [1][1505/4982]\tlr: 8.184e-05, memory: 14281, loss: 1.9328\n", + "2023-07-02 20:56:22,653 - modelscope - INFO - epoch [1][1510/4982]\tlr: 8.172e-05, memory: 14281, loss: 1.5078\n", + "2023-07-02 20:56:26,133 - modelscope - INFO - epoch [1][1515/4982]\tlr: 8.161e-05, memory: 14281, loss: 2.1977\n", + "2023-07-02 20:56:28,551 - 
modelscope - INFO - epoch [1][1520/4982]\tlr: 8.149e-05, memory: 14281, loss: 2.2246\n", + "2023-07-02 20:56:31,182 - modelscope - INFO - epoch [1][1525/4982]\tlr: 8.138e-05, memory: 14281, loss: 1.9840\n", + "2023-07-02 20:56:33,710 - modelscope - INFO - epoch [1][1530/4982]\tlr: 8.126e-05, memory: 14281, loss: 1.5406\n", + "2023-07-02 20:56:36,337 - modelscope - INFO - epoch [1][1535/4982]\tlr: 8.114e-05, memory: 14281, loss: 1.9930\n", + "2023-07-02 20:56:39,530 - modelscope - INFO - epoch [1][1540/4982]\tlr: 8.103e-05, memory: 14281, loss: 1.8547\n", + "2023-07-02 20:56:42,288 - modelscope - INFO - epoch [1][1545/4982]\tlr: 8.091e-05, memory: 14281, loss: 1.2977\n", + "2023-07-02 20:56:44,838 - modelscope - INFO - epoch [1][1550/4982]\tlr: 8.079e-05, memory: 14281, loss: 1.9984\n", + "2023-07-02 20:56:46,590 - modelscope - INFO - epoch [1][1555/4982]\tlr: 8.068e-05, memory: 14281, loss: 3.7969\n", + "2023-07-02 20:56:49,311 - modelscope - INFO - epoch [1][1560/4982]\tlr: 8.056e-05, memory: 14281, loss: 3.0336\n", + "2023-07-02 20:56:52,158 - modelscope - INFO - epoch [1][1565/4982]\tlr: 8.044e-05, memory: 14281, loss: 1.2789\n", + "2023-07-02 20:56:54,583 - modelscope - INFO - epoch [1][1570/4982]\tlr: 8.032e-05, memory: 14281, loss: 2.0461\n", + "2023-07-02 20:56:57,318 - modelscope - INFO - epoch [1][1575/4982]\tlr: 8.020e-05, memory: 14281, loss: 1.3301\n", + "2023-07-02 20:57:00,187 - modelscope - INFO - epoch [1][1580/4982]\tlr: 8.008e-05, memory: 14281, loss: 1.4945\n", + "2023-07-02 20:57:02,809 - modelscope - INFO - epoch [1][1585/4982]\tlr: 7.997e-05, memory: 14281, loss: 1.7984\n", + "2023-07-02 20:57:05,103 - modelscope - INFO - epoch [1][1590/4982]\tlr: 7.985e-05, memory: 14281, loss: 2.2133\n", + "2023-07-02 20:57:07,880 - modelscope - INFO - epoch [1][1595/4982]\tlr: 7.973e-05, memory: 14281, loss: 1.4664\n", + "2023-07-02 20:57:10,754 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test 
samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 20:58:17,336 - modelscope - INFO - Saving checkpoint at 1600 iter\n", + "2023-07-02 20:58:17,364 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter1400_acc0.7218371033668518\n", + "2023-07-02 20:58:17,366 - modelscope - INFO - Saving checkpoint at 1600 iter\n", + "2023-07-02 20:58:17,392 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_1400\n", + "2023-07-02 20:58:17,395 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14281, evaluation/acc: 0.7349, evaluation/loss: 1.8596, loss: 0.7406\n", + "2023-07-02 20:58:19,762 - modelscope - INFO - epoch [1][1605/4982]\tlr: 7.949e-05, memory: 14281, loss: 2.4625\n", + "2023-07-02 20:58:22,187 - modelscope - INFO - epoch [1][1610/4982]\tlr: 7.937e-05, memory: 14281, loss: 2.0211\n", + "2023-07-02 20:58:24,593 - modelscope - INFO - epoch [1][1615/4982]\tlr: 7.925e-05, memory: 14281, loss: 1.8141\n", + "2023-07-02 20:58:26,348 - modelscope - INFO - epoch [1][1620/4982]\tlr: 7.913e-05, memory: 14281, loss: 2.8254\n", + "2023-07-02 20:58:28,996 - modelscope - INFO - epoch [1][1625/4982]\tlr: 7.900e-05, memory: 14281, loss: 1.3973\n", + "2023-07-02 20:58:31,382 - modelscope - INFO - epoch [1][1630/4982]\tlr: 7.888e-05, memory: 14281, loss: 2.4805\n", + "2023-07-02 20:58:34,123 - modelscope - INFO - epoch [1][1635/4982]\tlr: 7.876e-05, memory: 14281, loss: 1.2414\n", + "2023-07-02 20:58:37,249 - modelscope - INFO - epoch [1][1640/4982]\tlr: 7.864e-05, memory: 14281, loss: 1.7254\n", + "2023-07-02 20:58:40,060 - modelscope - INFO - epoch [1][1645/4982]\tlr: 7.852e-05, memory: 14281, loss: 2.1672\n", + "2023-07-02 20:58:42,200 - modelscope - INFO - epoch [1][1650/4982]\tlr: 7.840e-05, memory: 14281, loss: 2.4047\n", + "2023-07-02 20:58:44,560 - modelscope - INFO - epoch [1][1655/4982]\tlr: 7.827e-05, memory: 14281, loss: 1.7063\n", + 
"2023-07-02 20:58:47,535 - modelscope - INFO - epoch [1][1660/4982]\tlr: 7.815e-05, memory: 14281, loss: 1.3406\n", + "2023-07-02 20:58:50,161 - modelscope - INFO - epoch [1][1665/4982]\tlr: 7.803e-05, memory: 14281, loss: 2.4453\n", + "2023-07-02 20:58:52,380 - modelscope - INFO - epoch [1][1670/4982]\tlr: 7.791e-05, memory: 14281, loss: 1.7500\n", + "2023-07-02 20:58:54,351 - modelscope - INFO - epoch [1][1675/4982]\tlr: 7.778e-05, memory: 14281, loss: 2.8453\n", + "2023-07-02 20:58:55,966 - modelscope - INFO - epoch [1][1680/4982]\tlr: 7.766e-05, memory: 14281, loss: 1.8719\n", + "2023-07-02 20:58:58,457 - modelscope - INFO - epoch [1][1685/4982]\tlr: 7.754e-05, memory: 14281, loss: 2.1156\n", + "2023-07-02 20:59:01,212 - modelscope - INFO - epoch [1][1690/4982]\tlr: 7.741e-05, memory: 14281, loss: 1.7188\n", + "2023-07-02 20:59:04,057 - modelscope - INFO - epoch [1][1695/4982]\tlr: 7.729e-05, memory: 14281, loss: 2.5672\n", + "2023-07-02 20:59:07,177 - modelscope - INFO - epoch [1][1700/4982]\tlr: 7.716e-05, memory: 14281, loss: 1.0508\n", + "2023-07-02 20:59:09,355 - modelscope - INFO - epoch [1][1705/4982]\tlr: 7.704e-05, memory: 14281, loss: 1.8687\n", + "2023-07-02 20:59:11,209 - modelscope - INFO - epoch [1][1710/4982]\tlr: 7.691e-05, memory: 14281, loss: 2.7281\n", + "2023-07-02 20:59:14,101 - modelscope - INFO - epoch [1][1715/4982]\tlr: 7.679e-05, memory: 14281, loss: 1.0727\n", + "2023-07-02 20:59:16,660 - modelscope - INFO - epoch [1][1720/4982]\tlr: 7.666e-05, memory: 14281, loss: 1.6773\n", + "2023-07-02 20:59:18,798 - modelscope - INFO - epoch [1][1725/4982]\tlr: 7.654e-05, memory: 14281, loss: 2.3687\n", + "2023-07-02 20:59:20,724 - modelscope - INFO - epoch [1][1730/4982]\tlr: 7.641e-05, memory: 14281, loss: 1.9219\n", + "2023-07-02 20:59:23,591 - modelscope - INFO - epoch [1][1735/4982]\tlr: 7.629e-05, memory: 14281, loss: 1.5344\n", + "2023-07-02 20:59:27,214 - modelscope - INFO - epoch [1][1740/4982]\tlr: 7.616e-05, memory: 14281, loss: 
0.5793\n", + "2023-07-02 20:59:29,708 - modelscope - INFO - epoch [1][1745/4982]\tlr: 7.603e-05, memory: 14281, loss: 1.4609\n", + "2023-07-02 20:59:32,082 - modelscope - INFO - epoch [1][1750/4982]\tlr: 7.591e-05, memory: 14281, loss: 1.0852\n", + "2023-07-02 20:59:34,683 - modelscope - INFO - epoch [1][1755/4982]\tlr: 7.578e-05, memory: 14281, loss: 1.5297\n", + "2023-07-02 20:59:36,962 - modelscope - INFO - epoch [1][1760/4982]\tlr: 7.565e-05, memory: 14281, loss: 2.9937\n", + "2023-07-02 20:59:39,715 - modelscope - INFO - epoch [1][1765/4982]\tlr: 7.553e-05, memory: 14281, loss: 2.1242\n", + "2023-07-02 20:59:42,455 - modelscope - INFO - epoch [1][1770/4982]\tlr: 7.540e-05, memory: 14281, loss: 2.3789\n", + "2023-07-02 20:59:45,020 - modelscope - INFO - epoch [1][1775/4982]\tlr: 7.527e-05, memory: 14281, loss: 1.8289\n", + "2023-07-02 20:59:46,865 - modelscope - INFO - epoch [1][1780/4982]\tlr: 7.515e-05, memory: 14281, loss: 2.0219\n", + "2023-07-02 20:59:50,367 - modelscope - INFO - epoch [1][1785/4982]\tlr: 7.502e-05, memory: 14281, loss: 2.6187\n", + "2023-07-02 20:59:52,626 - modelscope - INFO - epoch [1][1790/4982]\tlr: 7.489e-05, memory: 14281, loss: 2.3051\n", + "2023-07-02 20:59:54,711 - modelscope - INFO - epoch [1][1795/4982]\tlr: 7.476e-05, memory: 14281, loss: 2.3953\n", + "2023-07-02 20:59:56,419 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 21:01:03,053 - modelscope - INFO - Saving checkpoint at 1800 iter\n", + "2023-07-02 21:01:03,080 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter1600_acc0.7349275350570679\n", + "2023-07-02 21:01:03,082 - modelscope - INFO - Saving checkpoint at 1800 iter\n", + "2023-07-02 21:01:03,106 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_1600\n", + 
"2023-07-02 21:01:03,109 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14281, evaluation/acc: 0.7401, evaluation/loss: 1.8176, loss: 2.8625\n", + "2023-07-02 21:01:05,753 - modelscope - INFO - epoch [1][1805/4982]\tlr: 7.450e-05, memory: 14281, loss: 1.8352\n", + "2023-07-02 21:01:08,030 - modelscope - INFO - epoch [1][1810/4982]\tlr: 7.438e-05, memory: 14281, loss: 2.1453\n", + "2023-07-02 21:01:10,702 - modelscope - INFO - epoch [1][1815/4982]\tlr: 7.425e-05, memory: 14281, loss: 1.6281\n", + "2023-07-02 21:01:13,348 - modelscope - INFO - epoch [1][1820/4982]\tlr: 7.412e-05, memory: 14281, loss: 2.3008\n", + "2023-07-02 21:01:16,272 - modelscope - INFO - epoch [1][1825/4982]\tlr: 7.399e-05, memory: 14281, loss: 2.2414\n", + "2023-07-02 21:01:19,067 - modelscope - INFO - epoch [1][1830/4982]\tlr: 7.386e-05, memory: 14281, loss: 2.8672\n", + "2023-07-02 21:01:21,555 - modelscope - INFO - epoch [1][1835/4982]\tlr: 7.373e-05, memory: 14281, loss: 2.3172\n", + "2023-07-02 21:01:24,755 - modelscope - INFO - epoch [1][1840/4982]\tlr: 7.360e-05, memory: 14281, loss: 0.9746\n", + "2023-07-02 21:01:27,186 - modelscope - INFO - epoch [1][1845/4982]\tlr: 7.347e-05, memory: 14281, loss: 1.4992\n", + "2023-07-02 21:01:30,804 - modelscope - INFO - epoch [1][1850/4982]\tlr: 7.334e-05, memory: 14281, loss: 2.0031\n", + "2023-07-02 21:01:34,075 - modelscope - INFO - epoch [1][1855/4982]\tlr: 7.321e-05, memory: 14281, loss: 1.3766\n", + "2023-07-02 21:01:36,465 - modelscope - INFO - epoch [1][1860/4982]\tlr: 7.308e-05, memory: 14281, loss: 2.3203\n", + "2023-07-02 21:01:39,721 - modelscope - INFO - epoch [1][1865/4982]\tlr: 7.295e-05, memory: 14281, loss: 2.5617\n", + "2023-07-02 21:01:43,444 - modelscope - INFO - epoch [1][1870/4982]\tlr: 7.281e-05, memory: 14281, loss: 0.8551\n", + "2023-07-02 21:01:46,641 - modelscope - INFO - epoch [1][1875/4982]\tlr: 7.268e-05, memory: 14281, loss: 2.1117\n", + "2023-07-02 21:01:49,075 - modelscope - INFO - epoch [1][1880/4982]\tlr: 
7.255e-05, memory: 14281, loss: 1.9414\n", + "2023-07-02 21:01:51,733 - modelscope - INFO - epoch [1][1885/4982]\tlr: 7.242e-05, memory: 14281, loss: 1.3805\n", + "2023-07-02 21:01:54,863 - modelscope - INFO - epoch [1][1890/4982]\tlr: 7.229e-05, memory: 14281, loss: 2.0562\n", + "2023-07-02 21:01:56,818 - modelscope - INFO - epoch [1][1895/4982]\tlr: 7.216e-05, memory: 14281, loss: 2.2391\n", + "2023-07-02 21:01:59,267 - modelscope - INFO - epoch [1][1900/4982]\tlr: 7.202e-05, memory: 14281, loss: 2.3027\n", + "2023-07-02 21:02:01,900 - modelscope - INFO - epoch [1][1905/4982]\tlr: 7.189e-05, memory: 14281, loss: 1.8711\n", + "2023-07-02 21:02:05,392 - modelscope - INFO - epoch [1][1910/4982]\tlr: 7.176e-05, memory: 14281, loss: 1.0352\n", + "2023-07-02 21:02:07,808 - modelscope - INFO - epoch [1][1915/4982]\tlr: 7.163e-05, memory: 14281, loss: 1.9133\n", + "2023-07-02 21:02:10,597 - modelscope - INFO - epoch [1][1920/4982]\tlr: 7.149e-05, memory: 14281, loss: 1.5922\n", + "2023-07-02 21:02:13,358 - modelscope - INFO - epoch [1][1925/4982]\tlr: 7.136e-05, memory: 14281, loss: 2.3203\n", + "2023-07-02 21:02:15,288 - modelscope - INFO - epoch [1][1930/4982]\tlr: 7.123e-05, memory: 14281, loss: 1.5707\n", + "2023-07-02 21:02:17,292 - modelscope - INFO - epoch [1][1935/4982]\tlr: 7.110e-05, memory: 14281, loss: 2.6484\n", + "2023-07-02 21:02:20,830 - modelscope - INFO - epoch [1][1940/4982]\tlr: 7.096e-05, memory: 14281, loss: 0.7172\n", + "2023-07-02 21:02:22,944 - modelscope - INFO - epoch [1][1945/4982]\tlr: 7.083e-05, memory: 14281, loss: 2.1992\n", + "2023-07-02 21:02:25,967 - modelscope - INFO - epoch [1][1950/4982]\tlr: 7.069e-05, memory: 14281, loss: 1.1105\n", + "2023-07-02 21:02:28,446 - modelscope - INFO - epoch [1][1955/4982]\tlr: 7.056e-05, memory: 14281, loss: 1.2781\n", + "2023-07-02 21:02:31,222 - modelscope - INFO - epoch [1][1960/4982]\tlr: 7.043e-05, memory: 14281, loss: 2.7156\n", + "2023-07-02 21:02:33,689 - modelscope - INFO - epoch 
[1][1965/4982]\tlr: 7.029e-05, memory: 14281, loss: 2.1977\n", + "2023-07-02 21:02:36,277 - modelscope - INFO - epoch [1][1970/4982]\tlr: 7.016e-05, memory: 14281, loss: 1.8652\n", + "2023-07-02 21:02:39,628 - modelscope - INFO - epoch [1][1975/4982]\tlr: 7.002e-05, memory: 14281, loss: 0.9414\n", + "2023-07-02 21:02:41,404 - modelscope - INFO - epoch [1][1980/4982]\tlr: 6.989e-05, memory: 14281, loss: 2.2672\n", + "2023-07-02 21:02:44,260 - modelscope - INFO - epoch [1][1985/4982]\tlr: 6.975e-05, memory: 14281, loss: 2.0039\n", + "2023-07-02 21:02:46,214 - modelscope - INFO - epoch [1][1990/4982]\tlr: 6.962e-05, memory: 14281, loss: 2.1391\n", + "2023-07-02 21:02:48,596 - modelscope - INFO - epoch [1][1995/4982]\tlr: 6.948e-05, memory: 14281, loss: 2.2766\n", + "2023-07-02 21:02:51,578 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.24it/s]\n", + "2023-07-02 21:03:57,832 - modelscope - INFO - Saving checkpoint at 2000 iter\n", + "2023-07-02 21:03:57,857 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter1800_acc0.7400715351104736\n", + "2023-07-02 21:03:57,860 - modelscope - INFO - Saving checkpoint at 2000 iter\n", + "2023-07-02 21:03:57,883 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_1800\n", + "2023-07-02 21:03:57,885 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14281, evaluation/acc: 0.7442, evaluation/loss: 1.7936, loss: 1.5309\n", + "2023-07-02 21:04:00,725 - modelscope - INFO - epoch [1][2005/4982]\tlr: 6.921e-05, memory: 14281, loss: 1.2211\n", + "2023-07-02 21:04:02,917 - modelscope - INFO - epoch [1][2010/4982]\tlr: 6.908e-05, memory: 14281, loss: 2.4078\n", + "2023-07-02 21:04:05,194 - modelscope - INFO - epoch [1][2015/4982]\tlr: 6.894e-05, memory: 14281, loss: 2.0891\n", + "2023-07-02 21:04:06,825 - modelscope 
- INFO - epoch [1][2020/4982]\tlr: 6.881e-05, memory: 14281, loss: 2.4773\n", + "2023-07-02 21:04:09,109 - modelscope - INFO - epoch [1][2025/4982]\tlr: 6.867e-05, memory: 14281, loss: 1.7293\n", + "2023-07-02 21:04:12,824 - modelscope - INFO - epoch [1][2030/4982]\tlr: 6.854e-05, memory: 14281, loss: 0.9602\n", + "2023-07-02 21:04:15,460 - modelscope - INFO - epoch [1][2035/4982]\tlr: 6.840e-05, memory: 14281, loss: 1.4973\n", + "2023-07-02 21:04:18,540 - modelscope - INFO - epoch [1][2040/4982]\tlr: 6.826e-05, memory: 14281, loss: 2.0359\n", + "2023-07-02 21:04:21,265 - modelscope - INFO - epoch [1][2045/4982]\tlr: 6.813e-05, memory: 14281, loss: 1.5586\n", + "2023-07-02 21:04:24,566 - modelscope - INFO - epoch [1][2050/4982]\tlr: 6.799e-05, memory: 14281, loss: 1.3984\n", + "2023-07-02 21:04:27,716 - modelscope - INFO - epoch [1][2055/4982]\tlr: 6.785e-05, memory: 14281, loss: 1.6156\n", + "2023-07-02 21:04:29,775 - modelscope - INFO - epoch [1][2060/4982]\tlr: 6.772e-05, memory: 14281, loss: 2.4398\n", + "2023-07-02 21:04:33,407 - modelscope - INFO - epoch [1][2065/4982]\tlr: 6.758e-05, memory: 14281, loss: 1.2191\n", + "2023-07-02 21:04:35,873 - modelscope - INFO - epoch [1][2070/4982]\tlr: 6.744e-05, memory: 14281, loss: 1.5117\n", + "2023-07-02 21:04:38,406 - modelscope - INFO - epoch [1][2075/4982]\tlr: 6.731e-05, memory: 14281, loss: 1.5688\n", + "2023-07-02 21:04:40,452 - modelscope - INFO - epoch [1][2080/4982]\tlr: 6.717e-05, memory: 14281, loss: 1.3535\n", + "2023-07-02 21:04:42,464 - modelscope - INFO - epoch [1][2085/4982]\tlr: 6.703e-05, memory: 14281, loss: 3.2313\n", + "2023-07-02 21:04:44,395 - modelscope - INFO - epoch [1][2090/4982]\tlr: 6.689e-05, memory: 14281, loss: 1.8109\n", + "2023-07-02 21:04:47,097 - modelscope - INFO - epoch [1][2095/4982]\tlr: 6.676e-05, memory: 14281, loss: 2.6109\n", + "2023-07-02 21:04:50,488 - modelscope - INFO - epoch [1][2100/4982]\tlr: 6.662e-05, memory: 14281, loss: 2.3133\n", + "2023-07-02 21:04:53,478 - 
modelscope - INFO - epoch [1][2105/4982]\tlr: 6.648e-05, memory: 14281, loss: 1.5336\n", + "2023-07-02 21:04:56,669 - modelscope - INFO - epoch [1][2110/4982]\tlr: 6.634e-05, memory: 14281, loss: 1.8234\n", + "2023-07-02 21:05:00,502 - modelscope - INFO - epoch [1][2115/4982]\tlr: 6.620e-05, memory: 14329, loss: 3.0766\n", + "2023-07-02 21:05:02,541 - modelscope - INFO - epoch [1][2120/4982]\tlr: 6.607e-05, memory: 14329, loss: 1.3789\n", + "2023-07-02 21:05:05,161 - modelscope - INFO - epoch [1][2125/4982]\tlr: 6.593e-05, memory: 14329, loss: 1.5391\n", + "2023-07-02 21:05:07,009 - modelscope - INFO - epoch [1][2130/4982]\tlr: 6.579e-05, memory: 14329, loss: 2.6172\n", + "2023-07-02 21:05:10,521 - modelscope - INFO - epoch [1][2135/4982]\tlr: 6.565e-05, memory: 14329, loss: 1.7750\n", + "2023-07-02 21:05:13,068 - modelscope - INFO - epoch [1][2140/4982]\tlr: 6.551e-05, memory: 14329, loss: 2.1238\n", + "2023-07-02 21:05:15,637 - modelscope - INFO - epoch [1][2145/4982]\tlr: 6.537e-05, memory: 14329, loss: 2.5039\n", + "2023-07-02 21:05:18,628 - modelscope - INFO - epoch [1][2150/4982]\tlr: 6.523e-05, memory: 14329, loss: 1.6203\n", + "2023-07-02 21:05:21,523 - modelscope - INFO - epoch [1][2155/4982]\tlr: 6.510e-05, memory: 14329, loss: 0.9555\n", + "2023-07-02 21:05:24,213 - modelscope - INFO - epoch [1][2160/4982]\tlr: 6.496e-05, memory: 14329, loss: 2.1133\n", + "2023-07-02 21:05:27,402 - modelscope - INFO - epoch [1][2165/4982]\tlr: 6.482e-05, memory: 14329, loss: 1.1963\n", + "2023-07-02 21:05:29,840 - modelscope - INFO - epoch [1][2170/4982]\tlr: 6.468e-05, memory: 14329, loss: 1.3637\n", + "2023-07-02 21:05:32,853 - modelscope - INFO - epoch [1][2175/4982]\tlr: 6.454e-05, memory: 14329, loss: 1.7201\n", + "2023-07-02 21:05:35,628 - modelscope - INFO - epoch [1][2180/4982]\tlr: 6.440e-05, memory: 14329, loss: 2.0109\n", + "2023-07-02 21:05:38,589 - modelscope - INFO - epoch [1][2185/4982]\tlr: 6.426e-05, memory: 14329, loss: 1.2418\n", + "2023-07-02 
21:05:40,918 - modelscope - INFO - epoch [1][2190/4982]\tlr: 6.412e-05, memory: 14329, loss: 2.0758\n", + "2023-07-02 21:05:43,421 - modelscope - INFO - epoch [1][2195/4982]\tlr: 6.398e-05, memory: 14329, loss: 1.7094\n", + "2023-07-02 21:05:46,523 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.21it/s]\n", + "2023-07-02 21:06:53,212 - modelscope - INFO - Saving checkpoint at 2200 iter\n", + "2023-07-02 21:06:53,240 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter2000_acc0.7442383766174316\n", + "2023-07-02 21:06:53,243 - modelscope - INFO - Saving checkpoint at 2200 iter\n", + "2023-07-02 21:06:53,269 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_2000\n", + "2023-07-02 21:06:53,272 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14329, evaluation/acc: 0.7494, evaluation/loss: 1.7767, loss: 2.1570\n", + "2023-07-02 21:06:55,998 - modelscope - INFO - epoch [1][2205/4982]\tlr: 6.370e-05, memory: 14329, loss: 1.3469\n", + "2023-07-02 21:06:59,535 - modelscope - INFO - epoch [1][2210/4982]\tlr: 6.356e-05, memory: 14329, loss: 1.3730\n", + "2023-07-02 21:07:01,992 - modelscope - INFO - epoch [1][2215/4982]\tlr: 6.342e-05, memory: 14329, loss: 2.2066\n", + "2023-07-02 21:07:04,789 - modelscope - INFO - epoch [1][2220/4982]\tlr: 6.328e-05, memory: 14329, loss: 1.7098\n", + "2023-07-02 21:07:07,714 - modelscope - INFO - epoch [1][2225/4982]\tlr: 6.314e-05, memory: 14329, loss: 2.0953\n", + "2023-07-02 21:07:09,812 - modelscope - INFO - epoch [1][2230/4982]\tlr: 6.300e-05, memory: 14329, loss: 2.3914\n", + "2023-07-02 21:07:12,315 - modelscope - INFO - epoch [1][2235/4982]\tlr: 6.286e-05, memory: 14329, loss: 2.6797\n", + "2023-07-02 21:07:15,918 - modelscope - INFO - epoch [1][2240/4982]\tlr: 6.272e-05, memory: 14329, loss: 1.3217\n", 
+ "2023-07-02 21:07:19,044 - modelscope - INFO - epoch [1][2245/4982]\tlr: 6.258e-05, memory: 14329, loss: 1.4527\n", + "2023-07-02 21:07:21,636 - modelscope - INFO - epoch [1][2250/4982]\tlr: 6.244e-05, memory: 14329, loss: 2.1770\n", + "2023-07-02 21:07:23,761 - modelscope - INFO - epoch [1][2255/4982]\tlr: 6.230e-05, memory: 14329, loss: 1.8191\n", + "2023-07-02 21:07:25,994 - modelscope - INFO - epoch [1][2260/4982]\tlr: 6.216e-05, memory: 14329, loss: 1.3582\n", + "2023-07-02 21:07:28,770 - modelscope - INFO - epoch [1][2265/4982]\tlr: 6.202e-05, memory: 14329, loss: 1.0121\n", + "2023-07-02 21:07:32,193 - modelscope - INFO - epoch [1][2270/4982]\tlr: 6.188e-05, memory: 14329, loss: 1.0039\n", + "2023-07-02 21:07:34,881 - modelscope - INFO - epoch [1][2275/4982]\tlr: 6.174e-05, memory: 14329, loss: 1.2828\n", + "2023-07-02 21:07:37,688 - modelscope - INFO - epoch [1][2280/4982]\tlr: 6.159e-05, memory: 14329, loss: 1.4516\n", + "2023-07-02 21:07:40,006 - modelscope - INFO - epoch [1][2285/4982]\tlr: 6.145e-05, memory: 14329, loss: 1.5963\n", + "2023-07-02 21:07:42,993 - modelscope - INFO - epoch [1][2290/4982]\tlr: 6.131e-05, memory: 14329, loss: 2.7687\n", + "2023-07-02 21:07:46,133 - modelscope - INFO - epoch [1][2295/4982]\tlr: 6.117e-05, memory: 14329, loss: 1.5977\n", + "2023-07-02 21:07:47,508 - modelscope - INFO - epoch [1][2300/4982]\tlr: 6.103e-05, memory: 14329, loss: 2.5945\n", + "2023-07-02 21:07:50,902 - modelscope - INFO - epoch [1][2305/4982]\tlr: 6.089e-05, memory: 14329, loss: 1.2125\n", + "2023-07-02 21:07:53,059 - modelscope - INFO - epoch [1][2310/4982]\tlr: 6.075e-05, memory: 14329, loss: 2.2883\n", + "2023-07-02 21:07:56,237 - modelscope - INFO - epoch [1][2315/4982]\tlr: 6.061e-05, memory: 14329, loss: 0.8787\n", + "2023-07-02 21:07:59,345 - modelscope - INFO - epoch [1][2320/4982]\tlr: 6.046e-05, memory: 14329, loss: 2.6320\n", + "2023-07-02 21:08:02,587 - modelscope - INFO - epoch [1][2325/4982]\tlr: 6.032e-05, memory: 14329, loss: 
1.4213\n", + "2023-07-02 21:08:04,652 - modelscope - INFO - epoch [1][2330/4982]\tlr: 6.018e-05, memory: 14329, loss: 2.7547\n", + "2023-07-02 21:08:07,208 - modelscope - INFO - epoch [1][2335/4982]\tlr: 6.004e-05, memory: 14329, loss: 2.1891\n", + "2023-07-02 21:08:09,836 - modelscope - INFO - epoch [1][2340/4982]\tlr: 5.990e-05, memory: 14329, loss: 1.9711\n", + "2023-07-02 21:08:12,642 - modelscope - INFO - epoch [1][2345/4982]\tlr: 5.976e-05, memory: 14329, loss: 1.2281\n", + "2023-07-02 21:08:15,772 - modelscope - INFO - epoch [1][2350/4982]\tlr: 5.961e-05, memory: 14329, loss: 1.1650\n", + "2023-07-02 21:08:18,568 - modelscope - INFO - epoch [1][2355/4982]\tlr: 5.947e-05, memory: 14329, loss: 1.0545\n", + "2023-07-02 21:08:21,580 - modelscope - INFO - epoch [1][2360/4982]\tlr: 5.933e-05, memory: 14329, loss: 2.3699\n", + "2023-07-02 21:08:24,345 - modelscope - INFO - epoch [1][2365/4982]\tlr: 5.919e-05, memory: 14329, loss: 1.7188\n", + "2023-07-02 21:08:27,132 - modelscope - INFO - epoch [1][2370/4982]\tlr: 5.905e-05, memory: 14329, loss: 0.8174\n", + "2023-07-02 21:08:28,995 - modelscope - INFO - epoch [1][2375/4982]\tlr: 5.891e-05, memory: 14329, loss: 2.0500\n", + "2023-07-02 21:08:32,221 - modelscope - INFO - epoch [1][2380/4982]\tlr: 5.876e-05, memory: 14329, loss: 0.8354\n", + "2023-07-02 21:08:34,747 - modelscope - INFO - epoch [1][2385/4982]\tlr: 5.862e-05, memory: 14329, loss: 1.3457\n", + "2023-07-02 21:08:38,256 - modelscope - INFO - epoch [1][2390/4982]\tlr: 5.848e-05, memory: 14329, loss: 1.9180\n", + "2023-07-02 21:08:40,701 - modelscope - INFO - epoch [1][2395/4982]\tlr: 5.834e-05, memory: 14329, loss: 1.1666\n", + "2023-07-02 21:08:43,933 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:09:50,373 - modelscope - INFO - Saving checkpoint at 2400 iter\n", + "2023-07-02 21:09:50,402 - modelscope - INFO - 
deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter2200_acc0.749400794506073\n", + "2023-07-02 21:09:50,404 - modelscope - INFO - Saving checkpoint at 2400 iter\n", + "2023-07-02 21:09:50,432 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_2200\n", + "2023-07-02 21:09:50,435 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14329, evaluation/acc: 0.7535, evaluation/loss: 1.7703, loss: 1.5938\n", + "2023-07-02 21:09:53,136 - modelscope - INFO - epoch [1][2405/4982]\tlr: 5.805e-05, memory: 14329, loss: 3.0355\n", + "2023-07-02 21:09:55,673 - modelscope - INFO - epoch [1][2410/4982]\tlr: 5.791e-05, memory: 14329, loss: 1.9070\n", + "2023-07-02 21:09:58,239 - modelscope - INFO - epoch [1][2415/4982]\tlr: 5.777e-05, memory: 14329, loss: 1.1090\n", + "2023-07-02 21:10:00,413 - modelscope - INFO - epoch [1][2420/4982]\tlr: 5.763e-05, memory: 14329, loss: 1.3535\n", + "2023-07-02 21:10:02,887 - modelscope - INFO - epoch [1][2425/4982]\tlr: 5.748e-05, memory: 14329, loss: 1.4563\n", + "2023-07-02 21:10:05,462 - modelscope - INFO - epoch [1][2430/4982]\tlr: 5.734e-05, memory: 14329, loss: 2.2436\n", + "2023-07-02 21:10:08,549 - modelscope - INFO - epoch [1][2435/4982]\tlr: 5.720e-05, memory: 14329, loss: 1.8266\n", + "2023-07-02 21:10:11,226 - modelscope - INFO - epoch [1][2440/4982]\tlr: 5.706e-05, memory: 14329, loss: 1.8402\n", + "2023-07-02 21:10:13,579 - modelscope - INFO - epoch [1][2445/4982]\tlr: 5.691e-05, memory: 14329, loss: 2.0742\n", + "2023-07-02 21:10:15,828 - modelscope - INFO - epoch [1][2450/4982]\tlr: 5.677e-05, memory: 14329, loss: 1.5211\n", + "2023-07-02 21:10:18,658 - modelscope - INFO - epoch [1][2455/4982]\tlr: 5.663e-05, memory: 14329, loss: 0.9520\n", + "2023-07-02 21:10:21,705 - modelscope - INFO - epoch [1][2460/4982]\tlr: 5.649e-05, memory: 14329, loss: 1.4098\n", + "2023-07-02 21:10:24,494 - modelscope - INFO - epoch [1][2465/4982]\tlr: 
5.635e-05, memory: 14329, loss: 1.5748\n", + "2023-07-02 21:10:27,349 - modelscope - INFO - epoch [1][2470/4982]\tlr: 5.620e-05, memory: 14329, loss: 2.5328\n", + "2023-07-02 21:10:29,516 - modelscope - INFO - epoch [1][2475/4982]\tlr: 5.606e-05, memory: 14329, loss: 1.2904\n", + "2023-07-02 21:10:32,690 - modelscope - INFO - epoch [1][2480/4982]\tlr: 5.592e-05, memory: 14329, loss: 0.5270\n", + "2023-07-02 21:10:35,469 - modelscope - INFO - epoch [1][2485/4982]\tlr: 5.578e-05, memory: 14329, loss: 0.9842\n", + "2023-07-02 21:10:37,617 - modelscope - INFO - epoch [1][2490/4982]\tlr: 5.563e-05, memory: 14329, loss: 2.4695\n", + "2023-07-02 21:10:40,562 - modelscope - INFO - epoch [1][2495/4982]\tlr: 5.549e-05, memory: 14329, loss: 1.2441\n", + "2023-07-02 21:10:42,074 - modelscope - INFO - epoch [1][2500/4982]\tlr: 5.535e-05, memory: 14329, loss: 2.1055\n", + "2023-07-02 21:10:44,402 - modelscope - INFO - epoch [1][2505/4982]\tlr: 5.521e-05, memory: 14329, loss: 1.5461\n", + "2023-07-02 21:10:47,254 - modelscope - INFO - epoch [1][2510/4982]\tlr: 5.506e-05, memory: 14329, loss: 2.3160\n", + "2023-07-02 21:10:50,538 - modelscope - INFO - epoch [1][2515/4982]\tlr: 5.492e-05, memory: 14329, loss: 1.4293\n", + "2023-07-02 21:10:53,161 - modelscope - INFO - epoch [1][2520/4982]\tlr: 5.478e-05, memory: 14329, loss: 2.6732\n", + "2023-07-02 21:10:55,975 - modelscope - INFO - epoch [1][2525/4982]\tlr: 5.464e-05, memory: 14329, loss: 1.1059\n", + "2023-07-02 21:10:59,325 - modelscope - INFO - epoch [1][2530/4982]\tlr: 5.449e-05, memory: 14329, loss: 0.7672\n", + "2023-07-02 21:11:02,511 - modelscope - INFO - epoch [1][2535/4982]\tlr: 5.435e-05, memory: 14329, loss: 1.0480\n", + "2023-07-02 21:11:04,652 - modelscope - INFO - epoch [1][2540/4982]\tlr: 5.421e-05, memory: 14329, loss: 1.4984\n", + "2023-07-02 21:11:08,281 - modelscope - INFO - epoch [1][2545/4982]\tlr: 5.407e-05, memory: 14329, loss: 1.1805\n", + "2023-07-02 21:11:10,297 - modelscope - INFO - epoch 
[1][2550/4982]\tlr: 5.392e-05, memory: 14329, loss: 2.0984\n", + "2023-07-02 21:11:13,563 - modelscope - INFO - epoch [1][2555/4982]\tlr: 5.378e-05, memory: 14329, loss: 0.5590\n", + "2023-07-02 21:11:15,666 - modelscope - INFO - epoch [1][2560/4982]\tlr: 5.364e-05, memory: 14329, loss: 1.8969\n", + "2023-07-02 21:11:17,895 - modelscope - INFO - epoch [1][2565/4982]\tlr: 5.350e-05, memory: 14329, loss: 2.2344\n", + "2023-07-02 21:11:20,533 - modelscope - INFO - epoch [1][2570/4982]\tlr: 5.335e-05, memory: 14329, loss: 1.2381\n", + "2023-07-02 21:11:23,834 - modelscope - INFO - epoch [1][2575/4982]\tlr: 5.321e-05, memory: 14329, loss: 1.7533\n", + "2023-07-02 21:11:26,883 - modelscope - INFO - epoch [1][2580/4982]\tlr: 5.307e-05, memory: 14329, loss: 0.9559\n", + "2023-07-02 21:11:29,602 - modelscope - INFO - epoch [1][2585/4982]\tlr: 5.293e-05, memory: 14329, loss: 1.1484\n", + "2023-07-02 21:11:31,820 - modelscope - INFO - epoch [1][2590/4982]\tlr: 5.279e-05, memory: 14329, loss: 1.4527\n", + "2023-07-02 21:11:33,946 - modelscope - INFO - epoch [1][2595/4982]\tlr: 5.264e-05, memory: 14329, loss: 2.1156\n", + "2023-07-02 21:11:36,808 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:12:43,304 - modelscope - INFO - Saving checkpoint at 2600 iter\n", + "2023-07-02 21:12:43,335 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter2400_acc0.7534938454627991\n", + "2023-07-02 21:12:43,337 - modelscope - INFO - Saving checkpoint at 2600 iter\n", + "2023-07-02 21:12:43,366 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_2400\n", + "2023-07-02 21:12:43,369 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14329, evaluation/acc: 0.7577, evaluation/loss: 1.7432, loss: 1.3414\n", + "2023-07-02 21:12:45,632 - modelscope 
- INFO - epoch [1][2605/4982]\tlr: 5.236e-05, memory: 14329, loss: 1.1031\n", + "2023-07-02 21:12:47,931 - modelscope - INFO - epoch [1][2610/4982]\tlr: 5.222e-05, memory: 14329, loss: 2.4422\n", + "2023-07-02 21:12:50,545 - modelscope - INFO - epoch [1][2615/4982]\tlr: 5.207e-05, memory: 14329, loss: 1.2281\n", + "2023-07-02 21:12:53,002 - modelscope - INFO - epoch [1][2620/4982]\tlr: 5.193e-05, memory: 14329, loss: 1.9912\n", + "2023-07-02 21:12:55,893 - modelscope - INFO - epoch [1][2625/4982]\tlr: 5.179e-05, memory: 14329, loss: 1.7354\n", + "2023-07-02 21:12:58,266 - modelscope - INFO - epoch [1][2630/4982]\tlr: 5.165e-05, memory: 14329, loss: 3.0562\n", + "2023-07-02 21:13:00,767 - modelscope - INFO - epoch [1][2635/4982]\tlr: 5.151e-05, memory: 14329, loss: 1.7664\n", + "2023-07-02 21:13:04,043 - modelscope - INFO - epoch [1][2640/4982]\tlr: 5.136e-05, memory: 14329, loss: 1.7547\n", + "2023-07-02 21:13:06,487 - modelscope - INFO - epoch [1][2645/4982]\tlr: 5.122e-05, memory: 14329, loss: 2.0453\n", + "2023-07-02 21:13:09,480 - modelscope - INFO - epoch [1][2650/4982]\tlr: 5.108e-05, memory: 14329, loss: 1.5508\n", + "2023-07-02 21:13:11,484 - modelscope - INFO - epoch [1][2655/4982]\tlr: 5.094e-05, memory: 14329, loss: 2.8527\n", + "2023-07-02 21:13:14,637 - modelscope - INFO - epoch [1][2660/4982]\tlr: 5.080e-05, memory: 14329, loss: 0.4787\n", + "2023-07-02 21:13:17,215 - modelscope - INFO - epoch [1][2665/4982]\tlr: 5.066e-05, memory: 14329, loss: 1.1926\n", + "2023-07-02 21:13:19,892 - modelscope - INFO - epoch [1][2670/4982]\tlr: 5.051e-05, memory: 14329, loss: 2.3055\n", + "2023-07-02 21:13:21,987 - modelscope - INFO - epoch [1][2675/4982]\tlr: 5.037e-05, memory: 14329, loss: 1.6938\n", + "2023-07-02 21:13:24,761 - modelscope - INFO - epoch [1][2680/4982]\tlr: 5.023e-05, memory: 14329, loss: 2.2922\n", + "2023-07-02 21:13:26,815 - modelscope - INFO - epoch [1][2685/4982]\tlr: 5.009e-05, memory: 14329, loss: 1.6898\n", + "2023-07-02 21:13:29,236 - 
modelscope - INFO - epoch [1][2690/4982]\tlr: 4.995e-05, memory: 14329, loss: 2.2826\n", + "2023-07-02 21:13:31,582 - modelscope - INFO - epoch [1][2695/4982]\tlr: 4.981e-05, memory: 14329, loss: 1.7828\n", + "2023-07-02 21:13:33,912 - modelscope - INFO - epoch [1][2700/4982]\tlr: 4.966e-05, memory: 14329, loss: 1.8785\n", + "2023-07-02 21:13:36,729 - modelscope - INFO - epoch [1][2705/4982]\tlr: 4.952e-05, memory: 14329, loss: 1.4273\n", + "2023-07-02 21:13:38,262 - modelscope - INFO - epoch [1][2710/4982]\tlr: 4.938e-05, memory: 14329, loss: 1.5227\n", + "2023-07-02 21:13:40,572 - modelscope - INFO - epoch [1][2715/4982]\tlr: 4.924e-05, memory: 14329, loss: 2.0828\n", + "2023-07-02 21:13:43,610 - modelscope - INFO - epoch [1][2720/4982]\tlr: 4.910e-05, memory: 14329, loss: 1.7301\n", + "2023-07-02 21:13:46,147 - modelscope - INFO - epoch [1][2725/4982]\tlr: 4.896e-05, memory: 14329, loss: 1.8305\n", + "2023-07-02 21:13:49,457 - modelscope - INFO - epoch [1][2730/4982]\tlr: 4.882e-05, memory: 14329, loss: 1.6883\n", + "2023-07-02 21:13:51,690 - modelscope - INFO - epoch [1][2735/4982]\tlr: 4.868e-05, memory: 14329, loss: 1.3963\n", + "2023-07-02 21:13:54,487 - modelscope - INFO - epoch [1][2740/4982]\tlr: 4.854e-05, memory: 14329, loss: 1.2293\n", + "2023-07-02 21:13:56,303 - modelscope - INFO - epoch [1][2745/4982]\tlr: 4.839e-05, memory: 14329, loss: 1.7289\n", + "2023-07-02 21:13:59,073 - modelscope - INFO - epoch [1][2750/4982]\tlr: 4.825e-05, memory: 14329, loss: 1.1637\n", + "2023-07-02 21:14:02,327 - modelscope - INFO - epoch [1][2755/4982]\tlr: 4.811e-05, memory: 14329, loss: 1.3336\n", + "2023-07-02 21:14:05,192 - modelscope - INFO - epoch [1][2760/4982]\tlr: 4.797e-05, memory: 14329, loss: 0.9352\n", + "2023-07-02 21:14:07,032 - modelscope - INFO - epoch [1][2765/4982]\tlr: 4.783e-05, memory: 14329, loss: 1.9258\n", + "2023-07-02 21:14:10,206 - modelscope - INFO - epoch [1][2770/4982]\tlr: 4.769e-05, memory: 14329, loss: 2.0555\n", + "2023-07-02 
21:14:12,659 - modelscope - INFO - epoch [1][2775/4982]\tlr: 4.755e-05, memory: 14329, loss: 1.5836\n", + "2023-07-02 21:14:15,156 - modelscope - INFO - epoch [1][2780/4982]\tlr: 4.741e-05, memory: 14329, loss: 1.6203\n", + "2023-07-02 21:14:18,171 - modelscope - INFO - epoch [1][2785/4982]\tlr: 4.727e-05, memory: 14329, loss: 2.1402\n", + "2023-07-02 21:14:20,575 - modelscope - INFO - epoch [1][2790/4982]\tlr: 4.713e-05, memory: 14329, loss: 1.6504\n", + "2023-07-02 21:14:23,247 - modelscope - INFO - epoch [1][2795/4982]\tlr: 4.699e-05, memory: 14329, loss: 1.7109\n", + "2023-07-02 21:14:26,026 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:15:32,451 - modelscope - INFO - Saving checkpoint at 2800 iter\n", + "2023-07-02 21:15:32,483 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter2600_acc0.7577160000801086\n", + "2023-07-02 21:15:32,485 - modelscope - INFO - Saving checkpoint at 2800 iter\n", + "2023-07-02 21:15:32,515 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_2600\n", + "2023-07-02 21:15:32,518 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14329, evaluation/acc: 0.7621, evaluation/loss: 1.7451, loss: 2.2227\n", + "2023-07-02 21:15:34,950 - modelscope - INFO - epoch [1][2805/4982]\tlr: 4.671e-05, memory: 14329, loss: 2.0086\n", + "2023-07-02 21:15:38,272 - modelscope - INFO - epoch [1][2810/4982]\tlr: 4.657e-05, memory: 14329, loss: 0.8770\n", + "2023-07-02 21:15:41,346 - modelscope - INFO - epoch [1][2815/4982]\tlr: 4.643e-05, memory: 14329, loss: 0.7887\n", + "2023-07-02 21:15:43,033 - modelscope - INFO - epoch [1][2820/4982]\tlr: 4.629e-05, memory: 14329, loss: 2.8648\n", + "2023-07-02 21:15:45,965 - modelscope - INFO - epoch [1][2825/4982]\tlr: 4.615e-05, memory: 14329, loss: 1.9832\n", 
+ "2023-07-02 21:15:48,381 - modelscope - INFO - epoch [1][2830/4982]\tlr: 4.601e-05, memory: 14329, loss: 1.4816\n", + "2023-07-02 21:15:51,262 - modelscope - INFO - epoch [1][2835/4982]\tlr: 4.587e-05, memory: 14329, loss: 1.3080\n", + "2023-07-02 21:15:53,969 - modelscope - INFO - epoch [1][2840/4982]\tlr: 4.573e-05, memory: 14329, loss: 1.2664\n", + "2023-07-02 21:15:56,145 - modelscope - INFO - epoch [1][2845/4982]\tlr: 4.559e-05, memory: 14329, loss: 2.4719\n", + "2023-07-02 21:15:58,623 - modelscope - INFO - epoch [1][2850/4982]\tlr: 4.545e-05, memory: 14329, loss: 1.0096\n", + "2023-07-02 21:16:01,537 - modelscope - INFO - epoch [1][2855/4982]\tlr: 4.532e-05, memory: 14329, loss: 1.7023\n", + "2023-07-02 21:16:05,216 - modelscope - INFO - epoch [1][2860/4982]\tlr: 4.518e-05, memory: 14329, loss: 1.8641\n", + "2023-07-02 21:16:08,050 - modelscope - INFO - epoch [1][2865/4982]\tlr: 4.504e-05, memory: 14329, loss: 2.1398\n", + "2023-07-02 21:16:10,270 - modelscope - INFO - epoch [1][2870/4982]\tlr: 4.490e-05, memory: 14329, loss: 1.9180\n", + "2023-07-02 21:16:12,856 - modelscope - INFO - epoch [1][2875/4982]\tlr: 4.476e-05, memory: 14329, loss: 1.6426\n", + "2023-07-02 21:16:15,831 - modelscope - INFO - epoch [1][2880/4982]\tlr: 4.462e-05, memory: 14329, loss: 1.9609\n", + "2023-07-02 21:16:18,475 - modelscope - INFO - epoch [1][2885/4982]\tlr: 4.448e-05, memory: 14329, loss: 1.3818\n", + "2023-07-02 21:16:21,513 - modelscope - INFO - epoch [1][2890/4982]\tlr: 4.434e-05, memory: 14329, loss: 1.8543\n", + "2023-07-02 21:16:23,561 - modelscope - INFO - epoch [1][2895/4982]\tlr: 4.421e-05, memory: 14329, loss: 1.6133\n", + "2023-07-02 21:16:25,999 - modelscope - INFO - epoch [1][2900/4982]\tlr: 4.407e-05, memory: 14329, loss: 2.2039\n", + "2023-07-02 21:16:28,248 - modelscope - INFO - epoch [1][2905/4982]\tlr: 4.393e-05, memory: 14329, loss: 1.5797\n", + "2023-07-02 21:16:31,059 - modelscope - INFO - epoch [1][2910/4982]\tlr: 4.379e-05, memory: 14329, loss: 
1.0002\n", + "2023-07-02 21:16:33,522 - modelscope - INFO - epoch [1][2915/4982]\tlr: 4.365e-05, memory: 14329, loss: 1.5379\n", + "2023-07-02 21:16:35,881 - modelscope - INFO - epoch [1][2920/4982]\tlr: 4.352e-05, memory: 14329, loss: 2.8797\n", + "2023-07-02 21:16:38,582 - modelscope - INFO - epoch [1][2925/4982]\tlr: 4.338e-05, memory: 14329, loss: 2.2234\n", + "2023-07-02 21:16:41,105 - modelscope - INFO - epoch [1][2930/4982]\tlr: 4.324e-05, memory: 14329, loss: 0.9779\n", + "2023-07-02 21:16:43,610 - modelscope - INFO - epoch [1][2935/4982]\tlr: 4.310e-05, memory: 14329, loss: 1.1336\n", + "2023-07-02 21:16:46,978 - modelscope - INFO - epoch [1][2940/4982]\tlr: 4.297e-05, memory: 14329, loss: 1.7703\n", + "2023-07-02 21:16:49,719 - modelscope - INFO - epoch [1][2945/4982]\tlr: 4.283e-05, memory: 14329, loss: 2.1102\n", + "2023-07-02 21:16:52,425 - modelscope - INFO - epoch [1][2950/4982]\tlr: 4.269e-05, memory: 14329, loss: 1.6873\n", + "2023-07-02 21:16:54,893 - modelscope - INFO - epoch [1][2955/4982]\tlr: 4.256e-05, memory: 14329, loss: 1.8313\n", + "2023-07-02 21:16:58,211 - modelscope - INFO - epoch [1][2960/4982]\tlr: 4.242e-05, memory: 14329, loss: 1.2132\n", + "2023-07-02 21:17:01,430 - modelscope - INFO - epoch [1][2965/4982]\tlr: 4.228e-05, memory: 14329, loss: 1.5578\n", + "2023-07-02 21:17:04,190 - modelscope - INFO - epoch [1][2970/4982]\tlr: 4.215e-05, memory: 14329, loss: 1.1242\n", + "2023-07-02 21:17:07,777 - modelscope - INFO - epoch [1][2975/4982]\tlr: 4.201e-05, memory: 14329, loss: 1.3516\n", + "2023-07-02 21:17:11,666 - modelscope - INFO - epoch [1][2980/4982]\tlr: 4.187e-05, memory: 14329, loss: 1.2953\n", + "2023-07-02 21:17:14,548 - modelscope - INFO - epoch [1][2985/4982]\tlr: 4.174e-05, memory: 14329, loss: 2.3777\n", + "2023-07-02 21:17:17,244 - modelscope - INFO - epoch [1][2990/4982]\tlr: 4.160e-05, memory: 14329, loss: 1.8803\n", + "2023-07-02 21:17:20,544 - modelscope - INFO - epoch [1][2995/4982]\tlr: 4.147e-05, memory: 14329, 
loss: 1.1699\n", + "2023-07-02 21:17:22,682 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 21:18:29,245 - modelscope - INFO - Saving checkpoint at 3000 iter\n", + "2023-07-02 21:18:29,273 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter2800_acc0.7621409296989441\n", + "2023-07-02 21:18:29,275 - modelscope - INFO - Saving checkpoint at 3000 iter\n", + "2023-07-02 21:18:29,301 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_2800\n", + "2023-07-02 21:18:29,303 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14329, evaluation/acc: 0.7655, evaluation/loss: 1.7432, loss: 1.2258\n", + "2023-07-02 21:18:31,804 - modelscope - INFO - epoch [1][3005/4982]\tlr: 4.120e-05, memory: 14329, loss: 2.2777\n", + "2023-07-02 21:18:35,465 - modelscope - INFO - epoch [1][3010/4982]\tlr: 4.106e-05, memory: 14329, loss: 1.4781\n", + "2023-07-02 21:18:38,255 - modelscope - INFO - epoch [1][3015/4982]\tlr: 4.092e-05, memory: 14329, loss: 1.4242\n", + "2023-07-02 21:18:41,641 - modelscope - INFO - epoch [1][3020/4982]\tlr: 4.079e-05, memory: 14449, loss: 2.5148\n", + "2023-07-02 21:18:44,184 - modelscope - INFO - epoch [1][3025/4982]\tlr: 4.065e-05, memory: 14449, loss: 1.9086\n", + "2023-07-02 21:18:47,235 - modelscope - INFO - epoch [1][3030/4982]\tlr: 4.052e-05, memory: 14449, loss: 2.3363\n", + "2023-07-02 21:18:50,005 - modelscope - INFO - epoch [1][3035/4982]\tlr: 4.039e-05, memory: 14449, loss: 1.4543\n", + "2023-07-02 21:18:52,482 - modelscope - INFO - epoch [1][3040/4982]\tlr: 4.025e-05, memory: 14449, loss: 2.1744\n", + "2023-07-02 21:18:55,300 - modelscope - INFO - epoch [1][3045/4982]\tlr: 4.012e-05, memory: 14449, loss: 1.8871\n", + "2023-07-02 21:18:58,643 - modelscope - INFO - epoch [1][3050/4982]\tlr: 3.998e-05, 
memory: 14449, loss: 1.6809\n", + "2023-07-02 21:19:01,867 - modelscope - INFO - epoch [1][3055/4982]\tlr: 3.985e-05, memory: 14449, loss: 2.7977\n", + "2023-07-02 21:19:05,785 - modelscope - INFO - epoch [1][3060/4982]\tlr: 3.971e-05, memory: 14449, loss: 1.6258\n", + "2023-07-02 21:19:09,029 - modelscope - INFO - epoch [1][3065/4982]\tlr: 3.958e-05, memory: 14449, loss: 0.9796\n", + "2023-07-02 21:19:11,551 - modelscope - INFO - epoch [1][3070/4982]\tlr: 3.945e-05, memory: 14449, loss: 2.2262\n", + "2023-07-02 21:19:14,238 - modelscope - INFO - epoch [1][3075/4982]\tlr: 3.931e-05, memory: 14449, loss: 1.3527\n", + "2023-07-02 21:19:16,361 - modelscope - INFO - epoch [1][3080/4982]\tlr: 3.918e-05, memory: 14449, loss: 1.6689\n", + "2023-07-02 21:19:18,345 - modelscope - INFO - epoch [1][3085/4982]\tlr: 3.905e-05, memory: 14449, loss: 2.9641\n", + "2023-07-02 21:19:20,849 - modelscope - INFO - epoch [1][3090/4982]\tlr: 3.891e-05, memory: 14449, loss: 1.6723\n", + "2023-07-02 21:19:23,101 - modelscope - INFO - epoch [1][3095/4982]\tlr: 3.878e-05, memory: 14449, loss: 2.7703\n", + "2023-07-02 21:19:25,726 - modelscope - INFO - epoch [1][3100/4982]\tlr: 3.865e-05, memory: 14449, loss: 0.8043\n", + "2023-07-02 21:19:28,252 - modelscope - INFO - epoch [1][3105/4982]\tlr: 3.852e-05, memory: 14449, loss: 2.0820\n", + "2023-07-02 21:19:30,440 - modelscope - INFO - epoch [1][3110/4982]\tlr: 3.838e-05, memory: 14449, loss: 2.3492\n", + "2023-07-02 21:19:33,686 - modelscope - INFO - epoch [1][3115/4982]\tlr: 3.825e-05, memory: 14449, loss: 0.8090\n", + "2023-07-02 21:19:36,596 - modelscope - INFO - epoch [1][3120/4982]\tlr: 3.812e-05, memory: 14449, loss: 0.6620\n", + "2023-07-02 21:19:38,596 - modelscope - INFO - epoch [1][3125/4982]\tlr: 3.799e-05, memory: 14449, loss: 2.6781\n", + "2023-07-02 21:19:41,115 - modelscope - INFO - epoch [1][3130/4982]\tlr: 3.786e-05, memory: 14449, loss: 1.4328\n", + "2023-07-02 21:19:44,046 - modelscope - INFO - epoch [1][3135/4982]\tlr: 
3.772e-05, memory: 14449, loss: 1.3764\n", + "2023-07-02 21:19:47,148 - modelscope - INFO - epoch [1][3140/4982]\tlr: 3.759e-05, memory: 14449, loss: 1.0316\n", + "2023-07-02 21:19:50,062 - modelscope - INFO - epoch [1][3145/4982]\tlr: 3.746e-05, memory: 14449, loss: 1.6078\n", + "2023-07-02 21:19:52,899 - modelscope - INFO - epoch [1][3150/4982]\tlr: 3.733e-05, memory: 14449, loss: 1.9883\n", + "2023-07-02 21:19:55,621 - modelscope - INFO - epoch [1][3155/4982]\tlr: 3.720e-05, memory: 14449, loss: 1.6697\n", + "2023-07-02 21:19:57,950 - modelscope - INFO - epoch [1][3160/4982]\tlr: 3.707e-05, memory: 14449, loss: 2.7109\n", + "2023-07-02 21:20:00,606 - modelscope - INFO - epoch [1][3165/4982]\tlr: 3.694e-05, memory: 14449, loss: 1.5930\n", + "2023-07-02 21:20:04,380 - modelscope - INFO - epoch [1][3170/4982]\tlr: 3.681e-05, memory: 14449, loss: 1.5211\n", + "2023-07-02 21:20:07,165 - modelscope - INFO - epoch [1][3175/4982]\tlr: 3.668e-05, memory: 14449, loss: 1.1980\n", + "2023-07-02 21:20:09,788 - modelscope - INFO - epoch [1][3180/4982]\tlr: 3.655e-05, memory: 14449, loss: 1.7625\n", + "2023-07-02 21:20:12,711 - modelscope - INFO - epoch [1][3185/4982]\tlr: 3.642e-05, memory: 14449, loss: 1.6734\n", + "2023-07-02 21:20:15,469 - modelscope - INFO - epoch [1][3190/4982]\tlr: 3.629e-05, memory: 14449, loss: 1.9477\n", + "2023-07-02 21:20:18,068 - modelscope - INFO - epoch [1][3195/4982]\tlr: 3.616e-05, memory: 14449, loss: 1.4062\n", + "2023-07-02 21:20:20,228 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:21:26,662 - modelscope - INFO - Saving checkpoint at 3200 iter\n", + "2023-07-02 21:21:26,689 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter3000_acc0.7654780745506287\n", + "2023-07-02 21:21:26,692 - modelscope - INFO - Saving checkpoint at 3200 iter\n", + 
"2023-07-02 21:21:26,718 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_3000\n", + "2023-07-02 21:21:26,721 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7670, evaluation/loss: 1.7173, loss: 2.3687\n", + "2023-07-02 21:21:29,912 - modelscope - INFO - epoch [1][3205/4982]\tlr: 3.590e-05, memory: 14449, loss: 1.7494\n", + "2023-07-02 21:21:32,447 - modelscope - INFO - epoch [1][3210/4982]\tlr: 3.577e-05, memory: 14449, loss: 2.1035\n", + "2023-07-02 21:21:35,773 - modelscope - INFO - epoch [1][3215/4982]\tlr: 3.565e-05, memory: 14449, loss: 0.8089\n", + "2023-07-02 21:21:38,867 - modelscope - INFO - epoch [1][3220/4982]\tlr: 3.552e-05, memory: 14449, loss: 1.5078\n", + "2023-07-02 21:21:42,117 - modelscope - INFO - epoch [1][3225/4982]\tlr: 3.539e-05, memory: 14449, loss: 0.6988\n", + "2023-07-02 21:21:44,231 - modelscope - INFO - epoch [1][3230/4982]\tlr: 3.526e-05, memory: 14449, loss: 2.9305\n", + "2023-07-02 21:21:46,826 - modelscope - INFO - epoch [1][3235/4982]\tlr: 3.513e-05, memory: 14449, loss: 1.9297\n", + "2023-07-02 21:21:49,591 - modelscope - INFO - epoch [1][3240/4982]\tlr: 3.501e-05, memory: 14449, loss: 0.5963\n", + "2023-07-02 21:21:51,805 - modelscope - INFO - epoch [1][3245/4982]\tlr: 3.488e-05, memory: 14449, loss: 3.5063\n", + "2023-07-02 21:21:54,641 - modelscope - INFO - epoch [1][3250/4982]\tlr: 3.475e-05, memory: 14449, loss: 2.2263\n", + "2023-07-02 21:21:56,972 - modelscope - INFO - epoch [1][3255/4982]\tlr: 3.462e-05, memory: 14449, loss: 2.3281\n", + "2023-07-02 21:21:59,236 - modelscope - INFO - epoch [1][3260/4982]\tlr: 3.450e-05, memory: 14449, loss: 1.6074\n", + "2023-07-02 21:22:02,735 - modelscope - INFO - epoch [1][3265/4982]\tlr: 3.437e-05, memory: 14449, loss: 0.7896\n", + "2023-07-02 21:22:05,850 - modelscope - INFO - epoch [1][3270/4982]\tlr: 3.424e-05, memory: 14449, loss: 2.6018\n", + "2023-07-02 21:22:07,890 - modelscope - INFO - 
epoch [1][3275/4982]\tlr: 3.412e-05, memory: 14449, loss: 1.3377\n", + "2023-07-02 21:22:10,846 - modelscope - INFO - epoch [1][3280/4982]\tlr: 3.399e-05, memory: 14449, loss: 1.4023\n", + "2023-07-02 21:22:13,203 - modelscope - INFO - epoch [1][3285/4982]\tlr: 3.387e-05, memory: 14449, loss: 2.1109\n", + "2023-07-02 21:22:15,914 - modelscope - INFO - epoch [1][3290/4982]\tlr: 3.374e-05, memory: 14449, loss: 1.3941\n", + "2023-07-02 21:22:18,753 - modelscope - INFO - epoch [1][3295/4982]\tlr: 3.362e-05, memory: 14449, loss: 2.0223\n", + "2023-07-02 21:22:21,131 - modelscope - INFO - epoch [1][3300/4982]\tlr: 3.349e-05, memory: 14449, loss: 1.3546\n", + "2023-07-02 21:22:22,563 - modelscope - INFO - epoch [1][3305/4982]\tlr: 3.337e-05, memory: 14449, loss: 2.2541\n", + "2023-07-02 21:22:26,351 - modelscope - INFO - epoch [1][3310/4982]\tlr: 3.324e-05, memory: 14449, loss: 2.1484\n", + "2023-07-02 21:22:29,794 - modelscope - INFO - epoch [1][3315/4982]\tlr: 3.312e-05, memory: 14449, loss: 0.9180\n", + "2023-07-02 21:22:31,954 - modelscope - INFO - epoch [1][3320/4982]\tlr: 3.299e-05, memory: 14449, loss: 2.4869\n", + "2023-07-02 21:22:34,848 - modelscope - INFO - epoch [1][3325/4982]\tlr: 3.287e-05, memory: 14449, loss: 1.0967\n", + "2023-07-02 21:22:37,229 - modelscope - INFO - epoch [1][3330/4982]\tlr: 3.275e-05, memory: 14449, loss: 2.1406\n", + "2023-07-02 21:22:39,882 - modelscope - INFO - epoch [1][3335/4982]\tlr: 3.262e-05, memory: 14449, loss: 1.9133\n", + "2023-07-02 21:22:42,375 - modelscope - INFO - epoch [1][3340/4982]\tlr: 3.250e-05, memory: 14449, loss: 2.0443\n", + "2023-07-02 21:22:45,140 - modelscope - INFO - epoch [1][3345/4982]\tlr: 3.238e-05, memory: 14449, loss: 2.7484\n", + "2023-07-02 21:22:48,235 - modelscope - INFO - epoch [1][3350/4982]\tlr: 3.225e-05, memory: 14449, loss: 1.3258\n", + "2023-07-02 21:22:50,145 - modelscope - INFO - epoch [1][3355/4982]\tlr: 3.213e-05, memory: 14449, loss: 2.4828\n", + "2023-07-02 21:22:53,373 - modelscope - 
INFO - epoch [1][3360/4982]\tlr: 3.201e-05, memory: 14449, loss: 1.3379\n", + "2023-07-02 21:22:55,667 - modelscope - INFO - epoch [1][3365/4982]\tlr: 3.189e-05, memory: 14449, loss: 2.0289\n", + "2023-07-02 21:22:57,577 - modelscope - INFO - epoch [1][3370/4982]\tlr: 3.176e-05, memory: 14449, loss: 2.0500\n", + "2023-07-02 21:23:00,744 - modelscope - INFO - epoch [1][3375/4982]\tlr: 3.164e-05, memory: 14449, loss: 1.0834\n", + "2023-07-02 21:23:04,128 - modelscope - INFO - epoch [1][3380/4982]\tlr: 3.152e-05, memory: 14449, loss: 0.8875\n", + "2023-07-02 21:23:07,233 - modelscope - INFO - epoch [1][3385/4982]\tlr: 3.140e-05, memory: 14449, loss: 1.1375\n", + "2023-07-02 21:23:09,464 - modelscope - INFO - epoch [1][3390/4982]\tlr: 3.128e-05, memory: 14449, loss: 2.3506\n", + "2023-07-02 21:23:12,230 - modelscope - INFO - epoch [1][3395/4982]\tlr: 3.116e-05, memory: 14449, loss: 1.0258\n", + "2023-07-02 21:23:15,891 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:24:22,313 - modelscope - INFO - Saving checkpoint at 3400 iter\n", + "2023-07-02 21:24:22,343 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter3200_acc0.7669530510902405\n", + "2023-07-02 21:24:22,345 - modelscope - INFO - Saving checkpoint at 3400 iter\n", + "2023-07-02 21:24:22,373 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_3200\n", + "2023-07-02 21:24:22,376 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7689, evaluation/loss: 1.6972, loss: 1.1217\n", + "2023-07-02 21:24:25,324 - modelscope - INFO - epoch [1][3405/4982]\tlr: 3.092e-05, memory: 14449, loss: 1.3055\n", + "2023-07-02 21:24:28,008 - modelscope - INFO - epoch [1][3410/4982]\tlr: 3.080e-05, memory: 14449, loss: 1.8813\n", + "2023-07-02 21:24:30,896 
- modelscope - INFO - epoch [1][3415/4982]\tlr: 3.068e-05, memory: 14449, loss: 1.8965\n", + "2023-07-02 21:24:33,316 - modelscope - INFO - epoch [1][3420/4982]\tlr: 3.056e-05, memory: 14449, loss: 2.1344\n", + "2023-07-02 21:24:35,511 - modelscope - INFO - epoch [1][3425/4982]\tlr: 3.044e-05, memory: 14449, loss: 2.6798\n", + "2023-07-02 21:24:38,328 - modelscope - INFO - epoch [1][3430/4982]\tlr: 3.032e-05, memory: 14449, loss: 0.9617\n", + "2023-07-02 21:24:41,517 - modelscope - INFO - epoch [1][3435/4982]\tlr: 3.020e-05, memory: 14449, loss: 1.7773\n", + "2023-07-02 21:24:44,031 - modelscope - INFO - epoch [1][3440/4982]\tlr: 3.008e-05, memory: 14449, loss: 0.9613\n", + "2023-07-02 21:24:46,636 - modelscope - INFO - epoch [1][3445/4982]\tlr: 2.996e-05, memory: 14449, loss: 2.5844\n", + "2023-07-02 21:24:49,249 - modelscope - INFO - epoch [1][3450/4982]\tlr: 2.984e-05, memory: 14449, loss: 1.5498\n", + "2023-07-02 21:24:51,312 - modelscope - INFO - epoch [1][3455/4982]\tlr: 2.973e-05, memory: 14449, loss: 3.1250\n", + "2023-07-02 21:24:53,950 - modelscope - INFO - epoch [1][3460/4982]\tlr: 2.961e-05, memory: 14449, loss: 1.4406\n", + "2023-07-02 21:24:58,115 - modelscope - INFO - epoch [1][3465/4982]\tlr: 2.949e-05, memory: 14449, loss: 1.8449\n", + "2023-07-02 21:25:01,189 - modelscope - INFO - epoch [1][3470/4982]\tlr: 2.938e-05, memory: 14449, loss: 1.5242\n", + "2023-07-02 21:25:04,395 - modelscope - INFO - epoch [1][3475/4982]\tlr: 2.926e-05, memory: 14449, loss: 1.7469\n", + "2023-07-02 21:25:06,700 - modelscope - INFO - epoch [1][3480/4982]\tlr: 2.914e-05, memory: 14449, loss: 2.0787\n", + "2023-07-02 21:25:09,262 - modelscope - INFO - epoch [1][3485/4982]\tlr: 2.903e-05, memory: 14449, loss: 2.8416\n", + "2023-07-02 21:25:11,210 - modelscope - INFO - epoch [1][3490/4982]\tlr: 2.891e-05, memory: 14449, loss: 1.3633\n", + "2023-07-02 21:25:13,408 - modelscope - INFO - epoch [1][3495/4982]\tlr: 2.879e-05, memory: 14449, loss: 2.1203\n", + "2023-07-02 
21:25:16,422 - modelscope - INFO - epoch [1][3500/4982]\tlr: 2.868e-05, memory: 14449, loss: 1.2863\n", + "2023-07-02 21:25:19,311 - modelscope - INFO - epoch [1][3505/4982]\tlr: 2.856e-05, memory: 14449, loss: 2.5109\n", + "2023-07-02 21:25:22,759 - modelscope - INFO - epoch [1][3510/4982]\tlr: 2.845e-05, memory: 14449, loss: 1.1850\n", + "2023-07-02 21:25:25,501 - modelscope - INFO - epoch [1][3515/4982]\tlr: 2.833e-05, memory: 14449, loss: 1.2992\n", + "2023-07-02 21:25:27,731 - modelscope - INFO - epoch [1][3520/4982]\tlr: 2.822e-05, memory: 14449, loss: 1.6945\n", + "2023-07-02 21:25:30,093 - modelscope - INFO - epoch [1][3525/4982]\tlr: 2.810e-05, memory: 14449, loss: 1.4635\n", + "2023-07-02 21:25:32,786 - modelscope - INFO - epoch [1][3530/4982]\tlr: 2.799e-05, memory: 14449, loss: 1.3238\n", + "2023-07-02 21:25:35,630 - modelscope - INFO - epoch [1][3535/4982]\tlr: 2.788e-05, memory: 14449, loss: 1.7512\n", + "2023-07-02 21:25:38,803 - modelscope - INFO - epoch [1][3540/4982]\tlr: 2.776e-05, memory: 14449, loss: 0.5063\n", + "2023-07-02 21:25:41,431 - modelscope - INFO - epoch [1][3545/4982]\tlr: 2.765e-05, memory: 14449, loss: 2.9984\n", + "2023-07-02 21:25:44,590 - modelscope - INFO - epoch [1][3550/4982]\tlr: 2.754e-05, memory: 14449, loss: 1.9760\n", + "2023-07-02 21:25:47,035 - modelscope - INFO - epoch [1][3555/4982]\tlr: 2.743e-05, memory: 14449, loss: 1.2375\n", + "2023-07-02 21:25:49,304 - modelscope - INFO - epoch [1][3560/4982]\tlr: 2.731e-05, memory: 14449, loss: 2.3781\n", + "2023-07-02 21:25:51,809 - modelscope - INFO - epoch [1][3565/4982]\tlr: 2.720e-05, memory: 14449, loss: 1.3707\n", + "2023-07-02 21:25:55,272 - modelscope - INFO - epoch [1][3570/4982]\tlr: 2.709e-05, memory: 14449, loss: 2.1244\n", + "2023-07-02 21:25:57,747 - modelscope - INFO - epoch [1][3575/4982]\tlr: 2.698e-05, memory: 14449, loss: 0.8705\n", + "2023-07-02 21:26:00,593 - modelscope - INFO - epoch [1][3580/4982]\tlr: 2.687e-05, memory: 14449, loss: 2.1484\n", + 
"2023-07-02 21:26:02,783 - modelscope - INFO - epoch [1][3585/4982]\tlr: 2.676e-05, memory: 14449, loss: 1.3639\n", + "2023-07-02 21:26:04,331 - modelscope - INFO - epoch [1][3590/4982]\tlr: 2.665e-05, memory: 14449, loss: 1.5500\n", + "2023-07-02 21:26:07,565 - modelscope - INFO - epoch [1][3595/4982]\tlr: 2.654e-05, memory: 14449, loss: 1.4891\n", + "2023-07-02 21:26:09,515 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 21:27:16,035 - modelscope - INFO - Saving checkpoint at 3600 iter\n", + "2023-07-02 21:27:16,062 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter3400_acc0.768944263458252\n", + "2023-07-02 21:27:16,065 - modelscope - INFO - Saving checkpoint at 3600 iter\n", + "2023-07-02 21:27:16,090 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_3400\n", + "2023-07-02 21:27:16,092 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7704, evaluation/loss: 1.6898, loss: 2.3109\n", + "2023-07-02 21:27:17,958 - modelscope - INFO - epoch [1][3605/4982]\tlr: 2.632e-05, memory: 14449, loss: 1.5484\n", + "2023-07-02 21:27:20,844 - modelscope - INFO - epoch [1][3610/4982]\tlr: 2.621e-05, memory: 14449, loss: 1.7049\n", + "2023-07-02 21:27:24,038 - modelscope - INFO - epoch [1][3615/4982]\tlr: 2.610e-05, memory: 14449, loss: 1.1580\n", + "2023-07-02 21:27:26,611 - modelscope - INFO - epoch [1][3620/4982]\tlr: 2.599e-05, memory: 14449, loss: 1.1926\n", + "2023-07-02 21:27:29,270 - modelscope - INFO - epoch [1][3625/4982]\tlr: 2.588e-05, memory: 14449, loss: 1.9445\n", + "2023-07-02 21:27:32,570 - modelscope - INFO - epoch [1][3630/4982]\tlr: 2.577e-05, memory: 14449, loss: 0.8320\n", + "2023-07-02 21:27:34,890 - modelscope - INFO - epoch [1][3635/4982]\tlr: 2.566e-05, memory: 14449, loss: 
1.8961\n", + "2023-07-02 21:27:37,762 - modelscope - INFO - epoch [1][3640/4982]\tlr: 2.556e-05, memory: 14449, loss: 1.3434\n", + "2023-07-02 21:27:40,862 - modelscope - INFO - epoch [1][3645/4982]\tlr: 2.545e-05, memory: 14449, loss: 1.6516\n", + "2023-07-02 21:27:43,323 - modelscope - INFO - epoch [1][3650/4982]\tlr: 2.534e-05, memory: 14449, loss: 3.4539\n", + "2023-07-02 21:27:46,306 - modelscope - INFO - epoch [1][3655/4982]\tlr: 2.523e-05, memory: 14449, loss: 1.5139\n", + "2023-07-02 21:27:48,976 - modelscope - INFO - epoch [1][3660/4982]\tlr: 2.513e-05, memory: 14449, loss: 1.6055\n", + "2023-07-02 21:27:52,023 - modelscope - INFO - epoch [1][3665/4982]\tlr: 2.502e-05, memory: 14449, loss: 0.5375\n", + "2023-07-02 21:27:55,459 - modelscope - INFO - epoch [1][3670/4982]\tlr: 2.492e-05, memory: 14449, loss: 1.8552\n", + "2023-07-02 21:27:58,311 - modelscope - INFO - epoch [1][3675/4982]\tlr: 2.481e-05, memory: 14449, loss: 1.0477\n", + "2023-07-02 21:28:00,477 - modelscope - INFO - epoch [1][3680/4982]\tlr: 2.470e-05, memory: 14449, loss: 1.8646\n", + "2023-07-02 21:28:02,402 - modelscope - INFO - epoch [1][3685/4982]\tlr: 2.460e-05, memory: 14449, loss: 2.7117\n", + "2023-07-02 21:28:05,217 - modelscope - INFO - epoch [1][3690/4982]\tlr: 2.449e-05, memory: 14449, loss: 2.6594\n", + "2023-07-02 21:28:07,697 - modelscope - INFO - epoch [1][3695/4982]\tlr: 2.439e-05, memory: 14449, loss: 1.9680\n", + "2023-07-02 21:28:11,289 - modelscope - INFO - epoch [1][3700/4982]\tlr: 2.429e-05, memory: 14449, loss: 1.4680\n", + "2023-07-02 21:28:14,322 - modelscope - INFO - epoch [1][3705/4982]\tlr: 2.418e-05, memory: 14449, loss: 2.1742\n", + "2023-07-02 21:28:16,434 - modelscope - INFO - epoch [1][3710/4982]\tlr: 2.408e-05, memory: 14449, loss: 2.0691\n", + "2023-07-02 21:28:19,150 - modelscope - INFO - epoch [1][3715/4982]\tlr: 2.398e-05, memory: 14449, loss: 1.6078\n", + "2023-07-02 21:28:22,166 - modelscope - INFO - epoch [1][3720/4982]\tlr: 2.387e-05, memory: 14449, 
loss: 0.9880\n", + "2023-07-02 21:28:24,924 - modelscope - INFO - epoch [1][3725/4982]\tlr: 2.377e-05, memory: 14449, loss: 1.1384\n", + "2023-07-02 21:28:28,212 - modelscope - INFO - epoch [1][3730/4982]\tlr: 2.367e-05, memory: 14449, loss: 1.3064\n", + "2023-07-02 21:28:30,391 - modelscope - INFO - epoch [1][3735/4982]\tlr: 2.357e-05, memory: 14449, loss: 2.5031\n", + "2023-07-02 21:28:32,316 - modelscope - INFO - epoch [1][3740/4982]\tlr: 2.346e-05, memory: 14449, loss: 1.1914\n", + "2023-07-02 21:28:35,087 - modelscope - INFO - epoch [1][3745/4982]\tlr: 2.336e-05, memory: 14449, loss: 1.5630\n", + "2023-07-02 21:28:38,274 - modelscope - INFO - epoch [1][3750/4982]\tlr: 2.326e-05, memory: 14449, loss: 1.5844\n", + "2023-07-02 21:28:40,649 - modelscope - INFO - epoch [1][3755/4982]\tlr: 2.316e-05, memory: 14449, loss: 2.6648\n", + "2023-07-02 21:28:43,226 - modelscope - INFO - epoch [1][3760/4982]\tlr: 2.306e-05, memory: 14449, loss: 1.3648\n", + "2023-07-02 21:28:45,433 - modelscope - INFO - epoch [1][3765/4982]\tlr: 2.296e-05, memory: 14449, loss: 2.8930\n", + "2023-07-02 21:28:48,571 - modelscope - INFO - epoch [1][3770/4982]\tlr: 2.286e-05, memory: 14449, loss: 1.8161\n", + "2023-07-02 21:28:51,247 - modelscope - INFO - epoch [1][3775/4982]\tlr: 2.276e-05, memory: 14449, loss: 2.2783\n", + "2023-07-02 21:28:53,364 - modelscope - INFO - epoch [1][3780/4982]\tlr: 2.266e-05, memory: 14449, loss: 2.4652\n", + "2023-07-02 21:28:56,459 - modelscope - INFO - epoch [1][3785/4982]\tlr: 2.256e-05, memory: 14449, loss: 0.5556\n", + "2023-07-02 21:28:58,529 - modelscope - INFO - epoch [1][3790/4982]\tlr: 2.247e-05, memory: 14449, loss: 1.4350\n", + "2023-07-02 21:29:01,457 - modelscope - INFO - epoch [1][3795/4982]\tlr: 2.237e-05, memory: 14449, loss: 2.3062\n", + "2023-07-02 21:29:03,885 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 
21:30:10,496 - modelscope - INFO - Saving checkpoint at 3800 iter\n", + "2023-07-02 21:30:10,522 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter3600_acc0.7704192399978638\n", + "2023-07-02 21:30:10,525 - modelscope - INFO - Saving checkpoint at 3800 iter\n", + "2023-07-02 21:30:10,549 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_3600\n", + "2023-07-02 21:30:10,552 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7714, evaluation/loss: 1.6864, loss: 1.6359\n", + "2023-07-02 21:30:12,897 - modelscope - INFO - epoch [1][3805/4982]\tlr: 2.217e-05, memory: 14449, loss: 2.1727\n", + "2023-07-02 21:30:15,703 - modelscope - INFO - epoch [1][3810/4982]\tlr: 2.208e-05, memory: 14449, loss: 1.7061\n", + "2023-07-02 21:30:18,582 - modelscope - INFO - epoch [1][3815/4982]\tlr: 2.198e-05, memory: 14449, loss: 0.9371\n", + "2023-07-02 21:30:21,148 - modelscope - INFO - epoch [1][3820/4982]\tlr: 2.188e-05, memory: 14449, loss: 1.7875\n", + "2023-07-02 21:30:23,806 - modelscope - INFO - epoch [1][3825/4982]\tlr: 2.179e-05, memory: 14449, loss: 2.2953\n", + "2023-07-02 21:30:26,426 - modelscope - INFO - epoch [1][3830/4982]\tlr: 2.169e-05, memory: 14449, loss: 2.3281\n", + "2023-07-02 21:30:28,893 - modelscope - INFO - epoch [1][3835/4982]\tlr: 2.160e-05, memory: 14449, loss: 1.5443\n", + "2023-07-02 21:30:31,735 - modelscope - INFO - epoch [1][3840/4982]\tlr: 2.150e-05, memory: 14449, loss: 2.0406\n", + "2023-07-02 21:30:33,879 - modelscope - INFO - epoch [1][3845/4982]\tlr: 2.141e-05, memory: 14449, loss: 2.1980\n", + "2023-07-02 21:30:36,598 - modelscope - INFO - epoch [1][3850/4982]\tlr: 2.131e-05, memory: 14449, loss: 1.5972\n", + "2023-07-02 21:30:39,142 - modelscope - INFO - epoch [1][3855/4982]\tlr: 2.122e-05, memory: 14449, loss: 2.2004\n", + "2023-07-02 21:30:41,541 - modelscope - INFO - epoch [1][3860/4982]\tlr: 
2.112e-05, memory: 14449, loss: 1.5225\n", + "2023-07-02 21:30:44,206 - modelscope - INFO - epoch [1][3865/4982]\tlr: 2.103e-05, memory: 14449, loss: 2.0740\n", + "2023-07-02 21:30:47,318 - modelscope - INFO - epoch [1][3870/4982]\tlr: 2.094e-05, memory: 14449, loss: 2.7250\n", + "2023-07-02 21:30:50,059 - modelscope - INFO - epoch [1][3875/4982]\tlr: 2.084e-05, memory: 14449, loss: 2.2059\n", + "2023-07-02 21:30:52,045 - modelscope - INFO - epoch [1][3880/4982]\tlr: 2.075e-05, memory: 14449, loss: 1.7930\n", + "2023-07-02 21:30:54,716 - modelscope - INFO - epoch [1][3885/4982]\tlr: 2.066e-05, memory: 14449, loss: 1.6184\n", + "2023-07-02 21:30:56,979 - modelscope - INFO - epoch [1][3890/4982]\tlr: 2.057e-05, memory: 14449, loss: 2.1453\n", + "2023-07-02 21:31:01,437 - modelscope - INFO - epoch [1][3895/4982]\tlr: 2.048e-05, memory: 14449, loss: 1.2229\n", + "2023-07-02 21:31:05,207 - modelscope - INFO - epoch [1][3900/4982]\tlr: 2.039e-05, memory: 14449, loss: 1.7156\n", + "2023-07-02 21:31:07,873 - modelscope - INFO - epoch [1][3905/4982]\tlr: 2.029e-05, memory: 14449, loss: 1.8084\n", + "2023-07-02 21:31:10,896 - modelscope - INFO - epoch [1][3910/4982]\tlr: 2.020e-05, memory: 14449, loss: 0.4583\n", + "2023-07-02 21:31:13,623 - modelscope - INFO - epoch [1][3915/4982]\tlr: 2.011e-05, memory: 14449, loss: 3.1516\n", + "2023-07-02 21:31:16,647 - modelscope - INFO - epoch [1][3920/4982]\tlr: 2.002e-05, memory: 14449, loss: 1.0519\n", + "2023-07-02 21:31:19,431 - modelscope - INFO - epoch [1][3925/4982]\tlr: 1.994e-05, memory: 14449, loss: 2.3402\n", + "2023-07-02 21:31:21,995 - modelscope - INFO - epoch [1][3930/4982]\tlr: 1.985e-05, memory: 14449, loss: 2.3391\n", + "2023-07-02 21:31:24,439 - modelscope - INFO - epoch [1][3935/4982]\tlr: 1.976e-05, memory: 14449, loss: 2.4483\n", + "2023-07-02 21:31:26,586 - modelscope - INFO - epoch [1][3940/4982]\tlr: 1.967e-05, memory: 14449, loss: 2.2727\n", + "2023-07-02 21:31:28,897 - modelscope - INFO - epoch 
[1][3945/4982]\tlr: 1.958e-05, memory: 14449, loss: 3.0383\n", + "2023-07-02 21:31:31,754 - modelscope - INFO - epoch [1][3950/4982]\tlr: 1.949e-05, memory: 14449, loss: 1.5698\n", + "2023-07-02 21:31:35,256 - modelscope - INFO - epoch [1][3955/4982]\tlr: 1.941e-05, memory: 14449, loss: 1.2930\n", + "2023-07-02 21:31:37,474 - modelscope - INFO - epoch [1][3960/4982]\tlr: 1.932e-05, memory: 14449, loss: 1.4481\n", + "2023-07-02 21:31:40,154 - modelscope - INFO - epoch [1][3965/4982]\tlr: 1.923e-05, memory: 14449, loss: 1.6508\n", + "2023-07-02 21:31:42,215 - modelscope - INFO - epoch [1][3970/4982]\tlr: 1.915e-05, memory: 14449, loss: 1.6758\n", + "2023-07-02 21:31:44,996 - modelscope - INFO - epoch [1][3975/4982]\tlr: 1.906e-05, memory: 14449, loss: 3.0355\n", + "2023-07-02 21:31:47,982 - modelscope - INFO - epoch [1][3980/4982]\tlr: 1.898e-05, memory: 14449, loss: 2.0975\n", + "2023-07-02 21:31:50,425 - modelscope - INFO - epoch [1][3985/4982]\tlr: 1.889e-05, memory: 14449, loss: 2.7559\n", + "2023-07-02 21:31:53,599 - modelscope - INFO - epoch [1][3990/4982]\tlr: 1.881e-05, memory: 14449, loss: 0.6062\n", + "2023-07-02 21:31:56,806 - modelscope - INFO - epoch [1][3995/4982]\tlr: 1.872e-05, memory: 14449, loss: 1.8811\n", + "2023-07-02 21:31:59,002 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.24it/s]\n", + "2023-07-02 21:33:05,226 - modelscope - INFO - Saving checkpoint at 4000 iter\n", + "2023-07-02 21:33:05,253 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter3800_acc0.7713964581489563\n", + "2023-07-02 21:33:05,255 - modelscope - INFO - Saving checkpoint at 4000 iter\n", + "2023-07-02 21:33:05,280 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_3800\n", + "2023-07-02 21:33:05,283 - modelscope - INFO - epoch(eval) 
[1][281]\tmemory: 14449, evaluation/acc: 0.7721, evaluation/loss: 1.6809, loss: 2.3164\n", + "2023-07-02 21:33:07,641 - modelscope - INFO - epoch [1][4005/4982]\tlr: 1.855e-05, memory: 14449, loss: 1.3918\n", + "2023-07-02 21:33:10,090 - modelscope - INFO - epoch [1][4010/4982]\tlr: 1.847e-05, memory: 14449, loss: 1.7758\n", + "2023-07-02 21:33:13,438 - modelscope - INFO - epoch [1][4015/4982]\tlr: 1.839e-05, memory: 14449, loss: 0.8627\n", + "2023-07-02 21:33:16,653 - modelscope - INFO - epoch [1][4020/4982]\tlr: 1.831e-05, memory: 14449, loss: 1.2715\n", + "2023-07-02 21:33:20,248 - modelscope - INFO - epoch [1][4025/4982]\tlr: 1.822e-05, memory: 14449, loss: 2.1164\n", + "2023-07-02 21:33:23,029 - modelscope - INFO - epoch [1][4030/4982]\tlr: 1.814e-05, memory: 14449, loss: 1.0982\n", + "2023-07-02 21:33:25,384 - modelscope - INFO - epoch [1][4035/4982]\tlr: 1.806e-05, memory: 14449, loss: 1.3770\n", + "2023-07-02 21:33:27,542 - modelscope - INFO - epoch [1][4040/4982]\tlr: 1.798e-05, memory: 14449, loss: 1.4436\n", + "2023-07-02 21:33:29,897 - modelscope - INFO - epoch [1][4045/4982]\tlr: 1.790e-05, memory: 14449, loss: 1.6316\n", + "2023-07-02 21:33:32,478 - modelscope - INFO - epoch [1][4050/4982]\tlr: 1.782e-05, memory: 14449, loss: 0.8738\n", + "2023-07-02 21:33:35,228 - modelscope - INFO - epoch [1][4055/4982]\tlr: 1.774e-05, memory: 14449, loss: 1.9016\n", + "2023-07-02 21:33:37,569 - modelscope - INFO - epoch [1][4060/4982]\tlr: 1.766e-05, memory: 14449, loss: 1.6512\n", + "2023-07-02 21:33:40,234 - modelscope - INFO - epoch [1][4065/4982]\tlr: 1.758e-05, memory: 14449, loss: 1.3039\n", + "2023-07-02 21:33:42,749 - modelscope - INFO - epoch [1][4070/4982]\tlr: 1.750e-05, memory: 14449, loss: 1.2514\n", + "2023-07-02 21:33:45,340 - modelscope - INFO - epoch [1][4075/4982]\tlr: 1.742e-05, memory: 14449, loss: 2.8492\n", + "2023-07-02 21:33:47,472 - modelscope - INFO - epoch [1][4080/4982]\tlr: 1.734e-05, memory: 14449, loss: 2.0809\n", + "2023-07-02 
21:33:50,149 - modelscope - INFO - epoch [1][4085/4982]\tlr: 1.727e-05, memory: 14449, loss: 1.1375\n", + "2023-07-02 21:33:53,306 - modelscope - INFO - epoch [1][4090/4982]\tlr: 1.719e-05, memory: 14449, loss: 0.4272\n", + "2023-07-02 21:33:55,772 - modelscope - INFO - epoch [1][4095/4982]\tlr: 1.711e-05, memory: 14449, loss: 3.0484\n", + "2023-07-02 21:33:58,344 - modelscope - INFO - epoch [1][4100/4982]\tlr: 1.704e-05, memory: 14449, loss: 1.9910\n", + "2023-07-02 21:34:00,903 - modelscope - INFO - epoch [1][4105/4982]\tlr: 1.696e-05, memory: 14449, loss: 1.7889\n", + "2023-07-02 21:34:03,059 - modelscope - INFO - epoch [1][4110/4982]\tlr: 1.688e-05, memory: 14449, loss: 1.2016\n", + "2023-07-02 21:34:05,621 - modelscope - INFO - epoch [1][4115/4982]\tlr: 1.681e-05, memory: 14449, loss: 1.8453\n", + "2023-07-02 21:34:09,027 - modelscope - INFO - epoch [1][4120/4982]\tlr: 1.673e-05, memory: 14449, loss: 1.5453\n", + "2023-07-02 21:34:11,741 - modelscope - INFO - epoch [1][4125/4982]\tlr: 1.666e-05, memory: 14449, loss: 1.9316\n", + "2023-07-02 21:34:13,865 - modelscope - INFO - epoch [1][4130/4982]\tlr: 1.659e-05, memory: 14449, loss: 2.3094\n", + "2023-07-02 21:34:16,258 - modelscope - INFO - epoch [1][4135/4982]\tlr: 1.651e-05, memory: 14449, loss: 2.5703\n", + "2023-07-02 21:34:20,487 - modelscope - INFO - epoch [1][4140/4982]\tlr: 1.644e-05, memory: 14449, loss: 1.3984\n", + "2023-07-02 21:34:23,365 - modelscope - INFO - epoch [1][4145/4982]\tlr: 1.636e-05, memory: 14449, loss: 1.5207\n", + "2023-07-02 21:34:26,448 - modelscope - INFO - epoch [1][4150/4982]\tlr: 1.629e-05, memory: 14449, loss: 1.3838\n", + "2023-07-02 21:34:28,356 - modelscope - INFO - epoch [1][4155/4982]\tlr: 1.622e-05, memory: 14449, loss: 1.5562\n", + "2023-07-02 21:34:30,276 - modelscope - INFO - epoch [1][4160/4982]\tlr: 1.615e-05, memory: 14449, loss: 2.0258\n", + "2023-07-02 21:34:33,019 - modelscope - INFO - epoch [1][4165/4982]\tlr: 1.608e-05, memory: 14449, loss: 1.0586\n", + 
"2023-07-02 21:34:35,587 - modelscope - INFO - epoch [1][4170/4982]\tlr: 1.601e-05, memory: 14449, loss: 2.0258\n", + "2023-07-02 21:34:38,118 - modelscope - INFO - epoch [1][4175/4982]\tlr: 1.593e-05, memory: 14449, loss: 1.7780\n", + "2023-07-02 21:34:40,812 - modelscope - INFO - epoch [1][4180/4982]\tlr: 1.586e-05, memory: 14449, loss: 1.4871\n", + "2023-07-02 21:34:43,689 - modelscope - INFO - epoch [1][4185/4982]\tlr: 1.579e-05, memory: 14449, loss: 2.4375\n", + "2023-07-02 21:34:45,571 - modelscope - INFO - epoch [1][4190/4982]\tlr: 1.572e-05, memory: 14449, loss: 2.8734\n", + "2023-07-02 21:34:47,974 - modelscope - INFO - epoch [1][4195/4982]\tlr: 1.566e-05, memory: 14449, loss: 1.9576\n", + "2023-07-02 21:34:50,431 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.24it/s]\n", + "2023-07-02 21:35:56,740 - modelscope - INFO - Saving checkpoint at 4200 iter\n", + "2023-07-02 21:35:56,767 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_4000\n", + "2023-07-02 21:35:56,770 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7719, evaluation/loss: 1.6805, loss: 3.5922\n", + "2023-07-02 21:35:58,922 - modelscope - INFO - epoch [1][4205/4982]\tlr: 1.552e-05, memory: 14449, loss: 2.2658\n", + "2023-07-02 21:36:01,295 - modelscope - INFO - epoch [1][4210/4982]\tlr: 1.545e-05, memory: 14449, loss: 1.6580\n", + "2023-07-02 21:36:04,097 - modelscope - INFO - epoch [1][4215/4982]\tlr: 1.538e-05, memory: 14449, loss: 1.6982\n", + "2023-07-02 21:36:06,731 - modelscope - INFO - epoch [1][4220/4982]\tlr: 1.532e-05, memory: 14449, loss: 1.9359\n", + "2023-07-02 21:36:08,551 - modelscope - INFO - epoch [1][4225/4982]\tlr: 1.525e-05, memory: 14449, loss: 2.5812\n", + "2023-07-02 21:36:11,911 - modelscope - INFO - epoch [1][4230/4982]\tlr: 1.518e-05, memory: 14449, loss: 1.9195\n", + 
"2023-07-02 21:36:14,506 - modelscope - INFO - epoch [1][4235/4982]\tlr: 1.512e-05, memory: 14449, loss: 1.2545\n", + "2023-07-02 21:36:17,733 - modelscope - INFO - epoch [1][4240/4982]\tlr: 1.505e-05, memory: 14449, loss: 1.9451\n", + "2023-07-02 21:36:20,470 - modelscope - INFO - epoch [1][4245/4982]\tlr: 1.499e-05, memory: 14449, loss: 1.4648\n", + "2023-07-02 21:36:22,770 - modelscope - INFO - epoch [1][4250/4982]\tlr: 1.492e-05, memory: 14449, loss: 1.6961\n", + "2023-07-02 21:36:25,378 - modelscope - INFO - epoch [1][4255/4982]\tlr: 1.486e-05, memory: 14449, loss: 2.4164\n", + "2023-07-02 21:36:27,752 - modelscope - INFO - epoch [1][4260/4982]\tlr: 1.479e-05, memory: 14449, loss: 1.9963\n", + "2023-07-02 21:36:30,118 - modelscope - INFO - epoch [1][4265/4982]\tlr: 1.473e-05, memory: 14449, loss: 2.1148\n", + "2023-07-02 21:36:33,660 - modelscope - INFO - epoch [1][4270/4982]\tlr: 1.466e-05, memory: 14449, loss: 1.0082\n", + "2023-07-02 21:36:37,177 - modelscope - INFO - epoch [1][4275/4982]\tlr: 1.460e-05, memory: 14449, loss: 1.0070\n", + "2023-07-02 21:36:39,794 - modelscope - INFO - epoch [1][4280/4982]\tlr: 1.454e-05, memory: 14449, loss: 2.2496\n", + "2023-07-02 21:36:42,033 - modelscope - INFO - epoch [1][4285/4982]\tlr: 1.448e-05, memory: 14449, loss: 2.6797\n", + "2023-07-02 21:36:45,045 - modelscope - INFO - epoch [1][4290/4982]\tlr: 1.442e-05, memory: 14449, loss: 1.7584\n", + "2023-07-02 21:36:47,854 - modelscope - INFO - epoch [1][4295/4982]\tlr: 1.435e-05, memory: 14449, loss: 0.8922\n", + "2023-07-02 21:36:50,056 - modelscope - INFO - epoch [1][4300/4982]\tlr: 1.429e-05, memory: 14449, loss: 0.9248\n", + "2023-07-02 21:36:52,432 - modelscope - INFO - epoch [1][4305/4982]\tlr: 1.423e-05, memory: 14449, loss: 2.2406\n", + "2023-07-02 21:36:55,320 - modelscope - INFO - epoch [1][4310/4982]\tlr: 1.417e-05, memory: 14449, loss: 2.6234\n", + "2023-07-02 21:36:57,625 - modelscope - INFO - epoch [1][4315/4982]\tlr: 1.411e-05, memory: 14449, loss: 
2.5016\n", + "2023-07-02 21:36:59,666 - modelscope - INFO - epoch [1][4320/4982]\tlr: 1.405e-05, memory: 14449, loss: 2.4305\n", + "2023-07-02 21:37:01,862 - modelscope - INFO - epoch [1][4325/4982]\tlr: 1.400e-05, memory: 14449, loss: 2.3391\n", + "2023-07-02 21:37:03,730 - modelscope - INFO - epoch [1][4330/4982]\tlr: 1.394e-05, memory: 14449, loss: 2.1297\n", + "2023-07-02 21:37:06,491 - modelscope - INFO - epoch [1][4335/4982]\tlr: 1.388e-05, memory: 14449, loss: 1.5926\n", + "2023-07-02 21:37:08,327 - modelscope - INFO - epoch [1][4340/4982]\tlr: 1.382e-05, memory: 14449, loss: 2.0867\n", + "2023-07-02 21:37:10,978 - modelscope - INFO - epoch [1][4345/4982]\tlr: 1.376e-05, memory: 14449, loss: 1.5793\n", + "2023-07-02 21:37:13,418 - modelscope - INFO - epoch [1][4350/4982]\tlr: 1.371e-05, memory: 14449, loss: 1.3965\n", + "2023-07-02 21:37:16,097 - modelscope - INFO - epoch [1][4355/4982]\tlr: 1.365e-05, memory: 14449, loss: 1.6531\n", + "2023-07-02 21:37:18,922 - modelscope - INFO - epoch [1][4360/4982]\tlr: 1.360e-05, memory: 14449, loss: 1.2753\n", + "2023-07-02 21:37:21,708 - modelscope - INFO - epoch [1][4365/4982]\tlr: 1.354e-05, memory: 14449, loss: 1.6145\n", + "2023-07-02 21:37:23,716 - modelscope - INFO - epoch [1][4370/4982]\tlr: 1.349e-05, memory: 14449, loss: 2.6463\n", + "2023-07-02 21:37:27,213 - modelscope - INFO - epoch [1][4375/4982]\tlr: 1.343e-05, memory: 14449, loss: 0.6934\n", + "2023-07-02 21:37:30,031 - modelscope - INFO - epoch [1][4380/4982]\tlr: 1.338e-05, memory: 14449, loss: 2.2023\n", + "2023-07-02 21:37:33,441 - modelscope - INFO - epoch [1][4385/4982]\tlr: 1.332e-05, memory: 14449, loss: 1.6848\n", + "2023-07-02 21:37:35,797 - modelscope - INFO - epoch [1][4390/4982]\tlr: 1.327e-05, memory: 14449, loss: 1.6936\n", + "2023-07-02 21:37:39,329 - modelscope - INFO - epoch [1][4395/4982]\tlr: 1.322e-05, memory: 14449, loss: 0.5190\n", + "2023-07-02 21:37:41,815 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in 
ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:38:48,264 - modelscope - INFO - Saving checkpoint at 4400 iter\n", + "2023-07-02 21:38:48,291 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter4000_acc0.7720601558685303\n", + "2023-07-02 21:38:48,293 - modelscope - INFO - Saving checkpoint at 4400 iter\n", + "2023-07-02 21:38:48,319 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_4200\n", + "2023-07-02 21:38:48,321 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7722, evaluation/loss: 1.6760, loss: 2.0141\n", + "2023-07-02 21:38:52,426 - modelscope - INFO - epoch [1][4405/4982]\tlr: 1.311e-05, memory: 14449, loss: 1.0922\n", + "2023-07-02 21:38:54,940 - modelscope - INFO - epoch [1][4410/4982]\tlr: 1.306e-05, memory: 14449, loss: 1.1858\n", + "2023-07-02 21:38:57,631 - modelscope - INFO - epoch [1][4415/4982]\tlr: 1.301e-05, memory: 14449, loss: 2.2687\n", + "2023-07-02 21:39:01,287 - modelscope - INFO - epoch [1][4420/4982]\tlr: 1.296e-05, memory: 14449, loss: 1.2707\n", + "2023-07-02 21:39:04,825 - modelscope - INFO - epoch [1][4425/4982]\tlr: 1.291e-05, memory: 14449, loss: 2.9891\n", + "2023-07-02 21:39:07,641 - modelscope - INFO - epoch [1][4430/4982]\tlr: 1.286e-05, memory: 14449, loss: 1.6935\n", + "2023-07-02 21:39:10,432 - modelscope - INFO - epoch [1][4435/4982]\tlr: 1.281e-05, memory: 14449, loss: 1.4844\n", + "2023-07-02 21:39:13,413 - modelscope - INFO - epoch [1][4440/4982]\tlr: 1.276e-05, memory: 14449, loss: 1.8453\n", + "2023-07-02 21:39:17,035 - modelscope - INFO - epoch [1][4445/4982]\tlr: 1.271e-05, memory: 14449, loss: 1.4854\n", + "2023-07-02 21:39:20,194 - modelscope - INFO - epoch [1][4450/4982]\tlr: 1.266e-05, memory: 14449, loss: 1.2645\n", + "2023-07-02 21:39:23,060 - modelscope - INFO - epoch [1][4455/4982]\tlr: 1.261e-05, 
memory: 14449, loss: 1.7969\n", + "2023-07-02 21:39:25,473 - modelscope - INFO - epoch [1][4460/4982]\tlr: 1.257e-05, memory: 14449, loss: 2.3201\n", + "2023-07-02 21:39:28,124 - modelscope - INFO - epoch [1][4465/4982]\tlr: 1.252e-05, memory: 14449, loss: 1.7680\n", + "2023-07-02 21:39:30,849 - modelscope - INFO - epoch [1][4470/4982]\tlr: 1.247e-05, memory: 14449, loss: 1.6301\n", + "2023-07-02 21:39:33,762 - modelscope - INFO - epoch [1][4475/4982]\tlr: 1.243e-05, memory: 14449, loss: 2.1186\n", + "2023-07-02 21:39:36,085 - modelscope - INFO - epoch [1][4480/4982]\tlr: 1.238e-05, memory: 14449, loss: 1.4234\n", + "2023-07-02 21:39:38,762 - modelscope - INFO - epoch [1][4485/4982]\tlr: 1.233e-05, memory: 14449, loss: 1.7797\n", + "2023-07-02 21:39:41,748 - modelscope - INFO - epoch [1][4490/4982]\tlr: 1.229e-05, memory: 14449, loss: 1.6820\n", + "2023-07-02 21:39:44,541 - modelscope - INFO - epoch [1][4495/4982]\tlr: 1.224e-05, memory: 14449, loss: 1.0109\n", + "2023-07-02 21:39:47,053 - modelscope - INFO - epoch [1][4500/4982]\tlr: 1.220e-05, memory: 14449, loss: 2.4484\n", + "2023-07-02 21:39:49,590 - modelscope - INFO - epoch [1][4505/4982]\tlr: 1.216e-05, memory: 14449, loss: 1.8258\n", + "2023-07-02 21:39:52,526 - modelscope - INFO - epoch [1][4510/4982]\tlr: 1.211e-05, memory: 14449, loss: 2.8773\n", + "2023-07-02 21:39:55,867 - modelscope - INFO - epoch [1][4515/4982]\tlr: 1.207e-05, memory: 14449, loss: 1.6246\n", + "2023-07-02 21:39:58,627 - modelscope - INFO - epoch [1][4520/4982]\tlr: 1.203e-05, memory: 14449, loss: 2.5562\n", + "2023-07-02 21:40:01,603 - modelscope - INFO - epoch [1][4525/4982]\tlr: 1.199e-05, memory: 14449, loss: 1.4436\n", + "2023-07-02 21:40:04,193 - modelscope - INFO - epoch [1][4530/4982]\tlr: 1.194e-05, memory: 14449, loss: 1.3711\n", + "2023-07-02 21:40:07,773 - modelscope - INFO - epoch [1][4535/4982]\tlr: 1.190e-05, memory: 14449, loss: 1.8023\n", + "2023-07-02 21:40:10,054 - modelscope - INFO - epoch [1][4540/4982]\tlr: 
1.186e-05, memory: 14449, loss: 2.0508\n", + "2023-07-02 21:40:12,973 - modelscope - INFO - epoch [1][4545/4982]\tlr: 1.182e-05, memory: 14449, loss: 2.5195\n", + "2023-07-02 21:40:16,038 - modelscope - INFO - epoch [1][4550/4982]\tlr: 1.178e-05, memory: 14449, loss: 1.7164\n", + "2023-07-02 21:40:18,581 - modelscope - INFO - epoch [1][4555/4982]\tlr: 1.174e-05, memory: 14449, loss: 1.5645\n", + "2023-07-02 21:40:20,963 - modelscope - INFO - epoch [1][4560/4982]\tlr: 1.170e-05, memory: 14449, loss: 2.0105\n", + "2023-07-02 21:40:23,706 - modelscope - INFO - epoch [1][4565/4982]\tlr: 1.167e-05, memory: 14449, loss: 1.3252\n", + "2023-07-02 21:40:25,962 - modelscope - INFO - epoch [1][4570/4982]\tlr: 1.163e-05, memory: 14449, loss: 1.8855\n", + "2023-07-02 21:40:29,182 - modelscope - INFO - epoch [1][4575/4982]\tlr: 1.159e-05, memory: 14449, loss: 1.2594\n", + "2023-07-02 21:40:31,408 - modelscope - INFO - epoch [1][4580/4982]\tlr: 1.155e-05, memory: 14449, loss: 2.0570\n", + "2023-07-02 21:40:34,024 - modelscope - INFO - epoch [1][4585/4982]\tlr: 1.152e-05, memory: 14449, loss: 2.6170\n", + "2023-07-02 21:40:36,599 - modelscope - INFO - epoch [1][4590/4982]\tlr: 1.148e-05, memory: 14449, loss: 1.6721\n", + "2023-07-02 21:40:39,014 - modelscope - INFO - epoch [1][4595/4982]\tlr: 1.144e-05, memory: 14449, loss: 1.1687\n", + "2023-07-02 21:40:41,965 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.22it/s]\n", + "2023-07-02 21:41:48,497 - modelscope - INFO - Saving checkpoint at 4600 iter\n", + "2023-07-02 21:41:48,524 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter4400_acc0.7721523642539978\n", + "2023-07-02 21:41:48,526 - modelscope - INFO - Saving checkpoint at 4600 iter\n", + "2023-07-02 21:41:48,552 - modelscope - INFO - deleting checkpoint: 
/home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_4400\n", + "2023-07-02 21:41:48,555 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7725, evaluation/loss: 1.6727, loss: 1.6291\n", + "2023-07-02 21:41:51,846 - modelscope - INFO - epoch [1][4605/4982]\tlr: 1.137e-05, memory: 14449, loss: 0.3742\n", + "2023-07-02 21:41:54,432 - modelscope - INFO - epoch [1][4610/4982]\tlr: 1.134e-05, memory: 14449, loss: 1.9832\n", + "2023-07-02 21:41:56,756 - modelscope - INFO - epoch [1][4615/4982]\tlr: 1.130e-05, memory: 14449, loss: 1.6234\n", + "2023-07-02 21:41:59,635 - modelscope - INFO - epoch [1][4620/4982]\tlr: 1.127e-05, memory: 14449, loss: 1.2416\n", + "2023-07-02 21:42:02,440 - modelscope - INFO - epoch [1][4625/4982]\tlr: 1.124e-05, memory: 14449, loss: 1.9668\n", + "2023-07-02 21:42:04,595 - modelscope - INFO - epoch [1][4630/4982]\tlr: 1.120e-05, memory: 14449, loss: 1.1527\n", + "2023-07-02 21:42:07,367 - modelscope - INFO - epoch [1][4635/4982]\tlr: 1.117e-05, memory: 14449, loss: 2.0367\n", + "2023-07-02 21:42:09,781 - modelscope - INFO - epoch [1][4640/4982]\tlr: 1.114e-05, memory: 14449, loss: 1.6268\n", + "2023-07-02 21:42:12,158 - modelscope - INFO - epoch [1][4645/4982]\tlr: 1.111e-05, memory: 14449, loss: 2.4633\n", + "2023-07-02 21:42:14,206 - modelscope - INFO - epoch [1][4650/4982]\tlr: 1.108e-05, memory: 14449, loss: 2.8531\n", + "2023-07-02 21:42:16,879 - modelscope - INFO - epoch [1][4655/4982]\tlr: 1.105e-05, memory: 14449, loss: 2.2703\n", + "2023-07-02 21:42:20,006 - modelscope - INFO - epoch [1][4660/4982]\tlr: 1.102e-05, memory: 14449, loss: 0.8350\n", + "2023-07-02 21:42:22,598 - modelscope - INFO - epoch [1][4665/4982]\tlr: 1.099e-05, memory: 14449, loss: 1.9375\n", + "2023-07-02 21:42:26,607 - modelscope - INFO - epoch [1][4670/4982]\tlr: 1.096e-05, memory: 14449, loss: 0.9594\n", + "2023-07-02 21:42:30,336 - modelscope - INFO - epoch [1][4675/4982]\tlr: 1.093e-05, memory: 14449, loss: 1.2943\n", + 
"2023-07-02 21:42:32,894 - modelscope - INFO - epoch [1][4680/4982]\tlr: 1.090e-05, memory: 14449, loss: 1.4293\n", + "2023-07-02 21:42:37,079 - modelscope - INFO - epoch [1][4685/4982]\tlr: 1.087e-05, memory: 14449, loss: 1.4109\n", + "2023-07-02 21:42:40,878 - modelscope - INFO - epoch [1][4690/4982]\tlr: 1.084e-05, memory: 14449, loss: 0.6270\n", + "2023-07-02 21:42:43,202 - modelscope - INFO - epoch [1][4695/4982]\tlr: 1.082e-05, memory: 14449, loss: 1.4430\n", + "2023-07-02 21:42:45,786 - modelscope - INFO - epoch [1][4700/4982]\tlr: 1.079e-05, memory: 14449, loss: 1.2656\n", + "2023-07-02 21:42:47,371 - modelscope - INFO - epoch [1][4705/4982]\tlr: 1.076e-05, memory: 14449, loss: 1.9141\n", + "2023-07-02 21:42:50,147 - modelscope - INFO - epoch [1][4710/4982]\tlr: 1.074e-05, memory: 14449, loss: 1.1176\n", + "2023-07-02 21:42:52,690 - modelscope - INFO - epoch [1][4715/4982]\tlr: 1.071e-05, memory: 14449, loss: 2.7781\n", + "2023-07-02 21:42:55,645 - modelscope - INFO - epoch [1][4720/4982]\tlr: 1.069e-05, memory: 14449, loss: 0.4620\n", + "2023-07-02 21:42:58,615 - modelscope - INFO - epoch [1][4725/4982]\tlr: 1.066e-05, memory: 14449, loss: 1.2354\n", + "2023-07-02 21:43:00,944 - modelscope - INFO - epoch [1][4730/4982]\tlr: 1.064e-05, memory: 14449, loss: 1.4683\n", + "2023-07-02 21:43:04,011 - modelscope - INFO - epoch [1][4735/4982]\tlr: 1.062e-05, memory: 14449, loss: 1.3249\n", + "2023-07-02 21:43:06,962 - modelscope - INFO - epoch [1][4740/4982]\tlr: 1.059e-05, memory: 14449, loss: 1.0039\n", + "2023-07-02 21:43:10,074 - modelscope - INFO - epoch [1][4745/4982]\tlr: 1.057e-05, memory: 14449, loss: 1.9678\n", + "2023-07-02 21:43:12,406 - modelscope - INFO - epoch [1][4750/4982]\tlr: 1.055e-05, memory: 14449, loss: 0.6996\n", + "2023-07-02 21:43:15,125 - modelscope - INFO - epoch [1][4755/4982]\tlr: 1.053e-05, memory: 14449, loss: 0.9693\n", + "2023-07-02 21:43:17,919 - modelscope - INFO - epoch [1][4760/4982]\tlr: 1.050e-05, memory: 14449, loss: 
2.0680\n", + "2023-07-02 21:43:20,500 - modelscope - INFO - epoch [1][4765/4982]\tlr: 1.048e-05, memory: 14449, loss: 1.6277\n", + "2023-07-02 21:43:22,713 - modelscope - INFO - epoch [1][4770/4982]\tlr: 1.046e-05, memory: 14449, loss: 1.9484\n", + "2023-07-02 21:43:24,366 - modelscope - INFO - epoch [1][4775/4982]\tlr: 1.044e-05, memory: 14449, loss: 2.6502\n", + "2023-07-02 21:43:27,079 - modelscope - INFO - epoch [1][4780/4982]\tlr: 1.042e-05, memory: 14449, loss: 1.2715\n", + "2023-07-02 21:43:29,023 - modelscope - INFO - epoch [1][4785/4982]\tlr: 1.040e-05, memory: 14449, loss: 1.8383\n", + "2023-07-02 21:43:31,660 - modelscope - INFO - epoch [1][4790/4982]\tlr: 1.038e-05, memory: 14449, loss: 1.6623\n", + "2023-07-02 21:43:34,660 - modelscope - INFO - epoch [1][4795/4982]\tlr: 1.037e-05, memory: 14449, loss: 1.2914\n", + "2023-07-02 21:43:37,720 - modelscope - WARNING - ('METRICS', 'default', 'my_metric') not found in ast index file\n", + "Total test samples: 100%|██████████| 281/281 [01:06<00:00, 4.23it/s]\n", + "2023-07-02 21:44:44,218 - modelscope - INFO - Saving checkpoint at 4800 iter\n", + "2023-07-02 21:44:44,248 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/best_iter4600_acc0.7724842429161072\n", + "2023-07-02 21:44:44,250 - modelscope - INFO - Saving checkpoint at 4800 iter\n", + "2023-07-02 21:44:44,279 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_4600\n", + "2023-07-02 21:44:44,282 - modelscope - INFO - epoch(eval) [1][281]\tmemory: 14449, evaluation/acc: 0.7729, evaluation/loss: 1.6707, loss: 1.1414\n", + "2023-07-02 21:44:46,870 - modelscope - INFO - epoch [1][4805/4982]\tlr: 1.033e-05, memory: 14449, loss: 0.6551\n", + "2023-07-02 21:44:49,076 - modelscope - INFO - epoch [1][4810/4982]\tlr: 1.031e-05, memory: 14449, loss: 1.6857\n", + "2023-07-02 21:44:51,074 - modelscope - INFO - epoch [1][4815/4982]\tlr: 1.030e-05, 
memory: 14449, loss: 1.9123\n", + "2023-07-02 21:44:53,385 - modelscope - INFO - epoch [1][4820/4982]\tlr: 1.028e-05, memory: 14449, loss: 1.4424\n", + "2023-07-02 21:44:55,581 - modelscope - INFO - epoch [1][4825/4982]\tlr: 1.027e-05, memory: 14449, loss: 2.2789\n", + "2023-07-02 21:44:58,108 - modelscope - INFO - epoch [1][4830/4982]\tlr: 1.025e-05, memory: 14449, loss: 1.9641\n", + "2023-07-02 21:45:00,888 - modelscope - INFO - epoch [1][4835/4982]\tlr: 1.024e-05, memory: 14449, loss: 1.6689\n", + "2023-07-02 21:45:02,999 - modelscope - INFO - epoch [1][4840/4982]\tlr: 1.022e-05, memory: 14449, loss: 1.9693\n", + "2023-07-02 21:45:06,302 - modelscope - INFO - epoch [1][4845/4982]\tlr: 1.021e-05, memory: 14449, loss: 1.3166\n", + "2023-07-02 21:45:09,602 - modelscope - INFO - epoch [1][4850/4982]\tlr: 1.019e-05, memory: 14449, loss: 1.5213\n", + "2023-07-02 21:45:12,571 - modelscope - INFO - epoch [1][4855/4982]\tlr: 1.018e-05, memory: 14449, loss: 1.8047\n", + "2023-07-02 21:45:14,672 - modelscope - INFO - epoch [1][4860/4982]\tlr: 1.017e-05, memory: 14449, loss: 1.5372\n", + "2023-07-02 21:45:17,717 - modelscope - INFO - epoch [1][4865/4982]\tlr: 1.016e-05, memory: 14449, loss: 1.3180\n", + "2023-07-02 21:45:20,504 - modelscope - INFO - epoch [1][4870/4982]\tlr: 1.014e-05, memory: 14449, loss: 1.3500\n", + "2023-07-02 21:45:23,506 - modelscope - INFO - epoch [1][4875/4982]\tlr: 1.013e-05, memory: 14449, loss: 2.2521\n", + "2023-07-02 21:45:25,399 - modelscope - INFO - epoch [1][4880/4982]\tlr: 1.012e-05, memory: 14449, loss: 1.9281\n", + "2023-07-02 21:45:28,444 - modelscope - INFO - epoch [1][4885/4982]\tlr: 1.011e-05, memory: 14449, loss: 1.4693\n", + "2023-07-02 21:45:31,381 - modelscope - INFO - epoch [1][4890/4982]\tlr: 1.010e-05, memory: 14449, loss: 2.0117\n", + "2023-07-02 21:45:35,557 - modelscope - INFO - epoch [1][4895/4982]\tlr: 1.009e-05, memory: 14449, loss: 0.5264\n", + "2023-07-02 21:45:39,804 - modelscope - INFO - epoch [1][4900/4982]\tlr: 
1.008e-05, memory: 14449, loss: 1.2449\n", + "2023-07-02 21:45:42,752 - modelscope - INFO - epoch [1][4905/4982]\tlr: 1.008e-05, memory: 14449, loss: 1.3134\n", + "2023-07-02 21:45:45,007 - modelscope - INFO - epoch [1][4910/4982]\tlr: 1.007e-05, memory: 14449, loss: 0.9836\n", + "2023-07-02 21:45:47,247 - modelscope - INFO - epoch [1][4915/4982]\tlr: 1.006e-05, memory: 14449, loss: 1.8653\n", + "2023-07-02 21:45:49,545 - modelscope - INFO - epoch [1][4920/4982]\tlr: 1.005e-05, memory: 14449, loss: 1.9227\n", + "2023-07-02 21:45:52,533 - modelscope - INFO - epoch [1][4925/4982]\tlr: 1.005e-05, memory: 14449, loss: 1.1875\n", + "2023-07-02 21:45:55,303 - modelscope - INFO - epoch [1][4930/4982]\tlr: 1.004e-05, memory: 14449, loss: 1.9453\n", + "2023-07-02 21:45:58,165 - modelscope - INFO - epoch [1][4935/4982]\tlr: 1.003e-05, memory: 14449, loss: 0.6951\n", + "2023-07-02 21:46:01,430 - modelscope - INFO - epoch [1][4940/4982]\tlr: 1.003e-05, memory: 14449, loss: 0.7973\n", + "2023-07-02 21:46:04,313 - modelscope - INFO - epoch [1][4945/4982]\tlr: 1.002e-05, memory: 14449, loss: 1.8844\n", + "2023-07-02 21:46:06,392 - modelscope - INFO - epoch [1][4950/4982]\tlr: 1.002e-05, memory: 14449, loss: 1.5102\n", + "2023-07-02 21:46:08,801 - modelscope - INFO - epoch [1][4955/4982]\tlr: 1.002e-05, memory: 14449, loss: 2.2773\n", + "2023-07-02 21:46:11,500 - modelscope - INFO - epoch [1][4960/4982]\tlr: 1.001e-05, memory: 14449, loss: 1.6867\n", + "2023-07-02 21:46:13,716 - modelscope - INFO - epoch [1][4965/4982]\tlr: 1.001e-05, memory: 14449, loss: 2.5187\n", + "2023-07-02 21:46:16,514 - modelscope - INFO - epoch [1][4970/4982]\tlr: 1.001e-05, memory: 14449, loss: 1.1453\n", + "2023-07-02 21:46:19,686 - modelscope - INFO - epoch [1][4975/4982]\tlr: 1.000e-05, memory: 14449, loss: 1.6125\n", + "2023-07-02 21:46:23,065 - modelscope - INFO - epoch [1][4980/4982]\tlr: 1.000e-05, memory: 14449, loss: 2.1379\n", + "2023-07-02 21:46:24,007 - modelscope - INFO - Saving checkpoint 
at 4982 iter\n", + "2023-07-02 21:46:24,163 - modelscope - INFO - deleting checkpoint: /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505/iter_4800\n", + "2023-07-02 21:46:24,209 - modelscope - INFO - Train finished. Uploading models, waiting...\n", + "2023-07-02 21:46:24,299 - modelscope - INFO - {'done': True}\n" + ] + } + ], + "source": [ + "def cfg_modify_fn(cfg: Config) -> Config:\n", + " cfg.update(CONFIG)\n", + " return cfg\n", + "\n", + "\n", + "trainer = EpochBasedTrainer(\n", + " model=model,\n", + " cfg_file=cfg_file,\n", + " data_collator=data_collate_fn,\n", + " train_dataset=train_dataset,\n", + " eval_dataset=val_dataset,\n", + " remove_unused_data=True,\n", + " seed=42,\n", + " cfg_modify_fn=cfg_modify_fn,\n", + ")\n", + "\n", + "trainer.train()" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 可视化\n", + "tensorboard 命令: (e.g.) \n", + "`tensorboard --logdir /home/hackathon/my_git/agent/runs/chatglm2/v1-20230702-203505 --port 6006`\n", + "\n", + "\n", + "The following code is copied from baichuan_sft.ipynb" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "dict_keys(['lr', 'loss', 'evaluation/acc', 'evaluation/loss'])\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAApIAAAHDCAYAAACXsvqpAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAACdkElEQVR4nO29d5wcV5X+/XRP6MlB0oykkUbJQc45CRtssLEwNjZg+2e8BkxeWHnBC7vLGlgwsCCz+y5LNtE2yXjBiw0YB5xzzlG2bMmSlUZhcp7pev84XVOhK9xKHaaf7+cjTXdX1b230q2nzj3n3JSmaRoIIYQQQggJSLrYDSCEEEIIIeUJhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhOS46qqrkEqlsHHjxmI3hRBCygIKSUIIIYQQEgoKSUIIIYQQEgoKSUIIIYQQEgoKSUII8eBHP/oRDjzwQGQyGXR1dWHNmjXo6+uzrPPKK6/g7LPPxoIFC1BXV4fFixfjfe97H/r7+2fWufXWW3HCCSegra0NTU1NWLlyJb7whS8UeG8IISReqovdAEIIKVUuvfRSfPWrX8Upp5yCT33qU1i3bh0uv/xyPProo7j//vtRU1ODiYkJrF69GuPj4/jHf/xHLFiwAFu2bMENN9yAvr4+tLa24vnnn8cZZ5yBQw45BF/72teQyWSwfv163H///cXeRUIIiQSFJCGEOLBz506sXbsWp556Km666Sak0zKAs99+++Giiy7Cb37zG3z4wx/GCy+8gA0bNuAPf/gDzjnnnJntv/zlL898vvXWWzExMYGbbroJ8+bNK/i+EEJIUnBomxBCHLjtttswMTGBiy++eEZEAsDHP/5xtLS04K9//SsAoLW1FQBwyy23YGRkxLGstrY2AMCf/vQnZLPZZBtOCCEFhEKSEEIceP311wEAK1eutPxeW1uLFStWzCxfvnw5PvvZz+LnP/855s2bh9WrV+OHP/yhxT/yvPPOw/HHH4+PfexjmD9/Pt73vvfh97//PUUlIaTsoZAkhJCI/Pd//zeeeeYZfOELX8Do6Cg+/elP48ADD8Qbb7wBAKivr8c999yD2267DR/4wAfwzDPP4LzzzsPb3/52TE9PF7n1hBASHgpJQghxYOnSpQCAdevWWX6fmJjAhg0bZpbrHHzwwfjSl76Ee+65B/feey+2bNmCH//4xzPL0+k0Tj75ZHz729/GCy+8gG984xu44447cOeddya/M4QQkhAUkoQQ4sApp5yC2tpafO9734OmaTO//+IXv0B/fz9OP/10AMDAwACmpqYs2x588MFIp9MYHx8HAOzZsyev/MMOOwwAZtYhhJByhFHbhBDiQEdHBy655BJ89atfxTve8Q6ceeaZWLduHX70ox/h6KOPxvvf/34AwB133IGLLroI5557Lvbdd19MTU3h17/+NaqqqnD22WcDAL72ta/hnnvuwemnn46lS5eip6cHP/rRj7B48WKccMIJxdxNQgiJBIUkIYS4cOmll6KjowM/+MEP8E//9E+YM2cOPvGJT+Cb3/wmampqAACHHnooVq9ejb/85S/YsmULGhoacOihh+Kmm27CcccdBwA488wzsXHjRlxxxRXYtWsX5s2bhxNPPBFf/epXZ6K+CSGkHElp5jEbQgghhBBCFKGPJCGEEEIICQWFJCGEEEIICQWFJCGEEEIICQWFJCGEEEIICQWFJCGEEEIICQWFJCGEEEIICUXB80hms1ls3boVzc3NSKVSha6eEEIIIYT4oGkaBgcH0dXVhXTa3e5YcCG5detWdHd3F7paQgghhBASkM2bN2Px4sWuywsuJJubmwFIw1paWgpdPSGEEEII8WFgYADd3d0zus2NggtJfTi7paWFQpIQQgghpITxc0NksA0hhBBCCAkFhSQhhBBCCAkFhSQhhBB
CCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAlFICG5bNkypFKpvH9r1qxJqn2EEEIIIaRECZSQ/NFHH8X09PTM9+eeew5vf/vbce6558beMEIIIYQQUtoEEpIdHR2W75dddhn22msvnHjiibE2ihBCCCGElD6hfSQnJibwm9/8Bh/5yEd8p88hhBBCCCGzj9BzbV9//fXo6+vDhz70Ic/1xsfHMT4+PvN9YGAgbJWEEEIIIaSECG2R/MUvfoHTTjsNXV1dnuutXbsWra2tM/+6u7vDVkkIIYQQQkqIlKZpWtCNXn/9daxYsQJ//OMfcdZZZ3mu62SR7O7uRn9/P1paWoK3OE6y00A2C1TXFLcdhBBCCCElxMDAAFpbW331Wqih7SuvvBKdnZ04/fTTfdfNZDLIZDJhqkmeLa8B0ICFyykmCSGEEEICEnhoO5vN4sorr8SFF16I6urQLpYlQs4YOz5a3GYQQgghhJQhgYXkbbfdhk2bNuEjH/lIEu0hhBBCCCFlQmCT4qmnnooQbpWEEEIIIWSWwbm2CSGEEEJIKCgkCSGEEEJIKCgkCSGEEEJIKCgkCSGEEEJIKCgkCSGEEEJIKCgkCSGEEEJIKCgkASBV7AYQQgghhJQfFJIAZpTk8ADQv7u4TSGEEEIIKRPKfY7DeNmzXf7WNQCZ+uK2hRBCCCGkxKFF0onsdLFbQAghhBBS8lBIEkIIIYSQUFBIEkIIIYSQUFBIEkIIIYSQUFBIEkIIIYSQUFBIEkIIIYSQUFBIEkIIIYSQUFSukNS0YreAEEIIIaSsqVwhSQghhBBCIkEhSQghhBBCQkEhSQghhBBCQkEhCQBIFbsBhBBCCCFlRwULSVOwDXUkIYQQQkhgKlhIEkIIIYSQKFSukPTK/sPMQIQQQgghvlSukCSEEEIIIZGgkCSEEEIIIaGgkHSEY9uEEEIIIX5UsJCkWCSEEEIIiUIFC0kvmA+IEEIIIcQPCklHaK0khBBCCPGjcoUktSIhhBBCSCQqV0ha4FA2IYQQQkhQKlhI0iRJCCGEEBKFChaSZigqCSGEEEKCQiFJCCGEEEJCUXlCMpsFNAcLpNNvhBBCCCHElcoSkpMTwJb1wJ7txW4JIYQQQkjZU1lCcrBX/o4M0gJJCCGEEBKRyhKShBBCCCEkNipMSNIKSQghhBASFxUmJAkhhBBCSFxQSAI0VBJCCCGEhCCwkNyyZQve//73Y+7cuaivr8fBBx+Mxx57LIm2JQzVIyGEEEJIFKqDrNzb24vjjz8eb33rW3HTTTeho6MDr7zyCtrb25NqX4GgqCSEEEIICUogIfmtb30L3d3duPLKK2d+W758eeyNKgh52pFikhBCCCEkCIGGtv/85z/jqKOOwrnnnovOzk4cfvjh+NnPfpZU2+KHWpEQQgghJDYCCcnXXnsNl19+OfbZZx/ccsst+NSnPoVPf/rT+OUvf+m6zfj4OAYGBiz/CCGEEEJI+RNoaDubzeKoo47CN7/5TQDA4Ycfjueeew4//vGPceGFFzpus3btWnz1q1+N3tKkobWSEEIIISQQgSySCxcuxAEHHGD5bf/998emTZtct7nkkkvQ398/82/z5s3hWho7VI6EEEIIIVEIZJE8/vjjsW7dOstvL7/8MpYuXeq6TSaTQSaTCdc6QgghhBBSsgSySP7TP/0THnroIXzzm9/E+vXrcfXVV+OnP/0p1qxZk1T7YkZz/EgIIYQQQoITSEgeffTRuO666/C73/0OBx10EL7+9a/jO9/5Di644IKk2lcYtGyxW0AIIYQQUnYEGtoGgDPOOANnnHFGEm0pHnt2ANhh/U3LAkgBqVQxWkQIIYQQUvJU8FzbHmPbWhbY8iqwbWPBWkMIIYQQUm5UsJD0YGIc0DRgerLYLSGEEEIIKVkoJFXQNGDXFqBvZ7FbQgghhBBSMlBIqjA+CowOA4O9xW4JIYQQQkjJULlCMkj6H425ggghhBB
C7FSWkNRcvxBCCCGEkIBUlpBUxpy4nIKTEEIIIcSJChOSFIWEEEIIIXFRYUJSEepNQgghhBBfKCQJIYQQQkgoKlhIBgrbTqwVhBBCCCHlSuUKSWpDQgghhJBIVK6Q9ERz+UwIIYQQQnQoJAkhhBBCSCgoJP2gQZIQQgghxBEKSUIIIYQQEooKE5KKvo+0QhJCCCGE+FJhQtKEp1ikkiSEEEII8aNyhSTFIiGEEEJIJCpLSIbSjraNNApQQgghhBCg0oSkGS89SK1ICCGEEOJL5QpJKklCCCGEkEhUsJBUhJqSEEIIIcSR6mI3IHE0DejfBVTX5P9OCCGEEEJCM/uF5OQ4MNgrn+sai9sWQgghhJBZxOwf2s5OuyxQ9ZGk5ZIQQgghxInZLyTddGDoWBsKS0IIIYQQoBKEpAVaGgkhhBBC4qIChGQYwch5uAkhhBBC/KgAIekC00gSQgghhESisoSk5vrFa8Xi4hosRAghhBBSXGa/kFQWj27bF1FU9vYAW14FxoaL1wZCCCGEEBdmv5B0E49eAtG+qFhacqhP/vbtKlIDCCGEEELcqQAhGYYSGtoGgFSq2C0ghBBCCMmjsoSkFjb9D9MGEUIIIYTYqSwhacYz1qbExCItkoQQQggpQSpASIaZ2sbE1ERsLSGEEEIImU3MfiFpGZVWtTSa1tu5JZkUPNksMDGm2CZaJAkhhBBSesx+IelGkDSSk5Nq2wVhxyb5Nzrkvy51JCGEEEJKkAoTkiUUNKMPmY8MFrcdhBBCCCEhqQAhqTl+VN4m2IbCxBgw2Btj0A5NkoQQQggpPaqL3YDEcZvZJkhC8qDs2CR/02mgsRWYngamp4DaTLjyzDpyckISlTe3A9U1ERtKCCGEEBKe2S8kzYS2SIYqREQfAGx9TbbrXAJk6tS3n8GkJHdsArQsMD0JzFsUoqyQaJrUWVXDdESEEEIIARBwaPvSSy9FKpWy/Ntvv/2SalsCFMtHMldXHHNma1n5O1ngtET9u4BtG4GBPYWtlxBCCCElS2CL5IEHHojbbrvNKKC61I2abnNth9imWOgWQPNwfE1tYdsw2Ct/B3YDrXMLWzchhBBCSpLAKrC6uhoLFixIoi3JozpFYonpyBnM+SyrCywkCSGEEEJsBI7afuWVV9DV1YUVK1bgggsuwKZNmzzXHx8fx8DAgOUfCUrOIpnNFrcZhBBCCCEmAgnJY489FldddRVuvvlmXH755diwYQPe/OY3Y3DQPRfi2rVr0draOvOvu7s7cqMDEWqGxIjpf+KGsS2EEEIIKUECCcnTTjsN5557Lg455BCsXr0aN954I/r6+vD73//edZtLLrkE/f39M/82b94cudHBcBvODjC0XfShbl1JFr0hhBBCCCEzRIqUaWtrw7777ov169e7rpPJZJDJhMyfGDdxpP8JlGS8wKZETZNgmNo6oL6psHUTQgghpOKINLPN0NAQXn31VSxcuDCu9hSO2GadKQCqeRtHhyQ9z66tybaHEEIIIQQBheQ///M/4+6778bGjRvxwAMP4D3veQ+qqqpw/vnnJ9W+mFEUj+Uws6GTEJ6eSrBCQgghhBArgYa233jjDZx//vnYvXs3Ojo6cMIJJ+Chhx5CR0dHUu2LjqvlMcjsNXEpywDlmNs9k0cypmYQQgghhMRAICF5zTXXJNWOwlCIiW3KacicEEIIISQCkXwky49iTZEYAosgZf4fQgghhJQeFSYkQxJIcyYwDE4dSQghhJASZPYLyew0cMPPgfv+ZP09saFt85cICtC3fSVuUSWEEELIrGf2C8lN64CXnwAeucW2IKwQ89suicAch4Tk1JGEEEIIKTKzX0hOjMdQSJBo6xiqI4QQQggpA2a/kNSmXX4PqfgKNeTM6G9CCCGElDizX0hms8ZnLeu+nhdBNJ0WV2R4GUWYE0IIIaQiqQAhabJITps+B7L4JWW9jLItxSUhhBBCiksFCMkYLJKBSMIiSQghhBBSesx+IWk
Wj2ZRGWy8Op5Vo1bJ0W5CCCGElBCzX0iah7bNn52E2OgQ0LdTPj98E/DMfe7rumJO0eOyodKwup+SJIQQQggpLoHm2i5LpqeMz1mPCO6//QZ4/kH5fs5ngPv/Ip8PfhMCmQJjSyPplzOSopIQQgghxWX2WySnJo3PbkPbA7sNEQkArz5tfB4fs23nh4ropAgkhBBCSPlTYULSxSI5OWH9PthrfB4bBibG1OszWxJHhpzTASmNbFNsEkIIIaS0qQAhaR7aNkdwm4SaXSjqfpIA8PxD1mVB9F12GhjcE2ADNygqCSGEEFJ6VICQNFkb3SySYyPW770mIfnwTVYLpR92S+LIkNNKKgWp10kIIYQQUgQqQEi6DG2bBd+4TUhOT1q/D/Xllzsxbg3kCUIcOpJD34QQQggpMrNfSE67BduYsFsk7YzarIqTE8CO14Gtr+WvG5vAo1AkhBBCSGkz+4Wka7CNh0XSzsig9fukV/CNTQCmUv7rJAEtloQQQghJmNkvJPt3GZ8twTamdewWyX2PsH4fHgBu+TXwyC2IXwQWYM5vQgghhJAEmN0JyR/8K7DelBPSzSJpF5JnfEwiua/9HrB9I3D/n41l7/o4ACcro16sgtjzTTbusg51JCGEEEJKiNltkVx2oPW7m0Vyz/b8bWvrgBUH5/8+NZn/mxeOQ9sujI8Cu7c51MHpEgkhhBBSesxuIblgKZBpML47WSS1rIg3J/Y/Ov83v8CcwL6JpvV7Nos/Zu+OQJuFXIEQQgghJBKzW0imUsBHv2Z81xyitvt3A5Pjztu3zpN5t82MDVu/j48an0eH8pc74Sc2pyYZLEMIIYSQkmd2C0kA6N4X6Fohn6cdEpIP93tv3zLH+n3UZpHs2SxD5tPTwK6t+RHeXv6UgIvhMMBweKByCSGEEELiY3YH2wBAXQOQrpLPThbJcVsqn6oa6/emNut3J4vj5Hh+rklPHObfNpNy+Z0QQgghpISY/UISMISkk0VSzwnZ1CaWy2NWW5dX24Slk5Ds2azWDuXo6xR9IAkhhBBS8lSIkMyN4Gs2IXn3/wGP3y6fO7sl7Y8fKj6QZvxGqZ30YCrlsoA5JwkhhBBSOsx+H0nAsEhms8C2jcC918twtC4iAUn340Zzu/E5qJB0xGdo23P9AJsRQgghhCRIhQnJaeB3/wk8+jfgqXus63gJyfP/BcjUy2e/9D92NNcv7qRUhraD1EsIIYQQEj+VJSTNib4H91jX8RKSTW3AwSfI58AWSSdroub8eQa3oW2fcgkhhBBCCkiFCMncbva8YfxW12hdx0tIAhEskiEEn92vMtQUiRSahBBCCEmWChGSOYtk/y7jt1FbvkdlITkcTByGSiweQx5JQgghhJCEqQwhWZUTkuYZbIYHrOtkfISkLjSDWiT9hradlmezQN/OgPUQQgghhBSWyhCSqdxuegrJeu8yzBbJIPhZJJ0WT4w6/EgIIYQQUlpUhpDUh7YnJ4zf7EKye6V3GWYfycSHtl0LUy+Xc3UTQgghJGEqQ0g6Dm2b5th+70XxWyRrMvLXUdDFkEeSEEIIIaTIVIiQzE3gM2GaV3t6Sv7WNQLLDvAvo9Ycta0g7FJ6wIxPMvFYDZYUnIQQQggpHJUlJJ2EVpXiLJG6RXJqApic9F4XMPwyHbWdg5LkUDUhhBBCyowKEZI1HssUhWRtnbHua8/6r69bJJUFoMd6E+PA1teAoX73dYKURwghhBASAxUiJD3EoqqQTKeN2W2eust/fdWh7ZnfPITf5LhM72j2z6SFkhBCCCFFpjKEZHUMQhIAFu8tf/t3+6+bckoq7jBFjS4I49aF1JmEEEIISZhIQvKyyy5DKpXCxRdfHFNzEiKOoW0AaGqXv/Z5up1wFJJexKH8qB4JIYQQUjhCC8lHH30UP/nJT3DIIYfE2Z5k8LJIei2z09wqfwf2AFoWePhm4KEbndcNKiTjGKqmjiSEEEJIAQklJIeGhnDBBRfgZz/7Gdr
b2+NuU/zE4SMJAA2tIhCz08ANvwDu/zPwwA3AuNNMNA5C0kvoxe7zSFVJCCGEkGQJJSTXrFmD008/Haecckrc7UmGuIRkVRXQnBPOrzxp/O4kJL0sko5zbQcVfhSKhBBCCCkuAVSUcM011+CJJ57Ao48+qrT++Pg4xseNGWUGBgY81k6IPB/JFGaEWBAhCQBHvA2461rrb0GFpBOxWCQ1x4+EEEIIIUkQyCK5efNmfOYzn8Fvf/tb1NXVKW2zdu1atLa2zvzr7u4O1dBI2MViQ7NpmUcgjhOHvSX/twnFoW2vVEBBhSSFIiGEEEKKTCAh+fjjj6OnpwdHHHEEqqurUV1djbvvvhvf+973UF1djenp6bxtLrnkEvT398/827x5c2yNV8YuJOub3Jf5ka5Gnkh0skgO9QK//Rbw1yuClR8bVJqEEEIISZZAKurkk0/Gs89aZ3X58Ic/jP322w+f//znUVVVlbdNJpNBJpOJ1sqo2COzG5oBPRVkXUPw8j7078DrLwKvPAW88QowPpa/zo1XATtel3/vuFD8K2e0XUIirxy1o6aJEK+tk6TvhBBCCCkbAgnJ5uZmHHTQQZbfGhsbMXfu3LzfS4pq2/C1eWjbbJ1UZc4C+bf5ZfnuNLS9Z5vxeXQIaGo1vsci+MpRNTrQv1vyctY1AB2Li90aQgghhASgMkxAeT6SJvEYRkjqZOrlr31oW9OAPTuM76OD+gKHQuIUhGUYbDPUJ3/HRoraDEIIIYQEJ3DUtp277rorhmYkjFewTUMEIVnrIiRHBoAJ03D3yJBtQ5vKy04DU5Ph2wEAE+NWMdazCZjbFW3/CCGEEEI8iCwkywJ7ZHZ9s/PnoOgWSfvQ9u7t1u8zFkkXtrwavG67xXHH6/nr7N4KNOwbvGxCCCGEEAUqY2jb7iPZ2GJ8jmKxa2qTv327rL9vXmf9PuIjJENRLmPXhBBCCJmtVIaQtA9tN5oCX+oaw5fbmQsOef1FI/AGkEhuAKjJRauP5oa29VyRThPbJMVwf8IVEEIIIaRSqUwh2dAEnHQO8Jb3Rgu2mdtlzGDzh+8Arz4jnwf75O+iveVvnkWygErSHPRDyouJMTl/01PFbgkhhBDiSGUIyXSVdcrC6lqZ6vCoiHOF19QC7fON7889KH/HhuXvnNyyvGAbQhTYsUksyr09xW4JIYQQ4khlCMlUCkiZdrWmNr6yzRHgPZuB3h1GxPacBfJ31CQkY5lTm1QUk+P+6xBCCCFFoEKEJKxC0h58EwXz0PjgHuDKr+bqTAFtHfJ566vAFZfKtIk7twB9O6PXS0EaP709YgUstWNbYs0hhBBCdCpDSCJlnX4v5bPbQQJwTjjLOmw+U0aTNTq8rwe481pgnIm3S5ahPrEmj5aaKwKVJCGEkNKkQoQknMWeG/O61Ndt7wQu/kH+7w3NQEOL9TdzZDeJj+kpoH8XMDVR7JYQQgghFQWFZNR13dZvaAbqG6y/9e4ovWHT2cCe7cDAHhmWLkVGh2TYnOeeEELILKOChGTCu2r3u2xqlTrtItPsH5nNUlzEgT5FZTYbU4EBXyT82LVVhs2HQub05DVCCCGkRKkcIZlOeFff98/W722d8rfaFiG+faP8HRsBfv4l4Iafu5c5MSbJzTUngURxkRgx68gZmA+SEELILKNyhGTQ4eqgdHYDK480vs+ZDyCVb6l87Vn5+8x9YqV65Un3Mv/0Y+D3/yPrekGLFSGEEEKKQIUIyVTyQ9uANdpbT0ZuF5KvvyR/d281fnO0OMIIznnqnvxl1I7xUggxHvpdhiebEEJIaVIZQjKF5C2SgHUO7/YFUq89cntsWHz5+ncbv034JJzWpmNr4qwkdp1VgGuFEEJmE/27OQtXhVIZQhKwTmUYB01t+f6Ph50IHLgKWHkU0LUMQAo49f1Acztw8vuM9cZHrLOV6MEibsQWREKUKDUdSYMkIaSU0TRgYLe4a00yDVulUV3sBhSM1e8H7vyDzLEdB6l
UvuCoawBWf0A+p3OHtmMR8PFvyOd7/ig32fgoMGESj75C0skiqbl8rkRi2H/L0HapKUlCCCkT6LNfcVSOkGyeA5z598VtQ6ZRhOTYCDBmEo8TES2SvG9nOTzBhBBCSpPKGdqOm1QKnpYrp8V1uQTlY8P+FslpkxUyqaFtTXMue3wUmJpMps6ShWKNkJJieso9EJEQUjJQSCaGg5LUheSml6zm//Gx/M3NQtNpaDsO3bPzDWDLeqtoHR8FejYD2zbEUEEZUZCR7ZAFU+OSSmNqEtj6GrBtY7FbQpRgJ1XJVIiQTEIZhChTT0H02G3W352Gts1WyvERQ0zu2QHc+ltgcI9p5ZA3sV7H6JCpLQ6itiIo5Y6wlNtGSAKMjchfJvEnpOSpECGZAGHSCe143fn38RGH30xCUtNEfI4Mioh89n7gt98KXj9xx2whpm4jhIRB0+TFvNIEMPvMioZCUhV9WFoVJx/KE85yXnfUR0gCwH1/Av7wHaB/l3wfGQCmcmkWIt/E7AUiMTHOFE2ExEm5Jk4YHQR2bZVh+YqFz5NKozKEZNROqXOxcyF+5dqXH3aSc/qhseH830YH83/bvQ2Y22V83/SyTwMComli9axIQnZ+o8Niae7Z5L9uuT4cCSFqOBkFCJnlVIaQjErGzRoZUBmk08CSlfm/OwnJkaH83wCrYHntmdyHmN4Ah/qsPpJ9uyonejvsIRwZkL9MwktIjPCtq7ygFbKSoZBMDKeOUANa5hpfW+fJ31EH0ej0m/33uIdP7OJ1cI9EdlccATpFJt8lhBBSwVBIhsUv2CY185+BpgEtc4zvcxbI31GPoe2DT3Cvw81qGQb7bDs6FWORDCkIA21HKwshZBbC9+mKpkKEpOkBHibaOi40DaitM77PXSh/nayPukictxA4+9NwFCFuVssw7N4WX1kVBXtQQgghlUuFCEkzcQpJr7Ic5uLWOfFsYK9DgcPeIt/HR2QGBz11BGD8rW8Glu4HHPSm/HJ0q2Ucw6tJD9FmsyU+DBwy/U8p7xIhhBQEdoSVTOXMta2TTgEOE8X44iQKfTWpywpHniz/9FxjmiZzbz99N/DADcC7PiEpJACguV3+OqUfinNoO5VGuAOjwNSkzJRT3wjMW5RMHVEJLXLZgRISO/QCIaRsqDyLZKpAu6zSEVZVG0PdY0MiIgHgLz+VSO7GVmDhcvmtrjF/+9GhmKyRkIhy1+UR6xjqk79OvqAlCYNtCCkuVJJlC/vEiqMChWTYDirodorr1+cEopO/47IDDIFntkhW18jf7HT+lIa9O2QWnKBTHSYqsMvhoVCIYBtCCJmFsB+saCpjaNusYwplkbTX60ZdE9C/G7jmv/OXmVMFmS2Sja3AYK8IyaE+oDW33tgIcOVX5XNtHXCIR8S3nXQ5iL0ECd0PsgMlhBBSuVSeRVKVxhb529AcoRAFcWaO4raj+0cCVotkpt4Qlr/8D0PLmHM+DvcrtxLQCiuwdUaHJMioJAhrkfRbTqGpzNSk8YJEiA7vIUJKmgoRkjZBpyKa2juBjkXAnPnq5YZh3GNKrVaTRbLeZJGsrQNWHiGfN6+TQBbAOpw9Mgg8fjvQ26PWjiTTIjkVPTFuBBS5oWkyVN+/O5FmWety+RxoQxKJns1A305gz45it4QQEhYK/4qjQoSkmZSaaEqlxeoX1lKnKsy85rY2Jy+3WyRPOhdo65Dv+nD2uCmh+NP3AHf/H/D7b6u1w4vB3uhl2JlSmFJwagIY6gcGCiAkwypJc6e5cwuwa4v7usXMYVoO6FkMxjhfMSGElAuVJyRTsD7Qm9uBhcvCl+W5XEE4dO/rvqzdZA01+0imcmL4hHfL976dMizoFGAzPOBctv2t0estsn+X+7KwqLy1mtcp1bdcc7vGhiUynUOzhETE3HeW6L1PTPAcVTKVEWxjxyzwGlqA6triteWkc2So+ul75HtdI7BoL2D5QTK8rs84Y26jLl72OQyoyQCT48DAHqtFMjC
l3hFoSDT620lYT47L8Y1kSSz140pICWLXkTTm+zMxLhk9vFK5JYXm+oVUAJVnkQSsw9XKHVTEnkxP2WOnvgl467nG985u4KxP5iKuXaZ21C1eqRTQOk8+9+9yF5JK1j//VYpKoS2S/buAHZv8fUz92lXqx7Uk4UEjJBBjI8CO14HtG4vdElKBVKiQDCEK7ZvUBLVietSZrnIu122TrOlB25YTkn073XNHqszJnahQc9qRgPXpqwdtp6YpbmNbR/cL9Y1+p+ghpOyYmpQXRS8f9XJC7+N1P+Niwi6x4qCQDGNprG/K+Sz6OkkGL9syzO6yvTlljopF0jFYxn63F/juD1xdThDueN0Y7vdjegp4Yz2we3sC7dG3883/E7JgQoiQwD20Z4e8eKv2JcSH0GkvyCygQoSkTThGzZmo55gMWK0nnd3y96A35dZ32ODIk+Xv8Wcav5mF5BuvWNfXA3SchGSejizBm98ebDM+AkxOqFsRhvoBaMCoyvoxBvaU4KEsSybHGbhUsZj6vyTuJ15XhMRGIEV1+eWX45BDDkFLSwtaWlqwatUq3HTTTUm1LT7smixyGpYw2/tsc+7FwAX/BizdL7e6w/pveS9w0betUeZ6CqBXn7EOw3btBcxdIJ+TSN8TmYDDzWHEXZDTFOvDSnP8yIiBAEyOA9tfB7a8WuyWJI+WLY0hSULCwpfniiaQkFy8eDEuu+wyPP7443jsscfwtre9DWeddRaef/75pNoXP/b0P5Hwu3sC1JOpB+YvsW7rJIDtM+HoFkkzH/0acN5nJSIdUBOSJWmRNH8OE7pZNCVJolIquSSzBZh5adtGYOtrarlVK5JyujdLpK3ZrPiBmr8HZbBPhv4D+6UHr4qUN4HS/7zrXe+yfP/GN76Byy+/HA899BAOPPDAWBuWKGYhmYBx0bfOONc3Jy0HgH/4/4zk5bqQHPLxkdQ0JHr3h9Xtlg6sXHuncm03wfgY0LMJaGqTVFxJoVsjR4eB5iKmIiOzh62vSv+5cJmkBdq9TUavzNPu+tGXy1hR36QwVfBs6KtJWEI7C05PT+Oaa67B8PAwVq1aFWebkifyvNIFGKJUFZJVtneBTL3xuTF38ytZJNWqiw2l+mxi15JbLuYGx1leKVp3SXAGcon4h/qK2ozKpITuoWy2/O5pvb1jo0ZAUd/OcGUVwipPyprACcmfffZZrFq1CmNjY2hqasJ1112HAw44wHX98fFxjI+Pz3wfGHCZaaVg2KdIVBWFDusFGdkOPJyecq7Tj6Y2w1dyZmi7L3+9vLaXoJK0D20XbIrBpI5FhHLj3v/JCWDPdrFo1zfFVy4hcVNMETc9JW4HtXU216NKooRE9PSUGEaaWos7kQixENg0t3LlSjz11FN4+OGH8alPfQoXXnghXnjhBdf1165di9bW1pl/3d3dkRocC6XoI5m3achtzZvpwxGDexxWDDBFYmRCj217LFJob5BjaIkQV98sULmlxO5tkv5k19Zit4TMJjRNrqvZYsXS8zO65eglQp4/e0Ls2ipCsueN5OoggQksJGtra7H33nvjyCOPxNq1a3HooYfiu9/9ruv6l1xyCfr7+2f+bd68OVKDw2ETFFGHtp30SevcaGXm1RFABB2zWv7udzQsjdOjtt9Yb43odurkCyV49Hpmy1zbjpRBW2fLg56UFmPDkuh7x+vFbgkBco+DQozkFKjP0wU9sxyUFJHn2s5ms5ahazuZTAaZTCZqNfESeWYbh+2b58ibkusDOqFgGwA47p1A1wpgqS3gqX0+0LEY2PkG8MIjwNFvl/xpW161zqZTDALPNmMf2nXZXtMk+jXwsEecPpLxFVWR8PiVL3qOV3PEcFDGx4Bhc+5XXhDhiUFE8vATHwIJyUsuuQSnnXYalixZgsHBQVx99dW46667cMsttyTVvmRIwtcuZfe9jFpPgG2ra4AVBzuLp85uEZJ6BJ4++40lIa/qNIJxoKfxCeEjaV82OgyM2Hxud2+
XBORtnQh0DBPbfZ+C+3ZJHsEko4KdYEpLUqr0bLJ+L6qQmQU3imJ3Gx9UnpVGICHZ09ODD37wg9i2bRtaW1txyCGH4JZbbsHb3/72pNoXD/bYGkv6nxg7Cq/7J3CsTQzzgQPGLDxekdvazH/JM6MjoyYk14BdW/I30WexGdyTnxpJmajHQtHfMps1/Fdb5uRH4OeVOQseaoQQK5oGDA8AmTqgpsRG7wAE7g+pIyuOQELyF7/4RVLtKCAxTJGoP9BLMT7FvmGDSgogLdi+uEUQj41I9ZkGhTIU63H6nAhFHtr2279i6MhsFkgXYxbVQlrIvShz4R4k0r9/l5zvQlvGlSjitVCIS2BkEOjdIZ+7941envneKdQlrAcllQojg+JP2TqvgNk+KpcKmWvbRtQLa2bzBKO2nWa2mVnkuiB/mzyLpNu2ETvr7LQMofe84SMCNNtfVSJGmZeCMHGyqhYNh+ugf5dx/oYHgC3rS3R6zVnM5Lj8i8rUhKStGXDK2GBD02S9ob5ovo0kHIlGhId4BmWng/WXUxO2fqIE+trd26RNo8Phtp8YZ6R+ACgk1TeKvRne1YXMI2lHzyWpJ6V1Ig6RZQ4yChqRrbKOZrdSqbQ5pI+kU9EjgwGm7SviLA+aJoLeadjfj4E9wPiIWBf2bJffwiYxJsHRNJlffPvr0aPq+3aJIOjfFbwNpUYhmjSw2+VarzBr1nQuGHPbBvVtSvnlIxsiulvTJOvAjk22WALiRoUIybxJq2MuT2W1OKO2A5SlWyT7dgKP3OK8adIPD/O+aHkfPPAY2g5s0PTbwGP51KQI8Z1+1tZgxQbbB9vK09PS2dkthtNTInhHh8OLkVIUE5WAlnX+nHzFBayrBNE0oH+33EtFEUUxi9W8+zdA+XowZpD0OrOtvzDvD9OkKVEhQtJGqJltCo1HuzxGtvNo6wSW7Cefr/2e0VFYiKMjUC0jZB7JMPWFPrV20ZrUAz7CcR/YI8MvSVgM6VNUWWiuX9SYnhKLfWKCooBCpRiiqJRutzgMDWWvK8t+BwpOBQpJhzQ9IYoQ/HzekprZxktJ2pal08C5nwbqGsX3apfDEPeQ37SVKXlbv//PuTxxAa17Th1RQINkcIuk/VhoMty3baPzcIVnxL3pNlEZ6lAdgo/y0IoqaEvp4UVKhzCX5PbXxWJfrnOSzzaLmuUkFuBGzzt8ZX48/a6H7LS4/8y66yY8lSckU7AKgyj3WaJR2zFMkaijwYjG1KMD81bwIJ0G7r0eePhm4NffzF8+MgjsNE2152lIDBJs45P+JwgaJNXO1AQw1O+ygkI7ppPymVGI2o67TMuq5kjPWagypybF52nElOha00pr6CrO/iTQOYwh0A7ID2zo7YlWrk4xH9gWt5wyEQ5eUduJ7EOZHBdV/LKF9LwhUzUO7C5cm0qcChSS6RgelDHlePRcP8TQthctuSkcHYWkCcc3Lc2Yk3m4X2aeMLN7mwg08/p+BB7aDthZ5R2jCGWZV4/kfO1lnYxQbChmoVj0oneHRGGag852viGR6SUZLFCs8xPjhVhIC+VQn/RRoYSSh/AqGxQbvmurzwplewDiw++5o2dVsMy+VNlUhpA03xtpm5BU7XcKHejtmesyiJMkZDpEfS7wPR5CcutrwOX/Ctz1h/xlcxcan6P45c04cYcQc4nmlVQsT8kiqdjOQPsQwiKrr9K/yz+NT9zHs9SsN06WR91feKSID4QoL0uxtaE41cZKb4+8BA/7uek4YNn/MhBS2az4SE9OOC+39JOw7tJYmHQ4fqMlCq5M5YT9+LlRBpdKoagMIWkmlY4+tK2cijFCUE8YH8mUy7J0WubdBoCtr7oXe8918vfJu6y/a7BaHAPnFjQdmJ7NMrSctEXSsyyn5V7rmqP4VHwkA/7uvzAkmjxsBvb4i/84h7b7dkoKkSmXBx1RYBaLyuF+sYwl4VYQync4YKBgEKanpM8LI3DdGNgtL4fbNyqsHHUkJ3gR5U8
JvNyVGRUiJE13Rzot/9o6gbYOsdYFLWPmc4IXmVceyaDP+XSVMWPCpnXAy084r2eO6L77j6YFmjVJst+QlV9gTO8OxbdWL2tekXzbYs0r5vHmG/qt3rad8sM6xmt5sFfOT79CMuwkCHqO3I51WVscwvpIJtSnmY/xnh1iPVQd+o7LwqVpLoF2qsFxIeoc6pN+Vc/LGgdOmTcspzvK8Qpx0Qc9P0ETnheags6oNjuoECFpQheOzW1Ac3vw7aLWq4zHzDZBmRiTuZz1qQsfucV5vXFTwu3Hb7Nm9p+MYpG04TmntAmloB3FAhzTHilW5mmtDFhWiNUCkdfWEMPssbUrRgGgSv9usYYWa7g6Oy3J4JXrr4AHVe9OZ1/URAKdPDrNXVvl2vCaOch+OqJY5zXNlvHBYX81TaFvClqv/Yek34gCuN2Mj8o5iCsQKwkoHgNTGUIyZbNIhqHKQQj6XW/mep22V902f6H7716bnXqB/HUbZrF3aD/4rKT20GATkkEtTbYDVVUVQpCFtbTlCGIR8ErGG2lqRq8ON6AfUlTiDEYC5JgN9ccoDkLurx5JGehBFeOx7d8t0cteM0klVLWFsNohiYfoUC9mpmFMGq/91v0D87I2JDi0be73nQTsUF+Iqfj8Tm6R/MdVVtdnWhp2ypxhYnpaJlYohqiLYpEcH82NyFSWGK0MIVlVY3z2DGLxQMmimPc6G3B786Zho7Y9Fi5YKn+dIrMnxpw7uj98BzO+djoDPhbJ7a8b5WtaflqQdHVwH0lt5r/8Za7b+6/iuG7ekFtAkaWvMjokkcEqdQbFdVvVttquk6gd384t4rLQZxJwpZK2xQkli2EINRZkRhA7xTpepRDw40ncL1H2az/BZpi3cbo2HFORRSTwCErU+iLU4eaGsuN16TvDjCxY2hN1qD7gvvVsFh/xUEFN5UtlCMkak5AMbZE0DceqDndUm7aJU0iG9Z2sb5a/2en8t2A3C4ouLqdMIvOVJyS5t5ufopY1xONgb75ATaUQ+eHg6yMZtANRHY4OYEn1TbXhJYxDHp8wfeDYsIj/KFXr53hkKMTGDpij43duyU855YffPWq+3mel9SCkSbIUD0XibVK9aaIqyZjwPbUR6oxlFFzRhWa4X4a5B/vy19NF96hdSMY0TD89Jf3KqFN/FYObj1tE/SylMoRkda3xOVaLpM9VZq438NB2AqemusYQxHf8rwz/vfqM3MxeomdsxHpj9PaImPS0TOaEntMbpaYFt0gCyfkqBimm5B2xQwyf79ziU0ZcbQmA2So8PgL0bApYgMcDx+wLXHRcjlHRrq0C1quqCaYmJDWZkm+2SqGphHyCfSjUOY17pMOvvLD7paei6yuCv2Rvj7xAOz334rDQz8ZJHTxQjHooc8zWxKCCzmk71Run2mwJLcTQtsLFq7/pvfiIRHDrvioLlrlvs2uLISTbO+Um7O0Rq6bb0ISW98G2MOgNaheVhYzaTmjozzP4xfcHlQqsPosaArzQx2E1jrZ5JLzunx4Pd4OiEuMBC/QcK9TLUciy9dRVfTuDBUi6MTbkPoFCrLuv+MIcO3H3VyY3pYkxoCZjHdlzyiO58w2gpg5om5e/LBBhBJlCHZ5+8OY+sxSNBaVHZVgkUylg4TLxEQwbfZ1KA3UNQG2detSxRcDatkmlvMvxSv/jdXMFeRMyOzzrOclOOgf4yFet6/VsNm6ueYvkb/8ueaPb4pKXcsZH0mWZ0v3pYV0rVgJtJUOom5UpQv2qwtL8884tHpZmr+vEoezxMWDH5oARpiXsI2mmmA+LxNwTg+x/nPUWAa8pAZ0IO+wYONDO94fCEJdxbGRQngU9m62/24/L6LCMYvkFZUbNhOKGuTl6KrJA25f7DVF4KkNIAjLMXJMJv30qBXQsBjq71R9SqRQwf4lsZ/exXLS3940UKmpbgcNO9F7eMkfya77pDON4bdtoLJ/XJX8fuMEn2tDHT0a/WScngJcedfZV8XrIFiu
PZKydTBIdlqlMr7fugDoSPZuAiVHv4CF7uaVqkYyLqckYfKESU5LhKPuHaIGDK4KUXYwhdA2ITUnqhoe8oEwHi6QrpmVh4hWC7srUhItLhKIfZ+j7obKGtitHSMaF1/SKTtdcbZ1YMp3KCRt9HcJQOcNb3gt84Avuy/U5uY97J/C28+TzjlwgRrrKmCEHAF55yr2ciTEZinJMAJzFzMF66EbgxiuBP/3EoRCPDj7rc4PHMDKr1I7Q5fgVVeQHuuvsTUHaVSYWybDt3LZBLPmxJqkvBgH3P1vE1CxOxN0O8/m0FB21nhI5Xp4oCizP3xVddpKIA3Cqf2JMrKRmQana93peW2EzqwRET5014ZH/tMhQSEYiwA3kSExD1Crl6VTXiIXUjeY5xueW3GfdR2nOAmDZAcZyr2n3hgfkxnWbSUK/QV98RP56Td3ohJJFMibRY9GRcVovCm2NUq2jRB94u7ep56kMNLQdoi3m62AqQsoftzKLhUobet4Qy7Q9TVa5GmHM+7x7u7jrzLhwRLUouoiS8dHc9Wy7dmK5BiL0K46uSPpfVSGpWEHaJ1l7WPKakxI//76daq45pZYOa7BXXMl0g04JQiFZqtjf1izfY+ixa+ucf69vND63zrUuW3mkWFdPOEu+68mfg2K+UTMO1lqn9TTYAkd8bvDJCf/5pS11qa6n8GAJ42/pu4nHCubOMY4HkWcRftee2WLvUNDURPg365FB/0TGM5RJ1+Z6vqKexwB5QoNa3fShTfvEBn6bquxSkOt3ZBDYsck6Y07/rmiiRA/CiSV5uua+zz2bpf2JzDcet/jJlef68h6gPotF0nSNurrhxBxso5LntRjuCF6UsCVSp0x62wrEPvRtvunCDombOfPvJVH7ye8DulfKbwcfb12nqc0qYOcvkb+6wAwrJPXO84WHgd2mYBAvf7PRQevsNCWRvDlyYS6f879aGB2yplWyO7+rVBfYSTJHOuJLzLaN8mYdNnG31zCyJegi5qFtr+G74X7ZL/sUgEGJ8wFm3v3RIeCNV9xntLK0IUglRX7K7t4mw5a9O4zfpqec+yVVvz1PQpmuFduQY892hesoSWt7DEPbnj6GLsumXPr+xKzcqvsZ9hqPdWw7xrKSoTLS/5Qj9gdhKg1Af4jG4JuxZCXwj98Wv8e9DgVefhw4/K3WddJVIib16Lu2Tvmr+1H2hxCSkxNATa3kELv5l9Zle7YbYhWIqfNXxaM8i7N2sQSs6bNbJLamhRf3nnWbBVqAd0+vB8z0lASg6XMNu1nIgxBWSEbUBzNDvEEs4IDDDFMJXVv69bJnO9DY4rBCTG0o1tC2/aXE8YVU1SLrQSFu/ZFBEVXzl4Yvw3IdxtjoWIa2XbaL+hJmpm9XxAICiv8w684yKCRLkZra/N/SKUNHuhHUr1KPGm9qBY54m0S2298Mm1oNIan7TLbmcoMN9UsHYM6X6cWz9wO3/hY44Dhg70Pzl+/aahWSXsSe/sdjmXm6qyiJ0PNezkMIZa9txkZ8fIBCDLnbiRQN7VDwwB4Rv5l6tSK86jcPvRUi2MapflWhPTmR/0JQbs+hOK1dSeNZdUJKUoOPdc6FIJkANC3/Ws8TZTEp/DBC0jxVbjrtfjzimglG04ARL8t77lgkbaMoV3/hkHBoOwqh38T0Dw5XW8sceRu155E0P6CSSm3iVOzS/Y3Pegqj+qbcrD2a4mwTOZ64Q/6+8BCweV3+8rxpGhWHH3ZuAa7+T2DjC+ptCYuKBcm12SGGjVS2BWSIbyjAufC8hDwsiVEEmsXvLFeObs1Tzk/pJSTjFipeuYzc6lJsw7STBSbO9oe0yJajVSXqLFdOixLpYhWPbaC0OIU4XyYxGGR9/XNvD7BlfX7aoKQsknnNMfdfpna5b+C8bZB6KkxJUkjqtNuGbQuB47WWcn5YKz/Ao1zADtsefSpw0PHA6R81FqdSwfwkJyeAKy61CsWNL+avF8hCY1p44xWShuWPP/Bvi0p
5saxX4LJ2bBKLZGy4iCg/i5ub9pqcsArJsJepp0Uy5BCt0nSdPt/joEg6Umlo2/EYqRy3oDsV1c8gTDtULfUh9iXM7oS9x+Ig1HHyEel6gNzAHrhea44vVkDwjiLkdeq6eim8WJVCG7yhkNRpagO69sqPVC4VLA9Q0+cwSV2V6shRUwucegGw8ijr73Y/yd3bgIdvAp5/KL+Mzevy51M1O8jrhLVIBrGKRiWq9SPEarExsCfcEJLlRTtA1Lblzd4W8ZmIELNNBxmE6SnxLTMfnyBW21gIKYSTaoLOUJ8tJU6U8h0qmMmT5zXBgVeZIetVKtu0Xc9mtYAlawHB2xAov6JPmZoW3TgWdLDETYhptmV2n+mkMk4EthRqjh/DVR4Dpa8j6SNpIew83LHictVYhrZNv1dVA9kJhwUh8J3f27S8xWSR3LUV+NV/GMsy9UD3vvJX0yQ6W2fFQcBrzxnf2+fLkMdQn6TuUCX0cKtbeSHWi3yDezxk8r7HUOlQn/zr3hf+Q8QuOxo6ibC9vpD7oWyRDMieHeILO9gHdO/jVHj4slUpiWFlhzb05l4Cd28DulZ4rpqHPdekE8P9cu/3Q2b8CoxLQ/RE1Lpvt+vmAY67a8CSa+EB1s0RxDhQkteMl+XSpc/TNMnm4fsMDjHTRCQjZcwjG7MUCsmiEOKCc7NIVlUHsKLESKvJIvn8g9Zlf/6JTK947sXATVcaD6JD3iy5KM1C8oLPy0P86m/lWxZVnLgBxLPTAZSkk4N7yKLUKXAnZa4uGzKIJW6xH6S+YBuarGFuQ7uKdcV1miKXE9KXNcgxnJ70tyKqRLLnTbkXkLwm537YsUn+plLi110IXF8AHZa5YX9Zm5wQi50+Q1osU/gliGr77MumJ/OFZCzdhtNLsWK7vNpfWW6QnnBoOxIFvIm9LJKW3yNc3X7ze6ccLJLrHgMevz1/9clxEYe9piHtpjaxhh1wnHzvWiFpXxqb5ftQn3qCXnM+QfMbfBh/t9CE8BlyXa+UhsNtlgPzsbZMEZpbJ4xrQegHoGLUdiBLQsim2JmatEb4B8V8TLIxDfUp1au6nsOKPT5zrytVlvDY6+S49zphj/P4qKRwUi5XVUimjOATTRP/751v+ASkBDiGu7Z6lBXCfOe52NbHOYk0/dkWZqrRsWFg62um+87FdSI0tEiqQItknKTSUJu6b2YD9cVuIs8sJCNfyAE6ozC+pHquwFPfDyw/0MiVVp8TktlpSTze2Crfld/gTe2eGFNPJWMhYIcRysKmyT4O9gINzaGrTxxzsnPA2fo7PCB+rnMXulud4vQnVVk/ikXS93QqlB058tRUx84t8nf+knhybCo3IeiwYcCZWQpynTucTGWxEwB9IoCFyz1SoGmOHz0ZHZJ/C5dZp9/MTgOoCWiRdFg+OiTnzWmq3FCHwuue9Fot90NVFTCVDTfLj36f7NySc9nxaUTQPqkk+uWSaIQntEhGwX5+OxZJcErHovjK1HGbIrGq2hA1VRHfC4L4SOq5JM0rnPkJmSnnnE8bP594tvE5k3sgptMyxN2WK6O6xpgqcbAveLvNwsfLQjCD01trgPqipH3q3SmBBdtf9ymniJ2HPSDHKRp6z3b5PS9oynYNZaclmtwuNpN4e48SbBNLdGhEnIrcsUnmCY6lsIhkp62zSwVFqUlxtDuEVS0KXrM0aa5f/BkbASYcApyCCEkNcLy2g7z0RDp0Chvr+YwdX0psIyB+Ly6Opz7g9RD2nLlljdA0mcNdxV/YzvRUzNk4koEWyTjJ1AMLlqmv7/fsshiC3KZITAELV4glr74pwYvONmVjXQNwwpnAfX+W7/sdBex9mLH87E+L2GybJ9a3jS/kR36baWwGxkfEWrcgZ6lUuYn13JQ6vT0y5NTZnS+M84JIwuJTxugw0NzuvMzVp0zz/KpctzIO5WQa5BwEqTNdBUy7DEllpyXaN2AzQq8fOv2PQnlevm+x4VLo6DACJTuPUq+fMAk
ctWyvJ4ahbU3Lt5qrbBNmmavPrOtF4/49zDWj+W0f9kKMMU2c57F1/WKgC0k/i+Qeh0wfKli6Bc2zKXkLI+eKhgy7jwzIv6Y2a9njo2Jgcbu3d0d4cSsgFJKRSOJp4nIRuwXbADI0oF+gkfqHIKldABzzDmCfI8QqZU5cDgBL9zM+73+M/POiqU06Cst82pDho3uvB044y3nWm1efsX6/4efiE/WmM6RO3XL66N+knEwDsOb/c7CoBnzznJ52f8McH3HvACyHsMT8b9zOf56IMnX41bUhg70iDEO7LopSpk/DJ8bEt6ytIxf0ENS6NCzHamYINODwq0ITQxPFahaprhxB92uw1yfDg9OxDbpfSRzsoG4DcH45ijq0DXjsnov1zjGptwoKQtrTImnCc8Yar0oKNLrjdi7cBHJfj8wO19AsLkJORA1EKxAc2o5C+3z565deIjTmm9flVOXd1DEnJPdb1N4JrDhYbVi9odl9vbld8nfbBtOPGnDt94DXXwT+8rP8bXZtBTa/LJ8X51K26DfeAzcAv/qG3Kh9u0REAiLyHrlFUpl45VQcGwGevsdlqFyT4UbLTC02XDs981CNtUiMDstQplu7tCwwEVPH4vhAV7x2zH5b6ZAps5Ie2g6K367v3ibX1s5ccEmQ5o+NiA+X5dp2IgZXh/FRqWsypL9mlNMyNqJgKVQZivXB0WoetBDz6h7iI+jIgF2UR73Ow1ghU2mZ2tZ7pYDtsAyPqbdHZZRAD5YM4yOpgmMbVK2oXngdBzcjkImhXKL2oNb1EoRCMgqNLbkk5nbrlioBbmaVwJuo+EZtRyy/th6uheh+pXf/H/DgX43f9Wi8gd3AG6/I59Fh+Xfrb4319jokv8zJcRGUV3zZ+vtgn9y8Zt8ze+dx73XA7dc4C1gN4ZMnu1okc+J0YsxdoO7cYgiZJFCxSOr53pyWAclOdeZUn07fLiM5PhBMSygPU4ZEOZF3DNbWns1yz4wqPJyyWXkZs7z0FMh6E4V0iIG0uF9cog5TK1di2mZkUHyNzfeYU5kq2TvcFqsI5zyXIe+qfBuiC0mnF8Hh/niD2JTaGtEdwY5TpotZBoe2o5JkEvOaDFDXCFRXoyBJq3yriJrw3FZEXaMhFPUIbgC49rvAEW/L9737/f+IYNzwvGyrO6KvPApYstK5zufuz/9NF2OWDsp2g7/0mPzVxauFmIZQzcWYrZDptHMdccws4li5ju38PveADN+f9iHbpubzEtaHSMEyEIRBm/jWcxxGiXhW9aOKVpDaakk8gIb6FIPTEiLsLoXqc2Ow9gbdxj40nZ0W8a6acFyzWTSdXGlmUugEbWYUi2TI7dy6nJn0Py517N7m7NbkV5/jb36WZvuyAPvt68+aW6cQOXULDC2SpUwqJZa69vn5ATbJVBhyWcg6zMPcHYuAd/+DfM5mxe/SLg4A8YnMTosVRReCbz1XUll0dsv3dJUIUTe2b3S3KOqdgXloSK9nalL+RXmoWzoRc8fjN6xTgDdZc9s2rQP+9hux6O60zYHuZZFMHFt9muZ8LifGxE1gSmFKSC2LRF/UlI9R3GLHB6do46RPp+Mwo8s94UZgd4qU9zmI7YXBZ/3xUWDLevXroX+XwsiHX1mayzoBlKddEAdug896XhZJILo7j8LouvsGMVRaARZJCsmywS1q22O9wFWE8JH0Y16Xex32+lYeCczLDXHv3q4WIVpbZ8xacfY/AgeuAk45H9jn8Px1P/ktSaSenc63NGoAbv4l8MuvyUw95kTqfTtlWOlX3wB+8WVg3NS5Z6eBZ+6Vt+YXHpagHvuc4WMjRs45NzSbqNS/Pnyz+GoWAvP5MAcx2QV9LEIyrIXD9r13hzGDiRMqDyE3S4hnOxJ4IIQWO7ltVZKhj494p6tJVEm6lB27ho8jlZNHGbu2BEhT5lB2kMTbfsEWA3sc+kn70I8DQQNmwl6bKvdJyuYjGTaRu3sjHD4X+P6dWSfgLGH2VcaGk3c
fCgGHtkuNqC/ApWY1d03UC+cAota50lH3bvcOhtFp7xRr5K4tIihXf0B+HzD5y9XWAe/8sAT7LDtAhN/1lwOnf8SUxFYz5gS/5dfWOob6gBcfkSg7AHj9BcMvdt3jwG2/s65/7/XAx75uzP5z9bdEjH7oy5KqScct+lIXNn07gftz6ZVOPMf/WKhiHzLTmRwXn6TGVmvex6E+Oc6yMULPIGNpQ7jN8jb0e9lQ7dwdrVwxPWx8h9tClmFmdCj/BcYJfSYa1+TNSVMIi0zA4x30XOg+2l716b87LXJLlxWGiTHJdGEP+FR5Duj9gMpQu2fQjKK10vE4+/hIBiEugRU2jZhreW5lpxTKt53InVvk2ZNYgG84aJEsJo43u8qFW6RgmzjK9LJIQjPE165thkhYegDwqf8yVjPn4upaAdQ4iFV9dhwAOOhNElkOAMtMaYpuvNKY3s8rYvDJO61TwW14wfi8c0v++gCw8UXjsy7KnnvQY0TJPsytWYMgPNOdxMTV/wlccalYY/ttQlJndNgq3gptkRzYEy0ptmNTwgxtK7Zf0/ItULu3OycZjzJ8GGVqRnsbdm8PN+2lUvm2uuz4WZB73lBzV8irN0Yhmbe9y2fnH3wswjHjOrKdkhePLettwTsKhQQ5XOZ13ayr+nMgqpDctsHfghv0pS7Qvnq8UDitE9ZXsgR9LCkki0qQqG3Voe1Sw95YDyGpAZif83N8+UlDsLTOAeobjfUsQnIvhzrgnmZo6f5Gzi5NA7a9Lp+dZpDQ2fC8WCF1zJ/rXKZj1NMSmZkYs7XV5Qmkd6h6egjAYQaZCDh1eNlpSbM0OQ5ceak1+a/d6mfurKMm7A2zeZCk2EoWSZenbRxuYds25qf3UMqHZ69Pk+snaRGiJ092m/YyCirHzG8Wn/GRcEnRo4oU78ID/VwQITkxlnsZcFOSMAKtzP1MGB9J87I8q6DPSTdHmE9OiBtQFGE/ohg85llFDC/HbvsQdGi7TKCQLEuKEGwT+u3JdomZi8kbUtGA5QdJXdtek4cwADS05Lfl7ReIpfHwk9zr1iN2lx1g/FaTAT74JeO3nTn/OvuMQGd8DNjvaOdyN60zhqdGXXLa6cLP3HGMj1r3383XUP9stgSafTaj4tTJeVmgvKb2KrRF0ozSlGMBLIeq9GxWFybTMQ236cFDW1+Lp7yi4fTyVIAHqtf5imqR0ly/OJddCCH5m7XAz75o9BtaVl7OZ0RjQMOEynDva88CP/gnydM7s6rffZKyPluSTG8GQOkNNkzXND3l3ocGSiCvQAkK0EBCcu3atTj66KPR3NyMzs5OvPvd78a6deuSatvsJ5EIrqSCbWIww9t312kKw/omYNHe8l23/OkR1LofW/e+wMHHA6e+3zsR+oVfAt6zxiok9XrnLJDP+pC1ObXOSecA+xyWP2d6Q7OkHZocBzY+L7/ZhxTbOuSv3mGbh+rGR2E5P+YHisVHMtf5mjum634Yo5O1ln/tmXMw6szJJdz3FGxRfSQj3AMq4lq1ePMDb2CPu5USkPMYZt7cKIx6DF337ghupStW9GjM7meOOCVG93JdiXwsAm4fJNhGuQk2saKPiDxzn/H3hp8Bf/iOfA8cbKPw4nT95XKc7/sT8Ncr5K9KknEnF6ekcCraM7hHsS27tqqlZ4vFIlnmQvLuu+/GmjVr8NBDD+HWW2/F5OQkTj31VAwPx+SfU2kEGW7x9C00rxe+OYkLSbvzud1aqd/Qex9q/V2fQegDX5CpEo89Ta1dzXOA5Qc6L9NFkh5NrXcC7fMldVAqLX/NbWmdJ1HhAPDorfLXLiRbcz6eI4MiFM2dy3C/dV03Iall5fuQbf1dtjQ8frz2LHD77/IFqNODU/fBNOddfMt75e/ON2R4/yeXAK8951+WCqODVr9TP3ZtlYCnmblylRWi2mrmyO3+Xf4+gsVOe6STzeZfJyokNYtIGJJ4LtoDjzytjl7HIsp5dnhh82tLHJj7nIFdUt/
zD8n3GReZAAfdPrStf58Yk39aNr+PWfeYWCafe0ChgkIKI59jPzIY7oU9L02TixXSc4YgJxzWKUGLZKCo7Ztvvtny/aqrrkJnZycef/xxvOUtb4m1YRWBY2deJEsB4B1hHdvN7uFsrN9k+x8D3P1Ho4PXfRrnLnSfkzQo7XYhmRuizph8HquqgdM+DHz/YvleXQsctAp4/Dbx5dK0fEvR3C7gjfUiEm/9rVgwdXp3wLL/bgnR9eNg99vs6wEWLPXfNy0rwuj6y+X7dBY46mTDCuv0INuR8xU98Digc4lEaevHqLdHLKIAcOMVwEXfdmx2IPze3qengb/9WqzTh5wgQ3XZabFKrzwy/jlo7WKiTOa4DW3dUnmJTSR5soO1pxBdXtihbaWyA26fdOoos7VcDwb0nVbSoRzrAutn+8uW2/38+ovG9LVOpFLAcB9ww8+BFQcBBxzn3Ia4XHtGBoH6ZtuPpvryXkBC1uN6GF2ef0N9EiCqlBKozIWknf5+eROeM8c9FH18fBzj40anPDAQwlF6thKoQ1G9eCJcZFVeqXoSiDBzG9KobxLfxyfukO8N9hvfUki4dumiqndHzsk71xFmbMEzNbXG58lxsUqm0/IG+twDhpXxredK4u6j3w68+rQMFevphHSmJoH+PUBtJr899hkwhgeMt9xMgzwIVDrT7DRw5VetUd7P3S///ukHYmm1D9tqmlgcAYmQX3GQsax5jjWPZN7bekKWrVefEQvki48A+x1lCKb1T4uQ9ModaSbKQ9tz2xLpzMtNSDr6BxZASXoObcd4DSvlQEx4f0dMFmo9aM7uAx7ovDpYVu0WezchuUchSPCqrwObXgJefkL80p3uO69ZfYIwNWm8NM+UA49r3aMO5UPoYpE019fbIy/Jns+6wBUXjNDBNtlsFhdffDGOP/54HHTQQa7rrV27Fq2trTP/uru7w1ZZ/szrkum9OhbLd6cOzO26LcRbiGfOxyQqdLFIAhLsstchItD8ighzbBqaRTRqmlglx12EJGBY5vY5TI6RnlPx1t8agq17JXDqBfJWqYtUJ9ycye2d4nC/kfhcnx5MRUgO9LqnCpqJaLTVtXubPBiqa/LzCx5wjPV7Yy7waWJMnPfH7UM6MWFO8fKDzxqfA0evJ/TQDhygEBG34bGwOQmVhrbjPnaa1T2jkIMviab/8So7gfoAca35/f8YL7IWi6RJSI4MiHAyT4UZ9JqxD207YbZ41phelM33q1MZYyMiInV+9C+Fd7sYHQK2vhp+ulC/lyPzbk+Muh9Lp1EQp36m9HRkeCG5Zs0aPPfcc7jmmms817vkkkvQ398/82/zZp8ZPmYz9U2SrqauQb6H9ZFUxSsQxQnPxLQFDvCvawTO+iRw+Ft9VoxgKdUF/RsvA+O5IWonIXnOZ4DVHzSmXZy/LH8dXWABMrOOG49Y3UMwMpTr+B06F31ouyuXxHzLesmVBkgHfM1/A9/+B+Cua+XNf/PLztNK6uhWBM3mq6oHDi070GqBTaWAVWdYy0jnrqkHbhDn/f/7frAHo57iw4vpKffgkQGP/XMiMbES8rqLe1gzrEWyZ7N/zsYobX39RYV8n7nyCyEcPIe2E6pfm/nP9nvEa2BkELj3Opmd6+Zf5UYuzMOztuP+wA3W7/ddHywXp+6z7YX+It6xGPjIpcApfyffd22Va+HJO4HL/xXYusG63esvWb9PjKkl1o+T3h1yDQb1QUdK+jPfTAqmYzc8YIjrvGe6OTDVyxJaekoylDq46KKLcMMNN+DOO+/E4sWLPdfNZDJoaWmx/CM5gnQocQjJhcuNz5l6yY6/YBnQ1mmIKs/643hz9/CRzFs3YXR/w03rjDd7e6ohAGhuE99B/Xiecn6+4NRfDgAZDj7vc9blyw6U/d3wvBEhnc0Cv/km8PMvAW+8Kr/pDuyAYe1bup/8HewFfvdfMrz7zD3yFg2IC8C135WITH04ffE+RvS7zp9/IlMu2i0S23NDPXm
BSan8l4v+ncDt18g0koCI28E90gnfdrX31HGaJknPr/pqfuCRmT/+QB6UTvhNEbZpnVhqZkRMUteRQrlJXcMWi2SEVDJeuVPt9QShZ7O8YFz1NVt5DuuODRcmAj7JqG0tK64WecO4Kv6GJgb75P70s4yZpy99/cWc/7CpzPVPWdd//UXr98dvB+5xub8cha+KkDT5mDe2Smo2XRj93/eBO/8g5/qFh6zbbcuJsCX7Gb+98qR3XV5tjZvxUe/5zsPkhNVfkt2ef9NTYjBwyqQhGwavM2ECCUlN03DRRRfhuuuuwx133IHly5f7b0TcCe0jqRi1bR+qNk8Dl06Lv19NrQglsxDyKzcKyrtcCCG5TP6+8hQwlLu59VRDXszrAj72H9bf7BHoelS4zoHHAotzw8YbcpHPIwPyEM1OA4/eIhHRl38e+M1l0pnoHVjrPOtMPU/elT99pP4w1gVeyxzgze/JX+f+PwMvPw7L8dWHzO2BTHpHZ46SB2Tub7NPlC5wn7lPxKSdqUl5OA71Aru3ikXlsdvz1wNElDolc3/XJwwh7yVCr/2uWGpuvFK+B47yzjE65P0AUSovqaFtkyhKIpWMUVG4zXSrOSDXxkzbHcrbE1MQhabJdWWu277cddusWMF6HEbLVM7z2IhcK8MOIwtBorb/+H25P+/8g3d9dteVvp2SBQEQkbJ7m9y7Bxwnvzm9eCmLNYgIV7VIZnLPkXQaOOcf89ebHJf7W7+39FzB+x9t9DObFVMK+p0ap2jyMJj9sVXvN3PbJl3aYG9/KiXt3fqa9P8DLkKy3C2Sa9aswW9+8xtcffXVaG5uxvbt27F9+3aMjirkTyJquF0krhdP3tVofGyOcT7OJC7eOCySUdq1dH+JxN75BrDhWflNH6L28nPUNHnzrnEImtGpbwL+7vNiGVy6P7DXoYb/oT70ZB6+ffUZ4O5rJYF1X49YGXTBUFsHnPheY93xEf95yKtqgIXLgMNOyl9mfohomjGLiV386sd21TuBv7/MKmbN9O0y/HvsjuwAcMuvgJ9+EXjpceM3N1/RTQ4iEpDjqKcm+u23nNcxo4sCv+uoZ7NYdFVSJAVZDsTrH+fmI5nkjC1B2mpuh/lY/uyLwGO3OZfnlatTZ8cmtSTVN14B/Pjz8kLz0mP5D3yv4zQ1Dfzy63LPDeyW+gLtu2ldP3cBL/QhXbsF0Y7u3mGe4Uu3wOuiZ26X9DtuWCy0KRml6O1xbr8+ZasXerCieaavrr3y13vxERk5+fVaEUt6f9G5xJjVbMur3nV5YmrnPdcDP/ycnM+HbgL+9BPvl0MVdOGr2o6xEXmB9lrHTN7wenk4SQYSkpdffjn6+/tx0kknYeHChTP//vd//zep9lUO7Z3yoNTnmrYTami7yjpkHen689jYya8wKoUY2s7UG8EkeseqiyUv/1LdImC28DqxYCnw//4JOPsfxTqsWzv14RD7kJ45mGbGTyglVuP9jhafUUA6G788h4ecINfM2/6f+HiaMc8D/sQdIgJTKXFxsJA75+kqEdjnfgbY94j8um75lfG5ps66bHIcWPe4CGTzcLVb52oXDfVNIobrG40hv7FhteS/gLeA2L1NxMNd14pVOgihLZIhr2uzT5s9p19SqJb90E3ADz4HbN8oIvLu/7Mu18970JyKYyPAby8Dfv1N/+HedaaXlBuvkEhgXUxOT3q7AJiHJ3/+71Lfa89612fBy9qpuM9mYWcWiGZ2bZVj+9Kj8v2kcyTIDzD6C/0FauFyYJFt+th3XGhYKXdvs+aE7euR/sjpOGuavx/rlvXyN2Ma2aqucX+e9e8UN5zJcelr5ywQMQmIGJ4Yy08qn98w78WP3ybn/Y7fAw/8RbJp3PcnY582vuDej4wOiwuPnj5JJ+gIQBC3jVQqP+DGyZe13C2SmqY5/vvQhz6UUPMqiKY2ic6tchEnsVw8ZmfeoJv6JP52wjF4x1xxHD6SKe9y/DjUlv90ZhYdhVvj+Hf
JXz1JuR/6A+KVJ2UIV2U2ktqMMWyuWxiy04ZV0Smv5n5HG5HegFhC9zva6OQHe+U0bHrJeOh37ZV/7dkP65wFwFFvl8/N7cCRp+TXPTkm1tKbrgKu+xHw4qPO+zXY53yOdZG+8ijgze8G/n6tiGEAOPJkYz2nCHb7Q+F/1pgEokNd5vRMfnM821GySIYItHjhIeC5B6PXHQeq1s4H/iIPwD98V1K4KJfnsx9bTZYps1C042Rl6usxfMzcxIIuLp0e9o/+zbttZtx2I5t1PoaOeRJN/pVOL+a7tgK/+g/xb9RpmWNkkHjpUfGNfPVp+d61QpavPNJYv2sF8I4PSp5eQHytdfxGOLwE1FCfEbC3cJl12Vl/LxkynPpIXch2LJa+p7ld3Hiy05Kp4cef97YAeln8ze4x201lPHU3cOfv5UXhjz8Qn22n8/GXn4oLj54/NyxBI/pVKD0dybm2ywb7DDFFrd++DNIBmFmwzBrcAyCv3V43hB5F7dsutdVcWXkUcNDxUlBDC9CQE5J2n0cnDn0zcMG/eUdpm2k05Qi76uuG9e3g443fDzvJmvDb3IFX1+Q/ZPQ5wfc/RoRWTQZYdbp1nVQKeOeHgQ9cIt9HBuQBZx5CeudH8js9p3O+YCnwvn8B3n8JsNfB+ctHBoH/+54MYW14Lt9nUhfok+POD/i+nJA88Djg6FOtVt8TzjLEuJOQ1IfodTRNrAqA8y2j584EgqcVimOI2s74qETh/u3XPhZXzeVzkZkcd7Zo6QFseUPbDr+ZeWO98fmO/3XPHepmnbdfD2a2bRCx8tCNLlajgHkWndAnLdDZtE4C65zEttk6ODZi5JIFJNPDr/4jf5s5C4wRlVeeAv78U+MY6ZkejjP5N+tuO3qC8OE+Y5mfoWJkUPqM5x/Mz5zQv1v2s6EF2PdI67KOxeIW8/a/y69DnwJXf+lNpaRPNWOet9tOn0MfoB9vfSpIIN8a/eRdhvDt3WEEG5p54xX562dRdLt8J8YUXDdmj5KkkCwFvHztdMzCJuxDLIpV03PblLz9mqmpzR/6zbuvPMr0mls4TtJVkv/x79fKFIy6VU5phoF0zoqsmGap2pbcXB+iam4Hzvi4DBu/6QzrNIV2i6P53J/zGVn+zg8Dp30IOPFs4B//x7BS2NFndNADefRhsJPOkQAir3QUZrqWy5Dz4n2A1R/IX+6VDqO53Zjt54WHrfvz6jMSjAPkv5gAcpyX5SLLnVLLOKXv2LXV3W/NHLSgkjjZgmJCb5XfdMxDrPr1/8gtEoV+x++N4UVzW+PSkRNjkibGLL5U0qHYhzzNEcU6+nCd47577MCW9dbvv73M+Lx7m2GlDCMkH79d7oMHbnAO3rJPfeqFij/tS49JINjAHrFymS2VfTutbh8jgzJX9U8vEeF235/zyzz6VOknuvaSl2E7eiDh4n0kFc87LjSeM/rLmDnfpEp/9+wDwC2/Bn79DevvuuDtXOxcTiol/ax9yF73pe00uV8tXGFdxyu3sePQt6ZmSX/6XuPzTluQVRwBbOOj8lKVxKQjJTi0HWlmGxKR+UukE3R6aHpRlBk3UvEbP8I2NZU2dRapiLuc2ylzHki9jjCkUu7nx55aSLc67XukBLrse7ix7O0XiOXiVJtQMw/jLVkZrG01tSJmpyZEtOhDPh2LpZOvzViH21U6rANXAU3tMnxlf8DodK0wxGUqLUJ32wbgrj9IZ7tlPXDMannY6rj5VukPnafuElHa3C7lvONCY3h62QEicDa9JNfJ1lcNS+74qDxQJ8et/kh9OyXgwM21xE4SPpLmB7tu2dN9ut54RRLid+8r7Z6ZiSOmm/KWX4u7xMtPAB/6svyWnfb3jbNHOuuWnBUHy/YbX5D2ToxZ/ecAwG0uakCGWp0skNlpuYZ++XX5/sBfgAOOlc/LDwTes0bcNR6/3T0xP2CdQeS238nfukZDQO7eFkAE+Kz3zL2GZVxnzw7jJdFuERsZMFxXbvm1ddn
f/SuAlBGYAkjE8zrTvdPQLD7FekqeQ06wlqG775itbSr3up44fHxURLg9i4JfcGddo7Pon2N6WZ7XZV3Wu8N0rSugae55ak/5O5mNzDzUDeS/RJoFalW1z3XgdT/3uS8e6i+OQSghaJEsJrV10pl4znGdw3zxWKZZ8jqFUUVW0oRsnPlYOBWxYKm7VU61HemQbUt7vJvV1uVbbvc+ND9aGpDh7rP/UVIzxUl9k/x99RkRjZkGmY6yvlGuQ/NLjWqHtXQ/eQAsd5nh6qDjjTxxh7/VKoAfvknExx9/YDz43vIed0Gnu0uMj8qw+XU/lGHuG68wHsj7HA6c82nDp1SPDO3fDfzon4HvXCTRnIA8KKprRaAMeAgPO0lEbVsskkMyxG3mD98BfnKJ7HfU6FM7unDJs/RqcLw/pqck3dPVLhH0Kw4WUadb4R+7zRClmubc/hcekuFfIJck2sEydPV/Wn0ne3uA+/8in/Vro61D/totkgO75brXNOeyj32HvJAAIij3bIeSUPdb5dn7838z5wjUz7vb/aPzjgvF0rhgqbXfr2uyrpdpgGffqlsGR4cMq6DKvW4eeTH7LupC0t632XHLuWjuq+2uOz2bxd9aNWn9tMkloK4xlxg9BbR2iPvPQW/K38Z+zZtfpnWrdRiyHtHuvTucZ7JRovQe6hSSZYkmnWV9U/7cnPZrzEvYxIXeCTW3y1/X+UL9fEYcmLfIvT75kr+8qto/onqmSRpQ7XSMQt6sXhatVAq44BKrD6M9R6MfetLwFQ7+iSo05B4663NDkPscJteRfkz9jq0bqRTw7k85v9gs2gs4/aOy/NC3yD99eNuMbkmYv9S9no5Fzi9e/buNoVA9zVJbThTv2iaX3YuP5Au56SlDyL/+kn+Cb93tIKxF0m27iTEZztTZ+Lxh3TO7RAz3i4C761qPagOkBHrlSe8hYLfdfPw255yhOg25a0q39D90I/D03fL55l8CP/2CVYy8/pL4h177XfmuXwt2K37PZuB/v408qqpzvs4whKTdIvmrbwB/+rEEWji5zux7hFg3dZ+9503Js7M5y/bmlx0sXraD1L/bmgLJKajOnCNQt4DN8Xj5PeoUw/Jqp952L7V3mm5dhxNY12iIwj/9WK55lREYc5tfesT4rO+fn5Cc2+X8uz3V2lv/n/X7S49ag4ycePlJuSfGRwzB2tgi1th/+C+xstfUynd737P5Zeu1Ynd1ePgm77rdUEnkHobS05EUkmWJpoloc/RrM5GC3Dztne43sRt1DQ7BMj60zgM6u91zMOYNbSngJBr8Or2ULZK7vsl11dwGLmWEwM9fsr7R8PPr7A4uCFd/UHwaT/9IuPbpfpJ64mb9oetE0GOQSlkT22fq5V/HItnvFQdLsE1Tm3Tund3W7fXO3O5mYCZdlT9rkJm2DmOf9HRGu7YA0NytzPpc6nf8L3DlpdZAl2zWaj1b/7QMnbolOh7sk/QxT93t/BBxSjI8PQXc80frbxtfMD6f8dH8bZ57QP7a65ialIT2v73M3YrTu0PE3LP3A3/5GXDFV6zLLRG8Lg9Cs9/ewSeIFdmMHrRmzj2q+6W9+IgcU93ncXJCArR0JsaMWXf8xInOqtONIdtWk0XSLKr18/jqs4brwPHvEh/DE882XoT1YJQbrwR2bJbtrvuBTEn6h+8Af/uNtW7zOdi1FfjFvwPX5vYnO20Imze/x8jrqlskd28zAkoaWgzfRj0ljo6eGscJe//2NpMQczp9qZQxrP76i8Cz90JJnfSZxNa6JyQYanrKGCJvdXFH0Vn9frEKfuAL1t/bbX3Qm99jDUAEjOHo3h554TKn69q1VaZrfeIOuab1FyO9H6lrsL7g6y/jLXPlpWF6yhrs5CT8wwhCzcN1IxKlpyTpI1mOBL043fKSeVFdYxNxCnWmUs6pKxYuF788++w5YcWaZWjbJhrlR+tPmXp5WJlzcum+gprJOTtTr56f0A0VH7uFyyS/5JwFamm
GzDQ0y5zf1TXhZm3o3kesXfqx8BJtYWhqMx7SH74052hfDcChrXsdavWx07dzEv4tcw0RNn8JcObfy5SPdt5rmk2j1WSRBJynHDvl76wPjoE90qbufeXa+MtPJbr7wi+JQNEtZrV1+dHxAPDY3yQa/47/BU59f/5yJ/+tP/3YKhwB42F4zGoR4Ce/T6Lg3/we8Q/cst45GObVZ4xsALdfA2x5BXjPRcZD/qm7pW2ANf2RmbuvFbG1eB/go1+D471vvleOfYcIviNOBp64XRLu69OPmq1lu7dap+ecGBM/SLvlsH8XcG/ONzTTIJG8Lz/pnUvSnN+0pV3uq+kp4KGbgePeYZ1CcHTQsPYuWJY/KrDqdLHUDuyRlDrrHrfOCb3+KRHp+r1rPjx6uVtflZe1pvacj18aOOpk4MmcVXbTS8Cjt1rT1DS2yEvD7f8rwTQP32wMu863vXSZyZiC8054d06A6h2gS79tHrEZ2OPeF09PASNDYpAwW+rGR4Dff1uuR/1a9TNWNM+RoEA7Zou73uYT3i0+0/rwr37uX3hI/EHXPSb36D6HG9czYP3sFntw4tnSTx12klgbd2yyXlu68N//mFz6Mk3Esv6ioYqWDfiMU3yul6CPJIVkOVKoPHKWOr0W+lzYZlFaXRtMADkV7XcjOYpLG1XVhpiK83iquhLoVo+wQT2ebaiS4+3kh7bPEcC91xvf3WarAcK9+O5/tAgZs9uFWzlHnSIPWz0Vh46TkLQL7r0OkZx485dKQMqrz8iQVZvp4aFbJndtEYuGk6/aISdYAxUAIyDgjVeMKOQNz1tzc778hFhaJ8atQ47m6dBUr3M9qGTOfKC23hoMoI8K6C4B+n717RTLnm7d1jFHTT97n/y94WcixA8+3vqgdeOZ3HbrHnOOjh8dMkTkGR83rIbptJFn1I0Nz1m/v/CQpGMx86cfGylmMnXAyefLv8Feubauv9y6/oJlErCRzbklpKtkf/t2Ag/eIAJgqckv13yMnK612jrx5b37/+RY2IMzAOCmK0VM7tku2RP0FzJz33T7NUZqsMYWudd1N4qdb+Qn30/l2v2ef5DvGxZLipyFy31GDsz3hq0vc+vajnyb4UqxZ4e13fpMMMesFt/j3h4RX4Acy/lLjfm8zdHmHYvyUwP5Yc5QMdNmTV5APvZ18Xu+7ofG8L85EGbzy87Tqeocs9r594ZmyVELGC86ZlcHPeituV2s3EN9cp6DCsmsBqQCPFsKMZtbQlBIliVFEJJedQa5rufMlw6+qc1/w+Y5zuuoiC/7zeb23WyR9GuPV0S2jmrUr1u71Dd0X9TUKqLKae7g1rlWa5KXkAyjJI94m0zPaJ6ezW0fa2qB966Rtvzsi4bwrW825g52K8M8l/Cp7xcxediJ1nV0i8TYsDUi3F6m3RXj5l8Ci/e2iqidW6xCtHeH5O0DRGDqQ4Xm4XN9zmMvtKwRKXzOZ+TBeNNV8j3T4Oz6MH+p3EN//AFw8Q+sItvpnO/YJP+CzK+ss/llq4AGDOHXsdiaacAJ+/C6XbTbRSRgFSO1phEOPUr/U/8plrJdW0V4tcyR426uqrXDsJRtfN45oA1wv/4X7S1/nUQkYE2Q/pN/Az7+DWnbuOnlbfc2cXUAjFEhe1SymRW2YJtVp8tvC5b593l6xPmyA2wLXPqrfY8E3nhV/FZ7Nluv06tzx9d8vegTF8zrEoGmC0kzLXPVhWTrXBkhOPA4h4W5Ntc3GeJtz3YRe25W6YOPF/Fvzrepkg1Ff5Ewp3vSRz5a5koZQ33i4pA35aRf/5jU0HbpQR/JciTQrC+xVhy9iKpqeehm6v2b5+Zzkze0rYBZLFTVWJ3R9ePpV5SSgA14SyVhkQTk+Dq9QadS1odZ3EPb6SqJArc8uH0ObKZehqh0apyyGHiUUd8k6ZI6FlstNzW1xgPcHNhx5ifEKvHunOUnb2pIyBCw2Urx/IMiDPQHj1kg6Ynd+3cZ1jzAWdTZGR8zrr+
6Rmug0Zvf7Xx9d5l8l3Vr7vS0BBs4WRDtHHky8NGvS1L5k9/nva7uizk5YQxL60FNduHuhN2txmuGGid/6FqHHLv1TVLusgPkOnPars7mYuOUWHq/o92vf7v/rh+6a8KEyTVmegrYk3Or0P03G1udA81O/0j+saquEUGrkqf2w18R30O93T4j20ilgGNOlc9bNxjHJzvtHXC2cLm4Lbzr49bfu1YEeyl+70XAm94FvO28/GXm55s5cPPmq4x70r7dMau9Rbob+rnYtcUwKOguMK1zjREBfcagwPg8MycnjOAz/aWjDKGQLEcK+ZbT1GoESJix5PkLKVjTVT4WMRfyrFMOy+zRx7UZ8anR02foG6mmlQDyoyPjIJFhilyZbpHr5kj4vOMfoD2qbVdZ7+BcWo7aOvcAAT+qa/N9dHVhuTUnfk54N7D3YTLbhu50X1MLnPdZm+Ut5ZyU+oSz8nNc6ulo9DQ0Ol7R0Dp6HbV1IhzMgQdtHXJ+zMN/qbQEt+jow+KvPCnBBiosXCEPyc5uY65mwLCqzl0oYhuQaeK2bQR++TWZl3x81LA6tbtY+cy86QxJD2V+UQDENaF1rvEgn7vQSL1jRvn+sK1n9sXUh8TteAnhdNoaQNTaAXzoK+4Bg3rqqXGbO0lPrt7GtlwzU87D1EFzCdupb5IXKakEM8fDaa5mneb2XHCPBjycC/hxmjHKjD7V4d6HGf3LKefLzFhBaJ8vs+44DW2bh6/NonvD88ZLzJwF1v5NNSjLjv5iuHubzBf/1N3Gy1jrPCMDxLrHrNcUoNZVmvuyPdslCEt/8dSykknghYdFyJsj4e2MjcjLyhN3ek8bWSQ4tF2OFNJHon2+WGy8hoqjNGfOfHnrdOroUynnffWbZcfrpxqbY7c5qrOpTW5Yp84NEEtmc7v7TBpwaa8XQYNtTFWFWgaIr93T98iKLV5+P74mWigHYfnRtZeIuYYW5zKVZhpy+G3uQvF11F8Y9Gkq7eUt2ltSFOkRzEN9zudmwVLxwzRHX+uWBPtwvFdCbB3dwjLjT5qWQKI920UwtrRbg2rSaRGcJ50jFsgdm8QX0+wrdvSpxlzRtXVWX9nqWkNAA5LRYclKyd94+EmSy7ChRVw0Fi4XH9Zbf2OIRz3/JmBcO+ZAKDuNLcDZn5b72zxUevL5uRRBae9zq+pnai/i8JOM4dfd24z7fu9DjTmm/Xze9joUuCfnA7h4L+mrLvi8WK/uvBYY3GP0B8/eB6w8wmqRBIy8mM2mF7a5C61D5ul0dCHphp4vs67BOdDr4OOlLfddL+fT6cX+8LdK0Mt+RxnHLJUCPvgl+d38AlZVJYKrodll5hk7Pve1W/9Y3yQWzYf+CrzjQ8bIzsnvE99Uu7B1c0syC9XJccOHOJUS16qmNqmrf7f0mR22dHR+3V8qZazz1yvkPtj0ktzjNRmg3/Sy6Za2a+cb1iwMmbr8Oc2LDIVkOdE+XzotL8frJHAUczEaswMLUfsGDqJWKSAHVp+e+iYRCm4J4lMAWua5C8maGoe2+ZCkRdKN7n2Bcz8DTE3l5+kL8oKgqCOt58fDz1T3S3NcrnKcTJYYnaX750RzDvv+mmnrkCHGv15hTF8JWB+K85eI2NIFAiBRnmMj1sheIJhF0jzEuvehAA41/C0t5yR33+lTyW1+WfzC9LpO+7CMIuhCct8jjOHpVFpyeZpfplIpGWbcvglYsMRq5VmwVISk21SJupVNxS94zgIRS/27xNLX5CBYnAiTmQCQa/wDX5TZlibGjPO18ihDSPq5dZiXV+X6BD29VNde4jO77AAjjdG13xNLqxk9yb5+rADgTadLLssVB0uw1+RELpdrqD1Vo7beWUjufwxw62/l8+svWV8ydE58r/PohpPfaecSOWfTU2pCUmWf37NGAm7M1DfK0PxRJ1vbduhb5BzbM4Skq5yH7N1Sw81blLuuq6TMh26UF7dzPyMvJi88LKmjnKY31rL5z8eezVZjiVP
GCfPMVmaevsc6cqYHapYQFJLlRFOregcMJNsxhU1c7VxY+NXzREfKYaUAdXrOe+5TZkNL8EMRWpCrWOhc1tE0oHs/OKrAhmZgqFeG8HznnFUd2ra1SWVu4pBV5dG9ry24yEc8tDhYht78HkmI3Ngq6VwOehPw5J3GS8jubZJf0Lz+vdepWSR1IeklcM3nUbfQLFgiFpPBPYaIrKqWlCa7TA+spfvLw7ImI35kTa1Slz6dJCAPWbPfpY49yMZOkHnpq6rFgtXXI36gqvMZK+eydWhDxyIZutWtfy1zJDho9DwRGn6TFpj7Aru1qKFZ0h7Zr1W3OdvNrkHNc4BPXuZw7wftB9PuViz7OXGz7FXXSEqem64SP+DnH8xfR3VyB7286pr8WWxU7ns3lh8o4tAcqKVbEtNVsm9moVXXkH9s3IRk2zzguHfKsLk+AxYAnP+vxufDThQhuXmdiPGr/1Osl6ND8hJm5pZfyzGsqpEXwnd9TH43Z8qw09Aix8vJjxfIfyYt3te9rCJBH0kSjrBTCDoR2CpnWt/eOc34SKoXASD/DdZ1O5eC2zpzwz6lYJH0QdPgnl8uLQ/fQFNM+hD4pSPs0HYqv/hMPXDmJ2VZXaO/Nd8pyKJ1rgQynH2RHJ9MPXDhv1sDDszWBt1iYBYVrz0nkd523sj5fDm+IDq8FOmCIJW2TjXZOhf4xDeBji7rnMeplATtHHdabtuUemaB5Qe5B3ocf6ZDO32oqRU/Pq/gEfOyU/4OOGiVWtlumIXoAcfKcTvsRAm08cN8zbn5UqdSht8gYJxje/S03YfP7wVSZdpcPVdnXpscfvNyoWlNYoTL7grlVr/itWMOvDz1/dbjU12bf1/XZgyfX8C7/3jTGTKHuR54tvJIYKHp2DY0G64nQ31GbstNthEIwBDi05MifHdvE4uzfp+/9yLJW2rOnbk4NxLjZpE0+2rXZPwTvxcBWiRnNV43aYi3Q3MSbHPHEFkMRXH4c1vfT7zYfnOK3A2CPid24NlgEniX821DoaP+bUPbfjhaJCO05bjTJIilts7d/1WnqsrwP9RpaDa9oOT+1mTE4mC3hnz4UrE+pVK56QzvlYeIPlR+4tkSNZ2dBn7/HSNQ5+hT3dvkNLQNWIcW9ztGHqaplPUha59iNDXznz/1TeJuYH9gfvBL1gjZoOemqtp9nuH2+Ya19JATnNdxwq0JK48U6zHgPM+yKjOBLA689dx8S95xp4no0N0CgrojKQWX1YilTcW6m/J4ebDPLOPXLiXLol1I+uzP3IUiutw4cJXMALRo7/zz6Fa22ZKq4up06FvEFaQmg7z2t86ToXqzW5P95cLpZeOVp+TFcnpSRjSW7i8vGc3txhSj3ftKXlr71Iw6ug92XSNw1ieLY3zwgUJy1pHgRdaeywHZ3BZv5HgUHelqkQx4HMIGvdgJrHuLZZFUqD+kd0D+egEtknH6SAKSqDrI7E5HvE2G0+65TgSTOTrZLuoaWqxDUm3z5Pc5C+TBeNvvrGXf/X8S4NDbY4hIQHzV7MmVZ96JHCySgLVdK48yRGMKImiHBxz82FLq1l1NE4vgppdkuzM+KlYTe5qVoNdwTa1zRDwgPqq3/laGGwPh0oaFy0W4V1WL/55bvW783b8CG180ksE7UVsns7rszonGjsXA/GUynelffyE+oVFGXTxx6YdVh7YBESiL9hbRO+7gR2kvV6Xvtzffb/8bmuXlwi0PZXsn8LH/cJ45zelYaVq+S42XK4CO7jNpb2/rPPEXNt/r9uNgDrabv1SGyje/bPQ9cxca5e53lMwCVdckqZOA3Aw7wzLbztP3AseuFvcJPYr8jI8Z1ssSg0KSqFNdYzxE/DqcIATuZDWXzwgpbqDmB6SUKqhAQ9tK27msk0T6qHld7kEZYfJ+epXhuo7L73raDpUHiU77fHn792tHo0lIvvsfDIvhgqXuFpY//9Rq5dOHqvQZa4zKbH9htUiaI0j3PsQkFlLy4HVyT7A
P/7sJg1Qa0KZFoGazIkj1eaCjkkqJsHPyWZu7EHjfP8dTj16XPitLmGtvwTK1/X7nh4AH/wrsf6y8hFRViY/pR74avE7Aua1O58orT6QZLyGZSknGBC0L/PSL7pYxIHf9hegH3UZeUgrr6Lil+HEVt7YX2IXLgK2vedfhhj5j1q1XG7/Z69SDixpagFMvAH79TalPd0GYY7ofa+uAD/67nBdNMxK0/2at+D0DwAM3SJ8w1CdD4R2L4N7JFRcKSRKOUhnarq1ztngFij72SUGioyJCAg9t29bP1MvQipvjtQp+CdYTSWiveA5VimydJ8Ex5iE7VeHstJouWNJpYDpA3lBHbA+7OlPUp3lmkuPPBJ5/yLkI+1CxPv9wc7t1+kEdi0XS9MLTOk9S69Q3OsxX7IVNmGoOQ6Nmy755CkjH4kJc89W13omvjZUR20xeSVr/OxZLSpe4sJ/z9k7p63Zvkyh0Tx/KVL4oUxlxSaWBD35RLG+NrWJNPeEsY/mcBWoJ7+3td/pubmuQNroW4XON6C8vymXa2jszoYWpHrtLwXAuwKihWV6sdReOh2+W3+3uUzP+zil5AXnoRkNE6ug5Iw88LvnI/ggw2IaEI85OOUxZC5dL5233eQuTJNvNGmkvKwmLpH39xtbk0zuZBXFc59GrGMsDQqG+mloZ7rEEhahaJJ2GtnMdfpDoU9c6bOXPW+i8XlNbfpoO+1SMgAxzd+3lXL6Tm4ZdQCzdTwKEVL0H7LlZ3c5/oOsixDWvKhqUi45goQfUglsSx+W8pCDiRB8Raplj8tlUHNpW9cWub5K0RLo1dd8jjGV+/sVeqFxPod2LUg6HwcXlKUiZZo47PX+VsWFrdLr+WZ9XvdlmQe3wmC3JLTn+tg3yd6bvKE0lSSE527APWyVWT5xlByxLg3SqTpHWTg9fP4OGagemZJFUK8px/foma2BHWPwsjoEtfREJM7SdSlmFe5RmTicoJI89TZzlT/m7/HXt08gdszp/neGB/N/yMJ1PV8GjqiTtm8XwCAhjkVSuV/V6ibBOmGwLSZBy+WK+nauqxRLtJ3zDWCTjJIxF0isgyLcuhyH/KCnq7O1tagHO+1z+et+7WFInvfqMkaVBH4I3D8XPW5SfY9SMeSpIM3rqqhKM1DbDoe3ZRnWtCBLVFB+hiXCTehUVZJlyoT5RwPbZbixlmIcyErZIBhKRXutp3utYhKZHOYF2xauckG4Qqu30W8c8tB0Ve/vrGoBzL3Zet75JAj0ev12+73O4zKtr5vC32ivIL8c8NVuQ4TknVC2SSRPkhSIul94k9lV1Bhc3n1A3VNvq9tKYd45j2He3NtU1+I+iVLkIX3ORQe7Pmowp8l/hGgn8smP7rkFmS9Ky8hzo3SEz6EyMAS8+Iv/0dFzzcz6RZgvuqe/33z89321NRizCb7xiLNNnPirBiG2AQnL2kUqFm7w+TD0zRO3pgypJj/pmLJIB6nQbsrG/6CbhIwkYPoFusyw41uOxLEgwjWd74xr2juGlI0qwTVxD23YRpsKxp0li8oPelJvT+3Myy8o+h8lyz3s1V5fFgqwQtBAk3D6OU+z6IuZWfYjj6F+owjpuxy7CC4aq+OnslojcoV612XqiHh/Ly1vC1siGlvyk2eb2ZxrEj9criAcIJiTbO2W2GADOPpIarNd5xKHt4X4JhNPdVZaslP76hp8b62zJBfLowTXm3LDzl/j3P+deDNz8KxndeP5BQ0g2tpqm0KSQJGWFzwVrydEVsaNKZJg8wNC2qu9PSwLDCym4RyPGUbbn8gJ1Skk/yIyKnPcpLotkVTUCd+R1DdYgjEV7yT8V9KrMVkilIUKfQpVEp8J+LtpLXloCC/QAQjJWX2zb9/pcypnmtvyZWEIX6kJ1jamekNM+BiEJq3Mgf1rTb23zrFZ11/ID3J92v2unbS2jchGNHU7TndqTzk9PSrYHPQ3XMe+QmXCOeJskSG+d63yd6Vbt7n0l2AkAtm8wlp9
8vuHKUJo6kkKycnG5IlvnyduXn7hJpYy0GIX2v/HCsVNz6ETMw0yu0yKayupaoTasmFi+uABoPkPbsaFYvuX6CNuhR9gXL4tkfZNESqtQVe3fjHmLrNMPxkHLHDmncfjP2oc5w5SXSslsKGEtvCkEEA2qglNlHdtKrXODW1Mt5SlmewhXeMTNk3BfCFCOsoXctCzIc8R+/TS3iWibyGU80GC9PqcCuBWoUlsnQU/mWa0OO9HYj6ZW4J0fls+dS/zPg3kkqbnd+HzgcaZ0e6WpJEtIAZCC0N4p/ipusxm0zJGIaBXRVFMbrSPWScIi6Vfm5ITxWaUDU/ZNK+CN7mZJjStPZNKWDFXSVdEOq5dFsjHA3PUqFsm6Bnd/sMDk6tLTv2Tq1SySfm2M6mrQsdiYySkUNotkU7u7W4dy81RcH2zrRL289Skng6DqShKlbSlbPXG5EqTgkhDcbWVze/xXC22R1K3i821R0eZ9ng5oBVY9Xud82jTsDPcsDl5l6r+b++3F+0ii+K4V1kCb0tSRFJIVR1Mb0LU8YN65IuJ083kJJUeDpMP6Lbk3Pq8ZT0JZa4KuH7Jn0DSPadsUhWQUvWl/oITZj/pG61zIdhpa5G/rXKgfWI/rxcmCFqTZVdVq+xlVBIQpR3U7vf01GTketW7W+ASS1pvbYD6Oza05X1GfIdLoFbt/D7O7KbubQJg2uC1Kcr8jlNOxON/Fx+meyNsXRaGvnPTefO6cTp7tt6Sed/VN1qk8nVJ8AYovEKY21zcBH/2a5Il1XLe0oJAkpYkuIoKiKmjqc1NThcnZ6JR2yGhA8PLC4mZJ1fI+uBDg6WkW3O2d3nMP5+HW6ae805jMmS+O642taue1rtH78DsOxQY4XypCMrLlJ+z2Abebv0Suf99rKIa6nLZ3tIo6ZVewfVe2iDlV69HuvKkkFZizIPj5dlq9tk6urSDW8SCVxOWjrL8AtM5VEHyq1lXbwlD5PD2CIPX+I8ncvOaXYafZpPxwu/wz9fGM+BUA+kiS0qKhWYa6XC0lfgQUBmHKmrdIhkr0WQcsmxTxjbG6RiJCG3LDhJGNSqZ9qW8yZttJV8U0VOZQhjmKOZUy/Fe9rNANzeIH2dhijay3p1yJ6stbsKChmQoT2M7u+uG2bkCrv5kFS4Htr3tvr5wSyrassUXcb3ZtsyXW92mTXz11DeqpfAB5sdLTtTgtc52ZyqEN7Z1ynZvLijXIyMkSG7Fz8HOPsC9XHlBQXNG8mlPfoP9Ukwn3khDk3lu0t8yJ3diae6aM+27iWNfUhPdyoLjPFw8oJEnpkYl5BoWwPoOufj2p4rgGzF1omr/ZYZ/mL5XOyDV4KAJROjC3Tj8v6W+bh6+cR/1zzX5JpvWqa2xC0mloO8B+pVV9zYrQ2Ycd+Q91Xj22qa5RuP5Uj6ON+iYZqUilrHOcAzKLyOiwPMirqiW/X5B2AyFdWRy2aZvnLiQdh4FDHg9VnHxDww7lO/6utHGIChVRGdrWaZkLDOyOt/5USqZE9VvHTDot+ShVRt5KUzta4NA2KX06u42UCgAS9d+yENODJS7Msx84HYJ02mHu8ZiIIxdkfqEedYQtMi1DTQuX51sQY7FIFrBXd63Kx5fQvDjUPqfCv3ypbJYnbLz8Bk3LvKLWa2qBxXvLcL2bkA1Sr15fKIIOd9utxCHK8CJv5CXua9jnegy6adANg1yrqjPEhG2XX1M6FokFesEySRUU1FpKiyQhIQniF6Wa/qfU6eyWoa7RIbGuhLKARt1vF+uhY1+mOjypOf9sryMKMznXFHzFgtSZShfYOuBSWToFZL3OrWm7TIN3eiPfQAn1ZgkK11zKXm9Yc6p9cdp9NRW3hMY265SVKv56gX0kFUVXHNfZnAWyP+aoYr3syHNI+J0L3x98fg9AXFkq4sDv/NY1Gp/1hOW+11BpikcztEiS8sPzGRqnb1HE7esb/ddxI1MvvmDzumQ4pmORbYUCd56BHxwuWJqtIPS
iYPfDU32Iu5aXUls/6X7fURC7rJt3DSo2zv5wtrgcRN3BkJZoz9V8hKmKZTZTZ/XRTavYWfz8A1XWT+gx3NgCdC52mC63wMIkaHR7eyc8N8jkgh11P/pGh+FhL3GpNAFFyGPUMieX/svvxTsAKdcvJQOFJJllONxood9YI960ceQTrKqW4Rg364g+bOUZSR4S17mug1ph4mhMCCxW1Bi6urRqAuooO6xwnIMI4nQVsHCZ+2qqIi7OSQdUBfnMunHUqdh+8zBwdQITEDieuqBiNCJeRSv7V/u0L10lAq+hOfj88E1twFyXNDqAvFSnUpI5omOxdwo3J+Z1ScJvJwEalXSVpNdzy9McihhFaUJQSJLZRYn6kMSOrvE6u8ViOcchEW6cRsugx9U+hZmOVzqLoA+ctg5JY+OGyuweUWbTsDN/qXpZZsyzWKjodb+pOu37ajmueUrSqQDkXTzKM6UoOUna9jOGe9avfek08tvmJ4Zs16PTbF+Bmx6zaHQUfhGGSlvniZ+pHyrX6ZwFpkC4IC9FfnXntklXyQt00DKqqqXviG3iAJe2GT/GWUGMZcUHhSQpLZQ6hYAKKWxHXQ6+K9U1YrHMG76KA5fj7Ljbth/tEdLzl4jlwCnPWnunRC8GDW5obvcepnKySDa1W9dRTTJuL89OVbV/yionq3FtXb4Pmx+NLfk5B4MOp3ptl4ItEh7ApGlmEC/Br6gjC55KKZ1W05FZc/oo0zXcOs/lPMVpnQ9h9e9YlH/f+G7q56aStNW9EIR4izb7oUfdPfMLauTzob5qsWCwDSkN2jokZUbQh6odcyfY1gmMDlotPnHiYLixoklU3tSkCJiJsRjnYVbpKCOaJKNsbhcKtXXyz2m4vKkNcMn648o8u8+oUxscrFTtHcBQr/X36lpgUiH3m6f4Cfnwrcl4PLh9hKtSPSGfPPVN1ryK5uMTR9L12joJPPANaAmzL04WScUXrbTNr9a3qqDHwsMiGWZXq6rFUqqa/9KvbPuymlrxSVTyKwxZZzwbWAmV3iiO+p0skkHq9VuhNJUkhSQpDZrb4xF8lqnX2qLNCVxb75KPLgBmy1HZDbtH8JE0d6SR8s85UNegFsik6iNZk1EUkl6WPYX1nH7WPGbl8Do+eQ+qOK3uDr81tgCDvdao07DogU95AWRqTXEt02sbVQtobZ281FbXqlWenVYrd6YdpjJb53nMZhPkfMYgZi3LbMudRhFiE4Yx9w1ByBPHcbhY5P4mNrRdmlBIkvIj0enbTLTOBarS7kmy/UySSQZWFyJoO0gd9sPu5nsYh5hWbZdZPKQ96nULqqhrlOHqgT258mKwxNnxDATzqK+pBejrcV7V/lnZv9GjbS1z5cXKN6gr7gvTTSAGPBdBfCT1F1qLSHTZr7pGEQ3KgtJUZ00mIZcU9Sb44no6Y+pnA2tgVZcIheuwrsH6ElnfJJ+DWBPdMJfhG+xpOwhV1fKCmXV4ySzGRBgK0EeSzC7itPql0/IAjWummFhfTAusJIO2PY7O2BVlJWn66LEDbmleWufGvB9OU7nZHxY+lrWZZWmP+YMjWrOcAgbSaZl60y84KUxC8thxGtpO6FFXVa0WnKJjblpem9yuV5+DGuewcVKz7MRVZNCAPC9SKat/aV29+HIvXBbhGOhuCgHOXx4OgW4di8R67RTwVQLQIklmB6mUvPkFSV6eNI5z3M5WbPtmic4u0n6rDm27WoVSAR5cCvvoaJkI4rAWpv4Qx14/Vq1zc/69bsOvYYn4kPYrwmlZqBcCU0Gx5a41rRvbS0rA4xlodbeAu7iG0wOWo3o/hnrPTkX3BZ0pynztBGyMlmuLmbrGeNxKEoJCkpQhDjdm114yvKQyG0VcuI1st3XIjBJ+aVqSxq8D83sYuOaR9KGx1Uga7N0A9TLDoDqk6/ZwSkGGu1rmyLBumHrNmIeq2ucDg3vyrYoqvpZ+yz03U/FvzQnJqmpJaB0IxZltVAl1iThsVFUjojh
y2THiKSSTbFyA66qEJo0BEGCSB8WGh7E8zl0I7N7mUl7w4vK20TRbu4p9ofoT2N5/zz334F3vehe6urqQSqVw/fXXJ9AsQgKSThdWRALw9LFasDTf0hXnkFGYJOv2HI4dQUWCIvYEwcXqB1Utkq4P9NwwX+s8/weYigA0n7OmVpkP3CuvZhBUHzwq5yKpYeBAjQiwrn1o2en4V1UjcB7JJK5bsyuD/TjHph0CbNw8R/qqmU3zVE2UhjiX6zvdqgs1tcGTjyeBcsYEVRI65gUkcI8xPDyMQw89FD/84Q+TaA8h/pSow3HJYRZILXOtwrEmo+AGkHCHFvZhqSqiI1skQzbQTbR6RWjPrGMpyK8ixQYFJFH/VsTvg2c/f07HX2WWmjyi+Lm5YL4GkvIV9XuhNtdbXR0ggMWtvqD9sYJidjrcTe0F8K8tAez9WxnscuC767TTTsNpp52WRFsI8aazW/KltRZ5yFinrhEYGQgQeVngHqGxBZgYlWHmMNOB+Y1sZ+rlfNjn0s0b8i+Wj2Ta+bOddFqmTdu1NUplpo8KFklXAgQ4xfJQdWhTFGER+7uH4suAGSeLaqEToJsxz9/tFIk7Q0iTpN3y3uaXi9erngDWsYXLRRgHDYBxcwlS2S52QlxfBadU22WQuI/k+Pg4xseNHG0DAwNJV0lmK5n60gqmae+Q9DCu6YFs2PuDSHkzVXzRUjJNWSx1OHRm7Z1ijWhoRt7DKVUCwzNB0t7UN8nw9eiw+jYq9ZrxFBGOBYWr02voUGWfIg1tu5x3fe5l5fnKc4Q5Bfb21zcWTySkq6z9g4pVGgjf3vZOf4uyXUd61eV1GwdyJYpj3D7AdqpW9bgvi5IVo8mS+Gva2rVr0draOvOvu7s76SoJKQzpKhGDyh2qqZOZv8QjfUsBiKO/S1eJddjPz8+1roQ73do6o21hfBGjpgCxE3RoOxaLZG6dxtbci5gtCMppWDKpoe22jsIEoJmtj63zgLk5i2DekGHIYKYoZEvgBctL1CV1S6bTxhSiSRoDOhbLfa+S7L5U8L0OC9OMKCQuJC+55BL09/fP/Nu8eXPSVRJSmqRcvwSnEM8jN0ERuOkx94RBfCTnLwUWLFO0GvsN8/nU5fQ5MCEj5f2YM19cQ+xtq6mVh+78pdbfgqIL0thTlIQ4BmaLZFALaNLow87NPvkAAzU54P7l6Uiv7WMMtulcAiza2+pm43puQtZb1yAv6cppfCJaStscZv2JSl5AUQldvy4kPrSdyWSQycSU0JmQsiZktGKxaOuQ4digeQRTADSFHQwbsRzEJyuVCl+PikVQF7UqEeKBZkBRIMwc3U7o4m9eFzAxHk4Mdi6WlFeuU/6FxBLvovoC4eIbG2Vigbhe3GrrgMV7O18jbi+aASdG8cfuhuKxapwvrDPJzsO6jMTYlkiYGpJOy7lUdVnwo71T7qEgc6eXAMwjSUhZUgCTZFW1MUQUKKdkyntKws5umcNc1bdUZ14XMNjnPPdvIrjsQ1uHTJvY3gHs3p6/rt06qR+7jsVA/04ZbnXFHGwTcug17AO3vin4OdGpqvaZdaOAQ7pmi6T5WNQ1iM/w1GRxfa1dA39cTlyoiHPPBrh/zztNCZy30IIwASWpUmQ6bfVvzotHinqMTAVmGnL3dSm4QKgT+AodGhrC+vXrZ75v2LABTz31FObMmYMlS5bE2jhCZhVllmTWQlBRU1UtD22nKdfCBk1FETphcNvn5nYZfjInuLacWpt1UstZIWsz/rk7gzw/Mg3yL65clCWJyznw8uM0CzXL+08qXPaCvIIKQColL1wDuxWGTy1mW4WybfUUvCtSsN4XDIVRopZ51nntLZvHIPr8Hgul5JrhQmAh+dhjj+Gtb33rzPfPfvazAIALL7wQV111VWwNI4R4UA4vrKEf2mWAvXN3jZpO8CGQSrnMPFPGLyyemBO6twHjo84vFmXw4FUiU5/cpAEz+Aw1J9HPmIeB61RmwCoUMVv442x
DiRNYSJ500knQIptyCalEAloOPIlxOIWEw83CrDqjjiMx9K2zSUe63TLptFpkbtlaa4MEepk+B718fKtJ4Fmv+6rW1BZhNrKwuFgeZ8tLS0ToI0lIoaiUPqfUO9eOxUBvj0Qyx0VcFsk4nttFHy4sAeYvBaYnA0TvqpITFLFHpztUk8zKtk3ThvuJphVG2FXXyLSWpXCdhuqr4u7fCjSCkSAUkoQUjPLsJGbQoxOjRL+WAnUNwMJl0ctR8XkN/GCIQUmWjZVHAc31ize1GQAJXKddywMG6wRocyF9qFvnWYV2114AtALMs54j6Gw4hSDUIU9QVJYRJXg2CZmlxDmyXQwWrRCrhePDpjw7QCu2tB6BNnUTkkWwulSVoJAsx+vdjN7+qmpnEVTXCIwNAw1l4hdsj7AvlIAsJ/QXZ3sSf8s6cVQUYFrUEoVCkpCCUaa9hE4qXfa74ElzGzA6KL5bnSoZKFwORkI5xZUxp4spJ3/2uPNsxorPcZy7UIRkXEPeoYc4Vc73bL6Jg+JhBe5aIdejp4WfFkmgADPbEEKcKKMHfKWQqZeHx/ylahYat5Q/UYhD+CU1xWEQ5i0ypsQD1BI2d/pMn1sK++VGOi1zzhfDslemfnUlgdehS6edRaTbNmHPg9MtX04vgKBFkpDCESmalxSEQL5bcflFJsCc+TJLTezBJorUN8q/zS+rb1NTa03gDgDQxNo3OVG8fSkYJXDdkGDE0qc7TEJAIUkIcaV1LjA9XcZpSdwor44vFtye+w3NwMiA+FaNjwQsNKbj2NgKJBxYnAwOaVYamovSkuKSZP7R5IquDFRfIMtvhpqwUEgSUkha5kbbvqkVGOqX2VVIkfFI+aMnkt4ZUEhWxnOHODGb8n+WDRFT73il+gqjI+1llEkQFIUkIeVEW6dYm0otBY95WIfD9kQnaABN5RhxkoHHLhhxCvYkXFrS5SHRyqOVhBAhlSpNX7F0GpjXZXyuBBLxhZxlSiCdDiYk65uA4QHje8kcjpANKZn2lxml4GesREr8qqenHKbrVHwr8vKHzJRgX+8AhSQhJB6c5j2uGOKK2o6nmJIhXQVgUn39tk55UertSaxJpUscY9uz5AKqaxTf2No6oG9nghVFPOYpAPOXAGMjQENT/rKwp6NjsfhZt3WELKCwVIjpgBBCEqRcDCiFRn+5UHV3SKeBprbEmhOaWaLPhDK4WFMpidYvRV/wlO1LVTXQ2JJ/jUeZGKCuAZizoLRTXpmgRZIQQpIisACZVYpFhEBVdYApBe3MsuOhShlovcKT8LWgfMxTjh/zmLsQ6N0RPcCyDKBFkhBCIsOhbUdSKbHWzKb5v0lxKOa90dwmfx39IF2oqZUk+3UeUywCmA03PS2ShBASldgsSOX/UImXCjLN2YdMkyK0dXi243HMW+eJ32ZtXfzTeM6CW55CkpBKpEx8b8qHChI8haCtAxjskwT+JUEhnvYJX0MLlwMTYxUeFBeSVMqwLJqFZNlElycLhSQhlcTchcBgL9DeWeyWVAaBHzSzwDwRB83tpRloEZgSOp/VNXQxiJ04hGQJXSMhoZAkpJJoaK7QKeeKRHsHsHNCXRSV/zOFRCGdBrJaeMFXZnM0lxRhNCENkgAoJAkhJDpuD/7qWhlSJJVFW4fkP5yzUH2bVAroWiEvExwyLWHMYj2G81RV/jKs/PeAEEKKxbwuYHw0Rr8zWpRKkqBT1TW3y1SmQWd5SqVLxMpVQnNVZhqAiVGgvjHhihQPfNyHpbEVmJxQiO4uXSgkCSEkLPVN8QYvlMizm+SY2wWMDgItc4JvW85ThZaQjkTHIvlbilbaONqUSpW9z3oZX+mEEDLL0KOUG1uK2w4iNDRJgFo5i8JQlJBoS6VKTESWisIuHWiRJISQUqGpTfLVMbqWhKGMh0eLQhh9Wl0rwjadLjGBWzwoJAkhpFRIpWRGDEKC0LUCmJ6ShNlxkEpVnuFNVRSmUsCivVBSVtsiU2n2ekIIIWR
2UVUdn4gEJIisqlqG9Uk+KVojzdAiSQghhBCDTL1YOWc9FINxQIskIYQQQioPWhVjgRZJQgghhFQeNRnJkFDF4LYoUEgSQgghpPJIpYA5C4rdirKHQ9uEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQUFJKEEEIIISQU1YWuUNM0AMDAwEChqyaEEEIIIQroOk3XbW4UXEgODg4CALq7uwtdNSGEEEIICcDg4CBaW1tdl6c0P6kZM9lsFlu3bkVzczNSqVSidQ0MDKC7uxubN29GS0tLonWR5OB5LH94DssfnsPyh+dwdlCo86hpGgYHB9HV1YV02t0TsuAWyXQ6jcWLFxe0zpaWFt40swCex/KH57D84Tksf3gOZweFOI9elkgdBtsQQgghhJBQUEgSQgghhJBQzGohmclk8JWvfAWZTKbYTSER4Hksf3gOyx+ew/KH53B2UGrnseDBNoQQQgghZHYwqy2ShBBCCCEkOSgkCSGEEEJIKCgkCSGEEEJIKCgkCSGEEEJIKGa1kPzhD3+IZcuWoa6uDsceeyweeeSRYjepYrnnnnvwrne9C11dXUilUrj++ustyzVNw5e//GUsXLgQ9fX1OOWUU/DKK69Y1tmzZw8uuOACtLS0oK2tDR/96EcxNDRkWeeZZ57Bm9/8ZtTV1aG7uxv/+Z//mfSuVQRr167F0UcfjebmZnR2duLd73431q1bZ1lnbGwMa9aswdy5c9HU1ISzzz4bO3bssKyzadMmnH766WhoaEBnZyf+5V/+BVNTU5Z17rrrLhxxxBHIZDLYe++9cdVVVyW9exXD5ZdfjkMOOWQmkfGqVatw0003zSznOSw/LrvsMqRSKVx88cUzv/E8ljaXXnopUqmU5d9+++03s7zszp82S7nmmmu02tpa7YorrtCef/557eMf/7jW1tam7dixo9hNq0huvPFG7Ytf/KL2xz/+UQOgXXfddZbll112mdba2qpdf/312tNPP62deeaZ2vLly7XR0dGZdd7xjndohx56qPbQQw9p9957r7b33ntr559//szy/v5+bf78+doFF1ygPffcc9rvfvc7rb6+XvvJT35SqN2ctaxevVq78sorteeee0576qmntHe+853akiVLtKGhoZl1PvnJT2rd3d3a7bffrj322GPacccdp73pTW+aWT41NaUddNBB2imnnKI9+eST2o033qjNmzdPu+SSS2bWee2117SGhgbts5/9rPbCCy9o3//+97Wqqirt5ptvLuj+zlb+/Oc/a3/961+1l19+WVu3bp32hS98QaupqdGee+45TdN4DsuNRx55RFu2bJl2yCGHaJ/5zGdmfud5LG2+8pWvaAceeKC2bdu2mX87d+6cWV5u52/WCsljjjlGW7Nmzcz36elpraurS1u7dm0RW0U0TcsTktlsVluwYIH2X//1XzO/9fX1aZlMRvvd736naZqmvfDCCxoA7dFHH51Z56abbtJSqZS2ZcsWTdM07Uc/+pHW3t6ujY+Pz6zz+c9/Xlu5cmXCe1R59PT0aAC0u+++W9M0OV81NTXaH/7wh5l1XnzxRQ2A9uCDD2qaJi8T6XRa2759+8w6l19+udbS0jJzzv71X/9VO/DAAy11nXfeedrq1auT3qWKpb29Xfv5z3/Oc1hmDA4Oavvss4926623aieeeOKMkOR5LH2+8pWvaIceeqjjsnI8f7NyaHtiYgKPP/44TjnllJnf0uk0TjnlFDz44INFbBlxYsOGDdi+fbvlfLW2tuLYY4+dOV8PPvgg2tracNRRR82sc8oppyCdTuPhhx+eWectb3kLamtrZ9ZZvXo11q1bh97e3gLtTWXQ398PAJgzZw4A4PHHH8fk5KTlHO63335YsmSJ5Rw
efPDBmD9//sw6q1evxsDAAJ5//vmZdcxl6Ovwvo2f6elpXHPNNRgeHsaqVat4DsuMNWvW4PTTT8871jyP5cErr7yCrq4urFixAhdccAE2bdoEoDzP36wUkrt27cL09LTlIAPA/PnzsX379iK1irihnxOv87V9+3Z0dnZalldXV2POnDmWdZzKMNdBopPNZnHxxRfj+OOPx0EHHQRAjm9tbS3a2tos69rPod/5cVtnYGAAo6OjSexOxfHss8+iqakJmUwGn/zkJ3HdddfhgAMO4DksI6655ho88cQTWLt2bd4ynsfS59hjj8VVV12Fm2++GZdffjk2bNiAN7/5zRgcHCzL81cda2mEkFnPmjVr8Nxzz+G+++4rdlNICFauXImnnnoK/f39uPbaa3HhhRfi7rvvLnaziCKbN2/GZz7zGdx6662oq6srdnNICE477bSZz4cccgiOPfZYLF26FL///e9RX19fxJaFY1ZaJOfNm4eqqqq8KKcdO3ZgwYIFRWoVcUM/J17na8GCBejp6bEsn5qawp49eyzrOJVhroNE46KLLsINN9yAO++8E4sXL575fcGCBZiYmEBfX59lffs59Ds/buu0tLSUZQdbitTW1mLvvffGkUceibVr1+LQQw/Fd7/7XZ7DMuHxxx9HT08PjjjiCFRXV6O6uhp33303vve976G6uhrz58/neSwz2trasO+++2L9+vVleR/OSiFZW1uLI488ErfffvvMb9lsFrfffjtWrVpVxJYRJ5YvX44FCxZYztfAwAAefvjhmfO1atUq9PX14fHHH59Z54477kA2m8Wxxx47s84999yDycnJmXVuvfVWrFy5Eu3t7QXam9mJpmm46KKLcN111+GOO+7A8uXLLcuPPPJI1NTUWM7hunXrsGnTJss5fPbZZy0vBLfeeitaWlpwwAEHzKxjLkNfh/dtcmSzWYyPj/Mclgknn3wynn32WTz11FMz/4466ihccMEFM595HsuLoaEhvPrqq1i4cGF53oexh++UCNdcc42WyWS0q666SnvhhRe0T3ziE1pbW5slyokUjsHBQe3JJ5/UnnzySQ2A9u1vf1t78skntddff13TNEn/09bWpv3pT3/SnnnmGe2ss85yTP9z+OGHaw8//LB23333afvss48l/U9fX582f/587QMf+ID23HPPaddcc43W0NDA9D8x8KlPfUprbW3V7rrrLkvKipGRkZl1PvnJT2pLlizR7rjjDu2xxx7TVq1apa1atWpmuZ6y4tRTT9Weeuop7eabb9Y6OjocU1b8y7/8i/biiy9qP/zhD5lyJEb+7d/+Tbv77ru1DRs2aM8884z2b//2b1oqldL+9re/aZrGc1iumKO2NY3nsdT53Oc+p911113ahg0btPvvv1875ZRTtHnz5mk9PT2appXf+Zu1QlLTNO373/++tmTJEq22tlY75phjtIceeqjYTapY7rzzTg1A3r8LL7xQ0zRJAfTv//7v2vz587VMJqOdfPLJ2rp16yxl7N69Wzv//PO1pqYmraWlRfvwhz+sDQ4OWtZ5+umntRNOOEHLZDLaokWLtMsuu6xQuzircTp3ALQrr7xyZp3R0VHtH/7hH7T29natoaFBe8973qNt27bNUs7GjRu10047Tauvr9fmzZunfe5zn9MmJyct69x5553aYYcdptXW1morVqyw1EGi8ZGPfERbunSpVltbq3V0dGgnn3zyjIjUNJ7DcsUuJHkeS5vzzjtPW7hwoVZbW6stWrRIO++887T169fPLC+385fSNE2L385JCCGEEEJmO7PSR5IQQgghhCQPhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAkFhSQhhBBCCAnF/w8nMtfYsuDkTgAAAABJRU5ErkJggg==", + "text/plain": 
[ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAsIAAAHDCAYAAAAupnzhAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABYUElEQVR4nO3deZyO9f7H8dd9z5jFMotkxmgsyVLZikxTaDGZyql0OifJKUlEFKYFdSytHFIhEi2qc4p0TuVnK4fKSWNk7EtSEdEMYmYsY7b7+/vjMjd3DRkx133f1/v5eFyPe/vec39uV3i7+n4/X5cxxiAiIiIi4jBuuwsQEREREbGDgrCIiIiIOJKCsIiIiIg4koKwiIiIiDiSgrCIiIiIOJKCsIiIiIg4koKwiIiIiDiSgrCIiIiIOJKCsIiIiIg4koKwiIgDTZ8+HZfLxbZt2+wuRUTENgrCIiIiIuJICsIiIiIi4kgKwiIi8hvGGPLz8+0uQ0TkrFIQFhER6tWrx5/+9Cc++eQTWrduTWRkJK+++qrdZYmInFUKwiIiAsDmzZvp2rUr1113HePHj6dly5Z2lyQiclaF2l2AiIj4h++++44FCxaQmppqdykiIhVCV4RFRASA+vXrKwSLiKMoCIuICGAFYRERJ1EQFhERACIjI+0uQUSkQikIi4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIi4iIiIgjuYwxxu4iREREREQqmq4Ii4iIiIgjKQiLiIiIiCMpCIuIiIiIIykIi4iIiIgjKQiLiIiIiCMpCIuIiIiII4XaXUCg8Xg87Nq1i2rVquFyuewuR0RERER+xRjDgQMHSEhIwO0+8XVfBeFy2rVrF4mJiXaXISIiIiK/Y8eOHZx33nknfF1BuJyqVasGWL+wUVFRNlcjIiIiIr+Wl5dHYmKiN7ediIJwOZVOh4iKilIQFhEREfFjvzeNVYvlRERERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRTisIT5o0iXr16hEREUFSUhLLly8/6fhZs2bRpEkTIiIiaNasGfPmzfN53RjD8OHDqVWrFpGRkaSkpLBlyxafMfv27aNbt25ERUURExNDz549OXjwoPf1I0eOcM8999CsWTNCQ0Pp3LlzmbV8/vnnXHrppYSHh3PBBRcwffr00/klEBEREZEAV+4gPHPmTNLS0hgxYgQrV66kRYsWpKamsnv37jLHf/XVV3Tt2pWePXuyatUqOnfuTOfOnVm/fr13zJgxY5gwYQJTpkwhIyODKlWqkJqaypEjR7xjunXrxoYNG1i4cCFz5sxhyZIl9O7d2/t6SUkJkZGRPPTQQ6SkpJRZy9atW+nUqRPXXHMNq1evZuDAgdx333188skn5f1lEBEREZEA5zLGmPK8ISkpicsuu4yXX34ZAI/HQ2JiIg8++CBDhgz5zfguXbpw6NAh5syZ433u8ssvp2XLlkyZMgVjDAkJCTz88MM88sgjAOTm5hIXF8f06dO544472LRpExdddBFff/01rVu3BmDBggXceOON/PTTTyQkJPh85j333ENOTg4fffSRz/ODBw9m7ty5PiH8jjvuICcnhwULFpzS98/LyyM6Oprc3FyioqJO6T0iIiIiUnFONa+V64pwYWEhmZmZPldc3W43KSkppKenl/me9PT031yhTU1N9Y7funUrWVlZPmOio6NJSkryjklPTycmJsYbggFSUlJwu91kZGSccv2/V4sjZW+HrB+hqNDuSkREREQqVGh5Bu/du5eSkhLi4uJ8no+Li+Obb74p8z1ZWVlljs/KyvK+XvrcycbUrFnTt/DQUKpXr+4dcy
pOVEteXh75+flERkb+5j0FBQUUFBR4H+fl5Z3y5/m9bRth/EPWfZcLomtAjdpQo5Z1WzMRatWH6vHg1rpKERERCS7lCsJONGrUKJ588km7yzg7dv1w7L4xkLPHOr5b7TsuLALi60J8PSsYJ5wPiY0gsmpFVisiIiJyRpUrCNeoUYOQkBCys7N9ns/OziY+Pr7M98THx590fOltdnY2tWrV8hnTsmVL75hfL8YrLi5m3759J/zc8tQSFRVV5tVggKFDh5KWluZ9nJeXR2Ji4il/pl/LP2Tdtr4Obrkf9u46euyEPTutaRPZP0LhEdi+2TqOd25tSGwMdZpAncZQ+wIIC6/47yEiIiJyGsoVhMPCwmjVqhWLFi3ytifzeDwsWrSI/v37l/me5ORkFi1axMCBA73PLVy4kOTkZADq169PfHw8ixYt8gbfvLw8MjIy6Nu3r/dn5OTkkJmZSatWrQBYvHgxHo+HpKSkU64/OTn5N63bjq+lLOHh4YSHB2m4yz9g3VauBlVjrKPeRb5jSkrgl13w89Zjx87v4ZefrbC8ZyesXGyNdbshoQE0aG4d5zeFKtEV+Y1ERERETlm5p0akpaXRvXt3WrduTZs2bXjppZc4dOgQPXr0AODuu++mdu3ajBo1CoABAwZw1VVXMW7cODp16sSMGTNYsWIFU6dOBcDlcjFw4ECeeeYZGjZsSP369Rk2bBgJCQnesH3hhRdy/fXX06tXL6ZMmUJRURH9+/fnjjvu8OkYsXHjRgoLC9m3bx8HDhxg9erVAN6A3adPH15++WUee+wx7r33XhYvXsz777/P3LlzT/fXL7AdPtqHObLKiceEhFhzhWsmQov2x54/mAs7NsOOb+HHb6z7B/bDT1us44t/W+Nq1YMGLeD8ZtCwpRW2RURERPxAuYNwly5d2LNnD8OHDycrK4uWLVuyYMEC7yK07du34z5uYdUVV1zBu+++y9///ncef/xxGjZsyEcffUTTpk29Yx577DEOHTpE7969ycnJoW3btixYsICIiAjvmH/961/079+fDh064Ha7ue2225gwYYJPbTfeeCM//vij9/Ell1wCWBt2gHX1ee7cuQwaNIjx48dz3nnn8dprr5GamlreX4bgcKQ0CFcr/3urRsOFbawDjs4x3g1bN8B3a+GHtdbUip+3WceXH1sL8s5rCE1aQ5PLoO5FVtAWERERsUG5+wg7XVD1EZ4yGDZnQtfHoE3HM//zD+yHH9bB9+vg+zW+i/MAIipDo0utUHxRktW1QkREROQPOtW8pq4RTla6WK7yWer+UC3Wmk5ROqUi9xcreH/zNXybCYfyYO2X1gFQ90JodiU0aws1zzs7NYmIiIgcpSDsZKWL5U5nasTpiD7HuvLcpiN4SmDHFisUb8qw5hn/uMk65rwGcXWtUNy8rTWdwuWqmBpFRETEMRSEnaz0ivDJFsudLe4QqNvEOlLvgty9sP4rWLcUtqy22rZl/wj/fdfa3OPSa6wjrm7F1yoiIiJBSUHYqYyBw8e1T7NbdA248mbryD8IGzOsULxpudXX+NN/WkftBnDJtXDp1RAb97s/VkREROREFISdqvCINT0B/G+HuMiq0KqDdRTkW1eKV35mTaPY+b11zJlmtWRrkwotr4LwsjdEERERETkRBWGnyj/aOs0dYm2h7K/CI4+F4kO5sOZ/sOoz+H6t1ZHih3Xw4SRoeTUkXW9tCKL5xCIiInIKFISdqnRaRGTVwAmOVaLhij9ZR84eWLEQMj6xpk5kzLeOmnUgKRUu62h1rRARERE5AQVhpzrbrdPOtphzIeVO6NAVflhvheA1S2D3dvi/aTDvTWjeDtreDPWbBk7YFxERkQqjIOxUFd067WxxuaBBM+v4cz9Y/QUsm2e1Y1v1mXUknG8twmvVQXOJRURExEtB2KlK5wjb0TrtbImoApffaB07tsDS2bBysbWj3ayXrCvFbVKh7S1wbm27qxURERGbKQg71eGjQdgfWq
edDYkN4Y6H4ebesPwTKxTv3QVL/gP/+xCaXgnX/BXqX2x3pSIiImITBWGnKp0aERGgc4RPVeVqcPVfoP2fYfMK+N9HVm/idV9aR72LrEDc9Aqrg4aIiIg4hoKwUwX6YrnycrvhwjbWkfUjfP4BrPgvbNsIbz4JNRLgqtusbhOaRywiIuIIbrsLEJsc3z7NaeLrWtMmhv8LrutmXTXeuwv+PRGe7gYL/3VsDrWIiIgELQVhpzpSuljOgUG4VFR1uLEHDH8XbnsQzqkFh/Ks1mtP/w3mv2U9FhERkaCkqRFOFeyL5cojPNLqJJH8J1j9OSx8F7J/hE/fgS8+sFqvXf0XbdAhIiISZBSEnSpfV4R/IyTE6jV8yTWw9ktrisSu72HxTGuR3RV/gmu7WFeSRUREJOApCDuVgvCJud3Qsj20aAcblsHCf8L2zfDFvyF9LrTrDNfcDlWi7K5URERE/gAFYady8mK5U+VyQdNkuPhyq/XagresHesWzYCl/2e1XWv/Z4iobHelIiIichoUhJ2opBgKj1j3ndI+7Y9wuaDJZdC4tXWFeN4b8PNWmD/dmjKR0hWuuAkqhdldqYiIiJSDukY40fGtwYJ9Q40zqfQK8SOvwl2PQ43acDAHPnoFnusOy+aBp8TuKkVEROQUKQg7UWkQDq9sLRCT8nG74dJrYcjr0CUNYs6FnD0w8wV4/n5r5zoRERHxewrCTuRtnaarwX9ISChcfiM8/hbc0sdqRffzNpj6OEwZDLt+sLtCEREROQkFYSfK10K5M6pSmNVn+Im3rduQUNicaV0dnvE85O61u0IREREpg4KwE6l12tlRuZp1ZXjIG9DyKjAGMhbAc/dYu9SVLlAUERERv6Ag7ESHFYTPqhoJ0H0YDJgA9S62AvCn78CoHrDqcysgi4iIiO0UhJ0oX9srV4h6F8FDL0H34RAbZy2oe/sZmPyo1X5NREREbKUg7ESlQTiiir11OIHLZe1SN+R1SL3Lmk/83Wpr/vB/Jvm2shMREZEKpSDsRLoiXPHCIuD67tb84eZtweOB/30Iz5b2H/bYXaGIiIjjKAg7kbZXtk/1eOgxEvr8A+LqwKFcq//w+Adhxxa7qxMREXEUBWEnUtcI+zVuBY9OtbpMRFSG7ZvhxX7w4WQ4ctju6kRERBxBQdiJNDXCP4SEWn2Hh06HS68B44El/4F/9IR1S+2uTkREJOgpCDuR94qwFsv5hajqcNcTcP9oOKeW1V3ijRHw+jDYn213dSIiIkFLQdiJvEFYV4T9SpPW8NhrkHInuENgfTqM7gmffwAlJXZXJyIiEnQUhJ3GGM0R9mdh4dDpXnjkVajf1NqM4+Mp8OID8JMW04mIiJxJCsJOU5B/rFVXZQVhv1WrHvR/Abo8bM3l3vm9tZhu3htQXGh3dSIiIkFBQdhp8o+2TgupBJXC7a1FTs7thstvsHoPt2hv/QNm4bvwfF/4cZPd1YmIiAQ8BWGnOXzcQjmXy95a5NRUi4V7hltHtVjI/hHGD4CPX7WmToiIiMhpURB2GrVOC1wt2sPg16B1itVq7fNZMPZ++G6N3ZWJiIgEJAVhp1HrtMBWJRq6DYFez0LMubB3J0x6GD6YYM3/FhERkVOmIOw0ap0WHC5KgsemQXIn6/HS2fB8H9i20d66REREAoiCsNMcPrpYTq3TAl9kVbh9EPT5x7GrwxMGwtw3oLjI7upERET8noKw0+Qfsm7VOi14NG5lXR0unTv833fhxf6w6we7KxMREfFrCsJOU9o+TVMjgktkVWvu8D3DoUoU7PoeXugHi2eCR7vSiYiIlEVB2GkOa7FcUGvR3tqm+aLLoaQI/m8avPww7N1ld2UiIiJ+R0HYadQ+LfhFVYf7nrZ2pQuPhK3rYWxvyFhgbbEtIiIigIKw85ROjYjQHOGg5nJZu9I9OhXOb2ZtvDHjeXj7mWP/GBIREXE4BWGn0WI5ZzmnFvR7Hjr1BHcIrP7Cujr8wzq7KxMREb
GdgrDT5Kt9muO4QyClKzw0HmokwP7d1rzhBW9BiRbSiYiIcykIO03pFWEFYeep2wQengKtr7ParH3yDrycBvuy7K5MRETEFgrCTlJcZM0VBS2Wc6qIytBtMPxtqHV/2wYYez+s+szuykRERCqcgrCTHL9IKqKyfXWI/Vp1gEdehboXwpFD8Paz8N5YKMi3uzIREZEKoyDsJKVBOKKKNW9UnO2cWvDgS9Dxb+Byw/JPrB3psn60uzIREZEKoSDsJIe1UE5+JSQEbrgHHhgLUedA9o/wYj/4+lO7KxMRETnrFISdxLuZhoKw/MoFLeCRKdDoUmse+btjrKkSpXPKRUREgpCCsJOUBmFdEZayVIuF+0dZV4iPnyqRrakSIiISnBSEncQbhNUxQk7AHWLNGe47BqpVh6xt8EI/+Hqh3ZWJiIiccQrCTnJYUyPkFDVsCY++Cg0vOTpV4h8wY5ymSoiISFBREHYS7Son5VEtFvqMhuu7g8sFGfPhpQdhz092VyYiInJGKAg7ieYIS3m5QyD1rqNTJWLh563wwgOw/iu7KxMREfnDFISd5LCCsJymhpdY2zPXbwpHDsPrw2HeG+ApsbsyERGR06Yg7CTe9mlaLCenIfoc6Pc8tP+z9Xjhu/Dq43Aw1966RERETpOCsJMcv7OcyOkICYVbH4C7HoewCPg2E17oC9s3212ZiIhIuSkIO4muCMuZcum1MGAC1KgN+3fDxIGwbL7dVYmIiJSLgrCTaGc5OZMSzoe0SdD0CigugpnjrKOo0O7KRERETomCsFN4PMdNjVAQljMksir0GAmd7rV2o1s237o6vH+33ZWJiIj8LgVhpyg4DMZY9zU1Qs4ktxtS7rS2Z64SBTu+tVqs/bDO7spERERO6rSC8KRJk6hXrx4REREkJSWxfPnyk46fNWsWTZo0ISIigmbNmjFv3jyf140xDB8+nFq1ahEZGUlKSgpbtmzxGbNv3z66detGVFQUMTEx9OzZk4MHD/qMWbt2Le3atSMiIoLExETGjBnzm1peeuklGjduTGRkJImJiQwaNIgjRxywW1bp1eDQSlApzN5aJDg1bgVpkyGhARzMgcmPwldz7K5KRETkhModhGfOnElaWhojRoxg5cqVtGjRgtTUVHbvLvt/hX711Vd07dqVnj17smrVKjp37kznzp1Zv369d8yYMWOYMGECU6ZMISMjgypVqpCamuoTULt168aGDRtYuHAhc+bMYcmSJfTu3dv7el5eHh07dqRu3bpkZmYyduxYRo4cydSpU71j3n33XYYMGcKIESPYtGkTr7/+OjNnzuTxxx8v7y9D4PFupqGrwXIWVY+Hh16CFu2hpBhmvQQfjLfmEIuIiPgbU05t2rQx/fr18z4uKSkxCQkJZtSoUWWOv/32202nTp18nktKSjL333+/McYYj8dj4uPjzdixY72v5+TkmPDwcPPee+8ZY4zZuHGjAczXX3/tHTN//nzjcrnMzp07jTHGTJ482cTGxpqCggLvmMGDB5vGjRt7H/fr189ce+21PrWkpaWZK6+88pS/f25urgFMbm7uKb/HL3y7ypiBHYx5rofdlYgTeDzGLPyXMYNSrP/uJg4yJm+f3VWJiIhDnGpeK9cV4cLCQjIzM0lJSfE+53a7SUlJIT09vcz3pKen+4wHSE1N9Y7funUrWVlZPmOio6NJSkryjklPTycmJobWrVt7x6SkpOB2u8nIyPCOad++PWFhYT6fs3nzZvbv3w/AFVdcQWZmpncqxw8//MC8efO48cYby/PLEJjyD1i36hghFcHlsuYN93wawivD92utecM/bfn994qIiFSQcgXhvXv3UlJSQlxcnM/zcXFxZGVllfmerKysk44vvf29MTVr1vR5PTQ0lOrVq/uMKetnHP8Zd955J0899RRt27alUqVKNGjQgKuvvvqkUyMKCgrIy8vzOQJSvrZXFhtcfDkMmgjn1oacPTBhIKz8zO6qREREAId1jfj888
957rnnmDx5MitXruQ///kPc+fO5emnnz7he0aNGkV0dLT3SExMrMCKz6DDCsJik7i6MGgSNLkMigrgnWdhzmvgKbG7MhERcbhyBeEaNWoQEhJCdna2z/PZ2dnEx8eX+Z74+PiTji+9/b0xv16MV1xczL59+3zGlPUzjv+MYcOGcdddd3HffffRrFkzbr31Vp577jlGjRqFx+Mps/6hQ4eSm5vrPXbs2FHmOL+nXeXETpFVodczcG0X6/GiGfDaMDhyyN66RETE0coVhMPCwmjVqhWLFi3yPufxeFi0aBHJycllvic5OdlnPMDChQu94+vXr098fLzPmLy8PDIyMrxjkpOTycnJITMz0ztm8eLFeDwekpKSvGOWLFlCUVGRz+c0btyY2NhYAA4fPozb7fuVQ0JCAKuFW1nCw8OJioryOQKSd2pEFXvrEOdyh8BNveCux60WfpuWw4QBsK/saVUiIiJnW7mnRqSlpTFt2jTeeustNm3aRN++fTl06BA9evQA4O6772bo0KHe8QMGDGDBggWMGzeOb775hpEjR7JixQr69+8PgMvlYuDAgTzzzDPMnj2bdevWcffdd5OQkEDnzp0BuPDCC7n++uvp1asXy5cvZ+nSpfTv35877riDhIQEwJr/GxYWRs+ePdmwYQMzZ85k/PjxpKWleWu56aabeOWVV5gxYwZbt25l4cKFDBs2jJtuuskbiIOW2qeJv7j0Wuj/AlSrDj9vgxf7w9YNdlclIiJOdDotKSZOnGjq1KljwsLCTJs2bcyyZcu8r1111VWme/fuPuPff/9906hRIxMWFmYuvvhiM3fuXJ/XPR6PGTZsmImLizPh4eGmQ4cOZvPmzT5jfvnlF9O1a1dTtWpVExUVZXr06GEOHDjgM2bNmjWmbdu2Jjw83NSuXduMHj3a5/WioiIzcuRI06BBAxMREWESExPNAw88YPbv33/K3z1g26e9OtRqY7Vsnt2ViFj27zZmbG/rv8uHrzdmxX/trkhERILEqeY1lzEnmBMgZcrLyyM6Oprc3NzAmiYx/iHYthF6jIDm7eyuRsRSkA//Gg3rllqPr7sTrr/H2rZZRETkNJ1qXtPfNk6hqRHij8Ij4Z4R0OEO6/HCd+HtZ6DQAduei4iI7RSEneKwFsuJn3K74U/3QdfHICQU1iyBl9Mgd6/dlYmISJBTEHaKI2qfJn6uTUfoOxaqRMGOb61FdDu0E52IiJw9CsJOUFRoHaANNcS/NWgGA1+2NuHI3QsvD4K1X9pdlYiIBCkFYSconR/sckF4ZXtrEfk9NRJgwHhrJ7rCIzD9Sfj8A9C6XhEROcMUhJ0g/4B1G1lVq/ElMERWhfuegba3WAH44ynwn5ehRNsyi4jImaNU5ASlC+UitFBOAkhICPy5P9zSx/q/GV9+DG+MsFquiYiInAEKwk6Qr4VyEqBcLrj6L3DPcGtb5o3LjnaU+MXuykREJAgoCDuBt4ewFspJgGreDh54HqrGwE9bYPyD1vbMIiIif4CCsBMoCEswqHcRDJgA554H+3fDhAGwZZXdVYmISABTEHaCw8ctlhMJZDUSrDB8fjM4cgimDIHln9pdlYiIBCgFYSfIP2TdVlYQliBQJQr6/gMuvQY8JfDeGFjwltqriYhIuSkIO4G3fZoWy0mQCA2DbkMh5U7r8SfvwLv/gOIie+sSEZGAoiDsBKXt0yLVPk2CiNsNne6FLmnW/RX/hamPH5sTLyIi8jsUhJ1A7dMkmF1+I/R6FsIjrcVzL6dZ2zOLiIj8DgVhJ8jXYjkJck0ug/4vQLVY2PUDjB8A2dvtrkpERPycgrATlC6WUxCWYHZew6Pt1WrD/myrvdrWDXZXJSIifkxB2AnUPk2c4pxa8NAEqNvE+u/+lUdh/Vd2VyUiIn5KQTjYeTxQcNi6rznC4gRVo6HvWLjocigqhDdGwldz7K5KRET8kIJwsDty6Fh/VXWNEKcIj4R7n4SkG8B4YNZLMH+6eg2LiI
gPBeFgVzotolK41XtVxClCQqzWah3vsh5/+k+Y+QKUlNhbl4iI+A0F4WCnXeXEyVwuuKE7/HUguNyQMR/eGA4F+XZXJiIifkBBONiVtk6LUBAWB7viT9BjBFQKg40ZMPlROJhjd1UiImIzBeFg591MQ0FYHK7ZldYiusrVYPs3Vnu1fVl2VyUiIjZSEA52pUFYrdNEoP7F8NB4iI2DPTutMPzzVrurEhERmygIBztvEFbrNBEA4upYYTiuLuT+Ym3JrI03REQcSUE42B3W1AiR34ipAQ++CHUvPLrxxmOwabndVYmISAVTEA52+dpVTqRMVaKg7xhochkUFcBrwyBzkd1ViYhIBVIQDnaaIyxyYuGR0PMpuPQa8JTAP0fBkg/trkpERCqIgnCwO6wgLHJSoZWg21Bo19l6/OEk7UInIuIQCsLBzts+TYvlRE7I7YZb+8EN91iPP/0nfDDeukosIiJBS0E42HmnRlSxtw4Rf+dyQce/wV8esu5/NQfefhaKC+2uTEREzhIF4WCn9mki5XPlzXDXExASCmuWwLS/a0tmEZEgpSAczIxR+zSR03HJ1dDrWQiLgG9XwuRH4GCu3VWJiMgZpiAczIoKoKTIuq/FciLl07gVPPC81WZt+2aYOAj277a7KhEROYMUhINZ6bQIlxvCK9tbi0ggqtvE2ngj5lzYvR0mDoQ9P9ldlYiInCEKwsHs8HEL5Vwue2sRCVRxda0tmc89z7oiPHEQ7PrB7qpEROQMUBAOZmqdJnJmxNa0rgzXbgAH9sPLabBto91ViYjIH6QgHMxKg3CE5geL/GHVYqHfOKh3sfV765XHrIV0IiISsBSEg9nhA9atOkaInBmRVaHPaGjUCgqPwNQnYN1Su6sSEZHTpCAczPK1vbLIGRceCb2ehmZtra4s05+EzEV2VyUiIqdBQTiYKQiLnB2hYdB9GLS+Djwe+NdoWDrb7qpERKScFISDmRbLiZw9ISHQ9VFoe4u1ec0HE+C/79ldlYiIlIOCcDDLP659moiceW43/Lk/XHen9Xju6/B/06xgLCIifk9BOJh5g7CuCIucNS4X3Hgv3NTLerx4Jvx7gjVlQkRE/JqCcDAr7RqhOcIiZ9+1XeCvA61gvPT/4N1/QEmx3VWJiMhJKAgHM+8cYQVhkQpxxZ/gb0PBHWJ1kpj+FBQV2l2ViIicgIJwMNPUCJGKd+m1cO+TEFoJ1n8F056Agny7qxIRkTIoCAezw2qfJmKLiy+H3qOsnsNbVlm70JX+w1RERPyGgnCwKimBgsPWfU2NEKl4DVtC37FW+8IfN8GkR+Bgrt1ViYjIcRSEg9WR464+6YqwiD3qNoF+46BqDOz8DiY9DHn77K5KRESOUhAOVvmHrNuwCAgJtbcWESdLOB/6vwDR50DWNnh5EOzfbXdVIiKCgnDwUus0Ef8RVwf6vwixcbBnJ7ycBnt32V2ViIjjKQgHK7VOE/EvNRLgwRegRm3Yl2WF4eztdlclIuJoCsLBSq3TRPxPbJw1TSKuLuTutcLwrh/srkpExLEUhIOVpkaI+Kfoc6wwXLsBHMyxFtBt32x3VSIijqQgHKxKF8spCIv4n6rR8MDzVleJwwfglUfhh/V2VyUi4jgKwsEqX1eERfxa5WrQZww0aA5HDsOrQ6zNN0REpMIoCAcrLZYT8X8RlaH3c9C4FRQegamPw8YMu6sSEXEMBeFgla/tlUUCQlgE3Pc0NL0CiovgjRGw9n92VyUi4ggKwsHKu1hOXSNE/F5oGNwzHC65GkqK4a2nIXOR3VWJiAQ9BeFgVbpYTlMjRAJDSCj8bSi0SQWPB/41GpbNs7sqEZGgpiAcrLRYTiTwuEOgy8Nw5c1gDMx8AZZ8aHdVIiJBS0E4WKl9mkhgcrvhtgfh6r9ajz+cBItm2FuTiEiQUhAORsZoQw2RQOZywc29oeNd1uM5r8GCt63f2yIicsYoCAejwiPgKbHuV9ZiOZGA5HLBDd2hU0/r8S
dvw7w3FYZFRM4gBeFgVNo6ze22WjOJSOBK6Qq39LHu//ddmD1VYVhE5Aw5rSA8adIk6tWrR0REBElJSSxfvvyk42fNmkWTJk2IiIigWbNmzJvnuxLaGMPw4cOpVasWkZGRpKSksGXLFp8x+/bto1u3bkRFRRETE0PPnj05ePCgz5i1a9fSrl07IiIiSExMZMyYMb+pJScnh379+lGrVi3Cw8Np1KjRb+oJeMe3TnO57K1FRP64q/9izRsG+HyWNW9YYVhE5A8rdxCeOXMmaWlpjBgxgpUrV9KiRQtSU1PZvXt3meO/+uorunbtSs+ePVm1ahWdO3emc+fOrF+/3jtmzJgxTJgwgSlTppCRkUGVKlVITU3lyJEj3jHdunVjw4YNLFy4kDlz5rBkyRJ69+7tfT0vL4+OHTtSt25dMjMzGTt2LCNHjmTq1KneMYWFhVx33XVs27aNDz74gM2bNzNt2jRq165d3l8G/6bWaSLBp+0tcPsg6x+3//sIZr1ktVkTEZHTZ8qpTZs2pl+/ft7HJSUlJiEhwYwaNarM8bfffrvp1KmTz3NJSUnm/vvvN8YY4/F4THx8vBk7dqz39ZycHBMeHm7ee+89Y4wxGzduNID5+uuvvWPmz59vXC6X2blzpzHGmMmTJ5vY2FhTUFDgHTN48GDTuHFj7+NXXnnFnH/++aawsLC8X9srNzfXACY3N/e0f8ZZt26pMQM7GPPCA3ZXIiJnWsYCYwalWL/H3x1jTEmx3RWJiPidU81r5boiXFhYSGZmJikpKd7n3G43KSkppKenl/me9PR0n/EAqamp3vFbt24lKyvLZ0x0dDRJSUneMenp6cTExNC6dWvvmJSUFNxuNxkZGd4x7du3JywszOdzNm/ezP79+wGYPXs2ycnJ9OvXj7i4OJo2bcpzzz1HSUnJCb9zQUEBeXl5Poff0/bKIsGrTSp0G2KtAVj+Cbw7Bk7yZ5iIiJxYuYLw3r17KSkpIS4uzuf5uLg4srKyynxPVlbWSceX3v7emJo1a/q8HhoaSvXq1X3GlPUzjv+MH374gQ8++ICSkhLmzZvHsGHDGDduHM8888wJv/OoUaOIjo72HomJiScc6zcOKwiLBLVWHeCuv1sbcGQugneetbZmFhGRcnFU1wiPx0PNmjWZOnUqrVq1okuXLjzxxBNMmTLlhO8ZOnQoubm53mPHjh0VWPFp8l4RVus0kaDVsj3cM9zamnnNEnjraSgutLsqEZGAUq4gXKNGDUJCQsjOzvZ5Pjs7m/j4+DLfEx8ff9Lxpbe/N+bXi/GKi4vZt2+fz5iyfsbxn1GrVi0aNWpESEiId8yFF15IVlYWhYVl/wUSHh5OVFSUz+H3vEG4ir11iMjZ1exKuPdJCK0E65bCm09CkcKwiMipKlcQDgsLo1WrVixatMj7nMfjYdGiRSQnJ5f5nuTkZJ/xAAsXLvSOr1+/PvHx8T5j8vLyyMjI8I5JTk4mJyeHzMxM75jFixfj8XhISkryjlmyZAlFRUU+n9O4cWNiY2MBuPLKK/nuu+/wHLfS+ttvv6VWrVo+c4sDXmn7NG2mIRL8LkqC+56BSuGwMQNeH2ZtqiMiIr+r3FMj0tLSmDZtGm+99RabNm2ib9++HDp0iB49egBw9913M3ToUO/4AQMGsGDBAsaNG8c333zDyJEjWbFiBf379wfA5XIxcOBAnnnmGWbPns26deu4++67SUhIoHPnzoB11fb666+nV69eLF++nKVLl9K/f3/uuOMOEhISALjzzjsJCwujZ8+ebNiwgZkzZzJ+/HjS0tK8tfTt25d9+/YxYMAAvv32W+bOnctzzz1Hv379TvsX0C8d0RxhEUdp3Ap6P2ttoLM5E6b9HQry7a5KRMT/nU5LiokTJ5o6deqYsLAw06ZNG7Ns2TLva1dddZXp3r27z/j333/fNGrUyISFhZmLL77YzJ071+d1j8djhg0bZuLi4kx4eLjp0KGD2bx5s8+YX375xXTt2t
VUrVrVREVFmR49epgDBw74jFmzZo1p27atCQ8PN7Vr1zajR4/+Te1fffWVSUpKMuHh4eb88883zz77rCkuPvX2QwHRPm3CQKu10srP7K5ERCrS9+uMGXyT9ft//ABj8g/aXZGIiC1ONa+5jNH2ROWRl5dHdHQ0ubm5/jtfeEwv+Hkr9PmHdaVIRJzjx00wZQgcOQR1L4T7R+n/DomI45xqXnNU1wjH0GI5EeeqeyE8MNZaI/DjJpj8GBwKgP7nIiI2UBAORmqfJuJsiY3ggeehSjT89C1MfhQO5thdlYiI31EQDjYlxccWyeh/h4o4V+0G0G8cVIuFXd/DpEcgb5/dVYmI+BUF4WBTejUYFIRFnK5WPej/AkSfA1nbYNLDkLPX7qpERPyGgnCwKQ3C4ZXhuI1DRMShaiZCvxcg5lzYvQMmpcH+7N9/n4iIAygIB5vDWignIr9ybm148EWoHg97d8HLD8MvP9tdlYiI7RSEg03pFWHtKicix6seb02TOLc27MuCl9Ngz067qxIRsZWCcLDJP7q9suYHi8ivxda0pknUrAM5e6wwnL3d7qpERGyjIBxs8g9ZtwrCIlKW6HOg/zhrIV3eL1YY/nmr3VWJiNhCQTjYHD56RVhTI0TkRKrFwgPjrBZrB3OsbhI7v7O7KhGRCqcgHGxK5whHaLGciJxE1WjoO9bafONQnrXpxvbNdlclIlKhFISDjRbLicipqhIFfcdAvYus/5v0yqOwbaPdVYmIVBgF4WBzWIvlRKQcIqvC/aOhQXM4chimDIbv19ldlYhIhVAQDjZaLCci5RVRGXo9Cw0vsbZonzoUtqyyuyoRkbNOQTjYqH2aiJyO8Ei47xlochkUHoFpT8A3X9tdlYjIWaUgHGy8c4QVhEWknMLCoeeTcHEyFBXCa8NhfbrdVYmInDUKwsGmNAhHarGciJyG0DC4Zzg0bwclRfDmSFj7P7urEhE5KxSEg4kxxwVhXREWkdMUWgnu/jtccg14SuCtp2HlZ3ZXJSJyxikIB5OCfPB4rPuaGiEif0RICPxtCLS+zvpz5Z+j4OtP7a5KROSMUhAOJqUL5UJCoVK4vbWISOBzh0DXR+HyG8B44L2xsGy+3VWJiJwxCsLB5PjWaS6XvbWISHBwu+Gvg+DKm63pVzPHwZcf212ViMgZoSAcTLSZhoicDW433PYgXHWb9fjfE+GLf9tbk4jIGaAgHEzUOk1EzhaXC27pAx3usB5/9AosmmFvTSIif5CCcDBR6zQROZtcLujUE1Lvsh7PeQ0+/ae9NYmI/AEKwsFEUyNE5GxzueD67nDjvdbj+dNh3hvW/GERkQCjIBxMShfLaWqEiJxt190JN99v3V/4LvzfVIVhEQk4CsLBJF9XhEWkAl3zV7i1n3X/s1nw4WSFYREJKArCwUS7yolIRWt/K/x1oHX/fx/CrPHHNvYREfFzCsLB5LCCsIjY4Io/wR2PWPOH0+dYvYY9JXZXJSLyuxSEg4l3aoS6RohIBUu6HroNAZcbln8C746BEoVhEfFvCsLBRIvlRMROrTrA3U9YWzNnLoJ3noWSYrurEhE5IQXhYKLFciJit5ZXwT3DISQU1iyBt56G4kK7qxIRKZOCcDApvSKsICwidmp2Jdz7JIRWgnVL4Y2RUKQwLCL+R0E4WBQXQeER676CsIjY7aIkuO8ZqBQOm5bDa8OO/RklIuInFISDRWnrNIDIKvbVISJSqnEr6P0shEXAt5kw7QkoyLe7KhERLwXhYFEahCMqWwtVRET8wQUt4f7REF4ZvlsDrw6BI4fsrkpEBFAQDh6H1TpNRPzU+U2h7z8gogps3QBTBvv+XywREZsoCAeL0r9U1DpNRPxR3QvhgeehcjX48RuY/CgcyrW7KhFxOAXhYKHtlUXE3yU2hH7joGoM/LTFCsMH9ttdlYg4mIJwsPAGYU2NEBE/lnC+FYarVYddP8CkRyD3F7urEhGHUhAOFodLg7A6RoiIn4uvC/
1fgOgakP0jTHoYcvbaXZWIOJCCcLDwzhHWFWERCQA1z7PCcGxN2PMTvDwI9mXbXZWIOIyCcLDQ9soiEmhqJED/F+GcWvDLzzBxoBWKRUQqiIJwsDisxXIiEoCqx1lhuGYdyNkDEwfBz1vtrkpEHEJBOFhoaoSIBKqYGtB/HCQ0sLpIvJwGO761uyoRcQAF4WDh3VlOi+VEJABVi4V+z0PdJtYGQZMfgR/W212ViAQ5BeFgoSvCIhLoKleDPmOgQXM4ctjajvnblXZXJSJBTEE4WGhDDREJBhGVofdz0KQ1FB6BaU/A+nS7qxKRIKUgHAw8Hsg/ZN1XEBaRQBcWAT2fgmZtobgI3hwJqz63uyoRCUIKwsGg4DAYj3VfQVhEgkFoGHQfBq06gKcE3nkOln9id1UiEmQUhINB6bSI0EoQFm5vLSIiZ0pICNz5GFx+o/WP/ffGwpcf212ViAQRBeFg4J0frIVyIhJk3CFw+yBo/2fr8b8nwqKZ9tYkIkFDQTgYaDMNEQlmLhd07gvX3Wk9njMN5k8HY2wtS0QCn4JwMPC2TlMQFpEg5XLBjfdCp57W40//CbNfVRgWkT9EQTgY5B+wbnVFWESCXUpXuLWfdf/zD2DWeKtzjojIaVAQDgZqnSYiTtL+VrjjYesqcfoceG8MlJTYXZWIBCAF4WBwWFeERcRhkm6Avz0Objes+C+8/bTVc1hEpBwUhIOB5giLiBNdeg3cMwJCKsHaL+H14dZudCIip0hBOBiofZqIOFWzK6HXM9ZudN98DVMGH/szUUTkdygIBwNNjRARJ2vcCvr8AyKqwNYNMOlhOLDf7qpEJAAoCAeD0sVymhohIk5V/2Lo/wJUi4Wd38PEgbA/2+6qRMTPKQgHA7VPExGB2g3gwRchNg727IQJAyF7u91ViYgfUxAOBtpZTkTEcu558NBLULMO5OyBiYNgxxa7qxIRP6UgHAyOKAiLiHjFnGtdGT6vERzKteYMf7/W7qpExA8pCAe6okLrAKisrhEiIgBUjYZ+Y6FBcyg4DK8OgQ3L7K5KRPyMgnCgK20T5HJBeGV7axER8ScRVaD3KLg42bpg8MYIWLnY7qpExI8oCAe60oVyEVWsHZZEROSYsHDoMQJadQBPCfxzFCydbXdVIuInTis5TZo0iXr16hEREUFSUhLLly8/6fhZs2bRpEkTIiIiaNasGfPmzfN53RjD8OHDqVWrFpGRkaSkpLBli+/ihn379tGtWzeioqKIiYmhZ8+eHDzo2zR97dq1tGvXjoiICBITExkzZswJa5oxYwYul4vOnTuX78v7m9LWaZofLCJStpBQuHMwtL0FjIEPJsB/37Xui4ijlTsIz5w5k7S0NEaMGMHKlStp0aIFqamp7N69u8zxX331FV27dqVnz56sWrWKzp0707lzZ9avX+8dM2bMGCZMmMCUKVPIyMigSpUqpKamcuTIsa0yu3XrxoYNG1i4cCFz5sxhyZIl9O7d2/t6Xl4eHTt2pG7dumRmZjJ27FhGjhzJ1KlTf1PTtm3beOSRR2jXrl15v77/0WYaIiK/z+2GP/eH67pZj+e+Af83TWFYxOlMObVp08b069fP+7ikpMQkJCSYUaNGlTn+9ttvN506dfJ5Likpydx///3GGGM8Ho+Jj483Y8eO9b6ek5NjwsPDzXvvvWeMMWbjxo0GMF9//bV3zPz5843L5TI7d+40xhgzefJkExsbawoKCrxjBg8ebBo3buzz2cXFxeaKK64wr732munevbu55ZZbyvX9c3NzDWByc3PL9b6zZsV/jRnYwZhJj9hdiYhIYPhslvXn5sAOxsx43piSYrsrEpEz7FTzWrmuCBcWFpKZmUlKSor3ObfbTUpKCunp6WW+Jz093Wc8QGpqqnf81q1bycrK8hkTHR1NUlKSd0x6ejoxMTG0bt3aOyYlJQW3201GRoZ3TPv27QkLC/P5nM2bN7N//7GtNp966ilq1qxJz549T+k7FxQUkJ
eX53P4ldLFcpFV7K1DRCRQXP0XuONhcLlh2Xx4+1koLrS7KhGxQbmC8N69eykpKSEuLs7n+bi4OLKyssp8T1ZW1knHl97+3piaNWv6vB4aGkr16tV9xpT1M47/jC+//JLXX3+dadOmndoXBkaNGkV0dLT3SExMPOX3Vgjv1Ai1ThMROWVJN0D3v0NIJVizBKY+AUcO2V2ViFQwx7QZOHDgAHfddRfTpk2jRo0ap/y+oUOHkpub6z127NhxFqs8DaWL5SprjrCISLm0aA+9n4XwSNiyCiY9Agf2//77RCRolCsI16hRg5CQELKzs32ez87OJj4+vsz3xMfHn3R86e3vjfn1Yrzi4mL27dvnM6asn1H62vfff8+2bdu46aabCA0NJTQ0lLfffpvZs2cTGhrK999/X2b94eHhREVF+Rx+JV+L5URETlujS6HfOKgaAz9tgQkDYO8uu6sSkQpSriAcFhZGq1atWLRokfc5j8fDokWLSE5OLvM9ycnJPuMBFi5c6B1fv3594uPjfcbk5eWRkZHhHZOcnExOTg6ZmZneMYsXL8bj8ZCUlOQds2TJEoqKinw+p3HjxsTGxtKkSRPWrVvH6tWrvcfNN9/MNddcw+rVq/1vysOpytf2yiIif0hiI3joJageb4XgCQNh53d2VyUiFaDcUyPS0tKYNm0ab731Fps2baJv374cOnSIHj16AHD33XczdOhQ7/gBAwawYMECxo0bxzfffMPIkSNZsWIF/fv3B8DlcjFw4ECeeeYZZs+ezbp167j77rtJSEjw9vi98MILuf766+nVqxfLly9n6dKl9O/fnzvuuIOEhAQA7rzzTsLCwujZsycbNmxg5syZjB8/nrS0NAAiIiJo2rSpzxETE0O1atVo2rSpzyK7gHJYQVhE5A879zx4aDwkNIAD++DlNPhutd1VichZFlreN3Tp0oU9e/YwfPhwsrKyaNmyJQsWLPAuTNu+fTvu43Y4u+KKK3j33Xf5+9//zuOPP07Dhg356KOPaNq0qXfMY489xqFDh+jduzc5OTm0bduWBQsWEBER4R3zr3/9i/79+9OhQwfcbje33XYbEyZM8L4eHR3Np59+Sr9+/WjVqhU1atRg+PDhPr2Gg1Lp1IjKWiwnIvKHRJ8D/cfB68Ph+7UwZSjc/Tg0D4Ke8yJSJpcx6iZeHnl5eURHR5Obm+sf84Wf/hvsy4IBE6DeRXZXIyIS+IoK4Z/PwdovrRZrf3kIrviT3VWJSDmcal5zTNeIoKWd5UREzqxKYdB9GCR3AuOBWS/BJ+9oFzqRIKQgHMg8Hig4bN3X1AgRkTPHHQJ/HQgd77IeL3gL/j0RPCW2liUiZ5aCcCA7cujYFQrtLCcicma5XHBDd7jtQev+0tnahU4kyCgIB7LS1mmVwiE0QLteiIj4u7a3wN3ahU4kGCkIBzK1ThMRqRgtr4L7nzu2C93EQZCz1+6qROQPUhAOZNpVTkSk4jS8BPq9ANWqw64fYPyD8PM2u6sSkT9AQTiQaVc5EZGKldgQBk6AmomQs8faklkbb4gELAXhQFYahCsrCIuIVJjq8dYudPUvtuYKTxkKKz+zuyoROQ0KwoHMe0VYrdNERCpUlSjoO9bada6kCN55Fj57X72GRQKMgnAg8y6WU+s0EZEKV7rxRvs/W49nT4UPJ6nXsEgAURAOZN6pEboiLCJiC7cbbn0AbuljPf7fRzD9aSgssLUsETk1CsKBTF0jRET8w9V/OdZreN2X8MqjcDDX7qpE5HcoCAcy9REWEfEfl1wNff9h/Zm8baPVUWLvLrurEpGTUBAOZJoaISLiXxo0tzpKxNaEPT/B+Idg+zd2VyUiJ6AgHMjytVhORMTvxNeFAROgdgM4mAMvPwxrv7S7KhEpg4JwIFP7NBER/xRdA/q/AE0ug6ICmP6k2quJ+CEF4UBljOYIi4j4s4gqcN8zcOVN1p/Zs6fCrPFQUmx3ZSJylIJwoCoqtJq4g3aWExHxVy
EhcNtD0LkvuFyQPgemPXHs/+iJiK0UhANVaes0lxvCIu2tRURETszlgqtug3ufhLAI2JwJEwbCvmy7KxNxPAXhQHX8rnJunUYREb/X9Apr3nDUOZC1DV7qDz+qo4SInZSgApVap4mIBJ7ERjDoZUhoAAf2w6Q0WLPE7qpEHEtBOFCVBuEIzQ8WEQkoMefCgy/CRUnWeo/pT8GimeooIWIDBeFA5b0irCAsIhJwIirDvU9Bu87W4znT4P0X1FFCpIIpCAeqw0cXy6l1mohIYAoJgT/3h1v7WQufl82HKUPgUK7dlYk4hoJwoDpyyLpVEBYRCWztb4WeT0F4JHy3Gl7sD1k/2l2ViCMoCAcqXREWEQkeF19ubctcPR5++RleehA2ZthdlUjQUxAOVJojLCISXGrVh0GToEFzKDgMr/1d2zKLnGUKwoGqNAhHqn2aiEjQqBoNff4ByZ2Obcv87hiru4SInHEKwoFKUyNERIJTaCX460BrIZ3bDSsWwuRHIG+f3ZWJBB0F4UCVf3SxnKZGiIgEH5fLaq3We5R1wWPbRnixH/y0xe7KRIKKgnCgytcVYRGRoNe4FQx8GWomQs4emDgIVmsnOpEzRUE4UGmOsIiIM9Q8DwZOhCatofAIvPUUzH8LPB67KxMJeArCgchTAkcOW/cjq9hbi4iInH2RVeG+Z+Gq26zHn74Drw8/dlFERE6LgnAgOv4Pvsq6Iiwi4gghIdC5L3R9zFpQt3GZNW9Ym2+InDYF4UBUulAuLAJCQu2tRUREKlabjvDQeIitCXt2wkv9Ye3/7K5KJCApCAcitU4TEXG2xEaQNhkuaAkF+fDmkzD3dWvqnIicMgXhQKRd5UREpGqMtfnG1X+xHv/3PZj292MXS0TkdykIB6LSIByhICwi4mghIXBLH/jbUKgUDt98DS88ALt+sLsykYCgIByISv+1r4VyIiIC0KoDDBgP1ePhl59h/EOw6jO7qxLxewrCgah0sZxap4mISKnaF0DaJGjUyuo3/Paz8OFkKC6yuzIRv6UgHIi8u8rpirCIiBynSjTc/xxc28V6vOQ/MOlha1c6EfkNBeFApMVyIiJyIu4QuKkX3PskRFSBbRvh+T6wOdPuykT8joJwIPJur6wgLCIiJ9DsSnj4FWvKxKFceHUIfPKOtmYWOY6CcCDyBmFNjRARkZOokQADJsDlN4IxsOAtmPY4HMy1uzIRv6AgHIgOlwZhLZYTEZHfUSkMuqRZWzNXCodvVsC4PtaUCRGHUxAORPlqnyYiIuXUpiMMnAjn1rYWz00cBEs+tK4UiziUgnAg8rZP0xxhEREph4Tzra2ZW7S3tmP+cJK1PfOhPLsrE7GFgnCgMebYhhoKwiIiUl4RVaD7MLi1H4SEwrovrakSP6y3uzKRCqcgHGgKj1j/igdNjRARkdPjckH7W62FdDVqw/7dMCkNFr577O8YEQdQEA40pR0j3G4Ii7C3FhERCWyJjawWa606WG3V5r0BU4ZA7i92VyZSIRSEA83xrdNcLntrERGRwBdRGboNga6PWhdYtqyC5++HTcvtrkzkrFMQDjSHtZmGiIicYS4XtEm1FtIlNICDOTD1cZg9FYqL7K5O5KxREA403tZpCsIiInKGxdWxWqy1vcV6/Nn7Vpu1vbvsrUvkLFEQDjSlUyMiFIRFROQsqBQGtz0IPUZai7K3fwNje8Oy+eo5LEFHQTjQlE6N0BVhERE5m5q3hUemQIPmVseimePgzZHWtAmRIKEgHGiOXywnIiJyNsXGwQNj4aZeR3sOL4UxvbSQToKGgnCg8QbhKvbWISIizuAOgWu7wKCXIa4uHNhvLaT790TrSrFIAFMQDjSlQVibaYiISEWqfYHVVaL9n63HX34M4x6AHVvsrUvkD1AQDjT52l5ZRERsEhYOtz4Aff4B0efA7u3wUn9rR7oS7UgngUdBONCoj7CIiNitcSt4dCo0b2dtyTzvDZjwEGT9aHdlIuWiIBxo8h
WERUTED1SJhnuGw52Drb+Ttm+G5/vAf9/T1WEJGArCgSZf7dNERMRPuFxw2XUw+DW4KAlKimDu6zBhgK4OS0BQEA40ap8mIiL+JroG3PcMdH0MIqpYm3CM6wOLZujqsPg1BeFAUlIMBfnWfU2NEBERf+JyQZuOMPh1uLANFBfBnNdg4gDI3m53dSJlUhAOJPmHjt1XEBYREX8UUwN6PQt3PAIRleHHb+D5+492lii2uzoRHwrCgaS0dVp4JISE2FuLiIjIibhckHS9dXW4yWXW1eF5b8C4vvDjJrurE/E6rSA8adIk6tWrR0REBElJSSxffvKtFmfNmkWTJk2IiIigWbNmzJs3z+d1YwzDhw+nVq1aREZGkpKSwpYtvg269+3bR7du3YiKiiImJoaePXty8OBBnzFr166lXbt2REREkJiYyJgxY3xenzZtGu3atSM2NpbY2FhSUlJ+t3a/otZpIiISSGLOhd7PWZ0lqkTBz1th/EPwn5fhyGG7qxMpfxCeOXMmaWlpjBgxgpUrV9KiRQtSU1PZvXt3meO/+uorunbtSs+ePVm1ahWdO3emc+fOrF+/3jtmzJgxTJgwgSlTppCRkUGVKlVITU3lyJFjWzd269aNDRs2sHDhQubMmcOSJUvo3bu39/W8vDw6duxI3bp1yczMZOzYsYwcOZKpU6d6x3z++ed07dqVzz77jPT0dBITE+nYsSM7d+4s7y+DPdQ6TUREAk1pZ4khb0LrFDAG/vcR/KMnbFhmd3XidKac2rRpY/r16+d9XFJSYhISEsyoUaPKHH/77bebTp06+TyXlJRk7r//fmOMMR6Px8THx5uxY8d6X8/JyTHh4eHmvffeM8YYs3HjRgOYr7/+2jtm/vz5xuVymZ07dxpjjJk8ebKJjY01BQUF3jGDBw82jRs3PuF3KS4uNtWqVTNvvfXWqX59k5ubawCTm5t7yu85Y1YuNmZgB2MmDqr4zxYRETkTvllhzFPdrL/PBnYwZvpTxuT+YndVEmRONa+V64pwYWEhmZmZpKSkeJ9zu92kpKSQnp5e5nvS09N9xgOkpqZ6x2/dupWsrCyfMdHR0SQlJXnHpKenExMTQ+vWrb1jUlJScLvdZGRkeMe0b9+esLAwn8/ZvHkz+/fvL7O2w4cPU1RURPXq1cvzy2Cf0sVyuiIsIiKBqnErq+/wNbeD2w2rv4DR98LS2dYudSIVqFxBeO/evZSUlBAXF+fzfFxcHFlZWWW+Jysr66TjS29/b0zNmjV9Xg8NDaV69eo+Y8r6Gcd/xq8NHjyYhISE3wT14xUUFJCXl+dz2Obw0cVyCsIiIhLIwiLg5t4waBKc18ia+vfBBHixvxbTSYVybNeI0aNHM2PGDD788EMiIiJOOG7UqFFER0d7j8TExAqs8lc0R1hERILJeQ1h0ES47UFrI46ftsBLD8LMcXAw1+7qxAHKFYRr1KhBSEgI2dnZPs9nZ2cTHx9f5nvi4+NPOr709vfG/HoxXnFxMfv27fMZU9bPOP4zSj3//POMHj2aTz/9lObNm5/0Ow8dOpTc3FzvsWPHjpOOP6sUhEVEJNi4Q6DtLfD4dLiso/Xcsvkw6h74ao6mS8hZVa4gHBYWRqtWrVi0aJH3OY/Hw6JFi0hOTi7zPcnJyT7jARYuXOgdX79+feLj433G5OXlkZGR4R2TnJxMTk4OmZmZ3jGLFy/G4/GQlJTkHbNkyRKKiop8Pqdx48bExsZ6nxszZgxPP/00CxYs8JlzfCLh4eFERUX5HLYpnRpRWdsri4hIkKkWC3c+Bg++CAnnW3/nzXrJukK8/Ru7q5NgVd5VeDNmzDDh4eFm+vTpZuPGjaZ3794mJibGZGVlGWOMueuuu8yQIUO845cuXWpCQ0PN888/bzZt2mRGjBhhKlWqZNatW+cdM3r0aBMTE2M+/vhjs3btWnPLLbeY+vXrm/z8fO+Y66+/3lxyySUmIyPDfPnll6Zhw4ama9eu3tdzcnJMXF
ycueuuu8z69evNjBkzTOXKlc2rr77q8zlhYWHmgw8+MD///LP3OHDgwCl/f1u7Rkx+zFphu/yTiv9sERGRilJcbMwX/zFmyE3W33uDUox5d4wxOXvsrkwCxKnmtXIHYWOMmThxoqlTp44JCwszbdq0McuWLfO+dtVVV5nu3bv7jH///fdNo0aNTFhYmLn44ovN3LlzfV73eDxm2LBhJi4uzoSHh5sOHTqYzZs3+4z55ZdfTNeuXU3VqlVNVFSU6dGjx28C7Jo1a0zbtm1NeHi4qV27thk9erTP63Xr1jXAb44RI0ac8ne3NQiP62v9gbDuq4r/bBERkYqWs9eYd0Yda7X2WCdjPnnHmIIjdlcmfu5U85rLGGNsuxwdgPLy8oiOjiY3N7fip0k82x327rT+t9H5zSr2s0VEROyybSN8NBl+PDpFIrYm/KkXXHK1tWGHyK+cal5zbNeIgJSv9mkiIuJA9S6CARPhb0OtbZv374Z3noUJA4+FY5HToCAcKIw5rmuEFsuJiIjDuFzQqgMMfRNuuMfqRbxtA7zUH95+FvbusrtCCUAKwoGiIB88Hut+ZBV7axEREbFLWAR0/BsMnX6s3dqqz2BUD/j3RDhQ9m6yImVREA4UpdMiQkKtPwREREScLKaG1W7t4VegSWur3/CXH8Mzd8H86XDkkN0VSgBQEA4U+Ud/Q0dW1cIAERGRUuc1hPtHwwPPQ50mUHgEPv0nPHM3fPEfKC60u0LxYwrCgeKwFsqJiIicUMOWMHAi3DMczj0PDuVanSaeu8faoa646Pd+gjiQgnCgKF0oV1lBWEREpEwuF7RoD4Nfh78OhOhzrA4Ts16yAnH6XAVi8aEgHCi8HSMUhEVERE4qJASu+BM8/jZ0fgCqVYf92fD+i9aiumXzoKTY7irFDygIBwrv1Ai1ThMRETklYeFw1Z/h7+9A575QLRb2ZcHMF6wrxMvm6wqxwykIBwrvYjm1ThMRESmXsHC46rYyAvE4ePZu+PwDq02pOI6CcKAobZ9WWVeERURETktYxLFAfEsfiDoHcvbAx1PgqTuttmsHc+yuUipQqN0FyCkqnSMcoTnCIiIif0hYBFz9F2h7M6z4Lyx+H/b8ZLVd+2wWJF0P1/wVqsfbXamcZQrCgeKwukaIiIicUaFhcPmN0CYV1i2FxTNh+2ZrY46v/s/qQNH+z1D3QvXwD1IKwoHC2zVCUyNERETOKHeIFXqbt4PvVsOiGbA5E1Z9bh11GluBuEV7CK1kc7FyJikIBwpvENZiORERkbPC5YKGl1jHzu9hyX9g5WLrKvE/R8HHr8KVN1mt2arF2l2tnAEKwoFCi+VEREQqTu0G0PVRuKmXtRHH0tmQ+wsseAsWvgst2kFyJ2jQXNMmApiCcKDwtk/THGEREZEKUzUGrusG13aBNUusq8Q/fmNdKV65GGomWvOML+sIVaPtrlbKSUE4EBQXQeER676CsIiISMULCYVLr7WOHd9aV4lXLobdO2D2qzD3DWjRFi4/epXYrQ61gUBBOBCUzg8GzREWERGxW2Ij67j5flj1GXw1F376FlZ+Zh2xcdA6BVpfBzXPs7taOQkF4UDg7SFc2VrZKiIiIvaLqGzNE07udOwq8arPYH82LPyXddRtYgXiS66GKpo64W8UhAOBWqeJiIj4t9KrxJ0fgA1fwdcLYfMKaz7xj9/AR6/AhZdBi6vg4ss11dFPKAgHgsNHO0boN42IiIh/CwuHS66xjgP7rakSKxbCT1tgfbp1hFSCxq2gZXtoeoX+freRgnAgyNeuciIiIgGnWixc9Wfr+HkbrP4C1nwB2dth4zLrCAmFRpdam3lc2Aaiz7G7akdREA4E3qkRCsIiIiIBqVY967ihuxWK1yyxjqxtsGm5dQCc1wguToKLLofzGqr7xFmmIBwIDisIi4iIBI3SUHz93ZD9I6z5H2xYBtu/sbpP/PQtfPKOdUX5oiRo3BoatrR6GssZpSAcCLxTI7RYTk
REJKjE1YWOdaHj3yBvn3VleGOGtdDuwH7IWGAdAAnnH9sCukEziFBL1T9KQTgQ5GuxnIiISNCLqg5J11tHcRF8v9YKxltWwa4fjh1f/NuaMlGnCdS/GOpdDPUust4v5aIgHAi0vbKIiIizhB7tLNG4lfX4wH74bjVsWW0F4727YNtG62CWNaZ6vBWI610EdS+EhPoQGmbTFwgMCsKBQO3TREREnK1a7LG2bAD7suG7NUfD8AZr0d2+LOtYudga4w6BuDpQuwHUvsA6Es6HKlG2fQ1/oyAcCNQ1QkRERI5XPQ7adLQOgCOHrI07SoPx9s3WhbSft1rHiv8ee2/MuXDueVAz8ehxHtSsYz3vsC4VCsKBQEFYRERETiaiiu9UCmMgZw/s/B52fQ8/fQc7v7OuGOfssY4tq3x/RmgliK0JsXHH3R69HxVrda2IrBZUYVlB2J8VHoGtG+CXn63H6hohIiIip8LlOhpma0LT5GPP5x+0NvTYvePo8ZN1u3entUBvz07rOBG3G6rEQLWYo8G4KoRHWkE8PBLCK0NEZWuHvZBKEBoK7lDrNjbOmqrhRxSE/VnePpgy+NhjBWERERH5IyKrHltQd7ySEsjZDft3w/7sY7f7dlvPH8yxplp4PHBgn3WU15U3wV8GnJGvcaYoCPuzkFCoVd+6f35Ta6K8iIiIyJkWEgLn1LKOEykugkO5Vig+kGPdHjlkHQX5cOSwdRQchqICK1wXF0FJMZQUQUzNCvoyp05B2J/F1oTHptldhYiIiIg1hzi6hnUEieCZ7SwiIiIiUg4KwiIiIiLiSArCIiIiIuJICsIiIiIi4kgKwiIiIiLiSArCIiIiIuJICsIiIiIi4kgKwiIiIiLiSArCIiIiIuJICsIiIiIi4kgKwiIiIiLiSArCIiIiIuJICsIiIiIi4kgKwiIiIiLiSKF2FxBojDEA5OXl2VyJiIiIiJSlNKeV5rYTURAupwMHDgCQmJhocyUiIiIicjIHDhwgOjr6hK+7zO9FZfHh8XjYtWsX1apVw+VyndXPysvLIzExkR07dhAVFXVWP0vOHp3HwKdzGPh0DoODzmPgq6hzaIzhwIEDJCQk4HafeCawrgiXk9vt5rzzzqvQz4yKitJv+CCg8xj4dA4Dn85hcNB5DHwVcQ5PdiW4lBbLiYiIiIgjKQiLiIiIiCMpCPux8PBwRowYQXh4uN2lyB+g8xj4dA4Dn85hcNB5DHz+dg61WE5EREREHElXhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSE/dikSZOoV68eERERJCUlsXz5crtLcqwlS5Zw0003kZCQgMvl4qOPPvJ53RjD8OHDqVWrFpGRkaSkpLBlyxafMfv27aNbt25ERUURExNDz549OXjwoM+YtWvX0q5dOyIiIkhMTGTMmDFn+6s5wqhRo7jsssuoVq0aNWvWpHPnzmzevNlnzJEjR+jXrx/nnHMOVatW5bbbbiM7O9tnzPbt2+nUqROVK1emZs2aPProoxQXF/uM+fzzz7n00ksJDw/nggsuYPr06Wf76znGK6+8QvPmzb2N+JOTk5k/f773dZ3DwDN69GhcLhcDBw70Pqfz6N9GjhyJy+XyOZo0aeJ9PeDOnxG/NGPGDBMWFmbeeOMNs2HDBtOrVy8TExNjsrOz7S7NkebNm2eeeOIJ85///McA5sMPP/R5ffTo0SY6Otp89NFHZs2aNebmm2829evXN/n5+d4x119/vWnRooVZtmyZ+d///mcuuOAC07VrV+/rubm5Ji4uznTr1s2sX7/evPfeeyYyMtK8+uqrFfU1g1Zqaqp58803zfr1683q1avNjTfeaOrUqWMOHjzoHdOnTx+TmJhoFi1aZFasWGEuv/xyc8UVV3hfLy4uNk2bNjUpKSlm1apVZt68eaZGjRpm6NCh3jE//PCDqVy5sklLSzMbN240EydONCEhIWbBggUV+n2D1ezZs83cuXPNt99+az
Zv3mwef/xxU6lSJbN+/XpjjM5hoFm+fLmpV6+ead68uRkwYID3eZ1H/zZixAhz8cUXm59//tl77Nmzx/t6oJ0/BWE/1aZNG9OvXz/v45KSEpOQkGBGjRplY1VijPlNEPZ4PCY+Pt6MHTvW+1xOTo4JDw837733njHGmI0bNxrAfP31194x8+fPNy6Xy+zcudMYY8zkyZNNbGysKSgo8I4ZPHiwady48Vn+Rs6ze/duA5gvvvjCGGOdr0qVKplZs2Z5x2zatMkAJj093Rhj/WPI7XabrKws75hXXnnFREVFec/ZY489Zi6++GKfz+rSpYtJTU0921/JsWJjY81rr72mcxhgDhw4YBo2bGgWLlxorrrqKm8Q1nn0fyNGjDAtWrQo87VAPH+aGuGHCgsLyczMJCUlxfuc2+0mJSWF9PR0GyuTsmzdupWsrCyf8xUdHU1SUpL3fKWnpxMTE0Pr1q29Y1JSUnC73WRkZHjHtG/fnrCwMO+Y1NRUNm/ezP79+yvo2zhDbm4uANWrVwcgMzOToqIin3PYpEkT6tSp43MOmzVrRlxcnHdMamoqeXl5bNiwwTvm+J9ROka/b8+8kpISZsyYwaFDh0hOTtY5DDD9+vWjU6dOv/m11nkMDFu2bCEhIYHzzz+fbt26sX37diAwz5+CsB/au3cvJSUlPv+RAMTFxZGVlWVTVXIipefkZOcrKyuLmjVr+rweGhpK9erVfcaU9TOO/wz54zweDwMHDuTKK6+kadOmgPXrGxYWRkxMjM/YX5/D3zs/JxqTl5dHfn7+2fg6jrNu3TqqVq1KeHg4ffr04cMPP+Siiy7SOQwgM2bMYOXKlYwaNeo3r+k8+r+kpCSmT5/OggULeOWVV9i6dSvt2rXjwIEDAXn+Qs/oTxMR8XP9+vVj/fr1fPnll3aXIqehcePGrF69mtzcXD744AO6d+/OF198YXdZcop27NjBgAEDWLhwIREREXaXI6fhhhtu8N5v3rw5SUlJ1K1bl/fff5/IyEgbKzs9uiLsh2rUqEFISMhvVllmZ2cTHx9vU1VyIqXn5GTnKz4+nt27d/u8XlxczL59+3zGlPUzjv8M+WP69+/PnDlz+OyzzzjvvPO8z8fHx1NYWEhOTo7P+F+fw987PycaExUVFZB/QfijsLAwLrjgAlq1asWoUaNo0aIF48eP1zkMEJmZmezevZtLL72U0NBQQkND+eKLL5gwYQKhoaHExcXpPAaYmJgYGjVqxHfffReQvw8VhP1QWFgYrVq1YtGiRd7nPB4PixYtIjk52cbKpCz169cnPj7e53zl5eWRkZHhPV/Jycnk5OSQmZnpHbN48WI8Hg9JSUneMUuWLKGoqMg7ZuHChTRu3JjY2NgK+jbByRhD//79+fDDD1m8eDH169f3eb1Vq1ZUqlTJ5xxu3ryZ7du3+5zDdevW+fyDZuHChURFRXHRRRd5xxz/M0rH6Pft2ePxeCgoKNA5DBAdOnRg3bp1rF692nu0bt2abt26ee/rPAaWgwcP8v3331OrVq3A/H14xpffyRkxY8YMEx4ebqZPn242btxoevfubWJiYnxWWUrFOXDggFm1apVZtWqVAcwLL7xgVq1aZX788UdjjNU+LSYmxnz88cdm7dq15pZbbimzfdoll1xiMjIyzJdffmkaNmzo0z4tJyfHxMXFmbvuususX7/ezJgxw1SuXFnt086Avn37mujoaPP555/7tPw5fPiwd0yfPn1MnTp1zOLFi82KFStMcnKySU5O9r5e2vKnY8eOZvXq1WbBggXm3HPPLbPlz6OPPmo2bdpkJk2apJZNZ9CQIUPMF198YbZu3WrWrl1rhgwZYlwul/n000+NMTqHger4rhHG6Dz6u4cffth8/vnnZuvWrWbp0qUmJSXF1KhRw+zevdsYE3jnT0HYj02cONHUqVPHhIWFmTZt2phly5bZXZJjffbZZwb4zdG9e3djjNVCbdiwYSYuLs6Eh4ebDh06mM2bN/
v8jF9++cV07drVVK1a1URFRZkePXqYAwcO+IxZs2aNadu2rQkPDze1a9c2o0ePrqivGNTKOneAefPNN71j8vPzzQMPPGBiY2NN5cqVza233mp+/vlnn5+zbds2c8MNN5jIyEhTo0YN8/DDD5uioiKfMZ999plp2bKlCQsLM+eff77PZ8gfc++995q6deuasLAwc+6555oOHTp4Q7AxOoeB6tdBWOfRv3Xp0sXUqlXLhIWFmdq1a5suXbqY7777zvt6oJ0/lzHGnPnrzCIiIiIi/k1zhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJEUhEVERETEkRSERURERMSRFIRFRERExJH+H7e4rPYTpc+9AAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArYAAAHDCAYAAADRBFkDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABMDklEQVR4nO3deXgUVb7G8bezdCcsSVizETZFkF1ZIggCEo2KsqgIDAoi48KgojiOcOcK4p0BlxmGURFGJ6DjxiYyKIhAABVFURAhAgFlFUjYEwiQQPe5fxQ0NAlCQpLqdL6f5+knVadPVX5ljfhOUecchzHGCAAAACjjguwuAAAAACgOBFsAAAAEBIItAAAAAgLBFgAAAAGBYAsAAICAQLAFAABAQCDYAgAAICAQbAEAABAQCLYAAAAICARbAAAABASCLQBcgm3btsnhcOitt96y5fe/9dZbcjgc2rZtmy2/HwDKAoItAPiRsWPHas6cOXaXobvuuku33Xab3WUAQKEQbAHAj1wo2N533306fvy46tSpU+I1nDx5UosWLVK3bt1K/HcBQHEKsbsAAMDFBQcHKzg4uFR+15dffqkjR44QbAGUOTyxBVAm7dq1Sw888ICio6PlcrnUpEkTTZkyRZKUmZmpkJAQjRkzJt9x6enpcjgceu211yRJBw8e1B//+Ec1a9ZMlSpVUkREhG699Vb9+OOPF62hc+fO6ty5c772+++/X3Xr1vVp+9vf/qb27durWrVqCg8PV6tWrTRr1iyfPg6HQzk5OXr77bflcDjkcDh0//33S7rwO7avv/66mjRpIpfLpbi4OA0dOlSHDx/OV2fTpk21fv16denSRRUqVFB8fLxeeumlAq9r3rx5aty4sfca1q5dq/vvv1/169dXWFiYYmJi9MADD+jAgQP5jt21a5cGDx6suLg4uVwu1atXT0OGDFFeXp63z+HDh/Xkk0+qbt26crlcqlWrlgYMGKD9+/cXWA8AXCqe2AIoczIzM3XdddfJ4XDo0UcfVY0aNfTpp59q8ODBys7O1hNPPKFOnTppxowZGj16tM+x06dPV3BwsHr37i1J2rJli+bMmaPevXurXr16yszM1L/+9S916tRJ69evV1xcXLHU/M9//lPdu3dX//79lZeXp2nTpql379765JNPvE9G33nnHf3+979X27Zt9dBDD0mSrrjiigue87nnntOYMWOUlJSkIUOGKD09XZMmTdJ3332nr776SqGhod6+hw4d0i233KI777xT99xzj2bNmqVnnnlGzZo106233upz3vnz5+v222/37i9atEhbtmzRoEGDFBMTo59++klvvPGGfvrpJ33zzTdyOBySpN27d6tt27Y6fPiwHnroITVq1Ei7du3SrFmzdOzYMTmdTh09elQdO3bUhg0b9MADD+jaa6/V/v37NXfuXP3666+qXr16sfzzBlBOGQAoYwYPHmxiY2PN/v37fdr79u1rIiMjzbFjx8y//vUvI8msW7fOp0/jxo3NjTfe6N0/ceKEcbvdPn22bt1qXC6Xef75533aJJmpU6d62zp16mQ6deqUr76BAweaOnXq+LQdO3bMZz8vL880bdrUpxZjjKlYsaIZOHBgvnNOnTrVSDJbt241xhizd+9e43Q6zc033+xT/2uvvWYkmSlTpvjUKcn85z//8bbl5uaamJgYc9ddd/n8ni1bthhJZunSpRes3RhjPvjgAyPJfPHFF962AQMGmKCgIPPdd9/l6+/xeIwxxowaNcpIMrNnz75gHwAoKl5FAFCmGGP04Ycf6o477pAxRvv37/d+kpOTlZWVpdWrV+vOO+9USEiIpk+f7j02LS1N69evV58+fbxtLpdLQUHWH4Vut1sHDhxQpUqV1LBhQ61evbrY6g4PD/duHz
p0SFlZWerYsWORf8fixYuVl5enJ554wlu/JD344IOKiIjQvHnzfPpXqlRJ9957r3ff6XSqbdu22rJli0+/efPmKTIyUh06dCiw9hMnTmj//v267rrrJMlbv8fj0Zw5c3THHXeodevW+eo981T3ww8/VIsWLdSrV68L9gGAoiLYAihT9u3bp8OHD+uNN95QjRo1fD6DBg2SJO3du1fVq1dX165dNWPGDO+x06dPV0hIiO68805vm8fj0T/+8Q81aNBALpdL1atXV40aNbR27VplZWUVW92ffPKJrrvuOoWFhalq1aqqUaOGJk2aVOTfsX37dklSw4YNfdqdTqfq16/v/f6MWrVq5QuOVapU0aFDh3za5s2bp5tvvlkhIWffVDt48KCGDRum6OhohYeHq0aNGqpXr54keevft2+fsrOz1bRp09+s+5dffrloHwAoKt6xBVCmeDweSdK9996rgQMHFtinefPmkqS+fftq0KBBWrNmjVq2bKkZM2aoa9euPu9xjh07Vs8++6weeOAB/d///Z+qVq2qoKAgPfHEE97fdSEOh0PGmHztbrfbZ//LL79U9+7ddcMNN+j1119XbGysQkNDNXXqVL3//vuFuv6iutCMCufWf+zYMS1btkyTJk3y6XPPPffo66+/1tNPP62WLVuqUqVK8ng8uuWWWy76zwgAShPBFkCZUqNGDVWuXFlut1tJSUm/2bdnz556+OGHva8jbNq0SSNHjvTpM2vWLHXp0kUpKSk+7YcPH77oQKYqVark+6t8Sfmeln744YcKCwvTZ599JpfL5W2fOnVqvmMv9a/jz8xnm56ervr163vb8/LytHXr1ov+synIkiVLlJub6zOY7NChQ0pNTdWYMWM0atQob/vmzZt9jq1Ro4YiIiKUlpb2m7/jiiuuuGgfACgqXkUAUKYEBwfrrrvu0ocfflhgQNq3b593OyoqSsnJyZoxY4amTZsmp9Opnj175jvf+U9dZ86cqV27dl20liuuuEIbN270+Z0//vijvvrqq3y/w+Fw+DzJ3bZtW4ELMVSsWDHfdF0FSUpKktPp1CuvvOJTf0pKirKysoo0B+38+fPVunVrRUdH+9QuKd8/owkTJvjsBwUFqWfPnvr444/1/fff5zv3mePvuusu/fjjj/roo48u2AcAioontgDKnBdeeEFLly5VYmKiHnzwQTVu3FgHDx7U6tWrtXjxYh08eNDbt0+fPrr33nv1+uuvKzk5WVFRUT7nuv322/X8889r0KBBat++vdatW6f33nvP5ynohTzwwAMaP368kpOTNXjwYO3du1eTJ09WkyZNlJ2d7e3XrVs3jR8/Xrfccot+97vfae/evZo4caKuvPJKrV271uecrVq10uLFizV+/HjFxcWpXr16SkxMzPe7a9SooZEjR2rMmDG65ZZb1L17d6Wnp+v1119XmzZtfAaKXar58+d731M+IyIiQjfccINeeuklnTx5UvHx8Vq4cKG2bt2a7/ixY8dq4cKF6tSpkx566CFdffXV2rNnj2bOnKnly5crKipKTz/9tGbNmqXevXvrgQceUKtWrXTw4EHNnTtXkydPVosWLQpdNwB42TYfAwBchszMTDN06FCTkJBgQkNDTUxMjOnatat54403fPplZ2eb8PBwI8m8++67+c5z4sQJ89RTT5nY2FgTHh5urr/+erNixYp8U3kVNN2XMca8++67pn79+sbpdJqWLVuazz77rMDpvlJSUkyDBg2My+UyjRo1MlOnTjWjR4825/8xvHHjRnPDDTd4az4z9df5032d8dprr5lGjRqZ0NBQEx0dbYYMGWIOHTrk06dTp06mSZMm+a793DrT0tKMJLNy5cp8/X799VfTq1cvExUVZSIjI03v3r3N7t27jSQzevRon77bt283AwYMMDVq1DAul8vUr1/fDB061OTm5nr7HDhwwDz66KMmPj7eOJ1OU6tWLTNw4MB807cBQGE5jOHvfgCgvHvppZc0fv
x47dmzh2m3AJRZvGMLAFDdunX1j3/8g1ALoEzjiS0AAAACAk9sAQAAEBAItgAAAAgIBFsAAAAEhICZx9bj8Wj37t2qXLkygx8AAAD8kDFGR44cUVxcnIKCiv/5asAE2927dyshIcHuMgAAAHARO3fuVK1atYr9vAETbCtXrizJ+gcVERFhczUAAAA4X3Z2thISEry5rbgFTLA98/pBREQEwRYAAMCPldRrowweAwAAQEAg2AIAACAgEGwBAAAQEAi2AAAACAgEWwAAAAQEgi0AAAACAsEWAAAAAYFgCwAAgIBAsAUAAEBAINgCAAAgIBBsAQAAEBAItgAAAAgIBFsAAAAEhBC7CwAAAEAp8Lil3OPSiWOnf+ZYP2vES1Vj7K6uWBBsAQAA/JUxUt6JsyH0RE7+YHp+W77vj1nbeScK/h09HpE6312611VCCLYAAABFYYzkPinl5VqhMS9XOnninP0T0sncs/snL6HfyRNS7rnHHbd+T3EKDpHCKkiuClJYRSm8YvGe30YEWwAAULw8bikn2/ocPSzlZElHs3x/nsiRHEFSUJAUFGz9dJyz7fMzuHD9HEFScLDk8UinTkruU1YAPXWhn+f2OafdfeoC++dsG0/p/DN1BJ0Oo+FWGM23XcH6ee52QW1hFaQQZ+nUbAOCLQAAuLAzfxWek3U6pGaf97OA0Hr8aPE/ZfR3wSFSqEtyhklO1znbYae3C9gPDTun/fz9MCnUaQVSV7i173DYfZV+j2ALAEB55T4lHd4nHdprfQ7vPbt95JAVUnOypJN5RTt/hcpSxUjrU+m8nxUqSUbW012P23q66nFbT0Dd5/08853358W+P/0zKMgKnCGhUnCoFBJy+mfoOe0hZ7e9+wX0udA5zgTRYCKVP+AuAAAQiIyRjh3xDavnh9fsA5f+ZDUkVKoUdU5QjZAqRp0NqueH1woR1usAQCki2AIAUBadOill7S84sB7KtH5eaBT8uUJCpaiaUpVzPlE1pchqUsWIs2GWvwpHGUCwBQDAn+XlShnbpD1brc/uLdLeHVL2wUt72lopyjewnhtgq0RboTWI9ZoQGAi2AAD4A49HOphxNrye+bl/94VH3oeEFhxYo06H1qga1jugQDlBsAUAoLTlZJ0OrlulPadD7J5tF351oGKkFFdfiq13+lPXWimqUhSvBwDnINgCAFBSTuVJmTvPPoE9E2KzDhTcPyRUiq7jG2Lj6kuVqxBggUtAsAUAoDicOint3CRtSZN2/WwF2L07rWmnClI1RoqrJ8WeDrFx9aTqtZhJALgMBFsAAIoi97i0bb20ZZ312b6h4PlewyvlfwIbU9daAQpAsSLYAgBwKXKyrKexW9KkLWulXzdbA77OVTFSqt9Mqt3QCrBx9aXI6rxGAJQSgi0AAAU5tPfs09gtadaUW+erUlOq31yq31S6orlUM4EQC9iIYAsAgDHSvl+tEPvLWivIHszI3y+6tvVE9orTYbZKdOnXCuCCCLYAgPLH45Z2bTnniew66ehh3z6OICn+SumKZlaYrd/Uml4LgN8i2AIAApvbLe3/9eyUWzs3S9t+kk4c8+0XEirVudoKsPWbS3UbM8ALKGMItgCAwGCMtczsuUvP7tkqZW63puI6n6uCVK/J2SeytRtKIc7SrxtAsSHYAgDKnrwT1mCu3eeF2Jysgvs7w6zVumJPzxtbv6k1Y0EQc8YCgYRgCwDwXx63dGCPb3jdvVU6sNt6Qns+R5BUPe7svLFnflaNkYKCSr9+AKWKYAsA8A/GWIscbN94dvnZjO3W09mCVIryDa9x9a3laJ2uUi0bgP8g2AIA7JWTLX2/SFoxT8rckf/7UKcUXff08rPnrN5VuUqplwrAvxFsAQClzxhpa5r09Tzpx8/PDu5yhklXXSvFXXE6wNazXi3gXVgAl4BgCwAoPceOSN8vlr7+xJqt4Iz4K6R2t0utbpTCKtpXH4AyjWALAChZxkjb1lth9sfPpZN5VrszTL
qmsxVoazdkKVoAl41gCwAoGcePWk9nV3wi7dl2tj2uvtSum9SqqxReybbyAAQegi0AoPicmdlgxTzph2XSyVyrPdR1+ulsN2t1L57OAigBBFsAwOU7flRalWoF2t1bzrbH1rVeNWidxNNZACWOYAsAKBpjpB3p1qsGPyw7O99sqFNq0Ulqf7tUtzFPZwGUGoItAKBwTuRIq5ZYgXbXL2fbo2uffTpbMcK++gCUWwRbAMClydwuLZslrV569ulsSOjpp7PdpHpNeToLwFYEWwDAbzuUKS34j/TdIsl4rLaaCdZAsDY3SRUj7a0PAE4j2AIACpaTJS3+QFr+37MrgzVtL3W6S7qiOU9nAfidoKIcNHHiRNWtW1dhYWFKTEzUypUrL9i3c+fOcjgc+T7dunXz6bdhwwZ1795dkZGRqlixotq0aaMdOwpYMxwAULJyj0sL35X+cp/16sGpk1aQHfaKNPh56coWhFoAfqnQT2ynT5+u4cOHa/LkyUpMTNSECROUnJys9PR01axZM1//2bNnKy8vz7t/4MABtWjRQr179/a2/fLLL+rQoYMGDx6sMWPGKCIiQj/99JPCwsKKeFkAgEI7ddKarmvhu9LRw1Zb/BVSt8FSozaEWQB+z2GMMYU5IDExUW3atNFrr70mSfJ4PEpISNBjjz2mESNGXPT4CRMmaNSoUdqzZ48qVrTWA+/bt69CQ0P1zjvvFOESLNnZ2YqMjFRWVpYiIhiNCwCXzOORflgqzZ8qHcyw2qrFSrcNklp2loKK9Jd7AJBPSee1Qv1plZeXp1WrVikpKensCYKClJSUpBUrVlzSOVJSUtS3b19vqPV4PJo3b56uuuoqJScnq2bNmkpMTNScOXMKUxoAoLCMkX76Rvr7I9K746xQW7mqdPfj0ogp0rU3EmoBlCmFehVh//79crvdio6O9mmPjo7Wxo0bL3r8ypUrlZaWppSUFG/b3r17dfToUb3wwgv6y1/+ohdffFELFizQnXfeqaVLl6pTp04Fnis3N1e5ubne/ezs7MJcCgCUb1t/kj75t7RlnbUfVkG6sa90Qy/JFW5vbQBQRKU6K0JKSoqaNWumtm3bets8HmvqmB49eujJJ5+UJLVs2VJff/21Jk+efMFgO27cOI0ZM6bkiwaAQLJnqzR/ipR2+m/ZQkKljr2krn2YtgtAmVeoYFu9enUFBwcrMzPTpz0zM1MxMTG/eWxOTo6mTZum559/Pt85Q0JC1LhxY5/2q6++WsuXL7/g+UaOHKnhw4d797Ozs5WQkHCplwIA5cvBDOnTt6VVi61XEBxBUmKylDxAiqphd3UAUCwKFWydTqdatWql1NRU9ezZU5L1xDU1NVWPPvrobx47c+ZM5ebm6t577813zjZt2ig9Pd2nfdOmTapTp84Fz+dyueRyuQpTPgCUP0cOWXPRfvWx5D49F23zjtbAsOja9tYGAMWs0K8iDB8+XAMHDlTr1q3Vtm1bTZgwQTk5ORo0aJAkacCAAYqPj9e4ceN8jktJSVHPnj1VrVq1fOd8+umn1adPH91www3q0qWLFixYoI8//ljLli0r2lUBQHl3Iseag3bZLGteWklqcI10+2CpdiN7awOAElLoYNunTx/t27dPo0aNUkZGhlq2bKkFCxZ4B5Tt2LFDQeeNok1PT9fy5cu1cOHCAs/Zq1cvTZ48WePGjdPjjz+uhg0b6sMPP1SHDh2KcEkAUI6dypO++kRa9J61cpgk1Wog3f57qWEre2sDgBJW6Hls/RXz2AIo19xuaVWqtOBt6dDpcRA14qXbHrBePWDaLgB+oKTzWqnOigAAKGYet7Tmc+mzd6S9O622yGrSzfdJibdIwfwxD6D84E88ACiLPB5p7ZfSgv9ImduttgqVpRv7SB17Sk6WJAdQ/hBsAaAsMUZa95X1ysGerVZbeCWpS28r0IZVtLU8ALATwRYAyoIzy98ueFva9bPVFlZB6nSX9QmvZG99AOAHCLYA4M+MkT
Z+J336lrRzk9XmCreWvu10t1SRwbIAcAbBFgD8kTHSptVWoN2+wWpzhkkdekhd7pEqsfwtAJyPYAsA/mbzGivQbk2z9kOd0vU9pBvvkSpXsbMyAPBrBFsA8Bdb1kmfvi39vMbaDwmV2t9hzXQQmX/VRgCAL4ItANht23or0G5aZe0Hh0rtbpO69pOiqttbGwCUIQRbALDLjo3WPLQbVlr7QcFS4q3STf2kKtH21gYAZRDBFgBK26+brUD70wprPyhIapMs3dxfqhpjb20AUIYRbAGgtOzeYgXadcutfUeQ1DpJuvleqXqcvbUBQAAg2AJASdu9RVr4rvTjF9a+wyFde6N0831SzVr21gYAAYRgCwAlZedmadG71hK4Z1zT2Qq0MXXsqgoAAhbBFgCK2/aN1hPa9d9Y+w6H1LKzdNPvpNh6tpYGAIGMYAsAxWXbeumzd6wlcCXrHdprb7RmOYjmCS0AlDSCLQBcrl/WSQvfsZbAlaxZDlolWU9oa/AOLQCUFoItABSFMdLPP1qB9ucfrbagYKnNzVJSP2Y5AAAbEGwBoDCMsZ7MLnzXWgJXkoJDpLa3SEl9mYcWAGxEsAWAS2GM9e7swnetd2mls0vf3thHqlLT3voAAARbAPhNxkjrv7VeOdiRbrWFOqV2t0td7pGiqttbHwDAi2ALAAXxeKwlbz97R9r1s9UW6pKu7y516S1FVLW3PgBAPgRbADiXxyOtXW4trLB7i9XmDJM69JA63y1VrmJvfQCACyLYAoAkedzSmi+sd2gzt1ttrgrSDT2lG+6SKkXaWh4A4OIItgCweY304StS5g5rP6yidMOd0g29pIoRtpYGALh0BFsA5dfRw9J/J0vfL7b2K1SWOt0ldewphVeyszIAQBEQbAGUPx6PtHKB9PGb0rEjksMhtb9D6vYAgRYAyjCCLYDyZc82aeYEaWuatR9/hdT7CanO1TYWBQAoDgRbAOVD3glrYNjSmdZAMWeYdOv9UsdeUnCw3dUBAIoBwRZA4NuwUpr1inQww9pv2l66c6hUJdreugAAxYpgCyBwZR2Q5rwurfnc2o+qId35qNTsenvrAgCUCIItgMDjcUtffyLNS5FOHJOCgqzpu24ZKLnC7a4OAFBCCLYAAsuvm63BYTvSrf3ajaR7npDir7SzKgBAKSDYAggMucelT9+SvvhIMh4prILUbbDU/nYpiMFhAFAeEGwBlH3rvpJmvyYd3mftX9NZ6jFEiqxmZ1UAgFJGsAVQdh3KtAJt2gprv2qMdPfj0tVt7a0LAGALgi2Assftlr6YLS1425qfNihYuvEe6ab+1vy0AIByiWALoGzZvkGaMUHa/Yu1X7+ZdPcwKbaunVUBAPwAwRZA2XD8qDV919efSMZIFSpL3R+S2iRb03kBAMo9gi0A/+Y+JX2/SJo3VTpy0Gprc7MVaitF2VoaAMC/EGwB+Cf3Ken7xdKi96QDe6y2mglS72HSlS1tLQ0A4J8ItgD8i9strTodaPfvttoqRUk39pE69pBCnLaWBwDwXwRbAP7B7ZZWpUqL3j0v0N4jtb+DpXABABdFsAVgLwItAKCYEGwB2MPtllanSgvfk/bvstoqRlqB9vruBFoAQKERbAGUrgsF2i73SB0ItACAoiPYAigdbre0eon1ysE+Ai0AoPgRbAGULLdb+mGptPBdad+vVlvFiNOBtgeBFgBQbIq0XM/EiRNVt25dhYWFKTExUStXrrxg386dO8vhcOT7dOvWrcD+jzzyiBwOhyZMmFCU0gD4C4/bmof2xcHSey9YobZihNRtsPS/70pd+xJqAQDFqtBPbKdPn67hw4dr8uTJSkxM1IQJE5ScnKz09HTVrFkzX//Zs2crLy/Pu3/gwAG1aNFCvXv3ztf3o48+0jfffKO4uLjClgXAX3jc0g/LpM/eOfuEtkJlqUtvqUNPKayCndUBAAJYoYPt+PHj9eCDD2rQoEGSpMmTJ2vevHmaMmWKRowYka9/1a
pVffanTZumChUq5Au2u3bt0mOPPabPPvvsgk9zAfixM4F24bvS3p1WG4EWAFCKChVs8/LytGrVKo0cOdLbFhQUpKSkJK1YseKSzpGSkqK+ffuqYsWK3jaPx6P77rtPTz/9tJo0aVKYkgDYzZjTT2j/4xtoO/e2VgoLq/ibhwMAUFwKFWz3798vt9ut6Ohon/bo6Ght3LjxosevXLlSaWlpSklJ8Wl/8cUXFRISoscff/ySa8nNzVVubq53Pzs7+5KPBVBMMrdLM/8p/bLW2q9QWep8t9SxJ4EWAFDqSnVWhJSUFDVr1kxt27b1tq1atUr//Oc/tXr1ajkcjks+17hx4zRmzJiSKBPAxeTlSovfk5bMkNynpFCXNRis050EWgCAbQo1K0L16tUVHByszMxMn/bMzEzFxMT85rE5OTmaNm2aBg8e7NP+5Zdfau/evapdu7ZCQkIUEhKi7du366mnnlLdunUveL6RI0cqKyvL+9m5c2dhLgVAUW38Tnrp99Ki961Q2zhRGpEiJd9HqAUA2KpQT2ydTqdatWql1NRU9ezZU5L1fmxqaqoeffTR3zx25syZys3N1b333uvTft999ykpKcmnLTk5Wffdd593gFpBXC6XXC5XYcoHcDmyDkhzXpfWfG7tR1aX7hwqNesgFeJvWwAAKCmFfhVh+PDhGjhwoFq3bq22bdtqwoQJysnJ8YbQAQMGKD4+XuPGjfM5LiUlRT179lS1atV82qtVq5avLTQ0VDExMWrYsGFhywNQ3Dxu6auPpflTpBPHJEeQdEMv6ZaBzHQAAPArhQ62ffr00b59+zRq1ChlZGSoZcuWWrBggXdA2Y4dOxQU5PuGQ3p6upYvX66FCxcWT9UASsfOzdLMf0g7N1n7tRtKvZ+QajWwtSwAAAriMMYYu4soDtnZ2YqMjFRWVpYiIiLsLgco207kSJ++LX05RzIe68lst8FS+9uloGC7qwMAlFElnddKdVYEAH7OGGntl9JHE613aiXpmi5Sj0ekyGq/fSwAADYj2AKwHNgjffiqtGGltV89TrrrcalRa3vrAgDgEhFsgfLu1Elp2SxrKdyTuVJwiDUnbdd+kpOZRwAAZQfBFijPtqyzVg7L2GbtX9lCunuYFF3b1rIAACgKgi1QHuVkSR//W/r2U2u/UpTU/WGpdRJz0gIAyiyCLVCeGCN9t1Ca+y8pJ9tqu+426fbfSxWZTQQAULYRbIHyInO79drBL2ut/di6Uu8npXpNbC0LAIDiQrAFAl1errT4PWnJDMl9SnKGScn3SZ3usgaKAQAQIPivGhDIjJH+8xfppxXWfpN20p2PSlWj7a0LAIASQLAFAtkPy6xQGxwqDfgfqVkHBocBAAIWwRYIVDnZ1gpiknRzf6l5R3vrAQCghAXZXQCAEjL3X9LRw1J0HenGPnZXAwBAiSPYAoFo8w/Sys+s1w76DJdCQu2uCACAEkewBQJNXq40Y4K13f4OpvMCAJQbBFsg0Cx6T9q/S4qsJnV7wO5qAAAoNQRbIJDs3iItmW5t3/W4FF7J3noAAChFBFsgUHjc0ozx1s9mHaRm19tdEQAApYpgCwSK5XOl7RulsArWIgwAAJQzBFsgEBzaK82fYm3f/nspqrq99QAAYAOCLVDWGSN9+KqUe1yq20Rqd7vdFQEAYAuCLVDW/fjl6WVzQ6w5a4P41xoAUD7xX0CgLDt2RJr9mrXdtZ8UU8feegAAsBHBFijLPn5TOnJQqpkg3dTP7moAALAVwRYoq35ZK30z39q+Z7gU4rS3HgAAbEawBcqik3nSjH9Y2+26SVc0s7ceAAD8AMEWKIsWfyDt3SlVrird8aDd1QAA4BcItkBZs2eblPqBtX3noyybCwDAaQRboCzxeKxXENynpCbtpBYd7a4IAAC/QbAFypIVn0jbfpJc4dJdj0kOh90VAQDgNwi2QFlxeL/0yb+t7dsekKrUtLceAAD8DMEWKCtmvyadOCbVaSR16G53NQ
AA+B2CLVAWrF0urVsuBQVbc9YGBdtdEQAAfodgC/i740el2a9a2zf2keLq21sPAAB+imAL+Lt5KVLWAal6vHRTf7urAQDAbxFsAX+2JU366mNr+54nJafL3noAAPBjBFvAX506Z9nctslSg5a2lgMAgL8j2AL+askMKXO7VClK6v6w3dUAAOD3CLaAP8rcIS18z9ru9QepYoS99QAAUAYQbAF/410296R0dVvpmi52VwQAQJlAsAX8zbefSlvWSc4w6e7HWTYXAIBLRLAF/EnWAWnuG9b2rfdLVWNsLQcAgLKEYAv4k48mSidypFpXSR172V0NAABlCsEW8BdpK6Qfv5CCgqQ+T0rBLJsLAEBhEGwBf3DimPThK9Z2p7ulWg3srQcAgDKIYAv4g/lTpMP7rHdqbxlgdzUAAJRJBFvAbts3SMv/a23f86Q1GwIAACg0gi1gp1N50vTxkjFS6ySpYSu7KwIAoMwi2AJ28Xik91+S9my1Vhbr8YjdFQEAUKYVKdhOnDhRdevWVVhYmBITE7Vy5coL9u3cubMcDke+T7du3SRJJ0+e1DPPPKNmzZqpYsWKiouL04ABA7R79+6iXRFQFhgj/Xey9MMyKThEuu/PUqUou6sCAKBMK3SwnT59uoYPH67Ro0dr9erVatGihZKTk7V3794C+8+ePVt79uzxftLS0hQcHKzevXtLko4dO6bVq1fr2Wef1erVqzV79mylp6ere/ful3dlgD9bMkP6Yra13e9pXkEAAKAYOIwxpjAHJCYmqk2bNnrttdckSR6PRwkJCXrsscc0YsSIix4/YcIEjRo1Snv27FHFihUL7PPdd9+pbdu22r59u2rXrn1JdWVnZysyMlJZWVmKiIi49AsCStt3C61XECTr9YPOd9tbDwAApaSk81qhntjm5eVp1apVSkpKOnuCoCAlJSVpxYoVl3SOlJQU9e3b94KhVpKysrLkcDgUFRVVmPIA/7dhpTTt79Z2596EWgAAilFIYTrv379fbrdb0dHRPu3R0dHauHHjRY9fuXKl0tLSlJKScsE+J06c0DPPPKN+/fr9ZpLPzc1Vbm6udz87O/sSrgCw0faN0lvPSx631KqrdMeDdlcEAEBAKdVZEVJSUtSsWTO1bdu2wO9Pnjype+65R8YYTZo06TfPNW7cOEVGRno/CQkJJVEyUDz2/Sq9+Wcp74R0VSup7x+tpXMBAECxKdR/WatXr67g4GBlZmb6tGdmZiomJuY3j83JydG0adM0ePDgAr8/E2q3b9+uRYsWXfS9i5EjRyorK8v72blzZ2EuBSg92QelySOknCyp1lXSoNFSSKjdVQEAEHAKFWydTqdatWql1NRUb5vH41FqaqratWv3m8fOnDlTubm5uvfee/N9dybUbt68WYsXL1a1atUuWovL5VJERITPB/A7J3KkN0ZKBzOk6nHSQ3+VwirYXRUAAAGpUO/YStLw4cM1cOBAtW7dWm3bttWECROUk5OjQYMGSZIGDBig+Ph4jRs3zue4lJQU9ezZM19oPXnypO6++26tXr1an3zyidxutzIyMiRJVatWldPpLOq1AfY6lSdNeU7a9Ys1R+3DL0iVq9hdFQAAAavQwbZPnz7at2+fRo0apYyMDLVs2VILFizwDijbsWOHgs57dzA9PV3Lly/XwoUL851v165dmjt3riSpZcuWPt8tXbpUnTt3LmyJgP3OrCq2+QfJFS49NNZ6YgsAAEpMoeex9VfMYwu/YYw0Z5K1AENQsBVqWYABAAD/mscWwCVYes6qYr/7E6EWAIBSQrAFitN3i6SP37S2uz9szVcLAABKBcEWKC4bVkrT/mZtd+4tdeltbz0AAJQzBFugOOw4Z1Wxa29kVTEAAGxAsAUu175fpTf/9+yqYv2eZlUxAABswH99gctxZlWxo4elWg1YVQwAABsRbIGiOpEjvfE/1qpi1WKtab1YVQwAANsQbIGiOHXy9KpiP7OqGAAAfoJgCxSWxy
N9cHpVMWeY9aS2RrzdVQEAUO4RbIHCMEb672Rp9VJrVbFBz0kJV9ldFQAAEMEWKJxlM31XFWvU2t56AACAF8EWuFTfLZLmvmFts6oYAAB+h2ALXIqN352zqtjdrCoGAIAfItgCF7MjXZo65pxVxR6yuyIAAFAAgi3wW/btkt788+lVxa5lVTEAAPwY/4UGLuRUnjT1uXNWFXuOVcUAAPBjBFvgQj57R9qz1VqA4cG/sqoYAAB+jmALFGT7Bil1urXde5gUUdXeegAAwEURbIHz5eVK778kGY81WKx5R7srAgAAl4BgC5xv/hRp704popp056N2VwMAAC4RwRY41y/rzq4sds+TUsUIe+sBAACXjGALnJF7XPrgJckYKfEWqcl1dlcEAAAKgWALnPHxm9KBPVJUDanHI3ZXAwAAColgC0hS+irpq7nWdt8/SuGV7K0HAAAUGsEWOH5UmvY3a/v67lLDVvbWAwAAioRgC/x3snR4n1QtVrrjQburAQAARUSwRfn20zfStwskh0Pq9yfJFW53RQAAoIgItii/crKl6eOt7U53SVc0s7ceAABwWQi2KL9mvyYdOSjVrC3dOsjuagAAwGUi2KJ8WvultHqJ5AiSfve05HTZXREAALhMBFuUP0cOSTP/aW137SPVudreegAAQLEg2KJ8MUaa9U/p6GEptp6UfJ/dFQEAgGJCsEX5snqJtHa5FBQs9X9GCnHaXREAACgmBFuUH1n7rQFjknTzvVL8lfbWAwAAihXBFuWDMdKMf0jHjki1rpKS+tldEQAAKGYEW5QPKxdI67+VgkOl3/1JCg6xuyIAAFDMCLYIfIcypY8mWdu33i/F1rWzGgAAUEIItghsxkjT/i7lHpPqNpa63G13RQAAoIQQbBHYvv5Y2rRaCnVJ/f5kzYYAAAACEsEWgWv/bmnuG9b27b+Xataytx4AAFCiCLYITB6P9MHLUt4J6coWUocedlcEAABKGMEWgemL2dKWdZIrXOr7RymI/6kDABDo+K89Ak/mDmn+FGu7+8NStVh76wEAAKWCYIvA4nZL778kncyTGrWW2nWzuyIAAFBKCLYILEunSzs2SmEVpT5PSQ6H3RUBAIBSQrBF4Ni9RVrwH2u711Apqoa99QAAgFJFsEVgOHVSev9FyX1KatpOanOT3RUBAIBSRrBFYFj0vrTrF6lCZan3k7yCAABAOVSkYDtx4kTVrVtXYWFhSkxM1MqVKy/Yt3PnznI4HPk+3bqdHdRjjNGoUaMUGxur8PBwJSUlafPmzUUpDeXRzk3S4ves7bsflyKq2lsPAACwRaGD7fTp0zV8+HCNHj1aq1evVosWLZScnKy9e/cW2H/27Nnas2eP95OWlqbg4GD17t3b2+ell17SK6+8osmTJ+vbb79VxYoVlZycrBMnThT9ylA+nMyzZkHweKQWN0jXdLG7IgAAYJNCB9vx48frwQcf1KBBg9S4cWNNnjxZFSpU0JQpUwrsX7VqVcXExHg/ixYtUoUKFbzB1hijCRMm6H//93/Vo0cPNW/eXP/5z3+0e/duzZkz57IuDuXAgreljG1SpSjp7mF2VwMAAGxUqGCbl5enVatWKSkp6ewJgoKUlJSkFStWXNI5UlJS1LdvX1WsWFGStHXrVmVkZPicMzIyUomJiZd8TpRT29ZLS2da2/c8KVWKtLceAABgq5DCdN6/f7/cbreio6N92qOjo7Vx48aLHr9y5UqlpaUpJSXF25aRkeE9x/nnPPNdQXJzc5Wbm+vdz87OvqRrQIDIPS6996JkPFLrJKnZ9XZXBAAAbFaqsyKkpKSoWbNmatu27WWfa9y4cYqMjPR+EhISiqFClBnzUqT9u6TI6tKdj9pdDQAA8AOFCrbVq1dXcHCwMjMzfdozMzMVExPzm8fm5ORo2rRpGjx4sE/7meMKe86RI0cqKyvL+9m5c2dhLgVl2abV0pdzrO2+f5TCK9laDgAA8A+FCrZOp1OtWrVSamqqt8
3j8Sg1NVXt2rX7zWNnzpyp3Nxc3XvvvT7t9erVU0xMjM85s7Oz9e233/7mOV0ulyIiInw+KAeOH5Wm/c3abn+71Ki1vfUAAAC/Uah3bCVp+PDhGjhwoFq3bq22bdtqwoQJysnJ0aBBgyRJAwYMUHx8vMaNG+dzXEpKinr27Klq1ar5tDscDj3xxBP6y1/+ogYNGqhevXp69tlnFRcXp549exb9yhCY/jtZOrRXqhYrdX/Y7moAAIAfKXSw7dOnj/bt26dRo0YpIyNDLVu21IIFC7yDv3bs2KGgIN8Hwenp6Vq+fLkWLlxY4Dn/9Kc/KScnRw899JAOHz6sDh06aMGCBQoLCyvCJSFgpa2Qvl1grSrW70+SK9zuigAAgB9xGGOM3UUUh+zsbEVGRiorK4vXEgJRTpb04u+lI4ekzr2lHjytBQCgrCnpvFaqsyIARfbhq1aoja4t3TbI7moAAIAfItjC//2wVPphmRQUJP3uGSnUaXdFAADADxFs4d+yDkizXrG2k/pLtRvaWw8AAPBbBFv4L2OkGf+Qjh2R4q+Ubu5vd0UAAMCPEWzhv1Z+Jq3/RgoOlfo/IwUXehIPAABQjhBs4Z8OZkofvW5t33q/FFvP1nIAAID/I9jC/3g81upiucekuk2kLnfbXREAACgDCLbwP1/NlTb/IDnDpN/9SQoKtrsiAABQBhBs4V/2/ip9/Ka1fceDUo14e+sBAABlBsEW/sPjlt5/UTqZKzW4Rmp/h90VAQCAMoRgC/+xdKa0fYMUVkHq90drQQYAAIBLRHKAf9i9Rfr0bWu711CpSrS99QAAgDKHYAv7nTppvYLgPik1aSe1udnuigAAQBlEsIX9Fr0n7fpFqhgh3fOk5HDYXREAACiDCLaw146N0uL3re27h0kRVe2tBwAAlFkEW9gnL1d67yVrQYZrukgtO9ldEQAAKMMItrDPp1OlvTukylWlux6zuxoAAFDGEWxhj1/WSp9/aG33GW69XwsAAHAZCLYofSeOSe+/JBkjJd4qNbnO7ooAAEAAINii9M19QzqYYc1V2/MRu6sBAAABgmCL0rXxO2nFJ9Z2v6elsIr21gMAAAIGwRal59gRadrfrO2OvaQGLW0tBwAABBaCLUrP7IlS1gGpRi3p9sF2VwMAAAIMwRalY+2X0qrFkiNI+t2fJGeY3RUBAIAAQ7BFyTtySJr5T2u7ax+pbmN76wEAAAGJYIuSZYw0c4J09LAUV19Kvs/uigAAQIAi2KJkrUqV1n0lBYdIv3tGCnHaXREAAAhQBFuUnMP7pA9ftbaT75Pir7C3HgAAENAItig5sydKJ3Kk2g2lG/vaXQ0AAAhwBFuUjN1bpHXLJYdD6vtHKTjY7ooAAECAI9iiZCz+wPrZvKMUW8/eWgAAQLlAsEXx2/ertOZza/um39lbCwAAKDcItih+qdMk45EaJ0rxV9pdDQAAKCcItihehzKl7xZZ2zf1t7cWAABQrhBsUbyWzJA8bunKlqwwBgAAShXBFsXnyCHp20+tbd6tBQAApYxgi+KzbJZ0Mk+q00hqcI3d1QAAgHKGYIvikZMtfTXX2k7qb81fCwAAUIoItigeX86Rco9LcfWlJtfZXQ0AACiHCLa4fCeOSV9+ZG0n9eNpLQAAsAXBFpfv64+lY0ekGrWkFjfYXQ0AACinCLa4PHm51qAxSeraVwoKtrceAABQbhFscXlWLrCm+apSU2qdZHc1AACgHCPYoujcp6Ql063tG/tIwSH21gMAAMo1gi2K7vvF0qG9UuUqUttb7K4GAACUcwRbFI3HLaV+YG13vltyuuytBwAAlHsEWxTNmi+kfbukCpWl9nfYXQ0AAADBFkXg8UiL37e2O/aSwirYWw8AAICKGGwnTpyounXrKiwsTImJiVq5cuVv9j98+LCGDh2q2NhYuVwuXXXVVZo/f773e7fbrWeffVb16tVTeHi4rrjiCv3f//2fjDFFKQ8lbf030p6tkquC1LGn3d
UAAABIkgo9jH369OkaPny4Jk+erMTERE2YMEHJyclKT09XzZo18/XPy8vTTTfdpJo1a2rWrFmKj4/X9u3bFRUV5e3z4osvatKkSXr77bfVpEkTff/99xo0aJAiIyP1+OOPX9YFopgZIy06/bT2+jukihH21gMAAHBaoYPt+PHj9eCDD2rQoEGSpMmTJ2vevHmaMmWKRowYka//lClTdPDgQX399dcKDQ2VJNWtW9enz9dff60ePXqoW7du3u8/+OCDiz4Jhg02rZZ2bJRCndagMQAAAD9RqFcR8vLytGrVKiUlnZ2IPygoSElJSVqxYkWBx8ydO1ft2rXT0KFDFR0draZNm2rs2LFyu93ePu3bt1dqaqo2bdokSfrxxx+1fPly3XrrrUW5JpSkM+/WXnebNc0XAACAnyjUE9v9+/fL7XYrOjrapz06OlobN24s8JgtW7ZoyZIl6t+/v+bPn6+ff/5Zf/jDH3Ty5EmNHj1akjRixAhlZ2erUaNGCg4Oltvt1l//+lf179//grXk5uYqNzfXu5+dnV2YS0FRbP1J+vlHayGGLvfYXQ0AAICPEl8qyuPxqGbNmnrjjTcUHBysVq1aadeuXXr55Ze9wXbGjBl677339P7776tJkyZas2aNnnjiCcXFxWngwIEFnnfcuHEaM2ZMSZePcy16z/rZ+iZrCV0AAAA/UqhgW716dQUHByszM9OnPTMzUzExMQUeExsbq9DQUAUHB3vbrr76amVkZCgvL09Op1NPP/20RowYob59+0qSmjVrpu3bt2vcuHEXDLYjR47U8OHDvfvZ2dlKSEgozOWgMH7dLG1YKTmCpK597a4GAAAgn0K9Y+t0OtWqVSulpqZ62zwej1JTU9WuXbsCj7n++uv1888/y+PxeNs2bdqk2NhYOZ1OSdKxY8cUFORbSnBwsM8x53O5XIqIiPD5oAQtPr3K2DWdpBrx9tYCAABQgELPYzt8+HC9+eabevvtt7VhwwYNGTJEOTk53lkSBgwYoJEjR3r7DxkyRAcPHtSwYcO0adMmzZs3T2PHjtXQoUO9fe644w799a9/1bx587Rt2zZ99NFHGj9+vHr16lUMl4jLlrldWvultZ30O3trAQAAuIBCv2Pbp08f7du3T6NGjVJGRoZatmypBQsWeAeU7dixw+fpa0JCgj777DM9+eSTat68ueLj4zVs2DA988wz3j6vvvqqnn32Wf3hD3/Q3r17FRcXp4cfflijRo0qhkvEZVs8zZq/tml7Kbae3dUAAAAUyGECZHmv7OxsRUZGKisri9cSitOBPdLYgdYyuk++JtVuZHdFAACgjCrpvFakJXVRjiyZboXaq1oRagEAgF8j2OLCsvZL335mbd/Eu7UAAMC/EWxxYctmSe6TUr0m0hXN7a4GAADgNxFsUbCjWdLXn1jbN/WXHA576wEAALgIgi0K9sVsKe+EVKuB1KiN3dUAAABcFMEW+R0/Kn05x9pO6sfTWgAAUCYQbJHfV3OlEzlSdG2pWQe7qwEAALgkBFv4yjshfT7b2u7aTwrifyIAAKBsILXA14r50tHDUtUY6dob7a4GAADgkhFscdapPGnpDGu7a18pONjeegAAAAqBYIuzvltkLcoQWU1qe7Pd1QAAABQKwRYWt1tKnWZtd+4thTjtrQcAAKCQCLawrFkmHdgjVYyU2nWzuxoAAIBCI9hC8nikRe9b253ulFzh9tYDAABQBARbSGlfS5nbpbAKUocedlcDAABQJATb8s6Ys09rO/SUwivZWg4AAEBREWzLu/TvpV83Sc4w6zUEAACAMopgW96deVp73W1SpShbSwEAALgcBNvy7Jd10pZ1UnCo1OUeu6sBAAC4LATb8mzxe9bPtjdLUdXtrQUAAOAyEWzLqw0rpY3fS0FB0o197K4GAADgshFsy6P9u6V3xlrb7e+QqsfZWw8AAEAxINiWN7nHpSmjpONHpTqNpB4P210RAABAsSDYlifGSNP+Ju3ZJl
WuKt3/nBTitLsqAACAYkGwLU+WTJfWfC4Fh0j3j2LAGAAACCgE2/Ji43fSvBRru9dQqX5Te+sBAAAoZgTb8uDMYDFjpMRbpfa3210RAABAsSPYBrrc49KU0dKxI9ZgsbsfkxwOu6sCAAAodgTbQOYdLLZVqlyFwWIAACCgEWwD2ZIZ1mCxoGAGiwEAgIBHsA1UG78/O1jszqFS/Wb21gMAAFDCCLaBaP9u6Z2/SsYjJd5irS4GAAAQ4Ai2gSb3uDT1OWuwWO1G0l2PM1gMAACUCwTbQGKMNP3v0u4t1mCxQc9JoQwWAwAA5QPBNpAsnSH9sMwaLDaQwWIAAKB8IdgGivRV0idnVhb7g3QFg8UAAED5QrANBAf2SP/5y9nBYtd3t7siAACAUkewLevOXVmMwWIAAKAcI9iWZcZI08dbg8UqRUmDRjNYDAAAlFsE27Js2Szph6WnVxYbLUXVsLsiAAAA2xBsy6r0VdLHb1rbDBYDAAAg2JZJ5w4Wa5vMYDEAAAARbMuevBPSlOdODxZrKN09jMFiAAAAItiWLd6VxX45PVjsOQaLAQAAnEawLUs+/1BafWaw2CgGiwEAAJyDYFtWbFotzX3D2u45RLqiub31AAAA+BmCbVlwMMN3sFiHHnZXBAAA4HcItv4u74S1slhOtpRwFYPFAAAALqBIwXbixImqW7euwsLClJiYqJUrV/5m/8OHD2vo0KGKjY2Vy+XSVVddpfnz5/v02bVrl+69915Vq1ZN4eHhatasmb7//vuilBc4zqwstovBYgAAABcTUtgDpk+fruHDh2vy5MlKTEzUhAkTlJycrPT0dNWsWTNf/7y8PN10002qWbOmZs2apfj4eG3fvl1RUVHePocOHdL111+vLl266NNPP1WNGjW0efNmValS5bIursz7/ENp9ZKzg8Wq5P/nCwAAAIvDGGMKc0BiYqLatGmj1157TZLk8XiUkJCgxx57TCNGjMjXf/LkyXr55Ze1ceNGhYaGFnjOESNG6KuvvtKXX35ZhEuwZGdnKzIyUllZWYqIiCjyefzGxu+lN/9H8nikXkOlG3rZXREAAMBlKem8VqhXEfLy8rRq1SolJSWdPUFQkJKSkrRixYoCj5k7d67atWunoUOHKjo6Wk2bNtXYsWPldrt9+rRu3Vq9e/dWzZo1dc011+jNN98s4iWVcR6PlDr9bKhtc7PUsafdVQEAAPi9QgXb/fv3y+12Kzo62qc9OjpaGRkZBR6zZcsWzZo1S263W/Pnz9ezzz6rv//97/rLX/7i02fSpElq0KCBPvvsMw0ZMkSPP/643n777QvWkpubq+zsbJ9PmXf0sPTv/5U+edMKtdd2kXo/wWAxAACAS1Dod2wLy+PxqGbNmnrjjTcUHBysVq1aadeuXXr55Zc1evRob5/WrVtr7NixkqRrrrlGaWlpmjx5sgYOHFjgeceNG6cxY8aUdPml55d10jt/kbIOWAPEeg2VrruNUAsAAHCJCvXEtnr16goODlZmZqZPe2ZmpmJiYgo8JjY2VldddZWCg4O9bVdffbUyMjKUl5fn7dO4cWOf466++mrt2LHjgrWMHDlSWVlZ3s/OnTsLcyn+w+ORFr0nTXzKCrU1E6QnXpPadSPUAgAAFEKhgq3T6VSrVq2UmprqbfN4PEpNTVW7du0KPOb666/Xzz//LI/H423btGmTYmNj5XQ6vX3S09N9jtu0aZPq1KlzwVpcLpciIiJ8PmXOkUPSv0ZK86daiy+0TpKGvy7F1be7MgAAgDKn0PPYDh8+XG+++abefvttbdiwQUOGDFFOTo4GDRokSRowYIBGjhzp7T9kyBAdPHhQw4YN06ZNmzRv3jyNHTtWQ4cO9fZ58skn9c0332js2LH6+eef9f777+uNN97w6RNwNq+RXn5Y2rRKCnVJ/Z6W+o+QXOF2VwYAAFAmFfod2z59+mjfvn0aNWqUMjIy1LJlSy1YsMA7oGzHjh0KCjqblxMSEvTZZ5
/pySefVPPmzRUfH69hw4bpmWee8fZp06aNPvroI40cOVLPP/+86tWrpwkTJqh///7FcIl+xuOWFr4nLXzXekobU1ca+KwUc+Gn0wAAALi4Qs9j66/KxDy2WQekd8dJP6+x9hNvke58VHKG2VoWAABAaSjpvFbisyLgtPRVVqg9etgKsr2fsN6pBQAAQLEg2JY0t1v67G1p8QeSMdbAsIHPWrMfAAAAoNgQbEvS4X3SO2OlLeus/Xa3Sz2HSE6XvXUBAAAEIIJtSdmwUnrvBSknW3JVkPo8KV3Txe6qAAAAAhbBtri5T1nz0i6Zbu3XaiAN+F+pRry9dQEAAAQ4gm1xOpQp/eev0rb11n6HHlKPh6UQp711AQAAlAME2+KS9rX0wcvSsSNSWEWp71NSixvsrgoAAKDcINherlMnpU/+LX3+obVfu6H16kG1WHvrAgAAKGcItpfjwB7r1YMdG639TndJt/9eCgm1ty4AAIByiGBbVGu/lD74m3QiR6pQWer3tNS0vd1VAQAAlFsE26L4+hNp5gRru25j6b4/S1WjbS0JAACgvAuyu4Ayqdn1UmQ16cY+0qPjCbUAAAB+gCe2RVG5ivRMihReye5KAAAAcBpPbIuKUAsAAOBXCLYAAAAICARbAAAABASCLQAAAAICwRYAAAABgWALAACAgECwBQAAQEAg2AIAACAgEGwBAAAQEAi2AAAACAgEWwAAAAQEgi0AAAACAsEWAAAAAYFgCwAAgIAQYncBxcUYI0nKzs62uRIAAAAU5ExOO5PbilvABNsjR45IkhISEmyuBAAAAL/lyJEjioyMLPbzOkxJReZS5vF4tHv3blWuXFlHjhxRQkKCdu7cqYiICLtLQwnKzs7mXpcD3Ofyg3tdPnCfy4/z77UxRkeOHFFcXJyCgor/jdiAeWIbFBSkWrVqSZIcDockKSIign9hygnudfnAfS4/uNflA/e5/Dj3XpfEk9ozGDwGAACAgECwBQAAQEAIyGDrcrk0evRouVwuu0tBCeNelw/c5/KDe10+cJ/Lj9K+1wEzeAwAAADlW0A+sQUAAED5Q7AFAABAQCDYAgAAICAQbAEAABAQAjLYTpw4UXXr1lVYWJgSExO1cuVKu0vCb/jiiy90xx13KC4uTg6HQ3PmzPH53hijUaNGKTY2VuHh4UpKStLmzZt9+hw8eFD9+/dXRESEoqKiNHjwYB09etSnz9q1a9WxY0eFhYUpISFBL730UklfGs4xbtw4tWnTRpUrV1bNmjXVs2dPpaen+/Q5ceKEhg4dqmrVqqlSpUq66667lJmZ6dNnx44d6tatmypUqKCaNWvq6aef1qlTp3z6LFu2TNdee61cLpeuvPJKvfXWWyV9eTht0qRJat68uXcy9nbt2unTTz/1fs89DlwvvPCCHA6HnnjiCW8b97vse+655+RwOHw+jRo18n7vd/fYBJhp06YZp9NppkyZYn766Sfz4IMPmqioKJOZmWl3abiA+fPnmz//+c9m9uzZRpL56KOPfL5/4YUXTGRkpJkzZ4758ccfTffu3U29evXM8ePHvX1uueUW06JFC/PNN9+YL7/80lx55ZWmX79+3u+zsrJMdHS06d+/v0lLSzMffPCBCQ8PN//6179K6zLLveTkZDN16lSTlpZm1qxZY2677TZTu3Ztc/ToUW+fRx55xCQkJJjU1FTz/fffm+uuu860b9/e+/2pU6dM06ZNTVJSkvnhhx/M/PnzTfXq1c3IkSO9fbZs2WIqVKhghg8fbtavX29effVVExwcbBYsWFCq11tezZ0718ybN89s2rTJpKenm//5n/8xoaGhJi0tzRjDPQ5UK1euNHXr1jXNmzc3w4YN87Zzv8u+0aNHmyZNmpg9e/Z4P/v27fN+72/3OOCCbdu2bc3QoUO9+26328TFxZlx48bZWBUu1fnB1uPxmJiYGPPyyy972w4fPmxcLpf54IMPjDHGrF+/3kgy3333nbfPp59+ahwOh9
m1a5cxxpjXX3/dVKlSxeTm5nr7PPPMM6Zhw4YlfEW4kL179xpJ5vPPPzfGWPc1NDTUzJw509tnw4YNRpJZsWKFMcb6P0FBQUEmIyPD22fSpEkmIiLCe2//9Kc/mSZNmvj8rj59+pjk5OSSviRcQJUqVcy///1v7nGAOnLkiGnQoIFZtGiR6dSpkzfYcr8Dw+jRo02LFi0K/M4f73FAvYqQl5enVatWKSkpydsWFBSkpKQkrVixwsbKUFRbt25VRkaGzz2NjIxUYmKi956uWLFCUVFRat26tbdPUlKSgoKC9O2333r73HDDDXI6nd4+ycnJSk9P16FDh0rpanCurKwsSVLVqlUlSatWrdLJkyd97nWjRo1Uu3Ztn3vdrFkzRUdHe/skJycrOztbP/30k7fPuec404c/A0qf2+3WtGnTlJOTo3bt2nGPA9TQoUPVrVu3fPeE+x04Nm/erLi4ONWvX1/9+/fXjh07JPnnPQ6oYLt//3653W6ff3iSFB0drYyMDJuqwuU4c99+655mZGSoZs2aPt+HhISoatWqPn0KOse5vwOlx+Px6IknntD111+vpk2bSrLug9PpVFRUlE/f8+/1xe7jhfpkZ2fr+PHjJXE5OM+6detUqVIluVwuPfLII/roo4/UuHFj7nEAmjZtmlavXq1x48bl+477HRgSExP11ltvacGCBZo0aZK2bt2qjh076siRI355j0MK1RsAisHQoUOVlpam5cuX210KSkDDhg21Zs0aZWVladasWRo4cKA+//xzu8tCMdu5c6eGDRumRYsWKSwszO5yUEJuvfVW73bz5s2VmJioOnXqaMaMGQoPD7exsoIF1BPb6tWrKzg4ON9ovMzMTMXExNhUFS7Hmfv2W/c0JiZGe/fu9fn+1KlTOnjwoE+fgs5x7u9A6Xj00Uf1ySefaOnSpapVq5a3PSYmRnl5eTp8+LBP//Pv9cXu44X6RERE+OUfwoHI6XTqyiuvVKtWrTRu3Di1aNFC//znP7nHAWbVqlXau3evrr32WoWEhCgkJESff/65XnnlFYWEhCg6Opr7HYCioqJ01VVX6eeff/bLf6cDKtg6nU61atVKqamp3jaPx6PU1FS1a9fOxspQVPXq1VNMTIzPPc3Ozta3337rvaft2rXT4cOHtWrVKm+fJUuWyOPxKDEx0dvniy++0MmTJ719Fi1apIYNG6pKlSqldDXlmzFGjz76qD766CMtWbJE9erV8/m+VatWCg0N9bnX6enp2rFjh8+9Xrdunc//kVm0aJEiIiLUuHFjb59zz3GmD38G2Mfj8Sg3N5d7HGC6du2qdevWac2aNd5P69at1b9/f+829zvwHD16VL/88otiY2P989/pQg8383PTpk0zLpfLvPXWW2b9+vXmoYceMlFRUT6j8eBfjhw5Yn744Qfzww8/GElm/Pjx5ocffjDbt283xljTfUVFRZn//ve/Zu3ataZHjx4FTvd1zTXXmG+//dYsX77cNGjQwGe6r8OHD5vo6Ghz3333mbS0NDNt2jRToUIFpvsqRUOGDDGRkZFm2bJlPtPGHDt2zNvnkUceMbVr1zZLliwx33//vWnXrp1p166d9/sz08bcfPPNZs2aNWbBggWmRo0aBU4b8/TTT5sNGzaYiRMnMjVQKRoxYoT5/PPPzdatW83atWvNiBEjjMPhMAsXLjTGcI8D3bmzIhjD/Q4ETz31lFm2bJnZunWr+eqrr0xSUpKpXr262bt3rzHG/+5xwAVbY4x59dVXTe3atY3T6TRt27Y133zzjd0l4TcsXbrUSMr3GThwoDHGmvLr2WefNdHR0cblcpmuXbua9PR0n3McOHDA9OvXz1SqVMlERESYQYMGmSNHjvj0+fHHH02HDh2My+Uy8fHx5oUXXiitS4QxBd5jSWbq1KnePsePHzd/+MMfTJUqVUyFChVMr169zJ49e3zOs23bNnPrrbea8PBwU716dfPUU0+ZkydP+vRZunSpadmypX
E6naZ+/fo+vwMl64EHHjB16tQxTqfT1KhRw3Tt2tUbao3hHge684Mt97vs69Onj4mNjTVOp9PEx8ebPn36mJ9//tn7vb/dY4cxxhT+OS8AAADgXwLqHVsAAACUXwRbAAAABASCLQAAAAICwRYAAAABgWALAACAgECwBQAAQEAg2AIAACAgEGwBAAAQEAi2AAAACAgEWwAAAAQEgi0AAAACAsEWAAAAAeH/AY3XyWwvO3YcAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAArYAAAHDCAYAAADRBFkDAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABNTUlEQVR4nO3deXxU5d3///dkmwRIhkDIRgKELQgIKCLEBVBSAlol1XorVQFFrRS+t/yq1tLbXdsgWlusFbUV0NtiXCrUG1lENjcWQZBVBNkhCQokIQGSkFy/P04yZEgC2c9k8no+Hqdz5pxrZj6HU+2bq9e5LocxxggAAABo4vzsLgAAAACoDwRbAAAA+ASCLQAAAHwCwRYAAAA+gWALAAAAn0CwBQAAgE8g2AIAAMAnEGwBAADgEwi2AAAA8AkEWwAAAPgEgi0AnMfevXvlcDg0e/ZsW35/9uzZcjgc2rt3b6P/9pNPPimHw9HovwsAtUWwBQAv8Kc//Unz5s2zuwzdfPPNuu666+wuAwBqhWALAF6gqmB755136tSpU+rYsWOD11BUVKQlS5bo+uuvb/DfAoCGEGB3AQCAqvn7+8vf379Rfuvzzz/XiRMnCLYAmix6bAE0KYcOHdLdd9+tqKgoOZ1O9erVSzNnzpQkZWVlKSAgQE899VSFz+3YsUMOh0Mvv/yyJOnYsWN66KGHdPHFF6tVq1YKCwvTyJEj9e23316whqFDh2ro0KEVjo8bN06dOnXyOPbCCy/oiiuuUNu2bRUSEqL+/fvrgw8+8GjjcDiUn5+vN998Uw6HQw6HQ+PGjZNU9RjbV155Rb169ZLT6VRsbKwmTpyo7OzsCnX27t1b27Zt0zXXXKMWLVqoffv2mjZtWqXX9fHHH6tnz54VrqG8M2fO6JlnnlGXLl3kdDrVqVMn/eEPf1BBQYFHu3Xr1iklJUUREREKCQlRQkKC7r77bo826enp6t+/v0JDQxUWFqaLL75Y06dPr/K3AeBC6LEF0GRkZWVp0KBBcjgcmjRpktq1a6eFCxdq/Pjxys3N1eTJkzVkyBC99957euKJJzw+++6778rf31+33HKLJGn37t2aN2+ebrnlFiUkJCgrK0uvvfaahgwZom3btik2NrZeap4+fbpuvPFG3X777SosLFR6erpuueUWzZ8/390z+r//+7+65557dPnll+u+++6TJHXp0qXK73zyySf11FNPKTk5WRMmTNCOHTs0Y8YMff311/ryyy8VGBjobnv8+HGNGDFCN910k/7rv/5LH3zwgR555BFdfPHFGjlypMf3LliwQD//+c/Pez333HOP3nzzTf3yl7/Ugw8+qDVr1igtLU3bt2/X3LlzJUlHjhzR8OHD1a5dO/3+979X69attXfvXn344Yfu71myZIlGjx6tYcOG6bnnnpMkbd++XV9++aUeeOCBGvwJA0A5BgCaiPHjx5uYmBjz008/eRy/7bbbjMvlMidPnjSvvfaakWQ2b97s0aZnz57m2muvdb8/ffq0KS4u9mizZ88e43Q6zdNPP+1xTJKZNWuW+9iQIUPMkCFDKtQ3duxY07FjR49jJ0+e9HhfWFhoevfu7VGLMca0bNnSjB07tsJ3zpo1y0gye/bsMcYYc+TIERMUFGSGDx/uUf/LL79sJJmZM2d61CnJvPXWW+5jBQUFJjo62tx8880ev7N7924jySxfvtx97IknnjDl/2di48aNRpK55557PD770EMPGUlm2bJlxhhj5s6daySZr7/+usL1lHnggQdMWFiYOXPmTJVtAKCmGIoAoEkwxujf//63brjhBhlj9NNPP7m3lJQU5eTk6JtvvtFNN92kgIAAvfvuu+7PbtmyRdu2bdOtt97qPuZ0OuXnZ/0rsLi4WEePHlWrVq2UmJ
iob775pt7qDgkJce8fP35cOTk5uvrqq2v9G59++qkKCws1efJkd/2SdO+99yosLEwff/yxR/tWrVrpjjvucL8PCgrS5Zdfrt27d3u0+/jjj+VyuXTVVVdV+dsLFiyQJP32t7/1OP7ggw+6v0OSWrduLUmaP3++ioqKKv2u1q1bKz8/X0uWLDnf5QJAjRBsATQJP/74o7Kzs/X666+rXbt2Httdd90lyfq/wCMiIjRs2DC999577s++++67CggI0E033eQ+VlJSor/85S/q1q2bnE6nIiIi1K5dO23atEk5OTn1Vvf8+fM1aNAgBQcHq02bNmrXrp1mzJhR69/Yt2+fJCkxMdHjeFBQkDp37uw+XyYuLq7CXLTh4eE6fvy4x7GPP/5Yw4cPV0BA1SPU9u3bJz8/P3Xt2tXjeHR0tFq3bu3+7SFDhujmm2/WU089pYiICI0aNUqzZs3yGIf7m9/8Rt27d9fIkSMVFxenu+++W4sWLarmnwIAVI5gC6BJKCkpkSTdcccdWrJkSaXblVdeKUm67bbb9P3332vjxo2SpPfee0/Dhg1TRESE+/v+9Kc/6be//a0GDx6st99+W4sXL9aSJUvUq1cv929VpapFC4qLiz3ef/7557rxxhsVHBysV155RQsWLNCSJUv0q1/9SsaY2v5R1EhVMyqU//2TJ09qxYoV1Z6/9kKLNjgcDn3wwQdatWqVJk2a5H7gr3///srLy5MkRUZGauPGjfroo4904403avny5Ro5cqTGjh1bzSsDgIp4eAxAk9CuXTuFhoaquLhYycnJ522bmpqqX//61+7hCN9//72mTJni0eaDDz7QNddcozfeeMPjeHZ2tkcArkx4eHiF/ytfUoXe0n//+98KDg7W4sWL5XQ63cdnzZpV4bPVXeGrbD7bHTt2qHPnzu7jhYWF2rNnzwX/bCqzbNkyFRQUVHiYrLLfLikp0c6dO3XRRRe5j2dlZSk7O7vCXLuDBg3SoEGD9Mc//lFz5szR7bffrvT0dN1zzz2SrF7mG264QTfccINKSkr0m9/8Rq+99poee+yxCr3CAFAd9NgCaBL8/f11880369///re2bNlS4fyPP/7o3m/durVSUlL03nvvKT09XUFBQUpNTa3wfef2mr7//vs6dOjQBWvp0qWLvvvuO4/f/Pbbb/Xll19W+A2Hw+HRk7t3795KF2Jo2bJlhem6KpOcnKygoCC99NJLHvW/8cYbysnJqdUctAsWLNBll12mqKio87Yr69H961//6nH8xRdflCT3bx8/frzCn22/fv0kyT0c4ejRox7n/fz81KdPH482AFBT9NgCaDKmTp2q5cuXa+DAgbr33nvVs2dPHTt2TN98840+/fRTHTt2zN321ltv1R133KFXXnlFKSkp7geayvz85z/X008/rbvuuktXXHGFNm/erH/9618evaBVufvuu/Xiiy8qJSVF48eP15EjR/Tqq6+qV69eys3Ndbe7/vrr9eKLL2rEiBH61a9+pSNHjujvf/+7unbtqk2bNnl8Z//+/fXpp5/qxRdfVGxsrBISEjRw4MAKv92uXTtNmTJFTz31lEaMGKEbb7xRO3bs0CuvvKIBAwZ4PChWXQsWLHCPUz6fvn37auzYsXr99deVnZ2tIUOGaO3atXrzzTeVmpqqa665RpL05ptv6pVXXtEvfvELdenSRSdOnNA//vEPhYWFucPxPffco2PHjunaa69VXFyc9u3bp7/97W/q16+fR28wANSIjTMyAECNZWVlmYkTJ5r4+HgTGBhooqOjzbBhw8zrr7/u0S43N9eEhIQYSebtt9+u8D2nT582Dz74oImJiTEhISHmyiuvNKtWraowlVdl030ZY8zbb79tOnfubIKCgky/fv3M4sWLK53u64033jDdunUzTqfT9OjRw8yaNavCNFrGGPPdd9+ZwYMHu2sum/rr3Om+yrz88sumR48eJjAw0ERFRZkJEyaY48ePe7QZMmSI6dWrV4
VrL1/nli1bjCSzdu3aCu0qq7OoqMg89dRTJiEhwQQGBpr4+HgzZcoUc/r0aXebb775xowePdp06NDBOJ1OExkZaX7+85+bdevWudt88MEHZvjw4SYyMtIEBQWZDh06mF//+tcmIyOjQh0AUF0OYxrpCQYAgNeZNm2aXnzxRWVkZFR7nC8AeCvG2AJAM9apUyf95S9/IdQC8An02AIAAMAn0GMLAAAAn0CwBQAAgE8g2AIAAMAn+MQ8tiUlJTp8+LBCQ0N5AAIAAMALGWN04sQJxcbGys+vYfpWfSLYHj58WPHx8XaXAQAAgAs4cOCA4uLiGuS7fSLYhoaGSrL+oMLCwmyuBgAAAOfKzc1VfHy8O7c1BJ8ItmXDD8LCwgi2AAAAXqwhh43y8BgAAAB8AsEWAAAAPoFgCwAAAJ9AsAUAAIBPINgCAADAJ9Qo2M6YMUN9+vRxzz6QlJSkhQsXVtl+9uzZcjgcHltwcLBHG2OMHn/8ccXExCgkJETJycnauXNn7a4GAAAAzVaNgm1cXJymTp2q9evXa926dbr22ms1atQobd26tcrPhIWFKSMjw73t27fP4/y0adP00ksv6dVXX9WaNWvUsmVLpaSk6PTp07W7IgAAADRLNZrH9oYbbvB4/8c//lEzZszQ6tWr1atXr0o/43A4FB0dXek5Y4z++te/6tFHH9WoUaMkSW+99ZaioqI0b9483XbbbTUpDwAAAM1YrcfYFhcXKz09Xfn5+UpKSqqyXV5enjp27Kj4+PgKvbt79uxRZmamkpOT3cdcLpcGDhyoVatW1bY0AAAANEM1Xnls8+bNSkpK0unTp9WqVSvNnTtXPXv2rLRtYmKiZs6cqT59+ignJ0cvvPCCrrjiCm3dulVxcXHKzMyUJEVFRXl8Lioqyn2uMgUFBSooKHC/z83NrellAAAAwMfUuMc2MTFRGzdu1Jo1azRhwgSNHTtW27Ztq7RtUlKSxowZo379+mnIkCH68MMP1a5dO7322mt1KjotLU0ul8u9xcfH1+n7AAAA0PTVONgGBQWpa9eu6t+/v9LS0tS3b19Nnz69Wp8NDAzUJZdcol27dkmSe+xtVlaWR7usrKwqx+VK0pQpU5STk+PeDhw4UNPLAAAAgI+p8zy2JSUlHsMCzqe4uFibN29WTEyMJCkhIUHR0dFaunSpu01ubq7WrFlz3nG7TqfTPeVY2QYAAIDmrUZjbKdMmaKRI0eqQ4cOOnHihObMmaMVK1Zo8eLFkqQxY8aoffv2SktLkyQ9/fTTGjRokLp27ars7Gw9//zz2rdvn+655x5J1owJkydP1rPPPqtu3bopISFBjz32mGJjY5Wamlq/V1qfigqlPVuk4BZShx52VwMAAADVMNgeOXJEY8aMUUZGhlwul/r06aPFixfrZz/7mSRp//798vM72wl8/Phx3XvvvcrMzFR4eLj69++vr776yuNhs9/97nfKz8/Xfffdp+zsbF111VVatGhRhYUcvMrSd6TF/ytdco005n/srgYAAACSHMYYY3cRdZWbmyuXy6WcnJzGGZawe4v0t8lSyzDp6Q8kP1YmBgAAOJ/GyGskstro2MMahpCfKx3aZXc1AAAAEMG2dvwDpG6XWPs71tlbCwAAACQRbGsvsb/1umO9vXUAAABAEsG29rqXBts9W6WCU/bWAgAAAIJtrUXESm2ipeIz0q5v7a4GAACg2SPY1pbDISVeZu0zHAEAAMB2BNu66FE6HOF7gi0AAIDdCLZ10e0SyeEnZe2Xjh+xuxoAAIBmjWBbFyGtrDltJXptAQAAbEawrSum/QIAAPAKBNu66l4u2JYU21sLAABAM0awrauy5XVPnpAOsrwuAACAXQi2deWxvC7DEQAAAOxCsK0PiUz7BQAAYDeCbX0oW6iB5XUBAABsQ7CtDxGxUtsYltcFAACwEcG2vjDtFwAAgK0ItvWFYAsAAGArgm19KVte98h+6XiW3dUAAAA0OwTb+lJ+ed0d39
hbCwAAQDNEsK1P7uEI6+ytAwAAoBki2Nansmm/vv+G5XUBAAAaGcG2PnVgeV0AAAC7EGzrk7+/1O1Sa5/ZEQAAABoVwba+JZYFW8bZAgAANCaCbX0rG2e7d5t0+qS9tQAAADQjBNv6Vn553R822V0NAABAs0GwbQhM+wUAANDoCLYNoWw4Ags1AAAANBqCbUPo1k/yY3ldAACAxkSwbQghraQOF1n79NoCAAA0CoJtQ2GcLQAAQKMi2DaUsmDL8roAAACNgmDbUDyW191pdzUAAAA+j2DbUFheFwAAoFERbBuSe5wtwRYAAKChEWwbUlmwZXldAACABkewbUgRsdbG8roAAAANrkbBdsaMGerTp4/CwsIUFhampKQkLVy4sMr2//jHP3T11VcrPDxc4eHhSk5O1tq1az3ajBs3Tg6Hw2MbMWJE7a7GGzHtFwAAQKOoUbCNi4vT1KlTtX79eq1bt07XXnutRo0apa1bt1bafsWKFRo9erSWL1+uVatWKT4+XsOHD9ehQ4c82o0YMUIZGRnu7Z133qn9FXmb7oyzBQAAaAwOY4ypyxe0adNGzz//vMaPH3/BtsXFxQoPD9fLL7+sMWPGSLJ6bLOzszVv3rxa15CbmyuXy6WcnByFhYXV+nsaxKk86dGbpJIS6bF/SW2i7K4IAACg0TVGXqv1GNvi4mKlp6crPz9fSUlJ1frMyZMnVVRUpDZt2ngcX7FihSIjI5WYmKgJEybo6NGj5/2egoIC5ebmemxeq/zyut/TawsAANBQahxsN2/erFatWsnpdOr+++/X3Llz1bNnz2p99pFHHlFsbKySk5Pdx0aMGKG33npLS5cu1XPPPaeVK1dq5MiRKi6uerWutLQ0uVwu9xYfH1/Ty2hcZeNsv2OcLQAAQEOp8VCEwsJC7d+/Xzk5Ofrggw/0z3/+UytXrrxguJ06daqmTZumFStWqE+fPlW22717t7p06aJPP/1Uw4YNq7RNQUGBCgoK3O9zc3MVHx/vnUMRJGu6r+n/LbUIlZ75QPLzt7siAACARuWVQxGCgoLUtWtX9e/fX2lpaerbt6+mT59+3s+88MILmjp1qj755JPzhlpJ6ty5syIiIrRr164q2zidTvfMDGWbV4tPlIJbsrwuAABAA6rzPLYlJSUevafnmjZtmp555hktWrRIl1122QW/7+DBgzp69KhiYmLqWpr38PeXul9i7TM7AgAAQIOoUbCdMmWKPvvsM+3du1ebN2/WlClTtGLFCt1+++2SpDFjxmjKlCnu9s8995wee+wxzZw5U506dVJmZqYyMzOVl5cnScrLy9PDDz+s1atXa+/evVq6dKlGjRqlrl27KiUlpR4v0wsw7RcAAECDCqhJ4yNHjmjMmDHKyMiQy+VSnz59tHjxYv3sZz+TJO3fv19+fmez8owZM1RYWKhf/vKXHt/zxBNP6Mknn5S/v782bdqkN998U9nZ2YqNjdXw4cP1zDPPyOl01sPleZGyB8j2bLWW1w1uYW89AAAAPqbO89h6A6+ex7a8P46RfjosjX9G6l29KdIAAAB8gVc+PIY6YHldAACABkOwbUyJpQ/PMc4WAACg3hFsG1PXvpKfn/TjQelYlt3VAAAA+BSCbWMKaSV1ZHldAACAhkCwbWzdWV4XAACgIRBsG1uP0nG2OzdIJcX21gIAAOBDCLaNrfzyugdYXhcAAKC+EGwbm8fyugxHAAAAqC8EWzsw7RcAAEC9I9jaoWyhhr3brOV1AQAAUGcEWzu0jZEi2lsPj+361u5qAAAAfALB1i6Jl1qvjLMFAACoFwRbuzDOFgAAoF4RbO3isbxupt3VAAAANHkEW7uUX16XXlsAAIA6I9jaieEIAAAA9YZga6eyab9YXhcAAKDOCLZ2ik+0hiSwvC4AAECdEWzt5O8vdWN5XQAAgPpAsLVb2XAExtkCAADUCcHWbh7L6+bbWwsAAEATRrC1G8vrAgAA1AuCrTdgOAIAAECdEWy9gT
vY8gAZAABAbRFsvUG3fqXL6x5ieV0AAIBaIth6g+CWUsee1j7DEQAAAGqFYOstGGcLAABQJwRbb1EWbL//huV1AQAAaoFg6y3Kltc9lScd+N7uagAAAJocgq238Fhel+EIAAAANUWw9SZlwxG+Y9ovAACAmiLYepOyYLuP5XUBAABqimDrTdrGSO3aSyUlLK8LAABQQwRbb9Odab8AAABqg2DrbVheFwAAoFYItt6m/PK6RzPsrgYAAKDJINh6G5bXBQAAqJUaBdsZM2aoT58+CgsLU1hYmJKSkrRw4cLzfub9999Xjx49FBwcrIsvvlgLFizwOG+M0eOPP66YmBiFhIQoOTlZO3furPmV+JIel1mvBFsAAIBqq1GwjYuL09SpU7V+/XqtW7dO1157rUaNGqWtW7dW2v6rr77S6NGjNX78eG3YsEGpqalKTU3Vli1b3G2mTZuml156Sa+++qrWrFmjli1bKiUlRadPn67blTVlZeNsd26QilleFwAAoDocxhhTly9o06aNnn/+eY0fP77CuVtvvVX5+fmaP3+++9igQYPUr18/vfrqqzLGKDY2Vg8++KAeeughSVJOTo6ioqI0e/Zs3XbbbdWqITc3Vy6XSzk5OQoLC6vL5XiHkmLp0Zut5XX/e7qU0MvuigAAAOqkMfJarcfYFhcXKz09Xfn5+UpKSqq0zapVq5ScnOxxLCUlRatWrZIk7dmzR5mZmR5tXC6XBg4c6G7TLPn5nx2OsG2NvbUAAAA0ETUOtps3b1arVq3kdDp1//33a+7cuerZs2elbTMzMxUVFeVxLCoqSpmZme7zZceqalOZgoIC5ebmemw+p+cg63XbanvrAAAAaCJqHGwTExO1ceNGrVmzRhMmTNDYsWO1bdu2hqitSmlpaXK5XO4tPj6+UX+/UfQYIDn8pMO7peNH7K4GAADA69U42AYFBalr167q37+/0tLS1LdvX02fPr3SttHR0crKyvI4lpWVpejoaPf5smNVtanMlClTlJOT494OHDhQ08vwfq1cUqeLrH2GIwAAAFxQneexLSkpUUFBQaXnkpKStHTpUo9jS5YscY/JTUhIUHR0tEeb3NxcrVmzpspxu5LkdDrdU46VbT6J4QgAAADVFlCTxlOmTNHIkSPVoUMHnThxQnPmzNGKFSu0ePFiSdKYMWPUvn17paWlSZIeeOABDRkyRH/+8591/fXXKz09XevWrdPrr78uSXI4HJo8ebKeffZZdevWTQkJCXrssccUGxur1NTU+r3SpqjXIOnjN6xpvwpPS0HBdlcEAADgtWoUbI8cOaIxY8YoIyNDLpdLffr00eLFi/Wzn/1MkrR//375+Z3tBL7iiis0Z84cPfroo/rDH/6gbt26ad68eerdu7e7ze9+9zvl5+frvvvuU3Z2tq666iotWrRIwcGEOEV3ksKjpONZ0s6NVtAFAABApeo8j6038Ll5bMv74CXpy4+kpJ9L/zXZ7moAAABqxavnsUUj6VVunG3T/zsIAABAgyHYeruu/ayxtTk/SYd/sLsaAAAAr0Ww9XaBQVL3S6x9pv0CAACoEsG2KSib9msr034BAABUhWDbFFw00Hrd/5104ri9tQAAAHgpgm1T0DpCiutmPTy2/Wu7qwEAAPBKBNumomdpry2rkAEAAFSKYNtUlI2z/W6ddKbI3loAAAC8EMG2qYjvLrVqLRWclHZvtrsaAAAAr0OwbSr8/MoNR2DaLwAAgHMRbJuS8quQAQAAwAPBtinp3l/yD5B+PCQdOWh3NQAAAF6FYNuUBLeQuvS19um1BQAA8ECwbWrKxtmyChkAAIAHgm1TUzbOdvdm6VSevbUAAAB4EYJtUxMRK0V2kEqKrTltAQAAIIlg2zT1YtovAACAcxFsm6KyVci2r7V6bgEAAECwbZISekkhraT8HGn/DrurAQAA8AoE26bIP0DqMcDaZ3YEAAAASQTbpotVyAAAADwQbJuqHpdJDj/p8G7peJbd1Q
AAANiOYNtUtXRJnS6y9pkdAQAAgGDbpJXNjkCwBQAAINg2aWXjbHdukApP21sLAACAzQi2TVl0Jyk8SioqlHZutLsaAAAAWxFsmzKH42yvLdN+AQCAZo5g29T1LFted7VkjL21AAAA2Ihg29R17ScFBUs5P0mHf7C7GgAAANsQbJu6wCCp+yXWPsMRAABAM0aw9QU9WYUMAACAYOsLLiodZ7t/h3TiuL21AAAA2IRg6wtaR0hx3ayHx7Z/bXc1AAAAtiDY+gqGIwAAgGaOYOsrepUOR/hunXSmyN5aAAAAbECw9RVx3aXQcKngpLR7s93VAAAANDqCra/w85MuutzaZ9ovAADQDBFsfUnZ8rrb1thbBwAAgA1qFGzT0tI0YMAAhYaGKjIyUqmpqdqxY8d5PzN06FA5HI4K2/XXX+9uM27cuArnR4wYUbsras6695f8A6SfDklHDtpdDQAAQKOqUbBduXKlJk6cqNWrV2vJkiUqKirS8OHDlZ+fX+VnPvzwQ2VkZLi3LVu2yN/fX7fccotHuxEjRni0e+edd2p3Rc1ZcAupS19rn9kRAABAMxNQk8aLFi3yeD979mxFRkZq/fr1Gjx4cKWfadOmjcf79PR0tWjRokKwdTqdio6Orkk5qEyvQdL3661xtkN/aXc1AAAAjaZOY2xzcnIkVQyv5/PGG2/otttuU8uWLT2Or1ixQpGRkUpMTNSECRN09OjRKr+joKBAubm5HhtK9Syd9mv3ZulUnr21AAAANKJaB9uSkhJNnjxZV155pXr37l2tz6xdu1ZbtmzRPffc43F8xIgReuutt7R06VI999xzWrlypUaOHKni4uJKvyctLU0ul8u9xcfH1/YyfE9ErBTVQSoptua0BQAAaCYcxhhTmw9OmDBBCxcu1BdffKG4uLhqfebXv/61Vq1apU2bNp233e7du9WlSxd9+umnGjZsWIXzBQUFKigocL/Pzc1VfHy8cnJyFBYWVrML8UUfvSYtf1+6LFm6/fd2VwMAAKDc3Fy5XK4GzWu16rGdNGmS5s+fr+XLl1c71Obn5ys9PV3jx4+/YNvOnTsrIiJCu3btqvS80+lUWFiYx4ZyypbX3b7W6rkFAABoBmoUbI0xmjRpkubOnatly5YpISGh2p99//33VVBQoDvuuOOCbQ8ePKijR48qJiamJuWhTEIvKaSVlJ8r7T//dGwAAAC+okbBduLEiXr77bc1Z84chYaGKjMzU5mZmTp16pS7zZgxYzRlypQKn33jjTeUmpqqtm3behzPy8vTww8/rNWrV2vv3r1aunSpRo0apa5duyolJaWWl9XM+QdIPQZY+6xCBgAAmokaBdsZM2YoJydHQ4cOVUxMjHt799133W3279+vjIwMj8/t2LFDX3zxRaXDEPz9/bVp0ybdeOON6t69u8aPH6/+/fvr888/l9PprOVl4ewqZARbAADQPNRoHtvqPGe2YsWKCscSExOr/GxISIgWL15ckzJQHT0ukxx+0uHd0vEsKTzK7ooAAAAaVJ3msYUXa+mSOvW09retsbcWAACARkCw9WVlizUwzhYAADQDBFtfVjbOducGqeDU+dsCAAA0cQRbXxbdyRpbe6ZI2rnR7moAAAAaFMHWlzkc5WZHYJwtAADwbQRbX1c2znbbaql2qycDAAA0CQRbX9e1nxQULOX8JB3+we5qAAAAGgzB1tcFBkndL7X2mR0BAAD4MIJtc1B+OAIAAICPItg2BxeVBtv9O6QTx+2tBQAAoIEQbJuD1hFSXDfr4bHta+2uBgAAoEEQbJuLnkz7BQAAfBvBtrnoVToc4bt11oINAAAAPoZg21zEdZdCw6WCk9LuzXZXAwAAUO8Its2Fn9/Z2RGY9gsAAPgggm1zwipkAADAhxFsm5Pu/SX/AOmnw9KPB+2uBgAAoF4RbJuT4BZSl77WPsMRAACAjyHYNje9mPYLAAD4JoJtc1M2znb3ZulUnr21AAAA1COCbXMTEStFdZ
BKiq05bQEAAHwEwbY5cq9CxjhbAADgOwi2zVHZONvta62eWwAAAB9AsG2OOvWSQlpJ+bnSvu/srgYAAKBeEGybI39/qccAa3/rKntrAQAAqCcE2+aqd5L1umGFVFJiaykAAAD1gWDbXPW+QgpuKR3LlHZttLsaAACAOiPYNldBwVL/Ydb+6oX21gIAAFAPCLbN2aCR1uumL6T8HHtrAQAAqCOCbXMW183aioukdZ/aXQ0AAECdEGybu7Je29ULJWPsrQUAAKAOCLbN3aXXSoFOKXOvtG+73dUAAADUGsG2uQtpJfUdbO3zEBkAAGjCCLaQBl1nvW5YLp0+aW8tAAAAtUSwhdS5t9QuTio8LW1cYXc1AAAAtUKwheRwlHuIbIG9tQAAANQSwRaWAcMlP39p33fS4d12VwMAAFBjNQq2aWlpGjBggEJDQxUZGanU1FTt2LHjvJ+ZPXu2HA6HxxYcHOzRxhijxx9/XDExMQoJCVFycrJ27txZ86tB7YWGW8vsStKaRfbWAgAAUAs1CrYrV67UxIkTtXr1ai1ZskRFRUUaPny48vPzz/u5sLAwZWRkuLd9+/Z5nJ82bZpeeuklvfrqq1qzZo1atmyplJQUnT59uuZXhNorG46wbolUVGhvLQAAADUUUJPGixZ59uTNnj1bkZGRWr9+vQYPHlzl5xwOh6Kjoys9Z4zRX//6Vz366KMaNWqUJOmtt95SVFSU5s2bp9tuu60mJaIuEvtLrdtJ2T9Km7+ULr3G7ooAAACqrU5jbHNyciRJbdq0OW+7vLw8dezYUfHx8Ro1apS2bt3qPrdnzx5lZmYqOTnZfczlcmngwIFatWpVXcpDTfn5SwNHWPs8RAYAAJqYWgfbkpISTZ48WVdeeaV69+5dZbvExETNnDlT//nPf/T222+rpKREV1xxhQ4ePChJyszMlCRFRUV5fC4qKsp97lwFBQXKzc312FBPLh9hzZKwc4P002G7qwEAAKi2WgfbiRMnasuWLUpPTz9vu6SkJI0ZM0b9+vXTkCFD9OGHH6pdu3Z67bXXavvTSktLk8vlcm/x8fG1/i6co02UNSRB4iEyAADQpNQq2E6aNEnz58/X8uXLFRcXV6PPBgYG6pJLLtGuXbskyT32Nisry6NdVlZWleNyp0yZopycHPd24MCBWlwFqjSwdCWytYul4mJ7awEAAKimGgVbY4wmTZqkuXPnatmyZUpISKjxDxYXF2vz5s2KiYmRJCUkJCg6OlpLly51t8nNzdWaNWuUlJRU6Xc4nU6FhYV5bKhHvZOkli4p96i0fa3d1QAAAFRLjYLtxIkT9fbbb2vOnDkKDQ1VZmamMjMzderUKXebMWPGaMqUKe73Tz/9tD755BPt3r1b33zzje644w7t27dP99xzjyRrxoTJkyfr2Wef1UcffaTNmzdrzJgxio2NVWpqav1cJWomINBasEGS1vAQGQAAaBpqNN3XjBkzJElDhw71OD5r1iyNGzdOkrR//375+Z3Ny8ePH9e9996rzMxMhYeHq3///vrqq6/Us2dPd5vf/e53ys/P13333afs7GxdddVVWrRoUYWFHNCIBo2QVrwvbVsj5fwkuSLsrggAAOC8HMYYY3cRdZWbmyuXy6WcnByGJdSnlx6Q9myVrr9bSv6V3dUAAIAmrDHyWp3msYWPG1T6ENmaRVJJib21AAAAXADBFlXrO1gKbmHNZ/vDJrurAQAAOC+CLarmDJEuvdbaZyUyAADg5Qi2OL+y4QibPpfyWeENAAB4L4Itzi+um9S+i3SmSFq/9MLtAQAAbEKwxfk5HGd7bVcvkJr+JBoAAMBHEWxxYZdeKwUGSRl7pP077K4GAACgUgRbXFiLUKnPYGufh8gAAICXItiiegaNtF43LJcKTp2/LQAAgA0ItqieLn2kiPZWqN24wu5qAAAAKiDYonocjrO9tqsX2lsLAABAJQi2qL4BwyU/P2nvNiljr93VAAAAeCDYovrC2ki9kq
z9NfTaAgAA70KwRc2UzWm7bol0ptDeWgAAAMoh2KJmelwmuSKs5XU3f2V3NQAAAG4EW9SMn780cIS1z0NkAADAixBsUXMDR1izJHy/XjqaYXc1AAAAkgi2qI020VK3S639tYvtrQUAAKAUwRa1Uzan7ZpFUnGxvbUAAACIYIvauvgKqWWYlPOT9N3XdlcDAABAsEUtBQRJl/3M2mdOWwAA4AUItqi9gaXDEbauknKP2VsLAABo9gi2qL2YTlKnnlJJifT1J3ZXAwAAmjmCLeqm7CGy1QslY+ytBQAANGsEW9RNv6GSs4X00yHph012VwMAAJoxgi3qxhkiXXqNtc9KZAAAwEYEW9Rd2XCETZ9JJ0/YWwsAAGi2CLaou/hEKbazVFQofbPM7moAAEAzRbBF3TkcZ6f+WrWAh8gAAIAtCLaoH/2HSQGB0uEfpIM77a4GAAA0QwRb1I+WYVKfq639VQvsrQUAADRLBFvUn0HXWa/fLJMKTtlbCwAAaHYItqg/XfpIEbFSwUnp28/srgYAADQzBFvUHz8/aeAIa381wxEAAEDjItiifg0YbgXcPVulrH12VwMAAJoRgi3qlytC6jnQ2l+9yN5aAABAs0KwRf0bdL31+vUn0pkie2sBAADNBsEW9a/HAMnVVsrPkbassrsaAADQTNQo2KalpWnAgAEKDQ1VZGSkUlNTtWPHjvN+5h//+IeuvvpqhYeHKzw8XMnJyVq7dq1Hm3HjxsnhcHhsI0aMqPnVwDv4+0uXp1j7a3iIDAAANI4aBduVK1dq4sSJWr16tZYsWaKioiINHz5c+fn5VX5mxYoVGj16tJYvX65Vq1YpPj5ew4cP16FDhzzajRgxQhkZGe7tnXfeqd0VwTtcXvoXkx3rpYw99tYCAACaBYcxxtT2wz/++KMiIyO1cuVKDR48uFqfKS4uVnh4uF5++WWNGTNGktVjm52drXnz5tWqjtzcXLlcLuXk5CgsLKxW34EGMPMJafOXUkwnafLfpSCn3RUBAACbNEZeq9MY25ycHElSmzZtqv2ZkydPqqioqMJnVqxYocjISCUmJmrChAk6evRold9RUFCg3Nxcjw1e6JbJUmi4lLFX+uhVu6sBAAA+rtbBtqSkRJMnT9aVV16p3r17V/tzjzzyiGJjY5WcnOw+NmLECL311ltaunSpnnvuOa1cuVIjR45UcXFxpd+RlpYml8vl3uLj42t7GWhIoeHSrx6x9r/8P2nT5/bWAwAAfFqthyJMmDBBCxcu1BdffKG4uLhqfWbq1KmaNm2aVqxYoT59+lTZbvfu3erSpYs+/fRTDRs2rML5goICFRQUuN/n5uYqPj6eoQje6v/+IS17VwppJT38mhQeZXdFAACgkXntUIRJkyZp/vz5Wr58ebVD7QsvvKCpU6fqk08+OW+olaTOnTsrIiJCu3btqvS80+lUWFiYxwYvdt1dUoce0qk86X/TpCp64gEAAOqiRsHWGKNJkyZp7ty5WrZsmRISEqr1uWnTpumZZ57RokWLdNlll12w/cGDB3X06FHFxMTUpDx4K/8A6c4/SM4W0p4t0pK37a4IAAD4oBoF24kTJ+rtt9/WnDlzFBoaqszMTGVmZurUqVPuNmPGjNGUKVPc75977jk99thjmjlzpjp16uT+TF5eniQpLy9PDz/8sFavXq29e/dq6dKlGjVqlLp27aqUlJR6ukzYLiJW+q/J1v4n/5J2fWtrOQAAwPfUKNjOmDFDOTk5Gjp0qGJiYtzbu+++626zf/9+ZWRkeHymsLBQv/zlLz0+88ILL0iS/P39tWnTJt14443q3r27xo8fr/79++vzzz+X08n0UD7l0muthRtMifR2mrUyGQAAQD2p0zy23oJ5bJuQglPSi7+RjhyQel8h3f2U5HDYXRUAAGhgXvvwGFBrzhBrvK1/oLTlK+nLj+yuCAAA+AiCLRpfXDfphnut/f+8Kh36wd56AACATyDYwh6DfyH1HCSdKZLe+q
M1RAEAAKAOCLawh8MhjX5YCmsrHdkvzXvF7ooAAEATR7CFfVq5pDumWCF39UJpw3K7KwIAAE0YwRb26tZPSh5t7b/3F+loxnmbAwAAVIVgC/uljJU69ZROn5T+909S8Rm7KwIAAE0QwRb28/e3pgALbint2y4tetPuigAAQBNEsIV3aBMt3fqgtb80Xfr+G3vrAQAATQ7BFt6j32Ap6XrJGOlfz0l52XZXBAAAmhCCLbxL6gQpqqOUe1SaM00qKbG7IgAA0EQQbOFdgoKlMf8jBQZJ29dKn8+1uyIAANBEEGzhfWI7S6Put/b/7x/SgZ321gMAAJoEgi280xU3SBdfZU399b/PWlOBAQAAnAfBFt7J4ZBu/a3Uup304yHpw5ftrggAAHg5gi28V8sw6Y4/SA4/6etPpPVL7a4IAAB4MYItvFuXi6Xhd1j77/9V+umwreUAAADvRbCF9xt+u9Slj1RwSnrrWelMkd0VAQAAL0Swhffz85du/73UIlQ68L20YKbdFQEAAC9EsEXTEB4p3faQtb/8fem7r+2tBwAAeB2CLZqOi6+Urhpl7f/rOSn3mL31AAAAr0KwRdNyw31STIKUl22FW5bcBQAApQi2aFqCnNLYR6VAp/T9emnF+3ZXBAAAvATBFk1PVEfpponW/sczpX3b7a0HAAB4BYItmqaBI6W+g6WSYum1KdLuzXZXBAAAbEawRdNUtuRup17SqTxpxu+kTZ/bXRUAALARwRZNV0gracI0a7aEM0XS7KelL/5jd1UAAMAmBFs0bUFOadzj0hU/l4yR/v03a9ytMXZXBgAAGhnBFk2fn7/0ywekkeOs95/Okd55Xio+Y2tZAACgcRFs4RscDmn4HdJtD0p+ftLXn0j/fEwqOGV3ZQAAoJEQbOFbBo6U7n5aCgq2lt39+4PSieN2VwUAABoBwRa+p9cg6TcvSC1d0oHvpZcekH48ZHdVAACggRFs4Zs69pD+e7rUJlr66bAVbvfvsLsqAADQgAi28F2RcdLkv0lx3aS8bGtYwva1dlcFAAAaCMEWvi00XJr4Z6l7f6nwtPTPR6W1i+2uCgAANACCLXxfcAvp3mel/slSSYk1FdiSOcx1CwCAjyHYonkICJRuf0S69lbr/YKZ1mIOJcX21gUAAOpNjYJtWlqaBgwYoNDQUEVGRio1NVU7dlz4gZz3339fPXr0UHBwsC6++GItWLDA47wxRo8//rhiYmIUEhKi5ORk7dy5s2ZXAlyIwyHdcK/0i4nW/pcfSW8+IxUW2F0ZAACoBzUKtitXrtTEiRO1evVqLVmyREVFRRo+fLjy8/Or/MxXX32l0aNHa/z48dqwYYNSU1OVmpqqLVu2uNtMmzZNL730kl599VWtWbNGLVu2VEpKik6fPl37KwOqMvgX0phHJf9AadMX0quPSPm5dlcFAADqyGFM7Qca/vjjj4qMjNTKlSs1ePDgStvceuutys/P1/z5893HBg0apH79+unVV1+VMUaxsbF68MEH9dBDD0mScnJyFBUVpdmzZ+u22267YB25ublyuVzKyclRWFhYbS8Hzc2ub6U3HpdO50tRHaRfp0nhUXZXBQCAT2qMvFanMbY5OTmSpDZt2lTZZtWqVUpOTvY4lpKSolWrVkmS9uzZo8zMTI82LpdLAwcOdLcBGkTXvtL/+4vkipCy9kvTH5AO77a7KgAAUEu1DrYlJSWaPHmyrrzySvXu3bvKdpmZmYqK8uwFi4qKUmZmpvt82bGq2pyroKBAubm5HhtQK7GdpQdekqI6Sjk/SX/7/6RdG+2uCgAA1EKtg+3EiRO1ZcsWpaen12c91ZKWliaXy+Xe4uPjG70G+JDwSKvnNqG3NSzh1SnSxpV2VwUAAGqoVsF20qRJmj9/vpYvX664uLjzto2OjlZWVpbHsaysLEVHR7vPlx2rqs25pkyZopycHPd24MCB2lwGcFbLMGnCNKnPVVJxkfTWs9Jnc+2uCgAA1ECNgq0xRp
MmTdLcuXO1bNkyJSQkXPAzSUlJWrp0qcexJUuWKCkpSZKUkJCg6Ohojza5ublas2aNu825nE6nwsLCPDagzgKDpLGPSVfeaC3eMPfv0kevW4s6AAAArxdQk8YTJ07UnDlz9J///EehoaHuMbAul0shISGSpDFjxqh9+/ZKS0uTJD3wwAMaMmSI/vznP+v6669Xenq61q1bp9dff12S5HA4NHnyZD377LPq1q2bEhIS9Nhjjyk2Nlapqan1eKlANfj5Szf/P6l1hPTxTGn5e1LOj9JNk6SWLrurAwAA51Gj6b4cDkelx2fNmqVx48ZJkoYOHapOnTpp9uzZ7vPvv/++Hn30Ue3du1fdunXTtGnTdN1117nPG2P0xBNP6PXXX1d2drauuuoqvfLKK+revXu16mK6LzSItZ9I775g9dg6W0hDfykNvVkKbml3ZQAANDmNkdfqNI+ttyDYosHs+laa94p06Afrfcswadhoa7hCkNPe2gAAaEIIttVEsEWDKimRNn0uLZwtHSl9UNHVVhp+pzRwhORfoxE9AAA0SwTbaiLYolEUF0vrlkiL35KOH7GOtY2RRoyVLr3GGp8LAAAqRbCtJoItGtWZQmnVAmnJv6QTx61j0Z2k6+6Sel8hVTEWHQCA5oxgW00EW9ii4JT0+Txp2bvSqTzrWIdE6bq7pe6XEnABACiHYFtNBFvY6lSeNS3Yyg+lwtPWsa59rYCb0Mve2gAA8BIE22oi2MIrnDguffqO9OX/WauXSVLPQdYQhfZd7K0NAACbEWyriWALr3I8S/rkbWnt4rOrll0yVBoxToo8/xLUAAD4KoJtNRFs4ZWOHJQWvSltWG699/OTBqRIKXdI4VH21gYAQCMj2FYTwRZe7dAP1hy4W1dZ7/0DpStvkJJHS6HhtpYGAEBjIdhWE8EWTcLebdLHb1irmUlSULA0+CbpmlukFqH21gYAQAMj2FYTwRZNhjHS999IC2ZK+3dYx0JaWUv0Jl0ntYm2tz4AABoIwbaaCLZocoyRtnwlLZglZe61jjkcUuJl0hU/t2ZT8GclMwCA7yDYVhPBFk1WSbG0+Uvpq4+l79efPe5qKw0cKQ0ayYNmAACfQLCtJoItfMJPh6VVH1vThOVlW8ccDumiy6Wkn1uv9OICAJoogm01EWzhU84UWb24qz6Wdm44e7x1O2ngCGnQddY+AABNCMG2mgi28FlHDkqrP5bWfiLl51jHHH5Sz4FS0vXSRQMkP3pxAQDej2BbTQRb+LwzhdKmL6Sv5ks/bDp7PDzS6sEdOEJyRdhXHwAAF0CwrSaCLZqVrP3S6gXWWNyTJ6xjfn5SrySrFzfxMus9AABehGBbTQRbNEtFhdKmz61e3N2bzx5vE322FzesjX31AQBQDsG2mgi2aPYy91kPm339iXQqzzrm5y/1vsKaF7fbJfTiAgBsRbCtJoItUKqwQPp2pTUv7t6tZ4+3iZb6DbG2uG7WNGIAADQigm01EWyBSmTssYYprPtUOp1/9njbGKnvYGuL707IBQA0CoJtNRFsgfMoPC1tWyttXCFtX2u9L9Mm2gq4/YYQcgEADYpgW00EW6CaCk5J27+2hitsW1NFyB0sxScScgEA9YpgW00EW6AWynpyqwy5V5f25BJyAQB1R7CtJoItUEeFp61hChvPE3L7DpE6EHIBALVDsK0mgi1Qj8pC7refSVtXe4bc8KizwxU69CDkAgCqjWBbTQRboIEUnpa++9rqya0QciPPPnhGyAUAXADBtpoItkAjcIfcz6StqyqG3C59pbiuUvsuUvuuUkgr+2oFAHgdgm01EWyBRlZYUK4n95yQW6ZNtBVw47par+27SK4IenYBoJki2FYTwRawUWGBtHODdHCndGiXdHCXdDyr8rYtXWdDblngbdfeWv4XAODTCLbVRLAFvMzJE9KhH6ygW7Zl7ZdKSiq2DQqWYhLODmGI6ypFJ0hBzsavGwDQYBojrwU0yLcCaN5ahErd+l
lbmcICKXNvubD7g3R4tzWMYd92ayvj5ydFdvAMuzGdpZZhDGUAAFSJHlsA9ikpln48dLZ392Bp6M3Pqbx9cAupbYzUNrb0tXSLiLUeYPPn7+oA4K0YilBNBFvAhxgj5Rz1HMZwcJd0LPP8n3P4WeH23MBbtt8itHHqBwBUiqEIAJofh0NqHWFtvQadPV54WjqWJR3NsLafDp/dP5YhFRVa4fdYpvUw27lCWpX29EZXDL2tIyV/HmADgKaOYAugaQgKlqI7Wtu5SkqkE8fOBt2jGdJP5fZPHJNO5UkHv7e2c/n5W729LV2SM8Qa8uAMKd1anH0NPud9WZvgFtb7gEDGAAOAjQi2AJo+Pz9rjlxXhNT54ornC05ZPbkewfewdDTT6u09U3T2eJ3q8C8XiltIzuDS19Jjoa2l+ESp40VWkCYEA0C9qnGw/eyzz/T8889r/fr1ysjI0Ny5c5Wamlpl+3HjxunNN9+scLxnz57aunWrJOnJJ5/UU0895XE+MTFR3333XU3LA4CKnCHWlGIxCRXPlZRIucesgHsyTyo4aQXhgpPS6dLXsmOny50rOH32eNkCFSXF1lRnJ09cuKbQcKljD6nDRaWviVJwy/q9bgBoZmocbPPz89W3b1/dfffduummmy7Yfvr06Zo6dar7/ZkzZ9S3b1/dcsstHu169eqlTz/99GxhAXQmA2gEfn5nx/TWVklxaeA9NxCXheHSIHw8S9r3nXT4B+nEcWnLKmuTrN7byA5WyO1YGnajExj7CwA1UOP0OHLkSI0cObLa7V0ul1wul/v9vHnzdPz4cd11112ehQQEKDo6uqblAID9/Pyth9NCWlWvfWGBNdvDvu3S/u+ssHssU8raZ21rF1vtgoKluG6ePbut2zGEAQCq0Ojdom+88YaSk5PVsaPnAyA7d+5UbGysgoODlZSUpLS0NHXo0KHS7ygoKFBBQYH7fW5uboPWDAD1KsgpJfSytjInjlsBd//20tfvrN7e3ZutrUxY29KgW9qzG9/dGtcLAGjcYHv48GEtXLhQc+bM8Tg+cOBAzZ49W4mJicrIyNBTTz2lq6++Wlu2bFFoaMW5J9PS0iqMyQWAJi00XOqdZG2SNfb3xwNWyN273Qq8GXuk3KPS5i+tTbLm743uYPXodki05ustKZaKi63Xsu3c9yUlF25T4TPFkpE1fMPP/+zm71/FsfMdr+K9n78UEWONh2bBDQA1VKcFGhwOxwUfHisvLS1Nf/7zn3X48GEFBQVV2S47O1sdO3bUiy++qPHjx1c4X1mPbXx8PAs0APBthaelAzs9e3WPH7G7qoYREGgtqRyfWLp1l6LireALoEnyqQUajDGaOXOm7rzzzvOGWklq3bq1unfvrl27dlV63ul0yul0NkSZAOC9goKlLhdbW5mco6XjdLdLB3daU5dVt2fU45jfhT9T9iBbScnZXt+qenbP2/tbUsW5Emuhjax91rzD+0rHH5e//rhuVsjtUBp228ZatQOAGjHYrly5Urt27aq0B/ZceXl5+uGHH3TnnXc2QmUA0IS52koXX2ltvsIYa57hAzukA9+f3QpPVxxzHNzSCrjusJvIHMFAM1bjYJuXl+fRk7pnzx5t3LhRbdq0UYcOHTRlyhQdOnRIb731lsfn3njjDQ0cOFC9e/eu8J0PPfSQbrjhBnXs2FGHDx/WE088IX9/f40ePboWlwQAaNIcDqlde2u79FrrWEmxdOSgFXb3lwbeQ7uk0/nWEsrll1Fu1fps2I3vboVdV1tbLgVA46pxsF23bp2uueYa9/vf/va3kqSxY8dq9uzZysjI0P79+z0+k5OTo3//+9+aPn16pd958OBBjR49WkePHlW7du101VVXafXq1WrXrl1NywMA+CI//7NLKg8Ybh0rPiNl7rVCblnYPbxbysuWtq+1tjKutmdDblw363taRzKMAfAxdXp4zFs0xmBkAEATUFRohVv3MIYdUuZ+yZRUbBvolCLjpKgOUl
RHa4GMqHirpzjg/M+CAKg5n3p4DACABhcYVLp6W4+zxwpOWcMWynp2D++WfjwoFRVIh36wtvL8/KS2MVbgjexQGnw7SJHx1V+EA4AtCLYAAN/mDJE6X2xtZYqLpaMZ0pH9UtY5W8FJ6cdD1qZVnt/lausZdssCb1hbHlgDvABDEQAAKGOMtQhG+aB75ID1mnu06s8Ft7SGMUR1LA26baxjIS2tV/d+q7PTpgHNDEMRAABoTA6H5Iqwtu6Xep47lecZdMu2oxnW7AznzrtblaDgcmG3RcXgG9LSMxQTjoFqI9gCAFAdIa2kTj2trbwzhdawhfI9vCdzpVP5VuA9nW/tF5622heetrbz9QBfSGCQ5B9ordAWEGg97BYQWHHzP9+xcp/xD6h4LMhphfCgYCkoxHp1lr5nuWN4Kf6bCQBAXQQESTEJ1nY+xWfOhtzygbfCsbxz3p88u18WjosKrc0u/oFnQ25VmzOk3H4l5wOdpeOSHZJDKv0P65ij3L7k2absM2UfcX9HuTHOZccCAq3fCQw6+8pYaJ9GsAUAoDH4B0gtXdZWW2XhuOCUtXzymSKrx/hMkXXu3GPnbsWVHCtrf+65woKzvcuFp6zXktJp04qLpJNF0skT9fNn05g8gq7T6pku/z4wSAoM9gzDQcEV2wQFn+3ldvd4n9MD7u4dD7D2CdUNjmALAEBTUR/huLaMsQJtwelygfe0FbILzz12umIoPrdtUZEkY32vSp9jNzp7rPyz7WVtKj2mcsfLtTHGCuhFBdbKdWWKCqxNNoRyd+ANOCcQB0j+Qdbruef9A6wFSvz9PV/LNo/zfhXPndu+su+KSZBCwxv/z6MBEGwBAMCFORylY3CDpJZNbAai4uKzgbZsKyyUik6XDusoO1Z2vvDsa+E5bTzOF0hnzpT2lp/Tc15cdLaH211HaVtvM+ZR6ZKhdldRLwi2AADAt/n7S/4tpOAWjfu7JcWlwbf8cJAz5wwLKQ3FFY6XbiXF1lZcfHbf/b6kdP9M1e3KjlV1vKRYahHauH8uDYhgCwAA0BD8/KUgf0lOuytpNvzsLgAAAACoDwRbAAAA+ASCLQAAAHwCwRYAAAA+gWALAAAAn0CwBQAAgE8g2AIAAMAnEGwBAADgEwi2AAAA8AkEWwAAAPgEgi0AAAB8AsEWAAAAPoFgCwAAAJ9AsAUAAIBPCLC7gPpgjJEk5ebm2lwJAAAAKlOW08pyW0PwiWB74sQJSVJ8fLzNlQAAAOB8Tpw4IZfL1SDf7TANGZsbSUlJiQ4fPqzQ0FA5HA7l5uYqPj5eBw4cUFhYmN3loQFxr5sH7nPzwb1uHrjPzUf5ex0aGqoTJ04oNjZWfn4NMxrWJ3ps/fz8FBcXV+F4WFgY/8A0E9zr5oH73Hxwr5sH7nPzUXavG6qntgwPjwEAAMAnEGwBAADgE3wy2DqdTj3xxBNyOp12l4IGxr1uHrjPzQf3unngPjcfjX2vfeLhMQAAAMAne2wBAADQ/BBsAQAA4BMItgAAAPAJBFsAAAD4BJ8Mtn//+9/VqVMnBQcHa+DAgVq7dq3dJeE8PvvsM91www2KjY2Vw+HQvHnzPM4bY/T4448rJiZGISEhSk5O1s6dOz3aHDt2TLfffrvCwsLUunVrjR8/Xnl5eR5tNm3apKuvvlrBwcGKj4/XtGnTGvrSUE5aWpoGDBig0NBQRUZGKjU1VTt27PBoc/r0aU2cOFFt27ZVq1atdPPNNysrK8ujzf79+3X99derRYsWioyM1MMPP6wzZ854tFmxYoUuvfRSOZ1Ode3aVbNnz27oy0OpGTNmqE+fPu7J2JOSkrRw4UL3ee6xb5o6daocDocmT57sPsa99g1PPvmkHA6Hx9ajRw/3ea+7z8bHpKenm6CgIDNz5kyzdetWc++995rWrVubrKwsu0tDFRYsWGD+53/+x3z44YdGkpk7d67H+alTpxqXy2XmzZtnvv32W3PjjTeahIQEc+rUKXebESNGmL
59+5rVq1ebzz//3HTt2tWMHj3afT4nJ8dERUWZ22+/3WzZssW88847JiQkxLz22muNdZnNXkpKipk1a5bZsmWL2bhxo7nuuutMhw4dTF5enrvN/fffb+Lj483SpUvNunXrzKBBg8wVV1zhPn/mzBnTu3dvk5ycbDZs2GAWLFhgIiIizJQpU9xtdu/ebVq0aGF++9vfmm3btpm//e1vxt/f3yxatKhRr7e5+uijj8zHH39svv/+e7Njxw7zhz/8wQQGBpotW7YYY7jHvmjt2rWmU6dOpk+fPuaBBx5wH+de+4YnnnjC9OrVy2RkZLi3H3/80X3e2+6zzwXbyy+/3EycONH9vri42MTGxpq0tDQbq0J1nRtsS0pKTHR0tHn++efdx7Kzs43T6TTvvPOOMcaYbdu2GUnm66+/drdZuHChcTgc5tChQ8YYY1555RUTHh5uCgoK3G0eeeQRk5iY2MBXhKocOXLESDIrV640xlj3NTAw0Lz//vvuNtu3bzeSzKpVq4wx1l+C/Pz8TGZmprvNjBkzTFhYmPve/u53vzO9evXy+K1bb73VpKSkNPQloQrh4eHmn//8J/fYB504ccJ069bNLFmyxAwZMsQdbLnXvuOJJ54wffv2rfScN95nnxqKUFhYqPXr1ys5Odl9zM/PT8nJyVq1apWNlaG29uzZo8zMTI976nK5NHDgQPc9XbVqlVq3bq3LLrvM3SY5OVl+fn5as2aNu83gwYMVFBTkbpOSkqIdO3bo+PHjjXQ1KC8nJ0eS1KZNG0nS+vXrVVRU5HGve/TooQ4dOnjc64svvlhRUVHuNikpKcrNzdXWrVvdbcp/R1kb/h3Q+IqLi5Wenq78/HwlJSVxj33QxIkTdf3111e4H9xr37Jz507Fxsaqc+fOuv3227V//35J3nmffSrY/vTTTyouLvb4w5OkqKgoZWZm2lQV6qLsvp3vnmZmZioyMtLjfEBAgNq0aePRprLvKP8baDwlJSWaPHmyrrzySvXu3VuSdR+CgoLUunVrj7bn3usL3ceq2uTm5urUqVMNcTk4x+bNm9WqVSs5nU7df//9mjt3rnr27Mk99jHp6en65ptvlJaWVuEc99p3DBw4ULNnz9aiRYs0Y8YM7dmzR1dffbVOnDjhlfc5oEatAaAeTJw4UVu2bNEXX3xhdyloAImJidq4caNycnL0wQcfaOzYsVq5cqXdZaEeHThwQA888ICWLFmi4OBgu8tBAxo5cqR7v0+fPho4cKA6duyo9957TyEhITZWVjmf6rGNiIiQv79/hafxsrKyFB0dbVNVqIuy+3a+exodHa0jR454nD9z5oyOHTvm0aay7yj/G2gckyZN0vz587V8+XLFxcW5j0dHR6uwsFDZ2dke7c+91xe6j1W1CQsL88p/CfuioKAgde3aVf3791daWpr69u2r6dOnc499yPr163XkyBFdeumlCggIUEBAgFauXKmXXnpJAQEBioqK4l77qNatW6t79+7atWuXV/4z7VPBNigoSP3799fSpUvdx0pKSrR06VIlJSXZWBlqKyEhQdHR0R73NDc3V2vWrHHf06SkJGVnZ2v9+vXuNsuWLVNJSYkGDhzobvPZZ5+pqKjI3WbJkiVKTExUeHh4I11N82aM0aRJkzR37lwtW7ZMCQkJHuf79++vwMBAj3u9Y8cO7d+/3+Neb9682eMvMkuWLFFYWJh69uzpblP+O8ra8O8A+5SUlKigoIB77EOGDRumzZs3a+PGje7tsssu0+233+7e5177pry8PP3www+KiYnxzn+ma/y4mZdLT083TqfTzJ4922zbts3cd999pnXr1h5P48G7nDhxwmzYsMFs2LDBSDIvvvii2bBhg9m3b58xxpruq3Xr1uY///mP2bRpkxk1alSl031dcsklZs2aNeaLL74w3bp185juKzs720RFRZk777zTbNmyxaSnp5sWLVow3VcjmjBhgnG5XGbFihUe08acPHnS3eb+++83HT
p0MMuWLTPr1q0zSUlJJikpyX2+bNqY4cOHm40bN5pFixaZdu3aVTptzMMPP2y2b99u/v73vzM9UCP6/e9/b1auXGn27NljNm3aZH7/+98bh8NhPvnkE2MM99iXlZ8VwRjuta948MEHzYoVK8yePXvMl19+aZKTk01ERIQ5cuSIMcb77rPPBVtjjPnb3/5mOnToYIKCgszll19uVq9ebXdJOI/ly5cbSRW2sWPHGmOsKb8ee+wxExUVZZxOpxk2bJjZsWOHx3ccPXrUjB492rRq1cqEhYWZu+66y5w4ccKjzbfffmuuuuoq43Q6Tfv27c3UqVMb6xJhTKX3WJKZNWuWu82pU6fMb37zGxMeHm5atGhhfvGLX5iMjAyP79m7d68ZOXKkCQkJMREREebBBx80RUVFHm2WL19u+vXrZ4KCgkznzp09fgMN6+677zYdO3Y0QUFBpl27dmbYsGHuUGsM99iXnRtsude+4dZbbzUxMTEmKCjItG/f3tx6661m165d7vPedp8dxhhT835eAAAAwLv41BhbAAAANF8EWwAAAPgEgi0AAAB8AsEWAAAAPoFgCwAAAJ9AsAUAAIBPINgCAADAJxBsAQAA4BMItgAAAPAJBFsAAAD4BIItAAAAfALBFgAAAD7h/wdtXsPv2SVEnwAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "tb_dir = os.path.join(WORK_DIR, 'tensorboard_output')\n", + "fname = os.listdir(tb_dir)[0]\n", + "tb_path = os.path.join(tb_dir, fname)\n", + "#\n", + "data = read_tensorboard_file(tb_path)\n", + "print(data.keys())\n", + "_ = plot_image(data, 'loss', 0.9)\n", + "_ = plot_image(data, 'lr', 0)\n", + "_ = plot_image(data, 'evaluation/acc', 0)\n", + "_ = plot_image(data, 'evaluation/loss', 0)" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 推理\n", + "推理部分见chatglm2_infer.ipynb" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "hackathon", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.11" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/finetune_multi_modal_embedding.py b/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/finetune_multi_modal_embedding.py new file mode 100644 index 0000000000000000000000000000000000000000..7b4cfbb854ac840b2f986225c1aff2bb89e4d0d0 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/finetune_multi_modal_embedding.py @@ -0,0 +1,126 @@ +import os +from dataclasses import dataclass, field + +from modelscope import MsDataset, TrainingArgs +from modelscope.metainfo import Trainers +from modelscope.trainers import build_trainer +from modelscope.trainers.training_args import set_flatten_value + + +@dataclass(init=False) +class MultiModalEmbeddingArguments(TrainingArgs): + + trainer: str = field( + default=Trainers.default, metadata={ + 'help': 'The trainer used', + }) + + work_dir: str = field( + 
default='./tmp', + metadata={ + 'help': 'The working path for saving checkpoint', + }) + + use_fp16: bool = field( + default=None, + metadata={ + 'cfg_node': 'train.use_fp16', + 'help': 'Whether to use fp16', + }) + + optimizer_lr: float = field( + default=None, + metadata={ + 'cfg_node': 'train.optimizer_hparams.lr', + 'help': 'The learning rate of the optimizer', + }) + + optimizer_hparams: str = field( + default=None, + metadata={ + 'cfg_node': 'train.optimizer_hparams', + 'cfg_setter': set_flatten_value, + 'help': 'The optimizer init params except `lr`', + }) + + loss_aggregate: bool = field( + default=None, + metadata={ + 'cfg_node': 'train.loss_cfg.aggregate', + 'help': 'Whether to use loss aggregate', + }) + + dataset_column_map: str = field( + default=None, + metadata={ + 'cfg_node': 'dataset.column_map', + 'cfg_setter': set_flatten_value, + 'help': 'The column map for dataset', + }) + + lr_warmup_proportion: float = field( + default=None, + metadata={ + 'cfg_node': 'train.lr_scheduler.warmup_proportion', + 'help': 'The warmup proportion for lr scheduler', + }) + + lr_scheduler_hook: str = field( + default=None, + metadata={ + 'cfg_node': 'train.lr_scheduler_hook', + 'cfg_setter': set_flatten_value, + 'help': 'The parameters for lr scheduler hook', + }) + + optimizer_hook: str = field( + default=None, + metadata={ + 'cfg_node': 'train.optimizer_hook', + 'cfg_setter': set_flatten_value, + 'help': 'The parameters for optimizer hook', + }) + + clip_clamp: bool = field( + default=None, + metadata={ + 'help': 'Whether to use ClipClampLogitScaleHook', + }) + + world_size: int = field( + default=None, metadata={ + 'help': 'The data parallel world size', + }) + + +config, args = MultiModalEmbeddingArguments().parse_cli().to_config() +print(config, args) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.merge_from_dict({'pretrained_model.model_name': args.model}) + if args.clip_clamp: + 
cfg.train.hooks.append({'type': 'ClipClampLogitScaleHook'}) + if args.world_size > 1: + cfg.train.launcher = 'pytorch' + return cfg + + +train_dataset = MsDataset.load( + args.train_dataset_name, namespace='modelscope', split='train') +eval_dataset = MsDataset.load( + args.train_dataset_name, namespace='modelscope', split='validation') + +os.makedirs(args.work_dir, exist_ok=True) +kwargs = dict( + model=args.model, + train_dataset=train_dataset, + eval_dataset=eval_dataset, + work_dir=args.work_dir, + cfg_modify_fn=cfg_modify_fn) +trainer = build_trainer(name=args.trainer, default_args=kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..3974405b2a3ce24788c021faf9b47a58886458bc --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/multi_modal_embedding/run_train.sh @@ -0,0 +1,38 @@ +DATA_PARALLEL_SIZE=2 + + +PYTHONPATH=. 
torchrun --nproc_per_node $DATA_PARALLEL_SIZE \ + examples/pytorch/multi_modal_embedding/finetune_multi_modal_embedding.py \ + --trainer 'clip-multi-modal-embedding' \ + --work_dir './workspace/ckpts/clip' \ + --model 'damo/multi-modal_clip-vit-base-patch16_zh' \ + --train_dataset_name 'muge' \ + --dataset_column_map 'img=image,text=query' \ + --max_epochs 1 \ + --use_fp16 true \ + --per_device_train_batch_size 180 \ + --train_data_worker 0 \ + --train_shuffle true \ + --train_drop_last true \ + --per_device_eval_batch_size 128 \ + --eval_data_worker 0 \ + --eval_shuffle true \ + --eval_drop_last true \ + --save_ckpt_best true \ + --save_ckpt_best_strategy by_step \ + --ckpt_best_interval 200 \ + --metric_for_best_model inbatch_t2i_recall_at_1 \ + --logging_interval 1 \ + --eval_strategy by_step \ + --eval_interval 200 \ + --eval_metrics 'inbatch_recall' \ + --optimizer_lr 2.5e-05 \ + --optimizer 'AdamW' \ + --optimizer_hparams 'weight_decay=0.001,beta1=0.9,beta2=0.999,eps=1e-08' \ + --loss_aggregate true \ + --lr_warmup_proportion 0.1 \ + --lr_scheduler_hook 'type=LrSchedulerHook,by_epoch=false' \ + --optimizer_hook 'type=TorchAMPOptimizerHook,cumulative_iters=1,loss_keys=loss' \ + --clip_clamp true \ + --world_size $DATA_PARALLEL_SIZE \ + --use_model_config true \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/finetune_named_entity_recognition.py b/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/finetune_named_entity_recognition.py new file mode 100644 index 0000000000000000000000000000000000000000..67a091f741f4f3103a9799b37d9ff9077b1df83b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/finetune_named_entity_recognition.py @@ -0,0 +1,97 @@ +import os +from dataclasses import dataclass, field + +from adaseq.data.data_collators.base import build_data_collator +from adaseq.data.dataset_manager import DatasetManager +from adaseq.data.preprocessors.nlp_preprocessor import 
build_preprocessor +from adaseq.training.default_trainer import DefaultTrainer as AdaSeqTrainer + +from modelscope import MsDataset, TrainingArgs, build_dataset_from_file + + +@dataclass(init=False) +class NamedEntityRecognitionArguments(TrainingArgs): + preprocessor: str = field( + default='sequence-labeling-preprocessor', + metadata={ + 'help': 'The preprocessor type', + 'cfg_node': 'preprocessor.type' + }) + + sequence_length: int = field( + default=150, + metadata={ + 'cfg_node': 'preprocessor.max_length', + 'help': 'The parameters for train dataset', + }) + + data_collator: str = field( + default='SequenceLabelingDataCollatorWithPadding', + metadata={ + 'cfg_node': 'data_collator', + 'help': 'The type of data collator', + }) + + dropout: float = field( + default=0.0, + metadata={ + 'cfg_node': 'model.dropout', + 'help': 'Dropout rate', + }) + + use_crf: bool = field( + default=True, + metadata={ + 'cfg_node': 'model.use_crf', + 'help': 'Whether to add a CRF decoder layer', + }) + + crf_lr: float = field( + default=5.0e-1, metadata={ + 'help': 'Learning rate for CRF layer', + }) + + +training_args = NamedEntityRecognitionArguments().parse_cli() +config, args = training_args.to_config() +print(args) + +if args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace).to_hf_dataset() + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace).to_hf_dataset() +else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) +dm = DatasetManager({ + 'train': train_dataset, + 'valid': validation_dataset +}, labels={'type': 'count_span_labels'}) # yapf: disable + +config.preprocessor.model_dir = args.model +config.model.embedder = {'model_name_or_path': args.model} +preprocessor = 
build_preprocessor(config.preprocessor, labels=dm.labels) +config.model.id_to_label = preprocessor.id_to_label +data_collator = build_data_collator(preprocessor.tokenizer, + dict(type=config.data_collator)) +config.train.optimizer.param_groups = [{'regex': 'crf', 'lr': args.crf_lr}] + +cfg_file = os.path.join(config.train.work_dir, 'config.yaml') +config.dump(cfg_file) + +kwargs = dict( + cfg_file=cfg_file, + work_dir=config.train.work_dir, + dataset_manager=dm, + data_collator=data_collator, + preprocessor=preprocessor) + +trainer = AdaSeqTrainer(**kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..91b2c5bfdf8c503d9c8440276544b1d03d39499c --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/named_entity_recognition/run_train.sh @@ -0,0 +1,26 @@ +PYTHONPATH=. python examples/pytorch/named_entity_recognition/finetune_named_entity_recognition.py \ + --task 'named-entity-recognition' \ + --work_dir './tmp' \ + --model_type 'sequence-labeling-model' \ + --model 'damo/nlp_structbert_backbone_base_std' \ + --dropout 0.1 \ + --use_crf true \ + --train_dataset_name 'resume_ner' \ + --train_dataset_namespace 'damo' \ + --train_split 'train' \ + --val_dataset_name 'resume_ner' \ + --val_dataset_namespace 'damo' \ + --val_split 'dev' \ + --preprocessor 'sequence-labeling-preprocessor' \ + --sequence_length 150 \ + --data_collator 'SequenceLabelingDataCollatorWithPadding' \ + --max_epochs 5 \ + --per_device_train_batch_size 16 \ + --train_data_worker 0 \ + --eval_data_worker 0 \ + --lr 5.0e-5 \ + --lr_scheduler LinearLR \ + --lr_scheduler_params 'start_factor=1.0,end_factor=0.0,total_iters=5' \ + --eval_metrics ner-metric \ + --save_best_checkpoint true \ + --metric_for_best_model f1 \ diff --git 
a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/finetune_stable_diffusion_cones2.py b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/finetune_stable_diffusion_cones2.py new file mode 100644 index 0000000000000000000000000000000000000000..135a5c7d943b5d8b55ddac4af5c264b49c9496aa --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/finetune_stable_diffusion_cones2.py @@ -0,0 +1,107 @@ +import os +from dataclasses import dataclass, field + +import cv2 + +from modelscope.metainfo import Trainers +from modelscope.msdatasets import MsDataset +from modelscope.pipelines import pipeline +from modelscope.trainers import EpochBasedTrainer, build_trainer +from modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.constant import DownloadMode, Tasks + + +# Load configuration file and dataset +@dataclass(init=False) +class StableDiffusionCones2Arguments(TrainingArgs): + instance_prompt: str = field( + default='a photo of sks dog', + metadata={ + 'help': 'The instance prompt for cones.', + }) + + resolution: int = field( + default=768, metadata={ + 'help': 'The class images resolution.', + }) + + train_batch_size: int = field( + default=4, + metadata={ + 'help': 'Batch size (per device) for the training dataloader.', + }) + + sample_batch_size: int = field( + default=4, + metadata={ + 'help': 'Batch size (per device) for sampling images.', + }) + + prompt: str = field( + default='dog', metadata={ + 'help': 'The pipeline prompt.', + }) + + +training_args = StableDiffusionCones2Arguments( + task='text-to-image-synthesis').parse_cli() +config, args = training_args.to_config() + +if os.path.exists(args.train_dataset_name): + # Load local dataset + train_dataset = MsDataset.load(args.train_dataset_name) + validation_dataset = MsDataset.load(args.train_dataset_name) +else: + # Load online dataset + train_dataset = MsDataset.load( + args.train_dataset_name, + split='train', + 
download_mode=DownloadMode.FORCE_REDOWNLOAD) + validation_dataset = MsDataset.load( + args.train_dataset_name, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': lambda _: 1, + 'last_epoch': -1 + } + return cfg + + +kwargs = dict( + model=training_args.model, + model_revision=args.model_revision, + work_dir=training_args.work_dir, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + cfg_modify_fn=cfg_modify_fn) + +trainer = build_trainer(name=Trainers.cones2_inference, default_args=kwargs) +trainer.train() + +# pipeline after training and save result +pipe = pipeline( + task=Tasks.text_to_image_synthesis, + model=training_args.work_dir + '/output', + model_revision=args.model_revision) + +output = pipe({ + 'text': 'a mug and a dog on the beach', + 'subject_list': [['mug', 2], ['dog', 5]], + 'color_context': { + '255,192,0': ['mug', 2.5], + '255,0,0': ['dog', 2.5] + }, + 'layout': 'data/test/images/mask_example.png' +}) +# visualize the result on ipynb and save it +output +cv2.imwrite('./cones2_result.png', output['output_imgs'][0]) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/run_train_cones2.sh b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/run_train_cones2.sh new file mode 100644 index 0000000000000000000000000000000000000000..f00ab3b40b36d6099945eebba514f202ee7f7683 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/cones2/run_train_cones2.sh @@ -0,0 +1,13 @@ +PYTHONPATH=. 
torchrun examples/pytorch/stable_diffusion/cones2/finetune_stable_diffusion_cones2.py \ + --model 'damo/Cones2' \ + --model_revision 'v1.0.1' \ + --instance_prompt="dog" \ + --work_dir './tmp/cones2_diffusion' \ + --train_dataset_name 'buptwq/lora-stable-diffusion-finetune-dog' \ + --max_epochs 250 \ + --save_ckpt_strategy 'by_epoch' \ + --logging_interval 1 \ + --train.dataloader.workers_per_gpu 0 \ + --evaluation.dataloader.workers_per_gpu 0 \ + --train.optimizer.lr 1e-5 \ + --use_model_config true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/finetune_stable_diffusion_custom.py b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/finetune_stable_diffusion_custom.py new file mode 100644 index 0000000000000000000000000000000000000000..83914127bd5b617ffba56a7c56450a4920288f3c --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/finetune_stable_diffusion_custom.py @@ -0,0 +1,187 @@ +import os +from dataclasses import dataclass, field + +import cv2 +import torch + +from modelscope import snapshot_download +from modelscope.metainfo import Trainers +from modelscope.models import Model +from modelscope.msdatasets import MsDataset +from modelscope.pipelines import pipeline +from modelscope.trainers import EpochBasedTrainer, build_trainer +from modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.constant import DownloadMode, Tasks + + +# Load configuration file and dataset +@dataclass(init=False) +class StableDiffusionCustomArguments(TrainingArgs): + class_prompt: str = field( + default=None, + metadata={ + 'help': + 'The prompt to specify images in the same class as provided instance images.', + }) + + instance_prompt: str = field( + default=None, + metadata={ + 'help': 'The prompt with identifier specifying the instance.', + }) + + modifier_token: str = field( + default=None, + metadata={ + 'help': 'A token to use as a modifier for the concept.', + }) + + 
num_class_images: int = field( + default=200, + metadata={ + 'help': 'Minimal class images for prior preservation loss.', + }) + + train_batch_size: int = field( + default=4, + metadata={ + 'help': 'Batch size (per device) for the training dataloader.', + }) + + sample_batch_size: int = field( + default=4, + metadata={ + 'help': 'Batch size (per device) for sampling images.', + }) + + initializer_token: str = field( + default='ktn+pll+ucd', + metadata={ + 'help': 'A token to use as initializer word.', + }) + + class_data_dir: str = field( + default='/tmp/class_data', + metadata={ + 'help': 'A folder containing the training data of class images.', + }) + + resolution: int = field( + default=512, + metadata={ + 'help': + 'The resolution for input images, all the images in the train/validation dataset will be resized to this', + }) + + prior_loss_weight: float = field( + default=1.0, + metadata={ + 'help': 'The weight of prior preservation loss.', + }) + + freeze_model: str = field( + default='crossattn_kv', + metadata={ + 'help': + 'crossattn to enable fine-tuning of all params in the cross attention.', + }) + + instance_data_name: str = field( + default='buptwq/lora-stable-diffusion-finetune-dog', + metadata={ + 'help': 'The instance data local dir or online ID.', + }) + + concepts_list: str = field( + default=None, + metadata={ + 'help': 'Path to json containing multiple concepts.', + }) + + torch_type: str = field( + default='float32', + metadata={ + 'help': ' The torch type, default is float32.', + }) + + +training_args = StableDiffusionCustomArguments( + task='text-to-image-synthesis').parse_cli() +config, args = training_args.to_config() + +if os.path.exists(args.train_dataset_name): + # Load local dataset + train_dataset = MsDataset.load(args.train_dataset_name) + validation_dataset = MsDataset.load(args.train_dataset_name) +else: + # Load online dataset + train_dataset = MsDataset.load( + args.train_dataset_name, + split='train', + 
download_mode=DownloadMode.FORCE_REDOWNLOAD) + validation_dataset = MsDataset.load( + args.train_dataset_name, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': lambda _: 1, + 'last_epoch': -1 + } + return cfg + + +# build model +model_dir = snapshot_download(training_args.model) +model = Model.from_pretrained( + training_args.model, + revision=args.model_revision, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32) + +# build trainer and training +kwargs = dict( + model=model, + cfg_file=os.path.join(model_dir, 'configuration.json'), + class_prompt=args.class_prompt, + instance_prompt=args.instance_prompt, + modifier_token=args.modifier_token, + num_class_images=args.num_class_images, + train_batch_size=args.train_batch_size, + sample_batch_size=args.sample_batch_size, + initializer_token=args.initializer_token, + class_data_dir=args.class_data_dir, + concepts_list=args.concepts_list, + resolution=args.resolution, + prior_loss_weight=args.prior_loss_weight, + freeze_model=args.freeze_model, + instance_data_name=args.instance_data_name, + work_dir=training_args.work_dir, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32, + cfg_modify_fn=cfg_modify_fn) + +trainer = build_trainer(name=Trainers.custom_diffusion, default_args=kwargs) +trainer.train() + +# pipeline after training and save result +pipe = pipeline( + task=Tasks.text_to_image_synthesis, + model=training_args.model, + custom_dir=training_args.work_dir + '/output', + modifier_token=args.modifier_token, + model_revision=args.model_revision) + +output = pipe({'text': args.instance_prompt}) +# visualize the result on ipynb and save it +output +cv2.imwrite('./custom_result.png', 
output['output_imgs'][0]) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/run_train_custom.sh b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/run_train_custom.sh new file mode 100644 index 0000000000000000000000000000000000000000..7f9cb50080e8ebaf7232c8e194efa2dd15b04b3a --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/custom/run_train_custom.sh @@ -0,0 +1,18 @@ +PYTHONPATH=. torchrun examples/pytorch/stable_diffusion/custom/finetune_stable_diffusion_custom.py \ + --model 'AI-ModelScope/stable-diffusion-v2-1' \ + --model_revision 'v1.0.9' \ + --class_prompt "dog" \ + --instance_prompt="photo of a dog" \ + --work_dir './tmp/custom_diffusion' \ + --class_data_dir './tmp/class_data' \ + --train_dataset_name 'buptwq/lora-stable-diffusion-finetune-dog' \ + --max_epochs 250 \ + --modifier_token "" \ + --num_class_images=200 \ + --save_ckpt_strategy 'by_epoch' \ + --logging_interval 1 \ + --train.dataloader.workers_per_gpu 0 \ + --evaluation.dataloader.workers_per_gpu 0 \ + --train.optimizer.lr 1e-5 \ + --torch_type 'float32' \ + --use_model_config true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/finetune_stable_diffusion_dreambooth.py b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/finetune_stable_diffusion_dreambooth.py new file mode 100644 index 0000000000000000000000000000000000000000..2b741ede76e2bd7d101b3520895a29e5be1a21b5 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/finetune_stable_diffusion_dreambooth.py @@ -0,0 +1,144 @@ +import os +from dataclasses import dataclass, field + +import cv2 +import torch + +from modelscope import snapshot_download +from modelscope.metainfo import Trainers +from modelscope.models import Model +from modelscope.msdatasets import MsDataset +from modelscope.pipelines import pipeline +from modelscope.trainers import build_trainer +from 
modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.constant import DownloadMode, Tasks + + +# Load configuration file and dataset +@dataclass(init=False) +class StableDiffusionDreamboothArguments(TrainingArgs): + with_prior_preservation: bool = field( + default=False, metadata={ + 'help': 'Whether to enable prior loss.', + }) + + instance_prompt: str = field( + default='a photo of sks dog', + metadata={ + 'help': 'The instance prompt for dreambooth.', + }) + + class_prompt: str = field( + default='a photo of dog', + metadata={ + 'help': 'The class prompt for dreambooth.', + }) + + class_data_dir: str = field( + default='./tmp/class_data', + metadata={ + 'help': 'Save class prompt images path.', + }) + + num_class_images: int = field( + default=200, + metadata={ + 'help': 'The numbers of saving class images.', + }) + + resolution: int = field( + default=512, metadata={ + 'help': 'The class images resolution.', + }) + + prior_loss_weight: float = field( + default=1.0, + metadata={ + 'help': 'The weight of instance and prior loss.', + }) + + prompt: str = field( + default='dog', metadata={ + 'help': 'The pipeline prompt.', + }) + + torch_type: str = field( + default='float32', + metadata={ + 'help': ' The torch type, default is float32.', + }) + + +training_args = StableDiffusionDreamboothArguments( + task='text-to-image-synthesis').parse_cli() +config, args = training_args.to_config() + +if os.path.exists(args.train_dataset_name): + # Load local dataset + train_dataset = MsDataset.load(args.train_dataset_name) + validation_dataset = MsDataset.load(args.train_dataset_name) +else: + # Load online dataset + train_dataset = MsDataset.load( + args.train_dataset_name, + split='train', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + validation_dataset = MsDataset.load( + args.train_dataset_name, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + 
cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': lambda _: 1, + 'last_epoch': -1 + } + return cfg + + +# build model +model_dir = snapshot_download(training_args.model) +model = Model.from_pretrained( + training_args.model, + revision=args.model_revision, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32) + +# build trainer and training +kwargs = dict( + model=model, + cfg_file=os.path.join(model_dir, 'configuration.json'), + work_dir=training_args.work_dir, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + instance_prompt=args.instance_prompt, + class_prompt=args.class_prompt, + class_data_dir=args.class_data_dir, + num_class_images=args.num_class_images, + resolution=args.resolution, + prior_loss_weight=args.prior_loss_weight, + prompt=args.prompt, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32, + cfg_modify_fn=cfg_modify_fn) + +trainer = build_trainer( + name=Trainers.dreambooth_diffusion, default_args=kwargs) +trainer.train() + +# pipeline after training and save result +pipe = pipeline( + task=Tasks.text_to_image_synthesis, + model=training_args.work_dir + '/output', + model_revision=args.model_revision) + +output = pipe({'text': args.prompt}) +# visualize the result on ipynb and save it +output +cv2.imwrite('./dreambooth_result.png', output['output_imgs'][0]) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/run_train_dreambooth.sh b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/run_train_dreambooth.sh new file mode 100644 index 0000000000000000000000000000000000000000..461434ee1e5d83c19a8e97e27111d8e16a2c254b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/dreambooth/run_train_dreambooth.sh @@ -0,0 +1,21 @@ +PYTHONPATH=. 
torchrun examples/pytorch/stable_diffusion/dreambooth/finetune_stable_diffusion_dreambooth.py \ + --model 'AI-ModelScope/stable-diffusion-v2-1' \ + --model_revision 'v1.0.8' \ + --work_dir './tmp/dreambooth_diffusion' \ + --train_dataset_name 'buptwq/lora-stable-diffusion-finetune' \ + --with_prior_preservation false \ + --instance_prompt "a photo of sks dog" \ + --class_prompt "a photo of dog" \ + --class_data_dir "./tmp/class_data" \ + --num_class_images 200 \ + --resolution 512 \ + --prior_loss_weight 1.0 \ + --prompt "dog" \ + --max_epochs 150 \ + --save_ckpt_strategy 'by_epoch' \ + --logging_interval 1 \ + --train.dataloader.workers_per_gpu 0 \ + --evaluation.dataloader.workers_per_gpu 0 \ + --train.optimizer.lr 5e-6 \ + --torch_type 'float32' \ + --use_model_config true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/finetune_stable_diffusion_lora.py b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/finetune_stable_diffusion_lora.py new file mode 100644 index 0000000000000000000000000000000000000000..b6f9e57ae2ed6cdb06e005dc88e970911d22712a --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/finetune_stable_diffusion_lora.py @@ -0,0 +1,104 @@ +import os +from dataclasses import dataclass, field + +import cv2 +import torch + +from modelscope import snapshot_download +from modelscope.metainfo import Trainers +from modelscope.models import Model +from modelscope.msdatasets import MsDataset +from modelscope.pipelines import pipeline +from modelscope.trainers import build_trainer +from modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.constant import DownloadMode, Tasks + + +# Load configuration file and dataset +@dataclass(init=False) +class StableDiffusionLoraArguments(TrainingArgs): + prompt: str = field( + default='dog', metadata={ + 'help': 'The pipeline prompt.', + }) + + lora_rank: int = field( + default=4, + metadata={ + 'help': 'The rank size of lora 
intermediate linear.', + }) + + torch_type: str = field( + default='float32', + metadata={ + 'help': ' The torch type, default is float32.', + }) + + +training_args = StableDiffusionLoraArguments( + task='text-to-image-synthesis').parse_cli() +config, args = training_args.to_config() + +if os.path.exists(args.train_dataset_name): + # Load local dataset + train_dataset = MsDataset.load(args.train_dataset_name) + validation_dataset = MsDataset.load(args.train_dataset_name) +else: + # Load online dataset + train_dataset = MsDataset.load( + args.train_dataset_name, + split='train', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + validation_dataset = MsDataset.load( + args.train_dataset_name, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': lambda _: 1, + 'last_epoch': -1 + } + return cfg + + +# build model +model_dir = snapshot_download(training_args.model) +model = Model.from_pretrained( + training_args.model, + revision=args.model_revision, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32) + +# build trainer and training +kwargs = dict( + model=model, + cfg_file=os.path.join(model_dir, 'configuration.json'), + work_dir=training_args.work_dir, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + lora_rank=args.lora_rank, + torch_type=torch.float16 + if args.torch_type == 'float16' else torch.float32, + cfg_modify_fn=cfg_modify_fn) + +trainer = build_trainer(name=Trainers.lora_diffusion, default_args=kwargs) +trainer.train() + +# pipeline after training and save result +pipe = pipeline( + task=Tasks.text_to_image_synthesis, + model=training_args.model, + lora_dir=training_args.work_dir + '/output', + model_revision=args.model_revision) + +output = pipe({'text': args.prompt}) +# visualize the result on ipynb and save it +output 
+cv2.imwrite('./lora_result.png', output['output_imgs'][0]) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/run_train_lora.sh b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/run_train_lora.sh new file mode 100644 index 0000000000000000000000000000000000000000..82e31aadb0a7cfbf69c679784b40a6d0d3fcc58d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora/run_train_lora.sh @@ -0,0 +1,15 @@ +PYTHONPATH=. torchrun examples/pytorch/stable_diffusion/lora/finetune_stable_diffusion_lora.py \ + --model 'AI-ModelScope/stable-diffusion-v2-1' \ + --model_revision 'v1.0.9' \ + --prompt "a dog" \ + --work_dir './tmp/lora_diffusion' \ + --train_dataset_name 'buptwq/lora-stable-diffusion-finetune' \ + --max_epochs 100 \ + --lora_rank 16 \ + --save_ckpt_strategy 'by_epoch' \ + --logging_interval 1 \ + --train.dataloader.workers_per_gpu 0 \ + --evaluation.dataloader.workers_per_gpu 0 \ + --train.optimizer.lr 1e-4 \ + --torch_type 'float16' \ + --use_model_config true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/finetune_stable_diffusion_xl_lora.py b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/finetune_stable_diffusion_xl_lora.py new file mode 100644 index 0000000000000000000000000000000000000000..42facfec3fdc01dbb5bf23ec046b029d0042a550 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/finetune_stable_diffusion_xl_lora.py @@ -0,0 +1,85 @@ +import os +from dataclasses import dataclass, field + +import cv2 + +from modelscope.metainfo import Trainers +from modelscope.msdatasets import MsDataset +from modelscope.pipelines import pipeline +from modelscope.trainers import build_trainer +from modelscope.trainers.training_args import TrainingArgs +from modelscope.utils.constant import DownloadMode, Tasks + + +# Load configuration file and dataset +@dataclass(init=False) +class StableDiffusionXLLoraArguments(TrainingArgs): + prompt: 
str = field( + default='dog', metadata={ + 'help': 'The pipeline prompt.', + }) + + lora_rank: int = field( + default=16, + metadata={ + 'help': 'The rank size of lora intermediate linear.', + }) + + +training_args = StableDiffusionXLLoraArguments( + task='text-to-image-synthesis').parse_cli() +config, args = training_args.to_config() + +if os.path.exists(args.train_dataset_name): + # Load local dataset + train_dataset = MsDataset.load(args.train_dataset_name) + validation_dataset = MsDataset.load(args.train_dataset_name) +else: + # Load online dataset + train_dataset = MsDataset.load( + args.train_dataset_name, + split='train', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + validation_dataset = MsDataset.load( + args.train_dataset_name, + split='validation', + download_mode=DownloadMode.FORCE_REDOWNLOAD) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': lambda _: 1, + 'last_epoch': -1 + } + return cfg + + +kwargs = dict( + model=training_args.model, + model_revision=args.model_revision, + work_dir=training_args.work_dir, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + lora_rank=args.lora_rank, + cfg_modify_fn=cfg_modify_fn) + +# build trainer and training +trainer = build_trainer(name=Trainers.lora_diffusion_xl, default_args=kwargs) +trainer.train() + +# pipeline after training and save result +pipe = pipeline( + task=Tasks.text_to_image_synthesis, + model=training_args.model, + lora_dir=training_args.work_dir + '/output', + model_revision=args.model_revision) + +output = pipe({'text': args.prompt}) +# visualize the result on ipynb and save it +output +cv2.imwrite('./lora_xl_result.png', output['output_imgs'][0]) diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/run_train_xl_lora.sh b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/run_train_xl_lora.sh new file mode 100644 
index 0000000000000000000000000000000000000000..fc7704a490009b62388a26f30851cc81d938ce3b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/lora_xl/run_train_xl_lora.sh @@ -0,0 +1,14 @@ +PYTHONPATH=. torchrun examples/pytorch/stable_diffusion/lora_xl/finetune_stable_diffusion_xl_lora.py \ + --model 'AI-ModelScope/stable-diffusion-xl-base-1.0' \ + --model_revision 'v1.0.2' \ + --prompt "a dog" \ + --work_dir './tmp/lora_diffusion_xl' \ + --train_dataset_name 'buptwq/lora-stable-diffusion-finetune' \ + --max_epochs 100 \ + --lora_rank 16 \ + --save_ckpt_strategy 'by_epoch' \ + --logging_interval 1 \ + --train.dataloader.workers_per_gpu 0 \ + --evaluation.dataloader.workers_per_gpu 0 \ + --train.optimizer.lr 1e-4 \ + --use_model_config true diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/tutorial.ipynb b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/tutorial.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..941b4e7680e6f6df20d912ca2b3016cb181e91bc --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/stable_diffusion/tutorial.ipynb @@ -0,0 +1,83 @@ +{ + "cells": [ + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Modelscope微调Stable Diffusion教程\n", + "## 原理讲解\n", + "\n", + "从头训练stable diffusion需要数十万美元和一个月以上的时间,巨额的算力和时间成本让普通人难以承受。因此最理想的手段是利用开源的stable diffusion模型,通过微调手段定制化属于自己的模型。近年涌现出很多有效的微调stable diffusion手段,如[Textual Inversion](https://arxiv.org/abs/2208.01618)、[Dreambooth](https://arxiv.org/pdf/2208.12242.pdf)、[Lora](https://arxiv.org/abs/2106.09685)、[Custom Diffusion](https://arxiv.org/pdf/2302.05543.pdf)等,Modelscope目前已经支持了Dreambooth和Lora两种方法。\n", + "\n", + "### Dreambooth\n", + "如果我们直接使用几张图片微调Stable Diffusion模型,很容易陷入“过拟合”的状态,通常的表现为模型生成的结果同质化且损失了泛化能力。除此之外,还容易遇到语言漂移的问题,严重影响了模型性能。Dreambooth提出了重建损失和特定类别先验保留损失相结合的方法来解决这一问题。\n", + "\n", + "### Lora\n", + "Lora的全称是Low-Rank Adaptation,是一种低阶自适应技术。这项技术起源于微调大型语言模型,在stable 
diffusion上也能取得非常好的效果。因为大模型是一般是过参数化的,它们有更小的内在维度,Lora模型主要依赖于这个低的内在维度去做任务适配。通过低秩分解(先降维再升维)来模拟参数的改变量,从而以极小的参数量来实现大模型的间接训练。\n", + "\n", + "如下图所示,Lora在原先的模型层中并行插入了可训练的排序分解矩阵层,这个矩阵层是由一个降维矩阵A和一个升维矩阵B组成的。降维矩阵A采用高斯分布初始化,升维矩阵B初始化为全0,保证训练开始时旁路为0矩阵。在训练的时候原模型固定,只训练降维矩阵A和升维矩阵B;在推理的时候,将矩阵层加到原参数上。大量实验表明,对于stable diffusion我们用Lora微调Unet网络注意力层可以取得良好的效果。\n", + "\n", + "## 动手实践\n", + "\n", + "首先我们需要下载代码和安装环境。" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "plaintext" + } + }, + "outputs": [], + "source": [ + "git clone https://github.com/modelscope/modelscope.git\n", + "cd modelscope" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "然后我们执行脚本,开始dreambooth和lora的训练和推理。" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "plaintext" + } + }, + "outputs": [], + "source": [ + "bash examples/pytorch/stable_diffusion/dreambooth/run_train_dreambooth.sh" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "vscode": { + "languageId": "plaintext" + } + }, + "outputs": [], + "source": [ + "bash examples/pytorch/stable_diffusion/lora/run_train_lora.sh" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + }, + "orig_nbformat": 4 + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/finetune_text_classification.py b/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/finetune_text_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..111243f6626cde31fb1eba3ac8df47cb088c62f6 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/finetune_text_classification.py @@ -0,0 +1,110 @@ +import os +from dataclasses import dataclass, field + +from modelscope import (EpochBasedTrainer, MsDataset, TrainingArgs, + build_dataset_from_file) +from modelscope.trainers import 
build_trainer + + +def set_labels(labels): + if isinstance(labels, str): + label_list = labels.split(',') + else: + unique_labels = set(labels) + label_list = list(unique_labels) + label_list.sort() + label_list = list( + map(lambda x: x if isinstance(x, str) else str(x), label_list)) + return {label: id for id, label in enumerate(label_list)} + + +@dataclass(init=False) +class TextClassificationArguments(TrainingArgs): + + first_sequence: str = field( + default=None, + metadata={ + 'help': 'The first sequence key of preprocessor', + 'cfg_node': 'preprocessor.first_sequence' + }) + + second_sequence: str = field( + default=None, + metadata={ + 'help': 'The second sequence key of preprocessor', + 'cfg_node': 'preprocessor.second_sequence' + }) + + label: str = field( + default=None, + metadata={ + 'help': 'The label key of preprocessor', + 'cfg_node': 'preprocessor.label' + }) + + labels: str = field( + default=None, + metadata={ + 'help': 'The labels of the dataset', + 'cfg_node': 'preprocessor.label2id', + 'cfg_setter': set_labels, + }) + + preprocessor: str = field( + default=None, + metadata={ + 'help': 'The preprocessor type', + 'cfg_node': 'preprocessor.type' + }) + + +training_args = TextClassificationArguments().parse_cli() +config, args = training_args.to_config() + +print(config, args) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + if training_args.labels is None: + labels = train_dataset[training_args.label] + validation_dataset[ + training_args.label] + cfg.merge_from_dict({'preprocessor.label2id': set_labels(labels)}) + cfg.model['num_labels'] = len(cfg.preprocessor.label2id) + if cfg.evaluation.period.eval_strategy == 'by_epoch': + cfg.evaluation.period.by_epoch = True + if cfg.train.lr_scheduler.type == 'LinearLR': + cfg.train.lr_scheduler['total_iters'] = \ + int(len(train_dataset) / cfg.train.dataloader.batch_size_per_gpu) * cfg.train.max_epochs + return cfg + + +if 
args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace) + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace) +else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) + +kwargs = dict( + model=args.model, + model_revision=args.model_revision, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + seed=args.seed, + cfg_modify_fn=cfg_modify_fn) + +os.environ['LOCAL_RANK'] = str(args.local_rank) +trainer: EpochBasedTrainer = build_trainer(name='trainer', default_args=kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..e05c71bcf8e217d060fb5d678b7860ee2519ace9 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_classification/run_train.sh @@ -0,0 +1,23 @@ +PYTHONPATH=. 
python examples/pytorch/text_classification/finetune_text_classification.py \ + --task 'text-classification' \ + --model 'damo/nlp_structbert_backbone_base_std' \ + --train_dataset_name 'clue' \ + --val_dataset_name 'clue' \ + --train_subset_name 'tnews' \ + --val_subset_name 'tnews' \ + --train_split 'train' \ + --val_split 'validation' \ + --first_sequence 'sentence' \ + --label label \ + --preprocessor 'sen-cls-tokenizer' \ + --use_model_config True \ + --max_epochs 1 \ + --per_device_train_batch_size 16 \ + --per_device_eval_batch_size 16 \ + --eval_interval 100 \ + --eval_strategy by_step \ + --work_dir './tmp' \ + --train_data_worker 0 \ + --eval_data_worker 0 \ + --lr 1e-5 \ + --eval_metrics 'seq-cls-metric' \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/finetune_text_generation.py b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/finetune_text_generation.py new file mode 100644 index 0000000000000000000000000000000000000000..588d83f3b1b2611ad5db021f9aa5c479a5b0977b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/finetune_text_generation.py @@ -0,0 +1,124 @@ +from dataclasses import dataclass, field + +from modelscope import (EpochBasedTrainer, MsDataset, TrainingArgs, + build_dataset_from_file) +from modelscope.metainfo import Trainers +from modelscope.trainers import build_trainer + + +@dataclass(init=False) +class TextGenerationArguments(TrainingArgs): + + trainer: str = field( + default=Trainers.default, metadata={ + 'help': 'The trainer used', + }) + + work_dir: str = field( + default='./tmp', + metadata={ + 'help': 'The working path for saving checkpoint', + }) + + src_txt: str = field( + default=None, + metadata={ + 'help': 'The source text key of preprocessor', + 'cfg_node': 'preprocessor.src_txt' + }) + + tgt_txt: str = field( + default=None, + metadata={ + 'help': 'The target text key of preprocessor', + 'cfg_node': 'preprocessor.tgt_txt' + }) + + preprocessor: str = field( + default=None, 
+ metadata={ + 'help': 'The preprocessor type', + 'cfg_node': 'preprocessor.type' + }) + + lr_scheduler: str = field( + default=None, + metadata={ + 'help': 'The lr scheduler type', + 'cfg_node': 'train.lr_scheduler.type' + }) + + world_size: int = field( + default=None, + metadata={ + 'help': 'The parallel world size', + 'cfg_node': 'megatron.world_size' + }) + + tensor_model_parallel_size: int = field( + default=None, + metadata={ + 'help': 'The tensor model parallel size', + 'cfg_node': 'megatron.tensor_model_parallel_size' + }) + + use_megatron: bool = field( + default=None, metadata={ + 'help': 'Whether to use MegatronHook', + }) + + +def noam_lambda(current_step: int): + current_step += 1 + return min(current_step**(-0.5), current_step * 100**(-1.5)) + + +config, args = TextGenerationArguments().parse_cli().to_config() +print(config, args) + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + if cfg.train.lr_scheduler.type == 'noam': + cfg.train.lr_scheduler = { + 'type': 'LambdaLR', + 'lr_lambda': noam_lambda, + 'options': { + 'by_epoch': False + } + } + if args.use_megatron: + cfg.train.hooks.append({'type': 'MegatronHook'}) + return cfg + + +if args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split=args.train_split, + namespace=args.train_dataset_namespace) + validation_dataset = MsDataset.load( + args.val_dataset_name, + subset_name=args.val_subset_name, + split=args.val_split, + namespace=args.val_dataset_namespace) +else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) + +kwargs = dict( + model=args.model, + model_revision=args.model_revision, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + seed=args.seed, + work_dir=args.work_dir, + cfg_modify_fn=cfg_modify_fn) + +trainer: EpochBasedTrainer = build_trainer( + name=args.trainer, default_args=kwargs) 
+trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_gpt3.sh b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_gpt3.sh new file mode 100644 index 0000000000000000000000000000000000000000..785219516e290beef332265c7ddc1a52d0fd3409 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_gpt3.sh @@ -0,0 +1,27 @@ +DATA_PARALLEL_SIZE=2 +TENSOR_MODEL_PARALLEL_SIZE=2 + +WORLD_SIZE=$(($DATA_PARALLEL_SIZE * $TENSOR_MODEL_PARALLEL_SIZE)) + + +PYTHONPATH=. torchrun --nproc_per_node $WORLD_SIZE examples/pytorch/text_generation/finetune_text_generation.py \ + --trainer 'nlp-gpt3-trainer' \ + --work_dir './tmp' \ + --model 'damo/nlp_gpt3_text-generation_1.3B' \ + --train_dataset_name 'chinese-poetry-collection' \ + --val_dataset_name 'chinese-poetry-collection' \ + --train_split 'train' \ + --val_split 'test' \ + --preprocessor 'text-gen-jieba-tokenizer' \ + --src_txt 'text1' \ + --tgt_txt 'text2' \ + --max_epochs 3 \ + --per_device_train_batch_size 16 \ + --lr 3e-4 \ + --lr_scheduler 'noam' \ + --eval_metrics 'ppl' \ + --world_size $WORLD_SIZE \ + --tensor_model_parallel_size $TENSOR_MODEL_PARALLEL_SIZE \ + --use_megatron true \ + --use_model_config true \ + # --train_dataset_name 'DuReader_robust-QG' \ # input&output diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_mt5.sh b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_mt5.sh new file mode 100644 index 0000000000000000000000000000000000000000..b2d0bbf1ad3a3391cf4b00decf10c0314f2e287c --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_mt5.sh @@ -0,0 +1,16 @@ +PYTHONPATH=. 
torchrun examples/pytorch/text_generation/finetune_text_generation.py \ + --trainer 'text-generation-trainer' \ + --work_dir './tmp' \ + --task 'text2text-generation' \ + --model 'damo/nlp_mt5_zero-shot-augment_chinese-base' \ + --train_dataset_name 'DuReader_robust-QG' \ + --val_dataset_name 'DuReader_robust-QG' \ + --train_split 'train' \ + --val_split 'validation' \ + --src_txt 'text1' \ + --tgt_txt 'text2' \ + --max_epochs 1 \ + --use_model_config True \ + --per_device_train_batch_size 8 \ + --lr 1e-3 \ + --lr_scheduler 'noam' \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_palm.sh b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_palm.sh new file mode 100644 index 0000000000000000000000000000000000000000..061538125f2fc5c84f068953cd885d2a326bbe6d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/text_generation/run_train_palm.sh @@ -0,0 +1,15 @@ +PYTHONPATH=. torchrun examples/pytorch/text_generation/finetune_text_generation.py \ + --trainer 'text-generation-trainer' \ + --work_dir './tmp' \ + --model 'damo/nlp_palm2.0_pretrained_chinese-base' \ + --train_dataset_name 'DuReader_robust-QG' \ + --val_dataset_name 'DuReader_robust-QG' \ + --train_split 'train' \ + --val_split 'validation' \ + --src_txt 'text1' \ + --tgt_txt 'text2' \ + --max_epochs 1 \ + --use_model_config True \ + --per_device_train_batch_size 8 \ + --lr 1e-3 \ + --lr_scheduler 'noam' \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/finetune_token_classification.py b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/finetune_token_classification.py new file mode 100644 index 0000000000000000000000000000000000000000..3f9de79177e9673ec118655373e5c203e79e1dad --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/finetune_token_classification.py @@ -0,0 +1,129 @@ +from dataclasses import dataclass, field + +from modelscope import (EpochBasedTrainer, MsDataset, TrainingArgs, + 
build_dataset_from_file) + + +@dataclass(init=False) +class TokenClassificationArguments(TrainingArgs): + trainer: str = field( + default=None, metadata={ + 'help': 'The trainer used', + }) + + work_dir: str = field( + default='./tmp', + metadata={ + 'help': 'The working path for saving checkpoint', + }) + + preprocessor: str = field( + default=None, + metadata={ + 'help': 'The preprocessor type', + 'cfg_node': 'preprocessor.type' + }) + + preprocessor_padding: str = field( + default=None, + metadata={ + 'help': 'The preprocessor padding', + 'cfg_node': 'preprocessor.padding' + }) + + mode: str = field( + default='inference', + metadata={ + 'help': 'The preprocessor padding', + 'cfg_node': 'preprocessor.mode' + }) + + first_sequence: str = field( + default=None, + metadata={ + 'cfg_node': 'preprocessor.first_sequence', + 'help': 'The parameters for train dataset', + }) + + label: str = field( + default=None, + metadata={ + 'cfg_node': 'preprocessor.label', + 'help': 'The parameters for train dataset', + }) + + sequence_length: int = field( + default=128, + metadata={ + 'cfg_node': 'preprocessor.sequence_length', + 'help': 'The parameters for train dataset', + }) + + +training_args = TokenClassificationArguments().parse_cli() +config, args = training_args.to_config() +print(args) + + +def get_label_list(labels): + unique_labels = set() + for label in labels: + unique_labels = unique_labels | set(label) + label_list = list(unique_labels) + label_list.sort() + return label_list + + +def cfg_modify_fn(cfg): + if args.use_model_config: + cfg.merge_from_dict(config) + else: + cfg = config + labels = train_dataset[training_args.label] + validation_dataset[ + training_args.label] + label_enumerate_values = get_label_list(labels) + cfg.merge_from_dict({ + 'preprocessor.label2id': + {label: id + for id, label in enumerate(label_enumerate_values)} + }) + cfg.merge_from_dict({'model.num_labels': len(label_enumerate_values)}) + cfg.merge_from_dict({'preprocessor.use_fast': 
True}) + cfg.merge_from_dict({ + 'evaluation.metrics': { + 'type': 'token-cls-metric', + 'label2id': + {label: id + for id, label in enumerate(label_enumerate_values)} + } + }) + if cfg.train.lr_scheduler.type == 'LinearLR': + cfg.train.lr_scheduler['total_iters'] = \ + int(len(train_dataset) / cfg.train.dataloader.batch_size_per_gpu) * cfg.train.max_epochs + return cfg + + +if args.dataset_json_file is None: + train_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split='train', + namespace=args.train_dataset_namespace)['train'] + validation_dataset = MsDataset.load( + args.train_dataset_name, + subset_name=args.train_subset_name, + split='validation', + namespace=args.train_dataset_namespace)['validation'] +else: + train_dataset, validation_dataset = build_dataset_from_file( + args.dataset_json_file) + +kwargs = dict( + model=args.model, + train_dataset=train_dataset, + eval_dataset=validation_dataset, + work_dir=args.work_dir, + cfg_modify_fn=cfg_modify_fn) + +trainer = EpochBasedTrainer(**kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_mgeo.sh b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_mgeo.sh new file mode 100644 index 0000000000000000000000000000000000000000..1e384ec518dc2e87711be0e825c368f7ca4dc759 --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_mgeo.sh @@ -0,0 +1,22 @@ +PYTHONPATH=. 
python examples/pytorch/token_classification/finetune_token_classification.py \ + --task 'token-classification' \ + --trainer 'nlp-base-trainer' \ + --work_dir './tmp' \ + --model 'damo/mgeo_backbone_chinese_base' \ + --train_dataset_name 'GeoGLUE' \ + --train_subset_name 'GeoETA' \ + --train_dataset_namespace 'damo' \ + --first_sequence 'tokens' \ + --eval_strategy by_step \ + --eval_interval 10 \ + --label 'ner_tags' \ + --sequence_length 128 \ + --preprocessor 'token-cls-tokenizer' \ + --preprocessor_padding 'max_length' \ + --max_epochs 1 \ + --mode 'inference' \ + --use_model_config True \ + --per_device_train_batch_size 32 \ + --train_data_worker 0 \ + --eval_data_worker 0 \ + --lr 3e-5 \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_structbert.sh b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_structbert.sh new file mode 100644 index 0000000000000000000000000000000000000000..a44c4519d36191f0cb0ab900e7fdb748c60af99d --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/token_classification/run_train_structbert.sh @@ -0,0 +1,22 @@ +PYTHONPATH=. 
python examples/pytorch/token_classification/finetune_token_classification.py \ + --task 'token-classification' \ + --trainer 'nlp-base-trainer' \ + --work_dir './tmp' \ + --model 'damo/nlp_structbert_backbone_base_std' \ + --train_dataset_name 'GeoGLUE' \ + --train_subset_name 'GeoETA' \ + --train_dataset_namespace 'damo' \ + --first_sequence 'tokens' \ + --eval_strategy by_step \ + --eval_interval 20 \ + --label 'ner_tags' \ + --sequence_length 128 \ + --preprocessor 'token-cls-tokenizer' \ + --preprocessor_padding 'max_length' \ + --max_epochs 2 \ + --mode 'inference' \ + --use_model_config True \ + --per_device_train_batch_size 32 \ + --train_data_worker 0 \ + --eval_data_worker 0 \ + --lr 3e-5 \ diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/transformers/finetune_transformers_model.py b/AI/modelscope/1.10.0/8/examples/pytorch/transformers/finetune_transformers_model.py new file mode 100644 index 0000000000000000000000000000000000000000..5110f75178967f785f939d311ba6087fcf6d88ef --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/transformers/finetune_transformers_model.py @@ -0,0 +1,75 @@ +import os +from dataclasses import dataclass, field + +from datasets import load_dataset +from transformers import (BertForSequenceClassification, BertTokenizerFast, + default_data_collator) + +from modelscope import TrainingArgs +from modelscope.trainers import EpochBasedTrainer, build_trainer + + +@dataclass(init=False) +class TransformersArguments(TrainingArgs): + + num_labels: int = field( + default=None, metadata={ + 'help': 'The number of labels', + }) + + sentence: str = field( + default=None, metadata={ + 'help': 'The sentence key', + }) + + label: str = field( + default=None, metadata={ + 'help': 'The label key', + }) + + +training_args = TransformersArguments( + task='text-classification', eval_metrics='seq-cls-metric').parse_cli() +config, args = training_args.to_config() + +print(config, args) + +train_dataset = load_dataset( + 
args.train_dataset_name, args.train_subset_name, split=args.train_split) +val_dataset = load_dataset( + args.val_dataset_name, args.val_subset_name, split=args.val_split) + +model = BertForSequenceClassification.from_pretrained( + args.model, num_labels=args.num_labels) +tokenizer = BertTokenizerFast.from_pretrained(args.model) + + +def tokenize_sentence(row): + return tokenizer( + row[training_args.sentence], padding='max_length', max_length=128) + + +# Extra columns, Rename columns +train_dataset = train_dataset.map(tokenize_sentence) +val_dataset = val_dataset.map(tokenize_sentence) +if training_args.label != 'labels': + train_dataset = train_dataset.rename_columns( + {training_args.label: 'labels'}) + val_dataset = val_dataset.rename_columns({training_args.label: 'labels'}) + +cfg_file = os.path.join(args.work_dir or './', 'configuration.json') +config.dump(cfg_file) + +kwargs = dict( + model=model, + cfg_file=cfg_file, + # data_collator + data_collator=default_data_collator, + train_dataset=train_dataset, + eval_dataset=val_dataset, + remove_unused_data=True, + seed=args.seed) + +os.environ['LOCAL_RANK'] = str(args.local_rank) +trainer: EpochBasedTrainer = build_trainer(name='trainer', default_args=kwargs) +trainer.train() diff --git a/AI/modelscope/1.10.0/8/examples/pytorch/transformers/run_train.sh b/AI/modelscope/1.10.0/8/examples/pytorch/transformers/run_train.sh new file mode 100644 index 0000000000000000000000000000000000000000..94e5ef75298f10124da8ba7b9cfc762af447542b --- /dev/null +++ b/AI/modelscope/1.10.0/8/examples/pytorch/transformers/run_train.sh @@ -0,0 +1,14 @@ +PYTHONPATH=. 
python examples/pytorch/transformers/finetune_transformers_model.py \ + --model bert-base-uncased \ + --num_labels 15 \ + --train_dataset_name clue \ + --train_subset_name tnews \ + --train_split train \ + --val_dataset_name clue \ + --val_subset_name tnews \ + --train_split train \ + --val_split validation \ + --sentence sentence \ + --label label \ + --eval_strategy by_step \ + --eval_interval 100 diff --git a/AI/modelscope/1.10.0/8/git b/AI/modelscope/1.10.0/8/git new file mode 100755 index 0000000000000000000000000000000000000000..21c9c197ec832e3149feb9c5c905d8656554cd84 --- /dev/null +++ b/AI/modelscope/1.10.0/8/git @@ -0,0 +1,20 @@ +#!/bin/bash +# Retry wrapper around git: network operations during image builds are flaky, +# so retry failed git invocations before giving up. + +REALGIT=/usr/bin/git + +RETRIES=50 +DELAY=1 +COUNT=1 +while [ $COUNT -lt $RETRIES ]; do +  # "$@" keeps each original argument intact; $* would word-split args containing spaces. +  $REALGIT "$@" +  if [ $? -eq 0 ]; then +    exit 0 +  fi +  let COUNT=$COUNT+1 +  sleep $DELAY +done +# Every retry failed: propagate failure instead of exiting with sleep's status (0). +exit 1 \ No newline at end of file diff --git a/AI/modelscope/1.10.0/8/requirements/audio.txt b/AI/modelscope/1.10.0/8/requirements/audio.txt new file mode 100644 index 0000000000000000000000000000000000000000..331c334b2ed7c4438098c263e4459504f7f79bc4 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/audio.txt @@ -0,0 +1,4 @@ +-r audio/audio_asr.txt +-r audio/audio_kws.txt +-r audio/audio_signal.txt +-r audio/audio_tts.txt diff --git a/AI/modelscope/1.10.0/8/requirements/audio/audio_asr.txt b/AI/modelscope/1.10.0/8/requirements/audio/audio_asr.txt new file mode 100644 index 0000000000000000000000000000000000000000..f7b1eaea97d01577617b4fccf92a3f86ef07c022 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/audio/audio_asr.txt @@ -0,0 +1 @@ +funasr>=0.6.5 diff --git a/AI/modelscope/1.10.0/8/requirements/audio/audio_kws.txt b/AI/modelscope/1.10.0/8/requirements/audio/audio_kws.txt new file mode 100644 index 0000000000000000000000000000000000000000..276a0a2f598f64272132d20f210d55c589b7b8af --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/audio/audio_kws.txt @@ -0,0 +1,7 @@ +kaldiio +kwsbp>=0.0.6 +matplotlib 
+py_sound_connect>=0.1 +scipy +SoundFile>0.10 +tensorboardX diff --git a/AI/modelscope/1.10.0/8/requirements/audio/audio_signal.txt b/AI/modelscope/1.10.0/8/requirements/audio/audio_signal.txt new file mode 100644 index 0000000000000000000000000000000000000000..023fbbdf832d95ee1aaf21e9bd1b9b68c981e9fd --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/audio/audio_signal.txt @@ -0,0 +1,12 @@ +hdbscan +hyperpyyaml +librosa==0.9.2 +MinDAEC +mir_eval>=0.7 +rotary_embedding_torch>=0.1.5 +scipy +SoundFile>0.10 +speechbrain>=0.5.12 +torchaudio +tqdm +umap-learn diff --git a/AI/modelscope/1.10.0/8/requirements/audio/audio_tts.txt b/AI/modelscope/1.10.0/8/requirements/audio/audio_tts.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b33f02f5795be9232cfbdaa10f55981008e8789 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/audio/audio_tts.txt @@ -0,0 +1,28 @@ +bitstring +greenlet>=1.1.2 +inflect +jedi>=0.18.1 +kantts +librosa==0.9.2 +lxml +matplotlib +msgpack>=1.0.4 +parso>=0.8.3 +pexpect>=4.8.0 +pickleshare>=0.7.5 +prompt-toolkit>=3.0.30 +protobuf +ptflops +ptyprocess>=0.7.0 +pygments>=2.12.0 +pysptk>=0.1.15,<0.1.19 +pytorch_wavelets +PyWavelets>=1.0.0 +scikit-learn +sox +tensorboardx +tqdm +traitlets>=5.3.0 +ttsfrd>=0.1.2 +unidecode +wcwidth>=0.2.5 diff --git a/AI/modelscope/1.10.0/8/requirements/cv.txt b/AI/modelscope/1.10.0/8/requirements/cv.txt new file mode 100644 index 0000000000000000000000000000000000000000..ee9f5582021faa199b5b10b8d18502ba58d84b47 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/cv.txt @@ -0,0 +1,73 @@ +accelerate +albumentations>=1.0.3 +av>=9.2.0 +bmt_clipit>=1.0 +chumpy +clip>=1.0 +control_ldm +ddpm_guided_diffusion +diffusers +easydict +easyrobust +edit_distance +face_alignment>=1.3.5 +fairscale>=0.4.1 +fastai>=1.0.51 +ffmpeg>=1.4 +ffmpeg-python>=0.2.0 +ftfy +fvcore +healpy +imageio>=2.9.0 +imageio-ffmpeg>=0.4.2 +imgaug>=0.4.0 +kornia>=0.5.0 +lap +lmdb +lpips +ml_collections +mmcls>=0.21.0 
+mmdet>=2.25.0,<=2.28.2 +# mmdet3d-1.0.0rc6 remove networkx and numba version restriction +mmdet3d==1.0.0a1 +mmsegmentation<=0.30.0 +moviepy>=1.0.3 +nerfacc==0.2.2 +networkx +numba +omegaconf +onnx +onnxruntime>=1.10 +onnxsim +open-clip-torch>=2.7.0 +opencv-python +paint_ldm +pandas +panopticapi +plyfile>=0.7.4 +psutil +pyclipper +PyMCubes +pytorch-lightning +regex +# <0.20.0 for compatible python3.7 python3.8 +scikit-image>=0.19.3,<0.20.0 +scikit-learn>=0.20.1 +shapely +shotdetect_scenedetect_lgss>=0.0.4 +smplx +tensorflow-estimator>=1.15.1 +tf_slim +thop +timm>=0.4.9 +torch-scatter +torchmetrics>=0.6.2 +torchsummary>=1.5.1 +torchvision +tqdm +transformers>=4.26.0 +trimesh +ujson +utils +videofeatures_clipit>=1.0 +yacs diff --git a/AI/modelscope/1.10.0/8/requirements/docs.txt b/AI/modelscope/1.10.0/8/requirements/docs.txt new file mode 100644 index 0000000000000000000000000000000000000000..3b3538351c05cbe6b973e26d3bf52bc14871a3a2 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/docs.txt @@ -0,0 +1,7 @@ +docutils>=0.16.0 +myst_parser +recommonmark +sphinx>=5.3.0 +sphinx-book-theme +sphinx-copybutton +sphinx_markdown_tables diff --git a/AI/modelscope/1.10.0/8/requirements/framework.txt b/AI/modelscope/1.10.0/8/requirements/framework.txt new file mode 100644 index 0000000000000000000000000000000000000000..83e69a004824744981f1eff7e5641a26b6037b39 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/framework.txt @@ -0,0 +1,22 @@ +addict +attrs +datasets>=2.8.0,<=2.13.0 +einops +filelock>=3.3.0 +gast>=0.2.2 +numpy +oss2 +pandas +Pillow>=6.2.0 +# pyarrow 9.0.0 introduced event_loop core dump +pyarrow>=6.0.0,!=9.0.0 +python-dateutil>=2.1 +pyyaml +requests>=2.25 +scipy +setuptools +simplejson>=3.3.0 +sortedcontainers>=1.5.9 +tqdm>=4.64.0 +urllib3>=1.26 +yapf diff --git a/AI/modelscope/1.10.0/8/requirements/multi-modal.txt b/AI/modelscope/1.10.0/8/requirements/multi-modal.txt new file mode 100644 index 
0000000000000000000000000000000000000000..59415bb092566d0830df7c66b0ef348b9e15b286 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/multi-modal.txt @@ -0,0 +1,29 @@ +accelerate +cloudpickle +decord>=0.6.0 +diffusers>=0.19.0 +fairseq +ftfy>=6.0.3 +librosa==0.9.2 +opencv-python +pycocoevalcap>=1.2 +pycocotools>=2.0.4 +pydot +# compatible with taming-transformers-rom1504 +pytorch_lightning<=1.7.7 +rapidfuzz +# rough-score was just recently updated from 0.0.4 to 0.0.7 +# which introduced compatability issues that are being investigated +rouge_score<=0.0.4 +sacrebleu +safetensors +# scikit-video +soundfile +taming-transformers-rom1504 +timm +tokenizers +torchvision +transformers>=4.27.1 +# triton==2.0.0.dev20221120 +unicodedata2 +zhconv diff --git a/AI/modelscope/1.10.0/8/requirements/nlp.txt b/AI/modelscope/1.10.0/8/requirements/nlp.txt new file mode 100644 index 0000000000000000000000000000000000000000..f69f869b83edf1c977f6a79d06657cbd23cfcbe8 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/nlp.txt @@ -0,0 +1,26 @@ +boto3 +embeddings +filelock +ftfy +jieba>=0.42.1 +matplotlib +megatron_util +nltk +pandas +# protobuf version beyond 3.20.0 is not compatible with TensorFlow 1.x, therefore is discouraged. 
+protobuf>=3.19.0,<3.21.0 +pythainlp +pyvi +regex +rouge +sacremoses>=0.0.41 +scikit_learn +sentencepiece +seqeval +spacy>=2.3.5 +stanza +subword_nmt>=0.3.8 +termcolor +tokenizers +transformers>=4.12.0 +zhconv diff --git a/AI/modelscope/1.10.0/8/requirements/science.txt b/AI/modelscope/1.10.0/8/requirements/science.txt new file mode 100644 index 0000000000000000000000000000000000000000..c30ff644539189e83e82dd322773cb2a21a14eed --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/science.txt @@ -0,0 +1,8 @@ +biopython +iopath +ipdb +lmdb +ml_collections +scipy +tensorboardX +tokenizers diff --git a/AI/modelscope/1.10.0/8/requirements/tensorflow1x.txt b/AI/modelscope/1.10.0/8/requirements/tensorflow1x.txt new file mode 100644 index 0000000000000000000000000000000000000000..5d68065203d53e5d5d921d6f01df4686e1b3d487 --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/tensorflow1x.txt @@ -0,0 +1 @@ +numpy<1.20.0 diff --git a/AI/modelscope/1.10.0/8/requirements/tests.txt b/AI/modelscope/1.10.0/8/requirements/tests.txt new file mode 100644 index 0000000000000000000000000000000000000000..5ec4df7e8f107223a5f25e90eddefddf0e15f7db --- /dev/null +++ b/AI/modelscope/1.10.0/8/requirements/tests.txt @@ -0,0 +1,5 @@ +expecttest +flake8 +isort>=4.3.21 +pre-commit +yapf==0.30.0 # use fix version to ensure consistent auto-styling diff --git a/AI/modelscope/buildspec.yml b/AI/modelscope/buildspec.yml new file mode 100644 index 0000000000000000000000000000000000000000..38bb6af32722032de0e24a865496c9eb6dbf22f3 --- /dev/null +++ b/AI/modelscope/buildspec.yml @@ -0,0 +1,39 @@ +name: &NAME +version: &VERSION +image_type: &IMAGE_TYPE +baseos_version: &BASEOS_VERSION + +# 定义镜像仓库信息 +repository_info: + dockerhub: &DOCKERHUB_PROD + acr: &ACR_PROD anolis-registry.cn-zhangjiakou.cr.aliyuncs.com/openanolis + +# 定义镜像测试信息 +t-one: + # 配置测试信息 workspace 和模版 + workspace: &WORKSPACE container_ci_test + project: &PROJECT default_container_ci_test + test_suite: &TEST_SUITE image-ci-test + # 执行测试 
case, 多个用数组表示 + test_conf: &TEST_CONF group=ai_container + test_case: &TEST_CASE pytorch_ai_container + cloud_server_tag: &CLOUD_SERVER_TAG [anolis-container-ci-x86] + +# 构建镜像配置 +images: + # 分组名称,支持自定义 + BuildModelscopeDockerImage: + # 定义是否构建参数 + build: true + platform: [linux/amd64] + docker_file: + path: AI/modelscope/1.10.0/8/Dockerfile + variable: + - &GPU [(USE_GPU, "True")] + scene: + args: [*GPU] + tags: + - [1.10.0-an8] + registry: [*ACR_PROD] + # 测试配置 + test_config: [*WORKSPACE, *PROJECT, *TEST_SUITE, *TEST_CONF, *TEST_CASE, *CLOUD_SERVER_TAG[0], ''] diff --git a/AI/modelscope/version.yml b/AI/modelscope/version.yml new file mode 100644 index 0000000000000000000000000000000000000000..8b10d1de3ed95651c9db14b45077993c5f95e0c4 --- /dev/null +++ b/AI/modelscope/version.yml @@ -0,0 +1,14 @@ +# 版本关系依赖表,默认继承 version-base.yml 配置,可重写覆盖 +BaseDependency: ../version-base.yml +Dependency: + name: modelscope + # training 训练类, inference 推理类 + image_type: training + versions: + 1.10.0: + # 对 AI 框架版本对 python 版本的要求 + python_version: [] + # gpu 对 cuda 版本的要求 + cuda_version: [] + # 对 baseos 的要求,*AnolisOS8.6 表示 Anolis8.6 + baseos_version: [*AnolisOS8.8] \ No newline at end of file