diff --git a/.gitignore b/.gitignore index b1e5a0abbe9b668d8d555e011fc383f62159810c..c0b4a0a78aedc2af75b41a46845cc09994a5216a 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,6 @@ *.tar.gz .idea/ db.sqlite3 -migrations/ node_modules/ /logs/* /.vscode/ @@ -21,3 +20,4 @@ sysom_api/sysom/settings.py venv/ sysomRelease*/ sysom_api/conf/ssh-key +.env diff --git a/script/server/0_env/init.sh b/script/server/0_env/init.sh new file mode 100755 index 0000000000000000000000000000000000000000..489ad140516ae2c11d5beece8fd57004b2faf2e3 --- /dev/null +++ b/script/server/0_env/init.sh @@ -0,0 +1,94 @@ +#!/bin/bash +#****************************************************************# +# ScriptName: init sysom env +# Author: huangtuquan +#***************************************************************# + +ALIYUN_MIRROR="https://mirrors.aliyun.com/pypi/simple/" +SERVER_DIR="sysom_server" +SDK_DIR=$SERVER_DIR/sdk + +VIRTUALENV_HOME="${SERVER_HOME}/virtualenv" +TARGET_PATH="${SERVER_HOME}/target" + +if [ "$UID" -ne 0 ]; then + echo "Please run as root" + exit 1 +fi + +mkdir -p ${SERVER_HOME} + +check_selinux_status() +{ + ###check selinux rpm### + rpm -qa | grep selinux-policy + if [ $? -eq 0 ] + then + cat /etc/selinux/config | grep "SELINUX=disabled" + if [ $? -eq 0 ] + then + echo "selinux disable..." + else + echo "selinux enable, please set selinux disable" + exit 1 + fi + else + echo "selinux rpm package not install" + fi +} + +touch_virtualenv() { + mkdir -p ~/.pip + cp pip.conf ~/.pip/ + if [ -d ${VIRTUALENV_HOME} ]; then + echo "virtualenv exists, skip" + else + virtualenv-3 ${VIRTUALENV_HOME} + if [ "$?" = 0 ]; then + echo "INFO: create virtualenv success" + else + echo "ERROR: create virtualenv failed" + exit 1 + fi + fi + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +check_requirements() { + echo "INFO: begin install requirements..." + + if ! [ -d ${SERVER_HOME}/logs/ ]; then + mkdir -p ${SERVER_HOME}/logs/ || exit 1 + fi + + local requirements_log="${SERVER_HOME}/logs/${APP_NAME}_requirements.log" + local requirements="requirements.txt" + touch "$requirements_log" || exit + ### atomic-0.7.3 need cffi, we show install cffi first### + pip install cffi + pip install -r ${requirements} -i "${ALIYUN_MIRROR}" |tee -a "${requirements_log}" || exit 1 + local pip_res=$? 
+ if [ $pip_res -ne 0 ]; then + echo "ERROR: requirements not satisfied and auto install failed, please check ${requirements_log}" + exit 1 + fi +} + +install_sdk() { + pushd ${TARGET_PATH}/${SDK_DIR} + python setup_cec_base.py develop + python setup_cec_redis.py develop + python setup_channel_job.py develop + sudo rm -r *.egg-info build dist + popd +} + +deploy() { + check_selinux_status + touch_virtualenv + check_requirements + install_sdk +} + +deploy diff --git a/script/server/0_sysom_api/pip.conf b/script/server/0_env/pip.conf similarity index 100% rename from script/server/0_sysom_api/pip.conf rename to script/server/0_env/pip.conf diff --git a/script/server/0_env/requirements.txt b/script/server/0_env/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..742dbc3e2618272ee4641daa6500eac351ef1c24 --- /dev/null +++ b/script/server/0_env/requirements.txt @@ -0,0 +1,34 @@ +cffi==1.15.1 +aiofiles==0.8.0 +alembic==1.7.7 +anyio==3.6.2 +asyncer==0.0.2 +asyncssh==2.12.0 +atomic==0.7.3 +autopep8==2.0.0 +channels==3.0.4 +django==3.2.16 +django-apscheduler==0.6.2 +django-cors-headers==3.10.1 +django-filter==21.1 +djangorestframework==3.14.0 +drf-yasg==1.21.4 +daphne==3.0.2 +fastapi==0.83.0 +gevent==22.10.2 +gunicorn==20.1.0 +loguru==0.6.0 +pandas==1.1.5 +paramiko==2.12.0 +prompt-toolkit==3.0.32 +PyJWT==2.4.0 +PyMySQL==1.0.2 +pytest-runner==5.3.2 +python-multipart==0.0.5 +rich==12.6.0 +requests==2.27.1 +redis==4.3.4 +schedule==1.1.0 +uvicorn==0.16.0 +wheel==0.37.1 +xlwt==1.3.0 diff --git a/script/server/0_env/start.sh b/script/server/0_env/start.sh new file mode 100755 index 0000000000000000000000000000000000000000..cd74dc28c6ea843562c41cc04b129abb37f4393f --- /dev/null +++ b/script/server/0_env/start.sh @@ -0,0 +1,9 @@ +#!/bin/bash +start_app() { + systemctl start mariadb.service + systemctl start nginx.service + systemctl start redis.service + systemctl start supervisord.service +} + +start_app diff --git a/script/server/0_local_services/clear.sh b/script/server/0_local_services/clear.sh new file mode 100755 index 0000000000000000000000000000000000000000..34ddbd61029e1dba52b5d06a0436fab158948b54 --- /dev/null +++ b/script/server/0_local_services/clear.sh @@ -0,0 +1,12 @@ +#!/bin/bash +clear_db() { + systemctl start mariadb.service + mysql -uroot -e "drop database if exists sysom;" + mysql -uroot -e "drop database if exists grafana;" +} + +clear_app() { + clear_db +} + +clear_app diff --git a/script/server/0_local_services/init.sh b/script/server/0_local_services/init.sh new file mode 100755 index 0000000000000000000000000000000000000000..f5938f5bcc871f44ba857f02b2b4146c704f4394 --- /dev/null +++ b/script/server/0_local_services/init.sh @@ -0,0 +1,34 @@ +#!/bin/bash +#****************************************************************# +# ScriptName: start local service +# Author: huangtuquan +#***************************************************************# +setup_database() { + echo "INFO: begin create db..." 
+ + systemctl restart mariadb.service + systemctl enable mariadb.service + mysql -uroot -e "create user if not exists 'sysom'@'%' identified by 'sysom_admin';" + mysql -uroot -e "grant usage on *.* to 'sysom'@'localhost' identified by 'sysom_admin'" + mysql -uroot -e "drop database if exists sysom;" + mysql -uroot -e "create database sysom character set utf8;" + mysql -uroot -e "create database grafana character set utf8;" + mysql -uroot -e "grant all privileges on sysom.* to 'sysom'@'%';" + mysql -uroot -e "grant all privileges on grafana.* to 'sysom'@'%';" + mysql -uroot -e "flush privileges;" +} + +start_app() { + systemctl enable nginx.service + systemctl enable redis.service + systemctl restart nginx.service + systemctl restart redis.service + systemctl start supervisord +} + +deploy() { + setup_database | tee -a ${SERVER_HOME}/logs/${APP_NAME}_setup_database.log 2>&1 + start_app +} + +deploy diff --git a/script/server/0_local_services/start.sh b/script/server/0_local_services/start.sh new file mode 100755 index 0000000000000000000000000000000000000000..a8d870b7d9a39d589db7fef0de78bf6c7f2ccf72 --- /dev/null +++ b/script/server/0_local_services/start.sh @@ -0,0 +1,8 @@ +#!/bin/bash +start_app() { + systemctl start mariadb.service + systemctl start nginx.service + systemctl start redis.service +} + +start_app diff --git a/script/server/0_local_services/stop.sh b/script/server/0_local_services/stop.sh new file mode 100755 index 0000000000000000000000000000000000000000..5ac5b924b8f1dae89da33f1525303d98126c2f87 --- /dev/null +++ b/script/server/0_local_services/stop.sh @@ -0,0 +1,8 @@ +#!/bin/bash +stop_app() { + systemctl stop nginx.service + systemctl stop redis.service + systemctl stop mariadb.service +} + +stop_app diff --git a/script/server/0_sysom_api/init.sh b/script/server/0_sysom_api/init.sh index 4d12cd5253af37ccd1416a8bc4f96e8b7affda47..9713b601c42593da37dba0a0024d8ed083f5debc 100755 --- a/script/server/0_sysom_api/init.sh +++ b/script/server/0_sysom_api/init.sh @@ -6,19 +6,11 @@ # Modify Date: 2021-11-16 00:02 # Function: deploy sysom #***************************************************************# - -ALIYUN_MIRROR="https://mirrors.aliyun.com/pypi/simple/" -APP_NAME="sysom" SERVER_DIR="sysom_server" -API_DIR=$SERVER_DIR/sysom_api -DIAGNOSIS_DIR=$SERVER_DIR/sysom_diagnosis -CHANNEL_DIR=$SERVER_DIR/sysom_channel -VMCORE_DIR=$SERVER_DIR/sysom_vmcore -SDK_DIR=$SERVER_DIR/sdk -WEB_DIR="sysom_web" - -VIRTUALENV_HOME="${SERVER_HOME}/virtualenv" -TARGET_PATH="${SERVER_HOME}/target" +API_DIR=${SERVER_DIR}/sysom_api +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +TARGET_PATH=${SERVER_HOME}/target +SERVICE_NAME=sysom-api if [ "$UID" -ne 0 ]; then echo "Please run as root" @@ -27,129 +19,37 @@ fi mkdir -p ${SERVER_HOME} -check_selinux_status() -{ - ###check selinux rpm### - rpm -qa | grep selinux-policy - if [ $? -eq 0 ] - then - cat /etc/selinux/config | grep "SELINUX=disabled" - if [ $? -eq 0 ] - then - echo "selinux disable..." - else - echo "selinux enable, please set selinux disable" - exit 1 - fi - else - echo "selinux rpm package not install" - fi -} - -touch_virtualenv() { - mkdir -p ~/.pip - cp pip.conf ~/.pip/ - if [ -d ${VIRTUALENV_HOME} ]; then - echo "virtualenv exists, skip" - else - virtualenv-3 ${VIRTUALENV_HOME} - if [ "$?" = 0 ]; then - echo "INFO: create virtualenv success" - else - echo "ERROR: create virtualenv failed" - exit 1 - fi - fi +source_virtualenv() { echo "INFO: activate virtualenv..." 
source ${VIRTUALENV_HOME}/bin/activate || exit 1 } -check_requirements() { - echo "INFO: begin install requirements..." - - if ! [ -d ${SERVER_HOME}/logs/ ]; then - mkdir -p ${SERVER_HOME}/logs/ || exit 1 - fi - - local requirements_log="${SERVER_HOME}/logs/${APP_NAME}_requirements.log" - local requirements="requirements.txt" - touch "$requirements_log" || exit - pip install pytest-runner cffi requests - pip install -r ${requirements} -i "${ALIYUN_MIRROR}" |tee -a "${requirements_log}" || exit 1 - local pip_res=$? - if [ $pip_res -ne 0 ]; then - echo "ERROR: requirements not satisfied and auto install failed, please check ${requirements_log}" - exit 1 - fi -} - -setup_database() { - echo "INFO: begin create db..." - - systemctl restart mariadb.service - systemctl enable mariadb.service - mysql -uroot -e "create user if not exists 'sysom'@'%' identified by 'sysom_admin';" - mysql -uroot -e "grant usage on *.* to 'sysom'@'localhost' identified by 'sysom_admin'" - mysql -uroot -e "drop database if exists sysom;" - mysql -uroot -e "create database sysom character set utf8;" - mysql -uroot -e "create database grafana character set utf8;" - mysql -uroot -e "grant all privileges on sysom.* to 'sysom'@'%';" - mysql -uroot -e "grant all privileges on grafana.* to 'sysom'@'%';" - mysql -uroot -e "flush privileges;" -} - init_conf() { mkdir -p /run/daphne pushd ${TARGET_PATH}/${API_DIR} - rm -f apps/*/migrations/00*.py - python manage.py makemigrations accounts - python manage.py makemigrations host - python manage.py makemigrations alarm - python manage.py makemigrations vul - python manage.py migrate - popd - - pushd ${TARGET_PATH}/${DIAGNOSIS_DIR} - rm -f apps/*/migrations/00*.py - python manage.py makemigrations task python manage.py migrate popd - pushd ${TARGET_PATH}/${CHANNEL_DIR} - alembic upgrade head - popd - - pushd ${TARGET_PATH}/${VMCORE_DIR} - rm -f apps/*/migrations/00*.py - python manage.py makemigrations vmcore - python manage.py migrate - popd -} - -install_sdk() { - pushd ${TARGET_PATH}/${SDK_DIR} - python setup_cec_base.py develop - python setup_cec_redis.py develop - python setup_channel_job.py develop - sudo rm -r *.egg-info build dist - popd + cp ${SERVICE_NAME}.ini /etc/supervisord.d/ + ###change the install dir base on param $1### + sed -i "s;/usr/local/sysom;${APP_HOME};g" /etc/supervisord.d/${SERVICE_NAME}.ini } start_app() { - systemctl enable nginx.service - systemctl enable redis.service - systemctl enable supervisord.service - systemctl restart nginx.service - systemctl restart redis.service - systemctl restart supervisord.service + ###if supervisor service started, we need use "supervisorctl update" to start new conf#### + supervisorctl update + supervisorctl status ${SERVICE_NAME}:0 + if [ $? -eq 0 ] + then + echo "${SERVICE_NAME} service start success..." 
+ return 0 + fi + echo "${SERVICE_NAME} service start fail, please check log" + exit 1 } deploy() { - check_selinux_status - touch_virtualenv - check_requirements - install_sdk - setup_database | tee -a ${SERVER_HOME}/logs/${APP_NAME}_setup_database.log 2>&1 + source_virtualenv init_conf start_app } diff --git a/script/server/0_sysom_api/requirements.txt b/script/server/0_sysom_api/requirements.txt deleted file mode 100644 index 0ca74de809957be5363f6df137ed3abd45a1a23e..0000000000000000000000000000000000000000 --- a/script/server/0_sysom_api/requirements.txt +++ /dev/null @@ -1,94 +0,0 @@ -aioredis==1.3.1 -asgiref==3.4.1 -async-timeout==4.0.2 -attrs==21.2.0 -autobahn>=21.2.1 -Automat==20.2.0 -autopep8==1.6.0 -bcrypt==3.2.0 -cairocffi>=1.2.0 -CairoSVG==2.5.2 -certifi==2022.6.15 -cffi==1.15.1 -channels==3.0.4 -channels-redis==3.3.1 -charset-normalizer==2.0.12 -constantly==15.1.0 -coreapi==2.3.3 -coreschema==0.0.4 -cryptography==35.0.0 -cssselect2==0.4.1 -daphne==3.0.2 -defusedxml==0.7.1 -Django==3.2.8 -django-apscheduler==0.6.0 -django-cors-headers==3.10.0 -django-filter==21.1 -django-redis==5.2.0 -djangorestframework==3.12.2 -drf-yasg==1.20.0 -hiredis==2.0.0 -hyperlink==21.0.0 -idna==3.3 -incremental==21.3.0 -inflection==0.5.1 -install==1.3.4 -itypes==1.2.0 -Jinja2==3.0.2 -MarkupSafe==2.0.1 -msgpack==1.0.2 -numpy>=1.19.5 -openpyxl==3.0.9 -packaging==21.0 -pandas>=1.1.5 -paramiko==2.8.0 -Pillow==8.4.0 -pyasn1==0.4.8 -pyasn1-modules==0.2.8 -pycodestyle==2.8.0 -pycparser==2.21 -PyJWT==2.4.0 -PyMySQL==1.0.2 -PyNaCl==1.4.0 -pyOpenSSL==21.0.0 -pyparsing==3.0.3 -python-dateutil==2.8.2 -pytz==2021.3 -pytz-deprecation-shim==0.1.0.post0 -redis==4.3.4 -requests==2.27.1 -ruamel.yaml==0.17.16 -ruamel.yaml.clib==0.2.6 -service-identity==21.1.0 -six==1.16.0 -sqlparse==0.4.2 -tinycss2==1.1.0 -toml==0.10.2 -Twisted==21.7.0 -txaio==21.2.1 -typing-extensions==4.1.1 -tzdata==2021.5 -tzlocal==4.1 -uritemplate==4.1.1 -urllib3==1.26.11 -webencodings==0.5.1 -xlrd==2.0.1 -xlwt==1.3.0 -zope.interface==5.4.0 -gevent==21.12.0 -greenlet==1.1.2 -gunicorn==20.1.0 -loguru==0.6.0 -prompt_toolkit==3.0.30 -rich==12.5.1 -APScheduler==3.9.1 -schedule==1.1.0 -atomic==0.7.3 -aiofiles==0.8.0 -fastapi==0.83.0 -SQLAlchemy==1.4.44 -uvicorn==0.16.0 -alembic==1.7.7 -asyncer==0.0.2 -asyncssh==2.12.0 -python-multipart==0.0.5 diff --git a/tools/deploy/sysom-api.ini b/script/server/0_sysom_api/sysom-api.ini similarity index 72% rename from tools/deploy/sysom-api.ini rename to script/server/0_sysom_api/sysom-api.ini index 27a44b5f1e58975a9627af4f3543c6300b544422..47bf18c34bdbbc97deaf50b34dd1044148ccf010 100644 --- a/tools/deploy/sysom-api.ini +++ b/script/server/0_sysom_api/sysom-api.ini @@ -3,10 +3,11 @@ socket=tcp://localhost:7001 directory=/usr/local/sysom/server/target/sysom_server/sysom_api command=/usr/local/sysom/server/virtualenv/bin/daphne -u /run/daphne%(process_num)d.sock --fd 0 --access-log - --proxy-headers sysom.asgi:application numprocs=4 -process_name=asgi%(process_num)d +process_name=%(process_num)d autostart=true autorestart=true redirect_stderr=true stopasgroup=true -stdout_logfile=/usr/local/sysom/server/logs/supervisor.log +stdout_logfile=/usr/local/sysom/server/logs/sysom-api.log +stderr_logfile=/usr/local/sysom/server/logs/sysom-api-error.log environment=PATH="/usr/local/sysom/server/virtualenv/bin" diff --git a/script/server/0_sysom_channel/clear.sh b/script/server/0_sysom_channel/clear.sh new file mode 100755 index 0000000000000000000000000000000000000000..591c691459b4da9ea235b199753d3c3759c9dbc3 --- /dev/null +++ 
b/script/server/0_sysom_channel/clear.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-channel +clear_app() { + supervisorctl stop $SERVICE_NAME + rm -rf /etc/supervisord.d/${SERVICE_NAME}.ini +} +clear_app diff --git a/script/server/0_sysom_channel/init.sh b/script/server/0_sysom_channel/init.sh new file mode 100755 index 0000000000000000000000000000000000000000..60df72b8c60d8a8929bec155d116d71b11c0651a --- /dev/null +++ b/script/server/0_sysom_channel/init.sh @@ -0,0 +1,47 @@ +#!/bin/bash +SERVER_DIR="sysom_server" +CHANNEL_DIR=${SERVER_DIR}/sysom_channel +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +TARGET_PATH=${SERVER_HOME}/target +SERVICE_NAME=sysom-channel + +if [ "$UID" -ne 0 ]; then + echo "Please run as root" + exit 1 +fi + +source_virtualenv() { + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +init_conf() { + pushd ${TARGET_PATH}/${CHANNEL_DIR} + alembic upgrade head + popd + + cp ${SERVICE_NAME}.ini /etc/supervisord.d/ + ###change the install dir base on param $1### + sed -i "s;/usr/local/sysom;${APP_HOME};g" /etc/supervisord.d/${SERVICE_NAME}.ini +} + +start_app() { + ###if supervisor service started, we need use "supervisorctl update" to start new conf#### + supervisorctl update + supervisorctl status ${SERVICE_NAME} + if [ $? -eq 0 ] + then + echo "supervisorctl start ${SERVICE_NAME} success..." + return 0 + fi + echo "${SERVICE_NAME} service start fail, please check log" + exit 1 +} + +deploy() { + source_virtualenv + init_conf + start_app +} + +deploy diff --git a/script/server/0_sysom_channel/start.sh b/script/server/0_sysom_channel/start.sh new file mode 100755 index 0000000000000000000000000000000000000000..080f5f53887041336cb003f0835fa1703c9dc225 --- /dev/null +++ b/script/server/0_sysom_channel/start.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-channel +start_app() { + supervisorctl start $SERVICE_NAME +} + +start_app diff --git a/script/server/0_sysom_channel/stop.sh b/script/server/0_sysom_channel/stop.sh new file mode 100755 index 0000000000000000000000000000000000000000..f58c6440843951eb4b3c97e56b157ad9977287c3 --- /dev/null +++ b/script/server/0_sysom_channel/stop.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-channel +stop_app() { + supervisorctl stop $SERVICE_NAME +} + +stop_app diff --git a/tools/deploy/sysom-channel.ini b/script/server/0_sysom_channel/sysom-channel.ini similarity index 66% rename from tools/deploy/sysom-channel.ini rename to script/server/0_sysom_channel/sysom-channel.ini index c9bddb4b20f7755e7e425c943c83ed4680aaf9ad..717eaf94bb39e415a1dbaa95a917dd1cec771abd 100644 --- a/tools/deploy/sysom-channel.ini +++ b/script/server/0_sysom_channel/sysom-channel.ini @@ -5,5 +5,5 @@ startsecs=3 autostart=true autorestart=true environment=PATH="/usr/local/sysom/server/virtualenv/bin/" -stderr_logfile=/usr/local/sysom/server/logs/supervisord-channel-error.log -stdout_logfile=/usr/local/sysom/server/logs/supervisord-channel.log +stderr_logfile=/usr/local/sysom/server/logs/sysom-channel-error.log +stdout_logfile=/usr/local/sysom/server/logs/sysom-channel.log diff --git a/script/server/2_vmcore/init_server.sh b/script/server/2_vmcore/init_server.sh index 418055f237bb03f52725968f02645369b223b721..55cfb290c66ee15ca775562d361419a81234f632 100644 --- a/script/server/2_vmcore/init_server.sh +++ b/script/server/2_vmcore/init_server.sh @@ -1,17 +1,69 @@ -#! /bin/sh -systemctl start rpcbind && systemctl enable rpcbind -systemctl start nfs && systemctl enable nfs -if [ $? 
-ne 0 ];then - systemctl start nfs-server && systemctl enable nfs-server -fi - -nfs_mask=`ip -4 route | grep "link src" | grep $SERVER_LOCAL_IP | awk '{print $1}' | head -n 1` -file_path=${SERVER_HOME}/vmcore/vmcore-nfs -mkdir -p ${file_path} -echo "${file_path} ${nfs_mask}(rw,async)" >> /etc/exports -exportfs -rv -chmod -R 777 ${file_path} - -cp parse_panic.py ${SERVER_HOME}/vmcore -cp vmcore_const.py ${SERVER_HOME}/vmcore -echo "* * * * * pushd ${SERVER_HOME}/vmcore;python3 parse_panic.py ${file_path} ${SERVER_PORT};popd" >> /var/spool/cron/root +#! /bin/bash +SERVER_DIR="sysom_server" +VMCORE_DIR=${SERVER_DIR}/sysom_vmcore +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +TARGET_PATH=${SERVER_HOME}/target +SERVICE_NAME=sysom-vmcore + +source_virtualenv() { + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +init_conf() { + pushd ${TARGET_PATH}/${VMCORE_DIR} + rm -f apps/*/migrations/00*.py + python manage.py makemigrations vmcore + python manage.py migrate + popd + + cp ${SERVICE_NAME}.ini /etc/supervisord.d/ + ###change the install dir base on param $1### + sed -i "s;/usr/local/sysom;${APP_HOME};g" /etc/supervisord.d/${SERVICE_NAME}.ini +} + +start_app() { + ###if supervisor service started, we need use "supervisorctl update" to start new conf#### + supervisorctl update + supervisorctl status ${SERVICE_NAME} + if [ $? -eq 0 ] + then + echo "supervisorctl start ${SERVICE_NAME} success..." + return 0 + fi + echo "${SERVICE_NAME} service start fail, please check log" + exit 1 +} + +start_nfs() +{ + systemctl start rpcbind && systemctl enable rpcbind + systemctl start nfs && systemctl enable nfs + if [ $? -ne 0 ];then + systemctl start nfs-server && systemctl enable nfs-server + fi + + nfs_mask=`ip -4 route | grep "link src" | grep $SERVER_LOCAL_IP | awk '{print $1}' | head -n 1` + file_path=${SERVER_HOME}/vmcore/vmcore-nfs + mkdir -p ${file_path} + echo "${file_path} ${nfs_mask}(rw,async)" >> /etc/exports + exportfs -rv + chmod -R 777 ${file_path} +} + +start_cron() +{ + cp parse_panic.py ${SERVER_HOME}/vmcore + cp vmcore_const.py ${SERVER_HOME}/vmcore + echo "* * * * * pushd ${SERVER_HOME}/vmcore;python3 parse_panic.py ${file_path} ${SERVER_PORT};popd" >> /var/spool/cron/root +} + +deploy() { + source_virtualenv + init_conf + start_app + start_nfs + start_cron +} + +deploy diff --git a/tools/deploy/sysom-vmcore.ini b/script/server/2_vmcore/sysom-vmcore.ini similarity index 69% rename from tools/deploy/sysom-vmcore.ini rename to script/server/2_vmcore/sysom-vmcore.ini index 05862c5dc309a7aadcc79549a6f4eb729b0f030c..b7421943bf048ea328ded57aeb10f585d9bb04bc 100644 --- a/tools/deploy/sysom-vmcore.ini +++ b/script/server/2_vmcore/sysom-vmcore.ini @@ -5,5 +5,5 @@ startsecs=3 autostart=true autorestart=true environment=PATH="/usr/local/sysom/server/virtualenv/bin/" -stderr_logfile=/usr/local/sysom/server/logs/supervisord-vmcore-error.log -stdout_logfile=/usr/local/sysom/server/logs/supervisord-vmcore.log +stderr_logfile=/usr/local/sysom/server/logs/sysom-vmcore-error.log +stdout_logfile=/usr/local/sysom/server/logs/sysom-vmcore.log diff --git a/script/server/3_sysom_diagnosis/clear.sh b/script/server/3_sysom_diagnosis/clear.sh new file mode 100755 index 0000000000000000000000000000000000000000..85ba870695dba9615f4700bfd8f0cc65fe42c39e --- /dev/null +++ b/script/server/3_sysom_diagnosis/clear.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-diagnosis +clear_app() { + supervisorctl stop $SERVICE_NAME + rm -rf /etc/supervisord.d/${SERVICE_NAME}.ini +} 
+clear_app diff --git a/script/server/3_sysom_diagnosis/init.sh b/script/server/3_sysom_diagnosis/init.sh new file mode 100755 index 0000000000000000000000000000000000000000..b63e939d84031c4bb032cee8ae1dc54990ef3fd0 --- /dev/null +++ b/script/server/3_sysom_diagnosis/init.sh @@ -0,0 +1,51 @@ +#!/bin/bash +SERVER_DIR="sysom_server" +TARGET_PATH=${SERVER_HOME}/target +DIAGNOSIS_DIR=${SERVER_DIR}/sysom_diagnosis +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +SERVICE_NAME=sysom-diagnosis + +if [ "$UID" -ne 0 ]; then + echo "Please run as root" + exit 1 +fi + +source_virtualenv() { + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +init_conf() { + pushd ${TARGET_PATH}/${DIAGNOSIS_DIR} + rm -f apps/*/migrations/00*.py + python manage.py makemigrations task + python manage.py migrate + popd + + cp ${SERVICE_NAME}.ini /etc/supervisord.d/ + ###change the install dir base on param $1### + sed -i "s;/usr/local/sysom;${APP_HOME};g" /etc/supervisord.d/${SERVICE_NAME}.ini + cpu_num=`cat /proc/cpuinfo | grep processor | wc -l` + sed -i "s/threads = 3/threads = $cpu_num/g" ${TARGET_PATH}/${DIAGNOSIS_DIR}/conf/diagnosis_gunicorn.py +} + +start_app() { + ###if supervisor service started, we need use "supervisorctl update" to start new conf#### + supervisorctl update + supervisorctl status ${SERVICE_NAME} + if [ $? -eq 0 ] + then + echo "supervisorctl start ${SERVICE_NAME} success..." + return 0 + fi + echo "${SERVICE_NAME} service start fail, please check log" + exit 1 +} + +deploy() { + source_virtualenv + init_conf + start_app +} + +deploy diff --git a/script/server/3_sysom_diagnosis/start.sh b/script/server/3_sysom_diagnosis/start.sh new file mode 100755 index 0000000000000000000000000000000000000000..025c5f297dc001dcb4b8630fd580b0dcb5c58b8b --- /dev/null +++ b/script/server/3_sysom_diagnosis/start.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-diagnosis +start_app() { + supervisorctl start $SERVICE_NAME +} + +start_app diff --git a/script/server/3_sysom_diagnosis/stop.sh b/script/server/3_sysom_diagnosis/stop.sh new file mode 100755 index 0000000000000000000000000000000000000000..587cefea1cc808bdf0b42c706af7f3f946492f51 --- /dev/null +++ b/script/server/3_sysom_diagnosis/stop.sh @@ -0,0 +1,7 @@ +#!/bin/bash +SERVICE_NAME=sysom-diagnosis +stop_app() { + supervisorctl stop $SERVICE_NAME +} + +stop_app diff --git a/tools/deploy/sysom-diagnosis.ini b/script/server/3_sysom_diagnosis/sysom-diagnosis.ini similarity index 68% rename from tools/deploy/sysom-diagnosis.ini rename to script/server/3_sysom_diagnosis/sysom-diagnosis.ini index a65a6aed0c82a567fe9bea8dea7a3df417c45ec9..6065104f78a63a51089df197765fc48a919fdffe 100644 --- a/tools/deploy/sysom-diagnosis.ini +++ b/script/server/3_sysom_diagnosis/sysom-diagnosis.ini @@ -5,5 +5,5 @@ startsecs=3 autostart=true autorestart=true environment=PATH="/usr/local/sysom/server/virtualenv/bin/" -stderr_logfile=/usr/local/sysom/server/logs/supervisord-diagnosis-error.log -stdout_logfile=/usr/local/sysom/server/logs/supervisord-diagnosis.log +stderr_logfile=/usr/local/sysom/server/logs/sysom-diagnosis-error.log +stdout_logfile=/usr/local/sysom/server/logs/sysom-diagnosis.log diff --git a/script/server/4_sysom_vul/clear.sh b/script/server/4_sysom_vul/clear.sh new file mode 100644 index 0000000000000000000000000000000000000000..bfbd0623fff6b25e549fb70d5fe3f5deb14d7f60 --- /dev/null +++ b/script/server/4_sysom_vul/clear.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +SERVICE_NAME=sysom-vul + + +clear_app() { + supervisorctl stop 
$SERVICE_NAME + rm -rf /etc/supervisord.d/${SERVICE_NAME}.ini +} + +clear_app diff --git a/script/server/4_sysom_vul/init.sh b/script/server/4_sysom_vul/init.sh new file mode 100644 index 0000000000000000000000000000000000000000..7494c1f67bd1dcbe9f9de282cca83f6a2422fc41 --- /dev/null +++ b/script/server/4_sysom_vul/init.sh @@ -0,0 +1,51 @@ +#!/bin/bash +SERVER_DIR="sysom_server" +TARGET_PATH=${SERVER_HOME}/target +VUL_DIR=${SERVER_DIR}/sysom_vul +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +SERVICE_NAME=sysom-vul + +if [ "$UID" -ne 0 ]; then + echo "Please run as root" + exit 1 +fi + +source_virtualenv() { + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +init_conf() { + pushd ${TARGET_PATH}/${VUL_DIR} + rm -f apps/*/migrations/00*.py + python manage.py makemigrations vul + python manage.py migrate + popd + + cp ${SERVICE_NAME}.ini /etc/supervisord.d/ + ###change the install dir base on param $1### + sed -i "s;/usr/local/sysom;${APP_HOME};g" /etc/supervisord.d/${SERVICE_NAME}.ini + cpu_num=`cat /proc/cpuinfo | grep processor | wc -l` + sed -i "s/threads = 3/threads = $cpu_num/g" ${TARGET_PATH}/${VUL_DIR}/conf/vul_gunicorn.py +} + +start_app() { + ###if supervisor service started, we need use "supervisorctl update" to start new conf#### + supervisorctl update + supervisorctl status ${SERVICE_NAME} + if [ $? -eq 0 ] + then + echo "supervisorctl start ${SERVICE_NAME} success..." + return 0 + fi + echo "${SERVICE_NAME} service start fail, please check log" + exit 1 +} + +deploy() { + source_virtualenv + init_conf + start_app +} + +deploy diff --git a/script/server/4_sysom_vul/start.sh b/script/server/4_sysom_vul/start.sh new file mode 100644 index 0000000000000000000000000000000000000000..eeb0d1de273fcf5a24857d5133cd5c4eb314cabf --- /dev/null +++ b/script/server/4_sysom_vul/start.sh @@ -0,0 +1,10 @@ +#!/bin/bash + + +SERVICE_NAME=sysom-vul + +start_app() { + supervisorctl start $SERVICE_NAME +} + +start_app diff --git a/script/server/4_sysom_vul/stop.sh b/script/server/4_sysom_vul/stop.sh new file mode 100644 index 0000000000000000000000000000000000000000..594c064fb45c523bf733f44b6760b3d28df855ae --- /dev/null +++ b/script/server/4_sysom_vul/stop.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +SERVICE_NAME=sysom-vul + +stop_app() { + supervisorctl stop $SERVICE_NAME +} + +stop_app diff --git a/script/server/4_sysom_vul/sysom-vul.ini b/script/server/4_sysom_vul/sysom-vul.ini new file mode 100644 index 0000000000000000000000000000000000000000..ae9a6b4072ef25e1c5b01f487c74c931588d73a9 --- /dev/null +++ b/script/server/4_sysom_vul/sysom-vul.ini @@ -0,0 +1,9 @@ +[program:sysom-vul] +directory = /usr/local/sysom/server/target/sysom_server/sysom_vul +command=/usr/local/sysom/server/virtualenv/bin/gunicorn -c ./conf/vul_gunicorn.py sysom_vul.wsgi:application +startsecs=3 +autostart=true +autorestart=true +environment=PATH="/usr/local/sysom/server/virtualenv/bin/" +stderr_logfile=/usr/local/sysom/server/logs/sysom-vul-error.log +stdout_logfile=/usr/local/sysom/server/logs/sysom-vul.log diff --git a/script/server/conf b/script/server/conf new file mode 100644 index 0000000000000000000000000000000000000000..9f84e156d7c9ee238c4142ea09faa90f32718872 --- /dev/null +++ b/script/server/conf @@ -0,0 +1,17 @@ +[base] +0_env +0_local_services +0_sysom_api +0_sysom_channel + +[monitor] +1_monitor + +[vmcore] +2_vmcore + +[diagnosis] +3_sysom_diagnosis + +[vul] +4_sysom_vul diff --git a/script/server/init.sh b/script/server/init.sh index 
c527524fadad67289c2955fe26f970bb659440d0..2581237ad49a9d091e7e7a05099487fddc814740 100644 --- a/script/server/init.sh +++ b/script/server/init.sh @@ -2,6 +2,11 @@ FIRST_INIT_DONE=0 +if [ "$APP_NAME" == "" ] +then + export APP_NAME="sysom" +fi + if [ "$APP_HOME" == "" ] then export APP_HOME=/usr/local/sysom/ @@ -24,13 +29,14 @@ then export SERVER_PORT=80 fi +config=conf basedir=`dirname $0` cd $basedir if [ $FIRST_INIT_DONE == 0 ] then - for dir in `ls` + for dir in `cat $config` do if [ -d $dir ] then diff --git a/sysom_server/sdk/channel_job/job.py b/sysom_server/sdk/channel_job/job.py index fbbf20f7e76f008a4a92e670ce4efd4c8cdf1b79..a5d514eb7d8027dd93eb50d5d728a4e49dfc0795 100644 --- a/sysom_server/sdk/channel_job/job.py +++ b/sysom_server/sdk/channel_job/job.py @@ -95,7 +95,7 @@ class ChannelJob: Args: chunk_callback(Callable[[JobResult], None]): A callback function that is """ - return anyio.to_thread.run_sync( + return await anyio.to_thread.run_sync( self.execute, chunk_callback ) diff --git a/sysom_server/sdk/channel_job/model.py b/sysom_server/sdk/channel_job/model.py index 0e4f8cb19a751344dc02822cee817a4bdbaa5781..5ea778ed844ff16463335a21797f6ebfda00667d 100644 --- a/sysom_server/sdk/channel_job/model.py +++ b/sysom_server/sdk/channel_job/model.py @@ -12,6 +12,9 @@ import uuid class JobEntry: + CHANNEL_PARAMS_TIMEOUT = "__channel_params_timeout" + CHANNEL_PARAMS_AUTO_RETRY = "__channel_params_auto_retry" + def __init__(self, channel_type: str = "ssh", channel_opt: str = "cmd", params: dict = {}, echo: dict = {}, listen_topic: str = "", job_id: Optional[str] = None, @@ -28,10 +31,10 @@ class JobEntry: self.job_id = str(uuid.uuid4()) def to_channel_vlaue(self) -> dict: - if "timeout" not in self.params: - self.params["timeout"] = self.timeout - if "auto_retry" not in self.params: - self.params["auto_retry"] = self.auto_retry + if self.CHANNEL_PARAMS_TIMEOUT not in self.params: + self.params[self.CHANNEL_PARAMS_TIMEOUT] = self.timeout + if self.CHANNEL_PARAMS_AUTO_RETRY not in self.params: + self.params[self.CHANNEL_PARAMS_AUTO_RETRY] = self.auto_retry result = { "channel": self.channel_type, "type": self.channel_opt, diff --git a/sysom_server/sysom_api/apps/accounts/migrations/0001_initial.py b/sysom_server/sysom_api/apps/accounts/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..a1a2cbd700f217c9279bbb9bc5e4990866f65d84 --- /dev/null +++ b/sysom_server/sysom_api/apps/accounts/migrations/0001_initial.py @@ -0,0 +1,85 @@ +# Generated by Django 3.2.8 on 2022-11-28 06:37 + +from django.db import migrations, models +import django.db.models.deletion +import lib.utils + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='User', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('username', models.CharField(max_length=128)), + ('password', models.CharField(max_length=255)), + ('is_admin', models.BooleanField(default=False)), + ('is_agree', models.BooleanField(default=False)), + ('description', models.TextField()), + ], + options={ + 'db_table': 'sys_users', + }, + ), + migrations.CreateModel( + name='Permission', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + 
('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('path', models.CharField(max_length=64, verbose_name='Api路径')), + ('method', models.IntegerField(choices=[(0, 'GET'), (1, 'POST'), (2, 'DELETE'), (3, 'PUT'), (4, 'PATCH')], default=0, verbose_name='请求方式')), + ], + options={ + 'db_table': 'sys_permission', + }, + ), + migrations.CreateModel( + name='Role', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('role_name', models.CharField(max_length=128, unique=True, verbose_name='角色名称')), + ('permissions', models.ManyToManyField(db_constraint=False, to='accounts.Permission', verbose_name='关联权限')), + ], + options={ + 'db_table': 'sys_role', + }, + ), + migrations.CreateModel( + name='HandlerLog', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('request_ip', models.GenericIPAddressField(verbose_name='请求IP地址')), + ('request_url', models.CharField(max_length=64, verbose_name='请求API路径')), + ('request_browser_agent', models.CharField(max_length=256, verbose_name='浏览器信息')), + ('request_method', models.CharField(choices=[('get', 'GET'), ('post', 'POST'), ('put', 'PUT'), ('patch', 'PATCH'), ('delete', 'DELETE')], default='get', max_length=32, verbose_name='请求方式')), + ('handler_view', models.CharField(max_length=32, verbose_name='处理视图')), + ('response_status', models.IntegerField(default=200, verbose_name='响应时间')), + ('request_option', models.IntegerField(choices=[(0, 'login'), (1, 'action')], default=1, verbose_name='请求动作')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='option_logs', to='accounts.user', verbose_name='操作人')), + ], + options={ + 'verbose_name': '操作日志', + 'verbose_name_plural': '操作日志', + 'db_table': 'sys_handler_log', + 'ordering': ['-created_at'], + }, + ), + migrations.AddField( + model_name='user', + name='role', + field=models.ManyToManyField(db_constraint=False, to='accounts.Role', verbose_name='关联角色'), + ), + ] diff --git a/sysom_server/sysom_api/apps/vul/__init__.py b/sysom_server/sysom_api/apps/accounts/migrations/__init__.py similarity index 100% rename from sysom_server/sysom_api/apps/vul/__init__.py rename to sysom_server/sysom_api/apps/accounts/migrations/__init__.py diff --git a/sysom_server/sysom_api/apps/alarm/migrations/0001_initial.py b/sysom_server/sysom_api/apps/alarm/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..b4e36f343c5fdfa597d4483a70fae4932c51863a --- /dev/null +++ b/sysom_server/sysom_api/apps/alarm/migrations/0001_initial.py @@ -0,0 +1,52 @@ +# Generated by Django 3.2.8 on 2022-11-28 06:44 + +from django.db import migrations, models +import django.db.models.deletion +import lib.utils + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('host', '__first__'), + ('accounts', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='SubscribeModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('title', models.CharField(max_length=128, unique=True, verbose_name='订阅名称')), + ('users', models.ManyToManyField(related_name='subs', to='accounts.User')), + ], + options={ + 'db_table': 'sys_subscribe', + }, + ), + migrations.CreateModel( + name='AlarmModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('noticelcon_type', models.SmallIntegerField(choices=[(0, 'notification'), (1, 'warning')], default=0, verbose_name='通知类型')), + ('level', models.IntegerField(choices=[(0, 'info'), (1, 'warning'), (2, 'error'), (3, 'success')], default=0, verbose_name='告警级别')), + ('message', models.TextField(verbose_name='告警内容')), + ('collected_time', models.DateTimeField(verbose_name='告警采集时间')), + ('duration_time', models.TimeField(blank=True, null=True, verbose_name='告警持续时间')), + ('item', models.CharField(max_length=100, verbose_name='告警项')), + ('is_read', models.BooleanField(default=False, verbose_name='是否已读')), + ('host', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alarm_hosts', to='host.hostmodel', verbose_name='告警主机')), + ('receiver', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='alarm_user', to='accounts.user', verbose_name='告警接受者')), + ('sub', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='alarms', to='alarm.subscribemodel')), + ], + options={ + 'db_table': 'sys_alarm', + }, + ), + ] diff --git a/sysom_server/sysom_api/apps/alarm/migrations/__init__.py b/sysom_server/sysom_api/apps/alarm/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_api/apps/host/migrations/0001_initial.py b/sysom_server/sysom_api/apps/host/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..019bee87bef6c3fbe440b1efb4a5ef023061de53 --- /dev/null +++ b/sysom_server/sysom_api/apps/host/migrations/0001_initial.py @@ -0,0 +1,50 @@ +# Generated by Django 3.2.8 on 2022-11-28 06:46 + +from django.db import migrations, models +import django.db.models.deletion +import lib.utils + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Cluster', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('cluster_name', models.CharField(max_length=128, unique=True)), + ('cluster_description', models.CharField(default='', max_length=255)), + ], + options={ + 'db_table': 'sys_cluster', + }, + ), + migrations.CreateModel( + name='HostModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), 
+ ('hostname', models.CharField(max_length=100, unique=True)), + ('ip', models.CharField(max_length=100, unique=True)), + ('port', models.IntegerField()), + ('username', models.CharField(max_length=100)), + ('private_key', models.TextField(null=True)), + ('description', models.CharField(max_length=255, null=True)), + ('status', models.IntegerField(choices=[(0, 'running'), (1, 'error'), (2, 'offline')], default=2, verbose_name='主机状态')), + ('client_deploy_cmd', models.TextField(default='', verbose_name='client部署命令')), + ('created_by', models.IntegerField(verbose_name='创建用户')), + ('cluster', models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='hosts', to='host.cluster')), + ], + options={ + 'db_table': 'sys_host', + }, + ), + ] diff --git a/sysom_server/sysom_api/apps/host/migrations/__init__.py b/sysom_server/sysom_api/apps/host/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_api/apps/host/models.py b/sysom_server/sysom_api/apps/host/models.py index e964df618e5553dda48ec5f008eebb679762d39d..31a8ed36446d0641083274561665421362307182 100644 --- a/sysom_server/sysom_api/apps/host/models.py +++ b/sysom_server/sysom_api/apps/host/models.py @@ -1,6 +1,5 @@ from django.db import models from lib.base_model import BaseModel -from apps.accounts.models import User class HostModel(BaseModel): @@ -19,8 +18,9 @@ class HostModel(BaseModel): status = models.IntegerField(choices=HOST_STATUS_CHOICES, default=2, verbose_name="主机状态") client_deploy_cmd = models.TextField(verbose_name="client部署命令", default="") cluster = models.ForeignKey('Cluster', on_delete=models.CASCADE, related_name='hosts', default="") - created_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name="c_hosts") - deleted_by = models.ForeignKey(User, null=True, on_delete=models.CASCADE, related_name="d_hosts") + # created_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name="c_hosts") + created_by = models.IntegerField(verbose_name='创建用户') + # deleted_by = models.ForeignKey(User, null=True, on_delete=models.CASCADE, related_name="d_hosts") class Meta: db_table = "sys_host" diff --git a/sysom_server/sysom_api/apps/host/views.py b/sysom_server/sysom_api/apps/host/views.py index 949477d3b938c666424cf3ffab1705cb401462a7..79f779ccb840ec98fd81c99a06efbed3fa5e7298 100644 --- a/sysom_server/sysom_api/apps/host/views.py +++ b/sysom_server/sysom_api/apps/host/views.py @@ -59,7 +59,7 @@ class HostModelViewSet(CommonModelViewSet, return super(HostModelViewSet, self).list(request, *args, **kwargs) def perform_create(self, ser): - ser.save(created_by=self.request.user) + ser.save(created_by=self.request.user.id) def create(self, request, *args, **kwargs): # 检查字段是否满足 diff --git a/sysom_server/sysom_api/conf/common.py b/sysom_server/sysom_api/conf/common.py index ee59ad851ab9a222589c39198b72d81887de2bc7..00103922e2db61d70138c4c73a9a8e545eda397c 100644 --- a/sysom_server/sysom_api/conf/common.py +++ b/sysom_server/sysom_api/conf/common.py @@ -16,7 +16,6 @@ SECRET_KEY = 'django-insecure-^d8b9di9w&-mmsbpt@)o#e+2^z+^m4nhf+z8304%9@8y#ko46l ALLOWED_HOSTS = ['*'] INSTALLED_APPS = [ - 'apps.vul', 'apps.accounts', 'apps.host', 'apps.alarm', diff --git a/sysom_server/sysom_api/consumer/consumers.py b/sysom_server/sysom_api/consumer/consumers.py index 7705d5199ad0240894bd84edc215cfdd114cd066..1d283a914b911dcec7b98d6a5dcfc2b42d038803 100644 --- a/sysom_server/sysom_api/consumer/consumers.py +++ 
b/sysom_server/sysom_api/consumer/consumers.py @@ -17,10 +17,30 @@ def get_host_instance(model, **kwargs): """async orm""" return model.objects.filter(**kwargs).first() +def bind_ssh_key(): + from django.conf import settings + from lib.ssh import SSH + + def private_key_getter() -> str: + result = "" + with open(settings.SSH_CHANNEL_KEY_PRIVATE) as f: + result = f.read() + return result + + def public_key_getter() -> str: + result = "" + with open(settings.SSH_CHANNEL_KEY_PUB) as f: + result = f.read() + return result + + SSH.set_private_key_getter(private_key_getter) + SSH.set_public_key_getter(public_key_getter) + class SshConsumer(WebsocketConsumer): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + bind_ssh_key() self.user = None self.host_ip = None self.start_cmd = None diff --git a/sysom_server/sysom_channel/app/executor.py b/sysom_server/sysom_channel/app/executor.py index 58989dea1cbe36dd0f7aa496bde6d06684cb8c0e..972b8a8cb7ccdd99e4f32712d79903f996ce751d 100644 --- a/sysom_server/sysom_channel/app/executor.py +++ b/sysom_server/sysom_channel/app/executor.py @@ -23,6 +23,10 @@ import asyncssh logger = logging.getLogger(__name__) +CHANNEL_PARAMS_TIMEOUT = "__channel_params_timeout" +CHANNEL_PARAMS_AUTO_RETRY = "__channel_params_auto_retry" + + class ChannelListener(CecClient): """ A cec-based channel listener @@ -97,7 +101,7 @@ class ChannelListener(CecClient): def _do_run_command(self, channel_type: str, task: dict) -> ChannelResult: """cmd opt""" - def on_data_received(data: str, data_type: asyncssh.DataType): + def on_data_received(data: str, data_type: asyncssh.DataType): echo = task.get("echo", {}) bind_result_topic = task.get("bind_result_topic", None) if bind_result_topic is not None: @@ -108,8 +112,11 @@ class ChannelListener(CecClient): "result": data }) params = task.get("params", {}) + timeout = params.pop(CHANNEL_PARAMS_TIMEOUT, None) + auto_retry = params.pop(CHANNEL_PARAMS_AUTO_RETRY, False) res = self._get_channel(channel_type)(**params).run_command_auto_retry( - timeout=params.get("timeout", None), + timeout=timeout, + auto_retry=auto_retry, on_data_received=on_data_received ) return res @@ -117,7 +124,11 @@ class ChannelListener(CecClient): def _do_init_channel(self, channel_type: str, task: dict) -> ChannelResult: """init opt""" params = task.get("params", {}) - return self._get_channel(channel_type).initial(**params) + timeout = params.pop(CHANNEL_PARAMS_TIMEOUT, None) + auto_retry = params.pop(CHANNEL_PARAMS_AUTO_RETRY, False) + return self._get_channel(channel_type).initial( + **params, timeout=timeout, auto_retry=auto_retry + ) def _process_each_task(self, consumer: Consumer, event: Event): """ diff --git a/sysom_server/sysom_channel/lib/ssh.py b/sysom_server/sysom_channel/lib/ssh.py index 74690e81732728b64f86c2cd41f0f9ad13cccc52..f9038155fdc6d830fb9fc882362df5c2017ac2c7 100644 --- a/sysom_server/sysom_channel/lib/ssh.py +++ b/sysom_server/sysom_channel/lib/ssh.py @@ -16,7 +16,7 @@ import concurrent from conf.settings import * from lib.channels.base import ChannelException -DEFAULT_CONNENT_TIMEOUT = 5 # 默认ssh链接超时时间 5s +DEFAULT_CONNENT_TIMEOUT = 5000 # 默认ssh链接超时时间 5s DEFAULT_NODE_USER = 'root' # 默认节点用户名 root logger = logging.getLogger(__name__) @@ -125,8 +125,9 @@ class AsyncSSH: "total_out": "", "err_msg": "" } - self.connect_args["connect_timeout"] = 1 if timeout is None else timeout / 1000 * 0.8 try: + timeout /= 1000 + self.connect_args["connect_timeout"] = timeout async with asyncssh.connect(self._hostname, **self.connect_args) as conn: 
chan, session = await conn.create_session( lambda: EasySSHCallbackForwarder(on_data_received), command diff --git a/sysom_server/sysom_diagnosis/apps/task/migrations/0001_initial.py b/sysom_server/sysom_diagnosis/apps/task/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..ae6ede978f351f941140a5b9bffe1ab7d59df59d --- /dev/null +++ b/sysom_server/sysom_diagnosis/apps/task/migrations/0001_initial.py @@ -0,0 +1,33 @@ +# Generated by Django 3.2.8 on 2022-11-28 06:49 + +from django.db import migrations, models +import lib.utils + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='JobModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('updated_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='更新时间')), + ('task_id', models.CharField(default='', max_length=64, verbose_name='任务实例ID')), + ('status', models.CharField(choices=[('Ready', 'Ready'), ('Running', 'Running'), ('Success', 'Success'), ('Fail', 'Fail')], default='Ready', max_length=32, verbose_name='任务状态')), + ('command', models.TextField(verbose_name='shell文本')), + ('result', models.JSONField(default=dict, verbose_name='shell结果')), + ('params', models.JSONField(default=dict, verbose_name='params')), + ('host_by', models.TextField(default='', max_length=64, verbose_name='host_jobs')), + ('created_by', models.IntegerField(verbose_name='创建人')), + ], + options={ + 'db_table': 'sys_job', + }, + ), + ] diff --git a/sysom_server/sysom_diagnosis/apps/task/migrations/__init__.py b/sysom_server/sysom_diagnosis/apps/task/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_diagnosis/conf/diagnosis_gunicorn.py b/sysom_server/sysom_diagnosis/conf/diagnosis_gunicorn.py index e7af99d188995b83150ac40898a63ecc87a87ea2..fe1132559519f42405c04f11109626a13818e0d2 100644 --- a/sysom_server/sysom_diagnosis/conf/diagnosis_gunicorn.py +++ b/sysom_server/sysom_diagnosis/conf/diagnosis_gunicorn.py @@ -11,11 +11,9 @@ worker_class = 'gevent' # 工作模式线程, 默认为sync模式 max_requests = 2000 # 设置最大并发数量为2000 (每个worker处理请求的工作线程) -accesslog = '/usr/local/sysom/server/logs/diagnosis-service.log' +accesslog = '/usr/local/sysom/server/logs/sysom-diagnosis-access.log' -errorlog = '/usr/local/sysom/server/logs/diagnosis-service-error.log' - -loglevel = 'info' +loglevel = 'error' proc_name = 'diagnosis_service' diff --git a/sysom_server/sysom_diagnosis/service_scripts/command b/sysom_server/sysom_diagnosis/service_scripts/command new file mode 100755 index 0000000000000000000000000000000000000000..892b0999ea988494af1c4616658ca6600afef9be --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/command @@ -0,0 +1,33 @@ +#!/usr/bin/python3 +# coding=utf-8 +import json +import sys + +############################################################################### +## { +## "service_name":"command", +## "command":"ls -l", +## "instance":"192.168.1.101" +## } +## 解析参数方法: +## sysak oomcheck -j /var/log/sysak/memgraph.json -g -l -f -k -c 3 && cat /var/log/sysak/memgraph.json +## cat /var/log/sysak/oomcheck.json 是memgraph诊断结果 +## +############################################################################### +class Param(dict): + def 
__missing__(self,key): + sys.stderr.write("入参检查失败,没有检查到入参'%s'"%key) + exit(1) + +args = Param(json.loads(sys.argv[1])) +result = {} +result['commands'] = [] + +cmd0 = {} +cmd0['instance'] = args["instance"] +cmd0['cmd'] = args["command"] + +result['commands'].append(cmd0) + +data = json.dumps(result) +print(data) diff --git a/sysom_server/sysom_diagnosis/service_scripts/command_post b/sysom_server/sysom_diagnosis/service_scripts/command_post new file mode 100755 index 0000000000000000000000000000000000000000..d6b282cfb29ce96661297b737ddee968ecfc3ccb --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/command_post @@ -0,0 +1,22 @@ +#!/usr/bin/python3 +# coding=utf-8 +import os +import sys +import json +import random + +def oomcheck_result(raw): + result = {} + result = {"CommandResult":{"data":[{"key":"","value":raw}]}} + print(json.dumps(result)) + +def extract_params(): + path, res, task_id = sys.argv[1], "", sys.argv[2] + with open(path, 'r') as tmp: + for line in tmp.readlines(): + res = res + "\r" + line + return res, task_id + +if __name__ == "__main__": + res, _ = extract_params() + oomcheck_result(res) diff --git a/sysom_server/sysom_diagnosis/service_scripts/iofsstat b/sysom_server/sysom_diagnosis/service_scripts/iofsstat new file mode 100755 index 0000000000000000000000000000000000000000..dc8da98af364429e8b962124c6b0b5a92adf4f81 --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/iofsstat @@ -0,0 +1,39 @@ +#!/usr/bin/python3 +# coding=utf-8 +import json +import sys + +############################################################################### +## 如前端入参是这个: +## { +## "instance":"192.168.1.101", +## "timeout":"15" +## "disk":"vda" +## } +############################################################################### +class Param(dict): + def __missing__(self,key): + sys.stderr.write("入参检查失败,没有检查到入参'%s'"%key) + exit(1) + +args = Param(json.loads(sys.argv[1])) + +result = {} +result['commands'] = [] + +cmd0 = {} +cmd0['instance'] = args["instance"] +cmd0_arg_T = args.get("timeout","15") +if int(cmd0_arg_T) <= 0: + cmd0_arg_T = 15 +cmd0_arg_device = '' +disk = args.get("disk","") +if len(disk) > 0: + cmd0_arg_device = ' -d '+disk +iosdiag_cmd = "sysak iofsstat -m -P -j /tmp/iofsstat.json -c "+str(cmd0_arg_T)+" -T "+str(cmd0_arg_T)+cmd0_arg_device +cmd0['cmd'] = "rm /tmp/iofsstat.json -f && "+iosdiag_cmd+" && cat /tmp/iofsstat.json" + +result['commands'].append(cmd0) + +data = json.dumps(result) +print(data) diff --git a/sysom_server/sysom_diagnosis/service_scripts/iofsstat_post b/sysom_server/sysom_diagnosis/service_scripts/iofsstat_post new file mode 100755 index 0000000000000000000000000000000000000000..d5a83f8df514729d54b906a7cfb491481b98bad6 --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/iofsstat_post @@ -0,0 +1,100 @@ +#!/usr/bin/python3 +# coding=utf-8 +import sys +import json +import random + +def getDisksFromDiskstats(diskstats): + disks = [] + for o in diskstats: + if o['diskname'] not in disks: + add = True + for d in disks: + # is master dev and part in disks, replace + if o['diskname'] in d: + if o['diskname'] not in disks: + disks[disks.index(d)] = o['diskname'] + else: + # clear muti parts in disks + disks.remove(d) + add = False + continue + # is part dev and master in disks, do nothing + if d in o['diskname']: + add = False + break + if add: + disks.append(o['diskname']) + return disks + +def iofsstatJoinData(raw): + if raw.find("diskstats") == -1: + print({"status": "fail"}) + return + + stat = {} + stat["disks"] = 
{"data": []} + for s in raw.split('\n'): + try: + obj = json.loads(s) + except Exception: + continue + if "diskstats" in str(s): + disks = getDisksFromDiskstats(obj["diskstats"]) + stat["disks"]["data"] = \ + [{'key':disks.index(d), 'value':d} for d in disks] + for d in disks: + if ("diskIOstat_"+d) not in stat.keys(): + stat["diskIOstat_"+d] = {"data": []} + for o in obj["diskstats"]: + if d in o['diskname']: + stat["diskIOstat_"+d]["data"].append(o) + elif "mstats" in str(s): + for d in disks: + if ("taskIOstat_"+d) not in stat.keys(): + stat["taskIOstat_"+d] = {"data": []} + stat["taskIOblocksize_"+d] = {"data": []} + + for o in obj["mstats"]: + if d not in o['device']: + continue + pat = {'comm': o['comm'], 'tgid:pid': '-:'+o['pid']} + patKey = [] + for key, value in o.items(): + if 'pat' in key: + if o['iops_wr'] == 0 or value == 0: + pat[key] = '0' + else: + pat[key] = \ + format(value/(o['iops_wr']*1.0)*100, '.2f')+'%' + patKey.append(key) + for key in patKey: + del o[key] + stat["taskIOblocksize_"+d]["data"].append(pat) + + o['file'] = str(o['file']) + if 'bufferio' in o.keys(): + o['children'] = o['bufferio'] + for e in o['children']: + task = e['task'].rsplit(':',2) + e['comm'] = task[0] + e['tgid:pid'] = task[1]+':'+task[2] + e['bps_wr'] = e['Wrbw'] + del o['bufferio'] + new = list(o.items()) + new[1] = ('tgid:pid', '-:'+o['pid']) + idx = obj["mstats"].index(o) + obj["mstats"][idx] = dict(new) + stat["taskIOstat_"+d]["data"].append(obj["mstats"][idx]) + s = json.dumps(stat, indent=4) + print(s) + +def extract_params(): + path, res, task_id = sys.argv[1], "", sys.argv[2] + with open(path, 'r') as tmp: + res = tmp.read() + return res, task_id + +if __name__ == "__main__": + res, _ = extract_params() + iofsstatJoinData(res) diff --git a/sysom_server/sysom_diagnosis/service_scripts/iohang b/sysom_server/sysom_diagnosis/service_scripts/iohang new file mode 100755 index 0000000000000000000000000000000000000000..ecdbf74c1b0271e99ba3e456f6282752a630f6a2 --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/iohang @@ -0,0 +1,45 @@ +#!/usr/bin/python3 +# coding=utf-8 +import json +import sys + +############################################################################### +## 如前端入参是这个: +## { +## "instance":"192.168.1.101", +## "threshold":"5000", +## "timeout":"10" +## "disk":"vda" +## } +############################################################################### +class Param(dict): + def __missing__(self,key): + sys.stderr.write("入参检查失败,没有检查到入参'%s'"%key) + exit(1) + +args = Param(json.loads(sys.argv[1])) + +result = {} +result['commands'] = [] + +cmd0 = {} +cmd0['instance'] = args["instance"] +cmd0_arg_t = args.get("threshold","5000") +if int(cmd0_arg_t) < 0: + cmd0_arg_t = 0 +cmd0_arg_T = args.get("timeout","10") +if int(cmd0_arg_T) <= 0: + cmd0_arg_T = 10 +cmd0_arg_device = args.get("disk","") +dump_log_cmd = "cat /var/log/sysak/iosdiag/hangdetect/result.log.stat 2>/dev/null;\ + echo \"\";cat /var/log/sysak/iosdiag/hangdetect/result.log 2>/dev/null;" +iosdiag_cmd = "sysak iosdiag hangdetect -t "+str(cmd0_arg_t)+" -T "+str(cmd0_arg_T)+" "+cmd0_arg_device+" > /dev/null" +print_result_cmd = "if [ ! 
-e /var/log/sysak/iosdiag/hangdetect/result.log.seq ]; then "+\ + "echo \"fail\"; elif [ -e /var/log/sysak/iosdiag/hangdetect/result.log.stat ]; then "+dump_log_cmd+\ + "else echo \"\"; fi" +cmd0['cmd'] = "rm /var/log/sysak/iosdiag/hangdetect/* -f && "+iosdiag_cmd+" && "+print_result_cmd + +result['commands'].append(cmd0) + +data = json.dumps(result) +print(data) diff --git a/sysom_server/sysom_diagnosis/service_scripts/iohang_post b/sysom_server/sysom_diagnosis/service_scripts/iohang_post new file mode 100755 index 0000000000000000000000000000000000000000..bea88e86b60dcf6ce37c42c896e6e366a93a68c5 --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/iohang_post @@ -0,0 +1,50 @@ +#!/usr/bin/python3 +# coding=utf-8 +import sys +import json +import random + +def iosdiagJoinData(raw): + if raw.startswith('fail'): + print({"status": "fail"}) + return + + disks = [] + stat = {} + stat["disks"] = {"data": []} + for s in raw.split('\n'): + try: + obj = json.loads(s) + except Exception: + continue + dataSource = "singleIO_" + if "percent" in str(obj): + disks = [s['diskname'] for s in obj['summary'] \ + if s['diskname'] not in disks] + stat["disks"]["data"] = \ + [{'key':disks.index(d), 'value':d} for d in disks] + dataSource = "iohangOverview_" + for d in disks: + if (dataSource+d) not in stat.keys(): + stat[dataSource+d] = {"data": []} + for s in obj['summary']: + if d in s['diskname']: + if "percent" not in str(obj): + s["hung ios"] = \ + sorted(s["hung ios"],\ + key=lambda e:float(e['abnormal'].split()[-2]),\ + reverse=True)[:10] + stat[dataSource+d]["data"] = s["hung ios"] + + s = json.dumps(stat, indent=4) + print(s) + +def extract_params(): + path, res, task_id = sys.argv[1], "", sys.argv[2] + with open(path, 'r') as tmp: + res = tmp.read() + return res, task_id + +if __name__ == "__main__": + res, _ = extract_params() + iosdiagJoinData(res) diff --git a/sysom_server/sysom_diagnosis/service_scripts/iolatency b/sysom_server/sysom_diagnosis/service_scripts/iolatency new file mode 100755 index 0000000000000000000000000000000000000000..5a71e3d6c9cafbd093f56c3a92af21e22b6c65f4 --- /dev/null +++ b/sysom_server/sysom_diagnosis/service_scripts/iolatency @@ -0,0 +1,46 @@ +#!/usr/bin/python3 +# coding=utf-8 +import json +import sys + +############################################################################### +## 如前端入参是这个: +## { +## "instance":"192.168.1.101", +## "threshold":"1", +## "timeout":"10" +## "disk":"vda" +## } +############################################################################### +class Param(dict): + def __missing__(self,key): + sys.stderr.write("入参检查失败,没有检查到入参'%s'"%key) + exit(1) + +args = Param(json.loads(sys.argv[1])) + +result = {} +result['commands'] = [] + +cmd0 = {} +cmd0['instance'] = args["instance"] +cmd0_arg_t = args.get("threshold","5000") +if int(cmd0_arg_t) < 0: + cmd0_arg_t = 0 +cmd0_arg_T = args.get("timeout","10") +if int(cmd0_arg_T) <= 0: + cmd0_arg_T = 10 +cmd0_arg_device = args.get("disk","") +dump_log_cmd = "cat /var/log/sysak/iosdiag/latency/result.log.stat 2>/dev/null;\ + echo \"\";cat /var/log/sysak/iosdiag/latency/result.log.seq 2>/dev/null;\ + echo \"\";cat /var/log/sysak/iosdiag/latency/result.log 2>/dev/null;" +iosdiag_cmd = "sysak iosdiag latency -t "+str(cmd0_arg_t)+" -T "+str(cmd0_arg_T)+" "+cmd0_arg_device+" > /dev/null" +print_result_cmd = "if [ ! 
-e /var/log/sysak/iosdiag/latency/result.log.seq ]; then "+\
+    "echo \"fail\"; elif [ -e /var/log/sysak/iosdiag/latency/result.log.stat ]; then "+dump_log_cmd+\
+    "else echo \"\"; fi"
+cmd0['cmd'] = "rm /var/log/sysak/iosdiag/latency/* -f && "+iosdiag_cmd+" && "+print_result_cmd
+
+result['commands'].append(cmd0)
+
+data = json.dumps(result)
+print(data)
diff --git a/sysom_server/sysom_diagnosis/service_scripts/iolatency_post b/sysom_server/sysom_diagnosis/service_scripts/iolatency_post
new file mode 100755
index 0000000000000000000000000000000000000000..074ae48ecc507e77a2c81ff74403ce28ad398ae0
--- /dev/null
+++ b/sysom_server/sysom_diagnosis/service_scripts/iolatency_post
@@ -0,0 +1,90 @@
+#!/usr/bin/python3
+# coding=utf-8
+import sys
+import json
+import random
+import re
+
+def iosdiagJoinData(raw):
+    if raw.startswith('fail'):
+        data = {"status":"fail"}
+        print(data)
+        return
+
+    disks = []
+    stat = {}
+    stat["disks"] = {"data": []}
+    for s in raw.split('\n'):
+        try:
+            obj = json.loads(s)
+        except Exception:
+            continue
+        if "percent" in str(obj):
+            disks = [s['diskname'] for s in obj['summary'] \
+                if s['diskname'] not in disks]
+            stat["disks"]["data"] = \
+                [{'key':disks.index(d), 'value':d} for d in disks]
+            for s in obj['summary']:
+                diskIdx = 'iolatencyOverview_'+s['diskname']
+                if diskIdx not in stat.keys():
+                    stat[diskIdx] = {"data": []}
+                for delay in s['delays']:
+                    stat[diskIdx]["data"].append(
+                        {"key": delay['component'], "title": delay['component'],
+                        "value": delay['percent'], "text":
+                        'Max_us: '+str(round(delay['max'], 1))+
+                        '\nAVG_us: '+str(round(delay['avg'], 1))+
+                        '\nMin_us: '+str(round(delay['min'], 1))})
+        elif 'totaldelay' in str(obj) or 'abnormal' in str(obj):
+            for s in obj['summary']:
+                isSeqData = False
+                diskIdx = 'singleIO_'+s['diskname']
+                if 'totaldelay' in str(obj):
+                    diskIdx = 'singleIOMetrics_'+s['diskname']
+                    isSeqData = True
+
+                if diskIdx not in stat.keys():
+                    stat[diskIdx] = {"data": []}
+
+                idx = -1
+                dupRm = []
+                slowIOs = s['slow ios']
+                for delay in slowIOs:
+                    idx += 1
+                    if idx > 0 and delay["time"] == slowIOs[idx-1]["time"]:
+                        if (isSeqData and delay["totaldelay"] <= \
+                            slowIOs[idx-1]["totaldelay"]) or (not isSeqData and
+                            float(re.split(':| ', delay['abnormal'])[-2]) <= \
+                            float(re.split(':| ', slowIOs[idx-1]['abnormal'])[-2])):
+                            dupRm.append(delay)
+                        else:
+                            dupRm.append(slowIOs[idx-1])
+                for d in dupRm:
+                    slowIOs.remove(d)
+
+                if not isSeqData:
+                    slowIOs = \
+                        sorted(slowIOs,\
+                            key=lambda e:float(re.split(':| ', e['abnormal'])[-2]),\
+                            reverse=True)[:10]
+                    stat[diskIdx]["data"] = slowIOs
+                else:
+                    for delay in slowIOs:
+                        entry = {
+                            "time": delay['time'], "total": delay['totaldelay']}
+                        for d in delay['delays']:
+                            entry[d['component']] = d['delay']
+                        stat[diskIdx]["data"].append(entry)
+
+    s = json.dumps(stat, indent=4)
+    print(s)
+
+def extract_params():
+    path, res, task_id = sys.argv[1], "", sys.argv[2]
+    with open(path, 'r') as tmp:
+        res = tmp.read()
+    return res, task_id
+
+if __name__ == "__main__":
+    res, _ = extract_params()
+    iosdiagJoinData(res)
diff --git a/sysom_server/sysom_diagnosis/service_scripts/loadtask_post b/sysom_server/sysom_diagnosis/service_scripts/loadtask_post
index c44c9c36adb13d55ea9b0ef206e28cf0206341b0..9737001ccd990814956fd710de2c970f369aea4f 100755
--- a/sysom_server/sysom_diagnosis/service_scripts/loadtask_post
+++ b/sysom_server/sysom_diagnosis/service_scripts/loadtask_post
@@ -40,6 +40,7 @@ def cpuflamegraph(file):
 
 def parse_log(file):
     parse_data=collections.OrderedDict()
+    new_parse_data=collections.OrderedDict()
     reason=collections.OrderedDict()
     tasks = {}
     count = {}
@@ -117,7 +118,40 @@ def parse_log(file):
     parse_data["uninterrupt load"] = d_task_list
     parse_data["running load"] = r_task_list
     parse_data["flamegraph"] = cpuflamegraph_str
-    out = json.dumps(parse_data, indent=4)
+
+    new_parse_data["dataresult"] = {"data": {}}
+    new_parse_data["dataresult"]["data"] = [
+        {"key": "系统平均负载", "value": reason["loadavg"]},
+        {"key": "Sys影响检测", "value": reason["sys"]},
+        {"key": "硬件中断影响检测", "value": reason["irq"]},
+        {"key": "软中断影响检测", "value": reason["softirq"]},
+        {"key": "IO影响检测", "value": reason["io"]}
+    ]
+
+    new_parse_data["datataskcount"] = {"data": []}
+    new_parse_data["datataskcount"]["data"] = [
+        {"key": "uninterrupt_tasks", "value": int(count["uninterrupt tasks"])},
+        {"key": "running_tasks", "value": int(count["runnig tasks"])},
+    ]
+
+    new_parse_data["datauninterruptload"] = {"data": []}
+    for i in range(len(d_task_list)):
+        tmp_d_task = {}
+        tmp_d_task["key"] = d_task_list[i]["task"]
+        tmp_d_task["value"] = d_task_list[i]["weight"]
+        new_parse_data["datauninterruptload"]["data"].append(tmp_d_task)
+
+    new_parse_data["datarunningload"] = {"data": []}
+    for i in range(len(r_task_list)):
+        tmp_r_task = {}
+        tmp_r_task["key"] = r_task_list[i]["task"]
+        tmp_r_task["value"] = r_task_list[i]["weight"]
+        new_parse_data["datarunningload"]["data"].append(tmp_r_task)
+
+    new_parse_data["dataflamegraph"] = {"data": []}
+    new_parse_data["dataflamegraph"]["data"] = [
+        {"key": 0, "value": cpuflamegraph_str}]
+    out = json.dumps(new_parse_data, indent=4)
     #data = {"catalogue": parse_data["Global lamegraph"]}
     #files = {"file": open(parse_data["Global lamegraph"], 'rb')}
     #requests.post("http://127.0.0.1:8001/api/v1/host/upload_file/", data=data, files=files)
diff --git a/sysom_server/sysom_diagnosis/service_scripts/retran b/sysom_server/sysom_diagnosis/service_scripts/retran
new file mode 100644
index 0000000000000000000000000000000000000000..2f0a84f8b8236789ed32ae661ac835490f4c00ad
--- /dev/null
+++ b/sysom_server/sysom_diagnosis/service_scripts/retran
@@ -0,0 +1,36 @@
+#!/usr/bin/python3
+# coding=utf-8
+import json
+import sys
+
+###############################################################################
+## Expected frontend parameters, e.g.:
+## {
+##    "service_name":"retran",
+##    "instance":"192.168.1.101",
+##    "time":"10"
+## }
+## Command issued on the target instance:
+## sysak rtrace retran --duration <time> > /var/log/sysak/retran.json && cat /var/log/sysak/retran.json
+###############################################################################
+class Param(dict):
+    def __missing__(self,key):
+        sys.stderr.write("入参检查失败,没有检查到入参'%s'"%key)
+        exit(1)
+
+args = Param(json.loads(sys.argv[1]))
+result = {}
+result['commands'] = []
+
+cmd0 = {}
+cmd0['instance'] = args["instance"]
+
+time = args["time"]
+rtrace_cmd = "sysak rtrace retran --duration {}".format(time)
+
+cmd0['cmd'] = "mkdir -p /var/log/sysak && " + rtrace_cmd + " > /var/log/sysak/retran.json && cat /var/log/sysak/retran.json"
+
+result['commands'].append(cmd0)
+
+data = json.dumps(result)
+print(data)
diff --git a/sysom_server/sysom_diagnosis/service_scripts/retran_post b/sysom_server/sysom_diagnosis/service_scripts/retran_post
new file mode 100644
index 0000000000000000000000000000000000000000..66ca83aa6be1422b8615c33af3ec56a0fcd0e04d
--- /dev/null
+++ b/sysom_server/sysom_diagnosis/service_scripts/retran_post
@@ -0,0 +1,49 @@
+#!/usr/bin/python3
+# coding=utf-8
+import sys
+import json
+import random
+
+
+def drop_result(raw):
+    '''
+    fd = open("./test.json", 'r')
+    raw = fd.read().strip()
+    fd.close()
+    '''
+    outdata = {}
+    if raw.find("tcp_state") == -1 and len(raw) != 0:
+        outdata = {"status": "fail"}
+        print(outdata)
+        return
+
+
+    newdrop = {}
+
+    newdrop["RetranStat"] = {}
+    newdrop["RetranStat"]["data"] = []
+
+    if len(raw) != 0:
+        cnt = 0  # row index used as the "key" of each table entry
+        for s in raw.split('\n'):
+            try:
+                item = json.loads(s)
+            except Exception:  # skip blank or malformed lines
+                continue
+            newdrop["RetranStat"]["data"].append({
+                "key": str(cnt), "ip地址": item["ap"], "tcp状态": item["tcp_state"], "拥塞状态": item["ca_state"], "重传次数": item["times"]
+            })
+            cnt += 1
+
+    print(json.dumps(newdrop, ensure_ascii=False))
+
+
+def extract_params():
+    path, res, task_id = sys.argv[1], "", sys.argv[2]
+    with open(path, 'r') as tmp:
+        res = tmp.read()
+    return res, task_id
+
+if __name__ == "__main__":
+    res, _ = extract_params()
+    drop_result(res)
diff --git a/sysom_server/sysom_vmcore/apps/vmcore/migrations/0001_initial.py b/sysom_server/sysom_vmcore/apps/vmcore/migrations/0001_initial.py
new file mode 100644
index 0000000000000000000000000000000000000000..ed290f18f9726a6d4d2f64ee6ed6ec7ff57ab262
--- /dev/null
+++ b/sysom_server/sysom_vmcore/apps/vmcore/migrations/0001_initial.py
@@ -0,0 +1,92 @@
+# Generated by Django 3.2.8 on 2022-11-28 06:51
+
+from django.db import migrations, models
+import django.db.models.deletion
+import lib.utils
+
+
+class Migration(migrations.Migration):
+
+    initial = True
+
+    dependencies = [
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Issue',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')),
+                ('updated_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='更新时间')),
+                ('calltrace', models.CharField(max_length=256)),
+                ('crashkey', models.CharField(max_length=256)),
+                ('solution', models.TextField()),
+            ],
+            options={
+                'db_table': 'issue',
+            },
+        ),
+        migrations.CreateModel(
+            name='Panic',
+            fields=[
+                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
+                ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')),
+                ('updated_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='更新时间')),
+                ('name', models.CharField(max_length=128, unique=True)),
+                ('ip', models.CharField(max_length=64, null=True)),
+                ('hostname', models.CharField(max_length=128)),
+                ('vertype', models.IntegerField()),
+                ('status', models.IntegerField()),
+                ('core_time', models.DateTimeField()),
+                ('vmcore_file', models.CharField(max_length=256)),
+                ('dmesg_file', models.CharField(max_length=256)),
+                ('dmesg', models.TextField()),
+                ('title', models.CharField(max_length=128)),
+                ('ver', models.CharField(max_length=128)),
+                ('rip', models.CharField(max_length=64)),
+                ('func_name', models.CharField(max_length=64)),
+                ('comm', models.CharField(max_length=64)),
+                ('calltrace', models.CharField(max_length=256)),
+                ('crashkey', models.CharField(max_length=256)),
+                ('modules', models.CharField(max_length=512)),
+                ('upload_time', models.IntegerField()),
+                ('issue_id', models.IntegerField()),
+                ('panic_type', models.CharField(max_length=64)),
+                ('panic_class', models.CharField(max_length=64)),
+            ],
+            options={
+                'db_table': 'panic',
+            },
+        ),
+        migrations.CreateModel(
+            name='VmcoreConfig',
+            fields=[
+                ('id', 
models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('updated_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='更新时间')), + ('name', models.CharField(max_length=128)), + ('server_host', models.CharField(max_length=256)), + ('mount_point', models.CharField(max_length=256)), + ('days', models.IntegerField()), + ], + options={ + 'db_table': 'vmcore_config', + }, + ), + migrations.CreateModel( + name='Calltrace', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('updated_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='更新时间')), + ('name', models.CharField(max_length=128)), + ('line', models.CharField(max_length=128)), + ('idx', models.IntegerField()), + ('vmcore', models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, related_name='panic_call_trace', to='vmcore.panic', to_field='name')), + ], + options={ + 'db_table': 'call_trace', + }, + ), + ] diff --git a/sysom_server/sysom_vmcore/apps/vmcore/migrations/__init__.py b/sysom_server/sysom_vmcore/apps/vmcore/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_vmcore/conf/vmcore_gunicorn.py b/sysom_server/sysom_vmcore/conf/vmcore_gunicorn.py index 3304ea15bd737eb14e880fe621c7d549cac6a47f..6ce03a6eec6fc2cfb436db17c83e641ceea05518 100644 --- a/sysom_server/sysom_vmcore/conf/vmcore_gunicorn.py +++ b/sysom_server/sysom_vmcore/conf/vmcore_gunicorn.py @@ -11,11 +11,9 @@ worker_class = 'gevent' # 工作模式线程, 默认为sync模式 max_requests = 2000 # 设置最大并发数量为2000 (每个worker处理请求的工作线程) -accesslog = '/usr/local/sysom/server/logs/vmcore-service.log' +accesslog = '/usr/local/sysom/server/logs/sysom-vmcore-access.log' -errorlog = '/usr/local/sysom/server/logs/vmcore-service-error.log' - -loglevel = 'info' +loglevel = 'error' proc_name = 'vmcore_service' diff --git a/sysom_server/sysom_vul/apps/host/__init__.py b/sysom_server/sysom_vul/apps/host/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_vul/apps/host/app.py b/sysom_server/sysom_vul/apps/host/app.py new file mode 100644 index 0000000000000000000000000000000000000000..e4960645d474bc9cf94e9d57eb975c7402e4f87d --- /dev/null +++ b/sysom_server/sysom_vul/apps/host/app.py @@ -0,0 +1,10 @@ +from django.apps import AppConfig + + +class HostConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'apps.host' + + def ready(self): + # logger.info(">>> Host module loading success") + pass \ No newline at end of file diff --git a/sysom_server/sysom_vul/apps/host/models.py b/sysom_server/sysom_vul/apps/host/models.py new file mode 100644 index 0000000000000000000000000000000000000000..31a8ed36446d0641083274561665421362307182 --- /dev/null +++ b/sysom_server/sysom_vul/apps/host/models.py @@ -0,0 +1,39 @@ +from django.db import models +from lib.base_model import BaseModel + + +class HostModel(BaseModel): + HOST_STATUS_CHOICES = ( + (0, 'running'), + (1, 'error'), + (2, 'offline') + ) + + hostname = models.CharField(max_length=100, unique=True) + ip = models.CharField(max_length=100, 
unique=True) + port = models.IntegerField() + username = models.CharField(max_length=100) + private_key = models.TextField(null=True) + description = models.CharField(max_length=255, null=True) + status = models.IntegerField(choices=HOST_STATUS_CHOICES, default=2, verbose_name="主机状态") + client_deploy_cmd = models.TextField(verbose_name="client部署命令", default="") + cluster = models.ForeignKey('Cluster', on_delete=models.CASCADE, related_name='hosts', default="") + # created_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name="c_hosts") + created_by = models.IntegerField(verbose_name='创建用户') + # deleted_by = models.ForeignKey(User, null=True, on_delete=models.CASCADE, related_name="d_hosts") + + class Meta: + db_table = "sys_host" + + def __str__(self): + return f'主机:{self.hostname}' + +class Cluster(BaseModel): + cluster_name = models.CharField(max_length=128, unique=True) + cluster_description = models.CharField(max_length=255, default="") + + class Meta: + db_table = "sys_cluster" + + def __str__(self) -> str: + return f'集群: {self.cluster_name}' diff --git a/sysom_server/sysom_vul/apps/host/urls.py b/sysom_server/sysom_vul/apps/host/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..94278ec0eaf47eea9a3c30c5281097a8a0fb99bb --- /dev/null +++ b/sysom_server/sysom_vul/apps/host/urls.py @@ -0,0 +1,2 @@ + +urlpatterns = [] diff --git a/sysom_server/sysom_vul/apps/vul/__init__.py b/sysom_server/sysom_vul/apps/vul/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_api/apps/vul/admin.py b/sysom_server/sysom_vul/apps/vul/admin.py similarity index 100% rename from sysom_server/sysom_api/apps/vul/admin.py rename to sysom_server/sysom_vul/apps/vul/admin.py diff --git a/sysom_server/sysom_api/apps/vul/apps.py b/sysom_server/sysom_vul/apps/vul/apps.py similarity index 38% rename from sysom_server/sysom_api/apps/vul/apps.py rename to sysom_server/sysom_vul/apps/vul/apps.py index 8f439c763d493c1f7ac028c22a03a99a9654f766..1fdd2def890e277e2cbdaccca5d62c241dbe937d 100644 --- a/sysom_server/sysom_api/apps/vul/apps.py +++ b/sysom_server/sysom_vul/apps/vul/apps.py @@ -1,4 +1,5 @@ import logging +import sys from django.apps import AppConfig from django.db.models.signals import post_migrate logger = logging.getLogger(__name__) @@ -10,30 +11,39 @@ class VulConfig(AppConfig): def ready(self) -> None: post_migrate.connect(initialization_vul_config, sender=self) - bind_ssh_key() + from django.conf import settings + if ('runserver' in sys.argv or 'manage.py' not in sys.argv): + from cec_base.log import LoggerHelper, LoggerLevel + from channel_job.job import default_channel_job_executor + LoggerHelper.update_sys_stdout_sink(LoggerLevel.LOGGER_LEVEL_INFO) + # 初始化 channel_job sdk + default_channel_job_executor.init_config(settings.CHANNEL_JOB_URL) + default_channel_job_executor.start() + else: + # from cec_base.log import LoggerHelper, LoggerLevel + # from channel_job.job import default_channel_job_executor + # LoggerHelper.update_sys_stdout_sink(LoggerLevel.LOGGER_LEVEL_INFO) + # # 初始化 channel_job sdk + # default_channel_job_executor.init_config(settings.CHANNEL_JOB_URL) + # default_channel_job_executor.start() + + # job_result = default_channel_job_executor.dispatch_job( + # channel_type="ssh", + # channel_opt='cmd', + # params={ + # 'instance': "127.0.0.1", + # "command": "pwd", + # }, + # timeout=5000, + # auto_retry=True + # ).execute() + # print(job_result.code) + # 
print(job_result.result) + # 这边执行数据库迁移等操作的时候执行一些处理代码 + pass logger.info(">>> Vul module loading success") -def bind_ssh_key(): - from django.conf import settings - from lib.ssh import SSH - - def private_key_getter() -> str: - result = "" - with open(settings.SSH_CHANNEL_KEY_PRIVATE) as f: - result = f.read() - return result - - def public_key_getter() -> str: - result = "" - with open(settings.SSH_CHANNEL_KEY_PUB) as f: - result = f.read() - return result - - SSH.set_private_key_getter(private_key_getter) - SSH.set_public_key_getter(public_key_getter) - - def initialization_vul_config(sender, **kwargs): try: from .models import VulAddrModel diff --git a/sysom_server/sysom_api/apps/vul/models.py b/sysom_server/sysom_vul/apps/vul/models.py similarity index 96% rename from sysom_server/sysom_api/apps/vul/models.py rename to sysom_server/sysom_vul/apps/vul/models.py index 8cf322b174c2df56698540cb50c7888b0ff50ed4..102d16eeefe03ac567da595614e77e639479f614 100644 --- a/sysom_server/sysom_api/apps/vul/models.py +++ b/sysom_server/sysom_vul/apps/vul/models.py @@ -2,7 +2,7 @@ from django.db import models from django.utils.translation import ugettext_lazy as _ from lib.base_model import BaseModel, human_datetime from apps.host.models import HostModel -from apps.accounts.models import User +# from apps.accounts.models import User # Create your models here. @@ -86,7 +86,8 @@ class SecurityAdvisoryFixHistoryModel(BaseModel): cve_id = models.CharField(max_length=100) vul_level = models.CharField(max_length=100) fixed_at = models.CharField(max_length=20, default=human_datetime, verbose_name="修复时间") - created_by = models.ForeignKey(User, on_delete=models.SET_DEFAULT, default=None) + # created_by = models.ForeignKey(User, on_delete=models.SET_DEFAULT, default=None) + created_by = models.IntegerField(default='创建者ID') status = models.CharField(max_length=20, default="success", verbose_name="修复状态") host = models.ManyToManyField(to=HostModel, verbose_name='关联主机', through="SaFixHistToHost") diff --git a/sysom_server/sysom_api/apps/vul/serializer.py b/sysom_server/sysom_vul/apps/vul/serializer.py similarity index 100% rename from sysom_server/sysom_api/apps/vul/serializer.py rename to sysom_server/sysom_vul/apps/vul/serializer.py diff --git a/sysom_server/sysom_api/apps/vul/ssh_pool.py b/sysom_server/sysom_vul/apps/vul/ssh_pool.py similarity index 73% rename from sysom_server/sysom_api/apps/vul/ssh_pool.py rename to sysom_server/sysom_vul/apps/vul/ssh_pool.py index 92cf37459e71f0b6275ba2cf06d03d6695854edc..5c9da4279a65ac821b6c7f4537a4f5c65cdc2d91 100644 --- a/sysom_server/sysom_api/apps/vul/ssh_pool.py +++ b/sysom_server/sysom_vul/apps/vul/ssh_pool.py @@ -10,8 +10,10 @@ import queue import logging import multiprocessing import time +from django.conf import settings +from channel_job.job import default_channel_job_executor, JobResult -from lib.ssh import SSH +# from lib.ssh import SSH class SshProcessQueueManager: @@ -26,22 +28,27 @@ class SshProcessQueueManager: self.forks = min(len(self.hosts), max(self.DEFAULT_FORKS, cpu_count)) def ssh_command(self, que, host, cmd): - ssh_cli = SSH(hostname=host.ip, port=host.port, username=host.username) + # ssh_cli = SSH(hostname=host.ip, port=host.port, username=host.username) # ssh_cli = SSH(host.ip, host.port, host.username, host.private_key) - try: - status, result = ssh_cli.run_command(cmd) - que.put({'host': host.hostname, - 'ret': { - "status": status, - "result": result - }}) - except Exception as e: - logging.error(e) - que.put({'host': host.hostname, - 'ret': { - 
"status": 1, - "result": e - }}) + default_channel_job_executor.init_config(settings.CHANNEL_JOB_URL) + default_channel_job_executor.start() + + job_result = default_channel_job_executor.dispatch_job( + channel_type="ssh", + channel_opt='cmd', + params={ + 'instance': host.ip, + "command": cmd + }, + timeout=5000, + auto_retry=True + ).execute() + # status, result = ssh_cli.run_command(cmd) + que.put({'host': host, + 'ret': { + "status": job_result.code, + "result": job_result.result + }}) def run_subprocess(self, hosts, func, *args): que = multiprocessing.Queue() diff --git a/sysom_server/sysom_api/apps/vul/tests.py b/sysom_server/sysom_vul/apps/vul/tests.py similarity index 100% rename from sysom_server/sysom_api/apps/vul/tests.py rename to sysom_server/sysom_vul/apps/vul/tests.py diff --git a/sysom_server/sysom_api/apps/vul/urls.py b/sysom_server/sysom_vul/apps/vul/urls.py similarity index 73% rename from sysom_server/sysom_api/apps/vul/urls.py rename to sysom_server/sysom_vul/apps/vul/urls.py index f4cd74cbbab61202703a034984420405ec8ac599..6a0ea7958aa4179926198941a0d1e583e80013b8 100644 --- a/sysom_server/sysom_api/apps/vul/urls.py +++ b/sysom_server/sysom_vul/apps/vul/urls.py @@ -6,20 +6,21 @@ @Email : weidong@uniontech.com @Software: PyCharm """ -from django.urls import path,include +from django.urls import path, include from rest_framework.routers import DefaultRouter from apps.vul import views router = DefaultRouter() -router.register(r'vul-config', views.VulAddrViewSet) +router.register('config', views.VulAddrViewSet) urlpatterns = [ - path('api/v1/', include(router.urls)), + path('api/v1/vul/', views.VulListView.as_view()), + path('api/v1/vul/', include(router.urls)), path('api/v1/vul/hist/', views.SaFixHistListView.as_view()), - path('api/v1/vul/hist///', views.SaFixHistDetailHostView.as_view()), + path('api/v1/vul/hist///', + views.SaFixHistDetailHostView.as_view()), path('api/v1/vul/hist//', views.SaFixHistDetailsView.as_view()), path('api/v1/vul/summary/', views.VulSummaryView.as_view()), path('api/v1/vul/updatesa/', views.UpdateSaView.as_view()), - path('api/v1/vul/', views.VulListView.as_view()), path('api/v1/vul//', views.VulDetailsView.as_view()), ] diff --git a/sysom_server/sysom_api/apps/vul/views.py b/sysom_server/sysom_vul/apps/vul/views.py similarity index 96% rename from sysom_server/sysom_api/apps/vul/views.py rename to sysom_server/sysom_vul/apps/vul/views.py index 667c6aa3a68b9e0da94e9619ecdadf22c430a417..2e38ced159dc827c54622b58d9895c1e0b8249ab 100644 --- a/sysom_server/sysom_api/apps/vul/views.py +++ b/sysom_server/sysom_vul/apps/vul/views.py @@ -11,8 +11,9 @@ from django_apscheduler.jobstores import register_job from tzlocal import get_localzone from django.utils.timezone import localdate, localtime from django_filters.rest_framework import DjangoFilterBackend +from loguru import logger as log from lib.response import * -from apps.accounts.authentication import Authentication +# from apps.accounts.authentication import Authentication from apps.vul.models import * from apps.vul.vul import update_sa as upsa, update_vul as upvul from apps.vul.vul import fix_cve, get_unfix_cve @@ -37,7 +38,7 @@ scheduler.start() class VulListView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get(self, request, format=None): """ @@ -104,7 +105,7 @@ class VulListView(APIView): class VulDetailsView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get(self, request, cve_id, 
format=None): """ @@ -169,7 +170,7 @@ class VulDetailsView(APIView): class VulSummaryView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get(self, request, format=None): """ @@ -219,7 +220,7 @@ class VulSummaryView(APIView): class SaFixHistListView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get(self, request, format=None): sa_fix_hist = SecurityAdvisoryFixHistoryModel.objects.all() @@ -233,7 +234,7 @@ class SaFixHistListView(APIView): class SaFixHistDetailsView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get_cve2host_details(self, sa_fix_host_obj): hostname = sa_fix_host_obj.host.hostname @@ -262,7 +263,7 @@ class SaFixHistDetailsView(APIView): class SaFixHistDetailHostView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def get(self, request, pk, hostname, format=None): sa_fix_hist_details_host = SaFixHistToHost.objects.filter(sa_fix_hist_id=pk, host__hostname=hostname).first() @@ -275,7 +276,7 @@ class SaFixHistDetailHostView(APIView): class UpdateSaView(APIView): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] def post(self, request): """ @@ -304,7 +305,7 @@ class UpdateSaView(APIView): class VulAddrViewSet(viewsets.ModelViewSet): - authentication_classes = [Authentication] + # authentication_classes = [Authentication] queryset = VulAddrModel.objects.all() serializer_class = VulAddrListSerializer filter_backends = [DjangoFilterBackend] @@ -355,7 +356,7 @@ class VulAddrViewSet(viewsets.ModelViewSet): headers[ "User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) Chrome/99.0.4844.51" - if body.get("authorization_type").lower() == "basic" and body.get("authorization_body"): + if body.get("authorization_body") and body.get("authorization_type").lower() == "basic": authorization_body = body.get("authorization_body") auth = (authorization_body["username"], authorization_body["password"]) else: diff --git a/sysom_server/sysom_api/apps/vul/vul.py b/sysom_server/sysom_vul/apps/vul/vul.py similarity index 99% rename from sysom_server/sysom_api/apps/vul/vul.py rename to sysom_server/sysom_vul/apps/vul/vul.py index b351fa6d2489791befb5634afad9d7bd6efb421d..8dbc73e4079f477b50e37ddff91af5e8c2ec53c2 100644 --- a/sysom_server/sysom_api/apps/vul/vul.py +++ b/sysom_server/sysom_vul/apps/vul/vul.py @@ -11,6 +11,7 @@ import requests import datetime import json import re +from loguru import logger from django.utils import timezone from django.db.models import Q from rest_framework import status @@ -320,7 +321,7 @@ def fix_cve(hosts, cve_id, user): spqm = SshProcessQueueManager(list(HostModel.objects.filter(hostname__in=hosts))) results = spqm.run(spqm.ssh_command, cmd) fixed_time = human_datetime() - user_obj = user + user_obj = user.get('id', 1) vul_level = SecurityAdvisoryModel.objects.filter(cve_id=cve_id).first().vul_level cve_status = "success" init = True diff --git a/sysom_server/sysom_vul/conf/__init__.py b/sysom_server/sysom_vul/conf/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_vul/conf/common.py b/sysom_server/sysom_vul/conf/common.py new file mode 100644 index 0000000000000000000000000000000000000000..94b698a93d0fa2f08f832e77768a507b6ff8978f --- /dev/null +++ b/sysom_server/sysom_vul/conf/common.py @@ -0,0 +1,268 @@ +""" 
+Django settings for sysom_vul project. + +Generated by 'django-admin startproject' using Django 3.2.8. + +For more information on this file, see +https://docs.djangoproject.com/en/3.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.2/ref/settings/ +""" +import os +from pathlib import Path + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = 'django-insecure-^d8b9di9w&-mmsbpt@)o#e+2^z+^m4nhf+z8304%9@8y#ko46l' + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = True + +ALLOWED_HOSTS = ['*'] + + +# Application definition + +INSTALLED_APPS = [ + 'apps.vul', + 'apps.host', + + 'rest_framework', + 'corsheaders', + 'django.contrib.staticfiles', + 'drf_yasg', # 在线API文档 + 'django_filters', + 'django_apscheduler', +] + +MIDDLEWARE = [ + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.middleware.security.SecurityMiddleware', + 'django.middleware.common.CommonMiddleware', +] + +ROOT_URLCONF = 'sysom_vul.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'sysom_vul.wsgi.application' +ASGI_APPLICATION = 'sysom_vul.asgi.application' + + +# Database +# https://docs.djangoproject.com/en/3.2/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.mysql', + 'NAME': 'sysom', + 'USER': 'sysom', + 'PASSWORD': 'sysom_admin', + 'HOST': '127.0.0.1', + 'PORT': '3306', + } +} + + +# Password validation +# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/3.2/topics/i18n/ + +LANGUAGE_CODE = 'zh-hans' + +TIME_ZONE = 'Asia/Shanghai' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.2/howto/static-files/ + +STATIC_URL = '/static/' + +# Default primary key field type +# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + + +# JWT Token Decode DIR +JWT_TOKEN_DECODE_DIR = os.path.join(BASE_DIR, 'lib', 'decode') +if not os.path.exists(JWT_TOKEN_DECODE_DIR): + os.makedirs(JWT_TOKEN_DECODE_DIR) + +######################################################################################### +# rest_framework settings +######################################################################################### +REST_FRAMEWORK = { + 'DEFAULT_PERMISSION_CLASSES': ( + # 'rest_framework.permissions.IsAuthenticated' + ), + 
'DEFAULT_AUTHENTICATION_CLASSES': [ + 'lib.authentications.VulAuthentication' + ], + 'UNAUTHENTICATED_USER': None, + 'DEFAULT_VERSIONING_CLASS': "rest_framework.versioning.URLPathVersioning", + 'DEFAULT_VERSION': 'v1', # 默认版本 + 'ALLOWED_VERSIONS': ['v1', 'v2'], # 允许的版本 + 'VERSION_PARAM': 'version', + + # 'DEFAULT_RENDERER_CLASSES': ( + # 'lib.renderers.SysomJsonRender', + # ), + 'DEFAULT_PAGINATION_CLASS': 'lib.paginations.Pagination', + 'UNICODE_JSON': True, + 'EXCEPTION_HANDLER': 'lib.exception.exception_handler' +} + +######################################################################################### +# 格式:[2020-04-22 23:33:01][micoservice.apps.ready():16] [INFO] 这是一条日志: +# 格式:[日期][模块.函数名称():行号] [级别] 信息 +######################################################################################### +STANDARD_LOG_FORMAT = '[%(levelname).4s] -- %(asctime)s -- P_%(process) -- d_T_%(thread)d ' \ + '- <%(module)s:%(lineno)d>: %(message)s' +CONSOLE_LOG_FORMAT = '[%(levelname).4s] -- %(asctime)s -- P_%(process) -- d_T_%(thread)d ' \ + '- <%(module)s:%(lineno)d>: %(message)s' + +if not os.path.exists(os.path.join(BASE_DIR, 'logs')): + os.makedirs(os.path.join(BASE_DIR, 'logs')) + +SERVER_LOGS_FILE = os.path.join(BASE_DIR, 'logs', 'sys_vul_info.log') +ERROR_LOGS_FILE = os.path.join(BASE_DIR, 'logs', 'sys_vul_error.log') + +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'standard': { + 'format': STANDARD_LOG_FORMAT + }, + 'console': { + 'format': CONSOLE_LOG_FORMAT, + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + 'file': { + 'format': CONSOLE_LOG_FORMAT, + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + }, + 'handlers': { + 'file': { + 'level': 'INFO', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': SERVER_LOGS_FILE, + 'maxBytes': 1024 * 1024 * 100, # 100 MB + 'backupCount': 5, # 最多备份5个 + 'formatter': 'standard', + 'encoding': 'utf-8', + }, + 'error': { + 'level': 'ERROR', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': ERROR_LOGS_FILE, + 'maxBytes': 1024 * 1024 * 100, # 100 MB + 'backupCount': 3, # 最多备份3个 + 'formatter': 'standard', + 'encoding': 'utf-8', + }, + 'console': { + 'level': 'INFO', + 'class': 'logging.StreamHandler', + 'formatter': 'console', + } + }, + 'loggers': { + # default日志 + '': { + 'handlers': ['console', 'error', 'file'], + 'level': 'INFO', + }, + 'django': { + 'handlers': ['console', 'error', 'file'], + 'level': 'INFO', + }, + 'scripts': { + 'handlers': ['console', 'error', 'file'], + 'level': 'INFO', + }, + # 数据库相关日志 + 'django.db.backends': { + 'handlers': [], + 'propagate': True, + 'level': 'INFO', + }, + } +} + + +################################################################## +# Cec settings +################################################################## +SYSOM_CEC_URL = "redis://localhost:6379?cec_default_max_len=1000&cec_auto_mk_topic=true" +SYSOM_CEC_ALARM_TOPIC = "CEC-SYSOM-ALARM" +# 通道模块用于对外开放,投递操作的主题 +SYSOM_CEC_CHANNEL_TOPIC = "SYSOM_CEC_CHANNEL_TOPIC" +# 通道模块用于投递执行结果的主题 +SYSOM_CEC_CHANNEL_RESULT_TOPIC = "SYSOM_CEC_CHANNEL_RESULT_TOPIC" +# 诊断模块用于接收通道执行结果的主题 +SYSOM_CEC_CHANNEL_DIAGNOSIS_TOPIC = "SYSOM_CEC_CHANNEL_DIAGNOSIS_TOPIC" +# 诊断模块消费组 +SYSOM_CEC_DIAGNOSIS_CONSUMER_GROUP = "SYSOM_CEC_DIAGNOSIS_CONSUMER_GROUP" +# 诊断模块用于汇报最终诊断执行结果的主题 +SYSOM_CEC_DIAGNOSIS_RESULT_TOPIC = "SYSOM_CEC_DIAGNOSIS_RESULT_TOPIC" +# 用于分发插件系统相关事件的主题 +SYSOM_CEC_PLUGIN_TOPIC = "SYSOM_CEC_PLUGIN_TOPIC" + +# 安全模块用于接收通道执行结果的主题 +SYSOM_CEC_CHANNEL_VUL_TOPIC = "SYSOM_CEC_CHANNEL_VUL_TOPIC" +# 安全模块消费组 +SYSOM_CEC_VUL_CONSUMER_GROUP = 
"SYSOM_CEC_VUL_CONSUMER_GROUP" + +# channl_job SDK 需要的url +CHANNEL_JOB_URL = f"{SYSOM_CEC_URL}&channel_job_target_topic={SYSOM_CEC_CHANNEL_TOPIC}" \ + f"&channel_job_listen_topic={SYSOM_CEC_CHANNEL_VUL_TOPIC}" \ + f"&channel_job_consumer_group={SYSOM_CEC_VUL_CONSUMER_GROUP}" diff --git a/sysom_server/sysom_vul/conf/develop.py b/sysom_server/sysom_vul/conf/develop.py new file mode 100644 index 0000000000000000000000000000000000000000..55a9b74d4e988f25d79948ff07002a1a01baf0ef --- /dev/null +++ b/sysom_server/sysom_vul/conf/develop.py @@ -0,0 +1,7 @@ +from .common import * + +''' +开发环境配置项 +''' + +DEBUG = True \ No newline at end of file diff --git a/sysom_server/sysom_vul/conf/product.py b/sysom_server/sysom_vul/conf/product.py new file mode 100644 index 0000000000000000000000000000000000000000..d286e3707bc30ed783b0c1f5dba40e48abaa72ae --- /dev/null +++ b/sysom_server/sysom_vul/conf/product.py @@ -0,0 +1,7 @@ +from .common import * + +''' +生产环境配置项 +''' + +DEBUG = False diff --git a/sysom_server/sysom_vul/conf/testing.py b/sysom_server/sysom_vul/conf/testing.py new file mode 100644 index 0000000000000000000000000000000000000000..3039c80c49e162334ccdc91cfb151a3c40b0cd79 --- /dev/null +++ b/sysom_server/sysom_vul/conf/testing.py @@ -0,0 +1,6 @@ +from .common import * + +''' +测试环境配置项 +''' +DEBUG = True diff --git a/sysom_server/sysom_vul/conf/vul_gunicorn.py b/sysom_server/sysom_vul/conf/vul_gunicorn.py new file mode 100644 index 0000000000000000000000000000000000000000..c24aa4f06620bbf95e8b270161eb5d63bc7137a3 --- /dev/null +++ b/sysom_server/sysom_vul/conf/vul_gunicorn.py @@ -0,0 +1,20 @@ +''' +Vul Service Gunicorn Settings +''' +workers = 2 # 指定工作进程数 + +threads = 3 + +bind = '127.0.0.1:7005' + +worker_class = 'gevent' # 工作模式线程, 默认为sync模式 + +max_requests = 2000 # 设置最大并发数量为2000 (每个worker处理请求的工作线程) + +accesslog = '/usr/local/sysom/server/logs/sysom-vul-access.log' + +loglevel = 'error' + +proc_name = 'vul_service' + +raw_env = 'DJANGO_SETTINGS_MODULE=sysom_vul.settings' diff --git a/sysom_server/sysom_vul/lib/authentications.py b/sysom_server/sysom_vul/lib/authentications.py new file mode 100644 index 0000000000000000000000000000000000000000..db94c1f3a32744088141e95401f67708ece69cb0 --- /dev/null +++ b/sysom_server/sysom_vul/lib/authentications.py @@ -0,0 +1,52 @@ +import logging +import os +from typing import List +from django.conf import settings +from django.utils.translation import ugettext as _ +from rest_framework.exceptions import AuthenticationFailed +from rest_framework.request import Request +from rest_framework.authentication import BaseAuthentication +from .utils import import_module + + +logger = logging.getLogger(__name__) + + +def get_jwt_decode_classes() -> List[BaseAuthentication]: + jwt_decode_classes = [] + import_strings = [ + f'lib.decode.{f.replace(".py", "")}' for f in os.listdir(settings.JWT_TOKEN_DECODE_DIR) + ] + for string in import_strings: + module = import_module(string) + try: + m = getattr(module, 'JWTTokenDecode') + jwt_decode_classes.append(m) + except Exception as exc: + logger.warn(exc) + return jwt_decode_classes + + +def decode_token(token: str) -> dict: + error_message, success, result = "", False, {} + for auth_class in get_jwt_decode_classes(): + result, success = auth_class().decode(token) + if not success: + error_message += result + else: + break + if not success: + raise AuthenticationFailed(error_message) + return result + + +class VulAuthentication(BaseAuthentication): + def authenticate(self, request: Request): + token = 
request.META.get('HTTP_AUTHORIZATION') + if token is None: + raise AuthenticationFailed(detail='Not Authorization Token!') + payload = decode_token(token) + payload['token'] = token + if 'sub' in payload: + payload['id'] = int(payload['sub']) + return payload, _ diff --git a/sysom_server/sysom_vul/lib/base_model.py b/sysom_server/sysom_vul/lib/base_model.py new file mode 100644 index 0000000000000000000000000000000000000000..6c7de7062ac87efdbe6a3a6f2e834be844ba5e1d --- /dev/null +++ b/sysom_server/sysom_vul/lib/base_model.py @@ -0,0 +1,19 @@ +# -*- encoding: utf-8 -*- +""" +@File : base_model.py +@Time : 2021/10/28 11:04 +@Author : DM +@Software: PyCharm +""" + +from django.db import models +from lib.utils import human_datetime + + +class BaseModel(models.Model): + """abstract model""" + created_at = models.CharField(max_length=20, default=human_datetime, verbose_name="创建时间") + deleted_at = models.CharField(max_length=20, null=True) + + class Meta: + abstract = True diff --git a/sysom_server/sysom_vul/lib/decode/sysom_decode.py b/sysom_server/sysom_vul/lib/decode/sysom_decode.py new file mode 100644 index 0000000000000000000000000000000000000000..12c7c1a06b69542f414da73b7defa6768b538b2f --- /dev/null +++ b/sysom_server/sysom_vul/lib/decode/sysom_decode.py @@ -0,0 +1,17 @@ +import jwt +from django.conf import settings + + +class JWTTokenDecode: + """SYSOM TOken解析认证""" + def decode(self, token): + r, s = None, False + try: + r, s = jwt.decode(token, key=settings.SECRET_KEY, algorithms='HS256'), True + except jwt.exceptions.ExpiredSignatureError as e: + r = f'令牌失效: {e}' + except jwt.exceptions.DecodeError as e: + r = f'令牌校验失败: {e}' + except jwt.exceptions.InvalidAlgorithmError as e: + r = f'令牌不合法: {e}' + return r, s \ No newline at end of file diff --git a/sysom_server/sysom_vul/lib/exception.py b/sysom_server/sysom_vul/lib/exception.py new file mode 100644 index 0000000000000000000000000000000000000000..25aa22ea6eb6f94dbd77a1bb2b43d60edb602d37 --- /dev/null +++ b/sysom_server/sysom_vul/lib/exception.py @@ -0,0 +1,63 @@ +import logging +import traceback + +from django.db.models import ProtectedError +from rest_framework.views import set_rollback +from rest_framework import exceptions +from rest_framework.exceptions import APIException as DRFAPIException, AuthenticationFailed, NotAuthenticated + +from .response import ErrorResponse + + +logger = logging.getLogger(__name__) + + +class APIException(Exception): + def __init__(self, code=400, message='API异常', args=('API异常',)): + self.code = code + self.message = message + self.args = args + + def __str__(self): + return self.message + + +class FileNotFoundException(Exception): + def __init__(self, code=404, message='文件不存在'): + self.code = code + self.message = message + + def __str__(self): + return self.message + + +def exception_handler(exc, context): + """自定义异常处理""" + msg = '' + code = 400 + + if isinstance(exc, FileNotFoundException): + code = exc.code + msg = exc.message + if isinstance(exc, AuthenticationFailed): + code = 403 + msg = exc.detail + elif isinstance(exc, NotAuthenticated): + code = 402 + msg = exc.detail + elif isinstance(exc, DRFAPIException): + set_rollback() + # print(exc.detail) + # msg = {str(e) for e in exc.detail} + msg = exc.detail + elif isinstance(exc, exceptions.APIException): + set_rollback() + msg = exc.detail + elif isinstance(exc, ProtectedError): + set_rollback() + msg = "删除失败:该条数据与其他数据有相关绑定" + elif isinstance(exc, Exception): + logger.error(traceback.format_exc()) + msg = str(exc) # 原样输出错误 + + return 
ErrorResponse(msg=msg, code=code, status=code) diff --git a/sysom_server/sysom_vul/lib/paginations.py b/sysom_server/sysom_vul/lib/paginations.py new file mode 100644 index 0000000000000000000000000000000000000000..b14639bb2a3fcd04760920db1308d1af6d614410 --- /dev/null +++ b/sysom_server/sysom_vul/lib/paginations.py @@ -0,0 +1,26 @@ +''' +@File: paginations.py +@Time: 2021-12-14 13:46:02 +@Author: DM +@Desc: Local Paginations Class +''' + +from rest_framework.pagination import PageNumberPagination +from lib.response import success + + +class Pagination(PageNumberPagination): + page_query_param = "current" + page_size_query_param = "pageSize" + + def paginate_queryset(self, queryset, request, view=None): + self.max_page_size = queryset.count() + return super().paginate_queryset(queryset, request, view=view) + + def get_paginated_response(self, data): + return success(message="获取成功", result=data, total=self.page.paginator.count) + + def get_page_size(self, request): + if not request.query_params.get(self.page_size_query_param, None): + return self.max_page_size + return super().get_page_size(request) \ No newline at end of file diff --git a/sysom_server/sysom_vul/lib/response.py b/sysom_server/sysom_vul/lib/response.py new file mode 100644 index 0000000000000000000000000000000000000000..a2311a42c2d1fafd7106a1fcd7ed7ae81c9a5467 --- /dev/null +++ b/sysom_server/sysom_vul/lib/response.py @@ -0,0 +1,68 @@ +from rest_framework.response import Response +from rest_framework import status +from django.http import FileResponse + + +def _response(data=None, status=None): + return Response(data=data, status=status) + + +def success(result, message="success", success=True, code=status.HTTP_200_OK, **kwargs): + data = { + "code": code, + "message": message, + "data": result, + "success": success + } + data.update(kwargs) + return _response(data=data, status=code) + + +def not_found(code=status.HTTP_404_NOT_FOUND, success=False, message="Not Found"): + data = { + "code": code, + "message": message, + "success": success, + } + + return _response(data=data, status=code) + + +def not_permission(code=status.HTTP_403_FORBIDDEN, success=False, message="Not Permission"): + data = { + "code": code, + "success": success, + "message": message + } + return _response(data=data, status=code) + + +def other_response(result=dict(), message="", success=True, code=status.HTTP_200_OK, **kwargs): + data = { + "code": code, + "message": message, + "data": result, + "success": success + } + data.update(kwargs) + return _response(data=data, status=code) + + +class ErrorResponse(Response): + """ + 标准响应错误的返回,ErrorResponse(msg='xxx') + 默认错误码返回400, 也可以指定其他返回码:ErrorResponse(code=xxx) + """ + + def __init__(self, data=None, msg='error', code=400, status=None, template_name=None, headers=None, + exception=False, content_type=None): + std_data = { + "code": code, + "data": data or {}, + "message": msg + } + super().__init__(std_data, status, template_name, headers, exception, content_type) + + +class FileResponseAlter(FileResponse): + pass diff --git a/sysom_server/sysom_vul/lib/utils.py b/sysom_server/sysom_vul/lib/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c553c24590b0b7eb558d513bed073b7a8ad0f402 --- /dev/null +++ b/sysom_server/sysom_vul/lib/utils.py @@ -0,0 +1,198 @@ + +# -*- encoding: utf-8 -*- +""" +@File : utils.py +@Time : 2021/10/28 11:09 +@Author : DM +@Software: PyCharm +""" +import time +import uuid as UUID +from typing import List +import json +import logging +import jwt +import requests 
+ +from importlib import import_module +from datetime import datetime, date as datetime_date +from decimal import Decimal + +from django.conf import settings +from apscheduler.schedulers.background import BackgroundScheduler +from paramiko.rsakey import RSAKey +from io import StringIO + + +logger = logging.getLogger(__name__) + +job_defaults = { + 'max_instances': 10, + 'misfire_grace_time': None, + 'coalesce': True, +} +scheduler = BackgroundScheduler(job_defaults=job_defaults) +scheduler.start() + + +CHAR_SET = ("a", "b", "c", "d", "e", "f", + "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", + "t", "u", "v", "w", "x", "y", "z", "0", "1", "2", "3", "4", "5", + "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "I", + "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", + "W", "X", "Y", "Z") + + +def human_datetime(date=None): + if date: + assert isinstance(date, datetime) + else: + date = datetime.now() + return date.strftime('%Y-%m-%d %H:%M:%S') + + +# 转换时间格式到字符串 +def datetime_str(date=None): + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S") + + +# 日期json序列化 +class DateTimeEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, datetime): + return o.strftime('%Y-%m-%d %H:%M:%S') + elif isinstance(o, datetime_date): + return o.strftime('%Y-%m-%d') + elif isinstance(o, Decimal): + return float(o) + return json.JSONEncoder.default(self, o) + + +def get_request_real_ip(headers: dict): + x_real_ip = headers.get('x-forwarded-for') + if not x_real_ip: + x_real_ip = headers.get('x-real-ip', '') + return x_real_ip.split(',')[0] + + +def uuid_36(): + """ + 返回36字符的UUID字符串(十六进制,含有-) bc5debab-95c3-4430-933f-2e3b6407ac30 + :return: + """ + return str(UUID.uuid4()) + + +def uuid_32(): + """ + 返回32字符的UUID字符串(十六进制) bc5debab95c34430933f2e3b6407ac30 + :return: + """ + return uuid_36().replace('-', '') + + +def uuid_8(): + """ + 返回8字符的UUID字符串(非进制) 3FNWjtlD + :return: + """ + s = uuid_32() + result = '' + for i in range(0, 8): + sub = s[i * 4: i * 4 + 4] + x = int(sub, 16) + result += CHAR_SET[x % 0x3E] + return result + + +def url_format_dict(url_params: str): + """转化查询参数为dict""" + result = dict() + try: + for item in [{p.split('=')[0]: p.split('=')[1]} for p in url_params.split('&')]: + result.update(item) + except Exception as e: + logger.error(str(e)) + return result + + +def import_string(dotted_path: str): + """ + 优化import_module + Args: + dotted_path 动态导包路径 + Return Package + """ + try: + module_path, class_name = dotted_path.rsplit('.', 1) + except ValueError as err: + raise ImportError("%s doesn't look like a module path" % dotted_path) from err + module = import_module(dotted_path) + + try: + getattr(module, 'Channel') + return module + except AttributeError as err: + raise ImportError('Module "%s" does not define a "%s" attribute/class' % ( + module_path, class_name) + ) from err + +def valid_params(require_params: dict, current_params: dict) -> List[str]: + missing_param_list = [] + for param in require_params: + if param not in current_params: + missing_param_list.append(param) + return missing_param_list + +def generate_key(): + key_obj = StringIO() + key = RSAKey.generate(2048) + key.write_private_key(key_obj) + return key_obj.getvalue(), 'ssh-rsa ' + key.get_base64() + + +class HTTP: + @classmethod + def request(cls, method: str, url: str, token, data: dict, **kwargs): + status, result = 0, '' + headers = { + 'Authorization': token, + 'Content-Type': 'application/json' + } + method = method.upper() + methods = ['GET', 'POST', 'PATCH', 
'PUT', 'DELETE'] + if method not in methods: + raise Exception('请求方式不存在!') + data = json.dumps(data) + for _ in range(3): + try: + response = requests.request(method=method, url=url, json=None, headers=headers, data=data, **kwargs) + if response.status_code != 200: + status = response.status_code + data = response.json() + result = data['message'] + break + else: + resp = response.json() + status, result = response.status_code, resp['data'] + break + except requests.exceptions.ConnectTimeout as e: + logger.info('Request Timeout, retry...') + status = 400 + result = '请求超时, 重试三次' + + return status, result + + +class JWT: + @staticmethod + def _encode(payload: dict, exp: int=60 * 5): + """ + 生成JWT Token + :args + payload 载体 + exp 过期时间 (单位秒) 默认时间5分钟 + """ + payload['exp'] = time.time() + exp + # 默认不可逆加密算法为HS256 + return jwt.encode(payload, settings.SECRET_KEY, algorithm="HS256") diff --git a/sysom_server/sysom_vul/manage.py b/sysom_server/sysom_vul/manage.py new file mode 100644 index 0000000000000000000000000000000000000000..e93d9d397825ae4a3efa5d58c0190318f821845b --- /dev/null +++ b/sysom_server/sysom_vul/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sysom_vul.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" + ) from exc + execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/sysom_server/sysom_vul/sysom_vul/__init__.py b/sysom_server/sysom_vul/sysom_vul/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9c0f75627493779a57ce48f0b76e35cc87b8984a --- /dev/null +++ b/sysom_server/sysom_vul/sysom_vul/__init__.py @@ -0,0 +1,3 @@ +import pymysql + +pymysql.install_as_MySQLdb() diff --git a/sysom_server/sysom_vul/sysom_vul/asgi.py b/sysom_server/sysom_vul/sysom_vul/asgi.py new file mode 100644 index 0000000000000000000000000000000000000000..e5077035bad942d54e92d6f28b31a851b7ae57ec --- /dev/null +++ b/sysom_server/sysom_vul/sysom_vul/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for sysom_vul project. + +It exposes the ASGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sysom_vul.settings') + +application = get_asgi_application() diff --git a/sysom_server/sysom_vul/sysom_vul/settings.py b/sysom_server/sysom_vul/sysom_vul/settings.py new file mode 100644 index 0000000000000000000000000000000000000000..ef649510648c3520f2e074e6c73e6749802d3cec --- /dev/null +++ b/sysom_server/sysom_vul/sysom_vul/settings.py @@ -0,0 +1,15 @@ +import os + +env = os.environ.get("env", "product") + + +if env == "develop": + from conf.develop import * +elif env == "testing": + from conf.testing import * +elif env == "product": + from conf.product import * + +# 跨域允许 +if DEBUG: + CORS_ORIGIN_ALLOW_ALL = True diff --git a/sysom_server/sysom_vul/sysom_vul/urls.py b/sysom_server/sysom_vul/sysom_vul/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..611e9ff513442de6874772517f8056f2b14a9861 --- /dev/null +++ b/sysom_server/sysom_vul/sysom_vul/urls.py @@ -0,0 +1,11 @@ +from django.urls import path +from django.urls.conf import include +from django.conf import settings + +app_urlpatterns = [path('', include( + f'{app}.urls')) for app in settings.INSTALLED_APPS if app.startswith('apps')] + +urlpatterns = [ +] + +urlpatterns += app_urlpatterns diff --git a/sysom_server/sysom_vul/sysom_vul/wsgi.py b/sysom_server/sysom_vul/sysom_vul/wsgi.py new file mode 100644 index 0000000000000000000000000000000000000000..f7106242c464f4b66965f737c590a0d711e91e27 --- /dev/null +++ b/sysom_server/sysom_vul/sysom_vul/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for sysom_vul project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sysom_vul.settings') + +application = get_wsgi_application() diff --git a/sysom_web/config/routes.js b/sysom_web/config/routes.js index 1ee8dcc0619b288624e7635f334ee05883f9d085..a2a3aba35f84e0103af497f9a5a37dcd0a363f7c 100644 --- a/sysom_web/config/routes.js +++ b/sysom_web/config/routes.js @@ -124,11 +124,6 @@ export default [ { path: '/diagnose/cpu', redirect: '/diagnose/cpu/loadtask', - }, - { - path: '/diagnose/cpu/loadtask', - name: 'loadtask', - component: './diagnose/cpu/loadtask', } ] }, @@ -138,12 +133,7 @@ export default [ routes: [ { path: '/diagnose/storage', - redirect: '/diagnose/storage/io', - }, - { - path: '/diagnose/storage/io', - name: 'io', - component: './diagnose/Storage/Io', + redirect: '/diagnose/storage/iolatency', } ] }, @@ -231,7 +221,7 @@ export default [ { path: '/security/setting', component: './security/Setting', - }, + }, ], }, { diff --git a/sysom_web/public/resource/diagnose/v1/cpu/loadtask.json b/sysom_web/public/resource/diagnose/v1/cpu/loadtask.json new file mode 100644 index 0000000000000000000000000000000000000000..c9a98a99e9a8034514c6a10b2441b542d80948bc --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/cpu/loadtask.json @@ -0,0 +1,86 @@ +{ + "servicename": "loadtask", + "version":1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": "", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP, 我们将在会这台机器内部发起SysAK诊断" + } + ], + "variables": [], + "pannels": [ + { + "key": "事件总览", + "type": "stat", + "title": "事件总览", + "datasource": "dataresult", + "fieldConfig": { + "mappings": [ + { + "type": "value", + "options": { + "true": { + "color": "red", + "text": "异常" + }, + "false": { + "color": "green", + "text": "正常" + } + } + } + ], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "red", + "value": 20 + }, + { + "color": "green", + "value": 0 + } + ] + }, + "unit": "" + } + }, + { + "key": "pieRow", + "type": "row", + "title": "测试行", + "datasource": "", + "children": [ + { + "key": "alltasks", + "type": "piechart", + "title": "R/D状态进程数量", + "datasource": "datataskcount" + }, + { + "key": "dtasks", + "type": "piechart", + "title": "D状态负载影响度", + "datasource": "datauninterruptload" + }, + { + "key": "rtasks", + "type": "piechart", + "title": "R状态负载影响度", + "datasource": "datarunningload" + } + ] + }, + { + "key": "火焰图", + "type": "svg", + "title": "调度火焰图", + "datasource": "dataflamegraph" + } + ] + +} diff --git a/sysom_web/public/resource/diagnose/v1/custom/command.json b/sysom_web/public/resource/diagnose/v1/custom/command.json new file mode 100644 index 0000000000000000000000000000000000000000..686a081c527f57923f50bddc45ec556a28bf79f8 --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/custom/command.json @@ -0,0 +1,29 @@ +{ + "servicename": "command", + "version":1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": "", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP" + }, + { + "type": "text", + "name": "command", + "initialValue": "", + "label": "命令", + "tooltips": "请输入你需要执行的命令" + } + ], + "variables": [], + "pannels": [ + { + "key": "CommandResult", + "type": "stat", + "title": "Command Result:", + "datasource": "CommandResult" + } + ] +} diff --git a/sysom_web/public/resource/diagnose/v1/locales.json b/sysom_web/public/resource/diagnose/v1/locales.json 
index f2910cb60988113edd3f89ec82b24353d4bd9a49..c416ae11a5ef8a4c5e62bfb343b82d28a06c3980 100644 --- a/sysom_web/public/resource/diagnose/v1/locales.json +++ b/sysom_web/public/resource/diagnose/v1/locales.json @@ -4,12 +4,21 @@ "menu.diagnose.memory": "内存诊断中心", "menu.diagnose.storage": "存储诊断中心", "menu.diagnose.net": "网络诊断中心", - "menu.diagnose.cpu": "调度诊断中心" + "menu.diagnose.cpu": "调度诊断中心", + "menu.diagnose.custom": "自定义诊断中心" }, "dashboard": { "menu.diagnose.memory.memgraph": "内存大盘", "menu.diagnose.memory.filecache": "Cache分析", "menu.diagnose.memory.oomcheck": "OOM诊断", - "menu.diagnose.cpu.schedmoni": "调度抖动诊断" + "menu.diagnose.cpu.schedmoni": "调度抖动诊断", + "menu.diagnose.net.packetdrop": "丢包诊断", + "menu.diagnose.net.jitter": "抖动诊断", + "menu.diagnose.net.retran": "重传诊断", + "menu.diagnose.cpu.loadtask": "系统负载诊断", + "menu.diagnose.storage.iolatency": "IO时延分析", + "menu.diagnose.storage.iofsstat": "IO流量分析", + "menu.diagnose.storage.iohang": "IO HANG诊断", + "menu.diagnose.custom.command": "命令诊断" } } diff --git a/sysom_web/public/resource/diagnose/v1/net/retran.json b/sysom_web/public/resource/diagnose/v1/net/retran.json new file mode 100644 index 0000000000000000000000000000000000000000..e78c773c32fd5b562edd5abd91f0ee78212cad15 --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/net/retran.json @@ -0,0 +1,29 @@ +{ + "servicename": "retran", + "version": 1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": "", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP, 我们将会在这台机器内部发起SysAK诊断" + }, + { + "type": "text", + "name": "time", + "initialValue": "10", + "label": "运行时间", + "tooltips": "请输入你要诊断的时间,单位秒" + } + ], + "variables": [], + "pannels": [ + { + "key": "RetranStat", + "type": "table", + "title": "重传事件", + "datasource": "RetranStat" + } + ] +} \ No newline at end of file diff --git a/sysom_web/public/resource/diagnose/v1/storage/iofsstat.json b/sysom_web/public/resource/diagnose/v1/storage/iofsstat.json new file mode 100644 index 0000000000000000000000000000000000000000..8eb336a65673df971401c066bb9f1fe9e370e6f0 --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/storage/iofsstat.json @@ -0,0 +1,54 @@ +{ + "servicename": "iofsstat", + "version":1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": "", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP,我们将会在这台机器内部发起IO流量统计" + }, + { + "type": "text", + "name": "timeout", + "initialValue": "15", + "label": "诊断时长", + "tooltips": "诊断时长,也是IO流量统计周期,单位秒,建议不超过60秒" + }, + { + "type": "text", + "name": "disk", + "initialValue": "", + "label": "目标磁盘", + "tooltips": "请输入你要诊断的磁盘,如vda,sda等等,缺省为所有磁盘" + } + ], + "variables": [ + { + "key": "disks", + "label": "磁盘", + "datasource": "disks" + } + ], + "pannels": [ + { + "key": "diskIOstat", + "type": "table", + "title": "磁盘IO流量分析", + "datasource": "diskIOstat_${disks}" + }, + { + "key": "taskIOstat", + "type": "table", + "title": "进程IO流量分析", + "datasource": "taskIOstat_${disks}" + }, + { + "key": "taskIOblocksize", + "type": "table", + "title": "进程IO块大小分布", + "datasource": "taskIOblocksize_${disks}" + } + ] +} diff --git a/sysom_web/public/resource/diagnose/v1/storage/iohang.json b/sysom_web/public/resource/diagnose/v1/storage/iohang.json new file mode 100644 index 0000000000000000000000000000000000000000..7f31f41809816d0d9c866515df81967e2ef8f1d5 --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/storage/iohang.json @@ -0,0 +1,55 @@ +{ + "servicename": "iohang", + "version":1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": 
"", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP,我们将在会这台机器内部发起IO HANG诊断" + }, + { + "type": "text", + "name": "timeout", + "initialValue": "10", + "label": "诊断时长", + "tooltips": "请输入你要诊断的时长,单位秒" + }, + { + "type": "text", + "name": "threshold", + "initialValue": "5000", + "label": "时间阈值", + "tooltips": "保留IO HANG住时间超过阈值的IO,单位毫秒" + }, + { + "type": "text", + "name": "disk", + "initialValue": "", + "label": "目标磁盘", + "tooltips": "请输入你要诊断的磁盘,如vda,sda等等,缺省为所有磁盘" + } + ], + "variables": [ + { + "key": "disks", + "label": "磁盘", + "datasource": "disks" + } + ], + "pannels": [ + { + "key": "iohangOverview", + "type": "table", + "title": "IO HANG总览", + "datasource": "iohangOverview_${disks}" + }, + { + "key": "singleIO", + "type": "table", + "title": "TOP10个IO详情", + "datasource": "singleIO_${disks}" + } + ] +} diff --git a/sysom_web/public/resource/diagnose/v1/storage/iolatency.json b/sysom_web/public/resource/diagnose/v1/storage/iolatency.json new file mode 100644 index 0000000000000000000000000000000000000000..56e06655293b5b6b8da0c70f20537fc481d3d2e0 --- /dev/null +++ b/sysom_web/public/resource/diagnose/v1/storage/iolatency.json @@ -0,0 +1,108 @@ +{ + "servicename": "iolatency", + "version":1.0, + "taskform": [ + { + "type": "text", + "name": "instance", + "initialValue": "", + "label": "实例IP", + "tooltips": "请输入你要诊断的IP,我们将在会这台机器内部发起IO时延诊断" + }, + { + "type": "text", + "name": "timeout", + "initialValue": "10", + "label": "诊断时长", + "tooltips": "请输入你要诊断的时长,单位秒" + }, + { + "type": "text", + "name": "threshold", + "initialValue": "5000", + "label": "时间阈值", + "tooltips": "保留IO延迟大于设定时间阈值的IO(时间单位:ms)" + }, + { + "type": "text", + "name": "disk", + "initialValue": "", + "label": "目标磁盘", + "tooltips": "请输入你要诊断的磁盘,如vda,sda等等,缺省为所有磁盘" + } + ], + "variables": [ + { + "key": "disks", + "label": "磁盘", + "datasource": "disks" + } + ], + "pannels": [ + { + "key": "iolatencyOverview", + "type": "flow", + "title": "整体IO时延分布", + "flowconfigs": { + "nodes": [ + { + "id": "os(block)", + "x": 40, + "y": 40 + }, + { + "id": "os(driver)", + "x": 300, + "y": 40 + }, + { + "id": "disk", + "x": 560, + "y": 40 + }, + { + "id": "os(complete)", + "x": 820, + "y": 40 + } + ], + "edges": [ + { + "source": "os(block)", + "target": { + "cell": "os(driver)", + "port": "left" + } + }, + { + "source": "os(driver)", + "target": { + "cell": "disk", + "port": "left" + } + }, + { + "source": "disk", + "target": { + "cell": "os(complete)", + "port": "left" + } + } + ] + }, + "datasource": "iolatencyOverview_${disks}" + }, + { + "key": "singleIOMetrics", + "type": "timeseries", + "title": "单IO时延metrics展示", + "datasource": "singleIOMetrics_${disks}" + }, + { + "key": "singleIO", + "type": "table", + "title": "TOP10 IO详情", + "datasource": "singleIO_${disks}" + } + ] +} diff --git a/sysom_web/src/pages/diagnose/components/Dashboard.jsx b/sysom_web/src/pages/diagnose/components/Dashboard.jsx index a5df962e8c64a7f405b94a24866bd2f97a1abc2f..c2306d2b0311a13656c6ea4227a6004c1ec18548 100644 --- a/sysom_web/src/pages/diagnose/components/Dashboard.jsx +++ b/sysom_web/src/pages/diagnose/components/Dashboard.jsx @@ -114,7 +114,7 @@ const Dashboard = (props) => { let initialValiable = [] variableDesc.forEach(varDesc => { - if (datas[varDesc.datasource].data) { + if (datas[varDesc.datasource].data && datas[varDesc.datasource].data.length > 0) { initialValiable.push({ name: varDesc.key, value: datas[varDesc.datasource].data[0].value diff --git a/sysom_web/src/pages/diagnose/components/FlowPannel.jsx 
b/sysom_web/src/pages/diagnose/components/FlowPannel.jsx index 24d2b16adfcc41fb2321324612080e1fd75a2365..86028b321d7a865e8a0df34b6e0f8e1a784d5f09 100644 --- a/sysom_web/src/pages/diagnose/components/FlowPannel.jsx +++ b/sysom_web/src/pages/diagnose/components/FlowPannel.jsx @@ -15,11 +15,12 @@ const FlowPannelPannel = (props) => { let graph = null useEffect(() => { - if (!graph && data) { + if (!graph && data && configs.flowconfigs) { graph = new Graph({ container: ref.current, - width: 800, + // width: 800, height: 400, + autoResize: true, connecting: { router: { name: 'manhattan', @@ -34,7 +35,7 @@ const FlowPannelPannel = (props) => { }, }, anchor: 'center', - //connectionPoint: 'anchor', + // connectionPoint: 'anchor', allowBlank: false, snap: { radius: 20, @@ -200,14 +201,14 @@ const FlowPannelPannel = (props) => { if (configs?.links?.defalut?.pannel) showModalPannel(configs.links.defalut.pannel, node.data) }) - }, [configs.flowconfigs]) + if (configs.flowconfigs) { + configs.flowconfigs.nodes = configs.flowconfigs.nodes.map(node => { + const nodeData = data?.find((i) => i.key == node.id) + return { ...node, ...nodeData, data: nodeData } + }) + data && graph.fromJSON(configs.flowconfigs) + } - useEffect(() => { - configs.flowconfigs.nodes = configs.flowconfigs.nodes.map(node => { - const nodeData = data?.find((i) => i.key == node.id) - return { ...node, ...nodeData, data: nodeData } - }) - data && graph.fromJSON(configs.flowconfigs) }, [configs.flowconfigs, data]) return ( @@ -219,6 +220,12 @@ const FlowPannelPannel = (props) => {
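The FlowPannel change above now guards on configs.flowconfigs and merges each flow node with the datasource row whose key equals the node id before handing the result to graph.fromJSON. The JS does this with `data?.find(...)` and object spreads; the Python rendering below is shown only to make the data shape explicit and is not project code.

```python
def merge_flow_nodes(nodes, rows):
    """Attach to each flowconfig node the datasource row whose key matches the node id,
    mirroring `{ ...node, ...nodeData, data: nodeData }` in the JS above."""
    by_key = {row.get("key"): row for row in (rows or [])}
    merged = []
    for node in nodes:
        row = by_key.get(node["id"])
        merged.append({**node, **(row or {}), "data": row})
    return merged


# Node ids taken from iolatency.json; the row contents are illustrative only.
nodes = [{"id": "os(block)", "x": 40, "y": 40}, {"id": "disk", "x": 560, "y": 40}]
rows = [{"key": "disk", "value": "1.2ms"}]
print(merge_flow_nodes(nodes, rows))
```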
Datasource {configs?.datasource} no data
} /> } + { + !configs.flowconfigs && flowconfigs missing + } /> + } ) } diff --git a/sysom_web/src/pages/diagnose/components/StatisticPannel.jsx b/sysom_web/src/pages/diagnose/components/StatisticPannel.jsx index c02498f41f13fa2204859209bb44909303d1271e..d4ff9344f297c96137128accf02deb001178eb6c 100644 --- a/sysom_web/src/pages/diagnose/components/StatisticPannel.jsx +++ b/sysom_web/src/pages/diagnose/components/StatisticPannel.jsx @@ -16,10 +16,15 @@ const StatisticPannel = (props) => { Object.keys(datas).map((key) => { let data = datas[key] let [value, color] = fieldModifier(props.configs?.fieldConfig, data.value, data, datas) - return ( - + ) }) diff --git a/sysom_web/src/pages/diagnose/components/SvgPannel.jsx b/sysom_web/src/pages/diagnose/components/SvgPannel.jsx index 9c6f4f10ea8816d97ecd27b09a6affccfd22771c..5ae51fa66c7622cf889c46dc844ad73644e3f126 100644 --- a/sysom_web/src/pages/diagnose/components/SvgPannel.jsx +++ b/sysom_web/src/pages/diagnose/components/SvgPannel.jsx @@ -1,29 +1,38 @@ import ProCard from '@ant-design/pro-card'; -import React from 'react'; -import ReactDOM from 'react-dom' - -class SmartIFrame extends React.Component { - render() { - return
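The StatisticPannel change above passes props.configs?.fieldConfig and each datasource value through fieldModifier to obtain a display value and color. fieldModifier itself is outside this diff, so the sketch below only illustrates how a fieldConfig like the one in loadtask.json (value mappings plus absolute thresholds) could resolve a raw value; the precedence of mappings over thresholds is an assumption.

```python
def resolve_field(field_config, raw_value):
    """Illustrative resolution: value mappings take precedence, then the
    highest matching absolute threshold decides the color."""
    text, color = str(raw_value), None

    for mapping in field_config.get("mappings", []):
        if mapping.get("type") == "value":
            option = mapping.get("options", {}).get(str(raw_value).lower())
            if option:
                return option.get("text", text), option.get("color")

    steps = field_config.get("thresholds", {}).get("steps", [])
    for step in sorted(steps, key=lambda s: s["value"]):
        if isinstance(raw_value, (int, float)) and raw_value >= step["value"]:
            color = step["color"]
    return text, color


# fieldConfig copied from the loadtask.json "事件总览" pannel above.
field_config = {
    "mappings": [{"type": "value",
                  "options": {"true": {"color": "red", "text": "异常"},
                              "false": {"color": "green", "text": "正常"}}}],
    "thresholds": {"mode": "absolute",
                   "steps": [{"color": "red", "value": 20},
                             {"color": "green", "value": 0}]},
}
print(resolve_field(field_config, True))   # ('异常', 'red')
print(resolve_field(field_config, 35))     # ('35', 'red')
```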