diff --git a/script/server/6_sysom_hotfix/clear.sh b/script/server/6_sysom_hotfix/clear.sh new file mode 100644 index 0000000000000000000000000000000000000000..06921d22c52c9e06d17866cb621923893e7353fd --- /dev/null +++ b/script/server/6_sysom_hotfix/clear.sh @@ -0,0 +1,15 @@ +#!/bin/bash +SERVICE_NAME=sysom-hotfix +clear_app() { + sed -i '/hotfix/d' /var/spool/cron/root + sed -i '/hotfix/d' /etc/exports + exportfs -rv + systemctl stop nfs-server + systemctl stop rpcbind + systemctl stop nfs + rm -rf /etc/supervisord.d/${SERVICE_NAME}.ini + ### use supervisorctl update to stop and clear services ### + supervisorctl update +} + +clear_app diff --git a/script/server/6_sysom_hotfix/init.sh b/script/server/6_sysom_hotfix/init.sh new file mode 100644 index 0000000000000000000000000000000000000000..542559f826b7d40106e3fa415d73f926f962a97a --- /dev/null +++ b/script/server/6_sysom_hotfix/init.sh @@ -0,0 +1,34 @@ +#!/bin/bash +SERVER_DIR="sysom_server" +HOTFIX_DIR=${SERVER_DIR}/sysom_hotfix +VIRTUALENV_HOME=${SERVER_HOME}/virtualenv +TARGET_PATH=${SERVER_HOME}/target +SERVICE_NAME=sysom-hotfix + +source_virtualenv() { + echo "INFO: activate virtualenv..." + source ${VIRTUALENV_HOME}/bin/activate || exit 1 +} + +start_nfs() +{ + systemctl start rpcbind && systemctl enable rpcbind + systemctl start nfs && systemctl enable nfs + if [ $? -ne 0 ];then + systemctl start nfs-server && systemctl enable nfs-server + fi + + nfs_mask=`ip -4 route | grep "link src" | grep $SERVER_LOCAL_IP | awk '{print $1}' | head -n 1` + file_path=${SERVER_HOME}/hotfix/hotfix-nfs + mkdir -p ${file_path} + echo "${file_path} ${nfs_mask}(rw,async)" >> /etc/exports + exportfs -rv + chmod -R 777 ${file_path} +} + +deploy() { + source_virtualenv + start_nfs +} + +deploy diff --git a/script/server/6_sysom_hotfix/start.sh b/script/server/6_sysom_hotfix/start.sh new file mode 100644 index 0000000000000000000000000000000000000000..c03d31961136846593b1ebd1ca5448e914e9a56a --- /dev/null +++ b/script/server/6_sysom_hotfix/start.sh @@ -0,0 +1,13 @@ +#!/bin/bash +SERVICE_NAME=sysom-hotfix +start_app() { + systemctl start nfs-server + systemctl start rpcbind + systemctl start nfs + sed -i '/hotfix/s;^#;;g' /var/spool/cron/root + sed -i '/hotfix/s;^#;;g' /etc/exports + exportfs -rv + supervisorctl start $SERVICE_NAME +} + +start_app diff --git a/script/server/6_sysom_hotfix/stop.sh b/script/server/6_sysom_hotfix/stop.sh new file mode 100644 index 0000000000000000000000000000000000000000..86021f8244072e43b071c70bbaa309e64779090e --- --- /dev/null +++ b/script/server/6_sysom_hotfix/stop.sh @@ -0,0 +1,13 @@ +#!/bin/bash +SERVICE_NAME=sysom-hotfix +stop_app() { + sed -i '/hotfix/s;^;#;g' /var/spool/cron/root + sed -i '/hotfix/s;^;#;g' /etc/exports + exportfs -rv + systemctl stop nfs-server + systemctl stop rpcbind + systemctl stop nfs + supervisorctl stop $SERVICE_NAME +} + +stop_app diff --git a/script/server/conf b/script/server/conf index 0cdf879af30368888b9046cf92bcb4beda995e25..48488b5ad15057bf4597107e96aca34e4c8bf2f2 100644 --- a/script/server/conf +++ b/script/server/conf @@ -18,3 +18,6 @@ [migration] 5_sysom_migration + +[hotfix] +6_sysom_hotfix \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/accounts/views.py b/sysom_server/sysom_api/apps/accounts/views.py index f9861ccbc4d11bf508708450d02cd62456f7e236..1821c825092a903813d5538f8a01dc606850c1f8 100644 --- a/sysom_server/sysom_api/apps/accounts/views.py +++ b/sysom_server/sysom_api/apps/accounts/views.py @@ -127,6 +127,8 @@ class AuthAPIView(CreateAPIView):
authentication_classes = [] def post(self, request): + print("inside...") + print(request.data) ser = UserAuthSerializer(data=request.data) ser.is_valid(raise_exception=True) u, t = ser.create_token() diff --git a/sysom_server/sysom_api/apps/hotfix/__init__.py b/sysom_server/sysom_api/apps/hotfix/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_api/apps/hotfix/admin.py b/sysom_server/sysom_api/apps/hotfix/admin.py new file mode 100644 index 0000000000000000000000000000000000000000..a23ff81f8e829bc519c959f99cb688d498c9c569 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/admin.py @@ -0,0 +1 @@ +from django.contrib import admin \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/apps.py b/sysom_server/sysom_api/apps/hotfix/apps.py new file mode 100644 index 0000000000000000000000000000000000000000..93fb52d2596ef2bd04b6fe20fabb43cf93305b6c --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/apps.py @@ -0,0 +1,44 @@ +import logging +import sys +from django.apps import AppConfig +from channel_job import default_channel_job_executor +from django.db.models.signals import post_migrate +from django.conf import settings +from cec_base.admin import dispatch_admin + +logger = logging.getLogger(__name__) + + +class HotfixConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'apps.hotfix' + + def ready(self): + try: + hotfix_cec_topic_name = "hotfix_job" + post_migrate.connect(initialization_subscribe, sender=self) + admin = dispatch_admin(settings.HOTFIX_CEC_URL) + if admin.create_topic(hotfix_cec_topic_name): + logger.info(">>>>>>>> INIT_HOTFIX_VIEW : create hotfix_job cec topic success") + except Exception as e: + logger.info(str(e)) + logger.info(">>>>>>>> INIT_HOTFIX_VIEW : create hotfix_job cec topic failed") + logger.info(">>> hotfix module loading success") + + +def initialization_subscribe(sender, **kwargs): + load_subscribe_models_data() + + +def load_subscribe_models_data(): + try: + from .models import SubscribeModel + + if not SubscribeModel.objects.filter().first(): + sub = SubscribeModel.objects.create( + title="admin", + ) + sub.users.add(*[1,]) + sub.save() + except Exception as e: + pass \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/build_hotfix.sh b/sysom_server/sysom_api/apps/hotfix/builder/build_hotfix.sh new file mode 100644 index 0000000000000000000000000000000000000000..91ea7fe4f1d49ea88766fd1476766e359e2978ff --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/builder/build_hotfix.sh @@ -0,0 +1,347 @@ +SRCPREFIX="/tmp/src" +LOCAL_COMPRESS_PREFIX="/disk/hotfix/compress" +tmpdir="/hotfix_tmpdir" +CHANGEINFOFILE=${tmpdir}/changeinfo +NFS_RPM_DIR="/nfs/hotfix/rpm" + +die() { + if [[ -z $1 ]]; then + msg="kpatch build failed" + else + msg="$1" + fi + + if [[ -e $LOGFILE ]]; then + warn "$msg. Check $LOGFILE for more details." + else + warn "$msg." 
+ fi + + exit 1 +} + +warn() { + echo "ERROR: $1" >&2 +} + +usage() { + echo "usage" +} + +function parse_args(){ + ARGS=`getopt -l patch:,kernel:,base:,description:,vmlinux,help,name,log -o hvp:k:d:n:b:g: -- "$@" 2>/dev/null` || { usage; die "FAILED";} + eval set -- "${ARGS}" + while [ -n "$1" ] + do + case "$1" in + -p|--patch) + patch=$(readlink -f "$2") + shift + ;; + -k|--kernel) + kernel="$2" + shift + ;; + -d|--description) + description="$2" + shift + ;; + -b|--base) + BASE_ROOT="$2" + shift + ;; + -v|--vmlinux) + target="vmlinux" + shift + ;; + -n|--name) + hotfix_name="$2" + shift + ;; + -g|--log) + LOGFILE="$2" + shift + ;; + -h|--help) + usage + ;; + --) + echo bb + shift + break + ;; + esac + shift + done + + if [ -z ${patch} ]; then + usage; + die "FAILED"; + fi + + if [ -z ${kernel} ]; then + usage; + die "FAILED"; + fi + + if [ -z "${description}" ]; then + usage; + die "FAILED"; + fi + + if [ -z "${hotfix_name}" ]; then + kpatch_id="`date "+%Y-%m-%d-%H-%M-%S"`" + fi + + BASE=${BASE_ROOT}/kpatch_space + KSRCS=${BASE_ROOT}/kernel_repos + # $kernel is like "4.19.48-006.ali4000.alios7.x86_64" + kernel_version="${kernel}" + strtmp=${kernel_version} + + # $arch is like "x86_64" or "aarch64" + arch="${strtmp##*.}" + if [ "$arch" != "x86_64" -a "$arch" != "aarch64" ]; then + echo "please input complete kernel version including arch (x86_64 or aarch64)" + die "FAILED" + fi + + # 4.19.91-26.an7.x86_64 => 4.19.91-26.an7 + strtmp="${strtmp%.*}" + tag="${strtmp%.*}" + # $dist is like 'an8' or 'an7' + dist="${strtmp##*.}" + release="${strtmp#*-}" + + kpatch_id=`date "+%Y%m%d%H%M%S"` + if [[ -n "$hotfix_name" ]]; then + echo "find patch_name : ${hotfix_name}" + kpatch_id=${hotfix_name}-${tag}-${kpatch_id} + else + kpatch_id=${patch_name}-${tag}-${kpatch_id} + fi + echo ${kpatch_id} +} + +function download_config() { + local get=0 + for ver in ${os_version} + do + for i in ${os_type} + do + uri="https://mirrors.openanolis.cn/anolis/${ver}/${i}/${arch}/os/Packages/kernel-devel-${kernel_version}.rpm" + wget -q -O /tmp/kernel-devel-${kernel_version}.rpm "${uri}" + if [ $? -eq 0 ];then + echo "break eq 0" + get=1 + break + fi + continue + done + if [[ $get == 1 ]];then + break + fi + done + + rpm2cpio /tmp/kernel-devel-${kernel_version}.rpm | cpio -div ./usr/src/kernels/${kernel_version}/.config + if [[ ! -d ${SRCPREFIX}/${kernel_version} ]];then + mkdir -p ${SRCPREFIX}/${kernel_version} + fi + cp ./usr/src/kernels/${kernel_version}/.config ${SRCPREFIX}/${kernel_version}/config-${kernel_version} + rm -fr /tmp/kernel-devel-${kernel_version}.rpm + #rm -fr ./usr +} + +function download_vmlinux() { + local get=0 + for ver in ${os_version} + do + for i in ${os_type} + do + uri="https://mirrors.openanolis.cn/anolis/${ver}/${i}/${arch}/debug/Packages/kernel-debuginfo-${kernel_version}.rpm" + wget -q -O /tmp/kernel-debuginfo-${kernel_version}.rpm "${uri}" + if [ $? -eq 0 ];then + echo "break eq 0" + get=1 + break + fi + continue + done + if [[ $get == 1 ]];then + break + fi + done + + rpm2cpio /tmp/kernel-debuginfo-${kernel_version}.rpm | cpio -dim + if [[ ! 
-d ${LOCAL_COMPRESS_PREFIX}/${kernel_version} ]];then + mkdir -p ${LOCAL_COMPRESS_PREFIX}/${kernel_version} + fi + # copy the vmlinux to the compress path + cp ./usr/src/kernels/${kernel_version}/.config ${LOCAL_COMPRESS_PREFIX}/${kernel_version}/config-${kernel_version} + rm -fr /tmp/kernel-devel-${kernel_version}.rpm + rm -fr ./usr +} + +# kpatch-build under kpatch_space +function prepare_kpatch(){ + kpatch_prefix="kpatch-build" + kpatch_dir="${BASE}/${kpatch_prefix}" + kpatch_build_path="${kpatch_dir}/kpatch-build/kpatch-build" + + cd ${BASE}/${kpatch_prefix} + + # make + if [ $arch == "x86_64" ]; then + make -C ${kpatch_prefix} BUILDMOD=no && make -C ${kpatch_prefix} BUILDMOD=no install + else + export NO_PROFILING_CALLS=1 && make -C ${kpatch_prefix} BUILDMOD=no && make -C ${kpatch_prefix} install + fi + + make install + cd - +} + +function checkout_branch() { + cd ${KSRCS}/cloud-kernel + #branch_tag="$(`git tag | grep -w $tag`)" + #[[ $branch_tag == $tag ]] || die "checkout source code tag/branch failed"; + git checkout $tag + if [[ ! -d ${SRCPREFIX}/${kernel_version}/${kernel_version} ]];then + mkdir -p ${SRCPREFIX}/${kernel_version}/${kernel_version} + fi + # copy ${KSRCS}/cloud-kernel to temp dir + cp -a * ${SRCPREFIX}/${kernel_version}/${kernel_version} + find ${SRCPREFIX}/${kernel_version}/${kernel_version} -name ".git" | xargs rm -rf + + ln -s ${SRCPREFIX}/${kernel_version} ${tmpdir} || die "create symbol link from src code to ${tmpdir} failed"; +} + +function prepare_environment(){ + echo "Prepare environment ..." + set -x + + # build and install kpatch + prepare_kpatch + source /etc/os-release + + if [[ ! -d ${tmpdir} ]]; then + mkdir -p ${tmpdir} + else + echo "Remove all file under ${tmpdir}" + rm -rf ${tmpdir}/* + fi + + if [[ ! -d ${LOCAL_COMPRESS_PREFIX} ]]; then + mkdir -p ${LOCAL_COMPRESS_PREFIX} + fi + + # check if we have this compress package of this version + mkdir -p ${SRCPREFIX} + if [[ -f ${LOCAL_COMPRESS_PREFIX}/${kernel_version}.tar.gz ]];then + tar zxf "${LOCAL_COMPRESS_PREFIX}/${rkernel_version}.tar.gz" -C ${SRCPREFIX} || die "untar ${kernel_version}.tar.gz failed..."; + else + download_config + fi + + # checkout the source branch + checkout_branch + + cd ${tmpdir} + + cp ${patch} "${kpatch_id}".patch || die "copy ${PATCH_FILE} to ${kpatch_id}.patch failed"; + cp ${patch} patch || die "copy ${PATCH_FILE} to patch failed"; + echo "${description}" > description || die "output description failed"; + + set +x + # Make the version of hotfix ko equal to that of vmlinux. + # Or it will fail when running "kpatch load .ko". + export LOCALVERSION="${localversion}" +} + +function do_kpatch_build(){ + echo "Start kpatch build ..." + set -x + + if [ -f "$CHANGEINFOFILE" ]; then + rm -rf "$CHANGEINFOFILE" + fi + + touch $CHANGEINFOFILE + export CHANGEINFOFILE + + echo "Using the Logfile is : $LOGFILE" + export LOGFILE + + cmd="${kpatch_build_path} --skip-compiler-check -n "${kpatch_id}" -s "${tmpdir}"/"${kernel_version}"/"${kernel_version}" -c "${tmpdir}"/"${kernel_version}"/config-"${kernel_version}" -o "${tmpdir}" "${tmpdir}"/"${kpatch_id}".patch" 2>&1 >> ${LOGFILE} + echo $cmd + if [[ -z "$USERMODBUILDDIR" ]]; then + if [ -z ${target} ]; then + $cmd + else + $cmd -t vmlinux + fi + else + # build oot module hotfix + ${kpatch_build_path} -n "${kpatch_id}" -s "${KERNEL_BUILD_PATH}" -m ${USERMODBUILDDIR} -c "$KERNEL_BUILD_PATH/.config" -v "${tmpdir}"/vmlinux --skip-gcc-check -o "${tmpdir}" "${tmpdir}"/"${kpatch_id}".patch + fi + + if [ $? 
-ne 0 ]; then + set +x + echo "Build kpatch failed, please check the log" + die "FAILED"; + fi + + cp $CHANGEINFOFILE ${tmpdir} + set +x +} + +function do_rpmbuild(){ + echo "Start build rpm ..." + mkdir rpmbuild + chmod +x ${BASE}/../build_rpm.sh + + # because kpatch-build will replace all the . into - in $kpatch-id + kofile=${kpatch_id//./-} + ${BASE}/../build_rpm.sh -m "${tmpdir}"/"${kofile}".ko -d ${dist} -e "${description}" -r "${tmpdir}"/rpmbuild -k "${kernel_version}" -c "${CHANGEINFOFILE}" -l "${release}" 2>&1 >> ${LOGFILE} + + if [[ $? -ne 0 ]]; then + die "FAILED" + fi + + cp "${tmpdir}"/rpmbuild/RPMS/${arch}/*.rpm ${pwddir}/ || die "FAILED"; + echo "The rpm is : `ls ${pwddir}/kernel-hotfix-"${kpatch_id}"*.rpm`"; + cp -a "${tmpdir}"/rpmbuild/RPMS/${arch}/* ${NFS_RPM_DIR}/ + + cd $pwddir + [ -d "./${kernel_version}" ] && rm -rf "./${kernel_version}" + rm -f "./kpatch-${kpatch_id}" +} + + +# kaptch-build in ${hotfix_base}/kpatch_space +# kernel_src in ${hotfix_base}/kernel_repo +echo "Running build_hotfix.sh..." + +os_type=" + Plus + Experimental + " +os_version=" + 7.9 + 8.6 + 8.5 + 8.4 + 8.2 + 8 + 7.7 + 23 + " + +parse_args "$@"; +prepare_environment; +do_kpatch_build; +do_rpmbuild; + +echo "Running build_hotfix.sh finished..." +echo "Success" +exit 0 \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/build_rpm.sh b/sysom_server/sysom_api/apps/hotfix/builder/build_rpm.sh new file mode 100644 index 0000000000000000000000000000000000000000..f5704b44e1e62907188522af0f9e5560ad42e4ce --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/builder/build_rpm.sh @@ -0,0 +1,197 @@ +#!/bin/bash +function usage(){ + echo "Usage:"; + echo "./build_rpm.sh [OPTIONS]" + echo "" + echo "Options:" + echo " --module" + echo " The ko path" + echo " --distro" + echo " distro" + echo " --rpmbuild" + echo " rpmbuild path" + echo " --kernel" + echo " kernel version" + echo " --release" + echo " kernel release" + echo " --description" + echo " The description of this hotfix" + echo " --help" + echo " For help" +} + +function parse_args(){ + ARGS=`getopt -l module:,distro:,rpmbuild:,kernel:,release:,description:,changeinfo:,help -o hm:d:r:k:c:e:l: -- "$@" 2>/dev/null` || { usage; exit 1; } + eval set -- "${ARGS}" + while [ -n "$1" ] + do + case "$1" in + -m|--module) + module="$2" + shift + ;; + -d|--distro) + distro="$2" + shift + ;; + -r|--rpmbuild) + rpmbuild="$2" + shift + ;; + -k|--kernel) + kernel="$2" + shift + ;; + -c|--changeinfo) + changeinfo="$2" + shift + ;; + -e|--description) + description="$2" + shift + ;; + -l|--release) + release="$2" + shift + ;; + -h|--help) + usage + ;; + --) + shift + break + ;; + esac + shift + done + + if [[ -z ${description} ]] ; then + description="This hotfix have no description" + fi + + if [[ -z ${distro} ]] ; then + distro="Anolis" + fi + + if [[ -z ${rpmbuild} ]] ; then + rpmbuild="`pwd`" + fi + + if [[ -z ${module} || -z ${kernel} || -z ${description} || -z ${changeinfo} || -z ${release} ]] ; then + echo ${module} + echo ${kernel} + echo ${description} + echo ${changeinfo} + echo ${release} + usage; + exit 1 + fi +} + +function prepare_spec(){ + # $arch is like "x86_64" or "aarch64" + arch="${kernel##*.}" + kernel_wo_arch=${kernel%.*} + + # $hotfix_file is like 'kpatch-test-stat.ko' + hotfix_file=`basename ${module}` + + # $hotfix_base is like 'test-stat' + hotfix_base=`echo ${hotfix_file#*-} |cut -d . 
-f 1` + + # $hotfix_ko_name is like 'kpatch-test-stat' + hotfix_ko_name=`echo ${hotfix_file%.ko}` + + # $module_name is like 'kpatch_test_stat' + module_name="${hotfix_ko_name//-/_}" + + hotfix_dir_path="%{_prefix}/%{_kernel_version}/${hotfix_base}" + hotfix_ko_path="${hotfix_dir_path}/${hotfix_file}" + install_path="\$RPM_BUILD_ROOT%{_prefix}/%{_kernel_version}/${hotfix_base}/" + + hotfix_apply="/usr/sbin/kpatch load %{_prefix}/\$(uname -r)/${hotfix_base}/${hotfix_file}" + hotfix_apply_prefix="/usr/sbin/kpatch load" + hotfix_undo="/usr/sbin/kpatch unload %{_prefix}/\$(uname -r)/${hotfix_base}/${hotfix_file}" + hotfix_spec=" +%define srcname ${hotfix_file} +%define _prefix /var/khotfix +%define _kernel_version ${kernel} +%define _kernel_release ${release} +%define _kernel_version_wo_arch ${kernel_wo_arch} +%define _ks_prefix /usr/local + +Summary: Hotfix for Anolis Kernel +Name: kernel-hotfix-${hotfix_base}-%{_kernel_release} +version: 1.0 +Release: ${RELEASE:-1}.${distro} +License: GPL +Packager: Yongde Zhang +Group: applications +BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root +BuildArch: ${arch} +Source0: "${module}" +Source1: patch +Source2: description + +Requires: kpatch >= 0.8.0-1.5 + + +%description +hotfix rpm build : ${description} + +%install +mkdir -p ${install_path} +cp \$RPM_SOURCE_DIR/${hotfix_file} ${install_path} +cp \$RPM_SOURCE_DIR/patch ${install_path} +cp \$RPM_SOURCE_DIR/description ${install_path} +cp \$RPM_SOURCE_DIR/changeinfo ${install_path} +" + hotfix_spec=${hotfix_spec}" +%files +${hotfix_dir_path}/ + +%clean +%{__rm} -rf \$RPM_BUILD_ROOT + +%pre +if [ \"\$(uname -r)\" != \"%{_kernel_version}\" ]; then + echo \"kernel version does not match\" + exit -1 +fi + +%posttrans +systemctl enable kpatch || exit 1 +" + + hotfix_spec=${hotfix_spec}" +kpatch install -k ${kernel} ${hotfix_ko_path} || exit -1 +if [ \"\$(uname -r)\" == \"%{_kernel_version}\" ]; then + ${hotfix_apply} || exit -1 +fi + +%preun +if [ \"\$(uname -r)\" == \"%{_kernel_version}\" ]; then + ${hotfix_undo} + +fi +kpatch uninstall -k ${kernel} ${hotfix_file} +" +} + +function prepare_environment(){ + echo "Preparing rpm package enviroment..." 
+ mkdir -p "${rpmbuild}"/{BUILD,RPMS,SOURCES,SPECS,SRPMS} + cp ${module} "${rpmbuild}"/SOURCES/ + cp patch "${rpmbuild}"/SOURCES/ + cp description "${rpmbuild}"/SOURCES/ + cp changeinfo "${rpmbuild}"/SOURCES/ + echo "${hotfix_spec}" > "${rpmbuild}"/SPECS/"${hotfix_ko_name}".spec +} + +function do_rpmbuild(){ + rpmbuild -bb "${rpmbuild}"/SPECS/"${hotfix_ko_name}".spec --define "%_topdir $rpmbuild" +} +parse_args "$@"; +prepare_spec; +prepare_environment; +do_rpmbuild; \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/builder.py b/sysom_server/sysom_api/apps/hotfix/builder/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..21b7d819cd6b143a82a56e8fa52ecd551cf79085 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/builder/builder.py @@ -0,0 +1,244 @@ +import os +import threading +import requests +import json +import platform +import shutil +import re +import subprocess +from cec_base.consumer import Consumer, dispatch_consumer + +class ServerConnector(): + + def __init__(self, server_ip, username, password): + self.server_ip = server_ip + self.username = username + self.password = password + self.token = None + + def get_token(self): + url = self.server_ip + "/api/v1/auth/" + headers = {"Content-Type":"application/json"} + cond = {"username":self.username, "password":self.password} + print(json.dumps(cond)) + resp = requests.post(url, data=json.dumps(cond), headers = headers) + data = resp.json().get('data') ## it is a dict + token = data.get("token") + self.token = token + return token + + def insert_log_to_server(self, hotfix_id, logs): + url = self.server_ip + "/api/v1/hotfix/insert_building_log/" + headers = {'Content-Type': "application/json", 'Authorization': self.token} + cond = {"id":hotfix_id, "log" : logs} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code == 403: + # signature has expired, reflash the token + self.get_token() + headers = {'content-type': "application/json", 'Authorization': self.token} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code != 200: + print("can not connect to the server correctly..") + + def change_building_status(self, hotfix_id, status): + url = self.server_ip + "/api/v1/hotfix/update_building_status/" + headers = {'Content-Type': "application/json", 'Authorization': self.token} + cond = {"id":hotfix_id, "status" : status} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code == 403: + # signature has expired, reflash the token + self.get_token() + headers = {'Content-Type': "application/json", 'Authorization': self.token} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code != 200: + print("can not connect to the server correctly..") + + def sync_building_log(self, hotfix_id): + url = self.server_ip + "/api/v1/hotfix/sync_building_log/" + headers = {'Content-Type': "application/json", 'Authorization': self.token} + cond = {"id":hotfix_id} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code == 403: + # signature has expired, reflash the token + self.get_token() + headers = {'Content-Type': "application/json", 'Authorization': self.token} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code != 200: + print("can not connect to the server correctly..") + return resp + + def sync_rpm_name(self, hotfix_id, rpm_name): + url = self.server_ip + 
"/api/v1/hotfix/update_hotfix_name/" + headers = {'Content-Type': "application/json", 'Authorization': self.token} + cond = {"id":hotfix_id, "rpm":rpm_name} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code == 403: + # signature has expired, reflash the token + self.get_token() + headers = {'Content-Type': "application/json", 'Authorization': self.token} + resp = requests.post(url, data = json.dumps(cond), headers = headers) + if resp.status_code != 200: + print("can not connect to the server correctly..") + return resp + + + +class HotfixBuilder(): + + def __init__(self, nfs_dir_home, hotfix_base, cec_url, server_ip, username, password): + self.nfs_dir_home = nfs_dir_home + self.hotfix_base = hotfix_base + self.cec_url = cec_url + self.thread_runner = threading.Thread(target=self.build, name="hotfix_builder") + self.cec_hotfix_topic = "hotfix_job" + self.local_arch = os.uname().release.split(".")[-1] + self.connector = ServerConnector(server_ip, username, password) + self.tmpdir="/hotfix_tmpdir" + self.token = self.connector.get_token() + self.prepare_env() + print("the local architecture is : %s" % self.local_arch) + + def run(self): + self.thread_runner.start() + + def prepare_env(self): + # prepare kernel src and kaptch-build + cmd = "chmod +x check_env.sh && ./check_env.sh -b %s -n %s" % (self.hotfix_base, self.nfs_dir_home) + with os.popen(cmd) as process: + output = process.read() + print(output) + + # get the img_list image information and pull them based on machine's kernel arch + image_config_file = open("./img_list.json") + config_data = json.load(image_config_file) + machine_kernel = platform.uname().release + arch = machine_kernel.split(".")[-1] + for each_version in config_data[arch]: + image = config_data[arch][each_version] + os.system("docker pull {}".format(image)) + + if not os.path.exists(self.hotfix_base): + os.makedirs(self.hotfix_base) + + if not os.path.exists(self.tmpdir): + os.makedirs(self.tmpdir) + + # checkout the log directory + if not os.path.exists(os.path.join(self.nfs_dir_home, "log")): + os.makedirs(os.path.join(self.nfs_dir_home, "log")) + + # checkout the rpm directory + if not os.path.exists(os.path.join(self.nfs_dir_home, "rpm")): + os.makedirs(os.path.join(self.nfs_dir_home, "rpm")) + + # copy build_hotfix.sh to BASE + if os.path.exists("./build_hotfix.sh"): + shutil.copy("./build_hotfix.sh", self.hotfix_base) + else: + print("ERROR: cannot find build_hotfix.sh") + + if os.path.exists("./build_rpm.sh"): + shutil.copy("./build_rpm.sh", self.hotfix_base) + else: + print("ERROR: cannot find build_rpm.sh") + + def find_build_rpm(self): + directory = "/hotfix_tmpdir" + rpms = [] + for root, dirs, files in os.walk(directory): + for eachfile in files: + if re.search(".rpm", eachfile): + rpms.append(eachfile) + return rpms + + ''' + Each event is an object, the parameter is inside event.value + event.value is a dictionary. 
+ ''' + def build(self): + consumer_id = Consumer.generate_consumer_id() + consumer = dispatch_consumer(self.cec_url, self.cec_hotfix_topic, + consumer_id=consumer_id, + group_id="hotfix_job_group") + while True: + for event in consumer: + # get one event from cec, if match the arch, ack this event + parameters = event.value + print(parameters['arch']) + if parameters['arch'] != self.local_arch: + print(parameters['arch']) + break + consumer.ack(event) + + # get the hotfix building parametes + hotfix_id = parameters['hotfix_id'] + kernel_version = parameters['kernel_version'] + patch_name = parameters['patch_name'] + print("executing hotfix_id : %s" % hotfix_id) + # find the patch_path in builder local + patch_path = parameters['patch_path'].split("/")[-1] + patch_path = os.path.join(self.nfs_dir_home, "patch", patch_path) + print(patch_path) + log_file = parameters['log_file'] + log = "" + output = "" + + log_file_path = os.path.join(self.nfs_dir_home, "log", log_file) + + # move the patch to base + try: + local_patch = os.path.join(self.hotfix_base, parameters['patch_path'].split("/")[-1]) + print("the local patch is : %s " % local_patch) + shutil.copy(patch_path, local_patch) + except Exception as e: + print(str(e)) + self.connector.change_building_status(hotfix_id, "failed") + + with open(log_file_path, "w") as f: + f.write("Created Hotfix Building Task ... \n") + f.write("Kernel Version: %s\n" % kernel_version) + f.write("Patch file: %s\n" % patch_path) + f.write("Hotfix name : %s\n" % patch_name) + + description = "hello world" + # run the build hotfix script + cmd = "sudo docker run --rm -v {}:{} -v {}:{} -v {}:{} --net=host aliyun-docker-registry.cn-shenzhen.cr.aliyuncs.com/images/test:openAnolis_8.6_v1.1 sh {}/build_hotfix.sh -p {} -k {} -d {} -b {} -n {} -g {}".format( + self.hotfix_base, self.hotfix_base, self.nfs_dir_home, self.nfs_dir_home, self.tmpdir, self.tmpdir, + self.hotfix_base, local_patch, kernel_version, description, self.hotfix_base, patch_name, log_file_path + ) + print(cmd) + self.connector.change_building_status(hotfix_id, "building") + + cmd += " 2>&1 >> %s" % log_file_path + + p=subprocess.Popen(cmd, shell=True) + return_code=p.wait() + print("The return code is %d" % return_code) + + rpm_names = self.find_build_rpm() + + # when finished building, sync the build log + self.connector.sync_building_log(hotfix_id) + + # if rpm is more than one, upload it one by one + for each_rpm in rpm_names: + resp = self.connector.sync_rpm_name(hotfix_id, each_rpm) + if resp.status_code != 200: + self.connector.insert_building_log(hotfix_id, "cannot sync rpm package name %s" % each_rpm) + + # check the last output + if return_code == 0: + self.connector.change_building_status(hotfix_id, "success") + else: + self.connector.change_building_status(hotfix_id, "failed") + + +if __name__ == "__main__": + cec_url="redis://172.18.225.218:6379" + hotfix_base="/hotfix_build/hotfix" + nfs_dir_home="/nfs/hotfix" + server_ip = "http://120.78.173.250" + server_login_account = "admin" + server_login_password = "123456" + hotfix_builder = HotfixBuilder(nfs_dir_home, hotfix_base, cec_url, server_ip, server_login_account, server_login_password) + hotfix_builder.run() \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/check_env.sh b/sysom_server/sysom_api/apps/hotfix/builder/check_env.sh new file mode 100644 index 0000000000000000000000000000000000000000..11c871c4ae5f93735908afe67e60bf27ceb06686 --- /dev/null +++ 
b/sysom_server/sysom_api/apps/hotfix/builder/check_env.sh @@ -0,0 +1,132 @@ +#!/bin/bash + +usage() { + echo "Usage:"; + echo "./check_env.sh [OPTIONS]"; + echo ""; + echo "Options:"; + echo " -p|--patch"; + echo " The patch (must not null)"; + echo " -k|--kernelversion"; + echo " The complete kernel version (must not null) like "4.19.48-006.ali4000.alios7.x86_64""; + echo " -h|--help"; + echo " For help"; + echo ""; + echo "For example:"; + echo "./kpatch-packager.sh -p \${patch} -k \${kernel} -d \"\${description}\" -v --prefix=\${prefix_patch}"; +} + +# this is to output the error msg + warn() { + echo "ERROR: $1" >&2 + } + + # First,chech the input parameter length is zero or not + # then, check the LOGFILE exist or not , if exist, show the user that Check the LOGFILE for more details + die() { + if [[ -z $1 ]]; then + msg="kpatch build failed" + else + msg="$1" + fi + + if [[ -e $LOGFILE ]]; then + warn "$msg. Check $LOGFILE for more details." + else + warn "$msg." + fi + + exit 1 + } + +# make sure ${hotfix_base}/kpatch_space has kpatch-build + check_kpatch_build() { + kernel_version=$1 + cd ${BASE} + if [[ ! -d "kpatch_space" ]]; then + mkdir -p ${BASE}/kpatch_space + fi + + cd ${BASE}/kpatch_space + if [[ ! -d "kpatch-build" ]]; then + git clone https://gitee.com/anolis/kpatch-build.git + + if [[ $? -ne 0 ]]; then + cp -a ${NFSDIR}/kpatch-build . + if [[ $? -ne 0 ]]; then + die "No way to get the kpatch-build..." + fi + fi + else + echo "Found kpatch-build" + fi + + } + +# make sure ${hotfix_base}/kernel_repo has cloud-kernel + check_kernel_src() { + kernel_version=$1 + + cd ${BASE} + if [[ ! -d "kernel_repos" ]]; then + mkdir -p ${BASE}/kernel_repos + fi + + cd ${BASE}/kernel_repos + if [[ ! -d "cloud-kernel" ]]; then + git clone https://gitee.com/anolis/cloud-kernel.git + if [[ $? -ne 0 ]]; then + cp -a ${NFSDIR}/cloud-kernel . + if [[ $? -ne 0 ]]; then + die "No way to get the kernel_source..." 
+ fi + fi + fi + + cd cloud-kernel && git fetch --tags + + } + + check_images() { + yum install -y docker + for line in `cat image_list` + do + docker pull $line + done + } + +# Check Hotfix build enviroment +options="$(getopt -o hk:b:s:n: -l "help,kernelversion:,hotfix_base:,ksrcs:,nfs:" -- "$@")" || die "getopt failed" + +eval set -- "$options" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) + usage + exit 0 + ;; + -k|--kernelversion) + KERNELVERSION="$2" + shift + ;; + -b|--hotfix_base) + BASE="$2" + shift + ;; + -s|--ksrcs) + KSRCS="$2" + shift + ;; + -n|--nfs) + NFSDIR="$2" + shift + ;; + esac + shift +done + +echo "The hotfix_base is : ${BASE} " + +check_kpatch_build $KERNELVERSION +check_kernel_src $KERNELVERSION \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/img_list.json b/sysom_server/sysom_api/apps/hotfix/builder/img_list.json new file mode 100644 index 0000000000000000000000000000000000000000..024e128a8b00f5dd023c7ce9b45cd6f09abb81cc --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/builder/img_list.json @@ -0,0 +1,8 @@ +{ + "x86_64": { + "anolis":"aliyun-docker-registry.cn-shenzhen.cr.aliyuncs.com/images/test:openAnolis_8.6_v1.1" + }, + "aarch64":{ + "anolis":"" + } +} \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/builder/init.sh b/sysom_server/sysom_api/apps/hotfix/builder/init.sh new file mode 100644 index 0000000000000000000000000000000000000000..252736b4f09da6a10b42295c37e5c8d57c2ec826 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/builder/init.sh @@ -0,0 +1,56 @@ +######################################## +# install packages +######################################## +yum install nfs-utils rpcbind -y +systemctl start rpcbind && systemctl enable rpcbind +systemctl start nfs-server && systemctl enable nfs-server + +yum install -y make gcc patch bison flex openssl-devel elfutils elfutils-devel dwarves + +yum install -y docker git + +if [ "$APP_NAME" == "" ] +then + export APP_NAME="sysom" +fi + +if [ "$APP_HOME" == "" ] +then + export APP_HOME=/usr/local/sysom/ + export SERVER_HOME=/usr/local/sysom/server + export NODE_HOME=/usr/local/sysom/node +fi + +if [ "$BUILDER_LOCAL_IP" == "" ] +then + local_ip=`ip -4 route | grep "link src" | awk -F"link src " '{print $2}' | awk '{print $1}' | head -n 1` + export BUILDER_LOCAL_IP=$local_ip +fi + +if [ "$BUILDER_PUBLIC_IP" == "" ] +then + export BUILDER_PUBLIC_IP=$BUILDER_LOCAL_IP +fi + +if [ "$SERVER_PORT" == "" ] +then + export SERVER_PORT=80 +fi + +################################################################### +# Mount the NFS from local directory to server +# NFS_SERVER_IP : The IP of NFS Server +# HOTFIX_NFS_HOME: The NFS directory of server +# LOCAL_HOTFIX_HOME : The local nfs direcotry you choose to mount +################################################################### +NFS_SERVER_IP=172.18.225.218 +HOTFIX_NFS_HOME=${SERVER_HOME}/hotfix/hotfix-nfs +LOCAL_NFS_HOME=/nfs/hotfix + +if [[ ! 
-d ${LOCAL_NFS_HOME} ]]; then + mkdir -p ${LOCAL_NFS_HOME} +fi + +sudo umount /nfs/hotfix +sudo mount -t nfs ${NFS_SERVER_IP}:${HOTFIX_NFS_HOME} ${LOCAL_NFS_HOME} + diff --git a/sysom_server/sysom_api/apps/hotfix/migrations/0001_initial.py b/sysom_server/sysom_api/apps/hotfix/migrations/0001_initial.py new file mode 100644 index 0000000000000000000000000000000000000000..18fcfe39ad904c03cb1d99f5cefefb6fe8fc01b4 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/migrations/0001_initial.py @@ -0,0 +1,40 @@ +# Generated by Django 3.2.16 on 2022-12-15 08:37 + +from django.db import migrations, models +import lib.utils + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='HotfixModel', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.CharField(default=lib.utils.human_datetime, max_length=20, verbose_name='创建时间')), + ('deleted_at', models.CharField(max_length=20, null=True)), + ('arch', models.CharField(max_length=10, verbose_name='架构')), + ('kernel_version', models.CharField(max_length=60, verbose_name='内核版本')), + ('patch_path', models.CharField(max_length=255, verbose_name='补丁路径')), + ('patch_name', models.CharField(default='patch', max_length=255, verbose_name='补丁名称')), + ('hotfix_path', models.CharField(max_length=255, verbose_name='rpm存储路径')), + ('building_status', models.IntegerField(default=0, verbose_name='构建状态')), + ('hotfix_necessary', models.IntegerField(default=0, verbose_name='补丁重要性')), + ('hotfix_risk', models.IntegerField(default=0, verbose_name='补丁风险')), + ('description', models.CharField(default='NULL', max_length=300, verbose_name='描述')), + ('log', models.TextField(default='', verbose_name='构建日志')), + ('log_file', models.CharField(max_length=255, verbose_name='日志存储路径')), + ('creator', models.CharField(default='admin', max_length=20, verbose_name='创建者')), + ('normal', models.BooleanField(default=0, verbose_name='正式包')), + ('rpm_name', models.CharField(max_length=255, verbose_name='rpm包名')), + ], + options={ + 'db_table': 'sys_hotfix', + }, + ), + ] diff --git a/sysom_server/sysom_api/apps/hotfix/migrations/__init__.py b/sysom_server/sysom_api/apps/hotfix/migrations/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/sysom_server/sysom_api/apps/hotfix/models.py b/sysom_server/sysom_api/apps/hotfix/models.py new file mode 100644 index 0000000000000000000000000000000000000000..eef5fffbf54aba7e1bb46ec90cfac1c1d7fa53f8 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/models.py @@ -0,0 +1,31 @@ +from django.db import models +from lib.base_model import BaseModel +from django.contrib.auth import get_user_model + +# Hotfix sys_hotfix Design +# for building_status->0:waiting; 1:building 2:build failed 3:build success +# for hotfix_necessary->0:optional 1:recommand install 2:must install +# for hotfix_risk -> 0:low risk 1:mid risk 2:high risk +class HotfixModel(BaseModel): + arch = models.CharField(max_length=10, verbose_name="架构") + kernel_version = models.CharField(max_length=60, verbose_name="内核版本") + patch_path = models.CharField(max_length=255, verbose_name="补丁路径") + patch_name = models.CharField(max_length=255, default="patch", verbose_name="补丁名称") + hotfix_path = models.CharField(max_length=255, verbose_name="rpm存储路径") + building_status = models.IntegerField(default=0, verbose_name="构建状态") + hotfix_necessary = 
models.IntegerField(default=0, verbose_name="补丁重要性") + hotfix_risk = models.IntegerField(default=0, verbose_name="补丁风险") + description = models.CharField(max_length=300, default="NULL", verbose_name="描述") + log = models.TextField(default="", verbose_name="构建日志") + log_file = models.CharField(max_length=255, verbose_name="日志存储路径") + creator = models.CharField(max_length=20, default="admin", verbose_name="创建者") + normal = models.BooleanField(default=0, verbose_name="正式包") + rpm_name = models.CharField(max_length=255, verbose_name="rpm包名") + + class Meta: + db_table = 'sys_hotfix' + + def __str__(self): + return self.patch_path + + \ No newline at end of file diff --git a/sysom_server/sysom_api/apps/hotfix/serializer.py b/sysom_server/sysom_api/apps/hotfix/serializer.py new file mode 100644 index 0000000000000000000000000000000000000000..1ba0ec6c860bdb051c8bb5f069a085e53ac0463f --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/serializer.py @@ -0,0 +1,17 @@ +import logging +from rest_framework import serializers +from apps.hotfix.models import HotfixModel +logger = logging.getLogger(__name__) + +from datetime import datetime + +class HotfixSerializer(serializers.ModelSerializer): + patch_name = serializers.SerializerMethodField() + + class Meta: + model = HotfixModel + fields = ('id','created_at','deleted_at','arch','kernel_version','patch_path', + 'building_status','hotfix_necessary','hotfix_risk','creator','patch_name', 'normal') # fields 指定从数据库返回的字段 + + def get_patch_name(self, attr: HotfixModel): + return attr.patch_path.split("/")[-1] diff --git a/sysom_server/sysom_api/apps/hotfix/urls.py b/sysom_server/sysom_api/apps/hotfix/urls.py new file mode 100644 index 0000000000000000000000000000000000000000..3d71529c4684c7ca924ba4cc5aa7323405233bad --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/urls.py @@ -0,0 +1,24 @@ +from django.urls import path +from django.urls.conf import include + +from rest_framework.routers import DefaultRouter + +from apps.hotfix import views + +router = DefaultRouter() +router.register('host', views.HotfixAPIView) + +urlpatterns = [ + path('api/v1/', include(router.urls)), + path('api/v1/hotfix/create_hotfix/', views.HotfixAPIView.as_view({'post': 'create_hotfix'})), + path('api/v1/hotfix/get_hotfix_list/', views.HotfixAPIView.as_view({'get': 'get_hotfixlist'})), + path('api/v1/hotfix/delete_hotfix/', views.HotfixAPIView.as_view({'delete': 'delete_hotfix'})), + path('api/v1/hotfix/set_normal/', views.HotfixAPIView.as_view({'post': 'set_normal'})), + path('api/v1/hotfix/upload_patch/', views.SaveUploadFile.as_view()), + path('api/v1/hotfix/update_building_status/', views.HotfixAPIView.as_view({'post': 'update_building_status'})), + path('api/v1/hotfix/insert_building_log/', views.HotfixAPIView.as_view({'post': 'insert_building_log'})), + path('api/v1/hotfix/get_build_log/', views.HotfixAPIView.as_view({'get': 'get_build_log'})), + path('api/v1/hotfix/sync_building_log/', views.HotfixAPIView.as_view({'post': 'sync_build_log'})), + path('api/v1/hotfix/update_hotfix_name/', views.HotfixAPIView.as_view({'post': 'update_hotfix_name'})), + path('api/v1/hotfix/download_hotfix/', views.HotfixAPIView.as_view({'get': 'download_hotfix_file'})), +] diff --git a/sysom_server/sysom_api/apps/hotfix/views.py b/sysom_server/sysom_api/apps/hotfix/views.py new file mode 100644 index 0000000000000000000000000000000000000000..e5b038ae115676291ded19826a8689ee3d3cb636 --- /dev/null +++ b/sysom_server/sysom_api/apps/hotfix/views.py @@ -0,0 +1,266 @@ +import re +import logging 
+import os +import threading +import time +from typing import Any +from drf_yasg import openapi +from drf_yasg.utils import swagger_auto_schema +from rest_framework.request import Request +from rest_framework.views import APIView +from rest_framework import mixins +from django.db.models import Q +from django.db.models.functions import Concat +from django.db.models import Value +from django_filters.rest_framework import DjangoFilterBackend +from rest_framework.exceptions import ValidationError +from django.conf import settings +from rest_framework.viewsets import GenericViewSet + +from apps.hotfix import serializer +from apps.common.common_model_viewset import CommonModelViewSet +from apps.hotfix.models import HotfixModel +from apps.accounts.authentication import Authentication +from lib.response import * +from lib.utils import human_datetime, datetime +from lib.exception import APIException +from concurrent.futures import ThreadPoolExecutor, as_completed +from channel_job import default_channel_job_executor +from channel_job import ChannelJobExecutor +from django.conf import settings +from django import forms +from django.views.decorators.csrf import csrf_exempt +from cec_base.admin import dispatch_admin +from cec_base.producer import dispatch_producer +from cec_base.event import Event +from django.http import HttpResponse, FileResponse + +logger = logging.getLogger(__name__) + +class SaveUploadFile(APIView): + authentication_classes = [] + + @swagger_auto_schema(operation_description="上传文件", + request_body=openapi.Schema( + type=openapi.TYPE_OBJECT, + required=["file"], + properties={ + "file": openapi.Schema(type=openapi.TYPE_FILE), + "catalogue": openapi.Schema(type=openapi.TYPE_STRING) + + } + ), + responses={ + '200': openapi.Response('save upload success', examples={"application/json": { + "code": 200, + "message": "Upload success", + "data": {} + }}), + '400': openapi.Response('Fail', examples={"application/json": { + "code": 400, + "message": "Required Field: file", + "data": {} + }}) + } + ) + def post(self, request): + patch_file = request.data.get('file', None) + catalogue = request.data.get('catalogue', None) + print(patch_file.name) + if not patch_file: + return APIException(message="Upload Failed: file required!") + + patch_file_repo = os.path.join(settings.HOTFIX_FILE_STORAGE_REPO, "patch") + if not os.path.exists(patch_file_repo): + os.makedirs(patch_file_repo) + file_path = os.path.join(patch_file_repo, patch_file.name) + + try: + with open(file_path, 'wb') as f: + for chunk in patch_file.chunks(): + f.write(chunk) + except Exception as e: + logger.error(e) + raise APIException(message=f"Upload Failed: {e}") + return success(result={}, message="Upload success") + + +class HotfixAPIView(GenericViewSet, + mixins.ListModelMixin, + mixins.RetrieveModelMixin, + mixins.CreateModelMixin, + mixins.UpdateModelMixin, + mixins.DestroyModelMixin + ): + queryset = HotfixModel.objects.filter(deleted_at=None) + serializer_class = serializer.HotfixSerializer + authentication_classes = [Authentication] + filter_backends = [DjangoFilterBackend] + filterset_fields = ['created_time', 'creator', 'building_status', 'arch'] + http_method_names = ['get', 'post', 'patch', 'delete'] + + def __init__(self, **kwargs: Any) -> None: + super().__init__(**kwargs) + self.hotfix_cec_topic_name = "hotfix_job" + self.event_id = None + + def cec_delivery_report(self, err: Exception, event: Event): + print(f"Produce new message => {event.event_id}") + self.event_id = event.event_id + + """ + Log_file : 
patch_name-time.log , this is used to output the log + """ + def create_hotfix(self, request, **kwargs): + arch = request.data['kernel_version'].split(".")[-1] + log_file = "{}-{}.log".format(request.data["patch_name"], time.strftime("%Y%m%d%H%M%S")) + patch_name = request.data["patch_name"] + patch_name.replace(" ","-") + res = HotfixModel.objects.create( + kernel_version = request.data['kernel_version'], + patch_name = patch_name, + patch_path = os.path.join(settings.HOTFIX_FILE_STORAGE_REPO, request.data['upload'].split("\\")[-1]), + building_status = 0, + hotfix_necessary = 0, + hotfix_risk = 2, + normal = 0, + log_file = log_file, + arch = arch + ) + + producer = dispatch_producer(settings.HOTFIX_CEC_URL) + producer.produce(self.hotfix_cec_topic_name, { + "hotfix_id" : res.id, + "kernel_version" : res.kernel_version, + "patch_name" : res.patch_name, + "patch_path" : res.patch_path, + "arch": res.arch, + "log_file" : res.log_file + }, self.cec_delivery_report) + producer.flush() + return success(result={"msg":"success","id":res.id,"event_id":self.event_id}, message="create hotfix job success") + + def get_hotfixlist(self, request): + queryset = HotfixModel.objects.all().filter(deleted_at=None) + response = serializer.HotfixSerializer(queryset, many=True) + return success(result=response.data, message="invoke get_hotfixlist") + + def delete_hotfix(self, request): + hotfix = HotfixModel.objects.filter(id=request.data["id"],deleted_at=None).first() + if hotfix is None: + return other_response(message="can not delete this hotfix", result={"msg":"Hotfix not found"}, code=400) + else: + hotfix.deleted_at=human_datetime() + hotfix.save() + print("saved the object") + return success(result={}, message="invoke delete_hotfix") + + def set_normal(self, request): + if request.method == 'POST': + hotfix = HotfixModel.objects.filter(id=request.data["id"]).first() + hotfix.normal = 1 + hotfix.save() + return success(result={"msg":"scuuessfully update normal status"}, message="normal status updated") + + def update_building_status(self, request): + if request.method == 'POST': + hotfix = HotfixModel.objects.filter(id=request.data["id"]).first() + status = request.data["status"] + if hotfix is None: + return other_response(message="No such hotfix id", result={"mgs":"update building status failed"}, code=400) + if status == "waiting": + hotfix.building_status=0 + elif status == "building": + hotfix.building_status=1 + elif status == "failed": + hotfix.building_status=2 + elif status == "success": + hotfix.building_status=3 + else: + return other_response(message="unsupported status", result={"mgs":"update building status failed"}, code=401) + hotfix.save() + return success(result={"msg":"update building status successfully"}, message="update building status success") + + def get_build_log(self, request): + if request.method == 'GET': + hotfix_id = request.GET.get('id') + hotfix = HotfixModel.objects.filter(id=hotfix_id).first() + if hotfix: + if hotfix.building_status == 2 or hotfix.building_status == 3: + # this hotfix task is finished + msg = hotfix.log + if len(msg) > 0: + return success(result=msg, message="hotfix build log return") + else: + return other_response(message="No build log found", result={"msg":"No build log found"}, code=400) + else: + # this job is not finished.. 
read from the log file + msg = "" + for line in open(os.path.join(settings.HOTFIX_FILE_STORAGE_REPO, "log", hotfix.log_file)): + msg += line + return success(result=msg, message="hotfix build log return") + else: + return other_response(message="No such record", result={"msg":"Hotfix not found"}, code=400) + else: + return other_response(message="Should be GET method", code=400) + + def insert_building_log(self, request): + if request.method == 'POST': + hotfix = HotfixModel.objects.filter(id=request.data["id"]).first() + log = request.data["log"] + print(log) + if hotfix is None: + return other_response(message="No such hotfix id", result={"mgs":"insert build log failed"}, code=400) + if len(log) <= 0: + return other_response(message="log is blank", result={"mgs":"insert build log failed"}, code=400) + build_log = hotfix.log + build_log = build_log + log + hotfix.log = build_log + hotfix.save() + return success(result={"msg": "inserted hotfix log"}, message="insert build log success") + + # this function is invoked when job finished.. + def sync_build_log(self, request): + if request.method == 'POST': + success = False + hotfix = HotfixModel.objects.filter(id=request.data["id"]).first() + try: + log = "" + for line in open(os.path.join(settings.HOTFIX_FILE_STORAGE_REPO, "log", hotfix.log_file)): + log = log + str(line) + if line == "Success": + success = True + hotfix.log = log + hotfix.save() + except Exception as e: + return other_response(message=str(e), code=400) + if success: + return success(result={"msg": "SUCCESS"}, message="sync build log success") + else: + return success(result={"msg": "FAILED"}, message="sync build log success") + + def update_hotfix_name(self, request): + if request.method == 'POST': + try: + hotfix = HotfixModel.objects.filter(id=request.data["id"]).first() + rpm_name = request.data["rpm"] + hotfix.rpm_name += rpm_name + hotfix.save() + except Exception as e: + return other_response(message=str(e), code=400) + return success(result={"msg":"update hotfix name success"}, message="updated hotfix name") + + def download_hotfix_file(self, request): + if request.method == 'GET': + try: + hotfix_id = request.GET.get('id') + hotfix = HotfixModel.objects.filter(id=hotfix_id).first() + rpm_name = hotfix.rpm_name + response = FileResponse(open(os.path.join(settings.HOTFIX_FILE_STORAGE_REPO, "rpm", rpm_name), "rb"), as_attachment=True) + response['content_type'] = "application/octet-stream" + response['Content-Disposition'] = 'attachment;filename=' + rpm_name + return response + except Exception as e: + print(str(e)) + return other_response(message=str(e), code=400) + diff --git a/sysom_server/sysom_api/conf/common.py b/sysom_server/sysom_api/conf/common.py index 4b7b4ae34d8487b8fffbbd03c40749061ef7955a..2092b06756b3f5f00bfd363202e03dc34bdb4f72 100644 --- a/sysom_server/sysom_api/conf/common.py +++ b/sysom_server/sysom_api/conf/common.py @@ -20,6 +20,7 @@ INSTALLED_APPS = [ 'apps.host', 'apps.alarm', 'apps.services', + 'apps.hotfix', 'rest_framework', 'corsheaders', @@ -116,6 +117,12 @@ IS_MICRO_SERVICES = False # 是否微服务 WEB_DIR = os.path.join(BASE_DIR.parent, 'sysom_web') DOWNLOAD_DIR = os.path.join(WEB_DIR, 'download') +################################################################## +# Hotfix Platform settings +################################################################## +HOTFIX_CONFIG_URL = "http://localhost:7003/api/v1/channel/config/get?name=migration_setting" +HOTFIX_CEC_URL = "redis://localhost:6379" +HOTFIX_FILE_STORAGE_REPO = 
"/usr/local/sysom/server/hotfix/hotfix-nfs" ################################################################## # SSH channel settings diff --git a/sysom_web/config/routes.js b/sysom_web/config/routes.js index 26c95fde83114ee2a45545a3c2ce6b69351061e1..6f09be306a09146dd13d3918d70b55cdcd243413 100644 --- a/sysom_web/config/routes.js +++ b/sysom_web/config/routes.js @@ -240,6 +240,25 @@ export default [ }, ], }, + { + path: '/hotfix', + name: 'hotfix', + routes: [ + { + path: '/hotfix', + redirect: '/hotfix/make', + }, + { + path: '/hotfix/make', + name: 'make', + component: './hotfix/Make', + }, + { + path: '/hotfix/hotfix_log/:id?', + component: './hotfix/HotfixLog' + }, + ] + }, { path: '/', redirect: '/welcome', diff --git a/sysom_web/src/locales/zh-CN/menu.js b/sysom_web/src/locales/zh-CN/menu.js index f79cbdfdc305b6401042431a422e40f6a88b4750..f207650c39a5bd60306dc341f0335eca2925f4c8 100644 --- a/sysom_web/src/locales/zh-CN/menu.js +++ b/sysom_web/src/locales/zh-CN/menu.js @@ -82,5 +82,7 @@ export default { 'menu.journal.alarm': '告警日志', 'menu.journal.task': '任务日志', 'menu.security': '安全中心', - 'menu.security.list': '漏洞中心' + 'menu.security.list': '漏洞中心', + 'menu.hotfix': '热补丁中心', + 'menu.hotfix.make': '热补丁制作' }; diff --git a/sysom_web/src/locales/zh-CN/pages.js b/sysom_web/src/locales/zh-CN/pages.js index 5f57148e45419c2c3503f30467771ed36695455a..d397874f1ca8527e11a529d06a7e7d9996f735c1 100644 --- a/sysom_web/src/locales/zh-CN/pages.js +++ b/sysom_web/src/locales/zh-CN/pages.js @@ -143,4 +143,19 @@ export default { 'pages.security.Historical.fix_fail': 'CVE修复失败,失败原因:', 'pages.security.notification.fix.success': '系统漏洞已修复', 'pages.security.notification.fix.success.content': '如您正在运行漏洞涉及到的服务,建议您重启相关服务使漏洞修复生效。', + 'pages.hotfix.title': '热补丁列表', + 'pages.hotfix.created_at': '创建时间', + 'pages.hotfix.kernel_version': '内核版本', + 'pages.hotfix.creator': '创建人', + 'pages.hotfix.progress': '进度', + 'pages.hotfix.patch_path': '补丁路径', + 'pages.hotfix.delete_hotfix_not_exist': '要删除的hotfixID不存在', + 'pages.hotfix.operation': '操作', + 'pages.hotfix.delete': '删除', + 'pages.hotfix.building_status': '构建状态', + 'pages.hotfix.normal': '转正式包', + 'pages.hotfix.os_type': '操作系统', + 'pages.hotfix.patch_name': '补丁名称', + 'pages.hotfix.upload': '文件上传', + 'pages.hotfix.download': "下载", }; \ No newline at end of file diff --git a/sysom_web/src/pages/Welcome.jsx b/sysom_web/src/pages/Welcome.jsx index d44ba2b64d00c298a7d8b24219344ba3025dc1ac..42f38994f7138415ffdc737d4fe1bd087294a143 100644 --- a/sysom_web/src/pages/Welcome.jsx +++ b/sysom_web/src/pages/Welcome.jsx @@ -13,6 +13,7 @@ const imgBanner = [ {key:4,banner:"#1a2933",title:"监控中心",urls:'/monitor',des:""}, {key:3,banner:"#1a2933",title:"诊断中心",urls:'/diagnose',des:""}, {key:5,banner:"#1a2933",title:"安全中心",urls:'/security',des:""}, + {key:5,banner:"#1a2933",title:"热补丁中心",urls:'/hotfix',des:""}, ] const Welcome = () => { @@ -64,6 +65,9 @@ const Welcome = () => { 漏洞中心 + + 热补丁制作 +
diff --git a/sysom_web/src/pages/Welcome.less b/sysom_web/src/pages/Welcome.less index 4049bf77d912dbd040f7bcdbb6651b5a929d8d18..0ccbc88520d78f0b4ce204a98eef6167031a2fcf 100644 --- a/sysom_web/src/pages/Welcome.less +++ b/sysom_web/src/pages/Welcome.less @@ -24,7 +24,7 @@ } .menuCenter{ position: absolute; - width: 46%; + width: 60%; left: 50%; transform: translate(-50%,-5%); line-height: 48px!important; diff --git a/sysom_web/src/pages/hotfix/HotfixLog/index.jsx b/sysom_web/src/pages/hotfix/HotfixLog/index.jsx new file mode 100644 index 0000000000000000000000000000000000000000..526710bb0098e4648ecad51466fb4d7c23ba1a85 --- /dev/null +++ b/sysom_web/src/pages/hotfix/HotfixLog/index.jsx @@ -0,0 +1,23 @@ +import { getHotfixLog } from '../service'; +import { useState, useEffect } from 'react'; + +import './index.less' + +const HotfixLog = (props) => { + const [data, setData] = useState(); + const hotFixID = props.match.params.id + + useEffect(() => { + getHotfixLog(hotFixID).then(res => { + setData(res.data) + }) + }, []) + + return ( +
+ <div className="HotfixLog"> + {data} + </div>
+ ); +}; + +export default HotfixLog \ No newline at end of file diff --git a/sysom_web/src/pages/hotfix/HotfixLog/index.less b/sysom_web/src/pages/hotfix/HotfixLog/index.less new file mode 100644 index 0000000000000000000000000000000000000000..2933005612822aed79deb87bf9c9ad2105472a7f --- /dev/null +++ b/sysom_web/src/pages/hotfix/HotfixLog/index.less @@ -0,0 +1,5 @@ +@import '~antd/es/style/themes/default.less'; + +.HotfixLog { + white-space: pre-line; +} \ No newline at end of file diff --git a/sysom_web/src/pages/hotfix/Make/index.jsx b/sysom_web/src/pages/hotfix/Make/index.jsx new file mode 100644 index 0000000000000000000000000000000000000000..e76a4004b479a2d99c74225c7cacdba51e420847 --- /dev/null +++ b/sysom_web/src/pages/hotfix/Make/index.jsx @@ -0,0 +1,231 @@ +import { useRef } from 'react'; +import { useIntl, FormattedMessage } from 'umi'; +import { PageContainer } from '@ant-design/pro-layout'; +import ProTable from '@ant-design/pro-table'; +import { Popconfirm, message, Switch, Upload, Button, Select, Form, Collapse} from 'antd'; +import { getHotfixList, delHotfix, setNormal, uploadProps, normFile, createHotfix, downloadHotfixFile } from '../service'; +import { UploadOutlined } from '@ant-design/icons'; +import { async } from '@antv/x6/es/registry/marker/async'; +import { DownloadOutlined } from '@ant-design/icons'; + +const handleDelHotfix = async (record) => { + const hide = message.loading('正在删除'); + const token = localStorage.getItem('token'); + try { + let res = await delHotfix(record.id, token); + hide(); + if (res.code == 200) { + message.success('删除成功'); + return true; + } else { + message.error(`删除失败: ${res.message}`); + return false; + } + } catch (error) { + hide(); + return false; + } +} + +const changeNormal = (record) => { + const token = localStorage.getItem('token'); + setNormal(record.id, token) +}; + +const submitHotfix = (params) => { + const token = localStorage.getItem('token'); + createHotfix(token, params) + console.log(params) +} + +const downloadHotfix = async (record) => { + const res = await downloadHotfixFile(record.id); + if (res) { + const url = window.URL.createObjectURL(new Blob([res])); + const link = document.createElement('a'); //创建a标签 + link.style.display = 'none'; + link.href = url; // 设置a标签路径 + link.download = res.response.headers.get('content-disposition').split("attachment;filename=")[1]; //设置文件名, 也可以这种写法 (link.setAttribute('download', '名单列表.xls'); + document.body.appendChild(link); + link.click(); + URL.revokeObjectURL(link.href); // 释放 URL对象 + document.body.removeChild(link); + console.log(res.response.headers.get('content-disposition').split("attachment;filename=")[1]) + } + console.log("downloadHotfixFile: ", record.id) +} + +const HotfixList = () => { + const actionRef = useRef(); + const intl = useIntl(); + + const columns = [ + { + title: , + dataIndex: 'created_at', + valueType: 'message', + hideInSearch: true, + }, + { + title: , + dataIndex: 'os_type', + key: 'os_type', + dataIndex: 'os_type', + hideInTable: true, + render: (_, record) => [ + ], + renderFormItem: (item, _a, form) => { + return