diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..88388522e644533f76a0809d1d8c7f0d5011995b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+log
+*.pyc
diff --git a/golang/scripts/format b/golang/scripts/format
deleted file mode 100644
index 9e3d5ce4d04a9d0107f4f3d85a06a59ee879f5c1..0000000000000000000000000000000000000000
--- a/golang/scripts/format
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/usr/bin/env bash
-
-goimports="goimports"
-
-find_files() {
-  find . -not \( \
-      \( \
-        -wholename './output' \
-        -o -wholename './_output' \
-        -o -wholename './_gopath' \
-        -o -wholename './release' \
-        -o -wholename './target' \
-        -o -wholename '*/third_party/*' \
-        -o -wholename '*/vendor/*' \
-      \) -prune \
-    \) -name '*.go'
-}
-
-diff=$(find_files | xargs ${goimports} -d 2>&1)
-if [[ -n "${diff}" ]]; then
-  echo "${diff}"
-  exit 1
-fi
diff --git a/golang/verify.sh b/golang/verify.sh
deleted file mode 100644
index 7a8f23be296e2c40c41a9fdf0aad8c163610ebc0..0000000000000000000000000000000000000000
--- a/golang/verify.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-export GOPATH=$WORKSPACE/$BUILD_ID
-
-go get golang.org/x/crypto/ssh
-go install golang.org/x/crypto/ssh
-go get golang.org/x/tools/cmd/goimports
-go install golang.org/x/tools/cmd/goimports
-
-export PATH=$PATH:$WORKSPACE/$BUILD_ID/bin
-
-#go vet ./...
-
-/bin/bash $WORKSPACE/$BUILD_ID/openeuler-jenkins/golang/scripts/format
-
diff --git a/src/README.md b/src/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..dfca4a8f68c6725994c36297aa67d3841a758894
--- /dev/null
+++ b/src/README.md
@@ -0,0 +1,65 @@
+# K8s-cluster-based packaging scheme
+
+## Single-package build jobs
+
+### Design
+
+- Deploy k8s clusters on both x86-64 and aarch64
+- Register the clusters as **Jenkins slave** nodes
+- The **Jenkins master** runs inside the x86-64 k8s cluster
+
+### Pipeline jobs
+
+> Only one instance of a given job runs at a time
+
+#### trigger
+
+- Triggered from Gitee
+- Runs the gate checks in parallel, on any cpu arch; on failure the job is aborted and the result is commented on the pr
+- On success, passes parameters to the downstream **job**
+  - project name (**repo**)
+  - branch (**branch**)
+  - pull request id (**prid**)
+  - author (**committer**)
+
+#### multiarch
+
+- Supports x86_64 and aarch64
+- Triggered once trigger succeeds
+- Builds by running [**python osc_build_k8s.py $repo $arch $WORKSPACE**](https://gitee.com/src-openeuler/ci_check/blob/k8s/private_build/build/osc_build_k8s.py)
+
+#### comment
+
+- Collects the gate and build results
+- Reports the results back to Gitee through the [**create Pull Request comment**](https://gitee.com/wuyu15255872976/gitee-python-client/blob/master/docs/PullRequestsApi.md#post_v5_repos_owner_repo_pulls) API
+- Any cpu arch
+
+## Building the jenkins/obs images
+
+### Mechanism
+
+- A docker service is deployed in the k8s cluster; its internal address is tcp://docker.jenkins:2376
+- Jenkins installs the docker plugin and is configured to connect to the cluster's docker service
+- An image-build pipeline job, obs-image, is configured in Jenkins
+- Trigger: started manually after the ci_check repository is tagged; Jenkins needs the build with parameters plugin
+
+### Pipeline job obs-image
+
+> The k8s agent running this job needs a docker client
+
+#### Job: _trigger
+
+- Check the Dockerfile [optional]
+- Set parameters [environment variables?]
+  - name [jenkins/obs]
+  - version [taken from the tag]
+
+#### Job: build-image-aarch64 & build-image-x86-64
+
+- Use **Build/Publish Docker Image** as the build step
+- Configure the **Registry Credentials** used to push the image
+
+#### Job: manifest
+
+Multi-arch support
+> which Registry Credentials does docker manifest push use?
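+
+As a sketch, the manifest job boils down to the following docker client
+calls (registry and image names here are illustrative, not the real ones):
+
+```bash
+# combine the per-arch images pushed by the two build jobs into one manifest list
+docker manifest create registry.example.com/openeuler/obs:${version} \
+    registry.example.com/openeuler/obs:${version}-x86-64 \
+    registry.example.com/openeuler/obs:${version}-aarch64
+
+# mark the arm image so clients on aarch64 pull the right one
+docker manifest annotate registry.example.com/openeuler/obs:${version} \
+    registry.example.com/openeuler/obs:${version}-aarch64 \
+    --os linux --arch arm64
+
+# push the manifest list itself; this is where the Registry Credentials question applies
+docker manifest push registry.example.com/openeuler/obs:${version}
+```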
diff --git a/src/__init__.py b/src/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/ac/README.md b/src/ac/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..cc370809d7fc7c74cb8c4d3fa831a817e0726107
--- /dev/null
+++ b/src/ac/README.md
@@ -0,0 +1,34 @@
+# Gate checks
+
+## How to add a check item
+1. Create a new directory under ci_check/src/ac
+2. Add a configuration item in ac_conf.yaml (optional)
+
+### Configuration file format
+
+```yaml
+# example
+spec:                     # name of the ac check item
+  hint: check_spec        # name shown on gitee; defaults to check_ + the check name
+  module: spec.check_spec # module of the check; defaults to "<name>.check_<name>"
+  entry: Entry            # entry point of the check, must be callable; defaults to "run"
+  exclude: true           # skip this check entirely
+  ignored: []             # sub-checks ignored inside this check; their failure does not affect the final result
+```
+
+### entry implementation template
+
+```python
+class Entry(object):
+    def __call__(self, *args, **kwargs):
+        # do the work
+        ...
+```
+
+### Check results
+
+| Return code | Description | emoji |
+| --- | --- | --- |
+| 0 | SUCCESS | :white_check_mark: |
+| 1 | WARNING | :bug: |
+| 2 | FAILED | :x: |
diff --git a/src/ac/__init__.py b/src/ac/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/ac/acl/__init__.py b/src/ac/acl/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/ac/acl/code/__init__.py b/src/ac/acl/code/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/ac/acl/code/check_code_style.py b/src/ac/acl/code/check_code_style.py
new file mode 100644
index 0000000000000000000000000000000000000000..439167f51a81898d5fd15160045fcc0914b2faba
--- /dev/null
+++ b/src/ac/acl/code/check_code_style.py
@@ -0,0 +1,134 @@
+# -*- encoding=utf-8 -*-
+import os
+import shutil
+import logging
+
+from src.proxy.git_proxy import GitProxy
+from src.ac.framework.ac_base import BaseCheck
+from src.ac.framework.ac_result import FAILED, WARNING, SUCCESS
+from src.ac.common.gitee_repo import GiteeRepo
+from src.ac.common.linter import LinterCheck
+from src.ac.common.rpm_spec_adapter import RPMSpecAdapter
+
+logger = logging.getLogger("ac")
+
+
+class CheckCodeStyle(BaseCheck):
+    def __init__(self, workspace, repo, conf):
+        super(CheckCodeStyle, self).__init__(workspace, repo, conf)
+
+        self._work_tar_dir = os.path.join(workspace, "code")    # directory the source archives are extracted into
+
+        self._gr = GiteeRepo(self._work_dir, self._work_tar_dir)
+
+    def check_compressed_file(self):
+        """
+        Decompress the source archives
+        """
+        return SUCCESS if 0 == self._gr.decompress_all() else FAILED
+
+    def check_patch(self):
+        """
+        Apply all patches listed in the spec file
+        """
+        patches = []
+        if self._gr.spec_file:
+            spec = RPMSpecAdapter(os.path.join(self._work_dir, self._gr.spec_file))
+            patches = spec.patches
+
+        rs = self._gr.apply_all_patches(*patches)
+
+        if 0 == rs:
+            return SUCCESS
+
+        return WARNING if 1 == rs else FAILED
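+
+    # note: GiteeRepo.apply_all_patches() returns 0 when every patch applied,
+    # 1 when only some of them did, and -1 when none did; check_patch() above
+    # maps those codes to SUCCESS / WARNING / FAILED respectively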
+    def check_code_style(self):
+        """
+        Check the style of the changed code
+        :return:
+        """
+        gp = GitProxy(self._work_dir)
+        diff_files = gp.diff_files_between_commits("HEAD~1", "HEAD~0")
+        logger.debug("diff files: {}".format(diff_files))
+
+        diff_code_files = []          # code files changed in the repo itself
+        diff_patch_code_files = []    # code files changed inside patches
+        for diff_file in diff_files:
+            if GiteeRepo.is_code_file(diff_file):
+                diff_code_files.append(diff_file)
+            elif GiteeRepo.is_patch_file(diff_file):
+                patch_dir = self._gr.patch_dir_mapping.get(diff_file)
+                logger.debug("diff patch {} apply at dir {}".format(diff_file, patch_dir))
+                if patch_dir is not None:
+                    files_in_patch = gp.extract_files_path_of_patch(diff_file)
+                    diff_patch_code_files.extend([os.path.join(patch_dir, file_in_patch)
+                                                  for file_in_patch in files_in_patch
+                                                  if GiteeRepo.is_code_file(file_in_patch)])
+        logger.debug("diff code files: {}".format(diff_code_files))
+        logger.debug("diff patch code files: {}".format(diff_patch_code_files))
+
+        rs_1 = self.check_file_under_work_dir(diff_code_files)
+        logger.debug("check_file_under_work_dir: {}".format(rs_1))
+        rs_2 = self.check_files_inner_patch(diff_patch_code_files)
+        logger.debug("check_files_inner_patch: {}".format(rs_2))
+
+        return rs_1 + rs_2
+
+    def check_file_under_work_dir(self, diff_code_files):
+        """
+        Check code changed in the repository itself
+        :return:
+        """
+        rs = [self.__class__.check_code_file(filename) for filename in set(diff_code_files)]
+
+        return sum(rs, SUCCESS) if rs else SUCCESS
+
+    def check_files_inner_patch(self, diff_patch_code_files):
+        """
+        Check code contained in the repository's patches
+        :return:
+        """
+        rs = [self.__class__.check_code_file(os.path.join(self._work_tar_dir, filename))
+              for filename in set(diff_patch_code_files)]
+
+        return sum(rs, SUCCESS) if rs else SUCCESS
+
+    @classmethod
+    def check_code_file(cls, file_path):
+        """
+        Run the matching linter over one code file
+        :param file_path:
+        :return:
+        """
+        if GiteeRepo.is_py_file(file_path):
+            rs = LinterCheck.check_python(file_path)
+        elif GiteeRepo.is_go_file(file_path):
+            rs = LinterCheck.check_golang(file_path)
+        elif GiteeRepo.is_c_cplusplus_file(file_path):
+            rs = LinterCheck.check_c_cplusplus(file_path)
+        else:
+            logger.error("should never arrive here, unsupported file {}".format(file_path))
+            return SUCCESS
+
+        logger.info("Linter: {:<40} {}".format(file_path, rs))
+        if rs.get("F", 0) > 0:
+            return FAILED
+
+        if rs.get("W", 0) > 0 or rs.get("E", 0) > 0:
+            return WARNING
+
+        return SUCCESS
+
+    def __call__(self, *args, **kwargs):
+        """
+        Entry point
+        :param args:
+        :param kwargs:
+        :return:
+        """
+        logger.info("check {} repo ...".format(self._repo))
+
+        not os.path.exists(self._work_tar_dir) and os.mkdir(self._work_tar_dir)
+        try:
+            return self.start_check_with_order("compressed_file", "patch", "code_style")
+        finally:
+            shutil.rmtree(self._work_tar_dir)
diff --git a/src/ac/acl/spec/__init__.py b/src/ac/acl/spec/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/ac/acl/spec/check_spec.py b/src/ac/acl/spec/check_spec.py
new file mode 100644
index 0000000000000000000000000000000000000000..55930a0565d0d7971cf8789a1edf7f155469483d
--- /dev/null
+++ b/src/ac/acl/spec/check_spec.py
@@ -0,0 +1,146 @@
+# -*- encoding=utf-8 -*-
+import logging
+import time
+
+import yaml
+
+from src.proxy.git_proxy import GitProxy
+from src.proxy.requests_proxy import do_requests
+from src.ac.framework.ac_result import FAILED, SUCCESS
+from src.ac.framework.ac_base import BaseCheck
+from src.ac.common.rpm_spec_adapter import RPMSpecAdapter
+from src.ac.common.gitee_repo import GiteeRepo
+
+logger = logging.getLogger("ac")
+
+
+class CheckSpec(BaseCheck):
+    def __init__(self, workspace, repo, conf=None):
+        super(CheckSpec, self).__init__(workspace, repo, conf)
+
+        self._gp = GitProxy(self._work_dir)
+        self._gr = GiteeRepo(self._work_dir, None)    # don't care about decompress
+        fp = self._gp.get_content_of_file_with_commit(self._gr.spec_file)
+        self._spec = RPMSpecAdapter(fp)
+        self._latest_commit = self._gp.commit_id_of_reverse_head_index(0)
+
+    def check_version(self):
+        """
+        Check that the current version is newer than the previous commit's
+        :return:
+        """
+        self._gp.checkout_to_commit("HEAD~1")
+        try:
+            gr = GiteeRepo(self._work_dir, None)    # don't care about decompress
+            fp = self._gp.get_content_of_file_with_commit(gr.spec_file)
+            if fp is None:
+                # last commit has no spec file
+                return SUCCESS
+            spec_o = RPMSpecAdapter(fp)
+        finally:
+            self._gp.checkout_to_commit(self._latest_commit)    # restore the working tree whatever happens
+
+        self._ex_pkgship(spec_o)
+
+        if self._spec > spec_o:
+            return SUCCESS
+        elif self._spec < spec_o:
+            if self._gp.is_revert_commit(depth=5):    # revert: version rolled back on purpose, ignore it
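+                # a revert within the last 5 commits legitimately lowers the
+                # version, so the downgrade is tolerated instead of failing
+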
logger.debug("revert commit") + return SUCCESS + + logger.error("current version: {}-r{}, last version: {}-r{}".format( + self._spec.version, self._spec.release, spec_o.version, spec_o.release)) + return FAILED + + def check_homepage(self, timeout=30, retrying=3, interval=1): + """ + 检查主页是否可访问 + :param timeout: 超时时间 + :param retrying: 重试次数 + :param interval: 重试间隔 + :return: + """ + homepage = self._spec.url + logger.debug("homepage: {}".format(homepage)) + if not homepage: + return SUCCESS + + for _ in xrange(retrying): + if 0 == do_requests("get", homepage, timeout=timeout): + return SUCCESS + time.sleep(interval) + + return FAILED + + def check_patches(self): + """ + 检查spec中的patch是否存在 + :return: + """ + patches_spec = set(self._spec.patches) + patches_file = set(self._gr.patch_files_not_recursive()) + logger.debug("spec patches: {}".format(patches_spec)) + logger.debug("file patches: {}".format(patches_file)) + + result = SUCCESS + for patch in patches_spec - patches_file: + logger.error("patch {} lost".format(patch)) + result = FAILED + for patch in patches_file - patches_spec: + logger.warning("patch {} redundant".format(patch)) + + return result + + def _ex_exclusive_arch(self): + """ + 保存spec中exclusive_arch信息 + :return: + """ + aarch64 = self._spec.include_aarch64_arch() + x86_64 = self._spec.include_x86_arch() + + content = None + if aarch64 and not x86_64: # only build aarch64 + content = "aarch64" + elif not aarch64 and x86_64: # only build x86_64 + content = "x86-64" + + if content is not None: + logger.info("exclusive arch \"{}\"".format(content)) + try: + with open("exclusive_arch", "w") as f: + f.write(content) + except IOError: + logger.exception("save exclusive arch exception") + + def _ex_pkgship(self, spec): + """ + pkgship需求 + :param spec: 上一个版本spec对应的RPMSpecAdapter对象 + :return: + """ + if not self._repo == "pkgship": + return + + logger.debug("special repo \"pkgship\"") + compare_version = RPMSpecAdapter.compare_version(self._spec.version, spec.version) + compare_release = RPMSpecAdapter.compare_version(self._spec.release, spec.release) + compare = self._spec.compare(spec) + + rs = {"repo": "pkgship", "curr_version": self._spec.version, "curr_release": self._spec.release, + "last_version": spec.version, "last_release": spec.release, + "compare_version": compare_version, "compare_release": compare_release, "compare": compare} + + logger.info("{}".format(rs)) + try: + with open("pkgship_notify", "w") as f: + yaml.safe_dump(rs, f) + except IOError: + logger.exception("save pkgship exception") + + def __call__(self, *args, **kwargs): + logger.info("check {} spec ...".format(self._repo)) + self._ex_exclusive_arch() + + return self.start_check() diff --git a/src/ac/common/__init__.py b/src/ac/common/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/ac/common/gitee_repo.py b/src/ac/common/gitee_repo.py new file mode 100644 index 0000000000000000000000000000000000000000..ceb64dcbf5338e0d62e7e9192a4e30d18315df7e --- /dev/null +++ b/src/ac/common/gitee_repo.py @@ -0,0 +1,156 @@ +# -*- encoding=utf-8 -*- +import os +import logging + +from src.proxy.git_proxy import GitProxy +from src.utils.shell_cmd import shell_cmd_live + +logger = logging.getLogger("ac") + + +class GiteeRepo(object): + def __init__(self, work_dir, decompress_dir): + self._work_dir = work_dir + self._decompress_dir = decompress_dir + + self._patch_files = [] + self._compress_files = [] + + self.spec_file = None + 
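# mapping: patch file -> sub-directory of the decompress dir it applied in;
+        # filled in by apply_patch() and consumed by check_code_style
+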
self.patch_dir_mapping = {} + + self.find_file_path() + + def find_file_path(self): + """ + compress file, patch file, diff file, spec file + """ + for dirpath, dirnames, filenames in os.walk(self._work_dir): + for filename in filenames: + rel_file_path = os.path.join(dirpath, filename).replace(self._work_dir, "").lstrip("/") + if self.is_compress_file(filename): + logger.debug("find compress file: {}".format(rel_file_path)) + self._compress_files.append(rel_file_path) + elif self.is_patch_file(filename): + logger.debug("find patch file: {}".format(rel_file_path)) + self._patch_files.append(rel_file_path) + elif self.is_spec_file(filename): + logger.debug("find spec file: {}".format(rel_file_path)) + self.spec_file = filename + + def patch_files_not_recursive(self): + """ + 获取当前目录下patch文件 + """ + return [filename for filename in os.listdir(self._work_dir) + if os.path.isfile(os.path.join(self._work_dir, filename)) and self.is_patch_file(filename)] + + def decompress_file(self, file_path): + """ + 解压缩文件 + :param file_path: + :return: + """ + if self._is_compress_zip_file(file_path): + decompress_cmd = "cd {}; unzip -d {} {}".format(self._work_dir, self._decompress_dir, file_path) + elif self._is_compress_tar_file(file_path): + decompress_cmd = "cd {}; tar -C {} -xavf {}".format(self._work_dir, self._decompress_dir, file_path) + else: + logger.warning("unsupport compress file: {}".format(file_path)) + return False + + ret, _, _ = shell_cmd_live(decompress_cmd) + if ret: + logger.debug("decompress failed") + return False + + return True + + def decompress_all(self): + """ + 解压缩所有文件 + :return: 0/全部成功,1/部分成功,-1/全部失败 + """ + if not self._compress_files: + logger.warning("no compressed source file") + rs = [self.decompress_file(filepath) for filepath in self._compress_files] + + return 0 if all(rs) else (1 if any(rs) else -1) + + def apply_patch(self, patch, max_leading=5): + """ + 尝试所有路径和leading + :param patch: 补丁 + :param max_leading: leading path + """ + logger.debug("apply patch {}".format(patch)) + for patch_dir in [filename for filename in os.listdir(self._decompress_dir) if os.path.isdir(os.path.join(self._decompress_dir, filename))] + ["."]: + if patch_dir.startswith(".git"): + continue + for leading in xrange(max_leading + 1): + logger.debug("try dir {} -p{}".format(patch_dir, leading)) + if GitProxy.apply_patch_at_dir(os.path.join(self._decompress_dir, patch_dir), + os.path.join(self._work_dir, patch), leading): + logger.debug("patch success".format(leading)) + self.patch_dir_mapping[patch] = patch_dir + return True + + logger.info("apply patch {} failed".format(patch)) + return False + + def apply_all_patches(self, *patches): + """ + 打补丁通常是有先后顺序的 + :param patches: 需要打的补丁 + """ + if not self._compress_files: + logger.debug("no compress source file, not need apply patch") + return 0 + + rs = [] + for patch in patches: + if patch in set(self._patch_files): + rs.append(self.apply_patch(patch)) + else: + logger.error("patch {} not exist".format(patch)) + rs.append(False) + + return 0 if all(rs) else (1 if any(rs) else -1) + + @staticmethod + def is_py_file(filename): + return filename.endswith((".py",)) + + @staticmethod + def is_go_file(filename): + return filename.endswith((".go",)) + + @staticmethod + def is_c_cplusplus_file(filename): + return filename.endswith((".c", ".cpp", ".cc", ".cxx", ".c++", ".h", ".hpp", "hxx")) + + @staticmethod + def is_code_file(filename): + return GiteeRepo.is_py_file(filename) \ + or GiteeRepo.is_go_file(filename) \ + or 
GiteeRepo.is_c_cplusplus_file(filename) + + @staticmethod + def is_patch_file(filename): + return filename.endswith((".patch", ".diff")) + + @staticmethod + def is_compress_file(filename): + return GiteeRepo._is_compress_tar_file(filename) or GiteeRepo._is_compress_zip_file(filename) + + @staticmethod + def _is_compress_zip_file(filename): + return filename.endswith((".zip",)) + + @staticmethod + def _is_compress_tar_file(filename): + return filename.endswith((".tar.gz", ".tar.bz", ".tar.bz2", ".tar.xz", "tgz")) + + @staticmethod + def is_spec_file(filename): + return filename.endswith((".spec",)) diff --git a/src/ac/common/linter.py b/src/ac/common/linter.py new file mode 100644 index 0000000000000000000000000000000000000000..95afdffe900d48a1b3816dc148eccb98e793872d --- /dev/null +++ b/src/ac/common/linter.py @@ -0,0 +1,94 @@ +# -*- encoding=utf-8 -*- +import re +import logging + +from src.utils.shell_cmd import shell_cmd_live + +logger = logging.getLogger("ac") + + +class LinterCheck(object): + PYLINTRESULTPREFIX = ["C", "R", "W", "E", "F"] + + @classmethod + def get_summary_of_pylint(cls, message): + """ + parser message for summary and details + """ + summary = {} + for prefix in cls.PYLINTRESULTPREFIX: + m = re.findall("{}: *[0-9]+, *[0-9]+:".format(prefix), "\n".join(message)) + summary[prefix] = len(m) + + return summary + + @classmethod + def get_summary_of_golint(cls, message): + """ + 所有都当作WARNING + """ + m = re.findall("\.go:[0-9]+:[0-9]+:", "\n".join(message)) + return {"W": len(m)} + + @classmethod + def get_summary_of_splint(cls, message): + """ + parser message for summary + """ + summary = {} + # summary["W"] = summary["W"] + message.count("Use -preproc to inhibit warning") + # summary["W"] = summary["W"] + message.count("Use -nestcomment to inhibit warning") + + return summary + + @classmethod + def check_python(cls, filepath): + """ + Check python script by pylint + Using the default text output, the message format is : + MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE + There are 5 kind of message types : + * (C) convention, for programming standard violation + * (R) refactor, for bad code smell + * (W) warning, for python specific problems + * (E) error, for probable bugs in the code + * (F) fatal, if an error occurred which prevented pylint from doing + """ + logger.debug("check python file: {}".format(filepath)) + pylint_cmd = "pylint3 {}".format(filepath) + ret, out, _ = shell_cmd_live(pylint_cmd, cap_out=True, verbose=True) + + if ret: + logger.debug("pylint ret, {}".format(ret)) + + return cls.get_summary_of_pylint(out) + + @classmethod + def check_golang(cls, filepath): + """ + Check golang code by golint + """ + logger.debug("check go file: {}".format(filepath)) + golint_cmd = "golint {}".format(filepath) + ret, out, _ = shell_cmd_live(golint_cmd, cap_out=True, verbose=True) + + if ret: + logger.debug("golint error, {}".format(ret)) + return {} + + return cls.get_summary_of_golint(out) + + @classmethod + def check_c_cplusplus(cls, filepath): + """ + Check c/c++ code by splint + """ + logger.debug("check c/c++ file: {}".format(filepath)) + splint_cmd = "splint {}".format(filepath) + ret, out, _ = shell_cmd_live(splint_cmd, cap_out=True, verbose=True) + + if ret: + logger.debug("splint error, {}".format(ret)) + return {} + + return cls.get_summary_of_splint(out) diff --git a/src/ac/common/pyrpm.py b/src/ac/common/pyrpm.py new file mode 100644 index 0000000000000000000000000000000000000000..7068aa9156e5e5082646c70c4dcc44064f6d87b7 --- /dev/null +++ b/src/ac/common/pyrpm.py 
@@ -0,0 +1,400 @@ +# -*- encoding=utf-8 -*- +"""Python module for parsing RPM spec files. +RPMs are build from a package's sources along with a spec file. The spec file controls how the RPM +is built. This module allows you to parse spec files and gives you simple access to various bits of +information that is contained in the spec file. +Current status: This module does not parse everything of a spec file. Only the pieces I needed. So +there is probably still plenty of stuff missing. However, it should not be terribly complicated to +add support for the missing pieces. +""" + +import re +from abc import ABCMeta, abstractmethod + +__all__ = ["Spec", "replace_macros", "Package"] + + +class _Tag(object): + __metaclass__ = ABCMeta + + def __init__(self, name, pattern_obj, attr_type): + self.name = name + self.pattern_obj = pattern_obj + self.attr_type = attr_type + + def test(self, line): + return re.search(self.pattern_obj, line) + + def update(self, spec_obj, context, match_obj, line): + """Update given spec object and parse context and return them again. + :param spec_obj: An instance of Spec class + :param context: The parse context + :param match_obj: The re.match object + :param line: The original line + :return: Given updated Spec instance and parse context dictionary. + """ + + assert spec_obj + assert context + assert match_obj + assert line + + return self.update_impl(spec_obj, context, match_obj, line) + + @abstractmethod + def update_impl(self, spec_obj, context, match_obj, line): + pass + + @staticmethod + def current_target(spec_obj, context): + target_obj = spec_obj + if context["current_subpackage"] is not None: + target_obj = context["current_subpackage"] + return target_obj + + +class _NameValue(_Tag): + """Parse a simple name to value tag.""" + + def __init__(self, name, pattern_obj, attr_type=None): + super(_NameValue, self).__init__(name, pattern_obj, attr_type if attr_type else str) + + def update_impl(self, spec_obj, context, match_obj, line): + target_obj = _Tag.current_target(spec_obj, context) + value = match_obj.group(1) + + # Sub-packages + if self.name == "name": + spec_obj.packages = [] + spec_obj.packages.append(Package(value)) + + setattr(target_obj, self.name, self.attr_type(value)) + return spec_obj, context + + +class _MacroDef(_Tag): + """Parse global macro definitions.""" + + def __init__(self, name, pattern_obj): + super(_MacroDef, self).__init__(name, pattern_obj, str) + + def update_impl(self, spec_obj, context, match_obj, line): + name, value = match_obj.groups() + setattr(spec_obj, name, str(value)) + return spec_obj, context + + +class _List(_Tag): + """Parse a tag that expands to a list.""" + + def __init__(self, name, pattern_obj): + super(_List, self).__init__(name, pattern_obj, list) + + def update_impl(self, spec_obj, context, match_obj, line): + target_obj = _Tag.current_target(spec_obj, context) + + if not hasattr(target_obj, self.name): + setattr(target_obj, self.name, list()) + + value = match_obj.group(1) + if self.name == "packages": + if value == "-n": + subpackage_name = line.rsplit(" ", 1)[-1].rstrip() + else: + subpackage_name = "{}-{}".format(spec_obj.name, value) + package = Package(subpackage_name) + context["current_subpackage"] = package + package.is_subpackage = True + spec_obj.packages.append(package) + elif self.name in [ + "build_requires", + "requires", + "conflicts", + "obsoletes", + "provides", + ]: + # Macros are valid in requirements + value = replace_macros(value, spec=spec_obj) + + # It's also legal to do: + # Requires: 
a b c + # Requires: b >= 3.1 + # Requires: a, b >= 3.1, c + + # 1. Tokenize + tokens = [val for val in re.split("[\t\n, ]", value) if val != ""] + values = [] + + # 2. Join + add = False + for val in tokens: + if add: + add = False + val = values.pop() + " " + val + elif val in [">=", "!=", ">", "<", "<=", "==", "="]: + add = True # Add next value to this one + val = values.pop() + " " + val + values.append(val) + + for val in values: + requirement = Requirement(val) + getattr(target_obj, self.name).append(requirement) + else: + getattr(target_obj, self.name).append(value) + + return spec_obj, context + + +class _ListAndDict(_Tag): + """Parse a tag that expands to a list and to a dict.""" + + def __init__(self, name, pattern_obj): + super(_ListAndDict, self).__init__(name, pattern_obj, list) + + def update_impl(self, spec_obj, context, match_obj, line): + source_name, value = match_obj.groups() + dictionary = getattr(spec_obj, "{}_dict".format(self.name)) + dictionary[source_name] = value + target_obj = _Tag.current_target(spec_obj, context) + getattr(target_obj, self.name).append(value) + return spec_obj, context + + +def re_tag_compile(tag): + return re.compile(tag, re.IGNORECASE) + + +_tags = [ + _NameValue("name", re_tag_compile(r"^Name\s*:\s*(\S+)")), + _NameValue("version", re_tag_compile(r"^Version\s*:\s*(\S+)")), + _NameValue("epoch", re_tag_compile(r"^Epoch\s*:\s*(\S+)")), + _NameValue("release", re_tag_compile(r"^Release\s*:\s*(\S+)")), + _NameValue("summary", re_tag_compile(r"^Summary\s*:\s*(.+)")), + _NameValue("license", re_tag_compile(r"^License\s*:\s*(.+)")), + _NameValue("group", re_tag_compile(r"^Group\s*:\s*(\S+)")), + _NameValue("url", re_tag_compile(r"^URL\s*:\s*(\S+)")), + _NameValue("buildroot", re_tag_compile(r"^BuildRoot\s*:\s*(\S+)")), + _NameValue("buildarch", re_tag_compile(r"^ExclusiveArch\s*:\s*(\S+)")), + _ListAndDict("sources", re_tag_compile(r"^(Source\d*)\s*:\s*(\S+)")), + _ListAndDict("patches", re_tag_compile(r"^(Patch\d*)\s*:\s*(\S+)")), + _List("build_requires", re_tag_compile(r"^BuildRequires\s*:\s*(.+)")), + _List("requires", re_tag_compile(r"^Requires\s*:\s*(.+)")), + _List("conflicts", re_tag_compile(r"^Conflicts\s*:\s*(.+)")), + _List("obsoletes", re_tag_compile(r"^Obsoletes\s*:\s*(.+)")), + _List("provides", re_tag_compile(r"^Provides\s*:\s*(.+)")), + _List("packages", re.compile(r"^%package\s+(\S+)")), + _MacroDef("define", re.compile(r"^%define\s+(\S+)\s+(\S+)")), + _MacroDef("global", re.compile(r"^%global\s+(\S+)\s+(\S+)")), +] + +_macro_pattern = re.compile(r"%{(\S+?)\}") + + +def _parse(spec_obj, context, line): + for tag in _tags: + match = tag.test(line) + if match: + return tag.update(spec_obj, context, match, line) + return spec_obj, context + + +class Requirement: + """Represents a single requirement or build requirement in an RPM spec file. + Each spec file contains one or more requirements or build requirements. + For example, consider following spec file:: + Name: foo + Version: 0.1 + %description + %{name} is the library that everyone needs. + %package devel + Summary: Header files, libraries and development documentation for %{name} + Group: Development/Libraries + Requires: %{name}%{?_isa} = %{version}-%{release} + BuildRequires: gstreamer%{?_isa} >= 0.1.0 + %description devel + This package contains the header files, static libraries, and development + documentation for %{name}. If you like to develop programs using %{name}, you + will need to install %{name}-devel. 
+ This spec file's requirements have a name and either a required or minimum + version. + """ + + expr = re.compile(r"(.*?)\s+([<>]=?|=)\s+(\S+)") + + def __init__(self, name): + assert isinstance(name, str) + self.line = name + match = Requirement.expr.match(name) + if match: + self.name = match.group(1) + self.operator = match.group(2) + self.version = match.group(3) + else: + self.name = name + self.operator = None + self.version = None + + def __repr__(self): + return self.line + + +class Package: + """Represents a single package in a RPM spec file. + Each spec file describes at least one package and can contain one or more subpackages (described + by the %package directive). For example, consider following spec file:: + Name: foo + Version: 0.1 + %description + %{name} is the library that everyone needs. + %package devel + Summary: Header files, libraries and development documentation for %{name} + Group: Development/Libraries + Requires: %{name}%{?_isa} = %{version}-%{release} + %description devel + This package contains the header files, static libraries, and development + documentation for %{name}. If you like to develop programs using %{name}, you + will need to install %{name}-devel. + %package -n bar + Summary: A command line client for foo. + License: GPLv2+ + %description -n bar + This package contains a command line client for foo. + This spec file will create three packages: + * A package named foo, the base package. + * A package named foo-devel, a subpackage. + * A package named bar, also a subpackage, but without the foo- prefix. + As you can see above, the name of a subpackage normally includes the main package name. When the + -n option is added to the %package directive, the prefix of the base package name is omitted and + a completely new name is used. + """ + + def __init__(self, name): + assert isinstance(name, str) + + for tag in _tags: + if tag.attr_type is list and tag.name in [ + "build_requires", + "requires", + "conflicts", + "obsoletes", + "provides", + ]: + setattr(self, tag.name, tag.attr_type()) + + self.name = name + self.is_subpackage = False + + def __repr__(self): + return "Package('{}')".format(self.name) + + +class Spec: + """Represents a single spec file. + """ + + def __init__(self): + for tag in _tags: + if tag.attr_type is list: + setattr(self, tag.name, tag.attr_type()) + else: + setattr(self, tag.name, None) + + self.sources_dict = dict() + self.patches_dict = dict() + + @property + def packages_dict(self): + """All packages in this RPM spec as a dictionary. + You can access the individual packages by their package name, e.g., + git_spec.packages_dict['git-doc'] + """ + assert self.packages + return dict(zip([package.name for package in self.packages], self.packages)) + + @staticmethod + def from_file(filename): + """Creates a new Spec object from a given file. + :param filename: The path to the spec file. + :return: A new Spec object. + """ + + spec = Spec() + with open(filename, "r") as f: + #with open(filename, "r", encoding="utf-8") as f: + parse_context = {"current_subpackage": None} + for line in f: + spec, parse_context = _parse(spec, parse_context, line) + return spec + + @staticmethod + def from_string(string): + """Creates a new Spec object from a given string. + :param string: The contents of a spec file. + :return: A new Spec object. 
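+        Illustrative use (a sketch, not from the test suite):
+            spec = Spec.from_string("Name: foo\nVersion: 2.0")
+            spec.name      # "foo"
+            spec.version   # "2.0"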
+ """ + + spec = Spec() + parse_context = {"current_subpackage": None} + for line in string.splitlines(): + spec, parse_context = _parse(spec, parse_context, line) + return spec + + +def replace_macros(string, spec=None): + """Replace all macros in given string with corresponding values. + For example: a string '%{name}-%{version}.tar.gz' will be transformed to 'foo-2.0.tar.gz'. + :param string A string containing macros that you want to be replaced + :param spec An optional spec file. If given, definitions in that spec + file will be used to replace macros. + :return A string where all macros in given input are substituted as good as possible. + """ + if spec: + assert isinstance(spec, Spec) + + def _is_conditional(macro): + return macro.startswith("?") or macro.startswith("!") + + def _test_conditional(macro): + if macro[0] == "?": + return True + if macro[0] == "!": + return False + raise Exception("Given string is not a conditional macro") + + def _macro_repl(match): + macro_name = match.group(1) + if _is_conditional(macro_name) and spec: + parts = macro_name[1:].split(":", 1) + assert parts + if _test_conditional(macro_name): + if hasattr(spec, parts[0]): + if len(parts) == 2: + return parts[1] + + return getattr(spec, parts[0], None) + + return "" + + if not hasattr(spec, parts[0]): + if len(parts) == 2: + return parts[1] + + return getattr(spec, parts[0], None) + + return "" + + if spec: + value = getattr(spec, macro_name, None) + if value: + return str(value) + return match.string[match.start() : match.end()] + + # Recursively expand macros + # Note: If macros are not defined in the spec file, this won't try to + # expand them. + while True: + ret = re.sub(_macro_pattern, _macro_repl, string) + if ret != string: + string = ret + continue + return ret diff --git a/src/ac/common/rpm_spec_adapter.py b/src/ac/common/rpm_spec_adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..01dea76cd14c329d035f27d88f9f833f0e000187 --- /dev/null +++ b/src/ac/common/rpm_spec_adapter.py @@ -0,0 +1,111 @@ +# -*- encoding=utf-8 -*- +import re +import logging + +from src.ac.common.pyrpm import Spec, replace_macros + +logger = logging.getLogger("ac") + + +class RPMSpecAdapter(object): + def __init__(self, fp): + if isinstance(fp, str): + with open(fp, "r") as fp: + self._adapter = Spec.from_string(fp.read()) + else: + self._adapter = Spec.from_string(fp.read()) + fp.close() + + def __getattr__(self, item): + """ + + :param item: + :return + """ + value = getattr(self._adapter, item) + if isinstance(value, list): + return [replace_macros(item, self._adapter) for item in value] + + return replace_macros(value, self._adapter) if value else "" + + def include_x86_arch(self): + try: + value = self.buildarch + logger.debug("build arch: {}".format(value)) + if "x86_64" in value.lower(): + return True + + return False + except AttributeError: + return True + + def include_aarch64_arch(self): + try: + value = self.buildarch + logger.debug("build arch: {}".format(value)) + if "aarch64" in value.lower(): + return True + + return False + except AttributeError: + return True + + @staticmethod + def compare_version(version_n, version_o): + """ + :param version_n: + :param version_o: + :return: 0~eq, 1~gt, -1~lt + """ + # replace continued chars to dot + version_n = re.sub("[a-zA-Z_-]+", ".", version_n).strip().strip(".") + version_o = re.sub("[a-zA-Z_-]+", ".", version_o).strip().strip(".") + # replace continued dots to a dot + version_n = re.sub("\.+", ".", version_n) + version_o = 
re.sub("\.+", ".", version_o) + # same partitions with ".0" padding + # "..." * -n = "" + version_n = "{}{}".format(version_n, '.0' * (len(version_o.split('.')) - len(version_n.split('.')))) + version_o = "{}{}".format(version_o, '.0' * (len(version_n.split('.')) - len(version_o.split('.')))) + + logger.debug("compare versions: {} vs {}".format(version_n, version_o)) + z = zip(version_n.split("."), version_o.split(".")) + + for p in z: + try: + if int(p[0]) < int(p[1]): + return -1 + elif int(p[0]) > int(p[1]): + return 1 + except ValueError as exc: + logger.debug("check version exception, {}".format(exc)) + continue + + return 0 + + def compare(self, other): + """ + 比较spec的版本号和发布号 + :param other: + :return: 0~eq, 1~gt, -1~lt + """ + if self.__class__.compare_version(self.version, other.version) == 1: + return 1 + if self.__class__.compare_version(self.version, other.version) == -1: + return -1 + + if self.__class__.compare_version(self.release, other.release) == 1: + return 1 + if self.__class__.compare_version(self.release, other.release) == -1: + return -1 + + return 0 + + def __lt__(self, other): + return -1 == self.compare(other) + + def __eq__(self, other): + return 0 == self.compare(other) + + def __gt__(self, other): + return 1 == self.compare(other) diff --git a/src/ac/framework/__init__.py b/src/ac/framework/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/ac/framework/ac.py b/src/ac/framework/ac.py new file mode 100644 index 0000000000000000000000000000000000000000..35642f573523cc49f7e269ef81c1887bb668f41c --- /dev/null +++ b/src/ac/framework/ac.py @@ -0,0 +1,152 @@ +# -*- encoding=utf-8 -*- +import os +import sys +import yaml +import logging.config +import logging +import json +import argparse +from importlib import import_module + + +class AC(object): + def __init__(self, conf): + self._ac_check_elements = {} # 门禁项 + self._ac_check_result = [] # 门禁结果结果 + + acl_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "../acl")) + self._acl_package = "src.ac.acl" # take attention about import module + self.load_check_elements_from_acl_directory(acl_path) + self.load_check_elements_from_conf(conf) + + logger.debug("check list: {}".format(self._ac_check_elements)) + + def check_all(self, workspace, repo): + """ + 门禁检查 + :param workspace: + :param repo: + :return: + """ + for element in self._ac_check_elements: + check_element = self._ac_check_elements[element] + logger.debug("check {}".format(element)) + + # import module + module_path = check_element.get("module", "{}.check_{}".format(element, element)) # eg: spec.check_spec + try: + module = import_module("." 
+ module_path, self._acl_package) + logger.debug("load module {} succeed".format(module_path)) + except ImportError as exc: + logger.exception("import module {} exception, {}".format(module_path, exc)) + continue + + # import entry + entry_name = check_element.get("entry", "Check{}".format(element.capitalize())) + try: + entry = getattr(module, entry_name) + logger.debug("load entry \"{}\" succeed".format(entry_name)) + except AttributeError as exc: + logger.warning("entry \"{}\" not exist in module {}, {}".format(entry_name, module_path, exc)) + continue + + # new a instance + if isinstance(entry, type): # class object + try: + entry = entry(workspace, repo, check_element) # new a instance + except Exception as exc: + logger.exception("new a instance of class {} exception, {}".format(entry_name, exc)) + return + + if not callable(entry): # check callable + logger.warning("entry {} not callable".format(entry_name)) + continue + + # do ac check + try: + result = entry() + logger.debug("check result {}".format(element, result)) + except Exception as exc: + logger.exception("check exception, {}".format(element, exc)) + continue + + # show in gitee, must starts with "check_" + hint = check_element.get("hint", "check_{}".format(element)) + if not hint.startswith("check_"): + hint = "check_{}".format(hint) + self._ac_check_result.append({"name": hint, "result": result.val}) + + logger.debug("ac result: {}".format(self._ac_check_result)) + + def load_check_elements_from_acl_directory(self, acl_dir): + """ + 加载当前目录下所有门禁项 + :return: + """ + for filename in os.listdir(acl_dir): + if os.path.isdir(os.path.join(acl_dir, filename)): + self._ac_check_elements[filename] = {} # don't worry, using default when checking + + def load_check_elements_from_conf(self, conf_file): + """ + 加载门禁项目,只支持yaml格式 + :param conf_file: 配置文件路径 + :return: + """ + try: + with open(conf_file, "r") as f: + elements = yaml.safe_load(f) + except FileNotFoundError as exc: + logger.warning("ac conf file {} not exist".format(conf_file)) + return + except Exception as exc: + logger.warning("load conf file exception, {}".format(exc)) + return + + for name in elements: + if name in self._ac_check_elements: + if elements[name].get("exclude"): + logger.debug("exclude: {}".format(name)) + self._ac_check_elements.pop(name) + else: + self._ac_check_elements[name] = elements[name] + + def save(self, ac_file): + """ + save result + :param ac_file: + :return: + """ + logger.debug("save ac result to file {}".format(ac_file)) + with open(ac_file, "w") as f: + f.write("ACL={}".format(json.dumps(self._ac_check_result))) + + +if "__main__" == __name__: + args = argparse.ArgumentParser() + args.add_argument("-w", type=str, dest="workspace", help="workspace where to find source") + args.add_argument("-r", type=str, dest="repo", help="repo name") + args.add_argument("-n", type=str, dest="owner", default="src-openeuler", help="gitee owner") + args.add_argument("-o", type=str, dest="output", help="output file to save result") + args.add_argument("-p", type=str, dest="pr", help="pull request number") + args.add_argument("-t", type=str, dest="token", help="gitee api token") + args = args.parse_args() + + # init logging + not os.path.exists("log") and os.mkdir("log") + logger_conf_path = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../conf/logger.conf")) + logging.config.fileConfig(logger_conf_path) + logger = logging.getLogger("ac") + + logger.info("------------------AC START--------------") + + # notify gitee + # TODO use 
replace all tags instead, state machine in future + from src.proxy.gitee_proxy import GiteeProxy + gp = GiteeProxy(args.owner, args.repo, args.token) + gp.delete_tag_of_pr(args.pr, "ci_finished") + gp.create_tags_of_pr(args.pr, "ci_processing") + + ac = AC(os.path.join(os.path.dirname(os.path.realpath(__file__)), "ac.yaml")) + ac.check_all(workspace=args.workspace, repo=args.repo) + ac.save(args.output) diff --git a/src/ac/framework/ac.yaml b/src/ac/framework/ac.yaml new file mode 100644 index 0000000000000000000000000000000000000000..1785f2d14b5a012dbc7d769469c1af70ccc71ca5 --- /dev/null +++ b/src/ac/framework/ac.yaml @@ -0,0 +1,11 @@ +spec: + hint: check_spec_file + module: spec.check_spec + entry: CheckSpec + ignored: ["homepage"] +code: + hint: check_code_style + module: code.check_code_style + entry: CheckCodeStyle + #exclude: True + ignored: [] diff --git a/src/ac/framework/ac_base.py b/src/ac/framework/ac_base.py new file mode 100644 index 0000000000000000000000000000000000000000..5be63378fe509088880efd3741b1a9c09e4e0a87 --- /dev/null +++ b/src/ac/framework/ac_base.py @@ -0,0 +1,76 @@ +# -*- encoding=utf-8 -*- +from abc import ABCMeta, abstractmethod +import inspect +import logging +import os + +from src.ac.framework.ac_result import SUCCESS, WARNING, FAILED + +logger = logging.getLogger("ac") + + +class BaseCheck(object): + + __metaclass__ = ABCMeta + + def __init__(self, workspace, repo, conf=None): + """ + + :param repo: + :param workspace: + :param conf: + """ + self._repo = repo + self._workspace = workspace + self._conf = conf + + self._work_dir = os.path.join(workspace, repo) + + @abstractmethod + def __call__(self, *args, **kwargs): + raise NotImplementedError("subclasses must override __call__!") + + def start_check_with_order(self, *items): + """ + 按照items中顺序运行 + """ + result = SUCCESS + for name in items: + try: + logger.debug("check {}".format(name)) + method = getattr(self, "check_{}".format(name)) + rs = method() + logger.debug("{} -> {}".format(name, rs)) + except Exception as e: + # 忽略代码错误 + logger.exception("internal error: {}".format(e)) + continue + + ignored = True if self._conf and name in self._conf.get("ignored", []) else False + logger.debug("{} ignore: {}".format(name, ignored)) + + if rs is SUCCESS: + logger.info("check {:<30}pass".format(name)) + elif rs is WARNING: + logger.warning("check {:<30}warning{}".format(name, " [ignored]" if ignored else "")) + elif rs is FAILED: + logger.error("check {:<30}fail{}".format(name, " [ignored]" if ignored else "")) + else: + # never here + logger.exception("check {:<30}exception{}".format(name, " [ignored]" if ignored else "")) + continue + + if not ignored: + result += rs + + return result + + def start_check(self): + """ + 运行所有check_开头的函数 + """ + members = inspect.getmembers(self, inspect.ismethod) + items = [member[0].replace("check_", "") for member in members if member[0].startswith("check_")] + logger.debug("check items: {}".format(items)) + + return self.start_check_with_order(*items) diff --git a/src/ac/framework/ac_result.py b/src/ac/framework/ac_result.py new file mode 100644 index 0000000000000000000000000000000000000000..9738b67bba9082dd3a711e23162dd9ab627e6fd9 --- /dev/null +++ b/src/ac/framework/ac_result.py @@ -0,0 +1,53 @@ +# -*- encoding=utf-8 -*- +""" +Use this variables (FAILED, WARNING, SUCCESS) at most time, +and don't new ACResult unless you have specific needs. 
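+Results combine with "+": ACResult.__add__ returns whichever operand has
+the larger val, so the worst result always wins, e.g.
+    SUCCESS + WARNING -> WARNING
+    WARNING + FAILED  -> FAILED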
+""" + + +class ACResult(object): + """ + ac test result + """ + def __init__(self, val): + self._val = val + + def __add__(self, other): + return self if self.val >= other.val else other + + def __str__(self): + return self.hint + + def __repr__(self): + return self.__str__() + + @classmethod + def get_instance(cls, val): + if isinstance(val, int): + return {0: SUCCESS, 1: WARNING, 2: FAILED}.get(val) + if isinstance(val, bool): + return {True: SUCCESS, False: FAILED}.get(val) + + try: + val = int(val) + return {0: SUCCESS, 1: WARNING, 2: FAILED}.get(val) + except ValueError: + return {"success": SUCCESS, "fail": FAILED, "failed": FAILED, "failure": FAILED, + "warn": WARNING, "warning": WARNING}.get(val.lower(), FAILED) + + @property + def val(self): + return self._val + + @property + def hint(self): + return ["SUCCESS", "WARNING", "FAILED"][self.val] + + @property + def emoji(self): + return [":white_check_mark:", ":bug:", ":x:"][self.val] + + +FAILED = ACResult(2) +WARNING = ACResult(1) +SUCCESS = ACResult(0) diff --git a/src/build/__init__.py b/src/build/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/build/build_rpm_package.py b/src/build/build_rpm_package.py new file mode 100755 index 0000000000000000000000000000000000000000..ccafbea9335175a3240bfed60f744950b314974c --- /dev/null +++ b/src/build/build_rpm_package.py @@ -0,0 +1,162 @@ +# -*- encoding=utf-8 -*- +import os +import re + + +class BuildRPMPackage(object): + + LINKMAGIC = "0X080480000XC0000000" # 不要和gitee中用户名相同 + + def __init__(self, repo, rpmbuild_dir): + """ + + :param repo: 包名 + :param rpmbuild_dir: rpmbuild路径 + """ + self._repo = repo + self._rpmbuild_dir = rpmbuild_dir + + self._rpm_packages = {"srpm": {}, "rpm": {}} + self._package_structure(rpmbuild_dir) + + def main_package_local(self): + """ + 返回主包在本地路径 + :param arch: + :return: + """ + package = self._rpm_packages["rpm"].get(self._repo) + if not package: + # not exist + return None + + return os.path.join(self._rpmbuild_dir, "RPMS", package["arch"], package["fullname"]) + + def main_package_in_repo(self, committer, arch, rpm_repo_url): + """ + 返回主包在repo mirror路径 + :param committer: + :param arch: + :param rpm_repo_url: + :return: + """ + return self.get_package_path(committer, arch, self._repo, rpm_repo_url) + + def last_main_package(self, arch, rpm_repo_url): + """ + 返回主包在repo mirror链接路径(上次构建的rpm包) + :param arch: + :param rpm_repo_url: 构建出的rpm包保存的远端地址 + :return: + """ + return os.path.join(rpm_repo_url, self.LINKMAGIC, arch, self._repo) + + def debuginfo_package_local(self): + """ + 返回debuginfo包在本地路径 + :return: + """ + package = self._rpm_packages["rpm"].get("{}-debuginfo".format(self._repo)) + if not package: + # not exist + return None + + return os.path.join(self._rpmbuild_dir, "RPMS", package["arch"], package["fullname"]) + + def debuginfo_package_in_repo(self, committer, arch, rpm_repo_url): + """ + 返回debuginfo包在repo mirror路径 + :param committer: + :param arch: + :return: + """ + return self.get_package_path(committer, arch, "{}-debuginfo".format(self._repo), rpm_repo_url) + + def last_debuginfo_package(self, arch, rpm_repo_url): + """ + 返回debuginfo包在repo mirror链接路径(上次构建的rpm包) + :param arch: + :return: + """ + return os.path.join(rpm_repo_url, self.LINKMAGIC, arch, "{}-debuginfo".format(self._repo)) + + @staticmethod + def checkabi_md_in_repo(committer, repo, arch, md, rpm_repo_url): + """ + 返回checkabi结果在repo mirror路径 + :param committer: + :param arch: + :param md: + 
:param rpm_repo_url: + :return: + """ + return os.path.join(rpm_repo_url, committer, repo, arch, md) + + def get_package_path(self, committer, arch, name, remote_url): + """ + 返回包在repo mirror路径 + :param committer: + :param arch: + :param name: 包名 + :param remote_url: 仓库远端地址 + :return: + """ + package = self._rpm_packages["rpm"].get(name) + if not package: + # not exist + return None + + if arch == "noarch": + return os.path.join(remote_url, committer, name, arch, package["fullname"]) + else: + return os.path.join(remote_url, committer, name, arch, "noarch", package["fullname"]) + + def get_package_fullname(self, name): + """ + 获取包全名 + :param name: + :return: + """ + package = self._rpm_packages["rpm"].get(name) + return package["fullname"] if package else name + + def get_srpm_path(self): + """ + for future + :return: + """ + raise NotImplementedError + + @staticmethod + def extract_rpm_name(rpm_fullname): + """ + 取出名字部分 + :param rpm_fullname: + :return: + """ + try: + m = re.match("(.*)-[0-9.]+-.*rpm", rpm_fullname) + return m.group(1) + except: + return rpm_fullname + + def _package_structure(self, rpmbuild_dir): + """ + rpm package 结构 + :param rpmbuild_dir: rpmbuild路径 + :return: + """ + rpms_dir = os.path.join(rpmbuild_dir, "RPMS") + for dirname, _, filenames in os.walk(rpms_dir): + arch = dirname.split("/")[-1] + if arch == "i386": + aarch = "x86-64" + for filename in filenames: + name = self.extract_rpm_name(filename) + self._rpm_packages["rpm"][name] = {"name": name, "fullname": filename, "arch": arch} + + srpms = os.path.join(rpmbuild_dir, "SRPMS") + for dirname, _, filenames in os.walk(srpms): + for filename in filenames: + name = self.extract_rpm_name(filename) + self._rpm_packages["srpm"][name] = {"name": name, "fullname": filename} diff --git a/src/build/extra_work.py b/src/build/extra_work.py new file mode 100755 index 0000000000000000000000000000000000000000..f9c4f1d92c30566fbbd96d378d1f1ada690980d0 --- /dev/null +++ b/src/build/extra_work.py @@ -0,0 +1,178 @@ +# -*- encoding=utf-8 -*- +import os +import argparse +import logging.config +import logging + +import yaml + + +class ExtraWork(object): + def __init__(self, package, rpmbuild_dir="/home/jenkins/agent/buildroot/home/abuild/rpmbuild"): + """ + + :param package: obs package + :param rpmbuild_dir: rpmbuild 路径 + """ + self._repo = package + self._rpm_package = BuildRPMPackage(package, rpmbuild_dir) + + def is_pkgship_need_notify(self, pkgship_meta_path): + """ + 是否需要发起notify + :param pkgship_meta_path: 保存门禁中解析的pkgship spec版本元信息文件路径 + :return: + """ + if self._repo == "pkgship": # 只有pkgship包需要通知 + try: + with open(pkgship_meta_path, "r") as f: + pkgship_meta = yaml.safe_load(f) + logger.debug("pkgship meta: {}".format(pkgship_meta)) + if pkgship_meta.get("compare_version") == 1: # version upgrade + logger.debug("pkgship: notify") + return True + except IOError: + # file not exist, bug + logger.warning("pkgship meta file not exist!") + return True + + return False + + def pkgship_notify(self, notify_url, notify_token, package_url, package_arch, notify_jenkins_user, notify_jenkins_password): + """ + notify + :param notify_url: notify url + :param notify_token: notify token + :param package_url: package addr + :param package_arch: cpu arch + :param notify_jenkins_user: + :param notify_jenkins_password: + :return: + """ + package = self._rpm_package.last_main_package(package_arch, package_url) + querystring = {"token": notify_token, "PACKAGE_URL": package, "arch": package_arch} + ret = do_requests("get", notify_url, 
querystring=querystring, + auth={"user": notify_jenkins_user, "password": notify_jenkins_password}, timeout=1) + if ret in [0, 2]: + # send async, don't care about response, timeout will be ok + logger.info("notify ...ok") + else: + logger.error("notify ...fail") + + def check_rpm_abi(self, package_url, package_arch, output, committer, comment_file, related_rpm=None): + """ + 对比两个版本rpm包之间的接口差异,根据差异找到受影响的rpm包 + + :param package_arch: + :param related_rpm: + :return: + """ + cwd = os.getcwd() + check_abi_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../utils/check_abi.py")) + + curr_rpm = self._rpm_package.main_package_local() + last_rpm = self._rpm_package.last_main_package(package_arch, package_url) + logger.debug("curr_rpm: {}".format(curr_rpm)) + logger.debug("last_rpm: {}".format(last_rpm)) + + if not curr_rpm or not last_rpm: + logger.info("no rpms") + return + + check_abi_cmd = "{} -o {}".format(check_abi_path, os.path.join(cwd, output)) + + if related_rpm: + # obs + check_abi_cmd = "{} -i {}".format(check_abi_cmd, related_rpm) + + check_abi_cmd = "{} compare_rpm -r {} {}".format(check_abi_cmd, last_rpm, curr_rpm) + + curr_rpm_debug = self._rpm_package.debuginfo_package_local() + last_rpm_debug = self._rpm_package.last_debuginfo_package(package_arch, package_url) + logger.debug("curr_rpm_debug: {}".format(curr_rpm_debug)) + logger.debug("last_rpm_debug: {}".format(last_rpm_debug)) + + if curr_rpm_debug and last_rpm_debug: + # debuginfo + check_abi_cmd = "{} -d {} {}".format(check_abi_cmd, last_rpm_debug, curr_rpm_debug) + + logger.info("check cmd: {}".format(check_abi_cmd)) + ret, _, err = shell_cmd_live(check_abi_cmd, verbose=True) + + if ret == 1: + logger.error("check abi error: {}".format(err)) + else: + logger.debug("check abi ok: {}".format(ret)) + + if os.path.exists(output): + # change of abi + comment = {"name": "check_abi/{}/{}".format(package_arch, self._repo), "result": "WARNING", + "link": self._rpm_package.checkabi_md_in_repo(committer, self._repo, package_arch, output, package_url)} + else: + comment = {"name": "check_abi/{}/{}".format(package_arch, self._repo), "result": "SUCCESS"} + + logger.debug("check abi comment: {}".format(comment)) + try: + with open(comment_file, "r") as f: # one repo with multi build package + comments = yaml.safe_load(f) + except IOError as e: + logger.debug("no history check abi comment") + + comments = [] + if os.path.exists(comment_file): + try: + with open(comment_file, "r") as f: # one repo with multi build package + comments = yaml.safe_load(f) + except: + logger.exception("yaml load check abi comment file exception") + + comments.append(comment) + logger.debug("check abi comments: {}".format(comments)) + try: + with open(comment_file, "w") as f: + yaml.safe_dump(comments, f) # list + except: + logger.exception("save check abi comment exception") + + +if "__main__" == __name__: + args = argparse.ArgumentParser() + + args.add_argument("-f", type=str, dest="func", choices=("notify", "checkabi"), help="function") + + args.add_argument("-p", type=str, dest="package", help="obs package") + args.add_argument("-a", type=str, dest="arch", help="build arch") + args.add_argument("-c", type=str, dest="committer", help="committer") + + args.add_argument("-d", type=str, dest="rpmbuild_dir", default="/home/jenkins/agent/buildroot/home/abuild/rpmbuild", help="rpmbuild dir") + + args.add_argument("-n", type=str, dest="notify_url", help="target branch that merged to ") + args.add_argument("-t", type=str, dest="token", 
default=os.getcwd(), help="obs workspace dir path") + args.add_argument("-u", type=str, dest="notify_user", default="trigger", help="notify trigger user") + args.add_argument("-w", type=str, dest="notify_password", help="notify trigger password") + args.add_argument("-l", type=str, dest="rpm_repo_url", help="rpm repo where rpm saved") + args.add_argument("-m", type=str, dest="pkgship_meta", help="meta from pkgship spec") + + args.add_argument("-o", type=str, dest="output", help="checkabi result") + args.add_argument("-e", type=str, dest="comment_file", help="checkabi result comment") + args.add_argument("-b", type=str, dest="obs_repo_url", help="obs repo where rpm saved") + + args = args.parse_args() + + not os.path.exists("log") and os.mkdir("log") + logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf")) + logging.config.fileConfig(logger_conf_path) + logger = logging.getLogger("build") + + from src.utils.shell_cmd import shell_cmd_live + from src.proxy.requests_proxy import do_requests + from src.build.build_rpm_package import BuildRPMPackage + + ew = ExtraWork(args.package, args.rpmbuild_dir) + if args.func == "notify": + # run after copy rpm to rpm repo + if ew.is_pkgship_need_notify(args.pkgship_meta): + ew.pkgship_notify(args.notify_url, args.token, args.rpm_repo_url, args.arch, args.notify_user, args.notify_password) + elif args.func == "checkabi": + # run before copy rpm to rpm repo + ew.check_rpm_abi(args.rpm_repo_url, args.arch, args.output, args.committer, args.comment_file, args.obs_repo_url) diff --git a/src/build/extract_file b/src/build/extract_file new file mode 100755 index 0000000000000000000000000000000000000000..55d30a972cf15475f18cecc3c09d2cab6bae841a --- /dev/null +++ b/src/build/extract_file @@ -0,0 +1,84 @@ +#!/bin/bash + +# A simple script to checkout or update a svn or git repo as source service + +# defaults +MYARCHIVE="" +MYFILES="" +OUTFILE="." +FILES="" + +while test $# -gt 0; do + case $1 in + *-archive) + MYARCHIVE="${2##*/}" + shift + ;; + *-file|*-files) + MYFILES="$MYFILES ${2}" + FILES=${2} + shift + ;; + *-outfilename) + OUTFILE="${2}" + shift + ;; + *-outdir) + MYOUTDIR="$2" + shift + ;; + *) + echo Unknown parameter $1. + echo 'Usage: extract_file --archive $ARCHIVE --file $FILE --outdir $OUT' + exit 1 + ;; + esac + shift +done + +if [ -z "$MYARCHIVE" ]; then + echo "ERROR: no archive specified!" + exit 1 +fi +if [ -z "$MYFILES" ]; then + echo "ERROR: no checkout URL is given via --file parameter!" + exit 1 +fi +if [ -z "$MYOUTDIR" ]; then + echo "ERROR: no output directory is given via --outdir parameter!" 
+set -x
+
+if [ "${FILES}" == '*' ];then
+  MYFILES=" "
+fi
+
+existing_archive="$MYOUTDIR/$(echo $MYARCHIVE)"
+cd "$MYOUTDIR"
+
+existing_archive=`ls $existing_archive`
+if [ -e "$existing_archive" ]; then
+  if [ "${existing_archive%.tar.gz}" != "$existing_archive" ]; then
+    tar xfz "$existing_archive" --wildcards $MYFILES || exit 1
+  elif [ "${existing_archive%.tar.bz2}" != "$existing_archive" ]; then
+    tar xfj "$existing_archive" --wildcards $MYFILES || exit 1
+  elif [ "${existing_archive%.tar.xz}" != "$existing_archive" ]; then
+    tar xfJ "$existing_archive" --wildcards $MYFILES || exit 1
+  elif [ "${existing_archive%.tar}" != "$existing_archive" ]; then
+    tar xf "$existing_archive" --wildcards $MYFILES || exit 1
+  elif [ "${existing_archive%.zip}" != "$existing_archive" ]; then
+    unzip "$existing_archive" $MYFILES || exit 1
+  else
+    echo "ERROR: unknown archive format $existing_archive"
+    exit 1
+  fi
+  for i in $MYFILES; do
+    mv "$i" "$OUTFILE"
+  done
+else
+  echo "ERROR: archive not found: $existing_archive"
+  exit 1
+fi
+
+exit 0 diff --git a/src/build/gitee_comment.py b/src/build/gitee_comment.py new file mode 100755 index 0000000000000000000000000000000000000000..c9433c74dd6aed1b0588e92d9e5b1dde2ae84cfc --- /dev/null +++ b/src/build/gitee_comment.py @@ -0,0 +1,231 @@
+# -*- coding: utf-8 -*-
+import os
+import sys
+import logging.config
+import logging
+import json
+import yaml
+import argparse
+
+
+class Comment(object):
+    def __init__(self, pr, *check_abi_comment_files):
+        """
+        :param pr: pull request number
+        :param check_abi_comment_files: check abi comment files, one per arch
+        """
+        self._pr = pr
+        self._check_abi_comment_files = check_abi_comment_files
+
+    def comment_build(self, jenkins_proxy, gitee_proxy):
+        """
+        Comment the build results to the pull request
+        :param jenkins_proxy: JenkinsProxy object
+        :param gitee_proxy: GiteeProxy object
+        :return:
+        """
+        comments = self._comment_build_html_format(jenkins_proxy)
+        gitee_proxy.comment_pr(self._pr, "\n".join(comments))
+
+    def comment_at(self, committer, gitee_proxy):
+        """
+        Notify the committer with an "@committer" comment
+        :param committer:
+        :param gitee_proxy:
+        :return:
+        """
+        gitee_proxy.comment_pr(self._pr, "@{}".format(committer))
+
+    def _comment_build_html_format(self, jenkins_proxy):
+        """
+        Assemble the build information as an html table for the pr comment
+        :param jenkins_proxy: JenkinsProxy object
+        :return:
+        """
+        comments = ["<table>", self._comment_html_table_th()]
+
+        base_job_name = os.environ.get("JOB_NAME")
+        base_build_id = os.environ.get("BUILD_ID")
+        base_build_id = int(base_build_id)
+        logger.debug("base_job_name: {}, base_build_id: {}".format(base_job_name, base_build_id))
+
+        base_build = jenkins_proxy.get_build(base_job_name, base_build_id)
+        logger.debug("get base build")
+
+        up_builds = jenkins_proxy.get_upstream_builds(base_build)
+        if up_builds:
+            logger.debug("get up_builds")
+            up_up_builds = jenkins_proxy.get_upstream_builds(up_builds[0])
+            if up_up_builds:
+                logger.debug("get up_up_builds")
+                comments.extend(self._comment_of_ac(up_up_builds[0]))
+            comments.extend(self._comment_of_build(up_builds))
+            comments.extend(self._comment_of_check_abi(up_builds))
+
+        comments.append("</table>")
+
+        return comments
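+    # The job chain assumed above (hypothetical job names): the trigger job
+    # starts the per-arch build jobs, which in turn start this comment job,
+    #     trigger -> {build-aarch64, build-x86-64} -> comment
+    # so up_builds are the build jobs and up_up_builds[0] is the trigger job,
+    # whose ac results are rendered first.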
") + + return comments + + def _comment_of_ac(self, build): + """ + 组装门禁检查结果 + :param build: Jenkins Build object,门禁检查jenkins构建对象 + :return: + """ + if "ACL" not in os.environ: + logger.debug("no ac check") + return [] + + try: + acl = json.loads(os.environ["ACL"]) + logger.debug("ac result: {}".format(acl)) + except ValueError: + logger.exception("invalid ac result format") + return [] + + comments = [] + try: + for index, item in enumerate(acl): + ac_result = ACResult.get_instance(item["result"]) + if index == 0: + build_url = build.get_build_url() + comments.append(self.__class__._comment_html_table_tr( + item["name"], ac_result.emoji, ac_result.hint, "{}{}".format(build_url, "console"), build.buildno, rowspan=len(acl))) + else: + comments.append(self.__class__._comment_html_table_tr_rowspan( + item["name"], ac_result.emoji, ac_result.hint)) + except: + # jenkins api maybe exception, who knows + logger.exception("comment of ac result exception") + + logger.info("ac comment: {}".format(comments)) + + return comments + + def _comment_of_build(self, builds): + """ + 组装编译任务的评论 + :return: + """ + comments = [] + try: + for build in builds: + name = build.job._data["fullName"] + status = build.get_status() + ac_result = ACResult.get_instance(status) + build_url = build.get_build_url() + + comments.append(self.__class__._comment_html_table_tr( + name, ac_result.emoji, ac_result.hint, "{}{}".format(build_url, "console"), build.buildno)) + except: + # jenkins api maybe exception, who knows + logger.exception("comment of build exception") + + logger.info("build comment: {}".format(comments)) + + return comments + + def _comment_of_check_abi(self, builds): + """ + check abi comment + :param builds: + :return: + """ + comments = [] + + def match(name, comment_file): + if "aarch64" in name and "aarch64" in comment_file: + return True + if "x86-64" in name and "x86-64" in comment_file: + return True + return False + + try: + for check_abi_comment_file in self._check_abi_comment_files: + logger.debug("check abi comment file: {}".format(check_abi_comment_file)) + if os.path.exists(check_abi_comment_file): # check abi评论文件存在 + for build in builds: + name = build.job._data["fullName"] + logger.debug("check build {}".format(name)) + if match(name, check_abi_comment_file): # 找到匹配的jenkins build + logger.debug("build \"{}\" match".format(name)) + status = build.get_status() + logger.debug("build state: {}".format(status)) + if ACResult.get_instance(status) == SUCCESS: # 保证build状态成功 + with open(check_abi_comment_file, "r") as f: + content = yaml.safe_load(f) + logger.debug("comment: {}".format(content)) + for item in content: + ac_result = ACResult.get_instance(item.get("result")) + comments.append(self.__class__._comment_html_table_tr( + item.get("name"), ac_result.emoji, ac_result.hint, item.get("link", ""), + "markdown" if "link" in item else "", hashtag=False)) + break + except: + # jenkins api or yaml maybe exception, who knows + logger.exception("comment of build exception") + + logger.info("check abi comment: {}".format(comments)) + + return comments + + @classmethod + def _comment_html_table_th(cls): + return "Check Name Build Result Build Details" + + @classmethod + def _comment_html_table_tr(cls, name, icon, status, href, build_no, hashtag=True, rowspan=1): + return "{} {}{} {}{}".format( + name, icon, status, rowspan, href, "#" if hashtag else "", build_no) + + @classmethod + def _comment_html_table_tr_rowspan(cls, name, icon, status): + return "{} {}{}".format(name, icon, status) + + +if "__main__" 
+
+
+if "__main__" == __name__:
+    args = argparse.ArgumentParser()
+    args.add_argument("-p", type=int, dest="pr", help="pull request number")
+    args.add_argument("-c", type=str, dest="committer", help="committer")
+    args.add_argument("-o", type=str, dest="owner", help="gitee owner")
+    args.add_argument("-r", type=str, dest="repo", help="repo name")
+    args.add_argument("-t", type=str, dest="gitee_token", help="gitee api token")
+
+    args.add_argument("-b", type=str, dest="jenkins_base_url", help="jenkins base url")
+    args.add_argument("-u", type=str, dest="jenkins_user", help="jenkins user name")
+    args.add_argument("-j", type=str, dest="jenkins_api_token", help="jenkins api token")
+
+    args.add_argument("-a", type=str, dest="check_abi_comment_files", nargs="*", help="check abi comment files")
+
+    args.add_argument("--disable", dest="enable", default=True, action="store_false", help="comment to gitee switch")
+
+    args = args.parse_args()
+
+    if not args.enable:
+        sys.exit(0)
+
+    not os.path.exists("log") and os.mkdir("log")
+    logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf"))
+    logging.config.fileConfig(logger_conf_path)
+    logger = logging.getLogger("build")
+
+    from src.ac.framework.ac_result import ACResult, SUCCESS
+    from src.proxy.gitee_proxy import GiteeProxy
+    from src.proxy.jenkins_proxy import JenkinsProxy
+
+    # gitee notify
+    gp = GiteeProxy(args.owner, args.repo, args.gitee_token)
+    gp.delete_tag_of_pr(args.pr, "ci_processing")
+    gp.create_tags_of_pr(args.pr, "ci_finished")
+
+    jp = JenkinsProxy(args.jenkins_base_url, args.jenkins_user, args.jenkins_api_token)
+
+    if args.check_abi_comment_files:
+        comment = Comment(args.pr, *args.check_abi_comment_files)
+    else:
+        comment = Comment(args.pr)
+    logger.info("comment: build result......")
+    comment.comment_build(jp, gp)
+    logger.info("comment: at committer......")
+    comment.comment_at(args.committer, gp) diff --git a/src/build/osc_build_k8s.py b/src/build/osc_build_k8s.py new file mode 100755 index 0000000000000000000000000000000000000000..3e389de4bece56fafb1cd74a32858cc7447f0d0d --- /dev/null +++ b/src/build/osc_build_k8s.py @@ -0,0 +1,195 @@
+# -*- encoding=utf-8 -*-
+import os
+import sys
+import logging.config
+import logging
+import argparse
+from xml.etree import ElementTree
+
+
+class SinglePackageBuild(object):
+
+    GITEEBRANCHPROJECTMAPPING = {
+        "master": ["bringInRely", "openEuler:Extras", "openEuler:Factory", "openEuler:Mainline"],
+        "openEuler-20.03-LTS": ["openEuler:20.03:LTS"],
+        "openEuler-EPOL-LTS": ["bringInRely"],
+        "openEuler-20.09": ["openEuler:20.09"],
+        "mkopeneuler-20.03": ["openEuler:Extras"]
+    }
+
+    def __init__(self, package, arch, target_branch):
+        self._package = package
+        self._arch = arch
+        self._branch = target_branch
+
+    def get_need_build_obs_repos(self, project):
+        """
+        List the obs repos that need to be built
+        :return: list
+        """
+        return OBSProxy.list_repos_of_arch(project, self._package, self._arch)
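+    # Mapping sketch (values from GITEEBRANCHPROJECTMAPPING above): a pr whose
+    # target branch is "openEuler-20.03-LTS" is built only in the obs project
+    # "openEuler:20.03:LTS", while "master" fans out to four projects.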
ok") + + # process_service.pl + if not self._prepare_build_environ(project, work_dir): + logger.error("prepare environ ... failed") + return 2 + + logger.info("prepare environ ... ok") + + # osc build + for repo in repos: + if not OBSProxy.build_package(project, self._package, repo["repo"], self._arch): + logger.error("build {} ... failed".format(repo["repo"])) + return 3 + + logger.info("build {} ... ok".format(repo["repo"])) + + logger.debug("build all repos ... finished") + + return 0 + + def _handle_package_meta(self, project, obs_work_dir, code_path): + """ + _service文件重组 + + + + repo + next/openEuler/perl-Archive-Zip + + + + :param project: obs项目 + :param obs_work_dir: obs工作目录 + :param code_path: 代码目录 + :return: + """ + _service_file_path = os.path.join(obs_work_dir, project, self._package, "_service") + tree = ElementTree.parse(_service_file_path) + + logger.info("before update meta------") + ElementTree.dump(tree) + sys.stdout.flush() + + services = tree.findall("service") + + for service in services: + if service.get("name") == "tar_scm_repo_docker": + service.set("name", "tar_local") + elif service.get("name") == "tar_scm_repo": + service.set("name", "tar_local") + elif service.get("name") == "tar_scm_kernel_repo": + service.set("name", "tar_local_kernel") + elif service.get("name") == "tar_scm_kernels_repo": + service.set("name", "tar_local_kernels") + + for param in service.findall("param"): + if param.get("name") == "scm": + param.text = "local" + elif param.get("name") == "tar_scm": + param.text = "tar_local" + elif param.get("name") == "url": + if "openEuler_kernel" in param.text or "LTS_kernel" in param.text or "openEuler-20.09_kernel" in param.text: + param.text = "{}/{}".format(code_path, "code") # kernel special logical + else: + gitee_repo = param.text.split("/")[-1] + param.text = "{}/{}".format(code_path, gitee_repo) + + logger.info("after update meta------") + + ElementTree.dump(tree) + sys.stdout.flush() + tree.write(_service_file_path) + + def _prepare_build_environ(self, project, obs_work_dir): + """ + 准备obs build环境 + :param project: obs项目 + :param obs_work_dir: obs工作目录 + :return: + """ + _process_perl_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../process_service.pl")) + _service_file_path = os.path.join(obs_work_dir, project, self._package, "_service") + _obs_package_path = os.path.join(obs_work_dir, project, self._package) + + cmd = "perl {} -f {} -p {} -m {} -w {}".format( + _process_perl_path, _service_file_path, project, self._package, _obs_package_path) + + ret, _, _ = shell_cmd_live(cmd, verbose=True) + + if ret: + logger.error("prepare build environ error, {}".format(ret)) + return False + + return True + + def build(self, work_dir, code_dir): + """ + 入口 + :param work_dir: obs工作目录 + :param code_dir: 代码目录 + :return: + """ + if self._branch not in self.GITEEBRANCHPROJECTMAPPING: + logger.error("branch \"{}\" not support yet".format(self._branch)) + sys.exit(1) + + for project in self.GITEEBRANCHPROJECTMAPPING.get(self._branch): + logger.debug("start build project {}".format(project)) + + obs_repos = self.get_need_build_obs_repos(project) + if not obs_repos: + logger.info("all repos ignored of project {}".format(project)) + continue + + logger.debug("build obs repos: {}".format(obs_repos)) + ret = self.build_obs_repos(project, obs_repos, work_dir, code_dir) + if ret > 0: + logger.debug("build run return {}".format(ret)) + logger.error("build {} {} {} ... 
{}".format(project, self._package, self._arch, "failed")) + sys.exit(1) # finish if any error + else: + logger.info("build {} {} {} ... {}".format(project, self._package, self._arch, "ok")) + + +if "__main__" == __name__: + args = argparse.ArgumentParser() + + args.add_argument("-p", type=str, dest="package", help="obs package") + args.add_argument("-a", type=str, dest="arch", help="build arch") + args.add_argument("-b", type=str, dest="branch", help="target branch that merged to ") + args.add_argument("-c", type=str, dest="code", help="code dir path") + args.add_argument("-w", type=str, dest="workspace", default=os.getcwd(), help="obs workspace dir path") + + args = args.parse_args() + + not os.path.exists("log") and os.mkdir("log") + logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf")) + logging.config.fileConfig(logger_conf_path) + logger = logging.getLogger("build") + + from src.proxy.obs_proxy import OBSProxy + from src.utils.shell_cmd import shell_cmd_live + + spb = SinglePackageBuild(args.package, args.arch, args.branch) + spb.build(args.workspace, args.code) diff --git a/src/build/process_service.pl b/src/build/process_service.pl new file mode 100755 index 0000000000000000000000000000000000000000..24c299611d785838dfcfe496d121ca3a6e705fe8 --- /dev/null +++ b/src/build/process_service.pl @@ -0,0 +1,95 @@ +#!/usr/bin/perl -w + + +use File::Spec::Functions qw(rel2abs); +use File::Basename qw(dirname); +use Getopt::Std; +use POSIX; +use Data::Dumper; +use XML::Structured; +use strict; + +our $services = [ + 'services' => + [[ 'service' => + 'name', + 'mode', # "localonly" is skipping this service on server side, "trylocal" is trying to merge changes directly in local files, "disabled" is just skipping it + [[ 'param' => + 'name', + '_content' + ]], + ]], +]; + +die " USAGE: $0 -f service_file -p product -c code_dir -m module -w workdir\n" if (@ARGV < 5); + +our ($opt_f,$opt_p,$opt_c,$opt_m,$opt_w) =("","","","",""); + +&getopts("Hf:p:c:m:w:"); + +my $service_file = $opt_f if ($opt_f); +my $product = $opt_p if ($opt_p); +my $code_dir = $opt_c if ($opt_c); +my $module = $opt_m if ($opt_m); +my $myworkdir = $opt_w if ($opt_w); + +#open lg, ">/home/test.log"; + +my $xml_file = readstr($service_file); +my $serviceinfo = XMLin($services, $xml_file); +for my $service (@{$serviceinfo->{'service'}}) { + #print lg "Run for ".getcwd. "/$service->{'name'}"."\n"; + my @run; + + push @run, dirname(rel2abs($0))."/$service->{'name'}"; + for my $param (@{$service->{'param'}}) { + if ($service->{'name'} eq 'recompress') { + push @run, "--$param->{'name'}"; + if ($param->{'name'} eq 'file') { + push @run, $myworkdir.'/'.$param->{'_content'}; +# print lg '--'. $param->{'name'} . " ".$myworkdir.'/'.$param->{'_content'}."\n"; + } + else { + push @run, $param->{'_content'}; +# print lg '--'. $param->{'name'}. " " .$param->{'_content'}."\n"; + } +# print lg '--outdir '. 
$myworkdir."\n"; + } else { + if ($param->{'name'} eq 'submodules'){ + print 'skip submodules para'; + }else{ + next if $param->{'name'} eq 'outdir'; + next unless $param->{'_content'}; + push @run, "--$param->{'name'}"; + push @run, $param->{'_content'}; + } + } + } + + push @run, "--outdir"; + push @run, "$myworkdir"; + + if ($service->{'name'} =~ /tar/) { + push @run, "--project"; + push @run, "$product"; + + push @run, "--package"; + push @run, "$module"; + } + + print @run; + system(@run); +} + +sub readstr { + my ($fn, $nonfatal) = @_; + local *F; + if (!open(F, '<', $fn)) { + die("$fn: $!\n") unless $nonfatal; + return undef; + } + my $d = ''; + 1 while sysread(F, $d, 8192, length($d)); + close F; + return $d; +} diff --git a/src/build/recompress b/src/build/recompress new file mode 100755 index 0000000000000000000000000000000000000000..4daa0b5692a3bb3164d357f19d22ddf7f612f741 --- /dev/null +++ b/src/build/recompress @@ -0,0 +1,140 @@ +#!/bin/bash + +# A simple script to checkout or update a svn or git repo as source service +# +# (C) 2010 by Adrian Schröter +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# See http://www.gnu.org/licenses/gpl-2.0.html for full license text. + +# defaults +MYCOMPRESSION="" +FILES="" +SRCDIR="" + +while test $# -gt 0; do + case $1 in + *-compression) + MYCOMPRESSION="$2" + shift + ;; + *-file) + SRCDIR="$FILES ${2%/*}/" + FILES="$FILES ${2##*/}" + + echo 'SRCDIR ' $SRCDIR + echo 'FILES ' $FILES + shift + ;; + *-outdir) + MYOUTDIR="$2" + shift + ;; + *) + echo Unknown parameter $1. + echo 'Usage: recompress --compression $COMPRESSION --file $FILE --outdir $OUT' + exit 1 + ;; + esac + shift +done + +if [ -z "$MYCOMPRESSION" ]; then + MYCOMPRESSION="bz2" +fi +if [ -z "$FILES" ]; then + echo "ERROR: no inputs files are given via --file parameter!" + exit 1 +fi +if [ -z "$MYOUTDIR" ]; then + echo "ERROR: no output directory is given via --outdir parameter!" + exit 1 +fi + +cd $SRCDIR +echo `pwd` +echo `ls` +echo `ls $FILES` +for i in `ls $FILES`; do +#for i in "ls $SRCIDR"; do + FILE=`ls -1 "$i" || ls -1 "_service:*:$i"` + #FILE=`ls -1 "$i" || ls -1 "$i"` + if [ ! -f "$FILE" ]; then + echo "Unknown file $i" + exit 1 + fi + UNCOMPRESS="cat" + BASENAME="$FILE" + if [ "${FILE%.gz}" != "$FILE" ]; then + UNCOMPRESS="gunzip -c" + BASENAME="${FILE%.gz}" + elif [ "${FILE%.tgz}" != "$FILE" ]; then + UNCOMPRESS="gunzip -c" + BASENAME="${FILE%.tgz}.tar" + elif [ "${FILE%.bz2}" != "$FILE" ]; then + UNCOMPRESS="bunzip2 -c" + BASENAME="${FILE%.bz2}" + elif [ "${FILE%.xz}" != "$FILE" ]; then + UNCOMPRESS="xz -dc" + BASENAME="${FILE%.xz}" + fi + + if [ "$MYCOMPRESSION" == "gz" ]; then + COMPRESS="gzip -c -n --rsyncable -" + NEWFILE="${BASENAME#_service:}.gz" + elif [ "$MYCOMPRESSION" == "bz2" ]; then + COMPRESS="bzip2 -c -" + NEWFILE="${BASENAME#_service:}.bz2" + elif [ "$MYCOMPRESSION" == "xz" ]; then + COMPRESS="xz -c -" + NEWFILE="${BASENAME#_service:}.xz" + elif [ "$MYCOMPRESSION" == "none" ]; then + COMPRESS="cat -" + NEWFILE="${BASENAME#_service:}" + else + echo "ERROR: Unknown compression" + exit 1 + fi + + echo "pwd: ". `pwd`; + # do the real work + echo "UnCompress". $UNCOMPRESS + echo "file ". $FILE + echo "Compress". $COMPRESS + echo "NEWFILE ". 
$NEWFILE + $UNCOMPRESS "$FILE" | $COMPRESS > "$MYOUTDIR/$NEWFILE" || exit 1 + + # Check if the (compressed) target file already exists in the directory where + # the service is invoked and drop the newly generated one. Avoids overwriting + # otherwise identical files which only have different timestamps. Note that + # zdiff and co all fail to do that properly... + echo "pwd: ". `pwd`; + if [ -f $NEWFILE ] ; then + DIFF_TMPDIR=$(mktemp -d) + SRC_DIR="$PWD" + echo "SRC_DIR ". $SRC_DIR + echo "MYOUTDIR ". $MYOUTDIR + cd $DIFF_TMPDIR + mkdir new old + $(cd new ; tar -xxf "$MYOUTDIR/$NEWFILE" 2> /dev/null || mv "$MYOUTDIR/$NEWFILE" .) + $(cd old ; tar -xxf "$SRC_DIR/$NEWFILE" 2> /dev/null || mv "$SRC_DIR/$NEWFILE" .) + if diff -r new old > /dev/null ; then + echo "Identical target file $NEWFILE already exists, skipping.." + #rm -r "$MYOUTDIR/$NEWFILE" + else + echo "Compressed $FILE to $NEWFILE" + fi + cd $SRC_DIR + rm -r $DIFF_TMPDIR + else + echo "Compressed $FILE to $NEWFILE" + fi + + # we can remove service files, no need to store them twice + rm -f "$FILE" +done + +exit 0 diff --git a/src/build/tar_local b/src/build/tar_local new file mode 100755 index 0000000000000000000000000000000000000000..95be907bffde1b0eb256fc4da60fbb1aba0fd239 --- /dev/null +++ b/src/build/tar_local @@ -0,0 +1,582 @@ +#!/bin/bash + +# A simple script to checkout or update a svn or git repo as source service +# +# (C) 2010 by Adrian Schröter +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# See http://www.gnu.org/licenses/gpl-2.0.html for full license text. + +SERVICE='tar_scm' + +set_default_params () { + MYSCM="" + MYURL="" + MYVERSION="_auto_" + MYFORMAT="" + MYPREFIX="" + MYFILENAME="" + MYREVISION="" + MYPACKAGEMETA="" +# MYHISTORYDEPTH="" + INCLUDES="" +} + +get_config_options () { + # config options for this host ? + if [ -f /etc/obs/services/$SERVICE ]; then + . /etc/obs/services/$SERVICE + fi + # config options for this user ? + if [ -f "$HOME"/.obs/$SERVICE ]; then + . 
"$HOME"/.obs/$SERVICE + fi +} + +parse_params () { + while test $# -gt 0; do + case $1 in + *-scm) + MYSCM="$2" + shift + ;; + *-url) + MYURL="$2" + CI_PRO_NAME=${MYURL%%/*} + TEMP_URL="$MYURL" + MYURL=$TEMP_URL + shift + ;; + *-subdir) + MYSUBDIR="$2" + shift + ;; + *-revision) + MYREVISION="$2" + shift + ;; + *-version) + MYVERSION="$2" + shift + ;; + *-include) + INCLUDES="$INCLUDES $2" + shift + ;; + *-versionformat) + MYFORMAT="$2" + shift + ;; + *-versionprefix) + MYPREFIX="$2" + shift + ;; + *-exclude) + EXCLUDES="$EXCLUDES --exclude=${2#/}" + shift + ;; + *-filename) + MYFILENAME="${2#/}" + shift + ;; + *-package-meta) + MYPACKAGEMETA="${2#/}" + shift + ;; + *-outdir) + MYOUTDIR="$2" + shift + ;; + *-history-depth) + echo "history-depth parameter is obsolete and will be ignored" + shift + ;; + *-project) + MYPROJECT="$2" + shift + ;; + *-package) + MYPACKAGE="$2" + shift + ;; + *) + echo "Unknown parameter: $1" + echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT' + exit 1 + ;; + esac + shift + done +} + +error () { + echo "ERROR: $*" + exit 1 +} + +debug () { + [ -n "$DEBUG_TAR_SCM" ] && echo "$*" +} + +safe_run () { + if ! "$@"; then + error "$* failed; aborting!" + fi +} + +sanitise_params () { + TAR_VERSION="$MYVERSION" + + if [ -z "$MYSCM" ]; then + error "no scm is given via --scm parameter (git/svn/hg/bzr)!" + fi + if [ -z "$MYURL" ]; then + error "no checkout URL is given via --url parameter!" + fi + if [ -z "$MYOUTDIR" ]; then + error "no output directory is given via --outdir parameter!" + fi + if [ -z "$MYPROJECT" ]; then + error "no project is given via --project parameter!" + fi + if [ -z "$MYPACKAGE" ]; then + error "no package is given via --package parameter!" + fi + + FILE="$MYFILENAME" + WD_VERSION="$MYVERSION" + if [ -z "$MYPACKAGEMETA" ]; then + EXCLUDES="$EXCLUDES --exclude=.svn" + fi + # if [ "$MYHISTORYDEPTH" == "full" ]; then + # MYHISTORYDEPTH="999999999" + # fi +} + +detect_default_filename_param () { + if [ -n "$FILE" ]; then + return + fi + + case "$MYSCM" in + git) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + FILE="${FILE%.git}" + FILE="${FILE#*@*:}" + ;; + svn|hg|bzr) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + ;; + local) + FILE="temp_dir" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +fetch_upstream () { + TOHASH="$MYURL" + [ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR" + HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1` + REPOCACHE= + CACHEDIRECTORY=/tmp/local_code/xdf + if [ -n "$CACHEDIRECTORY" ]; then + REPOCACHEINCOMING="$CACHEDIRECTORY/incoming" + REPOCACHEROOT="$CACHEDIRECTORY/repo" + REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE" + REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH" + fi + + + debug "check local cache if configured" + if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then + debug "cache hit: $REPOCACHE" + check_cache + else + if [ -n "$CACHEDIRECTORY" ]; then + debug "cache miss: $REPOCACHE/" + else + debug "cache not enabled" + fi + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi + #exit 22 + fi + +} + +calc_dir_to_clone_to () { + if [ -n "$CACHEDIRECTORY" ]; then + if [ ! 
-d REPOCACHE ]; then + mkdir -p "$REPOCACHE" + fi + safe_run cd "$REPOCACHE" + # Use dry-run mode because git/hg refuse to clone into + # an empty directory on SLES11 + #debug mktemp -u -d "tmp.XXXXXXXXXX" + #CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"` + CLONE_TO="$REPOCACHE" + else + CLONE_TO="$FILE" + fi +} + +initial_clone () { + echo "Fetching from $MYURL ..." + + case "$MYSCM" in + git) + # Clone with full depth; so that the revision can be found if specified + safe_run git clone "$MYURL" "$CLONE_TO" + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + if [[ $(svn --version --quiet) > "1.5.99" ]]; then + TRUST_SERVER_CERT="--trust-server-cert" + fi + safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \ + $args "$MYURL/$MYSUBDIR" "$CLONE_TO" + MYSUBDIR= # repo root is subdir + ;; + local) + echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`" + safe_run ls -A $MYURL | grep -v .git | xargs -I {} cp -a $MYURL/{} . + if [ -e $MYURL/.git ]; then + safe_run rm -f $MYURL/.git/shallow + safe_run cp -aL $MYURL/.git . + fi + if [ -d "$MYURL/.svn" ]; then + safe_run cp -av $MYURL/.svn ./ + fi + ;; + hg) + safe_run hg clone "$MYURL" "$CLONE_TO" + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r $MYREVISION" + safe_run bzr checkout $args "$MYURL" "$CLONE_TO" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +cache_repo () { + if [ -e "$REPOCACHE" ]; then + error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)" + else + # FIXME: small race window here; do source services need to be thread-safe? + if [ ! -d $REPOCACHE ]; then + mkdir -p $REPOCACHE + fi + debug mv2 "$CLONE_TO" "$REPOCACHE" + safe_run mv "$CLONE_TO" "$REPOCACHE" + echo "$MYURL" > "$REPOURLCACHE" + echo "Cached $MYURL at $REPOCACHE" + fi +} + +check_cache () { + if [ -d "$MYURL/.svn" ]; then + new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + new_version="new_version" + fi + if echo "$MYURL" | grep '/$' &> /dev/null; then + new_version="new_version" + fi + if [ -d "$REPOCACHE/.svn" ]; then + old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + old_version="old_version" + fi + #echo "xdf: $new_version $old_version" + #if [ "$new_version" != "$old_version" ]; then + echo "The code has changed for $MYPROJECT/$MYPACKAGE" + rm -rf "$REPOCACHE" + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + + safe_run cd "$REPOPATH" + switch_to_revision + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi +} + +update_cache () { + safe_run cd "$REPOCACHE" + + case "$MYSCM" in + git) + safe_run git fetch + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run svn update $args > svnupdate_info + isupdate=`cat svnupdate_info | wc -l` + if [ $isupdate -eq 1 ]; then + rm -f svnupdate_info + echo "There is no code update, so exit 22" + exit 22 + fi + MYSUBDIR= # repo root is subdir + ;; + hg) + if ! out=`hg pull`; then + if [[ "$out" == *'no changes found'* ]]; then + # Contrary to the docs, hg pull returns exit code 1 when + # there are no changes to pull, but we don't want to treat + # this as an error. + : + else + error "hg pull failed; aborting!" 
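+                    # error() echoes the message and exits 1, so any hg failure
+                    # other than "no changes found" aborts the whole service run.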
+ fi + fi + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run bzr update $args + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +switch_to_revision () { + case "$MYSCM" in + git) + safe_run git checkout "$MYREVISION" + if git branch | grep -q '^\* (no branch)$'; then + echo "$MYREVISION does not refer to a branch, not attempting git pull" + else + safe_run git pull + fi + ;; + svn|bzr|local) + : # should have already happened via checkout or update + ;; + hg) + safe_run hg update "$MYREVISION" + ;; + # bzr) + # safe_run bzr update + # if [ -n "$MYREVISION" ]; then + # safe_run bzr revert -r "$MYREVISION" + # fi + # ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +detect_version () { + if [ -z "$MYFORMAT" ]; then + case "$MYSCM" in + git) + MYFORMAT="%at" + ;; + hg) + MYFORMAT="{rev}" + ;; + svn|bzr) + MYFORMAT="%r" + ;; + *) + error "unknown SCM '$MYSCM'" + ;; + esac + fi + + safe_run cd "$REPOPATH" + if [ -n "$MYFORMAT" ];then + MYPREFIX="$MYFORMAT" + else + get_version + fi + TAR_VERSION="$MYPREFIX$version" +} + +get_version () { + case "$MYSCM" in + git) + #version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1` + version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"` + ;; + svn) + #rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'` + rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + version="${MYFORMAT//%r/$rev}" + ;; + hg) + rev=`safe_run hg id -n` + version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"` + ;; + bzr) + #safe_run bzr log -l1 ... + rev=`safe_run bzr revno` + version="${MYFORMAT//%r/$rev}" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +prep_tree_for_tar () { + if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then + error "directory does not exist: $REPOPATH/$MYSUBDIR" + fi + + if [ -z "$TAR_VERSION" ]; then + TAR_BASENAME="$FILE" + else + TAR_BASENAME="${FILE}-${TAR_VERSION}" + fi + + MYINCLUDES="" + + for INC in $INCLUDES; do + MYINCLUDES="$MYINCLUDES $INC" + done + #if [ -z "$MYINCLUDES" ]; then + # MYINCLUDES="*" + #fi + + safe_run cd "$MYOUTDIR" + + if [ -n "$CACHEDIRECTORY" ]; then + debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + if [ -e $REPOPATH/$MYSUBDIR/.git ]; then + # amazing copy failed, ignore fail temporary + cp -a "$REPOPATH/$MYSUBDIR/.git" "$TAR_BASENAME" + safe_run pushd "$TAR_BASENAME";git reset --hard HEAD;popd + fi + else + debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + fi + if [ -z "$MYINCLUDES" ]; then + MYINCLUDES=`ls -A $TAR_BASENAME` + fi +} + +create_tar () { + safe_run cd "$TAR_BASENAME" + + compression_array=(`cat $MYOUTDIR/$TARFILE/_service | egrep '"compression"' | awk -F'>' '{print $2}' | awk -F'<' '{print $1}'`) + file_array=`cat $MYOUTDIR/$TARFILE/_service | egrep '"file"' | awk -F'>' '{print $2}' | awk -F'<' '{print $1}' | tr -d '.tar'` + index=0 + for file in $file_array + do + if echo "$TAR_BASENAME" | egrep "$file"; then + break + else + ((index=index+1)) + fi + done + compression_type=${compression_array[index]} + if [ -e .git ]; then + MYINCLUDES="$MYINCLUDES .git" + fi + + TARFILE="${TAR_BASENAME}.tar" + TARPATH="$MYOUTDIR/$TARFILE" + debug tar Pcf "$TARPATH" $EXCLUDES $MYINCLUDES + safe_run tar Pcf "$TARPATH" $EXCLUDES $MYINCLUDES + + + echo "Created $TARFILE" + safe_run cd "$MYOUTDIR" +} + +cleanup () { + debug rm -rf "$TAR_BASENAME" "$FILE" + rm -rf "$TAR_BASENAME" "$FILE" +} + +main () { + set_default_params + #xdf + 
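+    # NOTE: DEBUG_TAR_SCM is forced on below for this local variant, which also
+    # skips sourcing /etc/obs/services/$SERVICE and "$HOME"/.obs/$SERVICE.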
DEBUG_TAR_SCM=1 + + if [ -z "$DEBUG_TAR_SCM" ]; then + get_config_options + else + # We're in test-mode, so don't let any local site-wide + # or per-user config impact the test suite. + : + fi + parse_params "$@" + sanitise_params + + SRCDIR=$(pwd) + cd "$MYOUTDIR" + #echo "$SRCDIR $MYOUTDIR" + detect_default_filename_param + + #xdf + #LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE + #mkdir -p "/srv/local_code/xdf/log/$MYPROJECT" + + lockfile=$LOGFILE".lock" + if [ -f $lockfile ]; then + mypid=`cat $lockfile` + while ps -p $mypid -o comm= &> /dev/null + do + sleep 10 + mypid=`cat $lockfile` + done + rm -f $lockfile + fi + touch $lockfile + echo "$$" > $lockfile + + #exec 6>&1 + #exec > $LOGFILE + echo "$@" + echo "myurl === $MYURL" + fetch_upstream + + prep_tree_for_tar + create_tar + + cleanup + rm -f $lockfile +} + +main "$@" + +exit 0 diff --git a/src/build/tar_local_kernel b/src/build/tar_local_kernel new file mode 100755 index 0000000000000000000000000000000000000000..679184bbd8cc23902532bbdebd1ebcd09d9ac9c6 --- /dev/null +++ b/src/build/tar_local_kernel @@ -0,0 +1,575 @@ +#!/bin/bash + +# A simple script to checkout or update a svn or git repo as source service +# +# (C) 2010 by Adrian Schröter +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# See http://www.gnu.org/licenses/gpl-2.0.html for full license text. + +SERVICE='tar_scm' + +set_default_params () { + MYSCM="" + MYURL="" + #MYVERSION="_auto_" + MYVERSION="222" + MYFORMAT="" + MYPREFIX="" + MYFILENAME="" + MYREVISION="" + MYPACKAGEMETA="" +# MYHISTORYDEPTH="" + INCLUDES="" +} + +get_config_options () { + # config options for this host ? + if [ -f /etc/obs/services/$SERVICE ]; then + . /etc/obs/services/$SERVICE + fi + # config options for this user ? + if [ -f "$HOME"/.obs/$SERVICE ]; then + . "$HOME"/.obs/$SERVICE + fi +} + +parse_params () { + while test $# -gt 0; do + case $1 in + *-scm) + MYSCM="$2" + shift + ;; + *-url) + MYURL="$2" + CI_PRO_NAME=${MYURL%%/*} + TEMP_URL="$MYURL" + MYURL=$TEMP_URL + shift + ;; + *-subdir) + MYSUBDIR="$2" + shift + ;; + *-revision) + MYREVISION="$2" + shift + ;; + *-version) + MYVERSION="$2" + shift + ;; + *-include) + INCLUDES="$INCLUDES $2" + shift + ;; + *-versionformat) + MYFORMAT="$2" + shift + ;; + *-versionprefix) + MYPREFIX="$2" + shift + ;; + *-exclude) + EXCLUDES="$EXCLUDES --exclude=${2#/}" + shift + ;; + *-filename) + MYFILENAME="${2#/}" + shift + ;; + *-package-meta) + MYPACKAGEMETA="${2#/}" + shift + ;; + *-outdir) + MYOUTDIR="$2" + shift + ;; + *-history-depth) + echo "history-depth parameter is obsolete and will be ignored" + shift + ;; + *-project) + MYPROJECT="$2" + shift + ;; + *-package) + MYPACKAGE="$2" + shift + ;; + *) + echo "Unknown parameter: $1" + echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT' + exit 1 + ;; + esac + shift + done +} + +error () { + echo "ERROR: $*" + exit 1 +} + +debug () { + [ -n "$DEBUG_TAR_SCM" ] && echo "$*" +} + +safe_run () { + if ! "$@"; then + error "$* failed; aborting!" 
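+        # usage sketch: `safe_run git clone "$MYURL" "$CLONE_TO"` runs the
+        # command and promotes any non-zero exit status to a fatal error.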
+ fi +} + +sanitise_params () { + TAR_VERSION="$MYVERSION" + + if [ -z "$MYSCM" ]; then + error "no scm is given via --scm parameter (git/svn/hg/bzr)!" + fi + if [ -z "$MYURL" ]; then + error "no checkout URL is given via --url parameter!" + fi + if [ -z "$MYOUTDIR" ]; then + error "no output directory is given via --outdir parameter!" + fi + if [ -z "$MYPROJECT" ]; then + error "no project is given via --project parameter!" + fi + if [ -z "$MYPACKAGE" ]; then + error "no package is given via --package parameter!" + fi + + FILE="$MYFILENAME" + WD_VERSION="$MYVERSION" + if [ -z "$MYPACKAGEMETA" ]; then + EXCLUDES="$EXCLUDES --exclude=.svn" + fi + # if [ "$MYHISTORYDEPTH" == "full" ]; then + # MYHISTORYDEPTH="999999999" + # fi +} + +detect_default_filename_param () { + if [ -n "$FILE" ]; then + return + fi + + case "$MYSCM" in + git) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + FILE="${FILE%.git}" + FILE="${FILE#*@*:}" + ;; + svn|hg|bzr) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + ;; + local) + FILE="temp_dir" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +fetch_upstream () { + TOHASH="$MYURL" + [ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR" + HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1` + REPOCACHE= + CACHEDIRECTORY=/tmp/local_code/xdf + if [ -n "$CACHEDIRECTORY" ]; then + REPOCACHEINCOMING="$CACHEDIRECTORY/incoming" + REPOCACHEROOT="$CACHEDIRECTORY/repo" + REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE" + REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH" + fi + + + debug "check local cache if configured" + if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then + debug "cache hit: $REPOCACHE" + check_cache + else + if [ -n "$CACHEDIRECTORY" ]; then + debug "cache miss: $REPOCACHE/" + else + debug "cache not enabled" + fi + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi + #exit 22 + fi + +} + +calc_dir_to_clone_to () { + if [ -n "$CACHEDIRECTORY" ]; then + if [ ! -d REPOCACHE ]; then + mkdir -p "$REPOCACHE" + fi + safe_run cd "$REPOCACHE" + # Use dry-run mode because git/hg refuse to clone into + # an empty directory on SLES11 + #debug mktemp -u -d "tmp.XXXXXXXXXX" + #CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"` + CLONE_TO="$REPOCACHE" + else + CLONE_TO="$FILE" + fi +} + +initial_clone () { + echo "Fetching from $MYURL ..." + + case "$MYSCM" in + git) + # Clone with full depth; so that the revision can be found if specified + safe_run git clone "$MYURL" "$CLONE_TO" + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + if [[ $(svn --version --quiet) > "1.5.99" ]]; then + TRUST_SERVER_CERT="--trust-server-cert" + fi + safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \ + $args "$MYURL/$MYSUBDIR" "$CLONE_TO" + MYSUBDIR= # repo root is subdir + ;; + local) + echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`" + safe_run cp -av $MYURL/* ./ + if [ -d "$MYURL/.svn" ]; then + safe_run cp -av $MYURL/.svn ./ + fi + ;; + hg) + safe_run hg clone "$MYURL" "$CLONE_TO" + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r $MYREVISION" + safe_run bzr checkout $args "$MYURL" "$CLONE_TO" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +cache_repo () { + if [ -e "$REPOCACHE" ]; then + error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)" + else + # FIXME: small race window here; do source services need to be thread-safe? + if [ ! 
-d $REPOCACHE ]; then + mkdir -p $REPOCACHE + fi + debug mv2 "$CLONE_TO" "$REPOCACHE" + safe_run mv "$CLONE_TO" "$REPOCACHE" + echo "$MYURL" > "$REPOURLCACHE" + echo "Cached $MYURL at $REPOCACHE" + fi +} + +check_cache () { + if [ -d "$MYURL/.svn" ]; then + new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + new_version="new_version" + fi + if echo "$MYURL" | grep '/$' &> /dev/null; then + new_version="new_version" + fi + if [ -d "$REPOCACHE/.svn" ]; then + old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + old_version="old_version" + fi + #echo "xdf: $new_version $old_version" + #if [ "$new_version" != "$old_version" ]; then + echo "The code has changed for $MYPROJECT/$MYPACKAGE" + rm -rf "$REPOCACHE" + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + + safe_run cd "$REPOPATH" + switch_to_revision + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi + #else + # echo "No code is changed, so exit 22" + # exit 22 + #fi +} + +update_cache () { + safe_run cd "$REPOCACHE" + + case "$MYSCM" in + git) + safe_run git fetch + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run svn update $args > svnupdate_info + isupdate=`cat svnupdate_info | wc -l` + if [ $isupdate -eq 1 ]; then + rm -f svnupdate_info + echo "There is no code update, so exit 22" + exit 22 + fi + MYSUBDIR= # repo root is subdir + ;; + hg) + if ! out=`hg pull`; then + if [[ "$out" == *'no changes found'* ]]; then + # Contrary to the docs, hg pull returns exit code 1 when + # there are no changes to pull, but we don't want to treat + # this as an error. + : + else + error "hg pull failed; aborting!" + fi + fi + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run bzr update $args + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +switch_to_revision () { + case "$MYSCM" in + git) + safe_run git checkout "$MYREVISION" + if git branch | grep -q '^\* (no branch)$'; then + echo "$MYREVISION does not refer to a branch, not attempting git pull" + else + safe_run git pull + fi + ;; + svn|bzr|local) + : # should have already happened via checkout or update + ;; + hg) + safe_run hg update "$MYREVISION" + ;; + # bzr) + # safe_run bzr update + # if [ -n "$MYREVISION" ]; then + # safe_run bzr revert -r "$MYREVISION" + # fi + # ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +detect_version () { + if [ -z "$MYFORMAT" ]; then + case "$MYSCM" in + git) + MYFORMAT="%at" + ;; + hg) + MYFORMAT="{rev}" + ;; + svn|bzr) + MYFORMAT="%r" + ;; + *) + error "unknown SCM '$MYSCM'" + ;; + esac + fi + + safe_run cd "$REPOPATH" + if [ -n "$MYFORMAT" ];then + MYPREFIX="$MYFORMAT" + else + get_version + fi + TAR_VERSION="$MYPREFIX$version" +} + +get_version () { + case "$MYSCM" in + git) + #version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1` + version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"` + ;; + svn) + #rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'` + rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + version="${MYFORMAT//%r/$rev}" + ;; + hg) + rev=`safe_run hg id -n` + version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"` + ;; + bzr) + #safe_run bzr log -l1 ... 
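+            # e.g. with MYFORMAT="%r" and `bzr revno` printing 42, the
+            # substitution below yields version "42".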
+ rev=`safe_run bzr revno` + version="${MYFORMAT//%r/$rev}" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +prep_tree_for_tar () { + if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then + error "directory does not exist: $REPOPATH/$MYSUBDIR" + fi + + if [ -z "$TAR_VERSION" ]; then + TAR_BASENAME="$FILE" + else + TAR_BASENAME="${FILE}-${TAR_VERSION}" + fi + + MYINCLUDES="" + + for INC in $INCLUDES; do + MYINCLUDES="$MYINCLUDES $INC" + done + if [ -z "$MYINCLUDES" ]; then + MYINCLUDES="*" + fi + + safe_run cd "$MYOUTDIR" + + if [ -n "$CACHEDIRECTORY" ]; then + debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + else + debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + fi +} + +create_tar () { + safe_run cd "$TAR_BASENAME" + + TARFILE="${TAR_BASENAME}.tar.bz2" + TARPATH="$MYOUTDIR/$TARFILE" + + for INC in $MYINCLUDES; do + if [ "$INC" = ".$MYSCM" ]; then + continue + fi + + if echo "$EXCLUDES" | grep -w $INC >/dev/null + then + continue + fi + + if [ -d $INC ]; then + #safe_run tar jcf "$MYOUTDIR/$INC.tar.bz2" --exclude=.$MYSCM --exclude=.svn $INC + safe_run tar Pcf "$MYOUTDIR/$INC.tar" --exclude=.$MYSCM --exclude=.svn $INC + continue + fi + + safe_run cp $INC "$MYOUTDIR/" + done + + echo "Created $TARFILE" + safe_run cd "$MYOUTDIR" +} + +cleanup () { + debug rm -rf "$TAR_BASENAME" "$FILE" + #rm -rf "$TAR_BASENAME" "$FILE" + rm -rf "$TAR_BASENAME" +} + +main () { + set_default_params + #xdf + DEBUG_TAR_SCM=1 + + if [ -z "$DEBUG_TAR_SCM" ]; then + get_config_options + else + # We're in test-mode, so don't let any local site-wide + # or per-user config impact the test suite. + : + fi + parse_params "$@" + sanitise_params + + SRCDIR=$(pwd) + cd "$MYOUTDIR" + #echo "$SRCDIR $MYOUTDIR" + detect_default_filename_param + + #xdf + #LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE + #mkdir -p "/srv/local_code/xdf/log/$MYPROJECT" + + lockfile=$LOGFILE".lock" + if [ -f $lockfile ]; then + mypid=`cat $lockfile` + # while ps -p $mypid -o comm= &> /dev/null + # do + # sleep 10 + # mypid=`cat $lockfile` + # done + rm -f $lockfile + fi + touch $lockfile + echo "$$" > $lockfile + + #exec 6>&1 + #exec > $LOGFILE + echo "$@" + echo "myurl === $MYURL" + fetch_upstream + + prep_tree_for_tar + create_tar + + cleanup + rm -f $lockfile +} + +main "$@" + +exit 0 diff --git a/src/build/tar_local_kernels b/src/build/tar_local_kernels new file mode 100755 index 0000000000000000000000000000000000000000..2e26233df814a1dc04e55ddf58ada74666f803fe --- /dev/null +++ b/src/build/tar_local_kernels @@ -0,0 +1,585 @@ +#!/bin/bash + +# A simple script to checkout or update a svn or git repo as source service +# +# (C) 2010 by Adrian Schröter +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# See http://www.gnu.org/licenses/gpl-2.0.html for full license text. + +set -x +SERVICE='tar_scm' + +set_default_params () { + MYSCM="" + MYURL="" + #MYVERSION="_auto_" + MYVERSION="222" + MYFORMAT="" + MYPREFIX="" + MYFILENAME="" + MYREVISION="" + MYPACKAGEMETA="" +# MYHISTORYDEPTH="" + INCLUDES="" +} + +get_config_options () { + # config options for this host ? + if [ -f /etc/obs/services/$SERVICE ]; then + . /etc/obs/services/$SERVICE + fi + # config options for this user ? + if [ -f "$HOME"/.obs/$SERVICE ]; then + . 
"$HOME"/.obs/$SERVICE + fi +} + +parse_params () { + while test $# -gt 0; do + case $1 in + *-scm) + MYSCM="$2" + shift + ;; + *-url) + MYURL="$2" + CI_PRO_NAME=${MYURL%%/*} + TEMP_URL="$MYURL" + MYURL=$TEMP_URL + shift + ;; + *-subdir) + MYSUBDIR="$2" + shift + ;; + *-revision) + MYREVISION="$2" + shift + ;; + *-version) + MYVERSION="$2" + shift + ;; + *-include) + INCLUDES="$INCLUDES $2" + shift + ;; + *-versionformat) + MYFORMAT="$2" + shift + ;; + *-versionprefix) + MYPREFIX="$2" + shift + ;; + *-exclude) + EXCLUDES="$EXCLUDES --exclude=${2#/}" + shift + ;; + *-filename) + MYFILENAME="${2#/}" + shift + ;; + *-package-meta) + MYPACKAGEMETA="${2#/}" + shift + ;; + *-outdir) + MYOUTDIR="$2" + shift + ;; + *-history-depth) + echo "history-depth parameter is obsolete and will be ignored" + shift + ;; + *-project) + MYPROJECT="$2" + shift + ;; + *-package) + MYPACKAGE="$2" + shift + ;; + *) + echo "Unknown parameter: $1" + echo 'Usage: $SERVICE --scm $SCM --url $URL [--subdir $SUBDIR] [--revision $REVISION] [--version $VERSION] [--include $INCLUDE]* [--exclude $EXCLUDE]* [--versionformat $FORMAT] [--versionprefix $PREFIX] [--filename $FILENAME] [--package-meta $META] --outdir $OUT' + exit 1 + ;; + esac + shift + done +} + +error () { + echo "ERROR: $*" + exit 1 +} + +debug () { + [ -n "$DEBUG_TAR_SCM" ] && echo "$*" +} + +safe_run () { + if ! "$@"; then + error "$* failed; aborting!" + fi +} + +sanitise_params () { + TAR_VERSION="$MYVERSION" + + if [ -z "$MYSCM" ]; then + error "no scm is given via --scm parameter (git/svn/hg/bzr)!" + fi + if [ -z "$MYURL" ]; then + error "no checkout URL is given via --url parameter!" + fi + if [ -z "$MYOUTDIR" ]; then + error "no output directory is given via --outdir parameter!" + fi + if [ -z "$MYPROJECT" ]; then + error "no project is given via --project parameter!" + fi + if [ -z "$MYPACKAGE" ]; then + error "no package is given via --package parameter!" + fi + + FILE="$MYFILENAME" + WD_VERSION="$MYVERSION" + if [ -z "$MYPACKAGEMETA" ]; then + EXCLUDES="$EXCLUDES --exclude=.svn" + fi + # if [ "$MYHISTORYDEPTH" == "full" ]; then + # MYHISTORYDEPTH="999999999" + # fi +} + +detect_default_filename_param () { + if [ -n "$FILE" ]; then + return + fi + + case "$MYSCM" in + git) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + FILE="${FILE%.git}" + FILE="${FILE#*@*:}" + ;; + svn|hg|bzr) + FILE="${MYURL%/}" + FILE="${FILE##*/}" + ;; + local) + FILE="temp_dir" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +fetch_upstream () { + TOHASH="$MYURL" + [ "$MYSCM" = 'svn' ] && TOHASH="$TOHASH/$MYSUBDIR" + HASH=`echo "$TOHASH" | sha256sum | cut -d\ -f 1` + REPOCACHE= + CACHEDIRECTORY=/tmp/local_code/xdf + if [ -n "$CACHEDIRECTORY" ]; then + REPOCACHEINCOMING="$CACHEDIRECTORY/incoming" + REPOCACHEROOT="$CACHEDIRECTORY/repo" + REPOCACHE="$REPOCACHEROOT/$MYPROJECT/$MYPACKAGE" + REPOURLCACHE="$CACHEDIRECTORY/repourl/$HASH" + fi + + + debug "check local cache if configured" + if [ -n "$CACHEDIRECTORY" -a -d "$REPOCACHE/" ]; then + debug "cache hit: $REPOCACHE" + check_cache + else + if [ -n "$CACHEDIRECTORY" ]; then + debug "cache miss: $REPOCACHE/" + else + debug "cache not enabled" + fi + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi + + #exit 22 + fi + +} + +calc_dir_to_clone_to () { + if [ -n "$CACHEDIRECTORY" ]; then + if [ ! 
-d REPOCACHE ]; then + mkdir -p "$REPOCACHE" + fi + safe_run cd "$REPOCACHE" + # Use dry-run mode because git/hg refuse to clone into + # an empty directory on SLES11 + #debug mktemp -u -d "tmp.XXXXXXXXXX" + #CLONE_TO=`mktemp -u -d "tmp.XXXXXXXXXX"` + CLONE_TO="$REPOCACHE" + else + CLONE_TO="$FILE" + fi +} + +initial_clone () { + echo "Fetching from $MYURL ..." + + case "$MYSCM" in + git) + # Clone with full depth; so that the revision can be found if specified + safe_run git clone "$MYURL" "$CLONE_TO" + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + if [[ $(svn --version --quiet) > "1.5.99" ]]; then + TRUST_SERVER_CERT="--trust-server-cert" + fi + safe_run svn checkout --non-interactive $TRUST_SERVER_CERT \ + $args "$MYURL/$MYSUBDIR" "$CLONE_TO" + MYSUBDIR= # repo root is subdir + ;; + local) + echo "xdffff: $MYURL ---- $CLONE_TO --- `pwd`" + MYURL=`echo $MYURL | sed 's#\./##g' | sed 's/[ /]*$//g'` + pkgname=`basename $MYURL` + safe_run mkdir $pkgname + safe_run cp -av $MYURL/* $pkgname + safe_run mv $pkgname/*.spec . + if [ -f /usr/bin/rpmspec ] + then + version=`rpmspec -q --srpm --qf %{Version} *.spec` + else + version=`grep "^Version:*" *.spec | awk -F: '{print $2}' | sed 's/[ ]*//g'` + fi + pkg="${pkgname}-${version}" + safe_run mv $pkgname $pkg + if [ -d "$MYURL/.svn" ]; then + safe_run cp -av $MYURL/.svn ./ + fi + ;; + hg) + safe_run hg clone "$MYURL" "$CLONE_TO" + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r $MYREVISION" + safe_run bzr checkout $args "$MYURL" "$CLONE_TO" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +cache_repo () { + if [ -e "$REPOCACHE" ]; then + error "Somebody else beat us to populating the cache for $MYURL ($REPOCACHE)" + else + # FIXME: small race window here; do source services need to be thread-safe? + if [ ! -d $REPOCACHE ]; then + mkdir -p $REPOCACHE + fi + debug mv2 "$CLONE_TO" "$REPOCACHE" + safe_run mv "$CLONE_TO" "$REPOCACHE" + echo "$MYURL" > "$REPOURLCACHE" + echo "Cached $MYURL at $REPOCACHE" + fi +} + +check_cache () { + if [ -d "$MYURL/.svn" ]; then + new_version=`LC_ALL=C svn info "$MYURL" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + new_version="new_version" + fi + if echo "$MYURL" | grep '/$' &> /dev/null; then + new_version="new_version" + fi + if [ -d "$REPOCACHE/.svn" ]; then + old_version=`LC_ALL=C svn info "$REPOCACHE" | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + else + old_version="old_version" + fi + #echo "xdf: $new_version $old_version" + #if [ "$new_version" != "$old_version" ]; then + echo "The code has changed for $MYPROJECT/$MYPACKAGE" + rm -rf "$REPOCACHE" + + calc_dir_to_clone_to + debug "new $MYSCM checkout to $CLONE_TO" + initial_clone + + if [ -n "$CACHEDIRECTORY" ]; then + #cache_repo + REPOPATH="$REPOCACHE" + else + REPOPATH="$MYOUTDIR/$FILE" + fi + + safe_run cd "$REPOPATH" + switch_to_revision + if [ "$TAR_VERSION" == "_auto_" -o -n "$MYFORMAT" ]; then + detect_version + fi + #else + # echo "No code is changed, so exit 22" + # exit 22 + #fi +} + +update_cache () { + safe_run cd "$REPOCACHE" + + case "$MYSCM" in + git) + safe_run git fetch + ;; + svn) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run svn update $args > svnupdate_info + isupdate=`cat svnupdate_info | wc -l` + if [ $isupdate -eq 1 ]; then + rm -f svnupdate_info + echo "There is no code update, so exit 22" + exit 22 + fi + MYSUBDIR= # repo root is subdir + ;; + hg) + if ! 
out=`hg pull`; then + if [[ "$out" == *'no changes found'* ]]; then + # Contrary to the docs, hg pull returns exit code 1 when + # there are no changes to pull, but we don't want to treat + # this as an error. + : + else + error "hg pull failed; aborting!" + fi + fi + ;; + bzr) + args= + [ -n "$MYREVISION" ] && args="-r$MYREVISION" + safe_run bzr update $args + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +switch_to_revision () { + case "$MYSCM" in + git) + safe_run git checkout "$MYREVISION" + if git branch | grep -q '^\* (no branch)$'; then + echo "$MYREVISION does not refer to a branch, not attempting git pull" + else + safe_run git pull + fi + ;; + svn|bzr|local) + : # should have already happened via checkout or update + ;; + hg) + safe_run hg update "$MYREVISION" + ;; + # bzr) + # safe_run bzr update + # if [ -n "$MYREVISION" ]; then + # safe_run bzr revert -r "$MYREVISION" + # fi + # ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +detect_version () { + if [ -z "$MYFORMAT" ]; then + case "$MYSCM" in + git) + MYFORMAT="%at" + ;; + hg) + MYFORMAT="{rev}" + ;; + svn|bzr) + MYFORMAT="%r" + ;; + *) + error "unknown SCM '$MYSCM'" + ;; + esac + fi + + safe_run cd "$REPOPATH" + [ -n "$MYPREFIX" ] && MYPREFIX="$MYPREFIX." + get_version + TAR_VERSION="$MYPREFIX$version" +} + +get_version () { + case "$MYSCM" in + git) + #version=`safe_run git show --pretty=format:"$MYFORMAT" | head -n 1` + version=`safe_run git log -n1 --pretty=format:"$MYFORMAT"` + ;; + svn) + #rev=`LC_ALL=C safe_run svn info | awk '/^Revision:/ { print $2 }'` + rev=`LC_ALL=C safe_run svn info | sed -n 's,^Last Changed Rev: \(.*\),\1,p'` + version="${MYFORMAT//%r/$rev}" + ;; + hg) + rev=`safe_run hg id -n` + version=`safe_run hg log -l1 -r$rev --template "$MYFORMAT"` + ;; + bzr) + #safe_run bzr log -l1 ... + rev=`safe_run bzr revno` + version="${MYFORMAT//%r/$rev}" + ;; + *) + error "unknown SCM '$MYSCM'" + esac +} + +prep_tree_for_tar () { + if [ ! -e "$REPOPATH/$MYSUBDIR" ]; then + error "directory does not exist: $REPOPATH/$MYSUBDIR" + fi + + if [ -z "$TAR_VERSION" ]; then + TAR_BASENAME="$FILE" + else + TAR_BASENAME="${FILE}-${TAR_VERSION}" + fi + + MYINCLUDES="" + + for INC in $INCLUDES; do + MYINCLUDES="$MYINCLUDES $INC" + done + if [ -z "$MYINCLUDES" ]; then + MYINCLUDES="*" + fi + + safe_run cd "$MYOUTDIR" + + if [ -n "$CACHEDIRECTORY" ]; then + debug cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run cp -a "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + else + debug mv3 "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + safe_run mv "$REPOPATH/$MYSUBDIR" "$TAR_BASENAME" + fi +} + +create_tar () { + safe_run cd "$TAR_BASENAME" + + TARFILE="${TAR_BASENAME}.tar.bz2" + TARPATH="$MYOUTDIR/$TARFILE" + + for INC in $MYINCLUDES; do + if [ "$INC" = ".$MYSCM" ]; then + continue + fi + + if echo "$EXCLUDES" | grep -w $INC >/dev/null + then + continue + fi + + if [ -d $INC ]; then + #safe_run tar jcf "$MYOUTDIR/$INC.tar.bz2" --exclude=.$MYSCM --exclude=.svn $INC + safe_run tar Pcf "$MYOUTDIR/$INC.tar" --exclude=.$MYSCM --exclude=.svn $INC + continue + fi + + safe_run cp $INC "$MYOUTDIR/" + done + + echo "Created $TARFILE" + safe_run cd "$MYOUTDIR" +} + +cleanup () { + debug rm -rf "$TAR_BASENAME" "$FILE" + rm -rf "$TAR_BASENAME" "$FILE" +} + +main () { + set_default_params + #xdf + DEBUG_TAR_SCM=1 + + if [ -z "$DEBUG_TAR_SCM" ]; then + get_config_options + else + # We're in test-mode, so don't let any local site-wide + # or per-user config impact the test suite. 
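+        # (this branch is always taken here: DEBUG_TAR_SCM=1 is hard-coded
+        # above, so host and user service configs are never sourced.)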
+ : + fi + parse_params "$@" + sanitise_params + + SRCDIR=$(pwd) + cd "$MYOUTDIR" + #echo "$SRCDIR $MYOUTDIR" + detect_default_filename_param + + #xdf + #LOGFILE=/srv/local_code/xdf/log/$MYPROJECT/$MYPACKAGE + #mkdir -p "/srv/local_code/xdf/log/$MYPROJECT" + + lockfile=$LOGFILE".lock" + if [ -f $lockfile ]; then + mypid=`cat $lockfile` + # while ps -p $mypid -o comm= &> /dev/null + # do + # sleep 10 + # mypid=`cat $lockfile` + # done + rm -f $lockfile + fi + touch $lockfile + echo "$$" > $lockfile + + #exec 6>&1 + #exec > $LOGFILE + echo "$@" + echo "myurl === $MYURL" + fetch_upstream + + prep_tree_for_tar + create_tar + + cleanup + rm -f $lockfile +} + +main "$@" + +exit 0 diff --git a/src/conf/ignore_repo.yaml b/src/conf/ignore_repo.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9348b166e6597123257094314dff33c3beea927f --- /dev/null +++ b/src/conf/ignore_repo.yaml @@ -0,0 +1,14 @@ +ignore: +- ci_check +- build +- obs_bin +- ci-bot +- website +- community +- docs +- infrastructure +- obs_meta +- euleros-latest-release +- image-slim +- risc-v-kernel +- opensbi diff --git a/src/conf/logger.conf b/src/conf/logger.conf new file mode 100644 index 0000000000000000000000000000000000000000..24e0d487d00f4a288a855db70fd5fc8cf6925ce0 --- /dev/null +++ b/src/conf/logger.conf @@ -0,0 +1,93 @@ +#logger.conf +############################################### +[loggers] +keys=root,jobs,build,ac,common,no_fmt + +[logger_root] +level=INFO +handlers=hand01 +qualname=root + +[logger_jobs] +level=DEBUG +handlers=hand02 +qualname=jobs + +[logger_build] +level=DEBUG +handlers=hand03 +qualname=build + +[logger_ac] +level=DEBUG +handlers=hand04 +qualname=ac + +[logger_common] +level=DEBUG +handlers=hand100 +qualname=common + +[logger_no_fmt] +level=DEBUG +handlers=hand100,hand101 +qualname=no_fmt +propagate=0 +############################################### +[handlers] +keys=hand01,hand02,hand03,hand04,hand100,hand101 + +[handler_hand01] +class=StreamHandler +level=INFO +formatter=form01 +args=(sys.stderr,) + +[handler_hand02] +class=handlers.RotatingFileHandler +level=DEBUG +formatter=form02 +args=('log/jobs.log', 'a', 10*1024*1024, 5) + +[handler_hand03] +class=handlers.RotatingFileHandler +level=DEBUG +formatter=form02 +args=('log/build.log', 'a', 10*1024*1024, 5) + +[handler_hand04] +class=handlers.RotatingFileHandler +level=DEBUG +formatter=form02 +args=('log/ac.log', 'a', 10*1024*1024, 5) + +[handler_hand100] +class=handlers.RotatingFileHandler +level=DEBUG +formatter=form02 +args=('log/common.log', 'a', 10*1024*1024, 5) + +[handler_hand101] +class=StreamHandler +level=INFO +formatter=form04 +args=(sys.stderr,) +############################################### +[formatters] +keys=form01,form02,form03,form04 + +[formatter_form01] +class=src.utils.color_log.CusColoredFormatter +format=%(log_color)s%(asctime)s [%(levelname)7s] : %(message)s + +[formatter_form02] +format=%(asctime)s %(filename)20s[line:%(lineno)3d] %(levelname)7s : %(message)s +#datefmt=%a, %d %b %Y %H:%M:%S + +[formatter_form03] +format=%(asctime)s [%(levelname)s] : %(message)s +datefmt= + +[formatter_form04] +class=src.utils.color_log.CusColoredFormatter +format=%(log_color)s%(message)s diff --git a/src/dockerfile/inbound b/src/dockerfile/inbound new file mode 100644 index 0000000000000000000000000000000000000000..410ff99b31adedd2c586f484fdab94af53956ac6 --- /dev/null +++ b/src/dockerfile/inbound @@ -0,0 +1,47 @@ +FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk:11-jdk-stretch + +ARG VERSION=4.3 +ARG 
user=jenkins +ARG group=jenkins +ARG uid=1000 +ARG gid=1000 +ARG AGENT_WORKDIR=/home/${user}/agent + + +RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list +RUN apt-get update \ + && apt-get install -y curl vim sudo git git-lfs\ + && rm -rf /var/lib/apt/lists/* + +# add docker +RUN curl -fsSL https://get.docker.com -o get-docker.sh \ + && sh get-docker.sh + +RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \ + && chmod 755 /usr/share/jenkins \ + && chmod 644 /usr/share/jenkins/agent.jar \ + && ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar + +COPY jenkins-agent /usr/local/bin/jenkins-agent +RUN chmod +x /usr/local/bin/jenkins-agent \ + && ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave + +RUN groupadd -g ${gid} ${group} +RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user} + +RUN adduser ${user} docker +RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + + +USER ${user} +ENV AGENT_WORKDIR=${AGENT_WORKDIR} +RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR} + +VOLUME /home/${user}/.jenkins +VOLUME ${AGENT_WORKDIR} +WORKDIR /home/${user} + + +ENTRYPOINT ["jenkins-agent"] + +# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/imbound-agent diff --git a/src/dockerfile/jenkins-agent b/src/dockerfile/jenkins-agent new file mode 100644 index 0000000000000000000000000000000000000000..74485736a5d4a54482e237a5ff0301bab5cc3a25 --- /dev/null +++ b/src/dockerfile/jenkins-agent @@ -0,0 +1,117 @@ +#!/usr/bin/env sh + +# The MIT License +# +# Copyright (c) 2015-2020, CloudBees, Inc. +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +# Usage jenkins-agent.sh [options] -url http://jenkins [SECRET] [AGENT_NAME] +# Optional environment variables : +# * JENKINS_TUNNEL : HOST:PORT for a tunnel to route TCP traffic to jenkins host, when jenkins can't be directly accessed over network +# * JENKINS_URL : alternate jenkins URL +# * JENKINS_SECRET : agent secret, if not set as an argument +# * JENKINS_AGENT_NAME : agent name, if not set as an argument +# * JENKINS_AGENT_WORKDIR : agent work directory, if not set by optional parameter -workDir +# * JENKINS_WEB_SOCKET: true if the connection should be made via WebSocket rather than TCP +# * JENKINS_DIRECT_CONNECTION: Connect directly to this TCP agent port, skipping the HTTP(S) connection parameter download. 
+# Value: ":" +# * JENKINS_INSTANCE_IDENTITY: The base64 encoded InstanceIdentity byte array of the Jenkins master. When this is set, +# the agent skips connecting to an HTTP(S) port for connection info. +# * JENKINS_PROTOCOLS: Specify the remoting protocols to attempt when instanceIdentity is provided. + +if [ $# -eq 1 ]; then + + # if `docker run` only has one arguments, we assume user is running alternate command like `bash` to inspect the image + exec "$@" + +else + + # if -tunnel is not provided, try env vars + case "$@" in + *"-tunnel "*) ;; + *) + if [ ! -z "$JENKINS_TUNNEL" ]; then + TUNNEL="-tunnel $JENKINS_TUNNEL" + fi ;; + esac + + # if -workDir is not provided, try env vars + if [ ! -z "$JENKINS_AGENT_WORKDIR" ]; then + case "$@" in + *"-workDir"*) echo "Warning: Work directory is defined twice in command-line arguments and the environment variable" ;; + *) + WORKDIR="-workDir $JENKINS_AGENT_WORKDIR" ;; + esac + fi + + if [ -n "$JENKINS_URL" ]; then + URL="-url $JENKINS_URL" + fi + + if [ -n "$JENKINS_NAME" ]; then + JENKINS_AGENT_NAME="$JENKINS_NAME" + fi + + if [ "$JENKINS_WEB_SOCKET" = true ]; then + WEB_SOCKET=-webSocket + fi + + if [ -n "$JENKINS_PROTOCOLS" ]; then + PROTOCOLS="-protocols $JENKINS_PROTOCOLS" + fi + + if [ -n "$JENKINS_DIRECT_CONNECTION" ]; then + DIRECT="-direct $JENKINS_DIRECT_CONNECTION" + fi + + if [ -n "$JENKINS_INSTANCE_IDENTITY" ]; then + INSTANCE_IDENTITY="-instanceIdentity $JENKINS_INSTANCE_IDENTITY" + fi + + # if java home is defined, use it + JAVA_BIN="java" + if [ "$JAVA_HOME" ]; then + JAVA_BIN="$JAVA_HOME/bin/java" + fi + + # if both required options are defined, do not pass the parameters + OPT_JENKINS_SECRET="" + if [ -n "$JENKINS_SECRET" ]; then + case "$@" in + *"${JENKINS_SECRET}"*) echo "Warning: SECRET is defined twice in command-line arguments and the environment variable" ;; + *) + OPT_JENKINS_SECRET="${JENKINS_SECRET}" ;; + esac + fi + + OPT_JENKINS_AGENT_NAME="" + if [ -n "$JENKINS_AGENT_NAME" ]; then + case "$@" in + *"${JENKINS_AGENT_NAME}"*) echo "Warning: AGENT_NAME is defined twice in command-line arguments and the environment variable" ;; + *) + OPT_JENKINS_AGENT_NAME="${JENKINS_AGENT_NAME}" ;; + esac + fi + + #TODO: Handle the case when the command-line and Environment variable contain different values. + #It is fine it blows up for now since it should lead to an error anyway. 
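+ # All of the option fragments above expand unquoted on purpose, so any that
+ # were never set simply vanish from the final command line; "$@" keeps the
+ # remaining positional arguments (secret, agent name) intact. Illustrative
+ # composed command (values are examples only, not from this file):
+ #   java -cp /usr/share/jenkins/agent.jar hudson.remoting.jnlp.Main \
+ #     -headless -url http://jenkins:8080 <secret> <agent name>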
+
+ exec $JAVA_BIN $JAVA_OPTS -cp /usr/share/jenkins/agent.jar hudson.remoting.jnlp.Main -headless $TUNNEL $URL $WORKDIR $WEB_SOCKET $DIRECT $PROTOCOLS $INSTANCE_IDENTITY $OPT_JENKINS_SECRET $OPT_JENKINS_AGENT_NAME "$@"
+fi
diff --git a/src/dockerfile/manifest.sh b/src/dockerfile/manifest.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0a5a8d39b6fb1762a60568550db1c663c57fcf93
--- /dev/null
+++ b/src/dockerfile/manifest.sh
@@ -0,0 +1,25 @@
+# build image manifest for multi arch
+# usage:
+# sh manifest.sh {name} {version}
+# example: sh manifest.sh jenkins/obs 20200601
+
+name=$1 # image name
+version=$2 # image version
+
+image=swr.cn-north-4.myhuaweicloud.com/openeuler/${name}:${version}
+image_x86_64=swr.cn-north-4.myhuaweicloud.com/openeuler/x86-64/${name}:${version}
+image_aarch64=swr.cn-north-4.myhuaweicloud.com/openeuler/aarch64/${name}:${version}
+
+echo "create manifest"
+docker manifest create -a ${image} ${image_x86_64} ${image_aarch64}
+
+echo "annotate manifest of arch amd64"
+docker manifest annotate ${image} ${image_x86_64} --os linux --arch amd64
+
+echo "annotate manifest of arch aarch64"
+docker manifest annotate ${image} ${image_aarch64} --os linux --arch arm64/v8
+
+echo "push manifest"
+docker manifest push --purge ${image}
+
+echo "build image manifest for multi arch ... pass"
diff --git a/src/dockerfile/obs b/src/dockerfile/obs
new file mode 100644
index 0000000000000000000000000000000000000000..d09274f56ae97930e9e85c19e7100f2c059cc4e7
--- /dev/null
+++ b/src/dockerfile/obs
@@ -0,0 +1,17 @@
+FROM swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:base
+ARG user=jenkins
+
+# the container resets /home/jenkins/agent on startup, so ci_check is placed under /home/jenkins instead
+# replace GiteeCloneUserName and GiteeClonePassword and tag before docker build
+RUN cd /home/${user} \
+ && git clone https://${GiteeCloneUserName}:${GiteeClonePassword}@gitee.com/src-openeuler/ci_check.git \
+ && cd ci_check \
+ && git checkout ${tag}
+
+ENTRYPOINT ["jenkins-agent"]
+
+# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:{tag}
+# image dependency
+# openjdk:11-jdk-stretch --> inbound:latest
+# |
+# --> obs:base --> obs:{tag}
diff --git a/src/dockerfile/obs-base b/src/dockerfile/obs-base
new file mode 100644
index 0000000000000000000000000000000000000000..34f5ca9d5739d3be7b3bec56ab31f9822f637c6b
--- /dev/null
+++ b/src/dockerfile/obs-base
@@ -0,0 +1,52 @@
+FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk:11-jdk-stretch
+
+ARG VERSION=4.3
+ARG user=jenkins
+ARG group=jenkins
+ARG uid=1000
+ARG gid=1000
+ARG AGENT_WORKDIR=/home/${user}/agent
+
+
+RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list
+RUN apt-get update \
+ && apt-get install -y python3 python3-pip python python-pip \
+ && apt-get install -y curl vim git git-lfs \
+ && apt-get install -y sudo cpio bsdtar \
+ && apt-get install -y sudo libxml-tokeparser-perl libxml-simpleobject-perl \
+ && apt-get install -y sudo libxml-parser-easytree-perl libxml-sax-expat-perl \
+ && apt-get install -y osc \
+ && apt-get install -y golint splint pylint pylint3 \
+ && apt-get install -y abigail-tools \
+ && rm -rf /var/lib/apt/lists/* \
+ && cpan install XML::Structured
+
+
+RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \
+ && chmod 755 /usr/share/jenkins \
+ && chmod 644 /usr/share/jenkins/agent.jar \
+ && ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar
+
+COPY
jenkins-agent /usr/local/bin/jenkins-agent +RUN chmod a+rx /usr/local/bin/jenkins-agent \ + && ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave + +RUN groupadd -g ${gid} ${group} +RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user} +RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + + +USER ${user} +ENV AGENT_WORKDIR=${AGENT_WORKDIR} +RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR} + +RUN python -m pip install --upgrade pip + +VOLUME /home/${user}/.jenkins +VOLUME ${AGENT_WORKDIR} +WORKDIR ${AGENT_WORKDIR} + + +ENTRYPOINT ["jenkins-agent"] + +# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/obs:base diff --git a/src/dockerfile/openeuler-base b/src/dockerfile/openeuler-base new file mode 100644 index 0000000000000000000000000000000000000000..88c94fd68c5f57a8e3e295b351c99e24f4498996 --- /dev/null +++ b/src/dockerfile/openeuler-base @@ -0,0 +1,38 @@ +FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openjdk-openeuler:11-jdk-stretch + +ARG VERSION=4.3 +ARG user=jenkins +ARG group=jenkins +ARG uid=1000 +ARG gid=1000 +ARG AGENT_WORKDIR=/home/${user}/agent + +RUN yum install -y shadow git + +RUN curl --create-dirs -fsSLo /usr/share/jenkins/agent.jar https://repo.jenkins-ci.org/public/org/jenkins-ci/main/remoting/${VERSION}/remoting-${VERSION}.jar \ + && chmod 755 /usr/share/jenkins \ + && chmod 644 /usr/share/jenkins/agent.jar \ + && ln -sf /usr/share/jenkins/agent.jar /usr/share/jenkins/slave.jar + +COPY jenkins-agent /usr/local/bin/jenkins-agent +RUN chmod a+rx /usr/local/openjdk-11 \ + && chmod a+rx /usr/local/bin/jenkins-agent \ + && ln -s /usr/local/bin/jenkins-agent /usr/local/bin/jenkins-slave + +RUN groupadd -g ${gid} ${group} +RUN useradd -c "Jenkins user" -d /home/${user} -u ${uid} -g ${gid} -m ${user} +RUN echo "${user} ALL=(ALL) NOPASSWD:ALL" >> /etc/sudoers + + +USER ${user} +ENV AGENT_WORKDIR=${AGENT_WORKDIR} +RUN mkdir /home/${user}/.jenkins && mkdir -p ${AGENT_WORKDIR} + +VOLUME /home/${user}/.jenkins +VOLUME ${AGENT_WORKDIR} +WORKDIR ${AGENT_WORKDIR} + + +ENTRYPOINT ["jenkins-agent"] + +# swr.cn-north-4.myhuaweicloud.com/openeuler/jenkins/openeuler:base diff --git a/src/dockerfile/openjdk-openeuler b/src/dockerfile/openjdk-openeuler new file mode 100644 index 0000000000000000000000000000000000000000..1f264e30fd16e103f0dcf13b5dadcfdf50efdf4b --- /dev/null +++ b/src/dockerfile/openjdk-openeuler @@ -0,0 +1,95 @@ +FROM swr.cn-north-4.myhuaweicloud.com/openeuler/openeuler:20.03-lts-08-20 + +RUN set -eux; \ + yum install -y tar wget + +# Default to UTF-8 file.encoding +ENV LANG C.UTF-8 + +ENV JAVA_HOME /usr/local/openjdk-11 +ENV PATH $JAVA_HOME/bin:$PATH + +# backwards compatibility shim +RUN { echo '#/bin/sh'; echo 'echo "$JAVA_HOME"'; } > /usr/local/bin/docker-java-home && chmod +x /usr/local/bin/docker-java-home && [ "$JAVA_HOME" = "$(docker-java-home)" ] + +# https://adoptopenjdk.net/upstream.html +# > +# > What are these binaries? +# > +# > These binaries are built by Red Hat on their infrastructure on behalf of the OpenJDK jdk8u and jdk11u projects. The binaries are created from the unmodified source code at OpenJDK. Although no formal support agreement is provided, please report any bugs you may find to https://bugs.java.com/. +# > +ENV JAVA_VERSION 11.0.8 +# https://github.com/docker-library/openjdk/issues/320#issuecomment-494050246 +# > +# > I am the OpenJDK 8 and 11 Updates OpenJDK project lead. +# > ... 
+# > While it is true that the OpenJDK Governing Board has not sanctioned those releases, they (or rather we, since I am a member) didn't sanction Oracle's OpenJDK releases either. As far as I am aware, the lead of an OpenJDK project is entitled to release binary builds, and there is clearly a need for them. +# > + +RUN set -eux; \ + \ + arch="$(arch)"; \ +# this "case" statement is generated via "update.sh" + case "$arch" in \ +# arm64v8 + arm64 | aarch64) downloadUrl=https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_aarch64_linux_11.0.8_10.tar.gz ;; \ +# amd64 + amd64 | i386:x86-64 | x86_64) downloadUrl=https://github.com/AdoptOpenJDK/openjdk11-upstream-binaries/releases/download/jdk-11.0.8%2B10/OpenJDK11U-jdk_x64_linux_11.0.8_10.tar.gz ;; \ +# fallback + *) echo >&2 "error: unsupported architecture: '$arch'"; exit 1 ;; \ + esac; \ + \ + wget -O openjdk.tgz.asc "$downloadUrl.sign"; \ + #wget -O openjdk.tgz "$downloadUrl"; \ + wget -O openjdk.tgz "$downloadUrl" --progress=dot:giga; \ + \ + #export GNUPGHOME="$(mktemp -d)"; \ + #gpg --batch --keyserver ha.pool.sks-keyservers.net --keyserver-options no-self-sigs-only --recv-keys CA5F11C6CE22644D42C6AC4492EF8D39DC13168F; \ + #gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys EAC843EBD3EFDB98CC772FADA5CD6035332FA671; \ + #gpg --batch --list-sigs --keyid-format 0xLONG CA5F11C6CE22644D42C6AC4492EF8D39DC13168F \ + # | tee /dev/stderr \ + # | grep '0xA5CD6035332FA671' \ + # | grep 'Andrew Haley'; \ + #gpg --batch --verify openjdk.tgz.asc openjdk.tgz; \ + #gpgconf --kill all; \ + #rm -rf "$GNUPGHOME"; \ + \ + mkdir -p "$JAVA_HOME"; \ + tar --extract \ + --file openjdk.tgz \ + --directory "$JAVA_HOME" \ + --strip-components 1 \ + --no-same-owner \ + ; \ + rm openjdk.tgz*; \ + \ +# TODO strip "demo" and "man" folders? + \ +# update "cacerts" bundle to use Debian's CA certificates (and make sure it stays up-to-date with changes to Debian's store) +# see https://github.com/docker-library/openjdk/issues/327 +# http://rabexc.org/posts/certificates-not-working-java#comment-4099504075 +# https://salsa.debian.org/java-team/ca-certificates-java/blob/3e51a84e9104823319abeb31f880580e46f45a98/debian/jks-keystore.hook.in +# https://git.alpinelinux.org/aports/tree/community/java-cacerts/APKBUILD?id=761af65f38b4570093461e6546dcf6b179d2b624#n29 + mkdir -p /etc/ca-certificates/update.d; \ + { \ + echo '#!/usr/bin/env bash'; \ + echo 'set -Eeuo pipefail'; \ + echo 'if ! [ -d "$JAVA_HOME" ]; then echo >&2 "error: missing JAVA_HOME environment variable"; exit 1; fi'; \ +# 8-jdk uses "$JAVA_HOME/jre/lib/security/cacerts" and 8-jre and 11+ uses "$JAVA_HOME/lib/security/cacerts" directly (no "jre" directory) + echo 'cacertsFile=; for f in "$JAVA_HOME/lib/security/cacerts" "$JAVA_HOME/jre/lib/security/cacerts"; do if [ -e "$f" ]; then cacertsFile="$f"; break; fi; done'; \ + echo 'if [ -z "$cacertsFile" ] || ! 
[ -f "$cacertsFile" ]; then echo >&2 "error: failed to find cacerts file in $JAVA_HOME"; exit 1; fi'; \ + echo 'trust extract --overwrite --format=java-cacerts --filter=ca-anchors --purpose=server-auth "$cacertsFile"'; \ + } > /etc/ca-certificates/update.d/docker-openjdk; \ + chmod +x /etc/ca-certificates/update.d/docker-openjdk; \ + /etc/ca-certificates/update.d/docker-openjdk; \ + \ +# https://github.com/docker-library/openjdk/issues/331#issuecomment-498834472 + find "$JAVA_HOME/lib" -name '*.so' -exec dirname '{}' ';' | sort -u > /etc/ld.so.conf.d/docker-openjdk.conf; \ + ldconfig; \ + \ +# basic smoke test + javac --version; \ + java --version + +# "jshell" is an interactive REPL for Java (see https://en.wikipedia.org/wiki/JShell) +CMD ["jshell"] diff --git a/src/jenkinsfile/README.md b/src/jenkinsfile/README.md new file mode 100644 index 0000000000000000000000000000000000000000..681ebac125ff47b4961f124995ae0a62b5066ad8 --- /dev/null +++ b/src/jenkinsfile/README.md @@ -0,0 +1,10 @@ +# openeuler源码仓jenkins构建脚本 + +> 源码仓编译种类差异,每个仓有不同的jenkins构建过程,构建脚本放在此处 +> +> 推荐使用**sh**或者**python**编写构建过程,开头请带上**shebang** +> +> 脚本名称和仓库名保持相同,可带语言对应的后缀 +> +> 当前支持**aarch64、x86-64、risc-v**架构 + diff --git a/src/jenkinsfile/aarch64/libvirt.sh b/src/jenkinsfile/aarch64/libvirt.sh new file mode 100755 index 0000000000000000000000000000000000000000..2318dc7e667a47f7e866f4864493582debf69c36 --- /dev/null +++ b/src/jenkinsfile/aarch64/libvirt.sh @@ -0,0 +1,165 @@ +#!/bin/bash +sudo yum install -y \ + gdb \ + make \ + audit-libs-devel \ + augeas \ + autoconf \ + automake \ + bash-completion \ + cyrus-sasl-devel \ + dbus-devel \ + device-mapper-devel \ + dnsmasq \ + ebtables \ + firewalld-filesystem \ + gawk \ + gcc \ + gettext \ + gettext-devel \ + git \ + glib2-devel \ + glusterfs-api-devel \ + glusterfs-devel \ + gnutls-devel \ + iptables \ + iscsi-initiator-utils \ + libacl-devel \ + libattr-devel \ + libblkid-devel \ + libcap-ng-devel \ + libiscsi-devel \ + libnl3-devel \ + libpcap-devel \ + libpciaccess-devel \ + librados-devel \ + librbd-devel \ + libselinux-devel \ + libssh-devel \ + libssh2-devel \ + libtasn1-devel \ + libtirpc-devel \ + libtool \ + libxml2-devel \ + libxslt \ + lvm2 \ + module-init-tools \ + ncurses-devel \ + netcf-devel \ + nfs-utils \ + numactl-devel \ + numad \ + parted-devel \ + perl-interpreter \ + polkit \ + python3 \ + python3-docutils \ + qemu-img \ + radvd \ + readline-devel \ + rpcgen \ + sanlock-devel \ + scrub \ + systemd-devel \ + systemd-units \ + systemtap-sdt-devel \ + util-linux \ + wireshark-devel \ + xfsprogs-devel \ + yajl-devel \ + --downloadonly --downloaddir=./ --allowerasing --skip-broken --nobest +sudo rpm -ivh --force --nodeps *.rpm + +cd ${repo} +git submodule update --init +autoreconf --verbose --force --install + +mkdir aarch64-openEuler-linux-gnu +cd aarch64-openEuler-linux-gnu + +../configure \ + --build=aarch64-openEuler-linux-gnu \ + --host=aarch64-openEuler-linux-gnu \ + --program-prefix= \ + --disable-dependency-tracking \ + --prefix=/usr \ + --exec-prefix=/usr \ + --bindir=/usr/bin \ + --sbindir=/usr/sbin \ + --sysconfdir=/etc \ + --datadir=/usr/share \ + --includedir=/usr/include \ + --libdir=/usr/lib64 \ + --libexecdir=/usr/libexec \ + --localstatedir=/var \ + --sharedstatedir=/var/lib \ + --mandir=/usr/share/man \ + --infodir=/usr/share/info \ + --enable-dependency-tracking \ + --with-runstatedir=/run \ + --with-qemu \ + --without-openvz \ + --without-lxc \ + --without-vbox \ + --without-libxl \ + --with-sasl \ + --with-polkit \ + --with-libvirtd \ + 
--without-esx \ + --without-hyperv \ + --without-vmware \ + --without-vz \ + --without-bhyve \ + --with-remote-default-mode=legacy \ + --with-interface \ + --with-network \ + --with-storage-fs \ + --with-storage-lvm \ + --with-storage-iscsi \ + --with-storage-iscsi-direct \ + --with-storage-scsi \ + --with-storage-disk \ + --with-storage-mpath \ + --with-storage-rbd \ + --without-storage-sheepdog \ + --with-storage-gluster \ + --without-storage-zfs \ + --without-storage-vstorage \ + --with-numactl \ + --with-numad \ + --with-capng \ + --without-fuse \ + --with-netcf \ + --with-selinux \ + --with-selinux-mount=/sys/fs/selinux \ + --without-apparmor \ + --without-hal \ + --with-udev \ + --with-yajl \ + --with-sanlock \ + --with-libpcap \ + --with-macvtap \ + --with-audit \ + --with-dtrace \ + --with-driver-modules \ + --with-firewalld \ + --with-firewalld-zone \ + --with-wireshark-dissector \ + --without-pm-utils \ + --with-nss-plugin \ + '--with-packager=http://openeuler.org, 2020-08-20-11:11:11, ' \ + --with-packager-version=7.oe1 \ + --with-qemu-user=qemu \ + --with-qemu-group=qemu \ + --with-tls-priority=@LIBVIRT,SYSTEM \ + --with-loader-nvram=/usr/share/edk2.git/ovmf-x64/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-x64/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2.git/aarch64/vars-template-pflash.raw:/usr/share/edk2.git/arm/QEMU_EFI-pflash.raw:/usr/share/edk2.git/arm/vars-template-pflash.raw:/usr/share/edk2/ovmf/OVMF_CODE.fd:/usr/share/edk2/ovmf/OVMF_VARS.fd:/usr/share/edk2/ovmf-ia32/OVMF_CODE.fd:/usr/share/edk2/ovmf-ia32/OVMF_VARS.fd:/usr/share/edk2/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2/aarch64/vars-template-pflash.raw:/usr/share/edk2/arm/QEMU_EFI-pflash.raw:/usr/share/edk2/arm/vars-template-pflash.raw \ + --enable-werror \ + --enable-expensive-tests \ + --with-init-script=systemd \ + --without-login-shell || (cat config.log; exit 1) + +make -j$(getconf _NPROCESSORS_ONLN) V=1 +sed -i 's/while (kill(pid, 0) != -1)/for (int i = 0; kill(pid, 0) != -1 \&\& i < 300; i++)/' ../tests/commandtest.c +sed -i 's/while (kill(pid, SIGINT) != -1)/for (int i = 0; kill(pid, SIGINT) != -1 \&\& i < 300; i++)/' ../tests/commandtest.c +(set +x; for((i=0;i<3;i++)); do sleep 30; ps -fC make &>/dev/null || break; ps ww -e f; ps ww -ef | awk '$9~"tests/.libs/lt-commandtest"{print$2}' | xargs -n 1 pstack; done) & +timeout 120 make -j$(getconf _NPROCESSORS_ONLN) check VIR_TEST_DEBUG=1 || (cat tests/test-suite.log; exit 1) diff --git a/src/jenkinsfile/x86-64/libvirt.sh b/src/jenkinsfile/x86-64/libvirt.sh new file mode 100755 index 0000000000000000000000000000000000000000..573d0d860b0125ba35bd03cdffc00020914725a5 --- /dev/null +++ b/src/jenkinsfile/x86-64/libvirt.sh @@ -0,0 +1,165 @@ +#!/bin/bash +sudo yum install -y \ + gdb \ + make \ + audit-libs-devel \ + augeas \ + autoconf \ + automake \ + bash-completion \ + cyrus-sasl-devel \ + dbus-devel \ + device-mapper-devel \ + dnsmasq \ + ebtables \ + firewalld-filesystem \ + gawk \ + gcc \ + gettext \ + gettext-devel \ + git \ + glib2-devel \ + glusterfs-api-devel \ + glusterfs-devel \ + gnutls-devel \ + iptables \ + iscsi-initiator-utils \ + libacl-devel \ + libattr-devel \ + libblkid-devel \ + libcap-ng-devel \ + libiscsi-devel \ + libnl3-devel \ + libpcap-devel \ + libpciaccess-devel \ + librados-devel \ + librbd-devel \ + libselinux-devel \ + libssh-devel \ + libssh2-devel \ + libtasn1-devel \ + libtirpc-devel \ 
+ libtool \ + libxml2-devel \ + libxslt \ + lvm2 \ + module-init-tools \ + ncurses-devel \ + netcf-devel \ + nfs-utils \ + numactl-devel \ + numad \ + parted-devel \ + perl-interpreter \ + polkit \ + python3 \ + python3-docutils \ + qemu-img \ + radvd \ + readline-devel \ + rpcgen \ + sanlock-devel \ + scrub \ + systemd-devel \ + systemd-units \ + systemtap-sdt-devel \ + util-linux \ + wireshark-devel \ + xfsprogs-devel \ + yajl-devel \ + --downloadonly --downloaddir=./ --allowerasing --skip-broken --nobest +sudo rpm -ivh --force --nodeps *.rpm + +cd ${repo} +git submodule update --init +autoreconf --verbose --force --install + +mkdir x86_64-openEuler-linux-gnu +cd x86_64-openEuler-linux-gnu + +../configure \ + --build=x86_64-openEuler-linux-gnu \ + --host=x86_64-openEuler-linux-gnu \ + --program-prefix= \ + --disable-dependency-tracking \ + --prefix=/usr \ + --exec-prefix=/usr \ + --bindir=/usr/bin \ + --sbindir=/usr/sbin \ + --sysconfdir=/etc \ + --datadir=/usr/share \ + --includedir=/usr/include \ + --libdir=/usr/lib64 \ + --libexecdir=/usr/libexec \ + --localstatedir=/var \ + --sharedstatedir=/var/lib \ + --mandir=/usr/share/man \ + --infodir=/usr/share/info \ + --enable-dependency-tracking \ + --with-runstatedir=/run \ + --with-qemu \ + --without-openvz \ + --without-lxc \ + --without-vbox \ + --without-libxl \ + --with-sasl \ + --with-polkit \ + --with-libvirtd \ + --without-esx \ + --without-hyperv \ + --without-vmware \ + --without-vz \ + --without-bhyve \ + --with-remote-default-mode=legacy \ + --with-interface \ + --with-network \ + --with-storage-fs \ + --with-storage-lvm \ + --with-storage-iscsi \ + --with-storage-iscsi-direct \ + --with-storage-scsi \ + --with-storage-disk \ + --with-storage-mpath \ + --with-storage-rbd \ + --without-storage-sheepdog \ + --with-storage-gluster \ + --without-storage-zfs \ + --without-storage-vstorage \ + --with-numactl \ + --with-numad \ + --with-capng \ + --without-fuse \ + --with-netcf \ + --with-selinux \ + --with-selinux-mount=/sys/fs/selinux \ + --without-apparmor \ + --without-hal \ + --with-udev \ + --with-yajl \ + --with-sanlock \ + --with-libpcap \ + --with-macvtap \ + --with-audit \ + --with-dtrace \ + --with-driver-modules \ + --with-firewalld \ + --with-firewalld-zone \ + --with-wireshark-dissector \ + --without-pm-utils \ + --with-nss-plugin \ + '--with-packager=http://openeuler.org, 2020-08-20-11:11:11, ' \ + --with-packager-version=7.oe1 \ + --with-qemu-user=qemu \ + --with-qemu-group=qemu \ + --with-tls-priority=@LIBVIRT,SYSTEM \ + --with-loader-nvram=/usr/share/edk2.git/ovmf-x64/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-x64/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_CODE-pure-efi.fd:/usr/share/edk2.git/ovmf-ia32/OVMF_VARS-pure-efi.fd:/usr/share/edk2.git/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2.git/aarch64/vars-template-pflash.raw:/usr/share/edk2.git/arm/QEMU_EFI-pflash.raw:/usr/share/edk2.git/arm/vars-template-pflash.raw:/usr/share/edk2/ovmf/OVMF_CODE.fd:/usr/share/edk2/ovmf/OVMF_VARS.fd:/usr/share/edk2/ovmf-ia32/OVMF_CODE.fd:/usr/share/edk2/ovmf-ia32/OVMF_VARS.fd:/usr/share/edk2/aarch64/QEMU_EFI-pflash.raw:/usr/share/edk2/aarch64/vars-template-pflash.raw:/usr/share/edk2/arm/QEMU_EFI-pflash.raw:/usr/share/edk2/arm/vars-template-pflash.raw \ + --enable-werror \ + --enable-expensive-tests \ + --with-init-script=systemd \ + --without-login-shell || (cat config.log; exit 1) + +make -j$(getconf _NPROCESSORS_ONLN) V=1 +sed -i 's/while (kill(pid, 0) != -1)/for (int i = 0; kill(pid, 0) != -1 \&\& i < 300; i++)/' 
../tests/commandtest.c
+sed -i 's/while (kill(pid, SIGINT) != -1)/for (int i = 0; kill(pid, SIGINT) != -1 \&\& i < 300; i++)/' ../tests/commandtest.c
+(set +x; for((i=0;i<3;i++)); do sleep 30; ps -fC make &>/dev/null || break; ps ww -e f; ps ww -ef | awk '$9~"tests/.libs/lt-commandtest"{print$2}' | xargs -n 1 pstack; done) &
+timeout 120 make -j$(getconf _NPROCESSORS_ONLN) check VIR_TEST_DEBUG=1 || (cat tests/test-suite.log; exit 1)
diff --git a/src/jobs/__init__.py b/src/jobs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/jobs/jenkins_job.py b/src/jobs/jenkins_job.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba8c20a2edf7682417e8ea4e27c578ab826f56e8
--- /dev/null
+++ b/src/jobs/jenkins_job.py
@@ -0,0 +1,228 @@
+# -*- encoding=utf-8 -*-
+import gevent
+from gevent import monkey
+monkey.patch_all()
+
+from abc import ABCMeta, abstractmethod
+import os
+import logging.config
+import logging
+import time
+import xml.etree.ElementTree as ET
+import yaml
+import argparse
+
+
+class JenkinsJobs(object):
+ __metaclass__ = ABCMeta
+
+ def __init__(self, template_job, jenkins_proxy):
+ """
+ :param template_job: name of the template job whose config is used as the base
+ :param jenkins_proxy: JenkinsProxy instance used to fetch the template job config
+ """
+ self._template_job = template_job
+ self._template_job_config = jenkins_proxy.get_config(template_job)
+
+ def run(self, action, jobs, jenkins_proxy, concurrency=75, retry=3, interval=0):
+ """
+ Entry point
+ :param action: action to perform, "create" or "update"
+ :param jobs: comma-separated job names
+ :param jenkins_proxy: JenkinsProxy instance of the target jobs
+ :param concurrency: batch size; to limit load on the jenkins server, requests are sent in batches of this many greenlets
+ :param retry: retry times
+ :param interval: seconds to sleep after each batch of requests
+ :return:
+ """
+ jobs = [job.strip() for job in jobs.split(",")]
+ logger.info("{} jobs {}".format(action, jobs))
+ real_jobs = self.get_real_target_jobs(jobs)
+
+ def run_once(target_jobs):
+ batch = (len(target_jobs) + concurrency - 1) / concurrency
+ _failed_jobs = []
+ for index in xrange(batch):
+ works = [gevent.spawn(self.dispatch, action, job, jenkins_proxy) for job in
+ target_jobs[index * concurrency: (index + 1) * concurrency]]
+ logger.info("{} works, {}/{} ".format(len(works), index + 1, batch))
+ gevent.joinall(works)
+ for work in works:
+ if work.value["result"]:
+ logger.info("{} job {} ... ok".format(action, work.value["job"]))
+ else:
+ _failed_jobs.append(work.value["job"])
+ logger.error("{} job {} ... failed".format(action, work.value["job"]))
+
+ time.sleep(interval)
+
+ return _failed_jobs
+
+ failed_jobs = run_once(real_jobs)
+
+ for index in xrange(retry):
+ if not failed_jobs:
+ break
+ logger.info("{} jobs failed, retrying {}/{}".format(len(failed_jobs), index + 1, retry))
+ failed_jobs = run_once(failed_jobs)
+
+ if failed_jobs:
+ logger.warning("{} failed jobs".format(len(failed_jobs)))
+ logger.warning("{}{}".format(",".join(failed_jobs[:100]), "..."
if len(failed_jobs) > 100 else "")) + + def dispatch(self, action, job, jenkins_proxy): + """ + 分发任务 + :param action: 更新或者创建 + :param job: 目标任务 + :param jenkins_proxy: 目标任务jenkins代理 + :return: dict + """ + job_config = self.update_config(job) + result = jenkins_proxy.create_job(job, job_config) if action == "create" else jenkins_proxy.update_job(job, job_config) + + return {"job": job, "result": result} + + @abstractmethod + def get_real_target_jobs(self, jobs): + """ + 实际要操作的任务 + :param jobs: + :return: + """ + return jobs + + @abstractmethod + def update_config(self, job): + raise NotImplementedError + + +class SrcOpenEulerJenkinsJobs(JenkinsJobs): + """ + src-openEuler 仓库 + """ + def __init__(self, template_job, jenkins_proxy, buddy_file, exclusive_arch_path=None): + super(SrcOpenEulerJenkinsJobs, self).__init__(template_job, jenkins_proxy) + + with open(buddy_file, "r") as f: + self._buddy_info = yaml.safe_load(f) + logger.debug("load buddy info ok") + + # spec中包含ExclusiveArch的项目 + self._exclusive_arch = {} + if exclusive_arch_path: + for filename in os.listdir(exclusive_arch_path): + with open(os.path.join(exclusive_arch_path, filename), "r") as f: + arches = f.readline() + self._exclusive_arch[filename] = [arch.strip() for arch in arches.split(",")] + logger.debug("exclusive arch: {}".format(self._exclusive_arch )) + + def get_real_target_jobs(self, jobs): + """ + 真实有效的任务列表 + :param jobs: 用户输入的任务列表 + :return: list + """ + if "all" in jobs: + return self._buddy_info.keys() + + return [job for job in jobs if job in self._buddy_info] + + def update_config(self, job): + """ + 根据模板生成目标任务配置信息 + :param job: 目标任务 + :return: xml string + """ + root = ET.fromstring(self._template_job_config.encode("utf-8")) + + buddy = self._buddy_info[job] # + + # triggers + ele = root.find("triggers//regexpFilterExpression") + if ele is not None: + ele.text = ele.text.replace(self._template_job, buddy["repo"]) + + # parameterized trigger + ele = root.find("publishers/hudson.plugins.parameterizedtrigger.BuildTrigger//projects") + if ele is not None: + arches = self._exclusive_arch.get(buddy["repo"]) + if arches: # eg: [x86_64] + projects = [] + for project in ele.text.split(","): + for arch in arches: + if arch in project: + projects.append(project) + ele.text = ",".join(projects).replace(self._template_job, buddy["repo"]) + else: + ele.text = ele.text.replace(self._template_job, buddy["repo"]) + + # join trigger + ele = root.find("publishers/join.JoinTrigger//projects") + if ele is not None: + ele.text = ele.text.replace(self._template_job, buddy["repo"]) + + # set repo defaultValue + ele = root.find("properties//*[name=\"repo\"]/defaultValue") + if ele is not None: + ele.text = buddy["repo"] + + # set buddy defaultValue + ele = root.find("properties//*[name=\"buddy\"]/defaultValue") + if ele is not None: + ele.text = ",".join(buddy["buddy"]) + + # set packages defaultValue + ele = root.find("properties//*[name=\"package\"]/defaultValue") + if ele is not None: + ele.text = ",".join(buddy["packages"]) + + return ET.tostring(root) + + +class OpenEulerJenkinsJobs(JenkinsJobs): + """ + openEuler 仓库 + TODO... 
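+ (job-config generation for openEuler code repositories is not implemented yet)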
+ """ + def get_real_target_jobs(self, jobs): + super(OpenEulerJenkinsJobs, self).get_real_target_jobs(jobs) + + def update_config(self, job): + pass + + +if "__main__" == __name__: + args = argparse.ArgumentParser() + args.add_argument("-a", type=str, dest="action", help="workspace where to find source") + args.add_argument("-c", type=int, dest="concurrency", default=75, help="jobs that send to jenkins server concurrency") + args.add_argument("-r", type=int, dest="retry", default=3, help="retry times") + args.add_argument("-i", type=int, dest="interval", default=0, help="retry interval") + + args.add_argument("-m", type=str, dest="template_job", help="template job name") + args.add_argument("-s", type=str, dest="template_job_base_url", help="jenkins base url of template job") + args.add_argument("-j", type=str, dest="target_jobs", help="jobs to created") + args.add_argument("-d", type=str, dest="target_job_base_url", help="jenkins base url of target jobs") + args.add_argument("-o", type=int, dest="jenkins_timeout", default=10, help="jenkins api timeout") + + args.add_argument("-u", type=str, dest="jenkins_user", help="repo name") + args.add_argument("-t", type=str, dest="jenkins_api_token", help="jenkins api token") + + args.add_argument("-x", type=str, dest="buddy_info_file", help="src-openeuler buddy info file") + args.add_argument("-e", type=str, dest="exclusive_arch_file", help="exclusive arch file") + + args = args.parse_args() + + # init logging + not os.path.exists("log") and os.mkdir("log") + logger_conf_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/logger.conf")) + logging.config.fileConfig(logger_conf_path) + logger = logging.getLogger("jobs") + + from src.proxy.jenkins_proxy import JenkinsProxy + jp_m = JenkinsProxy(args.template_job_base_url, args.jenkins_user, args.jenkins_api_token, args.jenkins_timeout) + jp_t = JenkinsProxy(args.target_job_base_url, args.jenkins_user, args.jenkins_api_token, args.jenkins_timeout) + + jenkins_jobs = SrcOpenEulerJenkinsJobs(args.template_job, jp_m, args.buddy_info_file, args.exclusive_arch_file) + jenkins_jobs.run(args.action, args.target_jobs, jp_t, concurrency=args.concurrency, retry=args.retry, interval=args.interval) diff --git a/src/jobs/obs_meta_strategy.py b/src/jobs/obs_meta_strategy.py new file mode 100644 index 0000000000000000000000000000000000000000..75f20b145eb98c28ef92d39ff176f572751028f1 --- /dev/null +++ b/src/jobs/obs_meta_strategy.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +import logging +import os +import re +import xml.etree.ElementTree as ET +from collections import defaultdict + +logger = logging.getLogger("jobs") + + +class ObsMetaStrategy(object): + """ + 使用obs_meta仓库的策略 + """ + def __init__(self, obs_meta_path): + """ + + :param obs_meta_path: obs_meta路径 + """ + self._obs_meta_path = obs_meta_path + self._package_repo = defaultdict(set) + self._repo_package = defaultdict(set) + + def get_packages_of_repo(self, repo): + """ + 获取关联的obs package + :param repo: + :return: + """ + return list(self._repo_package.get(repo, set())) + + def get_buddy_of_repo(self, repo): + """ + 获取兄弟仓库列表 + :param repo: + :return: + """ + packages = self.get_packages_of_repo(repo) + + buddy = set() + for package in packages: + buddy.update(self._package_repo.get(package, set())) + + return list(buddy) + + def __iter__(self): + return iter(self._repo_package.keys()) + + def algorithm(self, *repos): + """ + 仓库与package关联信息算法 + :param repos: 仓库列表 + :return: + """ + index = 0 + for dirpath, dirnames, filenames in 
os.walk(self._obs_meta_path):
+ # skip .osc and .git directories
+ if re.search(r"\.osc|\.git", dirpath):
+ continue
+
+ for filename in filenames:
+ if filename == "_service":
+ _service = os.path.join(dirpath, filename)
+ try:
+ logger.debug("analysis {}".format(_service))
+ tree = ET.parse(_service)
+ elements = tree.findall(".//param[@name=\"url\"]") # url values look like .../next/openEuler/zip
+ except Exception:
+ logger.exception("invalid xml format, {}".format(_service))
+ continue
+
+ _repos = [element.text.strip("/").split("/")[-1] for element in elements] # eg: next/openEuler/zip
+ logger.debug("get repos: {}".format(_repos))
+ if any([repo in repos for repo in _repos]):
+ package = dirpath.strip("/").split("/")[-1] # dirpath eg: master/openEuler:Mainline/zip
+ index += 1
+ logger.info("{} {}...ok".format(index, _service))
+ logger.info("package: {}, repos: {}".format(package, _repos))
+ for repo in _repos:
+ self._package_repo[package].add(repo)
+ self._repo_package[repo].add(package)
diff --git a/src/jobs/repo_buddy.py b/src/jobs/repo_buddy.py
new file mode 100644
index 0000000000000000000000000000000000000000..6749588bc68a2f1ccab4efc02160cde6b693923f
--- /dev/null
+++ b/src/jobs/repo_buddy.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+import logging.config
+import logging
+import os
+import argparse
+
+import yaml
+
+
+class RepoBuddy(object):
+ def __init__(self, ignored_repos_path, community_path, *repos):
+ """
+ :param ignored_repos_path: config file of repos to ignore
+ :param repos: repos given by the user
+ """
+ self._input_repos = repos
+ self._repo_buddy = {} # holds the result
+ self._ignored_repos = self._load_ignore_repo(ignored_repos_path)
+ logger.debug("ignored repos: {}".format(self._ignored_repos))
+ self._community_repos = self._load_community_repo(community_path) # community repos
+ logger.debug("community repos: {}".format(self._community_repos))
+
+ @staticmethod
+ def _load_ignore_repo(conf_file):
+ """
+ Load repos that should not trigger gate jobs
+ :param conf_file:
+ :return:
+ """
+ try:
+ with open(conf_file, "r") as f:
+ handler = yaml.safe_load(f)
+ return handler.get("ignore", [])
+ except IOError as e:
+ logger.warning("{} not exist".format(conf_file))
+ return []
+
+ @staticmethod
+ def _load_community_repo(community_path):
+ """
+ Load the repo list from the community repository
+ :param community_path:
+ :return:
+ """
+ try:
+ conf_file = os.path.join(community_path, "repository/src-openeuler.yaml")
+ with open(conf_file, "r") as f:
+ handler = yaml.safe_load(f)
+ repos = {item["name"]: item["type"] for item in handler["repositories"]}
+ logger.info("repos from community: {}".format(len(repos)))
+ return repos
+ except IOError as e:
+ logger.warning("{} not exist".format(conf_file))
+ return []
+
+ def _is_valid_repo(self, repo):
+ """
+ Whether the repo needs a gate job
+ :param repo:
+ :return:
+ """
+ if repo in self._community_repos and repo not in self._ignored_repos:
+ return True
+
+ return False
+
+ def buddy(self, strategy):
+ """
+ Compute the packages and buddy repos associated with each repo
+ :param strategy: strategy object
+ :return:
+ """
+ repos = self._community_repos if "all" in self._input_repos else self._input_repos
+ valid_repos = [repo for repo in repos if self._is_valid_repo(repo)]
+
+ if valid_repos:
+ strategy.algorithm(*valid_repos)
+
+ self._repo_buddy = {repo: {"repo": repo, "packages": strategy.get_packages_of_repo(repo),
+ "buddy": strategy.get_buddy_of_repo(repo)} for repo in strategy}
+
+ def save(self, output):
+ """
+ Save the result
+ :param output:
+ :return:
+ """
+ with open(output, "w") as f:
+ yaml.safe_dump(self._repo_buddy, f)
+
+
+if "__main__" == __name__:
+ args = argparse.ArgumentParser()
+ args.add_argument("-j", type=str, dest="jobs",
help="jobs name, split by dot") + args.add_argument("-o", type=str, dest="mapping_file", help="output file to save buddy info") + args.add_argument("-m", type=str, dest="obs_meta_path", help="obs meta path") + args.add_argument("-c", type=str, dest="community_path", help="community repo path") + args = args.parse_args() + + not os.path.exists("log") and os.mkdir("log") + logger_conf_path = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../conf/logger.conf")) + logging.config.fileConfig(logger_conf_path) + logger = logging.getLogger("jobs") + + # import after log initial + from src.proxy.requests_proxy import do_requests + from src.proxy.gitee_proxy import GiteeProxy + from src.jobs.obs_meta_strategy import ObsMetaStrategy + + ignore_repo_path = os.path.realpath(os.path.join(os.path.realpath(__file__), "../../conf/ignore_repo.yaml")) + input_repos = [repo.strip() for repo in args.jobs.split(",")] + + buddy = RepoBuddy(ignore_repo_path, args.community_path, *input_repos) + buddy.buddy(ObsMetaStrategy(args.obs_meta_path)) + buddy.save(args.mapping_file) diff --git a/src/lib/lib.sh b/src/lib/lib.sh new file mode 100644 index 0000000000000000000000000000000000000000..522e57102eacc792d9143c18fac24f2a2e92c9c7 --- /dev/null +++ b/src/lib/lib.sh @@ -0,0 +1,213 @@ +#!/bin/echo Warning: this library should be sourced! + +function log_info() +{ + echo "[`date +%Y-%m-%d\ %T`] [ INFO ] $@" +} + +function log_warn() +{ + echo -e "\033[33m"[`date +%Y-%m-%d\ %T`] [WARNING] $@" \033[0m" +} + +function log_error() +{ + echo -e "\033[31m"[`date +%Y-%m-%d\ %T`] [ ERROR ] $@" \033[0m" + exit 1 +} + + +function log_debug() +{ + [ "$DEBUG" == "yes" ] && echo "[`date +%Y-%m-%d\ %T`] [ DEBUG ] $@" + echo -n "" +} + +function clean_and_exit() +{ + + if [ $1 -ne 0 ]; then + echo "=========error start=========" + cat $ERROR_LOG + echo "=========error end=========" + fi + exit $1 +} + +function run_srcipt() +{ + script=$1 + shift + args="$@" + log_info "Start run $script $args at `date`" + bash $script $args + if [ $? -ne 0 ]; then + log_error "Run $script $args failed at `date`" + fi + log_info "Finished run $script $args at `date`" +} + +#function add_nameserver() +#{ +# grep -w "nameserver" /etc/resolv.conf | grep -v "#" +# if [ "$?" != "0" ]; then +# echo "search huawei.com">>/etc/resolv.conf +# echo "nameserver 10.72.255.100">>/etc/resolv.conf +# echo "nameserver 10.72.55.82">>/etc/resolv.conf +# echo "nameserver 10.98.48.39">>/etc/resolv.conf +# fi +# set +e +# ping -c 2 code.huawei.com +# if [ "$?" != "0" ]; then +# sleep 60 +# ping -c 6 code.huawei.com +# if [ "$?" != "0" ]; then +# sleep 120 +# ping -c 6 code.huawei.com +# if [ "$?" 
!= "0" ]; then +# echo "can't connet to code.huawei.com" +# exit 1 +# fi +# fi +# fi +# set -e +#} + +function git_clone() +{ + url=$1 + #add_nameserver + expect -c " + set timeout -1 + spawn git clone $url + expect { + \"?(yes/no)*?\" { + send \"yes\r\" + exp_continue + } + } +" +} + +function git_fetch() +{ + #add_nameserver + expect -c " + set timeout -1 + spawn git fetch + expect { + \"?(yes/no)*?\" { + send \"yes\r\" + exp_continue + } + } +" +} + +function git_pull() +{ + #add_nameserver + git reset --hard + git clean -df + expect -c " + set timeout -1 + spawn git pull + expect { + \"?(yes/no)*?\" { + send \"yes\r\" + exp_continue + } + } +" +} + +function git_checkout() +{ + br=$1 + expect -c " + set timeout -1 + spawn git checkout $br + expect { + \"?(yes/no)*?\" { send \"yes\r\"; exp_continue } + eof { catch wait result; exit [lindex \$result 3] } + } + expect { + eof { catch wait result; exit [lindex \$result 3] } + } +" +} + +function git_update() +{ +# cat >id_rsa <id_rsa.pub < /dev/null + else + cd "${git_dir}" + git_clone "${git_url}" + cd "${git_name}" + git_checkout "${git_branch}" + cd $old_pwd &> /dev/null + fi +} diff --git a/src/proxy/__init__.py b/src/proxy/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/proxy/git_proxy.py b/src/proxy/git_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..712f45e5684e3e97a6f2a71df89d93db37bec1bc --- /dev/null +++ b/src/proxy/git_proxy.py @@ -0,0 +1,194 @@ +# -*- encoding=utf-8 -*- +import logging +from cStringIO import StringIO + +from src.utils.shell_cmd import shell_cmd_live + +logger = logging.getLogger("common") + + +class GitProxy(object): + """ + git 代理,实现常见的git操作 + """ + def __init__(self, repo_dir): + """ + :param repo_dir: 仓库目录 + """ + self._repo_dir = repo_dir + + def get_content_of_file_with_commit(self, file_path, commit="HEAD~0"): + """ + 获取单个commit文件内容 + :param commit: HEAD~{} or SHA + :param file_path: 文件完整路径 + :return: StringIO + """ + get_content_cmd = "cd {}; git show {}:{}".format(self._repo_dir, commit, file_path) + ret, out, _ = shell_cmd_live(get_content_cmd, cap_out=True) + if ret: + logger.warning("get file content of commit failed, {}".format(ret)) + return None + + f = StringIO() + f.write("\n".join(out)) + f.seek(0) + + return f + + def diff_files_between_commits(self, base, head): + """ + 获取2次提交的差别的文件名列表 + :param base: 被比较的版本 + :param head: 比较的版本 + :return: list<string> + """ + diff_files_cmd = "cd {}; git diff --name-only --diff-filter=ACM {} {}".format(self._repo_dir, base, head) + ret, out, _ = shell_cmd_live(diff_files_cmd, cap_out=True) + + if ret: + logger.error("get diff files of commits failed, {}".format(ret)) + return [] + + return out + + def extract_files_path_of_patch(self, patch_path): + """ + 获取patch内diff的文件路径 + :param patch_path: patch完整路径 + :return: list<string> + """ + extract_file_cmd = "cd {}; git apply --numstat {}".format(self._repo_dir, patch_path) + ret, out, _ = shell_cmd_live(extract_file_cmd, cap_out=True) + + if ret: + logger.error("extract diff files of patch failed, {}".format(ret)) + return [] + + return [line.split()[-1] for line in out] + + def apply_patch(self, patch_path, leading=0): + """ + 打补丁 + :param patch_path: patch完整路径 + :param leading: Remove <n> leading path components + :return: boolean + """ + apply_patch_cmd = "cd {}; git apply -p{} {}".format(self._repo_dir, leading, patch_path) + ret, _, _ = shell_cmd_live(apply_patch_cmd) + + if ret: + 
#logger.error("apply patch failed, {}".format(ret)) + return False + + return True + + @classmethod + def apply_patch_at_dir(cls, patch_dir, patch_path, leading=0): + """ + 到指定目录下打补丁 + :param patch_path: patch完整路径 + :param patch_dir: patch使用路径 + :param leading: Remove <n> leading path components + :return: boolean + """ + #apply_patch_cmd = "cd {}; patch -l -t -p{} < {}".format(patch_dir, leading, patch_path) + apply_patch_cmd = "cd {}; git apply --ignore-whitespace -p{} {}".format(patch_dir, leading, patch_path) + ret, _, _ = shell_cmd_live(apply_patch_cmd) + + if ret: + #logger.error("apply patch failed, {}".format(ret)) + return False + + return True + + def commit_id_of_reverse_head_index(self, index=0): + """ + 对应的commit hash + :param index: HEAD~index + :return: hash string + """ + get_commit_cmd = "cd {}; git rev-parse {}".format(self._repo_dir, "HEAD~{}".format(index)) + ret, out, _ = shell_cmd_live(get_commit_cmd, cap_out=True) + + if ret: + logger.error("get commit id of index failed, {}".format(ret)) + return None + + return out[0] + + def checkout_to_commit(self, commit): + """ + git checkout + :param commit: HEAD~{} or SHA + :return: boolean + """ + checkout_cmd = "cd {}; git checkout {}".format(self._repo_dir, commit) + ret, _, _ = shell_cmd_live(checkout_cmd) + + if ret: + logger.warning("checkout failed, {}".format(ret)) + return False + + return True + + def get_tree_hashes(self, commit, number=0, with_merges=True): + """ + 获取tree对象hash值 + :param commit: HEAD~{} or SHA + :param number: hash numbers + :return: hash string + """ + if 0 == number: + tree_hashes_cmd = "cd {}; git log --format=%T {}".format(self._repo_dir, commit) + else: + tree_hashes_cmd = "cd {}; git log --format=%T -n{} {}".format(self._repo_dir, number, commit) + + if not with_merges: + tree_hashes_cmd = "{} --no-merges".format(tree_hashes_cmd) + + ret, out, _ = shell_cmd_live(tree_hashes_cmd, cap_out=True) + + if ret: + logger.error("get tree hashes failed, {}".format(ret)) + return None + + return out + + def fetch_commit_with_depth(self, depth): + """ + git fetch + :param depth: fetch 提交深度,0表示全部提交 + :return: boolean + """ + if 0 == depth: + fetch_cmd = "cd {}; git fetch --unshallow".format(self._repo_dir) + else: + fetch_cmd = "cd {}; git fetch --depth {}".format(self._repo_dir, depth) + + ret, _, _ = shell_cmd_live(fetch_cmd) + + if ret: + logger.error("fetch failed, {}".format(ret)) + return False + + return True + + def is_revert_commit(self, commit="HEAD~0", depth=0): + """ + 判断是否revert commit + :param commit: HEAD~{} or SHA + :param depth: 往前检查的深度,如果是0则表示检查全部 + :return: + """ + self.fetch_commit_with_depth(depth) + + tree_hashes = self.get_tree_hashes(commit, with_merges=False) + + if tree_hashes: + curr = tree_hashes[0] + for tree_hash in tree_hashes[1:]: + if curr == tree_hash: + return True + + return False diff --git a/src/proxy/gitee_proxy.py b/src/proxy/gitee_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..73b104efca8f0981a00b18c4a3f159c545eca811 --- /dev/null +++ b/src/proxy/gitee_proxy.py @@ -0,0 +1,119 @@ +# -*- encoding=utf-8 -*- +import logging +import yaml + +from src.proxy.requests_proxy import do_requests + +logger = logging.getLogger("common") + + +class GiteeProxy(object): + def __init__(self, owner, repo, token): + self._owner = owner + self._repo = repo + self._token = token + + def comment_pr(self, pr, comment): + """ + 评论pull request + :param pr: 本仓库PR的序数 + :param comment: 评论内容 + :return: 0成功,其它失败 + """ + logger.debug("comment pull request 
{}".format(pr)) + comment_pr_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/comments".format(self._owner, self._repo, pr) + data = {"access_token": self._token, "body": comment} + + rs = do_requests("post", comment_pr_url, body=data, timeout=10) + + if rs != 0: + logger.warning("comment pull request failed") + return False + + return True + + def create_tags_of_pr(self, pr, *tags): + """ + 创建pr tag + :param pr: 本仓库PR的序数 + :param tags: 标签 + :return: 0成功,其它失败 + """ + if not tags: + logger.debug("create tags, but no tags") + return True + + logger.debug("create tags {} of pull request {}".format(tags, pr)) + pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels?access_token={}".format(self._owner, self._repo, pr, self._token) + + rs = do_requests("post", pr_tag_url, body=list(tags), timeout=10) + + if rs != 0: + logger.warning("create tags failed") + return False + + return True + + def replace_all_tags_of_pr(self, pr, *tags): + """ + 替换所有pr tag + :param pr: 本仓库PR的序数 + :param tags: 标签 + :return: 0成功,其它失败 + """ + if not tags: + logger.debug("replace tags, but no tags") + return True + + logger.debug("replace all tags with {} of pull request {}".format(tag, pr)) + pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels?access_token={}".format(self._owner, self._repo, pr, self._token) + + rs = do_requests("put", pr_tag_url, body=list(tags), timeout=10) + if rs != 0: + logger.warning("replace tags failed") + return False + + return True + + def delete_tag_of_pr(self, pr, tag): + """ + 删除pr tag + :param pr: 本仓库PR的序数 + :param tag: 标签 + :return: 0成功,其它失败 + """ + logger.debug("delete tag {} of pull request {}".format(tag, pr)) + pr_tag_url = "https://gitee.com/api/v5/repos/{}/{}/pulls/{}/labels/{}?access_token={}".format(self._owner, self._repo, pr, tag, self._token) + + rs = do_requests("delete", pr_tag_url, timeout=10) + + if rs != 0: + logger.warning("delete tags failed") + return False + + return True + + @staticmethod + def load_community_repos(timeout=10): + """ + 获取社区repo + :param timeout: + :return: + """ + repos = {} + + def analysis(response): + """ + requests回调 + :param response: requests response object + :return: + """ + handler = yaml.safe_load(response.text) + repos.update({item["name"]: item["type"] for item in handler["repositories"]}) + logger.info("repos from community: {}".format(len(repos))) + + community_repo_url = "https://gitee.com/openeuler/community/raw/master/repository/src-openeuler.yaml" + logger.info("requests repos from community, this will take multi seconds") + do_requests("get", url=community_repo_url, timeout=timeout, obj=analysis) + + return repos diff --git a/src/proxy/jenkins_patch.py b/src/proxy/jenkins_patch.py new file mode 100644 index 0000000000000000000000000000000000000000..4fa3ef2b527651a66eb15be7476e3bed37cd75f9 --- /dev/null +++ b/src/proxy/jenkins_patch.py @@ -0,0 +1,20 @@ +# -*- encoding=utf-8 -*- +from urllib import quote as urlquote + +from jenkinsapi.jenkinsbase import JenkinsBase + +# hack, bug when if job under baseurl is not folder +# when use jenkins.jenkins src host +def resolve_job_folders(self, jobs): + for job in list(jobs): + if 'color' not in job.keys(): + jobs.remove(job) + jobs += self.process_job_folder(job, self.baseurl) + else: + job["url"] = '%s/job/%s' % (self.baseurl, urlquote(job['name'])) + + return jobs + + +old = JenkinsBase.resolve_job_folders +JenkinsBase.resolve_job_folders = resolve_job_folders diff --git a/src/proxy/jenkins_proxy.py b/src/proxy/jenkins_proxy.py new file mode 100644 index 
0000000000000000000000000000000000000000..2f73708769a1dd96df09e9aa7ff8ec8f2e426e38 --- /dev/null +++ b/src/proxy/jenkins_proxy.py @@ -0,0 +1,165 @@ +# -*- encoding=utf-8 -*- +import logging +import re + +from jenkinsapi.jenkins import Jenkins # not friendly when job in folders +import src.proxy.jenkins_patch + +logger = logging.getLogger("common") + + +class JenkinsProxy(object): + """ + Jenkins 代理,实现常见的jenkins操作 + """ + def __init__(self, base_url, username, token, timeout=10): + """ + + :param base_url: + :param username: 用户名 + :param token: + :param timeout: + """ + self._username = username + self._token = token + self._timeout = timeout + self._jenkins = Jenkins(base_url, username=username, password=token, timeout=timeout) + + def create_job(self, job, config): + """ + 创建任务 + :param job: 任务名 + :param config: 任务描述,xml + :return: True / False + """ + try: + self._jenkins.create_job(job, config) + return True + except Exception as e: + logger.exception("create job exception, {}".format(e)) + return False + + def update_job(self, job, config): + """ + 更新任务 + :param job: 任务名 + :param config: 任务描述,xml + :return: True / False + """ + try: + jks_job = self._jenkins[job] + jks_job.update_config(config) + return True + except Exception as e: + logger.exception("update job exception, {}".format(e)) + return False + + def get_config(self, job): + """ + 获取任务描述,xml + :param job: 任务名 + :return: None if job not exist + """ + try: + return self._jenkins[job].get_config() + except Exception as e: + logger.exception("get config exception, {}".format(e)) + return None + + def get_build(self, job, build_no): + """ + 获取任务build + :param job: 任务名 + :param build_no: build编号 + :return: None if job not exist + """ + try: + return self._jenkins[job].get_build(build_no) + except Exception as e: + logger.exception("get job build exception, {}".format(e)) + return None + + @classmethod + def get_grandpa_build(cls, build): + """ + 获取上游的上游job build + :param build: + :return: + """ + try: + parent_build = build.get_upstream_build() + return parent_build.get_upstream_build() if parent_build else None + except Exception as e: + logger.exception("get grandpa build exception, {}".format(e)) + return None + + def _get_upstream_jobs(self, job): + """ + 获取upstream jobs + jenkinsapi提供的接口不支持跨目录操作 + :param job: Jenkins Job object + :return: + """ + logger.debug("get upstream jobs of {}".format(job._data["fullName"])) + jobs = [] + for project in job._data["upstreamProjects"]: # but is the only way of get upstream projects info + url = project.get("url") + name = project.get("name") + logger.debug("upstream project: {} {}".format(url, name)) + + m = re.match("(.*)/job/.*", url) # remove last part of job url, greedy match + base_url = m.group(1) + logger.debug("base url {}".format(base_url)) + + try: + j = Jenkins(base_url, self._username, self._token, timeout=self._timeout) + jobs.append(j[name]) + except Exception as e: + logger.exception("get job of {} exception".format(url)) + continue + + return jobs + + def get_upstream_builds(self, build): + """ + 菱形任务工作流时,Jenkins提供的接口不支持多个upstream build + A + / \ + B C + \ / + D + :param build: + :return: + """ + upstream_jobs = self._get_upstream_jobs(build.job) + + cause_build_id = build.get_upstream_build_number() + cause_job_name = build.get_upstream_job_name() + + cause_job = None + for upstream_job in upstream_jobs: + if upstream_job._data["fullName"] == cause_job_name: + cause_job = upstream_job + break + if cause_job is None: + logger.error("get cause job failed") + return [] + + 
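+ # The cause build's own upstream build number identifies the shared
+ # ancestor (A in the diamond above); sibling builds are matched by
+ # comparing their upstream build numbers against it.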
diff --git a/src/proxy/obs_proxy.py b/src/proxy/obs_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..f678d5a0177d30dd7b5a6a5485b27503490eb381
--- /dev/null
+++ b/src/proxy/obs_proxy.py
@@ -0,0 +1,103 @@
+# -*- encoding=utf-8 -*-
+import os
+import shutil
+import logging
+
+from src.utils.shell_cmd import shell_cmd_live
+
+logger = logging.getLogger("common")
+
+
+class OBSProxy(object):
+    @staticmethod
+    def is_project_has_package(project, package):
+        """
+        Check whether the package exists in the project
+        :param project: obs project name
+        :param package: package name
+        :return: True / False
+        """
+        return bool(OBSProxy.list_project(project, package))
+
+    @staticmethod
+    def list_project(project, package=""):
+        """
+        List the packages of a project
+        :param project: obs project name
+        :param package: package name, optional
+        :return: None on error, otherwise the command output lines
+        """
+        cmd = "osc ll {} {}".format(project, package)
+        ret, rs, _ = shell_cmd_live(cmd, cap_out=True)
+        if ret:
+            logger.error("list project package error, {}".format(ret))
+            return None
+
+        return rs
+
+    @staticmethod
+    def list_repos_of_arch(project, package, arch):
+        """
+        List the repos of a package for one architecture
+        :param project: obs project name
+        :param package: package name
+        :param arch: cpu architecture
+        :return: list of {"repo": ..., "state": ...}
+        """
+        cmd = "osc results {} {} -a {}".format(project, package, arch)
+        ret, out, _ = shell_cmd_live(cmd, cap_out=True)
+        if ret:
+            logger.debug("list obs repos of arch error, {}".format(ret))
+            return []
+
+        rs = []
+        for line in out:
+            repo, arch, state = line.split()
+            rs.append({"repo": repo, "state": state})
+
+        return rs
+
+    @staticmethod
+    def checkout_package(project, package):
+        """
+        Check out a package from obs
+        :param project: obs project name
+        :param package: package name
+        :return: True on success, False on failure
+        """
+        # drop any cached checkout left in the pod from a previous run
+        os.path.isdir(project) and shutil.rmtree(project)
+
+        cmd = "osc co {} {}".format(project, package)
+        logger.info("osc co {} {}".format(project, package))
+        ret, _, _ = shell_cmd_live(cmd, verbose=True)
+
+        if ret:
+            logger.error("checkout package error, {}".format(ret))
+            return False
+
+        return True
+
+    @staticmethod
+    def build_package(project, package, repo, arch, debug=False):
+        """
+        Build a package locally with osc
+        :param project: obs project name
+        :param package: package name
+        :param repo: repo name
+        :param arch: cpu architecture
+        :param debug: generate debuginfo packages or not
+        :return: True on success, False on failure
+        """
+        package_path = "{}/{}".format(project, package)
+        cmd = "cd {}; osc build {} {} {} --no-verify --clean".format(
+            package_path, repo, arch, "--disable-debuginfo" if not debug else "")
+
+        logger.info("osc build {} {} {} --no-verify --clean".format(
+            repo, arch, "--disable-debuginfo" if not debug else ""))
+        ret, _, _ = shell_cmd_live(cmd, verbose=True)
+
+        if ret:
+            logger.error("build package error, {}".format(ret))
+            return False
+
+        return True
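A minimal sketch of how these helpers compose; project, package, and arch are placeholders, `osc` must be configured on the host, and the repo states checked below are an assumption about what `osc results` reports:

```python
# Hypothetical example; project/package/arch are placeholders.
from src.proxy.obs_proxy import OBSProxy

project, package, arch = "openEuler:Mainline", "zlib", "aarch64"

if OBSProxy.is_project_has_package(project, package):
    repos = OBSProxy.list_repos_of_arch(project, package, arch)
    for item in repos:
        if item["state"] in ("succeeded", "failed"):   # assumed state values
            if OBSProxy.checkout_package(project, package):
                OBSProxy.build_package(project, package, item["repo"], arch)
            break
```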
diff --git a/src/proxy/requests_proxy.py b/src/proxy/requests_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..e44274c6e494531d5d8d5e7748c40a4252d5a337
--- /dev/null
+++ b/src/proxy/requests_proxy.py
@@ -0,0 +1,73 @@
+# -*- encoding=utf-8 -*-
+import logging
+import requests
+from requests.auth import HTTPBasicAuth
+try:
+    from urllib import urlencode           # python 2
+except ImportError:
+    from urllib.parse import urlencode     # python 3
+
+logger = logging.getLogger("common")
+
+
+def do_requests(method, url, querystring=None, body=None, auth=None, timeout=30, obj=None):
+    """
+    http request
+    :param method: http method
+    :param url: http[s] schema
+    :param querystring: dict
+    :param body: json
+    :param auth: dict, basic auth with user and password
+    :param timeout: second
+    :param obj: callback object, supports list/dict/callable/object with a "cb" attribute
+    :return: 0 on success, nonzero on failure
+    """
+    try:
+        logger.debug("http requests, {} {} {}".format(method, url, timeout))
+        logger.debug("querystring: {}".format(querystring))
+        logger.debug("body: {}".format(body))
+
+        if method.lower() not in ["get", "post", "put", "delete"]:
+            return -1
+
+        if querystring:
+            url = "{}?{}".format(url, urlencode(querystring))
+
+        func = getattr(requests, method.lower())
+        if body:
+            if auth:
+                rs = func(url, json=body, timeout=timeout, auth=HTTPBasicAuth(auth["user"], auth["password"]))
+            else:
+                rs = func(url, json=body, timeout=timeout)
+        else:
+            if auth:
+                rs = func(url, timeout=timeout, auth=HTTPBasicAuth(auth["user"], auth["password"]))
+            else:
+                rs = func(url, timeout=timeout)
+
+        logger.debug("status_code {}".format(rs.status_code))
+
+        if rs.status_code not in [requests.codes.ok, requests.codes.created, requests.codes.no_content]:
+            return 1
+
+        # hand the response back to the caller through obj
+        if obj is not None:
+            if isinstance(obj, list):
+                obj.extend(rs.json())
+            elif isinstance(obj, dict):
+                obj.update(rs.json())
+            elif callable(obj):
+                obj(rs)
+            elif hasattr(obj, "cb"):
+                getattr(obj, "cb")(rs.json())
+
+        return 0
+    except requests.exceptions.SSLError as e:
+        logger.warning("requests {} ssl exception, {}".format(url, e))
+        return -2
+    except requests.exceptions.Timeout:
+        logger.warning("requests {} timeout".format(url))
+        return 2
+    except Exception as e:
+        logger.warning("requests exception, {}".format(e))
+        return 3
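A sketch of the calling convention: the return value is only a status code, and the response JSON is delivered through `obj`. The endpoint below is a placeholder:

```python
# Hypothetical example; the URL and token are placeholders.
from src.proxy.requests_proxy import do_requests

result = {}
rc = do_requests("get", "https://gitee.com/api/v5/repos/owner/repo",
                 querystring={"access_token": "xxx"}, obj=result)
if rc == 0:
    print(result.get("full_name"))   # response json was merged into the dict
else:
    print("request failed, rc={}".format(rc))
```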
diff --git a/src/requirements b/src/requirements
new file mode 100644
index 0000000000000000000000000000000000000000..30e813f0c12d1fb314fa1391865566e27fe019df
--- /dev/null
+++ b/src/requirements
@@ -0,0 +1,7 @@
+requests
+jenkinsapi
+colorlog
+threadpool
+PyYAML
+gevent==1.2.2
+jsonpath
diff --git a/src/utils/__init__.py b/src/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/utils/check_abi.py b/src/utils/check_abi.py
new file mode 100755
index 0000000000000000000000000000000000000000..c40e977326a1c6de22fcd5174feac2a5e301c76f
--- /dev/null
+++ b/src/utils/check_abi.py
@@ -0,0 +1,824 @@
+#!/usr/bin/python3
+#******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+#     http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# Author: wangchuangGG
+# Create: 2020-07-20
+# ******************************************************************************/
+
+"""
+(1) This script is used to check the ABI changes between the old
+    and new versions of dynamic libraries.
+    The merged diff result is saved as xxx_all_abidiff.out under the work path
+    (default: /var/tmp); the final report is merged into xxx_all_result.md.
+
+(2) This script depends on abidiff from the libabigail package.
+
+(3) Command parameters
+    This script accepts three subcommands: compare_rpm, compare_so and compare_rpms.
+    Running it without any parameter prints the help message.
+"""
+
+import argparse
+import subprocess
+import sys
+import os
+import logging
+import shutil
+import tempfile
+import re
+import requests
+
+target_sos = set()
+changed_sos = set()
+diff_result_file = ""
+
+
+def parse_command_line():
+    """Parse the command line arguments."""
+    parser = argparse.ArgumentParser(prog="check_abi")
+
+    parser.add_argument("-d", "--work_path", default="/var/tmp", nargs="?",
+                        help="The work path to put rpm2cpio files and results"
+                             " (e.g. /home/tmp_abidiff, default: /var/tmp/)")
+    parser.add_argument("-o", "--result_output_file", default="", nargs="?",
+                        help="The result file"
+                             " (e.g. /home/result.md)")
+    parser.add_argument("-a", "--show_all_info", action="store_true", default=False,
+                        help="Show all info, including changes in member names")
+    parser.add_argument("-v", "--verbose", action="store_true", default=False,
+                        help="Show additional information")
+    parser.add_argument("-i", "--input_rpms_path", default="", nargs="?",
+                        help="Find the rpm packages in this path that call the changed interfaces"
+                             " (e.g. /home/rpms)")
+
+    subparser = parser.add_subparsers(dest='command_name',
+                                      help="Compare two RPMs, two .so files, or two RPM paths")
+
+    rpm_parser = subparser.add_parser('compare_rpm', help="Compare between two RPMs")
+    rpm_parser.add_argument("-r", "--rpms", required=True, nargs=2,
+                            metavar=('old_rpm', 'new_rpm'),
+                            help="Path or URL of both the old and new RPMs")
+    rpm_parser.add_argument("-d", "--debuginfo_rpm", nargs=2,
+                            metavar=('old_debuginfo_rpm', 'new_debuginfo_rpm'),
+                            required=False,
+                            help="Path or URL of both the old and new debuginfo RPMs, "
+                                 "corresponding to the compared RPMs.")
+    rpm_parser.set_defaults(func=process_with_rpm)
+
+    so_parser = subparser.add_parser('compare_so', help="Compare between two .so files")
+    so_parser.add_argument("-s", "--sos", required=True, nargs=2,
+                           metavar=('old_so', 'new_so'),
+                           help="Path or URL of both the old and new .so files")
+    so_parser.add_argument("-f", "--debuginfo_path", nargs=2, required=False,
+                           metavar=('old_debuginfo_path', 'new_debuginfo_path'),
+                           help="Path or URL of both the old and new debuginfo files, "
+                                "corresponding to the compared .so files.")
+    so_parser.set_defaults(func=process_with_so)
+
+    rpms_parser = subparser.add_parser('compare_rpms', help="Compare between two RPM paths")
+    rpms_parser.add_argument("-p", "--paths", required=True, nargs=2,
+                             metavar=('old_path', 'new_path'),
+                             help="Path of both the old RPMs and new RPMs")
+    rpms_parser.set_defaults(func=process_with_rpms)
+
+    config = parser.parse_args()
+
+    if config.command_name is None:
+        parser.print_help()
+        sys.exit(0)
+    else:
+        return config
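The `set_defaults(func=...)` calls implement the usual argparse subcommand dispatch: after parsing, the caller invokes `config.func(config)`. A self-contained sketch of the pattern:

```python
# Self-contained illustration of the set_defaults(func=...) dispatch pattern.
import argparse

def greet(config):
    print("hello, {}".format(config.name))

parser = argparse.ArgumentParser(prog="demo")
sub = parser.add_subparsers(dest="command_name")
greet_parser = sub.add_parser("greet")
greet_parser.add_argument("-n", "--name", default="world")
greet_parser.set_defaults(func=greet)

config = parser.parse_args(["greet", "-n", "openEuler"])
config.func(config)   # dispatches to greet()
```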
+
+
+def list_so_files(path, add_global):
+    """
+    Generate a list of all .so files in the directory.
+    """
+    # known suffixes to skip; we cannot rely on a numeric-suffix check,
+    # because some .so files use a complex version scheme.
+    exception_list = ["hmac", "debug", "socket"]
+    so_files = set()
+    for dirpath, _, files in os.walk(path):
+        for filename in files:
+            fp = os.path.join(dirpath, filename)
+            if os.path.islink(fp):
+                continue
+            if filename.split(".")[-1] in exception_list:
+                continue
+            if ".so" in filename:
+                logging.debug(".so file found:%s", fp)
+                so_files.add(fp)
+                if add_global:
+                    target_sos.add(filename)
+    return so_files
+
+
+def find_all_so_file(path1, path2):
+    """
+    Generate a mapping between the previous and current .so files
+    """
+    all_so_pair = {}
+    previous_sos = list_so_files(path1, True)
+    current_sos = list_so_files(path2, True)
+    logging.debug("previous_so:%s", previous_sos)
+    logging.debug("current_so:%s", current_sos)
+    prev_matched = set()
+    curr_matched = set()
+    if previous_sos and current_sos:
+        for so_file1 in previous_sos:
+            for so_file2 in current_sos:
+                base_name1 = (os.path.basename(so_file1)).split('.so')[0]
+                base_name2 = (os.path.basename(so_file2)).split('.so')[0]
+                if base_name1 == base_name2:
+                    all_so_pair[so_file1] = so_file2
+                    prev_matched.add(so_file1)
+                    curr_matched.add(so_file2)
+    else:
+        logging.info("Not found so files")
+        return all_so_pair
+
+    prev_left = previous_sos - prev_matched
+    curr_left = current_sos - curr_matched
+
+    if prev_left:
+        logging.info("Unmatched .so file in previous version")
+        logging.info("Usually means deleted .so in current version")
+        logging.info("%s\n", prev_left)
+        for so_name in prev_left:
+            changed_sos.add(os.path.basename(so_name))
+    if curr_left:
+        logging.info("Unmatched .so file in current version")
+        logging.info("Usually means newly added .so in current version")
+        logging.info("%s\n", curr_left)
+    logging.debug("mapping of .so files:%s\n", all_so_pair)
+    return all_so_pair
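The pairing rule in `find_all_so_file` matches libraries by the basename before the first `.so`, so differing version suffixes still pair up. A small illustration with sample paths:

```python
# Illustration of the basename pairing rule used by find_all_so_file.
import os

old = "/tmp/old/usr/lib64/libcrypto.so.1.1"
new = "/tmp/new/usr/lib64/libcrypto.so.3"

base_old = os.path.basename(old).split(".so")[0]   # "libcrypto"
base_new = os.path.basename(new).split(".so")[0]   # "libcrypto"
assert base_old == base_new   # paired for abidiff despite different versions
```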
+ """ + merged_file = os.path.join(work_path, "{}_all_abidiff.out".format(rpm_base_name)) + if os.path.exists(merged_file): + subprocess.run("rm -rf {}".format(merged_file), shell=True) + + ofile = open(merged_file, "a+") + ofile.write("# Functions changed info\n") + for diff_file in all_abidiff_files: + diff_file_name = os.path.basename(diff_file) + ofile.write("---------------diffs in {}:----------------\n".format(diff_file_name)) + for txt in open(diff_file, "r"): + ofile.write(txt) + ofile.close() + return merged_file + +def do_abidiff(config, all_so_pair, work_path, base_name, debuginfo_path): + """ + Exec the abidiff and write result to files. + return the abidiff returncode. + """ + if not all_so_pair: + logging.info("There are no .so files to compare") + return 0 + + if debuginfo_path: + logging.debug("old_debuginfo_path:%s\nnew_debuginfo_path:%s", + debuginfo_path[0], debuginfo_path[1]) + with_debuginfo = True + else: + with_debuginfo = False + + return_code = 0 + all_abidiff_files = [] + for old_so_file in all_so_pair: + new_so_file = all_so_pair[old_so_file] + logging.debug("begin abidiff between %s and %s", old_so_file, new_so_file) + + abidiff_file = os.path.join(work_path, + "{}_{}_abidiff.out".format(base_name, + os.path.basename(new_so_file))) + + so_options = "{} {}".format(old_so_file, new_so_file) + + if config.show_all_info: + additional_options = "--harmless" + else: + additional_options = "--changed-fns --deleted-fns --added-fns" + + if with_debuginfo: + debug_options = "--d1 {} --d2 {}".format(debuginfo_path[0], debuginfo_path[1]) + else: + debug_options = "" + + abidiff_template = "abidiff {so_options} {debug_options} {additional_options} > {difffile}" + abidiff_cmd = abidiff_template.format(so_options=so_options, + debug_options=debug_options, + additional_options=additional_options, + difffile=abidiff_file) + + ret = subprocess.run(abidiff_cmd, shell=True) + + all_abidiff_files.append(abidiff_file) + logging.info("result write in: %s", abidiff_file) + return_code |= ret.returncode + if return_code != 0: + global diff_result_file + diff_result_file = merge_all_abidiff_files(all_abidiff_files, work_path, base_name) + logging.info("abidiff all results writed in: %s", diff_result_file) + return return_code + + +def scan_target_functions_with_so(so_file, temp_path, rpm_require_functions, diff_functions): + """ + Scan target functions witch the .so file require + """ + require_func_file = os.path.join(temp_path, "calls_func_file.txt") + subprocess.run("nm -D -C -u {} > {}".format(so_file, require_func_file), shell=True) + with open(require_func_file, 'r') as fd: + lines = fd.readlines() + fd.close() + + for func_name in diff_functions: + for line in lines: + if func_name in re.split(r'[(<:\s]', line): + rpm_require_functions.add(func_name) + + +def check_rpm_require_taget_functions(rpm_package, temp_path, rpm_require_functions, diff_functions): + """ + Check whether the rpm package calls target functions + """ + if not os.path.exists(rpm_package) or not rpm_package.endswith(".rpm"): + logging.error("the rpm_package not exists:%s", rpm_package) + return False + logging.debug("\n----check the rpm whether calls diff_functions:%s----", rpm_package) + + rpm2cpio_path = os.path.join(temp_path, "other_rpm2cpio") + if os.path.exists(rpm2cpio_path): + shutil.rmtree(rpm2cpio_path) + os.makedirs(rpm2cpio_path, exist_ok=True) + + do_rpm2cpio(rpm2cpio_path, rpm_package) + so_files = list_so_files(rpm2cpio_path, False) + + for so_file in so_files: + 
+
+
+def scan_diff_functions():
+    """
+    Scan all diff functions from the merged abidiff result
+    """
+    diff_functions = set()
+    global diff_result_file
+    if len(diff_result_file) == 0:
+        return diff_functions
+    with open(diff_result_file, 'r') as fd:
+        lines = fd.readlines()
+    for line in lines:
+        if "(" in line and ("function " in line or "method " in line):
+            func_name = line.split("(")[0].split()[-1].split("<")[0].split("::")[-1]
+            if func_name != "void":
+                diff_functions.add(func_name)
+        if "SONAME changed from" in line:
+            old_so_name = line.split(" to ")[0].split()[-1].replace("'", "")
+            changed_sos.add(old_so_name)
+            logging.debug("------ changed_sos:%s ------", changed_sos)
+    logging.debug("all_diff_functions:%s", diff_functions)
+    return diff_functions
+
+
+def check_rpm_require_changed_sos(work_path, rpm_package, require_sos, rpm_base_name):
+    """
+    Check whether the rpm requires any of the changed .so files
+    """
+    require_changed_sos = False
+    write_name = os.path.basename(rpm_package) + " requires these .so files whose version changed:"
+    for so_name in changed_sos:
+        base_name = so_name.split(".so")[0]
+        if base_name in require_sos:
+            logging.debug("this rpm requires a changed .so file:%s", base_name)
+            require_changed_sos = True
+            write_name += (" " + so_name)
+
+    if require_changed_sos:
+        effect_info_file = os.path.join(work_path, "{}_sos_changed_effect_rpms.out".format(rpm_base_name))
+        if not os.path.exists(effect_info_file):
+            f = open(effect_info_file, "a+", encoding="utf-8")
+            f.write("# RPMs affected by .so version changes\n")
+            f.close()
+        f = open(effect_info_file, "a+", encoding="utf-8")
+        f.write("{}\n".format(write_name))
+        f.close()
+        logging.info("rpms affected by .so changes written to:%s", effect_info_file)
+    return require_changed_sos
+
+
+def check_rpm_require_taget_sos(rpm_package, temp_path, work_path, base_name):
+    """
+    Check whether the rpm requires any of the target .so files
+    """
+    if not os.path.exists(rpm_package) or not rpm_package.endswith(".rpm"):
+        logging.error("the rpm_package does not exist:%s", rpm_package)
+        return False
+
+    #logging.debug("\n---check if the rpm requires target .so files:%s---", rpm_package)
+    require_info_file = os.path.join(temp_path, "require_info_file.txt")
+    subprocess.run("rpm -qpR {} > {}".format(rpm_package, require_info_file), shell=True, stderr=subprocess.PIPE)
+    logging.debug("required .so info written to:%s", require_info_file)
+
+    with open(require_info_file, 'r') as fd:
+        lines = fd.readlines()
+    require_sos = set()
+    for line in lines:
+        require_so_name = re.split(r'[(.><\s]', line)[0]
+        require_sos.add(require_so_name)
+    logging.debug("\n------%s this rpm requires .so files:%s", rpm_package, require_sos)
+
+    if check_rpm_require_changed_sos(work_path, rpm_package, require_sos, base_name):
+        return True
+
+    for so_name in target_sos:
+        so_base_name = so_name.split(".so")[0]
+        if so_base_name in require_sos:
+            logging.debug("this rpm calls target .so file:%s", so_base_name)
+            return True
+    logging.debug("this rpm does not require target .so files")
+    return False
+
+
+def validate_sos(config):
+    """
+    Validate the command arguments
+    """
+    for so in config.sos:
+        if not os.path.isfile(so) or ".so" not in so:
+            logging.error("%s does not exist or is not a .so file", so)
+            sys.exit(0)
+
+    if config.debuginfo_path:
+        for d in config.debuginfo_path:
+            if not os.path.exists(d):
+                logging.error("%s does not exist", d)
+                sys.exit(0)
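The chained splits in `scan_diff_functions` peel an unqualified function name out of an abidiff report line. For example (the report line below is representative, not taken from a real run):

```python
# Representative abidiff line; shows how scan_diff_functions extracts the name.
line = "  [C] 'function int ns::Widget::resize(int, int)' has sub-type changes:"

func_name = line.split("(")[0].split()[-1].split("<")[0].split("::")[-1]
print(func_name)   # -> "resize"
```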
+
+
+def check_result(returncode):
+    """
+    Interpret the return code of abidiff
+    """
+    ABIDIFF_ERROR_BIT = 1
+    if returncode == 0:
+        logging.info("No ABI differences found.")
+    elif returncode & ABIDIFF_ERROR_BIT:
+        logging.info("An unexpected error happened to abidiff")
+    else:
+        logging.info("ABI differences found.")
+
+
+def find_target_rpms(rpms_path, temp_path, work_path, base_name):
+    """
+    Find the target rpms which require the target .so files
+    """
+    logging.debug("finding target rpms in all rpms, target sos:%s............", target_sos)
+    count = 0
+    target_rpms = set()
+    for dirpath, dirnames, files in os.walk(rpms_path):
+        for filename in files:
+            fp = os.path.join(dirpath, filename)
+            if fp.endswith(".rpm"):
+                if check_rpm_require_taget_sos(fp, temp_path, work_path, base_name):
+                    target_rpms.add(fp)
+                count = count + 1
+                if count % 500 == 0:
+                    logging.info("count: %d", count)
+    logging.debug("names of all rpms which call target .so files:%s", target_rpms)
+    return target_rpms
+
+
+def find_effect_rpms(rpms_require_target_sos, temp_path, diff_functions):
+    """
+    Find the rpms which require the target functions
+    """
+    effect_rpms = set()
+    for rpm_package in rpms_require_target_sos:
+        rpm_require_functions = set()
+        if check_rpm_require_taget_functions(rpm_package, temp_path,
+                                             rpm_require_functions, diff_functions):
+            write_name = os.path.basename(rpm_package) + ":"
+            for func in rpm_require_functions:
+                write_name += (" " + func)
+            effect_rpms.add(write_name)
+    logging.debug("all affected rpms:%s", effect_rpms)
+    return effect_rpms
+
+
+def scan_old_rpms(rpms_dir):
+    """
+    Scan all the old rpms
+    """
+    files = os.listdir(rpms_dir)
+    rpm_names = set()
+    for file_name in files:
+        if file_name.endswith(".rpm") and "-debuginfo-" not in file_name and "-help-" not in file_name:
+            rpm_names.add(os.path.join(rpms_dir, file_name))
+    logging.debug("all old rpms:%s", rpm_names)
+    return rpm_names
+
+
+def find_new_rpm(old_rpm_name, rpms_dir):
+    """
+    Find the new rpm matching an old rpm name
+    """
+    old_rpm_basename = os.path.basename(old_rpm_name)
+    base_name1 = old_rpm_basename.rsplit("-", 2)[0]
+    arch_name1 = old_rpm_basename.split(".oe1.")[-1]
+    logging.info("\n------begin process rpm:%s------", old_rpm_basename)
+    files = os.listdir(rpms_dir)
+    for file_name in files:
+        if file_name.endswith(".rpm") and "-debuginfo-" not in file_name and "-help-" not in file_name:
+            base_name2 = file_name.rsplit("-", 2)[0]
+            arch_name2 = file_name.split(".oe1.")[-1]
+            if base_name1 == base_name2 and arch_name1 == arch_name2:
+                logging.debug("found new rpm:%s", file_name)
+                return os.path.join(rpms_dir, file_name)
+    return None
+
+
+def find_debug_rpm(rpm_name, rpms_dir):
+    """
+    Find the debuginfo rpm matching an rpm name
+    """
+    files = os.listdir(rpms_dir)
+    for file_name in files:
+        if file_name.endswith(".rpm") and "-debuginfo-" in file_name:
+            file_basename = file_name.replace("-debuginfo-", "-")
+            if file_basename == os.path.basename(rpm_name):
+                return os.path.join(rpms_dir, file_name)
+    return None
+
+
+def write_result(result_file, merged_file):
+    """
+    Append the content of result_file to merged_file
+    """
+    if os.path.exists(result_file):
+        with open(result_file, "r") as fd:
+            lines = fd.readlines()
+        ofile = open(merged_file, "a+")
+        for line in lines:
+            ofile.write(line)
+        ofile.write("\n")
+        ofile.close()
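`find_new_rpm` pairs packages by filename: `rsplit('-', 2)[0]` strips the version and release, and `split('.oe1.')[-1]` keeps the architecture suffix. For example, with sample names:

```python
# How find_new_rpm pairs old/new rpm filenames (sample names for illustration).
old = "zlib-1.2.11-16.oe1.aarch64.rpm"
new = "zlib-1.2.11-17.oe1.aarch64.rpm"

assert old.rsplit("-", 2)[0] == new.rsplit("-", 2)[0] == "zlib"            # name without version-release
assert old.split(".oe1.")[-1] == new.split(".oe1.")[-1] == "aarch64.rpm"   # same architecture
```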
+
+
+def merge_all_result(config, base_name):
+    """
+    Merge all result files into one .md file
+    """
+    work_path = os.path.abspath(config.work_path)
+    files_last_name = ["_sos_changed_effect_rpms.out", "_effect_rpms_list.out", "_all_abidiff.out"]
+    result_files = [os.path.join(work_path, "{}{}".format(base_name, x)) for x in files_last_name]
+    logging.debug("result_files:%s", result_files)
+
+    merged_file = os.path.join(work_path, "{}_all_result.md".format(base_name))
+
+    if config.result_output_file:
+        merged_file = os.path.abspath(config.result_output_file)
+    if os.path.exists(merged_file):
+        subprocess.run("rm -rf {}".format(merged_file), shell=True)
+    for x in result_files:
+        write_result(x, merged_file)
+    logging.info("-------------all results written to:%s", merged_file)
+
+
+def get_src_name(temp_path, rpm_name):
+    """Get the source package name of a binary rpm via `rpm -qpi`"""
+    rpm_qi_info_file = os.path.join(temp_path, "rpmqi.txt")
+    subprocess.run("rpm -qpi {} > {}".format(rpm_name, rpm_qi_info_file), shell=True, stderr=subprocess.PIPE)
+    with open(rpm_qi_info_file, 'r') as fd:
+        lines = fd.readlines()
+    for line in lines:
+        if line.startswith("Source RPM : "):
+            src_name = line.split("Source RPM : ")[-1].split("\n")[0]
+            return src_name.rsplit("-", 2)[0]
+
+
+def get_requeset_by_name(input_rpms_path, temp_path, rpm_name, src_name):
+    """Find the rpms that require this rpm, from the published package pages"""
+    arch_names = {
+        "aarch64": os.path.join(temp_path, "aarch64.html"),
+        "noarch": os.path.join(temp_path, "noarch.html"),
+    }
+    rpm_base_name = os.path.basename(rpm_name).rsplit("-", 2)[0]
+    rpm_arch_name = os.path.basename(rpm_name).split(".oe1")[-1]
+    rpm_final_name = ""
+    has_found = False
+    for name in arch_names:
+        subprocess.run("wget {}/{}/ -O {}".format(input_rpms_path, name, arch_names[name]), shell=True)
+        if not has_found:
+            with open(arch_names[name], "r") as fd:
+                lines = fd.readlines()
+            for lin in lines:
+                if rpm_base_name in lin and rpm_arch_name in lin:
+                    if "title=\"" in lin:
+                        find_rpm_name = lin.split("title=\"")[-1].split("\"")[0]
+                    else:
+                        find_rpm_name = lin.split("href=\"")[-1].split("\"")[0]
+                    if find_rpm_name.rsplit("-", 2)[0] == rpm_base_name and find_rpm_name.split(".oe1")[-1] == rpm_arch_name:
+                        rpm_final_name = find_rpm_name
+                        has_found = True
+    logging.info("------------rpm_name:%s rpm_final_name:%s------------------", rpm_name, rpm_final_name)
+    rpm_requeset_info = os.path.join(temp_path, "rpm_requeset.html")
+    project_name = "openEuler:Mainline"
+    if "/Factory/" in input_rpms_path:
+        project_name = "openEuler:Factory"
+    if "/20.03:/LTS/" in input_rpms_path:
+        project_name = "openEuler:20.03:LTS"
+    if "/20.09/" in input_rpms_path:
+        project_name = "openEuler:20.09"
+    req_addr = "http://117.78.1.88/package/binary/{}/{}/standard_aarch64/aarch64/{}/".format(
+        project_name, src_name, os.path.basename(rpm_final_name))
+    logging.info("find required_by info from: %s", req_addr)
+    req = requests.get(req_addr)
+    with open(rpm_requeset_info, "wb+") as fd:
+        fd.write(req.content)
+    with open(rpm_requeset_info, "r") as fd:
+        lines = fd.readlines()
+
+    required_by = set()
+    has_found_required_by = False
+    for line in lines:
+        if "Required by" in line:
+            has_found_required_by = True
+            continue
+        if has_found_required_by and "package/dependency/" in line:
+            required_by_name = line.split("\">")[-1].split("<")[0]
+            required_by.add(required_by_name)
+            continue
+        if has_found_required_by and "