gala-ragdoll/0003-conf-trace-info-and-conf-sync-optimize.patch

8171 lines
322 KiB
Diff
Raw Normal View History

diff --git a/config/gala-ragdoll.conf b/config/gala-ragdoll.conf
index 57ace32..8cb693a 100644
--- a/config/gala-ragdoll.conf
+++ b/config/gala-ragdoll.conf
@@ -5,21 +5,57 @@ user_email = "user_email"
[collect]
collect_address = "http://0.0.0.0"
-collect_api = "/demo/collectConf"
-collect_port = 11114
+collect_api = "/manage/config/collect"
+collect_port = 11111
[sync]
sync_address = "http://0.0.0.0"
-sync_api = "/demo/syncConf"
-sync_port = 11114
+sync_api = "/manage/config/sync"
+batch_sync_address = "http://0.0.0.0"
+batch_sync_api = "/manage/config/batch/sync"
+sync_port = 11111
[objectFile]
object_file_address = "http://0.0.0.0"
object_file_api = "/manage/config/objectfile"
object_file_port = 11111
+[sync_status]
+host_sync_status_address = "http://0.0.0.0"
+add_host_sync_status_api = "/manage/host/sync/status/add"
+delete_host_sync_status_api = "/manage/host/sync/status/delete"
+delete_all_host_sync_status_api = "/manage/all/host/sync/status/delete"
+host_sync_status_port = 11111
+
+[conf_trace]
+conf_trace_mgmt_address = "http://0.0.0.0"
+conf_trace_mgmt_api = "/conftrace/mgmt"
+conf_trace_delete_api = "/conftrace/delete"
+conf_trace_port = 11111
+
[ragdoll]
-port = 11114
+ip=127.0.0.1
+port=11114
+
+[mysql]
+ip=127.0.0.1
+port=3306
+database_name=aops
+engine_format=mysql+pymysql://@%s:%s/%s
+pool_size=100
+pool_recycle=7200
+
+[redis]
+ip=127.0.0.1
+port=6379
+
+[uwsgi]
+wsgi-file=manage.py
+daemonize=/var/log/aops/uwsgi/ragdoll.log
+http-timeout=600
+harakiri=600
+processes=2
+gevent=100
[log]
log_level = INFO
diff --git a/gala-ragdoll.spec b/gala-ragdoll.spec
index bdde9ce..33bbcdb 100644
--- a/gala-ragdoll.spec
+++ b/gala-ragdoll.spec
@@ -45,6 +45,9 @@ mkdir %{buildroot}/%{python3_sitelib}/ragdoll/config
install config/*.conf %{buildroot}/%{python3_sitelib}/ragdoll/config
mkdir -p %{buildroot}/%{_prefix}/lib/systemd/system
install service/gala-ragdoll.service %{buildroot}/%{_prefix}/lib/systemd/system
+install service/ragdoll %{buildroot}/%{_prefix}/bin/
+install service/ragdoll-filetrace %{buildroot}/%{_prefix}/bin/
+install service/ragdoll-filetrace.service %{buildroot}/%{_prefix}/lib/systemd/system
%pre
@@ -67,7 +70,11 @@ fi
%license LICENSE
/%{_sysconfdir}/ragdoll/gala-ragdoll.conf
%{_bindir}/ragdoll
+%{_bindir}/ragdoll-filetrace
%{_prefix}/lib/systemd/system/gala-ragdoll.service
+%{_prefix}/lib/systemd/system/ragdoll-filetrace.service
+%{_prefix}/bin/ragdoll
+%{_prefix}/bin/ragdoll-filetrace
%files -n python3-gala-ragdoll
@@ -77,6 +84,9 @@ fi
%changelog
+* Thu Jun 27 2024 zhangdaolong <zhangdaolong@isoftstone.com> - v1.4.0-4
+- Added real-time monitoring file function
+
* Mon Apr 17 2023 wenxin<shusheng.wen@outlook.com> - v1.3.0-3
- update the host id validate method for ragdoll
diff --git a/ragdoll/__main__.py b/ragdoll/__main__.py
deleted file mode 100644
index df65acb..0000000
--- a/ragdoll/__main__.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python3
-
-import connexion
-import configparser
-import os
-import ast
-
-from ragdoll import encoder
-from ragdoll.const.conf_handler_const import CONFIG
-from ragdoll.utils.yang_module import YangModule
-from ragdoll.utils.prepare import Prepare
-
-
-def main():
- # prepare to load config
- load_prepare()
- # load yang modules
- load_yang()
- # load port for ragdoll
- ragdoll_port = load_port()
- app = connexion.App(__name__, specification_dir='./swagger/')
- app.app.json_encoder = encoder.JSONEncoder
- app.add_api('swagger.yaml', arguments={'title': 'Configuration traceability'})
- app.run(port=ragdoll_port)
-
-
-def load_prepare():
- git_dir, git_user_name, git_user_email = load_conf()
- prepare = Prepare(git_dir)
- prepare.mdkir_git_warehose(git_user_name, git_user_email)
-
-
-def load_yang():
- yang_modules = YangModule()
-
-
-def load_conf():
- cf = configparser.ConfigParser()
- if os.path.exists(CONFIG):
- cf.read(CONFIG, encoding="utf-8")
- else:
- cf.read("config/gala-ragdoll.conf", encoding="utf-8")
- git_dir = ast.literal_eval(cf.get("git", "git_dir"))
- git_user_name = ast.literal_eval(cf.get("git", "user_name"))
- git_user_email = ast.literal_eval(cf.get("git", "user_email"))
- return git_dir, git_user_name, git_user_email
-
-
-def load_port():
- cf = configparser.ConfigParser()
- if os.path.exists(CONFIG):
- cf.read(CONFIG, encoding="utf-8")
- else:
- cf.read("config/gala-ragdoll.conf", encoding="utf-8")
- ragdoll_port = cf.get("ragdoll", "port")
- return ragdoll_port
-
-
-if __name__ == '__main__':
- main()
diff --git a/ragdoll/conf/__init__.py b/ragdoll/conf/__init__.py
new file mode 100644
index 0000000..0c44a2e
--- /dev/null
+++ b/ragdoll/conf/__init__.py
@@ -0,0 +1,26 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py
+@Time: 2024/3/4 9:37
+@Author: JiaoSiMao
+Description:
+"""
+from vulcanus.conf import Config
+
+from ragdoll.conf import default_config
+from ragdoll.conf.constant import MANAGER_CONFIG_PATH
+
+# read manager configuration
+configuration = Config(MANAGER_CONFIG_PATH, default_config)
+
diff --git a/ragdoll/conf/constant.py b/ragdoll/conf/constant.py
new file mode 100644
index 0000000..a14a0f9
--- /dev/null
+++ b/ragdoll/conf/constant.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2021. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+Time:
+Author:
+Description: manager constant
+"""
+import os
+
+from ragdoll.utils.git_tools import GitTools
+
+BASE_CONFIG_PATH = "/etc/ragdoll/"
+
+# path of manager configuration
+MANAGER_CONFIG_PATH = os.path.join(BASE_CONFIG_PATH, 'gala-ragdoll.conf')
+
+TARGETDIR = GitTools().target_dir
+
+# domain
+CREATE_DOMAIN = "/domain/createDomain"
+DELETE_DOMAIN = "/domain/deleteDomain"
+QUERY_DOMAIN = "/domain/queryDomain"
+
+# host
+ADD_HOST_IN_DOMAIN = "/host/addHost"
+DELETE_HOST_IN_DOMAIN = "/host/deleteHost"
+GET_HOST_BY_DOMAIN = "/host/getHost"
+
+# management conf
+ADD_MANAGEMENT_CONFS_IN_DOMAIN = "/management/addManagementConf"
+UPLOAD_MANAGEMENT_CONFS_IN_DOMAIN = "/management/uploadManagementConf"
+DELETE_MANAGEMENT_CONFS_IN_DOMAIN = "/management/deleteManagementConf"
+GET_MANAGEMENT_CONFS_IN_DOMAIN = "/management/getManagementConf"
+QUERY_CHANGELOG_OF_MANAGEMENT_CONFS_IN_DOMAIN = "/management/queryManageConfChange"
+
+# confs
+GET_SYNC_STATUS = "/confs/getDomainStatus"
+QUERY_EXCEPTED_CONFS = "/confs/queryExpectedConfs"
+QUERY_REAL_CONFS = "/confs/queryRealConfs"
+SYNC_CONF_TO_HOST_FROM_DOMAIN = "/confs/syncConf"
+QUERY_SUPPORTED_CONFS = "/confs/querySupportedConfs"
+COMPARE_CONF_DIFF = "/confs/domain/diff"
+BATCH_SYNC_CONF_TO_HOST_FROM_DOMAIN = "/confs/batch/syncConf"
diff --git a/ragdoll/conf/default_config.py b/ragdoll/conf/default_config.py
new file mode 100644
index 0000000..094111a
--- /dev/null
+++ b/ragdoll/conf/default_config.py
@@ -0,0 +1,59 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2021. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+Time:
+Author:
+Description: default config of manager
+"""
+git = {"GIT_DIR": "/home/confTraceTest", "USER_NAME": "user_name", "USER_EMAIL": "user_email"}
+
+collect = {
+ "COLLECT_ADDRESS": "http://127.0.0.1",
+ "COLLECT_API": "/manage/config/collect",
+ "COLLECT_PORT": 11111
+}
+
+sync = {
+ "SYNC_ADDRESS": "http://127.0.0.1",
+ "SYNC_API": "/manage/config/sync",
+ "BATCH_SYNC_ADDRESS": "http://127.0.0.1",
+ "BATCH_SYNC_API": "/manage/config/batch/sync",
+ "SYNC_PORT": 11111
+}
+
+objectFile = {"OBJECT_FILE_ADDRESS": "http://127.0.0.1", "OBJECT_FILE_API": "/manage/config/objectfile",
+ "OBJECT_FILE_PORT": 11111}
+
+sync_status = {
+ "HOST_SYNC_STATUS_ADDRESS": "http://127.0.0.1",
+ "ADD_HOST_SYNC_STATUS_API": "/manage/host/sync/status/add",
+ "DELETE_HOST_SYNC_STATUS_API": "/manage/host/sync/status/delete",
+ "HOST_SYNC_STATUS_PORT": 11111
+}
+
+ragdoll = {"IP": "127.0.0.1", "PORT": 11114}
+
+mysql = {
+ "IP": "127.0.0.1",
+ "PORT": 3306,
+ "DATABASE_NAME": "aops",
+ "ENGINE_FORMAT": "mysql+pymysql://@%s:%s/%s",
+ "POOL_SIZE": 100,
+ "POOL_RECYCLE": 7200,
+}
+
+redis = {"IP": "127.0.0.1", "PORT": 6379}
+
+log = {"LOG_LEVEL": "INFO", "LOG_DIR": "/var/log/aops", "MAX_BYTES": 31457280, "BACKUP_COUNT": 40}
+
+
diff --git a/ragdoll/config_model/bash_config.py b/ragdoll/config_model/bash_config.py
index 73c1777..36016a9 100644
--- a/ragdoll/config_model/bash_config.py
+++ b/ragdoll/config_model/bash_config.py
@@ -59,8 +59,7 @@ class BashConfig(BaseHandlerConfig):
dst_conf_dict = json.loads(dst_conf)
src_conf_dict = json.loads(src_conf)
for src_conf in src_conf_dict:
- str_src_conf = str(src_conf)
- if str(dst_conf_dict).find(str_src_conf) == -1:
+ if src_conf not in dst_conf_dict:
res = NOT_SYNCHRONIZE
break
return res
diff --git a/ragdoll/config_model/fstab_config.py b/ragdoll/config_model/fstab_config.py
index 416fd62..5f712e8 100644
--- a/ragdoll/config_model/fstab_config.py
+++ b/ragdoll/config_model/fstab_config.py
@@ -13,7 +13,6 @@
import re
from ragdoll.config_model.base_handler_config import BaseHandlerConfig
from ragdoll.const.conf_handler_const import FSTAB_COLUMN_NUM
-from ragdoll.log.log import LOGGER
class FstabConfig(BaseHandlerConfig):
"""
diff --git a/ragdoll/config_model/hostname_config.py b/ragdoll/config_model/hostname_config.py
new file mode 100644
index 0000000..bca3877
--- /dev/null
+++ b/ragdoll/config_model/hostname_config.py
@@ -0,0 +1,68 @@
+# ******************************************************************************
+# Copyright (C) 2023 isoftstone Technologies Co., Ltd. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+Time: 2023-07-19 11:23:00
+Author: liulei
+Description: text type config analyze
+"""
+import json
+
+from ragdoll.config_model.base_handler_config import BaseHandlerConfig
+from ragdoll.const.conf_handler_const import SYNCHRONIZED, NOT_SYNCHRONIZE
+from ragdoll.log.log import LOGGER
+
+
+class HostnameConfig(BaseHandlerConfig):
+ @staticmethod
+ def parse_conf_to_dict(conf_info):
+ """
+ 将配置信息conf_info转为list但是并未校验配置项是否合法
+ """
+ conf_dict_list = list()
+
+ conf_list = conf_info.strip().splitlines()
+ for line in conf_list:
+ if line is None or line.strip() == '' or line.strip()[0] in '#;':
+ continue
+
+ strip_line = str(line.strip()).replace("\t", " ")
+ conf_dict_list.append(strip_line)
+ return conf_dict_list
+
+ def read_conf(self, conf_info):
+ conf_dict_list = self.parse_conf_to_dict(conf_info)
+ if conf_dict_list:
+ self.conf = conf_dict_list
+
+ def write_conf(self):
+ content = ""
+ for value in self.conf:
+ if value is not None:
+ content = content + value + "\n"
+ return content
+
+ def conf_compare(self, src_conf, dst_conf):
+ """
+ desc: 比较dst_conf和src_conf是否相同dst_conf和src_conf均为序列化后的配置信息。
+ returndst_conf和src_conf相同返回SYNCHRONIZED
+ dst_conf和src_conf不同返回NOT_SYNCHRONIZE
+ """
+ res = SYNCHRONIZED
+ dst_conf_dict = json.loads(dst_conf)
+ src_conf_dict = json.loads(src_conf)
+ if not dst_conf_dict or not src_conf_dict:
+ res = NOT_SYNCHRONIZE
+ return res
+ if dst_conf_dict[0] != src_conf_dict[0]:
+ res = NOT_SYNCHRONIZE
+
+ return res
diff --git a/ragdoll/config_model/hosts_config.py b/ragdoll/config_model/hosts_config.py
index 50660ec..1bc9452 100644
--- a/ragdoll/config_model/hosts_config.py
+++ b/ragdoll/config_model/hosts_config.py
@@ -19,7 +19,6 @@ import json
from ragdoll.config_model.base_handler_config import BaseHandlerConfig
from ragdoll.log.log import LOGGER
-from ragdoll.utils.yang_module import YangModule
from ragdoll.const.conf_handler_const import NOT_SYNCHRONIZE, SYNCHRONIZED
ipv4 = re.compile('^((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)$')
@@ -64,7 +63,7 @@ class HostsConfig(BaseHandlerConfig):
ip_domain = re.split("\s+", line)
if len(ip_domain) == 1:
error_conf = True
- LOGGER.warning("ip_domain contains incorrect formatting")
+ LOGGER.error("Ip_domain contains incorrect formatting")
break
ip = ip_domain[0]
if ipv4.match(ip) or ipv6.match(ip):
@@ -73,7 +72,7 @@ class HostsConfig(BaseHandlerConfig):
res[ip] = str_value
else:
error_conf = True
- LOGGER.warning("ip does not meet the ipv4 or ipv6 format")
+ LOGGER.error("Ip does not meet the ipv4 or ipv6 format")
break
return error_conf, res
@@ -84,7 +83,7 @@ class HostsConfig(BaseHandlerConfig):
self.conf = dict_res
@staticmethod
- def conf_compare(dst_conf, src_conf):
+ def conf_compare(src_conf, dst_conf):
res = SYNCHRONIZED
dst_conf_dict = json.loads(dst_conf)
src_conf_dict = json.loads(src_conf)
diff --git a/ragdoll/config_model/ini_config.py b/ragdoll/config_model/ini_config.py
index 9f9dc3d..0c0bd50 100644
--- a/ragdoll/config_model/ini_config.py
+++ b/ragdoll/config_model/ini_config.py
@@ -11,7 +11,6 @@
# ******************************************************************************/
import re
-import json
import copy
from collections import OrderedDict as _default_dict
diff --git a/ragdoll/config_model/sshd_config.py b/ragdoll/config_model/sshd_config.py
index e499bb2..35ed872 100644
--- a/ragdoll/config_model/sshd_config.py
+++ b/ragdoll/config_model/sshd_config.py
@@ -83,7 +83,7 @@ class SshdConfig():
self.conf = conf_list
@staticmethod
- def conf_compare(dst_conf, src_conf):
+ def conf_compare(src_conf, dst_conf):
"""
desc: 比较dst_conf和src_conf是否相同dst_conf和src_conf均为序列化后的配置信息。
returndst_conf和src_conf相同返回SYNCHRONIZED
@@ -93,9 +93,9 @@ class SshdConfig():
dst_conf_dict = json.loads(dst_conf)
src_conf_dict = json.loads(src_conf)
- for dst_conf in dst_conf_dict:
- str_dst_conf = str(dst_conf)
- if str(src_conf_dict).find(str_dst_conf) == -1:
+ for src_conf in src_conf_dict:
+ str_src_conf = str(src_conf)
+ if str(dst_conf_dict).find(str_src_conf) == -1:
res = NOT_SYNCHRONIZE
break
return res
diff --git a/ragdoll/config_model/text_config.py b/ragdoll/config_model/text_config.py
index dadd915..dd4165a 100644
--- a/ragdoll/config_model/text_config.py
+++ b/ragdoll/config_model/text_config.py
@@ -44,5 +44,4 @@ class TextConfig(BaseHandlerConfig):
for value in self.conf:
if value is not None:
content = content + value + "\n"
- content = content + '\n'
return content
\ No newline at end of file
diff --git a/ragdoll/confs_manage/__init__.py b/ragdoll/confs_manage/__init__.py
new file mode 100644
index 0000000..25b0334
--- /dev/null
+++ b/ragdoll/confs_manage/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py
+@Time: 2024/3/11 9:01
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/confs_manage/view.py b/ragdoll/confs_manage/view.py
new file mode 100644
index 0000000..4280fa7
--- /dev/null
+++ b/ragdoll/confs_manage/view.py
@@ -0,0 +1,479 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: view.py
+@Time: 2024/3/11 9:01
+@Author: JiaoSiMao
+Description:
+"""
+import os
+
+import connexion
+from vulcanus.restful.resp.state import SUCCEED, SERVER_ERROR, PARAM_ERROR, NO_DATA
+from vulcanus.restful.response import BaseResponse
+
+from ragdoll.conf.constant import TARGETDIR
+from ragdoll.const.conf_files import yang_conf_list
+from ragdoll.const.conf_handler_const import NOT_SYNCHRONIZE
+from ragdoll.function.verify.confs import GetSyncStatusSchema, QueryExceptedConfsSchema, QueryRealConfsSchema, \
+ SyncConfToHostFromDomainSchema, QuerySupportedConfsSchema, CompareConfDiffSchema, \
+ BatchSyncConfToHostFromDomainSchema
+from ragdoll.log.log import LOGGER
+from ragdoll.utils.conf_tools import ConfTools
+from ragdoll.utils.format import Format
+from ragdoll.utils.host_tools import HostTools
+from ragdoll.utils.yang_module import YangModule
+
+
+class GetTheSyncStatusOfDomain(BaseResponse):
+ @BaseResponse.handle(schema=GetSyncStatusSchema, token=True)
+ def post(self, **params):
+ """
+ get the status of the domain
+ get the status of whether the domain has been synchronized # noqa: E501
+
+ :param body:
+ :type body: dict | bytes
+
+ :rtype: SyncStatus
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ ip = params.get("ip")
+ # check domain
+ base_rsp, code_num = Format.check_domain_param(domain)
+ if code_num != 200:
+ return base_rsp, code_num
+
+ # get manage confs in domain
+ code_num, code_string, manage_confs = Format.get_domain_conf(domain)
+ if not manage_confs:
+ return self.response(code=SUCCEED, message=code_string, data=manage_confs)
+
+ # get real conf in host
+ host_id = Format.get_host_id_by_ip(ip, domain)
+ real_conf_res_text = Format.get_realconf_by_domain_and_host(domain, [host_id], access_token)
+ if real_conf_res_text is None:
+ return self.response(code=SERVER_ERROR, message="get real conf failed")
+
+ # compare manage conf with real conf
+ sync_status = Format.diff_mangeconf_with_realconf(domain, real_conf_res_text, manage_confs)
+
+ # deal with not found files
+ man_conf_list = []
+ for d_man_conf in manage_confs:
+ man_conf_list.append(d_man_conf.get("file_path").split(":")[-1])
+ for d_host in sync_status["hostStatus"]:
+ d_sync_status = d_host["syncStatus"]
+ file_list = []
+ for d_file in d_sync_status:
+ file_path = d_file["file_path"]
+ file_list.append(file_path)
+ for d_man_conf in man_conf_list:
+ if d_man_conf in file_list:
+ continue
+ else:
+ comp_res = "NOT FOUND"
+ conf_is_synced = {"file_path": d_man_conf, "isSynced": comp_res}
+ d_sync_status.append(conf_is_synced)
+ return self.response(code=SUCCEED, message="successfully get the sync status of domain", data=sync_status)
+
+
+class QueryExceptedConfs(BaseResponse):
+ @BaseResponse.handle(schema=QueryExceptedConfsSchema, token=True)
+ def post(self, **params):
+ """
+ query the supported configurations in the current project
+ queryExpectedConfs # noqa: E501
+
+ :rtype: List[ExceptedConfInfo]
+ """
+ # 直接从入参中读取domain列表
+ domain_names = params.get("domainNames")
+ if len(domain_names) == 0:
+ code_num = PARAM_ERROR
+ code_string = "The current domain does not exist, please create the domain first."
+ return self.response(code=code_num, message=code_string)
+
+ all_domain_expected_files = []
+ yang_modules = YangModule()
+ for d_domain in domain_names:
+ domain_path = os.path.join(TARGETDIR, d_domain["domainName"])
+ expected_conf_lists = {"domainName": d_domain["domainName"], "confBaseInfos": []}
+ # Traverse all files in the source management repository
+ for root, dirs, files in os.walk(domain_path):
+ # Domain also contains host cache files, so we need to add hierarchical judgment for root
+ if len(files) > 0 and len(root.split('/')) > 3:
+ if "hostRecord.txt" in files:
+ continue
+ for d_file in files:
+ feature = os.path.join(root.split('/')[-1], d_file)
+ d_module = yang_modules.getModuleByFeature(feature)
+ file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
+ file_path = file_lists.get(d_module.name()).split(":")[-1]
+ d_file_path = os.path.join(root, d_file)
+ expected_value = Format.get_file_content_by_read(d_file_path)
+ conf_base_info = {"filePath": file_path, "expectedContents": expected_value}
+ expected_conf_lists.get("confBaseInfos").append(conf_base_info)
+ all_domain_expected_files.append(expected_conf_lists)
+
+ LOGGER.debug("all_domain_expected_files is : {}".format(all_domain_expected_files))
+
+ if len(all_domain_expected_files) == 0:
+ code_num = PARAM_ERROR
+ code_string = "The current domain does not exist, please create the domain first."
+ return self.response(code=code_num, message=code_string)
+
+ return self.response(code=SUCCEED, message="Successfully get the expected configuration file information.",
+ data=all_domain_expected_files)
+
+
+class QueryRealConfs(BaseResponse):
+ @BaseResponse.handle(schema=QueryRealConfsSchema, token=True)
+ def post(self, **params):
+ """
+ query the real configuration value in the current hostId node
+
+ query the real configuration value in the current hostId node # noqa: E501
+
+ :param body:
+ :type body: dict | bytes
+
+ :rtype: List[RealConfInfo]
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ host_list = params.get("hostIds")
+
+ check_res = Format.domainCheck(domain)
+ if not check_res:
+ codeNum = PARAM_ERROR
+ codeString = "Failed to verify the input parameter, please check the input parameters."
+ return self.response(code=codeNum, message=codeString)
+
+ # check the domain is Exist
+ is_exist = Format.isDomainExist(domain)
+ if not is_exist:
+ codeNum = PARAM_ERROR
+ codeString = "The current domain does not exist, please create the domain first."
+ return self.response(code=codeNum, message=codeString)
+
+ # check whether the host is configured in the domain
+ is_host_list_exist = Format.isHostInDomain(domain)
+ LOGGER.debug("is_host_list_exist is : {}".format(is_host_list_exist))
+ if not is_host_list_exist:
+ codeNum = PARAM_ERROR
+ codeString = "The host information is not set in the current domain. Please add the host information first"
+ return self.response(code=codeNum, message=codeString)
+
+ # get all hosts managed by the current domain.
+ # If host_list is empty, query all hosts in the current domain.
+ # If host_list is not empty, the actual contents of the currently given host are queried.
+ exist_host = []
+ failed_host = []
+ if len(host_list) > 0:
+ host_tool = HostTools()
+ exist_host, failed_host = host_tool.getHostExistStatus(domain, host_list)
+ else:
+ res_text = Format.get_hostinfo_by_domain(domain)
+ if len(res_text) == 0:
+ code_num = NO_DATA
+ code_string = "The host currently controlled in the domain is empty. Please add host information to " \
+ "the domain. "
+ return self.response(code=code_num, message=code_string)
+
+ if len(exist_host) == 0 or len(failed_host) == len(host_list):
+ codeNum = PARAM_ERROR
+ codeString = "The host information is not set in the current domain. Please add the host information first"
+ return self.response(code=codeNum, message=codeString)
+
+ # get the management conf in domain
+ res = Format.get_realconf_by_domain_and_host(domain, exist_host, access_token)
+ if len(res) == 0:
+ codeNum = NO_DATA
+ codeString = "Real configuration query failed. The failure reason is : The real configuration does not " \
+ "found. "
+ return self.response(code=codeNum, message=codeString)
+
+ return self.response(code=SUCCEED, message="Successfully query real confs", data=res)
+
+
+class SyncConfToHostFromDomain(BaseResponse):
+ @BaseResponse.handle(schema=SyncConfToHostFromDomainSchema, token=True)
+ def put(self, **params):
+ """
+ synchronize the configuration information of the configuration domain to the host # noqa: E501
+
+ :param body:
+ :type body: dict | bytes
+
+ :rtype: List[HostSyncResult]
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ sync_list = params.get("syncList")
+
+ host_sync_confs = dict()
+
+ for sync in sync_list:
+ host_sync_confs[sync["hostId"]] = sync["syncConfigs"]
+
+ # check the input domain
+ check_res = Format.domainCheck(domain)
+ if not check_res:
+ code_num = PARAM_ERROR
+ code_string = "Failed to verify the input parameter, please check the input parameters."
+ return self.response(code=code_num, message=code_string)
+
+ # check whether the domain exists
+ is_exist = Format.isDomainExist(domain)
+ if not is_exist:
+ code_num = NO_DATA
+ code_string = "The current domain does not exist, please create the domain first."
+ return self.response(code=code_num, message=code_string)
+
+ # get the management host in domain
+ res_host_text = Format.get_hostinfo_by_domain(domain)
+ if len(res_host_text) == 0:
+ code_num = NO_DATA
+ code_string = "The host currently controlled in the domain is empty. Please add host information to the " \
+ "domain. "
+ return self.response(code=code_num, message=code_string)
+
+ # Check whether the host is in the managed host list
+ exist_host = []
+ if len(host_sync_confs) > 0:
+ host_ids = host_sync_confs.keys()
+ for host_id in host_ids:
+ for d_host in res_host_text:
+ if host_id == d_host.get("host_id"):
+ exist_host.append(host_id)
+ else:
+ for d_host in res_host_text:
+ temp_host = {}
+ temp_host["hostId"] = d_host.get("host_id")
+ exist_host.append(temp_host)
+ LOGGER.debug("exist_host is : {}".format(exist_host))
+
+ if len(exist_host) == 0:
+ code_num = PARAM_ERROR
+ code_string = "The host information is not set in the current domain. Please add the host information first"
+ return self.response(code=code_num, message=code_string)
+
+ # get the management conf in domain
+ man_conf_res_text = Format.get_manageconf_by_domain(domain)
+ LOGGER.debug("man_conf_res_text is : {}".format(man_conf_res_text))
+ manage_confs = man_conf_res_text.get("conf_files")
+
+ # Deserialize and reverse parse the expected configuration
+ conf_tools = ConfTools()
+ # 组装入参
+ file_path_infos = dict()
+ for host_id in exist_host:
+ sync_confs = host_sync_confs.get(host_id)
+ for d_man_conf in manage_confs:
+ file_path = d_man_conf.get("file_path").split(":")[-1]
+ if file_path in sync_confs:
+ contents = d_man_conf.get("contents")
+ file_path_infos[file_path] = contents
+
+ code_num, code_string, sync_res = Format.deal_batch_sync_res(conf_tools, exist_host, file_path_infos,
+ access_token)
+ if code_num != 200:
+ return self.response(code=SERVER_ERROR, message=code_string, data=sync_res)
+ return self.response(code=SUCCEED, message=code_string, data=sync_res)
+
+
+class QuerySupportedConfs(BaseResponse):
+ @BaseResponse.handle(schema=QuerySupportedConfsSchema, token=True)
+ def post(self, **params):
+ """
+ query supported configuration list # noqa: E501
+
+ :param body:
+ :type body: dict | bytes
+
+ :rtype: List
+ """
+ domain = params.get("domainName")
+ check_res = Format.domainCheck(domain)
+ if not check_res:
+ code_num = PARAM_ERROR
+ code_string = "Failed to verify the input parameter, please check the input parameters."
+ return self.response(code=code_num, message=code_string)
+
+ is_exist = Format.isDomainExist(domain)
+ if not is_exist:
+ code_num = NO_DATA
+ code_string = "The current domain does not exist, please create the domain first."
+ return self.response(code=code_num, message=code_string)
+
+ conf_files = Format.get_manageconf_by_domain(domain)
+ conf_files = conf_files.get("conf_files")
+ if len(conf_files) == 0:
+ return yang_conf_list
+
+ exist_conf_list = []
+ for conf in conf_files:
+ exist_conf_list.append(conf.get('file_path'))
+
+ return list(set(yang_conf_list).difference(set(exist_conf_list)))
+
+
+class CompareConfDiff(BaseResponse):
+ @BaseResponse.handle(schema=CompareConfDiffSchema, token=True)
+ def post(self, **params):
+ """
+ compare conf different, return host sync status
+
+ :param body:
+ :type body: dict
+
+ :rtype:
+ """
+ expected_confs_resp_list = params.get("expectedConfsResp")
+ domain_result = params.get("domainResult")
+ expected_confs_resp_dict = Format.deal_expected_confs_resp(expected_confs_resp_list)
+
+ real_conf_res_text_dict = Format.deal_domain_result(domain_result)
+ # 循环real_conf_res_text_list 取出每一个domain的domain_result与expected_confs_resp_dict的expected_confs_resp做对比
+ sync_result = []
+ for domain_name, real_conf_res_text_list in real_conf_res_text_dict.items():
+ expected_confs_resp = expected_confs_resp_dict.get(domain_name)
+ sync_status = Format.diff_mangeconf_with_realconf_for_db(domain_name, real_conf_res_text_list,
+ expected_confs_resp)
+ domain_name = sync_status["domainName"]
+ host_status_list = sync_status["hostStatus"]
+
+ for signal_status in host_status_list:
+ host_id = signal_status["hostId"]
+ domain_host_sync_status = 1
+ sync_status_list = signal_status["syncStatus"]
+ for single_sync_status in sync_status_list:
+ if single_sync_status["isSynced"] == NOT_SYNCHRONIZE:
+ domain_host_sync_status = 0
+ break
+ single_domain_host_status = {"domain_name": domain_name, "host_id": host_id,
+ "sync_status": domain_host_sync_status}
+ sync_result.append(single_domain_host_status)
+ return self.response(code=SUCCEED, message="successfully compare conf diff", data=sync_result)
+
+
+class BatchSyncConfToHostFromDomain(BaseResponse):
+ @BaseResponse.handle(schema=BatchSyncConfToHostFromDomainSchema, token=True)
+ def put(self, **params):
+ """
+ synchronize the configuration information of the configuration domain to the host # noqa: E501
+
+ :param body:
+ :type body: dict | bytes
+
+ :rtype: List[HostSyncResult]
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ host_ids = params.get("hostIds")
+ # check domain
+ base_rsp, code_num = Format.check_domain_param(domain)
+ if code_num != 200:
+ return base_rsp, code_num
+
+        # Determine which files are out of sync, based on the domain and host IPs
+ # get manage confs in domain
+ code_num, code_string, manage_confs = Format.get_domain_conf(domain)
+ if not manage_confs:
+ return self.response(code=SUCCEED, message=code_string, data=manage_confs)
+
+ # get real conf in host
+ real_conf_res_text = Format.get_realconf_by_domain_and_host(domain, host_ids, access_token)
+ # compare manage conf with real conf
+ sync_status = Format.diff_mangeconf_with_realconf(domain, real_conf_res_text, manage_confs)
+        # Parse sync_status and extract the entries that are not synchronized
+ host_sync_confs = dict()
+ host_status = sync_status["hostStatus"]
+ for host_result in host_status:
+ host_id = host_result["hostId"]
+ sync_status = host_result["syncStatus"]
+ sync_configs = []
+ for sync_result in sync_status:
+ if sync_result["isSynced"] == NOT_SYNCHRONIZE:
+ sync_configs.append(sync_result["file_path"])
+ host_sync_confs[host_id] = sync_configs
+
+ # check the input domain
+ check_res = Format.domainCheck(domain)
+ if not check_res:
+ code_num = PARAM_ERROR
+ code_string = "Failed to verify the input parameter, please check the input parameters."
+ return self.response(code=code_num, message=code_string)
+
+ # check whether the domain exists
+ is_exist = Format.isDomainExist(domain)
+ if not is_exist:
+ code_num = NO_DATA
+ code_string = "The current domain does not exist, please create the domain first."
+ return self.response(code=code_num, message=code_string)
+
+ # get the management host in domain
+ res_host_text = Format.get_hostinfo_by_domain(domain)
+ if len(res_host_text) == 0:
+ code_num = NO_DATA
+ code_string = "The host currently controlled in the domain is empty. Please add host information to the " \
+ "domain. "
+ return self.response(code=code_num, message=code_string)
+ # Check whether the host is in the managed host list
+ exist_host = []
+ if len(host_sync_confs) > 0:
+ host_ids = host_sync_confs.keys()
+ for host_id in host_ids:
+ for d_host in res_host_text:
+ if host_id == d_host.get("host_id"):
+ exist_host.append(host_id)
+ else:
+ for d_host in res_host_text:
+ tmp_host = {"hostId": d_host.get("host_id")}
+ exist_host.append(tmp_host)
+ LOGGER.debug("exist_host is : {}".format(exist_host))
+
+ if len(exist_host) == 0:
+ code_num = PARAM_ERROR
+ code_string = "The host information is not set in the current domain. Please add the host information first"
+ return self.response(code=code_num, message=code_string)
+
+ # get the management conf in domain
+ man_conf_res_text = Format.get_manageconf_by_domain(domain)
+ LOGGER.debug("man_conf_res_text is : {}".format(man_conf_res_text))
+ manage_confs = man_conf_res_text.get("conf_files")
+
+ # Deserialize and reverse parse the expected configuration
+ conf_tools = ConfTools()
+        # Assemble the request parameters
+ file_path_infos = dict()
+ for host_id in exist_host:
+ sync_confs = host_sync_confs.get(host_id)
+ for d_man_conf in manage_confs:
+ file_path = d_man_conf.get("file_path").split(":")[-1]
+ if file_path in sync_confs:
+ contents = d_man_conf.get("contents")
+ file_path_infos[file_path] = contents
+
+ if not file_path_infos:
+ code_num = PARAM_ERROR
+ code_string = "No config needs to be synchronized"
+ return self.response(code=code_num, message=code_string)
+ code_num, code_string, sync_res = Format.deal_batch_sync_res(conf_tools, exist_host, file_path_infos,
+ access_token)
+
+ if code_num != 200:
+ return self.response(code=SERVER_ERROR, message=code_string, data=sync_res)
+ return self.response(code=SUCCEED, message=code_string, data=sync_res)
diff --git a/ragdoll/controllers/__init__.py b/ragdoll/controllers/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/ragdoll/controllers/confs_controller.py b/ragdoll/controllers/confs_controller.py
deleted file mode 100644
index 44269f9..0000000
--- a/ragdoll/controllers/confs_controller.py
+++ /dev/null
@@ -1,339 +0,0 @@
-import connexion
-import os
-
-from ragdoll.log.log import LOGGER
-from ragdoll.models.base_response import BaseResponse # noqa: E501
-from ragdoll.models.conf_host import ConfHost # noqa: E501
-from ragdoll.models.domain_name import DomainName # noqa: E501
-from ragdoll.models.excepted_conf_info import ExceptedConfInfo # noqa: E501
-from ragdoll.models.sync_req import SyncReq
-from ragdoll.models.sync_status import SyncStatus # noqa: E501
-from ragdoll.models.conf_base_info import ConfBaseInfo
-from ragdoll.models.conf_is_synced import ConfIsSynced
-from ragdoll.models.host_sync_result import HostSyncResult
-
-from ragdoll.controllers.format import Format
-from ragdoll.utils.git_tools import GitTools
-from ragdoll.utils.yang_module import YangModule
-from ragdoll.utils.conf_tools import ConfTools
-from ragdoll.utils.host_tools import HostTools
-from ragdoll.utils.object_parse import ObjectParse
-from ragdoll.const.conf_files import yang_conf_list
-
-TARGETDIR = GitTools().target_dir
-
-
-def get_the_sync_status_of_domain(body=None): # noqa: E501
- """
- get the status of the domain
- get the status of whether the domain has been synchronized # noqa: E501
-
- :param body:
- :type body: dict | bytes
-
- :rtype: SyncStatus
- """
-
- if connexion.request.is_json:
- body = DomainName.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- # check domain
- code_num = 200
- base_rsp = None
- base_rsp, code_num = Format.check_domain_param(domain)
- if code_num != 200:
- return base_rsp, code_num
-
- # get manage confs in domain
- LOGGER.debug("############## get the confs in domain ##############")
- base_rsp, code_num, manage_confs = Format._get_domain_conf(domain)
- if code_num != 200:
- return base_rsp, code_num
-
- # get real conf in host
- LOGGER.debug("############## query the real conf ##############")
- host_ids = Format.get_hostid_list_by_domain(domain)
- real_conf_res_text = Format.get_realconf_by_domain_and_host(domain, host_ids)
-
- # compare manage conf with real conf
- sync_status = Format.diff_mangeconf_with_realconf(domain, real_conf_res_text, manage_confs)
-
- # deal with not found files
- man_conf_list = []
- for d_man_conf in manage_confs:
- man_conf_list.append(d_man_conf.get("file_path").split(":")[-1])
- for d_host in sync_status.host_status:
- d_sync_status = d_host.sync_status
- file_list = []
- for d_file in d_sync_status:
- file_path = d_file.file_path
- file_list.append(file_path)
- for d_man_conf in man_conf_list:
- if d_man_conf in file_list:
- continue
- else:
- comp_res = "NOT FOUND"
- conf_is_synced = ConfIsSynced(file_path=d_man_conf,
- is_synced=comp_res)
- d_sync_status.append(conf_is_synced)
-
- return sync_status
-
-
-def query_excepted_confs(): # noqa: E501
- """
- query the supported configurations in the current project
- queryExpectedConfs # noqa: E501
-
- :rtype: List[ExceptedConfInfo]
- """
- # get all domain
- LOGGER.debug("############## get all domain ##############")
- cmd = "ls {}".format(TARGETDIR)
- git_tools = GitTools()
- res_domain = git_tools.run_shell_return_output(cmd).decode().split()
-
- if len(res_domain) == 0:
- code_num = 400
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- success_domain = []
- all_domain_expected_files = []
- yang_modules = YangModule()
- for d_domian in res_domain:
- domain_path = os.path.join(TARGETDIR, d_domian)
- expected_conf_lists = ExceptedConfInfo(domain_name=d_domian,
- conf_base_infos=[])
- # Traverse all files in the source management repository
- for root, dirs, files in os.walk(domain_path):
- # Domain also contains host cache files, so we need to add hierarchical judgment for root
- if len(files) > 0 and len(root.split('/')) > 3:
- if "hostRecord.txt" in files:
- continue
- for d_file in files:
- feature = os.path.join(root.split('/')[-1], d_file)
- d_module = yang_modules.getModuleByFeature(feature)
- file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
- file_path = file_lists.get(d_module.name()).split(":")[-1]
- d_file_path = os.path.join(root, d_file)
- expected_value = Format.get_file_content_by_read(d_file_path)
-
- git_tools = GitTools()
- git_message = git_tools.getLogMessageByPath(d_file_path)
-
- conf_base_info = ConfBaseInfo(file_path=file_path,
- expected_contents=expected_value,
- change_log=git_message)
- expected_conf_lists.conf_base_infos.append(conf_base_info)
- all_domain_expected_files.append(expected_conf_lists)
-
- LOGGER.debug("########################## expetedConfInfo ####################")
- LOGGER.debug("all_domain_expected_files is : {}".format(all_domain_expected_files))
- LOGGER.debug("########################## expetedConfInfo end ####################")
-
- if len(all_domain_expected_files) == 0:
- code_num = 400
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- return all_domain_expected_files
-
-
-def query_real_confs(body=None): # noqa: E501
- """
- query the real configuration value in the current hostId node
-
- query the real configuration value in the current hostId node # noqa: E501
-
- :param body:
- :type body: dict | bytes
-
- :rtype: List[RealConfInfo]
- """
- if connexion.request.is_json:
- body = ConfHost.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- host_list = body.host_ids
-
- check_res = Format.domainCheck(domain)
- if not check_res:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check the domain is Exist
- is_exist = Format.isDomainExist(domain)
- if not is_exist:
- code_num = 400
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- # check whether the host is configured in the domain
- is_host_list_exist = Format.isHostInDomain(domain)
- LOGGER.debug("is_host_list_exist is : {}".format(is_host_list_exist))
- if not is_host_list_exist:
- code_num = 400
- base_rsp = BaseResponse(code_num, "The host information is not set in the current domain." +
- "Please add the host information first")
- return base_rsp, code_num
-
- # get all hosts managed by the current domain.
- # If host_list is empty, query all hosts in the current domain.
- # If host_list is not empty, the actual contents of the currently given host are queried.
- exist_host = []
- failed_host = []
- if len(host_list) > 0:
- host_tool = HostTools()
- exist_host, failed_host = host_tool.getHostExistStatus(domain, host_list)
- else:
- LOGGER.debug("############## get the host in domain ##############")
- res_text = Format.get_hostinfo_by_domain(domain)
- if len(res_text) == 0:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The host currently controlled in the domain is empty." +
- "Please add host information to the domain.")
-
- if len(exist_host) == 0 or len(failed_host) == len(host_list):
- code_num = 400
- base_rsp = BaseResponse(code_num, "The host information is not set in the current domain." +
- "Please add the host information first")
- return base_rsp, code_num
-
- # get the management conf in domain
- LOGGER.debug("############## get the management conf in domain ##############")
- res = Format.get_realconf_by_domain_and_host(domain, exist_host)
- if len(res) == 0:
- code_num = 400
- res_text = "The real configuration does not found."
- base_rsp = BaseResponse(code_num, "Real configuration query failed." +
- "The failure reason is : " + res_text)
- return base_rsp, code_num
-
- return res
-
-
-def sync_conf_to_host_from_domain(body=None): # noqa: E501
- """
- synchronize the configuration information of the configuration domain to the host # noqa: E501
-
- :param body:
- :type body: dict | bytes
-
- :rtype: List[HostSyncResult]
- """
- if connexion.request.is_json:
- body = SyncReq.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- sync_list = body.sync_list
-
- host_sync_confs = dict()
-
- for sync in sync_list:
- host_sync_confs[sync.host_id] = sync.sync_configs
-
- # check the input domain
- check_res = Format.domainCheck(domain)
- if not check_res:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domain exists
- is_exist = Format.isDomainExist(domain)
- if not is_exist:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- # get the management host in domain
- res_host_text = Format.get_hostinfo_by_domain(domain)
- if len(res_host_text) == 0:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The host currently controlled in the domain is empty." +
- "Please add host information to the domain.")
- # Check whether the host is in the managed host list
- exist_host = []
- if len(host_sync_confs) > 0:
- host_ids = host_sync_confs.keys()
- for host_id in host_ids:
- for d_host in res_host_text:
- if host_id == d_host.get("host_id"):
- exist_host.append(host_id)
- else:
- for d_host in res_host_text:
- temp_host = {}
- temp_host["hostId"] = d_host.get("host_id")
- exist_host.append(temp_host)
- LOGGER.debug("exist_host is : {}".format(exist_host))
-
- if len(exist_host) == 0:
- code_num = 400
- base_rsp = BaseResponse(code_num, "The host information is not set in the current domain." +
- "Please add the host information first")
- return base_rsp, code_num
-
- # get the management conf in domain
- LOGGER.debug("############## get management conf in domain ##############")
- man_conf_res_text = Format.get_manageconf_by_domain(domain)
- LOGGER.debug("man_conf_res_text is : {}".format(man_conf_res_text))
- manage_confs = man_conf_res_text.get("conf_files")
- LOGGER.debug("manage_confs is : {}".format(manage_confs))
-
- # Deserialize and reverse parse the expected configuration
- conf_tools = ConfTools()
- sync_res = []
- for host_id in exist_host:
- host_sync_result = HostSyncResult(host_id=host_id,
- sync_result=[])
- sync_confs = host_sync_confs.get(host_id)
- for d_man_conf in manage_confs:
- file_path = d_man_conf.get("file_path").split(":")[-1]
- if file_path in sync_confs:
- contents = d_man_conf.get("contents")
- object_parse = ObjectParse()
- Format.deal_sync_res(conf_tools, contents, file_path, host_id, host_sync_result, object_parse)
- sync_res.append(host_sync_result)
-
- return sync_res
-
-
-def query_supported_confs(body=None):
- """
- query supported configuration list # noqa: E501
-
- :param body:
- :type body: dict | bytes
-
- :rtype: List
- """
- if connexion.request.is_json:
- body = DomainName.from_dict(connexion.request.get_json())
-
- domain = body.domain_name
-
- check_res = Format.domainCheck(domain)
- if not check_res:
- code_num = 400
- base_rsp = BaseResponse(code_num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, code_num
-
- is_exist = Format.isDomainExist(domain)
- if not is_exist:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- conf_files = Format.get_manageconf_by_domain(domain)
- conf_files = conf_files.get("conf_files")
- if len(conf_files) == 0:
- return yang_conf_list
-
- exist_conf_list = []
- for conf in conf_files:
- exist_conf_list.append(conf.get('file_path'))
-
- return list(set(yang_conf_list).difference(set(exist_conf_list)))
diff --git a/ragdoll/controllers/domain_controller.py b/ragdoll/controllers/domain_controller.py
deleted file mode 100644
index fd6a7e5..0000000
--- a/ragdoll/controllers/domain_controller.py
+++ /dev/null
@@ -1,124 +0,0 @@
-import connexion
-import six
-import os
-import shutil
-import logging
-
-from ragdoll.models.base_response import BaseResponse # noqa: E501
-from ragdoll.models.domain import Domain # noqa: E501
-from ragdoll import util
-from ragdoll.controllers.format import Format
-from ragdoll.utils.git_tools import GitTools
-
-TARGETDIR = GitTools().target_dir
-
-# logging.basicConfig(filename='log.log',
-# format='%(asctime)s - %(name)s - %(levelname)s -%(module)s: %(message)s',
-# datefmt='%Y-%m-%d %H:%M:%S %p',
-# level=10)
-
-def create_domain(body=None): # noqa: E501
- """create domain
-
- create domain # noqa: E501
-
- :param body: domain info
- :type body: list | bytes
-
- :rtype: BaseResponse
- """
- if connexion.request.is_json:
- body = [Domain.from_dict(d) for d in connexion.request.get_json()] # noqa: E501
-
- if len(body) == 0:
- base_rsp = BaseResponse(400, "The input domain cannot be empty, please check the domain.")
- return base_rsp
-
- successDomain = []
- failedDomain = []
-
- for domain in body:
- tempDomainName = domain.domain_name
- checkRes = Format.domainCheck(tempDomainName)
- isExist = Format.isDomainExist(tempDomainName)
- if isExist or not checkRes:
- failedDomain.append(tempDomainName)
- else:
- successDomain.append(tempDomainName)
- domainPath = os.path.join(TARGETDIR, tempDomainName)
- os.umask(0o077)
- os.mkdir(domainPath)
-
- if len(failedDomain) == 0:
- codeNum = 200
- codeString = Format.spliceAllSuccString("domain", "created", successDomain)
- else:
- codeNum = 400
- if len(body) == 1:
- if isExist:
- codeString = "domain {} create failed because it has been existed.".format(failedDomain[0])
- elif not checkRes:
- codeString = "domain {} create failed because format is incorrect.".format(failedDomain[0])
- else:
- codeString = Format.splicErrorString("domain", "created", successDomain, failedDomain)
-
- base_rsp = BaseResponse(codeNum, codeString)
-
- return base_rsp, codeNum
-
-
-def delete_domain(domainName): # noqa: E501
- """delete domain
-
- delete domain # noqa: E501
-
- :param domainName: the domain that needs to be deleted
- :type domainName: List[str]
-
- :rtype: BaseResponse
- """
- if len(domainName) == 0:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The entered domian is empty")
- return base_rsp, codeNum
-
- successDomain = []
- failedDomain = []
-
- for tempDomainName in domainName:
- checkRes = Format.domainCheck(tempDomainName)
- isExist = Format.isDomainExist(tempDomainName)
- if checkRes and isExist:
- domainPath = os.path.join(TARGETDIR, tempDomainName)
- successDomain.append(tempDomainName)
- shutil.rmtree(domainPath)
- else:
- failedDomain.append(tempDomainName)
-
- if len(failedDomain) == 0:
- codeNum = 200
- codeString = Format.spliceAllSuccString("domain", "delete", successDomain)
- else:
- codeNum = 400
- codeString = Format.splicErrorString("domain", "delete", successDomain, failedDomain)
-
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
-
-
-def query_domain(): # noqa: E501
- """
- query the list of all configuration domain # noqa: E501
- :rtype: List[Domain]
- """
- domain_list = []
- cmd = "ls {}".format(TARGETDIR)
- gitTools = GitTools()
- ls_res = gitTools.run_shell_return_output(cmd).decode()
- ll_list = ls_res.split('\n')
- for d_ll in ll_list:
- if d_ll:
- domain = Domain(domain_name = d_ll)
- domain_list.append(domain)
-
- return domain_list, 200
diff --git a/ragdoll/controllers/format.py b/ragdoll/controllers/format.py
deleted file mode 100644
index 9676296..0000000
--- a/ragdoll/controllers/format.py
+++ /dev/null
@@ -1,614 +0,0 @@
-import os
-import re
-import json
-import configparser
-import ast
-import requests
-from ragdoll.log.log import LOGGER
-
-from ragdoll.const.conf_handler_const import NOT_SYNCHRONIZE, SYNCHRONIZED, CONFIG, \
- DIRECTORY_FILE_PATH_LIST
-from ragdoll.models import ConfSyncedRes
-from ragdoll.models.base_response import BaseResponse # noqa: E501
-from ragdoll.models.conf_file import ConfFile
-from ragdoll.models.conf_files import ConfFiles
-from ragdoll.models.realconf_base_info import RealconfBaseInfo
-from ragdoll.models.real_conf_info import RealConfInfo # noqa: E501
-from ragdoll.models.conf_is_synced import ConfIsSynced
-from ragdoll.models.host_sync_status import HostSyncStatus
-from ragdoll.models.single_config import SingleConfig
-from ragdoll.models.sync_status import SyncStatus # noqa: E501
-from ragdoll.models.host import Host # noqa: E501
-from ragdoll.utils.host_tools import HostTools
-
-
-class Format(object):
-
- @staticmethod
- def domainCheck(domainName):
- res = True
- if not re.match(r"^[A-Za-z0-9_\.-]*$", domainName) or domainName == "" or len(domainName) > 255:
- res = False
- return res
-
- @staticmethod
- def isDomainExist(domainName):
- TARGETDIR = Format.get_git_dir()
- domainPath = os.path.join(TARGETDIR, domainName)
- if os.path.exists(domainPath):
- return True
-
- return False
-
- @staticmethod
- def spliceAllSuccString(obj, operation, succDomain):
- """
- docstring
- """
- codeString = "All {obj} {oper} successfully, {succ} {obj} in total.".format( \
- obj=obj, oper=operation, succ=len(succDomain))
- return codeString
-
- @staticmethod
- def splicErrorString(obj, operation, succDomain, failDomain):
- """
- docstring
- """
- codeString = "{succ} {obj} {oper} successfully, {fail} {obj} {oper} failed.".format( \
- succ=len(succDomain), obj=obj, oper=operation, fail=len(failDomain))
-
- succString = "\n"
- if len(succDomain) > 0:
- succString = "These are successful: "
- for succName in succDomain:
- succString += succName + " "
- succString += "."
-
- if len(failDomain) > 0:
- failString = "These are failed: "
- for failName in failDomain:
- failString += failName + " "
- return codeString + succString + failString
-
- return codeString + succString
-
- @staticmethod
- def two_abs_join(abs1, abs2):
- """
- Absolute path Joins two absolute paths together
- :param abs1: main path
- :param abs2: the spliced path
- :return: together the path
- """
- # 1. Format path (change \\ in path to \)
- abs2 = os.fspath(abs2)
-
- # 2. Split the path file
- abs2 = os.path.splitdrive(abs2)[1]
- # 3. Remove the beginning '/'
- abs2 = abs2.strip('\\/') or abs2
- return os.path.abspath(os.path.join(abs1, abs2))
-
- @staticmethod
- def isContainedHostIdInfile(f_file, content):
- isContained = False
- with open(f_file, 'r') as d_file:
- for line in d_file.readlines():
- line_dict = json.loads(str(ast.literal_eval(line)).replace("'", "\""))
- if content == line_dict["host_id"]:
- isContained = True
- break
- return isContained
-
- @staticmethod
- def addHostToFile(d_file, host):
- info_json = json.dumps(str(host), sort_keys=False, indent=4, separators=(',', ': '))
- os.umask(0o077)
- with open(d_file, 'a+') as host_file:
- host_file.write(info_json)
- host_file.write("\n")
-
- @staticmethod
- def getSubDirFiles(path):
- """
- desc: Subdirectory records and files need to be logged to the successConf
- """
- fileRealPathList = []
- fileXPathlist = []
- for root, dirs, files in os.walk(path):
- if len(files) > 0:
- preXpath = root.split('/', 3)[3]
- for d_file in files:
- xpath = os.path.join(preXpath, d_file)
- fileXPathlist.append(xpath)
- realPath = os.path.join(root, d_file)
- fileRealPathList.append(realPath)
-
- return fileRealPathList, fileXPathlist
-
- @staticmethod
- def isHostInDomain(domainName):
- """
- desc: Query domain Whether host information is configured in the domain
- """
- isHostInDomain = False
- TARGETDIR = Format.get_git_dir()
- domainPath = os.path.join(TARGETDIR, domainName)
- hostPath = os.path.join(domainPath, "hostRecord.txt")
- if os.path.isfile(hostPath):
- isHostInDomain = True
-
- return isHostInDomain
-
- @staticmethod
- def isHostIdExist(hostPath, hostId):
- """
- desc: Query hostId exists within the current host domain management
- """
- isHostIdExist = False
- if os.path.isfile(hostPath) and os.stat(hostPath).st_size > 0:
- with open(hostPath) as h_file:
- for line in h_file.readlines():
- if hostId in line:
- isHostIdExist = True
- break
-
- return isHostIdExist
-
- @staticmethod
- def is_exists_file(d_file):
- if os.path.exists(d_file):
- return True
- if os.path.islink(d_file):
- LOGGER.debug("file: %s is a symlink, skipped!", d_file)
- return False
- LOGGER.error("file: %s does not exist.", d_file)
- return False
-
- @staticmethod
- def get_file_content_by_readlines(d_file):
- """
- desc: remove empty lines and comments from d_file
- """
- res = []
- try:
- with open(d_file, 'r') as s_f:
- lines = s_f.readlines()
- for line in lines:
- tmp = line.strip()
- if not len(tmp) or tmp.startswith("#"):
- continue
- res.append(line)
- except FileNotFoundError:
- LOGGER.error(f"File not found: {d_file}")
- except IOError as e:
- LOGGER.error(f"IO error: {e}")
- except Exception as e:
- LOGGER.error(f"An error occurred: {e}")
- return res
-
- @staticmethod
- def get_file_content_by_read(d_file):
- """
- desc: return a string after read the d_file
- """
- if not os.path.exists(d_file):
- return ""
- with open(d_file, 'r') as s_f:
- lines = s_f.read()
- return lines
-
- @staticmethod
- def rsplit(_str, seps):
- """
- Splits _str by the first sep in seps that is found from the right side.
- Returns a tuple without the separator.
- """
- for idx, ch in enumerate(reversed(_str)):
- if ch in seps:
- return _str[0:-idx - 1], _str[-idx:]
-
- @staticmethod
- def arch_sep(package_string):
- """
- Helper method for finding if arch separator is '.' or '-'
-
- Args:
- package_string (str): dash separated package string such as 'bash-4.2.39-3.el7'.
-
- Returns:
- str: arch separator
- """
- return '.' if package_string.rfind('.') > package_string.rfind('-') else '-'
-
- @staticmethod
- def set_file_content_by_path(content, path):
- res = 0
- if os.path.exists(path):
- with open(path, 'w+') as d_file:
- for d_cont in content:
- d_file.write(d_cont)
- d_file.write("\n")
- res = 1
- return res
-
- @staticmethod
- def get_git_dir():
- cf = configparser.ConfigParser()
- if os.path.exists(CONFIG):
- cf.read(CONFIG, encoding="utf-8")
- else:
- parent = os.path.dirname(os.path.realpath(__file__))
- conf_path = os.path.join(parent, "../../config/gala-ragdoll.conf")
- cf.read(conf_path, encoding="utf-8")
- git_dir = ast.literal_eval(cf.get("git", "git_dir"))
- return git_dir
-
- @staticmethod
- def get_hostinfo_by_domain(domainName):
- """
- desc: Query hostinfo by domainname
- """
- TARGETDIR = Format.get_git_dir()
- hostlist = []
- domainPath = os.path.join(TARGETDIR, domainName)
- hostPath = os.path.join(domainPath, "hostRecord.txt")
- if not os.path.isfile(hostPath) or os.stat(hostPath).st_size == 0:
- return hostlist
- try:
- with open(hostPath, 'r') as d_file:
- for line in d_file.readlines():
- json_str = json.loads(line)
- host_json = ast.literal_eval(json_str)
- hostId = host_json["host_id"]
- ip = host_json["ip"]
- ipv6 = host_json["ipv6"]
- host = Host(host_id=hostId, ip=ip, ipv6=ipv6)
- hostlist.append(host.to_dict())
- except OSError as err:
- LOGGER.error("OS error: {0}".format(err))
- return hostlist
- if len(hostlist) == 0:
- LOGGER.debug("hostlist is empty : {}".format(hostlist))
- else:
- LOGGER.debug("hostlist is : {}".format(hostlist))
- return hostlist
-
- @staticmethod
- def get_manageconf_by_domain(domain):
- expected_conf_lists = ConfFiles(domain_name=domain, conf_files=[])
- TARGETDIR = Format.get_git_dir()
- domainPath = os.path.join(TARGETDIR, domain)
- from ragdoll.utils.yang_module import YangModule
- for root, dirs, files in os.walk(domainPath):
- if len(files) > 0 and len(root.split('/')) > 3:
- if "hostRecord.txt" in files:
- continue
- for d_file in files:
- d_file_path = os.path.join(root, d_file)
- contents = Format.get_file_content_by_read(d_file_path)
- feature = os.path.join(root.split('/')[-1], d_file)
- yang_modules = YangModule()
- d_module = yang_modules.getModuleByFeature(feature)
- file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
- file_path = file_lists.get(d_module.name()).split(":")[-1]
-
- conf = ConfFile(file_path=file_path, contents=contents)
- expected_conf_lists.conf_files.append(conf.to_dict())
-
- LOGGER.debug("expected_conf_lists is :{}".format(expected_conf_lists))
- return expected_conf_lists.to_dict()
-
- @staticmethod
- def get_realconf_by_domain_and_host(domain, exist_host):
- res = []
- conf_files = Format.get_manageconf_by_domain(domain)
-
- # get the real conf in host
- conf_list = []
- from ragdoll.utils.conf_tools import ConfTools
- from ragdoll.utils.object_parse import ObjectParse
- conf_tools = ConfTools()
- for d_conf in conf_files.get("conf_files"):
- file_path = d_conf.get("file_path").split(":")[-1]
- if file_path not in DIRECTORY_FILE_PATH_LIST:
- conf_list.append(file_path)
- else:
- d_conf_cs = d_conf.get("contents")
- d_conf_contents = json.loads(d_conf_cs)
- for d_conf_key, d_conf_value in d_conf_contents.items():
- conf_list.append(d_conf_key)
- LOGGER.debug("############## get the real conf in host ##############")
- get_real_conf_body = {}
- get_real_conf_body_info = []
- for d_host in exist_host:
- get_real_conf_body_infos = {}
- get_real_conf_body_infos["host_id"] = d_host
- get_real_conf_body_infos["config_list"] = conf_list
- get_real_conf_body_info.append(get_real_conf_body_infos)
- get_real_conf_body["infos"] = get_real_conf_body_info
- url = conf_tools.load_url_by_conf().get("collect_url")
- headers = {"Content-Type": "application/json"}
- try:
- response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
- except requests.exceptions.RequestException as connect_ex:
- LOGGER.error(f"An error occurred: {connect_ex}")
- codeNum = 500
- codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
- resp = json.loads(response.text).get("data")
- resp_code = json.loads(response.text).get("code")
- if (resp_code != "200") and (resp_code != "206"):
- return res
-
- if not resp or len(resp) == 0:
- return res
-
- success_lists = {}
- failed_lists = {}
-
- for d_res in resp:
- d_host_id = d_res.get("host_id")
- fail_files = d_res.get("fail_files")
- if len(fail_files) > 0:
- failed_lists["host_id"] = d_host_id
- failed_lists_conf = []
- for d_failed in fail_files:
- failed_lists_conf.append(d_failed)
- failed_lists["failed_conf"] = failed_lists_conf
- failed_lists["success_conf"] = []
- else:
- success_lists["host_id"] = d_host_id
- success_lists["success_conf"] = []
- success_lists["failed_conf"] = []
-
- read_conf_info = RealConfInfo(domain_name=domain,
- host_id=d_host_id,
- conf_base_infos=[])
- d_res_infos = d_res.get("infos")
-
- real_directory_conf = {}
- real_directory_conf_list = {}
- object_parse = ObjectParse()
- for d_file in d_res_infos:
- content = d_file.get("content")
- file_path = d_file.get("path")
- file_atrr = d_file.get("file_attr").get("mode")
- file_owner = "({}, {})".format(d_file.get("file_attr").get("group"),
- d_file.get("file_attr").get("owner"))
- directory_flag = False
- for dir_path in DIRECTORY_FILE_PATH_LIST:
- if str(file_path).find(dir_path) != -1:
- if real_directory_conf.get(dir_path) is None:
- real_directory_conf_list[dir_path] = list()
- real_directory_conf[dir_path] = RealconfBaseInfo(file_path=dir_path,
- file_attr=file_atrr,
- file_owner=file_owner,
- conf_contens="")
-
- directory_conf = dict()
- directory_conf["path"] = file_path
- directory_conf["content"] = content
- real_directory_conf_list.get(dir_path).append(directory_conf)
- directory_flag = True
- break
- if not directory_flag:
- Format.deal_conf_list_content(content, d_file, file_path, object_parse, read_conf_info)
- if len(fail_files) > 0:
- failed_lists.get("success_conf").append(file_path)
- else:
- success_lists.get("success_conf").append(file_path)
-
- for dir_path, dir_value in real_directory_conf_list.items():
- content_string = object_parse.parse_directory_single_conf_to_json(dir_value,
- real_directory_conf[
- dir_path].file_path)
- real_directory_conf[dir_path].conf_contens = content_string
- real_conf_base_info = real_directory_conf.get(dir_path)
-
- read_conf_info.conf_base_infos.append(real_conf_base_info)
- res.append(read_conf_info)
- return res
-
- @staticmethod
- def deal_conf_list_content(content, d_file, file_path, object_parse, read_conf_info):
- content_string = object_parse.parse_conf_to_json(file_path, content)
- file_atrr = d_file.get("file_attr").get("mode")
- file_owner = "({}, {})".format(d_file.get("file_attr").get("group"),
- d_file.get("file_attr").get("owner"))
- real_conf_base_info = RealconfBaseInfo(path=file_path,
- file_path=file_path,
- file_attr=file_atrr,
- file_owner=file_owner,
- conf_contens=content_string)
- read_conf_info.conf_base_infos.append(real_conf_base_info)
-
- @staticmethod
- def check_domain_param(domain):
- code_num = 200
- base_resp = None
- check_res = Format.domainCheck(domain)
- if not check_res:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check the domian is exist
- is_exist = Format.isDomainExist(domain)
- if not is_exist:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
- return base_rsp, code_num
-
- # get the exist result of the host in domain
- is_host_list_exist = Format.isHostInDomain(domain)
- if not is_host_list_exist:
- code_num = 404
- base_rsp = BaseResponse(code_num, "The host information is not set in the current domain." +
- "Please add the host information first")
- return base_resp, code_num
-
- @staticmethod
- def get_hostid_list_by_domain(domain):
- host_ids = []
- res_text = Format.get_hostinfo_by_domain(domain)
- if len(res_text) == 0:
- return host_ids
-
- host_tools = HostTools()
- host_ids = host_tools.getHostList(res_text)
- return host_ids
-
- @staticmethod
- def _get_domain_conf(domain):
- code_num = 200
- base_resp = None
- # get the host info in domain
- LOGGER.debug("############## get the host in domain ##############")
- host_ids = Format.get_hostid_list_by_domain(domain)
- if not host_ids:
- code_num = 404
- base_resp = BaseResponse(code_num, "The host currently controlled in the domain is empty." +
- "Please add host information to the domain.")
- return base_resp, code_num, list()
-
- # get the managent conf in domain
- LOGGER.debug("############## get the managent conf in domain ##############")
- man_conf_res_text = Format.get_manageconf_by_domain(domain)
- manage_confs = man_conf_res_text.get("conf_files")
-
- if len(manage_confs) == 0:
- code_num = 404
- base_resp = BaseResponse(code_num, "The configuration is not set in the current domain." +
- "Please add the configuration information first.")
- return base_resp, code_num, list()
- return base_resp, code_num, manage_confs
-
- @staticmethod
- def diff_mangeconf_with_realconf(domain, real_conf_res_text, manage_confs):
- sync_status = SyncStatus(domain_name=domain,
- host_status=[])
- from ragdoll.utils.object_parse import ObjectParse
-
- for d_real_conf in real_conf_res_text:
- host_id = d_real_conf.host_id
- host_sync_status = HostSyncStatus(host_id=host_id,
- sync_status=[])
- d_real_conf_base = d_real_conf.conf_base_infos
- for d_conf in d_real_conf_base:
- directory_conf_is_synced = ConfIsSynced(file_path="", is_synced="", single_conf=[])
- d_conf_path = d_conf.file_path
-
- object_parse = ObjectParse()
- # get the conf type and model
- conf_type, conf_model = Format.get_conf_type_model(d_conf_path, object_parse)
-
- Format.deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced,
- host_sync_status, manage_confs)
-
- if len(directory_conf_is_synced.single_conf) > 0:
- synced_flag = SYNCHRONIZED
- for single_config in directory_conf_is_synced.single_conf:
- if single_config.single_is_synced == SYNCHRONIZED:
- continue
- else:
- synced_flag = NOT_SYNCHRONIZE
- directory_conf_is_synced.is_synced = synced_flag
- host_sync_status.sync_status.append(directory_conf_is_synced)
- sync_status.host_status.append(host_sync_status)
- return sync_status
-
- @staticmethod
- def deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced, host_sync_status,
- manage_confs):
- comp_res = ""
- if d_conf_path in DIRECTORY_FILE_PATH_LIST:
- confContents = json.loads(d_conf.conf_contens)
- directory_conf_contents = ""
- for d_man_conf in manage_confs:
- d_man_conf_path = d_man_conf.get("file_path")
- if d_man_conf_path != d_conf_path:
- # if d_man_conf_path not in DIRECTORY_FILE_PATH_LIST:
- continue
- else:
- directory_conf_is_synced.file_path = d_conf_path
- directory_conf_contents = d_man_conf.get("contents")
-
- directory_conf_contents_dict = json.loads(directory_conf_contents)
-
- for dir_conf_content_key, dir_conf_content_value in directory_conf_contents_dict.items():
- if dir_conf_content_key not in confContents.keys():
- single_conf = SingleConfig(single_file_path=dir_conf_content_key,
- single_is_synced=NOT_SYNCHRONIZE)
- directory_conf_is_synced.single_conf.append(single_conf)
- else:
- dst_conf = confContents.get(dir_conf_content_key)
- comp_res = conf_model.conf_compare(dir_conf_content_value, dst_conf)
- single_conf = SingleConfig(single_file_path=dir_conf_content_key, single_is_synced=comp_res)
- directory_conf_is_synced.single_conf.append(single_conf)
- else:
- for d_man_conf in manage_confs:
- if d_man_conf.get("file_path").split(":")[-1] != d_conf_path:
- continue
- comp_res = conf_model.conf_compare(d_man_conf.get("contents"), d_conf.conf_contens)
- conf_is_synced = ConfIsSynced(file_path=d_conf_path,
- is_synced=comp_res)
- host_sync_status.sync_status.append(conf_is_synced)
-
- @staticmethod
- def get_conf_type_model(d_conf_path, object_parse):
- for dir_path in DIRECTORY_FILE_PATH_LIST:
- if str(d_conf_path).find(dir_path) != -1:
- conf_type = object_parse.get_conf_type_by_conf_path(dir_path)
- conf_model = object_parse.create_conf_model_by_type(conf_type)
- else:
- conf_type = object_parse.get_conf_type_by_conf_path(d_conf_path)
- conf_model = object_parse.create_conf_model_by_type(conf_type)
- return conf_type, conf_model
-
- @staticmethod
- def deal_sync_res(conf_tools, contents, file_path, host_id, host_sync_result, object_parse):
- sync_conf_url = conf_tools.load_url_by_conf().get("sync_url")
- headers = {"Content-Type": "application/json"}
- if file_path in DIRECTORY_FILE_PATH_LIST:
- conf_sync_res_list = []
- for directory_file_path, directory_content in json.loads(contents).items():
- content = object_parse.parse_json_to_conf(directory_file_path, directory_content)
- # Configuration to the host
- data = {"host_id": host_id, "file_path": directory_file_path, "content": content}
- try:
- sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
- except requests.exceptions.RequestException as connect_ex:
- LOGGER.error(f"An error occurred: {connect_ex}")
- codeNum = 500
- codeString = "Failed to sync configuration, please check the interface of config/sync."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
- resp_code = json.loads(sync_response.text).get('code')
- resp = json.loads(sync_response.text).get('data').get('resp')
-
- if resp_code == "200" and resp.get('sync_result') is True:
- conf_sync_res_list.append("SUCCESS")
- else:
- conf_sync_res_list.append("FAILED")
- if "FAILED" in conf_sync_res_list:
- conf_sync_res = ConfSyncedRes(file_path=file_path, result="FAILED")
- else:
- conf_sync_res = ConfSyncedRes(file_path=file_path, result="SUCCESS")
- host_sync_result.sync_result.append(conf_sync_res)
- else:
- content = object_parse.parse_json_to_conf(file_path, contents)
- # Configuration to the host
- data = {"host_id": host_id, "file_path": file_path, "content": content}
- sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
-
- resp_code = json.loads(sync_response.text).get('code')
- resp = json.loads(sync_response.text).get('data').get('resp')
- conf_sync_res = ConfSyncedRes(file_path=file_path,
- result="")
- if resp_code == "200" and resp.get('sync_result') is True:
- conf_sync_res.result = "SUCCESS"
- else:
- conf_sync_res.result = "FAILED"
- host_sync_result.sync_result.append(conf_sync_res)
diff --git a/ragdoll/controllers/host_controller.py b/ragdoll/controllers/host_controller.py
deleted file mode 100644
index 1f491fe..0000000
--- a/ragdoll/controllers/host_controller.py
+++ /dev/null
@@ -1,273 +0,0 @@
-import connexion
-import six
-import os
-import json
-import re
-import ast
-
-from ragdoll.log.log import LOGGER
-from ragdoll.models.base_response import BaseResponse # noqa: E501
-from ragdoll.models.domain_name import DomainName # noqa: E501
-from ragdoll.models.host import Host # noqa: E501
-from ragdoll.models.host_infos import HostInfos # noqa: E501
-from ragdoll import util
-from ragdoll.controllers.format import Format
-from ragdoll.utils.git_tools import GitTools
-
-TARGETDIR = GitTools().target_dir
-
-def add_host_in_domain(body=None): # noqa: E501
- """add host in the configuration domain
-
- add host in the configuration domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: BaseResponse
- """
- if connexion.request.is_json:
- body = HostInfos.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- host_infos = body.host_infos
-
- # check whether host_infos is empty
- if len(host_infos) == 0:
- num = 400
- base_rsp = BaseResponse(num, "Enter host info cannot be empty, please check the host info.")
- return base_rsp, num
-
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domain exists
- isExist = Format.isDomainExist(domain)
- if not isExist:
- num = 400
- base_rsp = BaseResponse(num, "The current domain does not exist, please create the domain first.")
- return base_rsp, num
-
- successHost = []
- failedHost = []
- domainPath = os.path.join(TARGETDIR, domain)
-
- # Check whether the current host exists in the domain.
- for host in host_infos:
- hostPath = os.path.join(domainPath, "hostRecord.txt")
- if os.path.isfile(hostPath):
- isContained = Format.isContainedHostIdInfile(hostPath, host.host_id)
- if isContained:
- LOGGER.debug("##########isContained###############")
- failedHost.append(host.host_id)
- else:
- Format.addHostToFile(hostPath, host)
- successHost.append(host.host_id)
- else:
- Format.addHostToFile(hostPath, host)
- successHost.append(host.host_id)
-
- if len(failedHost) == len(host_infos):
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The all host already exists in the administrative scope of the domain.")
- return base_rsp, codeNum
-
- # Joining together the returned codenum codeMessage
- if len(failedHost) == 0:
- codeNum = 200
- codeString = Format.spliceAllSuccString("host", "add hosts", successHost)
- else:
- codeNum = 202
- codeString = Format.splicErrorString("host", "add hosts", successHost, failedHost)
-
- # git commit maessage
- if len(host_infos) > 0:
- git_tools = GitTools()
- commit_code = git_tools.gitCommit("Add the host in {} domian, ".format(domain) +
- "the host including : {}".format(successHost))
-
- base_rsp = BaseResponse(codeNum, codeString)
-
- return base_rsp, codeNum
-
-
-def delete_host_in_domain(body=None): # noqa: E501
- """delete host in the configuration domain
-
- delete the host in the configuration domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: BaseResponse
- """
- if connexion.request.is_json:
- body = HostInfos.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- hostInfos = body.host_infos
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domain exists
- isExist = Format.isDomainExist(domain)
- if not isExist:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The current domain does not exist, please create the domain first.")
- return base_rsp, codeNum
-
- # Whether the host information added within the current domain is empty while ain exists
- domainPath = os.path.join(TARGETDIR, domain)
- hostPath = os.path.join(domainPath, "hostRecord.txt")
- if not os.path.isfile(hostPath) or (os.path.isfile(hostPath) and os.stat(hostPath).st_size == 0):
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The host information is not set in the current domain." +
- "Please add the host information first")
- return base_rsp, codeNum
-
- # If the input host information is empty, the host information of the whole domain is cleared
- if len(hostInfos) == 0:
- if os.path.isfile(hostPath):
- try:
- os.remove(hostPath)
- except OSError as ex:
- #logging.error("the host delete failed")
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "The host delete failed.")
- return base_rsp, codeNum
- codeNum = 200
- base_rsp = BaseResponse(codeNum, "All hosts are deleted in the current domain.")
- return base_rsp, codeNum
-
- # If the domain exists, check whether the current input parameter host belongs to the corresponding
- # domain. If the host is in the domain, the host is deleted. If the host is no longer in the domain,
- # the host is added to the failure range
- containedInHost = []
- notContainedInHost = []
- os.umask(0o077)
- for hostInfo in hostInfos:
- hostId = hostInfo.host_id
- isContained = False
- try:
- with open(hostPath, 'r') as d_file:
- lines = d_file.readlines()
- with open(hostPath, 'w') as w_file:
- for line in lines:
- line_host_id = json.loads(str(ast.literal_eval(line)).replace("'", "\""))['host_id']
- if hostId != line_host_id:
- w_file.write(line)
- else:
- isContained = True
- except OSError as err:
- LOGGER.error("OS error: {0}".format(err))
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "OS error: {0}".format(err))
- return base_rsp, codeNum
-
- if isContained:
- containedInHost.append(hostId)
- else:
- notContainedInHost.append(hostId)
-
- # All hosts do not belong to the domain
- if len(notContainedInHost) == len(hostInfos):
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "All the host does not belong to the domain control, " +
- "please enter the host again")
- return base_rsp, codeNum
-
- # Some hosts belong to domains, and some hosts do not belong to domains.
- if len(notContainedInHost) == 0:
- codeNum = 200
- codeString = Format.spliceAllSuccString("host", "delete", containedInHost)
- else:
- codeNum = 400
- codeString = Format.splicErrorString("host", "delete", containedInHost, notContainedInHost)
-
- # git commit message
- if len(containedInHost) > 0:
- git_tools = GitTools()
- commit_code = git_tools.gitCommit("Delet the host in {} domian, ".format(domain) +
- "the host including : {}".format(containedInHost))
-
- base_rsp = BaseResponse(codeNum, codeString)
-
- return base_rsp, codeNum
-
-
-def get_host_by_domain_name(body=None): # noqa: E501
- """get host by domainName
-
- get the host information of the configuration domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: List[Host]
- """
- if connexion.request.is_json:
- body = DomainName.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domain exists
- isExist = Format.isDomainExist(domain)
- if not isExist:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The current domain does not exist, please create the domain first.")
- return base_rsp, codeNum
-
- # The domain exists, but the host information is empty
- domainPath = os.path.join(TARGETDIR, domain)
- hostPath = os.path.join(domainPath, "hostRecord.txt")
- if not os.path.isfile(hostPath) or (os.path.isfile(hostPath) and os.stat(hostPath).st_size == 0):
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The host information is not set in the current domain." +
- "Please add the host information first.")
- return base_rsp, codeNum
-
- # The domain exists, and the host information exists and is not empty
- hostlist = []
- LOGGER.debug("hostPath is : {}".format(hostPath))
- try:
- with open(hostPath, 'r') as d_file:
- for line in d_file.readlines():
- json_str = json.loads(line)
- host_json = ast.literal_eval(json_str)
- hostId = host_json["host_id"]
- ip = host_json["ip"]
- ipv6 = host_json["ipv6"]
- host = Host(host_id=hostId, ip=ip, ipv6=ipv6)
- hostlist.append(host)
- except OSError as err:
- LOGGER.error("OS error: {0}".format(err))
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "OS error: {0}".format(err))
- return base_rsp, codeNum
-
- # Joining together the returned codenum codeMessag
- if len(hostlist) == 0:
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "Some unknown problems.")
- return base_rsp, codeNum
- else:
- LOGGER.debug("hostlist is : {}".format(hostlist))
- codeNum = 200
- base_rsp = BaseResponse(codeNum, "Get host info in the domain succeccfully")
-
- return hostlist
diff --git a/ragdoll/controllers/management_controller.py b/ragdoll/controllers/management_controller.py
deleted file mode 100644
index 101802a..0000000
--- a/ragdoll/controllers/management_controller.py
+++ /dev/null
@@ -1,601 +0,0 @@
-import io
-
-import connexion
-import os
-import json
-import requests
-
-from ragdoll.const.conf_handler_const import DIRECTORY_FILE_PATH_LIST
-from ragdoll.log.log import LOGGER
-from ragdoll.models.base_response import BaseResponse # noqa: E501
-from ragdoll.models.confs import Confs
-from ragdoll.models.conf_file import ConfFile
-from ragdoll.models.conf_files import ConfFiles
-from ragdoll.models.conf_base_info import ConfBaseInfo
-from ragdoll.models.excepted_conf_info import ExceptedConfInfo
-from ragdoll.models.domain_name import DomainName # noqa: E501
-from ragdoll.models.manage_confs import ManageConfs
-from ragdoll.controllers.format import Format
-from ragdoll.utils.conf_tools import ConfTools
-from ragdoll.utils.git_tools import GitTools
-from ragdoll.utils.yang_module import YangModule
-from ragdoll.utils.object_parse import ObjectParse
-
-TARGETDIR = GitTools().target_dir
-
-
-def add_management_confs_in_domain(body=None): # noqa: E501
- """add management configuration items and expected values in the domain
-
- add management configuration items and expected values in the domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: BaseResponse
- """
- if connexion.request.is_json:
- body = Confs.from_dict(connexion.request.get_json()) # noqa: E501
-
- domain = body.domain_name
- conf_files = body.conf_files
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domain exists
- isExist = Format.isDomainExist(domain)
- if not isExist:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The current domain does not exist, please create the domain first.")
- return base_rsp, codeNum
-
- # check whether the conf_files is null
- if len(conf_files) == 0:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The path of file can't be empty")
- return base_rsp, codeNum
-
- # Check all conf_files and check whether contents is empty. If so, the query actual configuration
- # interface is called. If not, the conversion is performed directly.
- # Content and host_id can be set to either content or host_id.
- # If they are both empty, invalid input is returned.
- contents_list_null = []
- contents_list_non_null = []
- for d_conf in conf_files:
- if d_conf.contents:
- contents_list_non_null.append(d_conf)
- elif d_conf.host_id:
- contents_list_null.append(d_conf)
- else:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The input parameters are not compliant, " +
- "please check the input parameters.")
- return base_rsp, codeNum
-
- successConf = []
- failedConf = []
- object_parse = ObjectParse()
- yang_module = YangModule()
- conf_tools = ConfTools()
- # Content is not an empty scene and is directly analyed and parsed
- if len(contents_list_non_null) > 0:
- for d_conf in contents_list_non_null:
- if not d_conf.contents.strip():
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The input parameters are not compliant, " +
- "please check the input parameters.")
- return base_rsp, codeNum
- content_string = object_parse.parse_conf_to_json(d_conf.file_path, d_conf.contents)
- if not content_string or not json.loads(content_string):
- failedConf.append(d_conf.file_path)
- else:
- # create the file and expected value in domain
- feature_path = yang_module.get_feature_by_real_path(domain, d_conf.file_path)
- result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
- if result:
- successConf.append(d_conf.file_path)
- else:
- failedConf.append(d_conf.file_path)
-
- # content is empty
- if len(contents_list_null) > 0:
- # get the real conf in host
- LOGGER.debug("############## get the real conf in host ##############")
- get_real_conf_body = {}
- get_real_conf_body_info = []
- LOGGER.debug("contents_list_null is : {}".format(contents_list_null))
- exist_host = dict()
- for d_conf in contents_list_null:
- host_id = int(d_conf.host_id)
- if host_id in exist_host:
- if d_conf.file_path not in DIRECTORY_FILE_PATH_LIST:
- exist_host[host_id].append(d_conf.file_path)
- else:
- codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id)
- if len(file_paths) == 0:
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
- else:
- for file_path in file_paths:
- exist_host[host_id].append(file_path)
- else:
- if d_conf.file_path not in DIRECTORY_FILE_PATH_LIST:
- conf_list = list()
- conf_list.append(d_conf.file_path)
- exist_host[host_id] = conf_list
- else:
- codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id)
- if len(file_paths) == 0:
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
- else:
- exist_host[host_id] = file_paths
-
- for k, v in exist_host.items():
- confs = dict()
- confs["host_id"] = k
- confs["config_list"] = v
- get_real_conf_body_info.append(confs)
-
- get_real_conf_body["infos"] = get_real_conf_body_info
-
- url = conf_tools.load_url_by_conf().get("collect_url")
- headers = {"Content-Type": "application/json"}
- try:
- response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
- except requests.exceptions.RequestException as connect_ex:
- LOGGER.error(f"An error occurred: {connect_ex}")
- codeNum = 500
- codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
-
- response_code = json.loads(response.text).get("code")
- if response_code == None:
- codeNum = 500
- codeString = "Failed to obtain the actual configuration, please check the interface of conf/collect."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
-
- if (response_code != "200") and (response_code != "206"):
- codeNum = 500
- codeString = "Failed to obtain the actual configuration, please check the file exists."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
-
- reps = json.loads(response.text).get("data")
- if not reps or len(reps) == 0:
- codeNum = 500
- codeString = "Failed to obtain the actual configuration, please check the host info for conf/collect."
- base_rsp = BaseResponse(codeNum, codeString)
- return base_rsp, codeNum
-
- directory_d_file = []
- directory_d_files = {}
- for d_res in reps:
- failedlist = d_res.get("fail_files")
- if len(failedlist) > 0:
- for d_failed in failedlist:
- failedConf.append(d_failed)
- continue
- d_res_infos = d_res.get("infos")
- for d_file in d_res_infos:
- for dir_path in DIRECTORY_FILE_PATH_LIST:
- if str(d_file.get("path")).find(dir_path) == -1:
- file_path = d_file.get("path")
- content = d_file.get("content")
- content_string = object_parse.parse_conf_to_json(file_path, content)
- # create the file and expected value in domain
- if not content_string or not json.loads(content_string):
- failedConf.append(file_path)
- else:
- feature_path = yang_module.get_feature_by_real_path(domain, file_path)
- result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
- if result:
- successConf.append(file_path)
- else:
- failedConf.append(file_path)
- else:
- directory_d_file.append(d_file)
- directory_d_files[dir_path] = directory_d_file
- if len(directory_d_files) > 0:
- for dir_path, directory_d_file in directory_d_files.items():
- content_string = object_parse.parse_dir_conf_to_json(dir_path, directory_d_file)
- if not content_string or not json.loads(content_string):
- failedConf.append(dir_path)
- else:
- feature_path = yang_module.get_feature_by_real_path(domain, dir_path)
- result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
- if result:
- successConf.append(dir_path)
- else:
- failedConf.append(dir_path)
- # git commit message
- if len(successConf) > 0:
- git_tools = GitTools()
- succ_conf = ""
- for d_conf in successConf:
- succ_conf = succ_conf + d_conf + " "
- commit_code = git_tools.gitCommit("Add the conf in {} domian, ".format(domain) +
- "the path including : {}".format(succ_conf))
-
- # Joinin together the returned codenum and codeMessage
- LOGGER.debug("*******************************************")
- LOGGER.debug("successConf is : {}".format(successConf))
- LOGGER.debug("failedConf is : {}".format(failedConf))
- if len(successConf) == 0:
- codeNum = 400
- codeString = "All configurations failed to be added."
- elif len(failedConf) > 0:
- codeNum = 206
- codeString = Format.splicErrorString("confs", "add management conf", successConf, failedConf)
- else:
- codeNum = 200
- codeString = Format.spliceAllSuccString("confs", "add management conf", successConf)
-
- base_rsp = BaseResponse(codeNum, codeString)
-
- return base_rsp, codeNum
-
-
-def upload_management_confs_in_domain(): # noqa: E501
- """upload management configuration items and expected values in the domain
-
- upload management configuration items and expected values in the domain # noqa: E501
-
- :param body: file info
- :type body: FileStorage
-
- :rtype: BaseResponse
- """
- file = connexion.request.files['file']
- filePath = connexion.request.form.get("filePath")
- domainName = connexion.request.form.get("domainName")
-
- # check the input domainName
- checkRes = Format.domainCheck(domainName)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- # check whether the domainName exists
- isExist = Format.isDomainExist(domainName)
- if not isExist:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The current domain does not exist, please create the domain first.")
- return base_rsp, codeNum
-
- # check whether the file is null
- if file is None:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The file of conf can't be empty")
- return base_rsp, codeNum
-
- # check whether the conf is null
- if filePath is None:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The conf body of conf can't be empty")
- return base_rsp, codeNum
-
- successConf = []
- failedConf = []
- object_parse = ObjectParse()
- yang_module = YangModule()
- conf_tools = ConfTools()
-
- # content is file
- if file:
- if not filePath.strip():
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The input parameters are not compliant, " +
- "please check the input parameters.")
- return base_rsp, codeNum
- try:
- file_bytes = file.read()
- if len(file_bytes) > 1024 * 1024:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The size of the uploaded file must be less than 1MB")
- return base_rsp, codeNum
- byte_stream = io.BytesIO(file_bytes)
-
- # Read the contents of the byte stream
- line_content = byte_stream.read().decode("UTF-8")
- except OSError as err:
- LOGGER.error("OS error: {}".format(err))
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "OS error: {0}".format(err))
- return base_rsp, codeNum
- except Exception as ex:
- LOGGER.error("OS error: {}".format(ex))
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "read file error: {0}".format(ex))
- return base_rsp, codeNum
-
- content_string = object_parse.parse_conf_to_json(filePath, line_content)
- if not content_string or not json.loads(content_string):
- failedConf.append(filePath)
- else:
- # create the file and expected value in domain
- feature_path = yang_module.get_feature_by_real_path(domainName, filePath)
- result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
- if result:
- successConf.append(filePath)
- else:
- failedConf.append(filePath)
-
- # git commit message
- if len(successConf) > 0:
- git_tools = GitTools()
- succ_conf = ""
- for d_conf in successConf:
- succ_conf = succ_conf + d_conf + " "
- commit_code = git_tools.gitCommit("Add the conf in {} domian, ".format(domainName) +
- "the path including : {}".format(succ_conf))
-
- # Joinin together the returned codenum and codeMessage
- LOGGER.debug("*******************************************")
- LOGGER.debug("successConf is : {}".format(successConf))
- LOGGER.debug("failedConf is : {}".format(failedConf))
- if len(successConf) == 0:
- codeNum = 400
- codeString = "All configurations failed to be added."
- elif len(failedConf) > 0:
- codeNum = 206
- codeString = Format.splicErrorString("confs", "add management conf", successConf, failedConf)
- else:
- codeNum = 200
- codeString = Format.spliceAllSuccString("confs", "add management conf", successConf)
-
- base_rsp = BaseResponse(codeNum, codeString)
-
- return base_rsp, codeNum
-
-
-def delete_management_confs_in_domain(body=None): # noqa: E501
- """delete management configuration items and expected values in the domain
-
- delete management configuration items and expected values in the domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: BaseResponse
- """
- if connexion.request.is_json:
- body = ManageConfs.from_dict(connexion.request.get_json()) # noqa: E501
-
- # check whether the domain exists
- domain = body.domain_name
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- isExist = Format.isDomainExist(domain)
- if not isExist:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The current domain does not exist")
- return base_rsp, codeNum
-
- # Check whether path is null in advance
- conf_files = body.conf_files
- if len(conf_files) == 0:
- codeNum = 400
- base_rsp = BaseResponse(codeNum, "The conf_files path can't be empty")
- return base_rsp, codeNum
-
- # Conf to record successes and failures
- successConf = []
- failedConf = []
-
- # Check whether path exists in the domain. There are two possible paths :
- # (1)xpath path
- # (2) configuration item
- domain_path = os.path.join(TARGETDIR, domain)
- LOGGER.debug("conf_files is : {}".format(conf_files))
-
- yang_modules = YangModule()
- module_lists = yang_modules.module_list
- if len(module_lists) == 0:
- base_rsp = BaseResponse(400, "The yang module does not exist")
- return base_rsp
-
- file_path_list = yang_modules.getFilePathInModdule(module_lists)
- LOGGER.debug("module_lists is : {}".format(module_lists))
- for conf in conf_files:
- module = yang_modules.getModuleByFilePath(conf.file_path)
- features = yang_modules.getFeatureInModule(module)
- features_path = os.path.join(domain_path, "/".join(features))
- LOGGER.debug("domain_path is : {}".format(domain_path))
-
- if os.path.isfile(features_path):
- LOGGER.debug("it's a normal file")
- try:
- os.remove(features_path)
- except OSError as ex:
- # logging.error("the path remove failed")
- break
- successConf.append(conf.file_path)
- else:
- failedConf.append(conf.file_path)
-
- # git commit message
- if len(successConf) > 0:
- git_tools = GitTools()
- succ_conf = ""
- for d_conf in successConf:
- succ_conf = succ_conf + d_conf + " "
- commit_code = git_tools.gitCommit("delete the conf in {} domian, ".format(domain) +
- "the path including : {}".format(succ_conf))
-
- # Joinin together the returned codenum and codeMessage
- if len(failedConf) == 0:
- codeNum = 200
- codeString = Format.spliceAllSuccString("confs", "delete management conf", successConf)
- else:
- codeNum = 400
- codeString = Format.splicErrorString("confs", "delete management conf", successConf, failedConf)
- codeString += "\n The reason for the failure is: these paths do not exist."
- base_rsp = BaseResponse(codeNum, codeString)
- # logging.info('delete management conf in {domain}'.format(domain=domain))
-
- return base_rsp, codeNum
-
-
-def get_management_confs_in_domain(body=None): # noqa: E501
- """get management configuration items and expected values in the domain
-
- get management configuration items and expected values in the domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: ConfFiles
- """
- if connexion.request.is_json:
- body = DomainName.from_dict(connexion.request.get_json()) # noqa: E501
-
- # Check whether the domain exists
- domain = body.domain_name
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- isExist = Format.isDomainExist(domain)
- if not isExist:
- base_rsp = BaseResponse(400, "The current domain does not exist")
- return base_rsp, 400
-
- # The parameters of the initial return value assignment
- expected_conf_lists = ConfFiles(domain_name=domain,
- conf_files=[])
-
- # get the path in domain
- domainPath = os.path.join(TARGETDIR, domain)
-
- # When there is a file path is the path of judgment for the configuration items
- for root, dirs, files in os.walk(domainPath):
- if len(files) > 0 and len(root.split('/')) > 3:
- if "hostRecord.txt" in files:
- continue
- for d_file in files:
- d_file_path = os.path.join(root, d_file)
- contents = Format.get_file_content_by_read(d_file_path)
- feature = os.path.join(root.split('/')[-1], d_file)
- yang_modules = YangModule()
- d_module = yang_modules.getModuleByFeature(feature)
- file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
- file_path = file_lists.get(d_module.name()).split(":")[-1]
-
- conf = ConfFile(file_path=file_path, contents=contents)
- expected_conf_lists.conf_files.append(conf)
- LOGGER.debug("expected_conf_lists is :{}".format(expected_conf_lists))
-
- if len(expected_conf_lists.domain_name) > 0:
- base_rsp = BaseResponse(200, "Get management configuration items and expected " +
- "values in the domain succeccfully")
- else:
- base_rsp = BaseResponse(400, "The file is Null in this domain")
-
- return expected_conf_lists
-
-
-def query_changelog_of_management_confs_in_domain(body=None): # noqa: E501
- """query the change log of management config in domain
-
- query the change log of management config in domain # noqa: E501
-
- :param body: domain info
- :type body: dict | bytes
-
- :rtype: ExceptedConfInfo
- """
- if connexion.request.is_json:
- body = ManageConfs.from_dict(connexion.request.get_json()) # noqa: E501
-
- # check whether the domain exists
- domain = body.domain_name
- LOGGER.debug("body is : {}".format(body))
-
- # check the input domain
- checkRes = Format.domainCheck(domain)
- if not checkRes:
- num = 400
- base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
- return base_rsp, num
-
- isExist = Format.isDomainExist(domain)
- if not isExist:
- base_rsp = BaseResponse(400, "The current domain does not exist")
- return base_rsp
-
- # Check whether path is empty in advance. If path is empty, the configuration in the
- # entire domain is queried. Otherwise, the historical records of the specified file are queried.
- conf_files = body.conf_files
- LOGGER.debug("conf_files is : {}".format(conf_files))
- LOGGER.debug("conf_files's type is : {}".format(type(conf_files)))
- conf_files_list = []
- if conf_files:
- for d_conf in conf_files:
- LOGGER.debug("d_conf is : {}".format(d_conf))
- LOGGER.debug("d_conf type is : {}".format(type(d_conf)))
- conf_files_list.append(d_conf.file_path)
- success_conf = []
- failed_conf = []
- domain_path = os.path.join(TARGETDIR, domain)
- expected_conf_lists = ExceptedConfInfo(domain_name=domain,
- conf_base_infos=[])
- yang_modules = YangModule()
- for root, dirs, files in os.walk(domain_path):
- conf_base_infos = []
- if len(files) > 0 and len(root.split('/')) > 3:
- if "hostRecord.txt" in files:
- continue
- confPath = root.split('/', 3)[3]
- for d_file in files:
- feature = os.path.join(root.split('/')[-1], d_file)
- d_module = yang_modules.getModuleByFeature(feature)
- file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
- file_path = file_lists.get(d_module.name()).split(":")[-1]
- if (conf_files_list) and (file_path not in conf_files_list):
- continue
- d_file_path = os.path.join(root, d_file)
- expectedValue = Format.get_file_content_by_read(d_file_path)
- git_tools = GitTools()
- gitMessage = git_tools.getLogMessageByPath(d_file_path)
- if gitMessage and expectedValue:
- success_conf.append(file_path)
- else:
- failed_conf.append(file_path)
- conf_base_info = ConfBaseInfo(file_path=file_path,
- expected_contents=expectedValue,
- change_log=gitMessage)
- expected_conf_lists.conf_base_infos.append(conf_base_info)
-
- LOGGER.debug("########################## expetedConfInfo ####################")
- LOGGER.debug("expected_conf_lists is : {}".format(expected_conf_lists))
- LOGGER.debug("########################## expetedConfInfo end ####################")
-
- if len(success_conf) == 0:
- codeNum = 500
- base_rsp = BaseResponse(codeNum, "Faled to uery the changelog of the configure in the domain.")
- return base_rsp, codeNum
- if len(failed_conf) > 0:
- codeNum = 400
- else:
- codeNum = 200
-
- return expected_conf_lists, codeNum
diff --git a/ragdoll/demo/conf_manage.py b/ragdoll/demo/conf_manage.py
index d4f5d05..fb5fd78 100644
--- a/ragdoll/demo/conf_manage.py
+++ b/ragdoll/demo/conf_manage.py
@@ -2,7 +2,6 @@ import requests
import json
from ragdoll.log.log import LOGGER
-from ragdoll.models.domain import Domain
from ragdoll.models.domain_name import DomainName
from ragdoll.models.conf import Conf
from ragdoll.models.confs import Confs
@@ -19,7 +18,7 @@ class ConfManage(object):
contents_list = args.contents
host_id_list = args.host_id
if not domain_name or not file_path_list:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for conf_add!\n")
return
conf_file = []
@@ -32,7 +31,7 @@ class ConfManage(object):
conf = Conf(file_path=file_path_list[i], host_id=host_id_list[i])
conf_file.append(conf)
else:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error as invalid param!\n")
return
data = Confs(domain_name=domain_name, conf_files=conf_file)
@@ -46,7 +45,7 @@ class ConfManage(object):
def conf_query(self, args):
domain_name = args.domain_name
if not domain_name:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for conf_query!\n")
return
data = DomainName(domain_name=domain_name)
@@ -64,7 +63,7 @@ class ConfManage(object):
domain_name = args.domain_name
file_path_list = args.file_path
if not domain_name or not file_path_list:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for conf_delete!\n")
return
conf_files = []
@@ -87,7 +86,7 @@ class ConfManage(object):
domain_name = args.domain_name
file_path_list = args.file_path
if not domain_name or not file_path_list:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for conf_changelog!\n")
return
conf_files = []
diff --git a/ragdoll/demo/conf_sync.py b/ragdoll/demo/conf_sync.py
index 42cefdf..ec293b6 100644
--- a/ragdoll/demo/conf_sync.py
+++ b/ragdoll/demo/conf_sync.py
@@ -2,7 +2,6 @@ import requests
import json
from ragdoll.log.log import LOGGER
-from ragdoll.models.domain import Domain
from ragdoll.models.domain_name import DomainName
from ragdoll.models.conf_host import ConfHost
from ragdoll.demo.conf import server_port
@@ -14,7 +13,7 @@ class ConfSync(object):
domain_name = args.domain_name
host_id_list = args.host_id
if not domain_name or not host_id_list:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for sync_conf!\n")
return
host_ids = []
@@ -34,7 +33,7 @@ class ConfSync(object):
def sync_status(self, args):
domain_name = args.domain_name
if not domain_name:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for sync_status!\n")
return
data = DomainName(domain_name=domain_name)
@@ -52,7 +51,7 @@ class ConfSync(object):
domain_name = args.domain_name
host_id_list = args.host_id
if not domain_name or not host_id_list:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for query_real_conf!\n")
return
host_ids = []
diff --git a/ragdoll/demo/demo_server.py b/ragdoll/demo/demo_server.py
index 9ac6c08..c20a7d9 100644
--- a/ragdoll/demo/demo_server.py
+++ b/ragdoll/demo/demo_server.py
@@ -1,6 +1,5 @@
import connexion
-from ragdoll.models.conf_host import ConfHost
from ragdoll.log.log import LOGGER
def _read_file(path):
@@ -88,4 +87,4 @@ def sync_conf(body=None):
rsp = {"code": 500,
"msg": "Failed to synchronize the configuration. ERROR:{}".format(error_info),
"status": False}
- return rsp
\ No newline at end of file
+ return rsp
diff --git a/ragdoll/demo/domain.py b/ragdoll/demo/domain.py
index 6574fe2..5e65a72 100644
--- a/ragdoll/demo/domain.py
+++ b/ragdoll/demo/domain.py
@@ -12,7 +12,7 @@ class DomainManage(object):
domain_name_list = args.domain_name
priority_list = args.priority
if len(domain_name_list) != len(priority_list):
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for domain_create!\n")
return
data = []
diff --git a/ragdoll/demo/host.py b/ragdoll/demo/host.py
index 4f70980..9e4ab49 100644
--- a/ragdoll/demo/host.py
+++ b/ragdoll/demo/host.py
@@ -2,7 +2,6 @@ import requests
import json
from ragdoll.log.log import LOGGER
-from ragdoll.models.domain import Domain
from ragdoll.models.domain_name import DomainName
from ragdoll.models.host import Host
from ragdoll.models.host_infos import HostInfos
@@ -17,7 +16,7 @@ class HostManage(object):
ipv6_list = args.ipv6
if len(host_id_list) != len(ip_list):
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for adding host!\n")
return
host_infos = []
@@ -35,7 +34,7 @@ class HostManage(object):
def host_query(self, args):
domain_name = args.domain_name
if not domain_name:
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for querying host!\n")
return
data = DomainName(domain_name=domain_name)
@@ -44,7 +43,7 @@ class HostManage(object):
response = requests.post(url, data=json.dumps(data, cls=JSONEncoder), headers=headers)
if response.status_code != 200:
- LOGGER.warning(json.loads(response.text).get("msg"))
+ LOGGER.error(json.loads(response.text).get("msg"))
else:
LOGGER.debug("The following host are managed in domain_name:{}.".format(json.loads(response.text)))
return
@@ -56,7 +55,7 @@ class HostManage(object):
ipv6_list = args.ipv6
if len(host_id_list) != len(ip_list):
- LOGGER.error("ERROR: Input error!\n")
+ LOGGER.error("ERROR: Input error for deleting host!\n")
return
host_infos = []
@@ -69,4 +68,4 @@ class HostManage(object):
headers = {"Content-Type": "application/json"}
response = requests.delete(url, data=json.dumps(data, cls=JSONEncoder), headers=headers)
LOGGER.debug(json.loads(response.text).get("msg"))
- return
\ No newline at end of file
+ return
diff --git a/ragdoll/domain_conf_manage/__init__.py b/ragdoll/domain_conf_manage/__init__.py
new file mode 100644
index 0000000..0f38672
--- /dev/null
+++ b/ragdoll/domain_conf_manage/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py.py
+@Time: 2024/3/8 11:39
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/domain_conf_manage/view.py b/ragdoll/domain_conf_manage/view.py
new file mode 100644
index 0000000..81015fb
--- /dev/null
+++ b/ragdoll/domain_conf_manage/view.py
@@ -0,0 +1,601 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: view.py
+@Time: 2024/3/8 11:40
+@Author: JiaoSiMao
+Description:
+"""
+import io
+import json
+import os
+
+import connexion
+import requests
+from flask import request
+from vulcanus.restful.resp.state import PARAM_ERROR, SUCCEED, PARTIAL_SUCCEED, SERVER_ERROR
+from vulcanus.restful.response import BaseResponse
+
+from ragdoll.conf.constant import TARGETDIR
+from ragdoll.const.conf_handler_const import DIRECTORY_FILE_PATH_LIST
+from ragdoll.function.verify.domain_conf import AddManagementConfsSchema, \
+ DeleteManagementConfsSchema, GetManagementConfsSchema, QueryChangelogSchema
+from ragdoll.log.log import LOGGER
+from ragdoll.models import ConfFiles, ExceptedConfInfo
+from ragdoll.utils.conf_tools import ConfTools
+from ragdoll.utils.format import Format
+from ragdoll.utils.git_tools import GitTools
+from ragdoll.utils.object_parse import ObjectParse
+from ragdoll.utils.yang_module import YangModule
+
+
+class AddManagementConfsInDomain(BaseResponse):
+ @BaseResponse.handle(schema=AddManagementConfsSchema, token=True)
+ def post(self, **params):
+ """add management configuration items and expected values in the domain
+
+ add management configuration items and expected values in the domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: BaseResponse
+ """
+ global file_paths, reps
+ accessToken = request.headers.get("access_token")
+ domain = params.get("domainName")
+ conf_files = params.get("confFiles")
+
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Failed to verify the input parameter, please check the input "
+ "parameters.")
+
+ # check whether the domain exists
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The current domain does not exist, please create the domain "
+ "first.")
+
+ # check whether the conf_files is null
+ if len(conf_files) == 0:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The path of file can't be empty")
+
+ # Check all conf_files and check whether contents is empty. If so, the query actual configuration
+ # interface is called. If not, the conversion is performed directly.
+ # Content and host_id can be set to either content or host_id.
+ # If they are both empty, invalid input is returned.
+ contents_list_null = []
+ contents_list_non_null = []
+ for d_conf in conf_files:
+ if "contents" in d_conf:
+ contents_list_non_null.append(d_conf)
+ elif d_conf["hostId"]:
+ contents_list_null.append(d_conf)
+ else:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The input parameters are not compliant, please check the "
+ "input parameters.")
+
+ successConf = []
+ failedConf = []
+ object_parse = ObjectParse()
+ yang_module = YangModule()
+ conf_tools = ConfTools()
+ # Content is not an empty scene and is directly analyzed and parsed
+ if len(contents_list_non_null) > 0:
+ for d_conf in contents_list_non_null:
+ if not d_conf["contents"].strip():
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The input parameters are not compliant, please check "
+ "the input parameters.")
+ content_string = object_parse.parse_conf_to_json(d_conf["filePath"], d_conf["contents"])
+ if not content_string or not json.loads(content_string):
+ failedConf.append(d_conf["filePath"])
+ else:
+ # create the file and expected value in domain
+ feature_path = yang_module.get_feature_by_real_path(domain, d_conf["filePath"])
+ result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
+ if result:
+ successConf.append(d_conf["filePath"])
+ else:
+ failedConf.append(d_conf["filePath"])
+
+ # content is empty
+ if len(contents_list_null) > 0:
+ # get the real conf in host
+ get_real_conf_body = {}
+ get_real_conf_body_info = []
+ LOGGER.debug("contents_list_null is : {}".format(contents_list_null))
+ exist_host = dict()
+ for d_conf in contents_list_null:
+ host_id = int(d_conf["hostId"])
+ if host_id in exist_host:
+ if d_conf["filePath"] not in DIRECTORY_FILE_PATH_LIST:
+ exist_host[host_id].append(d_conf["filePath"])
+ else:
+ codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id, accessToken)
+ if len(file_paths) == 0:
+ return self.response(code=codeNum, message=codeString)
+ else:
+ for file_path in file_paths:
+ exist_host[host_id].append(file_path)
+ else:
+ if d_conf["filePath"] not in DIRECTORY_FILE_PATH_LIST:
+ conf_list = list()
+ conf_list.append(d_conf["filePath"])
+ exist_host[host_id] = conf_list
+ else:
+ codeNum, codeString, file_paths = object_parse.get_directory_files(d_conf, host_id, accessToken)
+ if len(file_paths) == 0:
+ return self.response(code=codeNum, message=codeString)
+ else:
+ exist_host[host_id] = file_paths
+
+ for k, v in exist_host.items():
+ confs = dict()
+ confs["host_id"] = k
+ confs["config_list"] = v
+ get_real_conf_body_info.append(confs)
+
+ get_real_conf_body["infos"] = get_real_conf_body_info
+
+ url = conf_tools.load_url_by_conf().get("collect_url")
+ headers = {"Content-Type": "application/json", "access_token": accessToken}
+ try:
+ response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ codeNum = PARAM_ERROR
+ codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
+ return self.response(code=codeNum, message=codeString)
+
+ response_code = json.loads(response.text).get("code")
+ if response_code == None:
+ codeNum = PARAM_ERROR
+ codeString = "Failed to obtain the actual configuration, please check the interface of conf/collect."
+ return self.response(code=codeNum, message=codeString)
+
+ if (response_code != "200") and (response_code != "206"):
+ codeNum = PARAM_ERROR
+ codeString = "Failed to obtain the actual configuration, please check the file exists."
+ return self.response(code=codeNum, message=codeString)
+
+ reps = json.loads(response.text).get("data")
+ if not reps or len(reps) == 0:
+ codeNum = PARAM_ERROR
+ codeString = "Failed to obtain the actual configuration, please check the host info for conf/collect."
+ return self.response(code=codeNum, message=codeString)
+
+ directory_d_file = []
+ directory_d_files = {}
+ for d_res in reps:
+ failedlist = d_res.get("fail_files")
+ if len(failedlist) > 0:
+ for d_failed in failedlist:
+ failedConf.append(d_failed)
+ continue
+ d_res_infos = d_res.get("infos")
+ for d_file in d_res_infos:
+ for dir_path in DIRECTORY_FILE_PATH_LIST:
+ if str(d_file.get("path")).find(dir_path) == -1:
+ file_path = d_file.get("path")
+ content = d_file.get("content")
+ content_string = object_parse.parse_conf_to_json(file_path, content)
+ # create the file and expected value in domain
+ if not content_string or not json.loads(content_string):
+ failedConf.append(file_path)
+ else:
+ feature_path = yang_module.get_feature_by_real_path(domain, file_path)
+ result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
+ if result:
+ successConf.append(file_path)
+ else:
+ failedConf.append(file_path)
+ else:
+ directory_d_file.append(d_file)
+ directory_d_files[dir_path] = directory_d_file
+ if len(directory_d_files) > 0:
+ for dir_path, directory_d_file in directory_d_files.items():
+ content_string = object_parse.parse_dir_conf_to_json(dir_path, directory_d_file)
+ if not content_string or not json.loads(content_string):
+ failedConf.append(dir_path)
+ else:
+ feature_path = yang_module.get_feature_by_real_path(domain, dir_path)
+ result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
+ if result:
+ successConf.append(dir_path)
+ else:
+ failedConf.append(dir_path)
+ # git commit message
+ if len(successConf) > 0:
+ git_tools = GitTools()
+ succ_conf = ""
+ for d_conf in successConf:
+ succ_conf = succ_conf + d_conf + " "
+ commit_code = git_tools.gitCommit("Add the conf in {} domain, ".format(domain) +
+ "the path including : {}".format(succ_conf))
+
+ # Joining together the returned codenum and codeMessage
+ LOGGER.debug("successConf is : {}".format(successConf))
+ LOGGER.debug("failedConf is : {}".format(failedConf))
+ if len(successConf) == 0:
+ codeNum = PARAM_ERROR
+ codeString = "All configurations failed to be added."
+ elif len(failedConf) > 0:
+ codeNum = PARTIAL_SUCCEED
+ codeString = Format.splicErrorString("confs", "add management conf", successConf, failedConf)
+ else:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("confs", "add management conf", successConf)
+
+ # 根据agith_success_conf 更新agith的配置
+ # 获取domain最新的有哪些配置 [1]
+ conf_files_list = Format.get_conf_files_list(domain, accessToken)
+ if len(conf_files_list) > 0:
+ Format.update_agith(accessToken, conf_files_list, domain)
+ return self.response(code=codeNum, message=codeString)
+
+
+class UploadManagementConfsInDomain(BaseResponse):
+ @BaseResponse.handle(token=True)
+ def post(self, **params):
+ """upload management configuration items and expected values in the domain
+
+ upload management configuration items and expected values in the domain # noqa: E501
+
+ :param body: file info
+ :type body: FileStorage
+
+ :rtype: BaseResponse
+ """
+ access_token = connexion.request.headers.get("access_token")
+ file = connexion.request.files['file']
+
+ filePath = connexion.request.form.get("filePath")
+ domainName = connexion.request.form.get("domainName")
+ # check the input domainName
+ checkRes = Format.domainCheck(domainName)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Failed to verify the input parameter, please check the input "
+ "parameters.")
+
+ # check whether the domainName exists
+ isExist = Format.isDomainExist(domainName)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The current domain does not exist, please create the domain "
+ "first.")
+
+ # check whether the file is null
+ if file is None:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The file of conf can't be empty")
+
+ # check whether the conf is null
+ if filePath is None:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The conf body of conf can't be empty")
+
+ successConf = []
+ failedConf = []
+ object_parse = ObjectParse()
+ yang_module = YangModule()
+ conf_tools = ConfTools()
+
+ # content is file
+ if file:
+ if not filePath.strip():
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The input parameters are not compliant, please check the "
+ "input parameters.")
+ try:
+ file_bytes = file.read()
+ if len(file_bytes) > 1024 * 1024:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The size of the uploaded file must be less than 1MB")
+ byte_stream = io.BytesIO(file_bytes)
+
+ # Read the contents of the byte stream
+ line_content = byte_stream.read().decode("UTF-8")
+ except OSError as err:
+ LOGGER.error("OS error: {}".format(err))
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="OS error: {0}".format(err))
+ except Exception as ex:
+ LOGGER.error("Other error: {}".format(ex))
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="read file error: {0}".format(ex))
+
+ content_string = object_parse.parse_conf_to_json(filePath, line_content)
+ if not content_string or not json.loads(content_string):
+ failedConf.append(filePath)
+ else:
+ # create the file and expected value in domain
+ feature_path = yang_module.get_feature_by_real_path(domainName, filePath)
+ result = conf_tools.wirteFileInPath(feature_path, content_string + '\n')
+ if result:
+ successConf.append(filePath)
+ else:
+ failedConf.append(filePath)
+
+ # git commit message
+ if len(successConf) > 0:
+ git_tools = GitTools()
+ succ_conf = ""
+ for d_conf in successConf:
+ succ_conf = succ_conf + d_conf + " "
+ commit_code = git_tools.gitCommit("Add the conf in {} domain, ".format(domainName) +
+ "the path including : {}".format(succ_conf))
+
+ # Joining together the returned codenum and codeMessage
+ LOGGER.debug("successConf is : {}".format(successConf))
+ LOGGER.debug("failedConf is : {}".format(failedConf))
+ if len(successConf) == 0:
+ codeNum = PARAM_ERROR
+ codeString = "All configurations failed to be added."
+ elif len(failedConf) > 0:
+ codeNum = PARTIAL_SUCCEED
+ codeString = Format.splicErrorString("confs", "add management conf", successConf, failedConf)
+ else:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("confs", "add management conf", successConf)
+
+ # 获取domain最新的有哪些配置 [1]
+ conf_files_list = Format.get_conf_files_list(domainName, access_token)
+ if len(conf_files_list) > 0:
+ Format.update_agith(access_token, conf_files_list, domainName)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class DeleteManagementConfsInDomain(BaseResponse):
+ @BaseResponse.handle(schema=DeleteManagementConfsSchema, token=True)
+ def delete(self, **params):
+ """delete management configuration items and expected values in the domain
+
+ delete management configuration items and expected values in the domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: BaseResponse
+ """
+ access_token = connexion.request.headers.get("access_token")
+ # check whether the domain exists
+ domain = params.get("domainName")
+
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Failed to verify the input parameter, please check the input "
+ "parameters.")
+
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The current domain does not exist")
+
+ # Check whether path is null in advance
+ conf_files = params.get("confFiles")
+ if len(conf_files) == 0:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The conf_files path can't be empty")
+
+ # Conf to record successes and failures
+ successConf = []
+ failedConf = []
+
+ # Check whether path exists in the domain. There are two possible paths :
+ # (1)xpath path
+ # (2) configuration item
+ domain_path = os.path.join(TARGETDIR, domain)
+ LOGGER.debug("conf_files is : {}".format(conf_files))
+
+ yang_modules = YangModule()
+ module_lists = yang_modules.module_list
+ if len(module_lists) == 0:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The yang module does not exist")
+
+ LOGGER.debug("module_lists is : {}".format(module_lists))
+ for conf in conf_files:
+ module = yang_modules.getModuleByFilePath(conf["filePath"])
+ features = yang_modules.getFeatureInModule(module)
+ features_path = os.path.join(domain_path, "/".join(features))
+
+ if os.path.isfile(features_path):
+ LOGGER.info("It's a normal file : {}".format(features_path))
+ try:
+ os.remove(features_path)
+ except OSError as ex:
+ LOGGER.error("Failed to remove path, as OSError: {}".format(str(ex)))
+ break
+ successConf.append(conf["filePath"])
+ else:
+ LOGGER.error("It's not a normal file : {}".format(features_path))
+ failedConf.append(conf["filePath"])
+
+ # git commit message
+ if len(successConf) > 0:
+ git_tools = GitTools()
+ succ_conf = ""
+ for d_conf in successConf:
+ succ_conf = succ_conf + d_conf + " "
+ commit_code = git_tools.gitCommit("delete the conf in {} domain, ".format(domain) +
+ "the path including : {}".format(succ_conf))
+
+ # Joining together the returned codenum and codeMessage
+ if len(failedConf) == 0:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("confs", "delete management conf", successConf)
+ else:
+ codeNum = PARAM_ERROR
+ codeString = Format.splicErrorString("confs", "delete management conf", successConf, failedConf)
+ codeString += "\n The reason for the failure is: these paths do not exist."
+
+ # 获取domain最新的有哪些配置 [1]
+ conf_files_list = Format.get_conf_files_list(domain, access_token)
+ Format.update_agith(access_token, conf_files_list, domain)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class GetManagementConfsInDomain(BaseResponse):
+ @BaseResponse.handle(schema=GetManagementConfsSchema, token=True)
+ def post(self, **params):
+ """get management configuration items and expected values in the domain
+
+ get management configuration items and expected values in the domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: ConfFiles
+ """
+ # Check whether the domain exists
+ domain = params.get("domainName")
+
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Failed to verify the input parameter, please check the input "
+ "parameters.")
+
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The current domain does not exist")
+
+ # The parameters of the initial return value assignment
+ # expected_conf_lists = ConfFiles(domain_name=domain, conf_files=[])
+ expected_conf_lists = {"domainName": domain, "confFiles": []}
+
+ # get the path in domain
+ domainPath = os.path.join(TARGETDIR, domain)
+
+ # When there is a file path is the path of judgment for the configuration items
+ for root, dirs, files in os.walk(domainPath):
+ if len(files) > 0 and len(root.split('/')) > 3:
+ if "hostRecord.txt" in files:
+ continue
+ for d_file in files:
+ d_file_path = os.path.join(root, d_file)
+ contents = Format.get_file_content_by_read(d_file_path)
+ feature = os.path.join(root.split('/')[-1], d_file)
+ yang_modules = YangModule()
+ d_module = yang_modules.getModuleByFeature(feature)
+ file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
+ file_path = file_lists.get(d_module.name()).split(":")[-1]
+
+ # conf = ConfFile(file_path=file_path, contents=contents)
+ conf = {"filePath": file_path, "contents": contents}
+ # expected_conf_lists.conf_files.append(conf)
+ expected_conf_lists.get("confFiles").append(conf)
+ LOGGER.debug("expected_conf_lists is :{}".format(expected_conf_lists))
+
+ if len(expected_conf_lists.get("domainName")) > 0:
+ codeNum = SUCCEED
+ codeString = "Get management configuration items and expected values in the domain successfully"
+ return self.response(code=codeNum, message=codeString, data=expected_conf_lists)
+
+ else:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The file is Null in this domain")
+
+
+class QueryChangelogOfManagementConfsInDomain(BaseResponse):
+ @BaseResponse.handle(schema=QueryChangelogSchema, token=True)
+ def post(self, **params):
+ """query the change log of management config in domain
+
+ query the change log of management config in domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: ExceptedConfInfo
+ """
+ # check whether the domain exists
+ domain = params.get("domainName")
+ LOGGER.debug("Query changelog of conf body is : {}".format(params))
+
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Failed to verify the input parameter, please check the input "
+ "parameters.")
+
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="The current domain does not exist")
+
+ # Check whether path is empty in advance. If path is empty, the configuration in the
+ # entire domain is queried. Otherwise, the historical records of the specified file are queried.
+ conf_files = params.get("confFiles")
+ LOGGER.debug("conf_files is : {}".format(conf_files))
+ conf_files_list = []
+ if conf_files:
+ for d_conf in conf_files:
+ LOGGER.debug("d_conf is : {}".format(d_conf))
+ conf_files_list.append(d_conf["filePath"])
+ success_conf = []
+ failed_conf = []
+ domain_path = os.path.join(TARGETDIR, domain)
+ # expected_conf_lists = ExceptedConfInfo(domain_name=domain, conf_base_infos=[])
+ expected_conf_lists = {"domainName": domain, "confBaseInfos": []}
+ yang_modules = YangModule()
+ for root, dirs, files in os.walk(domain_path):
+ if len(files) > 0 and len(root.split('/')) > 3:
+ if "hostRecord.txt" in files:
+ continue
+ for d_file in files:
+ feature = os.path.join(root.split('/')[-1], d_file)
+ d_module = yang_modules.getModuleByFeature(feature)
+ file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
+ file_path = file_lists.get(d_module.name()).split(":")[-1]
+ if conf_files_list and (file_path not in conf_files_list):
+ continue
+ d_file_path = os.path.join(root, d_file)
+ expectedValue = Format.get_file_content_by_read(d_file_path)
+ git_tools = GitTools()
+ gitMessage = git_tools.getLogMessageByPath(d_file_path)
+ if gitMessage and expectedValue:
+ success_conf.append(file_path)
+ else:
+ failed_conf.append(file_path)
+
+ conf_base_info = {"filePath": file_path, "expectedContents": expectedValue, "changeLog": gitMessage}
+ expected_conf_lists.get("confBaseInfos").append(conf_base_info)
+
+ LOGGER.debug("expected_conf_lists is : {}".format(expected_conf_lists))
+
+ if len(success_conf) == 0:
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="Failed to query the changelog of the configure in the domain.")
+ if len(failed_conf) > 0:
+ codeNum = PARAM_ERROR
+ else:
+ codeNum = SUCCEED
+
+ return self.response(code=codeNum, message="Succeed to query the changelog of the configure in the domain.",
+ data=expected_conf_lists)
diff --git a/ragdoll/domain_manage/__init__.py b/ragdoll/domain_manage/__init__.py
new file mode 100644
index 0000000..f0935bb
--- /dev/null
+++ b/ragdoll/domain_manage/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py.py
+@Time: 2024/3/4 10:34
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/domain_manage/view.py b/ragdoll/domain_manage/view.py
new file mode 100644
index 0000000..dd507cb
--- /dev/null
+++ b/ragdoll/domain_manage/view.py
@@ -0,0 +1,135 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: view.py
+@Time: 2024/3/4 10:34
+@Author: JiaoSiMao
+Description:
+"""
+import os
+import shutil
+
+import connexion
+from vulcanus.restful.resp.state import SUCCEED, SERVER_ERROR, PARAM_ERROR
+from vulcanus.restful.response import BaseResponse
+
+from ragdoll.conf.constant import TARGETDIR
+from ragdoll.function.verify.domain import CreateDomainSchema, DeleteDomainSchema
+from ragdoll.utils.format import Format
+from ragdoll.utils.git_tools import GitTools
+
+
+class CreateDomain(BaseResponse):
+
+ @BaseResponse.handle(schema=CreateDomainSchema, token=True)
+ def post(self, **params):
+ """
+ create domain
+
+ Args:
+ args (dict): e.g
+ {
+ "domainName":"xxx",
+ "priority":""
+ }
+
+ Returns:
+ dict: response body
+ """
+ successDomain = []
+ failedDomain = []
+ tempDomainName = params.get("domainName")
+ checkRes = Format.domainCheck(tempDomainName)
+ isExist = Format.isDomainExist(tempDomainName)
+
+ if isExist or not checkRes:
+ failedDomain.append(tempDomainName)
+ else:
+ successDomain.append(tempDomainName)
+ domainPath = os.path.join(TARGETDIR, tempDomainName)
+ os.umask(0o077)
+ os.mkdir(domainPath)
+
+ codeString = ""
+ if len(failedDomain) == 0:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("domain", "created", successDomain)
+ else:
+ codeNum = SERVER_ERROR
+ if params:
+ if isExist:
+ codeString = "domain {} create failed because it has been existed.".format(failedDomain[0])
+ elif not checkRes:
+ codeString = "domain {} create failed because format is incorrect.".format(failedDomain[0])
+ else:
+ codeString = Format.splicErrorString("domain", "created", successDomain, failedDomain)
+
+ # 对successDomain成功的domain添加文件监控开关、告警开关
+ Format.add_domain_conf_trace_flag(params, successDomain, tempDomainName)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class DeleteDomain(BaseResponse):
+
+ @BaseResponse.handle(schema=DeleteDomainSchema, token=True)
+ def delete(self, **params):
+ access_token = connexion.request.headers.get("access_token")
+ domainName = params.get("domainName")
+
+ if not domainName:
+ codeString = "The entered domain is empty"
+ return self.response(code=PARAM_ERROR, message=codeString)
+ # 1.清理agith
+ # 获取domain下的host ids
+ host_ids = Format.get_hostid_list_by_domain(domainName)
+ if len(host_ids) > 0:
+ Format.uninstall_trace(access_token, host_ids, domainName)
+ successDomain = []
+ failedDomain = []
+
+ checkRes = Format.domainCheck(domainName)
+ isExist = Format.isDomainExist(domainName)
+ if checkRes and isExist:
+ domainPath = os.path.join(TARGETDIR, domainName)
+ successDomain.append(domainName)
+ shutil.rmtree(domainPath)
+ else:
+ failedDomain.append(domainName)
+
+ if len(failedDomain) == 0:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("domain", "delete", successDomain)
+ else:
+ codeNum = SERVER_ERROR
+ codeString = Format.splicErrorString("domain", "delete", successDomain, failedDomain)
+
+ # 删除业务域对successDomain成功的业务域进行redis的key值清理以及domain下的主机进行agith的清理
+ Format.clear_all_domain_data(access_token, domainName, successDomain, host_ids)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class QueryDomain(BaseResponse):
+ @BaseResponse.handle(token=True)
+ def post(self, **params):
+ domain_list = []
+ cmd = "ls {}".format(TARGETDIR)
+ gitTools = GitTools()
+ ls_res = gitTools.run_shell_return_output(cmd).decode()
+ ll_list = ls_res.split('\n')
+ for d_ll in ll_list:
+ if d_ll:
+ domain = {"domainName": d_ll}
+ domain_list.append(domain)
+ return self.response(code=SUCCEED, data=domain_list)
diff --git a/ragdoll/function/__init__.py b/ragdoll/function/__init__.py
new file mode 100644
index 0000000..1a316c9
--- /dev/null
+++ b/ragdoll/function/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py.py
+@Time: 2024/3/4 10:48
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/function/verify/__init__.py b/ragdoll/function/verify/__init__.py
new file mode 100644
index 0000000..1a316c9
--- /dev/null
+++ b/ragdoll/function/verify/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py.py
+@Time: 2024/3/4 10:48
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/function/verify/confs.py b/ragdoll/function/verify/confs.py
new file mode 100644
index 0000000..654ede4
--- /dev/null
+++ b/ragdoll/function/verify/confs.py
@@ -0,0 +1,82 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: confs.py
+@Time: 2024/3/11 9:02
+@Author: JiaoSiMao
+Description:
+"""
+from marshmallow import Schema, fields
+
+
+class DomainNameSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+
+
+class HostIdSchema(Schema):
+ hostId = fields.Integer(required=True, validate=lambda s: s >= 0)
+
+
+class SyncHostConfsSchema(Schema):
+ hostId = fields.Integer(required=True, validate=lambda s: s >= 0)
+ syncConfigs = fields.List(fields.String(required=True, validate=lambda s: len(s) > 0), required=True,
+ validate=lambda s: len(s) > 0)
+
+
+class ConfBaseSchema(Schema):
+ filePath = fields.String(required=True, validate=lambda s: len(s) > 0)
+ expectedContents = fields.String(required=True, validate=lambda s: len(s) > 0)
+
+
+class DomainConfBaseInfosSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ confBaseInfos = fields.List(fields.Nested(ConfBaseSchema(), required=True), required=True,
+ validate=lambda s: len(s) > 0)
+
+
+class GetSyncStatusSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ ip = fields.String(required=True, validate=lambda s: len(s) > 0)
+
+
+class QueryExceptedConfsSchema(Schema):
+ domainNames = fields.List(fields.Nested(DomainNameSchema(), required=True), required=True,
+ validate=lambda s: len(s) > 0)
+
+
+class QueryRealConfsSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ hostIds = fields.List(fields.Nested(HostIdSchema(), required=True), required=True,
+ validate=lambda s: len(s) > 0)
+
+
+class SyncConfToHostFromDomainSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ syncList = fields.List(fields.Nested(SyncHostConfsSchema(), required=True), required=True,
+ validate=lambda s: len(s) > 0)
+
+
+class QuerySupportedConfsSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+
+
+class CompareConfDiffSchema(Schema):
+ expectedConfsResp = fields.List(fields.Nested(DomainConfBaseInfosSchema(), required=True), required=True,
+ validate=lambda s: len(s) > 0)
+ domainResult = fields.Dict(required=True, validate=lambda s: len(s) > 0)
+
+
+class BatchSyncConfToHostFromDomainSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ hostIds = fields.List(fields.Integer(required=True, validate=lambda s: s >= 0), required=True,
+ validate=lambda s: len(s) > 0)
diff --git a/ragdoll/function/verify/domain.py b/ragdoll/function/verify/domain.py
new file mode 100644
index 0000000..c4c1323
--- /dev/null
+++ b/ragdoll/function/verify/domain.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (C) 2023 isoftstone Technologies Co., Ltd. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: domain.py
+@Time: 2024/3/4 10:48
+@Author: JiaoSiMao
+Description:
+"""
+from marshmallow import Schema, fields, validate
+
+
+class CreateDomainSchema(Schema):
+ """
+ validators for parameter of /domain/createDomain
+ """
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ priority = fields.Integer(required=True, validate=lambda s: s >= 0)
+ conf_change_flag = fields.Boolean(required=True, default=False, validate=validate.OneOf([True, False]))
+ report_flag = fields.Boolean(required=True, default=False, validate=validate.OneOf([True, False]))
+
+
+class DeleteDomainSchema(Schema):
+ """
+ validators for parameter of /domain/deleteDomain
+ """
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
diff --git a/ragdoll/function/verify/domain_conf.py b/ragdoll/function/verify/domain_conf.py
new file mode 100644
index 0000000..698772b
--- /dev/null
+++ b/ragdoll/function/verify/domain_conf.py
@@ -0,0 +1,53 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: domain_conf.py
+@Time: 2024/3/8 11:40
+@Author: JiaoSiMao
+Description:
+"""
+from marshmallow import Schema, fields
+
+
+class ConfSchema(Schema):
+ filePath = fields.String(required=False, validate=lambda s: len(s) > 0)
+ contents = fields.String(required=False, validate=lambda s: len(s) > 0)
+ hostId = fields.Integer(required=False, validate=lambda s: s >= 0)
+
+
+class ManageConfSchema(Schema):
+ filePath = fields.String(required=False, validate=lambda s: len(s) > 0)
+
+
+class AddManagementConfsSchema(Schema):
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ confFiles = fields.List(fields.Nested(ConfSchema(), required=True), required=True, validate=lambda s: len(s) > 0)
+
+
+class UploadManagementConfsSchema(Schema):
+ filePath = fields.String(required=False, validate=lambda s: len(s) > 0)
+ domainName = fields.String(required=False, validate=lambda s: len(s) > 0)
+
+
+class DeleteManagementConfsSchema(Schema):
+ domainName = fields.String(required=False, validate=lambda s: len(s) > 0)
+ confFiles = fields.List(fields.Nested(ManageConfSchema(), required=True), required=True, validate=lambda s: len(s) > 0)
+
+
+class GetManagementConfsSchema(Schema):
+ domainName = fields.String(required=False, validate=lambda s: len(s) > 0)
+
+
+class QueryChangelogSchema(Schema):
+ domainName = fields.String(required=False, validate=lambda s: len(s) > 0)
+ confFiles = fields.List(fields.Nested(ManageConfSchema(), required=True), required=True, validate=lambda s: len(s) > 0)
diff --git a/ragdoll/function/verify/host.py b/ragdoll/function/verify/host.py
new file mode 100644
index 0000000..1ea927c
--- /dev/null
+++ b/ragdoll/function/verify/host.py
@@ -0,0 +1,58 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: host.py
+@Time: 2024/3/5 9:44
+@Author: JiaoSiMao
+Description:
+"""
+from marshmallow import Schema, fields
+
+
+class HostSchema(Schema):
+ """
+ validators for parameter of host
+ """
+ hostId = fields.Integer(required=True, validate=lambda s: s >= 0)
+ ip = fields.String(required=True, validate=lambda s: len(s) > 0)
+ ipv6 = fields.String(required=True, validate=lambda s: len(s) > 0)
+
+
+class SingleDeleteHostSchema(Schema):
+ """
+ validators for parameter of /host/deleteHost
+ """
+ hostId = fields.Integer(required=True, validate=lambda s: s >= 0)
+
+
+class AddHostSchema(Schema):
+ """
+ validators for parameter of /host/addHost
+ """
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ hostInfos = fields.List(fields.Nested(HostSchema(), required=True), required=True, validate=lambda s: len(s) > 0)
+
+
+class DeleteHostSchema(Schema):
+ """
+ validators for parameter of /host/deleteHost
+ """
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
+ hostInfos = fields.List(fields.Nested(SingleDeleteHostSchema(), required=True), required=True, validate=lambda s: len(s) > 0)
+
+
+class GetHostSchema(Schema):
+ """
+ validators for parameter of /host/getHost
+ """
+ domainName = fields.String(required=True, validate=lambda s: len(s) > 0)
\ No newline at end of file
diff --git a/ragdoll/host_manage/__init__.py b/ragdoll/host_manage/__init__.py
new file mode 100644
index 0000000..ec92ca5
--- /dev/null
+++ b/ragdoll/host_manage/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: __init__.py.py
+@Time: 2024/3/5 9:39
+@Author: JiaoSiMao
+Description:
+"""
diff --git a/ragdoll/host_manage/view.py b/ragdoll/host_manage/view.py
new file mode 100644
index 0000000..ead42f7
--- /dev/null
+++ b/ragdoll/host_manage/view.py
@@ -0,0 +1,292 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: view.py
+@Time: 2024/3/5 9:40
+@Author: JiaoSiMao
+Description:
+"""
+import ast
+import json
+import os
+
+import connexion
+from vulcanus.restful.resp.state import PARAM_ERROR, SUCCEED, PARTIAL_SUCCEED, SERVER_ERROR
+from vulcanus.restful.response import BaseResponse
+
+from ragdoll.conf.constant import TARGETDIR
+from ragdoll.function.verify.host import AddHostSchema, DeleteHostSchema, GetHostSchema
+from ragdoll.log.log import LOGGER
+from ragdoll.utils.conf_tools import ConfTools
+from ragdoll.utils.format import Format
+from ragdoll.utils.git_tools import GitTools
+
+
+class AddHostInDomain(BaseResponse):
+ @BaseResponse.handle(schema=AddHostSchema, token=True)
+ def post(self, **params):
+ """
+ add host in the configuration domain
+
+ add host in the configuration domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: BaseResponse
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ host_infos = params.get("hostInfos")
+
+ # check whether host_infos is empty
+ if len(host_infos) == 0:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum, message="Enter host info cannot be empty, please check the host info.")
+
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="Failed to verify the input parameter, please check the input parameters.")
+
+ # check whether the domain exists
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="The current domain does not exist, please create the domain first.")
+
+ successHost = []
+ failedHost = []
+ domainPath = os.path.join(TARGETDIR, domain)
+ # 将domainName 和host信息入库
+ conf_tools = ConfTools()
+ Format.addHostSyncStatus(conf_tools, domain, host_infos)
+
+ # Check whether the current host exists in the domain.
+ for host in host_infos:
+ # 判断这个hostId是否在其他业务域中
+ contained_flag = Format.isContainedHostIdInOtherDomain(host.get("hostId"))
+ if contained_flag:
+ failedHost.append(host.get("hostId"))
+ else:
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if os.path.isfile(hostPath):
+ isContained = Format.isContainedHostIdInfile(hostPath, host.get("hostId"))
+ if isContained:
+ failedHost.append(host.get("hostId"))
+ else:
+ Format.addHostToFile(hostPath, host)
+ successHost.append(host.get("hostId"))
+ else:
+ Format.addHostToFile(hostPath, host)
+ successHost.append(host.get("hostId"))
+
+ if len(failedHost) == len(host_infos):
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="The all host already exists in the administrative scope of the domain.")
+
+ # Joining together the returned codenum codeMessage
+ if len(failedHost) == 0:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("host", "add hosts", successHost)
+ else:
+ codeNum = PARTIAL_SUCCEED
+ codeString = Format.splicErrorString("host", "add hosts", successHost, failedHost)
+
+ # git commit maessage
+ if len(host_infos) > 0:
+ git_tools = GitTools()
+ commit_code = git_tools.gitCommit("Add the host in {} domian, ".format(domain) +
+ "the host including : {}".format(successHost))
+
+ # 针对successHost 添加成功的host, 安装agith并启动agith如果当前业务域有配置配置agith如果没有就不配置
+ Format.install_update_agith(access_token, domain, successHost)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class DeleteHostInDomain(BaseResponse):
+ @BaseResponse.handle(schema=DeleteHostSchema, token=True)
+ def delete(self, **params):
+ """delete host in the configuration domain
+
+ delete the host in the configuration domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: BaseResponse
+ """
+ access_token = connexion.request.headers.get("access_token")
+ domain = params.get("domainName")
+ hostInfos = params.get("hostInfos")
+
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="Failed to verify the input parameter, please check the input parameters.")
+
+ # check whether the domain exists
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="The current domain does not exist, please create the domain first.")
+
+ # 将host sync status从库中删除
+ conf_tools = ConfTools()
+ Format.deleteHostSyncStatus(conf_tools, domain, hostInfos)
+
+ # Whether the host information added within the current domain is empty while ain exists
+ domainPath = os.path.join(TARGETDIR, domain)
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if not os.path.isfile(hostPath) or (os.path.isfile(hostPath) and os.stat(hostPath).st_size == 0):
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="The host information is not set in the current domain. Please add the host "
+ "information first")
+
+ # If the input host information is empty, the host information of the whole domain is cleared
+ if len(hostInfos) == 0:
+ if os.path.isfile(hostPath):
+ try:
+ os.remove(hostPath)
+ except OSError as ex:
+ LOGGER.error("Failed to delete hostpath as OS error: {0}".format(ex))
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="The host delete failed.")
+ codeNum = SUCCEED
+ return self.response(code=codeNum, message="All hosts are deleted in the current domain.")
+
+ # If the domain exists, check whether the current input parameter host belongs to the corresponding
+ # domain. If the host is in the domain, the host is deleted. If the host is no longer in the domain,
+ # the host is added to the failure range
+ containedInHost = []
+ notContainedInHost = []
+ os.umask(0o077)
+ for hostInfo in hostInfos:
+ hostId = hostInfo.get("hostId")
+ isContained = False
+ try:
+ with open(hostPath, 'r') as d_file:
+ lines = d_file.readlines()
+ with open(hostPath, 'w') as w_file:
+ for line in lines:
+ line_host_id = json.loads(str(ast.literal_eval(line)).replace("'", "\""))['host_id']
+ if hostId != line_host_id:
+ w_file.write(line)
+ else:
+ isContained = True
+ except OSError as err:
+ LOGGER.error("OS error: {0}".format(err))
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="OS error: {0}".format(err))
+
+ if isContained:
+ containedInHost.append(hostId)
+ else:
+ notContainedInHost.append(hostId)
+
+ # All hosts do not belong to the domain
+ if len(notContainedInHost) == len(hostInfos):
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="All the host does not belong to the domain control, please enter the host "
+ "again")
+
+ # Some hosts belong to domains, and some hosts do not belong to domains.
+ if len(notContainedInHost) == 0:
+ codeNum = SUCCEED
+ codeString = Format.spliceAllSuccString("host", "delete", containedInHost)
+ else:
+ codeNum = PARAM_ERROR
+ codeString = Format.splicErrorString("host", "delete", containedInHost, notContainedInHost)
+
+ # git commit message
+ if len(containedInHost) > 0:
+ git_tools = GitTools()
+ commit_code = git_tools.gitCommit("Delete the host in {} domian, ".format(domain) +
+ "the host including : {}".format(containedInHost))
+ # # 根据containedInHost 停止agith服务删除agith删除redis key值
+ Format.uninstall_hosts_agith(access_token, containedInHost, domain)
+
+ return self.response(code=codeNum, message=codeString)
+
+
+class GetHostByDomainName(BaseResponse):
+ @BaseResponse.handle(schema=GetHostSchema, token=True)
+ def post(self, **params):
+ """get host by domainName
+
+ get the host information of the configuration domain # noqa: E501
+
+ :param body: domain info
+ :type body: dict | bytes
+
+ :rtype: List[Host]
+ """
+ domain = params.get("domainName")
+ # check the input domain
+ checkRes = Format.domainCheck(domain)
+ if not checkRes:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="Failed to verify the input parameter, please check the input parameters.")
+
+ # check whether the domain exists
+ isExist = Format.isDomainExist(domain)
+ if not isExist:
+ codeNum = PARAM_ERROR
+ return self.response(code=codeNum,
+ message="The current domain does not exist, please create the domain first.")
+
+ # The domain exists, but the host information is empty
+ domainPath = os.path.join(TARGETDIR, domain)
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if not os.path.isfile(hostPath) or (os.path.isfile(hostPath) and os.stat(hostPath).st_size == 0):
+ codeNum = SUCCEED
+ return self.response(code=codeNum,
+ message="The host information is not set in the current domain. Please add the host "
+ "information first.")
+
+ # The domain exists, and the host information exists and is not empty
+ hostlist = []
+ LOGGER.debug("hostPath is : {}".format(hostPath))
+ try:
+ with open(hostPath, 'r') as d_file:
+ for line in d_file.readlines():
+ json_str = json.loads(line)
+ host_json = ast.literal_eval(json_str)
+ hostId = host_json["host_id"]
+ ip = host_json["ip"]
+ ipv6 = host_json["ipv6"]
+ host = {"hostId": hostId, "ip": ip, "ipv6": ipv6}
+ hostlist.append(host)
+ except OSError as err:
+ LOGGER.error("OS error: {0}".format(err))
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="OS error: {0}".format(err))
+
+ # Joining together the returned codeNum codeMessage
+ if len(hostlist) == 0:
+ codeNum = SERVER_ERROR
+ return self.response(code=codeNum, message="Some unknown problems.")
+ else:
+ LOGGER.debug("hostlist is : {}".format(hostlist))
+ codeNum = SUCCEED
+ return self.response(code=codeNum, message="Get host info in the domain successfully", data=hostlist)
diff --git a/ragdoll/manage.py b/ragdoll/manage.py
new file mode 100644
index 0000000..d646f77
--- /dev/null
+++ b/ragdoll/manage.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python3
+try:
+ from gevent import monkey, pywsgi
+
+ monkey.patch_all(ssl=False)
+except:
+ pass
+
+from vulcanus.manage import init_application
+from ragdoll.conf import configuration
+from ragdoll.url import URLS
+from ragdoll.utils.prepare import Prepare
+from ragdoll.utils.yang_module import YangModule
+
+
+def load_prepare():
+ git_dir = configuration.git.get("GIT_DIR").replace("\"", "")
+ git_user_name = configuration.git.get("USER_NAME").replace("\"", "")
+ git_user_email = configuration.git.get("USER_EMAIL").replace("\"", "")
+
+ prepare = Prepare(git_dir)
+ prepare.mkdir_git_warehose(git_user_name, git_user_email)
+
+
+def load_yang():
+ yang_modules = YangModule()
+
+
+def main():
+ _app = init_application(name="ragdoll", settings=configuration, register_urls=URLS)
+ # prepare to load config
+ load_prepare()
+ # load yang modules
+ load_yang()
+ print("configuration.ragdoll.get('ip') is ", configuration.ragdoll.get('IP'))
+ return _app
+
+
+app = main()
+
+if __name__ == "__main__":
+ app.run(host=configuration.ragdoll.get('IP'), port=configuration.ragdoll.get("PORT"))
diff --git a/ragdoll/models/batch_sync_req.py b/ragdoll/models/batch_sync_req.py
new file mode 100644
index 0000000..bca0e65
--- /dev/null
+++ b/ragdoll/models/batch_sync_req.py
@@ -0,0 +1,95 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from ragdoll.models.base_model_ import Model
+from ragdoll import util
+from ragdoll.models.sync_host_confs import SyncHostConfs
+
+
+class BatchSyncReq(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, domain_name: str = None, host_ids: List[int] = None): # noqa: E501
+
+        """BatchSyncReq - a model defined in Swagger
+
+ :param domain_name: The domain_name of this BatchSyncReq. # noqa: E501
+ :type domain_name: str
+ :param host_ids: The host_ids of this BatchSyncReq. # noqa: E501
+ :type host_ids: List[int]
+ """
+ self.swagger_types = {
+ 'domain_name': str,
+ 'host_ids': List[int]
+ }
+
+ self.attribute_map = {
+ 'domain_name': 'domainName',
+ 'host_ids': 'hostIds'
+ }
+
+ self._domain_name = domain_name
+ self._host_ids = host_ids
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'BatchSyncReq':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The BatchSyncReq of this BatchSyncReq. # noqa: E501
+ :rtype: BatchSyncReq
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def domain_name(self) -> str:
+ """Gets the domain_name of this BatchSyncReq.
+
+ domain name # noqa: E501
+
+ :return: The domain_name of this BatchSyncReq.
+ :rtype: str
+ """
+ return self._domain_name
+
+ @domain_name.setter
+ def domain_name(self, domain_name: str):
+ """Sets the domain_name of this BatchSyncReq.
+
+ domain name # noqa: E501
+
+ :param domain_name: The domain_name of this BatchSyncReq.
+ :type domain_name: str
+ """
+
+ self._domain_name = domain_name
+
+ @property
+ def host_ids(self) -> List[int]:
+ """Gets the host_ids of this BatchSyncReq.
+
+
+ :return: The host_ids of this BatchSyncReq.
+ :rtype: List[int]
+ """
+
+ return self._host_ids
+
+ @host_ids.setter
+ def host_ids(self, host_ids: List[int]):
+        """Sets the host_ids of this BatchSyncReq.
+
+
+ :param host_ids: The host_ids of this BatchSyncReq.
+ :type host_ids: List[int]
+ """
+
+ self._host_ids = host_ids
diff --git a/ragdoll/models/compare_conf_diff.py b/ragdoll/models/compare_conf_diff.py
new file mode 100644
index 0000000..c68c8b7
--- /dev/null
+++ b/ragdoll/models/compare_conf_diff.py
@@ -0,0 +1,104 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: compare_conf_diff.py
+@Time: 2024/1/25 10:05
+@Author: JiaoSiMao
+Description:
+"""
+from typing import List
+from ragdoll import util
+from ragdoll.models.base_model_ import Model
+from ragdoll.models.domain_conf_base_infos import DomainConfBaseInfos
+
+
+class CompareConfDiff(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, expected_confs_resp: List[DomainConfBaseInfos] = None, domain_result: object = None): # noqa: E501
+ """CompareConfDiff - a model defined in Swagger
+
+ :param expected_confs_resp: The all domain conf. # noqa: E501
+        :type expected_confs_resp: List[DomainConfBaseInfos]
+
+ :param domain_result: The all domain host real conf. # noqa: E501
+        :type domain_result: object
+ """
+ self.swagger_types = {
+ 'expected_confs_resp': List[DomainConfBaseInfos],
+ 'domain_result': object
+ }
+
+ self.attribute_map = {
+ 'expected_confs_resp': 'expectedConfsResp',
+ 'domain_result': 'domainResult'
+ }
+
+ self._expected_confs_resp = expected_confs_resp
+ self._domain_result = domain_result
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'CompareConfDiff':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: the CompareConfDiff of CompareConfDiff. # noqa: E501
+ :rtype: CompareConfDiff
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def expected_confs_resp(self) -> List[DomainConfBaseInfos]:
+ """Gets expected_confs_resp of this CompareConfDiff.
+
+
+ :return: The expected_confs_resp of this CompareConfDiff.
+ :rtype: List[DomainConfBaseInfos]
+ """
+ return self._expected_confs_resp
+
+ @expected_confs_resp.setter
+ def expected_confs_resp(self, expected_confs_resp: List[DomainConfBaseInfos]):
+ """Sets expected_confs_resp of this CompareConfDiff.
+
+
+ :param expected_confs_resp: The expected_confs_resp of this CompareConfDiff.
+        :type expected_confs_resp: List[DomainConfBaseInfos]
+ """
+
+ self._expected_confs_resp = expected_confs_resp
+
+ @property
+ def domain_result(self) -> object:
+ """Gets domain_result of this CompareConfDiff.
+
+
+ :return: The domain_result of this CompareConfDiff.
+        :rtype: object
+ """
+ return self._domain_result
+
+ @domain_result.setter
+ def domain_result(self, domain_result: object):
+ """Sets domain_result of this CompareConfDiff.
+
+
+ :param domain_result: The domain_result of this CompareConfDiff.
+ type domain_result: str
+ """
+
+ self._domain_result = domain_result
diff --git a/ragdoll/models/conf_base.py b/ragdoll/models/conf_base.py
new file mode 100644
index 0000000..21d2ec6
--- /dev/null
+++ b/ragdoll/models/conf_base.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: conf_base.py
+@Time: 2024/1/25 15:09
+@Author: JiaoSiMao
+Description:
+"""
+from ragdoll import util
+from ragdoll.models.base_model_ import Model
+
+
+class ConfBase(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, file_path: str = None, expected_contents: str = None): # noqa: E501
+        """ConfBase - a model defined in Swagger
+
+ :param file_path: The file_path of this ConfBase. # noqa: E501
+ :type file_path: str
+ :param expected_contents: The expected_contents of this ConfBase. # noqa: E501
+ :type expected_contents: str
+ """
+ self.swagger_types = {
+ 'file_path': str,
+ 'expected_contents': str
+ }
+
+ self.attribute_map = {
+ 'file_path': 'filePath',
+ 'expected_contents': 'expectedContents'
+ }
+
+ self._file_path = file_path
+ self._expected_contents = expected_contents
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'ConfBase':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The ConfBase of this ConfBase. # noqa: E501
+ :rtype: ConfBase
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def file_path(self) -> str:
+ """Gets the file_path of this ConfBase.
+
+ the path of a configuration file # noqa: E501
+
+ :return: The file_path of this ConfBase.
+ :rtype: str
+ """
+ return self._file_path
+
+ @file_path.setter
+ def file_path(self, file_path: str):
+ """Sets the file_path of this ConfBase.
+
+ the path of a configuration file # noqa: E501
+
+ :param file_path: The file_path of this ConfBase.
+ :type file_path: str
+ """
+
+ self._file_path = file_path
+
+ @property
+ def expected_contents(self) -> str:
+ """Gets the expected_contents of this ConfBase.
+
+ expected configuration value of configuration item # noqa: E501
+
+ :return: The expected_contents of this ConfBase.
+ :rtype: str
+ """
+ return self._expected_contents
+
+ @expected_contents.setter
+ def expected_contents(self, expected_contents: str):
+ """Sets the expected_contents of this ConfBase.
+
+ expected configuration value of configuration item # noqa: E501
+
+ :param expected_contents: The expected_contents of this ConfBase.
+ :type expected_contents: str
+ """
+
+ self._expected_contents = expected_contents
diff --git a/ragdoll/models/domain_conf_base_infos.py b/ragdoll/models/domain_conf_base_infos.py
new file mode 100644
index 0000000..e49e8d2
--- /dev/null
+++ b/ragdoll/models/domain_conf_base_infos.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: domain_conf_base_infos.py
+@Time: 2024/1/25 15:06
+@Author: JiaoSiMao
+Description:
+"""
+from typing import List
+
+from ragdoll.models.base_model_ import Model
+from ragdoll.models.conf_base import ConfBase
+from ragdoll import util
+
+
+class DomainConfBaseInfos(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, conf_base_infos: List[ConfBase] = None, domain_name: str = None): # noqa: E501
+ """DomainConfBaseInfos - a model defined in Swagger
+
+ :param conf_base_infos: The all domain conf. # noqa: E501
+        :type conf_base_infos: List[ConfBase]
+
+ :param domain_name: The domain_name of DomainConfBaseInfos. # noqa: E501
+ :type domain_name: str
+ """
+ self.swagger_types = {
+ 'conf_base_infos': List[ConfBase],
+ 'domain_name': str
+ }
+
+ self.attribute_map = {
+ 'conf_base_infos': 'confBaseInfos',
+ 'domain_name': 'domainName'
+ }
+
+ self._conf_base_infos = conf_base_infos
+ self._domain_name = domain_name
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'DomainConfBaseInfos':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: the DomainConfBaseInfos of DomainConfBaseInfos. # noqa: E501
+ :rtype: DomainConfBaseInfos
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def conf_base_infos(self) -> List[ConfBase]:
+ """Gets conf_base_infos of this DomainConfBaseInfos.
+
+
+ :return: The conf_base_infos of this DomainConfBaseInfos.
+ :rtype: List[ConfBase]
+ """
+ return self._conf_base_infos
+
+ @conf_base_infos.setter
+ def conf_base_infos(self, conf_base_infos: List[ConfBase]):
+ """Sets conf_base_infos of this DomainConfBaseInfos.
+
+
+ :param conf_base_infos: The conf_base_infos of this DomainConfBaseInfos.
+        :type conf_base_infos: List[ConfBase]
+ """
+
+ self._conf_base_infos = conf_base_infos
+
+ @property
+ def domain_name(self) -> str:
+ """Gets domain_name of this DomainConfBaseInfos.
+
+
+ :return: The domain_name of this DomainConfBaseInfos.
+ :rtype: str
+ """
+ return self._domain_name
+
+ @domain_name.setter
+ def domain_name(self, domain_name: str):
+ """Sets domain_name of this DomainConfBaseInfos.
+
+
+        :param domain_name: The domain_name of this DomainConfBaseInfos.
+        :type domain_name: str
+ """
+
+ self._domain_name = domain_name
diff --git a/ragdoll/models/domain_config_sync_result.py b/ragdoll/models/domain_config_sync_result.py
new file mode 100644
index 0000000..c7dc741
--- /dev/null
+++ b/ragdoll/models/domain_config_sync_result.py
@@ -0,0 +1,131 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: domain_config_sync_result.py
+@Time: 2024/1/25 11:31
+@Author: JiaoSiMao
+Description:
+"""
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+from typing import List, Dict # noqa: F401
+from ragdoll.models.base_model_ import Model
+from ragdoll import util
+
+
+class DomainConfigSyncResult(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, host_id: int = None, domain_name: str = None, sync_status: int = None): # noqa: E501
+ """DomainConfigSyncResult - a model defined in Swagger
+
+        :param host_id: The host_id of this DomainConfigSyncResult.  # noqa: E501
+ :type host_id: int
+ :param domain_name: The domain_name of this DomainConfigSyncResult. # noqa: E501
+ :type domain_name: str
+ :param sync_status: The sync_status of this DomainConfigSyncResult. # noqa: E501
+ :type sync_status: int
+ """
+ self.swagger_types = {
+ 'host_id': int,
+ 'domain_name': str,
+ 'sync_status': int
+ }
+
+ self.attribute_map = {
+ 'host_id': 'hostId',
+ 'domain_name': 'domainName',
+ 'sync_status': 'syncStatus'
+ }
+
+ self._host_id = host_id
+ self._domain_name = domain_name
+ self._sync_status = sync_status
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'DomainConfigSyncResult':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The DomainConfigSyncResult of this DomainConfigSyncResult. # noqa: E501
+ :rtype: DomainConfigSyncResult
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def host_id(self) -> int:
+ """Gets the host_id of this DomainConfigSyncResult.
+
+ the id of host # noqa: E501
+
+ :return: The host_id of this DomainConfigSyncResult.
+ :rtype: int
+ """
+ return self._host_id
+
+ @host_id.setter
+ def host_id(self, host_id: int):
+ """Sets the host_id of this DomainConfigSyncResult.
+
+ the id of host # noqa: E501
+
+ :param host_id: The host_id of this DomainConfigSyncResult.
+        :type host_id: int
+ """
+
+ self._host_id = host_id
+
+ @property
+ def domain_name(self) -> str:
+ """Gets the domain_name of this DomainConfigSyncResult.
+
+ :return: The domain_name of this DomainConfigSyncResult.
+ :rtype: str
+ """
+ return self._domain_name
+
+ @domain_name.setter
+ def domain_name(self, domain_name: str):
+ """Sets the domain_name of this DomainConfigSyncResult.
+
+ :param domain_name: The domain_name of this DomainConfigSyncResult.
+ :type domain_name: str
+ """
+
+ self._domain_name = domain_name
+
+ @property
+ def sync_status(self) -> int:
+ """Gets the sync_status of this DomainConfigSyncResult.
+
+ :return: The sync_status of this DomainConfigSyncResult.
+ :rtype: int
+ """
+ return self._sync_status
+
+ @sync_status.setter
+ def sync_status(self, sync_status: int):
+ """Sets the sync_status of this DomainConfigSyncResult.
+
+
+ :param sync_status: The sync_status of this DomainConfigSyncResult.
+ :type sync_status: str
+ """
+
+ self._sync_status = sync_status
diff --git a/ragdoll/models/domain_ip.py b/ragdoll/models/domain_ip.py
new file mode 100644
index 0000000..932afec
--- /dev/null
+++ b/ragdoll/models/domain_ip.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from ragdoll.models.base_model_ import Model
+from ragdoll import util
+
+
+class DomainIp(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, domain_name: str=None, ip: str=None): # noqa: E501
+        """DomainIp - a model defined in Swagger
+
+ :param domain_name: The domain_name of this DomainName. # noqa: E501
+ :type domain_name: str
+ """
+ self.swagger_types = {
+ 'domain_name': str,
+ 'ip': str
+ }
+
+ self.attribute_map = {
+ 'domain_name': 'domainName',
+ 'ip': 'ip'
+ }
+
+ self._domain_name = domain_name
+ self._ip = ip
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'DomainName':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The DomainName of this DomainName. # noqa: E501
+ :rtype: DomainName
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def domain_name(self) -> str:
+ """Gets the domain_name of this DomainName.
+
+ domain name # noqa: E501
+
+ :return: The domain_name of this DomainName.
+ :rtype: str
+ """
+ return self._domain_name
+
+ @domain_name.setter
+ def domain_name(self, domain_name: str):
+ """Sets the domain_name of this DomainName.
+
+ domain name # noqa: E501
+
+ :param domain_name: The domain_name of this DomainName.
+ :type domain_name: str
+ """
+
+ self._domain_name = domain_name
+
+ @property
+ def ip(self) -> str:
+ """Gets the ip in this domain.
+
+ the ipv4 address in this domain # noqa: E501
+
+ :return: The ip in this domain.
+ :rtype: str
+ """
+ return self._ip
+
+ @ip.setter
+ def ip(self, ip: str):
+ """Sets the ip in this domain.
+
+ the ipv4 address in this domain # noqa: E501
+
+ :param ip: The ip in this domain.
+ :type ip: str
+ """
+
+ self._ip = ip
diff --git a/ragdoll/models/domain_names.py b/ragdoll/models/domain_names.py
new file mode 100644
index 0000000..096cd81
--- /dev/null
+++ b/ragdoll/models/domain_names.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+
+from __future__ import absolute_import
+from datetime import date, datetime # noqa: F401
+
+from typing import List, Dict # noqa: F401
+
+from ragdoll.models import DomainName
+from ragdoll.models.base_model_ import Model
+from ragdoll import util
+
+
+class DomainNames(Model):
+ """NOTE: This class is auto generated by the swagger code generator program.
+
+ Do not edit the class manually.
+ """
+
+ def __init__(self, domain_names: List[DomainName]=None): # noqa: E501
+        """DomainNames - a model defined in Swagger
+
+ :param domain_names: The domain_names of this DomainNames. # noqa: E501
+ :type domain_names: List[DomainName]
+ """
+ self.swagger_types = {
+ 'domain_names': List[DomainName]
+ }
+
+ self.attribute_map = {
+ 'domain_names': 'domainNames'
+ }
+
+ self._domain_names = domain_names
+
+ @classmethod
+ def from_dict(cls, dikt) -> 'DomainNames':
+ """Returns the dict as a model
+
+ :param dikt: A dict.
+ :type: dict
+ :return: The DomainNames of this DomainNames. # noqa: E501
+ :rtype: DomainNames
+ """
+ return util.deserialize_model(dikt, cls)
+
+ @property
+ def domain_names(self) -> List[DomainName]:
+ """Gets the domain_names of this DomainNames.
+
+
+ :return: The domain_names of this DomainNames.
+ :rtype: List[DomainName]
+ """
+ return self._domain_names
+
+ @domain_names.setter
+ def domain_names(self, domain_names: List[DomainName]):
+ """Sets the domain_names of this DomainNames.
+
+
+ :param domain_names: The domain_names of this DomainNames.
+        :type domain_names: List[DomainName]
+ """
+
+ self._domain_names = domain_names
diff --git a/ragdoll/models/realconf_base_info.py b/ragdoll/models/realconf_base_info.py
index 8fe74f6..6139866 100644
--- a/ragdoll/models/realconf_base_info.py
+++ b/ragdoll/models/realconf_base_info.py
@@ -15,7 +15,7 @@ class RealconfBaseInfo(Model):
Do not edit the class manually.
"""
- def __init__(self, path: str=None, file_path: str=None, rpm_name: str=None, rpm_version: str=None, rpm_release: str=None, file_attr: str=None, file_owner: str=None, conf_type: str=None, spacer: str=None, conf_contens: str=None): # noqa: E501
+ def __init__(self, path: str=None, file_path: str=None, rpm_name: str=None, rpm_version: str=None, rpm_release: str=None, file_attr: str=None, file_owner: str=None, conf_type: str=None, spacer: str=None, conf_contents: str=None): # noqa: E501
"""RealconfBaseInfo - a model defined in Swagger
:param path: The path of this RealconfBaseInfo. # noqa: E501
@@ -36,8 +36,8 @@ class RealconfBaseInfo(Model):
:type conf_type: str
:param spacer: The spacer of this RealconfBaseInfo. # noqa: E501
:type spacer: str
- :param conf_contens: The conf_contens of this RealconfBaseInfo. # noqa: E501
- :type conf_contens: str
+ :param conf_contents: The conf_contents of this RealconfBaseInfo. # noqa: E501
+ :type conf_contents: str
"""
self.swagger_types = {
'path': str,
@@ -49,7 +49,7 @@ class RealconfBaseInfo(Model):
'file_owner': str,
'conf_type': str,
'spacer': str,
- 'conf_contens': str
+ 'conf_contents': str
}
self.attribute_map = {
@@ -62,7 +62,7 @@ class RealconfBaseInfo(Model):
'file_owner': 'fileOwner',
'conf_type': 'confType',
'spacer': 'spacer',
- 'conf_contens': 'confContents'
+ 'conf_contents': 'confContents'
}
self._path = path
@@ -74,7 +74,7 @@ class RealconfBaseInfo(Model):
self._file_owner = file_owner
self._conf_type = conf_type
self._spacer = spacer
- self._conf_contens = conf_contens
+ self._conf_contents = conf_contents
@classmethod
def from_dict(cls, dikt) -> 'RealconfBaseInfo':
@@ -293,24 +293,24 @@ class RealconfBaseInfo(Model):
self._spacer = spacer
@property
- def conf_contens(self) -> str:
- """Gets the conf_contens of this RealconfBaseInfo.
+ def conf_contents(self) -> str:
+ """Gets the conf_contents of this RealconfBaseInfo.
the specific content of the configuration item # noqa: E501
- :return: The conf_contens of this RealconfBaseInfo.
+ :return: The conf_contents of this RealconfBaseInfo.
:rtype: str
"""
- return self._conf_contens
+ return self._conf_contents
- @conf_contens.setter
- def conf_contens(self, conf_contens: str):
- """Sets the conf_contens of this RealconfBaseInfo.
+ @conf_contents.setter
+ def conf_contents(self, conf_contents: str):
+ """Sets the conf_contents of this RealconfBaseInfo.
the specific content of the configuration item # noqa: E501
- :param conf_contens: The conf_contens of this RealconfBaseInfo.
- :type conf_contens: str
+ :param conf_contents: The conf_contents of this RealconfBaseInfo.
+ :type conf_contents: str
"""
- self._conf_contens = conf_contens
+ self._conf_contents = conf_contents
diff --git a/ragdoll/swagger/swagger.yaml b/ragdoll/swagger/swagger.yaml
index 8fd5dcb..e90201f 100644
--- a/ragdoll/swagger/swagger.yaml
+++ b/ragdoll/swagger/swagger.yaml
@@ -303,7 +303,12 @@ paths:
summary: "query expected configuration value in the current hostId node"
description: "queryExpectedConfs"
operationId: "query_excepted_confs"
- parameters: [ ]
+ parameters:
+ - in: "body"
+ name: "body"
+ required: false
+ schema:
+ $ref: "#/definitions/DomainNames"
responses:
"200":
description: "query expected configuration value successfully"
@@ -337,6 +342,50 @@ paths:
items:
$ref: "#/definitions/HostSyncResult"
x-swagger-router-controller: "ragdoll.controllers.confs_controller"
+ /confs/batch/syncConf:
+ put:
+ tags:
+ - "confs"
+ summary: "batch synchronize the configuration information of the configuration domain\
+ \ to the host"
+ description: "batch synchronize the configuration information of the configuration\
+ \ domain to the host"
+ operationId: "batch_sync_conf_to_host_from_domain"
+ parameters:
+ - in: "body"
+ name: "body"
+ required: false
+ schema:
+ $ref: "#/definitions/BatchSyncReq"
+ responses:
+ "200":
+ description: "synchronize the configuration items successfully"
+ schema:
+ type: "array"
+ items:
+ $ref: "#/definitions/HostSyncResult"
+ x-swagger-router-controller: "ragdoll.controllers.confs_controller"
+ /confs/domain/diff:
+ post:
+ tags:
+ - "confs"
+ summary: "compare domain conf different"
+ description: "compare domain conf different"
+ operationId: "compare_conf_diff"
+ parameters:
+ - in: "body"
+ name: "body"
+ required: false
+ schema:
+ $ref: "#/definitions/CompareConfDiff"
+ responses:
+ "200":
+ description: "compare domain conf different successfully"
+ schema:
+ type: "array"
+ items:
+ $ref: "#/definitions/DomainConfigSyncResult"
+ x-swagger-router-controller: "ragdoll.controllers.confs_controller"
/confs/getDomainStatus:
post:
tags:
@@ -349,7 +398,7 @@ paths:
name: "body"
required: false
schema:
- $ref: "#/definitions/DomainName"
+ $ref: "#/definitions/DomainIp"
responses:
"200":
description: "get the status of the domain successfully"
@@ -418,6 +467,22 @@ definitions:
domainName:
type: "string"
description: "domain name"
+ DomainNames:
+ type: "object"
+ properties:
+ domainNames:
+ type: "array"
+ items:
+ $ref: "#/definitions/DomainName"
+ DomainIp:
+ type: "object"
+ properties:
+ domainName:
+ type: "string"
+ description: "domain name"
+ ip:
+ type: "string"
+ description: "ip"
HostInfos:
type: "object"
properties:
@@ -780,6 +845,18 @@ definitions:
example:
hostId: "hostId"
syncStatus: "SUCCESS"
+ DomainConfigSyncResult:
+ type: "object"
+ properties:
+ hostId:
+ type: "integer"
+ description: "the id of host"
+ domainName:
+ type: "string"
+ description: "the domainName of host"
+ syncStatus:
+ type: "integer"
+ description: "the syncStatus of host"
CollectInfo:
type: "object"
properties:
@@ -820,6 +897,35 @@ definitions:
type: "array"
items:
type: "string"
+ CompareConfDiff:
+ type: "object"
+ properties:
+ expectedConfsResp:
+ type: "array"
+ items:
+ $ref: "#/definitions/DomainConfBaseInfos"
+ domainResult:
+ type: object
+ description: "domain real config"
+ DomainConfBaseInfos:
+ type: "object"
+ properties:
+ confBaseInfos:
+ type: array
+ items:
+ $ref: "#/definitions/ConfBase"
+ domainName:
+ type: "string"
+ description: "domain name"
+ ConfBase:
+ type: "object"
+ properties:
+ filePath:
+ type: "string"
+ description: "file path"
+ expectedContents:
+ type: "string"
+ description: "expected contents"
SyncReq:
type: "object"
properties:
@@ -830,6 +936,16 @@ definitions:
type: "array"
items:
$ref: "#/definitions/SyncHostConfs"
+ BatchSyncReq:
+ type: "object"
+ properties:
+ domainName:
+ type: "string"
+ description: "domain name"
+ hostIds:
+ type: "array"
+ items:
+ type: "integer"
SingleConf:
type: object
properties:
diff --git a/ragdoll/test/test_conf_model.py b/ragdoll/test/test_conf_model.py
index 35d6a0b..e29b2f8 100644
--- a/ragdoll/test/test_conf_model.py
+++ b/ragdoll/test/test_conf_model.py
@@ -13,7 +13,7 @@ from ragdoll.log.log import LOGGER
from ragdoll.test import BaseTestCase
from ragdoll.utils.yang_module import YangModule
from ragdoll.utils.object_parse import ObjectParse
-from ragdoll.controllers.format import Format
+from ragdoll.utils.format import Format
class TestConfModel():
""" Test config_model """
diff --git a/ragdoll/url.py b/ragdoll/url.py
new file mode 100644
index 0000000..4272bba
--- /dev/null
+++ b/ragdoll/url.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python3
+# ******************************************************************************
+# Copyright (c) Huawei Technologies Co., Ltd. 2021-2022. All rights reserved.
+# licensed under the Mulan PSL v2.
+# You can use this software according to the terms and conditions of the Mulan PSL v2.
+# You may obtain a copy of Mulan PSL v2 at:
+# http://license.coscl.org.cn/MulanPSL2
+# THIS SOFTWARE IS PROVIDED ON AN 'AS IS' BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+# PURPOSE.
+# See the Mulan PSL v2 for more details.
+# ******************************************************************************/
+"""
+@FileName: url.py
+@Time: 2024/3/4 10:31
+@Author: JiaoSiMao
+Description:
+"""
+from ragdoll.conf.constant import (
+ CREATE_DOMAIN, DELETE_DOMAIN, QUERY_DOMAIN, ADD_HOST_IN_DOMAIN, DELETE_HOST_IN_DOMAIN, GET_HOST_BY_DOMAIN,
+ ADD_MANAGEMENT_CONFS_IN_DOMAIN, UPLOAD_MANAGEMENT_CONFS_IN_DOMAIN, DELETE_MANAGEMENT_CONFS_IN_DOMAIN,
+ GET_MANAGEMENT_CONFS_IN_DOMAIN, QUERY_CHANGELOG_OF_MANAGEMENT_CONFS_IN_DOMAIN, GET_SYNC_STATUS,
+ QUERY_EXCEPTED_CONFS, QUERY_REAL_CONFS, SYNC_CONF_TO_HOST_FROM_DOMAIN, QUERY_SUPPORTED_CONFS, COMPARE_CONF_DIFF,
+ BATCH_SYNC_CONF_TO_HOST_FROM_DOMAIN
+)
+from ragdoll.domain_manage import view as domain_view
+from ragdoll.host_manage import view as host_view
+from ragdoll.domain_conf_manage import view as domain_conf_view
+from ragdoll.confs_manage import view as confs_view
+URLS = []
+
+SPECIFIC_URLS = {
+ "DOMAIN_URLS": [
+ (domain_view.CreateDomain, CREATE_DOMAIN),
+ (domain_view.DeleteDomain, DELETE_DOMAIN),
+ (domain_view.QueryDomain, QUERY_DOMAIN),
+ ],
+ "HOST_URLS": [
+ (host_view.AddHostInDomain, ADD_HOST_IN_DOMAIN),
+ (host_view.DeleteHostInDomain, DELETE_HOST_IN_DOMAIN),
+ (host_view.GetHostByDomainName, GET_HOST_BY_DOMAIN),
+ ],
+ "MANAGEMENT_URLS": [
+ (domain_conf_view.AddManagementConfsInDomain, ADD_MANAGEMENT_CONFS_IN_DOMAIN),
+ (domain_conf_view.UploadManagementConfsInDomain, UPLOAD_MANAGEMENT_CONFS_IN_DOMAIN),
+ (domain_conf_view.DeleteManagementConfsInDomain, DELETE_MANAGEMENT_CONFS_IN_DOMAIN),
+ (domain_conf_view.GetManagementConfsInDomain, GET_MANAGEMENT_CONFS_IN_DOMAIN),
+ (domain_conf_view.QueryChangelogOfManagementConfsInDomain, QUERY_CHANGELOG_OF_MANAGEMENT_CONFS_IN_DOMAIN),
+ ],
+ "CONFS_URLS": [
+ (confs_view.GetTheSyncStatusOfDomain, GET_SYNC_STATUS),
+ (confs_view.QueryExceptedConfs, QUERY_EXCEPTED_CONFS),
+ (confs_view.QueryRealConfs, QUERY_REAL_CONFS),
+ (confs_view.SyncConfToHostFromDomain, SYNC_CONF_TO_HOST_FROM_DOMAIN),
+ (confs_view.QuerySupportedConfs, QUERY_SUPPORTED_CONFS),
+ (confs_view.CompareConfDiff, COMPARE_CONF_DIFF),
+ (confs_view.BatchSyncConfToHostFromDomain, BATCH_SYNC_CONF_TO_HOST_FROM_DOMAIN),
+ ]
+}
+
+for _, value in SPECIFIC_URLS.items():
+ URLS.extend(value)
diff --git a/ragdoll/util.py b/ragdoll/util.py
index edb2251..6d671c3 100644
--- a/ragdoll/util.py
+++ b/ragdoll/util.py
@@ -1,7 +1,6 @@
import datetime
import six
-import typing
def _deserialize(data, klass):
diff --git a/ragdoll/utils/conf_tools.py b/ragdoll/utils/conf_tools.py
index 0ad1f8f..2fe0689 100644
--- a/ragdoll/utils/conf_tools.py
+++ b/ragdoll/utils/conf_tools.py
@@ -6,7 +6,7 @@ from enum import Enum
from ragdoll.const.conf_handler_const import CONFIG
from ragdoll.utils.git_tools import GitTools
-from ragdoll.controllers.format import Format
+from ragdoll.utils.format import Format
from ragdoll.log.log import LOGGER
from ragdoll.models.real_conf import RealConf
from ragdoll.models.real_conf_path import RealConfPath
@@ -80,10 +80,7 @@ class ConfTools(object):
def listToDict(self, manaConfs):
res = {}
LOGGER.debug("manaConfs is : {}".format(manaConfs))
- LOGGER.debug("the typr of manaConfs is : {}".format(type(manaConfs)))
for d_conf in manaConfs:
- LOGGER.debug("d_conf is : {}".format(d_conf))
- LOGGER.debug("the type of d_conf is : {}".format(type(d_conf)))
path = d_conf.get(PATH)
value = d_conf.get(EXCEPTED_VALUE).strip()
level = path.split("/")
@@ -120,8 +117,6 @@ class ConfTools(object):
"""
realConfWithFeature = {}
LOGGER.debug("featureList is : {}".format(featureList))
- lenFeature = len(featureList)
- tempRealConf = realConfDict
d_real_file = {}
d_real_file[featureList[1]] = realConfDict
d_real_feature = {}
@@ -173,11 +168,8 @@ class ConfTools(object):
]
"""
res = []
- conf_nums = len(realConfResText)
LOGGER.debug("realConfResText is : {}".format(realConfResText))
for d_conf in realConfResText:
- LOGGER.debug("d_conf is : {}".format(d_conf))
- LOGGER.debug("d_conf 's type is : {}".format(type(d_conf)))
domainName = d_conf.get("domainName")
hostId = d_conf.get("hostID")
conf_base_infos = d_conf.get("confBaseInfos")
@@ -187,7 +179,6 @@ class ConfTools(object):
for d_conf_info in conf_base_infos:
paths = d_conf_info.get("path").split(" ")
confContents = json.loads(d_conf_info.get("confContents"))
- LOGGER.debug("confContents is : {}".format(confContents))
for d_path in paths:
x_path = os.path.join(domainName, d_path)
remove_feature_path = d_path.split("/")[2:]
@@ -237,13 +228,11 @@ class ConfTools(object):
dict1 = json.loads(real_conf)
dict2 = json.loads(man_conf)
- res = ""
+ res = SYNCHRONIZED
for src_list, dst_list in zip(sorted(dict1), sorted(dict2)):
if str(dict1[src_list]) != str(dict2[dst_list]):
res = NOTSYNCHRONIZE
- if not res:
- res = SYNCHRONIZED
-
+ break
return res
def getRpmInfo(self, path):
@@ -253,15 +242,12 @@ class ConfTools(object):
input: '/etc/yum.repos.d/openEuler.repo'
output: openEuler-repos 1.0 3.0.oe1.aarch64
"""
- res = ""
if not os.path.exists(path):
return None
cmd = "rpm -qf {}".format(path)
gitTools = GitTools()
package_string = gitTools.run_shell_return_output(cmd).decode()
LOGGER.debug("package_string is : {}".format(package_string))
- # lines = returnCode.rsplit(STRIKETHROUGH)
- # res = lines[0]
if 'not owned by any package' in package_string:
return None, None, None
pkg, arch = Format.rsplit(package_string, Format.arch_sep(package_string))
@@ -270,7 +256,6 @@ class ConfTools(object):
pkg, release = Format.rsplit(pkg, '-')
name, version = Format.rsplit(pkg, '-')
# If the value of epoch needs to be returned separately,
- # epoch, version = version.split(':', 1) if ":" in version else ['0', version]
return name, release, version
def getFileAttr(self, path):
@@ -315,7 +300,6 @@ class ConfTools(object):
d_lines = line.split(RightParen + TWOSPACE)
for d_line in d_lines:
d_line = d_line.lstrip()
- # print("d_line is : {}".format(d_line))
if d_line.startswith(ACCESS):
fileAttr = d_line.split(FS)[0].split(LeftParen)[1]
elif d_line.startswith(UID):
@@ -355,15 +339,15 @@ class ConfTools(object):
ll_res_list = ll_res.split(SPACE)
fileType = ll_res_list[0]
- permssions = "0"
+ permissions = "0"
for perm in range(0, PERMISSION):
items = fileType[1 + perm * PERMISSION: (perm + 1) * PERMISSION + 1]
value = 0
for d_item in items:
d_item_value = self.switch_perm(d_item)
value = value + d_item_value
- permssions = permssions + str(value)
- LOGGER.debug("the perssion is : {}".format(permssions))
+ permissions = permissions + str(value)
+ LOGGER.debug("the permission is : {}".format(permissions))
fileOwner = LeftParen + ll_res_list[2] + SPACE + ll_res_list[3] + RightParen
LOGGER.debug("the fileOwner is : {}".format(fileOwner))
@@ -446,9 +430,6 @@ class ConfTools(object):
value = ""
for count in range(0, len(real_conf)):
d_real = real_conf[count]
- # print("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&")
- # print("d_real is : {}".format(d_real))
- # print("path is : {}".format(path))
if d_real.path == path:
index = count
value = d_real.real_value.strip()
@@ -622,7 +603,43 @@ class ConfTools(object):
object_file_url = "{address}:{port}{api}".format(address=object_file_address, api=object_file_api,
port=object_file_port)
- url = {"collect_url": collect_url, "sync_url": sync_url, "object_file_url": object_file_url}
+ batch_sync_address = ast.literal_eval(cf.get("sync", "batch_sync_address"))
+ batch_sync_api = ast.literal_eval(cf.get("sync", "batch_sync_api"))
+ batch_sync_port = str(cf.get("sync", "sync_port"))
+ batch_sync_url = "{address}:{port}{api}".format(address=batch_sync_address, api=batch_sync_api,
+ port=batch_sync_port)
+
+ host_sync_status_address = ast.literal_eval(cf.get("sync_status", "host_sync_status_address"))
+ add_host_sync_status_api = ast.literal_eval(cf.get("sync_status", "add_host_sync_status_api"))
+ delete_host_sync_status_api = ast.literal_eval(cf.get("sync_status", "delete_host_sync_status_api"))
+ delete_all_host_sync_status_api = ast.literal_eval(cf.get("sync_status", "delete_all_host_sync_status_api"))
+ host_sync_status_port = str(cf.get("sync_status", "host_sync_status_port"))
+ add_host_sync_status_url = "{address}:{port}{api}".format(address=host_sync_status_address,
+ api=add_host_sync_status_api,
+ port=host_sync_status_port)
+ delete_host_sync_status_url = "{address}:{port}{api}".format(address=host_sync_status_address,
+ api=delete_host_sync_status_api,
+ port=host_sync_status_port)
+ delete_all_host_sync_status_url = "{address}:{port}{api}".format(address=host_sync_status_address,
+ api=delete_all_host_sync_status_api,
+ port=host_sync_status_port)
+
+ conf_trace_mgmt_address = ast.literal_eval(cf.get("conf_trace", "conf_trace_mgmt_address"))
+ conf_trace_mgmt_api = ast.literal_eval(cf.get("conf_trace", "conf_trace_mgmt_api"))
+ conf_trace_delete_api = ast.literal_eval(cf.get("conf_trace", "conf_trace_delete_api"))
+ conf_trace_port = str(cf.get("conf_trace", "conf_trace_port"))
+ conf_trace_mgmt_url = "{address}:{port}{api}".format(address=conf_trace_mgmt_address,
+ api=conf_trace_mgmt_api,
+ port=conf_trace_port)
+ conf_trace_delete_url = "{address}:{port}{api}".format(address=conf_trace_mgmt_address,
+ api=conf_trace_delete_api,
+ port=conf_trace_port)
+
+ url = {"collect_url": collect_url, "sync_url": sync_url, "object_file_url": object_file_url,
+ "batch_sync_url": batch_sync_url, "add_host_sync_status_url": add_host_sync_status_url,
+ "delete_host_sync_status_url": delete_host_sync_status_url,
+ "delete_all_host_sync_status_url": delete_all_host_sync_status_url,
+ "conf_trace_mgmt_url": conf_trace_mgmt_url, "conf_trace_delete_url": conf_trace_delete_url}
return url
def load_port_by_conf(self):
diff --git a/ragdoll/utils/format.py b/ragdoll/utils/format.py
new file mode 100644
index 0000000..a674b44
--- /dev/null
+++ b/ragdoll/utils/format.py
@@ -0,0 +1,1138 @@
+import os
+import re
+import json
+import configparser
+import ast
+
+import requests
+
+from ragdoll.log.log import LOGGER
+
+from ragdoll.const.conf_handler_const import NOT_SYNCHRONIZE, SYNCHRONIZED, CONFIG, \
+ DIRECTORY_FILE_PATH_LIST
+from ragdoll.models import ConfSyncedRes
+from ragdoll.models.base_response import BaseResponse # noqa: E501
+from ragdoll.models.conf_file import ConfFile
+from ragdoll.models.conf_files import ConfFiles
+from ragdoll.models.host import Host # noqa: E501
+from ragdoll.utils.host_tools import HostTools
+
+
+class Format(object):
+
+ @staticmethod
+ def domainCheck(domainName):
+ res = True
+ if not re.match(r"^[A-Za-z0-9_\.-]*$", domainName) or domainName == "" or len(domainName) > 255:
+ res = False
+ return res
+
+ @staticmethod
+ def isDomainExist(domainName):
+ TARGETDIR = Format.get_git_dir()
+ domainPath = os.path.join(TARGETDIR, domainName)
+ if os.path.exists(domainPath):
+ return True
+
+ return False
+
+ @staticmethod
+ def spliceAllSuccString(obj, operation, succDomain):
+ """
+ Build the message string reporting that every obj completed the operation successfully.
+ """
+ codeString = "All {obj} {oper} successfully, {succ} {obj} in total.".format( \
+ obj=obj, oper=operation, succ=len(succDomain))
+ return codeString
+
+ @staticmethod
+ def splicErrorString(obj, operation, succDomain, failDomain):
+ """
+ Build the message string summarizing which items succeeded and which failed.
+ """
+ codeString = "{succ} {obj} {oper} successfully, {fail} {obj} {oper} failed.".format( \
+ succ=len(succDomain), obj=obj, oper=operation, fail=len(failDomain))
+
+ succString = "\n"
+ if len(succDomain) > 0:
+ succString = "These are successful: "
+ for succName in succDomain:
+ succString += succName + " "
+ succString += "."
+
+ if len(failDomain) > 0:
+ failString = "These are failed: "
+ for failName in failDomain:
+ failString += failName + " "
+ return codeString + succString + failString
+
+ return codeString + succString
+
+ @staticmethod
+ def two_abs_join(abs1, abs2):
+ """
+ Absolute path Joins two absolute paths together
+ :param abs1: main path
+ :param abs2: the spliced path
+ :return: together the path
+ """
+ # 1. Format path (change \\ in path to \)
+ abs2 = os.fspath(abs2)
+
+ # 2. Split the path file
+ abs2 = os.path.splitdrive(abs2)[1]
+ # 3. Remove the beginning '/'
+ abs2 = abs2.strip('\\/') or abs2
+ return os.path.abspath(os.path.join(abs1, abs2))
+
+ @staticmethod
+ def isContainedHostIdInfile(f_file, content):
+ isContained = False
+ with open(f_file, 'r') as d_file:
+ for line in d_file.readlines():
+ line_dict = json.loads(str(ast.literal_eval(line)).replace("'", "\""))
+ if content == line_dict["host_id"]:
+ isContained = True
+ break
+ return isContained
+
+ @staticmethod
+ def isContainedHostIdInOtherDomain(content):
+ from ragdoll.conf.constant import TARGETDIR
+ isContained = False
+ contents = os.listdir(TARGETDIR)
+ folders = [f for f in contents if os.path.isdir(os.path.join(TARGETDIR, f))]
+ for folder in folders:
+ hostPath = os.path.join(os.path.join(TARGETDIR, folder), "hostRecord.txt")
+ if os.path.isfile(hostPath):
+ with open(hostPath, 'r') as d_file:
+ for line in d_file.readlines():
+ line_dict = json.loads(str(ast.literal_eval(line)).replace("'", "\""))
+ if content == line_dict["host_id"]:
+ isContained = True
+ break
+ return isContained
+
+ @staticmethod
+ def addHostToFile(d_file, host):
+ host = {'host_id': host["hostId"], 'ip': host["ip"], 'ipv6': host["ipv6"]}
+ info_json = json.dumps(str(host), sort_keys=False, indent=4, separators=(',', ': '))
+ os.umask(0o077)
+ with open(d_file, 'a+') as host_file:
+ host_file.write(info_json)
+ host_file.write("\n")
+
+ @staticmethod
+ def getSubDirFiles(path):
+ """
+ desc: Walk the directory tree and return both real file paths and xpaths, so subdirectory files can be logged to successConf
+ """
+ fileRealPathList = []
+ fileXPathlist = []
+ for root, dirs, files in os.walk(path):
+ if len(files) > 0:
+ preXpath = root.split('/', 3)[3]
+ for d_file in files:
+ xpath = os.path.join(preXpath, d_file)
+ fileXPathlist.append(xpath)
+ realPath = os.path.join(root, d_file)
+ fileRealPathList.append(realPath)
+
+ return fileRealPathList, fileXPathlist
+
+ @staticmethod
+ def isHostInDomain(domainName):
+ """
+ desc: Query whether host information is configured in the given domain
+ """
+ isHostInDomain = False
+ TARGETDIR = Format.get_git_dir()
+ domainPath = os.path.join(TARGETDIR, domainName)
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if os.path.isfile(hostPath):
+ isHostInDomain = True
+
+ return isHostInDomain
+
+ @staticmethod
+ def isHostIdExist(hostPath, hostId):
+ """
+ desc: Query whether the hostId exists in the current domain's host management records
+ """
+ isHostIdExist = False
+ if os.path.isfile(hostPath) and os.stat(hostPath).st_size > 0:
+ with open(hostPath) as h_file:
+ for line in h_file.readlines():
+ if hostId in line:
+ isHostIdExist = True
+ break
+
+ return isHostIdExist
+
+ @staticmethod
+ def get_file_content_by_readlines(d_file):
+ """
+ desc: remove empty lines and comments from d_file
+ """
+ res = []
+ try:
+ with open(d_file, 'r') as s_f:
+ lines = s_f.readlines()
+ for line in lines:
+ tmp = line.strip()
+ if not len(tmp) or tmp.startswith("#"):
+ continue
+ res.append(line)
+ except FileNotFoundError:
+ LOGGER.error(f"File not found: {d_file}")
+ except IOError as e:
+ LOGGER.error(f"IO error: {e}")
+ except Exception as e:
+ LOGGER.error(f"An error occurred: {e}")
+ return res
+
+ @staticmethod
+ def get_file_content_by_read(d_file):
+ """
+ desc: return a string after read the d_file
+ """
+ if not os.path.exists(d_file):
+ return ""
+ with open(d_file, 'r') as s_f:
+ lines = s_f.read()
+ return lines
+
+ @staticmethod
+ def rsplit(_str, seps):
+ """
+ Splits _str by the first sep in seps that is found from the right side.
+ Returns a tuple without the separator.
+ """
+ for idx, ch in enumerate(reversed(_str)):
+ if ch in seps:
+ return _str[0:-idx - 1], _str[-idx:]
+
+ @staticmethod
+ def arch_sep(package_string):
+ """
+ Helper method for finding if arch separator is '.' or '-'
+
+ Args:
+ package_string (str): dash separated package string such as 'bash-4.2.39-3.el7'.
+
+ Returns:
+ str: arch separator
+ """
+ return '.' if package_string.rfind('.') > package_string.rfind('-') else '-'
+
+ @staticmethod
+ def set_file_content_by_path(content, path):
+ res = 0
+ if os.path.exists(path):
+ with open(path, 'w+') as d_file:
+ for d_cont in content:
+ d_file.write(d_cont)
+ d_file.write("\n")
+ res = 1
+ return res
+
+ @staticmethod
+ def get_git_dir():
+ cf = configparser.ConfigParser()
+ if os.path.exists(CONFIG):
+ cf.read(CONFIG, encoding="utf-8")
+ else:
+ parent = os.path.dirname(os.path.realpath(__file__))
+ conf_path = os.path.join(parent, "../../config/gala-ragdoll.conf")
+ cf.read(conf_path, encoding="utf-8")
+ git_dir = ast.literal_eval(cf.get("git", "git_dir"))
+ return git_dir
+
+ @staticmethod
+ def get_hostinfo_by_domain(domainName):
+ """
+ desc: Query hostinfo by domainname
+ """
+ LOGGER.debug("Get hostinfo by domain : {}".format(domainName))
+ TARGETDIR = Format.get_git_dir()
+ hostlist = []
+ domainPath = os.path.join(TARGETDIR, domainName)
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if not os.path.exists(hostPath):
+ return hostlist
+ try:
+ with open(hostPath, 'r') as d_file:
+ for line in d_file.readlines():
+ json_str = json.loads(line)
+ host_json = ast.literal_eval(json_str)
+ hostId = host_json["host_id"]
+ ip = host_json["ip"]
+ ipv6 = host_json["ipv6"]
+ host = Host(host_id=hostId, ip=ip, ipv6=ipv6)
+ hostlist.append(host.to_dict())
+ except OSError as err:
+ LOGGER.error("OS error: {0}".format(err))
+ return hostlist
+ if len(hostlist) == 0:
+ LOGGER.debug("Hostlist is empty !")
+ else:
+ LOGGER.debug("Hostlist is : {}".format(hostlist))
+ return hostlist
+
+ @staticmethod
+ def get_host_id_by_ip(ip, domainName):
+ """
+ desc: Query hostinfo by host ip
+ """
+ LOGGER.debug("Get hostinfo by ip : {}".format(ip))
+ TARGET_DIR = Format.get_git_dir()
+ hostlist = []
+ domainPath = os.path.join(TARGET_DIR, domainName)
+ hostPath = os.path.join(domainPath, "hostRecord.txt")
+ if not os.path.isfile(hostPath) or os.stat(hostPath).st_size == 0:
+ return hostlist
+ try:
+ with open(hostPath, 'r') as d_file:
+ for line in d_file.readlines():
+ json_str = json.loads(line)
+ host_json = ast.literal_eval(json_str)
+ if host_json["ip"] == ip:
+ return host_json["host_id"]
+ except OSError as err:
+ LOGGER.error("OS error: {0}".format(err))
+
+ @staticmethod
+ def get_manageconf_by_domain(domain):
+ LOGGER.debug("Get managerconf by domain : {}".format(domain))
+ expected_conf_lists = ConfFiles(domain_name=domain, conf_files=[])
+ TARGETDIR = Format.get_git_dir()
+ domainPath = os.path.join(TARGETDIR, domain)
+ from ragdoll.utils.yang_module import YangModule
+ for root, dirs, files in os.walk(domainPath):
+ if len(files) > 0 and len(root.split('/')) > 3:
+ if "hostRecord.txt" in files:
+ continue
+ for d_file in files:
+ d_file_path = os.path.join(root, d_file)
+ contents = Format.get_file_content_by_read(d_file_path)
+ feature = os.path.join(root.split('/')[-1], d_file)
+ yang_modules = YangModule()
+ d_module = yang_modules.getModuleByFeature(feature)
+ file_lists = yang_modules.getFilePathInModdule(yang_modules.module_list)
+ file_path = file_lists.get(d_module.name()).split(":")[-1]
+
+ conf = ConfFile(file_path=file_path, contents=contents)
+ expected_conf_lists.conf_files.append(conf.to_dict())
+
+ LOGGER.debug("Expected_conf_lists is :{}".format(expected_conf_lists))
+ return expected_conf_lists.to_dict()
+
+ @staticmethod
+ def get_realconf_by_domain_and_host(domain, exist_host, access_token):
+ res = []
+ conf_files = Format.get_manageconf_by_domain(domain)
+ # get the real conf in host
+ conf_list = []
+ from ragdoll.utils.conf_tools import ConfTools
+ from ragdoll.utils.object_parse import ObjectParse
+ conf_tools = ConfTools()
+ for d_conf in conf_files.get("conf_files"):
+ file_path = d_conf.get("file_path").split(":")[-1]
+ if file_path not in DIRECTORY_FILE_PATH_LIST:
+ conf_list.append(file_path)
+ else:
+ d_conf_cs = d_conf.get("contents")
+ d_conf_contents = json.loads(d_conf_cs)
+ for d_conf_key, d_conf_value in d_conf_contents.items():
+ conf_list.append(d_conf_key)
+ get_real_conf_body = {}
+ get_real_conf_body_info = []
+ for d_host in exist_host:
+ get_real_conf_body_infos = {}
+ get_real_conf_body_infos["host_id"] = d_host
+ get_real_conf_body_infos["config_list"] = conf_list
+ get_real_conf_body_info.append(get_real_conf_body_infos)
+ get_real_conf_body["infos"] = get_real_conf_body_info
+ url = conf_tools.load_url_by_conf().get("collect_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+ try:
+ response = requests.post(url, data=json.dumps(get_real_conf_body), headers=headers) # post request
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ codeNum = 500
+ codeString = "Failed to obtain the actual configuration, please check the interface of config/collect."
+ base_rsp = BaseResponse(codeNum, codeString)
+ return base_rsp, codeNum
+ resp = json.loads(response.text).get("data")
+ resp_code = json.loads(response.text).get("code")
+ if (resp_code != "200") and (resp_code != "206"):
+ return res
+
+ if not resp or len(resp) == 0:
+ return res
+
+ success_lists = {}
+ failed_lists = {}
+
+ for d_res in resp:
+ d_host_id = d_res.get("host_id")
+ fail_files = d_res.get("fail_files")
+ if len(fail_files) > 0:
+ failed_lists["host_id"] = d_host_id
+ failed_lists_conf = []
+ for d_failed in fail_files:
+ failed_lists_conf.append(d_failed)
+ failed_lists["failed_conf"] = failed_lists_conf
+ failed_lists["success_conf"] = []
+ else:
+ success_lists["host_id"] = d_host_id
+ success_lists["success_conf"] = []
+ success_lists["failed_conf"] = []
+
+ read_conf_info = {"domainName": domain, "hostID": d_host_id, "confBaseInfos": []}
+ d_res_infos = d_res.get("infos")
+
+ real_directory_conf = {}
+ real_directory_conf_list = {}
+ object_parse = ObjectParse()
+ for d_file in d_res_infos:
+ content = d_file.get("content")
+ file_path = d_file.get("path")
+ file_atrr = d_file.get("file_attr").get("mode")
+ file_owner = "({}, {})".format(d_file.get("file_attr").get("group"),
+ d_file.get("file_attr").get("owner"))
+ directory_flag = False
+ for dir_path in DIRECTORY_FILE_PATH_LIST:
+ if str(file_path).find(dir_path) != -1:
+ if real_directory_conf.get(dir_path) is None:
+ real_directory_conf_list[dir_path] = list()
+ real_directory_conf[dir_path] = {"filePath": dir_path, "fileAttr": file_atrr,
+ "fileOwner": file_owner, "confContents": ""}
+ directory_conf = dict()
+ directory_conf["path"] = file_path
+ directory_conf["content"] = content
+ real_directory_conf_list.get(dir_path).append(directory_conf)
+ directory_flag = True
+ break
+ if not directory_flag:
+ Format.deal_conf_list_content(content, d_file, file_path, object_parse, read_conf_info)
+ if len(fail_files) > 0:
+ failed_lists.get("success_conf").append(file_path)
+ else:
+ success_lists.get("success_conf").append(file_path)
+
+ for dir_path, dir_value in real_directory_conf_list.items():
+ content_string = object_parse.parse_directory_single_conf_to_json(dir_value,
+ real_directory_conf[
+ dir_path]["filePath"])
+ real_directory_conf[dir_path]["confContents"] = content_string
+ real_conf_base_info = real_directory_conf.get(dir_path)
+
+ read_conf_info.get("confBaseInfos").append(real_conf_base_info)
+ res.append(read_conf_info)
+ return res
+
+ @staticmethod
+ def deal_conf_list_content(content, d_file, file_path, object_parse, read_conf_info):
+ content_string = object_parse.parse_conf_to_json(file_path, content)
+ file_atrr = d_file.get("file_attr").get("mode")
+ file_owner = "({}, {})".format(d_file.get("file_attr").get("group"),
+ d_file.get("file_attr").get("owner"))
+ real_conf_base_info = {"path": file_path, "filePath": file_path, "fileAttr": file_atrr, "fileOwner": file_owner,
+ "confContents": content_string}
+ read_conf_info.get("confBaseInfos").append(real_conf_base_info)
+
+ @staticmethod
+ def check_domain_param(domain):
+ code_num = 200
+ base_resp = None
+ check_res = Format.domainCheck(domain)
+ if not check_res:
+ num = 400
+ base_rsp = BaseResponse(num, "Failed to verify the input parameter, please check the input parameters.")
+ return base_rsp, num
+
+ # check whether the domain exists
+ is_exist = Format.isDomainExist(domain)
+ if not is_exist:
+ code_num = 404
+ base_rsp = BaseResponse(code_num, "The current domain does not exist, please create the domain first.")
+ return base_rsp, code_num
+
+ # check whether any host exists in the domain
+ is_host_list_exist = Format.isHostInDomain(domain)
+ if not is_host_list_exist:
+ code_num = 404
+ base_resp = BaseResponse(code_num, "The host information is not set in the current domain." +
+ "Please add the host information first")
+ return base_resp, code_num
+
+ @staticmethod
+ def get_hostid_list_by_domain(domain):
+ host_ids = []
+ res_text = Format.get_hostinfo_by_domain(domain)
+ if len(res_text) == 0:
+ return host_ids
+
+ host_tools = HostTools()
+ host_ids = host_tools.getHostList(res_text)
+ return host_ids
+
+ @staticmethod
+ def get_domain_conf(domain):
+ code_num = 200
+ base_resp = None
+ # get the host info in domain
+ LOGGER.debug("Get the conf by domain: {}.".format(domain))
+ code_string = "get domain confs succeed"
+ host_ids = Format.get_hostid_list_by_domain(domain)
+ if not host_ids:
+ code_num = 404
+ code_string = "The host currently controlled in the domain is empty. Please add host information to the " \
+ "domain. "
+ return code_num, code_string, list()
+
+ # get the management conf in the domain
+ man_conf_res_text = Format.get_manageconf_by_domain(domain)
+ manage_confs = man_conf_res_text.get("conf_files")
+
+ if len(manage_confs) == 0:
+ code_num = 404
+ code_string = "The configuration is not set in the current domain. Please add the configuration " \
+ "information first. "
+ return code_num, code_string, list()
+ return code_num, code_string, manage_confs
+
+ @staticmethod
+ def diff_mangeconf_with_realconf(domain, real_conf_res_text, manage_confs):
+ sync_status = {"domainName": domain, "hostStatus": []}
+
+ from ragdoll.utils.object_parse import ObjectParse
+
+ for d_real_conf in real_conf_res_text:
+ host_id = d_real_conf["hostID"]
+ host_sync_status = {"hostId": host_id, "syncStatus": []}
+ d_real_conf_base = d_real_conf["confBaseInfos"]
+ for d_conf in d_real_conf_base:
+ directory_conf_is_synced = {"file_path": "", "isSynced": "", "singleConf": []}
+ d_conf_path = d_conf["filePath"]
+
+ object_parse = ObjectParse()
+ # get the conf type and model
+ conf_type, conf_model = Format.get_conf_type_model(d_conf_path, object_parse)
+
+ Format.deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced,
+ host_sync_status, manage_confs)
+
+ if len(directory_conf_is_synced.get("singleConf")) > 0:
+ synced_flag = SYNCHRONIZED
+ for single_config in directory_conf_is_synced.get("singleConf"):
+ if single_config.get("singleIsSynced") == SYNCHRONIZED:
+ continue
+ else:
+ synced_flag = NOT_SYNCHRONIZE
+ directory_conf_is_synced["isSynced"] = synced_flag
+ host_sync_status.get("syncStatus").append(directory_conf_is_synced)
+ sync_status.get("hostStatus").append(host_sync_status)
+ return sync_status
+
+ @staticmethod
+ def deal_conf_sync_status(conf_model, d_conf, d_conf_path, directory_conf_is_synced, host_sync_status,
+ manage_confs):
+ comp_res = ""
+ if d_conf_path in DIRECTORY_FILE_PATH_LIST:
+ confContents = json.loads(d_conf["confContents"])
+ directory_conf_contents = ""
+ for d_man_conf in manage_confs:
+ d_man_conf_path = d_man_conf.get("file_path")
+ if d_man_conf_path != d_conf_path:
+ continue
+ else:
+ directory_conf_is_synced["file_path"] = d_conf_path
+ directory_conf_contents = d_man_conf.get("contents")
+
+ directory_conf_contents_dict = json.loads(directory_conf_contents)
+
+ for dir_conf_content_key, dir_conf_content_value in directory_conf_contents_dict.items():
+ if dir_conf_content_key not in confContents.keys():
+ single_conf = {"singleFilePath": dir_conf_content_key, "singleIsSynced": NOT_SYNCHRONIZE}
+ directory_conf_is_synced.get("singleConf").append(single_conf)
+ else:
+ dst_conf = confContents.get(dir_conf_content_key)
+ comp_res = conf_model.conf_compare(dir_conf_content_value, dst_conf)
+ single_conf = {"singleFilePath": dir_conf_content_key, "singleIsSynced": comp_res}
+ directory_conf_is_synced.get("singleConf").append(single_conf)
+ else:
+ for d_man_conf in manage_confs:
+ if d_man_conf.get("file_path").split(":")[-1] != d_conf_path:
+ continue
+ comp_res = conf_model.conf_compare(d_man_conf.get("contents"), d_conf["confContents"])
+ conf_is_synced = {"file_path": d_conf_path, "isSynced": comp_res}
+ host_sync_status.get("syncStatus").append(conf_is_synced)
+
+ @staticmethod
+ def convert_real_conf(conf_model, conf_type, conf_info, conf_path, parse):
+ # load yang model info
+ yang_info = parse._yang_modules.getModuleByFilePath(conf_path)
+ conf_model.load_yang_model(yang_info)
+
+ # load conf info
+ if conf_type == "kv":
+ spacer_type = parse._yang_modules.getSpacerInModdule([yang_info])
+ conf_model.read_conf(conf_info, spacer_type, yang_info)
+ else:
+ conf_model.read_conf(conf_info)
+
+ @staticmethod
+ def deal_conf_sync_status_for_db(conf_model, d_conf, d_conf_path, directory_conf_is_synced, host_sync_status,
+ manage_confs):
+ comp_res = ""
+ if d_conf_path in DIRECTORY_FILE_PATH_LIST:
+ confContents = d_conf.get("conf_contents")
+ directory_conf_contents = ""
+ for d_man_conf in manage_confs:
+ d_man_conf_path = d_man_conf.get("file_path")
+ if d_man_conf_path != d_conf_path:
+ continue
+ else:
+ directory_conf_is_synced["file_path"] = d_conf_path
+ directory_conf_contents = d_man_conf.get("contents")
+
+ directory_conf_contents_dict = json.loads(directory_conf_contents)
+
+ for dir_conf_content_key, dir_conf_content_value in directory_conf_contents_dict.items():
+ if dir_conf_content_key not in confContents.keys():
+ single_conf = {"singleFilePath": dir_conf_content_key, "singleIsSynced": NOT_SYNCHRONIZE}
+ directory_conf_is_synced["singleConf"].append(single_conf)
+ else:
+ dst_conf = confContents.get(dir_conf_content_key)
+ comp_res = conf_model.conf_compare(dir_conf_content_value, dst_conf)
+ single_conf = {"singleFilePath": dir_conf_content_key, "singleIsSynced": comp_res}
+ directory_conf_is_synced["singleConf"].append(single_conf)
+ else:
+ for d_man_conf in manage_confs:
+ if d_man_conf.get("file_path").split(":")[-1] != d_conf_path:
+ continue
+ contents = d_man_conf.get("contents")
+ comp_res = conf_model.conf_compare(contents, json.dumps(d_conf.get("conf_contents")))
+ conf_is_synced = {"file_path": d_conf_path, "isSynced": comp_res}
+ host_sync_status["syncStatus"].append(conf_is_synced)
+
+ @staticmethod
+ def get_conf_type_model(d_conf_path, object_parse):
+ for dir_path in DIRECTORY_FILE_PATH_LIST:
+ if str(d_conf_path).find(dir_path) != -1:
+ conf_type = object_parse.get_conf_type_by_conf_path(dir_path)
+ conf_model = object_parse.create_conf_model_by_type(conf_type)
+ else:
+ conf_type = object_parse.get_conf_type_by_conf_path(d_conf_path)
+ conf_model = object_parse.create_conf_model_by_type(conf_type)
+ return conf_type, conf_model
+
+ @staticmethod
+ def deal_sync_res(conf_tools, contents, file_path, host_id, host_sync_result, object_parse, access_token):
+ sync_conf_url = conf_tools.load_url_by_conf().get("sync_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+ if file_path in DIRECTORY_FILE_PATH_LIST:
+ conf_sync_res_list = []
+ for directory_file_path, directory_content in json.loads(contents).items():
+ content = object_parse.parse_json_to_conf(directory_file_path, directory_content)
+ # Push the configuration down to the host
+ data = {"host_id": host_id, "file_path": directory_file_path, "content": content}
+ try:
+ sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ codeNum = 500
+ codeString = "Failed to sync configuration, please check the interface of config/sync."
+ base_rsp = BaseResponse(codeNum, codeString)
+ return base_rsp, codeNum
+ resp_code = json.loads(sync_response.text).get('code')
+ resp = json.loads(sync_response.text).get('data').get('resp')
+
+ if resp_code == "200" and resp.get('sync_result') is True:
+ conf_sync_res_list.append("SUCCESS")
+ else:
+ conf_sync_res_list.append("FAILED")
+ if "FAILED" in conf_sync_res_list:
+ conf_sync_res = ConfSyncedRes(file_path=file_path, result="FAILED")
+ else:
+ conf_sync_res = ConfSyncedRes(file_path=file_path, result="SUCCESS")
+ host_sync_result.sync_result.append(conf_sync_res)
+ else:
+ content = object_parse.parse_json_to_conf(file_path, contents)
+ # Push the configuration down to the host
+ data = {"host_id": host_id, "file_path": file_path, "content": content}
+ sync_response = requests.put(sync_conf_url, data=json.dumps(data), headers=headers)
+
+ resp_code = json.loads(sync_response.text).get('code')
+ resp = json.loads(sync_response.text).get('data').get('resp')
+ conf_sync_res = ConfSyncedRes(file_path=file_path,
+ result="")
+ if resp_code == "200" and resp.get('sync_result') is True:
+ conf_sync_res.result = "SUCCESS"
+ else:
+ conf_sync_res.result = "FAILED"
+ host_sync_result.sync_result.append(conf_sync_res)
+
+ @staticmethod
+ def deal_batch_sync_res(conf_tools, exist_host, file_path_infos, access_token):
+ from ragdoll.utils.object_parse import ObjectParse
+ sync_conf_url = conf_tools.load_url_by_conf().get("batch_sync_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+ object_parse = ObjectParse()
+ codeNum = 200
+ codeString = "sync config succeed "
+ # assemble the request parameters
+ sync_config_request = {"host_ids": exist_host, "file_path_infos": list()}
+ for file_path, contents in file_path_infos.items():
+ if file_path in DIRECTORY_FILE_PATH_LIST:
+ for directory_file_path, directory_content in json.loads(contents).items():
+ content = object_parse.parse_json_to_conf(directory_file_path, directory_content)
+ single_file_path_info = {"file_path": directory_file_path, "content": content}
+ sync_config_request["file_path_infos"].append(single_file_path_info)
+ else:
+ content = object_parse.parse_json_to_conf(file_path, contents)
+ single_file_path_info = {"file_path": file_path, "content": content}
+ sync_config_request["file_path_infos"].append(single_file_path_info)
+ # call the zeus interface
+ try:
+ sync_response = requests.put(sync_conf_url, data=json.dumps(sync_config_request), headers=headers)
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ codeNum = 500
+ codeString = "Failed to sync configuration, please check the interface of config/sync."
+ return codeNum, codeString, []
+        # Handle the response
+ resp_code = json.loads(sync_response.text).get('code')
+ resp = json.loads(sync_response.text).get('data')
+ if resp_code != "200":
+ codeNum = 500
+ codeString = f"Failed to sync configuration, reason is {json.loads(sync_response.text).get('message')}, " \
+ f"please check the interface of config/sync. "
+ return codeNum, codeString, []
+
+        # Rebuild the return value; results for directory files are reconstructed
+ sync_res = []
+ for host_result in resp:
+ syncResult = []
+ conf_sync_res_list = []
+ sync_result_list = host_result.get("syncResult")
+ dir_name = ""
+ for single_result in sync_result_list:
+ dir_name = os.path.dirname(single_result.get("filePath"))
+ if dir_name in DIRECTORY_FILE_PATH_LIST and single_result.get("result") == "SUCCESS":
+ conf_sync_res_list.append("SUCCESS")
+ elif dir_name in DIRECTORY_FILE_PATH_LIST and single_result.get("result") == "FAIL":
+ conf_sync_res_list.append("FAILED")
+ else:
+ syncResult.append(single_result)
+ if conf_sync_res_list:
+ if "FAILED" in conf_sync_res_list:
+ directory_sync_result = {"filePath": dir_name, "result": "FAILED"}
+ else:
+ directory_sync_result = {"filePath": dir_name, "result": "SUCCESS"}
+ syncResult.append(directory_sync_result)
+ single_host_sync_result = {"host_id": host_result.get("host_id"), "syncResult": syncResult}
+ sync_res.append(single_host_sync_result)
+ return codeNum, codeString, sync_res
+
+ @staticmethod
+ def addHostSyncStatus(conf_tools, domain, host_infos):
+ add_host_sync_status_url = conf_tools.load_url_by_conf().get("add_host_sync_status_url")
+ headers = {"Content-Type": "application/json"}
+        # Persist the data
+ try:
+ for host in host_infos:
+ contained_flag = Format.isContainedHostIdInOtherDomain(host.get("hostId"))
+ if contained_flag:
+ continue
+ host_sync_status = {
+ "host_id": host.get("hostId"),
+ "host_ip": host.get("ip"),
+ "domain_name": domain,
+ "sync_status": 0
+ }
+ add_host_sync_status_response = requests.post(add_host_sync_status_url,
+ data=json.dumps(host_sync_status), headers=headers)
+ resp_code = json.loads(add_host_sync_status_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to add host sync status, please check the interface of /manage/host/sync/status/add.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error("Failed to add host sync status, please check the interface of /manage/host/sync/status/add.")
+
+ @staticmethod
+ def deleteHostSyncStatus(conf_tools, domain, hostInfos):
+ delete_host_sync_status_url = conf_tools.load_url_by_conf().get("delete_host_sync_status_url")
+ headers = {"Content-Type": "application/json"}
+        # Persist the data
+ try:
+ for host in hostInfos:
+ delete_host_sync_status = {
+ "host_id": host.get("hostId"),
+ "domain_name": domain
+ }
+ delete_host_sync_status_response = requests.post(delete_host_sync_status_url,
+ data=json.dumps(delete_host_sync_status),
+ headers=headers)
+ resp_code = json.loads(delete_host_sync_status_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to delete host sync status, please check the interface of "
+ "/manage/host/sync/status/delete.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to delete host sync status, please check the interface of "
+ "/manage/host/sync/status/delete.")
+
+ @staticmethod
+ def diff_mangeconf_with_realconf_for_db(domain, real_conf_res_text, manage_confs):
+ sync_status = {"domainName": domain, "hostStatus": []}
+ from ragdoll.utils.object_parse import ObjectParse
+
+ for d_real_conf in real_conf_res_text:
+ host_id = d_real_conf.get('host_id')
+ host_sync_status = {"hostId": host_id, "syncStatus": []}
+ d_real_conf_base = d_real_conf.get('conf_base_infos')
+ for d_conf in d_real_conf_base:
+ directory_conf_is_synced = {"file_path": "", "isSynced": "", "singleConf": []}
+ d_conf_path = d_conf.get('file_path')
+ object_parse = ObjectParse()
+ # get the conf type and model
+ conf_type, conf_model = Format.get_conf_type_model(d_conf_path, object_parse)
+ Format.deal_conf_sync_status_for_db(conf_model, d_conf, d_conf_path, directory_conf_is_synced,
+ host_sync_status, manage_confs)
+
+ if len(directory_conf_is_synced["singleConf"]) > 0:
+ synced_flag = SYNCHRONIZED
+ for single_config in directory_conf_is_synced["singleConf"]:
+ if single_config["singleIsSynced"] == SYNCHRONIZED:
+ continue
+ else:
+ synced_flag = NOT_SYNCHRONIZE
+ directory_conf_is_synced["isSynced"] = synced_flag
+ host_sync_status["syncStatus"].append(directory_conf_is_synced)
+ sync_status.get("hostStatus").append(host_sync_status)
+ return sync_status
+
+ @staticmethod
+ def deal_expected_confs_resp(expected_confs_resp_list):
+ """"
+ deal the excepted confs resp.
+
+ Args:
+ expected_confs_resp_list (list): e.g
+ [
+ {
+ "domainName": "xx"
+ "confBaseInfos": [
+ {
+ "filePath": "xx",
+ "expectedContents": "xxx"
+ }
+ ]
+ }
+ ]
+ Returns:
+ dict: e.g
+ {
+ "aops": [
+ {
+ "file_path": "xxx",
+ "contents": "xxx"
+ }
+ ]
+ }
+ """
+        # Process expected_confs_resp into [{"file_path": "xxx", "contents": "xxx"}]; each domain gets its own manage_confs
+ expected_confs_resp_dict = {}
+ for expected_confs_resp in expected_confs_resp_list:
+ manage_confs = []
+ domain_name = expected_confs_resp["domainName"]
+ confBaseInfos = expected_confs_resp["confBaseInfos"]
+ for singleConfBaseInfo in confBaseInfos:
+ file_path = singleConfBaseInfo["filePath"]
+ contents = singleConfBaseInfo["expectedContents"]
+ single_manage_conf = {"file_path": file_path, "contents": contents}
+ manage_confs.append(single_manage_conf)
+ expected_confs_resp_dict[domain_name] = manage_confs
+ return expected_confs_resp_dict
+
+ @staticmethod
+ def deal_domain_result(domain_result):
+ """"
+ deal the domain result.
+
+ Args:
+ domain_result (object): e.g
+ {
+ "aops": {
+ "1": [
+ {
+ "filePath": "xxx",
+ "contents": "xxxx"
+ }
+ ]
+ }
+ }
+ Returns:
+ dict: e.g
+ {
+ "aops": [
+ {
+ "domain_name": "xxx",
+ "host_id": 1,
+ "conf_base_infos": [
+ {
+ "file_path": "xxx",
+ "conf_contents": {} or []
+ }
+ ]
+ }
+ ]
+ }
+ """
+        # Process domain_result into [{"domain_name": "aops", "host_id": 7, "conf_base_infos": [{"conf_contents": "xxx", "file_path": "xxx"}]}]
+ from ragdoll.utils.object_parse import ObjectParse
+ real_conf_res_text_dict = {}
+ parse = ObjectParse()
+ for domain, host_infos in domain_result.items():
+ real_conf_res_text_list = []
+ for host_id, confs in host_infos.items():
+ signal_host_infos = {"domain_name": domain, "host_id": int(host_id), "conf_base_infos": []}
+ for conf in confs:
+ conf_path = conf["filePath"]
+ conf_info = conf["contents"]
+ conf_type = parse.get_conf_type_by_conf_path(conf_path)
+ if not conf_type:
+ return
+ # create conf model
+ conf_model = parse.create_conf_model_by_type(conf_type)
+                    # Convert and parse the real configuration
+ if conf_path not in DIRECTORY_FILE_PATH_LIST:
+ Format.convert_real_conf(conf_model, conf_type, conf_info, conf_path, parse)
+ else:
+ pam_res_infos = []
+ for path, content in json.loads(conf_info).items():
+ signal_res_info = {"path": path, "content": content}
+ pam_res_infos.append(signal_res_info)
+ Format.convert_real_conf(conf_model, conf_type, pam_res_infos, conf_path, parse)
+ signal_conf = {"file_path": conf["filePath"], "conf_contents": conf_model.conf}
+ signal_host_infos["conf_base_infos"].append(signal_conf)
+ real_conf_res_text_list.append(signal_host_infos)
+ real_conf_res_text_dict[domain] = real_conf_res_text_list
+ return real_conf_res_text_dict
+
+ @staticmethod
+ def add_domain_conf_trace_flag(params, successDomain, tempDomainName):
+        # For each domain in successDomain, set the file-monitoring switch and the alert switch
+ if len(successDomain) > 0:
+ from vulcanus.database.proxy import RedisProxy
+            # File-monitoring switch
+ conf_change_flag = params.get("conf_change_flag")
+ if conf_change_flag:
+ RedisProxy.redis_connect.set(tempDomainName + "_conf_change", 1)
+ else:
+ RedisProxy.redis_connect.set(tempDomainName + "_conf_change", 0)
+            # Alert switch
+ report_flag = params.get("report_flag")
+ if report_flag:
+ RedisProxy.redis_connect.set(tempDomainName + "_report", 1)
+ else:
+ RedisProxy.redis_connect.set(tempDomainName + "_report", 0)
+
+ @staticmethod
+ def uninstall_trace(access_token, host_ids, domain):
+ from ragdoll.utils.conf_tools import ConfTools
+ conf_tools = ConfTools()
+ conf_trace_mgmt_url = conf_tools.load_url_by_conf().get("conf_trace_mgmt_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+        # Persist the data
+ try:
+ conf_trace_mgmt_data = {
+ "host_ids": host_ids,
+ "action": "stop",
+ "domain_name": domain
+ }
+ conf_trace_mgmt_response = requests.put(conf_trace_mgmt_url,
+ data=json.dumps(conf_trace_mgmt_data), headers=headers)
+ resp_code = json.loads(conf_trace_mgmt_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+
+ @staticmethod
+ def clear_all_domain_data(access_token, domainName, successDomain, host_ids):
+        # When a domain is deleted, clean up redis keys for each domain in successDomain and clean up agith on the domain's hosts
+ if len(successDomain) > 0:
+ from vulcanus.database.proxy import RedisProxy
+            # 1. Clean up redis keys
+ RedisProxy.redis_connect.delete(domainName + "_conf_change")
+ RedisProxy.redis_connect.delete(domainName + "_report")
+            # 2. Clean up database records so stale entries are not shown if the domain is added again
+ Format.delete_conf_trace_infos(access_token, [], domainName)
+            # 3. Clean up host sync records under the domain
+ Format.delete_host_conf_sync_status(access_token, domainName, host_ids)
+
+ @staticmethod
+ def get_conf_change_flag(domain):
+ from vulcanus.database.proxy import RedisProxy
+ domain_conf_change = RedisProxy.redis_connect.get(domain + "_conf_change")
+ return domain_conf_change
+
+ @staticmethod
+ def install_update_agith(access_token, domain, host_ids):
+        # For successfully added hosts, install and start agith when the current domain has agith configuration; otherwise do not configure it
+ install_resp_code = "200"
+        # Get the domain's file-monitoring switch
+ domain_conf_change = Format.get_conf_change_flag(domain)
+ conf_files_list = Format.get_conf_files_list(domain, access_token)
+ if len(host_ids) > 0 and int(domain_conf_change) == 1 and len(conf_files_list) > 0:
+            # Install and start agith
+ from ragdoll.utils.conf_tools import ConfTools
+ conf_tools = ConfTools()
+ conf_trace_mgmt_url = conf_tools.load_url_by_conf().get("conf_trace_mgmt_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+ try:
+ conf_trace_mgmt_data = {
+ "domain_name": domain,
+ "host_ids": host_ids,
+ "action": "start",
+ "conf_files": conf_files_list
+ }
+ conf_trace_mgmt_response = requests.put(conf_trace_mgmt_url,
+ data=json.dumps(conf_trace_mgmt_data), headers=headers)
+ install_resp_code = json.loads(conf_trace_mgmt_response.text).get('code')
+ LOGGER.info(f"install_resp_code is {install_resp_code}")
+ if install_resp_code != "200":
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+
+            # Configure agith when the domain has conf files and agith started successfully
+ conf_files_list = Format.get_conf_files_list(domain, access_token)
+ if len(conf_files_list) > 0 and install_resp_code == "200":
+ Format.update_agith_conf(conf_files_list, conf_trace_mgmt_url, headers, host_ids, domain)
+
+ @staticmethod
+ def uninstall_hosts_agith(access_token, containedInHost, domain):
+        # For containedInHost: stop the agith service, remove agith, and delete redis keys
+ if len(containedInHost) > 0:
+            # 1. Stop the agith service and remove agith for containedInHost
+ from vulcanus.database.proxy import RedisProxy
+ Format.uninstall_trace(access_token, containedInHost, domain)
+            # 2. Clean up database records so stale entries are not shown if the domain is added again
+ Format.delete_conf_trace_infos(access_token, containedInHost, domain)
+            # 3. Clean up host sync records
+ Format.delete_host_conf_sync_status(access_token, domain, containedInHost)
+
+ @staticmethod
+ def delete_conf_trace_infos(access_token, containedInHost, domain):
+ from ragdoll.utils.conf_tools import ConfTools
+ conf_tools = ConfTools()
+ conf_trace_delete_url = conf_tools.load_url_by_conf().get("conf_trace_delete_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+        # Persist the data
+ try:
+ conf_trace_delete_data = {
+ "domain_name": domain,
+ "host_ids": containedInHost
+ }
+ conf_trace_delete_response = requests.post(conf_trace_delete_url,
+ data=json.dumps(conf_trace_delete_data), headers=headers)
+ resp_code = json.loads(conf_trace_delete_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to delete trace info, please check the interface of /conftrace/delete.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to delete trace info, please check the interface of /conftrace/delete.")
+
+ @staticmethod
+ def get_conf_files_list(domain, access_token):
+ conf_files_list = []
+ conf_files = Format.get_manageconf_by_domain(domain).get("conf_files")
+ for conf_file in conf_files:
+ if conf_file.get("file_path") in DIRECTORY_FILE_PATH_LIST:
+                # Get the configuration files under the directory
+ from ragdoll.utils.object_parse import ObjectParse
+ object_parse = ObjectParse()
+ d_conf = {"filePath": conf_file.get("file_path")}
+ host_ids = Format.get_hostid_list_by_domain(domain)
+ if len(host_ids):
+ _, _, file_paths = object_parse.get_directory_files(d_conf, host_ids[0], access_token)
+ if len(file_paths) > 0:
+ conf_files_list.extend(file_paths)
+ else:
+ conf_files_list.append(conf_file.get("file_path"))
+ return conf_files_list
+
+ @staticmethod
+ def update_agith(access_token, conf_files_list, domain):
+        # Based on containedInHost and the monitoring switch, stop the agith service, remove agith and delete redis keys
+ domain_conf_change_flag = Format.get_conf_change_flag(domain)
+ if int(domain_conf_change_flag) == 1:
+ from ragdoll.utils.conf_tools import ConfTools
+            # Stop the agith service and remove agith for containedInHost
+ conf_tools = ConfTools()
+ conf_trace_mgmt_url = conf_tools.load_url_by_conf().get("conf_trace_mgmt_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+            # Get the host ids under the domain
+ host_ids = Format.get_hostid_list_by_domain(domain)
+            # Persist the data
+ if len(host_ids) > 0:
+ Format.update_agith_conf(conf_files_list, conf_trace_mgmt_url, headers, host_ids, domain)
+
+ @staticmethod
+ def update_agith_conf(conf_files_list, conf_trace_mgmt_url, headers, host_ids, domain_name):
+ try:
+ conf_trace_mgmt_data = {
+ "host_ids": host_ids,
+ "action": "update",
+ "conf_files": conf_files_list,
+ "domain_name": domain_name
+ }
+ conf_trace_mgmt_response = requests.put(conf_trace_mgmt_url,
+ data=json.dumps(conf_trace_mgmt_data), headers=headers)
+ resp_code = json.loads(conf_trace_mgmt_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to conf trace mgmt, please check the interface of /conftrace/mgmt.")
+
+ @staticmethod
+ def delete_host_conf_sync_status(access_token, domainName, hostIds):
+ try:
+ from ragdoll.utils.conf_tools import ConfTools
+ conf_tools = ConfTools()
+ delete_all_host_sync_status_url = conf_tools.load_url_by_conf().get("delete_all_host_sync_status_url")
+ headers = {"Content-Type": "application/json", "access_token": access_token}
+ delete_host_conf_sync_status_data = {
+ "host_ids": hostIds,
+ "domain_name": domainName
+ }
+ delete_sync_status_response = requests.post(delete_all_host_sync_status_url,
+ data=json.dumps(delete_host_conf_sync_status_data),
+ headers=headers)
+ resp_code = json.loads(delete_sync_status_response.text).get('code')
+ if resp_code != "200":
+ LOGGER.error(
+ "Failed to delete sync status, please check the interface of /manage/all/host/sync/status/delete.")
+ except requests.exceptions.RequestException as connect_ex:
+ LOGGER.error(f"An error occurred: {connect_ex}")
+ LOGGER.error(
+ "Failed to delete sync status, please check the interface of /manage/all/host/sync/status/delete.")
diff --git a/ragdoll/utils/git_tools.py b/ragdoll/utils/git_tools.py
index 049d450..f6200af 100644
--- a/ragdoll/utils/git_tools.py
+++ b/ragdoll/utils/git_tools.py
@@ -6,8 +6,7 @@ import ast
from ragdoll.const.conf_handler_const import CONFIG
from ragdoll.log.log import LOGGER
-from ragdoll.models.git_log_message import GitLogMessage
-from ragdoll.controllers.format import Format
+from ragdoll.utils.format import Format
class GitTools(object):
@@ -18,7 +17,6 @@ class GitTools(object):
self._target_dir = self.load_git_dir()
def load_git_dir(self):
- cf = configparser.ConfigParser()
cf = configparser.ConfigParser()
if os.path.exists(CONFIG):
cf.read(CONFIG, encoding="utf-8")
@@ -93,16 +91,14 @@ class GitTools(object):
# Execute the shell command and return the execution node and output
def run_shell_return_output(self, shell):
cmd = subprocess.Popen(shell, stdout=subprocess.PIPE, shell=True)
- LOGGER.debug("################# shell cmd ################")
LOGGER.debug("subprocess.Popen({shell}, stdout=subprocess.PIPE, shell=True)".format(shell=shell))
- LOGGER.debug("################# shell cmd end ################")
output, err = cmd.communicate()
return output
def makeGitMessage(self, path, logMessage):
if len(logMessage) == 0:
return "the logMessage is null"
- LOGGER.debug("AAAA path is : {}".format(path))
+ LOGGER.debug("path is : {}".format(path))
cwdDir = os.getcwd()
os.chdir(self._target_dir)
LOGGER.debug(os.getcwd())
@@ -113,26 +109,26 @@ class GitTools(object):
count = logMessage.count("commit")
lines = logMessage.split('\n')
+ LOGGER.debug("count is : {}".format(count))
for index in range(0, count):
- LOGGER.debug("AAAAAAAAAAAAAAA count is : {}".format(index))
- gitMessage = GitLogMessage()
+ gitMessage = {}
for temp in range(0, singleLogLen):
line = lines[index * singleLogLen + temp]
value = line.split(" ", 1)[-1]
if "commit" in line:
- gitMessage.change_id = value
+ gitMessage["changeId"] = value
if "Author" in line:
- gitMessage.author = value
+ gitMessage["author"] = value
if "Date" in line:
- gitMessage._date = value[2:]
- gitMessage.change_reason = lines[index * singleLogLen + 4]
+ gitMessage["date"] = value[2:]
+ gitMessage["changeReason"] = lines[index * singleLogLen + 4]
LOGGER.debug("gitMessage is : {}".format(gitMessage))
gitLogMessageList.append(gitMessage)
LOGGER.debug("################# gitMessage start ################")
if count == 1:
last_message = gitLogMessageList[0]
- last_message.post_value = Format.get_file_content_by_read(path)
+ last_message["postValue"] = Format.get_file_content_by_read(path)
os.chdir(cwdDir)
return gitLogMessageList
@@ -140,13 +136,13 @@ class GitTools(object):
LOGGER.debug("index is : {}".format(index))
message = gitLogMessageList[index]
next_message = gitLogMessageList[index + 1]
- message.post_value = Format.get_file_content_by_read(path)
- shell = ['git checkout {}'.format(next_message.change_id)]
+ message["postValue"] = Format.get_file_content_by_read(path)
+ shell = ['git checkout {}'.format(next_message["changeId"])]
output = self.run_shell_return_output(shell)
- message.pre_value = Format.get_file_content_by_read(path)
+ message["preValue"] = Format.get_file_content_by_read(path)
# the last changlog
first_message = gitLogMessageList[count - 1]
- first_message.post_value = Format.get_file_content_by_read(path)
+ first_message["postValue"] = Format.get_file_content_by_read(path)
LOGGER.debug("################# gitMessage end ################")
os.chdir(cwdDir)
diff --git a/ragdoll/utils/host_tools.py b/ragdoll/utils/host_tools.py
index c467994..f561523 100644
--- a/ragdoll/utils/host_tools.py
+++ b/ragdoll/utils/host_tools.py
@@ -75,11 +75,9 @@ class HostTools(object):
}]
"""
res = []
+ LOGGER.debug("The domainHost is : {}".format(domainHost))
for d_host in domainHost:
hostId = int(d_host.get('host_id'))
- LOGGER.debug("the host Id is : {}".format(hostId))
- d_host = {}
- d_host["hostId"] = hostId
res.append(hostId)
return res
diff --git a/ragdoll/utils/object_parse.py b/ragdoll/utils/object_parse.py
index 6cc4564..719dc68 100644
--- a/ragdoll/utils/object_parse.py
+++ b/ragdoll/utils/object_parse.py
@@ -174,14 +174,14 @@ class ObjectParse(object):
return conf_info
- def get_directory_files(self, d_conf, host_id):
+ def get_directory_files(self, d_conf, host_id, access_token):
file_paths = list()
conf_tools = ConfTools()
file_directory = dict()
- file_directory['file_directory'] = d_conf.file_path
+ file_directory['file_directory'] = d_conf["filePath"]
file_directory['host_id'] = host_id
url = conf_tools.load_url_by_conf().get("object_file_url")
- headers = {"Content-Type": "application/json"}
+ headers = {"Content-Type": "application/json", "access_token": access_token}
try:
response = requests.post(url, data=json.dumps(file_directory), headers=headers)
except requests.exceptions.RequestException as connect_ex:
@@ -191,7 +191,7 @@ class ObjectParse(object):
base_rsp = BaseResponse(codeNum, codeString)
return base_rsp, codeNum
response_code = json.loads(response.text).get("code")
- if response_code == None:
+ if response_code is None:
codeNum = 500
codeString = "Failed to obtain the actual configuration, please check the interface of conf/objectFile."
return codeNum, codeString, file_paths
@@ -207,5 +207,5 @@ class ObjectParse(object):
return codeNum, codeString, file_paths
codeNum = 200
codeString = "Success get pam.d file paths."
- file_paths = file_path_reps.get('resp').get('object_file_paths')
+ file_paths = file_path_reps.get('object_file_paths')
return codeNum, codeString, file_paths
diff --git a/ragdoll/utils/prepare.py b/ragdoll/utils/prepare.py
index 4e61489..132ea7b 100644
--- a/ragdoll/utils/prepare.py
+++ b/ragdoll/utils/prepare.py
@@ -15,7 +15,7 @@ class Prepare(object):
def target_dir(self, target_dir):
self._target_dir = target_dir
- def mdkir_git_warehose(self, username, useremail):
+ def mkdir_git_warehose(self, username, useremail):
res = True
LOGGER.debug("self._target_dir is : {}".format(self._target_dir))
if os.path.exists(self._target_dir):
@@ -26,7 +26,7 @@ class Prepare(object):
git_tools = GitTools(self._target_dir)
mkdir_code = git_tools.run_shell_return_code(cmd1)
git_code = self.git_init(username, useremail)
- if mkdir_code != 0:
+ if mkdir_code != 0 or not git_code:
res = False
return res
diff --git a/ragdoll/utils/yang_module.py b/ragdoll/utils/yang_module.py
index de0d9b5..f0f21ab 100644
--- a/ragdoll/utils/yang_module.py
+++ b/ragdoll/utils/yang_module.py
@@ -1,7 +1,5 @@
import libyang
import os
-import sys
-import importlib
import operator
from ragdoll.log.log import LOGGER
@@ -95,10 +93,6 @@ class YangModule(object):
if files_tail != "yang":
continue
modulePath = os.path.join(self._yang_dir, d_file)
- # grammar_res = self.check_yang_grammar(modulePath)
- # print("grammar_res is : {}".format(grammar_res))
- # if not grammar_res:
- # continue
fo = open(modulePath, 'r+')
module = self._ctx.parse_module_file(fo)
module_list.append(module)
@@ -183,7 +177,6 @@ class YangModule(object):
continue
xpath.append(path)
- # print("xpath is : {}".format(xpath))
return xpath
def getFeatureInModule(self, modules):
@@ -250,9 +243,8 @@ class YangModule(object):
}
"""
res = {}
+ LOGGER.debug("modules are : {}".format(modules))
for d_mod in modules:
- LOGGER.debug("d_mod is : {}".format(d_mod))
- LOGGER.debug("d_mod's type is : {}".format(type(d_mod)))
feature_list = self.getFeatureInModule(d_mod)
module_name = d_mod.name()
xpath = ""
diff --git a/service/gala-ragdoll.service b/service/gala-ragdoll.service
index 0fbaf2e..2dfc4f1 100644
--- a/service/gala-ragdoll.service
+++ b/service/gala-ragdoll.service
@@ -3,8 +3,9 @@ Description=a-ops gala ragdoll service
After=network.target
[Service]
-Type=exec
-ExecStart=/usr/bin/ragdoll
+Type=forking
+ExecStart=/usr/bin/ragdoll start
+ExecStop=/usr/bin/ragdoll stop
Restart=on-failure
RestartSec=1
RemainAfterExit=yes
diff --git a/service/ragdoll b/service/ragdoll
new file mode 100644
index 0000000..92a5b7c
--- /dev/null
+++ b/service/ragdoll
@@ -0,0 +1,15 @@
+#!/bin/bash
+. /usr/bin/aops-vulcanus
+
+MANAGER_CONSTANT="ragdoll"
+MANAGER_CONFIG_FILE=/etc/ragdoll/gala-ragdoll.conf
+
+function main() {
+ if [ "${OPERATION}" = "start" ]; then
+ create_config_file "${MANAGER_CONFIG_FILE}" "ragdoll"
+ fi
+ start_or_stop_service "${MANAGER_CONSTANT}"
+ exit $?
+}
+
+main
diff --git a/service/ragdoll-filetrace b/service/ragdoll-filetrace
new file mode 100755
index 0000000..cf1a0bd
--- /dev/null
+++ b/service/ragdoll-filetrace
@@ -0,0 +1,782 @@
+#!/usr/bin/python3
+from bpfcc import BPF
+import json
+import os
+import requests
+import threading
+import psutil
+import ctypes
+
+import logging
+import logging.handlers
+logger = logging.getLogger(__name__)
+syslog_handler = logging.handlers.SysLogHandler(address='/dev/log')
+syslog_handler.setLevel(logging.DEBUG)
+syslog_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
+logger.addHandler(syslog_handler)
+
+
+bpf_text="""
+//#define randomized_struct_fields_start struct {
+//#define randomized_struct_fields_end };
+
+#include <uapi/linux/bpf.h>
+#include <linux/dcache.h>
+#include <linux/err.h>
+#include <linux/fdtable.h>
+#include <linux/fs.h>
+#include <linux/fs_struct.h>
+#include <linux/path.h>
+#include <linux/sched.h>
+#include <linux/slab.h>
+
+#define MAX_DEPTH 4
+#define MAX_DIRNAME_LEN 16
+#define MAX_FILENAME_LEN 32
+#define MAX_CMD_LEN 32
+#define MAX_TASK_COMM_LEN 32
+
+#define ARGSIZE 16
+#define MAX_ARGS 4
+
+
+struct pinfo_t {
+ char comm[MAX_CMD_LEN];
+ unsigned int ppid;
+ char arg1[ARGSIZE];
+ char arg2[ARGSIZE];
+ char arg3[ARGSIZE];
+ char arg4[ARGSIZE];
+
+};
+
+
+struct event {
+ unsigned int pid;
+ unsigned int ppid;
+ char cmd[16];
+ char pcmd[16];
+ unsigned long i_ino;
+ char filename[MAX_FILENAME_LEN];
+ char dir1[MAX_DIRNAME_LEN];
+ char dir2[MAX_DIRNAME_LEN];
+ char dir3[MAX_DIRNAME_LEN];
+ char dir4[MAX_DIRNAME_LEN];
+
+ char oldfilename[MAX_FILENAME_LEN];
+ char odir1[MAX_DIRNAME_LEN];
+ char odir2[MAX_DIRNAME_LEN];
+ char odir3[MAX_DIRNAME_LEN];
+ char odir4[MAX_DIRNAME_LEN];
+ int flag;
+};
+
+BPF_HASH(exec_map, u32, struct pinfo_t);
+BPF_PERF_OUTPUT(events);
+
+//for rm command
+TRACEPOINT_PROBE(syscalls, sys_enter_unlinkat) {
+ struct task_struct* t;
+ struct task_struct* p;
+
+ struct event e = {};
+ e.flag = 4;
+
+ t = (struct task_struct*)bpf_get_current_task();
+ bpf_probe_read(&e.pid, sizeof(e.pid), &t->tgid);
+ bpf_probe_read(&e.cmd, sizeof(e.cmd), &t->comm);
+
+ bpf_probe_read(&p, sizeof(p), &t->real_parent);
+ bpf_probe_read(&e.ppid, sizeof(e.ppid), &p->tgid);
+ bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)args->pathname);
+ bpf_trace_printk("Process calling sys_enter_rename newfilename:%s \\n", e.filename);
+ events.perf_submit((struct pt_regs *)args, &e, sizeof(e));
+ return 0;
+}
+
+//for copy command
+TRACEPOINT_PROBE(syscalls, sys_enter_copy_file_range) {
+
+ struct task_struct* t;
+ struct task_struct* p;
+ struct files_struct* f;
+ struct fdtable* fdt;
+ struct file** fdd;
+ struct file* file;
+ struct path path;
+ struct dentry* dentry;
+ struct inode* inode;
+ struct qstr pathname;
+ umode_t mode;
+ unsigned long i_ino;
+
+ //char filename[128];
+ struct event e = {};
+ e.flag = 2;
+
+ int fd =args->fd_out;
+ t = (struct task_struct*)bpf_get_current_task();
+ if(t){
+ bpf_probe_read(&f, sizeof(f), &(t->files));
+ bpf_probe_read(&fdt, sizeof(fdt), (void*)&f->fdt);
+ int ret = bpf_probe_read(&fdd, sizeof(fdd), (void*)&fdt->fd);
+ if (ret) {
+ //bpf_trace_printk("bpf_probe_read failed: %d\\n", ret);
+ return 0;
+ }
+ bpf_probe_read(&file, sizeof(file), (void*)&fdd[fd]);
+
+ //file file ppid pcmd
+ bpf_probe_read(&p, sizeof(p), &t->real_parent);
+ bpf_probe_read(&e.ppid, sizeof(e.ppid), &p->tgid);
+ bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ //fill file ino
+ bpf_probe_read(&inode, sizeof(inode), &file->f_inode);
+ bpf_probe_read(&e.i_ino, sizeof(i_ino), &inode->i_ino);
+ bpf_probe_read(&mode, sizeof(mode), &inode->i_mode);
+ if(!S_ISREG(mode)){
+ return 0;
+ }
+
+ //file process info
+ bpf_probe_read(&e.pid, sizeof(e.pid), &t->tgid);
+ bpf_probe_read(&e.cmd, sizeof(e.cmd), &t->comm);
+
+ //get filename
+ bpf_probe_read(&path, sizeof(path), (const void*)&file->f_path);
+ bpf_probe_read(&dentry, sizeof(dentry), (const void*)&path.dentry);
+ bpf_probe_read(&pathname, sizeof(pathname), (const void*)&dentry->d_name);
+
+ struct dentry* d_parent;
+
+ #pragma unroll
+ for (int i = 0; i < MAX_DEPTH; i++) {
+ bpf_probe_read(&d_parent, sizeof(d_parent), (const void*)&dentry->d_parent);
+ if (d_parent == dentry) {
+ break;
+ }
+ //fix me
+ if(i == 0){
+ bpf_probe_read(&e.dir1, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 1){
+ bpf_probe_read(&e.dir2, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 2){
+ bpf_probe_read(&e.dir3, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 3){
+ bpf_probe_read(&e.dir4, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }
+
+ dentry = d_parent;
+ }
+ bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)pathname.name);
+ events.perf_submit((struct pt_regs *)args, &e, sizeof(e));
+ return 0;
+ }
+ return 0;
+}
+//for sed command
+TRACEPOINT_PROBE(syscalls, sys_enter_rename) {
+ struct task_struct* t;
+ struct task_struct* p;
+
+ struct event e = {};
+ e.flag = 1;
+
+ t = (struct task_struct*)bpf_get_current_task();
+ bpf_probe_read(&e.pid, sizeof(e.pid), &t->tgid);
+ bpf_probe_read(&e.cmd, sizeof(e.cmd), &t->comm);
+
+ bpf_probe_read(&p, sizeof(p), &t->real_parent);
+ bpf_probe_read(&e.ppid, sizeof(e.ppid), &p->tgid);
+ bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)args->newname);
+ //bpf_trace_printk("Process calling sys_enter_rename newfilename:%s \\n", e.filename);
+ events.perf_submit((struct pt_regs *)args, &e, sizeof(e));
+ return 0;
+}
+
+TRACEPOINT_PROBE(syscalls, sys_enter_renameat) {
+ char comm[TASK_COMM_LEN];
+ bpf_get_current_comm(&comm, sizeof(comm));
+ bpf_trace_printk("Process %s is calling renameat\\n", comm);
+ return 0;
+}
+
+//for move command
+TRACEPOINT_PROBE(syscalls, sys_enter_renameat2) {
+ struct event e = {};
+ e.flag = 3;
+
+ struct task_struct* t;
+ struct task_struct* p;
+
+ t = (struct task_struct*)bpf_get_current_task();
+ bpf_probe_read(&e.pid, sizeof(e.pid), &t->tgid);
+ bpf_probe_read(&e.cmd, sizeof(e.cmd), &t->comm);
+
+ bpf_probe_read(&p, sizeof(p), &t->real_parent);
+ bpf_probe_read(&e.ppid, sizeof(e.ppid), &p->tgid);
+ bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)args->newname);
+ bpf_probe_read_str((void*)&e.oldfilename, sizeof(e.oldfilename), (const void*)args->oldname);
+
+ struct fs_struct *fs;
+ struct path pwd;
+ bpf_probe_read(&fs, sizeof(fs), (const void*)&t->fs);
+ bpf_probe_read(&pwd, sizeof(pwd), (const void*)&fs->pwd);
+
+ struct dentry* dentry;
+ bpf_probe_read(&dentry, sizeof(dentry), (const void*)&pwd.dentry);
+
+ struct dentry* d_parent;
+
+ int olddfd = args->olddfd;
+ int newdfd = args->newdfd;
+ if (newdfd == AT_FDCWD) {
+ #pragma unroll
+ for (int i = 0; i < MAX_DEPTH; i++) {
+ bpf_probe_read(&d_parent, sizeof(d_parent), (const void*)&dentry->d_parent);
+ if (d_parent == dentry) {
+ break;
+ }
+ //fix me
+ if(i == 0){
+ bpf_probe_read(&e.dir1, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 1){
+ bpf_probe_read(&e.dir2, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 2){
+ bpf_probe_read(&e.dir3, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 3){
+ bpf_probe_read(&e.dir4, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }
+ dentry = d_parent;
+ }
+ bpf_trace_printk("newfilename relative to CWD: %s\\n", e.filename);
+ }
+
+ if (olddfd == AT_FDCWD) {
+ bpf_trace_printk("oldfilename relative to CWD: %s\\n", e.oldfilename);
+ }
+
+ events.perf_submit((struct pt_regs *)args, &e, sizeof(e));
+
+ return 0;
+}
+
+TRACEPOINT_PROBE(syscalls, sys_exit_execve) {
+ u64 tgid_pid;
+ u32 tgid, pid;
+ tgid_pid = bpf_get_current_pid_tgid();
+ tgid = tgid_pid >> 32;
+ pid = (u32)tgid_pid;
+ exec_map.delete(&pid);
+ return 0;
+}
+
+TRACEPOINT_PROBE(syscalls, sys_enter_execve) {
+ //args, args->filename, args->argv, args->envp
+ struct pinfo_t p = {};
+
+ u64 tgid_pid;
+ u32 tgid, pid;
+ tgid_pid = bpf_get_current_pid_tgid();
+ tgid = tgid_pid >> 32;
+ pid = (u32)tgid_pid;
+
+ struct task_struct *t = (struct task_struct *)bpf_get_current_task();
+ struct task_struct *pp;
+ bpf_probe_read(&pp, sizeof(pp), &t->real_parent);
+ bpf_probe_read(&p.ppid, sizeof(p.ppid), &pp->tgid);
+
+ bpf_get_current_comm(&p.comm, sizeof(p.comm));
+
+ //int i;
+ //for (i = 1; i <= MAX_ARGS; i++) {
+ // const char __user *argp;
+ // bpf_probe_read_user(&argp, sizeof(argp), &args->argv[i]);
+ // if (!argp) {
+ // break;
+ // }
+ // if(i == 1){
+ // bpf_probe_read_user_str(&p.arg1, sizeof(p.args1), argp);
+ // }else if(i == 2){
+ // bpf_probe_read_user_str(&p.arg2, sizeof(p.args2), argp);
+ // }else if(i == 3){
+ // bpf_probe_read_user_str(&p.arg3, sizeof(p.args3), argp);
+ // }else if(i == 4){
+ // bpf_probe_read_user_str(&p.arg4, sizeof(p.args4), argp);
+ // }
+ //}
+
+ const char *const * argv = args->argv;
+ if(!argv[1]){
+ return 0;
+ }
+ bpf_probe_read_user(&p.arg1, sizeof(p.arg1), (void *)argv[1]);
+
+ if(!argv[2]){
+ return 0;
+ }
+ bpf_probe_read_user(&p.arg2, sizeof(p.arg2), (void *)argv[2]);
+
+ if(!argv[3]){
+ return 0;
+ }
+ bpf_probe_read_user(&p.arg3, sizeof(p.arg3), (void *)argv[3]);
+
+ if(!argv[4]){
+ return 0;
+ }
+ bpf_probe_read_user(&p.arg4, sizeof(p.arg4), (void *)argv[4]);
+
+ exec_map.update(&pid, &p);
+ return 0;
+}
+
+//for vim echo ...
+TRACEPOINT_PROBE(syscalls, sys_enter_write) {
+ unsigned int fd;
+ struct task_struct* t;
+ struct task_struct* p;
+ struct files_struct* f;
+ struct fdtable* fdt;
+ struct file** fdd;
+ struct file* file;
+ struct path path;
+ struct dentry* dentry;
+ struct inode* inode;
+ struct qstr pathname;
+ umode_t mode;
+ unsigned long i_ino;
+ char filename[128];
+ struct event e = {};
+ e.flag = 0;
+
+
+ pid_t ppid;
+ char pcomm[16];
+
+ fd =args->fd;
+ t = (struct task_struct*)bpf_get_current_task();
+ if(t){
+ bpf_probe_read(&f, sizeof(f), &(t->files));
+ bpf_probe_read(&fdt, sizeof(fdt), (void*)&f->fdt);
+ int ret = bpf_probe_read(&fdd, sizeof(fdd), (void*)&fdt->fd);
+ if (ret) {
+ //bpf_trace_printk("bpf_probe_read failed: %d\\n", ret);
+ return 0;
+ }
+ bpf_probe_read(&file, sizeof(file), (void*)&fdd[fd]);
+
+ //file file ppid pcmd
+ bpf_probe_read(&p, sizeof(p), &t->real_parent);
+ bpf_probe_read(&e.ppid, sizeof(e.ppid), &p->tgid);
+ bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ //fill file ino
+ bpf_probe_read(&inode, sizeof(inode), &file->f_inode);
+ bpf_probe_read(&e.i_ino, sizeof(i_ino), &inode->i_ino);
+ bpf_probe_read(&mode, sizeof(mode), &inode->i_mode);
+ if(!S_ISREG(mode)){
+ return 0;
+ }
+
+ //file process info
+ bpf_probe_read(&e.pid, sizeof(e.pid), &t->tgid);
+ bpf_probe_read(&e.cmd, sizeof(e.cmd), &t->comm);
+ //bpf_probe_read(&e.pcmd, sizeof(e.pcmd), &p->comm);
+
+ //get filename
+ bpf_probe_read(&path, sizeof(path), (const void*)&file->f_path);
+ bpf_probe_read(&dentry, sizeof(dentry), (const void*)&path.dentry);
+ bpf_probe_read(&pathname, sizeof(pathname), (const void*)&dentry->d_name);
+ //fill name event
+ //bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)pathname.name);
+
+ struct dentry* d_parent;
+
+ #pragma unroll
+ for (int i = 0; i < MAX_DEPTH; i++) {
+ bpf_probe_read(&d_parent, sizeof(d_parent), (const void*)&dentry->d_parent);
+ if (d_parent == dentry) {
+ break;
+ }
+ //fix me
+ if(i == 0){
+ bpf_probe_read(&e.dir1, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 1){
+ bpf_probe_read(&e.dir2, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 2){
+ bpf_probe_read(&e.dir3, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }else if(i == 3){
+ bpf_probe_read(&e.dir4, sizeof(d_parent->d_iname), (const void*)&d_parent->d_iname);
+ }
+
+ dentry = d_parent;
+ }
+ bpf_probe_read_str((void*)&e.filename, sizeof(e.filename), (const void*)pathname.name);
+ //bpf_trace_printk("filename parent e.filename: %s\\n", e.filename);
+ events.perf_submit((struct pt_regs *)args, &e, sizeof(e));
+
+ return 0;
+ }
+ return 0;
+}
+"""
+
+
+def get_conf():
+ CONF = "/etc/agith/agith.config"
+ conf_data = {}
+ try:
+ with open(CONF) as user_file:
+ conf_data = json.load(user_file)
+ except FileNotFoundError:
+ print(f"[{CONF}] does not exist!")
+ exit(1)
+ return conf_data
+ #print(json.dumps(conf_data, indent = 4))
+
+def get_ino(filename_list):
+ global logger
+ filenames = filename_list.split(",")
+ ino_name_map = {}
+ for f in filenames:
+ try:
+ stat_info = os.stat(f)
+ i = stat_info.st_ino
+ ino_name_map [str(i)] = f
+ except FileNotFoundError:
+ print(f"File not found: [{f}]")
+ exit(1)
+ except Exception as e:
+ print(f"An error occurred with file {f}: {e}")
+ exit(1)
+ logger.warning("g_map:%s", ino_name_map)
+ return ino_name_map
+
+
+conf_data = get_conf()
+g_map = get_ino(conf_data["Repository"]["conf_list"])
+
+def postdata(data=None):
+ global conf_data
+ global g_map
+ global logger
+ #logger.warning('post data: %s', data)
+ g_map = get_ino(conf_data["Repository"]["conf_list"])
+ try:
+ aops_zeus = conf_data["Repository"]["aops_zeus"]
+ response = requests.post(aops_zeus, json=data, timeout=5)
+ response.raise_for_status()
+ if response.status_code != 200:
+ logger.info('POST request failed: %s', response.status_code)
+ except requests.exceptions.HTTPError as http_err:
+ logger.error(f"HTTP error occurred: {http_err}")
+ except requests.exceptions.ConnectionError as conn_err:
+ logger.error(f"Connection error occurred: {conn_err}")
+ except requests.exceptions.Timeout as timeout_err:
+ logger.error(f"Timeout error occurred: {timeout_err}")
+ except requests.exceptions.RequestException as req_err:
+ logger.error(f"An error occurred: {req_err}")
+
+def get_oldfile_full_path(e):
+ #dir depth 4
+ filename = ""
+ if e.flag != 3:
+ return filename
+ dir1 = ""
+ dir2 = ""
+ dir3 = ""
+ dir4 = ""
+ try:
+ dir1 = e.odir1.decode('utf-8')
+ dir2 = e.odir2.decode('utf-8')
+ dir3 = e.odir3.decode('utf-8')
+ dir4 = e.odir4.decode('utf-8')
+ except UnicodeDecodeError as ex:
+ print(f"UnicodeDecodeError: {ex}")
+
+ filename = e.oldfilename.decode('utf-8')
+ if not filename:
+ return ""
+
+ if dir1 == "/":
+ filename = dir1 + filename
+ return filename
+
+ if dir2 == "/":
+ filename = dir2 + dir1 + "/" + filename
+ return filename
+
+ if dir3 == "/":
+ filename = dir3 + dir2 + "/" + dir1 + "/" + filename
+ return filename
+
+ if dir4 == "/":
+ filename = dir4 + dir3 + "/" + dir2 + "/" + dir1 + "/" + filename
+ return filename
+ return filename
+
+def get_file_full_path(e):
+ #dir depth 4
+ dir1 = ""
+ dir2 = ""
+ dir3 = ""
+ dir4 = ""
+ try:
+ dir1 = e.dir1.decode('utf-8')
+ except UnicodeDecodeError as ex:
+ print(f"UnicodeDecodeError: {ex}")
+ try:
+ dir2 = e.dir2.decode('utf-8')
+ except UnicodeDecodeError as ex:
+ print(f"UnicodeDecodeError: {ex}")
+ try:
+ dir3 = e.dir3.decode('utf-8')
+ except UnicodeDecodeError as ex:
+ print(f"UnicodeDecodeError: {ex}")
+ try:
+ dir4 = e.dir4.decode('utf-8')
+ except UnicodeDecodeError as ex:
+ print(f"UnicodeDecodeError: {ex}")
+
+ filename = e.filename.decode('utf-8')
+ if not filename:
+ return ""
+ #filename is full path
+ if os.path.exists(filename):
+ return filename
+
+ if dir1 == "/":
+ filename = dir1 + filename
+ return filename
+
+ if dir2 == "/":
+ filename = dir2 + dir1 + "/" + filename
+ return filename
+
+ if dir3 == "/":
+ filename = dir3 + dir2 + "/" + dir1 + "/" + filename
+ return filename
+
+ if dir4 == "/":
+ filename = dir4 + dir3 + "/" + dir2 + "/" + dir1 + "/" + filename
+ return filename
+ return filename
+
+class Data(ctypes.Structure):
+ _fields_ = [
+ ("comm", ctypes.c_char * 64),
+ ("ppid", ctypes.c_uint32),
+ ("arg1", ctypes.c_char * 16),
+ ("arg2", ctypes.c_char * 16),
+ ("arg3", ctypes.c_char * 16),
+ ("arg4", ctypes.c_char * 16),
+ ]
+
+
+def get_process_info_from_map(procid):
+ global exec_map
+ pid = procid
+ try:
+ pid = ctypes.c_int(pid)
+ info = exec_map[pid]
+ #info = ctypes.cast(ctypes.pointer(data), ctypes.POINTER(Data)).contents
+ pid = int(pid.value)
+ if info:
+ #print(f"PID: {pid}, Command: {info.comm}, Args: {info.args.decode('utf-8', 'ignore')}")
+ #cmd = info.comm.decode('utf-8') + " " + info.args.decode('utf-8', 'ignore')
+ #cmd = info.args.decode('utf-8', 'ignore')
+ str1 = info.arg1.decode('utf-8')
+ str2 = info.arg2.decode('utf-8')
+ str3 = info.arg3.decode('utf-8')
+ str4 = info.arg4.decode('utf-8')
+ cmd = str1 + " " + str2 + " " + str3 + " " + str4
+ return {
+ 'pid': pid,
+ 'cmd': cmd,
+ }
+ else:
+ #print(f"No information found for PID {pid}")
+ return {
+ 'pid': pid,
+ 'error': 'error:No such process'
+ }
+ except KeyError:
+ pid = int(pid.value)
+ #print(f"key error for PID {pid}")
+ return {
+ 'pid': pid,
+ 'error': 'error:keyerror.'
+ }
+
+def get_ppid_by_pid(procid):
+ global exec_map
+ pid = procid
+ try:
+ pid = ctypes.c_int(pid)
+ info = exec_map[pid]
+ if info:
+ return info.ppid
+ else:
+ #print("not found,get ppid form local")
+ return get_parent_pid(pid)
+ except KeyError:
+ #print("keyerror ,get ppid form local")
+ return get_parent_pid(pid)
+ return 0
+
+def get_parent_pid(pid):
+ if not isinstance(pid, int):
+ pid = int(pid.value)
+ try:
+ with open(f"/proc/{pid}/status", "r") as f:
+ for line in f:
+ if line.startswith("PPid:"):
+ parent_pid = int(line.split()[1])
+ return parent_pid
+ except FileNotFoundError:
+ print(f"FileNotFoundError:/proc/{pid}/status")
+ return 0
+
+def get_process_info_from_local(pid):
+ try:
+ process = psutil.Process(pid)
+ name = process.name()
+ cmdline = process.cmdline()
+ return {
+ 'pid': pid,
+ 'cmd': name + " " + ' '.join(map(str, cmdline)),
+ }
+ except psutil.NoSuchProcess:
+ return {
+ 'pid': pid,
+ 'error': 'error:No such process'
+ }
+ except psutil.AccessDenied:
+ return {
+ 'pid': pid,
+ 'error': 'error:Access denied'
+ }
+
+def make_process_tree(procid):
+ info = []
+ tmp = {}
+ flag = True
+ pid = procid
+ while flag:
+ tmp = get_process_info_from_map(pid)
+ if "error" in tmp:
+ tmp = get_process_info_from_local(pid)
+ if "error" in tmp:
+ break
+ else:
+ info.append(tmp)
+ else:
+ info.append(tmp)
+
+ ppid = get_ppid_by_pid(pid)
+ if ppid == 0:
+ break
+ else:
+ pid = ppid
+ return info
+
+def check_filename(newfile=None, oldfile=None):
+ global conf_data
+ conf_file = conf_data["Repository"]["conf_list"]
+ if os.path.isdir(newfile):
+ return False
+
+ newfile = remove_dot_slash(newfile)
+ if newfile and newfile in conf_file:
+ return True
+
+ if oldfile and oldfile in conf_file:
+ return True
+
+ return False
+
+def remove_dot_slash(path):
+ if path.startswith('./'):
+ return path[2:]
+ return path
+
+def get_filename(newfile=None, oldfile=None):
+ global conf_data
+ conf_file = conf_data["Repository"]["conf_list"]
+ if not oldfile:
+ return newfile
+
+ if oldfile in conf_file:
+ return oldfile
+ newfile = remove_dot_slash(newfile)
+ if newfile in conf_file:
+ return newfile
+
+def process_event(cpu, data, size):
+ global b
+ global conf_data
+ global g_map
+ global logger
+ global executor
+
+ e = b["events"].event(data)
+ fname = get_file_full_path(e)
+ oldname = get_oldfile_full_path(e)
+ filename = get_filename(fname, oldname)
+ #print(f'post event filename:{fname} e.pid: {e.pid}')
+ #fixme
+ if check_filename(fname, oldname):
+ #pid = ctypes.c_int(e.pid)
+ #get_process_info_map(pid)
+ aops_zeus = conf_data["Repository"]["aops_zeus"]
+ d = {}
+ d["host_id"] = int(conf_data["Repository"]["host_id"])
+ d["domain_name"] = conf_data["Repository"]["domain_name"]
+ #d["file"] = e.filename.decode('utf-8')
+ d["flag"] = e.flag
+ d["file"] = filename
+ d["syscall"] = "write"
+ d["pid"] = e.pid
+ #d["dir1"] = e.dir1.decode('utf-8')
+ #d["dir2"] = e.dir2.decode('utf-8')
+ #d["dir3"] = e.dir3.decode('utf-8')
+ #d["dir4"] = e.dir4.decode('utf-8')
+ #d["inode"] = e.i_ino
+ d["inode"] = 0
+ d["cmd"] = e.cmd.decode('utf-8')
+ d["ptrace"] = make_process_tree(e.ppid)
+ #tmp = {"pid": e.ppid, "cmd": e.pcmd.decode('utf-8')}
+ #d["ptrace"].append(tmp)
+ print(d)
+ #aops_zeus = conf_data["Repository"]["aops_zeus"]
+ #response = requests.post(aops_zeus, json=d, timeout=5)
+ #if response.status_code != 200:
+ # print('POST request failed:', response.status_code)
+ t = threading.Thread(target=postdata, args=(d,))
+ t.daemon = True
+ t.start()
+
+#load ebpf
+b = BPF(text=bpf_text, cflags=["-Wno-macro-redefined"])
+#exec_map = b["exec_map"]
+exec_map = b.get_table("exec_map")
+
+if __name__ == "__main__":
+
+ #print(json.dumps(conf_data, indent = 4))
+ aops_zeus = conf_data["Repository"]["aops_zeus"]
+ conf_list = conf_data["Repository"]["conf_list"]
+ host_id = conf_data["Repository"]["host_id"]
+ domain_name = conf_data["Repository"]["domain_name"]
+
+ b["events"].open_perf_buffer(process_event)
+ while True:
+ b.perf_buffer_poll()
diff --git a/service/ragdoll-filetrace.service b/service/ragdoll-filetrace.service
new file mode 100644
index 0000000..1ac3366
--- /dev/null
+++ b/service/ragdoll-filetrace.service
@@ -0,0 +1,19 @@
+[Unit]
+Description=ragdoll-filetrace Service
+After=network.target
+StartLimitIntervalSec=30
+
+[Service]
+Type=simple
+ExecStartPre=/usr/bin/test -z "$(pgrep -f /usr/bin/ragdoll-filetrace)"
+ExecStart=/usr/bin/python3 /usr/bin/ragdoll-filetrace
+ExecStop=/bin/bash -c 'kill $(pgrep -d " " -f ragdoll-filetrace)'
+Restart=on-failure
+RestartSec=5s
+StartLimitBurst=3
+
+Environment=PYTHONUNBUFFERED=1
+
+[Install]
+WantedBy=multi-user.target
+
diff --git a/setup.py b/setup.py
index d21e2b6..1bd488a 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,5 @@
# coding: utf-8
-import sys
from setuptools import setup, find_packages
NAME = "ragdoll"
@@ -20,16 +19,15 @@ setup(
version=VERSION,
description="Configuration traceability",
author_email="",
- url="",
+ url="https://gitee.com/openeuler/gala-ragdoll",
keywords=["Swagger", "Configuration traceability"],
install_requires=REQUIRES,
packages=find_packages(),
package_data={'': ['swagger/swagger.yaml']},
include_package_data=True,
entry_points={
- 'console_scripts': ['ragdoll=ragdoll.__main__:main']},
+ 'console_scripts': ['ragdoll=ragdoll.manage:main']},
long_description="""\
A
"""
)
-
diff --git a/yang_modules/openEuler-hostname.yang b/yang_modules/openEuler-hostname.yang
index 5b0f2ca..ca39557 100644
--- a/yang_modules/openEuler-hostname.yang
+++ b/yang_modules/openEuler-hostname.yang
@@ -61,7 +61,7 @@ module openEuler-hostname {
description "The file name is hostname";
hostname:path "openEuler:/etc/hostname";
- hostname:type "text";
+ hostname:type "hostname";
hostname:spacer "";
}
}