"""Workflow CRD views: list/stop/delete Argo workflow records and render the
workflow DAG, node detail, log and layout JSON consumed by the web frontend.

NOTE(review): this file was recovered from a whitespace/HTML-stripped dump.
Flask route placeholders (``<crd_id>`` etc.) and the ``<pre><code>`` log
wrapper were re-inserted to match the visible view signatures — confirm
against the upstream project.
"""
import copy
import os
import re
import time
import flask
from myapp.views.baseSQLA import MyappSQLAInterface as SQLAInterface
from myapp.utils import core
from flask_babel import gettext as __
from flask_babel import lazy_gettext as _
from flask import jsonify, make_response, send_from_directory, send_file
import pysnooper
from myapp.models.model_job import Pipeline, Workflow
from flask_appbuilder.actions import action
from myapp.project import push_message
from myapp import app, appbuilder, db, event_logger, cache
from flask import request
from sqlalchemy import or_
from flask import Markup
from myapp.utils.py import py_k8s
import logging
from .baseApi import (
    MyappModelRestApi
)
from flask import (
    abort,
    flash,
    g,
    redirect
)
from .base import (
    DeleteMixin,
    MyappFilter,
    MyappModelView,
)
from flask_appbuilder import expose
import datetime, json

conf = app.config


class CRD_Filter(MyappFilter):
    """Row-level filter: admins see every CRD record, other users only see
    records whose labels JSON contains their username."""

    # @pysnooper.snoop()
    def apply(self, query, func):
        user_roles = [role.name.lower() for role in list(self.get_user_roles())]
        if "admin" in user_roles:
            return query.order_by(self.model.create_time.desc())
        # labels is stored as a JSON string, so a quoted substring match
        # ('"username"') is used instead of a relational join.
        return query.filter(
            or_(
                self.model.labels.contains('"%s"' % g.user.username),
            )
        ).order_by(self.model.create_time.desc())


class Crd_ModelView_Base():
    """Shared behavior for views backed by a Kubernetes CRD mirror table:
    listing, stopping (delete in k8s + mark row Deleted) and bulk delete."""

    list_columns = ['name', 'namespace_url', 'create_time', 'status', 'username', 'stop']
    show_columns = ['name', 'namespace', 'create_time', 'status', 'annotations', 'labels', 'spec', 'status_more', 'info_json_html']
    order_columns = ['id']
    base_permissions = ['can_show', 'can_list', 'can_delete']
    # base_permissions = ['list','delete','show']
    crd_name = ''  # subclasses set the CRD key used to look up CRD_INFO
    base_order = ('create_time', 'desc')
    base_filters = [["id", CRD_Filter, lambda: []]]

    # list
    def base_list(self):
        """Rebuild the mirror table from the cluster: wipe all rows, then
        re-insert one row per CRD currently present in any namespace."""
        k8s_client = py_k8s.K8s()
        crd_info = conf.get("CRD_INFO", {}).get(self.crd_name, {})
        if crd_info:
            crds = k8s_client.get_crd_all_namespaces(group=crd_info['group'], version=crd_info['version'], plural=crd_info['plural'])
            # Delete everything; ideally ids restart from 0.
            db.session.query(self.datamodel.obj).delete()
            # db.engine.execute("alter table %s auto_increment =0"%self.datamodel.pbj.__tablename__)
            # Re-insert the records.
            for crd in crds:
                try:
                    labels = json.loads(crd['labels'])
                    # run-rtx / pipeline-rtx labels carry the launching user.
                    if 'run-rtx' in labels:
                        crd['username'] = labels['run-rtx']
                    elif 'pipeline-rtx' in labels:
                        crd['username'] = labels['pipeline-rtx']
                except Exception as e:
                    logging.error(e)
                crd_model = self.datamodel.obj(**crd)
                db.session.add(crd_model)
            db.session.commit()

    # Per-subclass extra cleanup hook, run before the generic delete.
    def delete_more(self, item):
        pass

    # Generic bulk delete: remove the CRD from the cluster and mark the row.
    # @pysnooper.snoop()
    def base_muldelete(self, items):
        if not items:
            abort(404)
        for item in items:
            self.delete_more(item)
            if item:
                try:
                    labels = json.loads(item.labels) if item.labels else {}
                    kubeconfig = None
                    # Resolve the kubeconfig of the cluster the pipeline ran on.
                    if 'pipeline-id' in labels:
                        pipeline = db.session.query(Pipeline).filter_by(id=int(labels['pipeline-id'])).first()
                        if pipeline:
                            kubeconfig = pipeline.project.cluster.get('KUBECONFIG', '')
                    k8s_client = py_k8s.K8s(kubeconfig)
                    crd_info = conf.get("CRD_INFO", {}).get(self.crd_name, {})
                    if crd_info:
                        k8s_client.delete_crd(group=crd_info['group'], version=crd_info['version'], plural=crd_info['plural'], namespace=item.namespace, name=item.name)
                        # db_crds = db.session.query(self.datamodel.obj).filter(self.datamodel.obj.name.in_(crd_names)).all()
                        # for db_crd in db_crds:
                        #     db_crd.status = 'Deleted'
                        # db.session.commit()
                        item.status = 'Deleted'
                        item.change_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                        db.session.commit()
                        push_message(conf.get('ADMIN_USER', '').split(','), 'stop %s %s' % (crd_info['plural'], item.name))
                except Exception as e:
                    flash(str(e), "danger")

    def pre_delete(self, item):
        """FAB hook: stop the CRD in the cluster before the DB row is deleted."""
        self.base_muldelete([item])

    # NOTE(review): route placeholder restored — the view takes crd_id.
    @expose("/stop/<crd_id>")
    def stop(self, crd_id):
        """Stop one CRD by primary key, then redirect back to the list."""
        crd = db.session.query(self.datamodel.obj).filter_by(id=crd_id).first()
        self.base_muldelete([crd])
        flash(__('清理完成'), 'success')
        self.update_redirect()
        return redirect(self.get_redirect())

    @action("stop_all", "停止", "停止所有选中的workflow?", "fa-trash", single=False)
    def stop_all(self, items):
        """Bulk action: stop all selected records (rows are kept, marked Deleted)."""
        self.base_muldelete(items)
        self.update_redirect()
        return redirect(self.get_redirect())

    # @event_logger.log_this
    # @expose("/list/")
    # @has_access
    # def list(self):
    #     self.base_list()
    #     widgets = self._list()
    #     res = self.render_template(
    #         self.list_template, title=self.list_title, widgets=widgets
    #     )
    #     return res

    @action("muldelete", "删除", "确定删除所选记录?", "fa-trash", single=False)
    def muldelete(self, items):
        """Bulk action: stop the CRDs in the cluster AND delete the DB rows."""
        self.base_muldelete(items)
        for item in items:
            db.session.delete(item)
        db.session.commit()
        return json.dumps(
            {
                "success": [],
                "fail": []
            }, indent=4, ensure_ascii=False
        )


class Workflow_Filter(MyappFilter):
    """Same visibility rule as CRD_Filter, scoped to the Workflow model."""

    # @pysnooper.snoop()
    def apply(self, query, func):
        user_roles = [role.name.lower() for role in list(self.get_user_roles())]
        if "admin" in user_roles:
            return query.order_by(self.model.create_time.desc())
        return query.filter(
            or_(
                self.model.labels.contains('"%s"' % g.user.username),
            )
        ).order_by(self.model.create_time.desc())


# NOTE(review): the icon values below appear to have been stripped by the
# same sanitizer that removed HTML tags (they were presumably inline SVG
# markup); they are kept as empty strings — confirm against upstream.
default_status_icon = ''
status_icon = {
    "Running": '',
    "Error": '',
    "Failed": '',
    "Freeze": '',
    'Succeeded': ''
}
default_status_color = "#1C1C1C"
status_color = {
    "Running": "#2acc61",
    "Error": "#CB1B45",
    "Failed": "#CB1B45",
    "Freeze": "#33A6B8",
    "Succeeded": "#1B813E"
}


# http://data.tme.woa.com/frontend/commonRelation?backurl=/workflow_modelview/api/web/dag/idc/pipeline/crontab-standalone-train-znvv7
# Lists currently running workflows.
class Workflow_ModelView_Base(Crd_ModelView_Base):
    base_filters = [["id", Workflow_Filter, lambda: []]]

    # Delete the previous workflow and its containers.
    # @pysnooper.snoop()
    def delete_more(self, workflow):
        """Delete the Argo workflow (by run-id) from the cluster the owning
        pipeline ran on, then mark the row Deleted. Best-effort: errors are
        only printed."""
        try:
            k8s_client = py_k8s.K8s(workflow.pipeline.project.cluster.get('KUBECONFIG', ''))
            k8s_client.delete_workflow(
                all_crd_info=conf.get("CRD_INFO", {}),
                namespace=workflow.namespace,
                run_id=json.loads(workflow.labels).get("run-id", '')
            )
            workflow.status = 'Deleted'
            workflow.change_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            db.session.commit()
        except Exception as e:
            print(e)

    # NOTE(review): route placeholder restored — the view takes crd_id.
    @event_logger.log_this
    @expose("/stop/<crd_id>")
    def stop(self, crd_id):
        """Stop one workflow by primary key and redirect to the workflow list page."""
        workflow = db.session.query(self.datamodel.obj).filter_by(id=crd_id).first()
        self.delete_more(workflow)
        flash(__('清理完成'), 'success')
        url = conf.get('MODEL_URLS', {}).get('workflow', '')
        return redirect(url)

    label_title = _('运行实例')
    datamodel = SQLAInterface(Workflow)
    list_columns = ['project', 'pipeline_url', 'cluster', 'create_time', 'change_time', 'elapsed_time', 'final_status', 'status', 'username', 'log', 'stop']
    search_columns = ['status', 'labels', 'name', 'cluster', 'annotations', 'spec', 'status_more', 'username', 'create_time']
    cols_width = {
        "project": {"type": "ellip2", "width": 100},
        "pipeline_url": {"type": "ellip2", "width": 300},
        "create_time": {"type": "ellip2", "width": 200},
        "change_time": {"type": "ellip2", "width": 200},
        "final_status": {"type": "ellip1", "width": 250},
        "elapsed_time": {"type": "ellip1", "width": 150},
    }
    spec_label_columns = {
        "final_status": _('删除前状态')
    }
    show_columns = ['name', 'namespace', 'create_time', 'status', 'task_status', 'annotations_html', 'labels_html', 'spec_html', 'status_more_html', 'info_json_html']
    crd_name = 'workflow'

    def get_dag(self, cluster_name, namespace, workflow_name, node_name=''):
        """Build the frontend DAG payload for one workflow.

        Returns ``(layout_config, dag_config, node_detail_config, workflow_obj)``:
        page-level layout dict, recursive node list, the matched node's detail
        dict (when node_name is given), and the raw workflow record — or
        ``({}, {}, {}, None)`` when the workflow cannot be found.
        """
        k8s_client = py_k8s.K8s(conf.get('CLUSTERS', {}).get(cluster_name, {}).get('KUBECONFIG', ''))
        crd_info = conf.get('CRD_INFO', {}).get('workflow', {})
        try_num = 3
        workflow_obj = None
        # Query up to three times: live CRD first, DB mirror row as fallback.
        while not workflow_obj and try_num > 0:
            workflow_obj = k8s_client.get_one_crd(group=crd_info['group'], version=crd_info['version'], plural=crd_info['plural'], namespace=namespace, name=workflow_name)
            workflow_model = db.session.query(Workflow).filter_by(name=workflow_name).first()
            if not workflow_obj:
                if workflow_model:
                    workflow_obj = workflow_model.to_json()
            try_num -= 1
            if not workflow_obj:
                time.sleep(2)
        # Return empty payloads when nothing was found.
        if not workflow_obj:
            return {}, {}, {}, None
        # print(workflow_obj)
        labels = json.loads(workflow_obj.get('labels', "{}"))
        spec = json.loads(workflow_obj.get('spec', '{}'))
        nodes_spec = {}
        # Index task templates by name (the entrypoint template is the DAG itself).
        for node in spec['templates']:
            if node['name'] != spec['entrypoint']:
                nodes_spec[node['name']] = node
        annotations = json.loads(workflow_obj.get('annotations', '{}'))
        status_more = json.loads(workflow_obj.get('status_more', '{}'))
        layout_config = {}
        dag_config = []
        self.node_detail_config = {}
        layout_config["create_time"] = workflow_obj['create_time']
        layout_config['search'] = ''
        layout_config["status"] = workflow_obj['status']
        layout_config.update(labels)
        layout_config['progress'] = status_more.get('progress', '0/0')
        layout_config["start_time"] = k8s_client.to_local_time(status_more.get('startedAt', ''))
        layout_config['finish_time'] = k8s_client.to_local_time(status_more.get('finishedAt', ''))
        layout_config['crd_json'] = {
            "apiVersion": "argoproj.io/v1alpha1",
            "kind": "Workflow",
            "metadata": {
                "annotations": core.decode_unicode_escape(annotations),
                "name": workflow_name,
                "labels": labels,
                "namespace": namespace
            },
            "spec": spec,
            "status": status_more
        }
        # NOTE(review): the code below assumes a pipeline-id label is always
        # present and resolvable; `pipeline` is referenced unconditionally
        # further down — confirm workflows always carry this label.
        if int(layout_config.get("pipeline-id", '0')):
            pipeline = db.session.query(Pipeline).filter_by(id=int(layout_config.get("pipeline-id", '0'))).first()
            if pipeline:
                layout_config['pipeline-name'] = pipeline.name
                layout_config['pipeline-describe'] = pipeline.describe
        dag_default_status_icon = ''
        dag_status_icon = {
            "Running": '',
            "Error": '',
            "Failed": '',
            'Succeeded': ''
        }
        layout_config['icon'] = dag_status_icon.get(workflow_obj['status'], dag_default_status_icon)
        layout_config['title'] = workflow_name
        layout_config['right_button'] = []
        if workflow_model:
            layout_config["right_button"].append(
                {
                    "label": "终止",
                    "url": f"/workflow_modelview/api/stop/{workflow_model.id}"
                }
            )
        layout_config['right_button'].append(
            {
                "label": __("任务流"),
                "url": f'/pipeline_modelview/api/web/{pipeline.id}'
            }
        )
        layout_config['detail'] = [
            [
                {
                    "name": "cluster",
                    "label": __("集群"),
                    "value": cluster_name
                },
                # {
                #     "name": "describe",
                #     "label": "描述",
                #     "value": pipeline.describe
                # },
                {
                    "name": "id",
                    "label": "id",
                    "value": f'{pipeline.id}({pipeline.describe})'
                },
                {
                    "name": "status",
                    "label": __("状态"),
                    "value": workflow_obj['status']
                },
                {
                    "name": "message",
                    "label": __("消息"),
                    "value": status_more.get('message', '')
                },
            ],
            [
                {
                    "name": "create_time",
                    "label": __("创建时间"),
                    "value": workflow_obj['create_time']
                },
                {
                    "name": "start_time",
                    "label": __("开始时间"),
                    "value": k8s_client.to_local_time(status_more['startedAt']) if 'startedAt' in status_more else ''
                },
                {
                    "name": "finish_time",
                    "label": __("结束时间"),
                    "value": k8s_client.to_local_time(status_more['finishedAt']) if 'finishedAt' in status_more else ''
                },
                {
                    "name": "run_id",
                    "label": "run-id",
                    "value": labels.get("run-id", '')
                }
            ],
            [
                {
                    "name": "created_by",
                    "label": __("创建人"),
                    "value": pipeline.created_by.username
                },
                {
                    "name": "run_user",
                    "label": __("执行人"),
                    "value": labels.get("run-rtx", '')
                },
                {
                    "name": "progress",
                    "label": __("进度"),
                    "value": status_more.get('progress', '0/0')
                },
                {
                    "name": "schedule_type",
                    "label": __("调度类型"),
                    "value": labels.get("schedule_type", 'once')
                },
            ],
        ]
        templates = {}
        for node in spec['templates']:
            templates[node['name']] = node

        # Recursively walk status.nodes children, appending one ui node dict
        # per child onto `dag`; also captures node_detail_config when a child
        # matches node_name.
        # @pysnooper.snoop()
        def fill_child(self, dag, upstream_node_name):
            try:
                childs = status_more['nodes'][upstream_node_name].get('children', [])
                for child in childs:
                    try:
                        # pod_name = child  # wrong: that is workflow name + random suffix
                        status = status_more['nodes'][child].get('phase', 'unknown')
                        task_name = status_more['nodes'][child]['templateName']
                        pod_name = workflow_name + "-" + task_name + "-" + child.replace(workflow_name, '').strip('-')
                        s3_key = ''
                        metric_key = ''
                        output_key = ''
                        artifacts = status_more['nodes'][child].get('outputs', {}).get('artifacts', [])
                        for artifact in artifacts:
                            if artifact['name'] == 'main-logs':
                                s3_key = artifact.get('s3', {}).get('key', '')
                            if artifact['name'] == 'metric':
                                metric_key = artifact.get('s3', {}).get('key', '')
                            if artifact['name'] == 'output':
                                output_key = artifact.get('s3', {}).get('key', '')
                        retry = nodes_spec[task_name].get('retryStrategy', {}).get("limit", 0)
                        # The launcher node of a retryable task has no log or command.
                        displayName = status_more['nodes'][child].get('displayName', '')
                        displayName = displayName.replace("(0)", '(first)')
                        match = re.findall(r"(\([1-9]+\))", displayName)
                        if len(match) > 0:
                            retry_index = match[0].replace("(", '').replace(")", '')
                            displayName = displayName.replace(match[0], __('(第%s次重试)') % retry_index)
                        title = nodes_spec[task_name].get('metadata', {}).get("annotations", {}).get("task", pod_name) + f"({displayName})"
                        node_type = status_more['nodes'][child]['type']
                        if node_type == "Retry":
                            title += __("(有%s次重试机会)") % retry
                        if node_type == 'Skipped':
                            title += "(skip)"
                        nodeSelector = nodes_spec[task_name].get('nodeSelector', {})
                        node_selector = ''
                        for key in nodeSelector:
                            node_selector += key + "=" + nodeSelector[key] + ","
                        node_selector = node_selector.strip(',')
                        requests_resource = nodes_spec[task_name].get('container', {}).get("resources", {}).get("requests", {})
                        resource_gpu = "0"
                        # Match any configured GPU resource name present in requests.
                        for resource_name in list(conf.get('GPU_RESOURCE', {}).values()):
                            if resource_name in requests_resource:
                                resource_gpu = str(requests_resource.get(resource_name, "0"))
                                break
                        ui_node = {
                            "node_type": node_type,
                            "nid": status_more['nodes'][child]['id'],
                            "pid": status_more['nodes'][upstream_node_name]['id'],
                            "title": title,
                            "pod": pod_name,
                            "start_time": k8s_client.to_local_time(status_more['nodes'][child].get('startedAt', '')),
                            "finish_time": k8s_client.to_local_time(status_more['nodes'][child].get('finishedAt', '')),
                            "detail_url": self.route_base + f"/web/node_detail/{cluster_name}/{namespace}/{workflow_name}/{child}",
                            "name": pod_name,
                            "outputs": status_more['nodes'][child].get('outputs', {}),
                            # "icon": '',
                            "icon": status_icon.get(status, default_status_icon),
                            "status": {
                                "label": status,
                                "icon": status_icon.get(status, default_status_icon)
                            },
                            "message": status_more['nodes'][child].get('message', ''),
                            "node_shape": "rectangle",
                            "color": status_color.get(status, default_status_color),
                            "task_name": task_name,
                            "task_id": nodes_spec[task_name].get('metadata', {}).get("labels", {}).get("task-id", ''),
                            "task_label": nodes_spec[task_name].get('metadata', {}).get("annotations", {}).get("task", ''),
                            "volumeMounts": nodes_spec[task_name].get('container', {}).get("volumeMounts", []),
                            "volumes": nodes_spec[task_name].get('volumes', []),
                            "node_selector": node_selector,
                            "s3_key": s3_key,
                            "metric_key": metric_key,
                            "output_key": output_key,
                            "retry": retry,
                            "resource_cpu": str(nodes_spec[task_name].get('container', {}).get("resources", {}).get("requests", {}).get("cpu", '0')),
                            "resource_memory": str(nodes_spec[task_name].get('container', {}).get("resources", {}).get("requests", {}).get("memory", '0')),
                            "resource_gpu": resource_gpu,
                            "children": []
                        }
                        if node_name == child and not self.node_detail_config:
                            self.node_detail_config = ui_node
                        fill_child(self, ui_node['children'], child)
                        dag.append(ui_node)
                    except Exception as e:
                        print(e)
            except Exception as e:
                print(e)

        fill_child(self, dag_config, workflow_name)
        return layout_config, dag_config, self.node_detail_config, workflow_obj

    # NOTE(review): route placeholders restored to match the view signature.
    @expose("/web/log/<cluster_name>/<namespace>/<workflow_name>/<pod_name>", methods=["GET", ])
    @expose("/web/log_node/<cluster_name>/<namespace>/<workflow_name>/<pod_name>", methods=["GET", ])
    @expose("/web/log/<cluster_name>/<namespace>/<workflow_name>/<pod_name>/<file_name>", methods=["GET", ])
    def log_node(self, cluster_name, namespace, workflow_name, pod_name, file_name='main.log'):
        """Fetch a pod's archived log from minio; render raw HTML for the
        /web/log/ route, a JSON envelope otherwise."""
        log = self.get_minio_content(f'{workflow_name}/{pod_name}/{file_name}')
        if '/web/log/' in request.path:
            # NOTE(review): tag wrapper restored (was stripped by sanitizer).
            return Markup("<pre><code>%s</code></pre>" % log)
        return jsonify({
            "status": 0,
            "message": "",
            "result": {
                "type": "html",
                "value": Markup(log)
            }
        })

    # @pysnooper.snoop(watch_explode=())
    def get_minio_content(self, key, decompress=True, download=False):
        """Read an object from the 'mlpipeline' minio bucket.

        With download=True, returns a file-download response instead. With
        decompress=True, .zip content is zlib-inflated and .tgz content is
        unpacked, returning the concatenated text files inside. ANSI escape
        sequences are stripped from plain-text results. Errors are returned
        as their string form (best-effort viewer, never raises).
        """
        if request.host == '127.0.0.1':
            return
        content = ''
        from minio import Minio
        try:
            minioClient = Minio(
                endpoint=conf.get('MINIO_HOST', 'minio.kubeflow:9000'),  # minio.kubeflow:9000 '9.135.92.226:9944'
                access_key='minio',
                secret_key='minio123',
                secure=False
            )
            if download:
                save_path = "/mlpipeline/" + key
                os.makedirs(os.path.dirname(save_path), exist_ok=True)
                minioClient.fget_object('mlpipeline', key, save_path)
                response = make_response(send_from_directory(os.path.dirname(save_path), os.path.basename(save_path), as_attachment=True, conditional=True))
                return response
            response = None
            try:
                response = minioClient.get_object('mlpipeline', key)
                content = response.data
            except Exception as e:
                content = str(e)
                print(e)
                return content
            finally:
                if response:
                    response.close()
                    response.release_conn()
        except Exception as e:
            print(e)
            return str(e)
        if decompress:
            if '.zip' in key:
                import zlib
                content = zlib.decompress(content)
            if '.tgz' in key:
                path = 'minio/' + key
                if os.path.exists(path):
                    os.remove(path)
                os.makedirs(os.path.dirname(path), exist_ok=True)
                file = open(path, mode='wb')
                file.write(content)
                file.close()
                import tarfile
                # Open the tgz archive.
                with tarfile.open(path, 'r:gz') as tar:
                    # Extract everything next to the archive.
                    tar.extractall(os.path.dirname(path))
                files = os.listdir(os.path.dirname(path))
                content = ''
                for file in files:
                    path = os.path.join(os.path.dirname(path), file)
                    if os.path.isfile(path) and path[path.rindex('.'):] in ['.txt', '.json', '.log', '.csv']:
                        content = ''.join(open(os.path.join(os.path.dirname(path), file)).readlines())
                        content += '\n'
        # print(key[key.rindex('.'):])
        if key[key.rindex('.'):] in ['.txt', '.json', '.log', '.csv']:
            if type(content) == bytes:
                content = content.decode()
            # Strip ANSI escape sequences.
            ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
            content = ansi_escape.sub('', content)
        return content

    # NOTE(review): route placeholders restored to match the view signature.
    @expose("/web/node_detail/<cluster_name>/<namespace>/<workflow_name>/<node_name>", methods=["GET", ])
    def web_node_detail(self, cluster_name, namespace, workflow_name, node_name):
        """Build the tabbed node-detail JSON (io/pod/visualization/workflow
        tabs) for one DAG node."""
        layout_config, dag_config, node_detail_config, workflow = self.get_dag(cluster_name, namespace, workflow_name, node_name)
        # print(node_detail_config)
        if not node_detail_config:
            return jsonify({})
        del node_detail_config['children']
        pod_name = node_detail_config["pod"]
        # Fetch pod information.
        k8s_client = py_k8s.K8s(conf.get('CLUSTERS', {}).get(cluster_name, {}).get('KUBECONFIG', ''))
        pod_yaml = __('pod未发现')
        pod_status = ''
        pod = None
        try:
            # from kubernetes.client import V1Pod
            pod = k8s_client.get_pod_humanized(namespace=namespace, pod_name=pod_name)
            if pod:
                pod_status = pod.get("status", {}).get('phase', 'Unknown')
                # Remove ugly / unnecessary information before display.
                for key in copy.deepcopy(pod['metadata'].get('annotations', {})):
                    if 'cni' in key or 'kubectl' in key:
                        del pod['metadata']['annotations'][key]
                for key in ['creationTimestamp', 'resourceVersion', 'uid']:
                    if key in pod['metadata']:
                        del pod['metadata'][key]
                for key in ['initContainers', 'enableServiceLinks', 'dnsPolicy', 'tolerations', 'terminationGracePeriodSeconds']:
                    if key in pod['spec']:
                        del pod['spec'][key]
                volumes = []
                for index, volume in enumerate(pod['spec'].get('volumes', [])):
                    if 'my-minio-cred' in str(volume) or 'kube-api-access' in str(volume):
                        pass
                    else:
                        volumes.append(volume)
                pod['spec']['volumes'] = volumes
                if 'status' in pod:
                    del pod['status']
                containers = []
                # Keep only argo (emissary) containers; drop ARGO_* env noise.
                for container in pod['spec']['containers']:
                    if 'emissary' in str(container['command']):
                        container_temp = copy.deepcopy(container)
                        envs = []
                        for env in container_temp.get('env', []):
                            if 'ARGO_' not in str(env):
                                envs.append(env)
                        container_temp['env'] = envs
                        containers.append(container_temp)
                pod['spec']['containers'] = containers
                pod_yaml = json.dumps(pod, indent=4, ensure_ascii=False, default=str)
                # import yaml
                # pod_yaml = yaml.safe_dump(yaml.load(pod_yaml, Loader=yaml.SafeLoader), default_flow_style=False, indent=4)
                # # print(pod)
        except Exception as e:
            print(e)
        host_url = "http://" + conf.get("CLUSTERS", {}).get(cluster_name, {}).get("HOST", request.host)
        online_pod_log_url = "/k8s/web/log/%s/%s/%s/main" % (cluster_name, namespace, pod_name)
        offline_pod_log_url = f'/workflow_modelview/api/web/log/{cluster_name}/{namespace}/{workflow_name}/{pod_name}/main.log'
        offline_pod_metric_url = f'/workflow_modelview/api/web/log/{cluster_name}/{namespace}/{workflow_name}/{pod_name}/metric.tgz'
        debug_online_url = "/k8s/web/debug/%s/%s/%s/main" % (cluster_name, namespace, pod_name)
        grafana_pod_url = host_url + conf.get('GRAFANA_TASK_PATH', '/grafana/d/pod-info/pod-info?var-pod=') + pod_name
        labels = json.loads(workflow.get('labels', "{}"))
        pipeline_name = labels.get('pipeline-name', workflow_name)
        bind_pod_url = f'/k8s/web/search/{cluster_name}/{namespace}/{pipeline_name}'
        echart_option = ''
        metric_content = ''
        try:
            if node_detail_config['metric_key']:
                metric_content = self.get_minio_content(node_detail_config['metric_key'], decompress=True)
                # print(metric_content)
        except Exception as e:
            print(e)
        message = node_detail_config.get('message', '')
        node_type = node_detail_config.get('node_type', '')
        # Map volume name -> human-readable backing store description.
        volumes = {}
        for vol in node_detail_config['volumes']:
            if vol.get("persistentVolumeClaim", {}).get("claimName", ''):
                volumes[vol['name']] = vol.get("persistentVolumeClaim", {}).get("claimName", '') + "(pvc)"
            if vol.get("hostPath", {}).get("path", ''):
                volumes[vol['name']] = vol.get("hostPath", {}).get("path", '') + "(hostpath)"
            if vol.get("emptyDir", {}).get("medium", ''):
                volumes[vol['name']] = vol.get("emptyDir", {}).get("medium", '')
        tab1 = [
            {
                "tabName": __("输入输出"),
                "content": [
                    {
                        "groupName": __("消息"),
                        "groupContent": {
                            "label": __('消息'),
                            "value": message if message else __('运行正常'),
                            "type": 'html'
                        }
                    },
                    {
                        "groupName": __("任务详情"),
                        "groupContent": {
                            "label": __('任务详情'),
                            "value": {
                                "task id": node_detail_config['task_id'],
                                "task name": node_detail_config['task_name'],
                                "task label": node_detail_config['task_label'],
                                "task start": node_detail_config['start_time'],
                                "task finish": node_detail_config['finish_time'],
                                "pod name": node_detail_config['pod'],
                                "log path": node_detail_config['s3_key'],
                                "metric path": node_detail_config['metric_key'],
                                "retry": node_detail_config['retry'],
                                "node selector": node_detail_config['node_selector'],
                                "resource cpu": node_detail_config['resource_cpu'],
                                "resource memory": node_detail_config['resource_memory'],
                                "resource gpu": node_detail_config['resource_gpu']
                            },
                            "type": 'map'
                        }
                    },
                    {
                        "groupName": __("挂载详情"),
                        "groupContent": {
                            "label": __('挂载详情'),
                            "value": dict(
                                [[__('容器路径'), __('主机路径')]] + [[item['mountPath'], volumes.get(item['name'], '')] for item in node_detail_config['volumeMounts']]),
                            "type": 'map'
                        }
                    },
                ],
                "bottomButton": []
            }
        ]
        if node_type == 'Pod':
            tab1[0]['bottomButton'] = [
                {
                    # "icon": '',
                    "text": __("在线日志") + ("" if pod else '(no)'),
                    "url": online_pod_log_url
                },
                {
                    # "icon": '',
                    "text": __("在线调试") + ("" if pod_status == 'Running' else '(no)'),
                    "url": debug_online_url
                },
                {
                    # "icon": '',
                    "text": __("相关容器") + ("" if pod else '(no)'),
                    "url": bind_pod_url
                },
                {
                    # "icon": '',
                    "text": __("资源使用"),
                    "url": grafana_pod_url
                },
                {
                    # "icon": '',
                    "text": __("离线日志") + ("" if node_type == 'Pod' and pod_status != 'Running' and pod_status != 'Pending' else '(no)'),
                    "url": offline_pod_log_url
                }
            ]
        tab2 = [
            {
                "tabName": __("pod信息"),
                "content": [
                    {
                        "groupName": __("pod信息"),
                        "groupContent": {
                            "value": Markup(pod_yaml),
                            "type": 'text'
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        tab3 = [
            {
                "tabName": __("在线日志"),
                "content": [
                    {
                        "groupName": __("在线日志"),
                        "groupContent": {
                            "value": f'/k8s/web/log/{cluster_name}/{namespace}/{pod_name}/main' if pod_status == 'Running' else __("pod未发现"),
                            "type": 'iframe' if pod_status == 'Running' else "html"
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        tab4 = [
            {
                "tabName": __("在线调试"),
                "content": [
                    {
                        "groupName": __("在线调试"),
                        "groupContent": {
                            "value": f'/k8s/web/exec/{cluster_name}/{namespace}/{pod_name}/main' if pod_status == 'Running' else __("pod已停止运行"),
                            "type": 'iframe' if pod_status == 'Running' else 'html'
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        tab5 = [
            {
                "tabName": __("相关容器"),
                "content": [
                    {
                        "groupName": __("相关容器"),
                        "groupContent": {
                            "value": {
                                "url": host_url + conf.get('K8S_DASHBOARD_CLUSTER', '/k8s/dashboard/cluster/') + f"#/search?namespace={namespace}&q={pod_name}",
                                "target": "div.kd-chrome-container.kd-bg-background",
                            } if pod_status else __("pod未发现"),
                            "type": 'iframe' if pod_status else "html"
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        tab6 = [
            {
                "tabName": __("资源使用情况"),
                "content": [
                    {
                        "groupName": __("资源使用情况"),
                        "groupContent": {
                            "value": {
                                "url": host_url + conf.get('GRAFANA_TASK_PATH', '/grafana/d/pod-info/pod-info?var-pod=') + pod_name,
                            },
                            "type": 'iframe'
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        tab7 = [
            {
                "tabName": __("结果可视化"),
                "content": [
                    {
                        "groupName": "",
                        "groupContent": {
                            "value": Markup("提示:仅企业版支持任务结果、模型指标、数据集可视化预览"),  # the options value
                            "type": 'html'
                        }
                    },
                ],
                "bottomButton": []
            },
        ]
        # When there is no real metric, show bundled echart demo charts.
        if not metric_content:
            echart_demos_file = os.listdir('myapp/utils/echart/')
            for file in echart_demos_file:
                # print(file)
                file_path = os.path.join('myapp/utils/echart/', file)
                can = ['area-stack.json', 'rose.json', 'mix-line-bar.json', 'pie-nest.json', 'bar-stack.json', 'candlestick-simple.json', 'graph-simple.json', 'tree-polyline.json', 'sankey-simple.json', 'radar.json', 'sunburst-visualMap.json', 'parallel-aqi.json', 'funnel.json', 'sunburst-visualMap.json', 'scatter-effect.json']
                not_can = ['bar3d-punch-card.json', 'simple-surface.json']  # the ones that do not render
                # if '.json' in file and file in []:
                if '.json' in file and file in can:
                    echart_option = ''.join(open(file_path).readlines())
                    # print(echart_option)
                    tab7[0]['content'].append(
                        {
                            "groupName": __("任务结果示例:") + file.replace('.json', '') + __("类型图表"),
                            "groupContent": {
                                "value": echart_option,  # the options value
                                "type": 'echart'
                            }
                        }
                    )
        tab8 = [
            {
                "tabName": __("workflow信息"),
                "content": [
                    {
                        "groupName": __("json信息"),
                        "groupContent": {
                            "value": Markup(json.dumps(layout_config.get("crd_json", {}), indent=4, ensure_ascii=False)),
                            "type": 'text'
                        }
                    }
                ],
                "bottomButton": []
            }
        ]
        node_detail = {
            "detail": tab1 + tab2 + tab7 + tab8,
            "control": {
                "width": "700px"
            }
        }
        return jsonify(
            {
                "status": 0,
                "message": "success",
                "result": node_detail
            }
        )

    # NOTE(review): route placeholders restored to match the view signature.
    @expose("/web/dag/<cluster_name>/<namespace>/<workflow_name>", methods=["GET", ])
    # @pysnooper.snoop()
    def web_dag(self, cluster_name, namespace, workflow_name):
        """Return the full DAG payload (control + dag + layout) for the frontend."""
        layout_config, dag_config, node_detail_config, workflow = self.get_dag(cluster_name, namespace, workflow_name)
        back = {
            "control": {
                "node_ops": ["detail", "explore"],  # operations a node supports; extend when more features land
                "direction": "vertical",  # vertical or horizontal
            },
            "dag": dag_config,
            "layout": layout_config
        }
        return jsonify(
            {
                "status": 0,
                "message": "success",
                "result": back
            }
        )

    @expose("/web/layout/<cluster_name>/<namespace>/<workflow_name>", methods=["GET", ])
    def web_layout(self, cluster_name, namespace, workflow_name):
        """Return only the page-layout portion of the DAG payload."""
        layout_config, dag_config, node_detail_config, workflow = self.get_dag(cluster_name, namespace, workflow_name)
        layout_config['title'] = f"{cluster_name} {namespace} {workflow_name} {layout_config['start_time']} {layout_config['finish_time']}"
        return jsonify(
            {
                "status": 0,
                "message": "success",
                "result": layout_config
            }
        )


class Workflow_ModelView(Workflow_ModelView_Base, MyappModelView, DeleteMixin):
    datamodel = SQLAInterface(Workflow)


appbuilder.add_view_no_menu(Workflow_ModelView)


# Register the REST API variant.
class Workflow_ModelView_Api(Workflow_ModelView_Base, MyappModelRestApi):
    datamodel = SQLAInterface(Workflow)
    route_base = '/workflow_modelview/api'


appbuilder.add_api(Workflow_ModelView_Api)