初始化脚本目录变更

This commit is contained in:
data-infra 2023-12-11 10:00:49 +08:00
parent 1047044b21
commit fdd2739c66
10 changed files with 0 additions and 12790 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -1,29 +0,0 @@
[
{
"job_type": "Job",
"project": "public",
"name": "test",
"namespace": "automl",
"describe": "nni功能测试",
"parallel_trial_count": 3,
"max_trial_count": 12,
"objective_type": "maximize",
"objective_goal": 0.99,
"objective_metric_name": "accuracy",
"algorithm_name": "Random",
"parameters": {
"batch_size": {"_type":"choice", "_value": [16, 32, 64, 128]},
"momentum":{"_type":"uniform","_value":[0, 1]}
},
"job_json": {
},
"job_worker_image": "ccr.ccs.tencentyun.com/cube-studio/nni:20230601",
"working_dir": "/mnt/admin/pipeline/example/nni/",
"job_worker_command": "python demo.py",
"resource_memory": "1G",
"resource_cpu": "1",
"resource_gpu": "0"
}
]

View File

@@ -1,9 +0,0 @@
name,label,describe,source_type,source,industry,field,usage,research,storage_class,file_type,status,years,url,path,download_url,storage_size,entries_num,duration,price,status,icon,owner
MNIST,手写数字数据集,"包含一组60,000个示例的训练集和一组10,000个示例的测试集。数字已经过尺寸标准化以适合 20x20 像素框,同时保持其纵横比,并在固定尺寸的图像中居中",开源,github,图像处理,视觉,传统机器学习和深度学习入门,svm、分类,压缩,gz,正常,,http://yann.lecun.com/exdb/mnist/,,"http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz",11M," 60,000 个示例的训练集和 10,000 个示例的测试集",,0,正常,/static/assets/images/dataset/mnist.png,admin
Fashion-MNIST,时尚产品数据,"包含60,000个训练图像和10,000个测试图像。类似MNIST的时尚产品数据库。",开源,github,图像处理,视觉,传统机器学习和深度学习入门,图像分类,压缩,gz,正常,,https://github.com/zalandoresearch/fashion-mnist,,"http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz
http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz
http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz
http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz",5M,"60,000个训练图像和10,000个测试图像",,0,正常,/static/assets/images/dataset/fashion-mnist.png,admin
1 name label describe source_type source industry field usage research storage_class file_type status years url path download_url storage_size entries_num duration price status icon owner
2 MNIST 手写数字数据集 包含一组60,000个示例的训练集和一组10,000个示例的测试集。数字已经过尺寸标准化,以适合 20x20 像素框,同时保持其纵横比,并在固定尺寸的图像中居中 开源 github 图像处理 视觉 传统机器学习和深度学习入门 svm、分类 压缩 gz 正常 http://yann.lecun.com/exdb/mnist/ http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz 11M 60,000 个示例的训练集和 10,000 个示例的测试集 0 正常 /static/assets/images/dataset/mnist.png admin
3 Fashion-MNIST 时尚产品数据 包含60,000个训练图像和10,000个测试图像。类似MNIST的时尚产品数据库。 开源 github 图像处理 视觉 传统机器学习和深度学习入门 图像分类 压缩 gz 正常 https://github.com/zalandoresearch/fashion-mnist http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz 5M 60,000个训练图像和10,000个测试图像 0 正常 /static/assets/images/dataset/fashion-mnist.png admin

File diff suppressed because it is too large Load Diff

View File

@@ -1,93 +0,0 @@
{
"tf-mnist": {
"project_name": "public",
"service_name": "mnist-202208011",
"model_name": "mnist",
"service_describe": "tf 图像分类",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/tfserving:2.3.4",
"model_version": "v2022.08.01.1",
"model_path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/tf-mnist.tar.gz",
"service_type": "tfserving",
"env": "TF_CPP_VMODULE=http_server=1\nTZ=Asia/Shanghai",
"ports": "8501",
"metrics": "8501:/metrics",
"command": "wget https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/tf-mnist.tar.gz && tar -zxvf tf-mnist.tar.gz && mkdir -p /models/mnist/202207281/ && cp -r tf-mnist/* /models/mnist/202207281/ && /usr/bin/tf_serving_entrypoint.sh --model_config_file=/config/models.config --monitoring_config_file=/config/monitoring.config --platform_config_file=/config/platform.config",
"health": "8501:/v1/models/mnist/versions/202207281/metadata",
"volume_mount": "kubeflow-user-workspace(pvc):/mnt,kubeflow-archives(pvc):/archives",
"resource_memory": "2G",
"resource_cpu": "2",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/tree/master/images/serving/tfserving/example"
}
},
"pytorch-resnet50": {
"project_name": "public",
"service_name": "resnet50-202208012",
"model_name": "resnet50",
"service_describe": "pytorch 图像分类",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/torchserve:0.5.3-cpu",
"model_version": "v2022.08.01.2",
"model_path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.mar",
"service_type": "torch-server",
"env": "",
"ports": "8080,8081",
"metrics": "8082:/metrics",
"workdir": "/models",
"command": "wget https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.mar && mkdir -p /models && cp /config/* /models/ && cp resnet50.mar /models/ && torchserve --start --model-store /models --models resnet50=resnet50.mar --foreground --ts-config=/config/config.properties",
"health": "8080:/ping",
"volume_mount": "kubeflow-user-workspace(pvc):/mnt,kubeflow-archives(pvc):/archives",
"resource_memory": "5G",
"resource_cpu": "5",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/tree/master/images/serving/torchserver/example"
}
},
"torchscript-resnet50": {
"project_name": "public",
"service_name": "resnet50-202208013",
"model_name": "resnet50",
"service_describe": "torchscript 图像分类",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/tritonserver:22.07-py3",
"model_version": "v2022.08.01.3",
"model_path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50-torchscript.pt",
"service_type": "triton-server",
"env": "",
"ports": "8000,8002",
"metrics": "",
"workdir": "",
"command": "wget https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50-torchscript.pt && mkdir -p /models/resnet50/202208013/ && cp /config/* /models/resnet50/ && cp -r resnet50-torchscript.pt /models/resnet50/202208013/model.pt && tritonserver --model-repository=/models --strict-model-config=true --log-verbose=1",
"health": "8000:/v2/health/ready",
"volume_mount": "kubeflow-user-workspace(pvc):/mnt,kubeflow-archives(pvc):/archives",
"resource_memory": "5G",
"resource_cpu": "5",
"inference_config": "\n---config.pbtxt\n\nname: \"resnet50\"\nplatform: \"pytorch_libtorch\"\nmax_batch_size: 0\ninput \n[\n {\n name: \"INPUT__0\"\n data_type: TYPE_FP32\n format: FORMAT_NCHW\n dims: [ 3, 224, 224 ]\n reshape: {\n shape: [ 1, 3, 224, 224 ]\n }\n }\n]\n \noutput \n[\n {\n name: \"OUTPUT__0\"\n data_type: TYPE_FP32\n dims: [ 1000 ]\n reshape: {\n shape: [ 1, 1000 ]\n }\n }\n]\n \n\nparameters: { key: \"DISABLE_OPTIMIZED_EXECUTION\" value: { string_value:\"true\" } }\nparameters: { key: \"INFERENCE_MODE\" value: { string_value: \"false\" } }\n",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/tree/master/images/serving/triton-server/example"
}
},
"onnx-resnet50": {
"project_name": "public",
"service_name": "resnet50-202208014",
"model_name": "resnet50",
"service_describe": "onnx 图像分类",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/tritonserver:22.07-py3",
"model_version": "v2022.08.01.4",
"model_path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.onnx",
"service_type": "triton-server",
"env": "",
"ports": "8000,8002",
"metrics": "",
"workdir": "",
"command": "wget https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.onnx && mkdir -p /models/resnet50/202208014/ && cp /config/* /models/resnet50/ && cp -r resnet50.onnx /models/resnet50/202208014/model.onnx && tritonserver --model-repository=/models --strict-model-config=true --log-verbose=1",
"health": "8000:/v2/health/ready",
"volume_mount": "kubeflow-user-workspace(pvc):/mnt,kubeflow-archives(pvc):/archives",
"resource_memory": "5G",
"resource_cpu": "5",
"inference_config": "---config.pbtxt\n\nname: \"resnet50\"\nplatform: \"onnxruntime_onnx\"\nbackend: \"onnxruntime\"\nmax_batch_size : 0\n\ninput [\n {\n name: \"input_name\"\n data_type: TYPE_FP32\n format: FORMAT_NCHW\n dims: [ 3, 224, 224 ]\n reshape { shape: [ 1, 3, 224, 224 ] }\n }\n]\noutput [\n {\n name: \"output_name\"\n data_type: TYPE_FP32\n dims: [ 1000 ]\n reshape { shape: [ 1, 1000 ] }\n }\n]\n\nparameters { key: \"intra_op_thread_count\" value: { string_value: \"10\" } }\nparameters { key: \"execution_mode\" value: { string_value: \"1\" } }\nparameters { key: \"inter_op_thread_count\" value: { string_value: \"10\" } }\n",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/tree/master/images/serving/triton-server/example"
}
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,75 +0,0 @@
{
"mysql-ui": {
"project_name": "public",
"service_name": "mysql-ui",
"service_describe": "可视化编辑mysql数据库",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/phpmyadmin",
"command": "",
"env": "PMA_HOST=mysql-service.infra\nPMA_PORT=3306\nPMA_USER=root\nPMA_PASSWORD=admin",
"ports": "80",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
},
"redis-ui": {
"project_name": "public",
"service_name": "redis-ui",
"service_describe": "可视化编辑redis数据库",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/patrikx3:latest",
"command": "",
"env": "REDIS_NAME=default\nREDIS_HOST=redis-master.infra\nREDIS_PORT=6379\nREDIS_PASSWORD=admin",
"ports": "7843",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
},
"mongo-express": {
"project_name": "public",
"service_name": "mongo-express",
"service_describe": "可视化编辑mongo数据库",
"image_name": "mongo-express:0.54.0",
"command": "",
"env": "ME_CONFIG_MONGODB_SERVER=xx.xx.xx.xx\nME_CONFIG_MONGODB_PORT=xx\nME_CONFIG_MONGODB_ENABLE_ADMIN=true\nME_CONFIG_MONGODB_ADMINUSERNAME=xx\nME_CONFIG_MONGODB_ADMINPASSWORD=xx\nME_CONFIG_MONGODB_AUTH_DATABASE=xx\nME_CONFIG_MONGODB_AUTH_USERNAME=xx\nME_CONFIG_MONGODB_AUTH_PASSWORD=xx\nVCAP_APP_HOST=0.0.0.0\nVCAP_APP_PORT=8081\nME_CONFIG_OPTIONS_EDITORTHEME=ambiance",
"ports": "8081",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
},
"neo4j": {
"project_name": "public",
"service_name": "neo4j",
"service_describe": "可视化编辑图数据库neo4j",
"image_name": "ccr.ccs.tencentyun.com/cube-studio/neo4j:4.4",
"command": "",
"env": "NEO4J_AUTH=neo4j/admin",
"ports": "7474,7687",
"volume_mount": "kubeflow-user-workspace(pvc):/mnt,/data/k8s/kubeflow/pipeline/workspace/admin/neo4j(hostpath):/var/lib/neo4j/data",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
},
"jaeger": {
"project_name": "public",
"service_name": "jaeger",
"service_describe": "jaeger链路追踪",
"image_name": "jaegertracing/all-in-one:1.29",
"command": "",
"env": "",
"ports": "16686,5775",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
},
"postgresql-ui": {
"project_name": "public",
"service_name": "postgresql-ui",
"service_describe": "可视化编辑postgresql数据库",
"image_name": "dpage/pgadmin4",
"command": "",
"env": "PGADMIN_DEFAULT_EMAIL=admin@tencent.com\nPGADMIN_DEFAULT_PASSWORD=root",
"ports": "80",
"expand": {
"help_url": "https://github.com/tencentmusic/cube-studio/blob/master/docs/example/service.md"
}
}
}

View File

@@ -1,38 +0,0 @@
{
"tf-mnist": {
"project_name": "public",
"name": "mnist",
"version": "v2022.08.01.1",
"describe": "tf mnist 图像分类 tfserving推理",
"path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/tf-mnist.tar.gz",
"framework": "tf",
"api_type": "tfserving"
},
"pytorch-resnet50": {
"project_name": "public",
"name": "resnet50",
"version": "v2022.08.01.2",
"describe": "pytorch resnet50 图像分类 torch-server推理",
"path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.mar",
"framework": "pytorch",
"api_type": "torch-server"
},
"torchscript-resnet50": {
"project_name": "public",
"name": "resnet50",
"version": "v2022.08.01.3",
"describe": "torchscript resnet50 图像分类 triton推理",
"path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50-torchscript.pt",
"framework": "pytorch",
"api_type": "triton-server"
},
"onnx-resnet50": {
"project_name": "public",
"name": "resnet50",
"version": "v2022.08.01.4",
"describe": "onnx resnet50 图像分类 triton推理",
"path": "https://docker-76009.sz.gfp.tencent-cloud.com/github/cube-studio/inference/resnet50.onnx",
"framework": "onnx",
"api_type": "triton-server"
}
}

View File

@@ -1,223 +0,0 @@
"""Defines the templating context for SQL Lab"""
from datetime import datetime, timedelta
import inspect
import json
import random
import time
from typing import Any, List, Optional, Tuple
import uuid
from dateutil.relativedelta import relativedelta
from flask import g, request
from jinja2.sandbox import SandboxedEnvironment
from myapp import app
# Template context: names made available to every Jinja template by default.
conf = app.config

BASE_CONTEXT = dict(
    datetime=datetime,
    random=random,
    relativedelta=relativedelta,
    time=time,
    timedelta=timedelta,
    uuid=uuid,
)
# Deployments may inject additional template globals through config.
BASE_CONTEXT.update(conf.get("JINJA_CONTEXT_ADDONS", {}))
def url_param(param: str, default: Optional[str] = None) -> Optional[Any]:
    """Read a URL (or POSTed) parameter for use inside a SQL Lab query.

    In SQL Lab, arbitrary "query string" parameters can be appended to the
    URL (e.g. ``?foo=bar`` on ``{domain}/myapp/sqllab``) and referenced from
    SQL such as ``SELECT * FROM foo = '{{ url_param('foo') }}'`` — the value
    is substituted at render time. Parameters also carry through from the
    explore view and dashboards to the underlying queries.

    :param param: the parameter name to look up
    :param default: the value to return when the parameter is absent
    """
    # A truthy GET parameter wins outright.
    if request.args.get(param):
        return request.args.get(param, default)
    # Otherwise fall back to the POSTed form_data blob's "url_params".
    raw = request.form.get("form_data")
    if not isinstance(raw, str):
        return default
    parsed = json.loads(raw)
    return (parsed.get("url_params") or {}).get(param, default)
def current_user_id() -> Optional[int]:
    """Return the id of the currently logged-in user, or ``None``."""
    # ``g`` has no ``user`` attribute outside a request context.
    user = getattr(g, "user", None)
    return user.id if user else None
def current_username() -> Optional[str]:
    """Return the username of the currently logged-in user, or ``None``.

    Guards against being called outside a request/app context (where ``g``
    has no ``user`` attribute), mirroring ``current_user_id`` — the original
    accessed ``g.user`` unguarded and could raise instead of returning None.
    """
    if hasattr(g, "user") and g.user:
        return g.user.username
    return None
def filter_values(column: str, default: Optional[str] = None) -> List[str]:
    """Return all values set for a particular filter as a list.

    Useful when:

    - a filter box filters a query whose select-statement column name differs
      from the filter box column, or
    - you want the filter applied inside the main query for speed.

    Both ``filters`` and ``extra_filters`` in ``form_data`` are searched.

    Usage example::

        SELECT action, count(*) as times
        FROM logs
        WHERE action in ( {{ "'" + "','".join(filter_values('action_type')) + "'" }} )
        GROUP BY action

    :param column: column/filter name to look up
    :param default: default value to return if there's no matching columns
    :return: a list of filter values
    """
    form_data = json.loads(request.form.get("form_data", "{}"))
    matches: List[str] = []
    for section in ("filters", "extra_filters"):
        for flt in form_data.get(section, []):
            if flt["col"] != column:
                continue
            val = flt["val"]
            # A filter value may be a scalar or a list of scalars.
            if isinstance(val, list):
                matches.extend(val)
            else:
                matches.append(val)
    if matches:
        return matches
    return [default] if default else []
class CacheKeyWrapper:
    """Collects extra values that feed into a query object's cache key."""

    def __init__(self, extra_cache_keys: Optional[List[Any]] = None):
        # Caller-owned list; ``None`` disables collection entirely.
        self.extra_cache_keys = extra_cache_keys

    def cache_key_wrapper(self, key: Any) -> Any:
        """Record ``key`` for cache-key computation and return it unchanged.

        Needed when all of the following hold:

        - caching is enabled,
        - the query is generated dynamically from a Jinja template, and
        - something user-specific (e.g. a username) is used as a filter.

        Example with a SQL query as a data source::

            SELECT action, count(*) as times
            FROM logs
            WHERE logged_in_user = '{{ cache_key_wrapper(current_username()) }}'
            GROUP BY action

        This guarantees results cached for ``user_1`` are **not** served to
        ``user_2``, since their cache keys differ. The same trick works for
        regular table data sources via a `Custom SQL` filter.

        :param key: any value that should influence the cache key
        :return: the original ``key`` value, unmodified
        """
        collected = self.extra_cache_keys
        if collected is not None:
            collected.append(key)
        return key
class BaseTemplateProcessor:
    """Base class for database-specific Jinja contexts.

    ``process_template`` relies on a bit of magic: only the context for the
    active database (a ``models.Database`` object) is instantiated and bound
    to the template context, so engine methods have access to it. That is how
    ``{{ hive.latest_partition('mytable') }}`` knows which database it is
    operating in.

    Consequently, instance methods are only available for the active database
    and receive the ``models.Database`` object and schema name. For globally
    available helpers, use ``@classmethod``.
    """

    # Backend name (e.g. "hive"); when set, the processor instance is
    # exposed in the template context under that name.
    engine: Optional[str] = None

    def __init__(
        self,
        database=None,
        query=None,
        table=None,
        extra_cache_keys: Optional[List[Any]] = None,
        **kwargs
    ):
        self.database = database
        self.query = query
        # Schema resolution: prefer the query's schema, then the table's.
        if query and query.schema:
            self.schema = query.schema
        elif table:
            self.schema = table.schema
        else:
            self.schema = None
        context = {
            "url_param": url_param,
            "current_user_id": current_user_id,
            "current_username": current_username,
            "cache_key_wrapper": CacheKeyWrapper(extra_cache_keys).cache_key_wrapper,
            "filter_values": filter_values,
            "form_data": {},
        }
        # Caller-supplied names, then the module-wide defaults, then the
        # engine binding itself (when a subclass declares one).
        context.update(kwargs)
        context.update(BASE_CONTEXT)
        if self.engine:
            context[self.engine] = self
        self.context = context
        self.env = SandboxedEnvironment()

    def process_template(self, sql: str, **kwargs) -> str:
        """Render ``sql`` as a Jinja template in the sandboxed environment.

        >>> sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'"
        >>> process_template(sql)
        "SELECT '2017-01-01T00:00:00'"
        """
        kwargs.update(self.context)
        return self.env.from_string(sql).render(kwargs)
# Registry mapping engine name -> processor class, built by scanning module
# globals for BaseTemplateProcessor subclasses. Snapshot the names first so
# the globals dict is not mutated while being iterated.
template_processors = {}
for _name in tuple(globals().keys()):
    candidate = globals()[_name]
    if candidate and inspect.isclass(candidate) and issubclass(candidate, BaseTemplateProcessor):
        template_processors[candidate.engine] = candidate
def get_template_processor(database, table=None, query=None, **kwargs):
    """Return a template processor for ``database``'s backend.

    Falls back to ``BaseTemplateProcessor`` when no engine-specific
    processor is registered for the backend.
    """
    processor_cls = template_processors.get(database.backend, BaseTemplateProcessor)
    return processor_cls(database=database, table=table, query=query, **kwargs)