diff --git a/CHANGES.txt b/CHANGES.txt index cb099ca3e8..4cb29692bd 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,14 @@ +* v2.5.5 + * added result.cfg to configure CK dashboards + * improved module:result to push results to CK dashboards + * improved module:dashboard to work with new CK dashboards + * improved module:wfe to work with new CK dashboards + * added module:ck-platform to work with cKnowledge.io platform + (moved cbench functionality from the CK incubator to the CK module + to work with cKnowledge.io dashboards directly) + * provided better support to work with CK modules + as standard Python packages (see module:ck-platform as example) + * V2.5.4 * added support to automatically add simple packages from Git: ck add package:my-git-repo --git={URL} --tags=my-repo --env_ext=MY_GIT_REPO diff --git a/README.md b/README.md index e63f685282..490a5c9856 100755 --- a/README.md +++ b/README.md @@ -12,16 +12,19 @@ Windows: [![Windows Build status](https://ci.appveyor.com/api/projects/status/iw [![Documentation Status](https://readthedocs.org/projects/ck/badge/?version=latest)](https://ck.readthedocs.io/en/latest/?badge=latest) [![Coverage Status](https://coveralls.io/repos/github/ctuning/ck/badge.svg)](https://coveralls.io/github/ctuning/ck) -## License +## Versions and licenses -* **V2+** : Apache 2.0 -* **V1.x** : BSD 3-clause +* **V3** (planning) : **Apache 2.0** +* **V2+** (development) : **Apache 2.0** +* **V1.x** (maintenance and bug fixes) : **BSD 3-clause** ## News * [Project website](https://cKnowledge.org) * [CK-powered MLPerf™ benchmark automation](https://github.com/ctuning/ck/blob/master/docs/mlperf-automation/README.md) * [Community projects to improve and redesign CK](https://github.com/ctuning/ck/blob/master/incubator/README.md) +* [AI/ML repository with all aggregated CK components](https://github.com/ctuning/ai) +* [OctoML's CK-based MLOps/MLPerf repository](https://github.com/octoml/mlops) ## Overview @@ -35,7 +38,8 @@ Our goal is to help researchers and practitioners share, reuse and extend their in the form of portable workflows, automation actions and reusable artifacts with a common API, CLI, and meta description. 
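In practice, "a common API" means every automation action can be invoked either from the command line or through a single Python entry point. A minimal sketch (illustrative only; it assumes the `ck` package is installed and reuses the same `detect`/`platform` action that the new `ck-platform` web service calls internally):

```python
# Minimal sketch of the unified CK Python API.
# The same action is available on the CLI as: ck detect platform
import json
import ck.kernel as ck

# Every CK automation action goes through the same entry point and returns a dict
# with a "return" code (0 on success) plus action-specific keys.
r = ck.access({'action': 'detect',
               'module_uoa': 'platform'})
if r['return'] > 0:
    print('CK error: ' + r['error'])
else:
    print(json.dumps(r, indent=2, sort_keys=True))
```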
See how CK helps to automate benchmarking, optimization and design space exploration of [AI/ML/software/hardware stacks](https://cknowledge.io/result/crowd-benchmarking-mlperf-inference-classification-mobilenets-all/), -simplifies [MLPerf™](https://mlperf.org) submissions and supports collaborative, reproducible and reusable ML Systems research: +simplifies [MLPerf™ inference benchmark](https://github.com/ctuning/ck/blob/master/docs/mlperf-automation/README.md) submissions +and supports collaborative, reproducible and reusable ML Systems research: * [ACM TechTalk](https://www.youtube.com/watch?v=7zpeIVwICa4) * [AI/ML/MLPerf™ automation workflows and components from the community](https://github.com/ctuning/ai); @@ -47,7 +51,7 @@ simplifies [MLPerf™](https://mlperf.org) submissions and supports collabor * [Automating MLPerf(tm) inference benchmark and packing ML models, data sets and frameworks as CK components with a unified API and meta description](https://github.com/ctuning/ck/blob/master/docs/mlperf-automation/README.md) * Providing a common format to share artifacts at ML, systems and other conferences: [video](https://youtu.be/DIkZxraTmGM), [Artifact Evaluation](https://cTuning.org/ae) * Redesigning CK together with the community based on user feedback -* [Other real-world use cases](https://cKnowledge.org/partners.html) from MLPerf™, Arm, General Motors, IBM, the Raspberry Pi foundation, ACM and other great partners; +* [Other real-world use cases](https://cKnowledge.org/partners.html) from MLPerf™, Qualcomm, Arm, General Motors, IBM, the Raspberry Pi foundation, ACM and other great partners; ## Documentation diff --git a/ck/kernel.py b/ck/kernel.py index f1fdb94bf0..1ddb8c3c7c 100755 --- a/ck/kernel.py +++ b/ck/kernel.py @@ -28,7 +28,7 @@ # We use 3 digits for the main (released) version and 4th digit for development revision -__version__ = "2.5.4" +__version__ = "2.5.5" # Do not use characters (to detect outdated version)! 
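Because module:ck-platform and result.cfg only ship from this release on, downstream scripts may want to check the kernel version before relying on them. A minimal sketch (the required version tuple and the upgrade hint are illustrative assumptions):

```python
# Minimal sketch: refuse to run if the installed CK kernel predates v2.5.5,
# which introduced module:ck-platform and result.cfg. The version string is the
# __version__ defined in ck/kernel.py above; the pip command is just a suggestion.
import ck.kernel as ck

required = (2, 5, 5)
installed = tuple(int(x) for x in ck.__version__.split('.')[:3])
if installed < required:
    raise SystemExit('CK %s is too old, please upgrade (e.g. "pip install ck --upgrade")'
                     % ck.__version__)
```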
# Import packages that are global for the whole kernel diff --git a/ck/repo/module/.cm/alias-a-ck-platform b/ck/repo/module/.cm/alias-a-ck-platform new file mode 100644 index 0000000000..49842da063 --- /dev/null +++ b/ck/repo/module/.cm/alias-a-ck-platform @@ -0,0 +1 @@ +2ba7a9eb1047fa35 diff --git a/ck/repo/module/.cm/alias-a-result.cfg b/ck/repo/module/.cm/alias-a-result.cfg new file mode 100644 index 0000000000..e55ccfa2cf --- /dev/null +++ b/ck/repo/module/.cm/alias-a-result.cfg @@ -0,0 +1 @@ +68d174e82bfa3f79 diff --git a/ck/repo/module/.cm/alias-u-2ba7a9eb1047fa35 b/ck/repo/module/.cm/alias-u-2ba7a9eb1047fa35 new file mode 100644 index 0000000000..27944a6393 --- /dev/null +++ b/ck/repo/module/.cm/alias-u-2ba7a9eb1047fa35 @@ -0,0 +1 @@ +ck-platform diff --git a/ck/repo/module/.cm/alias-u-68d174e82bfa3f79 b/ck/repo/module/.cm/alias-u-68d174e82bfa3f79 new file mode 100644 index 0000000000..caa81cea25 --- /dev/null +++ b/ck/repo/module/.cm/alias-u-68d174e82bfa3f79 @@ -0,0 +1 @@ +result.cfg diff --git a/ck/repo/module/ck-platform/.cm/desc.json b/ck/repo/module/ck-platform/.cm/desc.json new file mode 100644 index 0000000000..0967ef424b --- /dev/null +++ b/ck/repo/module/ck-platform/.cm/desc.json @@ -0,0 +1 @@ +{} diff --git a/ck/repo/module/ck-platform/.cm/info.json b/ck/repo/module/ck-platform/.cm/info.json new file mode 100644 index 0000000000..b461eff49a --- /dev/null +++ b/ck/repo/module/ck-platform/.cm/info.json @@ -0,0 +1,20 @@ +{ + "backup_data_uid": "2ba7a9eb1047fa35", + "backup_module_uid": "032630d041b4fd8a", + "backup_module_uoa": "module", + "control": { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T07:25:18.688282", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + "data_name": "ck-platform" +} diff --git a/ck/repo/module/ck-platform/.cm/meta.json b/ck/repo/module/ck-platform/.cm/meta.json new file mode 100644 index 0000000000..db575c6ce9 --- /dev/null +++ b/ck/repo/module/ck-platform/.cm/meta.json @@ -0,0 +1,32 @@ +{ + "actions": { + "access": { + "desc": "access platform" + }, + "init_graph": { + "desc": "init graph" + }, + "login": { + "desc": "login to the CK platform" + }, + "publish": { + "desc": "publish CK component" + }, + "push_result": { + "desc": "push result" + }, + "setup": { + "desc": "setup the CK platform" + }, + "versions": { + "desc": "list versions of a CK component" + } + }, + "actions_redirect": {}, + "copyright": "See CK COPYRIGHT.txt for copyright details", + "desc": "cKnowledge.io platform", + "developer": "Grigori Fursin", + "developer_email": "Grigori.Fursin@cTuning.org", + "developer_webpage": "http://fursin.net", + "license": "See CK LICENSE.txt for licensing details" +} diff --git a/ck/repo/module/ck-platform/.cm/updates.json b/ck/repo/module/ck-platform/.cm/updates.json new file mode 100644 index 0000000000..2f900f1d37 --- /dev/null +++ b/ck/repo/module/ck-platform/.cm/updates.json @@ -0,0 +1,102 @@ +{ + "control": [ + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T07:25:31.333861", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": 
"Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T07:25:40.268085", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T08:48:34.498166", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T08:48:43.273879", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T08:52:33.681180", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T08:52:47.745312", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-26T08:52:52.861782", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + } + ] +} diff --git a/incubator/cbench/LICENSE.CodeReef.txt b/ck/repo/module/ck-platform/LICENSE.txt similarity index 99% rename from incubator/cbench/LICENSE.CodeReef.txt rename to ck/repo/module/ck-platform/LICENSE.txt index af7f10361a..7f6da14904 100644 --- a/incubator/cbench/LICENSE.CodeReef.txt +++ b/ck/repo/module/ck-platform/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright 2019-2020 CodeReef. All rights reserved. +Copyright 2020-2021 cTuning foundation. All rights reserved. 
Apache License Version 2.0, January 2004 diff --git a/ck/repo/module/ck-platform/README.md b/ck/repo/module/ck-platform/README.md new file mode 100644 index 0000000000..0f033fea7f --- /dev/null +++ b/ck/repo/module/ck-platform/README.md @@ -0,0 +1,9 @@ +## Init graph + +Check `test/init-graph.bat` + +https://cknowledge.io/c/result/fgg-test/?v=1.0.0#gfursin_1 + +## Push results to a graph + +Check `test/push-to-graph.bat` diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__init__.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__init__.py new file mode 100644 index 0000000000..1185466556 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__init__.py @@ -0,0 +1,5 @@ +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +__version__ = "1.3.1" diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__main__.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__main__.py new file mode 100644 index 0000000000..7c4511dd70 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/__main__.py @@ -0,0 +1,7 @@ +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import main + +main.cli() diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/client.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/client.py new file mode 100644 index 0000000000..e99f468884 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/client.py @@ -0,0 +1,1236 @@ +# +# Web service for the client +# Partially based on CK web service +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . import comm + +import ck.kernel as ck + +import json +import sys +import os +import tempfile +import cgi +#import ssl +import time +import requests + +# Import various modules while supporting both Python 2.x and 3.x +try: + from http.server import BaseHTTPRequestHandler, HTTPServer +except: + from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer + +try: + import urllib.parse as urlparse +except: + import urlparse + +try: + from urllib.parse import quote as urlquote +except: + from urllib import quote as urlquote + +try: + from urllib.parse import unquote as urlunquote +except: + from urllib import unquote as urlunquote + +#try: +# import http.cookies as Cookie +#except: +# import Cookie + +try: + from socketserver import ThreadingMixIn +except: + from SocketServer import ThreadingMixInqZBMfAaH + + + +context_types={ + "bz2": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/x-bzip2" + }, + "con": { + "Content-type": "text/plain; charset=utf-8" + }, + "css": { + "Content-disposition": "inline; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "text/css" + }, + "csv": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "text/csv" + }, + "eps": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/postscript" + }, + "gif": { + "Content-type": "image/gif" + }, + "gz": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/x-gzip" + }, + "html": { + "Content-type": "text/html; charset=utf-8" + }, + "jpeg": { + "Content-type": "image/jpeg" + }, + "jpg": { + "Content-type": "image/jpeg" + }, + "js": { + "Content-disposition": "inline; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": 
"text/javascript" + }, + "json": { + " -type": "w/json; charset=utf-8" + }, + "pdf": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/pdf" + }, + "png": { + "Content-type": "image/png" + }, + "ps": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/postscript" + }, + "txt": { + "Content-type": "text/plain; charset=utf-8" + }, + "unknown": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/octet-stream" + }, + "zip": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/zip" + } + } + +# URL to tunnel requests to (useful for development boards and Raspberry Pi) +tunnel_url='' + +# Skip print for hearbeat +heartbit_started=False +get_status_started=False + +############################################################################## +# Class to handle requests in separate threads + +class ThreadedHTTPServer(ThreadingMixIn, HTTPServer): + + """ + """ + +############################################################################## +# Send error to HTTP stream + +def web_out(i): + """ + Input: { + http - http object + type - content type + bin - bytes to output + (filename) - if !='', substitute filename in headers + } + + Output: { + return - 0 + } + """ + + http=i['http'] + bin=i['bin'] + + tp=i['type'] + + if tp=='' or tp=='web': tp='html' + + tpx=context_types.get(tp,{}) + if len(tpx)==0: + tp='unknown' + tpx=cfg['content_types'][tp] + + fn=i.get('filename','') + + # Output + for k in sorted(tpx.keys()): + v=tpx[k] + if fn!='': v=v.replace('$#filename#$', fn) + http.send_header(k,v) + + http.send_header('Access-Control-Allow-Origin', '*') + http.send_header('Content-Length', str(len(bin))) + http.end_headers() + + http.wfile.write(bin) + + return {'return':0} + +############################################################################## +# Send error to HTTP stream + +def web_err(i): + """ + Input: { + http - http object + type - content type + bin - bytes to output + } + + Output: { + return - 0 + } + """ + + http=i['http'] + tp=i['type'] + bin=i['bin'] + + try: bin=bin.decode('utf-8') + except Exception as e: pass + + if tp=='json': + rx=ck.dumps_json({'dict':{'return':1, 'error':bin}}) + if rx['return']>0: + bin2=rx['error'].encode('utf8') + else: + bin2=rx['string'].encode('utf-8') + elif tp=='con': + bin2=bin.encode('utf8') + else: + bin2=b'
<html><body><pre>'+bin.encode('utf8')+b'</pre></body></html>
' + + i['bin']=bin2 + return web_out(i) + +############################################################################## +# Process CK web service request (both GET and POST) + +def process_web_request(i): + """ + + Input: { + http - Python http object + } + + Output: { None } + """ + + global heartbit_started, get_status_started + + from . import solution + + # http object + http=i['http'] + + # Parse GET variables and path + xget={} + xpath={'host':'', 'port':'', 'first':'', 'rest':'', 'query':''} # May be used in the future + + xt='json' + + xpath['host']=i.get('host','') + xpath['port']=i.get('port','') + + # Check GET variables + if http.path!='': + http.send_response(200) + + a=urlparse.urlparse(http.path) + xp=a.path + xr='' + + if xp.startswith('/'): xp=xp[1:] + + u=xp.find('/') + if u>=0: + xr=xp[u+1:] + xp=xp[:u] + + xt=xp + + xpath['first']=xp + xpath['rest']=xr + xpath['query']=a.query + b=urlparse.parse_qs(a.query, keep_blank_values=True, ) + + xget={} + for k in b: + xget[k]=urlunquote(b[k][0]) + if sys.version_info[0]<3: + xget[k]=xget[k].decode('utf8') + + # Check POST + xpost={} + xpost1={} + + try: + headers = http.headers + content_type = headers.get('content-type') + ctype='' + if content_type != None: + ctype, pdict = cgi.parse_header(content_type) + # Python3 cgi.parse_multipart expects boundary to be bytes, not str. + if sys.version_info[0]<3 and 'boundary' in pdict: + pdict['boundary'] = pdict['boundary'].encode() + + if ctype == 'multipart/form-data': + if sys.version_info[0]<3: + xpost1 = cgi.parse_multipart(http.rfile, pdict) + else: + xxpost1 = cgi.FieldStorage(fp=http.rfile, headers=headers, environ={'REQUEST_METHOD':'POST'}) + for k in xxpost1.keys(): + xpost1[k]=[xxpost1[k].value] + elif ctype == 'application/x-www-form-urlencoded': + length = int(http.headers.get('content-length')) + s=http.rfile.read(length) + if sys.version_info[0]>2: s=s.decode('utf8') + xpost1 = cgi.parse_qs(s, keep_blank_values=1) + + except Exception as e: + web_err({'http':http, 'type':xt, 'bin':bin}) + ck.out(ck.cfg['error']+bin.decode('utf8')) + return + + # Post processing + for k in xpost1: + v=xpost1[k] + if k.endswith('[]'): + k1=k[:-2] + xpost[k1]=[] + for l in v: + xpost[k1].append(urlunquote(l)) + else: + if k!='file_content': + xpost[k]=urlunquote(v[0]) + else: + xpost[k]=v[0] + + if k=='file_content': + fcrt=xpost1.get('file_content_record_to_tmp','') + if (type(fcrt)==list and len(fcrt)>0 and fcrt[0]=='yes') or fcrt=='yes': + fd, fn=tempfile.mkstemp(suffix='.tmp', prefix='ck-') # suffix is important - CK will delete such file! + os.close(fd) + + f=open(fn,'wb') + f.write(xpost[k]) + f.close() + + xpost[k+'_uploaded']=fn + del(xpost[k]) + k+='_uploaded' + else: + import base64 + xpost[k+'_base64']=base64.urlsafe_b64encode(xpost[k]).decode('utf8') + del(xpost[k]) + k+='_base64' + + if sys.version_info[0]<3: + xpost[k]=xpost[k].decode('utf8') + + # Prepare input and check if CK json present + ii=xget + ii.update(xpost) + + act=ii.get('action','') + + # Generate tmp file (to output images for example) + fd, fn=tempfile.mkstemp(suffix='.tmp', prefix='ck-') # suffix is important - CK will delete such file! 
+ os.close(fd) + if os.path.isfile(fn): os.remove(fn) + + # Get tmp dir + p=tempfile.gettempdir() + + # Execute command ********************************************************* +# ck.out('*** Received action request: ' + act) + if act=='get_host_platform_info': + r=ck.access({'action':'detect', + 'module_uoa':'platform'}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + s=json.dumps(r, indent=2, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + #############################################################################################################3 + elif act=='init_workflow': + + data_id=ii.get('data_id','') + + r=solution.init({'uid':data_id}) + + # start program + # r=ck.access({'action':'run', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name',''), + # 'cmd_key': 'use_continuous', + # 'deps.python': 'a699c0c7de43a121', + # 'quiet': 'yes'}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} + s=json.dumps(solution, indent=4, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + #############################################################################################################3 + elif act=='run_program': + + data_id=ii.get('data_id','') + + r=solution.run({'uid':data_id}) + + # start program + # r=ck.access({'action':'run', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name',''), + # 'cmd_key': 'use_continuous', + # 'deps.python': 'a699c0c7de43a121', + # 'quiet': 'yes'}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} + s=json.dumps(solution, indent=4, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + #############################################################################################################3 + elif act=='benchmark_program': + + data_id=ii.get('data_id','') + + r=solution.benchmark({'uid':data_id}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + +# solution = {'status': True} +# s=json.dumps(solution, indent=4, sort_keys=True) + # Need to pass info about graphs + s=json.dumps(r, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + #############################################################################################################3 + elif act=='publish_result': + + data_id=ii.get('data_id','') + + r=solution.publish_result({'uid':data_id}) + + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} +# s=json.dumps(solution, indent=4, sort_keys=True) + s=json.dumps(r, sort_keys=True) + + 
ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + #############################################################################################################3 + elif act=='get_program_result_image': + + data_id=ii['data_id'] + program_name=ii['program_name'] + + jpeg=ii.get('jpeg','') + + ck_entry=program_name.split(':') + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + p=r['path'] + + meta=r['dict'] + workflow_output_dir=meta.get('workflow_output_dir','') + + workflow_repo=meta.get('workflow_repo_url','') + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + cur_dir=os.path.join(p, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + if workflow_output_dir!='': + cur_dir=os.path.join(cur_dir, workflow_output_dir) + + # r=ck.access({'action':'find', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name','')}) + # + # if r['return']>0: + # # Process error properly + # web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + # return + + # cur_dir = 'D:\\Work1\\CK\\ck-repos\\local\\solution\\demo-obj-detection-kitti-min-tf-cpu-win\\CK\\ck-tensorflow\\program\\squeezedet\\tmp\\out' #os.path.join(r['path'],"tmp/out") + # cur_dir='/home/cindex/CK/local/solution/demo-obj-detection-self-driving-win/CK/ck-tensorflow/program/squeezedet/tmp/out' + # cur_dir='/home/cindex/CK/local/solution/demo-obj-detection-kitti-min-tf-cpu-win/CK/ck-tensorflow/program/squeezedet/tmp/out' + + # find the penultimate image provided + try: + st = False + filepath = '' + filepath_buf = '' + + found_files=[] + + ck.out('') + ck.out('Checking for output files in directory:') + ck.out(' '+cur_dir) + ck.out('') + + sorted_list=sorted(os.listdir(cur_dir)) + for file in sorted_list: + if file.endswith(".png") and file.startswith("boxed_"): + found_files.append(file) + if len(found_files)==3: + break + except: + err = 'no files available' + web_err({'http':http, 'type':xt, 'bin':err.encode('utf8')}) + return + + if len(found_files)==0: + err = 'no files available' + web_err({'http':http, 'type':xt, 'bin':err.encode('utf8')}) + return + + if len(found_files)==1: + filepath='' + filepath_buf=found_files[0] + elif len(found_files)==2: + filepath='' + filepath_buf=found_files[1] + elif len(found_files)==3: + filepath=found_files[0] + filepath_buf=found_files[1] + + # Check if convert to jpeg + file_type='png' + pinp=os.path.join(cur_dir, filepath_buf) + + if jpeg=='yes': + quality=ii.get('jpeg_quality','') + if quality==None or quality=='': quality='70' + + pout=os.path.join(cur_dir, filepath_buf+'.jpg') + + s='convert -quality '+quality+' '+pinp+' '+pout + + ck.out('') + ck.out(' Converting to jpeg: '+s) + + os.system(s) + + pinp=pout + filepath_buf+='.jpg' + file_type='jpg' + + # First file will be deleted (only if 2 afterwards), second served + ck.out(' Loading file '+ filepath_buf) + r=ck.load_text_file({'text_file':pinp, 'keep_as_bin':'yes'}) + + if jpeg=='yes': + if os.path.isfile(pinp): + os.remove(pinp) + + # Remove first + if filepath!='': + ck.out(' Trying to delete file '+ filepath) + x=os.path.join(cur_dir, filepath) + if os.path.isfile(x): + os.remove(x) + + # Then finish checking previous one + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 
'type':file_type, 'bin':bout}) + + return + + #############################################################################################################3 + elif act=='process_webcam': + + data_id=ii['data_id'] + program_name=ii['program_name'] + + ck_entry=program_name.split(':') + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! + + meta=r['dict'] + + # Find workflow output path + workflow_input_dir=meta.get('workflow_input_dir','') + workflow_output_dir=meta.get('workflow_output_dir','') + workflow_repo=meta.get('workflow_repo_url','') + + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + workflow_dir=os.path.join(pp, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + + if workflow_input_dir!='': + p=os.path.join(workflow_dir, workflow_input_dir) + else: + p = os.path.join(workflow_dir, "tmp", "input") + + if not os.path.isdir(p): os.makedirs(p) + + if workflow_output_dir!='': + pout=os.path.join(workflow_dir, workflow_output_dir) + else: + pout=os.path.join(workflow_dir, "tmp") + + if not os.path.isdir(pout): os.makedirs(pout) + + # Record image + image_uri=xpost.get('image_uri','') + + x='data:image/jpeg;base64,' + if image_uri.startswith(x): + image64=image_uri[len(x):] + + # Finding last file and incrementing + ff='cr-stream-' + + l=os.listdir(p) + + inum=0 + ffound='' + for f in os.listdir(p): + if f.startswith(ff) and f.endswith('.jpg'): + j=f.find('.') + num=f[len(ff):j] + if int(num)>inum: + inum=int(num) + ffound=f + + # New logic: if file already exists, just skip next request from web (otherwise many parallel requests) + # When program starts, it should clean input/output to let this code continue processing image + if (inum>0): + time.sleep(1) + ss='request skipped because there is already file in queue' + ck.out(' Warning: '+ss+' ('+os.path.join(p,ffound)+') ...') + s='{"return":16, "error":"'+ss+'"}' + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + # Otherwise continue processing ... + if inum==0: + inum+=1 + sinum=str(inum) + filename = ff+('0'*(8-len(sinum)))+sinum + + filename2=filename+'.jpg' + pf=os.path.join(p, filename2) + + r=ck.convert_upload_string_to_file({'file_content_base64':image64, 'filename':pf}) + if r['return']>0: return r + + ck.out(' !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!') + ck.out(' Recorded external image to '+pf) + + # Need extra converting + pp1=os.path.join(pp, 'support-script-convert.sh') + if os.path.isfile(pp1): + ck.out('') + ck.out('Extra image processing ...') + ck.out('') + + extra_cmd='cd "'+p+'"\n' + extra_cmd+='. "'+pp1+'" '+filename2+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + else: + sinum=str(inum) + filename = ff+('0'*(8-len(sinum)))+sinum + + filename2=filename+'.jpg' + pf=os.path.join(p, filename2) + + # Need extra pushing + pp1=os.path.join(pp, 'support-script-push.sh') + if os.path.isfile(pp1): + ck.out('') + ck.out('Extra image pushing to device ...') + ck.out('') + + extra_cmd='cd "'+p+'"\n' + extra_cmd+='. 
"'+pp1+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + # If Android-like device wait for the file ... + ppull=os.path.join(pp, 'support-script-pull.sh') + + # Waiting for output file + poutf=os.path.join(pout, filename +'.json') + + if not os.path.isfile(poutf): + ck.out ('Waiting for output file: '+poutf) + + while not os.path.isfile(poutf): + # Check if need to pull + if os.path.isfile(ppull): + ck.out('Trying to pull from device ...') + + extra_cmd='cd "'+pout+'"\n' + extra_cmd+='export SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CR_SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CODEREEF_SOLUTION_PATH="'+pp+'"\n' # Keeping for compatibility with older version + extra_cmd+='. "'+ppull+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + time.sleep(0.1) + + ck.out('') + ck.out('Found solution!') + ck.out('') + + with open(poutf) as json_file: + solution = json.load(json_file) + ck.out(json.dumps(solution, indent=2)) + + if os.path.isfile(poutf): + os.remove(poutf) + + if inum==1 and os.path.isfile(pf): + ck.out(' REMOVING '+pf) + os.remove(pf) + + ck.out('') + + s=json.dumps(solution, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + #############################################################################################################3 + elif act=='get_image': + num=ii.get('num','') + inum=int(num) + sinum=str(inum) + + # Finding last file and incrementing + ff='cr-stream-' + pf=os.path.join(p, ff+('0'*(8-len(sinum)))+sinum+'.jpg') + + ck.out(' Loaded file '+pf) + + r=ck.load_text_file({'text_file':pf, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 'type':'jpeg', 'bin':bout}) + + return + + #############################################################################################################3 + elif act=='get_result': + + data_id=ii['data_id'] + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! + + meta=r['dict'] + + program_name = meta.get('workflow','') + ck_entry=program_name.split(':') + + # Find workflow output path + result_file=meta.get('result_file','') + workflow_repo=meta.get('workflow_repo_url','') + + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + workflow_dir=os.path.join(pp, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + + if result_file!='': + pout=os.path.join(workflow_dir, result_file) + else: + pout=os.path.join(workflow_dir, "tmp","tmp-ck-timer.json") + + # if not os.path.isdir(pout): os.makedirs(pout) + + + # If Android-like device wait for the file ... 
+ ppull=os.path.join(pp, 'support-script-pull.sh') + + # Waiting for output file + if not os.path.isfile(pout): + ck.out ('Waiting for output file: '+pout) + + while not os.path.isfile(pout): + # Check if need to pull + if os.path.isfile(ppull): + ck.out('Trying to pull from device ...') + + extra_cmd='cd "'+pout+'"\n' + extra_cmd+='export SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CR_SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CODEREEF_SOLUTION_PATH="'+pp+'"\n' # Keeping for compatibility with older version + extra_cmd+='. "'+ppull+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + time.sleep(0.1) + + ck.out('') + ck.out('Found solution!') + ck.out('') + + rx=ck.load_json_file({'json_file':pout}) + if rx['return']>0: return rx + + rx=ck.flatten_dict(rx) + if rx['return']>0: return rx + + rdf=rx['dict'] + crdf={} + + # Remove first ## (do not need here) + for k in rdf: + v=rdf[k] + if k.startswith('##'): k=k[2:] + crdf[k]=v + ck.out(json.dumps(crdf, indent=2)) + + # if os.path.isfile(pout): + # os.remove(pout) + + # if inum==1 and os.path.isfile(pf): + # ck.out(' REMOVING '+pf) + # os.remove(pf) + + ck.out('') + + s=json.dumps(crdf, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + elif act=='get_status': + data_id=ii['data_id'] + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! + tmp_solStatus=os.path.join(pp, "tmp", "status.json") + + rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: return rx + + if not get_status_started: + ck.out(json.dumps(rx, indent=2)) + + rdf=rx['dict'] + + if not get_status_started: + ck.out('') + + s=json.dumps(rdf, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + get_status_started=True + + return + + #############################################################################################################3 + elif act=='heartbit': + + locdir = os.path.dirname(os.path.realpath(__file__)) + if not heartbit_started: + ck.out(' Local directory: '+locdir) + + # Finding last file and incrementing + pf=os.path.join(locdir, 'static/favicon.ico') + + if not heartbit_started: + ck.out(' Loaded file '+pf) + + heartbit_started=True + + r=ck.load_text_file({'text_file':pf, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 'type':'jpeg', 'bin':bout}) + + return + + + r={'return':0} + xt='web' + bout=b'TEST WORKS' + + web_out({'http':http, 'type':xt, 'bin':bout}) + return + + # Process output + if r['return']>0: + if os.path.isfile(fn): os.remove(fn) + + bout=r['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, + 'type':xt, + 'bin':bout}) + return + + # If json or web + # Try to load output file + if not os.path.isfile(fn): + web_err({'http':http, + 'type':xt, + 'bin':b'Output file was not created, see output ('+r['std'].encode('utf8')+b')!'}) + return + + r=ck.load_text_file({'text_file':fn, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + + 
return + + bin=r['bin'] + + # Process JSON output from file + fx='' + + if sys.version_info[0]>2: bin=bin.decode('utf-8') + + ru=ck.convert_json_str_to_dict({'str':bin, 'skip_quote_replacement':'yes'}) + if ru['return']>0: + bout=ru['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + + return + + rr=ru['dict'] + if rr['return']>0: + bout=rr['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + return + + # Check if file was returned + fr=False + + if 'file_content_base64' in rr and rr.get('filename','')!='': + fr=True + + # Check if download + if (xt=='web' and fr) or (act=='pull' and xt!='json'): + import base64 + x=rr.get('file_content_base64','') + + fx=rr.get('filename','') + if fx=='': fx=ck.cfg['default_archive_name'] + + # Fixing Python bug + if sys.version_info[0]==3 and sys.version_info[1]<3: + x=x.encode('utf-8') + else: + x=str(x) + bin=base64.urlsafe_b64decode(x) # convert from unicode to str since base64 works on strings + # should be safe in Python 2.x and 3.x + + # Process extension + fn1, fne = os.path.splitext(fx) + if fne.startswith('.'): fne=fne[1:] + if fne!='': xt=fne + else: xt='unknown' + else: + # Check and output html + if rr.get('html','')!='': + bin=rr['html'].encode('utf-8') + else: + if sys.version_info[0]>2: # Unknown output + bin=bin.encode('utf-8') + + web_out({'http':http, 'type':xt, 'bin':bin, 'filename':fx}) + + return {'return':0} + +############################################################################## +# Tunnel functionality + +def process_web_request_post_via_tunnel(i): + + http=i['http'] + post=(i.get('post','')=='yes') + + target_url=tunnel_url+http.path + + ck.out('* Tunneling **************************************************************') + + try: + + if post: + post_body = http.rfile.read(int(http.headers.get_all('content-length', 0)[0])) + + parsed_headers={} + for h in http.headers: + parsed_headers[h]=http.headers[h] + + if post: receive = requests.post(target_url, headers=parsed_headers, verify=False, data=post_body, ) + else: receive = requests.get (target_url, headers=parsed_headers, verify=False) + + http.send_response(receive.status_code) + + received_headers = receive.headers + for h in received_headers: + h1=h.lower() + if '-encoding' not in h1 and h1!='content-length': http.send_header(h, received_headers[h]) + + http.send_header('Content-Length', len(receive.content)) + http.end_headers() + + http.wfile.write(receive.content) + + except Exception as e: + print ('Error: '+format(e)) + http.send_error(500, 'problem accessing remote host') + + return + +############################################################################## +# Class to handle web service requests + +class server_handler(BaseHTTPRequestHandler): + + """ + Input: Python http handler + Output: None + """ + + # Process only GET + def do_GET(self): + if tunnel_url!='': process_web_request_post_via_tunnel({'http':self}) + else: process_web_request({'http':self}) + return + + # Process GET and POST + def do_POST(self): + if tunnel_url!='': process_web_request_post_via_tunnel({'http':self, 'post':'yes'}) + else: process_web_request({'http':self}) + return + + def log_request(self, code='-', size='-'): + self.log_message('"%s" %s %s', self.requestline, str(code), str(size)) + return + + def log_error(self, format, *args): + self.log_message(format, *args) + return + 
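Once `start()` below is running with its defaults (`localhost:4444`), the service can be exercised with a plain HTTP GET; a minimal sketch of a local client (host, port and the `requests` dependency mirror what the code above already uses, everything else is illustrative):

```python
# Minimal sketch of a local client for the web service implemented above.
# Assumes the service was started with the defaults: start({'host': 'localhost', 'port': '4444'}).
import json
import requests

# GET parameters are parsed by process_web_request(); "get_host_platform_info"
# runs "ck detect platform" on the serving machine and returns the result as JSON.
r = requests.get('http://localhost:4444/', params={'action': 'get_host_platform_info'})
print(json.dumps(r.json(), indent=2, sort_keys=True))
```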
+########################################################################### +# Start web service + +def start(i): + global tunnel_url + + # Check tunnel URL + tunnel=i.get('tunnel','') + if tunnel!=None and tunnel!='': + tunnel_url=tunnel + + ck.out('All web requests will be tunneled to '+tunnel_url) + + host=i.get('host') + if host=='' or host==None: host='localhost' + + port=i.get('port') + if port=='' or port==None: port='4444' + + # Assemble URL. + url=host+':'+port + + ck.out('Starting web service for the client on '+url+' ...') + ck.out('') + + sys.stdout.flush() + + # We do not need secure HTTPS connection here since the user + # runs webbrowser on her/his machine and communicates with + # the CB service on the same machine via 127.0.0.1 + # while avoiding Internet! + + # Still it's possible to start this service with SSL + # but it will require a propoer SSL certificate + # otherwise the connection will not be validated + # if it's purely local ... + + # Get certificates for SSL + # ssl_certificate_file = {path to client.pem} + + # Generate it using "openssl req -new -x509 -keyout server.pem -out server.pem -days 365 -nodes" + + try: + server = ThreadedHTTPServer((host, int(port)), server_handler) + +# Needed for SSL connection (non-SSL connection will not work then) +# server.socket = ssl.wrap_socket (server.socket, server_side=True, +# certfile=ssl_certificate_file) + + # Prevent issues with socket reuse + server.allow_reuse_address=True + + server.serve_forever() + except KeyboardInterrupt: + ck.out('Keyboard interrupt, terminating web service ...') + server.socket.close() + return 1 + except OSError as e: + ck.out('Internal web service error ('+format(e)+')') + return 1 + + return 0 diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm.py new file mode 100644 index 0000000000..57fdd3d072 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm.py @@ -0,0 +1,256 @@ +# +# Communication with the cK server +# Based on "perform_remote_action" function from the CK kernel +# +# Developer(s): Grigori Fursin, https://fursin.net +# + + +from . 
import config + +import ck.kernel as ck + +import json +import sys +import os + +############################################################################## +# Send JSON request to the cK portal + +def send(i): + """ + Input: { + action [str] - remote API action name + config [dict] - configuration for remote server + dict [dict] - dict to send to remote server + ownership [dict] - info about user ownership + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Import modules compatible with Python 2.x and 3.x + import urllib + + try: import urllib.request as urllib2 + except: import urllib2 + + try: from urllib.parse import urlencode + except: from urllib import urlencode + + # Get server and user config + config=i.get('config',{}) + + username=config.get('username','') +# if username=='' or username==None: +# return {'return':1, 'error':'Username is not defined'} + + api_key=config.get('api_key','') +# if api_key=='' or api_key==None: +# return {'return':1, 'error': 'API key is not defined'} + + url=config.get('server_url') + if url=='' or url==None: + return {'return':1, 'error': 'cK API URL is not defined'} + + remote_server_user=config.get('server_user') + if remote_server_user==None: remote_server_user='' + + remote_server_password=config.get('server_pass') + if remote_server_password==None: remote_server_password='' + + remote_skip_certificate_validation=config.get('server_skip_validation') + if remote_skip_certificate_validation==None: remote_skip_certificate_validation='' + + # Prepare dict to send to remote server + ii={} + ii['action']=i.get('action','') + ii['dict']=i.get('dict',{}) + ii['ownership']=i.get('ownership',{}) + ii['username']=username + ii['api_key']=api_key + + # Prepare post variables + r=ck.dumps_json({'dict':ii, 'skip_indent':'yes'}) + if r['return']>0: return r + + s=r['string'] + if sys.version_info[0]>2: s=s.encode('utf8') + + # Check if skip SSL certificate + ctx=None + add_ctx=False + + if remote_skip_certificate_validation=='yes': + + import ssl + + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + + add_ctx=True + + # If auth + auth=None + add_auth=False + + if remote_server_user!='' and remote_server_user!=None: + if remote_server_password==None: remote_server_password='' + + auth = urllib2.HTTPPasswordMgrWithDefaultRealm() + auth.add_password(None, url, remote_server_user, remote_server_password) + + add_auth=True + + # Prepare handler (TBD: maybe there is another, more elegant way?) 
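+    # Depending on what was configured above, install a global urllib opener that
+    # adds HTTP basic auth and/or the relaxed-SSL context before the request below.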
+ if add_auth and add_ctx: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(auth), urllib2.HTTPSHandler(context=ctx))) + elif add_auth: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(auth))) + elif add_ctx: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPSHandler(context=ctx))) + + # Prepare request + request = urllib2.Request(url, s, {'Content-Type': 'application/json'}) + + # Connect + try: + f=urllib2.urlopen(request) + except Exception as e: + return {'return':1, 'error':'Access to the cK portal failed ('+format(e)+')'} + + # Read from Internet + try: + s=f.read() + f.close() + except Exception as e: + return {'return':1, 'error':'Failed reading stream from the cK portal ('+format(e)+')'} + + # Check output + try: s=s.decode('utf8') + except Exception as e: pass + + # Try to convert output to dictionary + r=ck.convert_json_str_to_dict({'str':s, 'skip_quote_replacement':'yes'}) + if r['return']>0: + return {'return':1, 'error':'can\'t parse output from the cK portal ('+r['error']+'):\n'+s[:256]+'\n\n...)'} + + d=r['dict'] + + if 'return' in d: d['return']=int(d['return']) # Fix for some strange behavior when 'return' is not integer - should check why ... + else: + d['return']=99 + d['error']='repsonse doesn\'t follow the cK standard' + + return d + +############################################################################## +# Low-level access to cK portal + +def access(i): + + """ + Input: { + (filename) [str] - load JSON from this file + or + (json) [str] - parse JSON string from command line (use ' instead of ") + or + (dict) [dict] - dictionary to send to the cK API + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + import json + + filename=i.get('filename','') + json_string=i.get('json','') + + display=i.get('display','') + + data=i.get('dict',{}) + + if filename=='' and json_string=='' and len(data)==0: + return {'return':1, 'error':'either "filename" or "json" or "dict" should define data to be pushed to cK API'} + + if filename!='': + r=ck.load_json_file({'json_file':filename}) + if r['return']>0: return r + + data2=r['dict'] + data.update(data2) + + if json_string!='': + json_string=json_string.replace("'", '"') + + data2=json.loads(json_string) + + data.update(data2) + if display=='': + display=False + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Prepare request + ii={'config':cfg} + ii.update(data) + + # Sending request to download + r=send(ii) + if r['return']>0: return r + + if display is True: + ck.out('Output:') + ck.out('') + + ck.out(json.dumps(r, indent=2)) + + return r + +############################################################################## +# Send JSON request to cK portal + +def download_file(i): + """ + Input: { + url [str] - full URL for a file to download + file [dict] - file to save + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + file_size - size of retreived file + } + """ + + url=i['url'] + fn=i['file'] + + # Import modules compatible with Python 2.x and 3.x + import urllib + + try: from urllib.request import urlretrieve + except: from urllib import urlretrieve + + # Connect + try: + urlretrieve(url, fn) + except Exception as e: + return {'return':1, 'error':'download failed ('+format(e)+')'} + + statinfo = os.stat(fn) + file_size=statinfo.st_size + + return {'return':0, 
'file_size':file_size} diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm_min.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm_min.py new file mode 100644 index 0000000000..661b8e36b6 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/comm_min.py @@ -0,0 +1,87 @@ +# +# Minimal communication with the cK server +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +import json +import sys +import os + +############################################################################## +# Send JSON request to the cK portal (without CK) + +def send(i): + """ + Input: { + action [str] - remote API action name + url [str] - URL + dict [dict] - dict to send to remote server + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Import modules compatible with Python 2.x and 3.x + import urllib + + try: import urllib.request as urllib2 + except: import urllib2 + + try: from urllib.parse import urlencode + except: from urllib import urlencode + + url=i.get('url') + if url=='' or url==None: + return {'return':1, 'error': 'cK API URL is not defined'} + + # Prepare dict to send to remote server + ii={} + ii['action']=i.get('action','') + ii['dict']=i.get('dict',{}) + + # Prepare post variables + try: + if sys.version_info[0]>2: + s=json.dumps(ii, ensure_ascii=False).encode('utf8') + else: + s=json.dumps(ii, ensure_ascii=False, encoding='utf8') + except Exception as e: + return {'return':1, 'error':'problem converting dict to json ('+format(e)+')'} + + # Prepare request + request = urllib2.Request(url, s, {'Content-Type': 'application/json'}) + + # Connect + try: + f=urllib2.urlopen(request) + except Exception as e: + return {'return':1, 'error':'Access to the cK portal failed ('+format(e)+')'} + + # Read from Internet + try: + s=f.read() + f.close() + except Exception as e: + return {'return':1, 'error':'Failed reading stream from the cK portal ('+format(e)+')'} + + # Check output + try: s=s.decode('utf8') + except Exception as e: pass + + # Try to convert output to dictionary + try: + d=json.loads(s, encoding='utf8') + except Exception as e: + return {'return':1, 'error':'problem converting text to json ('+format(e)+')'} + + if 'return' in d: d['return']=int(d['return']) # Fix for some strange behavior when 'return' is not integer - should check why ... + else: + d['return']=99 + d['error']='repsonse doesn\'t follow the cK standard' + + return d + diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/config.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/config.py new file mode 100644 index 0000000000..23437715b7 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/config.py @@ -0,0 +1,250 @@ +# +# Global configuration +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +# CK entry to keep client configuration info +CK_CFG_REPO_UOA="local" +CK_CFG_DATA_UOA="cbench" +CK_CFG_MODULE_UID="b34231a3467566f8" # ck info module:cfg + +CK_CFG_MODULE_REPO_UOA="befd7892b0d469e9" # CK module UOA for REPO + +CR_DEFAULT_SERVER="https://cKnowledge.io" +CR_DEFAULT_SERVER_URL=CR_DEFAULT_SERVER+"/api/v1/?" 
+CR_DEFAULT_SERVER_USER="crowd-user" +CR_DEFAULT_SERVER_API_KEY="43fa84787ff65c2c00bf740e3853c90da8081680fe1025e8314e260888265033" + +PACK_SIZE_WARNING=5000000 + +CR_WORK_DIR='CR' +CR_SOLUTIONS_DIR='solutions' + +CR_MODULE_UOA='solution' + +PACK_FILE='pack.zip' + +CR_ENV_USERNAME='CR_USER' +CR_ENV_API_KEY='CR_KEY' + +CR_LINE='**************************************************************************' + +CR_SOLUTION_CK_COMPONENTS=[ + {'cid':'module:device', 'version':'1.0.0'}, + {'cid':'module:env', 'version':'1.1.0'}, + {'cid':'module:machine', 'version':'1.0.0'}, + {'cid':'module:misc', 'version':'1.0.0'}, + {'cid':'module:os', 'version':'1.0.0'}, + {'cid':'module:package', 'version':'1.2.0'}, + {'cid':'module:platform*', 'version':'1.0.0'}, + {'cid':'module:script', 'version':'1.0.0'}, + {'cid':'module:soft', 'version':'1.2.0'}, + {'cid':'module:docker', 'version':'1.0.0'}, + {'cid':'module:event', 'version':'1.0.0'}, + {'cid':'module:lib', 'version':'1.0.0'}, + {'cid':'module:program', 'version':'1.0.3'}, + {'cid':'module:result', 'version':'1.0.0'}, + {'cid':'module:solution', 'version':'1.0.0'}, + {'cid':'os:*', 'version':'1.0.0'}, + {'cid':'platform.init:*', 'version':'1.0.0'}, + {'cid':'script:download-and-install-package', 'version':'1.0.0'}, + {'cid':'soft:compiler.python', 'version':'1.0.0'}, + {'cid':'soft:tool.adb', 'version':'1.0.0'}, +] + +import ck.kernel as ck + +bootstrapping=False + +############################################################################## +# Load client configuration + +def load(i): + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + """ + + global bootstrapping + + import os + + # Get current configuration + cfg={ + 'server_url':CR_DEFAULT_SERVER_URL # Default + } + path='' + + ii={'action':'load', + 'repo_uoa':CK_CFG_REPO_UOA, + 'module_uoa':CK_CFG_MODULE_UID, + 'data_uoa':CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): return r + + if r['return']==0: + cfg=r['dict'] + path=r['path'] + + if not bootstrapping and (r['return']==16 or cfg.get('bootstrapped','')!='yes'): + rx=update({'cfg':cfg}) + if rx['return']>0: return rx + + # Check overriding by env + v=os.environ.get(CR_ENV_USERNAME,'') + if v!='': cfg['username']=v + v=os.environ.get(CR_ENV_API_KEY,'') + if v!='': cfg['api_key']=v + + return {'return':0, 'dict':cfg, 'path':path} + +############################################################################## +# Update CK modules and configuration + +def update(i): + """ + Input: { + (force) [bool] - if True, force update + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + import os + + global bootstrapping + bootstrapping=True + + force=i.get('force') + cfg=i.get('cfg',{}) + + from . import obj + + title='Bootstrapping' + if cfg.get('bootstrapped','')=='yes': title='Updating' + + ck.out(title+' cBench to support portable actions and workflows:') + ck.out('') + + # Check release notes + server_url=cfg.get('server_url','') + if server_url=='': server_url='https://cKnowledge.io/api/v1/?' + + from . 
import comm_min + r=comm_min.send({'url':server_url, + 'action':'event', + 'dict':{'type':'get-cbench-bootstrap-notes'}}) + + notes=r.get('notes','') + if notes!='': + ck.out('***********************************************') + ck.out(notes) + ck.out('***********************************************') + + lst_all=[] + + sbf=os.environ.get('CB_SAVE_BOOTSTRAP_FILES','') + + if sbf=='': + fboot='cb-bootstrap-20200529' + files=[fboot+'.json'] + + if os.name=='nt': + files.append(fboot+'-win.json') + + for fn in files: + r=ck.gen_tmp_file({'prefix':'cb-bootstrap-', 'suffix':'.json'}) + if r['return']>0: return r + ftmp=r['file_name'] + + burl=CR_DEFAULT_SERVER+'/static/bootstrap/'+fn + + ck.out('Downloading '+burl) + + from . import comm + + rx=comm.download_file({'url':burl, 'file':ftmp}) + if rx['return']>0: return rx + + rx=ck.load_json_file({'json_file':ftmp}) + if rx['return']>0: return rx + + lst_all+=rx['dict'] + + os.remove(ftmp) + + r=obj.download({'components':lst_all, 'force':force}) + if r['return']>0 and r['return']!=8: return r + + else: + for x in CR_SOLUTION_CK_COMPONENTS: + r=obj.download({'cid':x['cid'], 'version':x.get('version',''), 'force':force}) + if r['return']>0: + if r['return']!=8: return r + else: ck.out(' Skipped - already exists!') + else: + lst_all+=r['components'] + + rx=ck.save_json_to_file({'json_file':sbf, 'dict':lst_all, 'sort_keys':'yes'}) + if rx['return']>0: return rx + + ck.out('') + + # Update cfg + cfg['bootstrapped']='yes' + + ii={'action':'update', + 'repo_uoa':CK_CFG_REPO_UOA, + 'module_uoa':CK_CFG_MODULE_UID, + 'data_uoa':CK_CFG_DATA_UOA, + 'dict':cfg, + 'sort_keys':'yes'} + + r=ck.access(ii) + + ck.out(title+' finished!') + ck.out('') + + return r + +############################################################################## +# Get path to work directory in a USER space + +def get_work_dir(i): + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + path [str] - path to work dir + } + """ + + import os + + # Get home user directory + from os.path import expanduser + home = expanduser("~") + + work_dir=os.path.join(home, CR_WORK_DIR) + if not os.path.isdir(work_dir): + os.makedirs(work_dir) + + return {'return':0, 'path':work_dir} diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/graph.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/graph.py new file mode 100644 index 0000000000..2a16ecead2 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/graph.py @@ -0,0 +1,359 @@ +# +# Support for graphs +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import config +from . import comm +from . 
import obj + +import ck.kernel as ck + +import json +import os +import copy + +meta_template={ + "meta": { + "scenario": "universal", + "scenario_uid": "3bf7371412455a8f", + "viz_engine": "ck_beta" + }, + "tags": [ + "result" + ] + } + +desc_template={ + "data_config": { + "default_key_x": "x", + "default_key_y": "y", + "default_sort_key": "x", + "table_view": [ + { + "key": "x", + "name": "X", + "type": "int" + }, + { + "key": "y", + "name": "Y", + "format": "%.2f", + "type": "float" + }, + { + "key": "submitter", + "name": "Submitter" + } + ] + } +} + +extra_info_desc=[{'key':'copyright', 'name':'copyright (optional)'}, + {'key':'license', 'name':'license (optional)'}, + {'key':'author', 'name':'author (optional)'}, + {'key':'author_email', 'name':'author email (optional)'}, + {'key':'author_webpage', 'name':'author webpage (optional)'}] + +############################################################################## +# Initialize a graph on a portal + +def init(i): + + """ + Input: { + uid [str] - graph identifyer + (version) [str] - graph version + (desc_file) [str] - file with graph description + (tags) [str] - tags separated by comma + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + """ + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + # CID ########################################################### + uid=i['uid'] + if uid==None: uid='' + + version=i.get('version') + if version==None: version='' + + desc_file=i.get('desc_file','') + if desc_file==None: desc_file='' + + # If UID!='', check if already exists ... + found=False + meta=meta_template + path='' + data_name='' + tags=[] + meta_info='' + source='' + extra_info={} + + if uid!='': + r=ck.access({'action':'load', + 'module_uoa':'result', + 'data_uoa':uid}) + if r['return']>0: + if r['return']!=16: return r + else: + found=True + meta=r['dict'] + path=r['path'] + data_name=r['data_name'] + + tags=meta.get('tags',[]) + source=meta.get('source','') + meta_info=meta.get('meta',{}).get('info','') + + extra_info=r['info'].get('control',{}) + + # Check if init from scratch and no title + if i.get('name')!=None and i.get('name','')!='': + data_name=i['name'].strip() + elif not found or data_name=='': + r=ck.inp({'text':'Select a title for your graph: '}) + if r['return']>0: return r + + data_name=r['string'].strip() + + meta['meta']['title']=data_name + + # Check if init from scratch and no title +# if not found or meta_info=='': +# r=ck.inp({'text':'Enter general info about your graph: '}) +# if r['return']>0: return r + +# x=r['string'].strip() + +# if x=='': x=' ' + +# meta['meta']['info']=x + + # Adding tags + if i.get('tags')!=None and i.get('tags','')!='': + xtags=i['tags'].strip().split(',') + + for t in xtags: + t1=t.strip() + if t1!='' and t1 not in tags: + tags.append(t1) + + meta['tags']=tags + + elif not found or (len(tags)==1 and 'result' in tags): + r=ck.inp({'text':'Enter tags for your graph separated by commas: '}) + if r['return']>0: return r + + xtags=r['string'].strip().split(',') + + for t in xtags: + t1=t.strip() + if t1!='' and t1 not in tags: + tags.append(t1) + + meta['tags']=tags + + # Checking source +# if not found or source=='': +# r=ck.inp({'text':'Enter source of results for your graph (can be URL): '}) +# if r['return']>0: return r + +# source=r['string'].strip() + +# 
meta['source']=source + + # Checking authors +# for x in extra_info_desc: +# k=x['key'] +# n=x['name'] + +# if not found or extra_info.get(k,'')=='': +# r=ck.inp({'text':'Enter '+n+': '}) +# if r['return']>0: return r + +# s=r['string'].strip() + +# extra_info[k]=s + + # Creating/updating graph + a='add' + if found: a='update' + + ii={'action':a, + 'module_uoa':'result', + 'data_uoa':uid, + 'dict':meta, + 'sort_keys':'yes', + 'data_name':data_name, + 'substitute':'yes', + 'extra_info':extra_info} + + r=ck.access(ii) + if r['return']>0: return r + + data_uoa=r['data_uoa'] + data_uid=r['data_uid'] + path=r['path'] + + x='initialized' + if found: x='updated' + + ck.out('Graph was successfully '+x+':') + ck.out('') + ck.out(' CK UID: '+data_uid) + ck.out(' CK name: '+data_uoa) + ck.out(' CK path: '+path) + + # Add desc + p1=os.path.join(path, 'desc.json') + + dt=copy.deepcopy(desc_template) + if desc_file!='': + rx=ck.load_json_file({'json_file':desc_file}) + if rx['return']>0: return rx + dx=rx['dict'] + dt['data_config'].update(dx) + + if desc_file!='' or not os.path.isfile(p1): + rx=ck.save_json_to_file({'json_file':p1, 'dict':dt, 'sort_keys':'yes'}) + if rx['return']>0: return rx + + p2=os.path.join(path, '.cm', 'meta.json') + + ck.out('') + ck.out('You can continue updating graph using following files: ') + ck.out('') + ck.out(' Graph general meta info: '+p1) + ck.out(' See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=.cm/meta.json') + ck.out('') + ck.out(' Graph axes info: '+p2) + ck.out(' See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=desc.json') + + # Need to publish + ck.out('') + rx=ck.inp({'text':'Publish graph on the portal (Y/n)?'}) + if rx['return']>0: return rx + s=rx['string'].strip().lower() + + if s=='' or s=='y': + ck.out('') + r=obj.publish({'cid':'result:'+data_uoa, + 'version':version, + 'force':True}) + + else: + ck.out('') + ck.out('You can publish your graph on the portal using the following commands when ready: ') + ck.out('') + ck.out(' cb publish result:'+data_uoa+' --version=1.0.0 --force (--private)') + + return r + +############################################################################## +# Push result to a graph on a portal + +def push(i): + + """ + Input: { + uid [str] - graph identifyer + (version) [str] - graph version + (filename) [str] - JSON file with results + (json) [str] - JSON string from command line (use ' instead of ") + (point) [str] - specific point name to add/update + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + """ + + # CID ########################################################### + uid=i['uid'] + if uid=='': + return {'return':1, 'error':'graph UID is not defined!'} + + version=i.get('version') + if version==None: version='' + + filename=i.get('filename','') + json_string=i.get('json','') + + if filename=='' and json_string=='': + return {'return':1, 'error':'either "filename" or "json" should define results to be pushed'} + + point=i.get('point','') + + # Prepare data + data=[] + + if filename!='': + r=ck.load_json_file({'json_file':filename}) + if r['return']>0: return r + + data2=r['dict'] + if type(data2)==dict: + data2=[data2] + + data+=data2 + + if json_string!='': + import json + + json_string=json_string.replace("'", 
'"') + + data2=json.loads(json_string) + + if type(data2)==dict: + data2=[data2] + + data+=data2 + + # Send request + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check if username and API_Key are empty and then use default crowd-user ... + username=cfg.get('username','') + if username=='' or username==None: + cfg['username']=config.CR_DEFAULT_SERVER_USER + cfg['api_key']=config.CR_DEFAULT_SERVER_API_KEY + + # Sending request to download + r=comm.send({'config':cfg, + 'action':'push_result', + 'dict':{ + 'data_uoa':uid, + 'version':version, + 'point':point, + 'data':data + } + }) + if r['return']>0: return r + url=r.get('url','') + + ck.out(' Successfully pushed your point to a graph!') + if url!='': + ck.out(' URL: '+url) + + return r diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/main.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/main.py new file mode 100644 index 0000000000..4249af83e2 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/main.py @@ -0,0 +1,602 @@ +# +# CMD parser +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +import click +import ck.kernel as ck + +############################################################################## +@click.group() +def cli(): + return 0 + +############################################################################## +def process_error(r): + + e=r.get('error','') + if e!='': + r['error']=e + + ck.err(r) + # Should not reach here since ck.err exits program + return + +# SETUP CLIENT ############################################################################# +@cli.command() + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('-s', '--server_url', 'server_url', required=False) +@click.option('-su', '--server_user', 'server_user', required=False) +@click.option('-sp', '--server_pass', 'server_pass', required=False) +@click.option('-ss', '--server_skip_validation', 'server_skip_validation', required=False) + +def setup(username, + api_key, + server_url, + server_user, + server_pass, + server_skip_validation): + ''' + Setup client. + ''' + from . import setup + return setup.setup({'username':username, + 'api_key':api_key, + 'server_url':server_url, + 'server_user':server_user, + 'server_pass':server_pass, + 'server_skip_validation':server_skip_validation}) + +# LOGIN TEST ############################################################################# +@cli.command() + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('-s', '--server_url', 'server_url', required=False) +@click.option('-su', '--server_user', 'server_user', required=False) +@click.option('-sp', '--server_pass', 'server_pass', required=False) +@click.option('-ss', '--server_skip_validation', 'server_skip_validation', required=False) + +def login(username, + api_key, + server_url, + server_user, + server_pass, + server_skip_validation): + ''' + Test login to the portal. + ''' + from . 
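The push() helper defined above accepts results either from a JSON file or from an inline JSON string (single quotes are converted to double quotes so the string survives the command line), and falls back to the default crowd user when no credentials are configured. A small programmatic sketch, with an assumed import path and a placeholder graph UID; the x/y/submitter keys mirror the desc_template above:

from cbench import graph                  # import path is an assumption

r = graph.push({'uid': 'my-graph-uid',    # placeholder: UID of a graph created with init()
                'version': '1.0.0',
                'json': "{'x': 1, 'y': 42.0, 'submitter': 'my-team'}",  # single quotes become JSON quotes
                'point': ''})             # optional: name of an existing point to update
if r['return'] > 0:
    print(r.get('error', ''))
else:
    print(r.get('url', ''))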
import setup + return setup.login({'username':username, + 'api_key':api_key, + 'server_url':server_url, + 'server_user':server_user, + 'server_pass':server_pass, + 'server_skip_validation':server_skip_validation}) + + return 0 + +# PUBLISH COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('--quiet', 'quiet', required=False, is_flag=True) +@click.option('--force', 'force', required=False, is_flag=True) +@click.option('--private', is_flag=True) +@click.option('-w', '--workspaces', 'workspaces', required=False) +@click.option('-v', '--version', 'version', required=False) +@click.option('--author', 'author', required=False) +@click.option('--author_id', 'author_id', required=False) +@click.option('--copyright', 'copyright', required=False) +@click.option('--license', 'license', required=False) +@click.option('--source', 'source', required=False) +@click.option('--permanent', is_flag=True) +@click.option('-et', '--extra_tags', 'extra_tags', required=False, default='') + +def publish(cid, + permanent, + tags, + extra_tags, + username, + api_key, + force, + quiet, + private, + workspaces, + version, + author, + author_id, + copyright, + license, + source): + ''' + Publish CK component to the portal. + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.publish({'cid':cid, + 'permanent':permanent, + 'tags':tags, + 'username':username, + 'api_key':api_key, + 'quiet':quiet, + 'force':force, + 'private':private, + 'workspaces':workspaces, + 'version':version, + 'author':author, + 'author_id':author_id, + 'copyright':copyright, + 'license':license, + 'source':source, + 'extra_tags':extra_tags}) + + if r['return']>0: process_error(r) + return 0 + +# Delete COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) + +def delete(cid, + username, + api_key): + ''' + Delete CK component from the portal if not permanent! + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.delete({'cid':cid, + 'username':username, + 'api_key':api_key}) + + if r['return']>0: process_error(r) + return 0 + +# LIST VERSIONS OF A GIVEN COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +def versions(cid): + ''' + List versions of a given component at the portal. + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.versions({'cid':cid}) + + if r['return']>0: process_error(r) + return 0 + +# OPEN PORTAL WITH A GIVEN COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +def open(cid): + ''' + Open portal web page with a given component + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . 
import obj + r=obj.open_page({'cid':cid}) + + if r['return']>0: process_error(r) + return 0 + +# DOWNLOAD COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-v', '--version', 'version', required=False) +@click.option('-f', '--force', 'force', required=False, is_flag=True) +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-a', '--all', 'all', required=False, is_flag=True) + +def download(cid, + version, + force, + tags, + all): + ''' + Download CK component from the portal. + + CID: CK identifier {module UOA}:{data UOA}. + ''' + from . import obj + r=obj.download({'cid':cid, + 'version':version, + 'force':force, + 'tags':tags, + 'all':all}) + + if r['return']>0: process_error(r) + return 0 + +# BOOSTRAP ############################################################################# +@cli.command() + +@click.option('-f', '--force', 'force', required=False, is_flag=True) + +def update(force): + ''' + Update/bootstrap cK components. + ''' + + from . import config + r=config.update({'force':force}) + + if r['return']>0: process_error(r) + return 0 + +# INIT GRAPH ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +@click.option('-v', '--version', 'version', required=False) +@click.option('-d', '--desc_file', 'desc_file', required=False) +@click.option('-t', '--tags', 'tags', required=False) +@click.option('-n', '--name', 'name', required=False) + +def init_graph(uid, + version, + desc_file, + tags, + name): + ''' + Init graph at the portal. + + UID: portal graph identifier. + ''' + from . import graph + r=graph.init({'uid':uid, + 'version':version, + 'desc_file':desc_file, + 'tags':tags, + 'name':name}) + + if r['return']>0: process_error(r) + return 0 + +# PUSH RESULT ############################################################################# +@cli.command() + +@click.argument('uid', required=True) + +@click.option('-v', '--version', 'version', required=False, default='') +@click.option('-f', '--filename', 'filename', required=False, default='') +@click.option('-j', '--json', 'json_string', required=False, default='') +@click.option('-p', '--point', 'point', required=False, default='') + +def push_result(uid, + version, + filename, + json_string, + point): + ''' + Push result to a graph at the portal. + + UID: portal graph identifier. + ''' + + from . import graph + r=graph.push({'uid':uid, + 'version':version, + 'filename':filename, + 'json':json_string, + 'point':point}) + + if r['return']>0: process_error(r) + return 0 + +# ACCESS API ############################################################################# +@cli.command() + +@click.option('-f', '--filename', 'filename', required=False, default='') +@click.option('-j', '--json', 'json_string', required=False, default='') +@click.option('-m', '--mute', 'display', is_flag=True, default=True) + + +def access(filename, + json_string, + display): + ''' + Access Portal via JSON API. + ''' + from . 
import comm + r=comm.access({'filename':filename, + 'json':json_string, + 'display': display}) + + if r['return']>0: process_error(r) + return 0 + +# INIT SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +@click.option('-u', '--username', 'username', required=False, default='') +@click.option('-a', '--api_key', 'api_key', required=False, default='') +@click.option('-n', '--name', 'name', required=False, default='') +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-pp', '--python_path', required=False, default='') +@click.option('-pv', '--python_version', required=False, default='') +@click.option('-pvf', '--python_version_from', required=False, default='') +@click.option('-pvt', '--python_version_to', required=False, default='') +@click.option('-pl', '--python_localenv', 'python_localenv', is_flag=True, default=True) +@click.option('-ho', '--host_os', 'host_os', required=False, default='') +@click.option('-to', '--target_os', 'target_os', required=False, default='') +@click.option('-di', '--device_id', 'device_id', required=False, default='') +@click.option('-h', '--hostname', 'hostname', required=False, default='') +@click.option('-w', '--workflow', 'workflow', required=False, default='') +@click.option('-wr', '--workflow_repo_url', 'workflow_repo_url', required=False, default='') +@click.option('-wcb', '--workflow_cmd_before', 'workflow_cmd_before', required=False, default='') +@click.option('-wca', '--workflow_cmd_after', 'workflow_cmd_after', required=False, default='') +@click.option('-wc', '--workflow_cmd', 'workflow_cmd', required=False, default='') +@click.option('-wce', '--workflow_cmd_extra', 'workflow_cmd_extra', required=False, default='') +@click.option('-wi', '--workflow_input', 'workflow_input', required=False, default='') # Input source (stream, webcam, etc) +@click.option('-wid', '--workflow_input_dir', 'workflow_input_dir', required=False, default='') # Input directory (will be cleaned) +@click.option('-wod', '--workflow_output_dir', 'workflow_output_dir', required=False, default='') # Output directory (will be cleaned) +@click.option('-d', '--desc_prereq', 'desc_prereq', required=False, default='') +@click.option('-dp', '--desc_prepare', 'desc_prepare', required=False, default='') +@click.option('-dr', '--desc_run', 'desc_run', required=False, default='') +@click.option('-s', '--add_extra_scripts', 'add_extra_scripts', required=False, default='') +@click.option('-e', '--add_extra_meta_from_file', 'add_extra_meta_from_file', required=False, default='') +@click.option('-rf', '--result_file', 'result_file', required=False, default='') +@click.option('--update_meta_and_stop', 'update_meta_and_stop', is_flag=True, default=False) +@click.option('--skip_graph_init', 'skip_graph_init', is_flag=True, default=False) +@click.option('-r', '--resume', 'resume', is_flag=True, default=False) +@click.option('-ss', '--skip_stop', 'skip_stop', is_flag=True, default=False) +@click.option('-g', '--graphs', 'graphs', required=False, default='') +@click.option('-dg', '--desc_graph', 'desc_graph', required=False, default='') +@click.option('-gc', '--graph_convertor', 'graph_convertor', required=False, default='') + +def init(uid, + username, + api_key, + name, + tags, + python_path, + python_version, + python_version_from, + python_version_to, + python_localenv, + host_os, + target_os, + device_id, + hostname, + workflow, + workflow_repo_url, + 
workflow_cmd_before, + workflow_cmd_after, + workflow_cmd, + workflow_cmd_extra, + workflow_input, + workflow_input_dir, + workflow_output_dir, + desc_prereq, + desc_prepare, + desc_run, + add_extra_scripts, + add_extra_meta_from_file, + result_file, + update_meta_and_stop, + skip_graph_init, + resume, + skip_stop, + graphs, + desc_graph, + graph_convertor): + ''' + Init portable solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.init({'uid':uid, + 'username':username, + 'api_key':api_key, + 'name':name, + 'tags':tags, + 'python_path':python_path, + 'python_version':python_version, + 'python_version_from':python_version_from, + 'python_version_to':python_version_to, + 'python_localenv':python_localenv, + 'host_os':host_os, + 'target_os':target_os, + 'device_id':device_id, + 'hostname':hostname, + 'workflow_repo_url':workflow_repo_url, + 'workflow':workflow, + 'workflow_cmd_before':workflow_cmd_before, + 'workflow_cmd_after':workflow_cmd_after, + 'workflow_cmd':workflow_cmd, + 'workflow_cmd_extra':workflow_cmd_extra, + 'workflow_input':workflow_input, + 'workflow_input_dir':workflow_input_dir, + 'workflow_output_dir':workflow_output_dir, + 'desc_prereq':desc_prereq, + 'desc_prepare':desc_prepare, + 'desc_run':desc_run, + 'add_extra_meta_from_file':add_extra_meta_from_file, + 'result_file':result_file, + 'add_extra_scripts':add_extra_scripts, + 'update_meta_and_stop':update_meta_and_stop, + 'skip_graph_init':skip_graph_init, + 'resume':resume, + 'skip_stop':skip_stop, + 'graphs':graphs, + 'desc_graph':desc_graph, + 'graph_convertor':graph_convertor}) + + if r['return']>0: process_error(r) + return 0 + +# ACTIVATE SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def activate(uid): + ''' + Activate virtual environment from the prepared solution. + + UID - solution identifier. + ''' + from . import solution + r=solution.activate({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# RUN SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +@click.option('-c', '--cmd', 'cmd', required=False, default='') + +def benchmark(uid, + cmd): + ''' + Benchmark solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.benchmark({'uid':uid, + 'cmd':cmd}) + + if r['return']>0: process_error(r) + return 0 + +# RUN SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +@click.option('-c', '--cmd', 'cmd', required=False, default='') + +def run(uid, + cmd): + ''' + Run portable solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.run({'uid':uid, + 'cmd':cmd}) + + if r['return']>0: process_error(r) + return 0 + +# LIST SOLUTIONS ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +def ls(uid): + ''' + List portable solutions. + + UID: solution identifier (can use wildcards).. + ''' + from . import solution + r=solution.ls({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# FIND SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def find(uid): + ''' + Find portable solution. + + UID: solution identifier. + ''' + from . 
import solution + r=solution.find({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# DELETE SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def rm(uid): + ''' + Delete portable solution. + + UID: solution identifier (can use wildcards). + ''' + from . import solution + r=solution.rm({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# START SERVICE TO COMMUNICATE WITH THE PORTAL ############################################################################# +@cli.command() + +@click.option('-h', '--host', 'host', required=False) +@click.option('-p', '--port', 'port', required=False) +@click.option('-t', '--tunnel', 'tunnel', required=False) + + +def start(host, + port, + tunnel): + ''' + Start server. + ''' + + from . import client + return client.start({'host':host, + 'port':port, + 'tunnel':tunnel}) + +# START SERVICE TO COMMUNICATE WITH PORTAL ############################################################################# +@cli.command() + +def version(): + ''' + Show client version. + ''' + + from . import __version__ + + print (__version__) + + return 0 + +############################################################################## +if __name__ == "__main__": + cli() diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/obj.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/obj.py new file mode 100644 index 0000000000..873831fd7c --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/obj.py @@ -0,0 +1,840 @@ +# +# Support for components +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import config +from . import comm + +import ck.kernel as ck + +import json +import zipfile +import os +import time + +skip_words_in_files=[ + 'tmp', + '.git', + '.pyc', + '__pycache__', + '.cache' +] + + +############################################################################## +# Delete CK component from the portal if not permanent + +def delete(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current directory (since will be changing it to get info about Git repo) + cur_dir=os.getcwd() + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check commands + # Username ########################################################## + username=cfg.get('username','') + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + return {'return':1, 'error':'Username is not defined'} + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + return {'return':1, 'error':'API key is not defined'} + + cfg['api_key']=api_key + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + + # Sending request to download + r=comm.send({'config':cfg, + 'action':'delete', + 'dict':{ + 'cid':cid + } + }) + if r['return']>0: return r + + ck.out(' Successfully deleted component(s) from the portal!') + + return {'return':0} + +############################################################################## +# Publish CK component to the 
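Taken together, the click commands above form the `cb` command-line client that other parts of this patch invoke internally (solution.py shells out to "cb update --force", and graph.py prints the publish command to run). Typical invocations, reusing only command lines that already appear as strings elsewhere in this patch (component and graph names are placeholders):

cb update --force
cb download module:program --version=1.0.3 --force
cb publish result:my-dashboard --version=1.0.0 --force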
portal + +def publish(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + (can use wildcards) + (tags) [str] - search multiple CK components by these tags separated by comma + (version) [str] - assign version + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current directory (since will be changing it to get info about Git repo) + cur_dir=os.getcwd() + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check commands + # Username ########################################################## + username=cfg.get('username','') + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + return {'return':1, 'error':'Username is not defined'} + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + return {'return':1, 'error':'API key is not defined'} + + cfg['api_key']=api_key + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + tags=i.get('tags','') + + # Check if no module and use "solution" by default + if cid.find(':')<0: + cid='solution:'+cid + + # Version ########################################################### + version=i.get('version') + if version=='' or version==None: + version='1.0.0' + ck.out('Since --version is not defined, we use "1.0.0"') + + # Extra info about authors + author=i.get('author','') + if author==None: author='' + + author_id=i.get('author_id','') + if author_id==None: author_id='' + + copyright=i.get('copyright','') + if copyright==None: copyright='' + + license=i.get('license','') + if license==None: license='' + + source=i.get('source','') + if source==None: source='' + + sextra_tags=i.get('extra_tags','') + if sextra_tags==None: sextra_tags='' + + quiet=i.get('quiet',False) + force=i.get('force',False) + permanent=i.get('permanent',False) + + # List CK components + r=ck.access({'action':'search', + 'cid':cid, + 'tags':tags, + 'add_info':'yes', + 'add_meta':'yes', + 'common_func':'yes'}) + if r['return']>0: return r + + lst=r['lst'] + llst=len(lst) + + if llst==0: + ck.out('No CK objects found') + + num=0 + + # Sort lst by modules and then data + lst1=sorted(lst, key=lambda x: (x.get('repo_uoa',''), x.get('module_uoa',''), x.get('data_uoa',''))) + + for obj in lst1: + num+=1 + + # Basic info about CK object + repo_uoa=obj['repo_uoa'] + repo_uid=obj['repo_uid'] + + module_uoa=obj['module_uoa'] + module_uid=obj['module_uid'] + + data_uoa=obj['data_uoa'] + data_uid=obj['data_uid'] + + # Print info + ck.out(str(num)+' out of '+str(llst)+') '+repo_uoa+':'+module_uoa+':'+data_uoa) + + # Check name and date + data_name=obj.get('info',{}).get('data_name','') + if data_name==data_uoa: data_name='' + + data_meta=obj['meta'] + if data_name=='': + if data_meta.get('misc',{}).get('title','')!='': + data_name=data_meta['misc']['title'] + + data_date='' + if data_meta.get('misc',{}).get('date','')!='': + data_date=data_meta['misc']['date'] + + source2=data_meta.get('source','') + if source2=='': source2=source + + license2=data_meta.get('license','') + if license2=='': license2=license + + copyright2=data_meta.get('copyright','') + if copyright2=='': copyright2=copyright + + # 
Specialize per specific modules + not_digital_component=False + extra_dict={} + extra_tags=[] + + if module_uoa=='module': + extra_dict['last_module_actions']=[] + actions=data_meta.get('actions',{}) + for a in actions: + extra_dict['last_module_actions'].append(a+' '+data_uoa) + + elif module_uoa=='lib': + not_digital_component=True + extra_tags=['library'] + + if 'reproduced-papers' in data_meta.get('tags',[]): + extra_tags.append('reproduced-papers') + + data_meta2=data_meta.get('meta',{}) + + if data_name=='': + data_name=data_meta2.get('title','') + + all_authors=data_meta2.get('authors','') + if all_authors!='': + extra_dict['all_authors']=[] + for aa in all_authors.split(','): + if aa!='': aa=aa.strip() + if aa!='': + extra_dict['all_authors'].append(aa) + + for k in ['badge_acm_artifact_available', 'badge_acm_artifact_functional', + 'badge_acm_artifact_reusable', 'badge_acm_results_replicated', + 'badge_acm_results_reproduced']: + if data_meta2.get(k,'')=='yes': + extra_tags.append(k) + + elif module_uoa=='event' or module_uoa=='repo': + not_digital_component=True + + # Get info of the first creation + first_creation=obj['info'].get('control',{}) + + # Load info about repo + repo_dict={} + + if not force and repo_uoa=='local' and module_uoa!='repo': # Normally skip everything from local unless we publish repos themselves + ck.out(' SKIPPED') + continue + + if module_uoa=='repo': + if not force and data_uoa=='local': + ck.out(' SKIPPED') + continue + + repo_dict=obj['meta'] + + elif repo_uoa!='default' and repo_uoa!='local': + r=ck.access({'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_REPO_UOA, + 'data_uoa':repo_uid, + 'common_func':'yes'}) + if r['return']>0: return r + repo_dict=r['dict'] + if 'path' in repo_dict: + del(repo_dict['path']) + + # Generate temp file to pack + r=ck.gen_tmp_file({'prefix':'obj-', 'suffix':'.zip'}) + if r['return']>0: return r + + fn=r['file_name'] + + # Pack component + p=obj['path'] + + zip_method=zipfile.ZIP_DEFLATED + + ii={'path':p, 'all':'yes'} + + # Prune files for solution + if module_uoa=='solution': + ii['ignore_names']=['CK','venv'] + + r=ck.list_all_files(ii) + if r['return']>0: return r + + fl=r['list'] + + # Write archive + try: + f=open(fn, 'wb') + z=zipfile.ZipFile(f, 'w', zip_method) + for fx in fl: + add=True + for k in skip_words_in_files: + if k in fx: + add=False + break + + if add: + p1=os.path.join(p, fx) + z.write(p1, fx, zip_method) + z.close() + f.close() + + except Exception as e: + return {'return':1, 'error':'failed to prepare archive ('+format(e)+')'} + + # Check size + statinfo = os.stat(fn) + pack_size=statinfo.st_size + + # Check problems with repository or components + x='' + if repo_dict.get('remote','')=='yes': + x+='remote repo;' + if repo_dict.get('private','')=='yes': + x+='private repo;' + if repo_dict.get('url','')=='' and repo_uoa!='default': + x+='repo not shared;' + if pack_size>config.PACK_SIZE_WARNING: + x+='pack size ('+str(pack_size)+') > '+str(config.PACK_SIZE_WARNING)+';' + + skip_component=False + if not force and x!='': + if quiet: + skip_component=True + else: + r=ck.inp({'text':' This component has potential issues ('+x+'). Skip processing (Y/n)? 
'}) + if r['return']>0: return r + s=r['string'].strip() + if s=='' or s=='Y' or s=='y': + skip_component=True + + if skip_component: + ck.out(' SKIPPED ('+x+')') + + if os.path.isfile(fn): + os.remove(fn) + + continue + + # Convert to MIME to send over internet + r=ck.convert_file_to_upload_string({'filename':fn}) + if r['return']>0: return r + + pack64=r['file_content_base64'] + + if os.path.isfile(fn): + os.remove(fn) + + # Check workspaces + lworkspaces=[] + workspaces=i.get('workspaces','') + if workspaces!=None: + lworkspaces=workspaces.strip().split(',') + + # Get extra info about repo + os.chdir(p) + + repo_info={} + + if repo_dict.get('private','')!='yes': + repo_info={'publish_repo_uoa':repo_uoa, + 'publish_repo_uid':repo_uid} + + # Get current Git URL + r=ck.run_and_get_stdout({'cmd':['git','config','--get','remote.origin.url']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_url']=x + + # Get current Git branch + r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--abbrev-ref','HEAD']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_branch']=x + + # Get current Git checkout + r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--short','HEAD']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_checkout']=x + + repo_info['dict']=repo_dict + + # Add extra tags + for et in sextra_tags.split(','): + et=et.strip().lower() + if et!='': + extra_tags.append(et) + + #TBD: owner, version, info about repo + # Sending request + r=comm.send({'config':cfg, + 'action':'publish', + 'ownership':{ + 'private':i.get('private', False), + 'workspaces':lworkspaces + }, + 'dict':{ + 'publish_module_uoa':module_uoa, + 'publish_module_uid':module_uid, + 'publish_data_uoa':data_uoa, + 'publish_data_uid':data_uid, + 'publish_data_name':data_name, + 'publish_data_date':data_date, + 'publish_pack':pack64, + 'publish_pack_size':pack_size, + 'repo_info':repo_info, + 'first_creation':first_creation, + 'version':version, + 'author':author, + 'author_id':author_id, + 'copyright':copyright2, + 'license':license2, + 'source':source2, + 'not_digital_component':not_digital_component, + 'extra_dict':extra_dict, + 'extra_tags':extra_tags, + 'permanent':permanent + } + }) + if r['return']>0: + ck.out(' WARNING: Portal API returned error: '+r['error']) + else: + data_uid=r['data_uid'] + ck.out(' cK component ID: '+data_uid) + purl=r.get('url','') + if purl!='': + ck.out(' cK component URL: '+purl) + + os.chdir(cur_dir) + + return {'return':0} + +############################################################################## +# List versions of a given CK component at the portal + +def versions(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Call Portal API + r=comm.send({'config':cfg, + 'action':'list_versions', + 'dict':{ + 'module_uoa':module_uoa, + 'data_uoa':data_uoa + 
} + }) + if r['return']>0: return r + + versions=r.get('versions',[]) + for v in versions: + vv=v.get('version','') + dt=v.get('iso_datetime','').replace('T',' ') + + ck.out(vv+' ('+dt+')') + + return r + +############################################################################## +# Open portal with a given CK component + +def open_page(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # URL + url=cfg.get('server_url','') + if url!='': + h=url.find('api/') + if h>0: + url=url[:h] + else: + url='' + + if url=='': + url=config.CR_DEFAULT_SERVER + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Form URL + url+='c/'+module_uoa+'/'+data_uoa + + ck.out('Opening web page '+url+' ...') + + import webbrowser + webbrowser.open(url) + + return {'return':0} + +############################################################################## +# Download CK component from the portal to the local repository + +def download(i): + + """ + Input: { + components - pre-loaded components from bootstrapping + or + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + (can use wildcards) + + + (version) [str] - assign version + (force) [bool] - if True, force download even if components already exists + + (tags) [str] - can search by tags (usually soft/package) + + (all) [bool] - if True, download dependencies (without force!) + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + sbf=os.environ.get('CB_SAVE_BOOTSTRAP_FILES','') + + force=i.get('force') + al=i.get('all') + + skip_module_check=i.get('skip_module_check',False) + + tags=i.get('tags','') + + spaces=i.get('spaces','') + + lst=i.get('components',[]) + + rr={'return':0} + + if len(lst)>0: + preloaded=True + msg='Processing' + msg2='processed' + skip_module_check=True + + repo_uoa='local' + + ck.cfg['check_missing_modules']='no' # Important not to check missing modules! 
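The publish() routine above packs a CK entry into a zip, attaches Git provenance when available, and sends it to the portal; versions() and open_page() then let you inspect what was published. A rough sketch of the programmatic flow, assuming the import path and that credentials have already been configured (the CID is a placeholder):

from cbench import obj                        # import path is an assumption

r = obj.publish({'cid': 'module:my-module',   # placeholder CID: (repo:)module:data
                 'version': '1.0.0',
                 'quiet': True,               # skip interactive prompts on warnings
                 'force': False,
                 'private': False,
                 'permanent': False})
if r['return'] > 0:
    print(r.get('error', ''))

obj.versions({'cid': 'module:my-module'})     # list published versions of the component
obj.open_page({'cid': 'module:my-module'})    # open the component page in a browser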
+ else: + preloaded=False + msg='Downloading' + msg2='downloaded' + + # CID ########################################################### + cid=i.get('cid') + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + version=i.get('version') + if version==None: version='' + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + repo_uoa=r.get('repo_uoa','') + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Sending request to download + rr=comm.send({'config':cfg, + 'action':'download', + 'dict':{ + 'module_uoa':module_uoa, + 'data_uoa':data_uoa, + 'version':version, + 'tags':tags + } + }) + if rr['return']>0: + return rr + + lst=rr['components'] + + for l in lst: + + furl=l['file_url'] + fsize=l['file_size'] + + fmd5=l['file_md5'] + + muoa=l['module_uoa'] + muid=l['module_uid'] + + duoa=l['data_uoa'] + duid=l['data_uid'] + + dependencies=l.get('dependencies',[]) + + xcid=muoa+':'+duoa + + ck.out('* '+msg+' CK component "'+xcid+'" ('+str(fsize)+' bytes)') + + # Check if module exists + if not skip_module_check: + r=ck.access({'action':'find', + 'module_uoa':'module', + 'data_uoa':muoa, + 'common_func':'yes'}) + if r['return']>0: + if r['return']!=16: return r + + x='module:'+muoa + if repo_uoa!='': x=repo_uoa+':'+x + +# FGG: we should not add "version" for dependencies or related components since it's not the same! +# r=download({'cid':x, 'force':force, 'version':version, 'skip_module_check':True, 'all':al}) + + r=download({'cid':x, 'force':force, 'skip_module_check':smc, 'all':al}) + if r['return']>0: return r + + # Check if entry already exists + path='' + r=ck.access({'action':'find', + 'common_func':'yes', + 'repo_uoa':repo_uoa, +# 'module_uoa':muid, + 'module_uoa':muoa, + 'data_uoa':duoa}) + if r['return']==0: + if not force: + return {'return':8, 'error':' Already exists locally ("'+xcid+'")'} + else: + if r['return']!=16: return r + + r=ck.access({'action':'add', + 'common_func':'yes', + 'repo_uoa':repo_uoa, +# 'module_uoa':muid, + 'module_uoa':muoa, + 'data_uoa':duoa, + 'data_uid':duid, + 'ignore_update':'yes'}) + if r['return']>0: return r + + path=r['path'] + + # Prepare pack + ppz=os.path.join(path, config.PACK_FILE) + + if os.path.isfile(ppz): +# if not force: +# return {'return':1, 'error':'pack file already exists ('+ppz+')'} + os.remove(ppz) + + # Download and save pack to file + tstart=time.time() + fpack64=l.get('file_base64','') + + if fpack64!='': + rx=ck.convert_upload_string_to_file({'file_content_base64':fpack64, 'filename':ppz}) + if rx['return']>0: return rx + else: + rx=comm.download_file({'url':furl, 'file':ppz}) + if rx['return']>0: return rx + + # Save boostrap info (debug) + if sbf!='': + rx=ck.convert_file_to_upload_string({'filename':ppz}) + if rx['return']>0: return rx + l['file_base64']=rx['file_content_base64'] + + # MD5 of the pack + rx=ck.load_text_file({'text_file':ppz, 'keep_as_bin':'yes'}) + if rx['return']>0: return rx + bpack=rx['bin'] + + import hashlib + md5=hashlib.md5(bpack).hexdigest() + + if md5!=fmd5: + return {'return':1, 'error':'MD5 of the newly created pack ('+md5+') did not match the one from the portal ('+fmd5+')'} + + # Unpack to src subdirectory + import zipfile + + f=open(ppz,'rb') + z=zipfile.ZipFile(f) + for d in z.namelist(): + if d!='.' and d!='..' 
and not d.startswith('/') and not d.startswith('\\'): + pp=os.path.join(path,d) + if d.endswith('/'): + # create directory + if not os.path.exists(pp): os.makedirs(pp) + else: + ppd=os.path.dirname(pp) + if not os.path.exists(ppd): os.makedirs(ppd) + + # extract file + fo=open(pp, 'wb') + fo.write(z.read(d)) + fo.close() + + if pp.endswith('.sh') or pp.endswith('.bash'): + import stat + st=os.stat(pp) + os.chmod(pp, st.st_mode | stat.S_IEXEC) + + f.close() + + tstop=time.time()-tstart + + # Remove pack file + os.remove(ppz) + + # Note + if not preloaded: + ck.out(spaces+' Successfully '+msg2+' ('+('%.2f' % tstop)+' sec)!') # to '+path) + + # Check deps + if al: + if len(dependencies)>0: + ck.out(spaces+' Checking dependencies ...') + + for dep in dependencies: + muoa=dep.get('module_uid','') + duoa=dep.get('data_uid','') + + tags=dep.get('tags',[]) + xtags='' + if len(tags)>0: + xtags=','.join(tags) + muoa='package' + duoa='' + + cid=muoa+':'+duoa + rx=download({'cid':cid, + 'all':al, + 'tags':xtags, + 'spaces':spaces+' '}) + if rx['return']>0 and rx['return']!=8 and rx['return']!=16: return rx + if rx['return']==16: + if xtags=='': return rx + rx=download({'cid':'soft:', + 'all':al, + 'tags':xtags, + 'spaces':spaces+' '}) + if rx['return']>0 and rx['return']!=8: return rx + + return rr diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/setup.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/setup.py new file mode 100644 index 0000000000..7625c1e5fc --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/setup.py @@ -0,0 +1,190 @@ +# +# Setup client +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . import comm + +import ck.kernel as ck + +import json + +############################################################################## +# Setup cBench + +def setup(i): + + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + + # Get current configuration + cfg={} + + ii={'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): ck.err(r) + + if r['return']==0: cfg=r['dict'] + + # Check commands + + # Username ########################################################## + username=cfg.get('username','') + + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + r=ck.inp({'text':'Enter cK username: '}) + if r['return']>0: ck.err(r) + + username=r['string'].strip() + + if username==None: username='' + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + r=ck.inp({'text':'Enter your cK API key: '}) + if r['return']>0: ck.err(r) + + api_key=r['string'].strip() + + if api_key==None: api_key='' + + cfg['api_key']=api_key + + # Server URL ########################################################### + server_url=cfg.get('server_url','') + + if i.get('server_url')!=None and i.get('server_url')!='': server_url=i['server_url'] + + if server_url==None or server_url=='': server_url=config.CR_DEFAULT_SERVER_URL + + cfg['server_url']=server_url + + # Server User ########################################################### + server_user=cfg.get('server_user','') + + if i.get('server_user')!=None and i.get('server_user')!='': 
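The download() routine above mirrors publish(): it fetches a pack from the portal, verifies its MD5, unpacks it into the local repository, and can recursively pull dependencies. A minimal sketch with an assumed import path; the CID and version below are taken from the CR_SOLUTION_CK_COMPONENTS list earlier in this patch:

from cbench import obj                        # import path is an assumption

r = obj.download({'cid': 'module:program',    # (repo:)module:data, wildcards allowed
                  'version': '1.0.3',         # version pinned in CR_SOLUTION_CK_COMPONENTS
                  'force': True,              # overwrite the local copy if it already exists
                  'all': True})               # also pull dependencies reported by the portal
if r['return'] > 0 and r['return'] != 8:      # 8 means "already exists locally"
    print(r.get('error', ''))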
server_user=i['server_user'] + + if server_user!=None and server_user!='': cfg['server_user']=server_user + + # Server Pass ########################################################### + server_pass=cfg.get('server_pass','') + + if i.get('server_pass')!=None and i.get('server_pass')!='': server_pass=i['server_pass'] + + if server_pass!=None and server_pass!='': cfg['server_pass']=server_pass + + # Server Skip Certificate Validation ########################################################### + server_skip_validation=cfg.get('server_skip_validation','') + + if i.get('server_skip_validation')!=None and i.get('server_skip_validation')!='': server_skip_validation=i['server_skip_validation'] + + if server_skip_validation=='yes': cfg['server_skip_validation']=server_skip_validation + + # Save configuration + r=ck.access({'action':'update', + 'repo_uyoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA, + 'dict':cfg, + 'sort_keys':'yes'}) + if r['return']>0: ck.err(r) + + # Print (new/updated) configuration + ck.out('') + ck.out('Current cBench configuration:') + + ck.out('') + ck.out(json.dumps(cfg, indent=2, sort_keys=True)) + + return 0 + +######################################################################################## +# Test login to the cK portal + +def login(i): + + """ + Input: { + (username) [str] + (api_key) [str] + (server_url) [str] + (server_user) [str] + (server_pass) [str] + (server_skip_validation) [str] + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + cfg={} + + ii={'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): ck.err(r) + + # If not found, setup client + if r['return']==16: + setup(i) + + # Load again + cfg={} + +# ii={'action':'load', +# 'repo_uoa':config.CK_CFG_REPO_UOA, +# 'module_uoa':config.CK_CFG_MODULE_UID, +# 'data_uoa':config.CK_CFG_DATA_UOA} +# +# r=ck.access(ii) +# if r['return']>0: ck.err(r) + + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + + # Update cfg + for k in ['username', 'api_key', 'server_url', 'server_user', 'server_pass', 'server_skip_validation']: + v=i.get(k,'') + if v==None: v='' + if v!='': cfg[k]=v + + # Sending request to test connection + r=comm.send({'config':cfg, + 'action':'login' + }) + if r['return']>0: ck.err(r) + + # Success + ck.out('cK login tested successfully!') + + return 0 diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/solution.py b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/solution.py new file mode 100644 index 0000000000..4846cecb98 --- /dev/null +++ b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/solution.py @@ -0,0 +1,1865 @@ +# +# Support for portable solutions +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . import comm +from . import obj +from . 
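setup() above stores the portal credentials in a CK cfg entry and login() sends a test request with them; both can be driven from code as well as from the corresponding CLI commands. A short sketch, with an assumed import path and placeholder credentials:

from cbench import setup                  # import path is an assumption

setup.setup({'username': 'my-user',       # placeholder credentials
             'api_key': 'my-api-key',
             'server_url': ''})           # '' keeps the current/default server URL
setup.login({})                           # sends a test 'login' request with the stored config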
import graph + +import ck.kernel as ck + +import json +import zipfile +import os +import locale + +############################################################################ +# Get some parameters of a local platform + +def get_platform_desc(i): + + # Get platform info + # Check host/target OS/CPU + hos=i.get('host_os','') + tos=i.get('target_os','') + tdid=i.get('device_id','') + + # Get some info about platforms + ii={'action':'detect', + 'module_uoa':'platform.os', + 'host_os':hos, + 'target_os':tos, + 'device_id':tdid} + + if i.get('skip_info_collection','')!='': + ii['skip_info_collection']=i['skip_info_collection'] + + r=ck.access(ii) + if r['return']>0: return r + + hosd=r['host_os_dict'] + host_os_name=hosd.get('ck_name3','') + if host_os_name=='': + host_os_name=hosd.get('ck_name2','') + if host_os_name=='win': host_os_name='windows' + + if host_os_name=='': + return {'return':1, 'error':'your CK OS component is outdated! Try "ck pull repo:ck-env"'} + + # Extra info + host_desc={} + + if host_os_name=='windows': + host_desc['extra_cmd']='call ' + host_desc['venv_bin']='Scripts' + host_desc['venv_activate']='activate.bat' + host_desc['python_bin']='python.exe' + host_desc['activate_cmd']='cmd' + else: + host_desc['extra_cmd']='' + host_desc['venv_bin']='bin' + host_desc['venv_activate']='activate' + host_desc['python_bin']='python' + host_desc['activate_cmd']='bash' + + r['host_desc']=host_desc + + return r + +############################################################################ +# Initialize solution (portable workflow) +# Try to download existing one from the platform +# If doesn't exist, initialize the new one locally + +def init(i): + """ + Input: { + uid [str] - platform identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Save current directory + cur_dir=os.getcwd() + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + # Get platform info + ck.out(config.CR_LINE) + ck.out('Detecting minimal platform info ...') + + i['skip_info_collection']='yes' + rplat=get_platform_desc(i) # Pass input from init + if rplat['return']>0: return rplat + + hos=rplat['host_os_uid'] + hosx=rplat['host_os_uoa'] + hosd=rplat['host_os_dict'] + hosd_extra=rplat['host_desc'] + + hplat=hosd['ck_name'] + + tos=rplat['os_uid'] + tosx=rplat['os_uoa'] + tosd=rplat['os_dict'] + + tdid=rplat.get('device_id','') + + resume=i.get('resume') + if resume==None: resume=False + + # Get solution UID + uid=i['uid'] + if uid==None: + r=ck.gen_uid({}) + if r['return']>0: return r + uid=r['data_uid'] + + # Check if entry already exists + ii={'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid} + r=ck.access(ii) + if r['return']==0: + p=r['path'] + px=os.path.join(p, '.cm', 'meta.json') + + dd=r['dict'] + + ck.out(config.CR_LINE) + ck.out("Preloaded solution meta from "+px) + else: + if r['return']!=16: return r + + ck.out(config.CR_LINE) + r=ck.out('Solution "'+uid+'" is not found locally. Attempting to download from the portal ...') + + dd = {} + + r=obj.download({'cid':'local:solution:'+uid}) + if r['return']>0: + if r['return']!=16: return r + + ck.out('') + r=ck.inp({'text':'Warning: solution was not found on the portal. 
Do you want to initialize the new one (Y/n): '}) + if r['return']>0: return r + + x=r['string'].strip() + if x=='n' or x=='N': + return {'return':16, 'error':'Solution was not found on the portal'} + + else: + ii={'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid} + r=ck.access(ii) + if r['return']>0: return r + + p=r['path'] + px=os.path.join(p, '.cm', 'meta.json') + + dd=r['dict'] + + ck.out(config.CR_LINE) + ck.out("Preloaded solution meta from "+px) + + # Get extra vars + workflow=i.get('workflow','') + if workflow=='': workflow=dd.get('workflow','') + + workflow_repo_url=i.get('workflow_repo_url','') + if workflow_repo_url=='': workflow_repo_url=dd.get('workflow_repo_url','') + + workflow_cmd=i.get('workflow_cmd','') + if workflow_cmd=='': workflow_cmd=dd.get('workflow_cmd','') + + workflow_cmd_before=i.get('workflow_cmd_before','') + if workflow_cmd_before=='': workflow_cmd_before=dd.get('workflow_cmd_before','') + + workflow_cmd_after=i.get('workflow_cmd_after','') + if workflow_cmd_after=='': workflow_cmd_after=dd.get('workflow_cmd_after','') + + workflow_cmd_extra=i.get('workflow_cmd_extra','') + if workflow_cmd_extra=='': workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input=i.get('workflow_input','') + if workflow_input=='': workflow_input=dd.get('workflow_input','') + + workflow_input_dir=i.get('workflow_input_dir','') + if workflow_input_dir=='': workflow_input_dir=dd.get('workflow_input_dir','') + + workflow_output_dir=i.get('workflow_output_dir','') + if workflow_output_dir=='': workflow_output_dir=dd.get('workflow_output_dir','') + + python_version=i.get('python_version','') + # if python_version!='': + # i['python_version_from']=python_version + # i['python_version_to']=python_version + + python_version_from=i.get('python_version_from','') + if python_version_from=='': python_version_from=dd.get('python_version_from','') + if python_version_from==' ': python_version_from='' + + python_version_to=i.get('python_version_to','') + if python_version_to=='': python_version_to=dd.get('python_version_to','') + if python_version_to==' ': python_version_to='' + + # Check graphs + graphs=i.get('graphs','') + if graphs=='': + graphs=dd.get('graphs',[]) + else: + graphs=graphs.split(',') + i['graphs']=graphs + + tos=i.get('target_os','') + if tos=='': tos=dd.get('target_os','') + + # Update meta and create entry for a solution + name=i.get('name','') + tags=i.get('tags','') + + for k in ['host_os', 'target_os', 'device_id', 'hostname', + 'workflow', 'workflow_repo_url', + 'workflow_cmd_before', 'workflow_cmd_after', + 'workflow_cmd', 'workflow_cmd_extra', 'workflow_input', + 'workflow_input_dir', 'workflow_output_dir', 'result_file', + 'python_version', 'python_version_from', 'python_version_to', + 'graphs']: + v=i.get(k) + if v!=None and v!='': + dd[k]=v + + # dd['detected_platform_info']=rplat + + dd['tags']=["solution"] + + from . 
import __version__ + dd['client_version']=__version__ + + # Check if extra meta + add_extra_meta_from_file=i.get('add_extra_meta_from_file','') + if add_extra_meta_from_file!='': + r=ck.load_json_file({'json_file':add_extra_meta_from_file}) + if r['return']>0: return r + dd.update(r['dict']) + + # Add/update CK entry for the solution + update_dict={'action':'update', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'dict':dd, + 'sort_keys':'yes'} + if name!='': update_dict['data_name']=name + if tags!='': + dd['tags']+=tags.split(',') + + r=ck.access(update_dict) + if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + + ck.out(config.CR_LINE) + ck.out('Path to the solution: '+p) + + ############################################################## + # Process graph description + desc_graph=i.get('desc_graph','') + if desc_graph!='': + ############################################################## + # Graphs + ck.out(config.CR_LINE) + ck.out('Initializing graphs:') + + if not os.path.isfile(desc_graph): + return {'return':1, 'error':'can\'t find file "'+desc_graph+'"'} + + r=ck.load_json_file({'json_file':desc_graph}) + if r['return']>0: return r + + d=r['dict'] + + pdesc=os.path.join(p, 'graph-desc.json') + + r=ck.save_json_to_file({'json_file':pdesc, 'dict':d, 'sort_keys':'yes'}) + if r['return']>0: return r + +# Decided to add all graphs explicitly! +# if solution_uoa not in graphs: +# graphs.append(solution_uoa) + + sgi=i.get('skip_graph_init') + if sgi!=None and not sgi: + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + ck.out('') + + r=graph.init({'uid':gr, 'version':'1.0.0', 'desc_file':desc_graph}) + if r['return']>0: return r + + ############################################################## + # Process graph convertor + graph_convertor=i.get('graph_convertor','') + if graph_convertor!='': + ############################################################## + # Graphs + ck.out(config.CR_LINE) + ck.out('Processing graph convertor:') + + if not os.path.isfile(graph_convertor): + return {'return':1, 'error':'can\'t find file "'+graph_convertor+'"'} + + r=ck.load_json_file({'json_file':graph_convertor}) + if r['return']>0: return r + + d=r['dict'] + + pconv=os.path.join(p, 'graph-convertor.json') + + r=ck.save_json_to_file({'json_file':pconv, 'dict':d, 'sort_keys':'yes'}) + if r['return']>0: return r + + ############################################################## + # Init virtual environment + ck.out(config.CR_LINE) + ck.out('Setting (virtual) environment...') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + python_path=i['python_path'] + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'encoding':encoding, + 'output_to_console':'yes'} + + if resume: + if i['python_localenv'] is True: + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + else: + if python_path=='': + # Searching for python + ck.out('') + ck.out(' Searching for the python installation') + + if python_version_from!='' and python_version_from!=' ': + ck.out(' Version must be >= '+python_version_from+' (change with --python_version_from="version")') + if python_version_from!='' and python_version_from!=' ': + ck.out(' Version must 
be <= '+python_version_to+' (change with --python_version_to="version")') + + r=ck.access({'action':'detect', + 'module_uoa':'soft', + 'data_uoa':'compiler.python', + 'version_from':python_version_from, + 'version_to':python_version_to, + 'out':'con'}) + if r['return']>0: return r + + r=ck.access({'action':'load', + 'module_uoa':'env', + 'data_uoa':r['env_data_uid']}) + if r['return']>0: return r + python_path=r['dict']['env']['CK_ENV_COMPILER_PYTHON_FILE'] + + ck.out(config.CR_LINE) + + cmd=cmd0 + if i['python_localenv'] is True: + i_env=ck.inp({'text':'Do you want to create a new virtual environment (Y/n): '}) + if i_env['return']>0: return i_env + + x_env=i_env['string'].strip() + + if x_env=='n' or x_env=='N': + i['python_localenv'] = False + else: + ck.out('creating virtual env') + cmd+='virtualenv --python='+python_path+' venv\n' + + ii['cmd']=cmd + + print (config.CR_LINE) + print ('Running the following commands to install the virtual env:') + print ('') + print (cmd) + print (config.CR_LINE) + + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Install CK + ck.out(config.CR_LINE) + ck.out('Installing CK ...') + + if i['python_localenv'] is True: + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='pip install ck\n' + cmd+='\n' + cmd+=hosd_extra['extra_cmd']+'ck\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Initializing CB config ... + ck.out(config.CR_LINE) + ck.out('Initializing cBench client for this solution ...') + + if pcfg!='' and os.path.isdir(pcfg): + pcfg2=os.path.join(pcfg, '.cm', 'meta.json') + if os.path.isfile(pcfg2): + rx=ck.gen_tmp_file({'prefix':'ck-tmp-', 'suffix':'.json'}) + if rx['return']>0: return rx + + pfn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':pfn, 'dict':{'dict':cfg}}) + if rx['return']>0: return rx + + # Update CB cfg of the solution + cmd=cmd0 + cmd+='ck update cfg:'+config.CK_CFG_DATA_UOA+' @'+pfn+'\n' + + ck.out('') + ck.out(cmd) + + ii['cmd']=cmd + r=ck.access(ii) + + if os.path.isfile(pfn): + os.remove(pfn) + + if r['return']>0: return r + +# ############################################################## +# # Downloading CK components +# ck.out(config.CR_LINE) +# ck.out('Downloading CK components from the portal ...') +# ck.out('') +# +# ck_components=config.CR_SOLUTION_CK_COMPONENTS +# +# cmd=cmd0 +# +# for x in ck_components: +# cmd+='\n' +# cmd+='cb download '+x['cid'] +# if x.get('version','')!='': +# cmd+=' --version='+x['version'] +# cmd+=' --force\n' +# if hplat=='linux': +# cmd+='if [[ $? != 0 ]]; then exit 1 ; fi\n' +# +# ii['cmd']=cmd +# +# r=ck.access(ii) +# if r['return']>0: return r +# rc=r['return_code'] +# if rc>0: +# return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Downloading CK components + ck.out(config.CR_LINE) + ck.out('Extra bootstrap of stable CK components for this solution ...') + ck.out('') + + ck_components=config.CR_SOLUTION_CK_COMPONENTS + + cmd=cmd0 + + cmd+='\n' + cmd+='cb update --force\n' + if hplat=='linux': + cmd+='if [[ $? 
!= 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + + r=ck.access(ii) + if r['return']>0: return r + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Install ck-env repo and detect python + ck.out(config.CR_LINE) + ck.out('Installing ck-env repo and detecting compiler ...') + + cmd=cmd0 + cmd+=hosd_extra['extra_cmd']+'ck set kernel var.install_to_env=yes\n' + # Now downloading from the portal + # cmd+=hosd_extra['extra_cmd']+'ck pull repo:ck-env\n' + cmd+=hosd_extra['extra_cmd']+'ck detect soft:compiler.python --quiet --full_path='+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['python_bin'])+hosd['env_quotes_if_space']+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Pull workflow repo + if workflow_repo_url==None or workflow_repo_url=='': + return {'return':1, 'error':'workflow_repo_url is not defined'} + + if workflow_repo_url!='local': + ck.out(config.CR_LINE) + ck.out('Installing workflow repo ...') + + cmd=cmd0 + cmd+=hosd_extra['extra_cmd']+'ck pull repo --url='+workflow_repo_url+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Copy extra scripts if needed + es=i.get('add_extra_scripts','') + if es!='': + ck.out(config.CR_LINE) + ck.out('Copying extra scripts ...') + + import glob + import shutil + + ck.out('') + for fl in glob.glob(es): + ck.out(' * '+fl) + shutil.copy(fl, p) + + ############################################################## + # Describe workflow preparation steps + desc_prereq=i.get('desc_prereq','') + prereq_workflow=dd.get('prereq_workflow',[]) + if desc_prereq!='': + if not os.path.isfile(desc_prereq): + return {'return':1, 'error':'can\'t find file "'+desc_prereq+'"'} + + r=ck.load_text_file({'text_file':desc_prereq, 'split_to_list':'yes'}) + if r['return']>0: return r + + prereq_workflow=r['lst'] + + ck.out('') + ck.out('') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('Prequisite steps:') + + ck.out('') + for s in prereq_workflow: + ck.out(' '+s) + + dd['prereq_workflow']=prereq_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + if not i.get('skip_stop',False): + ck.out('') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('We start virtual env to let you install above deps!') + ck.out('Enter "exit" to continue solution preparation:') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('') + ck.out('') + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + cmd+=hosd_extra['activate_cmd']+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform info ...') + + pinfo=os.path.join(p, 'platform-info.json') + if os.path.isfile(pinfo): os.remove(pinfo) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may 
require specific files (adb) + s='ck detect platform' + if i.get('target_os','')!='': s+=' --target_os='+i['target_os'] + if tdid!='': s+=' --device_id='+tdid + s+=' --out=json_file --out_file='+pinfo + + cmd+=s+'\n' + + ii['cmd']=cmd + print (cmd) + r=ck.access(ii) + if r['return']>0: return r + + if not os.path.isfile(pinfo): + return {'return':1, 'error':'platform info file was not created'} + + # # Get some info about platforms + # ii={'action':'detect', + # 'module_uoa':'platform', + # 'host_os':hos, + # 'target_os':tos, + # 'device_id':tdid} + # r=ck.access(ii) + # if r['return']>0: return r + # + # rx=ck.save_json_to_file({'json_file':pinfo, 'dict':r, 'sort_keys':'yes'}) + # if rx['return']>0: return rx + # + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform host OS info ...') + + pinfo2=os.path.join(p, 'platform-host-os-info.json') + if os.path.isfile(pinfo2): os.remove(pinfo2) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + s='ck detect platform.os' + s+=' --out=json_file --out_file='+pinfo2 + + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + if not os.path.isfile(pinfo2): + return {'return':1, 'error':'platform info file was not created'} + + ############################################################## + if i.get('update_meta_and_stop','')==True: + ck.out(config.CR_LINE) + ck.out('Skipping the rest by user request') + return {'return':0} + + ############################################################## + # Describe workflow preparation steps + ck.out(config.CR_LINE) + ck.out('Preparation steps:') + ck.out('') + + desc_prepare=i.get('desc_prepare','') + prepare_workflow=dd.get('prepare_workflow',[]) + if desc_prepare!='': + if not os.path.isfile(desc_prepare): + return {'return':1, 'error':'can\'t find file "'+desc_prepare+'"'} + + r=ck.load_text_file({'text_file':desc_prepare, 'split_to_list':'yes'}) + if r['return']>0: return r + + prepare_workflow=r['lst'] + + for s in prepare_workflow: + ck.out(' '+s) + + dd['prepare_workflow']=prepare_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + for s in prepare_workflow: + if s=='': + ck.out('') + continue + + ck.out(config.CR_LINE) + ck.out(s) + ck.out('') + + cmd=cmd0 + cmd+=s+'\n' + if hplat=='linux': + cmd+='if [[ $? != 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Check dependencies + ck.out(config.CR_LINE) + ck.out('Checking and recording workflow dependencies') + + pdeps=os.path.join(p, 'resolved-deps.json') + + s='' + + if workflow_cmd_before!='': s+=workflow_cmd_before+'\n' + + s+=hosd_extra['extra_cmd']+'ck run '+workflow+' --cmd_key='+workflow_cmd+' '+workflow_cmd_extra+' --record_deps="'+pdeps+'" --skip_exec' + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + + s+='\n' + +# Here we do not need post-processing (often fail) +# if workflow_cmd_after!='': s+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + + ck.out('') + cmd=cmd0 + cmd+=s+'\n' + + if hplat=='linux': + cmd+='if [[ $? 
!= 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Describe workflow run steps + ck.out(config.CR_LINE) + ck.out('Run steps:') + ck.out('') + + desc_run=i.get('desc_run','') + run_workflow=dd.get('run_workflow',[]) + if desc_run!='': + if not os.path.isfile(desc_run): + return {'return':1, 'error':'can\'t find file "'+desc_run+'"'} + + r=ck.load_text_file({'text_file':desc_run, 'split_to_list':'yes'}) + if r['return']>0: return r + + run_workflow=r['lst'] + + for s in run_workflow: + ck.out(' '+s) + + dd['run_workflow']=run_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + for s in run_workflow: + if s=='': + ck.out('') + continue + + ck.out(config.CR_LINE) + ck.out('Command: '+s) + ck.out('') + + cmd=cmd0 + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Check dependencies + ck.out(config.CR_LINE) + ck.out('Solution was successfully prepared!') + + ck.out('') + ck.out('You can crowd-benchmark this solution (if supported) as follows:') + ck.out('cb benchmark '+uid) + + ck.out('') + ck.out('You can run this solution locally as follows:') + ck.out('cb run '+uid) + + ck.out('') + ck.out('You can activate virtual env for this solution to debug/improve it as follows:') + ck.out('cb activate '+uid) + + return {'return':0} + +############################################################################ +# Activate virtual environment for a solution + +def activate(i): + """ + Input: { + uid [str] - portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + p=r['path'] + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Activate solution: '+p) + ck.out('') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + cmd+=hosd_extra['activate_cmd']+'\n' + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + + r=ck.access(ii) + if r['return']>0: return r + + return {'return':0} + +############################################################################ +# Run prepared solution + +def run(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + 
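All of the solution actions in this module (such as `run` above) follow CK's dict-in/dict-out convention: each action takes a single input dictionary and returns a dictionary whose `return` key is 0 on success or a positive error code accompanied by an `error` string, which is why every call site checks `if r['return']>0: return r`. A minimal caller-side sketch of that convention is below; the `call_action` helper is hypothetical and not part of the CK API.

```python
# Minimal sketch (illustration only) of the dict-in/dict-out convention
# used by the actions in this module: one input dict in, one result dict
# out, with 'return' == 0 on success or > 0 plus an 'error' string.
# `call_action` is a hypothetical helper, not part of the CK API.

def call_action(action, **kwargs):
    """Invoke a dict-in/dict-out action and raise on a non-zero return code."""
    r = action(kwargs)
    if r.get('return', 0) > 0:
        raise RuntimeError(r.get('error', 'unknown error'))
    return r

# Usage (hypothetical UID of an already prepared solution):
# call_action(run, uid='my-solution')
```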
+ # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + tdid=r.get('device_id','') + + xcmd=i.get('cmd','') + if xcmd==None: xcmd='' + xcmd=xcmd.strip() + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... + # host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd_before=dd.get('workflow_cmd_before','') + workflow_cmd_after=dd.get('workflow_cmd_after','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Run solution: '+p) + ck.out('') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + if workflow_cmd_before!='': cmd+=workflow_cmd_before+'\n' + + if xcmd!='': + s=xcmd + else: + s=hosd_extra['extra_cmd']+'ck run '+workflow+' --cmd_key='+workflow_cmd + + if workflow_cmd_extra!='': + s+=' '+workflow_cmd_extra + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + + cmd+=s+'\n' + + if workflow_cmd_after!='': cmd+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + r=ck.access(ii) + if r['return']>0: return r + + return r + + +############################################################################ +# Benchmark prepared solution + +def benchmark(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + import datetime + import time + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + sdate= datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') + t = time.time() + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + tdid=r.get('device_id','') + + xcmd=i.get('cmd','') + if xcmd==None: xcmd='' + xcmd=xcmd.strip() + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) 
+ if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... + # host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd_before=dd.get('workflow_cmd_before','') + workflow_cmd_after=dd.get('workflow_cmd_after','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + result_file=dd.get('result_file','') + + graphs=dd.get('graphs',[]) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Find path to output file '+result_file+' ...') + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='ck find '+workflow+'\n' + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding} + r=ck.access(ii) + if r['return']>0: + status=-1 + return r + + path_result=r['stdout'].strip() + path_result_file=os.path.join(path_result, result_file) + + ck.out(' Found path: '+path_result_file) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform info ...') + + pinfo=os.path.join(p, 'platform-info.json') + if os.path.isfile(pinfo): os.remove(pinfo) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + s='ck detect platform' + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + s+=' --out=json_file --out_file='+pinfo + + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + if not os.path.isfile(pinfo): + return {'return':1, 'error':'platform info file was not created'} + + # Get some sub-info about deps and platforms + dinfo={} + if os.path.isfile(pinfo): + r=ck.load_json_file({'json_file':pinfo}) + if r['return']==0: + dinfo=r['dict'].get('features',{}) + for k in ['cpu_misc', 'cpu']: + if k in dinfo: del(dinfo[k]) + + pdeps=os.path.join(p, 'resolved-deps.json') + ddeps={} + if os.path.isfile(pdeps): + r=ck.load_json_file({'json_file':pdeps}) + if r['return']==0: + ddeps2=r['dict'] + + r=deps_summary({'deps':ddeps2}) + if r['return']==0: + ddeps=r['deps_summary'] + + ############################################################## + # status management + + path_tmpSol=os.path.join(p, "tmp") + tmp_solStatus=os.path.join(path_tmpSol, "status.json") + + status = 0 + if not os.path.isdir(path_tmpSol): + os.mkdir(path_tmpSol) + + rdf_st={} + rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':{'status': 0}}) + if rx['return']>0: return rx + else: + rdf_st=rx['dict'] + status = rdf_st.get('status','') + + run=True + if status == 1: + run=False + elif status == 2: + # To be done try to push the result to server + status=1 + + rdf_st['status'] = 1 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: 
return rx + + if os.path.isfile(path_result_file): + ck.out(' Cleaning output ...') + os.remove(path_result_file) + + ############################################################## + rr={'return':0} + if run is True: + + ck.out(config.CR_LINE) + ck.out('Run solution: '+p) + ck.out('') + + cmd=cmd0 + + if workflow_cmd_before!='': cmd+=workflow_cmd_before+'\n' + + if xcmd!='': + s=xcmd + else: + s=hosd_extra['extra_cmd']+'ck benchmark '+workflow+' --cmd_key='+workflow_cmd + + if workflow_cmd_extra!='': + s+=' '+workflow_cmd_extra + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + ck.out(config.CR_LINE) + ck.out('Command: '+s) + ck.out('') + + cmd+=s+'\n' + + if workflow_cmd_after!='': cmd+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + ck.out('') + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + rr=ck.access(ii) + + if r['return']>0: + rdf_st['status'] = -1 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + return r + else : + rdf_st['status'] = 2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + elapsed = time.time() - t + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Reading output: '+path_result_file) + ck.out('') + + if not os.path.isfile(path_result_file): + ck.out(' Error: output file not found!') + rdf_st['status'] = -2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + else: + rx=ck.load_json_file({'json_file':path_result_file}) + if rx['return']>0: return rx + + rd=rx['dict'] + + # Add solution info + rd['solution_uoa']=solution_uoa + rd['solution_uid']=solution_uid + rd['solution_run_date']=sdate + rd['solution_duration']=elapsed + + sworkflow=workflow.split(':') + if len(sworkflow)>1: + rd['program_workflow_uoa']=sworkflow[1] + + from . 
import __version__ + rd['client_version']=__version__ + + rx=ck.flatten_dict({'dict':rd}) + if rx['return']>0: return rx + + rdf=rx['dict'] + crdf={} + + crdf['platform_info']=dinfo + crdf['resolved_deps']=ddeps + + # Remove first ## (do not need here) + for k in rdf: + v=rdf[k] + if k.startswith('##'): k=k[2:] + crdf[k]=v + + # Get some sub-info about deps and platforms + if os.path.isfile(pinfo): + r=ck.load_json_file({'json_file':pinfo}) + if r['return']==0: + dx=r['dict'] + + pdeps=os.path.join(p, 'resolved-deps.json') + if os.path.isfile(pdeps): + rx=ck.load_json_file({'json_file':pdeps}) + if rx['return']==0: + dx=rx['dict'] + + ck.out(json.dumps(crdf, indent=2)) + + #over write the file + rx=ck.save_json_to_file({'json_file':path_result_file, 'dict':crdf}) + if rx['return']>0: return rx + + ################################################################ + if len(graphs)>0: + ck.out(config.CR_LINE) + ck.out('Pushing results to graphs...') + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':crdf}) + if rx['return']>0: return rx + + if solution_uoa not in graphs: + graphs.append(solution_uoa) + + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + if 'graphs' not in rr: rr['graphs']=[] + rr['graphs'].append(rx) + + rdf_st['status'] = 3 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + ################################################################ + pconv=os.path.join(p, 'graph-convertor.json') + if os.path.isfile(pconv): + rx=ck.load_json_file({'json_file':pconv}) + if rx['return']==0: + ck.out(config.CR_LINE) + ck.out('Converting data for extra graphs ...') + + dconv=rx['dict'] + + for eg in dconv: + gr=eg['graph_id'] + + ck.out('') + ck.out(' * Graph: '+gr) + + keys=eg['keys'] + + cdata={} + + for k in keys: + ok=k.get('out_key','') + if ok=='': ok=k['key1'] + + kk=[k.get('key1',''), k.get('key2',''), k.get('key3',''), k.get('key4','')] + + vv='' + v=k.get('value','') + if v!='' and v!=None: + vv=v + + first=True + for kx in kk: + if kx!='': + if kx.startswith('##'): + ry=ck.get_by_flat_key({'dict':crdf, 'key':kx}) + if ry['return']>0: return ry + v=ry['value'] + else: + v=crdf.get(kx) + + vm=k.get('multiply',0) + if vm!=0 and vm!='' and vm!=None and (type(v)==float or type(v)==int): + v=v*vm + + if v!='' and v!=None: + if first: + first=False + if type(v)==float or type(v)==int: + vv=0 + else: + vv+=', ' + + # Check if list or dict + if type(v)==list or type(v)==dict: + vv=v + else: + vv+=v + + if vv!='': + cdata[ok]=vv + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':cdata}) + if rx['return']>0: return rx + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + if 'graphs' not in rr: rr['graphs']=[] + rr['graphs'].append(rx) + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + return rr + +############################################################################ +# List local solutions + +def ls(i): + + """ + Input: { + (uid) [str] - portal identifier of the solution (can have wiledcards) + } + + Output: { + return [int] - return 
code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Create entry + uid=i['uid'] + if uid==None: uid='' + + r=ck.access({'action':'ls', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'all':'yes', + 'out':'con'}) + return r + +############################################################################ +# Find solution + +def find(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Find entry + uid=i['uid'] + + r=ck.access({'action':'find', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'out':'con'}) + return r + +############################################################################ +# Delete solution + +def rm(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Delete entry + uid=i['uid'] + + r=ck.access({'action':'rm', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'out':'con'}) + return r + +############################################################################ +# Get solution directory + +def get_solution_dir(i): + uid=i['uid'] + + # Get work dir + r=config.get_work_dir({}) + if r['return']>0: return r + + work_dir=r['path'] + + # Get solutions dir + solutions_dir=os.path.join(work_dir, config.CR_SOLUTIONS_DIR) + if not os.path.isdir(solutions_dir): + os.makedirs(solutions_dir) + + # Get the solution dir + solution_dir=os.path.join(solutions_dir, uid) + if not os.path.isdir(solution_dir): + os.makedirs(solution_dir) + + return {'return':0, 'solutions_dir':solutions_dir, 'solution_dir':solution_dir} + +############################################################################## +# extracting summary of all deps + +def deps_summary(i): + """ + Input: { + deps - resolved deps + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + + deps_summary - summary of deps + } + + """ + + deps=i['deps'] + ds=i.get('deps_summary',{}) + + for x in deps: + d=deps[x] + dd=d.get('dict',{}) + + ds[x]={} + + cx=dd.get('customize',{}) + + ds[x]['tags']=d.get('tags',[]) + ds[x]['name']=d.get('name','') + + ds[x]['package_tags']=','.join(dd.get('tags',[])) + ds[x]['data_name']=dd.get('data_name','') + + puoa=dd.get('package_uoa','') + if puoa=='': + puoa=d.get('cus',{}).get('used_package_uid','') + ds[x]['package_uoa']=puoa + + ds[x]['version']=cx.get('version','') + ds[x]['git_revision']=cx.get('git_info',{}).get('revision','') + ds[x]['git_iso_datetime_cut_revision']=cx.get('git_info',{}).get('iso_datetime_cut_revision','') + + sdeps=dd.get('deps',{}) + if len(sdeps)>0: + # Recursion + r=deps_summary({'deps':sdeps}) + if r['return']>0: return r + ds[x]['deps']=r['deps_summary'] + + return {'return':0, 'deps_summary':ds} + +############################################################################## +# publish result + +def publish_result(i): + + """ + Input: { + uid [str] - portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + xcmd=i.get('cmd','') + if 
xcmd==None: xcmd='' + xcmd=xcmd.strip() + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... + host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + result_file=dd.get('result_file','') + + graphs=dd.get('graphs',[]) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Find path to output file '+result_file+' ...') + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='ck find '+workflow+'\n' + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding} + r=ck.access(ii) + if r['return']>0: + status=-1 + return r + + path_result=r['stdout'].strip() + path_result_file=os.path.join(path_result, result_file) + + ck.out(' Found path: '+path_result_file) + + # ############################################################## + # ck.out(config.CR_LINE) + # ck.out('Detecting complete platform info ...') + + # pinfo=os.path.join(p, 'platform-info.json') + # if os.path.isfile(pinfo): os.remove(pinfo) + + # cmd=cmd0 + + # # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + # s='ck detect platform' + # if i.get('target_os','')!='': s+=' --target_os='+i['target_os'] + # if tdid!='': s+=' --device_id='+tdid + # s+=' --out=json_file --out_file='+pinfo + + # cmd+=s+'\n' + + # ii['cmd']=cmd + # r=ck.access(ii) + # if r['return']>0: return r + + # if not os.path.isfile(pinfo): + # return {'return':1, 'error':'platform info file was not created'} + + # # Get some sub-info about deps and platforms + # dinfo={} + # if os.path.isfile(pinfo): + # r=ck.load_json_file({'json_file':pinfo}) + # if r['return']==0: + # dinfo=r['dict'].get('features',{}) + # for k in ['cpu_misc', 'cpu']: + # if k in dinfo: del(dinfo[k]) + + # pdeps=os.path.join(p, 'resolved-deps.json') + # ddeps={} + # if os.path.isfile(pdeps): + # r=ck.load_json_file({'json_file':pdeps}) + # if r['return']==0: + # ddeps2=r['dict'] + + # r=deps_summary({'deps':ddeps2}) + # if r['return']==0: + # ddeps=r['deps_summary'] + + ############################################################## + # status management + + path_tmpSol=os.path.join(p, "tmp") + tmp_solStatus=os.path.join(path_tmpSol, "status.json") + + status = 0 + if not os.path.isdir(path_tmpSol): + os.mkdir(path_tmpSol) + + rdf_st={} + 
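The `benchmark` and `publish_result` actions coordinate through a small per-solution state file, `tmp/status.json`, inside the solution directory. A hedged sketch of that lifecycle follows; the helper functions below and the meaning of the numeric codes are inferred from the code in this module rather than a documented API.

```python
# Sketch of the per-solution status file used by `benchmark` and
# `publish_result` (tmp/status.json inside the solution directory).
# Codes as inferred from this module:
#    0 = not run yet          1 = run in progress
#    2 = run finished, results not yet published
#    3 = results pushed to the dashboards/graphs
#   -1 = run failed          -2 = run produced no output file
import json
import os

def read_status(solution_dir):
    """Return the recorded status, or 0 if the file does not exist yet."""
    path = os.path.join(solution_dir, 'tmp', 'status.json')
    if not os.path.isfile(path):
        return 0
    with open(path) as f:
        return json.load(f).get('status', 0)

def write_status(solution_dir, status):
    """Record a new status code, creating tmp/ if needed."""
    tmp = os.path.join(solution_dir, 'tmp')
    os.makedirs(tmp, exist_ok=True)
    with open(os.path.join(tmp, 'status.json'), 'w') as f:
        json.dump({'status': status}, f)
```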
rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':{'status': 0}}) + return rx + + rdf_st=rx['dict'] + status = rdf_st.get('status','') + + if status == 2: + ############################################################## + ck.out(config.CR_LINE) + ck.out('Reading output: '+path_result_file) + ck.out('') + + if not os.path.isfile(path_result_file): + ck.out(' Error: output file not found!') + rdf_st['status'] = -2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + else: + rx=ck.load_json_file({'json_file':path_result_file}) + if rx['return']>0: return rx + + crdf=rx['dict'] + ################################################################ + if len(graphs)>0: + ck.out(config.CR_LINE) + ck.out('Pushing results to graphs...') + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':crdf}) + if rx['return']>0: return rx + + if solution_uoa not in graphs: + graphs.append(solution_uoa) + + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + rdf_st['status'] = 3 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + ################################################################ + pconv=os.path.join(p, 'graph-convertor.json') + if os.path.isfile(pconv): + rx=ck.load_json_file({'json_file':pconv}) + if rx['return']==0: + ck.out(config.CR_LINE) + ck.out('Converting data for extra graphs ...') + + dconv=rx['dict'] + + for eg in dconv: + gr=eg['graph_id'] + + ck.out('') + ck.out(' * Graph: '+gr) + + keys=eg['keys'] + + cdata={} + + for k in keys: + ok=k['out_key'] + + kk=[k.get('key1',''), k.get('key2',''), k.get('key3',''), k.get('key4','')] + + vv='' + v=k.get('value','') + if v!='' and v!=None: + vv=v + + first=True + for kx in kk: + if kx!='': + if kx.startswith('##'): + ry=ck.get_by_flat_key({'dict':crdf, 'key':kx}) + if ry['return']>0: return ry + v=ry['value'] + else: + v=crdf.get(kx) + + vm=k.get('multiply',0) + if vm!=0 and vm!='' and vm!=None and (type(v)==float or type(v)==int): + v=v*vm + + if v!='' and v!=None: + if first: + first=False + if type(v)==float or type(v)==int: + vv=0 + else: + vv+=', ' + vv+=v + + if vv!='': + cdata[ok]=vv + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':cdata}) + if rx['return']>0: return rx + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + return r + return {'return':0} diff --git a/ck/repo/module/ck-platform/ck_032630d041b4fd8a/static/favicon.ico b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/static/favicon.ico new file mode 100644 index 0000000000..de17981bf5 Binary files /dev/null and b/ck/repo/module/ck-platform/ck_032630d041b4fd8a/static/favicon.ico differ diff --git a/ck/repo/module/ck-platform/module.py b/ck/repo/module/ck-platform/module.py new file mode 100644 index 0000000000..b5ee643e75 --- /dev/null +++ b/ck/repo/module/ck-platform/module.py @@ -0,0 +1,196 @@ +# +# Collective Knowledge (cKnowledge.io platform) +# +# See 
CK LICENSE.txt for licensing details +# See CK COPYRIGHT.txt for copyright details +## +# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net +# + +cfg = {} # Will be updated by CK (meta description of this module) +work = {} # Will be updated by CK (temporal data) +ck = None # Will be updated by CK (initialized CK kernel) + +# Local settings + +import sys +import os +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +############################################################################## +# Initialize module + +def init(i): + """ + + Input: {} + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + return {'return': 0} + +############################################################################## +# setup platform + +def setup(i): + """ + Input: { + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + from ck_032630d041b4fd8a import setup as xsetup + + xsetup.setup(i) + + return {'return':0} + +############################################################################## +# login to platform + +def login(i): + """ + Input: { + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import setup as xsetup + + xsetup.login(i) + + return {'return':0} + +############################################################################## +# publish CK component + +def publish(i): + """ + Input: { + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import obj as xobj + + i['cid']=i['ckid'] + + return xobj.publish(i) + +############################################################################## +# list versions of a CK component + +def versions(i): + """ + Input: { + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import obj as xobj + + i['cid']=i['ckid'] + + return xobj.versions(i) + +############################################################################## +# init graph + +def init_graph(i): + """ + Input: { + uid [str] - graph identifyer + (version) [str] - graph version + (desc_file) [str] - file with graph description + (tags) [str] - tags separated by comma + (name) + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import graph as xgraph + + return xgraph.init(i) + +############################################################################## +# push result + +def push_result(i): + """ + Input: { + uid + version + filename + json_string + point + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import graph as xgraph + + return xgraph.push(i) + +############################################################################## +# access platform + +def access(i): + """ + Input: { + filename, + json_string + display + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + from ck_032630d041b4fd8a import comm as xcomm + + return xcomm.access(i) diff --git a/ck/repo/module/ck-platform/tests/init-graph.bat 
b/ck/repo/module/ck-platform/tests/init-graph.bat new file mode 100644 index 0000000000..90265543bc --- /dev/null +++ b/ck/repo/module/ck-platform/tests/init-graph.bat @@ -0,0 +1 @@ +ck init_graph ck-platform --uid=fgg-test --desc_file=init-graph.json --tags=xyz,fgg,test --name="FGG test" diff --git a/ck/repo/module/ck-platform/tests/init-graph.json b/ck/repo/module/ck-platform/tests/init-graph.json new file mode 100644 index 0000000000..cd637e7c26 --- /dev/null +++ b/ck/repo/module/ck-platform/tests/init-graph.json @@ -0,0 +1,37 @@ +{ + "meta":{ + "info":"fgg test" + }, + "default_key_x": "avg_time_ms", + "default_key_y": "mAP", + "default_sort_key": "avg_time_ms", + "table_view": [ + {"key": "platform_info", "name": "Platform info", "json_and_pre": "yes", "skip_pre": "yes"}, + {"key": "resolved_deps", "name": "Resolved deps", "json_and_pre": "yes", "skip_pre": "yes"}, + {"key": "avg_fps", "type":"float", "format": "%.2f", "name": "Average FPS"}, + {"key": "avg_time_ms", "type":"float", "format": "%.2f", "name": "Average time (ms.)"}, + {"key": "detection_time_avg_s", "type":"float", "format": "%.2f", "name": "Detection time (average, sec.)"}, + {"key": "detection_time_total_s", "type":"float", "format": "%.2f", "name": "Detection time (total, sec.)"}, + {"key": "graph_load_time_s", "type":"float", "format": "%.2f", "name": "Graph load time (sec.)"}, + {"key": "images_load_time_avg_s", "type":"float", "format": "%.2f", "name": "Images load time (average, sec.)"}, + {"key": "images_load_time_total_s", "type":"float", "format": "%.2f", "name": "Images load time (total, sec.)"}, + {"key": "mAP", "type":"float", "format": "%.2f", "name": "mAP"}, + {"key": "metrics#DetectionBoxes_Precision/mAP", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP"}, + {"key": "metrics#DetectionBoxes_Precision/mAP (large)", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP (large)"}, + {"key": "metrics#DetectionBoxes_Precision/mAP (medium)", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP (medium)"}, + {"key": "metrics#DetectionBoxes_Precision/mAP (small)", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP (small)"}, + {"key": "metrics#DetectionBoxes_Precision/mAP@.50IOU", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP (.50 IOU)"}, + {"key": "metrics#DetectionBoxes_Precision/mAP@.75IOU", "type":"float", "format": "%.2f", "name": "Detection Boxes Precision mAP (.75 IOU)"}, + {"key": "metrics#DetectionBoxes_Recall/AR@1", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@1"}, + {"key": "metrics#DetectionBoxes_Recall/AR@10", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@10"}, + {"key": "metrics#DetectionBoxes_Recall/AR@100", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@100"}, + {"key": "metrics#DetectionBoxes_Recall/AR@100 (large)", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@100 (large)"}, + {"key": "metrics#DetectionBoxes_Recall/AR@100 (medium)", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@100 (medium)"}, + {"key": "metrics#DetectionBoxes_Recall/AR@100 (small)", "type":"float", "format": "%.2f", "name": "Detection Boxes Recall AR@100 (small)"}, + {"key": "recall", "type":"float", "format": "%.2f", "name": "Recall"}, + {"key": "setup_time_s", "type":"float", "format": "%.2f", "name": "Setup time (sec.)"}, + {"key": "test_time_s", "type":"float", "format": "%.2f", "name": "Test time 
(sec.)"}, + {"key": "solution_run_date", "type":"string", "format": "%Y-%m-%dT%H:%M:%SZ", "name": "Start date"}, + {"key": "solution_duration", "type":"float", "format": "%.2f", "name": "Total bechmark time (sec.)"} + ] +} diff --git a/ck/repo/module/ck-platform/tests/push-to-graph.bat b/ck/repo/module/ck-platform/tests/push-to-graph.bat new file mode 100644 index 0000000000..30db43f6a5 --- /dev/null +++ b/ck/repo/module/ck-platform/tests/push-to-graph.bat @@ -0,0 +1 @@ +ck push_result ck-platform --uid=fgg-test --filename=push-to-graph.json diff --git a/ck/repo/module/ck-platform/tests/push-to-graph.json b/ck/repo/module/ck-platform/tests/push-to-graph.json new file mode 100644 index 0000000000..696df90404 --- /dev/null +++ b/ck/repo/module/ck-platform/tests/push-to-graph.json @@ -0,0 +1,6 @@ +[ + {"avg_time_ms":10, + "mAP":0.25}, + {"avg_time_ms":20, + "mAP":0.35} +] diff --git a/ck/repo/module/dashboard/README.md b/ck/repo/module/dashboard/README.md new file mode 100644 index 0000000000..37724ea201 --- /dev/null +++ b/ck/repo/module/dashboard/README.md @@ -0,0 +1,28 @@ +## 20210625: New examples + +### CMDs + +``` +ck display dashboard --template=result --cfg=demo.mlperf.inference + +ck display dashboard --template=result --cfg=demo.mlperf.mobilenets +ck display dashboard --template=result --cfg=demo.mlperf.mobilenets --experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads + +ck display dashboard --template=result --cfg=demo.request.asplos18 +``` + +### URLS + +``` +ck start web +``` + +* http://localhost:3344/?template=result&cfg=demo.request.asplos18 + +* http://localhost:3344/?template=result&cfg=demo.mlperf.inference + +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&repo_uoa=ai&data_uoa=xyz*&tags=abc + +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&data_uoa=-&experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads diff --git a/ck/repo/module/dashboard/module.py b/ck/repo/module/dashboard/module.py index 0c65fecfa4..43cb9c8077 100644 --- a/ck/repo/module/dashboard/module.py +++ b/ck/repo/module/dashboard/module.py @@ -67,10 +67,31 @@ def display(args): extra_url = { key:args[key] for key in args if key in ['scenario', 'global'] } extra_url = "&".join("{0}={1}".format(key, extra_url[key]) for key in extra_url) + template=args.get('template','') + if template=='': template='dashboard' + + sub_keys={'cfg':'', + 'repo_uoa':'', + 'data_uoa':'', + 'tags':'', + 'experiment_repo_uoa':'', + 'experiment_uoa':'', + 'experiment_tags':''} + + for k in sub_keys: + kk=sub_keys[k] + if kk=='': kk=k + + v=args.get(k,'') + if v!='': + if extra_url!='': + extra_url+='&' + extra_url+=kk+'='+v + args['action'] = 'start' args['module_uoa'] = 'web' args['browser'] = 'yes' - args['template'] = 'dashboard' + args['template'] = template args['cid'] = '' args['extra_url'] = extra_url diff --git a/ck/repo/module/module/dummy_module.py b/ck/repo/module/module/dummy_module.py index 57c6bb1101..c9803fd98b 100644 --- a/ck/repo/module/module/dummy_module.py +++ b/ck/repo/module/module/dummy_module.py @@ -13,6 +13,12 @@ # Local settings +#import sys +#import os +#sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +#from $#ck_sub_module#$ import ... 
+ ############################################################################## # Initialize module diff --git a/ck/repo/module/module/module.py b/ck/repo/module/module/module.py index d9974dde60..01ff969f97 100644 --- a/ck/repo/module/module/module.py +++ b/ck/repo/module/module/module.py @@ -275,8 +275,26 @@ def add(i): if r['return'] > 0: return r - # Add module code p = r['path'] + + # Module UID + uid = r['data_uid'] + + ck_sub_module = 'ck_'+uid + + spm = spm.replace('$#ck_sub_module#$', ck_sub_module) + + p_sub_module = os.path.join(p, ck_sub_module) + if not os.path.isdir(p_sub_module): + os.makedirs(p_sub_module) + + p_sub_module_init = os.path.join(p_sub_module,'__init__.py') + if not os.path.isfile(p_sub_module_init): + rx = ck.save_text_file({'text_file': p_sub_module_init, 'string': ''}) + if rx['return'] > 0: + return rx + + # Add module code pf = os.path.join(p, ck.cfg['module_full_code_name']) if o == 'con': diff --git a/ck/repo/module/result.cfg/.cm/desc.json b/ck/repo/module/result.cfg/.cm/desc.json new file mode 100644 index 0000000000..0967ef424b --- /dev/null +++ b/ck/repo/module/result.cfg/.cm/desc.json @@ -0,0 +1 @@ +{} diff --git a/ck/repo/module/result.cfg/.cm/info.json b/ck/repo/module/result.cfg/.cm/info.json new file mode 100644 index 0000000000..34a661b9d3 --- /dev/null +++ b/ck/repo/module/result.cfg/.cm/info.json @@ -0,0 +1,20 @@ +{ + "backup_data_uid": "68d174e82bfa3f79", + "backup_module_uid": "032630d041b4fd8a", + "backup_module_uoa": "module", + "control": { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-24T14:36:09.824112", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + "data_name": "result.cfg" +} diff --git a/ck/repo/module/result.cfg/.cm/meta.json b/ck/repo/module/result.cfg/.cm/meta.json new file mode 100644 index 0000000000..618491d90d --- /dev/null +++ b/ck/repo/module/result.cfg/.cm/meta.json @@ -0,0 +1,9 @@ +{ + "actions": {}, + "copyright": "See CK COPYRIGHT.txt for copyright details", + "desc": "Configure result graph", + "developer": "Grigori Fursin", + "developer_email": "Grigori.Fursin@cTuning.org", + "developer_webpage": "http://fursin.net", + "license": "See CK LICENSE.txt for licensing details" +} diff --git a/ck/repo/module/result.cfg/module.py b/ck/repo/module/result.cfg/module.py new file mode 100644 index 0000000000..395279ee97 --- /dev/null +++ b/ck/repo/module/result.cfg/module.py @@ -0,0 +1,32 @@ +# +# Collective Knowledge (Configure result graph) +# +# See CK LICENSE.txt for licensing details +# See CK COPYRIGHT.txt for copyright details +## +# Developer: Grigori Fursin, Grigori.Fursin@cTuning.org, http://fursin.net +# + +cfg = {} # Will be updated by CK (meta description of this module) +work = {} # Will be updated by CK (temporal data) +ck = None # Will be updated by CK (initialized CK kernel) + +# Local settings + +############################################################################## +# Initialize module + + +def init(i): + """ + + Input: {} + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + return {'return': 0} diff --git a/ck/repo/module/result/.cm/meta.json b/ck/repo/module/result/.cm/meta.json index 73725f3717..406db6c204 100644 --- a/ck/repo/module/result/.cm/meta.json +++ b/ck/repo/module/result/.cm/meta.json @@ 
-1,9 +1,33 @@ { - "actions": {}, + "actions": { + "convert_experiment_to_result": { + "desc": "convert experiment to result" + }, + "get_raw_config": { + "desc": "get raw config for repo widget", + "for_web": "yes" + }, + "get_raw_data": { + "desc": "get raw data for widget", + "for_web": "yes" + }, + "postprocess_html": { + "desc": "Post-process HTML" + }, + "push": { + "desc": "push result" + } + }, + "actions_redirect": {}, "copyright": "See CK COPYRIGHT.txt for copyright details", "desc": "container for any result", "developer": "cTuning foundation", "developer_email": "admin@cTuning.org", "developer_webpage": "http://cTuning.org", - "license": "See CK LICENSE.txt for licensing details" + "license": "See CK LICENSE.txt for licensing details", + "module_deps": { + "experiment": "bc0409fb61f0aa82", + "result.cfg": "68d174e82bfa3f79", + "wfe": "1e4e644996b7f2a0" + } } diff --git a/ck/repo/module/result/.cm/meta.json2 b/ck/repo/module/result/.cm/meta.json2 new file mode 100644 index 0000000000..94b0c7a5fd --- /dev/null +++ b/ck/repo/module/result/.cm/meta.json2 @@ -0,0 +1,301 @@ +{ + "actions": { + "get_raw_config": { + "desc": "get raw config for repo widget", + "for_web": "yes" + }, + "get_raw_data": { + "desc": "get raw data for widget", + "for_web": "yes" + } + }, + "actions_redirect": {}, + "copyright": "See CK COPYRIGHT.txt for copyright details", + "desc": "container for any result", + "developer": "cTuning foundation", + "developer_email": "admin@cTuning.org", + "developer_webpage": "http://cTuning.org", + "license": "See CK LICENSE.txt for licensing details", + "data_config": { + "dimensions": [ + { + "key": "ID", + "name": "ID", + "view_key": "ID" + }, + { + "key": "Division", + "name": "Division", + "view_key": "Division" + }, + { + "key": "Category", + "name": "Category", + "view_key": "Category" + }, + { + "key": "Submitter", + "name": "Submitter", + "view_key": "Submitter" + }, + { + "key": "System", + "name": "System", + "view_key": "System" + }, + { + "key": "Processor", + "name": "Processor", + "view_key": "Processor" + }, + { + "key": "Processor #", + "name": "Processor #", + "view_key": "Processor #" + }, + { + "key": "Accelerator", + "name": "Accelerator", + "view_key": "Accelerator" + }, + { + "key": "Accelerator #", + "name": "Accelerator #", + "view_key": "Accelerator #" + }, + { + "key": "Task", + "name": "Task", + "view_key": "Task" + }, + { + "key": "Benchmark", + "name": "Benchmark", + "view_key": "Benchmark" + }, + { + "key": "Scenario", + "name": "Scenario", + "view_key": "Scenario" + }, + { + "format": "%.2f", + "key": "P_IC1", + "name": "Image Classification performance", + "view_key": "P_IC1" + }, + { + "format": "%.3f", + "key": "A_IC1", + "name": "Image Classification accuracy (Top1, %)", + "view_key": "A_IC1" + }, + { + "format": "%.2f", + "key": "P_OD1", + "name": "Object Detection performance", + "view_key": "P_OD1" + }, + { + "format": "%.3f", + "key": "A_OD1", + "name": "Object Detection accuracy (mAP, %)", + "view_key": "A_OD1" + }, + { + "format": "%.2f", + "key": "P_NMT", + "name": "Machine Translation performance", + "view_key": "P_NMT" + }, + { + "format": "%.3f", + "key": "A_NMT", + "name": "Machine Translation accuracy (BLEU)", + "view_key": "A_NMT" + }, + { + "format": "%d", + "key": "Invalid?", + "name": "Invalid?", + "view_key": "Invalid?" 
+ } + ], + "selector": [], + "selector2": [ + { + "key": "Task", + "name": "Task" + }, + { + "key": "Scenario", + "name": "Scenario" + }, + { + "key": "Category", + "name": "Category" + }, + { + "key": "Division", + "name": "Division" + }, + { + "key": "Submitter", + "name": "Submitter" + }, + { + "key": "Benchmark", + "name": "Benchmark" + }, + { + "key": "Accelerator", + "name": "Accelerator" + }, + { + "key": "FF_M", + "name": "Mobile/Handheld" + }, + { + "key": "FF_E", + "name": "Edge/Embedded" + }, + { + "key": "FF_D", + "name": "Desktop/Workstation" + }, + { + "key": "FF_S", + "name": "Server/Cloud" + } + ], + "selector3": [ + { + "key": "plot_dimension1", + "name": "Plot dimension 1 (X)" + }, + { + "key": "plot_dimension2", + "name": "Plot dimension 2 (Y)" + } + ], + "selector_s": [ + { + "config": { + "min": 0, + "step": 0.1, + "type": "number" + }, + "key": "__delta", + "name": "Reference accuracy delta" + } + ], + "table_view": [ + { + "key": "Division", + "name": "Division" + }, + { + "key": "Category", + "name": "Category" + }, + { + "key": "Submitter", + "name": "Submitter" + }, + { + "key": "System", + "name": "System" + }, + { + "key": "Task", + "name": "Task" + }, + { + "key": "Benchmark", + "name": "Benchmark" + }, + { + "key": "Scenario", + "name": "Scenario" + }, + { + "format": "%.2f", + "key": "P_IC1", + "name": "Image Classification performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_IC1", + "name": "Image Classification accuracy
(ImageNet'12 val - Top1, %)" + }, + { + "format": "%.2f", + "key": "P_OD1", + "name": "Object Detection performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_OD1", + "name": "Object Detection accuracy
(COCO'17 val - mAP, %)" + }, + { + "format": "%.2f", + "key": "P_NMT", + "name": "Machine Translation performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_NMT", + "name": "Machine Translation accuracy
(WMT'16 En-De - BLEU)" + }, + { + "key": "Processor", + "name": "Processor" + }, + { + "key": "Processor #", + "name": "Processor #" + }, + { + "key": "Accelerator", + "name": "Accelerator" + }, + { + "key": "Accelerator #", + "name": "Accelerator #" + }, + { + "key": "Software", + "name": "Software" + }, + { + "key": "FF_M", + "name": "Form Factor - Mobile/Handheld" + }, + { + "key": "FF_E", + "name": "Form Factor - Edge/Embedded" + }, + { + "key": "FF_D", + "name": "Form Factor - Desktop/Workstation" + }, + { + "key": "FF_S", + "name": "Form Factor - Server/Cloud" + }, + { + "key": "Details", + "name": "Details" + }, + { + "key": "Code", + "name": "Code" + }, + { + "key": "Notes", + "name": "Notes" + } + ] + } +} diff --git a/ck/repo/module/result/.cm/meta.jsonx b/ck/repo/module/result/.cm/meta.jsonx new file mode 100644 index 0000000000..4751460e52 --- /dev/null +++ b/ck/repo/module/result/.cm/meta.jsonx @@ -0,0 +1,333 @@ +{ + "actions": { + "get_raw_config": { + "desc": "get raw config for repo widget", + "for_web": "yes" + }, + "get_raw_data": { + "desc": "get raw data for widget", + "for_web": "yes" + } + }, + "actions_redirect": {}, + "copyright": "See CK COPYRIGHT.txt for copyright details", + "desc": "container for any result", + "developer": "cTuning foundation", + "developer_email": "admin@cTuning.org", + "developer_webpage": "http://cTuning.org", + "license": "See CK LICENSE.txt for licensing details", + "data_config": { + "raw_config": { + "name": "MLPerf Inference v0.5 - Image Classification - SingleStream (milliseconds per sample)", + "xDimension": "P_IC1", + "yDimension": "A_IC1", + "xVariationVisible": false, + "yVariationVisible": false, + "colorDimension": "Accelerator #", + "colorRange": ["#0000FF", "#00FFFF", "#00FF00", "#FFCC00", "#FF0000"], + "sizeDimension": "Processor #", + "sizeRange": [4,6], + "markerDimension": "Submitter", + "markerDimensionSets": { + "Default": ["sector_1_4", "sector_1_3", "sector_1_2", "sector_3_4", "circle", "triangle", "triangle_down", "sector_1_4", "rect", "pentagon", "hexagon", "sector_1_4", "star", "diamond"] + }, + "tooltipValues": ["Division", "Submitter", "System", "Processor", "Processor #", "Accelerator", "Accelerator #", "Benchmark", "P_IC1", "A_IC1"], + "props": { + "__delta": 0.0 + }, + "refLines": [ + { + "name": "ResNet", + "dimension": "A_IC1", + "get_value": "CkRepoWidgetUtils.mlperf.get_reference_accuracy_resnet" + }, + { + "name": "MobileNet", + "dimension": "A_IC1", + "get_value": "CkRepoWidgetUtils.mlperf.get_reference_accuracy_mobilenet" + } + ], + "call_attribs": { + "prefilter_mode": "image_classification_singlestream" + }, + "filters": { + "Task": "Image Classification", + "Scenario": "SingleStream" + } + + }, + "dimensions": [ + { + "key": "ID", + "name": "ID", + "view_key": "ID" + }, + { + "key": "Division", + "name": "Division", + "view_key": "Division" + }, + { + "key": "Category", + "name": "Category", + "view_key": "Category" + }, + { + "key": "Submitter", + "name": "Submitter", + "view_key": "Submitter" + }, + { + "key": "System", + "name": "System", + "view_key": "System" + }, + { + "key": "Processor", + "name": "Processor", + "view_key": "Processor" + }, + { + "key": "Processor #", + "name": "Processor #", + "view_key": "Processor #" + }, + { + "key": "Accelerator", + "name": "Accelerator", + "view_key": "Accelerator" + }, + { + "key": "Accelerator #", + "name": "Accelerator #", + "view_key": "Accelerator #" + }, + { + "key": "Task", + "name": "Task", + "view_key": "Task" + }, + { + "key": "Benchmark", + 
"name": "Benchmark", + "view_key": "Benchmark" + }, + { + "key": "Scenario", + "name": "Scenario", + "view_key": "Scenario" + }, + { + "format": "%.2f", + "key": "P_IC1", + "name": "Image Classification performance", + "view_key": "P_IC1" + }, + { + "format": "%.3f", + "key": "A_IC1", + "name": "Image Classification accuracy (Top1, %)", + "view_key": "A_IC1" + }, + { + "format": "%.2f", + "key": "P_OD1", + "name": "Object Detection performance", + "view_key": "P_OD1" + }, + { + "format": "%.3f", + "key": "A_OD1", + "name": "Object Detection accuracy (mAP, %)", + "view_key": "A_OD1" + }, + { + "format": "%.2f", + "key": "P_NMT", + "name": "Machine Translation performance", + "view_key": "P_NMT" + }, + { + "format": "%.3f", + "key": "A_NMT", + "name": "Machine Translation accuracy (BLEU)", + "view_key": "A_NMT" + }, + { + "format": "%d", + "key": "Invalid?", + "name": "Invalid?", + "view_key": "Invalid?" + } + ], + "selector": [ + ], + "selector2": [ + { + "key": "Category", + "name": "Category" + }, + { + "key": "Division", + "name": "Division" + }, + { + "key": "Submitter", + "name": "Submitter" + }, + { + "key": "Benchmark", + "name": "Benchmark" + }, + { + "key": "Accelerator", + "name": "Accelerator" + }, + { + "key": "FF_M", + "name": "Mobile/Handheld" + }, + { + "key": "FF_E", + "name": "Edge/Embedded" + }, + { + "key": "FF_D", + "name": "Desktop/Workstation" + }, + { + "key": "FF_S", + "name": "Server/Cloud" + } + ], + "selector3": [ + { + "key": "plot_dimension1", + "name": "Plot dimension 1 (X)" + }, + { + "key": "plot_dimension2", + "name": "Plot dimension 2 (Y)" + } + ], + "selector_s": [ + { + "config": { + "min": 0, + "step": 0.1, + "type": "number" + }, + "key": "__delta", + "name": "Reference accuracy delta" + } + ], + "table_view": [ + { + "key": "Division", + "name": "Division" + }, + { + "key": "Category", + "name": "Category" + }, + { + "key": "Submitter", + "name": "Submitter" + }, + { + "key": "System", + "name": "System" + }, + { + "key": "Task", + "name": "Task" + }, + { + "key": "Benchmark", + "name": "Benchmark" + }, + { + "key": "Scenario", + "name": "Scenario" + }, + { + "format": "%.2f", + "key": "P_IC1", + "name": "Image Classification performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_IC1", + "name": "Image Classification accuracy
(ImageNet'12 val - Top1, %)" + }, + { + "format": "%.2f", + "key": "P_OD1", + "name": "Object Detection performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_OD1", + "name": "Object Detection accuracy
(COCO'17 val - mAP, %)" + }, + { + "format": "%.2f", + "key": "P_NMT", + "name": "Machine Translation performance (see the metric above the plot)" + }, + { + "format": "%.3f", + "key": "A_NMT", + "name": "Machine Translation accuracy
(WMT'16 En-De - BLEU)" + }, + { + "key": "Processor", + "name": "Processor" + }, + { + "key": "Processor #", + "name": "Processor #" + }, + { + "key": "Accelerator", + "name": "Accelerator" + }, + { + "key": "Accelerator #", + "name": "Accelerator #" + }, + { + "key": "Software", + "name": "Software" + }, + { + "key": "FF_M", + "name": "Form Factor - Mobile/Handheld" + }, + { + "key": "FF_E", + "name": "Form Factor - Edge/Embedded" + }, + { + "key": "FF_D", + "name": "Form Factor - Desktop/Workstation" + }, + { + "key": "FF_S", + "name": "Form Factor - Server/Cloud" + }, + { + "key": "Details", + "name": "Details" + }, + { + "key": "Code", + "name": "Code" + }, + { + "key": "Notes", + "name": "Notes" + } + ] + } +} diff --git a/ck/repo/module/result/.cm/updates.json b/ck/repo/module/result/.cm/updates.json new file mode 100644 index 0000000000..a579c1ed18 --- /dev/null +++ b/ck/repo/module/result/.cm/updates.json @@ -0,0 +1,116 @@ +{ + "control": [ + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-21T22:43:32.912034", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-21T22:43:42.816509", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-22T12:51:20.591707", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-22T13:50:01.334834", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-25T15:11:59.364255", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-25T15:18:12.733229", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + "copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-25T16:43:16.705621", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + }, + { + "author": "Grigori Fursin", + "author_email": "Grigori.Fursin@cTuning.org", + "author_webpage": "http://fursin.net", + 
"copyright": "See CK COPYRIGHT.txt for copyright details", + "engine": "CK", + "iso_datetime": "2021-06-25T17:16:54.378049", + "license": "See CK LICENSE.txt for licensing details", + "version": [ + "2", + "5", + "4" + ] + } + ] +} diff --git a/ck/repo/module/result/README.md b/ck/repo/module/result/README.md new file mode 100644 index 0000000000..4b4ad89747 --- /dev/null +++ b/ck/repo/module/result/README.md @@ -0,0 +1,39 @@ +## 20210625: New examples + +### CMDs + +``` +ck display dashboard --template=result --cfg=demo.mlperf.inference + +ck display dashboard --template=result --cfg=demo.mlperf.mobilenets +ck display dashboard --template=result --cfg=demo.mlperf.mobilenets --experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads + +ck display dashboard --template=result --cfg=demo.request.asplos18 +``` + +### URLS + +``` +ck start web +``` + +* http://localhost:3344/?template=result&cfg=demo.request.asplos18 + +* http://localhost:3344/?template=result&cfg=demo.mlperf.inference + +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&repo_uoa=ai&data_uoa=xyz*&tags=abc + +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads +* http://localhost:3344/?template=result&cfg=demo.mlperf.mobilenets&data_uoa=-&experiment_uoa=mlperf-image-classification-single-stream-onnx-explore-threads + +## Notes + +### Variation + +At the moment, X/Y variation can be plotted only when "colorDimension" +is on due to our old implementation of the graph widget +(see meta in "result.cfg:demo.mlperf.explore-threads"). + +One can use "_const" dimension as a trick to turn on "colorDimension" +but do not show different colors: "colorDimension":"_const" diff --git a/ck/repo/module/result/module.py b/ck/repo/module/result/module.py index 8e69bd78dd..4b51839d5c 100644 --- a/ck/repo/module/result/module.py +++ b/ck/repo/module/result/module.py @@ -12,6 +12,14 @@ ck=None # Will be updated by CK (initialized CK kernel) # Local settings +import os + +onchange='document.ck_result_form.submit();' + +common_data_keys=['repo_uoa','data_uoa','tags'] +common_data_keys2={'experiment_repo_uoa':'repo_uoa', + 'experiment_uoa':'data_uoa', + 'experiment_tags':'tags'} ############################################################################## # Initialize module @@ -29,3 +37,487 @@ def init(i): """ return {'return':0} + +############################################################################## +# + +def get_raw_config(i): + """ + Input: { + cfg_uoa - UOA of result.cfg + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + cfg_uoa=i['cfg_uoa'] + cfg_id=i.get('cfg_id','') + + r=load_cfg({'cfg':cfg_uoa, 'cfg_id':cfg_id}) + if r['return']>0: return r + + data_config=r['data_config'] + data_config['return'] = 0 + + return data_config + +############################################################################## +# + +def get_raw_data(i): + """ + Input: { + cfg_uoa - UOA of result.cfg + + (repo_uoa) + (data_uoa) + (tags) + (user) + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + # Load cfg + cfg_uoa=i['cfg_uoa'] + cfg_id=i.get('cfg_id','') + + r=load_cfg({'cfg':cfg_uoa, 'cfg_id':cfg_id}) + if r['return']>0: return r + + data_config=r['data_config'] + data_config['return'] = 0 + + 
dd=r['dict'] + + experiment_convertor=dd.get('experiment_convertor',[]) + + # Get default parameters where to search for data (module "result") + ii={'action':'search', + 'module_uoa':work['self_module_uid']} + for k in common_data_keys: + for where in [dd, i]: + v=where.get(k,'') + if v!='': + ii[k]=v + + r=ck.access(ii) + if r['return']>0: return r + + table=[] + num_users=0 + + user=i.get('user','') + + for l in r['lst']: + + path=l['path'] + + # General files (for compatibility) + files=os.listdir(path) + for f in files: + if f.startswith('result-') and f.endswith('.json'): + pf=os.path.join(path, f) + + r=ck.load_json_file({'json_file':pf}) + if r['return']>0: return r + + result=r['dict'] # List even with 1 result + + table+=result + + # Per users + path1=os.path.join(path, 'users') + if os.path.isdir(path1): + users=os.listdir(path1) + for u in users: + if user!='' and u!=user: + continue + + path2=os.path.join(path1, u) + if os.path.isdir(path2): + files=os.listdir(path2) + new_user=True + for f in files: + if f.startswith('result-') and f.endswith('.json'): + if new_user: + new_user=False + num_users+=1 + + pf=os.path.join(path2, f) + + r=ck.load_json_file({'json_file':pf}) + if r['return']>0: return r + + result=r['dict'] # List even with 1 result + + for res in result: + table.append(res) + + # Get default parameters where to search for data (module "experiment") + ii={} + for k in common_data_keys2: + for where in [dd, i]: + v=where.get(k,'') + if v!='': + ii[common_data_keys2[k]]=v + + if len(ii)>0: + ii['action']='search' + ii['module_uoa']=cfg['module_deps']['experiment'] + + r=ck.access(ii) + if r['return']>0: return r + + for l in r['lst']: + + path=l['path'] + + # General files (for compatibility) + files=os.listdir(path) + for f in files: + if f.startswith('ckp-') and f.endswith('.flat.json'): + pf=os.path.join(path, f) + + r=ck.load_json_file({'json_file':pf}) + if r['return']>0: return r + + flat_result=r['dict'] + + r=convert_experiment_to_result({'dict':flat_result, + 'convertor':experiment_convertor}) + if r['return']>0: return r + + table+=r['table'] + + # Merge if needed + merge={} + new_table=[] + for t in range(0, len(table)): + result=table[t] + merge_id=result.get('_merge','') + if merge_id=='': + new_table.append(result) + else: + if merge_id not in merge: + # Save position of the first merge + merge[merge_id]=t + new_table.append(result) + else: + tmerge=merge[merge_id] + table[tmerge].update(result) + + table=new_table + + # Add sequence numbers + seq_number=1 + for t in table: + t['seq_number']=seq_number + t['_const']=1 + seq_number+=1 + + return {'return':0, 'table':table} + +############################################################################## +# post-process html + +def postprocess_html(i): + """ + Input: { + html - html to post-process + + original_input (dict) - passing extra parameters from URL + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + + html - post-processed html + } + + """ + + h=i['html'] + + # Substitutions + sub={ + 'ck_html_title':'', + 'ck_html_title_main':'', + 'ck_html_title_note':'', + 'ck_html_form':'' + } + + # Check cfg to customize input + oi=i.get('original_input',{}) + + result_cfg=oi.get('cfg','') + cfg_id=oi.get('cfg_id','') + + if result_cfg!='': + r=load_cfg({'cfg':result_cfg, 'cfg_id':cfg_id}) + if r['return']>0: return r + + dcfg=r['dict'] + + update_html=dcfg.get('update_html',{}) + if len(update_html)>0: + sub.update(update_html) + + 
sub['ck_cfg_uoa']=result_cfg + sub['ck_cfg_id']=r['cfg_id'] + sub['ck_html_form']=r['html_selector'] + + # Check other params in original input and pass them to HTML + for k in common_data_keys + list(common_data_keys2.keys()) + ['user']: + sub['ck_'+k]=oi.get(k,'') + + # Update html + for s in sub: + h=h.replace('$#'+s+'#$', sub[s]) + + return {'return':0, 'html':h} + + +############################################################################## +# load cfg + +def load_cfg(i): + + result_cfg=i['cfg'] + cfg_id=i.get('cfg_id','') + + r=ck.access({'action':'load', + 'module_uoa':cfg['module_deps']['result.cfg'], + 'data_uoa':result_cfg}) + if r['return']>0: return r + + p=r['path'] + + dcfg=r['dict'] + + data_config=dcfg.get('data_config',{}) + + selector=[] + html_selector='' + + if len(data_config)==0: + # Attempt to read multiple configs + ii={'action':'create_selector', + 'module_uoa':cfg['module_deps']['wfe'], + 'data':selector, + 'name':'cfg_id', + 'onchange':onchange} + + first=True + for f in os.listdir(p): + if f.startswith('config-') and f.endswith('.json'): + pf=os.path.join(p,f) + + r=ck.load_json_file({'json_file':pf}) + if r['return']>0: return r + + d=r['dict'] + + name=d['name'] + value=d['id'] + + if cfg_id==value or (cfg_id=='' and first): + ii['selected_value']=value + dcfg=d + data_config=d['data_config'] + cfg_id=value + + selector.append({'name':name, 'value':value}) + + first=False + + r=ck.access(ii) + if r['return']>0: return r + + html_selector='
<center>\n'+r['html']+'\n</center>\n<hr>
\n' + + return {'return':0, 'dict':dcfg, 'cfg_id':cfg_id, 'data_config':data_config, 'html_selector':html_selector} + +############################################################################## +# convert experiment to result + +def convert_experiment_to_result(i): + """ + Input: { + dict (dict) - dict with flat experiment + convertor (dict) - dict with convertor + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + + table - output table + } + + """ + + table=[] + + d=i.get('dict',{}) + convertor=i.get('convertor',[]) + + result={} + + if len(convertor)>0: + for k in convertor: + ok=k.get('out_key','') + if ok=='': ok=k['key1'] + + kk=[k.get('key1',''), k.get('key2',''), k.get('key3',''), k.get('key4','')] + + vv='' + v=k.get('value','') + if v!='' and v!=None: + vv=v + + first=True + for kx in kk: + if kx!='': + v=d.get(kx) + + vm=k.get('multiply',0) + if vm!=0 and vm!='' and vm!=None and (type(v)==float or type(v)==int): + v=v*vm + + if v!='' and v!=None: + if first: + first=False + if type(v)==float or type(v)==int: + vv=0 + else: + vv+=', ' + + # Check if list or dict + if type(v)==list or type(v)==dict: + vv=v + else: + vv+=v + + if vv!='': + result[ok]=vv + + else: + for k in d: + result[k]=d[k] + + if len(result)>0: + table.append(result) + + return {'return':0, 'table':table} + +############################################################################## +# push result + +def push(i): + """ + Input: { + (data_uoa) - result data UOA + or + (tags) - find data entry to record via tags + + (user) - 'all' by default + (point) - force point to append data (result-{point}.json) + + (dict) - result dict + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + } + + """ + + repo_uoa=i.get('repo_uoa','') + data_uoa=i.get('data_uoa','') + tags=i.get('tags','') + + if data_uoa=='' and tags=='': + return {'return':1, 'error':'data_uoa and tags are not defined'} + + # Find data entry if exists + ii={'action':'search', + 'module_uoa':work['self_module_uid'], + 'repo_uoa':repo_uoa, + 'data_uoa':data_uoa, + 'tags':tags} + r=ck.access(ii) + if r['return']>0: return + + lst=r['lst'] + + if len(lst)>1: + return {'return':1, 'error':'ambiguity: more than 1 result entry found'} + + if len(lst)==0: + if data_uoa=='': + return {'return':1, 'error':'you must specify data_uoa'} + + ii={'action':'add', + 'module_uoa':work['self_module_uid'], + 'repo_uoa':repo_uoa, + 'data_uoa':data_uoa, + 'tags':tags} + r=ck.access(ii) + if r['return']>0: return r + + path=r['path'] + else: + path=lst[0]['path'] + + # Get result dict + dresult=i.get('dict',{}) + + # Check user + user=i.get('user','') + if user=='': user='all' + + path_results=os.path.join(path, 'users', user) + + # Check if directory exists + if not os.path.isdir(path_results): + os.makedirs(path_results) + + # Which result file + point=i.get('point','') + if point=='': point='1' + + result_file='result-'+point+'.json' + + path_file=os.path.join(path_results, result_file) + + table=[] + # Try to load file + if os.path.isfile(path_file): + r=ck.load_json_file({'json_file':path_file}) + if r['return']>0: return r + + table=r['dict'] + + table.append(dresult) + + # Save back + r=ck.save_json_to_file({'json_file':path_file, 'dict':table, 'sort_keys':'yes'}) + if r['return']>0: return r + + return {'return':0, 'path':path, 'path_file':path_file, 'table':table} diff --git a/ck/repo/module/result/test_input.json 
b/ck/repo/module/result/test_input.json new file mode 100644 index 0000000000..f26c8e7ff5 --- /dev/null +++ b/ck/repo/module/result/test_input.json @@ -0,0 +1,35 @@ +{ + "dict":{ + "accuracy_top1": 0.45, + "accuracy_top5": 0.55, + "batch_count": 500, + "batch_size": 1, + "convolution_method": "DIRECT", + "cpu_freq": "max", + "cpu_name": "ARM Cortex-A53 MP4 + 0x41-8-0x0-0xd08-2 MP2", + "data_layout": "-", + "dataset": "val", + "gpgpu_name": "ARM Mali-T860", + "gpu_freq": "max", + "kernel_tuner": "-", + "library": "armcl-17.12", + "methodXlayout": "DIRECT X -", + "methodXtuner": "DIRECT X -", + "model": "v1-1.00-224", + "multiplier": 1.0, + "os_name": "Ubuntu 16.04.4 LTS", + "platform": "Firefly RK3399", + "rate_max_s": 4.744778371402272, + "rate_max_s_w": 0.7842608878350863, + "resolution": 224, + "time_avg_ms": 212.38999999999996, + "time_avg_ms#max": 213.41345610555604, + "time_avg_ms#min": 211.36654389444388, + "time_min_ms": 210.758, + "time_min_ms#max": 213.802, + "time_min_ms#min": 210.758, + "tunerXlayout": "- X -", + "version": 1 + } +} + diff --git a/ck/repo/module/wfe/module.py b/ck/repo/module/wfe/module.py index 35345805f0..88b3027462 100644 --- a/ck/repo/module/wfe/module.py +++ b/ck/repo/module/wfe/module.py @@ -4,7 +4,8 @@ # See CK LICENSE.txt for licensing details # See CK COPYRIGHT.txt for copyright details # -# Developer: cTuning foundation +# Developer(s): +# * Grigori Fursin, https://fursin.net # cfg={} # Will be updated by CK (meta description of this module) @@ -376,6 +377,7 @@ def index(i): 'form_name':form_name, 'all_params':i} rx=ck.access(ii) + hspec='' if rx['return']==0: hspec=rx.get('html','') hstyle+=rx.get('style','')+'\n' @@ -1081,6 +1083,18 @@ def index(i): # Substitute specials h=h.replace('$#title#$', 'CK Browser') h=h.replace('$#ck_url_template_pull#$', url_template_pull) + h=h.replace('$#ck_url_without_template#$', url0w) + + # Check if postprocessing + mph=d.get('module_to_postprocess_html','') + if mph!='': + r=ck.access({'action':'postprocess_html', + 'module_uoa':mph, + 'html':h, + 'original_input':i}) + if r['return']>0: return r + + h=r['html'] return {'return':0, 'html':h} diff --git a/incubator/cbench/LICENSE.txt b/incubator/cbench/LICENSE.txt index 25ed01c70d..7f6da14904 100644 --- a/incubator/cbench/LICENSE.txt +++ b/incubator/cbench/LICENSE.txt @@ -1,4 +1,4 @@ -Copyright 2020 cTuning foundation. All rights reserved. +Copyright 2020-2021 cTuning foundation. All rights reserved. Apache License Version 2.0, January 2004
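
For reference, the two web actions declared in the new meta.jsonx above (`get_raw_config` and `get_raw_data`) can also be called directly from Python through the CK kernel. Below is a minimal sketch, assuming the `demo.mlperf.inference` configuration referenced in the README above is available as a local `result.cfg` entry; the printed keys are defensive lookups, since the exact shape of that demo config is not part of this patch:

```python
import ck.kernel as ck

# Load the dashboard configuration (plot dimensions, selectors, table view)
# for one of the demo configs referenced in the module:result README.
r = ck.access({'action': 'get_raw_config',
               'module_uoa': 'result',
               'cfg_uoa': 'demo.mlperf.inference'})
if r['return'] > 0:
    ck.err(r)  # print the CK error message and exit

print('X dimension:', r.get('raw_config', {}).get('xDimension'))

# Fetch the aggregated result table that the dashboard widget renders
# (results are collected from result-*.json files and converted experiments).
r = ck.access({'action': 'get_raw_data',
               'module_uoa': 'result',
               'cfg_uoa': 'demo.mlperf.inference'})
if r['return'] > 0:
    ck.err(r)

print('Number of result rows:', len(r.get('table', [])))
```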