From fb38b369686698fcd27b1897084d53f775f07ee9 Mon Sep 17 00:00:00 2001 From: Grigori Fursin Date: Sun, 27 Dec 2020 11:03:26 +0100 Subject: [PATCH] * added incubator projects * prepared for release * updated module:program * removed two outdated modules --- CHANGES.txt | 4 +- ck/kernel.py | 2 +- incubator/cbench/.gitignore | 16 + incubator/cbench/.travis.yml | 35 + incubator/cbench/CHANGELOG.txt | 60 + incubator/cbench/CONTRIBUTIONS.txt | 17 + incubator/cbench/LICENSE.CodeReef.txt | 202 ++ incubator/cbench/LICENSE.txt | 202 ++ incubator/cbench/MANIFEST.in | 3 + incubator/cbench/README.md | 109 + incubator/cbench/appveyor.yml | 26 + incubator/cbench/cbench/__init__.py | 5 + incubator/cbench/cbench/__main__.py | 7 + incubator/cbench/cbench/client.py | 1236 +++++++++++ incubator/cbench/cbench/comm.py | 256 +++ incubator/cbench/cbench/comm_min.py | 87 + incubator/cbench/cbench/config.py | 250 +++ incubator/cbench/cbench/graph.py | 359 ++++ incubator/cbench/cbench/main.py | 602 ++++++ incubator/cbench/cbench/obj.py | 840 ++++++++ incubator/cbench/cbench/setup.py | 190 ++ incubator/cbench/cbench/solution.py | 1865 +++++++++++++++++ incubator/cbench/cbench/static/favicon.ico | Bin 0 -> 13094 bytes .../_activate.sh | 6 + .../_init.sh | 24 + .../_init_graph.sh | 5 + .../_publish.sh | 7 + .../_run.sh | 6 + .../_run_benchmark.sh | 8 + .../extra-meta.json | 10 + .../graph-convertor.json | 58 + .../graph-desc.json | 87 + .../prepare.txt | 94 + .../prereq.txt | 10 + .../_activate.sh | 6 + .../_init.sh | 23 + .../_init_graph.sh | 5 + .../_publish.sh | 7 + .../_run.sh | 6 + .../_run_benchmark.sh | 8 + .../extra-meta.json | 10 + .../graph-convertor.json | 58 + .../graph-desc.json | 87 + .../prepare.txt | 81 + .../prereq.txt | 8 + .../_activate.sh | 6 + .../_init.sh | 23 + .../_init_graph.sh | 5 + .../_publish.sh | 8 + .../_run.sh | 6 + .../_run_benchmark.sh | 8 + .../extra-meta.json | 10 + .../graph-convertor.json | 58 + .../graph-desc.json | 87 + .../prepare.txt | 84 + 
.../prereq.txt | 10 + incubator/cbench/setup.py | 107 + .../testing/connectme/tests/__init__.py | 6 + .../testing/connectme/tests/__main__.py | 5 + .../connectme/testing/connectme/tests/cli.py | 157 ++ .../connectme/testing/connectme/tests/cm.py | 495 +++++ .../testing/connectme/tests/config.py | 25 + .../connectme/testing/connectme/tests/io.py | 120 ++ .../connectme/testing/connectme/tests/misc.py | 10 + 64 files changed, 8215 insertions(+), 2 deletions(-) create mode 100644 incubator/cbench/.gitignore create mode 100644 incubator/cbench/.travis.yml create mode 100644 incubator/cbench/CHANGELOG.txt create mode 100644 incubator/cbench/CONTRIBUTIONS.txt create mode 100644 incubator/cbench/LICENSE.CodeReef.txt create mode 100644 incubator/cbench/LICENSE.txt create mode 100644 incubator/cbench/MANIFEST.in create mode 100644 incubator/cbench/README.md create mode 100644 incubator/cbench/appveyor.yml create mode 100644 incubator/cbench/cbench/__init__.py create mode 100644 incubator/cbench/cbench/__main__.py create mode 100644 incubator/cbench/cbench/client.py create mode 100644 incubator/cbench/cbench/comm.py create mode 100644 incubator/cbench/cbench/comm_min.py create mode 100644 incubator/cbench/cbench/config.py create mode 100644 incubator/cbench/cbench/graph.py create mode 100644 incubator/cbench/cbench/main.py create mode 100644 incubator/cbench/cbench/obj.py create mode 100644 incubator/cbench/cbench/setup.py create mode 100644 incubator/cbench/cbench/solution.py create mode 100644 incubator/cbench/cbench/static/favicon.ico create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_activate.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_init.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_init_graph.sh create mode 100644 
incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_publish.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_run.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/_run_benchmark.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/extra-meta.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/graph-convertor.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/graph-desc.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/prepare.txt create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/prereq.txt create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_activate.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init_graph.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_publish.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run.sh create mode 100644 
incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run_benchmark.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/extra-meta.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-convertor.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-desc.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prepare.txt create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prereq.txt create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_activate.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init_graph.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_publish.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run_benchmark.sh create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/extra-meta.json create mode 100644 
incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-convertor.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-desc.json create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prepare.txt create mode 100644 incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prereq.txt create mode 100644 incubator/cbench/setup.py create mode 100644 incubator/connectme/testing/connectme/tests/__init__.py create mode 100644 incubator/connectme/testing/connectme/tests/__main__.py create mode 100644 incubator/connectme/testing/connectme/tests/cli.py create mode 100644 incubator/connectme/testing/connectme/tests/cm.py create mode 100644 incubator/connectme/testing/connectme/tests/config.py create mode 100644 incubator/connectme/testing/connectme/tests/io.py create mode 100644 incubator/connectme/testing/connectme/tests/misc.py diff --git a/CHANGES.txt b/CHANGES.txt index 557778601d..15d6bada80 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,6 +1,8 @@ -* V1.55.1 +* V1.55.2 * removed two outdated modules * updated module:program + * added incubator projects + * prepared for release * V1.55.0 * Fixed missing CK modules diff --git a/ck/kernel.py b/ck/kernel.py index 3e537b7508..6ef09f3d70 100755 --- a/ck/kernel.py +++ b/ck/kernel.py @@ -27,7 +27,7 @@ # We use 3 digits for the main (released) version and 4th digit for development revision -__version__ = "1.55.1" +__version__ = "1.55.2" # Do not use characters (to detect outdated version)! 
# Import packages that are global for the whole kernel diff --git a/incubator/cbench/.gitignore b/incubator/cbench/.gitignore new file mode 100644 index 0000000000..85a2853749 --- /dev/null +++ b/incubator/cbench/.gitignore @@ -0,0 +1,16 @@ +build +build/* +dist +dist/* + +MANIFEST +cbench.*/* +cbench.* +*.pyc +__pycache__ +.cache/ +.coverage +htmlcov + +.vscode + diff --git a/incubator/cbench/.travis.yml b/incubator/cbench/.travis.yml new file mode 100644 index 0000000000..b2fac90359 --- /dev/null +++ b/incubator/cbench/.travis.yml @@ -0,0 +1,35 @@ +os: linux +language: python + +python: + - "2.7" + - "3.5" + - "3.6" + - "3.7" + +matrix: + include: + - os: osx + language: generic + +before_install: + - | + if [ $TRAVIS_OS_NAME == "osx" ]; then + export PIP_EXE=pip3 + export PYTHON_EXE=python3 + else + export PIP_EXE=pip + export PYTHON_EXE=python + fi + +install: + - echo "Env var TRAVIS_OS_NAME ${TRAVIS_OS_NAME}" + - echo "Current directory ${CWD}" + - ${PYTHON_EXE} --version + - ${PIP_EXE} --version + - ${PIP_EXE} install setuptools + - ${PIP_EXE} install ck + - ${PYTHON_EXE} setup.py install + +script: + - cbench download package:lib-tensorflow-pip --all diff --git a/incubator/cbench/CHANGELOG.txt b/incubator/cbench/CHANGELOG.txt new file mode 100644 index 0000000000..746ed46184 --- /dev/null +++ b/incubator/cbench/CHANGELOG.txt @@ -0,0 +1,60 @@ +* 1.3.1 + added "cb delete" action to delete non-permanent components from the portal (if owner) + added --permanent flag to cb publish to mark a given components as permanent (can't be deleted) + +* 1.2.11 + added --extra_tags to cb publish + +* 1.2.10 + considerably accelerated bootstrapping + +* 1.2.9 + fixed bug in solution init with post cmd + +* 1.2.8 + Added new flags to graph init + Added solution examples + +* 1.2.7 + Improved graphs for crowd-benchmarking + +* 1.2.6 + Fixed bug with extra CMD + +* 1.2.5 + New fixes for solution/graph init to reflrect cKnowledge.io updates + +* 1.2.1 + Stable release + +* 
1.1.12 + Return graphs URLs during crowd-benchmarking to be able to visualize them at cKnowledge.io dashboards + +* 1.1.9 + Fixed error in comm_min + +* 1.1.8 + Added comm_min for minimal communication with the CK portal + without extra deps to get the latest release notes + +* 1.1.7 + Major change in solution + +... + +* 1.1.3 + Changed API + Major name clean up + +... + +* 1.0.6 + Minor fixes with logs + +* 1.0.5 + Various fixes and improvements + +* 1.0.4 + Fixing ReadMe + +* 1.0.0 diff --git a/incubator/cbench/CONTRIBUTIONS.txt b/incubator/cbench/CONTRIBUTIONS.txt new file mode 100644 index 0000000000..fadc96ee3c --- /dev/null +++ b/incubator/cbench/CONTRIBUTIONS.txt @@ -0,0 +1,17 @@ +cBench uses the standard Apache2 license. + +Contributions are very welcome. We suggest you to fork the code +on GitHub and then submit a pull request: + +* https://github.com/cknowledge/cbench/issues + +Thank you for supporting this community project! + +Contributors: + +N: Grigori Fursin +E: Grigori.Fursin@cTuning.org +H: https://fursin.net +O: cTuning foundation and cKnowledge SAS +C: author + diff --git a/incubator/cbench/LICENSE.CodeReef.txt b/incubator/cbench/LICENSE.CodeReef.txt new file mode 100644 index 0000000000..af7f10361a --- /dev/null +++ b/incubator/cbench/LICENSE.CodeReef.txt @@ -0,0 +1,202 @@ +Copyright 2019-2020 CodeReef. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/incubator/cbench/LICENSE.txt b/incubator/cbench/LICENSE.txt new file mode 100644 index 0000000000..25ed01c70d --- /dev/null +++ b/incubator/cbench/LICENSE.txt @@ -0,0 +1,202 @@ +Copyright 2020 cTuning foundation. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/incubator/cbench/MANIFEST.in b/incubator/cbench/MANIFEST.in new file mode 100644 index 0000000000..161401e449 --- /dev/null +++ b/incubator/cbench/MANIFEST.in @@ -0,0 +1,3 @@ +include LICENSE.txt +include README.md +include cbench/static/favicon.ico \ No newline at end of file diff --git a/incubator/cbench/README.md b/incubator/cbench/README.md new file mode 100644 index 0000000000..efcf1ae1fb --- /dev/null +++ b/incubator/cbench/README.md @@ -0,0 +1,109 @@ +[![PyPI version](https://badge.fury.io/py/cbench.svg)](https://badge.fury.io/py/cbench) +[![Python Version](https://img.shields.io/badge/python-2.7%20|%203.4+-blue.svg)](https://pypi.org/project/cbench) +[![License](https://img.shields.io/badge/license-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) + +Linux/MacOS: [![Build Status](https://travis-ci.org/ctuning/cbench.svg?branch=master)](https://travis-ci.org/ctuning/cbench) +Windows: [![Windows Build status](https://ci.appveyor.com/api/projects/status/yjq5myrrrkx3rydc?svg=true)](https://ci.appveyor.com/project/gfursin/cbench) + + +## News + +We have successfully completed the prototyping phase of the Collective Knowledge technology +to make it easier to reproduce AI&ML and deploy it in production with the help of portable CK workflows, reusable artifacts and MLOps +as described in this [white paper](https://arxiv.org/abs/2006.07161) +and the [CK 
presentation](https://cKnowledge.io/presentation/ck). +We are now preparing the second phase of this project to make CK simpler to use, more stable and more user friendly - +don't hesitate to get in touch with the [CK author](https://cKnowledge.io/@gfursin) to know more! + + + +## Introduction + +cBench is a small and cross-platform framework +connected with the [open Collective Knowledge portal](https://cKnowledge.io) +to help researchers and practitioners +[reproduce ML&systems research](https://cKnowledge.io/reproduced-papers) +on their own bare-metal platforms, participate in collaborative +benchmarking and optimization, +and share results on [live scoreboards](https://cKnowledge.io/reproduced-results). + +You can try to reproduce MLPerf inference benchmark on your machine using [this solution](https://cKnowledge.io/test) +and see public results from the community on this [scoreboard](https://cknowledge.io/c/result/sota-mlperf-object-detection-v0.5-crowd-benchmarking). + +cBench is a part of the [Collective Knowledge project (CK)](https://cKnowledge.org) +and uses [portable CK solutions](https://cknowledge.io/docs/intro/introduction.html#portable-ck-solution) +to describe how to download, build, benchmark and optimize applications +across different hardware, software, models and data sets.
+ +## Platform support: + +| | As a host platform | As a target platform | +|---------------|:------------------:|:--------------------:| +| Generic Linux | ✓ | ✓ | +| Linux (Arm) | ✓ | ✓ | +| Raspberry Pi | ✓ | ✓ | +| MacOS | ✓ | ± | +| Windows | ✓ | ✓ | +| Android | ± | ✓ | +| iOS | TBD | TBD | + + +## Object detection crowd-benchmarking demo on Ubuntu + +Install prerequisites: + +``` +sudo apt update +sudo apt install git wget libz-dev curl cmake +sudo apt install gcc g++ autoconf autogen libtool +sudo apt install libfreetype6-dev +sudo apt install python3.7-dev +sudo apt install -y libsm6 libxext6 libxrender-dev +``` + +Install cbench: + +``` +python3 -m pip install cbench +``` + +Initialize the [CK solution for MLPerf](https://cknowledge.io/solution/demo-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows): + +``` +cb init demo-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows +``` + +Participate in crowd-benchmarking: + +``` +cb benchmark demo-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows +``` + +See your results on a public [SOTA dashboard](https://cknowledge.io/c/result/sota-mlperf-object-detection-v0.5-crowd-benchmarking). + +You can also use the stable Docker image to participate in crowd-benchmarking: + +``` +sudo docker run ctuning/cbench-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows /bin/bash -c "cb benchmark demo-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows" +``` + +You can also check [all dependencies for this solution](https://cknowledge.io/solution/demo-obj-detection-coco-tf-cpu-benchmark-linux-portable-workflows/#dependencies). + + + +## Documentation + +* [Online docs for the Collective Knowledge technology](https://cKnowledge.io/docs) + +## Feedback + +* This is an ongoing project - don't hesitate to [contact us](https://cKnowledge.org/contacts.html) + if you have any feedback and suggestions! 
+ +## Acknowledgments + +We would like to thank all [CK partners](https://cKnowledge.org/partners.html) +for fruitful discussions and feedback! + + +*Copyright 2020 [cTuning foundation](https://cTuning.org)* diff --git a/incubator/cbench/appveyor.yml b/incubator/cbench/appveyor.yml new file mode 100644 index 0000000000..20703e4569 --- /dev/null +++ b/incubator/cbench/appveyor.yml @@ -0,0 +1,26 @@ +environment: + + matrix: + + # Python versions available on Appveyor: http://www.appveyor.com/docs/installed-software#python + + - PYTHON: "C:\\Python27" + - PYTHON: "C:\\Python33" + - PYTHON: "C:\\Python34" + - PYTHON: "C:\\Python35" + - PYTHON: "C:\\Python36" + - PYTHON: "C:\\Python37" + - PYTHON: "C:\\Python27-x64" + - PYTHON: "C:\\Python35-x64" + - PYTHON: "C:\\Python36-x64" + - PYTHON: "C:\\Python37-x64" + +install: + - pip install setuptools + - pip install ck + - python setup.py install + +build: false + +test_script: + - cbench download package:lib-tensorflow-pip --all diff --git a/incubator/cbench/cbench/__init__.py b/incubator/cbench/cbench/__init__.py new file mode 100644 index 0000000000..1185466556 --- /dev/null +++ b/incubator/cbench/cbench/__init__.py @@ -0,0 +1,5 @@ +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +__version__ = "1.3.1" diff --git a/incubator/cbench/cbench/__main__.py b/incubator/cbench/cbench/__main__.py new file mode 100644 index 0000000000..7c4511dd70 --- /dev/null +++ b/incubator/cbench/cbench/__main__.py @@ -0,0 +1,7 @@ +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import main + +main.cli() diff --git a/incubator/cbench/cbench/client.py b/incubator/cbench/cbench/client.py new file mode 100644 index 0000000000..e99f468884 --- /dev/null +++ b/incubator/cbench/cbench/client.py @@ -0,0 +1,1236 @@ +# +# Web service for the client +# Partially based on CK web service +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . 
import comm + +import ck.kernel as ck + +import json +import sys +import os +import tempfile +import cgi +#import ssl +import time +import requests + +# Import various modules while supporting both Python 2.x and 3.x +try: + from http.server import BaseHTTPRequestHandler, HTTPServer +except: + from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer + +try: + import urllib.parse as urlparse +except: + import urlparse + +try: + from urllib.parse import quote as urlquote +except: + from urllib import quote as urlquote + +try: + from urllib.parse import unquote as urlunquote +except: + from urllib import unquote as urlunquote + +#try: +# import http.cookies as Cookie +#except: +# import Cookie + +try: + from socketserver import ThreadingMixIn +except: + from SocketServer import ThreadingMixIn + + + +context_types={ + "bz2": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/x-bzip2" + }, + "con": { + "Content-type": "text/plain; charset=utf-8" + }, + "css": { + "Content-disposition": "inline; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "text/css" + }, + "csv": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "text/csv" + }, + "eps": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/postscript" + }, + "gif": { + "Content-type": "image/gif" + }, + "gz": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/x-gzip" + }, + "html": { + "Content-type": "text/html; charset=utf-8" + }, + "jpeg": { + "Content-type": "image/jpeg" + }, + "jpg": { + "Content-type": "image/jpeg" + }, + "js": { + "Content-disposition": "inline; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "text/javascript" + }, + "json": { + 
" -type": "w/json; charset=utf-8" + }, + "pdf": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/pdf" + }, + "png": { + "Content-type": "image/png" + }, + "ps": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/postscript" + }, + "txt": { + "Content-type": "text/plain; charset=utf-8" + }, + "unknown": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/octet-stream" + }, + "zip": { + "Content-disposition": "attachment; filename=$#filename#$", + "Content-title": "$#filename#$", + "Content-type": "application/zip" + } + } + +# URL to tunnel requests to (useful for development boards and Raspberry Pi) +tunnel_url='' + +# Skip print for hearbeat +heartbit_started=False +get_status_started=False + +############################################################################## +# Class to handle requests in separate threads + +class ThreadedHTTPServer(ThreadingMixIn, HTTPServer): + + """ + """ + +############################################################################## +# Send error to HTTP stream + +def web_out(i): + """ + Input: { + http - http object + type - content type + bin - bytes to output + (filename) - if !='', substitute filename in headers + } + + Output: { + return - 0 + } + """ + + http=i['http'] + bin=i['bin'] + + tp=i['type'] + + if tp=='' or tp=='web': tp='html' + + tpx=context_types.get(tp,{}) + if len(tpx)==0: + tp='unknown' + tpx=cfg['content_types'][tp] + + fn=i.get('filename','') + + # Output + for k in sorted(tpx.keys()): + v=tpx[k] + if fn!='': v=v.replace('$#filename#$', fn) + http.send_header(k,v) + + http.send_header('Access-Control-Allow-Origin', '*') + http.send_header('Content-Length', str(len(bin))) + http.end_headers() + + http.wfile.write(bin) + + return {'return':0} + 
+############################################################################## +# Send error to HTTP stream + +def web_err(i): + """ + Input: { + http - http object + type - content type + bin - bytes to output + } + + Output: { + return - 0 + } + """ + + http=i['http'] + tp=i['type'] + bin=i['bin'] + + try: bin=bin.decode('utf-8') + except Exception as e: pass + + if tp=='json': + rx=ck.dumps_json({'dict':{'return':1, 'error':bin}}) + if rx['return']>0: + bin2=rx['error'].encode('utf8') + else: + bin2=rx['string'].encode('utf-8') + elif tp=='con': + bin2=bin.encode('utf8') + else: + bin2=b'
<html><body><pre>'+bin.encode('utf8')+b'</pre></body></html>
' + + i['bin']=bin2 + return web_out(i) + +############################################################################## +# Process CK web service request (both GET and POST) + +def process_web_request(i): + """ + + Input: { + http - Python http object + } + + Output: { None } + """ + + global heartbit_started, get_status_started + + from . import solution + + # http object + http=i['http'] + + # Parse GET variables and path + xget={} + xpath={'host':'', 'port':'', 'first':'', 'rest':'', 'query':''} # May be used in the future + + xt='json' + + xpath['host']=i.get('host','') + xpath['port']=i.get('port','') + + # Check GET variables + if http.path!='': + http.send_response(200) + + a=urlparse.urlparse(http.path) + xp=a.path + xr='' + + if xp.startswith('/'): xp=xp[1:] + + u=xp.find('/') + if u>=0: + xr=xp[u+1:] + xp=xp[:u] + + xt=xp + + xpath['first']=xp + xpath['rest']=xr + xpath['query']=a.query + b=urlparse.parse_qs(a.query, keep_blank_values=True, ) + + xget={} + for k in b: + xget[k]=urlunquote(b[k][0]) + if sys.version_info[0]<3: + xget[k]=xget[k].decode('utf8') + + # Check POST + xpost={} + xpost1={} + + try: + headers = http.headers + content_type = headers.get('content-type') + ctype='' + if content_type != None: + ctype, pdict = cgi.parse_header(content_type) + # Python3 cgi.parse_multipart expects boundary to be bytes, not str. 
+ if sys.version_info[0]<3 and 'boundary' in pdict: + pdict['boundary'] = pdict['boundary'].encode() + + if ctype == 'multipart/form-data': + if sys.version_info[0]<3: + xpost1 = cgi.parse_multipart(http.rfile, pdict) + else: + xxpost1 = cgi.FieldStorage(fp=http.rfile, headers=headers, environ={'REQUEST_METHOD':'POST'}) + for k in xxpost1.keys(): + xpost1[k]=[xxpost1[k].value] + elif ctype == 'application/x-www-form-urlencoded': + length = int(http.headers.get('content-length')) + s=http.rfile.read(length) + if sys.version_info[0]>2: s=s.decode('utf8') + xpost1 = cgi.parse_qs(s, keep_blank_values=1) + + except Exception as e: + web_err({'http':http, 'type':xt, 'bin':bin}) + ck.out(ck.cfg['error']+bin.decode('utf8')) + return + + # Post processing + for k in xpost1: + v=xpost1[k] + if k.endswith('[]'): + k1=k[:-2] + xpost[k1]=[] + for l in v: + xpost[k1].append(urlunquote(l)) + else: + if k!='file_content': + xpost[k]=urlunquote(v[0]) + else: + xpost[k]=v[0] + + if k=='file_content': + fcrt=xpost1.get('file_content_record_to_tmp','') + if (type(fcrt)==list and len(fcrt)>0 and fcrt[0]=='yes') or fcrt=='yes': + fd, fn=tempfile.mkstemp(suffix='.tmp', prefix='ck-') # suffix is important - CK will delete such file! + os.close(fd) + + f=open(fn,'wb') + f.write(xpost[k]) + f.close() + + xpost[k+'_uploaded']=fn + del(xpost[k]) + k+='_uploaded' + else: + import base64 + xpost[k+'_base64']=base64.urlsafe_b64encode(xpost[k]).decode('utf8') + del(xpost[k]) + k+='_base64' + + if sys.version_info[0]<3: + xpost[k]=xpost[k].decode('utf8') + + # Prepare input and check if CK json present + ii=xget + ii.update(xpost) + + act=ii.get('action','') + + # Generate tmp file (to output images for example) + fd, fn=tempfile.mkstemp(suffix='.tmp', prefix='ck-') # suffix is important - CK will delete such file! 
+ os.close(fd) + if os.path.isfile(fn): os.remove(fn) + + # Get tmp dir + p=tempfile.gettempdir() + + # Execute command ********************************************************* +# ck.out('*** Received action request: ' + act) + if act=='get_host_platform_info': + r=ck.access({'action':'detect', + 'module_uoa':'platform'}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + s=json.dumps(r, indent=2, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + #############################################################################################################3 + elif act=='init_workflow': + + data_id=ii.get('data_id','') + + r=solution.init({'uid':data_id}) + + # start program + # r=ck.access({'action':'run', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name',''), + # 'cmd_key': 'use_continuous', + # 'deps.python': 'a699c0c7de43a121', + # 'quiet': 'yes'}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} + s=json.dumps(solution, indent=4, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + #############################################################################################################3 + elif act=='run_program': + + data_id=ii.get('data_id','') + + r=solution.run({'uid':data_id}) + + # start program + # r=ck.access({'action':'run', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name',''), + # 'cmd_key': 'use_continuous', + # 'deps.python': 'a699c0c7de43a121', + # 'quiet': 'yes'}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process 
error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} + s=json.dumps(solution, indent=4, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + #############################################################################################################3 + elif act=='benchmark_program': + + data_id=ii.get('data_id','') + + r=solution.benchmark({'uid':data_id}) + + if r['return']>0: + ck.out(config.CR_LINE) + ck.out("Error: "+r.get('error','')) + ck.out(config.CR_LINE) + + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + +# solution = {'status': True} +# s=json.dumps(solution, indent=4, sort_keys=True) + # Need to pass info about graphs + s=json.dumps(r, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + #############################################################################################################3 + elif act=='publish_result': + + data_id=ii.get('data_id','') + + r=solution.publish_result({'uid':data_id}) + + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + solution = {'status': True} +# s=json.dumps(solution, indent=4, sort_keys=True) + s=json.dumps(r, sort_keys=True) + + ck.out(config.CR_LINE) + ck.out("Success!") + ck.out(config.CR_LINE) + + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + #############################################################################################################3 + elif act=='get_program_result_image': + + data_id=ii['data_id'] + program_name=ii['program_name'] + + jpeg=ii.get('jpeg','') + + ck_entry=program_name.split(':') + + # Find solution + 
r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + p=r['path'] + + meta=r['dict'] + workflow_output_dir=meta.get('workflow_output_dir','') + + workflow_repo=meta.get('workflow_repo_url','') + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + cur_dir=os.path.join(p, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + if workflow_output_dir!='': + cur_dir=os.path.join(cur_dir, workflow_output_dir) + + # r=ck.access({'action':'find', + # 'module_uoa':'program', + # 'data_uoa':ii.get('program_name','')}) + # + # if r['return']>0: + # # Process error properly + # web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + # return + + # cur_dir = 'D:\\Work1\\CK\\ck-repos\\local\\solution\\demo-obj-detection-kitti-min-tf-cpu-win\\CK\\ck-tensorflow\\program\\squeezedet\\tmp\\out' #os.path.join(r['path'],"tmp/out") + # cur_dir='/home/cindex/CK/local/solution/demo-obj-detection-self-driving-win/CK/ck-tensorflow/program/squeezedet/tmp/out' + # cur_dir='/home/cindex/CK/local/solution/demo-obj-detection-kitti-min-tf-cpu-win/CK/ck-tensorflow/program/squeezedet/tmp/out' + + # find the penultimate image provided + try: + st = False + filepath = '' + filepath_buf = '' + + found_files=[] + + ck.out('') + ck.out('Checking for output files in directory:') + ck.out(' '+cur_dir) + ck.out('') + + sorted_list=sorted(os.listdir(cur_dir)) + for file in sorted_list: + if file.endswith(".png") and file.startswith("boxed_"): + found_files.append(file) + if len(found_files)==3: + break + except: + err = 'no files available' + web_err({'http':http, 'type':xt, 'bin':err.encode('utf8')}) + return + + if len(found_files)==0: + err = 'no files available' + web_err({'http':http, 'type':xt, 'bin':err.encode('utf8')}) + return + + if len(found_files)==1: + filepath='' + filepath_buf=found_files[0] + elif 
len(found_files)==2: + filepath='' + filepath_buf=found_files[1] + elif len(found_files)==3: + filepath=found_files[0] + filepath_buf=found_files[1] + + # Check if convert to jpeg + file_type='png' + pinp=os.path.join(cur_dir, filepath_buf) + + if jpeg=='yes': + quality=ii.get('jpeg_quality','') + if quality==None or quality=='': quality='70' + + pout=os.path.join(cur_dir, filepath_buf+'.jpg') + + s='convert -quality '+quality+' '+pinp+' '+pout + + ck.out('') + ck.out(' Converting to jpeg: '+s) + + os.system(s) + + pinp=pout + filepath_buf+='.jpg' + file_type='jpg' + + # First file will be deleted (only if 2 afterwards), second served + ck.out(' Loading file '+ filepath_buf) + r=ck.load_text_file({'text_file':pinp, 'keep_as_bin':'yes'}) + + if jpeg=='yes': + if os.path.isfile(pinp): + os.remove(pinp) + + # Remove first + if filepath!='': + ck.out(' Trying to delete file '+ filepath) + x=os.path.join(cur_dir, filepath) + if os.path.isfile(x): + os.remove(x) + + # Then finish checking previous one + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 'type':file_type, 'bin':bout}) + + return + + #############################################################################################################3 + elif act=='process_webcam': + + data_id=ii['data_id'] + program_name=ii['program_name'] + + ck_entry=program_name.split(':') + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! 
+ + meta=r['dict'] + + # Find workflow output path + workflow_input_dir=meta.get('workflow_input_dir','') + workflow_output_dir=meta.get('workflow_output_dir','') + workflow_repo=meta.get('workflow_repo_url','') + + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + workflow_dir=os.path.join(pp, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + + if workflow_input_dir!='': + p=os.path.join(workflow_dir, workflow_input_dir) + else: + p = os.path.join(workflow_dir, "tmp", "input") + + if not os.path.isdir(p): os.makedirs(p) + + if workflow_output_dir!='': + pout=os.path.join(workflow_dir, workflow_output_dir) + else: + pout=os.path.join(workflow_dir, "tmp") + + if not os.path.isdir(pout): os.makedirs(pout) + + # Record image + image_uri=xpost.get('image_uri','') + + x='data:image/jpeg;base64,' + if image_uri.startswith(x): + image64=image_uri[len(x):] + + # Finding last file and incrementing + ff='cr-stream-' + + l=os.listdir(p) + + inum=0 + ffound='' + for f in os.listdir(p): + if f.startswith(ff) and f.endswith('.jpg'): + j=f.find('.') + num=f[len(ff):j] + if int(num)>inum: + inum=int(num) + ffound=f + + # New logic: if file already exists, just skip next request from web (otherwise many parallel requests) + # When program starts, it should clean input/output to let this code continue processing image + if (inum>0): + time.sleep(1) + ss='request skipped because there is already file in queue' + ck.out(' Warning: '+ss+' ('+os.path.join(p,ffound)+') ...') + s='{"return":16, "error":"'+ss+'"}' + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + return + + # Otherwise continue processing ... 
+ if inum==0: + inum+=1 + sinum=str(inum) + filename = ff+('0'*(8-len(sinum)))+sinum + + filename2=filename+'.jpg' + pf=os.path.join(p, filename2) + + r=ck.convert_upload_string_to_file({'file_content_base64':image64, 'filename':pf}) + if r['return']>0: return r + + ck.out(' !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!') + ck.out(' Recorded external image to '+pf) + + # Need extra converting + pp1=os.path.join(pp, 'support-script-convert.sh') + if os.path.isfile(pp1): + ck.out('') + ck.out('Extra image processing ...') + ck.out('') + + extra_cmd='cd "'+p+'"\n' + extra_cmd+='. "'+pp1+'" '+filename2+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + else: + sinum=str(inum) + filename = ff+('0'*(8-len(sinum)))+sinum + + filename2=filename+'.jpg' + pf=os.path.join(p, filename2) + + # Need extra pushing + pp1=os.path.join(pp, 'support-script-push.sh') + if os.path.isfile(pp1): + ck.out('') + ck.out('Extra image pushing to device ...') + ck.out('') + + extra_cmd='cd "'+p+'"\n' + extra_cmd+='. "'+pp1+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + # If Android-like device wait for the file ... 
+ ppull=os.path.join(pp, 'support-script-pull.sh') + + # Waiting for output file + poutf=os.path.join(pout, filename +'.json') + + if not os.path.isfile(poutf): + ck.out ('Waiting for output file: '+poutf) + + while not os.path.isfile(poutf): + # Check if need to pull + if os.path.isfile(ppull): + ck.out('Trying to pull from device ...') + + extra_cmd='cd "'+pout+'"\n' + extra_cmd+='export SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CR_SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CODEREEF_SOLUTION_PATH="'+pp+'"\n' # Keeping for compatibility with older version + extra_cmd+='. "'+ppull+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + time.sleep(0.1) + + ck.out('') + ck.out('Found solution!') + ck.out('') + + with open(poutf) as json_file: + solution = json.load(json_file) + ck.out(json.dumps(solution, indent=2)) + + if os.path.isfile(poutf): + os.remove(poutf) + + if inum==1 and os.path.isfile(pf): + ck.out(' REMOVING '+pf) + os.remove(pf) + + ck.out('') + + s=json.dumps(solution, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + #############################################################################################################3 + elif act=='get_image': + num=ii.get('num','') + inum=int(num) + sinum=str(inum) + + # Finding last file and incrementing + ff='cr-stream-' + pf=os.path.join(p, ff+('0'*(8-len(sinum)))+sinum+'.jpg') + + ck.out(' Loaded file '+pf) + + r=ck.load_text_file({'text_file':pf, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 'type':'jpeg', 'bin':bout}) + + return + + #############################################################################################################3 + elif act=='get_result': + + data_id=ii['data_id'] + + # Find solution + 
r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! + + meta=r['dict'] + + program_name = meta.get('workflow','') + ck_entry=program_name.split(':') + + # Find workflow output path + result_file=meta.get('result_file','') + workflow_repo=meta.get('workflow_repo_url','') + + j=workflow_repo.rfind('/') + if j>0: + workflow_repo=workflow_repo[j+1:] + + workflow_dir=os.path.join(pp, 'CK', workflow_repo, ck_entry[0], ck_entry[1]) + + if result_file!='': + pout=os.path.join(workflow_dir, result_file) + else: + pout=os.path.join(workflow_dir, "tmp","tmp-ck-timer.json") + + # if not os.path.isdir(pout): os.makedirs(pout) + + + # If Android-like device wait for the file ... + ppull=os.path.join(pp, 'support-script-pull.sh') + + # Waiting for output file + if not os.path.isfile(pout): + ck.out ('Waiting for output file: '+pout) + + while not os.path.isfile(pout): + # Check if need to pull + if os.path.isfile(ppull): + ck.out('Trying to pull from device ...') + + extra_cmd='cd "'+pout+'"\n' + extra_cmd+='export SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CR_SOLUTION_PATH="'+pp+'"\n' + extra_cmd+='export CODEREEF_SOLUTION_PATH="'+pp+'"\n' # Keeping for compatibility with older version + extra_cmd+='. 
"'+ppull+'" '+filename+'\n' + + r=solution.run({'uid':data_id, 'cmd':extra_cmd}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + time.sleep(0.1) + + ck.out('') + ck.out('Found solution!') + ck.out('') + + rx=ck.load_json_file({'json_file':pout}) + if rx['return']>0: return rx + + rx=ck.flatten_dict(rx) + if rx['return']>0: return rx + + rdf=rx['dict'] + crdf={} + + # Remove first ## (do not need here) + for k in rdf: + v=rdf[k] + if k.startswith('##'): k=k[2:] + crdf[k]=v + ck.out(json.dumps(crdf, indent=2)) + + # if os.path.isfile(pout): + # os.remove(pout) + + # if inum==1 and os.path.isfile(pf): + # ck.out(' REMOVING '+pf) + # os.remove(pf) + + ck.out('') + + s=json.dumps(crdf, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + return + + elif act=='get_status': + data_id=ii['data_id'] + + # Find solution + r=ck.access({'action':'load', + 'module_uoa':'solution', + 'data_uoa':data_id}) + if r['return']>0: + # Process error properly + web_err({'http':http, 'type':xt, 'bin':r['error'].encode('utf8')}) + return + + pp=r['path'] # Path to solution! 
+ tmp_solStatus=os.path.join(pp, "tmp", "status.json") + + rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: return rx + + if not get_status_started: + ck.out(json.dumps(rx, indent=2)) + + rdf=rx['dict'] + + if not get_status_started: + ck.out('') + + s=json.dumps(rdf, indent=4, sort_keys=True) + web_out({'http':http, 'type':'json', 'bin':s.encode('utf8')}) + + get_status_started=True + + return + + #############################################################################################################3 + elif act=='heartbit': + + locdir = os.path.dirname(os.path.realpath(__file__)) + if not heartbit_started: + ck.out(' Local directory: '+locdir) + + # Finding last file and incrementing + pf=os.path.join(locdir, 'static/favicon.ico') + + if not heartbit_started: + ck.out(' Loaded file '+pf) + + heartbit_started=True + + r=ck.load_text_file({'text_file':pf, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'].encode('utf-8') + else: + bout=r['bin'] + + web_out({'http':http, 'type':'jpeg', 'bin':bout}) + + return + + + r={'return':0} + xt='web' + bout=b'TEST WORKS' + + web_out({'http':http, 'type':xt, 'bin':bout}) + return + + # Process output + if r['return']>0: + if os.path.isfile(fn): os.remove(fn) + + bout=r['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, + 'type':xt, + 'bin':bout}) + return + + # If json or web + # Try to load output file + if not os.path.isfile(fn): + web_err({'http':http, + 'type':xt, + 'bin':b'Output file was not created, see output ('+r['std'].encode('utf8')+b')!'}) + return + + r=ck.load_text_file({'text_file':fn, 'keep_as_bin':'yes'}) + if r['return']>0: + bout=r['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + + return + + bin=r['bin'] + + # Process JSON output from file + fx='' + + if sys.version_info[0]>2: bin=bin.decode('utf-8') + + ru=ck.convert_json_str_to_dict({'str':bin, 
'skip_quote_replacement':'yes'}) + if ru['return']>0: + bout=ru['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + + return + + rr=ru['dict'] + if rr['return']>0: + bout=rr['error'] + + try: bout=bout.encode('utf-8') + except Exception as e: pass + + web_err({'http':http, 'type':xt, 'bin':bout}) + return + + # Check if file was returned + fr=False + + if 'file_content_base64' in rr and rr.get('filename','')!='': + fr=True + + # Check if download + if (xt=='web' and fr) or (act=='pull' and xt!='json'): + import base64 + x=rr.get('file_content_base64','') + + fx=rr.get('filename','') + if fx=='': fx=ck.cfg['default_archive_name'] + + # Fixing Python bug + if sys.version_info[0]==3 and sys.version_info[1]<3: + x=x.encode('utf-8') + else: + x=str(x) + bin=base64.urlsafe_b64decode(x) # convert from unicode to str since base64 works on strings + # should be safe in Python 2.x and 3.x + + # Process extension + fn1, fne = os.path.splitext(fx) + if fne.startswith('.'): fne=fne[1:] + if fne!='': xt=fne + else: xt='unknown' + else: + # Check and output html + if rr.get('html','')!='': + bin=rr['html'].encode('utf-8') + else: + if sys.version_info[0]>2: # Unknown output + bin=bin.encode('utf-8') + + web_out({'http':http, 'type':xt, 'bin':bin, 'filename':fx}) + + return {'return':0} + +############################################################################## +# Tunnel functionality + +def process_web_request_post_via_tunnel(i): + + http=i['http'] + post=(i.get('post','')=='yes') + + target_url=tunnel_url+http.path + + ck.out('* Tunneling **************************************************************') + + try: + + if post: + post_body = http.rfile.read(int(http.headers.get_all('content-length', 0)[0])) + + parsed_headers={} + for h in http.headers: + parsed_headers[h]=http.headers[h] + + if post: receive = requests.post(target_url, headers=parsed_headers, verify=False, data=post_body, ) + else: 
receive = requests.get (target_url, headers=parsed_headers, verify=False) + + http.send_response(receive.status_code) + + received_headers = receive.headers + for h in received_headers: + h1=h.lower() + if '-encoding' not in h1 and h1!='content-length': http.send_header(h, received_headers[h]) + + http.send_header('Content-Length', len(receive.content)) + http.end_headers() + + http.wfile.write(receive.content) + + except Exception as e: + print ('Error: '+format(e)) + http.send_error(500, 'problem accessing remote host') + + return + +############################################################################## +# Class to handle web service requests + +class server_handler(BaseHTTPRequestHandler): + + """ + Input: Python http handler + Output: None + """ + + # Process only GET + def do_GET(self): + if tunnel_url!='': process_web_request_post_via_tunnel({'http':self}) + else: process_web_request({'http':self}) + return + + # Process GET and POST + def do_POST(self): + if tunnel_url!='': process_web_request_post_via_tunnel({'http':self, 'post':'yes'}) + else: process_web_request({'http':self}) + return + + def log_request(self, code='-', size='-'): + self.log_message('"%s" %s %s', self.requestline, str(code), str(size)) + return + + def log_error(self, format, *args): + self.log_message(format, *args) + return + +########################################################################### +# Start web service + +def start(i): + global tunnel_url + + # Check tunnel URL + tunnel=i.get('tunnel','') + if tunnel!=None and tunnel!='': + tunnel_url=tunnel + + ck.out('All web requests will be tunneled to '+tunnel_url) + + host=i.get('host') + if host=='' or host==None: host='localhost' + + port=i.get('port') + if port=='' or port==None: port='4444' + + # Assemble URL. 
+ url=host+':'+port + + ck.out('Starting web service for the client on '+url+' ...') + ck.out('') + + sys.stdout.flush() + + # We do not need secure HTTPS connection here since the user + # runs webbrowser on her/his machine and communicates with + # the CB service on the same machine via 127.0.0.1 + # while avoiding Internet! + + # Still it's possible to start this service with SSL + # but it will require a propoer SSL certificate + # otherwise the connection will not be validated + # if it's purely local ... + + # Get certificates for SSL + # ssl_certificate_file = {path to client.pem} + + # Generate it using "openssl req -new -x509 -keyout server.pem -out server.pem -days 365 -nodes" + + try: + server = ThreadedHTTPServer((host, int(port)), server_handler) + +# Needed for SSL connection (non-SSL connection will not work then) +# server.socket = ssl.wrap_socket (server.socket, server_side=True, +# certfile=ssl_certificate_file) + + # Prevent issues with socket reuse + server.allow_reuse_address=True + + server.serve_forever() + except KeyboardInterrupt: + ck.out('Keyboard interrupt, terminating web service ...') + server.socket.close() + return 1 + except OSError as e: + ck.out('Internal web service error ('+format(e)+')') + return 1 + + return 0 diff --git a/incubator/cbench/cbench/comm.py b/incubator/cbench/cbench/comm.py new file mode 100644 index 0000000000..57fdd3d072 --- /dev/null +++ b/incubator/cbench/cbench/comm.py @@ -0,0 +1,256 @@ +# +# Communication with the cK server +# Based on "perform_remote_action" function from the CK kernel +# +# Developer(s): Grigori Fursin, https://fursin.net +# + + +from . 
import config + +import ck.kernel as ck + +import json +import sys +import os + +############################################################################## +# Send JSON request to the cK portal + +def send(i): + """ + Input: { + action [str] - remote API action name + config [dict] - configuration for remote server + dict [dict] - dict to send to remote server + ownership [dict] - info about user ownership + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Import modules compatible with Python 2.x and 3.x + import urllib + + try: import urllib.request as urllib2 + except: import urllib2 + + try: from urllib.parse import urlencode + except: from urllib import urlencode + + # Get server and user config + config=i.get('config',{}) + + username=config.get('username','') +# if username=='' or username==None: +# return {'return':1, 'error':'Username is not defined'} + + api_key=config.get('api_key','') +# if api_key=='' or api_key==None: +# return {'return':1, 'error': 'API key is not defined'} + + url=config.get('server_url') + if url=='' or url==None: + return {'return':1, 'error': 'cK API URL is not defined'} + + remote_server_user=config.get('server_user') + if remote_server_user==None: remote_server_user='' + + remote_server_password=config.get('server_pass') + if remote_server_password==None: remote_server_password='' + + remote_skip_certificate_validation=config.get('server_skip_validation') + if remote_skip_certificate_validation==None: remote_skip_certificate_validation='' + + # Prepare dict to send to remote server + ii={} + ii['action']=i.get('action','') + ii['dict']=i.get('dict',{}) + ii['ownership']=i.get('ownership',{}) + ii['username']=username + ii['api_key']=api_key + + # Prepare post variables + r=ck.dumps_json({'dict':ii, 'skip_indent':'yes'}) + if r['return']>0: return r + + s=r['string'] + if sys.version_info[0]>2: s=s.encode('utf8') + + # Check if skip SSL 
certificate + ctx=None + add_ctx=False + + if remote_skip_certificate_validation=='yes': + + import ssl + + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + + add_ctx=True + + # If auth + auth=None + add_auth=False + + if remote_server_user!='' and remote_server_user!=None: + if remote_server_password==None: remote_server_password='' + + auth = urllib2.HTTPPasswordMgrWithDefaultRealm() + auth.add_password(None, url, remote_server_user, remote_server_password) + + add_auth=True + + # Prepare handler (TBD: maybe there is another, more elegant way?) + if add_auth and add_ctx: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(auth), urllib2.HTTPSHandler(context=ctx))) + elif add_auth: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(auth))) + elif add_ctx: + urllib2.install_opener(urllib2.build_opener(urllib2.HTTPSHandler(context=ctx))) + + # Prepare request + request = urllib2.Request(url, s, {'Content-Type': 'application/json'}) + + # Connect + try: + f=urllib2.urlopen(request) + except Exception as e: + return {'return':1, 'error':'Access to the cK portal failed ('+format(e)+')'} + + # Read from Internet + try: + s=f.read() + f.close() + except Exception as e: + return {'return':1, 'error':'Failed reading stream from the cK portal ('+format(e)+')'} + + # Check output + try: s=s.decode('utf8') + except Exception as e: pass + + # Try to convert output to dictionary + r=ck.convert_json_str_to_dict({'str':s, 'skip_quote_replacement':'yes'}) + if r['return']>0: + return {'return':1, 'error':'can\'t parse output from the cK portal ('+r['error']+'):\n'+s[:256]+'\n\n...)'} + + d=r['dict'] + + if 'return' in d: d['return']=int(d['return']) # Fix for some strange behavior when 'return' is not integer - should check why ... 
+ else: + d['return']=99 + d['error']='repsonse doesn\'t follow the cK standard' + + return d + +############################################################################## +# Low-level access to cK portal + +def access(i): + + """ + Input: { + (filename) [str] - load JSON from this file + or + (json) [str] - parse JSON string from command line (use ' instead of ") + or + (dict) [dict] - dictionary to send to the cK API + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + import json + + filename=i.get('filename','') + json_string=i.get('json','') + + display=i.get('display','') + + data=i.get('dict',{}) + + if filename=='' and json_string=='' and len(data)==0: + return {'return':1, 'error':'either "filename" or "json" or "dict" should define data to be pushed to cK API'} + + if filename!='': + r=ck.load_json_file({'json_file':filename}) + if r['return']>0: return r + + data2=r['dict'] + data.update(data2) + + if json_string!='': + json_string=json_string.replace("'", '"') + + data2=json.loads(json_string) + + data.update(data2) + if display=='': + display=False + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Prepare request + ii={'config':cfg} + ii.update(data) + + # Sending request to download + r=send(ii) + if r['return']>0: return r + + if display is True: + ck.out('Output:') + ck.out('') + + ck.out(json.dumps(r, indent=2)) + + return r + +############################################################################## +# Send JSON request to cK portal + +def download_file(i): + """ + Input: { + url [str] - full URL for a file to download + file [dict] - file to save + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + file_size - size of retreived file + } + """ + + url=i['url'] + fn=i['file'] + + # Import modules compatible with Python 2.x and 3.x + 
import urllib + + try: from urllib.request import urlretrieve + except: from urllib import urlretrieve + + # Connect + try: + urlretrieve(url, fn) + except Exception as e: + return {'return':1, 'error':'download failed ('+format(e)+')'} + + statinfo = os.stat(fn) + file_size=statinfo.st_size + + return {'return':0, 'file_size':file_size} diff --git a/incubator/cbench/cbench/comm_min.py b/incubator/cbench/cbench/comm_min.py new file mode 100644 index 0000000000..661b8e36b6 --- /dev/null +++ b/incubator/cbench/cbench/comm_min.py @@ -0,0 +1,87 @@ +# +# Minimal communication with the cK server +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +import json +import sys +import os + +############################################################################## +# Send JSON request to the cK portal (without CK) + +def send(i): + """ + Input: { + action [str] - remote API action name + url [str] - URL + dict [dict] - dict to send to remote server + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Import modules compatible with Python 2.x and 3.x + import urllib + + try: import urllib.request as urllib2 + except: import urllib2 + + try: from urllib.parse import urlencode + except: from urllib import urlencode + + url=i.get('url') + if url=='' or url==None: + return {'return':1, 'error': 'cK API URL is not defined'} + + # Prepare dict to send to remote server + ii={} + ii['action']=i.get('action','') + ii['dict']=i.get('dict',{}) + + # Prepare post variables + try: + if sys.version_info[0]>2: + s=json.dumps(ii, ensure_ascii=False).encode('utf8') + else: + s=json.dumps(ii, ensure_ascii=False, encoding='utf8') + except Exception as e: + return {'return':1, 'error':'problem converting dict to json ('+format(e)+')'} + + # Prepare request + request = urllib2.Request(url, s, {'Content-Type': 'application/json'}) + + # Connect + try: + f=urllib2.urlopen(request) + except Exception as e: + 
return {'return':1, 'error':'Access to the cK portal failed ('+format(e)+')'} + + # Read from Internet + try: + s=f.read() + f.close() + except Exception as e: + return {'return':1, 'error':'Failed reading stream from the cK portal ('+format(e)+')'} + + # Check output + try: s=s.decode('utf8') + except Exception as e: pass + + # Try to convert output to dictionary + try: + d=json.loads(s, encoding='utf8') + except Exception as e: + return {'return':1, 'error':'problem converting text to json ('+format(e)+')'} + + if 'return' in d: d['return']=int(d['return']) # Fix for some strange behavior when 'return' is not integer - should check why ... + else: + d['return']=99 + d['error']='repsonse doesn\'t follow the cK standard' + + return d + diff --git a/incubator/cbench/cbench/config.py b/incubator/cbench/cbench/config.py new file mode 100644 index 0000000000..23437715b7 --- /dev/null +++ b/incubator/cbench/cbench/config.py @@ -0,0 +1,250 @@ +# +# Global configuration +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +# CK entry to keep client configuration info +CK_CFG_REPO_UOA="local" +CK_CFG_DATA_UOA="cbench" +CK_CFG_MODULE_UID="b34231a3467566f8" # ck info module:cfg + +CK_CFG_MODULE_REPO_UOA="befd7892b0d469e9" # CK module UOA for REPO + +CR_DEFAULT_SERVER="https://cKnowledge.io" +CR_DEFAULT_SERVER_URL=CR_DEFAULT_SERVER+"/api/v1/?" 
+CR_DEFAULT_SERVER_USER="crowd-user" +CR_DEFAULT_SERVER_API_KEY="43fa84787ff65c2c00bf740e3853c90da8081680fe1025e8314e260888265033" + +PACK_SIZE_WARNING=5000000 + +CR_WORK_DIR='CR' +CR_SOLUTIONS_DIR='solutions' + +CR_MODULE_UOA='solution' + +PACK_FILE='pack.zip' + +CR_ENV_USERNAME='CR_USER' +CR_ENV_API_KEY='CR_KEY' + +CR_LINE='**************************************************************************' + +CR_SOLUTION_CK_COMPONENTS=[ + {'cid':'module:device', 'version':'1.0.0'}, + {'cid':'module:env', 'version':'1.1.0'}, + {'cid':'module:machine', 'version':'1.0.0'}, + {'cid':'module:misc', 'version':'1.0.0'}, + {'cid':'module:os', 'version':'1.0.0'}, + {'cid':'module:package', 'version':'1.2.0'}, + {'cid':'module:platform*', 'version':'1.0.0'}, + {'cid':'module:script', 'version':'1.0.0'}, + {'cid':'module:soft', 'version':'1.2.0'}, + {'cid':'module:docker', 'version':'1.0.0'}, + {'cid':'module:event', 'version':'1.0.0'}, + {'cid':'module:lib', 'version':'1.0.0'}, + {'cid':'module:program', 'version':'1.0.3'}, + {'cid':'module:result', 'version':'1.0.0'}, + {'cid':'module:solution', 'version':'1.0.0'}, + {'cid':'os:*', 'version':'1.0.0'}, + {'cid':'platform.init:*', 'version':'1.0.0'}, + {'cid':'script:download-and-install-package', 'version':'1.0.0'}, + {'cid':'soft:compiler.python', 'version':'1.0.0'}, + {'cid':'soft:tool.adb', 'version':'1.0.0'}, +] + +import ck.kernel as ck + +bootstrapping=False + +############################################################################## +# Load client configuration + +def load(i): + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + """ + + global bootstrapping + + import os + + # Get current configuration + cfg={ + 'server_url':CR_DEFAULT_SERVER_URL # Default + } + path='' + + ii={'action':'load', + 'repo_uoa':CK_CFG_REPO_UOA, + 
'module_uoa':CK_CFG_MODULE_UID, + 'data_uoa':CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): return r + + if r['return']==0: + cfg=r['dict'] + path=r['path'] + + if not bootstrapping and (r['return']==16 or cfg.get('bootstrapped','')!='yes'): + rx=update({'cfg':cfg}) + if rx['return']>0: return rx + + # Check overriding by env + v=os.environ.get(CR_ENV_USERNAME,'') + if v!='': cfg['username']=v + v=os.environ.get(CR_ENV_API_KEY,'') + if v!='': cfg['api_key']=v + + return {'return':0, 'dict':cfg, 'path':path} + +############################################################################## +# Update CK modules and configuration + +def update(i): + """ + Input: { + (force) [bool] - if True, force update + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + import os + + global bootstrapping + bootstrapping=True + + force=i.get('force') + cfg=i.get('cfg',{}) + + from . import obj + + title='Bootstrapping' + if cfg.get('bootstrapped','')=='yes': title='Updating' + + ck.out(title+' cBench to support portable actions and workflows:') + ck.out('') + + # Check release notes + server_url=cfg.get('server_url','') + if server_url=='': server_url='https://cKnowledge.io/api/v1/?' + + from . 
import comm_min + r=comm_min.send({'url':server_url, + 'action':'event', + 'dict':{'type':'get-cbench-bootstrap-notes'}}) + + notes=r.get('notes','') + if notes!='': + ck.out('***********************************************') + ck.out(notes) + ck.out('***********************************************') + + lst_all=[] + + sbf=os.environ.get('CB_SAVE_BOOTSTRAP_FILES','') + + if sbf=='': + fboot='cb-bootstrap-20200529' + files=[fboot+'.json'] + + if os.name=='nt': + files.append(fboot+'-win.json') + + for fn in files: + r=ck.gen_tmp_file({'prefix':'cb-bootstrap-', 'suffix':'.json'}) + if r['return']>0: return r + ftmp=r['file_name'] + + burl=CR_DEFAULT_SERVER+'/static/bootstrap/'+fn + + ck.out('Downloading '+burl) + + from . import comm + + rx=comm.download_file({'url':burl, 'file':ftmp}) + if rx['return']>0: return rx + + rx=ck.load_json_file({'json_file':ftmp}) + if rx['return']>0: return rx + + lst_all+=rx['dict'] + + os.remove(ftmp) + + r=obj.download({'components':lst_all, 'force':force}) + if r['return']>0 and r['return']!=8: return r + + else: + for x in CR_SOLUTION_CK_COMPONENTS: + r=obj.download({'cid':x['cid'], 'version':x.get('version',''), 'force':force}) + if r['return']>0: + if r['return']!=8: return r + else: ck.out(' Skipped - already exists!') + else: + lst_all+=r['components'] + + rx=ck.save_json_to_file({'json_file':sbf, 'dict':lst_all, 'sort_keys':'yes'}) + if rx['return']>0: return rx + + ck.out('') + + # Update cfg + cfg['bootstrapped']='yes' + + ii={'action':'update', + 'repo_uoa':CK_CFG_REPO_UOA, + 'module_uoa':CK_CFG_MODULE_UID, + 'data_uoa':CK_CFG_DATA_UOA, + 'dict':cfg, + 'sort_keys':'yes'} + + r=ck.access(ii) + + ck.out(title+' finished!') + ck.out('') + + return r + +############################################################################## +# Get path to work directory in a USER space + +def get_work_dir(i): + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if 
return>0 + + path [str] - path to work dir + } + """ + + import os + + # Get home user directory + from os.path import expanduser + home = expanduser("~") + + work_dir=os.path.join(home, CR_WORK_DIR) + if not os.path.isdir(work_dir): + os.makedirs(work_dir) + + return {'return':0, 'path':work_dir} diff --git a/incubator/cbench/cbench/graph.py b/incubator/cbench/cbench/graph.py new file mode 100644 index 0000000000..13a4363226 --- /dev/null +++ b/incubator/cbench/cbench/graph.py @@ -0,0 +1,359 @@ +# +# Support for graphs +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import config +from . import comm +from . import obj + +import ck.kernel as ck + +import json +import os +import copy + +meta_template={ + "meta": { + "scenario": "universal", + "scenario_uid": "3bf7371412455a8f", + "viz_engine": "ck_beta" + }, + "tags": [ + "result" + ] + } + +desc_template={ + "data_config": { + "default_key_x": "x", + "default_key_y": "y", + "default_sort_key": "x", + "table_view": [ + { + "key": "x", + "name": "X", + "type": "int" + }, + { + "key": "y", + "name": "Y", + "format": "%.2f", + "type": "float" + }, + { + "key": "submitter", + "name": "Submitter" + } + ] + } +} + +extra_info_desc=[{'key':'copyright', 'name':'copyright (optional)'}, + {'key':'license', 'name':'license (optional)'}, + {'key':'author', 'name':'author (optional)'}, + {'key':'author_email', 'name':'author email (optional)'}, + {'key':'author_webpage', 'name':'author webpage (optional)'}] + +############################################################################## +# Initialize a graph on a portal + +def init(i): + + """ + Input: { + uid [str] - graph identifyer + (version) [str] - graph version + (desc_file) [str] - file with graph description + (tags) [str] - tags separated by comma + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + 
""" + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + # CID ########################################################### + uid=i['uid'] + if uid==None: uid='' + + version=i.get('version') + if version==None: version='' + + desc_file=i.get('desc_file','') + if desc_file==None: desc_file='' + + # If UID!='', check if already exists ... + found=False + meta=meta_template + path='' + data_name='' + tags=[] + meta_info='' + source='' + extra_info={} + + if uid!='': + r=ck.access({'action':'load', + 'module_uoa':'result', + 'data_uoa':uid}) + if r['return']>0: + if r['return']!=16: return r + else: + found=True + meta=r['dict'] + path=r['path'] + data_name=r['data_name'] + + tags=meta.get('tags',[]) + source=meta.get('source','') + meta_info=meta.get('meta',{}).get('info','') + + extra_info=r['info'].get('control',{}) + + # Check if init from scratch and no title + if i.get('name')!=None and i.get('name','')!='': + data_name=i['name'].strip() + elif not found or data_name=='': + r=ck.inp({'text':'Select a title for your graph: '}) + if r['return']>0: return r + + data_name=r['string'].strip() + + meta['meta']['title']=data_name + + # Check if init from scratch and no title + if not found or meta_info=='': + r=ck.inp({'text':'Enter general info about your graph: '}) + if r['return']>0: return r + + x=r['string'].strip() + + if x=='': x=' ' + + meta['meta']['info']=x + + # Adding tags + if i.get('tags')!=None and i.get('tags','')!='': + xtags=i['tags'].strip().split(',') + + for t in xtags: + t1=t.strip() + if t1!='' and t1 not in tags: + tags.append(t1) + + meta['tags']=tags + + elif not found or (len(tags)==1 and 'result' in tags): + r=ck.inp({'text':'Enter tags for your graph separated by commas: '}) + if r['return']>0: return r + + xtags=r['string'].strip().split(',') + + for t in xtags: + t1=t.strip() + if t1!='' and t1 not in tags: + tags.append(t1) + + meta['tags']=tags + + # Checking 
source + if not found or source=='': + r=ck.inp({'text':'Enter source of results for your graph (can be URL): '}) + if r['return']>0: return r + + source=r['string'].strip() + + meta['source']=source + + # Checking authors + for x in extra_info_desc: + k=x['key'] + n=x['name'] + + if not found or extra_info.get(k,'')=='': + r=ck.inp({'text':'Enter '+n+': '}) + if r['return']>0: return r + + s=r['string'].strip() + + extra_info[k]=s + + # Creating/updating graph + a='add' + if found: a='update' + + ii={'action':a, + 'module_uoa':'result', + 'data_uoa':uid, + 'dict':meta, + 'sort_keys':'yes', + 'data_name':data_name, + 'substitute':'yes', + 'extra_info':extra_info} + + r=ck.access(ii) + if r['return']>0: return r + + data_uoa=r['data_uoa'] + data_uid=r['data_uid'] + path=r['path'] + + x='initialized' + if found: x='updated' + + ck.out('Graph was successfully '+x+':') + ck.out('') + ck.out(' CK UID: '+data_uid) + ck.out(' CK name: '+data_uoa) + ck.out(' CK path: '+path) + + # Add desc + p1=os.path.join(path, 'desc.json') + + dt=copy.deepcopy(desc_template) + if desc_file!='': + rx=ck.load_json_file({'json_file':desc_file}) + if rx['return']>0: return rx + dx=rx['dict'] + dt['data_config'].update(dx) + + if desc_file!='' or not os.path.isfile(p1): + rx=ck.save_json_to_file({'json_file':p1, 'dict':dt, 'sort_keys':'yes'}) + if rx['return']>0: return rx + + p2=os.path.join(path, '.cm', 'meta.json') + + ck.out('') + ck.out('You can continue updating graph using following files: ') + ck.out('') + ck.out(' Graph general meta info: '+p1) + ck.out(' See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=.cm/meta.json') + ck.out('') + ck.out(' Graph axes info: '+p2) + ck.out(' See example at '+config.CR_DEFAULT_SERVER+'/result/sota-mlperf-inference-results-v0.5-open-available/?action=download&filename=desc.json') + + # Need to publish + ck.out('') + rx=ck.inp({'text':'Publish graph on the portal (Y/n)?'}) + 
if rx['return']>0: return rx + s=rx['string'].strip().lower() + + if s=='' or s=='y': + ck.out('') + r=obj.publish({'cid':'result:'+data_uoa, + 'version':version, + 'force':True}) + + else: + ck.out('') + ck.out('You can publish your graph on the portal using the following commands when ready: ') + ck.out('') + ck.out(' cb publish result:'+data_uoa+' --version=1.0.0 --force (--private)') + + return r + +############################################################################## +# Push result to a graph on a portal + +def push(i): + + """ + Input: { + uid [str] - graph identifyer + (version) [str] - graph version + (filename) [str] - JSON file with results + (json) [str] - JSON string from command line (use ' instead of ") + (point) [str] - specific point name to add/update + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + + dict [dict] - configuration dictionary + path [str] - path to CK cfg entry + } + """ + + # CID ########################################################### + uid=i['uid'] + if uid=='': + return {'return':1, 'error':'graph UID is not defined!'} + + version=i.get('version') + if version==None: version='' + + filename=i.get('filename','') + json_string=i.get('json','') + + if filename=='' and json_string=='': + return {'return':1, 'error':'either "filename" or "json" should define results to be pushed'} + + point=i.get('point','') + + # Prepare data + data=[] + + if filename!='': + r=ck.load_json_file({'json_file':filename}) + if r['return']>0: return r + + data2=r['dict'] + if type(data2)==dict: + data2=[data2] + + data+=data2 + + if json_string!='': + import json + + json_string=json_string.replace("'", '"') + + data2=json.loads(json_string) + + if type(data2)==dict: + data2=[data2] + + data+=data2 + + # Send request + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check if username and API_Key are empty and then use default crowd-user ... 
+ username=cfg.get('username','') + if username=='' or username==None: + cfg['username']=config.CR_DEFAULT_SERVER_USER + cfg['api_key']=config.CR_DEFAULT_SERVER_API_KEY + + # Sending request to download + r=comm.send({'config':cfg, + 'action':'push_result', + 'dict':{ + 'data_uoa':uid, + 'version':version, + 'point':point, + 'data':data + } + }) + if r['return']>0: return r + url=r.get('url','') + + ck.out(' Successfully pushed your point to a graph!') + if url!='': + ck.out(' URL: '+url) + + return r diff --git a/incubator/cbench/cbench/main.py b/incubator/cbench/cbench/main.py new file mode 100644 index 0000000000..4249af83e2 --- /dev/null +++ b/incubator/cbench/cbench/main.py @@ -0,0 +1,602 @@ +# +# CMD parser +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +import click +import ck.kernel as ck + +############################################################################## +@click.group() +def cli(): + return 0 + +############################################################################## +def process_error(r): + + e=r.get('error','') + if e!='': + r['error']=e + + ck.err(r) + # Should not reach here since ck.err exits program + return + +# SETUP CLIENT ############################################################################# +@cli.command() + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('-s', '--server_url', 'server_url', required=False) +@click.option('-su', '--server_user', 'server_user', required=False) +@click.option('-sp', '--server_pass', 'server_pass', required=False) +@click.option('-ss', '--server_skip_validation', 'server_skip_validation', required=False) + +def setup(username, + api_key, + server_url, + server_user, + server_pass, + server_skip_validation): + ''' + Setup client. + ''' + from . 
import setup + return setup.setup({'username':username, + 'api_key':api_key, + 'server_url':server_url, + 'server_user':server_user, + 'server_pass':server_pass, + 'server_skip_validation':server_skip_validation}) + +# LOGIN TEST ############################################################################# +@cli.command() + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('-s', '--server_url', 'server_url', required=False) +@click.option('-su', '--server_user', 'server_user', required=False) +@click.option('-sp', '--server_pass', 'server_pass', required=False) +@click.option('-ss', '--server_skip_validation', 'server_skip_validation', required=False) + +def login(username, + api_key, + server_url, + server_user, + server_pass, + server_skip_validation): + ''' + Test login to the portal. + ''' + from . import setup + return setup.login({'username':username, + 'api_key':api_key, + 'server_url':server_url, + 'server_user':server_user, + 'server_pass':server_pass, + 'server_skip_validation':server_skip_validation}) + + return 0 + +# PUBLISH COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) +@click.option('--quiet', 'quiet', required=False, is_flag=True) +@click.option('--force', 'force', required=False, is_flag=True) +@click.option('--private', is_flag=True) +@click.option('-w', '--workspaces', 'workspaces', required=False) +@click.option('-v', '--version', 'version', required=False) +@click.option('--author', 'author', required=False) +@click.option('--author_id', 'author_id', required=False) +@click.option('--copyright', 'copyright', required=False) +@click.option('--license', 'license', required=False) 
+@click.option('--source', 'source', required=False) +@click.option('--permanent', is_flag=True) +@click.option('-et', '--extra_tags', 'extra_tags', required=False, default='') + +def publish(cid, + permanent, + tags, + extra_tags, + username, + api_key, + force, + quiet, + private, + workspaces, + version, + author, + author_id, + copyright, + license, + source): + ''' + Publish CK component to the portal. + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.publish({'cid':cid, + 'permanent':permanent, + 'tags':tags, + 'username':username, + 'api_key':api_key, + 'quiet':quiet, + 'force':force, + 'private':private, + 'workspaces':workspaces, + 'version':version, + 'author':author, + 'author_id':author_id, + 'copyright':copyright, + 'license':license, + 'source':source, + 'extra_tags':extra_tags}) + + if r['return']>0: process_error(r) + return 0 + +# Delete COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-u', '--username', 'username', required=False) +@click.option('-a', '--api_key', 'api_key', required=False) + +def delete(cid, + username, + api_key): + ''' + Delete CK component from the portal if not permanent! + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.delete({'cid':cid, + 'username':username, + 'api_key':api_key}) + + if r['return']>0: process_error(r) + return 0 + +# LIST VERSIONS OF A GIVEN COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +def versions(cid): + ''' + List versions of a given component at the portal. + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . 
import obj + r=obj.versions({'cid':cid}) + + if r['return']>0: process_error(r) + return 0 + +# OPEN PORTAL WITH A GIVEN COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +def open(cid): + ''' + Open portal web page with a given component + + CID: CK identifier ({repo UOA}:){module UOA}:{data UOA}. + ''' + from . import obj + r=obj.open_page({'cid':cid}) + + if r['return']>0: process_error(r) + return 0 + +# DOWNLOAD COMPONENT ############################################################################# +@cli.command() + +@click.argument('cid') + +@click.option('-v', '--version', 'version', required=False) +@click.option('-f', '--force', 'force', required=False, is_flag=True) +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-a', '--all', 'all', required=False, is_flag=True) + +def download(cid, + version, + force, + tags, + all): + ''' + Download CK component from the portal. + + CID: CK identifier {module UOA}:{data UOA}. + ''' + from . import obj + r=obj.download({'cid':cid, + 'version':version, + 'force':force, + 'tags':tags, + 'all':all}) + + if r['return']>0: process_error(r) + return 0 + +# BOOSTRAP ############################################################################# +@cli.command() + +@click.option('-f', '--force', 'force', required=False, is_flag=True) + +def update(force): + ''' + Update/bootstrap cK components. + ''' + + from . 
import config + r=config.update({'force':force}) + + if r['return']>0: process_error(r) + return 0 + +# INIT GRAPH ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +@click.option('-v', '--version', 'version', required=False) +@click.option('-d', '--desc_file', 'desc_file', required=False) +@click.option('-t', '--tags', 'tags', required=False) +@click.option('-n', '--name', 'name', required=False) + +def init_graph(uid, + version, + desc_file, + tags, + name): + ''' + Init graph at the portal. + + UID: portal graph identifier. + ''' + from . import graph + r=graph.init({'uid':uid, + 'version':version, + 'desc_file':desc_file, + 'tags':tags, + 'name':name}) + + if r['return']>0: process_error(r) + return 0 + +# PUSH RESULT ############################################################################# +@cli.command() + +@click.argument('uid', required=True) + +@click.option('-v', '--version', 'version', required=False, default='') +@click.option('-f', '--filename', 'filename', required=False, default='') +@click.option('-j', '--json', 'json_string', required=False, default='') +@click.option('-p', '--point', 'point', required=False, default='') + +def push_result(uid, + version, + filename, + json_string, + point): + ''' + Push result to a graph at the portal. + + UID: portal graph identifier. + ''' + + from . 
import graph + r=graph.push({'uid':uid, + 'version':version, + 'filename':filename, + 'json':json_string, + 'point':point}) + + if r['return']>0: process_error(r) + return 0 + +# ACCESS API ############################################################################# +@cli.command() + +@click.option('-f', '--filename', 'filename', required=False, default='') +@click.option('-j', '--json', 'json_string', required=False, default='') +@click.option('-m', '--mute', 'display', is_flag=True, default=True) + + +def access(filename, + json_string, + display): + ''' + Access Portal via JSON API. + ''' + from . import comm + r=comm.access({'filename':filename, + 'json':json_string, + 'display': display}) + + if r['return']>0: process_error(r) + return 0 + +# INIT SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +@click.option('-u', '--username', 'username', required=False, default='') +@click.option('-a', '--api_key', 'api_key', required=False, default='') +@click.option('-n', '--name', 'name', required=False, default='') +@click.option('-t', '--tags', 'tags', required=False, default='') +@click.option('-pp', '--python_path', required=False, default='') +@click.option('-pv', '--python_version', required=False, default='') +@click.option('-pvf', '--python_version_from', required=False, default='') +@click.option('-pvt', '--python_version_to', required=False, default='') +@click.option('-pl', '--python_localenv', 'python_localenv', is_flag=True, default=True) +@click.option('-ho', '--host_os', 'host_os', required=False, default='') +@click.option('-to', '--target_os', 'target_os', required=False, default='') +@click.option('-di', '--device_id', 'device_id', required=False, default='') +@click.option('-h', '--hostname', 'hostname', required=False, default='') +@click.option('-w', '--workflow', 'workflow', required=False, default='') +@click.option('-wr', '--workflow_repo_url', 
'workflow_repo_url', required=False, default='') +@click.option('-wcb', '--workflow_cmd_before', 'workflow_cmd_before', required=False, default='') +@click.option('-wca', '--workflow_cmd_after', 'workflow_cmd_after', required=False, default='') +@click.option('-wc', '--workflow_cmd', 'workflow_cmd', required=False, default='') +@click.option('-wce', '--workflow_cmd_extra', 'workflow_cmd_extra', required=False, default='') +@click.option('-wi', '--workflow_input', 'workflow_input', required=False, default='') # Input source (stream, webcam, etc) +@click.option('-wid', '--workflow_input_dir', 'workflow_input_dir', required=False, default='') # Input directory (will be cleaned) +@click.option('-wod', '--workflow_output_dir', 'workflow_output_dir', required=False, default='') # Output directory (will be cleaned) +@click.option('-d', '--desc_prereq', 'desc_prereq', required=False, default='') +@click.option('-dp', '--desc_prepare', 'desc_prepare', required=False, default='') +@click.option('-dr', '--desc_run', 'desc_run', required=False, default='') +@click.option('-s', '--add_extra_scripts', 'add_extra_scripts', required=False, default='') +@click.option('-e', '--add_extra_meta_from_file', 'add_extra_meta_from_file', required=False, default='') +@click.option('-rf', '--result_file', 'result_file', required=False, default='') +@click.option('--update_meta_and_stop', 'update_meta_and_stop', is_flag=True, default=False) +@click.option('--skip_graph_init', 'skip_graph_init', is_flag=True, default=False) +@click.option('-r', '--resume', 'resume', is_flag=True, default=False) +@click.option('-ss', '--skip_stop', 'skip_stop', is_flag=True, default=False) +@click.option('-g', '--graphs', 'graphs', required=False, default='') +@click.option('-dg', '--desc_graph', 'desc_graph', required=False, default='') +@click.option('-gc', '--graph_convertor', 'graph_convertor', required=False, default='') + +def init(uid, + username, + api_key, + name, + tags, + python_path, + 
python_version, + python_version_from, + python_version_to, + python_localenv, + host_os, + target_os, + device_id, + hostname, + workflow, + workflow_repo_url, + workflow_cmd_before, + workflow_cmd_after, + workflow_cmd, + workflow_cmd_extra, + workflow_input, + workflow_input_dir, + workflow_output_dir, + desc_prereq, + desc_prepare, + desc_run, + add_extra_scripts, + add_extra_meta_from_file, + result_file, + update_meta_and_stop, + skip_graph_init, + resume, + skip_stop, + graphs, + desc_graph, + graph_convertor): + ''' + Init portable solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.init({'uid':uid, + 'username':username, + 'api_key':api_key, + 'name':name, + 'tags':tags, + 'python_path':python_path, + 'python_version':python_version, + 'python_version_from':python_version_from, + 'python_version_to':python_version_to, + 'python_localenv':python_localenv, + 'host_os':host_os, + 'target_os':target_os, + 'device_id':device_id, + 'hostname':hostname, + 'workflow_repo_url':workflow_repo_url, + 'workflow':workflow, + 'workflow_cmd_before':workflow_cmd_before, + 'workflow_cmd_after':workflow_cmd_after, + 'workflow_cmd':workflow_cmd, + 'workflow_cmd_extra':workflow_cmd_extra, + 'workflow_input':workflow_input, + 'workflow_input_dir':workflow_input_dir, + 'workflow_output_dir':workflow_output_dir, + 'desc_prereq':desc_prereq, + 'desc_prepare':desc_prepare, + 'desc_run':desc_run, + 'add_extra_meta_from_file':add_extra_meta_from_file, + 'result_file':result_file, + 'add_extra_scripts':add_extra_scripts, + 'update_meta_and_stop':update_meta_and_stop, + 'skip_graph_init':skip_graph_init, + 'resume':resume, + 'skip_stop':skip_stop, + 'graphs':graphs, + 'desc_graph':desc_graph, + 'graph_convertor':graph_convertor}) + + if r['return']>0: process_error(r) + return 0 + +# ACTIVATE SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def activate(uid): + ''' + 
Activate virtual environment from the prepared solution. + + UID - solution identifier. + ''' + from . import solution + r=solution.activate({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# RUN SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +@click.option('-c', '--cmd', 'cmd', required=False, default='') + +def benchmark(uid, + cmd): + ''' + Benchmark solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.benchmark({'uid':uid, + 'cmd':cmd}) + + if r['return']>0: process_error(r) + return 0 + +# RUN SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +@click.option('-c', '--cmd', 'cmd', required=False, default='') + +def run(uid, + cmd): + ''' + Run portable solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.run({'uid':uid, + 'cmd':cmd}) + + if r['return']>0: process_error(r) + return 0 + +# LIST SOLUTIONS ############################################################################# +@cli.command() + +@click.argument('uid', required=False) + +def ls(uid): + ''' + List portable solutions. + + UID: solution identifier (can use wildcards).. + ''' + from . import solution + r=solution.ls({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# FIND SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def find(uid): + ''' + Find portable solution. + + UID: solution identifier. + ''' + from . import solution + r=solution.find({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# DELETE SOLUTION ############################################################################# +@cli.command() + +@click.argument('uid') + +def rm(uid): + ''' + Delete portable solution. + + UID: solution identifier (can use wildcards). + ''' + from . 
import solution + r=solution.rm({'uid':uid}) + + if r['return']>0: process_error(r) + return 0 + +# START SERVICE TO COMMUNICATE WITH THE PORTAL ############################################################################# +@cli.command() + +@click.option('-h', '--host', 'host', required=False) +@click.option('-p', '--port', 'port', required=False) +@click.option('-t', '--tunnel', 'tunnel', required=False) + + +def start(host, + port, + tunnel): + ''' + Start server. + ''' + + from . import client + return client.start({'host':host, + 'port':port, + 'tunnel':tunnel}) + +# START SERVICE TO COMMUNICATE WITH PORTAL ############################################################################# +@cli.command() + +def version(): + ''' + Show client version. + ''' + + from . import __version__ + + print (__version__) + + return 0 + +############################################################################## +if __name__ == "__main__": + cli() diff --git a/incubator/cbench/cbench/obj.py b/incubator/cbench/cbench/obj.py new file mode 100644 index 0000000000..873831fd7c --- /dev/null +++ b/incubator/cbench/cbench/obj.py @@ -0,0 +1,840 @@ +# +# Support for components +# +# Developer(s): Grigori Fursin, https://fursin.net +# + +from . import config +from . 
import comm + +import ck.kernel as ck + +import json +import zipfile +import os +import time + +skip_words_in_files=[ + 'tmp', + '.git', + '.pyc', + '__pycache__', + '.cache' +] + + +############################################################################## +# Delete CK component from the portal if not permanent + +def delete(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current directory (since will be changing it to get info about Git repo) + cur_dir=os.getcwd() + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check commands + # Username ########################################################## + username=cfg.get('username','') + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + return {'return':1, 'error':'Username is not defined'} + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + return {'return':1, 'error':'API key is not defined'} + + cfg['api_key']=api_key + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + + # Sending request to download + r=comm.send({'config':cfg, + 'action':'delete', + 'dict':{ + 'cid':cid + } + }) + if r['return']>0: return r + + ck.out(' Successfully deleted component(s) from the portal!') + + return {'return':0} + +############################################################################## +# Publish CK component to the portal + +def publish(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + (can use wildcards) + 
(tags) [str] - search multiple CK components by these tags separated by comma + (version) [str] - assign version + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current directory (since will be changing it to get info about Git repo) + cur_dir=os.getcwd() + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Check commands + # Username ########################################################## + username=cfg.get('username','') + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + return {'return':1, 'error':'Username is not defined'} + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + return {'return':1, 'error':'API key is not defined'} + + cfg['api_key']=api_key + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + tags=i.get('tags','') + + # Check if no module and use "solution" by default + if cid.find(':')<0: + cid='solution:'+cid + + # Version ########################################################### + version=i.get('version') + if version=='' or version==None: + version='1.0.0' + ck.out('Since --version is not defined, we use "1.0.0"') + + # Extra info about authors + author=i.get('author','') + if author==None: author='' + + author_id=i.get('author_id','') + if author_id==None: author_id='' + + copyright=i.get('copyright','') + if copyright==None: copyright='' + + license=i.get('license','') + if license==None: license='' + + source=i.get('source','') + if source==None: source='' + + sextra_tags=i.get('extra_tags','') + if sextra_tags==None: sextra_tags='' + + 
quiet=i.get('quiet',False) + force=i.get('force',False) + permanent=i.get('permanent',False) + + # List CK components + r=ck.access({'action':'search', + 'cid':cid, + 'tags':tags, + 'add_info':'yes', + 'add_meta':'yes', + 'common_func':'yes'}) + if r['return']>0: return r + + lst=r['lst'] + llst=len(lst) + + if llst==0: + ck.out('No CK objects found') + + num=0 + + # Sort lst by modules and then data + lst1=sorted(lst, key=lambda x: (x.get('repo_uoa',''), x.get('module_uoa',''), x.get('data_uoa',''))) + + for obj in lst1: + num+=1 + + # Basic info about CK object + repo_uoa=obj['repo_uoa'] + repo_uid=obj['repo_uid'] + + module_uoa=obj['module_uoa'] + module_uid=obj['module_uid'] + + data_uoa=obj['data_uoa'] + data_uid=obj['data_uid'] + + # Print info + ck.out(str(num)+' out of '+str(llst)+') '+repo_uoa+':'+module_uoa+':'+data_uoa) + + # Check name and date + data_name=obj.get('info',{}).get('data_name','') + if data_name==data_uoa: data_name='' + + data_meta=obj['meta'] + if data_name=='': + if data_meta.get('misc',{}).get('title','')!='': + data_name=data_meta['misc']['title'] + + data_date='' + if data_meta.get('misc',{}).get('date','')!='': + data_date=data_meta['misc']['date'] + + source2=data_meta.get('source','') + if source2=='': source2=source + + license2=data_meta.get('license','') + if license2=='': license2=license + + copyright2=data_meta.get('copyright','') + if copyright2=='': copyright2=copyright + + # Specialize per specific modules + not_digital_component=False + extra_dict={} + extra_tags=[] + + if module_uoa=='module': + extra_dict['last_module_actions']=[] + actions=data_meta.get('actions',{}) + for a in actions: + extra_dict['last_module_actions'].append(a+' '+data_uoa) + + elif module_uoa=='lib': + not_digital_component=True + extra_tags=['library'] + + if 'reproduced-papers' in data_meta.get('tags',[]): + extra_tags.append('reproduced-papers') + + data_meta2=data_meta.get('meta',{}) + + if data_name=='': + 
data_name=data_meta2.get('title','') + + all_authors=data_meta2.get('authors','') + if all_authors!='': + extra_dict['all_authors']=[] + for aa in all_authors.split(','): + if aa!='': aa=aa.strip() + if aa!='': + extra_dict['all_authors'].append(aa) + + for k in ['badge_acm_artifact_available', 'badge_acm_artifact_functional', + 'badge_acm_artifact_reusable', 'badge_acm_results_replicated', + 'badge_acm_results_reproduced']: + if data_meta2.get(k,'')=='yes': + extra_tags.append(k) + + elif module_uoa=='event' or module_uoa=='repo': + not_digital_component=True + + # Get info of the first creation + first_creation=obj['info'].get('control',{}) + + # Load info about repo + repo_dict={} + + if not force and repo_uoa=='local' and module_uoa!='repo': # Normally skip everything from local unless we publish repos themselves + ck.out(' SKIPPED') + continue + + if module_uoa=='repo': + if not force and data_uoa=='local': + ck.out(' SKIPPED') + continue + + repo_dict=obj['meta'] + + elif repo_uoa!='default' and repo_uoa!='local': + r=ck.access({'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_REPO_UOA, + 'data_uoa':repo_uid, + 'common_func':'yes'}) + if r['return']>0: return r + repo_dict=r['dict'] + if 'path' in repo_dict: + del(repo_dict['path']) + + # Generate temp file to pack + r=ck.gen_tmp_file({'prefix':'obj-', 'suffix':'.zip'}) + if r['return']>0: return r + + fn=r['file_name'] + + # Pack component + p=obj['path'] + + zip_method=zipfile.ZIP_DEFLATED + + ii={'path':p, 'all':'yes'} + + # Prune files for solution + if module_uoa=='solution': + ii['ignore_names']=['CK','venv'] + + r=ck.list_all_files(ii) + if r['return']>0: return r + + fl=r['list'] + + # Write archive + try: + f=open(fn, 'wb') + z=zipfile.ZipFile(f, 'w', zip_method) + for fx in fl: + add=True + for k in skip_words_in_files: + if k in fx: + add=False + break + + if add: + p1=os.path.join(p, fx) + z.write(p1, fx, zip_method) + z.close() + f.close() + + except 
Exception as e: + return {'return':1, 'error':'failed to prepare archive ('+format(e)+')'} + + # Check size + statinfo = os.stat(fn) + pack_size=statinfo.st_size + + # Check problems with repository or components + x='' + if repo_dict.get('remote','')=='yes': + x+='remote repo;' + if repo_dict.get('private','')=='yes': + x+='private repo;' + if repo_dict.get('url','')=='' and repo_uoa!='default': + x+='repo not shared;' + if pack_size>config.PACK_SIZE_WARNING: + x+='pack size ('+str(pack_size)+') > '+str(config.PACK_SIZE_WARNING)+';' + + skip_component=False + if not force and x!='': + if quiet: + skip_component=True + else: + r=ck.inp({'text':' This component has potential issues ('+x+'). Skip processing (Y/n)? '}) + if r['return']>0: return r + s=r['string'].strip() + if s=='' or s=='Y' or s=='y': + skip_component=True + + if skip_component: + ck.out(' SKIPPED ('+x+')') + + if os.path.isfile(fn): + os.remove(fn) + + continue + + # Convert to MIME to send over internet + r=ck.convert_file_to_upload_string({'filename':fn}) + if r['return']>0: return r + + pack64=r['file_content_base64'] + + if os.path.isfile(fn): + os.remove(fn) + + # Check workspaces + lworkspaces=[] + workspaces=i.get('workspaces','') + if workspaces!=None: + lworkspaces=workspaces.strip().split(',') + + # Get extra info about repo + os.chdir(p) + + repo_info={} + + if repo_dict.get('private','')!='yes': + repo_info={'publish_repo_uoa':repo_uoa, + 'publish_repo_uid':repo_uid} + + # Get current Git URL + r=ck.run_and_get_stdout({'cmd':['git','config','--get','remote.origin.url']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_url']=x + + # Get current Git branch + r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--abbrev-ref','HEAD']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_branch']=x + + # Get current Git checkout + 
r=ck.run_and_get_stdout({'cmd':['git','rev-parse','--short','HEAD']}) + if r['return']==0 and r['return_code']==0: + x=r['stdout'].strip() + if x!='': repo_info['remote_git_checkout']=x + + repo_info['dict']=repo_dict + + # Add extra tags + for et in sextra_tags.split(','): + et=et.strip().lower() + if et!='': + extra_tags.append(et) + + #TBD: owner, version, info about repo + # Sending request + r=comm.send({'config':cfg, + 'action':'publish', + 'ownership':{ + 'private':i.get('private', False), + 'workspaces':lworkspaces + }, + 'dict':{ + 'publish_module_uoa':module_uoa, + 'publish_module_uid':module_uid, + 'publish_data_uoa':data_uoa, + 'publish_data_uid':data_uid, + 'publish_data_name':data_name, + 'publish_data_date':data_date, + 'publish_pack':pack64, + 'publish_pack_size':pack_size, + 'repo_info':repo_info, + 'first_creation':first_creation, + 'version':version, + 'author':author, + 'author_id':author_id, + 'copyright':copyright2, + 'license':license2, + 'source':source2, + 'not_digital_component':not_digital_component, + 'extra_dict':extra_dict, + 'extra_tags':extra_tags, + 'permanent':permanent + } + }) + if r['return']>0: + ck.out(' WARNING: Portal API returned error: '+r['error']) + else: + data_uid=r['data_uid'] + ck.out(' cK component ID: '+data_uid) + purl=r.get('url','') + if purl!='': + ck.out(' cK component URL: '+purl) + + os.chdir(cur_dir) + + return {'return':0} + +############################################################################## +# List versions of a given CK component at the portal + +def versions(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return 
{'return':1, 'error':'CK entry (CID) is not defined'} + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Call Portal API + r=comm.send({'config':cfg, + 'action':'list_versions', + 'dict':{ + 'module_uoa':module_uoa, + 'data_uoa':data_uoa + } + }) + if r['return']>0: return r + + versions=r.get('versions',[]) + for v in versions: + vv=v.get('version','') + dt=v.get('iso_datetime','').replace('T',' ') + + ck.out(vv+' ('+dt+')') + + return r + +############################################################################## +# Open portal with a given CK component + +def open_page(i): + + """ + Input: { + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # URL + url=cfg.get('server_url','') + if url!='': + h=url.find('api/') + if h>0: + url=url[:h] + else: + url='' + + if url=='': + url=config.CR_DEFAULT_SERVER + + # CID ########################################################### + cid=i.get('cid') + + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Form URL + url+='c/'+module_uoa+'/'+data_uoa + + ck.out('Opening web page '+url+' ...') + + import webbrowser + webbrowser.open(url) + + return {'return':0} + +############################################################################## +# Download CK component from the portal to the local repository + +def download(i): + + """ + Input: { + components - pre-loaded components from bootstrapping + or + cid [str] - CK CID of format (repo UOA:)module UOA:data UOA + (can use wildcards) + + + (version) 
[str] - assign version + (force) [bool] - if True, force download even if components already exists + + (tags) [str] - can search by tags (usually soft/package) + + (all) [bool] - if True, download dependencies (without force!) + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + sbf=os.environ.get('CB_SAVE_BOOTSTRAP_FILES','') + + force=i.get('force') + al=i.get('all') + + skip_module_check=i.get('skip_module_check',False) + + tags=i.get('tags','') + + spaces=i.get('spaces','') + + lst=i.get('components',[]) + + rr={'return':0} + + if len(lst)>0: + preloaded=True + msg='Processing' + msg2='processed' + skip_module_check=True + + repo_uoa='local' + + ck.cfg['check_missing_modules']='no' # Important not to check missing modules! + else: + preloaded=False + msg='Downloading' + msg2='downloaded' + + # CID ########################################################### + cid=i.get('cid') + if cid=='' or cid==None: + return {'return':1, 'error':'CK entry (CID) is not defined'} + + version=i.get('version') + if version==None: version='' + + # Parse CID + r=ck.parse_cid({'cid':cid}) + if r['return']>0: return r + + repo_uoa=r.get('repo_uoa','') + data_uoa=r.get('data_uoa','') + module_uoa=r.get('module_uoa','') + + # Get current configuration + r=config.load({}) + if r['return']>0: return r + cfg=r['dict'] + + # Sending request to download + rr=comm.send({'config':cfg, + 'action':'download', + 'dict':{ + 'module_uoa':module_uoa, + 'data_uoa':data_uoa, + 'version':version, + 'tags':tags + } + }) + if rr['return']>0: + return rr + + lst=rr['components'] + + for l in lst: + + furl=l['file_url'] + fsize=l['file_size'] + + fmd5=l['file_md5'] + + muoa=l['module_uoa'] + muid=l['module_uid'] + + duoa=l['data_uoa'] + duid=l['data_uid'] + + dependencies=l.get('dependencies',[]) + + xcid=muoa+':'+duoa + + ck.out('* '+msg+' CK component "'+xcid+'" ('+str(fsize)+' bytes)') + + # Check if module exists + if 
not skip_module_check: + r=ck.access({'action':'find', + 'module_uoa':'module', + 'data_uoa':muoa, + 'common_func':'yes'}) + if r['return']>0: + if r['return']!=16: return r + + x='module:'+muoa + if repo_uoa!='': x=repo_uoa+':'+x + +# FGG: we should not add "version" for dependencies or related components since it's not the same! +# r=download({'cid':x, 'force':force, 'version':version, 'skip_module_check':True, 'all':al}) + + r=download({'cid':x, 'force':force, 'skip_module_check':True, 'all':al}) + if r['return']>0: return r + + # Check if entry already exists + path='' + r=ck.access({'action':'find', + 'common_func':'yes', + 'repo_uoa':repo_uoa, +# 'module_uoa':muid, + 'module_uoa':muoa, + 'data_uoa':duoa}) + if r['return']==0: + if not force: + return {'return':8, 'error':' Already exists locally ("'+xcid+'")'} + else: + if r['return']!=16: return r + + r=ck.access({'action':'add', + 'common_func':'yes', + 'repo_uoa':repo_uoa, +# 'module_uoa':muid, + 'module_uoa':muoa, + 'data_uoa':duoa, + 'data_uid':duid, + 'ignore_update':'yes'}) + if r['return']>0: return r + + path=r['path'] + + # Prepare pack + ppz=os.path.join(path, config.PACK_FILE) + + if os.path.isfile(ppz): +# if not force: +# return {'return':1, 'error':'pack file already exists ('+ppz+')'} + os.remove(ppz) + + # Download and save pack to file + tstart=time.time() + fpack64=l.get('file_base64','') + + if fpack64!='': + rx=ck.convert_upload_string_to_file({'file_content_base64':fpack64, 'filename':ppz}) + if rx['return']>0: return rx + else: + rx=comm.download_file({'url':furl, 'file':ppz}) + if rx['return']>0: return rx + + # Save bootstrap info (debug) + if sbf!='': + rx=ck.convert_file_to_upload_string({'filename':ppz}) + if rx['return']>0: return rx + l['file_base64']=rx['file_content_base64'] + + # MD5 of the pack + rx=ck.load_text_file({'text_file':ppz, 'keep_as_bin':'yes'}) + if rx['return']>0: return rx + bpack=rx['bin'] + + import hashlib + md5=hashlib.md5(bpack).hexdigest() + + if md5!=fmd5: 
+ return {'return':1, 'error':'MD5 of the newly created pack ('+md5+') did not match the one from the portal ('+fmd5+')'} + + # Unpack to src subdirectory + import zipfile + + f=open(ppz,'rb') + z=zipfile.ZipFile(f) + for d in z.namelist(): + if d!='.' and d!='..' and not d.startswith('/') and not d.startswith('\\'): + pp=os.path.join(path,d) + if d.endswith('/'): + # create directory + if not os.path.exists(pp): os.makedirs(pp) + else: + ppd=os.path.dirname(pp) + if not os.path.exists(ppd): os.makedirs(ppd) + + # extract file + fo=open(pp, 'wb') + fo.write(z.read(d)) + fo.close() + + if pp.endswith('.sh') or pp.endswith('.bash'): + import stat + st=os.stat(pp) + os.chmod(pp, st.st_mode | stat.S_IEXEC) + + f.close() + + tstop=time.time()-tstart + + # Remove pack file + os.remove(ppz) + + # Note + if not preloaded: + ck.out(spaces+' Successfully '+msg2+' ('+('%.2f' % tstop)+' sec)!') # to '+path) + + # Check deps + if al: + if len(dependencies)>0: + ck.out(spaces+' Checking dependencies ...') + + for dep in dependencies: + muoa=dep.get('module_uid','') + duoa=dep.get('data_uid','') + + tags=dep.get('tags',[]) + xtags='' + if len(tags)>0: + xtags=','.join(tags) + muoa='package' + duoa='' + + cid=muoa+':'+duoa + rx=download({'cid':cid, + 'all':al, + 'tags':xtags, + 'spaces':spaces+' '}) + if rx['return']>0 and rx['return']!=8 and rx['return']!=16: return rx + if rx['return']==16: + if xtags=='': return rx + rx=download({'cid':'soft:', + 'all':al, + 'tags':xtags, + 'spaces':spaces+' '}) + if rx['return']>0 and rx['return']!=8: return rx + + return rr diff --git a/incubator/cbench/cbench/setup.py b/incubator/cbench/cbench/setup.py new file mode 100644 index 0000000000..7625c1e5fc --- /dev/null +++ b/incubator/cbench/cbench/setup.py @@ -0,0 +1,190 @@ +# +# Setup client +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . 
import comm + +import ck.kernel as ck + +import json + +############################################################################## +# Setup cBench + +def setup(i): + + """ + Input: { + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + + # Get current configuration + cfg={} + + ii={'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): ck.err(r) + + if r['return']==0: cfg=r['dict'] + + # Check commands + + # Username ########################################################## + username=cfg.get('username','') + + if i.get('username')!=None: username=i['username'] + + if username=='' or username==None: + r=ck.inp({'text':'Enter cK username: '}) + if r['return']>0: ck.err(r) + + username=r['string'].strip() + + if username==None: username='' + + cfg['username']=username + + # API key ########################################################### + api_key=cfg.get('api_key','') + + if i.get('api_key')!=None: api_key=i['api_key'] + + if api_key=='' or api_key==None: + r=ck.inp({'text':'Enter your cK API key: '}) + if r['return']>0: ck.err(r) + + api_key=r['string'].strip() + + if api_key==None: api_key='' + + cfg['api_key']=api_key + + # Server URL ########################################################### + server_url=cfg.get('server_url','') + + if i.get('server_url')!=None and i.get('server_url')!='': server_url=i['server_url'] + + if server_url==None or server_url=='': server_url=config.CR_DEFAULT_SERVER_URL + + cfg['server_url']=server_url + + # Server User ########################################################### + server_user=cfg.get('server_user','') + + if i.get('server_user')!=None and i.get('server_user')!='': server_user=i['server_user'] + + if server_user!=None and server_user!='': cfg['server_user']=server_user + + # Server Pass 
########################################################### + server_pass=cfg.get('server_pass','') + + if i.get('server_pass')!=None and i.get('server_pass')!='': server_pass=i['server_pass'] + + if server_pass!=None and server_pass!='': cfg['server_pass']=server_pass + + # Server Skip Certificate Validation ########################################################### + server_skip_validation=cfg.get('server_skip_validation','') + + if i.get('server_skip_validation')!=None and i.get('server_skip_validation')!='': server_skip_validation=i['server_skip_validation'] + + if server_skip_validation=='yes': cfg['server_skip_validation']=server_skip_validation + + # Save configuration + r=ck.access({'action':'update', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA, + 'dict':cfg, + 'sort_keys':'yes'}) + if r['return']>0: ck.err(r) + + # Print (new/updated) configuration + ck.out('') + ck.out('Current cBench configuration:') + + ck.out('') + ck.out(json.dumps(cfg, indent=2, sort_keys=True)) + + return 0 + +######################################################################################## +# Test login to the cK portal + +def login(i): + + """ + Input: { + (username) [str] + (api_key) [str] + (server_url) [str] + (server_user) [str] + (server_pass) [str] + (server_skip_validation) [str] + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get current configuration + cfg={} + + ii={'action':'load', + 'repo_uoa':config.CK_CFG_REPO_UOA, + 'module_uoa':config.CK_CFG_MODULE_UID, + 'data_uoa':config.CK_CFG_DATA_UOA} + + r=ck.access(ii) + if (r['return']>0 and r['return']!=16): ck.err(r) + + # If not found, setup client + if r['return']==16: + setup(i) + + # Load again + cfg={} + +# ii={'action':'load', +# 'repo_uoa':config.CK_CFG_REPO_UOA, +# 'module_uoa':config.CK_CFG_MODULE_UID, +# 'data_uoa':config.CK_CFG_DATA_UOA} +# +# 
r=ck.access(ii) +# if r['return']>0: ck.err(r) + + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + + # Update cfg + for k in ['username', 'api_key', 'server_url', 'server_user', 'server_pass', 'server_skip_validation']: + v=i.get(k,'') + if v==None: v='' + if v!='': cfg[k]=v + + # Sending request to test connection + r=comm.send({'config':cfg, + 'action':'login' + }) + if r['return']>0: ck.err(r) + + # Success + ck.out('cK login tested successfully!') + + return 0 diff --git a/incubator/cbench/cbench/solution.py b/incubator/cbench/cbench/solution.py new file mode 100644 index 0000000000..4846cecb98 --- /dev/null +++ b/incubator/cbench/cbench/solution.py @@ -0,0 +1,1865 @@ +# +# Support for portable solutions +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +from . import config +from . import comm +from . import obj +from . import graph + +import ck.kernel as ck + +import json +import zipfile +import os +import locale + +############################################################################ +# Get some parameters of a local platform + +def get_platform_desc(i): + + # Get platform info + # Check host/target OS/CPU + hos=i.get('host_os','') + tos=i.get('target_os','') + tdid=i.get('device_id','') + + # Get some info about platforms + ii={'action':'detect', + 'module_uoa':'platform.os', + 'host_os':hos, + 'target_os':tos, + 'device_id':tdid} + + if i.get('skip_info_collection','')!='': + ii['skip_info_collection']=i['skip_info_collection'] + + r=ck.access(ii) + if r['return']>0: return r + + hosd=r['host_os_dict'] + host_os_name=hosd.get('ck_name3','') + if host_os_name=='': + host_os_name=hosd.get('ck_name2','') + if host_os_name=='win': host_os_name='windows' + + if host_os_name=='': + return {'return':1, 'error':'your CK OS component is outdated! 
Try "ck pull repo:ck-env"'} + + # Extra info + host_desc={} + + if host_os_name=='windows': + host_desc['extra_cmd']='call ' + host_desc['venv_bin']='Scripts' + host_desc['venv_activate']='activate.bat' + host_desc['python_bin']='python.exe' + host_desc['activate_cmd']='cmd' + else: + host_desc['extra_cmd']='' + host_desc['venv_bin']='bin' + host_desc['venv_activate']='activate' + host_desc['python_bin']='python' + host_desc['activate_cmd']='bash' + + r['host_desc']=host_desc + + return r + +############################################################################ +# Initialize solution (portable workflow) +# Try to download existing one from the platform +# If doesn't exist, initialize the new one locally + +def init(i): + """ + Input: { + uid [str] - platform identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Save current directory + cur_dir=os.getcwd() + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + # Get platform info + ck.out(config.CR_LINE) + ck.out('Detecting minimal platform info ...') + + i['skip_info_collection']='yes' + rplat=get_platform_desc(i) # Pass input from init + if rplat['return']>0: return rplat + + hos=rplat['host_os_uid'] + hosx=rplat['host_os_uoa'] + hosd=rplat['host_os_dict'] + hosd_extra=rplat['host_desc'] + + hplat=hosd['ck_name'] + + tos=rplat['os_uid'] + tosx=rplat['os_uoa'] + tosd=rplat['os_dict'] + + tdid=rplat.get('device_id','') + + resume=i.get('resume') + if resume==None: resume=False + + # Get solution UID + uid=i['uid'] + if uid==None: + r=ck.gen_uid({}) + if r['return']>0: return r + uid=r['data_uid'] + + # Check if entry already exists + ii={'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid} + r=ck.access(ii) + if r['return']==0: + p=r['path'] + px=os.path.join(p, '.cm', 'meta.json') + + dd=r['dict'] + + 
ck.out(config.CR_LINE) + ck.out("Preloaded solution meta from "+px) + else: + if r['return']!=16: return r + + ck.out(config.CR_LINE) + r=ck.out('Solution "'+uid+'" is not found locally. Attempting to download from the portal ...') + + dd = {} + + r=obj.download({'cid':'local:solution:'+uid}) + if r['return']>0: + if r['return']!=16: return r + + ck.out('') + r=ck.inp({'text':'Warning: solution was not found on the portal. Do you want to initialize the new one (Y/n): '}) + if r['return']>0: return r + + x=r['string'].strip() + if x=='n' or x=='N': + return {'return':16, 'error':'Solution was not found on the portal'} + + else: + ii={'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid} + r=ck.access(ii) + if r['return']>0: return r + + p=r['path'] + px=os.path.join(p, '.cm', 'meta.json') + + dd=r['dict'] + + ck.out(config.CR_LINE) + ck.out("Preloaded solution meta from "+px) + + # Get extra vars + workflow=i.get('workflow','') + if workflow=='': workflow=dd.get('workflow','') + + workflow_repo_url=i.get('workflow_repo_url','') + if workflow_repo_url=='': workflow_repo_url=dd.get('workflow_repo_url','') + + workflow_cmd=i.get('workflow_cmd','') + if workflow_cmd=='': workflow_cmd=dd.get('workflow_cmd','') + + workflow_cmd_before=i.get('workflow_cmd_before','') + if workflow_cmd_before=='': workflow_cmd_before=dd.get('workflow_cmd_before','') + + workflow_cmd_after=i.get('workflow_cmd_after','') + if workflow_cmd_after=='': workflow_cmd_after=dd.get('workflow_cmd_after','') + + workflow_cmd_extra=i.get('workflow_cmd_extra','') + if workflow_cmd_extra=='': workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input=i.get('workflow_input','') + if workflow_input=='': workflow_input=dd.get('workflow_input','') + + workflow_input_dir=i.get('workflow_input_dir','') + if workflow_input_dir=='': workflow_input_dir=dd.get('workflow_input_dir','') + + workflow_output_dir=i.get('workflow_output_dir','') + if workflow_output_dir=='': 
workflow_output_dir=dd.get('workflow_output_dir','') + + python_version=i.get('python_version','') + # if python_version!='': + # i['python_version_from']=python_version + # i['python_version_to']=python_version + + python_version_from=i.get('python_version_from','') + if python_version_from=='': python_version_from=dd.get('python_version_from','') + if python_version_from==' ': python_version_from='' + + python_version_to=i.get('python_version_to','') + if python_version_to=='': python_version_to=dd.get('python_version_to','') + if python_version_to==' ': python_version_to='' + + # Check graphs + graphs=i.get('graphs','') + if graphs=='': + graphs=dd.get('graphs',[]) + else: + graphs=graphs.split(',') + i['graphs']=graphs + + tos=i.get('target_os','') + if tos=='': tos=dd.get('target_os','') + + # Update meta and create entry for a solution + name=i.get('name','') + tags=i.get('tags','') + + for k in ['host_os', 'target_os', 'device_id', 'hostname', + 'workflow', 'workflow_repo_url', + 'workflow_cmd_before', 'workflow_cmd_after', + 'workflow_cmd', 'workflow_cmd_extra', 'workflow_input', + 'workflow_input_dir', 'workflow_output_dir', 'result_file', + 'python_version', 'python_version_from', 'python_version_to', + 'graphs']: + v=i.get(k) + if v!=None and v!='': + dd[k]=v + + # dd['detected_platform_info']=rplat + + dd['tags']=["solution"] + + from . 
import __version__ + dd['client_version']=__version__ + + # Check if extra meta + add_extra_meta_from_file=i.get('add_extra_meta_from_file','') + if add_extra_meta_from_file!='': + r=ck.load_json_file({'json_file':add_extra_meta_from_file}) + if r['return']>0: return r + dd.update(r['dict']) + + # Add/update CK entry for the solution + update_dict={'action':'update', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'dict':dd, + 'sort_keys':'yes'} + if name!='': update_dict['data_name']=name + if tags!='': + dd['tags']+=tags.split(',') + + r=ck.access(update_dict) + if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + + ck.out(config.CR_LINE) + ck.out('Path to the solution: '+p) + + ############################################################## + # Process graph description + desc_graph=i.get('desc_graph','') + if desc_graph!='': + ############################################################## + # Graphs + ck.out(config.CR_LINE) + ck.out('Initializing graphs:') + + if not os.path.isfile(desc_graph): + return {'return':1, 'error':'can\'t find file "'+desc_graph+'"'} + + r=ck.load_json_file({'json_file':desc_graph}) + if r['return']>0: return r + + d=r['dict'] + + pdesc=os.path.join(p, 'graph-desc.json') + + r=ck.save_json_to_file({'json_file':pdesc, 'dict':d, 'sort_keys':'yes'}) + if r['return']>0: return r + +# Decided to add all graphs explicitly! 
+# if solution_uoa not in graphs: +# graphs.append(solution_uoa) + + sgi=i.get('skip_graph_init') + if sgi!=None and not sgi: + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + ck.out('') + + r=graph.init({'uid':gr, 'version':'1.0.0', 'desc_file':desc_graph}) + if r['return']>0: return r + + ############################################################## + # Process graph convertor + graph_convertor=i.get('graph_convertor','') + if graph_convertor!='': + ############################################################## + # Graphs + ck.out(config.CR_LINE) + ck.out('Processing graph convertor:') + + if not os.path.isfile(graph_convertor): + return {'return':1, 'error':'can\'t find file "'+graph_convertor+'"'} + + r=ck.load_json_file({'json_file':graph_convertor}) + if r['return']>0: return r + + d=r['dict'] + + pconv=os.path.join(p, 'graph-convertor.json') + + r=ck.save_json_to_file({'json_file':pconv, 'dict':d, 'sort_keys':'yes'}) + if r['return']>0: return r + + ############################################################## + # Init virtual environment + ck.out(config.CR_LINE) + ck.out('Setting (virtual) environment...') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + python_path=i['python_path'] + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'encoding':encoding, + 'output_to_console':'yes'} + + if resume: + if i['python_localenv'] is True: + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + else: + if python_path=='': + # Searching for python + ck.out('') + ck.out(' Searching for the python installation') + + if python_version_from!='' and python_version_from!=' ': + ck.out(' Version must be >= '+python_version_from+' 
(change with --python_version_from="version")') + if python_version_to!='' and python_version_to!=' ': + ck.out(' Version must be <= '+python_version_to+' (change with --python_version_to="version")') + + r=ck.access({'action':'detect', + 'module_uoa':'soft', + 'data_uoa':'compiler.python', + 'version_from':python_version_from, + 'version_to':python_version_to, + 'out':'con'}) + if r['return']>0: return r + + r=ck.access({'action':'load', + 'module_uoa':'env', + 'data_uoa':r['env_data_uid']}) + if r['return']>0: return r + python_path=r['dict']['env']['CK_ENV_COMPILER_PYTHON_FILE'] + + ck.out(config.CR_LINE) + + cmd=cmd0 + if i['python_localenv'] is True: + i_env=ck.inp({'text':'Do you want to create a new virtual environment (Y/n): '}) + if i_env['return']>0: return i_env + + x_env=i_env['string'].strip() + + if x_env=='n' or x_env=='N': + i['python_localenv'] = False + else: + ck.out('creating virtual env') + cmd+='virtualenv --python='+python_path+' venv\n' + + ii['cmd']=cmd + + print (config.CR_LINE) + print ('Running the following commands to install the virtual env:') + print ('') + print (cmd) + print (config.CR_LINE) + + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Install CK + ck.out(config.CR_LINE) + ck.out('Installing CK ...') + + if i['python_localenv'] is True: + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='pip install ck\n' + cmd+='\n' + cmd+=hosd_extra['extra_cmd']+'ck\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Initializing CB config ... 
+ ck.out(config.CR_LINE) + ck.out('Initializing cBench client for this solution ...') + + if pcfg!='' and os.path.isdir(pcfg): + pcfg2=os.path.join(pcfg, '.cm', 'meta.json') + if os.path.isfile(pcfg2): + rx=ck.gen_tmp_file({'prefix':'ck-tmp-', 'suffix':'.json'}) + if rx['return']>0: return rx + + pfn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':pfn, 'dict':{'dict':cfg}}) + if rx['return']>0: return rx + + # Update CB cfg of the solution + cmd=cmd0 + cmd+='ck update cfg:'+config.CK_CFG_DATA_UOA+' @'+pfn+'\n' + + ck.out('') + ck.out(cmd) + + ii['cmd']=cmd + r=ck.access(ii) + + if os.path.isfile(pfn): + os.remove(pfn) + + if r['return']>0: return r + +# ############################################################## +# # Downloading CK components +# ck.out(config.CR_LINE) +# ck.out('Downloading CK components from the portal ...') +# ck.out('') +# +# ck_components=config.CR_SOLUTION_CK_COMPONENTS +# +# cmd=cmd0 +# +# for x in ck_components: +# cmd+='\n' +# cmd+='cb download '+x['cid'] +# if x.get('version','')!='': +# cmd+=' --version='+x['version'] +# cmd+=' --force\n' +# if hplat=='linux': +# cmd+='if [[ $? != 0 ]]; then exit 1 ; fi\n' +# +# ii['cmd']=cmd +# +# r=ck.access(ii) +# if r['return']>0: return r +# rc=r['return_code'] +# if rc>0: +# return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Downloading CK components + ck.out(config.CR_LINE) + ck.out('Extra bootstrap of stable CK components for this solution ...') + ck.out('') + + ck_components=config.CR_SOLUTION_CK_COMPONENTS + + cmd=cmd0 + + cmd+='\n' + cmd+='cb update --force\n' + if hplat=='linux': + cmd+='if [[ $? 
!= 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + + r=ck.access(ii) + if r['return']>0: return r + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Install ck-env repo and detect python + ck.out(config.CR_LINE) + ck.out('Installing ck-env repo and detecting compiler ...') + + cmd=cmd0 + cmd+=hosd_extra['extra_cmd']+'ck set kernel var.install_to_env=yes\n' + # Now downloading from the portal + # cmd+=hosd_extra['extra_cmd']+'ck pull repo:ck-env\n' + cmd+=hosd_extra['extra_cmd']+'ck detect soft:compiler.python --quiet --full_path='+hosd['env_quotes_if_space']+os.path.join(p, + 'venv', + hosd_extra['venv_bin'], + hosd_extra['python_bin'])+hosd['env_quotes_if_space']+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Pull workflow repo + if workflow_repo_url==None or workflow_repo_url=='': + return {'return':1, 'error':'workflow_repo_url is not defined'} + + if workflow_repo_url!='local': + ck.out(config.CR_LINE) + ck.out('Installing workflow repo ...') + + cmd=cmd0 + cmd+=hosd_extra['extra_cmd']+'ck pull repo --url='+workflow_repo_url+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Copy extra scripts if needed + es=i.get('add_extra_scripts','') + if es!='': + ck.out(config.CR_LINE) + ck.out('Copying extra scripts ...') + + import glob + import shutil + + ck.out('') + for fl in glob.glob(es): + ck.out(' * '+fl) + shutil.copy(fl, p) + + ############################################################## + # Describe workflow preparation steps + desc_prereq=i.get('desc_prereq','') + prereq_workflow=dd.get('prereq_workflow',[]) + if desc_prereq!='': + if not os.path.isfile(desc_prereq): + return {'return':1, 'error':'can\'t find file "'+desc_prereq+'"'} + + 
r=ck.load_text_file({'text_file':desc_prereq, 'split_to_list':'yes'}) + if r['return']>0: return r + + prereq_workflow=r['lst'] + + ck.out('') + ck.out('') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('Prequisite steps:') + + ck.out('') + for s in prereq_workflow: + ck.out(' '+s) + + dd['prereq_workflow']=prereq_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + if not i.get('skip_stop',False): + ck.out('') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('We start virtual env to let you install above deps!') + ck.out('Enter "exit" to continue solution preparation:') + ck.out('***************************************************') + ck.out('***************************************************') + ck.out('') + ck.out('') + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + cmd+=hosd_extra['activate_cmd']+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform info ...') + + pinfo=os.path.join(p, 'platform-info.json') + if os.path.isfile(pinfo): os.remove(pinfo) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + s='ck detect platform' + if i.get('target_os','')!='': s+=' --target_os='+i['target_os'] + if tdid!='': s+=' --device_id='+tdid + s+=' --out=json_file --out_file='+pinfo + + cmd+=s+'\n' + + ii['cmd']=cmd + print (cmd) + r=ck.access(ii) + if r['return']>0: return r + + if not os.path.isfile(pinfo): + return {'return':1, 'error':'platform info file was not created'} + + # # Get some info 
about platforms + # ii={'action':'detect', + # 'module_uoa':'platform', + # 'host_os':hos, + # 'target_os':tos, + # 'device_id':tdid} + # r=ck.access(ii) + # if r['return']>0: return r + # + # rx=ck.save_json_to_file({'json_file':pinfo, 'dict':r, 'sort_keys':'yes'}) + # if rx['return']>0: return rx + # + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform host OS info ...') + + pinfo2=os.path.join(p, 'platform-host-os-info.json') + if os.path.isfile(pinfo2): os.remove(pinfo2) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + s='ck detect platform.os' + s+=' --out=json_file --out_file='+pinfo2 + + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + if not os.path.isfile(pinfo2): + return {'return':1, 'error':'platform info file was not created'} + + ############################################################## + if i.get('update_meta_and_stop','')==True: + ck.out(config.CR_LINE) + ck.out('Skipping the rest by user request') + return {'return':0} + + ############################################################## + # Describe workflow preparation steps + ck.out(config.CR_LINE) + ck.out('Preparation steps:') + ck.out('') + + desc_prepare=i.get('desc_prepare','') + prepare_workflow=dd.get('prepare_workflow',[]) + if desc_prepare!='': + if not os.path.isfile(desc_prepare): + return {'return':1, 'error':'can\'t find file "'+desc_prepare+'"'} + + r=ck.load_text_file({'text_file':desc_prepare, 'split_to_list':'yes'}) + if r['return']>0: return r + + prepare_workflow=r['lst'] + + for s in prepare_workflow: + ck.out(' '+s) + + dd['prepare_workflow']=prepare_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + for s in prepare_workflow: + if s=='': + ck.out('') + continue + + ck.out(config.CR_LINE) + ck.out(s) + ck.out('') + + cmd=cmd0 + 
cmd+=s+'\n' + if hplat=='linux': + cmd+='if [[ $? != 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Check dependencies + ck.out(config.CR_LINE) + ck.out('Checking and recording workflow dependencies') + + pdeps=os.path.join(p, 'resolved-deps.json') + + s='' + + if workflow_cmd_before!='': s+=workflow_cmd_before+'\n' + + s+=hosd_extra['extra_cmd']+'ck run '+workflow+' --cmd_key='+workflow_cmd+' '+workflow_cmd_extra+' --record_deps="'+pdeps+'" --skip_exec' + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + + s+='\n' + +# Here we do not need post-processing (often fail) +# if workflow_cmd_after!='': s+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + + ck.out('') + cmd=cmd0 + cmd+=s+'\n' + + if hplat=='linux': + cmd+='if [[ $? 
!= 0 ]]; then exit 1 ; fi\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + rc=r['return_code'] + if rc>0: + return {'return':99, 'error':'last command returned error'} + + ############################################################## + # Describe workflow run steps + ck.out(config.CR_LINE) + ck.out('Run steps:') + ck.out('') + + desc_run=i.get('desc_run','') + run_workflow=dd.get('run_workflow',[]) + if desc_run!='': + if not os.path.isfile(desc_run): + return {'return':1, 'error':'can\'t find file "'+desc_run+'"'} + + r=ck.load_text_file({'text_file':desc_run, 'split_to_list':'yes'}) + if r['return']>0: return r + + run_workflow=r['lst'] + + for s in run_workflow: + ck.out(' '+s) + + dd['run_workflow']=run_workflow + + update_dict['dict']=dd + r=ck.access(update_dict) + if r['return']>0: return r + + for s in run_workflow: + if s=='': + ck.out('') + continue + + ck.out(config.CR_LINE) + ck.out('Command: '+s) + ck.out('') + + cmd=cmd0 + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if r['return']>0: return r + + ############################################################## + # Check dependencies + ck.out(config.CR_LINE) + ck.out('Solution was successfully prepared!') + + ck.out('') + ck.out('You can crowd-benchmark this solution (if supported) as follows:') + ck.out('cb benchmark '+uid) + + ck.out('') + ck.out('You can run this solution locally as follows:') + ck.out('cb run '+uid) + + ck.out('') + ck.out('You can activate virtual env for this solution to debug/improve it as follows:') + ck.out('cb activate '+uid) + + return {'return':0} + +############################################################################ +# Activate virtual environment for a solution + +def activate(i): + """ + Input: { + uid [str] - portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get 
platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + p=r['path'] + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Activate solution: '+p) + ck.out('') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + cmd+=hosd_extra['activate_cmd']+'\n' + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + + r=ck.access(ii) + if r['return']>0: return r + + return {'return':0} + +############################################################################ +# Run prepared solution + +def run(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + 
tdid=r.get('device_id','') + + xcmd=i.get('cmd','') + if xcmd==None: xcmd='' + xcmd=xcmd.strip() + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... + # host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd_before=dd.get('workflow_cmd_before','') + workflow_cmd_after=dd.get('workflow_cmd_after','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Run solution: '+p) + ck.out('') + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + if workflow_cmd_before!='': cmd+=workflow_cmd_before+'\n' + + if xcmd!='': + s=xcmd + else: + s=hosd_extra['extra_cmd']+'ck run '+workflow+' --cmd_key='+workflow_cmd + + if workflow_cmd_extra!='': + s+=' '+workflow_cmd_extra + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + + cmd+=s+'\n' + + if workflow_cmd_after!='': cmd+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + r=ck.access(ii) + if r['return']>0: return r + + return r + + 
+############################################################################ +# Benchmark prepared solution + +def benchmark(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + import datetime + import time + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + sdate= datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') + t = time.time() + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + tdid=r.get('device_id','') + + xcmd=i.get('cmd','') + if xcmd==None: xcmd='' + xcmd=xcmd.strip() + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... 
+ # host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd_before=dd.get('workflow_cmd_before','') + workflow_cmd_after=dd.get('workflow_cmd_after','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + result_file=dd.get('result_file','') + + graphs=dd.get('graphs',[]) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Find path to output file '+result_file+' ...') + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='ck find '+workflow+'\n' + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding} + r=ck.access(ii) + if r['return']>0: + status=-1 + return r + + path_result=r['stdout'].strip() + path_result_file=os.path.join(path_result, result_file) + + ck.out(' Found path: '+path_result_file) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Detecting complete platform info ...') + + pinfo=os.path.join(p, 'platform-info.json') + if os.path.isfile(pinfo): os.remove(pinfo) + + cmd=cmd0 + + # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + s='ck detect platform' + if tos!='': s+=' --target_os='+tos + if tdid!='': s+=' --device_id='+tdid + s+=' --out=json_file --out_file='+pinfo + + cmd+=s+'\n' + + ii['cmd']=cmd + r=ck.access(ii) + if 
r['return']>0: return r + + if not os.path.isfile(pinfo): + return {'return':1, 'error':'platform info file was not created'} + + # Get some sub-info about deps and platforms + dinfo={} + if os.path.isfile(pinfo): + r=ck.load_json_file({'json_file':pinfo}) + if r['return']==0: + dinfo=r['dict'].get('features',{}) + for k in ['cpu_misc', 'cpu']: + if k in dinfo: del(dinfo[k]) + + pdeps=os.path.join(p, 'resolved-deps.json') + ddeps={} + if os.path.isfile(pdeps): + r=ck.load_json_file({'json_file':pdeps}) + if r['return']==0: + ddeps2=r['dict'] + + r=deps_summary({'deps':ddeps2}) + if r['return']==0: + ddeps=r['deps_summary'] + + ############################################################## + # status management + + path_tmpSol=os.path.join(p, "tmp") + tmp_solStatus=os.path.join(path_tmpSol, "status.json") + + status = 0 + if not os.path.isdir(path_tmpSol): + os.mkdir(path_tmpSol) + + rdf_st={} + rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':{'status': 0}}) + if rx['return']>0: return rx + else: + rdf_st=rx['dict'] + status = rdf_st.get('status','') + + run=True + if status == 1: + run=False + elif status == 2: + # To be done try to push the result to server + status=1 + + rdf_st['status'] = 1 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + if os.path.isfile(path_result_file): + ck.out(' Cleaning output ...') + os.remove(path_result_file) + + ############################################################## + rr={'return':0} + if run is True: + + ck.out(config.CR_LINE) + ck.out('Run solution: '+p) + ck.out('') + + cmd=cmd0 + + if workflow_cmd_before!='': cmd+=workflow_cmd_before+'\n' + + if xcmd!='': + s=xcmd + else: + s=hosd_extra['extra_cmd']+'ck benchmark '+workflow+' --cmd_key='+workflow_cmd + + if workflow_cmd_extra!='': + s+=' '+workflow_cmd_extra + + if hos!='': s+=' --host_os='+hos + if tos!='': s+=' --target_os='+tos + 
if tdid!='': s+=' --device_id='+tdid + ck.out(config.CR_LINE) + ck.out('Command: '+s) + ck.out('') + + cmd+=s+'\n' + + if workflow_cmd_after!='': cmd+=workflow_cmd_after+'\n' + + ck.out('') + ck.out(s) + ck.out('') + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding, + 'output_to_console':'yes'} + rr=ck.access(ii) + + if r['return']>0: + rdf_st['status'] = -1 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + return r + else : + rdf_st['status'] = 2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + elapsed = time.time() - t + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Reading output: '+path_result_file) + ck.out('') + + if not os.path.isfile(path_result_file): + ck.out(' Error: output file not found!') + rdf_st['status'] = -2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + else: + rx=ck.load_json_file({'json_file':path_result_file}) + if rx['return']>0: return rx + + rd=rx['dict'] + + # Add solution info + rd['solution_uoa']=solution_uoa + rd['solution_uid']=solution_uid + rd['solution_run_date']=sdate + rd['solution_duration']=elapsed + + sworkflow=workflow.split(':') + if len(sworkflow)>1: + rd['program_workflow_uoa']=sworkflow[1] + + from . 
import __version__ + rd['client_version']=__version__ + + rx=ck.flatten_dict({'dict':rd}) + if rx['return']>0: return rx + + rdf=rx['dict'] + crdf={} + + crdf['platform_info']=dinfo + crdf['resolved_deps']=ddeps + + # Remove first ## (do not need here) + for k in rdf: + v=rdf[k] + if k.startswith('##'): k=k[2:] + crdf[k]=v + + # Get some sub-info about deps and platforms + if os.path.isfile(pinfo): + r=ck.load_json_file({'json_file':pinfo}) + if r['return']==0: + dx=r['dict'] + + pdeps=os.path.join(p, 'resolved-deps.json') + if os.path.isfile(pdeps): + rx=ck.load_json_file({'json_file':pdeps}) + if rx['return']==0: + dx=rx['dict'] + + ck.out(json.dumps(crdf, indent=2)) + + #over write the file + rx=ck.save_json_to_file({'json_file':path_result_file, 'dict':crdf}) + if rx['return']>0: return rx + + ################################################################ + if len(graphs)>0: + ck.out(config.CR_LINE) + ck.out('Pushing results to graphs...') + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':crdf}) + if rx['return']>0: return rx + + if solution_uoa not in graphs: + graphs.append(solution_uoa) + + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + if 'graphs' not in rr: rr['graphs']=[] + rr['graphs'].append(rx) + + rdf_st['status'] = 3 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + ################################################################ + pconv=os.path.join(p, 'graph-convertor.json') + if os.path.isfile(pconv): + rx=ck.load_json_file({'json_file':pconv}) + if rx['return']==0: + ck.out(config.CR_LINE) + ck.out('Converting data for extra graphs ...') + + dconv=rx['dict'] + + for eg in dconv: + 
gr=eg['graph_id'] + + ck.out('') + ck.out(' * Graph: '+gr) + + keys=eg['keys'] + + cdata={} + + for k in keys: + ok=k.get('out_key','') + if ok=='': ok=k['key1'] + + kk=[k.get('key1',''), k.get('key2',''), k.get('key3',''), k.get('key4','')] + + vv='' + v=k.get('value','') + if v!='' and v!=None: + vv=v + + first=True + for kx in kk: + if kx!='': + if kx.startswith('##'): + ry=ck.get_by_flat_key({'dict':crdf, 'key':kx}) + if ry['return']>0: return ry + v=ry['value'] + else: + v=crdf.get(kx) + + vm=k.get('multiply',0) + if vm!=0 and vm!='' and vm!=None and (type(v)==float or type(v)==int): + v=v*vm + + if v!='' and v!=None: + if first: + first=False + if type(v)==float or type(v)==int: + vv=0 + else: + vv+=', ' + + # Check if list or dict + if type(v)==list or type(v)==dict: + vv=v + else: + vv+=v + + if vv!='': + cdata[ok]=vv + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':cdata}) + if rx['return']>0: return rx + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + if 'graphs' not in rr: rr['graphs']=[] + rr['graphs'].append(rx) + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + return rr + +############################################################################ +# List local solutions + +def ls(i): + + """ + Input: { + (uid) [str] - portal identifier of the solution (can have wiledcards) + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Create entry + uid=i['uid'] + if uid==None: uid='' + + r=ck.access({'action':'ls', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'all':'yes', + 'out':'con'}) + return r + +############################################################################ +# Find solution + +def find(i): + + """ + Input: { + uid [str] 
- Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Find entry + uid=i['uid'] + + r=ck.access({'action':'find', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'out':'con'}) + return r + +############################################################################ +# Delete solution + +def rm(i): + + """ + Input: { + uid [str] - Portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Delete entry + uid=i['uid'] + + r=ck.access({'action':'rm', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid, + 'common_func':'yes', + 'out':'con'}) + return r + +############################################################################ +# Get solution directory + +def get_solution_dir(i): + uid=i['uid'] + + # Get work dir + r=config.get_work_dir({}) + if r['return']>0: return r + + work_dir=r['path'] + + # Get solutions dir + solutions_dir=os.path.join(work_dir, config.CR_SOLUTIONS_DIR) + if not os.path.isdir(solutions_dir): + os.makedirs(solutions_dir) + + # Get the solution dir + solution_dir=os.path.join(solutions_dir, uid) + if not os.path.isdir(solution_dir): + os.makedirs(solution_dir) + + return {'return':0, 'solutions_dir':solutions_dir, 'solution_dir':solution_dir} + +############################################################################## +# extracting summary of all deps + +def deps_summary(i): + """ + Input: { + deps - resolved deps + } + + Output: { + return - return code = 0, if successful + > 0, if error + (error) - error text if return > 0 + + deps_summary - summary of deps + } + + """ + + deps=i['deps'] + ds=i.get('deps_summary',{}) + + for x in deps: + d=deps[x] + dd=d.get('dict',{}) + + ds[x]={} + + cx=dd.get('customize',{}) + + ds[x]['tags']=d.get('tags',[]) + ds[x]['name']=d.get('name','') 
+ + ds[x]['package_tags']=','.join(dd.get('tags',[])) + ds[x]['data_name']=dd.get('data_name','') + + puoa=dd.get('package_uoa','') + if puoa=='': + puoa=d.get('cus',{}).get('used_package_uid','') + ds[x]['package_uoa']=puoa + + ds[x]['version']=cx.get('version','') + ds[x]['git_revision']=cx.get('git_info',{}).get('revision','') + ds[x]['git_iso_datetime_cut_revision']=cx.get('git_info',{}).get('iso_datetime_cut_revision','') + + sdeps=dd.get('deps',{}) + if len(sdeps)>0: + # Recursion + r=deps_summary({'deps':sdeps}) + if r['return']>0: return r + ds[x]['deps']=r['deps_summary'] + + return {'return':0, 'deps_summary':ds} + +############################################################################## +# publish result + +def publish_result(i): + + """ + Input: { + uid [str] - portal identifier of the solution + } + + Output: { + return [int] - return code = 0 if success or >0 if error + (error) [str] - error string if return>0 + } + """ + + # Get main configuration + r=config.load({}) + if r['return']>0: return r + cfg=r.get('dict',{}) + pcfg=r.get('path','') + + xcmd=i.get('cmd','') + if xcmd==None: xcmd='' + xcmd=xcmd.strip() + + cur_dir=os.getcwd() + + # Check if Windows or Linux + # Get platform info + r=get_platform_desc(i) # Pass input from init + if r['return']>0: return r + + hos=r['host_os_uid'] + hosx=r['host_os_uoa'] + hosd=r['host_os_dict'] + hosd_extra=r['host_desc'] + + tos=r['os_uid'] + tosx=r['os_uoa'] + tosd=r['os_dict'] + + # Load entry with the solution + uid=i['uid'] + + r=ck.access({'action':'load', + 'module_uoa':config.CR_MODULE_UOA, + 'data_uoa':uid}) + if r['return']>0: return r + + solution_uoa=r['data_uoa'] + solution_uid=r['data_uid'] + + p=r['path'] + dd=r['dict'] + + # TBD: need to be checked from outside ... 
+ host_os=dd.get('host_os','') + tos=dd.get('target_os','') + tdid=dd.get('device_id','') + + workflow=dd.get('workflow','') + workflow_cmd=dd.get('workflow_cmd','') + workflow_cmd_extra=dd.get('workflow_cmd_extra','') + + workflow_input_dir=dd.get('workflow_input_dir','') + workflow_output_dir=dd.get('workflow_output_dir','') + + result_file=dd.get('result_file','') + + graphs=dd.get('graphs',[]) + + ############################################################## + ck.out(config.CR_LINE) + ck.out('Find path to output file '+result_file+' ...') + ck.out('') + + encoding=locale.getdefaultlocale()[1] + + cmd0=hosd['change_dir']+' '+hosd['env_quotes_if_space']+p+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_set']+' CK_REPOS='+hosd['env_quotes_if_space']+os.path.join(p, 'CK')+hosd['env_quotes_if_space']+'\n' + cmd0+=hosd['env_call']+' '+hosd['env_quotes_if_space']+os.path.join(p, 'venv', hosd_extra['venv_bin'], hosd_extra['venv_activate'])+hosd['env_quotes_if_space']+'\n' + + cmd=cmd0 + cmd+='ck find '+workflow+'\n' + + ii={'action':'shell', + 'module_uoa':'os', + 'cmd':cmd, + 'encoding':encoding} + r=ck.access(ii) + if r['return']>0: + status=-1 + return r + + path_result=r['stdout'].strip() + path_result_file=os.path.join(path_result, result_file) + + ck.out(' Found path: '+path_result_file) + + # ############################################################## + # ck.out(config.CR_LINE) + # ck.out('Detecting complete platform info ...') + + # pinfo=os.path.join(p, 'platform-info.json') + # if os.path.isfile(pinfo): os.remove(pinfo) + + # cmd=cmd0 + + # # Need to do it from virtual env since it's the correct way for Android devices which may require specific files (adb) + # s='ck detect platform' + # if i.get('target_os','')!='': s+=' --target_os='+i['target_os'] + # if tdid!='': s+=' --device_id='+tdid + # s+=' --out=json_file --out_file='+pinfo + + # cmd+=s+'\n' + + # ii['cmd']=cmd + # r=ck.access(ii) + # if r['return']>0: return r + + # if not 
os.path.isfile(pinfo): + # return {'return':1, 'error':'platform info file was not created'} + + # # Get some sub-info about deps and platforms + # dinfo={} + # if os.path.isfile(pinfo): + # r=ck.load_json_file({'json_file':pinfo}) + # if r['return']==0: + # dinfo=r['dict'].get('features',{}) + # for k in ['cpu_misc', 'cpu']: + # if k in dinfo: del(dinfo[k]) + + # pdeps=os.path.join(p, 'resolved-deps.json') + # ddeps={} + # if os.path.isfile(pdeps): + # r=ck.load_json_file({'json_file':pdeps}) + # if r['return']==0: + # ddeps2=r['dict'] + + # r=deps_summary({'deps':ddeps2}) + # if r['return']==0: + # ddeps=r['deps_summary'] + + ############################################################## + # status management + + path_tmpSol=os.path.join(p, "tmp") + tmp_solStatus=os.path.join(path_tmpSol, "status.json") + + status = 0 + if not os.path.isdir(path_tmpSol): + os.mkdir(path_tmpSol) + + rdf_st={} + rx=ck.load_json_file({'json_file':tmp_solStatus}) + if rx['return']>0: + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':{'status': 0}}) + return rx + + rdf_st=rx['dict'] + status = rdf_st.get('status','') + + if status == 2: + ############################################################## + ck.out(config.CR_LINE) + ck.out('Reading output: '+path_result_file) + ck.out('') + + if not os.path.isfile(path_result_file): + ck.out(' Error: output file not found!') + rdf_st['status'] = -2 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + else: + rx=ck.load_json_file({'json_file':path_result_file}) + if rx['return']>0: return rx + + crdf=rx['dict'] + ################################################################ + if len(graphs)>0: + ck.out(config.CR_LINE) + ck.out('Pushing results to graphs...') + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':crdf}) + if rx['return']>0: return rx + + if 
solution_uoa not in graphs: + graphs.append(solution_uoa) + + for gr in graphs: + ck.out('') + ck.out(' * Graph: '+gr) + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + rdf_st['status'] = 3 + rx=ck.save_json_to_file({'json_file':tmp_solStatus, 'dict':rdf_st}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + ################################################################ + pconv=os.path.join(p, 'graph-convertor.json') + if os.path.isfile(pconv): + rx=ck.load_json_file({'json_file':pconv}) + if rx['return']==0: + ck.out(config.CR_LINE) + ck.out('Converting data for extra graphs ...') + + dconv=rx['dict'] + + for eg in dconv: + gr=eg['graph_id'] + + ck.out('') + ck.out(' * Graph: '+gr) + + keys=eg['keys'] + + cdata={} + + for k in keys: + ok=k['out_key'] + + kk=[k.get('key1',''), k.get('key2',''), k.get('key3',''), k.get('key4','')] + + vv='' + v=k.get('value','') + if v!='' and v!=None: + vv=v + + first=True + for kx in kk: + if kx!='': + if kx.startswith('##'): + ry=ck.get_by_flat_key({'dict':crdf, 'key':kx}) + if ry['return']>0: return ry + v=ry['value'] + else: + v=crdf.get(kx) + + vm=k.get('multiply',0) + if vm!=0 and vm!='' and vm!=None and (type(v)==float or type(v)==int): + v=v*vm + + if v!='' and v!=None: + if first: + first=False + if type(v)==float or type(v)==int: + vv=0 + else: + vv+=', ' + vv+=v + + if vv!='': + cdata[ok]=vv + + rx=ck.gen_tmp_file({'prefix':'tmp-result-', 'suffix':'.json'}) + if rx['return']>0: return rx + fn=rx['file_name'] + + rx=ck.save_json_to_file({'json_file':fn, 'dict':cdata}) + if rx['return']>0: return rx + + ck.out('') + rx=graph.push({'uid':gr, 'version':'1.0.0', 'filename':fn}) + if rx['return']>0: return rx + + # Clean temp data file + if os.path.isfile(fn): + os.remove(fn) + + return r + return {'return':0} diff --git a/incubator/cbench/cbench/static/favicon.ico b/incubator/cbench/cbench/static/favicon.ico 
new file mode 100644 index 0000000000000000000000000000000000000000..de17981bf5e10d8021e28a5862245af66b53c962 GIT binary patch literal 13094 zcmeHM36vDo)qb_~bXWIOH{DcEFVi#K(_K?lDiUK{qQ*!PC5qxMi3ZfD7#EDFpfVD- zDDFG%ii!&eQPH@7yCDijVxqX97(`*%*I_mug8A-yRWkzu`TykP{O3Q1ch2qZs<+?m zyZ1I@3e#DyUJTz6c1DP?{*19wNj^`EF!nUw?Y5i$9A|70s9!(%?A0)~slb?rIaq>W znpg*4^nY7!FB84Fj=e(k);jho z=(Rt!ve%}!ve!Xx{JE9A`F1ON>z!8iC!!f`Y&y}*Huh(tciY(8M1N^x@4VN>W`JhS zYG*V5+Rol3dcU3hh3IeX>^;z|zqhl$en8a0-v6+J{f%gL2m3qGM;+_~qK`Y+heV%r zu-QbPcCwE^AI|}O*2z90`n;2U3i|Ae^=$4xK=anKFTPyQ{xN?&oA=dv_T|@{&0oOT zSD>#Ka<-tEvxSQ|YhKLRq9q&H;-wqdl4Tp%vK1TH@)h5(6)QojzF{kgR)51*fmW{p zt^J0r0j+J>$XeEIWb0ZtveveZtgU?`YbWa1$U4_=V(W?cCicz7Z`nrBrcK|nZ@>MP zG+;;D!GX;<(A3m4>V)%$S=DjmkvE*@Sw@8Bc6f^ukPuRcw`9N9GdXHDPlzk951zYF{4#BD}3VMG@%V&@QzTEu=!)U=45OLWO1HeAqR zcHZd4?EK$>E?vwn0FAh8F}v_`&=pJAMOQ9i7Zd${2^&Rp)e_c3boCN;3DKA(Z1k9= zZ1got+3$$1UCJ&65k6dY{Ze*0=!zR8R$K|Zxbj9|1`%-MD#DJNC4P(nhK#v&8M}t) zHlpPMQ?9*T;>z{Fmg~nZXE%Uuyb}bR39#lC;LR=ftYEhi-P?^lcig{{-SNOmHkJrD zL|Am^xRvZKqK8(pyNMoN$?gH&`$#uV-Tx@Cipa;S2Z2|FS>v8q#U2v0nmsgO^=240 ze$s08DA8oXvu;d#;;A)k!qaQm#AkqSM8LR7M9)gBoBZ5b_6H)uya4xJSj(Ov0`?L9 zO?jz>JxesTg*`_EJp3c*`Imuj2m?d#B zz{8$cC~)v!VqiB8e$)d82?OWMB^;Dk_&H%=fQJDl_Qb^i8v}d2isc%LyyDf|siZFF}NtYg&3>W>4JQ0z2C~Hn9$(&P}Y7s0WsE;3?m* ziESYI2G~mEZ}uj5c=xK$IDc>dd&loK;;~!CzO*2C zAAG;D>#+6f2A|CN3x(L&*C&p->#Ly0|DL|v^`jCG;^EfFxvl)mV}J6_<^x&|QbN_w z;oa)f!$UvdpB;Z`&_DgkN$-C#@8NXg9Y>!)hnstlYL9sO&FQn3$`OC`%)JiU!}MO` zL)1e*#JgF$glej@dkh{r4-e}mkKXtCk3T#i{~VvVZ|Z>KUc}@GQ=+{y{V6GwjTj3-Ril&WT2L0z;!EX*d{pg?;{y}95 zzuIoNhx4{y4C&-g9&JCuSKjo(!C%ij{DU*EBfsWf4gtLzwxzW4;jN> zeR$0gm8*GJZ0~ubwb%>F?(7u?H>TtABbGUvtKdEvG!m zCmz%O(Vzu$kA%wcwyW>v{QlfTzWTs}SMyctUme)YM-J$?cQ{|wAKc@QA25&4IeTK$ z@|FSFzi{3$w28l&JE3LpBfN9)z_pyeaxZVY=uv*#3w)3^p7VF=9^+@TF?08s$@yp( zGVWN%pTUwdMt6=$PilYbP~*ilKT1xY@_p#e6Tk1o27KuN9CXRx*^6erdQ2u1d4yj& zy5pKX7vr5&z)$y_!ui@mAK;(<^e5&qHw;%E^S<0?0v~fL z*1RT8sva~5^5f$EUmP}dUjNzr^8s&jKB4OJpi_Ro65V;y4?g+xk6-}5Ii;P?xt(;o 
zNBeBp(N~n_K3)GF=eIR}+M^@*+5XkDXU%&eav$gKj+5sWJu)!(zx$*(1t}vt8PpZ0 z7+a51C^n%OgyIZ}Cr0`)1jPvy6AitY0nubosC4p9|}gj6MFd)P!v%k^+)Rbh&%X;qP^ z;I&xCNh*eArDCdL;G8)Xt0v@fm9P>?sMxhoG>O;L^1Terr2FKRFc?=dqH0yjFe{?d zVhP7oLRi~L5qCYWQeABciRAUFI?pRt*Sd*>XB&p#WkHELB`r$FbLr-oR}uTT8BH&E zUSl)WN#e9?)_Hc762>V?no{FArk*PL zr!QE!d^(CNsTr(LtHfy2TrbasI#fGe9q#IQPA04wiF7_-QH`PrrEyIrj#@D)9;B<} zyqqs?JD;kSV`w=&Q8Y+KPDF{&zIGKh%j(SZu!|Cr%ep0#P6&NcL$((anu7Ide1lev zW7Q?{c36!jA_mxn8UQLh+=;4PEJTf|KHL#;}X&XlTXH1d!=&ehbSTwJwwHPA=VbJ?Dm`_s2f$y=qp^1BAYdWNxh=#*Bf@c*9TAk;{w4!797X=$iFf&u= zQzfKC=(Y`}sl!B^gzbbAc!XN&DZIx;v2O3M?bU?oHkMIqii4HXYCRYZ*;>$Zw3132 zh~CplY;RXm$)uOv@eQn8O$RHL?+p`lq)j4YUChox6?a0^<}i3pV` z3Wm9b3eH3)qix+74(CRNm&B_YatdIMI7vzKIS#D=%WVVWOFHRpLLM0IqXtiIB3Xx{ zL6eo2Ms1>qdL zELf0_!5*^YFtDS^uujUioeW7%G$$s_uUG|{VM+vgrZC*Q&V)2grQ2J{@nxxXU=D3r ztLdt$XG~}Voz;!X_2p3cSrZd`l~f$aF=Ay)iNmRhYeHH=H+v5E+ zAgRIE09=9ux|Nnupnb$Dp*5Z*83?BOc(dgcIJ3M^?3B>rvL-}br<&3kZ8;n_Q!;fB zyIU>9;TVE#PHh(>H=p6AtCXz8(zvqF7&uRo{dQU<+h8h|*BB<8hrR2a4%2V-2=9d3ko{D<NWw%iN1>8Pb49R&@TgFmNtrQ=Vy5jjB&~E{%8N94 zlS%~1qrg?SKC1WrS1Y(_6%nF~oED+9X!V++$Uw%c7)p2>K}FZ85YmN{Cg#XE2k+C! 
zOjB~PQb@)(i+zh` zbj!F7JCwRb(-6X7+(1FH7_=h2Q>$w!i4cKl&>_Fwlom;3>MM<|+gKk}R27kZMOS25 zL=4Gml%1=QM7B_BC|B&wSx}6KUOj9WVK7eVh3=BBDq)17e#^iV(1SHJP18dLh3?(0 zkxCGuur78{M2QFCpQ`9GdB8w7f$1Sw1ilO_q01E3U%@HG6+La+m2xdgL`bJ;d$F5V z6Py&ek(w>g0&#Gm6baG{q&?L*z8|E*<^|iC{un@ z2{A)tWL~YRU}|Pdf8dr&rHWX}wKOFxjMtKJXq=);=$;`?P_HV;_nm6ymrUd_ytJlj zdMuSIWUEos5vr9}ydM-B4vV1ivS%E6BtttF;mS)5gdez(Kj4Td7aOrh>2@qB4q(%%KtWL&j3*wIV)m zz>82Ly~YaN$(n6xBI(ABurzLN%Ec+W^IeZGEIVl=U3eiiY-m&{30i$pXpI9AAz5-F z{;WbcBz+XNL?wZms|u3fiZ1Vu8%y&P9f1==MMnsy9A@FH zWP*Q;jGCyAgHbr3Oqu=ns1Kq16gbhGtSEt}xJSI(v%t_`?RKaJ@=>*(t}(BLG9^=H zP)NO4wZASLJe+WF1Xtvr;2mn1fXleFBMdF-r1v?JAX-WDb&8CGhHZvw z3sJ>47I=E$A##3F3ndb{pI8ev9uLdnR*>SA!bJnD0%%Fc;>I_deqFoUWZP+p{wXaqrO;I`cu6_H#^hd{zU%Gi-ym_!95rEzvx zR2N{qGRY7K7ZRmiJWutsO2HBq|$@9`KoFQtoy{f+B-+!>=8{=83%xx|)o4sR9lot)QeEhJVj|wVM@J0r=%0Ek9_Hh5?JN zM&%IRhZ;EqUlzU1I1}vh5^1wz%H2|BS}G#VD*mo1aB zxa1XL1Yb!-I00D;l!Z3ep!#0WQod^TR;1sjzXPNPp;MG~!6LXtimOs`F<-C53K>yh zBQ`6l+?Q!hk8 z_7RvvV}walDN&@HlB|M93y?o8C5$7HFZ2&XU?fT>I6_M5g)H=ybTZovv|Sniv`xqo z=ubWpOpp#0^*&!ky>f3MtF&O5Ym#7c*k9@;{k=mg5%|Eq%DP`|gsi)&Szt8=ZD}V{ zxSk#AnlHw5eZXx6(P|(e2#lJU4*4z|gTZy5tz9$a52Ov!rI3UL9 z8kx-24-l+FL!dBelC~RD#j0i9w`*ElhA_eK?|g$raDI>np@8s+7!nV-3o!~;^POp3 zFDS|~Tp0_X94`->_x%aLdK)#;uKAR_gq{5pGo6{_WvJq41H$xUjCY zSOwF-lrDE6*4fU7oiXBoE!sa7gg>wjRYZQ `ck locate env --tags=val`/val_map.txt +# Install misc Python dependencies required for calibration. +python -m pip install nibabel pillow progress py-cpuinfo pyyaml shapely sklearn tqdm xmltodict yamlloader +# Install "headless" OpenCV (which doesn't need libsm6, libxext6, libxrender-dev). +ck install package --tags=lib,python-package,cv2,opencv-python-headless +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 7. Install the official SSD-MobileNet model for MLPerf Inference v0.5 +# and convert it into the OpenVINO format. 
+#-----------------------------------------------------------------------------# +ck install package --tags=model,tf,ssd-mobilenet,quantized,for.openvino +ck install package --tags=model,openvino,ssd-mobilenet --safe +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 8. Install the COCO 2017 validation dataset (5,000 images). +#-----------------------------------------------------------------------------# +# Download the dataset to the default path. Remove all training annotations (~765 MB). +echo | ck install package --tags=object-detection,dataset,coco.2017,val,original,full && ck virtual env --tags=object-detection,dataset,coco.2017,val,original,full --shell_cmd='rm $CK_ENV_DATASET_COCO_LABELS_DIR/*train2017.json' +# Install Python COCO API. +#ck install package --tags=lib,python-package,cython # already installed +#ck install package --tags=lib,python-package,numpy # already installed +ck install package --tags=lib,python-package,matplotlib +ck install package --tags=tool,coco,api +#-----------------------------------------------------------------------------# diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/prereq.txt b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/prereq.txt new file mode 100644 index 0000000000..669fbd5b21 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-detection-openvino-ssd-mobilenet-coco-500-linux/prereq.txt @@ -0,0 +1,10 @@ +# Tested on Ubuntu 18.04 +# (including Docker and Windows 10 Sybsystem for Linux) + +# These dependencies are needed to rebuild COCO API: + + sudo apt update + + sudo apt install git wget libz-dev zip curl + sudo apt install python3 python3-pip + sudo apt install gcc g++ autoconf autogen libtool cmake diff --git 
a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_activate.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_activate.sh new file mode 100644 index 0000000000..68c0d3719f --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_activate.sh @@ -0,0 +1,6 @@ +#! /bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb activate mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init.sh new file mode 100644 index 0000000000..074e0eb6d3 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init.sh @@ -0,0 +1,23 @@ +#! 
/bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb init mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux \ + --name="Image classification; MLPerf inference v0.5; OpenVINO; MobileNet v1 1.0 224; ImageNet; 500 images validation; Linux; benchmark; portable workflows" \ + --tags="validated,image-classification,mlperf,mlperf-inference,mlperf-inference-v0.5,openvino,mobilenet,mobilenet-v1,imagenet,500,benchmark,linux,portable-workflows,crowd-benchmarking" \ + --workflow_repo_url="local" \ + --workflow="program:mlperf-inference-v0.5" \ + --workflow_cmd_before="export NPROCS=\`grep -c processor /proc/cpuinfo\`" \ + --workflow_cmd="image-classification" \ + --workflow_cmd_extra="--repetitions=1 --no_state_check --skip_print_timers --env.CK_OPENVINO_MODEL_NAME=mobilenet --env.CK_LOADGEN_SCENARIO=Offline --env.CK_LOADGEN_MODE=Accuracy --env.CK_LOADGEN_DATASET_SIZE=500 --env.CK_OPENVINO_NTHREADS=\$NPROCS --env.CK_OPENVINO_NSTREAMS=\$NPROCS --env.CK_OPENVINO_NIREQ=\$NPROCS" \ + --workflow_output_dir="tmp" \ + --add_extra_meta_from_file="$PWD/extra-meta.json" \ + --desc_prereq="$PWD/prereq.txt" \ + --desc_prepare="$PWD/prepare.txt" \ + --result_file="tmp/tmp-ck-timer.json" \ + --python_version_from="3.6" \ + --python_version_to="3.7.99" \ + --graph_convertor="$PWD/graph-convertor.json" +# --update_meta_and_stop + diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init_graph.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init_graph.sh new file mode 100644 index 0000000000..0fa8a0dc31 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_init_graph.sh @@ -0,0 +1,5 @@ +#! 
/bin/bash + +cbench init-graph mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux --version=1.0.0 --desc_file="$PWD/graph-desc.json" \ + --name="MLPerf Inference v0.5 - Image Classification - crowd-benchmarking" \ + --tags="benchmarking,reproducible-benchmarking,crowd-benchmarking,reproduced-results,mlperf,mlperf-inference,mlperf-inference-v0.5" diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_publish.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_publish.sh new file mode 100644 index 0000000000..f7bef583cb --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_publish.sh @@ -0,0 +1,7 @@ +#! /bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb setup +cb publish solution:mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux --force --version=1.0.1 diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run.sh new file mode 100644 index 0000000000..6382bdf8e4 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run.sh @@ -0,0 +1,6 @@ +#! 
/bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb run mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run_benchmark.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run_benchmark.sh new file mode 100644 index 0000000000..479529db90 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/_run_benchmark.sh @@ -0,0 +1,8 @@ +#! /bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +ck find module:program + +cb benchmark mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/extra-meta.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/extra-meta.json new file mode 100644 index 0000000000..2d91328a72 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/extra-meta.json @@ -0,0 +1,10 @@ +{ + "related_components": { + "paper":{ + "title":"MLPerf Inference Benchmark", + "uid":"d0e50ebb5b9d4ec9" + }, + "result_uoa":"mlperf-inference-v0.5-classification" + }, + "crowd_benchmarking":"yes" +} diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-convertor.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-convertor.json new file mode 100644 index 0000000000..3a5973abd5 --- /dev/null +++ 
b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-convertor.json @@ -0,0 +1,58 @@ +[ + { + "graph_id": "mlperf-inference-v0.5-classification", + "keys": [ + {"key1":"solution_uoa"}, + {"key1":"solution_uid"}, + {"key1":"solution_run_date"}, + {"key1":"solution_duration"}, + {"key1":"program_workflow_uoa"}, + {"key1":"client_version"}, + + {"key1":"good"}, + {"key1":"total"}, + { + "key1": "accuracy" + }, + + {"key1":"##resolved_deps#weights#data_name", "out_key":"benchmark"}, + {"key1":"##resolved_deps#weights#package_uoa","out_key":"benchmark_uoa"}, + {"key1":"##resolved_deps#dataset#name", "out_key":"dataset"}, + + {"key1":"##resolved_deps", "out_key":"resolved_deps"}, + {"key1":"##platform_info", "out_key":"platform_info"}, + { + "key1": "##resolved_deps#openvino#data_name", + "key2": "##resolved_deps#openvino#version", + "out_key": "framework" + }, + { + "key1": "##resolved_deps#openvino#data_name", + "key2": "##resolved_deps#openvino#version", + "out_key": "framework" + }, + { + "out_key": "notes", + "value": "Crowd-benchmarking using portable CK solutions" + }, + { + "key1": "##platform_info#cpu_unique@0#model name", + "out_key": "processor" + }, + { + "out_key": "submitter", + "value": "Crowd-user" + }, + { + "key1": "##platform_info#platform#vendor", + "key2": "##platform_info#platform#model", + "key3": "##platform_info#platform#name", + "out_key": "system" + }, + { + "out_key": "crowd_benchmarking", + "value": "yes" + } + ] + } +] diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-desc.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-desc.json new file mode 100644 index 0000000000..0724fe89aa --- /dev/null +++ 
b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/graph-desc.json @@ -0,0 +1,87 @@ +{ + "default_key_x": "solution_duration", + "default_key_y": "accuracy", + "default_sort_key": "solution_duration", + "table_view": [ + { + "json_and_pre": "yes", + "key": "platform_info", + "name": "Platform info", + "skip_pre": "yes" + }, + { + "json_and_pre": "yes", + "key": "resolved_deps", + "name": "Resolved deps", + "skip_pre": "yes" + }, + { + "format": "%.2f", + "key": "solution_duration", + "name": "Total time (sec)", + "type": "float" + }, + { + "format": "%.2f", + "key": "accuracy", + "name": "Accuracy", + "type": "float" + }, + { + "key": "system", + "name": "System" + }, + { + "key": "processor", + "name": "Processor" + }, + { + "key": "program_workflow_uoa", + "name": "Program pipeline (workflow)", + "module_uoa":"program", + "type": "url3" + }, + { + "key": "framework", + "name": "Framework" + }, + { + "key": "benchmark", + "name": "Model" + }, + { + "key": "benchmark_uoa", + "name": "Model package", + "module_uoa":"package", + "type": "url3" + }, + { + "key": "dataset", + "name": "Dataset" + }, + { + "key": "total", + "name": "Total objects", + "type": "int" + }, + { + "key": "good", + "name": "Predicted objects", + "type": "int" + }, + { + "key": "solution_uoa", + "name": "Portable benchmarking solution", + "module_uoa":"solution", + "type":"url3" + }, + { + "key": "solution_run_date", + "name": "Run date" + } , + { + "key": "notes", + "name": "Notes" + } + ] +} diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prepare.txt b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prepare.txt new file mode 100644 index 0000000000..9eed83e067 --- /dev/null +++ 
b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prepare.txt @@ -0,0 +1,81 @@ +# Get stable components for the CK solution +cbench download package:lib-boost-1.67.0-without-python --force +cbench download package:mlperf-inference-source-ck --force +cbench download package:lib-openvino-ck --force + + +# Pull CK repositories (including ck-mlperf, ck-env, ck-autotuning, ck-tensorflow, ck-docker). +ck pull repo:ck-openvino + +# Use generic Linux settings with dummy frequency setting scripts. +ck detect platform.os --platform_init_uoa=generic-linux-dummy + +# Detect C/C++ compiler (gcc). +ck detect soft:compiler.gcc + +# Detect CMake build tool. +#ck detect soft --tags=cmake --full_path=`which cmake` + + +# Install the latest Python package installer (pip) and some dependencies. +python -m pip install --ignore-installed pip setuptools + + + +#-----------------------------------------------------------------------------# +# Step 1. Install Python dependencies (for Model Optimizer and LoadGen). +#-----------------------------------------------------------------------------# +# OpenVINO pre-release strictly requires TensorFlow < 2.0 and NetworkX < 2.4. +ck install package --tags=lib,python-package,tensorflow --force_version=1.15.2 +ck install package --tags=lib,python-package,networkx --force_version=2.3.0 +ck install package --tags=lib,python-package,defusedxml +# Cython is an implicit dependency of NumPy. +ck install package --tags=lib,python-package,cython +ck install package --tags=lib,python-package,numpy +# test-generator is an implicit dependency of Model Optimizer (not in requirements.txt). +ck install package --tags=lib,python-package,test-generator +# Abseil is a LoadGen dependency. +ck install package --tags=lib,python-package,absl + + +#-----------------------------------------------------------------------------# +# Step 2. 
Install C++ dependencies (for Inference Engine and MLPerf program). +#-----------------------------------------------------------------------------# +ck install package --tags=channel-stable,opencv,v3.4.3 +ck install package:lib-boost-1.67.0-without-python +# Install LoadGen from a branch reconstructed according to Intel's README. +ck install package --tags=ck-mlperf,inference,source,dividiti.v0.5-intel +ck install package --tags=lib,loadgen,static +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 3. Install the OpenVINO "pre-release" used for MLPerf Inference v0.5. +#-----------------------------------------------------------------------------# +ck install package --tags=lib,ck-openvino,pre-release +ck compile ck-openvino:program:mlperf-inference-v0.5 +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 4. Install the first 500 images of the ImageNet 2012 validation dataset. +# TODO: Create a calibration dataset. +#-----------------------------------------------------------------------------# +ck install package --tags=dataset,imagenet,val,min --no_tags=resized +ck install package --tags=dataset,imagenet,aux +# The OpenVINO program expects to find val_map.txt in the dataset directory. +head -n 500 `ck locate env --tags=aux`/val.txt > `ck locate env --tags=val`/val_map.txt +# Install misc Python dependencies required for calibration. +python -m pip install nibabel pillow progress py-cpuinfo pyyaml shapely sklearn tqdm xmltodict yamlloader +# Install "headless" OpenCV (which doesn't need libsm6, libxext6, libxrender-dev). 
+ck install package --tags=lib,python-package,cv2,opencv-python-headless +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 6. Install the official MobileNet model for MLPerf Inference v0.5 +# and convert it into the OpenVINO format. +#-----------------------------------------------------------------------------# +ck install package --tags=image-classification,model,tf,mlperf,mobilenet-v1-1.0-224,non-quantized +ck install package --tags=model,openvino,mobilenet --safe +#-----------------------------------------------------------------------------# diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prereq.txt b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prereq.txt new file mode 100644 index 0000000000..efd070ac5e --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-mobilenet-imagenet-val-500-linux/prereq.txt @@ -0,0 +1,8 @@ +# Tested on Ubuntu 18.04 +# (including Docker and Windows 10 Sybsystem for Linux) + + sudo apt update + + sudo apt install git wget libz-dev zip curl + sudo apt install python3 python3-pip + sudo apt install gcc g++ autoconf autogen libtool cmake diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_activate.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_activate.sh new file mode 100644 index 0000000000..1e4b776f5e --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_activate.sh @@ -0,0 +1,6 @@ +#! 
/bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb activate mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux \ No newline at end of file diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init.sh new file mode 100644 index 0000000000..8910a3554d --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init.sh @@ -0,0 +1,23 @@ +#! /bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb init mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux \ + --name="Image classification; MLPerf inference v0.5; OpenVINO; ResNet 50; ImageNet; 500 images validation; Linux; benchmark; portable workflows" \ + --tags="validated,image-classification,mlperf,mlperf-inference,mlperf-inference-v0.5,openvino,resnet,resnet50,imagenet,500,benchmark,linux,portable-workflows,crowd-benchmarking" \ + --workflow_repo_url="local" \ + --workflow="program:mlperf-inference-v0.5" \ + --workflow_cmd_before="export NPROCS=\`grep -c processor /proc/cpuinfo\`" \ + --workflow_cmd="image-classification" \ + --workflow_cmd_extra="--repetitions=1 --no_state_check --skip_print_timers --env.CK_OPENVINO_MODEL_NAME=resnet50 --env.CK_LOADGEN_SCENARIO=Offline --env.CK_LOADGEN_MODE=Accuracy --env.CK_LOADGEN_DATASET_SIZE=500 --env.CK_OPENVINO_NTHREADS=\$NPROCS --env.CK_OPENVINO_NSTREAMS=\$NPROCS --env.CK_OPENVINO_NIREQ=\$NPROCS" \ + --workflow_output_dir="tmp" \ + --add_extra_meta_from_file="$PWD/extra-meta.json" \ + --desc_prereq="$PWD/prereq.txt" \ + --desc_prepare="$PWD/prepare.txt" \ + --result_file="tmp/tmp-ck-timer.json" \ + --python_version_from="3.6" \ + --python_version_to="3.7.99" \ + 
--graph_convertor="$PWD/graph-convertor.json" +# --update_meta_and_stop + diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init_graph.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init_graph.sh new file mode 100644 index 0000000000..bdcf9e598c --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_init_graph.sh @@ -0,0 +1,5 @@ +#! /bin/bash + +cbench init-graph mlperf-inference-v0.5-classification --version=1.0.0 --desc_file="$PWD/graph-desc.json" \ + --name="MLPerf Inference v0.5 - Image Classification - crowd-benchmarking" \ + --tags="benchmarking,reproducible-benchmarking,crowd-benchmarking,reproduced-results,mlperf,mlperf-inference,mlperf-inference-v0.5" diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_publish.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_publish.sh new file mode 100644 index 0000000000..4afee4e002 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_publish.sh @@ -0,0 +1,8 @@ +#! 
/bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb setup +cb publish solution:mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux --force --version=1.0.1 + diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run.sh new file mode 100644 index 0000000000..7de38e2228 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run.sh @@ -0,0 +1,6 @@ +#! /bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +cb run mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run_benchmark.sh b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run_benchmark.sh new file mode 100644 index 0000000000..fa0f0e1a69 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/_run_benchmark.sh @@ -0,0 +1,8 @@ +#! 
/bin/bash + +export CK_REPOS=$PWD/CK +export CK_TOOLS=$PWD/CK-TOOLS + +ck find module:program + +cb benchmark mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/extra-meta.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/extra-meta.json new file mode 100644 index 0000000000..2d91328a72 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/extra-meta.json @@ -0,0 +1,10 @@ +{ + "related_components": { + "paper":{ + "title":"MLPerf Inference Benchmark", + "uid":"d0e50ebb5b9d4ec9" + }, + "result_uoa":"mlperf-inference-v0.5-classification" + }, + "crowd_benchmarking":"yes" +} diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-convertor.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-convertor.json new file mode 100644 index 0000000000..3a5973abd5 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-convertor.json @@ -0,0 +1,58 @@ +[ + { + "graph_id": "mlperf-inference-v0.5-classification", + "keys": [ + {"key1":"solution_uoa"}, + {"key1":"solution_uid"}, + {"key1":"solution_run_date"}, + {"key1":"solution_duration"}, + {"key1":"program_workflow_uoa"}, + {"key1":"client_version"}, + + {"key1":"good"}, + {"key1":"total"}, + { + "key1": "accuracy" + }, + + {"key1":"##resolved_deps#weights#data_name", "out_key":"benchmark"}, + {"key1":"##resolved_deps#weights#package_uoa","out_key":"benchmark_uoa"}, + {"key1":"##resolved_deps#dataset#name", "out_key":"dataset"}, + + 
{"key1":"##resolved_deps", "out_key":"resolved_deps"}, + {"key1":"##platform_info", "out_key":"platform_info"}, + { + "key1": "##resolved_deps#openvino#data_name", + "key2": "##resolved_deps#openvino#version", + "out_key": "framework" + }, + { + "key1": "##resolved_deps#openvino#data_name", + "key2": "##resolved_deps#openvino#version", + "out_key": "framework" + }, + { + "out_key": "notes", + "value": "Crowd-benchmarking using portable CK solutions" + }, + { + "key1": "##platform_info#cpu_unique@0#model name", + "out_key": "processor" + }, + { + "out_key": "submitter", + "value": "Crowd-user" + }, + { + "key1": "##platform_info#platform#vendor", + "key2": "##platform_info#platform#model", + "key3": "##platform_info#platform#name", + "out_key": "system" + }, + { + "out_key": "crowd_benchmarking", + "value": "yes" + } + ] + } +] diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-desc.json b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-desc.json new file mode 100644 index 0000000000..0724fe89aa --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/graph-desc.json @@ -0,0 +1,87 @@ +{ + "default_key_x": "solution_duration", + "default_key_y": "accuracy", + "default_sort_key": "solution_duration", + "table_view": [ + { + "json_and_pre": "yes", + "key": "platform_info", + "name": "Platform info", + "skip_pre": "yes" + }, + { + "json_and_pre": "yes", + "key": "resolved_deps", + "name": "Resolved deps", + "skip_pre": "yes" + }, + { + "format": "%.2f", + "key": "solution_duration", + "name": "Total time (sec)", + "type": "float" + }, + { + "format": "%.2f", + "key": "accuracy", + "name": "Accuracy", + "type": "float" + }, + { + "key": "system", + "name": "System" + }, + { + "key": "processor", + "name": "Processor" + }, + { 
+ "key": "program_workflow_uoa", + "name": "Program pipeline (workflow)", + "module_uoa":"program", + "type": "url3" + }, + { + "key": "framework", + "name": "Framework" + }, + { + "key": "benchmark", + "name": "Model" + }, + { + "key": "benchmark_uoa", + "name": "Model package", + "module_uoa":"package", + "type": "url3" + }, + { + "key": "dataset", + "name": "Dataset" + }, + { + "key": "total", + "name": "Total objects", + "type": "int" + }, + { + "key": "good", + "name": "Predicted objects", + "type": "int" + }, + { + "key": "solution_uoa", + "name": "Portable benchmarking solution", + "module_uoa":"solution", + "type":"url3" + }, + { + "key": "solution_run_date", + "name": "Run date" + } , + { + "key": "notes", + "name": "Notes" + } + ] +} diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prepare.txt b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prepare.txt new file mode 100644 index 0000000000..f4e31969fc --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prepare.txt @@ -0,0 +1,84 @@ +# Get stable components for the CK solution +cbench download package:lib-boost-1.67.0-without-python --force +cbench download package:mlperf-inference-source-ck --force +cbench download package:lib-openvino-ck --force + + +# Pull CK repositories (including ck-mlperf, ck-env, ck-autotuning, ck-tensorflow, ck-docker). +ck pull repo:ck-openvino + +# Use generic Linux settings with dummy frequency setting scripts. +ck detect platform.os --platform_init_uoa=generic-linux-dummy + +# Detect C/C++ compiler (gcc). +ck detect soft:compiler.gcc + +# Detect CMake build tool. +#ck detect soft --tags=cmake --full_path=`which cmake` + + +# Install the latest Python package installer (pip) and some dependencies. 
+python -m pip install --ignore-installed pip setuptools + + + +#-----------------------------------------------------------------------------# +# Step 1. Install Python dependencies (for Model Optimizer and LoadGen). +#-----------------------------------------------------------------------------# +# OpenVINO pre-release strictly requires TensorFlow < 2.0 and NetworkX < 2.4. +ck install package --tags=lib,python-package,tensorflow --force_version=1.15.2 +ck install package --tags=lib,python-package,networkx --force_version=2.3.0 +ck install package --tags=lib,python-package,defusedxml +# Cython is an implicit dependency of NumPy. +ck install package --tags=lib,python-package,cython +ck install package --tags=lib,python-package,numpy +# test-generator is an implicit dependency of Model Optimizer (not in requirements.txt). +ck install package --tags=lib,python-package,test-generator +# Abseil is a LoadGen dependency. +ck install package --tags=lib,python-package,absl + + +#-----------------------------------------------------------------------------# +# Step 2. Install C++ dependencies (for Inference Engine and MLPerf program). +#-----------------------------------------------------------------------------# +ck install package --tags=channel-stable,opencv,v3.4.3 +ck install package:lib-boost-1.67.0-without-python +# Install LoadGen from a branch reconstructed according to Intel's README. +ck install package --tags=ck-mlperf,inference,source,dividiti.v0.5-intel +ck install package --tags=lib,loadgen,static +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 3. Install the OpenVINO "pre-release" used for MLPerf Inference v0.5. 
+#-----------------------------------------------------------------------------# +ck install package --tags=lib,ck-openvino,pre-release +ck compile ck-openvino:program:mlperf-inference-v0.5 +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 4. Install the first 500 images of the ImageNet 2012 validation dataset. +# TODO: Create a calibration dataset. +#-----------------------------------------------------------------------------# +ck install package --tags=dataset,imagenet,val,min --no_tags=resized +ck install package --tags=dataset,imagenet,aux +# The OpenVINO program expects to find val_map.txt in the dataset directory. +head -n 500 `ck locate env --tags=aux`/val.txt > `ck locate env --tags=val`/val_map.txt +# Install misc Python dependencies required for calibration. +python -m pip install nibabel pillow progress py-cpuinfo pyyaml shapely sklearn tqdm xmltodict yamlloader +# Install "headless" OpenCV (which doesn't need libsm6, libxext6, libxrender-dev). +ck install package --tags=lib,python-package,cv2,opencv-python-headless +#-----------------------------------------------------------------------------# + + +#-----------------------------------------------------------------------------# +# Step 5. Install the official ResNet model for MLPerf Inference v0.5 +# and convert it into the OpenVINO format. 
+#-----------------------------------------------------------------------------# + +#python -m pip install Pillow -U + +ck install package --tags=image-classification,model,tf,mlperf,resnet +ck install package --tags=model,openvino,resnet50 --safe +#-----------------------------------------------------------------------------# diff --git a/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prereq.txt b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prereq.txt new file mode 100644 index 0000000000..669fbd5b21 --- /dev/null +++ b/incubator/cbench/examples/solutions/mlperf-inference-v0.5-image-classification-openvino-resnet50-imagenet-val-500-linux/prereq.txt @@ -0,0 +1,10 @@ +# Tested on Ubuntu 18.04 +# (including Docker and Windows 10 Sybsystem for Linux) + +# These dependencies are needed to rebuild COCO API: + + sudo apt update + + sudo apt install git wget libz-dev zip curl + sudo apt install python3 python3-pip + sudo apt install gcc g++ autoconf autogen libtool cmake diff --git a/incubator/cbench/setup.py b/incubator/cbench/setup.py new file mode 100644 index 0000000000..42dbabf396 --- /dev/null +++ b/incubator/cbench/setup.py @@ -0,0 +1,107 @@ +# +# Developer(s): Grigori Fursin +# Herve Guillou +# + +import os +import sys +import imp + +############################################################ +from setuptools import find_packages, setup, convert_path + +try: + from io import open +except ImportError: + pass + +############################################################ +# Version +version = imp.load_source( + 'cbench.__init__', os.path.join('cbench', '__init__.py')).__version__ + +# Default portal +portal_url='https://cKnowledge.io' + + +############################################################ +setup( + name='cbench', + + author="Grigori Fursin", + author_email="Grigori.Fursin@cTuning.org", + + version=version, 
+ + description="A cross-platform client to perform collaborative and reproducible benchmarking, optimization and co-design of software and hardware for emerging workloads (AI, ML, quantum, IoT) via the open cKnowledge.io portal", + + license="Apache Software License (Apache 2.0)", + + long_description=open(convert_path('./README.md'), encoding="utf-8").read(), + long_description_content_type="text/markdown", + + url=portal_url, + + python_requires=">=2.7", + + packages=find_packages(exclude=["tests*", "docs*"]), + package_data={"cbench":['static/*']}, + + include_package_data=True, + + install_requires=[ + 'requests', + 'click>=7.0', + 'ck', + 'virtualenv' + ], + + entry_points={ + "console_scripts": + [ + "cr = cbench.main:cli", + "cb = cbench.main:cli", + "cbench = cbench.main:cli" + ] + }, + + zip_safe=False, + + keywords="reproducible benchmarking, customizable benchmarking, portable workflows, reusable computational components, reproducibility, collaborative experiments, automation, optimization, co-design, collective knowledge", + + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: End Users/Desktop", + "Intended Audience :: Information Technology", + "Intended Audience :: Science/Research", + "Environment :: Console", + "Environment :: Plugins", + "Environment :: Web Environment", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Topic :: Scientific/Engineering", + "Topic :: Software Development", + "Topic :: System", + "Topic :: System :: Benchmark", + "Topic :: Education", + "Topic :: Utilities" + ], +) + +########################################################### +# Get release notes +import cbench.comm_min +r=cbench.comm_min.send({'url':portal_url+'/api/v1/?', + 'action':'event', + 
'dict':{'type':'get-cbench-release-notes','version':version}}) +notes=r.get('notes','') +if notes!='': + print ('*********************************************************************') + print ('Release notes:') + print ('') + print (notes) + print ('*********************************************************************') diff --git a/incubator/connectme/testing/connectme/tests/__init__.py b/incubator/connectme/testing/connectme/tests/__init__.py new file mode 100644 index 0000000000..f9ff72daf2 --- /dev/null +++ b/incubator/connectme/testing/connectme/tests/__init__.py @@ -0,0 +1,6 @@ +# +# Developer(s): +# * Grigori Fursin, https://fursin.net +# + +from connectme.cm import CM diff --git a/incubator/connectme/testing/connectme/tests/__main__.py b/incubator/connectme/testing/connectme/tests/__main__.py new file mode 100644 index 0000000000..cecd553be4 --- /dev/null +++ b/incubator/connectme/testing/connectme/tests/__main__.py @@ -0,0 +1,5 @@ +from connectme.cli import run + +r = run() + +exit(r['return']) diff --git a/incubator/connectme/testing/connectme/tests/cli.py b/incubator/connectme/testing/connectme/tests/cli.py new file mode 100644 index 0000000000..e4af8f32c6 --- /dev/null +++ b/incubator/connectme/testing/connectme/tests/cli.py @@ -0,0 +1,157 @@ +import os + +########################################################################### +def run(argv=None): + """ + Run CM from command line. 
+ + Args: + argv (str | list): CM input + + Returns: + Dictionary: + return (int): return code == 0 if no error + >0 if error + + (error) (str): error string if return>0 + + data from a given action + """ + + # Aggregate CM input from argv + i = {} + + con = False + + if not argv: + import sys + argv = sys.argv[1:] + + con = True + + # Parse command line + r = parse(argv) + + args = r['args'] + options = r['options'] + extra = r['extra'] + + # Useful if input is string + argv = r['argv'] + + # Check explicit help + if (len(args)==0 and ('h' in options or 'help' in options)) or \ + (len(args)==0 and len(options)==0): + print_help() + exit(0) + + # Aggregate into 1 dict: + i.update(options) + + i['extra_cmd'] = extra + + if len(args)>0: + i['module']=args[0] + if len(args)>1: + i['action']=args[1] + if len(args)>2: + i['data']=args[2] + if len(args)>3: + i['args']=args[3:] + + # Access CM + from connectme import CM + + cm = CM(con = con) + + r = cm.init() + if r['return']>0: return r + + return cm.access(i, argv) + +########################################################################### +def print_help(): + """ + Print command line help. + """ + + print('usage: cm [module (data class)] [action] [data] [arguments] [options]') + + +########################################################################### +def parse(cmd): + """ + Parse command line. 
+ + Args: + cmd (str | list) : arguments as a string or list + + Returns: + Dictionary:: + + args (list) : list of positional arguments + options (dict) : options + extra (str): string after -- + """ + + argv=cmd + + # If input is string, convert to argv + if type(cmd) == str: + import shlex + argv=shlex.split(cmd) + + # Positional arguments + args = [] + + # Options + options = {} + + # Extra after -- + extra = '' + + # Parse + for i in range(0, len(argv)): + a=argv[i] + + # Check if args or options + j=a.find('=') + + if a=='--': + extra = ' '.join(argv[i+1:]) + break + + elif a.startswith('@'): + file_name=a[1:] + + if os.path.isfile(file_name): + from connectme import io + + r = io.load_json_or_yaml(file_name) + if r['return']>0: return r + + options.update(r['data']) + + elif j>0 or a.startswith('-') or a.startswith('--'): + v=True + + k=a + if j>0: + v=a[j+1:] + k=a[:j] + + if k.startswith('--'): + k = k[2:] + elif k.startswith('-'): + k = k[1:] + + options[k] = v + else: + args.append(a) + + return {'return':0, 'args': args, 'options': options, 'extra': extra, 'argv': argv} + +########################################################################### +if __name__ == "__main__": + r = run() + exit(r['return']) diff --git a/incubator/connectme/testing/connectme/tests/cm.py b/incubator/connectme/testing/connectme/tests/cm.py new file mode 100644 index 0000000000..6a9d8fd372 --- /dev/null +++ b/incubator/connectme/testing/connectme/tests/cm.py @@ -0,0 +1,495 @@ +import os + +from connectme import config, io, misc, repo + +########################################################################### +class CM: + def __init__(self, + path="", + debug=False, + con=False, + line_before_json=False, + print_json=False): + + """ + ConnectMe main class + + Args: + path (str): path to all repositories (optional) + + """ + + # Configuration + self.cfg = {} + + # Update with static config + self.cfg.update(config.cfg) + + # Run-time parameters + self.rt = {} + + # Set path 
to all repositories + # Check 1) explicit path from above + # 2) environment variable CM_HOME + # 3) USER HOME directory CM + home_path = path if path != "" else self.set_home_path() + + self.rt['home_path'] = home_path + + # Check debug + if os.environ.get(config.ENV_DEBUG, '')!='': + debug = True + self.rt['debug'] = debug + + # Check console + self.rt['con']=con + + # Check output + self.rt['print_json']=print_json + + # Check if line before json + self.rt['line_before_json']=line_before_json + + # Check file with databases + self.rt['file_repo_list'] = os.path.join(home_path, config.FILE_REPO_LIST) + + # Load repositories or initialize empty file with local repos + self.repos = [] + + + ########################################################################### + def init(self): + """ + ConnectMe class + + Args: + path (str): path to all repositories (optional) + + """ + + # Check repositories + fn = self.rt['file_repo_list'] + + if os.path.isfile(fn): + r = io.load_json(fn) + if r['return']>0: return r + + self.repos = r['data'] + else: + # Initialize local repo + path_local_repo = os.path.join(self.rt['home_path'], config.LOCAL_REPO_NAME) + + lrepo = repo.Repo(self) + + r = lrepo.init(path_local_repo, + config.LOCAL_REPO_NAME, + config.LOCAL_REPO_UID) + if r['return']>0: return r + + return {'return':0} + + ########################################################################### + def register_path(self, path): + """ + Register path with repository. 
+ + Args: + path (str): path to a CM repository + + """ + + found = False + for x in self.repos: + try: + if os.path.samefile(x['path'], path): + found = True + break + except: + pass + + if not found: + self.repos.append({'path':path}) + + # Save file + r = io.save_json_or_yaml(self.rt['file_repo_list'], self.repos, sort_keys=True) + if r['return']>0: return r + + return {'return':0} + + ########################################################################### + def set_home_path(self): + """ + + Set ConnectMe home path where to store repositories and other info. + + Check + steps: + 1) environment variable CM_REPOS + 2) USER HOME directory CM + + Create dir if empty. + + Args: + None + + Returns: + path (str): path + + """ + + path = os.environ.get(config.ENV_HOME,'') + if path == "": + from os.path import expanduser + path = os.path.join(expanduser("~"), config.DEFAULT_HOME_DIR) + + return path + + + + + ########################################################################### + def access(self, i, argv=None): + """ + Access CM repositories + + Args: + i (dict) - CM dictionary + + (argv) (list) - original input from the command line + to support wrapping around tools + """ + + # Process special commands + module = i.get('module','') + + if module == 'ck': + # Keep support for CK + import ck.kernel as ck + + return ck.access(argv[1:]) + + data = i.get('data','') + + # Import fnmatch if * or ? + module_wildcards=True if '*' in module or '?' in module else False + data_wildcards=True if '*' in data or '?' 
in data else False + + if module_wildcards or data_wildcards: + import fnmatch + + # Iterate over CM repos + for p in self.repos: + path = p['path'] + + if os.path.isdir(path): + # Expand modules + list_of_modules=[] + if not module_wildcards: + list_of_modules.append(module) + else: + list_of_potential_modules = os.listdir(path) + + for m in list_of_potential_modules: + if fnmatch.fnmatch(m, module): + list_of_modules.append(m) + + # Iterate over modules + for m in list_of_modules: + pm=os.path.join(path, m) + + if os.path.isdir(pm): + # Expand data + list_of_data=[] + if not data_wildcards: + list_of_data.append(data) + else: + list_of_potential_data = os.listdir(pm) + + for d in list_of_potential_data: + if fnmatch.fnmatch(d, data): + list_of_data.append(d) + + # Iterate over data + for d in list_of_data: + pd=os.path.join(pm, d) + + if os.path.isdir(pd): + print (pd) + + + + return {'return':0} + + + + + + + + def search(self, + repo="", + module="", + data=""): + """ + """ + + # Iterate over repos + paths=[] + + for rr in self.rt['repo_list']: + path=rr['path'] + + check_this_repo = True + + if repo!='' and rr['name'] != repo: + check_this_repo = False + + if check_this_repo: + # List modules + mdirs = os.listdir(path) + + for m in mdirs: + path_to_module = os.path.join(path, m) + + if os.path.isdir(path_to_module): + check_this_module = True + + if module != '' and m != module: + check_this_module = False + + if check_this_module: + # List directories + ddirs = os.listdir(path_to_module) + + for d in ddirs: + path_to_data = os.path.join(path_to_module, d) + + if os.path.isdir(path_to_data): + check_this_data = True + + if data != '' and d != data: + check_this_data = False + + if check_this_data: + paths.append(path_to_data) + + return {'return':0, 'paths':paths} + + + + + + + + + + + + + + + + ########################################################################### + def finalize(self, r): + """ + Prepare error code + + Args: + output (dict): misc + + 
Returns: + (dict): xyz + + """ + + ret = r['return'] + + if ret > 0 and self.rt['con']: + print (r['error']) + + if self.rt['print_json']: + if self.rt['line_before_json']: + print (config.LINE_BEFORE_JSON) + + import json + print (json.dumps(r, indent=2)) + + if ret > 0 and self.rt['debug']: + raise(Exception(r['error'])) + + return r + + + ########################################################################### + def err(self, r): + """ + Print error and exit. Useful for scripts. + + Args: + r (dict): return dictionary + + + """ + + ret = r['return'] + + self.finalize(r) + + exit(ret) + + + + + + def ok(self, output = {}): + """Prepare return code + + """ + + r={'return': 0} + r.update(output) + return r + + def exit(self, r): + """ + Check output, print error if error and exit + + """ + + import sys + + code=r['return'] + + if code>0: + print (r['error']) + + sys.exit(code) + + def find_module(self, module): + """ + Initialize ConnectMe library. + + Check 1) environment variable CM_REPOS + 2) USER HOME directory CM + + """ + + # Python object for this module + obj = None + + # Python module for API + python_module = config.PLUGIN_PREFIX + module + + # Check inside connectme and then globally in Python + import importlib + for internal_module in [config.NAME + "." 
+ python_module, python_module]: + try: + obj = importlib.import_module(internal_module) + except Exception as e: + pass + + # Test if compatible + if obj: + try: + # Test if has compatibility var + x = obj.connectme + except Exception as e: + obj = None + + if obj: + break + + return obj + + def run_helper(self, i): + """ + """ + + # Find module in input + module = '' + + if type(i)==list: + if len(i) > 0: + module=i[0] + else: + module = i['module'] + del(i['module']) + + if module == '': + return self.err(self.errors['module_not_defined']) + + # Find CM module + obj = self.find_module(module) + + # TBD: init empty Module class with UID and with attr: dummy_module=True + if not obj: + return self.err(self.errors['module_not_found']) + + # Init class from the module + api = obj.api(self) + + # Check if input is a list and module has special func "parse_cmd" + if type(i)==list and hasattr(api, 'parse_cmd'): + return api.parse_cmd(i) + + # Find action + action = '' + if type(i)==list: + if len(i) > 1: + action=i[1] + else: + action=i['action'] + del(i['action']) + + if action == '': + # Check if module has help + if hasattr(api, 'print_help'): + return api.print_help() + + return self.err(self.errors['action_not_defined']) + + # Test if has argv extensions (so this func will parse args itself) + func = None + try: + func = getattr(api, action + '_argv') + except Exception as e: + pass + + r = self.ok() + + if func and type(i)==list: + r = func(['']+i) + else: + if type(i)==dict: + ii=i + else: + # TBD: convert list to input + ii={} + + try: + func = getattr(api, action) + except Exception as e: + return self.err(self.error['action_not_defined']) + + # Use internal parser + r = func(**ii) + + return r + + + + def run(self, i): + """ + Run CM module with a given action. 
# Error codes shared by the I/O helpers in this module
ERROR_UNKNOWN_FILE_EXTENSION = 1
ERROR_PATH_NOT_FOUND = 2
ERROR_FILE_NOT_FOUND = 16

###########################################################################
def load_json_or_yaml(file_name):
    """
    Load a JSON or YAML file, dispatching on the file extension.

    Args:
        file_name (str): path ending in 'json' or 'yaml'

    Returns:
        (dict): {'return': 0, 'data': ...} on success, or
                {'return': ERROR_UNKNOWN_FILE_EXTENSION, 'error': ...}
    """

    if file_name.endswith('json'):
        return load_json(file_name)
    elif file_name.endswith('yaml'):
        return load_yaml(file_name)

    return {'return': ERROR_UNKNOWN_FILE_EXTENSION, 'error': 'unknown file extension'}

###########################################################################
def save_json_or_yaml(file_name, data, sort_keys=False):
    """
    Save data to a JSON or YAML file, dispatching on the file extension.

    Args:
        file_name (str): path ending in 'json' or 'yaml'
        data (dict): data to serialize
        sort_keys (bool): sort dictionary keys in the output

    Returns:
        (dict): {'return': 0} on success, or
                {'return': ERROR_UNKNOWN_FILE_EXTENSION, 'error': ...}
    """

    if file_name.endswith('json'):
        # BUGFIX: pass sort_keys by keyword; the original passed it
        # positionally, where it landed in save_json's `indent` parameter,
        # corrupting the indentation and silently ignoring the flag.
        return save_json(file_name, data, sort_keys=sort_keys)
    elif file_name.endswith('yaml'):
        return save_yaml(file_name, data, sort_keys)

    return {'return': ERROR_UNKNOWN_FILE_EXTENSION, 'error': 'unknown file extension'}
###########################################################################
def save_json(file_name, data=None, indent=2, sort_keys=True):
    """
    Save data to a JSON file.

    Args:
        file_name (str): destination path
        data (dict): data to serialize (defaults to an empty dict; a None
            sentinel avoids the shared mutable-default pitfall)
        indent (int): JSON indentation width
        sort_keys (bool): sort dictionary keys in the output

    Returns:
        (dict): {'return': 0}
    """

    import json

    if data is None:
        data = {}

    with open(file_name, 'w') as jf:
        jf.write(json.dumps(data, indent=indent, sort_keys=sort_keys))

    return {'return': 0}

###########################################################################
def load_yaml(file_name):
    """
    Load a YAML file.

    Args:
        file_name (str): path to the YAML file

    Returns:
        (dict): {'return': 0, 'data': parsed content}
    """

    import yaml

    with open(file_name) as yf:
        # FullLoader: parses standard YAML without arbitrary object construction
        data = yaml.load(yf, Loader=yaml.FullLoader)

    return {'return': 0,
            'data': data}

###########################################################################
def save_yaml(file_name, data=None, sort_keys=True):
    """
    Save data to a YAML file.

    Args:
        file_name (str): destination path
        data (dict): data to serialize (defaults to an empty dict)
        sort_keys (bool): sort dictionary keys in the output

    Returns:
        (dict): {'return': 0}
    """

    import yaml

    if data is None:
        data = {}

    with open(file_name, 'w') as yf:
        # BUGFIX: forward sort_keys to yaml.dump -- the original accepted
        # the parameter but silently ignored it.
        yaml.dump(data, yf, sort_keys=sort_keys)

    return {'return': 0}

###########################################################################
def check_and_create_dir(path):
    """
    Create directory (and missing parents) if it doesn't exist.

    Args:
        path (str): directory path

    Returns:
        (dict): {'return': 0}
    """

    # exist_ok avoids a race between a separate isdir() check and makedirs()
    os.makedirs(path, exist_ok=True)

    return {'return': 0}

###########################################################################
def find_file_in_dir_and_above(filename,
                               path=""):
    """
    Find a file in the given directory or any of its parents.

    Args:
        filename (str): file name to look for
        path (str): starting directory (default: current working directory)

    Returns:
        (dict) return (int): 0 - if found
                             16 - if file not found
                             2 - if the starting path doesn't exist
               (error) (str)

               path (str): directory where the file was found

               path_to_file (str): full path to the found file
    """

    if path == "":
        path = os.getcwd()

    if not os.path.isdir(path):
        return {'return': ERROR_PATH_NOT_FOUND, 'error': 'path not found'}

    path = os.path.realpath(path)

    while True:
        test_path = os.path.join(path, filename)

        if os.path.isfile(test_path):
            return {'return': 0, 'path': path, 'path_to_file': test_path}

        new_path, _ = os.path.split(path)

        if new_path == path:
            # Reached the filesystem root
            break

        path = new_path

    # BUGFIX: the original reported 'path not found' here, which shadowed
    # the distinct ERROR_PATH_NOT_FOUND case above.
    return {'return': ERROR_FILE_NOT_FOUND, 'error': 'file not found'}
###########################################################################
def gen_uid():
    """
    Generate a CM UID.

    Returns:
        (dict): {'return': 0,
                 'uid': 16-character lowercase hexadecimal string}
    """

    import uuid

    # uuid4() yields 32 hex characters; CM UIDs keep the first 16
    uid = uuid.uuid4().hex[:16]

    return {'return': 0, 'uid': uid}