author      Antonio Terceiro <antonio.terceiro@linaro.org>   2013-11-18 19:03:36 -0300
committer   Antonio Terceiro <antonio.terceiro@linaro.org>   2013-11-18 19:03:36 -0300
commit      b1db8292803d8615eb382fa3391cb0f95f8e4fb3 (patch)
tree        a6cb048f1c631420ff3c1222b3585699ed0c66f1
parent      99ea8ab8541c3144c5e62cec8cdd327dc13c4388 (diff)

Imported Upstream version 0.8.1 (upstream/0.8.1)
-rw-r--r--   HACKING                          17
-rw-r--r--   INSTALL                          22
-rw-r--r--   MANIFEST.in                      14
-rw-r--r--   NEWS                              0
-rw-r--r--   PKG-INFO                          2
-rwxr-xr-x   ci-build                         58
-rw-r--r--   entry_points.ini                 97
-rwxr-xr-x   integration-tests                80
-rw-r--r--   lava/tool/__init__.py             2
-rw-r--r--   lava_tool.egg-info/PKG-INFO       2
-rw-r--r--   lava_tool.egg-info/SOURCES.txt    7
11 files changed, 298 insertions, 3 deletions
diff --git a/HACKING b/HACKING
new file mode 100644
index 0000000..b641e8c
--- /dev/null
+++ b/HACKING
@@ -0,0 +1,17 @@
+Test Code Coverage
+==================
+
+To get a nicely viewable HTML report on test code coverage, do as follows:
+
+* Install `python-coverage` (`pip install coverage` if you use pip)
+* Run the following command:
+
+ python-coverage run -m unittest lava_tool.tests.test_suite 2>/dev/null && python-coverage html
+
+* The report will be saved in a directory called `lava_tool_coverage`: open
+  the `index.html` file inside it to see the report.
+
+Notes:
+
+ * To re-run the coverage report, you have to delete the `lava_tool_coverage`
+   directory first, otherwise `python-coverage` will fail.
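
Putting the steps from the new HACKING file together, the whole cycle can be run
with a short snippet like the one below (a minimal sketch: it assumes
`python-coverage` is on PATH and that the report directory is
`lava_tool_coverage`, as described above; `xdg-open` is just one example of a
viewer):

  # remove a stale report first, otherwise python-coverage will fail
  rm -rf lava_tool_coverage
  # run the test suite under coverage and render the HTML report
  python-coverage run -m unittest lava_tool.tests.test_suite 2>/dev/null \
      && python-coverage html
  # open the generated report in a browser
  xdg-open lava_tool_coverage/index.html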
diff --git a/INSTALL b/INSTALL
new file mode 100644
index 0000000..d145d7b
--- /dev/null
+++ b/INSTALL
@@ -0,0 +1,22 @@
+Installation
+============
+
+Installation needs to be done via setup.py. For the impatient,
+
+$ python setup.py develop --user
+
+is a least-effort, minimally disruptive starting point.
+
+This will put a script you can invoke in ~/.local/bin/. You may need to add
+that directory to your PATH.
+
+To undo this process, run:
+
+$ python setup.py develop --user --uninstall
+$ rm -f ~/.local/bin/lava-tool
+
+Note that, for various reasons, the actual program (lava-tool) is not removed
+by the uninstall step, which is why the `rm -f` above is needed. Because this
+program is automatically generated, it is safe to remove it manually at any time.
+
+
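
If the `lava-tool` script is not found after the `--user` install described in
INSTALL above, one way to make ~/.local/bin visible is sketched below (assuming
a bash-like shell; adjust the startup file for other shells):

  # make user-level scripts visible to the shell
  echo 'export PATH="$HOME/.local/bin:$PATH"' >> ~/.bashrc
  . ~/.bashrc
  # verify that the entry point resolves and runs
  which lava-tool
  lava-tool help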
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..903d37c
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,14 @@
+include ci-build
+include entry_points.ini
+include HACKING
+include INSTALL
+include NEWS
+include README
+include setup.py
+include setup.cfg
+include integration-tests
+recursive-include integration-tests.d
+recursive-include lava/
+recursive-include lava_dashboard_tool/
+recursive-include lava_scheduler_tool/
+recursive-include lava_tool/
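
MANIFEST.in controls which of the files added in this commit end up in the
source tarball. A quick way to check the effect of the rules above (a sketch;
the tarball name is assumed from the lava-tool 0.8.1 metadata):

  # build a source distribution and inspect what was included
  python setup.py sdist
  tar -tzf dist/lava-tool-0.8.1.tar.gz | head -20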
diff --git a/NEWS b/NEWS
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/NEWS
diff --git a/PKG-INFO b/PKG-INFO
index d055310..0c6e058 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: lava-tool
-Version: 0.8
+Version: 0.8.1
Summary: Command line utility for Linaro validation services
Home-page: https://launchpad.net/lava-tool
Author: Zygmunt Krynicki
diff --git a/ci-build b/ci-build
new file mode 100755
index 0000000..714c5da
--- /dev/null
+++ b/ci-build
@@ -0,0 +1,58 @@
+#!/bin/sh
+
+VENV_DIR=".ci-build-venv"
+# Directory where coverage HTML report will be written.
+COVERAGE_REPORT_DIR="lava_tool_coverage"
+
+set -e
+
+if test -z "$VIRTUAL_ENV"; then
+ set -x
+ virtualenv $VENV_DIR
+ . $VENV_DIR/bin/activate
+ python setup.py develop
+fi
+
+# requirement for integration tests
+if ! pip show Flask | grep -q Flask; then
+ pip install 'Flask==0.9'
+fi
+if ! pip show PyYAML | grep -q PyYAML; then
+ pip install PyYAML
+fi
+# requirement for unit tests
+if ! pip show mocker | grep -q mocker; then
+ pip install mocker
+fi
+
+if ! pip show mock | grep -q mock; then
+ pip install mock
+fi
+# Requirement to run code coverage tests.
+if ! pip show coverage | grep -q coverage; then
+ pip install coverage
+fi
+
+if test -z "$DISPLAY"; then
+ # actual CI
+
+ # will install tests dependencies automatically. The output is also more
+ # verbose
+ python setup.py test < /dev/null
+
+ # integration-tests will pick this up and provide detailed output
+ export VERBOSE=1
+else
+ # in a development workstation, this will produce shorter/nicer output, but
+ # requires the test dependencies to be installed manually (or by running
+ # `python setup.py test` before).
+ python -m unittest lava_tool.tests.test_suite < /dev/null
+fi
+
+if test -d $COVERAGE_REPORT_DIR; then
+ rm -rf $COVERAGE_REPORT_DIR
+fi
+# Runs python-coverage.
+python-coverage run -m unittest lava_tool.tests.test_suite 2>/dev/null && python-coverage html
+
+./integration-tests
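
The ci-build script above adapts to its environment: without $VIRTUAL_ENV it
creates and activates its own virtualenv, and without $DISPLAY it takes the
more verbose `python setup.py test` path and exports VERBOSE=1 for the
integration tests. Typical invocations look like this (a sketch based only on
those checks):

  # outside any virtualenv: a .ci-build-venv is created and lava-tool is
  # installed into it in develop mode
  ./ci-build

  # inside an already activated virtualenv the existing environment is reused
  . .ci-build-venv/bin/activate
  ./ci-build

  # with DISPLAY unset (headless CI) the `python setup.py test` branch runs
  DISPLAY= ./ci-build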
diff --git a/entry_points.ini b/entry_points.ini
new file mode 100644
index 0000000..2266d9e
--- /dev/null
+++ b/entry_points.ini
@@ -0,0 +1,97 @@
+[console_scripts]
+lava-tool = lava_tool.dispatcher:main
+lava = lava.tool.main:LavaDispatcher.run
+lava-dashboard-tool=lava_dashboard_tool.main:main
+
+[lava.commands]
+help = lava.tool.commands.help:help
+scheduler = lava_scheduler_tool.commands:scheduler
+dashboard = lava_dashboard_tool.commands:dashboard
+job = lava.job.commands:job
+device = lava.device.commands:device
+testdef = lava.testdef.commands:testdef
+init = lava.commands:init
+submit = lava.commands:submit
+run = lava.commands:run
+status = lava.job.commands:status
+update = lava.commands:update
+script = lava.script.commands:script
+
+[lava_tool.commands]
+help = lava.tool.commands.help:help
+auth-add = lava_tool.commands.auth:auth_add
+submit-job = lava_scheduler_tool.commands:submit_job
+resubmit-job = lava_scheduler_tool.commands:resubmit_job
+cancel-job = lava_scheduler_tool.commands:cancel_job
+job-output = lava_scheduler_tool.commands:job_output
+job-status = lava_scheduler_tool.commands:job_status
+backup=lava_dashboard_tool.commands:backup
+bundles=lava_dashboard_tool.commands:bundles
+data_views=lava_dashboard_tool.commands:data_views
+deserialize=lava_dashboard_tool.commands:deserialize
+get=lava_dashboard_tool.commands:get
+make_stream=lava_dashboard_tool.commands:make_stream
+pull=lava_dashboard_tool.commands:pull
+put=lava_dashboard_tool.commands:put
+query_data_view=lava_dashboard_tool.commands:query_data_view
+restore=lava_dashboard_tool.commands:restore
+server_version=lava_dashboard_tool.commands:server_version
+streams=lava_dashboard_tool.commands:streams
+version=lava_dashboard_tool.commands:version
+
+[lava.scheduler.commands]
+submit-job = lava_scheduler_tool.commands:submit_job
+resubmit-job = lava_scheduler_tool.commands:resubmit_job
+cancel-job = lava_scheduler_tool.commands:cancel_job
+job-output = lava_scheduler_tool.commands:job_output
+job-status = lava_scheduler_tool.commands:job_status
+
+[lava.dashboard.commands]
+backup=lava_dashboard_tool.commands:backup
+bundles=lava_dashboard_tool.commands:bundles
+data_views=lava_dashboard_tool.commands:data_views
+deserialize=lava_dashboard_tool.commands:deserialize
+get=lava_dashboard_tool.commands:get
+make_stream=lava_dashboard_tool.commands:make_stream
+pull=lava_dashboard_tool.commands:pull
+put=lava_dashboard_tool.commands:put
+query_data_view=lava_dashboard_tool.commands:query_data_view
+restore=lava_dashboard_tool.commands:restore
+server_version=lava_dashboard_tool.commands:server_version
+streams=lava_dashboard_tool.commands:streams
+version=lava_dashboard_tool.commands:version
+
+[lava_dashboard_tool.commands]
+backup=lava_dashboard_tool.commands:backup
+bundles=lava_dashboard_tool.commands:bundles
+data_views=lava_dashboard_tool.commands:data_views
+deserialize=lava_dashboard_tool.commands:deserialize
+get=lava_dashboard_tool.commands:get
+make_stream=lava_dashboard_tool.commands:make_stream
+pull=lava_dashboard_tool.commands:pull
+put=lava_dashboard_tool.commands:put
+query_data_view=lava_dashboard_tool.commands:query_data_view
+restore=lava_dashboard_tool.commands:restore
+server_version=lava_dashboard_tool.commands:server_version
+streams=lava_dashboard_tool.commands:streams
+version=lava_dashboard_tool.commands:version
+
+[lava.job.commands]
+new = lava.job.commands:new
+submit = lava.job.commands:submit
+status = lava.job.commands:status
+run = lava.job.commands:run
+
+[lava.device.commands]
+add = lava.device.commands:add
+remove = lava.device.commands:remove
+config = lava.device.commands:config
+
+[lava.testdef.commands]
+new = lava.testdef.commands:new
+run = lava.testdef.commands:run
+submit = lava.testdef.commands:submit
+
+[lava.script.commands]
+run = lava.script.commands:run
+submit = lava.script.commands:submit
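
The sections above are setuptools entry point groups: [console_scripts]
defines the executables themselves, and the [lava.*.commands] groups list the
subcommands each dispatcher looks up by name. A rough sketch of the resulting
command lines (the exact arguments of each subcommand are defined in the
referenced modules, not in this file):

  # console_scripts entries become executables on PATH
  lava help                      # lava.tool.commands.help:help
  lava-tool help                 # help command of the lava_tool dispatcher
  lava-dashboard-tool streams    # lava_dashboard_tool.commands:streams

  # [lava.commands] names become `lava` subcommands, which in turn expose the
  # matching groups below, e.g. [lava.job.commands] and [lava.device.commands]
  lava job new ...               # lava.job.commands:new
  lava device add ...            # lava.device.commands:add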
diff --git a/integration-tests b/integration-tests
new file mode 100755
index 0000000..73726f6
--- /dev/null
+++ b/integration-tests
@@ -0,0 +1,80 @@
+#!/bin/sh
+
+set -e
+
+green() {
+ test -t 1 && printf "\033[0;32;40m$@\033[m\n" || echo "$@"
+}
+
+red() {
+ test -t 2 && printf "\033[0;31;40m$@\033[m\n" >&2 || echo "$@" >&2
+}
+
+start_server() {
+ server_dir="${base_tmpdir}/_server"
+ mkdir -p "${server_dir}"
+ server_log="${server_dir}/log"
+ python integration-tests.d/lib/server.py > "${server_log}" 2>&1 &
+ server_pid=$!  # PID of the background server process
+}
+
+stop_server() {
+ curl -q http://localhost:5000/exit
+}
+
+run_test() {
+ local testfile="$1"
+ local logfile="$2"
+ rc=0
+ if test -n "$VERBOSE"; then
+ sh -x "$testfile" < /dev/null || rc=$?
+ else
+ sh -x "$testfile" > "${logfile}" 2>&1 < /dev/null || rc=$?
+ fi
+ if test $rc -eq 0; then
+ green "$testname: PASS"
+ passed=$(($passed + 1))
+ else
+ failed=$(($failed + 1))
+ red "$testname: FAIL"
+ if test -f "$logfile"; then
+ cat "$logfile"
+ fi
+ fi
+}
+
+passed=0
+failed=0
+base_tmpdir=$(mktemp -d)
+logs="${base_tmpdir}/logs"
+mkdir "$logs"
+
+export PATH="$(dirname $0)"/integration-tests.d/lib:$PATH
+
+start_server
+
+tests="$@"
+if test -z "$tests"; then
+ tests=$(echo integration-tests.d/*.sh)
+fi
+
+for testfile in $tests; do
+ testname=$(basename "$testfile")
+ logfile="${logs}/${testname}.log"
+ export tmpdir="${base_tmpdir}/${testname}"
+ export LAVACONFIG="${tmpdir}/config"
+ mkdir "${tmpdir}"
+ run_test "$testfile" "$logfile"
+done
+
+stop_server
+
+rm -rf "${base_tmpdir}"
+
+echo
+if [ "$failed" -eq 0 ]; then
+ green "$passed tests passed, $failed tests failed."
+else
+ red "$passed tests passed, $failed tests failed."
+ exit 1
+fi
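
Each test is a plain shell script under integration-tests.d/: the runner above
exports $tmpdir and $LAVACONFIG for it, prepends integration-tests.d/lib to
PATH, starts the helper server from integration-tests.d/lib/server.py on
localhost:5000, and counts any non-zero exit status as a failure. A
hypothetical test file (the name and commands are illustrative only, not part
of this commit):

  # integration-tests.d/smoke.sh -- hypothetical example
  set -e
  # the runner exports these before invoking the script with `sh -x`
  test -n "$tmpdir"
  test -n "$LAVACONFIG"
  cd "$tmpdir"
  # any command failing here makes run_test() report FAIL
  lava-tool help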
diff --git a/lava/tool/__init__.py b/lava/tool/__init__.py
index 97f7f0b..84f311d 100644
--- a/lava/tool/__init__.py
+++ b/lava/tool/__init__.py
@@ -24,4 +24,4 @@ lava.tool
Generic code for command line utilities for LAVA
"""
-__version__ = (0, 8, 0, "final", 0)
+__version__ = (0, 8, 1, "final", 0)
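
The version bump lives in a tuple in the package itself; a quick sanity check
after installing (a sketch: the "0.8.1" string seen in PKG-INFO is presumably
rendered from this tuple at build time):

  python -c 'import lava.tool; print(lava.tool.__version__)'
  # expected output: (0, 8, 1, 'final', 0)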
diff --git a/lava_tool.egg-info/PKG-INFO b/lava_tool.egg-info/PKG-INFO
index d055310..0c6e058 100644
--- a/lava_tool.egg-info/PKG-INFO
+++ b/lava_tool.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: lava-tool
-Version: 0.8
+Version: 0.8.1
Summary: Command line utility for Linaro validation services
Home-page: https://launchpad.net/lava-tool
Author: Zygmunt Krynicki
diff --git a/lava_tool.egg-info/SOURCES.txt b/lava_tool.egg-info/SOURCES.txt
index 31c1297..4e164b3 100644
--- a/lava_tool.egg-info/SOURCES.txt
+++ b/lava_tool.egg-info/SOURCES.txt
@@ -1,4 +1,11 @@
+HACKING
+INSTALL
+MANIFEST.in
+NEWS
README
+ci-build
+entry_points.ini
+integration-tests
setup.cfg
setup.py
lava/__init__.py