author     Milo Casagrande <milo.casagrande@linaro.org>   2014-12-05 17:03:17 +0100
committer  Milo Casagrande <milo.casagrande@linaro.org>   2014-12-05 17:03:17 +0100
commit     b60ca6e70eabdbf3fb0bd68823ac1937b39488e9 (patch)
tree       35308ed5d1fa53b336db6fe30cb16cfa42bfec18
parent     5c3618fb4c3f5356add76424bae8a50b236813d1 (diff)
parent     bfd8ad9a0cfb4e5a3c26016c05d8990cc0bbffc2 (diff)
Merge branch 'boot-labs'

Conflicts:
    app/utils/db.py

Change-Id: I2c27698ca64319ddb69cadf1a8fa6196d25b1ea4
-rw-r--r--  .gitignore | 1
-rw-r--r--  ansible/host_vars/api.armcloud.us | 2
-rw-r--r--  ansible/host_vars/staging.api.armcloud.us | 2
-rw-r--r--  ansible/hosts | 1
-rw-r--r--  ansible/roles/configure-nginx/templates/api.armcloud.us | 2
-rw-r--r--  ansible/roles/configure-nginx/templates/staging.api.armcloud.us | 53
-rw-r--r--  ansible/roles/install-app/tasks/main.yml | 1
-rw-r--r--  app/.coveragerc | 1
-rw-r--r--  app/__init__.py | 2
-rw-r--r--  app/handlers/__init__.py | 2
-rw-r--r--  app/handlers/app.py | 4
-rw-r--r--  app/handlers/base.py | 193
-rw-r--r--  app/handlers/batch.py | 52
-rw-r--r--  app/handlers/bisect.py | 125
-rw-r--r--  app/handlers/boot.py | 189
-rw-r--r--  app/handlers/common.py | 431
-rw-r--r--  app/handlers/count.py | 7
-rw-r--r--  app/handlers/dbindexes.py | 48
-rw-r--r--  app/handlers/decorators.py | 209
-rw-r--r--  app/handlers/job.py | 64
-rw-r--r--  app/handlers/lab.py | 361
-rw-r--r--  app/handlers/response.py | 2
-rw-r--r--  app/handlers/subscription.py | 19
-rw-r--r--  app/handlers/tests/test_batch_handler.py | 33
-rw-r--r--  app/handlers/tests/test_bisect_handler.py | 51
-rw-r--r--  app/handlers/tests/test_boot_handler.py | 176
-rw-r--r--  app/handlers/tests/test_count_handler.py | 32
-rw-r--r--  app/handlers/tests/test_defconf_handler.py | 46
-rw-r--r--  app/handlers/tests/test_handler_response.py | 18
-rw-r--r--  app/handlers/tests/test_handlers_common.py | 32
-rw-r--r--  app/handlers/tests/test_job_handler.py | 80
-rw-r--r--  app/handlers/tests/test_lab_handler.py | 456
-rw-r--r--  app/handlers/tests/test_token_handler.py | 44
-rw-r--r--  app/handlers/tests/test_version_handler.py | 78
-rw-r--r--  app/handlers/token.py | 189
-rw-r--r--  app/handlers/version.py | 46
-rw-r--r--  app/models/__init__.py | 88
-rw-r--r--  app/models/base.py | 103
-rw-r--r--  app/models/bisect.py | 233
-rw-r--r--  app/models/boot.py | 349
-rw-r--r--  app/models/defconfig.py | 247
-rw-r--r--  app/models/job.py | 242
-rw-r--r--  app/models/lab.py | 199
-rw-r--r--  app/models/subscription.py | 143
-rw-r--r--  app/models/tests/test_bisect_model.py | 81
-rw-r--r--  app/models/tests/test_boot_model.py | 100
-rw-r--r--  app/models/tests/test_defconfig_model.py | 143
-rw-r--r--  app/models/tests/test_job_model.py | 144
-rw-r--r--  app/models/tests/test_lab_model.py | 191
-rw-r--r--  app/models/tests/test_models.py | 258
-rw-r--r--  app/models/tests/test_subscription_model.py | 107
-rw-r--r--  app/models/tests/test_token_model.py | 227
-rw-r--r--  app/models/token.py | 421
-rw-r--r--  app/taskqueue/tasks.py | 57
-rw-r--r--  app/tests/__init__.py | 10
-rw-r--r--  app/urls.py | 48
-rw-r--r--  app/utils/__init__.py | 4
-rw-r--r--  app/utils/bisect/__init__.py | 440
-rw-r--r--  app/utils/bootimport.py | 448
-rw-r--r--  app/utils/db.py | 152
-rw-r--r--  app/utils/docimport.py | 268
-rw-r--r--  app/utils/meta_parser.py | 123
-rw-r--r--  app/utils/scripts/__init__.py | 0
-rw-r--r--  app/utils/scripts/convert-models.py | 505
-rw-r--r--  app/utils/scripts/operation-tests.py (renamed from app/utils/emails.py) | 39
-rw-r--r--  app/utils/subscription.py | 27
-rw-r--r--  app/utils/tests/test_bootimport.py | 242
-rw-r--r--  app/utils/tests/test_docimport.py | 177
-rw-r--r--  app/utils/tests/test_meta_parser.py | 80
-rw-r--r--  app/utils/tests/test_validator.py | 247
-rw-r--r--  app/utils/validator.py | 155
-rw-r--r--  doc/collection-batch.rst (renamed from doc/batch-collection.rst) | 0
-rw-r--r--  doc/collection-boot.rst (renamed from doc/boot-collection.rst) | 36
-rw-r--r--  doc/collection-count.rst (renamed from doc/count-collection.rst) | 6
-rw-r--r--  doc/collection-defconfig.rst (renamed from doc/defconfig-collection.rst) | 11
-rw-r--r--  doc/collection-job.rst (renamed from doc/job-collection.rst) | 7
-rw-r--r--  doc/collection-lab.rst | 183
-rw-r--r--  doc/collection-version.rst | 62
-rw-r--r--  doc/collections.rst | 12
-rw-r--r--  doc/conf.py | 2
-rw-r--r--  doc/examples.rst | 108
-rw-r--r--  doc/schema-boot.rst | 269
-rw-r--r--  doc/schema-defconfig.rst | 270
-rw-r--r--  doc/schema-job.rst | 54
-rw-r--r--  doc/schema-lab.rst | 246
-rw-r--r--  doc/schema-token.rst | 9
-rw-r--r--  doc/schema.rst | 1
-rw-r--r--  requirements.txt | 19
88 files changed, 7524 insertions, 3121 deletions
diff --git a/.gitignore b/.gitignore
index 52e4e61..36893a1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
*.pyc
*.pyo
+.coverage
diff --git a/ansible/host_vars/api.armcloud.us b/ansible/host_vars/api.armcloud.us
index 5f3ddcd..a496f9c 100644
--- a/ansible/host_vars/api.armcloud.us
+++ b/ansible/host_vars/api.armcloud.us
@@ -1,2 +1,2 @@
hostname: api.armcloud.us
-role: staging
+role: production
diff --git a/ansible/host_vars/staging.api.armcloud.us b/ansible/host_vars/staging.api.armcloud.us
new file mode 100644
index 0000000..355b1f4
--- /dev/null
+++ b/ansible/host_vars/staging.api.armcloud.us
@@ -0,0 +1,2 @@
+hostname: staging.api.armcloud.us
+role: staging
diff --git a/ansible/hosts b/ansible/hosts
index ed911bf..7f42b27 100644
--- a/ansible/hosts
+++ b/ansible/hosts
@@ -1,2 +1,3 @@
[all]
api.armcloud.us
+staging.api.armcloud.us
diff --git a/ansible/roles/configure-nginx/templates/api.armcloud.us b/ansible/roles/configure-nginx/templates/api.armcloud.us
index 88fa10e..8ad5e72 100644
--- a/ansible/roles/configure-nginx/templates/api.armcloud.us
+++ b/ansible/roles/configure-nginx/templates/api.armcloud.us
@@ -27,7 +27,7 @@ server {
add_header Cache-Control "public";
}
- location ~* /(?:(count|job|defconfig|boot|token|batch|bisect)(.*)(?!(\.(html?|json|css|js))))$ {
+ location ~* /(?:(count|job|defconfig|boot|token|batch|bisect|lab)(.*)(?!(\.(html?|json|css|js))))$ {
# Proxy cache, disabled for now.
# proxy_cache BACKEND;
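The only change to the production template is `lab` being added to the alternation in the proxied-locations regex, so requests under /lab are now passed to the backends as well. An illustrative way to check what the pattern captures is to exercise it with Python's re module (PCRE and Python syntax agree for this alternation and lookahead; the sample paths are hypothetical):

    import re

    PATTERN = re.compile(
        r"/(?:(count|job|defconfig|boot|token|batch|bisect|lab)"
        r"(.*)(?!(\.(html?|json|css|js))))$",
        re.IGNORECASE,
    )

    for path in ("/lab", "/boot/omap4-panda", "/version"):
        match = PATTERN.search(path)
        print("%s -> %s" % (path, "proxied" if match else "not proxied"))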
diff --git a/ansible/roles/configure-nginx/templates/staging.api.armcloud.us b/ansible/roles/configure-nginx/templates/staging.api.armcloud.us
new file mode 100644
index 0000000..8ad5e72
--- /dev/null
+++ b/ansible/roles/configure-nginx/templates/staging.api.armcloud.us
@@ -0,0 +1,53 @@
+server {
+ listen 80;
+ server_name {{ hostname }};
+ root {{ web_root }}/{{ hostname }};
+ charset utf-8;
+
+ access_log /var/log/nginx/{{ hostname }}-access.log;
+ error_log /var/log/nginx/{{ hostname }}-error.log;
+
+ location / {
+ if (-f $document_root/maintenance.html) {
+ return 503;
+ }
+
+ autoindex off;
+ index index.html;
+ }
+
+ error_page 503 @maintenance;
+ location @maintenance {
+ rewrite ^(.*)$ /maintenance.html break;
+ }
+
+ location ~* \.(?:ico|css|js|gif|jpe?g|png)$ {
+ expires 150d;
+ add_header Pragma public;
+ add_header Cache-Control "public";
+ }
+
+ location ~* /(?:(count|job|defconfig|boot|token|batch|bisect|lab)(.*)(?!(\.(html?|json|css|js))))$ {
+
+ # Proxy cache, disabled for now.
+ # proxy_cache BACKEND;
+ # proxy_cache_key $proxy_host$uri$is_args$args;
+ # proxy_cache_lock on;
+ # proxy_cache_lock_timeout 2s;
+ # proxy_cache_valid 3h;
+
+ proxy_buffers 32 4k;
+ proxy_buffer_size 8k;
+ proxy_busy_buffers_size 64k;
+ proxy_pass_header Server;
+ proxy_set_header Host $http_host;
+ proxy_redirect off;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_set_header X-Scheme $scheme;
+ proxy_connect_timeout 9s;
+ proxy_send_timeout 9s;
+ proxy_read_timeout 9s;
+ proxy_pass http://backends;
+ }
+}
diff --git a/ansible/roles/install-app/tasks/main.yml b/ansible/roles/install-app/tasks/main.yml
index e5f8201..2c427f2 100644
--- a/ansible/roles/install-app/tasks/main.yml
+++ b/ansible/roles/install-app/tasks/main.yml
@@ -34,6 +34,7 @@
- name: Install pip requirements
pip: requirements={{ install_base }}/{{ hostname }}/requirements.txt
virtualenv={{ install_base }}/.venv/{{ hostname }}
+ extra_args="--upgrade"
notify:
- restart-kernel-ci-backend
- restart-celery
diff --git a/app/.coveragerc b/app/.coveragerc
index 456e1d1..433e3b2 100644
--- a/app/.coveragerc
+++ b/app/.coveragerc
@@ -5,6 +5,7 @@ omit =
server.py
setup*
*/tests/*
+ utils/scripts/*
[report]
precision = 2
diff --git a/app/__init__.py b/app/__init__.py
index 8d753dd..e69de29 100644
--- a/app/__init__.py
+++ b/app/__init__.py
@@ -1,2 +0,0 @@
-__version__ = "2014.10"
-__versionfull__ = __version__
diff --git a/app/handlers/__init__.py b/app/handlers/__init__.py
index e69de29..8aced47 100644
--- a/app/handlers/__init__.py
+++ b/app/handlers/__init__.py
@@ -0,0 +1,2 @@
+__version__ = "2014.11"
+__versionfull__ = __version__
diff --git a/app/handlers/app.py b/app/handlers/app.py
index b7c74c8..40a5d67 100644
--- a/app/handlers/app.py
+++ b/app/handlers/app.py
@@ -18,10 +18,10 @@ A very simple RequestHandler used as the default one for the Tornado
application.
"""
-from tornado.web import RequestHandler
+import tornado.web
-class AppHandler(RequestHandler):
+class AppHandler(tornado.web.RequestHandler):
"""This handler is used to provide custom error messages.
It is used to provide JSON response on errors, and the only implemented
diff --git a/app/handlers/base.py b/app/handlers/base.py
index e4f2082..7c6c064 100644
--- a/app/handlers/base.py
+++ b/app/handlers/base.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -15,41 +13,24 @@
"""The base RequestHandler that all subclasses should inherit from."""
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import bson
+import functools
import httplib
-import json
import tornado
+import tornado.web
-from bson.json_util import default
-from functools import partial
-from tornado.web import (
- RequestHandler,
- asynchronous,
-)
-
-from handlers.common import (
- ACCEPTED_CONTENT_TYPE,
- API_TOKEN_HEADER,
- DEFAULT_RESPONSE_TYPE,
- MASTER_KEY,
- NOT_VALID_TOKEN,
- get_all_query_values,
- get_query_fields,
- valid_token_general,
- validate_token,
-)
-from handlers.response import HandlerResponse
-from models import (
- DB_NAME,
- TOKEN_COLLECTION,
- TOKEN_KEY,
-)
-from utils.db import (
- aggregate,
- find_and_count,
- find_one,
-)
-from utils.log import get_log
-from utils.validator import is_valid_json
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import utils
+import utils.db
+import utils.log
+import utils.validator as validator
STATUS_MESSAGES = {
@@ -57,7 +38,7 @@ STATUS_MESSAGES = {
405: 'Operation not allowed',
415: (
'Please use "%s" as the default media type' %
- ACCEPTED_CONTENT_TYPE
+ hcommon.ACCEPTED_CONTENT_TYPE
),
420: 'No JSON data found',
500: 'Internal database error',
@@ -66,7 +47,7 @@ STATUS_MESSAGES = {
}
-class BaseHandler(RequestHandler):
+class BaseHandler(tornado.web.RequestHandler):
"""The base handler."""
def __init__(self, application, request, **kwargs):
@@ -93,7 +74,7 @@ class BaseHandler(RequestHandler):
db_pwd = db_options['dbpassword']
db_user = db_options['dbuser']
- self._db = client[DB_NAME]
+ self._db = client[models.DB_NAME]
if all([db_user, db_pwd]):
self._db.authenticate(db_user, password=db_pwd)
@@ -103,7 +84,7 @@ class BaseHandler(RequestHandler):
@property
def log(self):
"""The logger of this object."""
- return get_log(debug=self.settings['debug'])
+ return utils.log.get_log(debug=self.settings['debug'])
@staticmethod
def _valid_keys(method):
@@ -117,7 +98,7 @@ class BaseHandler(RequestHandler):
@staticmethod
def _token_validation_func():
- return valid_token_general
+ return hcommon.valid_token_general
def _get_status_message(self, status_code):
"""Get custom error message based on the status code.
@@ -144,11 +125,14 @@ class BaseHandler(RequestHandler):
headers = {}
result = {}
- if isinstance(response, HandlerResponse):
+ if isinstance(response, hresponse.HandlerResponse):
status_code = response.status_code
reason = response.reason or self._get_status_message(status_code)
headers = response.headers
- result = json.dumps(response.to_dict(), default=default)
+ result = json.dumps(
+ response.to_dict(),
+ default=bson.json_util.default, ensure_ascii=False
+ )
else:
status_code = 506
reason = self._get_status_message(status_code)
@@ -156,7 +140,7 @@ class BaseHandler(RequestHandler):
self.set_status(status_code=status_code, reason=reason)
self.write(result)
- self.set_header('Content-Type', DEFAULT_RESPONSE_TYPE)
+ self.set_header('Content-Type', hcommon.DEFAULT_RESPONSE_TYPE)
if headers:
for key, val in headers.iteritems():
@@ -175,7 +159,7 @@ class BaseHandler(RequestHandler):
if 'Content-Type' in self.request.headers.keys():
if self.request.headers['Content-Type'] == \
- ACCEPTED_CONTENT_TYPE:
+ hcommon.ACCEPTED_CONTENT_TYPE:
valid_content = True
else:
self.log.error(
@@ -186,13 +170,13 @@ class BaseHandler(RequestHandler):
return valid_content
- @asynchronous
+ @tornado.web.asynchronous
def post(self, *args, **kwargs):
self.executor.submit(
- partial(self.execute_post, *args, **kwargs)
+ functools.partial(self.execute_post, *args, **kwargs)
).add_done_callback(
lambda future: tornado.ioloop.IOLoop.instance().add_callback(
- partial(self._create_valid_response, future.result())
+ functools.partial(self._create_valid_response, future.result())
)
)
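This submit/callback shape is the heart of the handler: the blocking work (execute_post and friends, which talk to MongoDB) runs on the handler's executor, and only the final _create_valid_response hops back onto Tornado's IOLoop thread. A minimal, self-contained sketch of the same pattern, assuming a ThreadPoolExecutor (the futures backport on Python 2) stands in for self.executor:

    import functools

    import tornado.ioloop
    import tornado.web
    from concurrent.futures import ThreadPoolExecutor


    class SketchHandler(tornado.web.RequestHandler):

        executor = ThreadPoolExecutor(max_workers=4)

        @tornado.web.asynchronous
        def get(self, *args, **kwargs):
            # Run the blocking work off the IOLoop thread...
            future = self.executor.submit(self._blocking_work)
            # ...then re-enter the IOLoop to write the response out.
            future.add_done_callback(
                lambda f: tornado.ioloop.IOLoop.instance().add_callback(
                    functools.partial(self._write_response, f.result())
                )
            )

        def _blocking_work(self):
            return {"code": 200}  # e.g. a slow MongoDB query

        def _write_response(self, result):
            self.write(result)
            self.finish()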
@@ -210,27 +194,37 @@ class BaseHandler(RequestHandler):
try:
json_obj = json.loads(self.request.body.decode('utf8'))
- if is_valid_json(json_obj, self._valid_keys('POST')):
+ valid_json, j_reason = validator.is_valid_json(
+ json_obj, self._valid_keys("POST")
+ )
+ if valid_json:
kwargs['json_obj'] = json_obj
kwargs['db_options'] = self.settings['dboptions']
+ kwargs['reason'] = j_reason
response = self._post(*args, **kwargs)
else:
- response = HandlerResponse(400)
- response.reason = "Provided JSON is not valid"
+ response = hresponse.HandlerResponse(400)
+ if j_reason:
+ response.reason = (
+ "Provided JSON is not valid: %s" % j_reason
+ )
+ else:
+ response.reason = "Provided JSON is not valid"
response.result = None
- except ValueError:
+ except ValueError, ex:
+ self.log.exception(ex)
error = "No JSON data found in the POST request"
self.log.error(error)
- response = HandlerResponse(422)
+ response = hresponse.HandlerResponse(422)
response.reason = error
response.result = None
else:
- response = HandlerResponse(valid_request)
+ response = hresponse.HandlerResponse(valid_request)
response.reason = self._get_status_message(valid_request)
response.result = None
else:
- response = HandlerResponse(403)
- response.reason = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
@@ -262,16 +256,16 @@ class BaseHandler(RequestHandler):
:return A `HandlerResponse` object.
"""
- return HandlerResponse(501)
+ return hresponse.HandlerResponse(501)
- @asynchronous
+ @tornado.web.asynchronous
def delete(self, *args, **kwargs):
self.executor.submit(
- partial(self.execute_delete, *args, **kwargs)
+ functools.partial(self.execute_delete, *args, **kwargs)
).add_done_callback(
lambda future:
tornado.ioloop.IOLoop.instance().add_callback(
- partial(self._create_valid_response, future.result())
+ functools.partial(self._create_valid_response, future.result())
)
)
@@ -286,12 +280,12 @@ class BaseHandler(RequestHandler):
if kwargs and kwargs.get('id', None):
response = self._delete(kwargs['id'])
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
response.reason = self._get_status_message(400)
response.result = None
else:
- response = HandlerResponse(403)
- response.status = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
@@ -307,16 +301,16 @@ class BaseHandler(RequestHandler):
:param doc_id: The ID of the document to delete.
:return A `HandlerResponse` object.
"""
- return HandlerResponse(501)
+ return hresponse.HandlerResponse(501)
- @asynchronous
+ @tornado.web.asynchronous
def get(self, *args, **kwargs):
self.executor.submit(
- partial(self.execute_get, *args, **kwargs)
+ functools.partial(self.execute_get, *args, **kwargs)
).add_done_callback(
lambda future:
tornado.ioloop.IOLoop.instance().add_callback(
- partial(self._create_valid_response, future.result())
+ functools.partial(self._create_valid_response, future.result())
)
)
@@ -334,8 +328,8 @@ class BaseHandler(RequestHandler):
else:
response = self._get(**kwargs)
else:
- response = HandlerResponse(403)
- response.reason = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
@@ -349,20 +343,29 @@ class BaseHandler(RequestHandler):
:return A `HandlerResponse` object.
"""
- response = HandlerResponse()
- result = find_one(
- self.collection,
- doc_id,
- fields=get_query_fields(self.get_query_arguments)
- )
+ response = hresponse.HandlerResponse()
+ result = None
- if result:
- # result here is returned as a dictionary from mongodb
- response.result = result
- else:
- response.status_code = 404
- response.reason = "Resource '%s' not found" % doc_id
- response.result = None
+ try:
+ obj_id = bson.objectid.ObjectId(doc_id)
+ result = utils.db.find_one(
+ self.collection,
+ [obj_id],
+ fields=hcommon.get_query_fields(self.get_query_arguments)
+ )
+
+ if result:
+ # result here is returned as a dictionary from mongodb
+ response.result = result
+ else:
+ response.status_code = 404
+ response.reason = "Resource '%s' not found" % doc_id
+ response.result = None
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error("Provided doc ID '%s' is not valid", doc_id)
+ response.status_code = 400
+ response.status = "Wrong ID value provided"
return response
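The rework above exists because MongoDB stores _id values as ObjectId instances: a raw ID string taken from the URL will never match directly, and a malformed string raises bson.errors.InvalidId. An illustrative snippet of both behaviours (the database and collection names are assumptions, and a running MongoDB is required):

    import bson
    import pymongo

    collection = pymongo.MongoClient()["kernel-ci"]["job"]

    doc_id = "5480d1adb1b5e10a7a3f1b3a"  # hypothetical 24-char hex ID

    # Querying with the raw string silently matches nothing...
    print(collection.find_one({"_id": doc_id}))  # None

    # ...while converting first makes the lookup meaningful, and bad
    # input fails loudly instead of returning an empty result.
    try:
        obj_id = bson.objectid.ObjectId(doc_id)
        print(collection.find_one({"_id": obj_id}))
    except bson.errors.InvalidId:
        print("Malformed ID string")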
@@ -378,11 +381,11 @@ class BaseHandler(RequestHandler):
:return A `HandlerResponse` object.
"""
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
spec, sort, fields, skip, limit, unique = self._get_query_args()
if unique:
- response.result = aggregate(
+ response.result = utils.db.aggregate(
self.collection,
unique,
sort=sort,
@@ -391,7 +394,7 @@ class BaseHandler(RequestHandler):
limit=limit
)
else:
- result, count = find_and_count(
+ result, count = utils.db.find_and_count(
self.collection,
limit,
skip,
@@ -428,9 +431,10 @@ class BaseHandler(RequestHandler):
unique = None
if self.request.arguments:
- spec, sort, fields, skip, limit, unique = get_all_query_values(
- self.get_query_arguments, self._valid_keys(method)
- )
+ spec, sort, fields, skip, limit, unique = \
+ hcommon.get_all_query_values(
+ self.get_query_arguments, self._valid_keys(method)
+ )
return (spec, sort, fields, skip, limit, unique)
@@ -456,9 +460,9 @@ class BaseHandler(RequestHandler):
"""
valid_token = False
- req_token = self.request.headers.get(API_TOKEN_HEADER, None)
+ req_token = self.get_request_token()
remote_ip = self.request.remote_ip
- master_key = self.settings.get(MASTER_KEY, None)
+ master_key = self.settings.get(hcommon.MASTER_KEY, None)
if req_token:
valid_token = self._token_validation(
@@ -473,12 +477,19 @@ class BaseHandler(RequestHandler):
return valid_token
+ def get_request_token(self):
+ """Retrieve the Authorization token of this request.
+
+ :return The authorization token as string.
+ """
+ return self.request.headers.get(hcommon.API_TOKEN_HEADER, None)
+
def _token_validation(self, req_token, method, remote_ip, master_key):
valid_token = False
token_obj = self._find_token(req_token, self.db)
if token_obj:
- valid_token = validate_token(
+ valid_token = hcommon.validate_token(
token_obj,
method,
remote_ip,
@@ -494,4 +505,6 @@ class BaseHandler(RequestHandler):
:param token: The token to find.
:return A json object, or nothing.
"""
- return find_one(db_conn[TOKEN_COLLECTION], [token], field=TOKEN_KEY)
+ return utils.db.find_one(
+ db_conn[models.TOKEN_COLLECTION], [token], field=models.TOKEN_KEY
+ )
diff --git a/app/handlers/batch.py b/app/handlers/batch.py
index 02474ad..9b841f8 100644
--- a/app/handlers/batch.py
+++ b/app/handlers/batch.py
@@ -13,20 +13,20 @@
"""The /batch RequestHandler to perform batch operations."""
-from json import loads as j_load
+try:
+ import simplejson as json
+except ImportError:
+ import json
-from handlers.base import BaseHandler
-from handlers.common import (
- BATCH_VALID_KEYS,
- NOT_VALID_TOKEN,
-)
-from handlers.response import HandlerResponse
-from models import BATCH_KEY
-from taskqueue.tasks import run_batch_group
-from utils.validator import is_valid_batch_json
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import taskqueue.tasks as taskq
+import utils.validator as validator
-class BatchHandler(BaseHandler):
+class BatchHandler(hbase.BaseHandler):
"""The batch URL handler class."""
def __init__(self, application, request, **kwargs):
@@ -35,13 +35,13 @@ class BatchHandler(BaseHandler):
@staticmethod
def _valid_keys(method):
- return BATCH_VALID_KEYS.get(method, None)
+ return hcommon.BATCH_VALID_KEYS.get(method, None)
def execute_get(self):
- return HandlerResponse(501)
+ return hresponse.HandlerResponse(501)
def execute_delete(self):
- return HandlerResponse(501)
+ return hresponse.HandlerResponse(501)
def execute_post(self):
response = None
@@ -51,32 +51,34 @@ class BatchHandler(BaseHandler):
if valid_request == 200:
try:
- json_obj = j_load(self.request.body.decode("utf8"))
+ json_obj = json.loads(self.request.body.decode("utf8"))
- if is_valid_batch_json(
- json_obj, BATCH_KEY, self._valid_keys("POST")):
- response = HandlerResponse(200)
+ if validator.is_valid_batch_json(
+ json_obj,
+ models.BATCH_KEY,
+ self._valid_keys("POST")):
+ response = hresponse.HandlerResponse(200)
response.result = \
self.prepare_and_perform_batch_ops(
json_obj, self.settings["dboptions"]
)
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
response.reason = "Provided JSON is not valid"
response.result = None
except ValueError:
error = "No JSON data found in the POST request"
self.log.error(error)
- response = HandlerResponse(422)
+ response = hresponse.HandlerResponse(422)
response.reason = error
response.result = None
else:
- response = HandlerResponse(valid_request)
+ response = hresponse.HandlerResponse(valid_request)
response.reason = self._get_status_message(valid_request)
response.result = None
else:
- response = HandlerResponse(403)
- response.reason = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
@@ -92,4 +94,6 @@ class BatchHandler(BaseHandler):
:param db_options: The mongodb database connection parameters.
:type db_options: dict
"""
- return run_batch_group(json_obj.get(BATCH_KEY), db_options)
+ return taskq.run_batch_group(
+ json_obj.get(models.BATCH_KEY), db_options
+ )
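A batch request is a JSON document whose batch list holds one operation per entry, each described through the keys validated by BATCH_VALID_KEYS (method, collection, query, operation ID, document ID); run_batch_group then executes the group through the task queue. A hedged client-side sketch, assuming the JSON key names mirror the model constants and that the endpoint and token shown are placeholders:

    import json
    import urllib2  # Python 2, matching the codebase

    payload = {
        "batch": [
            {
                "method": "GET",
                "collection": "count",
                "operation_id": "failed-boots",  # caller-chosen label
                "query": "status=FAIL&date_range=5",
            },
        ]
    }

    request = urllib2.Request(
        "https://api.armcloud.us/batch",
        data=json.dumps(payload),
        headers={
            "Authorization": "my-api-token",  # hypothetical token
            "Content-Type": "application/json",
        },
    )
    print(urllib2.urlopen(request).read())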
diff --git a/app/handlers/bisect.py b/app/handlers/bisect.py
index 1ac4e27..2a242f2 100644
--- a/app/handlers/bisect.py
+++ b/app/handlers/bisect.py
@@ -13,52 +13,39 @@
"""The request handler for bisect URLs."""
+import bson
+import functools
import tornado
+import tornado.web
-from functools import partial
-from tornado.web import asynchronous
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import taskqueue.tasks as taskt
+import utils.db
-from handlers.base import BaseHandler
-from handlers.common import (
- NOT_VALID_TOKEN,
- get_query_fields,
-)
-from handlers.response import HandlerResponse
-from taskqueue.tasks import boot_bisect
-from utils.db import find_one
-
-from models import (
- BISECT_COLLECTION,
- BOOT_COLLECTION,
- DOC_ID_KEY,
-)
-
-BISECT_COLLECTIONS = [
- BOOT_COLLECTION,
-]
-
-
-class BisectHandler(BaseHandler):
+class BisectHandler(hbase.BaseHandler):
"""Handler used to trigger bisect operations on the data."""
def __init__(self, application, request, **kwargs):
super(BisectHandler, self).__init__(application, request, **kwargs)
- @asynchronous
+ @tornado.web.asynchronous
def get(self, *args, **kwargs):
self.executor.submit(
- partial(self.execute_get, *args, **kwargs)
+ functools.partial(self.execute_get, *args, **kwargs)
).add_done_callback(
lambda future:
tornado.ioloop.IOLoop.instance().add_callback(
- partial(self._create_valid_response, future.result())
+ functools.partial(self._create_valid_response, future.result())
)
)
@property
def collection(self):
- return BISECT_COLLECTION
+ return models.BISECT_COLLECTION
def execute_get(self, *args, **kwargs):
"""This is the actual GET operation.
@@ -75,23 +62,37 @@ class BisectHandler(BaseHandler):
if all([collection, doc_id]):
fields = None
if self.request.arguments:
- fields = get_query_fields(self.get_query_arguments)
- bisect_result = find_one(
- self.db[self.collection], doc_id, field=DOC_ID_KEY,
- fields=fields
- )
- if bisect_result:
- response = HandlerResponse(200)
- response.result = bisect_result
- else:
- response = self._get_bisect(collection, doc_id, fields)
+ fields = hcommon.get_query_fields(
+ self.get_query_arguments
+ )
+ try:
+ obj_id = bson.objectid.ObjectId(doc_id)
+ bisect_result = utils.db.find_one(
+ self.db[self.collection],
+ [obj_id],
+ field=models.NAME_KEY,
+ fields=fields
+ )
+ if bisect_result:
+ response = hresponse.HandlerResponse(200)
+ response.result = bisect_result
+ else:
+ response = self._get_bisect(
+ collection, doc_id, fields
+ )
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error(
+ "Wrong ID '%s' value passed as object ID", doc_id)
+ response = hresponse.HandlerResponse(400)
+ response.reason = "Wrong ID value passed as object ID"
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
else:
- response = HandlerResponse(403)
- response.reason = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
@@ -109,13 +110,16 @@ class BisectHandler(BaseHandler):
"""
response = None
- if collection in BISECT_COLLECTIONS:
+ if collection in models.BISECT_VALID_COLLECTIONS:
db_options = self.settings["dboptions"]
- if collection == BOOT_COLLECTION:
+ if collection == models.BOOT_COLLECTION:
response = self.execute_boot_bisect(doc_id, db_options, fields)
+ elif collection == models.DEFCONFIG_COLLECTION:
+ response = self.execute_defconfig_bisect(
+ doc_id, db_options, fields)
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
response.reason = (
"Provided bisect collection '%s' is not valid" % collection
)
@@ -135,9 +139,9 @@ class BisectHandler(BaseHandler):
:type fields: list or dict
:return A `HandlerResponse` object.
"""
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
- result = boot_bisect.apply_async([doc_id, db_options, fields])
+ result = taskt.boot_bisect.apply_async([doc_id, db_options, fields])
while not result.ready():
pass
@@ -147,3 +151,32 @@ class BisectHandler(BaseHandler):
elif response.status_code == 400:
response.reason = "Boot report cannot be bisected: is it failed?"
return response
+
+ @staticmethod
+ def execute_defconfig_bisect(doc_id, db_options, fields=None):
+ """Execute the defconfig bisect operation.
+
+ :param doc_id: The ID of the document to execute the bisect on.
+ :type doc_id: str
+ :param db_options: The mongodb database connection parameters.
+ :type db_options: dict
+ :param fields: A `fields` data structure with the fields to return or
+ exclude. Default to None.
+ :type fields: list or dict
+ :return A `HandlerResponse` object.
+ """
+ response = hresponse.HandlerResponse()
+
+ result = taskt.defconfig_bisect.apply_async(
+ [doc_id, db_options, fields]
+ )
+ while not result.ready():
+ pass
+
+ response.status_code, response.result = result.get()
+ if response.status_code == 404:
+ response.reason = "Defconfig not found"
+ elif response.status_code == 400:
+ response.reason = "Defconfig cannot be bisected: is it failed?"
+
+ return response
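Both bisect paths share the same shape: dispatch a Celery task with apply_async, spin until result.ready(), then unpack (status_code, result) from result.get(). The busy-wait is tolerable only because the handler is already running on the executor thread, off the IOLoop. A stripped-down sketch of that dispatch/collect cycle, with a stub task and broker URL standing in for the real ones (a small sleep is added where the handler simply spins):

    import time

    from celery import Celery

    app = Celery("tasks", broker="amqp://localhost")  # hypothetical broker


    @app.task
    def boot_bisect_sketch(doc_id, db_options, fields=None):
        # Stand-in for the real bisect computation.
        return 200, {"doc_id": doc_id, "bisect_data": []}


    def run_bisect(doc_id):
        result = boot_bisect_sketch.apply_async([doc_id, {}, None])
        while not result.ready():
            time.sleep(0.1)
        return result.get()  # -> (status_code, result)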
diff --git a/app/handlers/boot.py b/app/handlers/boot.py
index 5f488e3..9f314aa 100644
--- a/app/handlers/boot.py
+++ b/app/handlers/boot.py
@@ -15,22 +15,19 @@
"""The RequestHandler for /boot URLs."""
-from handlers.base import BaseHandler
-from handlers.common import (
- BOOT_VALID_KEYS,
- NOT_VALID_TOKEN,
- get_query_spec,
-)
-from handlers.response import HandlerResponse
-from models import BOOT_COLLECTION
-from taskqueue.tasks import import_boot
-from utils.db import (
- delete,
- find_one,
-)
-
-
-class BootHandler(BaseHandler):
+import bson
+
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import models.lab as mlab
+import models.token as mtoken
+import taskqueue.tasks as taskq
+import utils.db
+
+
+class BootHandler(hbase.BaseHandler):
"""Handle the /boot URLs."""
def __init__(self, application, request, **kwargs):
@@ -38,36 +35,116 @@ class BootHandler(BaseHandler):
@property
def collection(self):
- return self.db[BOOT_COLLECTION]
+ return self.db[models.BOOT_COLLECTION]
@staticmethod
def _valid_keys(method):
- return BOOT_VALID_KEYS.get(method, None)
+ return hcommon.BOOT_VALID_KEYS.get(method, None)
+
+ @staticmethod
+ def _token_validation_func():
+ return hcommon.valid_token_bh
def _post(self, *args, **kwargs):
- response = HandlerResponse(202)
- response.reason = "Request accepted and being imported"
- response.result = None
+ req_token = self.get_request_token()
+ lab_name = kwargs["json_obj"].get(models.LAB_NAME_KEY, None)
+
+ if self._is_valid_token(req_token, lab_name):
+ response = hresponse.HandlerResponse(202)
+ if kwargs.get("reason", None):
+ response.reason = (
+ "Request accepted and being imported. WARNING: %s" %
+ kwargs["reason"]
+ )
+ else:
+ response.reason = "Request accepted and being imported"
+ response.result = None
- import_boot.apply_async([kwargs['json_obj'], kwargs['db_options']])
+ taskq.import_boot.apply_async(
+ [kwargs["json_obj"], kwargs["db_options"]]
+ )
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = (
+ "Provided authentication token is not associated with "
+ "lab '%s'" % lab_name
+ )
return response
+ def _is_valid_token(self, req_token, lab_name):
+ """Make sure the token used to perform the POST is valid.
+
+ We are being paranoid here. We need to make sure the token used to
+ post is really associated with the provided lab name.
+
+ To be valid for posting a boot report, the token must either be an
+ admin token or a valid token associated with the lab.
+
+ :param req_token: The token string from the request.
+ :type req_token: str
+ :param lab_name: The name of the lab to check.
+ :type lab_name: str
+ :return True if the token is valid, False otherwise.
+ """
+ valid_lab = False
+
+ lab_doc = utils.db.find_one(
+ self.db[models.LAB_COLLECTION], [lab_name], field=models.NAME_KEY
+ )
+
+ if lab_doc:
+ lab_token_doc = utils.db.find_one(
+ self.db[models.TOKEN_COLLECTION], [lab_doc[models.TOKEN_KEY]]
+ )
+
+ if lab_token_doc:
+ lab_token = mtoken.Token.from_json(lab_token_doc)
+ if all([req_token == lab_token.token, not lab_token.expired]):
+ valid_lab = True
+ elif all([lab_token.is_admin, not lab_token.expired]):
+ valid_lab = True
+ utils.LOG.warn(
+ "Received boot POST request from an admin token")
+ else:
+ utils.LOG.warn(
+ "Received token (%s) is not associated with lab '%s'",
+ req_token, lab_name
+ )
+
+ return valid_lab
+
def execute_delete(self, *args, **kwargs):
response = None
if self.validate_req_token("DELETE"):
- if kwargs and kwargs.get('id', None):
- doc_id = kwargs['id']
- if find_one(self.collection, doc_id):
- response = self._delete(doc_id)
- if response.status_code == 200:
- response.reason = "Resource '%s' deleted" % doc_id
- else:
- response = HandlerResponse(404)
- response.reason = "Resource '%s' not found" % doc_id
+ if kwargs and kwargs.get("id", None):
+ try:
+ doc_id = kwargs["id"]
+ obj_id = bson.objectid.ObjectId(doc_id)
+
+ boot_doc = utils.db.find_one(self.collection, [obj_id])
+ if boot_doc:
+ if self._valid_boot_delete_token(boot_doc):
+ response = self._delete(obj_id)
+ if response.status_code == 200:
+ response.reason = (
+ "Resource '%s' deleted" % doc_id)
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
+ else:
+ response = hresponse.HandlerResponse(404)
+ response.reason = "Resource '%s' not found" % doc_id
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error(
+ "Wrong ID '%s' value passed as object ID", doc_id
+ )
+ response = hresponse.HandlerResponse(400)
+ response.reason = "Wrong ID value passed as object ID"
else:
- spec = get_query_spec(
+ spec = hcommon.get_query_spec(
self.get_query_arguments, self._valid_keys("DELETE")
)
if spec:
@@ -77,22 +154,60 @@ class BootHandler(BaseHandler):
"Resources identified with '%s' deleted" % spec
)
else:
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
response.result = None
response.reason = (
"No valid data provided to execute a DELETE"
)
else:
- response = HandlerResponse(403)
- response.reason = NOT_VALID_TOKEN
+ response = hresponse.HandlerResponse(403)
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
+ def _valid_boot_delete_token(self, boot_doc):
+ """Make sure the token is an actual delete token.
+
+ This is an extra step in making sure the token is valid. A lab
+ token (the token used to send boot reports) can only be used to
+ delete boot reports belonging to its own lab.
+
+ :param boot_doc: The document to delete.
+ :type boot_doc: dict
+ :return True or False.
+ """
+ valid_token = True
+ req_token = self.get_request_token()
+ token = self._find_token(req_token, self.db)
+
+ if token:
+ token = mtoken.Token.from_json(token)
+
+ # Just need to check if it is a lab token. A validation has already
+ # occurred making sure it is a valid DELETE one. This is the extra step.
+ if token.is_lab_token:
+ # This is only valid if the lab matches.
+ valid_token = False
+
+ lab_doc = utils.db.find_one(
+ self.db[models.LAB_COLLECTION],
+ [boot_doc[models.LAB_NAME_KEY]],
+ field=models.NAME_KEY
+ )
+
+ if lab_doc:
+ lab_doc = mlab.LabDocument.from_json(lab_doc)
+
+ if lab_doc.token == token.id:
+ valid_token = True
+
+ return valid_token
+
def _delete(self, spec_or_id):
- response = HandlerResponse(200)
+ response = hresponse.HandlerResponse(200)
response.result = None
- response.status_code = delete(self.collection, spec_or_id)
+ response.status_code = utils.db.delete(self.collection, spec_or_id)
response.reason = self._get_status_message(response.status_code)
return response
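With this validation in place, a boot report can only be posted with the lab's own token (or an unexpired admin token), and the JSON body must carry the mandatory keys declared in BOOT_VALID_KEYS in handlers/common.py. An illustrative payload, where every value and the exact JSON spelling of the model constants are assumptions:

    import json

    boot_report = {
        "version": "1.0",
        "lab_name": "lab-example-01",  # must match the posting token's lab
        "job": "mainline",
        "kernel": "v3.18-rc7",
        "defconfig": "multi_v7_defconfig",
        "arch": "arm",
        "board": "omap4-panda",
        # Optional accepted keys (boot_time, boot_log, status, ...)
        # may follow.
    }
    print(json.dumps(boot_report, indent=2))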
diff --git a/app/handlers/common.py b/app/handlers/common.py
index 7241649..f3fc7cd 100644
--- a/app/handlers/common.py
+++ b/app/handlers/common.py
@@ -13,161 +13,288 @@
"""Set of common functions for all handlers."""
+import pymongo
import types
-from bson import tz_util
+from bson import (
+ objectid,
+ tz_util,
+)
from datetime import (
date,
datetime,
time,
timedelta,
)
-from pymongo import (
- ASCENDING,
- DESCENDING,
-)
-from models import (
- ADMIN_KEY,
- AGGREGATE_KEY,
- ARCHITECTURE_KEY,
- BOARD_KEY,
- BOOT_COLLECTION,
- COLLECTION_KEY,
- CREATED_KEY,
- DATE_RANGE_KEY,
- DEFCONFIG_COLLECTION,
- DEFCONFIG_KEY,
- DELETE_KEY,
- DOCUMENT_ID_KEY,
- EMAIL_KEY,
- ERRORS_KEY,
- EXPIRED_KEY,
- EXPIRES_KEY,
- FIELD_KEY,
- GET_KEY,
- IP_ADDRESS_KEY,
- IP_RESTRICTED,
- JOB_COLLECTION,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_KEY,
- LIMIT_KEY,
- METHOD_KEY,
- NOT_FIELD_KEY,
- OP_ID_KEY,
- POST_KEY,
- PRIVATE_KEY,
- PROPERTIES_KEY,
- QUERY_KEY,
- SKIP_KEY,
- SORT_KEY,
- SORT_ORDER_KEY,
- STATUS_KEY,
- SUPERUSER_KEY,
- TIME_KEY,
- TOKEN_KEY,
- USERNAME_KEY,
- WARNINGS_KEY,
-)
-from models.token import Token
-from utils import get_log
+import models
+import models.token as mtoken
+import utils
# Default value to calculate a date range in case the provided value is
# out of range.
DEFAULT_DATE_RANGE = 5
-LOG = get_log()
-
# All the available collections as key-value. The key is the same used for the
# URL configuration.
COLLECTIONS = {
- 'boot': BOOT_COLLECTION,
- 'defconfig': DEFCONFIG_COLLECTION,
- 'job': JOB_COLLECTION,
+ 'boot': models.BOOT_COLLECTION,
+ 'defconfig': models.DEFCONFIG_COLLECTION,
+ 'job': models.JOB_COLLECTION,
}
# Handlers valid keys.
BOOT_VALID_KEYS = {
- 'POST': [JOB_KEY, KERNEL_KEY],
+ 'POST': {
+ models.MANDATORY_KEYS: [
+ models.ARCHITECTURE_KEY,
+ models.BOARD_KEY,
+ models.DEFCONFIG_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.LAB_NAME_KEY,
+ models.VERSION_KEY,
+ ],
+ models.ACCEPTED_KEYS: [
+ models.ARCHITECTURE_KEY,
+ models.BOARD_INSTANCE_KEY,
+ models.BOARD_KEY,
+ models.BOOT_LOAD_ADDR_KEY,
+ models.BOOT_LOG_HTML_KEY,
+ models.BOOT_LOG_KEY,
+ models.BOOT_RESULT_DESC_KEY,
+ models.BOOT_RESULT_KEY,
+ models.BOOT_RETRIES_KEY,
+ models.BOOT_TIME_KEY,
+ models.BOOT_WARNINGS_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_KEY,
+ models.DTB_ADDR_KEY,
+ models.DTB_APPEND_KEY,
+ models.DTB_KEY,
+ models.EMAIL_KEY,
+ models.ENDIANNESS_KEY,
+ models.FASTBOOT_CMD_KEY,
+ models.FASTBOOT_KEY,
+ models.FILE_SERVER_RESOURCE_KEY,
+ models.FILE_SERVER_URL_KEY,
+ models.GIT_BRANCH_KEY,
+ models.GIT_COMMIT_KEY,
+ models.GIT_DESCRIBE_KEY,
+ models.GIT_URL_KEY,
+ models.ID_KEY,
+ models.INITRD_ADDR_KEY,
+ models.INITRD_KEY,
+ models.JOB_KEY,
+ models.KERNEL_IMAGE_KEY,
+ models.KERNEL_KEY,
+ models.LAB_NAME_KEY,
+ models.NAME_KEY,
+ models.STATUS_KEY,
+ models.VERSION_KEY
+ ]
+ },
'GET': [
- CREATED_KEY, WARNINGS_KEY, JOB_ID_KEY, BOARD_KEY,
- JOB_KEY, KERNEL_KEY, DEFCONFIG_KEY, TIME_KEY, STATUS_KEY,
+ models.ARCHITECTURE_KEY,
+ models.BOARD_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_ID_KEY,
+ models.DEFCONFIG_KEY,
+ models.ENDIANNESS_KEY,
+ models.GIT_BRANCH_KEY,
+ models.GIT_COMMIT_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.LAB_NAME_KEY,
+ models.NAME_KEY,
+ models.STATUS_KEY,
+ models.WARNINGS_KEY,
],
'DELETE': [
- JOB_KEY, KERNEL_KEY, DEFCONFIG_KEY, BOARD_KEY, JOB_ID_KEY
+ models.BOARD_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_ID_KEY,
+ models.DEFCONFIG_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.NAME_KEY,
]
}
COUNT_VALID_KEYS = {
'GET': [
- ARCHITECTURE_KEY,
- BOARD_KEY,
- CREATED_KEY,
- DEFCONFIG_KEY,
- ERRORS_KEY,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_KEY,
- PRIVATE_KEY,
- STATUS_KEY,
- TIME_KEY,
- WARNINGS_KEY,
+ models.ARCHITECTURE_KEY,
+ models.BOARD_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_ID_KEY,
+ models.DEFCONFIG_KEY,
+ models.ERRORS_KEY,
+ models.GIT_BRANCH_KEY,
+ models.GIT_COMMIT_KEY,
+ models.GIT_DESCRIBE_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_CONFIG_KEY,
+ models.KERNEL_IMAGE_KEY,
+ models.KERNEL_KEY,
+ models.MODULES_DIR_KEY,
+ models.MODULES_KEY,
+ models.NAME_KEY,
+ models.PRIVATE_KEY,
+ models.STATUS_KEY,
+ models.SYSTEM_MAP_KEY,
+ models.TEXT_OFFSET_KEY,
+ models.TIME_KEY,
+ models.WARNINGS_KEY,
],
}
DEFCONFIG_VALID_KEYS = {
'GET': [
- DEFCONFIG_KEY, WARNINGS_KEY, ERRORS_KEY, ARCHITECTURE_KEY,
- JOB_KEY, KERNEL_KEY, STATUS_KEY, JOB_ID_KEY, CREATED_KEY,
+ models.ARCHITECTURE_KEY,
+ models.BUILD_LOG_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_KEY,
+ models.DIRNAME_KEY,
+ models.ERRORS_KEY,
+ models.GIT_BRANCH_KEY,
+ models.GIT_COMMIT_KEY,
+ models.GIT_DESCRIBE_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KCONFIG_FRAGMENTS_KEY,
+ models.KERNEL_CONFIG_KEY,
+ models.KERNEL_IMAGE_KEY,
+ models.KERNEL_KEY,
+ models.MODULES_DIR_KEY,
+ models.MODULES_KEY,
+ models.NAME_KEY,
+ models.STATUS_KEY,
+ models.SYSTEM_MAP_KEY,
+ models.TEXT_OFFSET_KEY,
+ models.WARNINGS_KEY,
],
}
TOKEN_VALID_KEYS = {
'POST': [
- ADMIN_KEY,
- DELETE_KEY,
- EMAIL_KEY,
- EXPIRES_KEY,
- GET_KEY,
- IP_ADDRESS_KEY,
- IP_RESTRICTED,
- POST_KEY,
- SUPERUSER_KEY,
- USERNAME_KEY,
+ models.ADMIN_KEY,
+ models.DELETE_KEY,
+ models.EMAIL_KEY,
+ models.EXPIRES_KEY,
+ models.GET_KEY,
+ models.IP_ADDRESS_KEY,
+ models.IP_RESTRICTED,
+ models.LAB_KEY,
+ models.NAME_KEY,
+ models.POST_KEY,
+ models.SUPERUSER_KEY,
+ models.USERNAME_KEY
],
'GET': [
- CREATED_KEY,
- EMAIL_KEY,
- EXPIRED_KEY,
- EXPIRES_KEY,
- IP_ADDRESS_KEY,
- PROPERTIES_KEY,
- TOKEN_KEY,
- USERNAME_KEY,
+ models.CREATED_KEY,
+ models.EMAIL_KEY,
+ models.EXPIRED_KEY,
+ models.EXPIRES_KEY,
+ models.ID_KEY,
+ models.IP_ADDRESS_KEY,
+ models.NAME_KEY,
+ models.PROPERTIES_KEY,
+ models.TOKEN_KEY,
+ models.USERNAME_KEY
],
}
SUBSCRIPTION_VALID_KEYS = {
- 'GET': [JOB_KEY],
- 'POST': [JOB_KEY, EMAIL_KEY],
- 'DELETE': [EMAIL_KEY],
+ 'GET': [
+ models.JOB_KEY
+ ],
+ 'POST': [
+ models.EMAIL_KEY,
+ models.JOB_KEY,
+ ],
+ 'DELETE': [
+ models.EMAIL_KEY
+ ],
}
JOB_VALID_KEYS = {
- 'POST': [JOB_KEY, KERNEL_KEY],
+ 'POST': [
+ models.JOB_KEY,
+ models.KERNEL_KEY
+ ],
'GET': [
- JOB_KEY, KERNEL_KEY, STATUS_KEY, PRIVATE_KEY, CREATED_KEY,
+ models.CREATED_KEY,
+ models.ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.NAME_KEY,
+ models.PRIVATE_KEY,
+ models.STATUS_KEY,
],
}
BATCH_VALID_KEYS = {
"POST": [
- METHOD_KEY, COLLECTION_KEY, QUERY_KEY, OP_ID_KEY,
- DOCUMENT_ID_KEY
+ models.COLLECTION_KEY,
+ models.DOCUMENT_ID_KEY,
+ models.METHOD_KEY,
+ models.OP_ID_KEY,
+ models.QUERY_KEY,
]
}
+LAB_VALID_KEYS = {
+ "POST": {
+ models.MANDATORY_KEYS: [
+ models.CONTACT_KEY,
+ models.NAME_KEY,
+ ],
+ models.ACCEPTED_KEYS: [
+ models.ADDRESS_KEY,
+ models.CONTACT_KEY,
+ models.NAME_KEY,
+ models.PRIVATE_KEY,
+ models.TOKEN_KEY,
+ models.VERSION_KEY,
+ ]
+ },
+ "GET": [
+ models.ADDRESS_KEY,
+ models.CONTACT_KEY,
+ models.CREATED_KEY,
+ models.ID_KEY,
+ models.NAME_KEY,
+ models.PRIVATE_KEY,
+ models.TOKEN_KEY,
+ models.UPDATED_KEY,
+ ],
+ "DELETE": [
+ models.ADDRESS_KEY,
+ models.CONTACT_KEY,
+ models.ID_KEY,
+ models.NAME_KEY,
+ models.TOKEN_KEY,
+ ]
+}
+
+ID_KEYS = [
+ models.BOOT_ID_KEY,
+ models.DEFCONFIG_ID_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.LAB_ID_KEY,
+]
+
MASTER_KEY = 'master_key'
API_TOKEN_HEADER = 'Authorization'
ACCEPTED_CONTENT_TYPE = 'application/json'
@@ -186,7 +313,9 @@ def get_all_query_values(query_args_func, valid_keys):
:type valid_keys: list
"""
spec = get_query_spec(query_args_func, valid_keys)
- spec = get_and_add_date_range(spec, query_args_func)
+
+ get_and_add_date_range(spec, query_args_func)
+ update_id_fields(spec)
sort = get_query_sort(query_args_func)
fields = get_query_fields(query_args_func)
@@ -196,6 +325,21 @@ def get_all_query_values(query_args_func, valid_keys):
return (spec, sort, fields, skip, limit, unique)
+def update_id_fields(spec):
+ """Make sure ID fields are treated correctly.
+
+ If we search on an ID field, either _id itself or a reference like
+ job_id, that points to a real _id in mongodb, we need to make sure it
+ is treated as such: mongodb stores these values as ObjectId elements.
+
+ :param spec: The spec data structure with the parameters to check.
+ """
+ if spec:
+ common_keys = list(set(ID_KEYS) & set(spec.viewkeys()))
+ for key in common_keys:
+ spec[key] = objectid.ObjectId(spec[key])
+
+
def get_aggregate_value(query_args_func):
"""Get teh value of the aggregate key.
@@ -204,7 +348,7 @@ def get_aggregate_value(query_args_func):
:type query_args_func: function
:return The aggregate value as string.
"""
- aggregate = query_args_func(AGGREGATE_KEY)
+ aggregate = query_args_func(models.AGGREGATE_KEY)
if all([aggregate and isinstance(aggregate, types.ListType)]):
aggregate = aggregate[-1]
else:
@@ -276,7 +420,7 @@ def get_and_add_date_range(spec, query_args_func):
:type query_args_func: function
:return The passed `spec` updated.
"""
- date_range = query_args_func(DATE_RANGE_KEY)
+ date_range = query_args_func(models.DATE_RANGE_KEY)
if date_range:
# Today needs to be set at the end of the day!
today = datetime.combine(
@@ -284,7 +428,7 @@ def get_and_add_date_range(spec, query_args_func):
)
previous = calculate_date_range(date_range)
- spec[CREATED_KEY] = {'$gte': previous, '$lt': today}
+ spec[models.CREATED_KEY] = {'$gte': previous, '$lt': today}
return spec
@@ -307,7 +451,9 @@ def calculate_date_range(date_range):
try:
date_range = int(date_range)
except ValueError:
- LOG.error("Wrong value passed to date_range: %s", date_range)
+ utils.LOG.error(
+ "Wrong value passed to date_range: %s", date_range
+ )
date_range = DEFAULT_DATE_RANGE
date_range = abs(date_range)
@@ -335,7 +481,9 @@ def get_query_fields(query_args_func):
:return A `fields` data structure (list or dictionary).
"""
fields = None
- y_fields, n_fields = map(query_args_func, [FIELD_KEY, NOT_FIELD_KEY])
+ y_fields, n_fields = map(
+ query_args_func, [models.FIELD_KEY, models.NOT_FIELD_KEY]
+ )
if y_fields and not n_fields:
fields = list(set(y_fields))
@@ -360,21 +508,24 @@ def get_query_sort(query_args_func):
:return A `sort` data structure, or None.
"""
sort = None
- sort_fields, sort_order = map(query_args_func, [SORT_KEY, SORT_ORDER_KEY])
+ sort_fields, sort_order = map(
+ query_args_func, [models.SORT_KEY, models.SORT_ORDER_KEY]
+ )
if sort_fields:
if all([sort_order, isinstance(sort_order, types.ListType)]):
sort_order = int(sort_order[-1])
else:
- sort_order = DESCENDING
+ sort_order = pymongo.DESCENDING
# Wrong number for sort order? Force descending.
- if all([sort_order != ASCENDING, sort_order != DESCENDING]):
- LOG.warn(
+ if all([sort_order != pymongo.ASCENDING,
+ sort_order != pymongo.DESCENDING]):
+ utils.LOG.warn(
"Wrong sort order used (%d), default to %d",
- sort_order, DESCENDING
+ sort_order, pymongo.DESCENDING
)
- sort_order = DESCENDING
+ sort_order = pymongo.DESCENDING
sort = [
(field, sort_order)
@@ -392,7 +543,7 @@ def get_skip_and_limit(query_args_func):
:type query_args_func: function
:return A tuple with the `skip` and `limit` arguments.
"""
- skip, limit = map(query_args_func, [SKIP_KEY, LIMIT_KEY])
+ skip, limit = map(query_args_func, [models.SKIP_KEY, models.LIMIT_KEY])
if all([skip, isinstance(skip, types.ListType)]):
skip = int(skip[-1])
@@ -410,6 +561,9 @@ def get_skip_and_limit(query_args_func):
def valid_token_general(token, method):
"""Make sure the token can be used for an HTTP method.
+ For DELETE requests, if the token is a lab token, the request will be
+ refused. The lab token can be used only to delete boot reports.
+
:param token: The Token object to validate.
:param method: The HTTP verb this token is being validated for.
:return True or False.
@@ -420,7 +574,29 @@ def valid_token_general(token, method):
valid_token = True
elif method == "POST" and token.is_post_token:
valid_token = True
- elif method == "DELETE" and token.is_delete_token:
+ elif all([method == "DELETE", token.is_delete_token]):
+ if not token.is_lab_token:
+ valid_token = True
+
+ return valid_token
+
+
+def valid_token_bh(token, method):
+ """Make sure the token is a valid token for the `BootHandler`.
+
+ This is a special case to handle a lab token (a token associated with a lab).
+
+ :param token: The Token object to validate.
+ :param method: The HTTP verb this token is being validated for.
+ :return True or False.
+ """
+ valid_token = False
+
+ if all([method == "GET", token.is_get_token]):
+ valid_token = True
+ elif all([method == "POST", token.is_post_token]):
+ valid_token = True
+ elif all([method == "DELETE", token.is_delete_token]):
valid_token = True
return valid_token
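The difference between the two validators is entirely about lab tokens: both accept the usual GET/POST/DELETE capabilities, but only the boot-handler variant lets a lab token through on DELETE, since lab tokens may only delete boot reports. A condensed, self-contained restatement, with a stub standing in for models.token.Token:

    class StubToken(object):
        # Minimal stand-in for the Token model's properties.
        def __init__(self, is_lab_token=False):
            self.is_get_token = True
            self.is_post_token = True
            self.is_delete_token = True
            self.is_lab_token = is_lab_token


    def valid_token_general(token, method):
        if method == "GET":
            return token.is_get_token
        if method == "POST":
            return token.is_post_token
        if method == "DELETE":
            # Lab tokens may only delete boot reports: refused here.
            return token.is_delete_token and not token.is_lab_token
        return False


    def valid_token_bh(token, method):
        if method == "DELETE":
            # The boot handler does accept a lab DELETE token.
            return token.is_delete_token
        return valid_token_general(token, method)


    lab_token = StubToken(is_lab_token=True)
    print(valid_token_general(lab_token, "DELETE"))  # False
    print(valid_token_bh(lab_token, "DELETE"))       # True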
@@ -440,7 +616,7 @@ def valid_token_th(token, method):
if token.is_admin:
valid_token = True
- elif token.is_superuser and method == "GET":
+ elif all([token.is_superuser, method == "GET"]):
valid_token = True
return valid_token
@@ -453,16 +629,16 @@ def validate_token(token_obj, method, remote_ip, validate_func):
:param method: The HTTP verb this token is being validated for.
:param remote_ip: The remote IP address sending the token.
:param validate_func: Function called to validate the token, must accept
- a Token object, the method string and kwargs.
+ a Token object and the method string.
:return True or False.
"""
valid_token = True
if token_obj:
- token = Token.from_json(token_obj)
+ token = mtoken.Token.from_json(token_obj)
- if not isinstance(token, Token):
- LOG.error("Retrieved token is not a Token object")
+ if not isinstance(token, mtoken.Token):
+ utils.LOG.error("Retrieved token is not a Token object")
valid_token = False
else:
valid_token &= validate_func(token, method)
@@ -483,14 +659,19 @@ def _valid_token_ip(token, remote_ip):
:param remote_ip: The remote IP address sending the token.
:return True or False.
"""
- valid_token = True
+ valid_token = False
- # TODO: what if we have a pool of IPs for the token?
- if token.ip_address != remote_ip:
- LOG.info(
- "IP restricted token from wrong IP address: %s",
- remote_ip
- )
- valid_token = False
+ if remote_ip:
+ remote_ip = mtoken.convert_ip_address(remote_ip)
+
+ if remote_ip in token.ip_address:
+ valid_token = True
+ else:
+ utils.LOG.warn(
+ "IP restricted token from wrong IP address: %s",
+ remote_ip
+ )
+ else:
+ utils.LOG.info("No remote IP address provided, cannot validate token")
return valid_token
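The reworked IP check flips the default to deny: with no remote address there is nothing to validate, and otherwise the normalized address must appear in the token's ip_address collection (the old code compared against a single address). A minimal sketch of that membership logic, with a trivial normalizer standing in for mtoken.convert_ip_address, whose real implementation lives in models/token.py:

    def convert_ip_address_sketch(address):
        # Stub normalizer; the real helper lives in models.token.
        return address.strip().lower()


    def valid_token_ip_sketch(token_addresses, remote_ip):
        # Deny by default; allow only a known, normalized address.
        if not remote_ip:
            return False
        remote_ip = convert_ip_address_sketch(remote_ip)
        return remote_ip in [
            convert_ip_address_sketch(a) for a in token_addresses
        ]


    print(valid_token_ip_sketch(["10.0.0.5", "192.168.1.9"], "10.0.0.5"))
    print(valid_token_ip_sketch(["10.0.0.5"], "172.16.0.1"))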
diff --git a/app/handlers/count.py b/app/handlers/count.py
index 3bb0736..526af8a 100644
--- a/app/handlers/count.py
+++ b/app/handlers/count.py
@@ -21,6 +21,7 @@ from handlers.base import BaseHandler
from handlers.common import (
COLLECTIONS,
COUNT_VALID_KEYS,
+ update_id_fields,
get_and_add_date_range,
get_query_spec,
)
@@ -103,7 +104,8 @@ def count_one_collection(
"""
result = []
spec = get_query_spec(query_args_func, valid_keys)
- spec = get_and_add_date_range(spec, query_args_func)
+ get_and_add_date_range(spec, query_args_func)
+ update_id_fields(spec)
if spec:
_, number = find_and_count(
@@ -142,7 +144,8 @@ def count_all_collections(database, query_args_func, valid_keys):
result = []
spec = get_query_spec(query_args_func, valid_keys)
- spec = get_and_add_date_range(spec, query_args_func)
+ get_and_add_date_range(spec, query_args_func)
+ update_id_fields(spec)
if spec:
for key, val in COLLECTIONS.iteritems():
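count.py now funnels its query spec through update_id_fields as well, so that string IDs supplied in count queries compare correctly against MongoDB's ObjectId-typed fields. A small demonstration of the transformation, with the key names and the spec purely illustrative:

    from bson import objectid

    ID_KEYS = ["_id", "boot_id", "defconfig_id", "job_id", "lab_id"]


    def update_id_fields(spec):
        # Convert string IDs in place so they compare against
        # ObjectId-typed fields in MongoDB.
        if spec:
            for key in set(ID_KEYS) & set(spec.keys()):
                spec[key] = objectid.ObjectId(spec[key])


    spec = {"job_id": "5480d1adb1b5e10a7a3f1b3a", "status": "PASS"}
    update_id_fields(spec)
    print(spec)  # job_id is now an ObjectId instance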
diff --git a/app/handlers/dbindexes.py b/app/handlers/dbindexes.py
index 8e87487..29512bf 100644
--- a/app/handlers/dbindexes.py
+++ b/app/handlers/dbindexes.py
@@ -15,21 +15,9 @@
"""Make sure indexes are created at startup."""
-from pymongo import (
- ASCENDING,
- DESCENDING,
-)
-
-from models import (
- CREATED_KEY,
- DB_NAME,
- STATUS_KEY
-)
-from models import (
- BOOT_COLLECTION,
- DEFCONFIG_COLLECTION,
- JOB_COLLECTION,
-)
+import pymongo
+
+import models
def ensure_indexes(client, db_options):
@@ -44,7 +32,7 @@ def ensure_indexes(client, db_options):
db_user = db_options["dbuser"]
db_pwd = db_options["dbpassword"]
- database = client[DB_NAME]
+ database = client[models.DB_NAME]
if all([db_user, db_pwd]):
database.authenticate(db_user, password=db_pwd)
@@ -58,8 +46,8 @@ def _ensure_job_indexes(database):
:param database: The database connection.
"""
- database[JOB_COLLECTION].ensure_index(
- [(CREATED_KEY, DESCENDING)], background=True
+ database[models.JOB_COLLECTION].ensure_index(
+ [(models.CREATED_KEY, pymongo.DESCENDING)], background=True
)
@@ -68,8 +56,8 @@ def _ensure_boot_indexes(database):
:param database: The database connection.
"""
- database[BOOT_COLLECTION].ensure_index(
- [(CREATED_KEY, DESCENDING)], background=True
+ database[models.BOOT_COLLECTION].ensure_index(
+ [(models.CREATED_KEY, pymongo.DESCENDING)], background=True
)
@@ -78,7 +66,21 @@ def _ensure_defconfig_indexes(database):
:param database: The database connection.
"""
- collection = database[DEFCONFIG_COLLECTION]
+ collection = database[models.DEFCONFIG_COLLECTION]
+
+ collection.ensure_index(
+ [(models.CREATED_KEY, pymongo.DESCENDING)], background=True)
+ collection.ensure_index(
+ [(models.STATUS_KEY, pymongo.ASCENDING)], background=True)
+
+
+def _ensure_token_indexes(database):
+ """Ensure indexes exists for the 'token' collection.
+
+ :param database: The database connection.
+ """
+ collection = database[models.TOKEN_COLLECTION]
- collection.ensure_index([(CREATED_KEY, DESCENDING)], background=True)
- collection.ensure_index([(STATUS_KEY, ASCENDING)], background=True)
+ collection.ensure_index(
+ [(models.TOKEN_KEY, pymongo.DESCENDING)], background=True
+ )
diff --git a/app/handlers/decorators.py b/app/handlers/decorators.py
deleted file mode 100644
index 40014aa..0000000
--- a/app/handlers/decorators.py
+++ /dev/null
@@ -1,209 +0,0 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-"""Decorators used by handler methods."""
-
-from functools import wraps
-
-from models.token import (
- TOKEN_COLLECTION,
- Token,
-)
-from utils.db import find_one
-
-# TODO: move this into __init__
-API_TOKEN_HEADER = 'Authorization'
-
-
-def protected(method):
- """Protects an HTTP method with token based auth/authz.
-
- :param method: The HTTP verb to protect.
- """
-
- def decorator_wrap(function):
- @wraps(function)
- def _function_wrapper(obj, *args, **kwargs):
- token = obj.request.headers.get(API_TOKEN_HEADER, None)
-
- if token:
- token_obj = _find_token(token, obj)
-
- if token_obj and _validate_token(
- token_obj, obj, method, _valid_token_general):
- return function(obj, *args, **kwargs)
-
- obj.log.info(
- "Token not authorized for IP address %s - Token: %s",
- obj.request.remote_ip, token
- )
- obj.send_error(403)
-
- return _function_wrapper
-
- return decorator_wrap
-
-
-def protected_th(method):
- """Protect HTTP method with token auth/authz for the token RequestHandler.
-
- :param method: The HTTP verb to protect.
- """
-
- def decorator_wrap(function):
- @wraps(function)
- def _function_wrapper(obj, *args, **kwargs):
- token = obj.request.headers.get(API_TOKEN_HEADER, None)
-
- if token:
- if _is_master_key(token, obj):
- obj.log.info(
- "Master key in use from IP address %s",
- obj.request.remote_ip
- )
- return function(obj, *args, **kwargs)
- else:
- token_obj = _find_token(token, obj)
-
- if token_obj and _validate_token(
- token_obj, obj, method, _valid_token_th):
- return function(obj, *args, **kwargs)
-
- obj.log.info(
- "Token not authorized nor a master key for IP "
- "address %s - Token: %s", obj.request.remote_ip, token
- )
- obj.send_error(403)
-
- return _function_wrapper
-
- return decorator_wrap
-
-
-def _validate_token(token, obj, method, validate_func):
- """Make sure the passed token is valid.
-
- :param token: The Token object to validate.
- :param obj: The RequestHandler object of this request.
- :param method: The HTTP verb this token is being validated for.
- :param validate_func: Function called to validate the token, must accept
- a Token object and the method string.
- :return True or False.
- """
- valid_token = True
-
- if not isinstance(token, Token):
- obj.log.error("Retrieved token is not a Token object")
- valid_token = False
- elif token.is_ip_restricted and not _valid_token_ip(token, obj):
- valid_token = False
-
- valid_token &= validate_func(token, method)
-
- return valid_token
-
-
-def _valid_token_th(token, method):
- """Make sure a token is a valid token for the `TokenHandler`.
-
- A valid `TokenHandler` token is an admin token, or a superuser token
- for GET operations.
-
- :param token: The Token object to validate.
- :param method: The HTTP verb to validate.
- :return True or False.
- """
- valid_token = False
-
- if token.is_admin:
- valid_token = True
- elif token.is_superuser and method == "GET":
- valid_token = True
-
- return valid_token
-
-
-def _valid_token_general(token, method):
- """Make sure the token can be used for an HTTP method.
-
- :param token: The Token object to validate.
- :param method: The HTTP verb this token is being validated for.
- :return True or False.
- """
- valid_token = False
-
- if method == "GET" and token.is_get_token:
- valid_token = True
- elif method == "POST" and token.is_post_token:
- valid_token = True
- elif method == "DELETE" and token.is_delete_token:
- valid_token = True
-
- return valid_token
-
-
-def _valid_token_ip(token, obj):
- """Make sure the token comes from the designated IP addresses.
-
- :param token: The Token object to validate.
- :param obj: The RequestHandler object of this request.
- :return True or False.
- """
- valid_token = True
-
- # TODO: what if we have a pool of IPs for the token?
- if token.ip_address != obj.request.remote_ip:
- obj.log.info(
- "IP restricted token from wrong IP address: %s",
- obj.request.remote_ip
- )
- valid_token = False
-
- return valid_token
-
-
-def _is_master_key(token, obj):
- """Is the token a master key?
-
- :param token: The token to check.
- :param obj: The RequestHandler object as passed by the decorator.
- :return True or False.
- """
- is_valid = False
-
- obj.log.debug(
- "Checking master key from IP address %s", obj.request.remote_ip
- )
-
- if obj.settings['master_key'] == token:
- is_valid = True
-
- return is_valid
-
-
-def _find_token(token, obj):
- """Find a token in the database.
-
- :param token: The token to find.
- :param obj: The RequestHandler object as passed by the decorator.
- :return A Token object.
- """
- token_found = None
- result = find_one(obj.db[TOKEN_COLLECTION], [token], field='token')
-
- if result:
- token_found = Token.from_json(result)
-
- return token_found
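
For reference, the decorators deleted above were applied directly to Tornado handler methods; a minimal usage sketch (the handler, route and import path are illustrative, not taken from the tree):

    import tornado.web

    # `protected` is the decorator factory removed above; the import
    # path here is assumed for illustration.
    from handlers.decorators import protected


    class ExampleHandler(tornado.web.RequestHandler):

        @protected("GET")
        def get(self, *args, **kwargs):
            # Only reached when the request carries a valid API token
            # authorized for GET; otherwise the decorator sends a 403.
            self.write({"code": 200})

This commit drops the decorator module in favor of the shared token helpers (see the patched handlers.common.validate_token call sites in the tests below).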
diff --git a/app/handlers/job.py b/app/handlers/job.py
index da13980..e7d6120 100644
--- a/app/handlers/job.py
+++ b/app/handlers/job.py
@@ -15,23 +15,18 @@
"""The RequestHandler for /job URLs."""
-from handlers.base import BaseHandler
-from handlers.common import JOB_VALID_KEYS
-from handlers.response import HandlerResponse
-from models import (
- DEFCONFIG_COLLECTION,
- JOB_COLLECTION,
- JOB_ID_KEY,
- SUBSCRIPTION_COLLECTION,
-)
-from utils.db import (
- delete,
- find_one,
-)
+import bson
+
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import utils.db
+
from taskqueue.tasks import import_job
-class JobHandler(BaseHandler):
+class JobHandler(hbase.BaseHandler):
"""Handle the /job URLs."""
def __init__(self, application, request, **kwargs):
@@ -39,14 +34,14 @@ class JobHandler(BaseHandler):
@property
def collection(self):
- return self.db[JOB_COLLECTION]
+ return self.db[models.JOB_COLLECTION]
@staticmethod
def _valid_keys(method):
- return JOB_VALID_KEYS.get(method, None)
+ return hcommon.JOB_VALID_KEYS.get(method, None)
def _post(self, *args, **kwargs):
- response = HandlerResponse(202)
+ response = hresponse.HandlerResponse(202)
response.reason = "Request accepted and being imported"
response.result = None
@@ -67,25 +62,24 @@ class JobHandler(BaseHandler):
"""
# TODO: maybe look into two-phase commits in mongodb
# http://docs.mongodb.org/manual/tutorial/perform-two-phase-commits/
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
response.result = None
- if find_one(self.collection, job_id):
- delete(
- self.db[DEFCONFIG_COLLECTION],
- {JOB_ID_KEY: {'$in': [job_id]}}
- )
-
- delete(
- self.db[SUBSCRIPTION_COLLECTION],
- {JOB_ID_KEY: {'$in': [job_id]}}
- )
-
- response.status_code = delete(self.collection, job_id)
- if response.status_code == 200:
- response.reason = "Resource '%s' deleted" % job_id
- else:
- response.status_code = 404
- response.reason = self._get_status_message(404)
+ try:
+ job_obj = bson.objectid.ObjectId(job_id)
+ if utils.db.find_one(self.collection, [job_obj]):
+ utils.db.delete(
+ self.db[models.DEFCONFIG_COLLECTION],
+ {models.JOB_ID_KEY: {'$in': [job_obj]}}
+ )
+
+ response.status_code = utils.db.delete(self.collection, job_obj)
+ if response.status_code == 200:
+ response.reason = "Resource '%s' deleted" % job_id
+ else:
+ response.status_code = 404
+ response.reason = self._get_status_message(404)
+ except bson.errors.InvalidId:
+ # Re-raise with a bare raise so the original traceback survives.
+ raise
return response
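
The rewritten `_delete` now converts the ID string from the URL into a `bson.objectid.ObjectId` before touching the database, so a malformed ID raises `bson.errors.InvalidId` instead of silently matching nothing. The same guard in isolation (a sketch; `parse_doc_id` is a made-up name):

    import bson


    def parse_doc_id(doc_id):
        # Return a real ObjectId, or None when the string cannot be one.
        try:
            return bson.objectid.ObjectId(doc_id)
        except bson.errors.InvalidId:
            return None


    assert parse_doc_id("!@#$!#$foo") is None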
diff --git a/app/handlers/lab.py b/app/handlers/lab.py
new file mode 100644
index 0000000..6282045
--- /dev/null
+++ b/app/handlers/lab.py
@@ -0,0 +1,361 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Handler for the /lab URLs."""
+
+from urlparse import urlunparse
+
+import datetime
+import bson
+
+import handlers.base
+import handlers.common
+import handlers.response as hresponse
+import models
+import models.lab as mlab
+import models.token as mtoken
+import utils.validator as validator
+import utils.db
+
+
+# pylint: disable=too-many-public-methods
+class LabHandler(handlers.base.BaseHandler):
+ """Handle all traffic through the /lab URLs."""
+
+ def __init__(self, application, request, **kwargs):
+ super(LabHandler, self).__init__(application, request, **kwargs)
+
+ @property
+ def collection(self):
+ return self.db[models.LAB_COLLECTION]
+
+ @staticmethod
+ def _valid_keys(method):
+ return handlers.common.LAB_VALID_KEYS.get(method, None)
+
+ @staticmethod
+ def _token_validation_func():
+ return handlers.common.valid_token_th
+
+ def _post(self, *args, **kwargs):
+ response = hresponse.HandlerResponse(201)
+
+ json_obj = kwargs["json_obj"]
+
+ valid_contact, reason = validator.is_valid_lab_contact_data(json_obj)
+ if valid_contact:
+ lab_id = kwargs.get("id", None)
+ status_code, reason, result, headers = self._create_or_update(
+ json_obj, lab_id)
+
+ response.status_code = status_code
+ response.result = result
+ if reason:
+ if kwargs["reason"]:
+ reason += "\n" + kwargs["reason"]
+ response.reason = reason
+ if headers:
+ response.headers = headers
+ else:
+ response.status_code = 400
+ if reason:
+ if kwargs["reason"]:
+ reason += "\n" + kwargs["reason"]
+ response.reason = reason
+
+ return response
+
+ def _create_or_update(self, json_obj, lab_id):
+ """Create or update a new lab object.
+
+ If the request comes in with a specified lab ID, it will be treated
+ as an update request.
+
+ :param json_obj: The JSON data as sent in the request.
+ :type json_obj: dict
+ :param lab_id: The ID part of the request.
+ :type lab_id: str
+ :return A tuple with: status code, reason, result and headers.
+ """
+ status_code = None
+ reason = None
+ result = None
+ headers = None
+ old_lab = None
+ name = json_obj.get(models.NAME_KEY)
+
+ if lab_id:
+ try:
+ old_lab = utils.db.find_one(
+ self.collection,
+ [bson.objectid.ObjectId(lab_id)]
+ )
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error("Wrong ID value '%s' passed as doc ID", lab_id)
+ reason = "Wrong ID value provided"
+ else:
+ old_lab = utils.db.find_one(
+ self.collection, [name], field=models.NAME_KEY
+ )
+
+ if all([lab_id, old_lab]):
+ self.log.info(
+ "Updating lab with ID '%s' from IP address %s",
+ old_lab.get(models.ID_KEY), self.request.remote_ip
+ )
+ status_code, reason, result, headers = self._update_lab(
+ json_obj, old_lab
+ )
+ elif all([lab_id, not old_lab]):
+ status_code = 404
+ reason = "Lab with name '%s' not found" % lab_id
+ elif all([not old_lab, not lab_id]):
+ self.log.info("Creating new lab object")
+ status_code, reason, result, headers = self._create_new_lab(
+ json_obj)
+ else:
+ status_code = 400
+ if not reason:
+ reason = (
+ "Lab with name '%s' already exists: did you mean to "
+ "update it?" % name
+ )
+
+ return status_code, reason, result, headers
+
+ def _update_lab(self, json_obj, old_lab):
+ """Update an existing lab object.
+
+ :param json_obj: The JSON object with the lab data.
+ :type json_obj: dict
+ :param old_lab: The JSON object of the lab from the db.
+ :type old_lab: dict
+ :return A tuple with: status code, reason, result and headers.
+ """
+ status_code = None
+ reason = None
+ result = None
+ headers = None
+
+ # Locally used to store the contact information from the new lab object.
+ new_contact = None
+
+ old_lab = mlab.LabDocument.from_json(old_lab)
+ new_lab = mlab.LabDocument.from_json(json_obj)
+
+ if new_lab.name:
+ if old_lab.name != new_lab.name:
+ # There is no setter for the name field in the Lab model.
+ old_lab._name = new_lab.name
+
+ if new_lab.contact:
+ if old_lab.contact != new_lab.contact:
+ old_lab.contact = new_lab.contact
+ new_contact = new_lab.contact
+
+ if new_lab.token:
+ self._update_lab_token(old_lab, new_lab, new_contact)
+
+ if new_lab.address:
+ if old_lab.address != new_lab.address:
+ old_lab.address = new_lab.address
+
+ if old_lab.private != new_lab.private:
+ old_lab.private = new_lab.private
+
+ old_lab.updated_on = datetime.datetime.now(tz=bson.tz_util.utc)
+
+ status_code, _ = utils.db.save(self.db, old_lab)
+ if status_code != 201:
+ reason = "Error updating lab '%s'" % old_lab.name
+ else:
+ reason = "Lab '%s' updated" % old_lab.name
+ status_code = 200
+
+ return status_code, reason, result, headers
+
+ def _update_lab_token(self, old_lab, new_lab, new_contact):
+ """Update references of lab token.
+
+ :param old_lab: The lab object as found in the database.
+ :type old_lab: LabDocument
+ :param new_lab: The new lab object as passed by the user.
+ :type new_lab: LabDocument
+ :param new_contact: The contact information as found in the new lab
+ document.
+ :type new_contact: dict
+ """
+ # If the user specifies a new token, they do so with the actual
+ # token value, not its ID. We need to make sure we still have the
+ # old token as defined in the old lab document, find the new token,
+ # and update accordingly using the token ID.
+ old_token = utils.db.find_one(
+ self.db[models.TOKEN_COLLECTION],
+ [old_lab.token], field=models.TOKEN_KEY
+ )
+ new_token = utils.db.find_one(
+ self.db[models.TOKEN_COLLECTION],
+ [new_lab.token], field=models.TOKEN_KEY
+ )
+
+ if old_token:
+ old_token = mtoken.Token.from_json(old_token)
+ if new_token:
+ new_token = mtoken.Token.from_json(new_token)
+
+ if all([old_token, new_token]):
+ # Both old and new tokens?
+ # Expire the old one and save it.
+ if old_token.token != new_token.token:
+ old_lab.token = new_token.id
+
+ old_token.expired = True
+ ret_code, _ = utils.db.save(self.db, old_token)
+ if ret_code != 201:
+ self.log.warn("Error expiring old token '%s'", old_token.id)
+
+ if all([old_token, not new_token, new_contact]):
+ # Just the old token?
+ # Make sure its contact information is correct and save it.
+ old_token.username = (
+ new_contact[models.NAME_KEY] +
+ " " +
+ new_contact[models.SURNAME_KEY]
+ )
+ old_token.email = new_contact[models.EMAIL_KEY]
+ ret_code, _ = utils.db.save(self.db, old_token)
+ if ret_code != 201:
+ self.log.warn("Error updating old token '%s'", old_token.id)
+
+ def _create_new_lab(self, json_obj):
+ """Create a new lab in the database.
+
+ :param json_obj: The JSON object with the lab data.
+ :type json_obj: dict
+ :return A tuple with: status code, reason, result and headers.
+ """
+ token_id = None
+ ret_val = None
+ reason = "New lab created"
+ result = None
+ headers = None
+
+ lab_doc = mlab.LabDocument.from_json(json_obj)
+ lab_doc.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
+
+ if lab_doc.token:
+ token_json = utils.db.find_one(
+ self.db[models.TOKEN_COLLECTION],
+ [lab_doc.token],
+ field=models.TOKEN_KEY
+ )
+ if token_json:
+ token = mtoken.Token.from_json(token_json)
+ token_id = token.id
+ ret_val = 200
+ else:
+ ret_val = 500
+ else:
+ token = mtoken.Token()
+ token.email = lab_doc.contact[models.EMAIL_KEY]
+ token.username = (
+ lab_doc.contact[models.NAME_KEY] +
+ " " +
+ lab_doc.contact[models.SURNAME_KEY]
+ )
+ token.is_post_token = True
+ token.is_delete_token = True
+ token.is_lab_token = True
+ ret_val, token_id = utils.db.save(self.db, token, manipulate=True)
+
+ if ret_val == 201 or ret_val == 200:
+ lab_doc.token = token_id
+ ret_val, lab_id = utils.db.save(self.db, lab_doc, manipulate=True)
+ if ret_val == 201:
+ result = {
+ models.ID_KEY: lab_id,
+ models.NAME_KEY: lab_doc.name,
+ models.TOKEN_KEY: token.token
+ }
+ location = urlunparse(
+ (
+ 'http',
+ self.request.headers.get('Host'),
+ self.request.uri + '/' + lab_doc.name,
+ '', '', ''
+ )
+ )
+ headers = {'Location': location}
+ else:
+ reason = "Error saving new lab '%s'" % lab_doc.name
+ else:
+ reason = (
+ "Error saving or finding the token for lab '%s'" % lab_doc.name
+ )
+
+ return (ret_val, reason, result, headers)
+
+ def execute_delete(self, *args, **kwargs):
+ # TODO: need to expire or delete token as well.
+ response = None
+
+ if self.validate_req_token("DELETE"):
+ if kwargs and kwargs.get('id', None):
+ lab_id = kwargs['id']
+ try:
+ lab_id = bson.objectid.ObjectId(lab_id)
+ if utils.db.find_one(self.collection, [lab_id]):
+ response = self._delete(lab_id)
+ if response.status_code == 200:
+ response.reason = "Resource '%s' deleted" % lab_id
+ else:
+ response = hresponse.HandlerResponse(404)
+ response.reason = "Resource '%s' not found" % lab_id
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error(
+ "Wrong ID value '%s' passed as doc ID", lab_id
+ )
+ response = hresponse.HandlerResponse(400)
+ response.reason = "Wrong ID value provided"
+ else:
+ spec = handlers.common.get_query_spec(
+ self.get_query_arguments, self._valid_keys("DELETE")
+ )
+ if spec:
+ response = self._delete(spec)
+ if response.status_code == 200:
+ response.reason = (
+ "Resources identified with '%s' deleted" % spec
+ )
+ else:
+ response = hresponse.HandlerResponse(400)
+ response.result = None
+ response.reason = (
+ "No valid data provided to execute a DELETE"
+ )
+ else:
+ response = hresponse.HandlerResponse(403)
+ response.reason = handlers.common.NOT_VALID_TOKEN
+
+ return response
+
+ def _delete(self, spec_or_id):
+ response = hresponse.HandlerResponse(200)
+ response.result = None
+
+ response.status_code = utils.db.delete(self.collection, spec_or_id)
+ response.reason = self._get_status_message(response.status_code)
+
+ return response
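
For the Location header returned on a successful lab creation, `_create_new_lab` assembles the URL with `urlunparse`; a worked example of that exact call shape (host and lab name invented):

    from urlparse import urlunparse

    location = urlunparse(
        ('http', 'api.example.org', '/lab' + '/' + 'lab-01', '', '', '')
    )
    assert location == 'http://api.example.org/lab/lab-01'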
diff --git a/app/handlers/response.py b/app/handlers/response.py
index ae21426..653f815 100644
--- a/app/handlers/response.py
+++ b/app/handlers/response.py
@@ -149,7 +149,7 @@ class HandlerResponse(object):
# The pymongo cursor is an iterable.
if not isinstance(value, (ListType, Cursor)):
value = [value]
- if isinstance(value, Cursor):
+ elif isinstance(value, Cursor):
value = [r for r in value]
self._result = value
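
The `if`-to-`elif` fix above keeps the two normalization branches mutually exclusive: a scalar result is wrapped in a list, a pymongo cursor is exhausted into one, and an existing list passes through untouched. With the old `if`, a freshly wrapped scalar was pointlessly re-tested against `Cursor`. The same logic reduced to a standalone sketch (the type arguments stand in for `ListType` and `Cursor`):

    def normalize_result(value, list_type, cursor_type):
        # Wrap scalars, exhaust cursors, leave lists as they are.
        if not isinstance(value, (list_type, cursor_type)):
            value = [value]
        elif isinstance(value, cursor_type):
            value = [r for r in value]
        return value


    assert normalize_result(1, list, tuple) == [1]
    assert normalize_result((1, 2), list, tuple) == [1, 2]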
diff --git a/app/handlers/subscription.py b/app/handlers/subscription.py
index 7e587e3..a6bd578 100644
--- a/app/handlers/subscription.py
+++ b/app/handlers/subscription.py
@@ -15,7 +15,10 @@
"""The RequetHandler for /subscription URLs."""
-import json
+try:
+ import simplejson as json
+except ImportError:
+ import json
from handlers.base import BaseHandler
from handlers.response import HandlerResponse
@@ -65,16 +68,20 @@ class SubscriptionHandler(BaseHandler):
try:
json_obj = json.loads(self.request.body.decode('utf8'))
- if is_valid_json(json_obj, self._valid_keys('DELETE')):
+ valid, reason = is_valid_json(
+ json_obj, self._valid_keys('DELETE')
+ )
+
+ if valid:
response.status_code = unsubscribe(
self.collection, doc_id, json_obj[EMAIL_KEY]
)
+ response.reason = self._get_status_message(
+ response.status_code
+ )
else:
response.status_code = 400
-
- response.reason = self._get_status_message(
- response.status_code
- )
+ response.reason = reason
except ValueError:
response.status_code = 420
response.reason = "No JSON data found in the DELETE request"
diff --git a/app/handlers/tests/test_batch_handler.py b/app/handlers/tests/test_batch_handler.py
index ef162ae..1b4e67e 100644
--- a/app/handlers/tests/test_batch_handler.py
+++ b/app/handlers/tests/test_batch_handler.py
@@ -13,35 +13,32 @@
"""Test module for the BatchHandler handler."""
+import concurrent.futures
import json
+import mock
import mongomock
+import tornado
+import tornado.testing
-from concurrent.futures import ThreadPoolExecutor
-from mock import patch
-from tornado import (
- ioloop,
- testing,
- web,
-)
-
-from handlers.app import AppHandler
-from urls import _BATCH_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestBatchHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestBatchHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestBatchHandler, self).setUp()
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -57,15 +54,15 @@ class TestBatchHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False
}
- return web.Application([_BATCH_URL], **settings)
+ return tornado.web.Application([urls._BATCH_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
def test_delete_no_token(self):
response = self.fetch('/batch', method='DELETE')
@@ -152,7 +149,7 @@ class TestBatchHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch("handlers.batch.run_batch_group")
+ @mock.patch("taskqueue.tasks.run_batch_group")
def test_post_correct(self, mocked_run_batch):
headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
batch_dict = {
diff --git a/app/handlers/tests/test_bisect_handler.py b/app/handlers/tests/test_bisect_handler.py
index 934f1ed..a8fd028 100644
--- a/app/handlers/tests/test_bisect_handler.py
+++ b/app/handlers/tests/test_bisect_handler.py
@@ -13,46 +13,43 @@
"""Test module for the BisectHandler handler."""
+import concurrent.futures
+import mock
import mongomock
+import tornado
+import tornado.testing
-from concurrent.futures import ThreadPoolExecutor
-from mock import patch, MagicMock
-from tornado import (
- ioloop,
- testing,
- web,
-)
-
-from handlers.app import AppHandler
-from urls import _BISECT_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestBisectHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestBisectHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestBisectHandler, self).setUp()
- self.task_return_value = MagicMock()
- self.task_ready = MagicMock()
+ self.task_return_value = mock.MagicMock()
+ self.task_ready = mock.MagicMock()
self.task_ready.return_value = True
self.task_return_value.ready = self.task_ready
- self.task_return_value.get = MagicMock()
+ self.task_return_value.get = mock.MagicMock()
self.task_return_value.get.return_value = 200, []
- patched_boot_bisect_func = patch("handlers.bisect.boot_bisect")
+ patched_boot_bisect_func = mock.patch("taskqueue.tasks.boot_bisect")
self.boot_bisect = patched_boot_bisect_func.start()
- self.boot_bisect.apply_async = MagicMock()
+ self.boot_bisect.apply_async = mock.MagicMock()
self.boot_bisect.apply_async.return_value = self.task_return_value
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -69,15 +66,15 @@ class TestBisectHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False
}
- return web.Application([_BISECT_URL], **settings)
+ return tornado.web.Application([urls._BISECT_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
def test_bisect_wrong_collection(self):
headers = {'Authorization': 'foo'}
@@ -97,7 +94,9 @@ class TestBisectHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response = self.fetch('/bisect/boot/', headers=headers)
self.assertEqual(response.code, 400)
- def test_boot_bisect_no_id(self):
+ @mock.patch("bson.objectid.ObjectId")
+ def test_boot_bisect_no_id(self, mock_id):
+ mock_id.return_value = "foo"
headers = {'Authorization': 'foo'}
self.task_return_value.get.return_value = 404, []
@@ -113,8 +112,10 @@ class TestBisectHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response = self.fetch('/bisect/boot/foo', headers=headers)
self.assertEqual(response.code, 400)
- @patch("handlers.bisect.find_one")
- def test_boot_bisect_with_result(self, mocked_find):
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_boot_bisect_with_result(self, mocked_find, mock_id):
+ mock_id.return_value = "foo"
headers = {'Authorization': 'foo'}
mocked_find.return_value = [{"foo": "bar"}]
diff --git a/app/handlers/tests/test_boot_handler.py b/app/handlers/tests/test_boot_handler.py
index b9257eb..940bb8a 100644
--- a/app/handlers/tests/test_boot_handler.py
+++ b/app/handlers/tests/test_boot_handler.py
@@ -13,35 +13,37 @@
"""Test module for the BootHandler handler."""
-import mongomock
+try:
+ import simplejson as json
+except ImportError:
+ import json
-from concurrent.futures import ThreadPoolExecutor
-from mock import patch
-from tornado import (
- ioloop,
- testing,
- web,
-)
+import concurrent.futures
+import mock
+import mongomock
+import tornado
+import tornado.testing
-from handlers.app import AppHandler
-from urls import _BOOT_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestBootHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestBootHandler, self).setUp()
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -57,15 +59,15 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False
}
- return web.Application([_BOOT_URL], **settings)
+ return tornado.web.Application([urls._BOOT_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
def test_delete_no_token(self):
self.find_token.return_value = None
@@ -73,7 +75,9 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response = self.fetch('/boot/board', method='DELETE')
self.assertEqual(response.code, 403)
- def test_delete_with_token_no_job(self):
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_token_no_job(self, mock_id):
+ mock_id.return_value = "boot"
headers = {'Authorization': 'foo'}
response = self.fetch(
@@ -84,7 +88,11 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- def test_delete_with_token_with_boot(self):
+ @mock.patch("handlers.boot.BootHandler._valid_boot_delete_token")
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_token_with_boot(self, mock_id, valid_delete):
+ valid_delete.return_value = True
+ mock_id.return_value = "boot"
db = self.mongodb_client['kernel-ci']
db['boot'].insert(dict(_id='boot', job='job', kernel='kernel'))
@@ -98,6 +106,24 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @mock.patch("handlers.boot.BootHandler._valid_boot_delete_token")
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_non_lab_token_with_boot(self, mock_id, valid_delete):
+ valid_delete.return_value = False
+ mock_id.return_value = "boot"
+ db = self.mongodb_client['kernel-ci']
+ db['boot'].insert(dict(_id='boot', job='job', kernel='kernel'))
+
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/boot/boot', method='DELETE', headers=headers,
+ )
+
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
def test_delete_no_id_no_spec(self):
headers = {'Authorization': 'foo'}
@@ -109,6 +135,17 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ def test_delete_with_bogus_objectid(self):
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/boot/!@#$!#$foo', method='DELETE', headers=headers,
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
def test_delete_wrong_spec(self):
headers = {'Authorization': 'foo'}
@@ -120,3 +157,104 @@ class TestBootHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(response.code, 400)
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_wrong_content(self):
+ body = {
+ "foo": "bar"
+ }
+
+ headers = {
+ 'Authorization': 'foo',
+ 'Content-Type': 'application/json',
+ }
+
+ response = self.fetch(
+ "/boot", method="POST", body=json.dumps(body), headers=headers
+ )
+
+ self.assertEqual(response.code, 400)
+
+ @mock.patch("taskqueue.tasks.import_boot")
+ @mock.patch("utils.db.find_one")
+ def test_post_valid_content_same_token(self, find_one, import_boot):
+ find_one.side_effect = [
+ {"token": "foo"}, {"token": "foo", "expired": False}
+ ]
+ body = {
+ "version": "1.0",
+ "board": "board",
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "lab_name": "lab-name",
+ "arch": "arm"
+ }
+
+ headers = {
+ 'Authorization': 'foo',
+ 'Content-Type': 'application/json',
+ }
+
+ response = self.fetch(
+ "/boot", method="POST", body=json.dumps(body), headers=headers
+ )
+
+ self.assertEqual(response.code, 202)
+
+ @mock.patch("utils.db.find_one")
+ def test_post_valid_content_different_token(self, find_one):
+ find_one.side_effect = [
+ {"token": "bar"}, {"token": "bar", "expired": False}
+ ]
+ body = {
+ "version": "1.0",
+ "board": "board",
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "lab_name": "lab-name",
+ "arch": "arm"
+ }
+
+ headers = {
+ "Authorization": "foo",
+ "Content-Type": "application/json",
+ }
+
+ response = self.fetch(
+ "/boot", method="POST", body=json.dumps(body), headers=headers
+ )
+
+ self.assertEqual(response.code, 403)
+
+ @mock.patch("taskqueue.tasks.import_boot")
+ @mock.patch("utils.db.find_one")
+ def test_post_valid_content_different_token_admin(
+ self, find_one, import_boot):
+ token_prop = [0 for _ in range(0, 16)]
+ token_prop[0] = 1
+
+ find_one.side_effect = [
+ {"token": "bar"},
+ {"token": "bar", "expired": False, "properties": token_prop}
+ ]
+ body = {
+ "version": "1.0",
+ "board": "board",
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "lab_name": "lab-name",
+ "arch": "arm"
+ }
+
+ headers = {
+ "Authorization": "foo",
+ "Content-Type": "application/json",
+ }
+
+ response = self.fetch(
+ "/boot", method="POST", body=json.dumps(body), headers=headers
+ )
+
+ self.assertEqual(response.code, 202)
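
The boot POST tests above lean on two tricks: `find_one.side_effect` is a list, so each successive database lookup (first the lab document, then the requesting token) consumes the next canned value; and index 0 of the token's `properties` list appears to act as the admin bit, which is what turns the mismatched-token 403 into a 202 in the last test. The `side_effect` mechanics in isolation:

    import mock

    find_one = mock.MagicMock()
    find_one.side_effect = [
        {"token": "foo"},                    # first call: lab lookup
        {"token": "foo", "expired": False},  # second call: token lookup
    ]

    assert find_one("labs")["token"] == "foo"
    assert find_one("tokens")["expired"] is False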
diff --git a/app/handlers/tests/test_count_handler.py b/app/handlers/tests/test_count_handler.py
index 5757ef0..33db9f7 100644
--- a/app/handlers/tests/test_count_handler.py
+++ b/app/handlers/tests/test_count_handler.py
@@ -15,37 +15,33 @@
"""Test module for the CountHandler handler."""
+import concurrent.futures
import json
+import mock
import mongomock
+import tornado
+import tornado.testing
-from concurrent.futures import ThreadPoolExecutor
-from tornado import (
- ioloop,
- testing,
- web,
-)
-
-from mock import patch
-
-from handlers.app import AppHandler
-from urls import _COUNT_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestCountHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestCountHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestCountHandler, self).setUp()
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -61,15 +57,15 @@ class TestCountHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False,
}
- return web.Application([_COUNT_URL], **settings)
+ return tornado.web.Application([urls._COUNT_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
def test_post(self):
body = json.dumps(dict(job='job', kernel='kernel'))
diff --git a/app/handlers/tests/test_defconf_handler.py b/app/handlers/tests/test_defconf_handler.py
index f81efc5..75e241c 100644
--- a/app/handlers/tests/test_defconf_handler.py
+++ b/app/handlers/tests/test_defconf_handler.py
@@ -15,38 +15,32 @@
"""Test module for the DefConfHandler handler.."""
+import concurrent.futures
+import mock
import mongomock
+import tornado
+import tornado.testing
-from concurrent.futures import ThreadPoolExecutor
-from mock import (
- MagicMock,
- patch,
-)
-from tornado import (
- ioloop,
- testing,
- web,
-)
-
-from handlers.app import AppHandler
-from urls import _DEFCONF_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestDefconfHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestDefconfHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestDefconfHandler, self).setUp()
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -62,15 +56,15 @@ class TestDefconfHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False,
}
- return web.Application([_DEFCONF_URL], **settings)
+ return tornado.web.Application([urls._DEFCONF_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
def test_get_wrong_url(self):
response = self.fetch('/foobardefconf')
@@ -79,8 +73,8 @@ class TestDefconfHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch('utils.db.find')
- @patch('utils.db.count')
+ @mock.patch('utils.db.find')
+ @mock.patch('utils.db.count')
def test_get(self, mock_count, mock_find):
mock_count.return_value = 0
mock_find.return_value = []
@@ -97,9 +91,11 @@ class TestDefconfHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
self.assertEqual(response.body, expected_body)
- @patch('handlers.defconf.DefConfHandler.collection')
- def test_get_by_id_not_found(self, mock_collection):
- mock_collection.find_one = MagicMock()
+ @mock.patch('bson.objectid.ObjectId')
+ @mock.patch('handlers.defconf.DefConfHandler.collection')
+ def test_get_by_id_not_found(self, mock_collection, mock_id):
+ mock_id.return_value = "defconf"
+ mock_collection.find_one = mock.MagicMock()
mock_collection.find_one.return_value = None
headers = {'Authorization': 'foo'}
diff --git a/app/handlers/tests/test_handler_response.py b/app/handlers/tests/test_handler_response.py
index 97ca7bc..f101371 100644
--- a/app/handlers/tests/test_handler_response.py
+++ b/app/handlers/tests/test_handler_response.py
@@ -17,16 +17,16 @@
import unittest
-from handlers.response import HandlerResponse
+import handlers.response as handres
class TestHandlerResponse(unittest.TestCase):
def test_response_constructor_not_valid_input(self):
- self.assertRaises(ValueError, HandlerResponse, "1")
+ self.assertRaises(ValueError, handres.HandlerResponse, "1")
def test_response_setter_not_valid(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
def _setter_call(value):
response.status_code = value
@@ -37,13 +37,13 @@ class TestHandlerResponse(unittest.TestCase):
self.assertRaises(ValueError, _setter_call, ())
def test_response_setter_valid(self):
- response = HandlerResponse(1)
+ response = handres.HandlerResponse(1)
response.status_code = 200
self.assertEqual(response.status_code, 200)
def test_reponse_creation_default_values(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
self.assertEqual(response.status_code, 200)
self.assertIsNone(response.headers)
@@ -51,7 +51,7 @@ class TestHandlerResponse(unittest.TestCase):
self.assertIsNone(response.reason)
def test_response_reason_setter_valid(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
response.reason = u'foo'
self.assertEqual('foo', response.reason)
@@ -60,7 +60,7 @@ class TestHandlerResponse(unittest.TestCase):
self.assertEqual('bar', response.reason)
def test_response_result_setter(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
response.result = {}
self.assertIsInstance(response.result, list)
@@ -75,7 +75,7 @@ class TestHandlerResponse(unittest.TestCase):
self.assertEqual(response.result, ['foo'])
def test_response_headers_setter_not_valid(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
def _setter_call(value):
response.headers = value
@@ -87,7 +87,7 @@ class TestHandlerResponse(unittest.TestCase):
self.assertRaises(ValueError, _setter_call, "1")
def test_response_headers_setter_valid(self):
- response = HandlerResponse()
+ response = handres.HandlerResponse()
response.headers = {'foo': 'bar'}
self.assertEqual({'foo': 'bar'}, response.headers)
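
A recurring idiom in this module is wrapping the property assignment in a small local function before passing it to `assertRaises`, because an assignment is a statement and cannot itself be handed over as a callable. A self-contained rendition of the pattern (the `Box` class is invented for illustration):

    import unittest


    class Box(object):

        @property
        def size(self):
            return self._size

        @size.setter
        def size(self, value):
            if not isinstance(value, int):
                raise ValueError("size must be an int")
            self._size = value


    class TestBox(unittest.TestCase):

        def test_setter_rejects_strings(self):
            box = Box()

            def _setter_call(value):
                # Wrap the assignment so assertRaises can invoke it.
                box.size = value

            self.assertRaises(ValueError, _setter_call, "1")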
diff --git a/app/handlers/tests/test_handlers_common.py b/app/handlers/tests/test_handlers_common.py
index 4fc3f2a..5b18bd5 100644
--- a/app/handlers/tests/test_handlers_common.py
+++ b/app/handlers/tests/test_handlers_common.py
@@ -374,7 +374,7 @@ class TestHandlersCommon(unittest.TestCase):
}
}
- spec = get_and_add_date_range(spec, query_args_func)
+ get_and_add_date_range(spec, query_args_func)
self.assertEqual(expected, spec)
@patch("models.token.Token", spec=True)
@@ -386,12 +386,30 @@ class TestHandlersCommon(unittest.TestCase):
token.is_get_token = True
token.is_post_token = True
token.is_delete_token = True
+ token.is_lab_token = False
+ self.assertFalse(token.is_lab_token)
self.assertTrue(valid_token_general(token, "GET"))
self.assertTrue(valid_token_general(token, "POST"))
self.assertTrue(valid_token_general(token, "DELETE"))
@patch("models.token.Token", spec=True)
+ def test_valid_token_general_lab_token(self, mock_class):
+ token = mock_class.return_value
+
+ self.assertIsInstance(token, Token)
+
+ token.is_get_token = False
+ token.is_post_token = True
+ token.is_delete_token = True
+ token.is_lab_token = True
+
+ self.assertTrue(token.is_lab_token)
+ self.assertFalse(valid_token_general(token, "GET"))
+ self.assertTrue(valid_token_general(token, "POST"))
+ self.assertFalse(valid_token_general(token, "DELETE"))
+
+ @patch("models.token.Token", spec=True)
def test_valid_token_general_false(self, mock_class):
token = mock_class.return_value
@@ -453,11 +471,9 @@ class TestHandlersCommon(unittest.TestCase):
validate_token(None, "GET", None, None)
)
- @patch("models.token.Token", spec=True)
@patch("models.token.Token.from_json")
- def test_validate_token_true(self, mock_from_json, mock_class):
- token = mock_class.return_value
- self.assertIsInstance(token, Token)
+ def test_validate_token_true(self, mock_from_json):
+ token = Token()
mock_from_json.return_value = token
validate_func = Mock()
@@ -476,11 +492,9 @@ class TestHandlersCommon(unittest.TestCase):
validate_token(token, "GET", "127.0.0.1", validate_func)
)
- @patch("models.token.Token", spec=True)
@patch("models.token.Token.from_json")
- def test_validate_token_false(self, mock_from_json, mock_class):
- token = mock_class.return_value
- self.assertIsInstance(token, Token)
+ def test_validate_token_false(self, mock_from_json):
+ token = Token()
mock_from_json.return_value = token
validate_func = Mock()
diff --git a/app/handlers/tests/test_job_handler.py b/app/handlers/tests/test_job_handler.py
index 806c3cf..dbc58a5 100644
--- a/app/handlers/tests/test_job_handler.py
+++ b/app/handlers/tests/test_job_handler.py
@@ -15,39 +15,33 @@
"""Test module for the JobHandler handler."""
+import concurrent.futures
import json
+import mock
import mongomock
+import tornado
+import tornado.testing
-from concurrent.futures import ThreadPoolExecutor
-from mock import (
- MagicMock,
- patch,
-)
-from tornado import (
- ioloop,
- testing,
- web,
-)
-
-from handlers.app import AppHandler
-from urls import _JOB_URL
+import handlers.app
+import urls
# Default Content-Type header returned by Tornado.
DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
-class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
+class TestJobHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
def setUp(self):
self.mongodb_client = mongomock.Connection()
super(TestJobHandler, self).setUp()
- patched_find_token = patch("handlers.base.BaseHandler._find_token")
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.base.validate_token")
+ patched_validate_token = mock.patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -63,18 +57,18 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
settings = {
'dboptions': dboptions,
'client': self.mongodb_client,
- 'executor': ThreadPoolExecutor(max_workers=2),
- 'default_handler_class': AppHandler,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
'debug': False
}
- return web.Application([_JOB_URL], **settings)
+ return tornado.web.Application([urls._JOB_URL], **settings)
def get_new_ioloop(self):
- return ioloop.IOLoop.instance()
+ return tornado.ioloop.IOLoop.instance()
- @patch('utils.db.find')
- @patch('utils.db.count')
+ @mock.patch('utils.db.find')
+ @mock.patch('utils.db.count')
def test_get(self, mock_count, mock_find):
mock_count.return_value = 0
mock_find.return_value = []
@@ -89,8 +83,8 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
self.assertEqual(response.body, expected_body)
- @patch('utils.db.find')
- @patch('utils.db.count')
+ @mock.patch('utils.db.find')
+ @mock.patch('utils.db.count')
def test_get_with_limit(self, mock_count, mock_find):
mock_count.return_value = 0
mock_find.return_value = []
@@ -107,9 +101,11 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
self.assertEqual(response.body, expected_body)
- @patch('handlers.job.JobHandler.collection')
- def test_get_by_id_not_found(self, collection):
- collection.find_one = MagicMock()
+ @mock.patch('bson.objectid.ObjectId')
+ @mock.patch('handlers.job.JobHandler.collection')
+ def test_get_by_id_not_found(self, collection, mock_id):
+ mock_id.return_value = "job-kernel"
+ collection.find_one = mock.MagicMock()
collection.find_one.return_value = None
headers = {'Authorization': 'foo'}
@@ -119,9 +115,11 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch('handlers.job.JobHandler.collection')
- def test_get_by_id_not_found_empty_list(self, collection):
- collection.find_one = MagicMock()
+ @mock.patch('bson.objectid.ObjectId')
+ @mock.patch('handlers.job.JobHandler.collection')
+ def test_get_by_id_not_found_empty_list(self, collection, mock_id):
+ mock_id.return_value = "job-kernel"
+ collection.find_one = mock.MagicMock()
collection.find_one.return_value = []
headers = {'Authorization': 'foo'}
@@ -131,9 +129,11 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch('handlers.job.JobHandler.collection')
- def test_get_by_id_found(self, collection):
- collection.find_one = MagicMock()
+ @mock.patch('bson.objectid.ObjectId')
+ @mock.patch('handlers.job.JobHandler.collection')
+ def test_get_by_id_found(self, collection, mock_id):
+ mock_id.return_value = "job-kernel"
+ collection.find_one = mock.MagicMock()
collection.find_one.return_value = {'_id': 'foo'}
expected_body = '{"code": 200, "result": [{"_id": "foo"}]}'
@@ -189,9 +189,9 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch('handlers.job.import_job')
+ @mock.patch('handlers.job.import_job')
def test_post_correct(self, mock_import_job):
- mock_import_job.apply_async = MagicMock()
+ mock_import_job.apply_async = mock.MagicMock()
headers = {
'Authorization': 'foo',
@@ -212,7 +212,9 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response = self.fetch('/job/job', method='DELETE')
self.assertEqual(response.code, 403)
- def test_delete_with_token_no_job(self):
+ @mock.patch('bson.objectid.ObjectId')
+ def test_delete_with_token_no_job(self, mock_id):
+ mock_id.return_value = "job"
headers = {'Authorization': 'foo'}
response = self.fetch(
@@ -223,7 +225,9 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- def test_delete_with_token_with_job(self):
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_token_with_job(self, mock_id):
+ mock_id.return_value = "job"
db = self.mongodb_client['kernel-ci']
db['job'].insert(dict(_id='job', job='job', kernel='kernel'))
@@ -248,7 +252,7 @@ class TestJobHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- @patch('handlers.job.JobHandler._get_one')
+ @mock.patch('handlers.job.JobHandler._get_one')
def test_get_wrong_handler_response(self, mock_get_one):
mock_get_one.return_value = ""
diff --git a/app/handlers/tests/test_lab_handler.py b/app/handlers/tests/test_lab_handler.py
new file mode 100644
index 0000000..9d067b6
--- /dev/null
+++ b/app/handlers/tests/test_lab_handler.py
@@ -0,0 +1,456 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Test module for the LabHandler handler."""
+
+import concurrent.futures
+import json
+import mock
+import mongomock
+import tornado
+import tornado.testing
+
+import handlers.app
+import urls
+
+# Default Content-Type header returned by Tornado.
+DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
+
+
+class TestLabHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
+
+ def setUp(self):
+ self.mongodb_client = mongomock.Connection()
+
+ super(TestLabHandler, self).setUp()
+
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
+ self.find_token = patched_find_token.start()
+ self.find_token.return_value = "token"
+
+ patched_validate_token = mock.patch("handlers.common.validate_token")
+ self.validate_token = patched_validate_token.start()
+ self.validate_token.return_value = True
+
+ self.addCleanup(patched_find_token.stop)
+ self.addCleanup(patched_validate_token.stop)
+
+ def get_app(self):
+ dboptions = {
+ 'dbpassword': "",
+ 'dbuser': ""
+ }
+
+ settings = {
+ 'dboptions': dboptions,
+ 'client': self.mongodb_client,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
+ 'debug': False
+ }
+
+ return tornado.web.Application([urls._LAB_URL], **settings)
+
+ def get_new_ioloop(self):
+ return tornado.ioloop.IOLoop.instance()
+
+ def test_post_no_json(self):
+ body = json.dumps(dict(name='name', contact={}))
+
+ response = self.fetch('/lab', method='POST', body=body)
+
+ self.assertEqual(response.code, 403)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_not_json_content(self):
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ response = self.fetch(
+ '/lab', method='POST', body='', headers=headers
+ )
+
+ self.assertEqual(response.code, 422)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_wrong_content_type(self):
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/lab', method='POST', body='', headers=headers
+ )
+
+ self.assertEqual(response.code, 415)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_wrong_json(self):
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(dict(foo='foo', bar='bar'))
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_wrong_json_no_fields(self):
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(dict(name='foo', contact={}))
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_post_wrong_json_no_all_fields(self):
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(name='foo', contact={"name": "bar", "surname": "foo"})
+ )
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("utils.db.find_one")
+ def test_post_correct(self, find_one):
+ find_one.side_effect = [None]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ )
+ )
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 201)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ self.assertIsNotNone(response.headers["Location"])
+
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_lab_id_found(self, find_one):
+ find_one.side_effect = [True]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ )
+ )
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_id_lab_id_not_found(self, find_one, mock_id):
+ mock_id.return_value = "lab-01"
+ find_one.side_effect = [None]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ )
+ )
+
+ response = self.fetch(
+ '/lab/lab-01', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 404)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_token_not_found(self, find_one):
+ find_one.side_effect = [None, None]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ token="token"
+ )
+ )
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 500)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_token_found(self, find_one):
+ token_json = {
+ "_id": "token_id",
+ "token": "token",
+ "email": "foo",
+ "username": "bar"
+ }
+ find_one.side_effect = [None, token_json, None]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ token="token"
+ )
+ )
+
+ response = self.fetch(
+ '/lab', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 201)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ self.assertIsNotNone(response.headers["Location"])
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_id_lab_id_found(self, find_one, mock_id):
+ lab_json = {
+ "name": "foo",
+ "token": "token-id",
+ "contact": {
+ "name": "foo",
+ "surname": "bar",
+ "email": "foo"
+ }
+ }
+
+ mock_id.return_value = "foo"
+ find_one.side_effect = [lab_json]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ address={"street_1": "foo", "city": "bar"},
+ private=True
+ )
+ )
+
+ response = self.fetch(
+ '/lab/foo', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 200)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.save")
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_id_lab_id_found_err_on_save(
+ self, find_one, save, mock_id):
+ mock_id.return_value = "foo"
+ lab_json = {
+ "name": "foo",
+ "token": "token-id",
+ "contact": {
+ "name": "foo",
+ "surname": "bar",
+ "email": "foo"
+ },
+ "address": {
+ "street_1": "foo"
+ }
+ }
+ find_one.side_effect = [lab_json]
+ save.side_effect = [(500, None)]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foo"},
+ address={"street_1": "foo"}
+ )
+ )
+
+ response = self.fetch(
+ '/lab/foo', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 500)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_post_correct_with_id_lab_id_found_and_token(
+ self, find_one, mock_id):
+ old_lab_json = {
+ "name": "foo",
+ "token": "token-id",
+ "contact": {
+ "name": "foo",
+ "surname": "bar",
+ "email": "foo"
+ },
+ "address": {
+ "street_1": "foo"
+ }
+ }
+ old_token_json = {
+ "_id": "old-token-id",
+ "token": "token-id",
+ "email": ""
+ }
+
+ new_token_json = {
+ "_id": "new-token-id",
+ "token": "token-uuid",
+ "email": "foo",
+ "username": "bar"
+ }
+
+ mock_id.return_value = "foo"
+ find_one.side_effect = [old_lab_json, old_token_json, new_token_json]
+
+ headers = {'Authorization': 'foo', 'Content-Type': 'application/json'}
+
+ body = json.dumps(
+ dict(
+ name='foo',
+ contact={"name": "bar", "surname": "foo", "email": "foobar"},
+ token="token-uuid"
+ )
+ )
+
+ response = self.fetch(
+ '/lab/foo', method='POST', body=body, headers=headers
+ )
+
+ self.assertEqual(response.code, 200)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_get_by_id_not_found(self, find_one, mock_id):
+ mock_id.return_value = "lab-01"
+ find_one.side_effect = [None]
+
+ headers = {'Authorization': 'foo'}
+ response = self.fetch('/lab/lab-01', headers=headers)
+
+ self.assertEqual(response.code, 404)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ @mock.patch("utils.db.find_one")
+ def test_get_by_id_found(self, find_one, mock_id):
+ find_one.side_effect = [{"_id": "foo", "name": "lab-01"}]
+ mock_id.return_value = "lab-01"
+
+ expected_body = (
+ '{"code": 200, "result": [{"_id": "foo", "name": "lab-01"}]}'
+ )
+
+ headers = {'Authorization': 'foo'}
+ response = self.fetch('/lab/lab-01', headers=headers)
+
+ self.assertEqual(response.code, 200)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ self.assertEqual(response.body, expected_body)
+
+ def test_delete_no_token(self):
+ self.find_token.return_value = None
+
+ response = self.fetch('/lab/lab', method='DELETE')
+ self.assertEqual(response.code, 403)
+
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_token_no_lab(self, mock_id):
+ mock_id.return_value = "foolab"
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/lab/foolab', method='DELETE', headers=headers,
+ )
+
+ self.assertEqual(response.code, 404)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ @mock.patch("bson.objectid.ObjectId")
+ def test_delete_with_token_with_lab(self, mock_id):
+ mock_id.return_value = "lab"
+ db = self.mongodb_client['kernel-ci']
+ db['lab'].insert(dict(_id='lab', name='lab-01', contact={}, address={}))
+
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/lab/lab', method='DELETE', headers=headers,
+ )
+
+ self.assertEqual(response.code, 200)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+
+ def test_delete_no_id_no_spec(self):
+ headers = {'Authorization': 'foo'}
+
+ response = self.fetch(
+ '/lab', method='DELETE', headers=headers,
+ )
+
+ self.assertEqual(response.code, 400)
+ self.assertEqual(
+ response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
diff --git a/app/handlers/tests/test_token_handler.py b/app/handlers/tests/test_token_handler.py
index 9804ccb..bcf6802 100644
--- a/app/handlers/tests/test_token_handler.py
+++ b/app/handlers/tests/test_token_handler.py
@@ -47,7 +47,7 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.find_token = patched_find_token.start()
self.find_token.return_value = "token"
- patched_validate_token = patch("handlers.token.validate_token")
+ patched_validate_token = patch("handlers.common.validate_token")
self.validate_token = patched_validate_token.start()
self.validate_token.return_value = True
@@ -261,7 +261,9 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
self.assertIsNotNone(response.headers['Location'])
- def test_post_update_no_token(self):
+ @patch('bson.objectid.ObjectId')
+ def test_post_update_no_token(self, mock_id):
+ mock_id.return_value = "token"
headers = {
'Authorization': 'foo',
'Content-Type': 'application/json',
@@ -277,11 +279,13 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @patch("bson.objectid.ObjectId")
@patch('handlers.token.TokenHandler.collection')
- def test_post_update_with_token(self, mock_collection):
-
+ def test_post_update_with_token(self, mock_collection, mock_id):
+ mock_id.return_value = "token"
mock_collection.find_one = MagicMock()
- mock_collection.find_one.return_value = dict(token='token')
+ mock_collection.find_one.return_value = dict(
+ _id="token", token='token')
headers = {
'Authorization': 'foo',
@@ -298,9 +302,11 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @patch('bson.objectid.ObjectId')
@patch('handlers.token.TokenHandler.collection')
- def test_post_update_wrong_content_0(self, mock_collection):
+ def test_post_update_wrong_content_0(self, mock_collection, mock_id):
+ mock_id.return_value = "token"
mock_collection.find_one = MagicMock()
mock_collection.find_one.return_value = dict(token='token')
@@ -319,9 +325,11 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @patch('bson.objectid.ObjectId')
@patch('handlers.token.TokenHandler.collection')
- def test_post_update_wrong_content_1(self, mock_collection):
+ def test_post_update_wrong_content_1(self, mock_collection, mock_id):
+ mock_id.return_value = "token"
mock_collection.find_one = MagicMock()
mock_collection.find_one.return_value = dict(
token='token', email='email', properties=[0 for _ in range(0, 16)]
@@ -342,9 +350,11 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @patch('bson.objectid.ObjectId')
@patch('handlers.token.TokenHandler.collection')
- def test_post_update_wrong_content_2(self, mock_collection):
+ def test_post_update_wrong_content_2(self, mock_collection, mock_id):
+ mock_id.return_value = "token"
mock_collection.find_one = MagicMock()
mock_collection.find_one.return_value = dict(
token='token', email='email', properties=[0 for _ in range(0, 16)]
@@ -365,12 +375,15 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
+ @patch("bson.objectid.ObjectId")
@patch('handlers.token.TokenHandler.collection')
- def test_post_update_ip_restricted(self, mock_collection):
+ def test_post_update_ip_restricted(self, mock_collection, mock_id):
+ mock_id.return_value = "token"
mock_collection.find_one = MagicMock()
mock_collection.find_one.return_value = dict(
- token='token', email='email', properties=[0 for _ in range(0, 16)]
+ _id="token", token='token', email='email',
+ properties=[0 for _ in range(0, 16)]
)
headers = {
@@ -392,7 +405,9 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
response = self.fetch('/token/token', method='DELETE')
self.assertEqual(response.code, 403)
- def test_delete_with_token_no_document(self):
+ @patch("bson.objectid.ObjectId")
+ def test_delete_with_token_no_document(self, mock_id):
+ mock_id.return_value = "token"
headers = {'Authorization': 'foo'}
response = self.fetch(
@@ -403,9 +418,12 @@ class TestTokenHandler(testing.AsyncHTTPTestCase, testing.LogTrapTestCase):
self.assertEqual(
response.headers['Content-Type'], DEFAULT_CONTENT_TYPE)
- def test_delete_with_token_with_document(self):
+ @patch('bson.objectid.ObjectId')
+ def test_delete_with_token_with_document(self, mock_id):
+ mock_id.return_value = "token"
+
db = self.mongodb_client['kernel-ci']
- db['api-token'].insert(dict(token='token', email='email'))
+ db['api-token'].insert(dict(_id="token", token='token', email='email'))
headers = {'Authorization': 'foo'}
diff --git a/app/handlers/tests/test_version_handler.py b/app/handlers/tests/test_version_handler.py
new file mode 100644
index 0000000..3406915
--- /dev/null
+++ b/app/handlers/tests/test_version_handler.py
@@ -0,0 +1,78 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Test module for the JobHandler handler."""
+
+import concurrent.futures
+import mock
+import mongomock
+import tornado
+import tornado.ioloop
+import tornado.testing
+import tornado.web
+
+import handlers.app
+import urls
+
+# Default Content-Type header returned by Tornado.
+DEFAULT_CONTENT_TYPE = 'application/json; charset=UTF-8'
+
+
+class TestVersionHandler(
+ tornado.testing.AsyncHTTPTestCase, tornado.testing.LogTrapTestCase):
+
+ def setUp(self):
+ self.mongodb_client = mongomock.Connection()
+
+ super(TestVersionHandler, self).setUp()
+
+ patched_find_token = mock.patch("handlers.base.BaseHandler._find_token")
+ self.find_token = patched_find_token.start()
+ self.find_token.return_value = "token"
+
+ patched_validate_token = mock.patch("handlers.common.validate_token")
+ self.validate_token = patched_validate_token.start()
+ self.validate_token.return_value = True
+
+ self.addCleanup(patched_find_token.stop)
+ self.addCleanup(patched_validate_token.stop)
+
+ def get_app(self):
+ dboptions = {
+ 'dbpassword': "",
+ 'dbuser': ""
+ }
+
+ settings = {
+ 'dboptions': dboptions,
+ 'client': self.mongodb_client,
+ 'executor': concurrent.futures.ThreadPoolExecutor(max_workers=2),
+ 'default_handler_class': handlers.app.AppHandler,
+ 'debug': False,
+ 'version': 'foo'
+ }
+
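+ # Build the application with only the /version route so the handler
+ # can be exercised in isolation from the rest of the API.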
+ return tornado.web.Application([urls._VERSION_URL], **settings)
+
+ def get_new_ioloop(self):
+ return tornado.ioloop.IOLoop.instance()
+
+ def test_get(self):
+ response = self.fetch("/version", method="GET")
+ self.assertEqual(response.code, 200)
+
+ def test_post(self):
+ response = self.fetch("/version", method="POST", body="")
+ self.assertEqual(response.code, 501)
+
+ def test_delete(self):
+ response = self.fetch("/version", method="DELETE")
+ self.assertEqual(response.code, 501)
diff --git a/app/handlers/token.py b/app/handlers/token.py
index 508190a..79bb8cf 100644
--- a/app/handlers/token.py
+++ b/app/handlers/token.py
@@ -15,41 +15,19 @@
"""The RequestHandler for /token URLs."""
+import bson
+
from urlparse import urlunparse
-from handlers.base import BaseHandler
-from handlers.common import (
- NOT_VALID_TOKEN,
- TOKEN_VALID_KEYS,
- get_query_fields,
- valid_token_th,
- validate_token,
-)
-from handlers.response import HandlerResponse
-from models import (
- ADMIN_KEY,
- DELETE_KEY,
- EMAIL_KEY,
- EXPIRES_KEY,
- GET_KEY,
- IP_ADDRESS_KEY,
- IP_RESTRICTED,
- POST_KEY,
- SUPERUSER_KEY,
- TOKEN_COLLECTION,
- TOKEN_KEY,
- USERNAME_KEY,
-)
-from models.token import Token
-from utils.db import (
- delete,
- find_one,
- save,
- update,
-)
-
-
-class TokenHandler(BaseHandler):
+import handlers.base as hbase
+import handlers.common as hcommon
+import handlers.response as hresponse
+import models
+import models.token as mtoken
+import utils.db
+
+
+class TokenHandler(hbase.BaseHandler):
"""Handle the /token URLs."""
def __init__(self, application, request, **kwargs):
@@ -57,15 +35,15 @@ class TokenHandler(BaseHandler):
@property
def collection(self):
- return self.db[TOKEN_COLLECTION]
+ return self.db[models.TOKEN_COLLECTION]
@staticmethod
def _valid_keys(method):
- return TOKEN_VALID_KEYS.get(method, None)
+ return hcommon.TOKEN_VALID_KEYS.get(method, None)
@staticmethod
def _token_validation_func():
- return valid_token_th
+ return hcommon.valid_token_th
def _token_validation(self, req_token, method, remote_ip, master_key):
valid_token = False
@@ -76,7 +54,7 @@ class TokenHandler(BaseHandler):
token_obj = self._find_token(req_token, self.db)
if token_obj:
- valid_token = validate_token(
+ valid_token = hcommon.validate_token(
token_obj,
method,
remote_ip,
@@ -88,14 +66,14 @@ class TokenHandler(BaseHandler):
def _get_one(self, doc_id):
# Overridden: with the token we do not search by _id, but
# by token field.
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
response.result = None
- result = find_one(
+ result = utils.db.find_one(
self.collection,
doc_id,
- field=TOKEN_KEY,
- fields=get_query_fields(self.get_query_arguments)
+ field=models.TOKEN_KEY,
+ fields=hcommon.get_query_fields(self.get_query_arguments)
)
if result:
@@ -130,15 +108,15 @@ class TokenHandler(BaseHandler):
:param json_obj: The JSON object with the parameters.
:return A `HandlerResponse` object.
"""
- response = HandlerResponse(201)
+ response = hresponse.HandlerResponse(201)
response.result = None
try:
new_token = self._token_update_create(json_obj)
- response.status_code = save(self.db, new_token)
+ response.status_code, _ = utils.db.save(self.db, new_token)
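+ # utils.db.save now returns a (status_code, doc_id) tuple; the
+ # document ID is not needed here.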
if response.status_code == 201:
- response.result = {TOKEN_KEY: new_token.token}
+ response.result = {models.TOKEN_KEY: new_token.token}
location = urlunparse(
(
'http',
@@ -169,34 +147,43 @@ class TokenHandler(BaseHandler):
:param json_obj: The JSON object with the parameters.
:return A `HandlerResponse` object.
"""
- response = HandlerResponse()
+ response = hresponse.HandlerResponse()
response.result = None
- result = find_one(self.collection, doc_id, field=TOKEN_KEY)
+ try:
+ obj_id = bson.objectid.ObjectId(doc_id)
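+ # utils.db.find_one appears to match a list of values against the
+ # _id field by default, hence the single-element list here.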
+ result = utils.db.find_one(self.collection, [obj_id])
- if result:
- token = Token.from_json(result)
+ if result:
+ token = mtoken.Token.from_json(result)
- try:
token = self._token_update_create(json_obj, token, fail=False)
- response.status_code = update(
- self.collection, {'token': doc_id}, token.to_dict()
+ response.status_code = utils.db.update(
+ self.collection,
+ {models.ID_KEY: obj_id},
+ token.to_dict()
)
if response.status_code == 200:
- response.result = {TOKEN_KEY: token.token}
- except KeyError:
- response.status_code = 400
- response.reason = (
- "Mandatory field missing"
- )
- except (TypeError, ValueError):
- response.status_code = 400
- response.reason = "Wrong field value or type in the JSON data"
- except Exception, ex:
- response.status_code = 400
- response.reason = str(ex)
- else:
- response.status_code = 404
+ response.result = {models.TOKEN_KEY: token.token}
+ else:
+ response.status_code = 404
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error("Wrong ID '%s' value passed for object ID", doc_id)
+ response.status_code = 400
+ response.reason = "Wrong ID value provided"
+ except KeyError, ex:
+ self.log.exception(ex)
+ response.status_code = 400
+ response.reason = "Mandatory field missing"
+ except (TypeError, ValueError), ex:
+ self.log.exception(ex)
+ response.status_code = 400
+ response.reason = "Wrong field value or type in the JSON data"
+ except Exception, ex:
+ self.log.exception(ex)
+ response.status_code = 400
+ response.reason = str(ex)
return response
@@ -215,46 +202,52 @@ class TokenHandler(BaseHandler):
:raise KeyError, ValueError, TypeError, Exception.
"""
if not token:
- token = Token()
+ token = mtoken.Token()
+
+ json_get = json_obj.get
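+ # Cache the bound method: the same lookup is repeated for each key.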
if fail:
- token.email = json_obj[EMAIL_KEY]
+ token.email = json_obj[models.EMAIL_KEY]
else:
- if json_obj.get(EMAIL_KEY, None):
- token.email = json_obj.get(EMAIL_KEY)
+ if json_get(models.EMAIL_KEY, None):
+ token.email = json_get(models.EMAIL_KEY)
+
+ if json_get(models.USERNAME_KEY, None):
+ token.username = json_get(models.USERNAME_KEY)
- if json_obj.get(USERNAME_KEY, None):
- token.username = json_obj.get(USERNAME_KEY)
+ if json_get(models.EXPIRES_KEY, None):
+ token.expires_on = json_get(models.EXPIRES_KEY)
- if json_obj.get(EXPIRES_KEY, None):
- token.expires_on = json_obj.get(EXPIRES_KEY)
+ if json_get(models.GET_KEY, None):
+ token.is_get_token = json_get(models.GET_KEY)
- if json_obj.get(GET_KEY, None):
- token.is_get_token = json_obj.get(GET_KEY)
+ if json_get(models.POST_KEY, None):
+ token.is_post_token = json_get(models.POST_KEY)
- if json_obj.get(POST_KEY, None):
- token.is_post_token = json_obj.get(POST_KEY)
+ if json_get(models.DELETE_KEY, None):
+ token.is_delete_token = json_get(models.DELETE_KEY)
- if json_obj.get(DELETE_KEY, None):
- token.is_delete_token = json_obj.get(DELETE_KEY)
+ if json_get(models.SUPERUSER_KEY, None):
+ token.is_superuser = json_get(models.SUPERUSER_KEY)
- if json_obj.get(SUPERUSER_KEY, None):
- token.is_superuser = json_obj.get(SUPERUSER_KEY)
+ if json_get(models.ADMIN_KEY, None):
+ token.is_admin = json_get(models.ADMIN_KEY)
- if json_obj.get(ADMIN_KEY, None):
- token.is_admin = json_obj.get(ADMIN_KEY)
+ if json_get(models.IP_RESTRICTED, None):
+ token.is_ip_restricted = json_get(models.IP_RESTRICTED)
- if json_obj.get(IP_RESTRICTED, None):
- token.is_ip_restricted = json_obj.get(IP_RESTRICTED)
+ if json_get(models.LAB_KEY, None):
+ token.is_lab_token = json_get(models.LAB_KEY)
- if token.is_ip_restricted and not json_obj.get(IP_ADDRESS_KEY, None):
+ if token.is_ip_restricted and not json_get(models.IP_ADDRESS_KEY, None):
raise Exception("IP restricted but no IP addresses given")
- elif json_obj.get(IP_ADDRESS_KEY, None) and not token.is_ip_restricted:
+ elif (json_get(models.IP_ADDRESS_KEY, None) and
+ not token.is_ip_restricted):
raise Exception(
"IP addresses given, but token is not IP restricted"
)
- elif token.is_ip_restricted and json_obj.get(IP_ADDRESS_KEY, None):
- token.ip_address = json_obj.get(IP_ADDRESS_KEY)
+ elif token.is_ip_restricted and json_get(models.IP_ADDRESS_KEY, None):
+ token.ip_address = json_get(models.IP_ADDRESS_KEY)
return token
@@ -267,7 +260,7 @@ class TokenHandler(BaseHandler):
Subclasses should not override this unless there are special reasons
to.
"""
- response = HandlerResponse(400)
+ response = hresponse.HandlerResponse(400)
if self.validate_req_token("DELETE"):
if kwargs and kwargs.get('id', None):
@@ -276,15 +269,23 @@ class TokenHandler(BaseHandler):
response.reason = "Resource deleted"
else:
response.status_code = 403
- response.reason = NOT_VALID_TOKEN
+ response.reason = hcommon.NOT_VALID_TOKEN
return response
def _delete(self, doc_id):
ret_val = 404
- self.log.info("Tolen deletion by IP %s", self.request.remote_ip)
- if find_one(self.collection, doc_id, field=TOKEN_KEY):
- ret_val = delete(self.collection, {TOKEN_KEY: {'$in': [doc_id]}})
+ self.log.info("Token deletion from IP %s", self.request.remote_ip)
+ try:
+ doc_obj = bson.objectid.ObjectId(doc_id)
+ if utils.db.find_one(self.collection, [doc_obj]):
+ ret_val = utils.db.delete(
+ self.collection, {models.ID_KEY: {"$in": [doc_obj]}}
+ )
+ except bson.errors.InvalidId, ex:
+ self.log.exception(ex)
+ self.log.error("Wrong ID '%s' value passed as object ID", doc_id)
+ ret_val = 400
return ret_val
diff --git a/app/handlers/version.py b/app/handlers/version.py
new file mode 100644
index 0000000..265de8e
--- /dev/null
+++ b/app/handlers/version.py
@@ -0,0 +1,46 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Provide a simple /version handler."""
+
+import handlers
+import handlers.base as hbase
+import handlers.response as hresponse
+import models
+
+
+# pylint: disable=too-many-public-methods
+class VersionHandler(hbase.BaseHandler):
+ """Handle request to the /version URL.
+
+ Provide the backend version number in use.
+ """
+
+ def __init__(self, application, request, **kwargs):
+ super(VersionHandler, self).__init__(application, request, **kwargs)
+
+ def execute_get(self, *args, **kwargs):
+ response = hresponse.HandlerResponse()
+ response.result = [
+ {
+ models.VERSION_FULL_KEY: handlers.__versionfull__,
+ models.VERSION_KEY: handlers.__version__,
+ }
+ ]
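+ # A GET /version response would then serialize roughly as (with
+ # hypothetical version values):
+ # {"code": 200, "result": [{"full_version": "x.y", "version": "x"}]}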
+ return response
+
+ def execute_post(self, *args, **kwargs):
+ return hresponse.HandlerResponse(501)
+
+ def execute_delete(self, *args, **kwargs):
+ return hresponse.HandlerResponse(501)
diff --git a/app/models/__init__.py b/app/models/__init__.py
index 213868d..9bed4fe 100644
--- a/app/models/__init__.py
+++ b/app/models/__init__.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -16,57 +14,101 @@
# The default mongodb database name.
DB_NAME = 'kernel-ci'
+DEFAULT_SCHEMA_VERSION = "1.0"
+
# The default ID key, and other keys, for mongodb documents and queries.
+ACCEPTED_KEYS = 'accepted'
+ADDRESS_KEY = "address"
AGGREGATE_KEY = 'aggregate'
ARCHITECTURE_KEY = 'arch'
+ARM64_ARCHITECTURE_KEY = 'arm64'
+ARM_ARCHITECTURE_KEY = 'arm'
+BOARD_INSTANCE_KEY = 'board_instance'
BOARD_KEY = 'board'
+BOOT_ID_KEY = 'boot_id'
+BOOT_LOAD_ADDR_KEY = 'loadaddr'
BOOT_LOG_HTML_KEY = 'boot_log_html'
BOOT_LOG_KEY = 'boot_log'
+BOOT_RESULT_DESC_KEY = "boot_result_description"
+BOOT_RESULT_KEY = 'boot_result'
+BOOT_RETRIES_KEY = 'boot_retries'
+BOOT_TIME_KEY = 'boot_time'
+BOOT_WARNINGS_KEY = 'boot_warnings'
+BUILD_ERRORS_KEY = 'build_errors'
+BUILD_LOG_KEY = 'build_log'
+BUILD_PLATFORM_KEY = 'build_platform'
BUILD_RESULT_KEY = 'build_result'
+BUILD_TIME_KEY = 'build_time'
+BUILD_WARNINGS_KEY = 'build_warnings'
COMPILER_VERSION_KEY = 'compiler_version'
+CONTACT_KEY = "contact"
COUNT_KEY = "count"
CREATED_KEY = 'created_on'
CROSS_COMPILE_KEY = 'cross_compile'
DATE_RANGE_KEY = 'date_range'
+DEFCONFIG_FULL_KEY = 'defconfig_full'
+DEFCONFIG_ID_KEY = 'defconfig_id'
DEFCONFIG_KEY = 'defconfig'
DIRNAME_KEY = 'dirname'
DOC_ID_KEY = 'doc_id'
DTB_ADDR_KEY = 'dtb_addr'
+DTB_APPEND_KEY = 'dtb_append'
+DTB_DIR_KEY = 'dtb_dir'
DTB_KEY = 'dtb'
EMAIL_KEY = 'email'
+EMAIL_LIST_KEY = 'emails'
ENDIANNESS_KEY = 'endian'
ERRORS_KEY = 'errors'
EXPIRED_KEY = 'expired'
EXPIRES_KEY = 'expires_on'
+FASTBOOT_CMD_KEY = 'fastboot_cmd'
FASTBOOT_KEY = 'fastboot'
FIELD_KEY = 'field'
+FILE_SERVER_RESOURCE_KEY = 'file_server_resource'
+FILE_SERVER_URL_KEY = 'file_server_url'
GIT_BRANCH_KEY = 'git_branch'
GIT_COMMIT_KEY = 'git_commit'
GIT_DESCRIBE_KEY = 'git_describe'
GIT_URL_KEY = 'git_url'
ID_KEY = '_id'
INITRD_ADDR_KEY = 'initrd_addr'
+INITRD_KEY = 'initrd'
IP_ADDRESS_KEY = 'ip_address'
JOB_ID_KEY = 'job_id'
JOB_KEY = 'job'
+KCONFIG_FRAGMENTS_KEY = 'kconfig_fragments'
+KERNEL_CONFIG_KEY = 'kernel_config'
KERNEL_IMAGE_KEY = 'kernel_image'
KERNEL_KEY = 'kernel'
+LAB_ID_KEY = "lab_id"
+LAB_NAME_KEY = 'lab_name'
LIMIT_KEY = 'limit'
LOAD_ADDR_KEY = 'load_addr'
+MANDATORY_KEYS = 'mandatory'
METADATA_KEY = 'metadata'
+MODULES_DIR_KEY = 'modules_dir'
+MODULES_KEY = 'modules'
+NAME_KEY = "name"
NOT_FIELD_KEY = 'nfield'
PRIVATE_KEY = 'private'
PROPERTIES_KEY = 'properties'
RESULT_KEY = "result"
+RETRIES_KEY = 'retries'
SKIP_KEY = 'skip'
SORT_KEY = 'sort'
SORT_ORDER_KEY = 'sort_order'
STATUS_KEY = 'status'
+SURNAME_KEY = 'surname'
+SYSTEM_MAP_KEY = 'system_map'
+TEXT_OFFSET_KEY = 'text_offset'
TIME_KEY = 'time'
TOKEN_KEY = 'token'
-UPDATED_KEY = 'updated'
+UPDATED_KEY = 'updated_on'
USERNAME_KEY = 'username'
+VERSION_FULL_KEY = 'full_version'
+VERSION_KEY = 'version'
WARNINGS_KEY = 'warnings'
+x86_ARCHITECTURE_KEY = 'x86'
# Token special fields.
ADMIN_KEY = 'admin'
@@ -75,12 +117,15 @@ GET_KEY = 'get'
IP_RESTRICTED = 'ip_restricted'
POST_KEY = 'post'
SUPERUSER_KEY = 'superuser'
+LAB_KEY = "lab"
# Job and/or build status.
BUILD_STATUS = 'BUILD'
FAIL_STATUS = 'FAIL'
PASS_STATUS = 'PASS'
UNKNOWN_STATUS = 'UNKNOWN'
+OFFLINE_STATUS = 'OFFLINE'
+UNTRIED_STATUS = 'UNTRIED'
# Build file names.
DONE_FILE = '.done'
@@ -106,6 +151,7 @@ JOB_COLLECTION = 'job'
SUBSCRIPTION_COLLECTION = 'subscription'
TOKEN_COLLECTION = 'api-token'
BISECT_COLLECTION = 'bisect'
+LAB_COLLECTION = 'lab'
# Bisect values.
BISECT_BOOT_STATUS_KEY = 'boot_status'
@@ -122,3 +168,39 @@ BISECT_GOOD_COMMIT_DATE = 'good_commit_date'
BISECT_BAD_COMMIT_DATE = 'bad_commit_date'
BISECT_GOOD_COMMIT_URL = 'good_commit_url'
BISECT_BAD_COMMIT_URL = 'bad_commit_url'
+
+# Name formats.
+JOB_DOCUMENT_NAME = '%(job)s-%(kernel)s'
+BOOT_DOCUMENT_NAME = '%(board)s-%(job)s-%(kernel)s-%(defconfig)s-%(arch)s'
+DEFCONFIG_DOCUMENT_NAME = '%(job)s-%(kernel)s-%(defconfig)s'
+SUBSCRIPTION_DOCUMENT_NAME = 'sub-%(job)s-%(kernel)s'
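+# As an example, JOB_DOCUMENT_NAME % {'job': 'next', 'kernel':
+# 'next-20141205'} (hypothetical values) yields 'next-next-20141205'.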
+
+# Valid build status.
+VALID_BUILD_STATUS = [
+ BUILD_STATUS,
+ FAIL_STATUS,
+ PASS_STATUS,
+ UNKNOWN_STATUS
+]
+
+# Valid boot status.
+VALID_BOOT_STATUS = [
+ FAIL_STATUS,
+ OFFLINE_STATUS,
+ PASS_STATUS,
+ UNTRIED_STATUS,
+]
+
+# Valid job status.
+VALID_JOB_STATUS = [
+ BUILD_STATUS,
+ FAIL_STATUS,
+ PASS_STATUS,
+ UNKNOWN_STATUS,
+]
+
+# The valid collections for the bisect handler.
+BISECT_VALID_COLLECTIONS = [
+ BOOT_COLLECTION,
+ DEFCONFIG_COLLECTION,
+]
diff --git a/app/models/base.py b/app/models/base.py
index 8c42605..10c4a55 100644
--- a/app/models/base.py
+++ b/app/models/base.py
@@ -11,74 +11,83 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""The base document model that represents a mongodb document."""
+"""The base document abstract model that represents a mongodb document."""
-import json
-
-from bson import json_util
-
-from models import (
- CREATED_KEY,
- ID_KEY,
-)
+import abc
+# pylint: disable=abstract-class-not-used
class BaseDocument(object):
- """The base document model for all other documents."""
+ """The base document abstract model for all other documents.
- def __init__(self, name):
- self._name = name
- self._created_on = None
+ It defines the necessary methods and properties that all documents must
+ implement.
+ """
- @property
- def name(self):
- """The name of this document.
+ __metaclass__ = abc.ABCMeta
- It should be used as the `_id' field in a mongodb document.
+ id_doc = (
"""
- return self._name
+ The ID of this document as returned by mongodb.
- @property
- def collection(self):
- """The collection this document should belong to.
-
- :return None, subclasses should implement it.
+ This should only be set with values returned by mongodb since it is
+ an internally used field.
"""
- return None
+ )
- @property
- def created_on(self):
- """The date this document was created.
+ # pylint: disable=invalid-name
+ id = abc.abstractproperty(None, None, doc=id_doc)
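+ # abc.abstractproperty(None, None, doc=...) declares an abstract
+ # read/write attribute and documents it; concrete subclasses must
+ # provide their own property implementation.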
- :return A datetime object, with UTC time zone.
+ name_doc = (
"""
- return self._created_on
+ The name of this document.
- @created_on.setter
- def created_on(self, value):
- self._created_on = value
+ This is a user defined property usually built with values from the
+ document itself. It does not need to be unique among all documents.
+ """
+ )
- def to_dict(self):
- """Return a dictionary view of the document.
+ name = abc.abstractproperty(None, None, doc=name_doc)
+
+ @abc.abstractproperty
+ def collection(self):
+ """The collection this document belongs to."""
+ return None
- The name attribute will be available as the `_id' key, useful for
- mongodb document.
+ created_on_doc = (
+ """
+ The date this document was created.
- :return A dictionary.
+ A datetime object with UTC time zone.
"""
- return {
- ID_KEY: self._name,
- CREATED_KEY: self._created_on,
- }
+ )
- def to_json(self):
- """Return a JSON string for this object.
+ created_on = abc.abstractproperty(None, None, doc=created_on_doc)
- :return A JSON string.
+ version_doc = (
+ """
+ The schema version number of this object.
"""
- return json.dumps(self.to_dict(), default=json_util.default)
+ )
+
+ version = abc.abstractproperty(None, None, doc=version_doc)
+
+ @abc.abstractmethod
+ def to_dict(self):
+ """Return a dictionary view of the document that can be serialized."""
+ raise NotImplementedError(
+ "Class '%s' doesn't implement to_dict()" % self.__class__.__name__
+ )
@staticmethod
+ @abc.abstractmethod
def from_json(json_obj):
- """Build a document from a JSON object."""
- raise NotImplementedError()
+ """Build a document from a JSON object.
+
+ The passed `json_obj` must be a valid Python dictionary. No checks are
+ performed on its type.
+
+ :param json_obj: The JSON object from which to build this object.
+ :type json_obj: dict
+ """
+ raise NotImplementedError("This class doesn't implement from_json()")
diff --git a/app/models/bisect.py b/app/models/bisect.py
index 86b3aca..4d8d8e9 100644
--- a/app/models/bisect.py
+++ b/app/models/bisect.py
@@ -13,39 +13,30 @@
"""Bisect mongodb document models."""
-from models import (
- BISECT_BAD_COMMIT_DATE,
- BISECT_BAD_COMMIT_KEY,
- BISECT_BAD_COMMIT_URL,
- BISECT_COLLECTION,
- BISECT_DATA_KEY,
- BISECT_GOOD_COMMIT_DATE,
- BISECT_GOOD_COMMIT_KEY,
- BISECT_GOOD_COMMIT_URL,
- BOARD_KEY,
- CREATED_KEY,
- DOC_ID_KEY,
- ID_KEY,
- JOB_KEY,
-)
-from models.base import BaseDocument
-
-
-class BisectDocument(BaseDocument):
+import models
+import models.base as modb
+
+
+class BisectDocument(modb.BaseDocument):
"""The bisect document model class."""
+ # pylint: disable=too-many-instance-attributes
+ # pylint: disable=invalid-name
def __init__(self, name):
- super(BisectDocument, self).__init__(name)
-
+ self._created_on = None
self._id = None
- self._job = None
- self._bisect_data = []
- self._bad_commit = None
- self._good_commit = None
- self._bad_commit_date = None
- self._good_commit_date = None
- self._bad_commit_url = None
- self._good_commit_url = None
+ self._name = name
+ self._version = None
+
+ self.bad_commit = None
+ self.bad_commit_date = None
+ self.bad_commit_url = None
+ self.bisect_data = []
+ self.good_commit = None
+ self.good_commit_date = None
+ self.good_commit_url = None
+ self.job = None
+ self.job_id = None
@property
def collection(self):
@@ -53,125 +44,80 @@ class BisectDocument(BaseDocument):
Where document of this kind will be stored.
"""
- return BISECT_COLLECTION
+ return models.BISECT_COLLECTION
@property
- def id(self):
- """The ID of this object in the database.
-
- This value should be returned by mongodb.
- """
- return self._id
+ def name(self):
+ """The name of the boot report."""
+ return self._name
@property
- def doc_id(self):
- """The interl doc ID."""
- return self._name
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
@id.setter
def id(self, value):
- """Set the ID of this object."""
- self._id = value
-
- @property
- def job(self):
- """The job this document is part of."""
- return self._job
-
- @job.setter
- def job(self, value):
- """Set the job this document is part of."""
- self._job = value
-
- @property
- def bad_commit_date(self):
- """The date of the bad commit."""
- return self._bad_commit_date
+ """Set the ID of this object with the ObjectID from mongodb.
- @bad_commit_date.setter
- def bad_commit_date(self, value):
- """Set the date of the bad commit."""
- self._bad_commit_date = value
-
- @property
- def bad_commit(self):
- """The bad commit hash value."""
- return self._bad_commit
-
- @bad_commit.setter
- def bad_commit(self, value):
- """Set the bad commit hash value."""
- self._bad_commit = value
-
- @property
- def bad_commit_url(self):
- """The URL of the bad commit."""
- return self._bad_commit_url
-
- @bad_commit_url.setter
- def bad_commit_url(self, value):
- """Set the URL of the bad commit."""
- self._bad_commit_url = value
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
@property
- def good_commit(self):
- """The good commit hash value."""
- return self._good_commit
+ def created_on(self):
+ """When this lab object was created."""
+ return self._created_on
- @good_commit.setter
- def good_commit(self, value):
- """Set the good commit hash value."""
- self._good_commit = value
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this lab object.
- @property
- def good_commit_date(self):
- """The date of the good commit."""
- return self._good_commit_date
-
- @good_commit_date.setter
- def good_commit_date(self, value):
- """Set the date of the good commit."""
- self._good_commit_date = value
+ :param value: The creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
@property
- def good_commit_url(self):
- """The URL of the good commit."""
- return self._good_commit_url
+ def version(self):
+ """The schema version of this object."""
+ return self._version
- @good_commit_url.setter
- def good_commit_url(self, value):
- """Set the URL of the good commit."""
- self._good_commit_url = value
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
- @property
- def bisect_data(self):
- """Get all the bisect data, ranging from the bad to the good commit."""
- return self._bisect_data
-
- @bisect_data.setter
- def bisect_data(self, value):
- """Set the bisect data."""
- self._bisect_data = value
+ :param value: The schema string.
+ :type value: str
+ """
+ self._version = value
def to_dict(self):
bisect_dict = {
- CREATED_KEY: self._created_on,
- JOB_KEY: self._job,
- DOC_ID_KEY: self._name,
- BISECT_DATA_KEY: self._bisect_data,
- BISECT_GOOD_COMMIT_KEY: self._good_commit,
- BISECT_GOOD_COMMIT_DATE: self._good_commit_date,
- BISECT_GOOD_COMMIT_URL: self._good_commit_url,
- BISECT_BAD_COMMIT_KEY: self._bad_commit,
- BISECT_BAD_COMMIT_DATE: self._bad_commit_date,
- BISECT_BAD_COMMIT_URL: self._bad_commit_url,
+ models.BISECT_BAD_COMMIT_DATE: self.bad_commit_date,
+ models.BISECT_BAD_COMMIT_KEY: self.bad_commit,
+ models.BISECT_BAD_COMMIT_URL: self.bad_commit_url,
+ models.BISECT_DATA_KEY: self.bisect_data,
+ models.BISECT_GOOD_COMMIT_DATE: self.good_commit_date,
+ models.BISECT_GOOD_COMMIT_KEY: self.good_commit,
+ models.BISECT_GOOD_COMMIT_URL: self.good_commit_url,
+ models.CREATED_KEY: self.created_on,
+ models.JOB_ID_KEY: self.job_id,
+ models.JOB_KEY: self.job,
+ models.NAME_KEY: self.name,
+ models.VERSION_KEY: self.version,
}
- if self._id:
- bisect_dict[ID_KEY] = self._id
+ if self.id:
+ bisect_dict[models.ID_KEY] = self.id
return bisect_dict
+ @staticmethod
+ def from_json(json_obj):
+ return None
+
class BootBisectDocument(BisectDocument):
"""The bisect document class for boot bisection."""
@@ -179,19 +125,34 @@ class BootBisectDocument(BisectDocument):
def __init__(self, name):
super(BootBisectDocument, self).__init__(name)
- self._board = None
-
- @property
- def board(self):
- """The board this document belongs to."""
- return self._board
-
- @board.setter
- def board(self, value):
- """Set the board name this document belongs to."""
- self._board = value
+ self.board = None
+ self.defconfig_id = None
+ self.boot_id = None
def to_dict(self):
boot_b_dict = super(BootBisectDocument, self).to_dict()
- boot_b_dict[BOARD_KEY] = self._board
+ boot_b_dict[models.BOARD_KEY] = self.board
+ boot_b_dict[models.DEFCONFIG_ID_KEY] = self.defconfig_id
+ boot_b_dict[models.BOOT_ID_KEY] = self.boot_id
return boot_b_dict
+
+
+class DefconfigBisectDocument(BisectDocument):
+ """The bisect document class for defconfig/build bisection."""
+
+ def __init__(self, name):
+ super(DefconfigBisectDocument, self).__init__(name)
+
+ self.defconfig = None
+ self.defconfig_id = None
+ self.defconfig_full = None
+ self.arch = None
+
+ def to_dict(self):
+ def_b_dict = super(DefconfigBisectDocument, self).to_dict()
+ def_b_dict[models.DEFCONFIG_ID_KEY] = self.defconfig_id
+ def_b_dict[models.DEFCONFIG_KEY] = self.defconfig
+ def_b_dict[models.DEFCONFIG_FULL_KEY] = self.defconfig_full
+ def_b_dict[models.ARCHITECTURE_KEY] = self.arch
+
+ return def_b_dict
diff --git a/app/models/boot.py b/app/models/boot.py
index 8494c04..92c3dfc 100644
--- a/app/models/boot.py
+++ b/app/models/boot.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -15,74 +13,112 @@
"""The model that represents a boot document in the mongodb collection."""
-from models import (
- BOARD_KEY,
- BOOT_COLLECTION,
- BOOT_LOG_HTML_KEY,
- BOOT_LOG_KEY,
- DEFCONFIG_KEY,
- DTB_ADDR_KEY,
- DTB_KEY,
- ENDIANNESS_KEY,
- FASTBOOT_KEY,
- INITRD_ADDR_KEY,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_IMAGE_KEY,
- KERNEL_KEY,
- LOAD_ADDR_KEY,
- METADATA_KEY,
- STATUS_KEY,
- TIME_KEY,
- WARNINGS_KEY,
-)
-from models.base import BaseDocument
-from models.job import JobDocument
-
-
-class BootDocument(BaseDocument):
+import models
+import models.base as modb
+
+
+# pylint: disable=too-many-public-methods
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=too-many-arguments
+# pylint: disable=invalid-name
+class BootDocument(modb.BaseDocument):
"""Model for a boot document.
Each document is a single booted board.
"""
- ID_FORMAT = '%(board)s-%(job)s-%(kernel)s-%(defconfig)s'
-
- def __init__(self, board, job, kernel, defconfig):
- super(BootDocument, self).__init__(
- self.ID_FORMAT % {
- BOARD_KEY: board,
- JOB_KEY: job,
- KERNEL_KEY: kernel,
- DEFCONFIG_KEY: defconfig,
- }
- )
+ def __init__(
+ self, board, job, kernel, defconfig, lab_name,
+ defconfig_full=None, arch=models.ARM_ARCHITECTURE_KEY):
+ """A new BootDocument.
+
+ :param board: The name of the board.
+ :type board: str
+ :param job: The name of the job.
+ :type job: str
+ :param kernel: The name of the kernel.
+ :type kernel: str
+ :param defconfig: The name of the defconfig.
+ :type defconfig: str
+ :param lab_name: The user readable ID of the lab.
+ :type lab_name: str
+ """
- self._job_id = JobDocument.ID_FORMAT % {
- JOB_KEY: job, KERNEL_KEY: kernel
+ doc_name = models.BOOT_DOCUMENT_NAME % {
+ models.BOARD_KEY: board,
+ models.DEFCONFIG_KEY: defconfig_full or defconfig,
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel,
+ models.ARCHITECTURE_KEY: arch
}
+ self._created_on = None
+ self._id = None
+ self._name = doc_name
+ self._version = None
+
+ self._arch = arch
self._board = board
+ self._defconfig = defconfig
+ self._defconfig_full = defconfig_full or defconfig
self._job = job
self._kernel = kernel
- self._defconfig = defconfig
- self._time = None
- self._status = None
- self._warnings = None
- self._boot_log = None
- self._initrd_addr = None
- self._load_addr = None
- self._kernel_image = None
- self._dtb_addr = None
- self._dtb = None
- self._endianness = None
- self._metadata = None
- self._fastboot = None
- self._boot_log_html = None
+ self._lab_name = lab_name
+ self.board_instance = None
+ self.boot_log = None
+ self.boot_log_html = None
+ self.boot_result_description = None
+ self.defconfig_id = None
+ self.dtb = None
+ self.dtb_addr = None
+ self.dtb_append = None
+ self.endianness = None
+ self.fastboot = False
+ self.fastboot_cmd = None
+ self.file_server_resource = None
+ self.file_server_url = None
+ self.git_branch = None
+ self.git_commit = None
+ self.git_describe = None
+ self.git_url = None
+ self.initrd = None
+ self.initrd_addr = None
+ self.job_id = None
+ self.kernel_image = None
+ self.load_addr = None
+ self.metadata = {}
+ self.retries = 0
+ self.status = None
+ self.time = 0
+ self.warnings = 0
@property
def collection(self):
- return BOOT_COLLECTION
+ return models.BOOT_COLLECTION
+
+ @property
+ def name(self):
+ """The name of the boot report."""
+ return self._name
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
+
+ @property
+ def arch(self):
+ """The architecture of the board."""
+ return self._arch
@property
def board(self):
@@ -105,142 +141,95 @@ class BootDocument(BaseDocument):
return self._defconfig
@property
- def status(self):
- """The boot status of this document."""
- return self._status
-
- @status.setter
- def status(self, value):
- self._status = value
-
- @property
- def boot_log(self):
- """The log of this board."""
- return self._boot_log
-
- @boot_log.setter
- def boot_log(self, value):
- self._boot_log = value
-
- @property
- def time(self):
- """The time it took this board to boot.
-
- Represented as the time passed after the epoch time.
- """
- return self._time
-
- @time.setter
- def time(self, value):
- self._time = value
-
- @property
- def warnings(self):
- """The number of warnings associated with this board."""
- return self._warnings
+ def defconfig_full(self):
+ """The full value of the defconfig, with fragments."""
+ return self._defconfig_full
- @warnings.setter
- def warnings(self, value):
- self._warnings = value
+ @defconfig_full.setter
+ def defconfig_full(self, value):
+ """Set the defconfig full name."""
+ self._defconfig_full = value
@property
- def job_id(self):
- """The ID of the Job document associated with this boot."""
- return self._job_id
+ def lab_name(self):
+ """Get the lab ID value of this boot report."""
+ return self._lab_name
- @property
- def initrd_addr(self):
- return self._initrd_addr
-
- @initrd_addr.setter
- def initrd_addr(self, value):
- self._initrd_addr = value
+ @lab_name.setter
+ def lab_name(self, value):
+ """Set the lab ID value."""
+ self._lab_name = value
@property
- def load_addr(self):
- """The load_addr."""
- return self._load_addr
-
- @load_addr.setter
- def load_addr(self, value):
- self._load_addr = value
+ def created_on(self):
+ """When this lab object was created."""
+ return self._created_on
- @property
- def dtb_addr(self):
- return self._dtb_addr
-
- @dtb_addr.setter
- def dtb_addr(self, value):
- self._dtb_addr = value
-
- @property
- def dtb(self):
- """The dtb file of this boot document."""
- return self._dtb
-
- @dtb.setter
- def dtb(self, value):
- self._dtb = value
-
- @property
- def kernel_image(self):
- """The kernel image used to boot."""
- return self._kernel_image
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this lab object.
- @kernel_image.setter
- def kernel_image(self, value):
- self._kernel_image = value
-
- @property
- def endianness(self):
- return self._endianness
-
- @endianness.setter
- def endianness(self, value):
- self._endianness = value
-
- @property
- def fastboot(self):
- return self._fastboot
-
- @fastboot.setter
- def fastboot(self, value):
- self._fastboot = value
+ :param value: The creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
@property
- def boot_log_html(self):
- return self._boot_log_html
+ def version(self):
+ """The version of this document schema."""
+ return self._version
- @boot_log_html.setter
- def boot_log_html(self, value):
- self._boot_log_html = value
+ @version.setter
+ def version(self, value):
+ """The version of this document schema."""
+ self._version = value
- @property
- def metadata(self):
- return self._metadata
+ def to_dict(self):
+ boot_dict = {
+ models.ARCHITECTURE_KEY: self.arch,
+ models.BOARD_INSTANCE_KEY: self.board_instance,
+ models.BOARD_KEY: self.board,
+ models.BOOT_LOG_HTML_KEY: self.boot_log_html,
+ models.BOOT_LOG_KEY: self.boot_log,
+ models.BOOT_RESULT_DESC_KEY: self.boot_result_description,
+ models.CREATED_KEY: self.created_on,
+ models.DEFCONFIG_FULL_KEY: self.defconfig_full,
+ models.DEFCONFIG_ID_KEY: self.defconfig_id,
+ models.DEFCONFIG_KEY: self.defconfig,
+ models.DTB_ADDR_KEY: self.dtb_addr,
+ models.DTB_APPEND_KEY: self.dtb_append,
+ models.DTB_KEY: self.dtb,
+ models.ENDIANNESS_KEY: self.endianness,
+ models.FASTBOOT_CMD_KEY: self.fastboot_cmd,
+ models.FASTBOOT_KEY: self.fastboot,
+ models.FILE_SERVER_RESOURCE_KEY: self.file_server_resource,
+ models.FILE_SERVER_URL_KEY: self.file_server_url,
+ models.GIT_BRANCH_KEY: self.git_branch,
+ models.GIT_COMMIT_KEY: self.git_commit,
+ models.GIT_DESCRIBE_KEY: self.git_describe,
+ models.GIT_URL_KEY: self.git_url,
+ models.INITRD_ADDR_KEY: self.initrd_addr,
+ models.INITRD_KEY: self.initrd,
+ models.JOB_ID_KEY: self.job_id,
+ models.JOB_KEY: self.job,
+ models.KERNEL_IMAGE_KEY: self.kernel_image,
+ models.KERNEL_KEY: self.kernel,
+ models.LAB_NAME_KEY: self.lab_name,
+ models.LOAD_ADDR_KEY: self.load_addr,
+ models.METADATA_KEY: self.metadata,
+ models.NAME_KEY: self.name,
+ models.RETRIES_KEY: self.retries,
+ models.STATUS_KEY: self.status,
+ models.TIME_KEY: self.time,
+ models.VERSION_KEY: self.version,
+ models.WARNINGS_KEY: self.warnings
+ }
- @metadata.setter
- def metadata(self, value):
- self._metadata = value
+ if self.id:
+ boot_dict[models.ID_KEY] = self.id
- def to_dict(self):
- boot_dict = super(BootDocument, self).to_dict()
- boot_dict[BOARD_KEY] = self._board
- boot_dict[TIME_KEY] = self._time
- boot_dict[JOB_KEY] = self._job
- boot_dict[KERNEL_KEY] = self._kernel
- boot_dict[DEFCONFIG_KEY] = self._defconfig
- boot_dict[STATUS_KEY] = self._status
- boot_dict[BOOT_LOG_KEY] = self._boot_log
- boot_dict[WARNINGS_KEY] = self._warnings
- boot_dict[JOB_ID_KEY] = self._job_id
- boot_dict[KERNEL_IMAGE_KEY] = self._kernel_image
- boot_dict[LOAD_ADDR_KEY] = self._load_addr
- boot_dict[INITRD_ADDR_KEY] = self._initrd_addr
- boot_dict[DTB_KEY] = self._dtb
- boot_dict[DTB_ADDR_KEY] = self._dtb_addr
- boot_dict[ENDIANNESS_KEY] = self._endianness
- boot_dict[METADATA_KEY] = self._metadata
- boot_dict[FASTBOOT_KEY] = self._fastboot
- boot_dict[BOOT_LOG_HTML_KEY] = self._boot_log_html
return boot_dict
+
+ @staticmethod
+ def from_json(json_obj):
+ return None
diff --git a/app/models/defconfig.py b/app/models/defconfig.py
index cbef0a6..d41761b 100644
--- a/app/models/defconfig.py
+++ b/app/models/defconfig.py
@@ -15,71 +15,111 @@
"""The model that represents a defconfing document in the mongodb collection."""
-from models import (
- ARCHITECTURE_KEY,
- DEFCONFIG_COLLECTION,
- DEFCONFIG_KEY,
- DIRNAME_KEY,
- ERRORS_KEY,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_KEY,
- METADATA_KEY,
- STATUS_KEY,
- WARNINGS_KEY,
-)
-from models.base import BaseDocument
-
-
-class DefConfigDocument(BaseDocument):
+import types
+
+import models
+import models.base as modb
+
+
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=invalid-name
+class DefconfigDocument(modb.BaseDocument):
+
"""This class represents a defconfig folder as seen on the file system."""
- ID_FORMAT = '%(job_id)s-%(defconfig)s'
+ def __init__(self, job, kernel, defconfig, defconfig_full=None):
- def __init__(self, name, job_id, job=None, kernel=None):
- super(DefConfigDocument, self).__init__(
- self.ID_FORMAT % {JOB_ID_KEY: job_id, DEFCONFIG_KEY: name}
- )
+ doc_name = {
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel,
+ models.DEFCONFIG_KEY: defconfig_full or defconfig
+ }
- self._job_id = job_id
+ self._created_on = None
+ self._id = None
+ self._name = models.DEFCONFIG_DOCUMENT_NAME % doc_name
+ self._version = None
+
+ self._build_platform = []
+ self._defconfig = defconfig
+ self._defconfig_full = defconfig_full or defconfig
self._job = job
self._kernel = kernel
- self._defconfig = None
- self._dirname = None
- self._status = None
self._metadata = {}
- self._errors = None
- self._warnings = None
- self._arch = None
+ self._status = None
+ self.arch = None
+ self.build_log = None
+ self.build_time = 0
+ self.dirname = None
+ self.dtb_dir = None
+ self.errors = 0
+ self.file_server_resource = None
+ self.file_server_url = None
+ self.git_branch = None
+ self.git_commit = None
+ self.git_describe = None
+ self.git_url = None
+ self.job_id = None
+ self.kconfig_fragments = None
+ self.kernel_config = None
+ self.kernel_image = None
+ self.modules = None
+ self.modules_dir = None
+ self.system_map = None
+ self.text_offset = None
+ self.warnings = 0
@property
def collection(self):
- return DEFCONFIG_COLLECTION
+ return models.DEFCONFIG_COLLECTION
+
+ @property
+ def created_on(self):
+ """When this object was created."""
+ return self._created_on
+
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this object.
+
+ :param value: The creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
@property
- def job_id(self):
- """The job ID this defconfig belogns to."""
- return self._job_id
+ def name(self):
+ """The name of the object."""
+ return self._name
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
@property
def job(self):
"""The job this defconfig belongs too."""
return self._job
- @job.setter
- def job(self, value):
- """Set the job name of this defconfig."""
- self._job = value
-
@property
def kernel(self):
"""The kernel this defconfig was built against."""
return self._kernel
- @kernel.setter
- def kernel(self, value):
- """Set the kernel of this defconfig."""
- self._kernel = value
+ @property
+ def defconfig(self):
+ """The defconfig name."""
+ return self._defconfig
@property
def metadata(self):
@@ -92,6 +132,10 @@ class DefConfigDocument(BaseDocument):
:param value: A dictionary with defconfig metadata.
"""
+ if not isinstance(value, types.DictionaryType):
+ raise TypeError(
+ "Passed value is not a dictionary, got %s" % type(value)
+ )
self._metadata = value
@property
@@ -105,63 +149,92 @@ class DefConfigDocument(BaseDocument):
:param value: The status as string.
"""
+ if value not in models.VALID_BUILD_STATUS:
+ raise ValueError(
+ "Status value '%s' not valid, should be one of: %s" %
+ (value, str(models.VALID_BUILD_STATUS))
+ )
self._status = value
@property
- def defconfig(self):
- """The defconfig name of this document."""
- return self._defconfig
+ def build_platform(self):
+ """Details about the platform used to build."""
+ return self._build_platform
- @defconfig.setter
- def defconfig(self, value):
- self._defconfig = value
+ @build_platform.setter
+ def build_platform(self, value):
+ """Set details about the build platform."""
+ if not isinstance(value, types.ListType):
+ raise TypeError("Value passed is not a list: %s", type(value))
+ self._build_platform = value
@property
- def errors(self):
- """Number of errors associated with this defconfig."""
- return self._errors
+ def version(self):
+ """The schema version of this object."""
+ return self._version
- @errors.setter
- def errors(self, value):
- self._errors = value
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
- @property
- def warnings(self):
- """Number of warnings associated with this defconfig."""
- return self._warnings
-
- @warnings.setter
- def warnings(self, value):
- self._warnings = value
+ :param value: The schema string.
+ :type param: str
+ """
+ self._version = value
@property
- def arch(self):
- """The architecture of this defconfig."""
- return self._arch
+ def defconfig_full(self):
+ """The full defconfig name.
- @arch.setter
- def arch(self, value):
- self._arch = value
-
- @property
- def dirname(self):
- """The name of the directory of this defconfig."""
- return self._dirname
+ This parameter also contains the config fragments information.
+ """
+ return self._defconfig_full
- @dirname.setter
- def dirname(self, value):
- self._dirname = value
+ @defconfig_full.setter
+ def defconfig_full(self, value):
+ """Set the full defconfig name."""
+ self._defconfig_full = value
def to_dict(self):
- defconf_dict = super(DefConfigDocument, self).to_dict()
- defconf_dict[JOB_ID_KEY] = self._job_id
- defconf_dict[JOB_KEY] = self._job
- defconf_dict[KERNEL_KEY] = self._kernel
- defconf_dict[STATUS_KEY] = self._status
- defconf_dict[METADATA_KEY] = self._metadata
- defconf_dict[DEFCONFIG_KEY] = self._defconfig
- defconf_dict[WARNINGS_KEY] = self._warnings
- defconf_dict[ERRORS_KEY] = self._errors
- defconf_dict[ARCHITECTURE_KEY] = self._arch
- defconf_dict[DIRNAME_KEY] = self._dirname
+ defconf_dict = {
+ models.ARCHITECTURE_KEY: self.arch,
+ models.BUILD_LOG_KEY: self.build_log,
+ models.BUILD_PLATFORM_KEY: self.build_platform,
+ models.BUILD_TIME_KEY: self.build_time,
+ models.CREATED_KEY: self.created_on,
+ models.DEFCONFIG_FULL_KEY: self.defconfig_full,
+ models.DEFCONFIG_KEY: self.defconfig,
+ models.DIRNAME_KEY: self.dirname,
+ models.DTB_DIR_KEY: self.dtb_dir,
+ models.ERRORS_KEY: self.errors,
+ models.FILE_SERVER_RESOURCE_KEY: self.file_server_resource,
+ models.FILE_SERVER_URL_KEY: self.file_server_url,
+ models.GIT_BRANCH_KEY: self.git_branch,
+ models.GIT_COMMIT_KEY: self.git_commit,
+ models.GIT_DESCRIBE_KEY: self.git_describe,
+ models.GIT_URL_KEY: self.git_url,
+ models.JOB_ID_KEY: self.job_id,
+ models.JOB_KEY: self.job,
+ models.KCONFIG_FRAGMENTS_KEY: self.kconfig_fragments,
+ models.KERNEL_CONFIG_KEY: self.kernel_config,
+ models.KERNEL_IMAGE_KEY: self.kernel_image,
+ models.KERNEL_KEY: self.kernel,
+ models.METADATA_KEY: self.metadata,
+ models.MODULES_DIR_KEY: self.modules_dir,
+ models.MODULES_KEY: self.modules,
+ models.NAME_KEY: self.name,
+ models.STATUS_KEY: self.status,
+ models.SYSTEM_MAP_KEY: self.system_map,
+ models.TEXT_OFFSET_KEY: self.text_offset,
+ models.VERSION_KEY: self.version,
+ models.WARNINGS_KEY: self.warnings,
+ }
+
+ if self.id:
+ defconf_dict[models.ID_KEY] = self.id
+
return defconf_dict
+
+ @staticmethod
+ def from_json(json_obj):
+ return None
diff --git a/app/models/job.py b/app/models/job.py
index 81d1a77..4d5cdb7 100644
--- a/app/models/job.py
+++ b/app/models/job.py
@@ -15,27 +15,14 @@
"""The model that represents a job document in the mongodb collection."""
-from bson import json_util
-from types import StringTypes
-
-from models import (
- GIT_BRANCH_KEY,
- GIT_COMMIT_KEY,
- GIT_DESCRIBE_KEY,
- GIT_URL_KEY,
- ID_KEY,
- JOB_COLLECTION,
- JOB_KEY,
- KERNEL_KEY,
- METADATA_KEY,
- PRIVATE_KEY,
- STATUS_KEY,
- UPDATED_KEY,
-)
-from models.base import BaseDocument
-
-
-class JobDocument(BaseDocument):
+import types
+
+import models
+import models.base as modb
+
+
+# pylint: disable=invalid-name
+class JobDocument(modb.BaseDocument):
"""This class represents a job as seen on the file system.
Each job on the file system is composed of a real job name (usually who
@@ -43,24 +30,63 @@ class JobDocument(BaseDocument):
of the two, and its name is of the form `job-kernel`.
"""
- ID_FORMAT = '%(job)s-%(kernel)s'
- METADATA_KEYS = (
- GIT_URL_KEY, GIT_BRANCH_KEY, GIT_DESCRIBE_KEY, GIT_COMMIT_KEY,
- )
+ def __init__(self, job, kernel):
- def __init__(self, name, job=None, kernel=None):
- super(JobDocument, self).__init__(name)
+ doc_name = {
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel,
+ }
+
+ self._created_on = None
+ self._id = None
+ self._name = models.JOB_DOCUMENT_NAME % doc_name
+ self._version = None
- self._private = False
self._job = job
self._kernel = kernel
- self._status = None
- self._updated = None
- self._metadata = {}
+ self.git_branch = None
+ self.git_commit = None
+ self.git_describe = None
+ self.git_url = None
+ self.private = False
+ self.status = None
@property
def collection(self):
- return JOB_COLLECTION
+ return models.JOB_COLLECTION
+
+ @property
+ def name(self):
+ """The name of the object."""
+ return self._name
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
+
+ @property
+ def created_on(self):
+ """When this object was created."""
+ return self._created_on
+
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this object.
+
+ :param value: The creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
@property
def private(self):
@@ -77,94 +103,130 @@ class JobDocument(BaseDocument):
@property
def job(self):
- """Return the real job name as found on the file system."""
+ """The real job name as found on the file system."""
return self._job
- @job.setter
- def job(self, value):
- """Set the real job name as found on the file system."""
- self._job = value
-
@property
def kernel(self):
- """Return the real kernel name as found on the file system."""
+ """The real kernel name as found on the file system."""
return self._kernel
- @kernel.setter
- def kernel(self, value):
- """Set the real kernel name as found on the file system."""
- self._kernel = value
-
- @property
- def updated(self):
- """The date this document was last updated.
-
- :return A string representing a datetime object in ISO format,
- UTC time zone.
- """
- return self._updated
-
- @updated.setter
- def updated(self, value):
- """Set the date this document was last updated.
-
- :param value: A string representing a datetime object in ISO format.
- """
- self._updated = value
-
@property
def status(self):
- """The build status of this job."""
+ """The status of the job."""
return self._status
@status.setter
def status(self, value):
- """Set the build status of the job.
+ """Set the status of the job.
:param value: The status.
"""
+ if value is not None and value not in models.VALID_JOB_STATUS:
+ raise ValueError(
+ "Status value '%s' not valid, should be one of: %s" %
+ (value, str(models.VALID_JOB_STATUS))
+ )
self._status = value
@property
- def metadata(self):
- """The metadata associated with this job.
+ def version(self):
+ """The schema version of this object."""
+ return self._version
- A dictionary contaning information like commit ID, tree URL...
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
+
+ :param value: The schema string.
+ :type value: str
"""
- return self._metadata
+ self._version = value
- @metadata.setter
- def metadata(self, value):
- """Set the metadata dictionary associated with this job.
+ @property
+ def git_url(self):
+ """The git URL where the code comes from."""
+ return self._git_url
- :param value: A dictionary containing the metadata.
- """
- self._metadata = value
+ @git_url.setter
+ def git_url(self, value):
+ """Set the git URL of this defconfig document."""
+ self._git_url = value
+
+ @property
+ def git_commit(self):
+ """The git commit SHA."""
+ return self._git_commit
+
+ @git_commit.setter
+ def git_commit(self, value):
+ """Set the git commit SHA."""
+ self._git_commit = value
+
+ @property
+ def git_branch(self):
+ """The branch name of the repository used."""
+ return self._git_branch
+
+ @git_branch.setter
+ def git_branch(self, value):
+ """Set the branch name of the repository used."""
+ self._git_branch = value
+
+ @property
+ def git_describe(self):
+ """The git describe value of the repository."""
+ return self._git_describe
+
+ @git_describe.setter
+ def git_describe(self, value):
+ """Set the git describe value of the repository."""
+ self._git_describe = value
def to_dict(self):
- job_dict = super(JobDocument, self).to_dict()
- job_dict[PRIVATE_KEY] = self._private
- job_dict[JOB_KEY] = self._job
- job_dict[KERNEL_KEY] = self._kernel
- job_dict[UPDATED_KEY] = self._updated
- job_dict[STATUS_KEY] = self._status
- job_dict[METADATA_KEY] = self._metadata
+ job_dict = {
+ models.CREATED_KEY: self.created_on,
+ models.GIT_BRANCH_KEY: self.git_branch,
+ models.GIT_COMMIT_KEY: self.git_commit,
+ models.GIT_DESCRIBE_KEY: self.git_describe,
+ models.GIT_URL_KEY: self.git_url,
+ models.JOB_KEY: self.job,
+ models.KERNEL_KEY: self.kernel,
+ models.NAME_KEY: self.name,
+ models.PRIVATE_KEY: self.private,
+ models.STATUS_KEY: self.status,
+ models.VERSION_KEY: self.version,
+ }
+
+ if self.id:
+ job_dict[models.ID_KEY] = self.id
+
return job_dict
@staticmethod
def from_json(json_obj):
"""Build a document from a JSON object.
- :param json_obj: The JSON object to start from, or a JSON string.
- :return An instance of `JobDocument`.
+ :param json_obj: The JSON object to start from.
+ :return An instance of `JobDocument` or None.
"""
- if isinstance(json_obj, StringTypes):
- json_obj = json_util.loads(json_obj)
-
- name = json_obj.pop(ID_KEY)
-
- job_doc = JobDocument(name)
- for key, value in json_obj.iteritems():
- setattr(job_doc, key, value)
+ job_doc = None
+
+ # pylint: disable=maybe-no-member
+ if json_obj and isinstance(json_obj, types.DictionaryType):
+ json_get = json_obj.get
+ job = json_get(models.JOB_KEY)
+ kernel = json_get(models.KERNEL_KEY)
+
+ job_doc = JobDocument(job, kernel)
+
+ job_doc.created_on = json_get(models.CREATED_KEY, None)
+ job_doc.git_branch = json_get(models.GIT_BRANCH_KEY, None)
+ job_doc.git_commit = json_get(models.GIT_COMMIT_KEY, None)
+ job_doc.git_describe = json_get(models.GIT_DESCRIBE_KEY, None)
+ job_doc.git_url = json_get(models.GIT_URL_KEY, None)
+ job_doc.id = json_get(models.ID_KEY, None)
+ job_doc.status = json_get(models.STATUS_KEY, None)
+ job_doc.version = json_get(models.VERSION_KEY, "1.0")
return job_doc
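
A minimal usage sketch of the reworked JobDocument (not part of the patch), assuming only the constructor, `from_json` and `to_dict` shown in this hunk, with key names taken from the `models` module as used above:

    import models
    import models.job as modj

    payload = {
        models.JOB_KEY: "next",
        models.KERNEL_KEY: "v3.18-rc1",
        models.STATUS_KEY: "PASS",
    }

    job_doc = modj.JobDocument.from_json(payload)
    assert job_doc.name == "next-v3.18-rc1"  # name is built as "<job>-<kernel>"

    try:
        job_doc.status = "foo"  # rejected: not in models.VALID_JOB_STATUS
    except ValueError:
        pass

    job_dict = job_doc.to_dict()
    # _id is only included once the document has an ID assigned by mongodb.
    assert models.ID_KEY not in job_dict
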
diff --git a/app/models/lab.py b/app/models/lab.py
new file mode 100644
index 0000000..ddd5c7b
--- /dev/null
+++ b/app/models/lab.py
@@ -0,0 +1,199 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""The model that represents a lab for boot testing or other tests."""
+
+import types
+
+import models
+import models.base as modb
+
+
+class LabDocument(modb.BaseDocument):
+ """This class represents a lab object as stored in the database.
+
+ A lab object contains all the necessary information needed to set up
+ and accept data from external boot, test or other labs.
+ """
+
+ # pylint: disable=too-many-instance-attributes
+ def __init__(self, name):
+ self._created_on = None
+ self._id = None
+ self._name = name
+ self._version = None
+
+ self._address = {}
+ self._contact = {}
+ self.private = False
+ self.token = None
+ self.updated_on = None
+
+ @property
+ def collection(self):
+ return models.LAB_COLLECTION
+
+ # pylint: disable=invalid-name
+ @staticmethod
+ def from_json(json_obj):
+ lab_doc = None
+ if json_obj:
+ json_get = json_obj.get
+ lab_doc = LabDocument(json_get(models.NAME_KEY))
+ lab_doc.id = json_get(models.ID_KEY, None)
+ lab_doc.created_on = json_get(models.CREATED_KEY, None)
+ lab_doc.private = json_get(models.PRIVATE_KEY, False)
+ lab_doc.address = json_get(models.ADDRESS_KEY, {})
+ lab_doc.contact = json_get(models.CONTACT_KEY, {})
+ lab_doc.token = json_get(models.TOKEN_KEY, None)
+ lab_doc.updated_on = json_get(models.UPDATED_KEY, None)
+ lab_doc.version = json_get(
+ models.VERSION_KEY, models.DEFAULT_SCHEMA_VERSION)
+ return lab_doc
+
+ @property
+ def name(self):
+ """The name of the lab."""
+ return self._name
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
+
+ @property
+ def address(self):
+ """The address of this lab.
+
+ :return A dictionary.
+ """
+ return self._address
+
+ @address.setter
+ def address(self, value):
+ """Set the address of this lab.
+
+ The address must be a dictionary containing the following keys:
+ * street_1
+ * street_2
+ * city
+ * country
+ * zipcode
+ * longitude
+ * latitude
+
+ Not all keys need to be set: providing an address is optional. Keep
+ `street_1` and `street_2` under 64 characters each; that is why two
+ fields are provided.
+
+ :param value: The address data structure for this lab.
+ :type value: dict
+ :raises TypeError if value is not a dict.
+ """
+ if not isinstance(value, types.DictType):
+ raise TypeError("Passed value is not a dictionary")
+ self._address = value
+
+ @property
+ def contact(self):
+ """The contact details for this lab.
+
+ :return A dictionary.
+ """
+ return self._contact
+
+ @contact.setter
+ def contact(self, value):
+ """Set the contact for this lab.
+
+ The contact must be a dictionary containing the following keys:
+ * name
+ * surname
+ * telephone
+ * mobile
+ * email
+
+ Mandatory keys are 'name', 'surname' and 'email'.
+
+ :param value: The contact data structure for this lab.
+ :type value: dict
+ :raises TypeError if value is not a dict, ValueError if the mandatory
+ fields are missing.
+ """
+ if not isinstance(value, types.DictType):
+ raise TypeError("Passed value is not a dictionary")
+ if not all(
+ [value.get("name"), value.get("surname"), value.get("email")]
+ ):
+ raise ValueError(
+ "Missing mandatory field (one of): name, surname and email"
+ )
+ self._contact = value
+
+ @property
+ def created_on(self):
+ """When this lab object was created."""
+ return self._created_on
+
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this lab object.
+
+ :param value: The lab creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
+
+ @property
+ def version(self):
+ """The schema version of this object."""
+ return self._version
+
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
+
+ :param value: The schema string.
+ :type value: str
+ """
+ self._version = value
+
+ def to_dict(self):
+ """Create a serializable view of this document.
+
+ :return A dictionary representation of the object.
+ """
+ lab_dict = {
+ models.ADDRESS_KEY: self.address,
+ models.CONTACT_KEY: self.contact,
+ models.CREATED_KEY: self.created_on,
+ models.NAME_KEY: self.name,
+ models.PRIVATE_KEY: self.private,
+ models.TOKEN_KEY: self.token,
+ models.UPDATED_KEY: self.updated_on,
+ models.VERSION_KEY: self.version,
+ }
+
+ if self.id:
+ lab_dict[models.ID_KEY] = self.id
+
+ return lab_dict
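
A rough sketch of how the new lab model validates its setters, based on the checks above — the contact setter is the only one with mandatory fields, while the address setter accepts any subset of its keys:

    import models.lab as modl

    lab_doc = modl.LabDocument("lab-01")

    # A partial address is accepted: none of the keys are mandatory.
    lab_doc.address = {"city": "Cambridge", "country": "UK"}

    try:
        lab_doc.contact = {"name": "Ada"}  # surname and email are missing
    except ValueError:
        pass

    lab_doc.contact = {
        "name": "Ada",
        "surname": "Lovelace",
        "email": "ada@example.org",
    }

    try:
        lab_doc.address = "not-a-dict"  # only dictionaries are accepted
    except TypeError:
        pass
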
diff --git a/app/models/subscription.py b/app/models/subscription.py
index 66cd136..b63182c 100644
--- a/app/models/subscription.py
+++ b/app/models/subscription.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -19,38 +17,93 @@ The model that represents a subscription document in the mongodb collection.
import types
-from bson import json_util
-
-from models import (
- ID_KEY,
- SUBSCRIPTION_COLLECTION,
-)
-from models.base import BaseDocument
+import models
+import models.base as modb
-class SubscriptionDocument(BaseDocument):
+class SubscriptionDocument(modb.BaseDocument):
"""This class represents a subscription document in the mongodb database.
A subscription document contains a list of emails that should be notified.
It contains an external ID that points to the job ID.
"""
- SUBSCRIPTION_ID_FORMAT = 'sub-%s'
-
- def __init__(self, name, job_id):
- super(SubscriptionDocument, self).__init__(name)
- self._job_id = job_id
+ def __init__(self, job, kernel):
+ doc_name = {
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel
+ }
+ self._name = models.SUBSCRIPTION_DOCUMENT_NAME % doc_name
+ self._created_on = None
+ self._id = None
+ self._job_id = None
+ self._job = job
+ self._kernel = kernel
self._emails = []
+ self._version = None
@property
def collection(self):
- return SUBSCRIPTION_COLLECTION
+ return models.SUBSCRIPTION_COLLECTION
+
+ @property
+ def name(self):
+ """The name of the object."""
+ return self._name
+
+ @name.setter
+ def name(self, value):
+ """Set the name of the document."""
+ self._name = value
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
+
+ @property
+ def job(self):
+ """The real job name as found on the file system."""
+ return self._job
+
+ @property
+ def kernel(self):
+ """The real kernel name as found on the file system."""
+ return self._kernel
+
+ @property
+ def created_on(self):
+ """When this object was created."""
+ return self._created_on
+
+ @created_on.setter
+ def created_on(self, value):
+ """Set the creation date of this object.
+
+ :param value: The document creation date, in UTC time zone.
+ :type value: datetime
+ """
+ self._created_on = value
@property
def job_id(self):
"""The job ID this subscriptions belong to."""
return self._job_id
+ @job_id.setter
+ def job_id(self, value):
+ """Set the ID of the associated job."""
+ self._job_id = value
+
@property
def emails(self):
"""The list of emails subscribed."""
@@ -69,10 +122,34 @@ class SubscriptionDocument(BaseDocument):
# Make sure the list is unique.
self._emails = list(set(self._emails))
+ @property
+ def version(self):
+ """The schema version of this object."""
+ return self._version
+
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
+
+ :param value: The schema string.
+ :type value: str
+ """
+ self._version = value
+
def to_dict(self):
- sub_dict = super(SubscriptionDocument, self).to_dict()
- sub_dict['emails'] = self._emails
- sub_dict['job_id'] = self._job_id
+ sub_dict = {
+ models.CREATED_KEY: self.created_on,
+ models.EMAIL_LIST_KEY: self.emails,
+ models.JOB_ID_KEY: self.job_id,
+ models.JOB_KEY: self.job,
+ models.KERNEL_KEY: self.kernel,
+ models.NAME_KEY: self.name,
+ models.VERSION_KEY: self.version,
+ }
+
+ if self.id:
+ sub_dict[models.ID_KEY] = self.id
+
return sub_dict
@staticmethod
@@ -82,15 +159,23 @@ class SubscriptionDocument(BaseDocument):
:param json_obj: The JSON object to start from.
:return An instance of `SubscriptionDocument` or None.
"""
- if isinstance(json_obj, types.StringTypes):
- json_obj = json_util.loads(json_obj)
-
- name = json_obj.pop(ID_KEY)
- job_id = json_obj.pop('job_id')
-
- sub_doc = SubscriptionDocument(name, job_id)
-
- for key, value in json_obj.iteritems():
- setattr(sub_doc, key, value)
+ sub_doc = None
+ if isinstance(json_obj, types.DictionaryType):
+ json_pop = json_obj.pop
+ job = json_pop(models.JOB_KEY)
+ kernel = json_pop(models.KERNEL_KEY)
+ doc_id = json_pop(models.ID_KEY)
+ # Remove the name key.
+ json_pop(models.NAME_KEY)
+
+ sub_doc = SubscriptionDocument(job, kernel)
+ sub_doc.id = doc_id
+ sub_doc.version = json_pop(models.VERSION_KEY, "1.0")
+
+ for key, value in json_obj.iteritems():
+ try:
+ setattr(sub_doc, key, value)
+ except AttributeError:
+ # Read-only or unknown keys cannot be set: skip them.
+ pass
return sub_doc
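
A short sketch of the reworked subscription model — the `sub-<job>-<kernel>` name format is inferred from the tests later in this patch, and the deduplication follows the set-based emails setter above:

    import models.subscription as mods

    sub_doc = mods.SubscriptionDocument("next", "v3.18-rc1")
    assert sub_doc.name == "sub-next-v3.18-rc1"

    # Duplicates are collapsed because the setter goes through a set().
    sub_doc.emails = ["a@example.org", "b@example.org", "a@example.org"]
    assert sorted(sub_doc.emails) == ["a@example.org", "b@example.org"]
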
diff --git a/app/models/tests/test_bisect_model.py b/app/models/tests/test_bisect_model.py
index 5f5b629..874835b 100644
--- a/app/models/tests/test_bisect_model.py
+++ b/app/models/tests/test_bisect_model.py
@@ -13,36 +13,36 @@
import unittest
-from models.base import BaseDocument
-from models.bisect import BisectDocument, BootBisectDocument
+import models.base as modb
+import models.bisect as modbs
class TestBisectModel(unittest.TestCase):
def test_bisect_base_document(self):
- bisect_doc = BisectDocument("foo")
- self.assertIsInstance(bisect_doc, BaseDocument)
+ bisect_doc = modbs.BisectDocument("foo")
+ self.assertIsInstance(bisect_doc, modb.BaseDocument)
def test_boot_bisect_document(self):
- bisect_doc = BootBisectDocument("bar")
- self.assertIsInstance(bisect_doc, BisectDocument)
- self.assertIsInstance(bisect_doc, BaseDocument)
+ bisect_doc = modbs.BootBisectDocument("bar")
+ self.assertIsInstance(bisect_doc, modbs.BisectDocument)
+ self.assertIsInstance(bisect_doc, modb.BaseDocument)
def test_bisect_base_document_collection(self):
- bisect_doc = BisectDocument("foo")
+ bisect_doc = modbs.BisectDocument("foo")
self.assertEqual(bisect_doc.collection, "bisect")
def test_bisect_boot_document_collection(self):
- bisect_doc = BootBisectDocument("foo")
+ bisect_doc = modbs.BootBisectDocument("foo")
self.assertEqual(bisect_doc.collection, "bisect")
def test_bisect_base_to_dict(self):
- bisect_doc = BisectDocument("foo")
+ bisect_doc = modbs.BisectDocument("foo")
expected = {
"created_on": None,
"job": None,
- "doc_id": "foo",
+ "name": "foo",
"bisect_data": [],
"good_commit": None,
"good_commit_date": None,
@@ -50,18 +50,20 @@ class TestBisectModel(unittest.TestCase):
"bad_commit": None,
"bad_commit_date": None,
"bad_commit_url": None,
+ "version": None,
+ "job_id": None
}
self.assertDictEqual(expected, bisect_doc.to_dict())
def test_bisect_base_to_dict_with_id(self):
- bisect_doc = BisectDocument("foo")
+ bisect_doc = modbs.BisectDocument("foo")
bisect_doc.id = "bar"
expected = {
"_id": "bar",
"created_on": None,
"job": None,
- "doc_id": "foo",
+ "name": "foo",
"bisect_data": [],
"good_commit": None,
"good_commit_date": None,
@@ -69,20 +71,26 @@ class TestBisectModel(unittest.TestCase):
"bad_commit": None,
"bad_commit_date": None,
"bad_commit_url": None,
+ "version": None,
+ "job_id": None
}
self.assertDictEqual(expected, bisect_doc.to_dict())
def test_bisect_boot_to_dict(self):
- bisect_doc = BootBisectDocument("foo")
+ bisect_doc = modbs.BootBisectDocument("foo")
bisect_doc.id = "bar"
bisect_doc.board = "baz"
+ bisect_doc.version = "1.0"
+ bisect_doc.boot_id = "boot-id"
+ bisect_doc.defconfig_id = "defconfig-id"
+ bisect_doc.job_id = "job-id"
expected = {
"_id": "bar",
"board": "baz",
"created_on": None,
"job": None,
- "doc_id": "foo",
+ "name": "foo",
"bisect_data": [],
"good_commit": None,
"good_commit_date": None,
@@ -90,11 +98,15 @@ class TestBisectModel(unittest.TestCase):
"bad_commit": None,
"bad_commit_date": None,
"bad_commit_url": None,
+ "version": "1.0",
+ "boot_id": "boot-id",
+ "defconfig_id": "defconfig-id",
+ "job_id": "job-id"
}
self.assertDictEqual(expected, bisect_doc.to_dict())
def test_bisect_base_properties(self):
- bisect_doc = BootBisectDocument("foo")
+ bisect_doc = modbs.BootBisectDocument("foo")
bisect_doc.id = "bar"
bisect_doc.created_on = "now"
bisect_doc.job = "fooz"
@@ -107,7 +119,7 @@ class TestBisectModel(unittest.TestCase):
bisect_doc.bad_commit_url = "url"
self.assertEqual(bisect_doc.id, "bar")
- self.assertEqual(bisect_doc.doc_id, "foo")
+ self.assertEqual(bisect_doc.name, "foo")
self.assertEqual(bisect_doc.created_on, "now")
self.assertEqual(bisect_doc.job, "fooz")
self.assertEqual(bisect_doc.bisect_data, [1, 2, 3])
@@ -119,7 +131,40 @@ class TestBisectModel(unittest.TestCase):
self.assertEqual(bisect_doc.bad_commit_url, "url")
def test_bisect_boot_properties(self):
- bisect_doc = BootBisectDocument("foo")
+ bisect_doc = modbs.BootBisectDocument("foo")
bisect_doc.board = "bar"
self.assertEqual(bisect_doc.board, "bar")
+
+ def test_bisect_defconfig_to_dict(self):
+ bisect_doc = modbs.DefconfigBisectDocument("foo")
+ bisect_doc.id = "bar"
+ bisect_doc.defconfig_id = "defconfig-id"
+ bisect_doc.defconfig = "defconfig-name"
+ bisect_doc.version = "1.0"
+ bisect_doc.job = "job"
+ bisect_doc.job_id = "job-id"
+ bisect_doc.defconfig_full = "defconfig-full"
+ bisect_doc.arch = "arm"
+
+ expected = {
+ "_id": "bar",
+ "created_on": None,
+ "job": "job",
+ "name": "foo",
+ "bisect_data": [],
+ "good_commit": None,
+ "good_commit_date": None,
+ "good_commit_url": None,
+ "bad_commit": None,
+ "bad_commit_date": None,
+ "bad_commit_url": None,
+ "version": "1.0",
+ "defconfig_id": "defconfig-id",
+ "defconfig": "defconfig-name",
+ "job_id": "job-id",
+ "defconfig_full": "defconfig-full",
+ "arch": "arm"
+ }
+
+ self.assertDictEqual(expected, bisect_doc.to_dict())
diff --git a/app/models/tests/test_boot_model.py b/app/models/tests/test_boot_model.py
index b45e3b8..90c12a2 100644
--- a/app/models/tests/test_boot_model.py
+++ b/app/models/tests/test_boot_model.py
@@ -13,55 +13,83 @@
import unittest
-from models.base import BaseDocument
-from models.boot import BootDocument
+import models.base as modb
+import models.boot as modbt
class TestBootModel(unittest.TestCase):
def test_boot_document_valid_instance(self):
- boot_doc = BootDocument('board', 'job', 'kernel', 'defconfig')
- self.assertIsInstance(boot_doc, BaseDocument)
+ boot_doc = modbt.BootDocument(
+ 'board', 'job', 'kernel', 'defconfig', 'lab'
+ )
+ self.assertIsInstance(boot_doc, modb.BaseDocument)
def test_boot_document_to_dict(self):
- boot_doc = BootDocument('board', 'job', 'kernel', 'defconfig')
+ self.maxDiff = None
+ boot_doc = modbt.BootDocument(
+ 'board', 'job', 'kernel', 'defconfig', 'lab', arch='arm'
+ )
+ boot_doc.id = 'id'
+ boot_doc.job_id = 'job-id'
+ boot_doc.created_on = 'now'
+ boot_doc.defconfig_id = "defconfig_id"
+ boot_doc.retries = 10
+ boot_doc.version = "1.0"
+ boot_doc.dtb_append = False
+ boot_doc.boot_log = "boot-log"
+ boot_doc.boot_log_html = "boot-log-html"
+ boot_doc.warnings = 2
+ boot_doc.git_branch = "git-branch"
+ boot_doc.git_commit = "git-commit"
+ boot_doc.git_describe = "git-describe"
+ boot_doc.git_url = "git-url"
+ boot_doc.fastboot_cmd = "fastboot"
+ boot_doc.defconfig_full = "defconfig"
+ boot_doc.file_server_url = "file-server"
+ boot_doc.file_server_resource = "file-resource"
+ boot_doc.initrd = "initrd"
+ boot_doc.board_instance = "instance"
expected = {
- 'status': None,
- 'time': None,
- 'warnings': None,
- 'kernel': 'kernel',
- 'job_id': 'job-kernel',
- 'created_on': None,
- 'defconfig': 'defconfig',
- 'job': 'job',
- '_id': 'board-job-kernel-defconfig',
+ '_id': 'id',
'board': 'board',
- 'load_addr': None,
+ 'boot_log': "boot-log",
+ 'boot_log_html': "boot-log-html",
+ 'boot_result_description': None,
+ 'created_on': 'now',
+ 'defconfig': 'defconfig',
+ 'defconfig_id': "defconfig_id",
'dtb': None,
'dtb_addr': None,
+ 'dtb_append': False,
+ 'endian': None,
+ 'fastboot': False,
'initrd_addr': None,
+ 'job': 'job',
+ 'job_id': 'job-id',
+ 'kernel': 'kernel',
'kernel_image': None,
- 'boot_log': None,
- 'endian': None,
- 'metadata': None,
- 'boot_log_html': None,
- 'fastboot': None,
+ 'lab_name': 'lab',
+ 'load_addr': None,
+ 'metadata': {},
+ 'name': 'board-job-kernel-defconfig-arm',
+ 'retries': 10,
+ 'status': None,
+ 'time': 0,
+ 'version': "1.0",
+ 'warnings': 2,
+ "git_commit": "git-commit",
+ "git_branch": "git-branch",
+ "git_describe": "git-describe",
+ "git_url": "git-url",
+ "arch": "arm",
+ "fastboot_cmd": "fastboot",
+ "defconfig_full": "defconfig",
+ "file_server_url": "file-server",
+ "file_server_resource": "file-resource",
+ "initrd": "initrd",
+ "board_instance": "instance"
}
- self.assertEqual(expected, boot_doc.to_dict())
-
- def test_boot_document_to_json(self):
- boot_doc = BootDocument('board', 'job', 'kernel', 'defconfig')
-
- expected = (
- '{"status": null, "kernel": "kernel", "boot_log": null, '
- '"job_id": "job-kernel", "fastboot": null, "warnings": null, '
- '"boot_log_html": null, "initrd_addr": null, "dtb_addr": null, '
- '"created_on": null, "defconfig": "defconfig", '
- '"kernel_image": null, "job": "job", "board": "board", '
- '"time": null, "dtb": null, "_id": "board-job-kernel-defconfig", '
- '"load_addr": null, "endian": null, "metadata": null}'
- )
-
- self.assertEqual(expected, boot_doc.to_json())
+ self.assertDictEqual(expected, boot_doc.to_dict())
diff --git a/app/models/tests/test_defconfig_model.py b/app/models/tests/test_defconfig_model.py
new file mode 100644
index 0000000..c2e78d6
--- /dev/null
+++ b/app/models/tests/test_defconfig_model.py
@@ -0,0 +1,143 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+
+import models.base as modb
+import models.defconfig as moddf
+
+
+class TestDefconfModel(unittest.TestCase):
+
+ def test_defconfig_document_valid_instance(self):
+ defconf_doc = moddf.DefconfigDocument('job', 'kernel', 'defconfig')
+ self.assertIsInstance(defconf_doc, modb.BaseDocument)
+ self.assertIsInstance(defconf_doc, moddf.DefconfigDocument)
+
+ def test_defconfig_document_collection(self):
+ defconfig_doc = moddf.DefconfigDocument('job', 'kernel', 'defconfig')
+ self.assertEqual(defconfig_doc.collection, 'defconfig')
+
+ def test_defconfig_document_to_dict(self):
+ self.maxDiff = None
+ defconf_doc = moddf.DefconfigDocument(
+ 'job', 'kernel', 'defconfig', 'defconfig_full'
+ )
+ defconf_doc.id = "defconfig_id"
+ defconf_doc.job_id = "job_id"
+ defconf_doc.created_on = "now"
+ defconf_doc.metadata = {}
+ defconf_doc.status = "FAIL"
+ defconf_doc.dirname = "defconfig"
+ defconf_doc.boot_result_description = []
+ defconf_doc.errors = 1
+ defconf_doc.warnings = 1
+ defconf_doc.build_time = 1
+ defconf_doc.arch = "foo"
+ defconf_doc.git_url = "git_url"
+ defconf_doc.git_commit = "git_commit"
+ defconf_doc.git_branch = "git_branch"
+ defconf_doc.git_describe = "git_describe"
+ defconf_doc.version = "1.0"
+ defconf_doc.modules = "modules-file"
+ defconf_doc.dtb_dir = "dtb-dir"
+ defconf_doc.kernel_config = "kernel-config"
+ defconf_doc.system_map = "system-map"
+ defconf_doc.text_offset = "offset"
+ defconf_doc.kernel_image = "kernel-image"
+ defconf_doc.modules_dir = "modules-dir"
+ defconf_doc.build_log = "build.log"
+ defconf_doc.kconfig_fragments = "config-frag"
+ defconf_doc.file_server_resource = "file-resource"
+ defconf_doc.file_server_url = "server-url"
+
+ expected = {
+ "name": "job-kernel-defconfig_full",
+ "_id": "defconfig_id",
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "job_id": "job_id",
+ "created_on": "now",
+ "metadata": {},
+ "status": "FAIL",
+ "defconfig": "defconfig",
+ "errors": 1,
+ "warnings": 1,
+ "build_time": 1,
+ "arch": "foo",
+ "dirname": "defconfig",
+ "git_url": "git_url",
+ "git_describe": "git_describe",
+ "git_branch": "git_branch",
+ "git_commit": "git_commit",
+ "build_platform": [],
+ "version": "1.0",
+ "dtb_dir": "dtb-dir",
+ "kernel_config": "kernel-config",
+ "kernel_image": "kernel-image",
+ "system_map": "system-map",
+ "text_offset": "offset",
+ "modules": "modules-file",
+ "modules_dir": "modules-dir",
+ "build_log": "build.log",
+ "kconfig_fragments": "config-frag",
+ "defconfig_full": "defconfig_full",
+ "file_server_resource": "file-resource",
+ "file_server_url": "server-url",
+ }
+
+ self.assertDictEqual(expected, defconf_doc.to_dict())
+
+ def test_defconfig_set_status_wrong_and_right(self):
+ defconf_doc = moddf.DefconfigDocument("job", "kernel", "defconfig")
+
+ self.assertRaises(ValueError, setattr, defconf_doc, "status", "foo")
+ self.assertRaises(ValueError, setattr, defconf_doc, "status", [])
+ self.assertRaises(ValueError, setattr, defconf_doc, "status", {})
+ self.assertRaises(ValueError, setattr, defconf_doc, "status", ())
+
+ defconf_doc.status = "FAIL"
+ self.assertEqual(defconf_doc.status, "FAIL")
+ defconf_doc.status = "PASS"
+ self.assertEqual(defconf_doc.status, "PASS")
+ defconf_doc.status = "UNKNOWN"
+ self.assertEqual(defconf_doc.status, "UNKNOWN")
+ defconf_doc.status = "BUILD"
+ self.assertEqual(defconf_doc.status, "BUILD")
+
+ def test_defconfig_set_build_platform_wrong(self):
+ defconf_doc = moddf.DefconfigDocument("job", "kernel", "defconfig")
+
+ self.assertRaises(TypeError, setattr, defconf_doc, "build_platform", ())
+ self.assertRaises(TypeError, setattr, defconf_doc, "build_platform", {})
+ self.assertRaises(TypeError, setattr, defconf_doc, "build_platform", "")
+
+ def test_defconfig_set_build_platform(self):
+ defconf_doc = moddf.DefconfigDocument("job", "kernel", "defconfig")
+ defconf_doc.build_platform = ["a", "b"]
+
+ self.assertListEqual(defconf_doc.build_platform, ["a", "b"])
+
+ def test_defconfig_set_metadata_wrong(self):
+ defconf_doc = moddf.DefconfigDocument("job", "kernel", "defconfig")
+
+ self.assertRaises(TypeError, setattr, defconf_doc, "metadata", ())
+ self.assertRaises(TypeError, setattr, defconf_doc, "metadata", [])
+ self.assertRaises(TypeError, setattr, defconf_doc, "metadata", "")
+
+ def test_defconfig_from_json_is_none(self):
+ self.assertIsNone(moddf.DefconfigDocument.from_json({}))
+ self.assertIsNone(moddf.DefconfigDocument.from_json(""))
+ self.assertIsNone(moddf.DefconfigDocument.from_json([]))
+ self.assertIsNone(moddf.DefconfigDocument.from_json(()))
diff --git a/app/models/tests/test_job_model.py b/app/models/tests/test_job_model.py
new file mode 100644
index 0000000..7b1edb8
--- /dev/null
+++ b/app/models/tests/test_job_model.py
@@ -0,0 +1,144 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import datetime
+import unittest
+
+from bson import (
+ json_util,
+ tz_util,
+)
+
+import models.base as modb
+import models.job as modj
+
+
+class TestJobModel(unittest.TestCase):
+
+ def test_job_document_valid_instance(self):
+ job_doc = modj.JobDocument("job", "kernel")
+ self.assertIsInstance(job_doc, modb.BaseDocument)
+ self.assertIsInstance(job_doc, modj.JobDocument)
+
+ def test_job_wrong_status(self):
+ job_doc = modj.JobDocument("job", "kernel")
+
+ self.assertRaises(ValueError, setattr, job_doc, "status", "foo")
+ self.assertRaises(ValueError, setattr, job_doc, "status", [])
+ self.assertRaises(ValueError, setattr, job_doc, "status", ())
+ self.assertRaises(ValueError, setattr, job_doc, "status", {})
+
+ def test_job_correct_status(self):
+ job_doc = modj.JobDocument("job", "kernel")
+
+ job_doc.status = "FAIL"
+ self.assertEqual(job_doc.status, "FAIL")
+ job_doc.status = "BUILD"
+ self.assertEqual(job_doc.status, "BUILD")
+ job_doc.status = "PASS"
+ self.assertEqual(job_doc.status, "PASS")
+
+ def test_job_document_to_dict(self):
+ job_doc = modj.JobDocument("job", "kernel")
+ job_doc.id = "job"
+ job_doc.created_on = "now"
+ job_doc.status = "PASS"
+ job_doc.version = "1.0"
+ job_doc.git_commit = "1234"
+ job_doc.git_url = "git-url"
+ job_doc.git_branch = "git-branch"
+ job_doc.git_describe = "git-describe"
+
+ expected = {
+ "_id": "job",
+ "name": "job-kernel",
+ "kernel": "kernel",
+ "job": "job",
+ "private": False,
+ "created_on": "now",
+ "status": "PASS",
+ "version": "1.0",
+ "git_commit": "1234",
+ "git_url": "git-url",
+ "git_branch": "git-branch",
+ "git_describe": "git-describe"
+ }
+
+ self.assertEqual(job_doc.to_dict(), expected)
+
+ def test_job_document_collection(self):
+ job_doc = modj.JobDocument("job", "kernel")
+ self.assertEqual(job_doc.collection, "job")
+
+ def test_job_document_from_json(self):
+ now = datetime.datetime.now(tz=tz_util.utc)
+
+ json_obj = dict(
+ _id="job",
+ job="job",
+ kernel="kernel",
+ created_on=now,
+ status="BUILD",
+ name="job-kernel"
+ )
+
+ job_doc = modj.JobDocument.from_json(json_obj)
+
+ self.assertIsInstance(job_doc, modj.JobDocument)
+ self.assertIsInstance(job_doc, modb.BaseDocument)
+ self.assertEqual(job_doc.name, 'job-kernel')
+ self.assertEqual(job_doc.kernel, 'kernel')
+ self.assertEqual(job_doc.job, 'job')
+ self.assertEqual(job_doc.created_on, now)
+ self.assertEqual(job_doc.status, 'BUILD')
+
+ def test_job_document_private(self):
+ # By default, jobs are public.
+ job_doc = modj.JobDocument("job", "kernel")
+
+ self.assertFalse(job_doc.private)
+
+ job_doc.private = True
+
+ self.assertTrue(job_doc.private)
+
+ def test_job_document_date_serialization(self):
+ now = datetime.datetime.now(tz=tz_util.utc)
+
+ job_doc = modj.JobDocument("job", "kernel")
+ job_doc.created_on = now
+
+ self.assertIsInstance(job_doc.created_on, datetime.datetime)
+
+ json_obj = {
+ "_id": "job",
+ "job": "job",
+ "kernel": "kernel",
+ "name": "job-kernel",
+ "created_on": now,
+ }
+
+ json_deserialized = json_util.loads(json_util.dumps(json_obj))
+ new_job = modj.JobDocument.from_json(json_deserialized)
+
+ self.assertIsInstance(new_job.created_on, datetime.datetime)
+ # During the deserialization process, some microseconds are lost.
+ self.assertLessEqual(
+ (new_job.created_on - job_doc.created_on).total_seconds(), 0)
+
+ def test_job_document_from_wrong_json(self):
+ self.assertIsNone(modj.JobDocument.from_json(None))
+ self.assertIsNone(modj.JobDocument.from_json({}))
+ self.assertIsNone(modj.JobDocument.from_json([]))
+ self.assertIsNone(modj.JobDocument.from_json(""))
+ self.assertIsNone(modj.JobDocument.from_json(()))
diff --git a/app/models/tests/test_lab_model.py b/app/models/tests/test_lab_model.py
new file mode 100644
index 0000000..9d8ea73
--- /dev/null
+++ b/app/models/tests/test_lab_model.py
@@ -0,0 +1,191 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+
+import models.base as modb
+import models.lab as modl
+
+
+class TestLabModel(unittest.TestCase):
+
+ def test_is_valid_base_class(self):
+ self.assertIsInstance(modl.LabDocument(""), modb.BaseDocument)
+
+ def test_model_collection(self):
+ lab_doc = modl.LabDocument("")
+ self.assertEqual(lab_doc.collection, "lab")
+
+ def test_set_address_wrong_type(self):
+ lab_doc = modl.LabDocument("foo")
+
+ self.assertRaises(TypeError, setattr, lab_doc, "address", "")
+ self.assertRaises(TypeError, setattr, lab_doc, "address", [])
+ self.assertRaises(TypeError, setattr, lab_doc, "address", ())
+
+ def test_set_contact_missing_valid_fields(self):
+ lab_doc = modl.LabDocument("foo")
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", {})
+
+ def test_set_contact_wrong_type(self):
+ lab_doc = modl.LabDocument("foo")
+
+ self.assertRaises(TypeError, setattr, lab_doc, "contact", "")
+ self.assertRaises(TypeError, setattr, lab_doc, "contact", [])
+ self.assertRaises(TypeError, setattr, lab_doc, "contact", ())
+
+ def test_set_contact_missing_email(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "name": "foo",
+ "surname": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_set_contact_missing_name(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "surname": "foo",
+ "email": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_set_contact_missing_surname(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "name": "foo",
+ "email": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_set_contact_only_email(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "email": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_set_contact_only_name(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "name": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_set_contact_only_surname(self):
+ lab_doc = modl.LabDocument("foo")
+
+ contact = {
+ "surname": "bar"
+ }
+
+ self.assertRaises(ValueError, setattr, lab_doc, "contact", contact)
+
+ def test_lab_to_dict(self):
+ lab_doc = modl.LabDocument("foo")
+ lab_doc.created_on = "now"
+ lab_doc.updated_on = "now"
+ lab_doc.id = "bar"
+ lab_doc.address = {
+ "street_1": "a",
+ "street_2": "b",
+ "city": "c",
+ "country": "d",
+ "zipcode": "e",
+ "longitude": "f",
+ "latitude": "h"
+ }
+ lab_doc.private = True
+ lab_doc.token = "token"
+ lab_doc.contact = {
+ "name": "foo",
+ "surname": "bar",
+ "telephone": "1234",
+ "mobile": "1234",
+ "email": "user@example.net"
+ }
+
+ expected = {
+ "name": "foo",
+ "created_on": "now",
+ "updated_on": "now",
+ "_id": "bar",
+ "address": {
+ "street_1": "a",
+ "street_2": "b",
+ "city": "c",
+ "country": "d",
+ "zipcode": "e",
+ "longitude": "f",
+ "latitude": "h"
+ },
+ "private": True,
+ "token": "token",
+ "contact": {
+ "name": "foo",
+ "surname": "bar",
+ "telephone": "1234",
+ "mobile": "1234",
+ "email": "user@example.net"
+ },
+ "version": None,
+ }
+
+ self.assertDictEqual(expected, lab_doc.to_dict())
+
+ def test_lab_from_json(self):
+ json_obj = {
+ "name": "foo",
+ "created_on": "now",
+ "updated_on": "now",
+ "address": {
+ "street_1": "a",
+ "street_2": "b",
+ "city": "c",
+ "country": "d",
+ "zipcode": "e",
+ "longitude": "f",
+ "latitude": "h"
+ },
+ "private": True,
+ "token": "token",
+ "contact": {
+ "name": "foo",
+ "surname": "bar",
+ "telephone": "1234",
+ "mobile": "1234",
+ "email": "user@example.net"
+ },
+ "version": "1.0",
+ }
+
+ lab_doc = modl.LabDocument.from_json(json_obj)
+
+ self.assertIsInstance(lab_doc, modb.BaseDocument)
+ self.assertIsInstance(lab_doc, modl.LabDocument)
+ self.assertDictEqual(lab_doc.to_dict(), json_obj)
+
+ def test_lab_from_json_none(self):
+ self.assertIsNone(modl.LabDocument.from_json(None))
+ self.assertIsNone(modl.LabDocument.from_json({}))
diff --git a/app/models/tests/test_models.py b/app/models/tests/test_models.py
deleted file mode 100644
index 698151c..0000000
--- a/app/models/tests/test_models.py
+++ /dev/null
@@ -1,258 +0,0 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import types
-import unittest
-
-from bson import (
- json_util,
- tz_util,
-)
-
-from datetime import datetime
-
-from models.base import BaseDocument
-from models.defconfig import DefConfigDocument
-from models.job import JobDocument
-from models.subscription import SubscriptionDocument
-
-
-class TestJobModel(unittest.TestCase):
-
- def test_job_documet_valid_instance(self):
- job_doc = JobDocument('job')
- self.assertIsInstance(job_doc, BaseDocument)
-
- def test_job_document_to_dict(self):
- expected = {
- 'kernel': None,
- 'metadata': {},
- 'job': None,
- '_id': 'job',
- 'private': False,
- 'created_on': None,
- 'status': None,
- 'updated': None,
- }
- job_doc = JobDocument('job')
- self.assertEqual(job_doc.to_dict(), expected)
-
- def test_job_document_collection(self):
- job_doc = JobDocument('job')
- self.assertEqual(job_doc.collection, 'job')
-
- def test_job_document_to_json(self):
- expected_json = (
- '{"status": null, "kernel": null, "updated": null, "job": null, '
- '"private": false, "created_on": null, "_id": "job", '
- '"metadata": {}}'
- )
-
- job_doc = JobDocument('job')
- self.assertEqual(job_doc.to_json(), expected_json)
-
- def test_job_document_from_json(self):
- now = datetime.now(tz=tz_util.utc)
-
- json_obj = dict(
- _id='job-kernel',
- job='job',
- kernel='kernel',
- created_on=now,
- status='BUILDING',
- )
-
- job_doc = JobDocument.from_json(json_obj)
-
- self.assertIsInstance(job_doc, JobDocument)
- self.assertIsInstance(job_doc, BaseDocument)
- self.assertEqual(job_doc.name, 'job-kernel')
- self.assertEqual(job_doc.kernel, 'kernel')
- self.assertEqual(job_doc.job, 'job')
- self.assertEqual(job_doc.created_on, now)
- self.assertEqual(job_doc.status, 'BUILDING')
-
- def test_job_document_from_json_string(self):
- json_obj = dict(
- _id='job-kernel',
- job='job',
- kernel='kernel',
- )
-
- json_string = json_util.dumps(json_obj)
- job_doc = JobDocument.from_json(json_string)
-
- self.assertIsInstance(job_doc, JobDocument)
-
- def test_job_document_private(self):
- # By default, jobs are public.
- job_doc = JobDocument('job')
-
- self.assertFalse(job_doc.private)
-
- job_doc.private = True
-
- self.assertTrue(job_doc.private)
-
- def test_job_document_date_serialization(self):
- now = datetime.now(tz=tz_util.utc)
-
- job_doc = JobDocument('job')
- job_doc.created_on = now
- job_doc.updated = now
-
- self.assertIsInstance(job_doc.created_on, datetime)
- self.assertIsInstance(job_doc.updated, datetime)
-
- new_job = JobDocument.from_json(json_util.loads(job_doc.to_json()))
-
- self.assertIsInstance(new_job.created_on, datetime)
- self.assertIsInstance(new_job.updated, datetime)
- # During the deserialization process, some microseconds are lost.
- self.assertLessEqual(
- (new_job.created_on - job_doc.created_on).total_seconds(), 0)
- self.assertLessEqual(
- (new_job.updated - job_doc.updated).total_seconds(), 0)
-
-
-class TestDefconfModel(unittest.TestCase):
-
- def test_defconfig_document_valid_instance(self):
- defconf_doc = DefConfigDocument('defconf', 'job')
- self.assertIsInstance(defconf_doc, BaseDocument)
-
- def test_defconfig_document_to_dict(self):
- expected = {
- 'job_id': 'job',
- 'kernel': 'kernel',
- 'created_on': None,
- 'metadata': {},
- 'job': 'job',
- '_id': 'job-defconfig',
- 'status': None,
- 'defconfig': None,
- 'errors': None,
- 'warnings': None,
- 'arch': None,
- 'dirname': None,
- }
-
- defconfig_doc = DefConfigDocument('defconfig', 'job', 'job', 'kernel')
- self.assertEqual(defconfig_doc.to_dict(), expected)
-
- def test_defconfig_document_collection(self):
- defconfig_doc = DefConfigDocument('defconfig', 'job')
- self.assertEqual(defconfig_doc.collection, 'defconfig')
-
- def test_defconfig_document_to_json(self):
- expected_json = (
- '{"status": null, "kernel": null, "errors": null, '
- '"dirname": null, "job_id": "job", "warnings": null, '
- '"created_on": null, "defconfig": null, "job": null, '
- '"_id": "job-defconfig", "arch": null, "metadata": {}}'
- )
-
- defconfig_doc = DefConfigDocument('defconfig', 'job')
- self.assertEqual(expected_json, defconfig_doc.to_json())
-
-
-class TestSubscriptionModel(unittest.TestCase):
-
- def test_subscription_document_emails_attribute(self):
- sub_doc = SubscriptionDocument('sub', 'job')
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertItemsEqual([], sub_doc.emails)
-
- def test_subscription_document_emails_attribute_set(self):
- sub_doc = SubscriptionDocument('sub', 'job')
-
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertNotIsInstance(sub_doc.emails, types.StringTypes)
-
- def test_subscription_document_emails_extended(self):
- sub_doc = SubscriptionDocument('sub', 'job')
- sub_doc.emails = 'email2'
-
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertEquals(['email2'], sub_doc.emails)
-
- def test_subscription_document_emails_setter_str(self):
- sub_doc = SubscriptionDocument('sub', 'job')
- sub_doc.emails = 'an_email'
-
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertEqual(['an_email'], sub_doc.emails)
-
- def test_subscription_document_emails_setter_tuple(self):
- sub_doc = SubscriptionDocument('sub', 'job')
- sub_doc.emails = ('an_email', 'another_email')
-
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertEqual(['an_email', 'another_email'], sub_doc.emails)
-
- def test_subscription_document_to_dict(self):
- expected = dict(_id='sub', emails=[], job_id='job', created_on=None)
- sub_doc = SubscriptionDocument('sub', 'job')
-
- self.assertEqual(expected, sub_doc.to_dict())
-
- def test_subscription_document_to_json(self):
- expected = (
- '{"created_on": null, "_id": "sub", "emails": [], "job_id": "job"}'
- )
- sub_doc = SubscriptionDocument('sub', 'job')
- self.assertEqual(expected, sub_doc.to_json())
-
- def test_subscription_document_from_json(self):
- json_str = (
- '{"_id": "sub", "emails": [], "job_id": "job"}'
- )
- json_obj = json_util.loads(json_str)
-
- sub_doc = SubscriptionDocument.from_json(json_obj)
-
- self.assertIsInstance(sub_doc, SubscriptionDocument)
- self.assertIsInstance(sub_doc, BaseDocument)
-
- self.assertEqual(sub_doc.name, 'sub')
- self.assertEqual(sub_doc.job_id, 'job')
- self.assertIsInstance(sub_doc.emails, types.ListType)
-
- def test_subscription_document_from_json_with_emails(self):
- json_obj = dict(
- _id='sub',
- job_id='job',
- emails=['a@example.org', 'b@example.org'],
- created_on=None,
- )
-
- sub_doc = SubscriptionDocument.from_json(json_obj)
-
- self.assertIsInstance(sub_doc.emails, types.ListType)
- self.assertEqual(len(sub_doc.emails), 2)
-
- def test_subscription_doc_from_json_string(self):
- json_obj = dict(
- _id='sub',
- job_id='job',
- emails=['a@example.org', 'b@example.org'],
- created_on=None,
- )
-
- json_string = json_util.dumps(json_obj)
- sub_doc = SubscriptionDocument.from_json(json_string)
-
- self.assertIsInstance(sub_doc, SubscriptionDocument)
- self.assertIsInstance(sub_doc.emails, types.ListType)
diff --git a/app/models/tests/test_subscription_model.py b/app/models/tests/test_subscription_model.py
new file mode 100644
index 0000000..2b5a6b9
--- /dev/null
+++ b/app/models/tests/test_subscription_model.py
@@ -0,0 +1,107 @@
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import types
+import unittest
+
+from bson import json_util
+
+import models.base as modb
+import models.subscription as mods
+
+
+class TestSubscriptionModel(unittest.TestCase):
+
+ def test_subscription_document_emails_attribute(self):
+ sub_doc = mods.SubscriptionDocument("job", "kernel")
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertItemsEqual([], sub_doc.emails)
+
+ def test_subscription_document_emails_attribute_set(self):
+ sub_doc = mods.SubscriptionDocument("job", "kernel")
+
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertNotIsInstance(sub_doc.emails, types.StringTypes)
+
+ def test_subscription_document_emails_extended(self):
+ sub_doc = mods.SubscriptionDocument("job", "kernel")
+ sub_doc.emails = "email2"
+
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertEquals(["email2"], sub_doc.emails)
+
+ def test_subscription_document_emails_setter_str(self):
+ sub_doc = mods.SubscriptionDocument("job", "kernel")
+ sub_doc.emails = "an_email"
+
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertEqual(["an_email"], sub_doc.emails)
+
+ def test_subscription_document_emails_setter_tuple(self):
+ sub_doc = mods.SubscriptionDocument("sub", "job")
+ sub_doc.emails = ("an_email", "another_email")
+
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertEqual(["an_email", "another_email"], sub_doc.emails)
+
+ def test_subscription_document_to_dict(self):
+ expected = {
+ "job": "job",
+ "kernel": "kernel",
+ "name": "sub-job-kernel",
+ "emails": [],
+ "job_id": None,
+ "created_on": None,
+ "version": None,
+ }
+ sub_doc = mods.SubscriptionDocument("job", "kernel")
+
+ self.assertEqual(expected, sub_doc.to_dict())
+
+ def test_subscription_document_from_json(self):
+ json_obj = {
+ "_id": "id",
+ "name": "sub-job-kernel",
+ "job": "job",
+ "kernel": "kernel",
+ "emails": [],
+ "job_id": "job-id",
+ "cerated_on": "now"
+ }
+
+ sub_doc = mods.SubscriptionDocument.from_json(json_obj)
+
+ self.assertIsInstance(sub_doc, mods.SubscriptionDocument)
+ self.assertIsInstance(sub_doc, modb.BaseDocument)
+
+ self.assertEqual(sub_doc.name, 'sub-job-kernel')
+ self.assertEqual(sub_doc.job_id, 'job-id')
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+
+ def test_subscription_document_from_json_with_emails(self):
+ json_obj = {
+ "_id": "id",
+ "job_id": "job-id",
+ "name": "sub-job-kernel",
+ "job": "job",
+ "kernel": "kernel",
+ "created_on": None,
+ "emails": [
+ "a@example.org", "b@example.org"
+ ]
+ }
+
+ sub_doc = mods.SubscriptionDocument.from_json(json_obj)
+
+ self.assertIsInstance(sub_doc.emails, types.ListType)
+ self.assertEqual(len(sub_doc.emails), 2)
diff --git a/app/models/tests/test_token_model.py b/app/models/tests/test_token_model.py
index daa6005..7d5e4d9 100644
--- a/app/models/tests/test_token_model.py
+++ b/app/models/tests/test_token_model.py
@@ -13,60 +13,76 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import json
+import datetime
+import netaddr
+import random
+import types
import unittest
+import uuid
-from bson import (
- json_util,
- tz_util,
-)
-from datetime import datetime
-from netaddr import (
- IPAddress,
- IPNetwork,
-)
-from uuid import uuid4
-from types import DictionaryType
+from bson import tz_util
-from models.base import BaseDocument
-from models.token import Token
+import models.base as modb
+import models.token as modt
class TestTokenModel(unittest.TestCase):
def test_token_model_is_base_document(self):
- token_obj = Token()
- self.assertIsInstance(token_obj, BaseDocument)
+ token_obj = modt.Token()
+ self.assertIsInstance(token_obj, modb.BaseDocument)
def test_properties_len(self):
- token_obj = Token()
+ token_obj = modt.Token()
self.assertEqual(len(token_obj.properties), 16)
def test_not_expired(self):
- token_obj = Token()
+ token_obj = modt.Token()
self.assertFalse(token_obj.expired)
def test_token_not_none(self):
- token_obj = Token()
+ token_obj = modt.Token()
self.assertIsNotNone(token_obj.token)
def test_unique_token(self):
- token_obj1 = Token()
- token_obj2 = Token()
+ token_obj1 = modt.Token()
+ token_obj2 = modt.Token()
self.assertNotEqual(token_obj1.token, token_obj2.token)
def test_token_creation_date(self):
- token_obj = Token()
- self.assertIsInstance(token_obj.created_on, datetime)
+ token_obj = modt.Token()
+ self.assertIsInstance(token_obj.created_on, datetime.datetime)
def test_token_to_dict_is_dict(self):
- token_obj = Token()
+ token_obj = modt.Token()
+ token_obj.id = "token-id"
+ token_obj.email = "foo@example.org"
+ token_obj.created_on = "now"
+ token_obj.token = "token"
+ token_obj.username = "user"
+
+ expected = {
+ "_id": "token-id",
+ "created_on": "now",
+ "email": "foo@example.org",
+ "expired": False,
+ "expires_on": None,
+ "ip_address": None,
+ "name": "foo@example.org",
+ "properties": [0 for _ in range(0, 16)],
+ "token": "token",
+ "username": "user",
+ "version": None,
+ }
- self.assertIsInstance(token_obj.to_dict(), DictionaryType)
+ obtained = token_obj.to_dict()
+
+ self.assertIsInstance(obtained, types.DictionaryType)
+ self.assertDictEqual(expected, obtained)
def test_token_is_admin(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_admin = 1
self.assertEqual(token_obj.is_admin, 1)
@@ -76,8 +92,20 @@ class TestTokenModel(unittest.TestCase):
self.assertEqual(token_obj.is_delete_token, 1)
self.assertEqual(token_obj.is_post_token, 1)
+ def test_token_is_admin_no_create(self):
+ token_obj = modt.Token()
+ token_obj.is_admin = 1
+ token_obj.can_create_token = 0
+
+ self.assertEqual(token_obj.is_admin, 1)
+ self.assertEqual(token_obj.can_create_token, 0)
+ self.assertEqual(token_obj.is_superuser, 1)
+ self.assertEqual(token_obj.is_get_token, 1)
+ self.assertEqual(token_obj.is_delete_token, 1)
+ self.assertEqual(token_obj.is_post_token, 1)
+
def test_token_is_superuser(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_superuser = 1
self.assertEqual(token_obj.is_admin, 0)
@@ -87,106 +115,89 @@ class TestTokenModel(unittest.TestCase):
self.assertEqual(token_obj.is_delete_token, 1)
self.assertEqual(token_obj.is_post_token, 1)
- def test_token_wrong_numeric_value(self):
- token_obj = Token()
+ def test_token_is_lab_token(self):
+ token_obj = modt.Token()
+ token_obj.is_lab_token = 1
- def _call_setter(value):
- token_obj.is_admin = value
+ self.assertEqual(token_obj.is_lab_token, 1)
- self.assertRaises(ValueError, _call_setter, 2)
+ def test_token_wrong_numeric_value(self):
+ token_obj = modt.Token()
+ self.assertRaises(ValueError, setattr, token_obj, "is_admin", 2)
def test_token_wrong_type(self):
- token_obj = Token()
+ token_obj = modt.Token()
+ self.assertRaises(TypeError, setattr, token_obj, "is_admin", "1")
- def _call_setter(value):
- token_obj.is_admin = value
+ def test_token_negative_number(self):
+ token_obj = modt.Token()
+ self.assertRaises(ValueError, setattr, token_obj, "is_admin", -22)
- self.assertRaises(TypeError, _call_setter, "1")
+ def test_token_properties_setter_wrong_type(self):
+ token_obj = modt.Token()
- def test_token_negative_number(self):
- token_obj = Token()
+ self.assertRaises(TypeError, setattr, token_obj, "properties", "")
+ self.assertRaises(TypeError, setattr, token_obj, "properties", ())
+ self.assertRaises(TypeError, setattr, token_obj, "properties", {})
- def _call_setter(value):
- token_obj.is_admin = value
+ def test_token_properties_setter_wrong_len(self):
+ token_obj = modt.Token()
+ expected = [0 for _ in range(0, 16)]
- self.assertRaises(ValueError, _call_setter, -22)
+ # Make sure the default list is all zeros and 16 elements long.
+ self.assertListEqual(token_obj.properties, expected)
+ self.assertRaises(
+ ValueError, setattr, token_obj,
+ "properties", [0 for _ in range(0, random.randint(0, 15))]
+ )
+ self.assertRaises(
+ ValueError, setattr, token_obj,
+ "properties", [0 for _ in range(0, random.randint(17, 1024))]
+ )
+ self.assertRaises(ValueError, setattr, token_obj, "properties", [])
def test_token_valid_negative_number(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_superuser = -1
self.assertTrue(token_obj.is_superuser)
def test_token_with_boolean(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_admin = True
self.assertTrue(token_obj.is_admin)
self.assertEqual(token_obj.is_admin, 1)
def test_token_wrong_expiry(self):
- token_obj = Token()
-
- def _call_setter(value):
- token_obj.expires_on = value
-
- self.assertRaises(ValueError, _call_setter, "2014-06")
+ token_obj = modt.Token()
+ self.assertRaises(
+ ValueError, setattr, token_obj, "expires_on", "2014-06"
+ )
def test_token_expiry_correct_single_digit(self):
expire_str = "2014-7-1"
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.expires_on = expire_str
- expected = datetime(2014, 7, 1, 0, 0)
+ expected = datetime.datetime(2014, 7, 1, 0, 0)
self.assertEqual(expected, token_obj.expires_on)
def test_token_expiry_correct_double_digits(self):
expire_str = "2014-07-01"
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.expires_on = expire_str
- expected = datetime(2014, 7, 1, 0, 0)
+ expected = datetime.datetime(2014, 7, 1, 0, 0)
self.assertEqual(expected, token_obj.expires_on)
- def test_token_to_json(self):
- token_obj = Token()
-
- token_obj._created_on = '1'
- token_obj._token = '1'
-
- expected = (
- '{"username": null, "expired": false, "token": "1", '
- '"properties": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], '
- '"created_on": "1", "ip_address": null, "email": null, '
- '"expires_on": null}'
- )
-
- self.assertEqual(expected, token_obj.to_json())
-
- def test_token_to_json_with_ip(self):
- token_obj = Token()
-
- token_obj._created_on = '1'
- token_obj._token = '1'
- token_obj.ip_address = '127.0.0.1'
- token_obj.is_ip_restricted = True
-
- expected = (
- '{"username": null, "expired": false, "token": "1", '
- '"properties": [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], '
- '"created_on": "1", "ip_address": ["::127.0.0.1"], '
- '"email": null, "expires_on": null}'
- )
-
- self.assertEqual(expected, token_obj.to_json())
-
def test_token_from_json(self):
- token_string = str(uuid4())
- now = datetime.now(tz=tz_util.utc)
+ token_string = str(uuid.uuid4())
+ now = datetime.datetime.now(tz=tz_util.utc)
token_dict = {
"username": "foo",
@@ -196,59 +207,63 @@ class TestTokenModel(unittest.TestCase):
"expired": True,
"email": "bar@foo",
"expires_on": None,
- "properties": [1 for _ in range(0, 16)]
+ "properties": [1 for _ in range(0, 16)],
+ "name": "bar@foo",
+ "_id": "token-id",
}
- token = Token.from_json(
- json.dumps(token_dict, default=json_util.default)
- )
+ token = modt.Token.from_json(token_dict)
- self.assertIsInstance(token, Token)
+ self.assertIsInstance(token, modt.Token)
self.assertEqual(token.properties, [1 for _ in range(0, 16)])
self.assertEqual(token.token, token_string)
self.assertEqual(token.email, "bar@foo")
+ self.assertEqual(token.name, "bar@foo")
+ self.assertEqual(token.id, "token-id")
self.assertTrue(token.expired)
def test_ip_address_check_type_error(self):
- self.assertRaises(TypeError, Token.check_ip_address, 'foo')
+ self.assertRaises(TypeError, modt.check_ip_address, 'foo')
def test_ip_address_check_value_error(self):
addrlist = ['foo']
- self.assertRaises(ValueError, Token.check_ip_address, addrlist)
+ self.assertRaises(ValueError, modt.check_ip_address, addrlist)
def test_ip_address_check_with_ip_address(self):
addrlist = ['127.0.0.1']
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.ip_address = addrlist
- expected = [IPAddress('127.0.0.1').ipv6(ipv4_compatible=True)]
+ expected = [netaddr.IPAddress('127.0.0.1').ipv6(ipv4_compatible=True)]
self.assertEqual(expected, token_obj.ip_address)
def test_ip_address_check_with_ip_network(self):
addrlist = ['192.0.4.0/25']
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.ip_address = addrlist
- expected = [IPNetwork('192.0.4.0/25').ipv6(ipv4_compatible=True)]
+ expected = [
+ netaddr.IPNetwork('192.0.4.0/25').ipv6(ipv4_compatible=True)
+ ]
self.assertEqual(expected, token_obj.ip_address)
def test_ip_address_check_with_ip_network_and_address(self):
addrlist = ['192.0.4.0/25', '127.0.0.1']
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.ip_address = addrlist
expected = [
- IPNetwork('192.0.4.0/25').ipv6(ipv4_compatible=True),
- IPAddress('127.0.0.1').ipv6(ipv4_compatible=True)
+ netaddr.IPNetwork('192.0.4.0/25').ipv6(ipv4_compatible=True),
+ netaddr.IPAddress('127.0.0.1').ipv6(ipv4_compatible=True)
]
self.assertEqual(expected, token_obj.ip_address)
def test_valid_ip_no_restricted(self):
- token_obj = Token()
+ token_obj = modt.Token()
self.assertTrue(token_obj.is_valid_ip("foo"))
def test_valid_ip_single_ip(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_ip_restricted = True
token_obj.ip_address = '127.0.0.1'
@@ -256,14 +271,14 @@ class TestTokenModel(unittest.TestCase):
self.assertFalse(token_obj.is_valid_ip('127.0.0.3'))
def test_valid_ip_single_ip_wrong_address(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_ip_restricted = True
token_obj.ip_address = '127.0.0.1'
self.assertFalse(token_obj.is_valid_ip("a.b.c"))
def test_valid_ip_network(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_ip_restricted = True
token_obj.ip_address = '192.0.4.0/25'
@@ -271,7 +286,7 @@ class TestTokenModel(unittest.TestCase):
self.assertFalse(token_obj.is_valid_ip('192.0.5.1'))
def test_valid_ip_mix_valid(self):
- token_obj = Token()
+ token_obj = modt.Token()
token_obj.is_ip_restricted = True
token_obj.ip_address = ['10.2.3.4', '192.0.4.0/25']
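For context, a minimal standalone sketch (not part of the patch) of the netaddr normalization these tests exercise: IPv4 inputs are converted to IPv4-compatible IPv6 before storage and comparison, which is why the serialized form in the expectations is `::127.0.0.1`.

    import netaddr

    addr = netaddr.IPAddress('127.0.0.1').ipv6(ipv4_compatible=True)
    net = netaddr.IPNetwork('192.0.4.0/25').ipv6(ipv4_compatible=True)

    print(str(addr))    # ::127.0.0.1
    # Membership checks go through an IPSet built from the stored values.
    probe = netaddr.IPAddress('192.0.4.1').ipv6(ipv4_compatible=True)
    print(probe in netaddr.IPSet([net]))    # True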
diff --git a/app/models/token.py b/app/models/token.py
index e2cb072..017c4cc 100644
--- a/app/models/token.py
+++ b/app/models/token.py
@@ -15,43 +15,23 @@
"""The API token model to store token in the DB."""
-import json
-
-from bson import (
- json_util,
- tz_util,
-)
-from datetime import datetime
-from netaddr import (
- IPAddress,
- IPNetwork,
- IPSet,
-)
-from netaddr.core import AddrFormatError
-from types import (
- BooleanType,
- IntType,
- ListType,
- StringTypes,
-)
-from uuid import uuid4
-
-from models import (
- CREATED_KEY,
- EMAIL_KEY,
- EXPIRED_KEY,
- EXPIRES_KEY,
- ID_KEY,
- IP_ADDRESS_KEY,
- PROPERTIES_KEY,
- TOKEN_COLLECTION,
- TOKEN_KEY,
- USERNAME_KEY,
-)
-from models.base import BaseDocument
-
-
-class Token(BaseDocument):
+import bson
+import datetime
+import netaddr
+import netaddr.core
+import types
+import uuid
+
+import models
+import models.base as modb
+
+PROPERTIES_SIZE = 16
+
+
+# pylint: disable=too-many-instance-attributes
+# pylint: disable=invalid-name
+# pylint: disable=too-many-public-methods
+class Token(modb.BaseDocument):
"""This is an API token as stored in the DB.
A token can be:
@@ -75,104 +55,149 @@ class Token(BaseDocument):
- 4: if the token can perform DELETE
- 5: if the token is IP restricted
- 6: if the token can create new tokens
+ - 7: if the token is a boot lab token
"""
def __init__(self):
- self._id = None
- self._token = None
self._created_on = None
+ self._id = None
+ self._name = None
+ self._version = None
+
self._expires_on = None
- self._expired = False
- self._username = None
- self._email = None
self._ip_address = None
- self._properties = [0 for _ in range(0, 16)]
+ self._properties = [0 for _ in range(0, PROPERTIES_SIZE)]
+ self._token = None
+ self.email = None
+ self.expired = False
+ self.username = None
@property
def collection(self):
- return TOKEN_COLLECTION
+ return models.TOKEN_COLLECTION
+
+ @property
+ def name(self):
+ """The name of the object."""
+ if not self._name:
+ self._name = self.email
+ return self._name
+
+ @name.setter
+ def name(self, value):
+ """Set the name of the object."""
+ self._name = value
+
+ @property
+ def id(self):
+ """The ID of this object as returned by mongodb."""
+ return self._id
+
+ @id.setter
+ def id(self, value):
+ """Set the ID of this object with the ObjectID from mongodb.
+
+ :param value: The ID of this object.
+ :type value: str
+ """
+ self._id = value
@property
def token(self):
+ """The real token value. A UUID4 string."""
if self._token is None:
- self._token = str(uuid4())
+ self._token = str(uuid.uuid4())
return self._token
@token.setter
def token(self, value):
+ """Set the value of the token."""
self._token = value
@property
def properties(self):
+ """The properties array."""
return self._properties
@properties.setter
def properties(self, value):
+ """Set the properties array.
+
+ :param value: The array.
+ :type value: list
+ """
+ if not isinstance(value, types.ListType):
+ raise TypeError(
+ "Properties field must be a list, got: %s", type(value)
+ )
+        if len(value) != PROPERTIES_SIZE:
+ raise ValueError(
+ "Properties list size must be %s", PROPERTIES_SIZE
+ )
self._properties = value
@property
def created_on(self):
+ """When this object was created.
+
+ A datetime object in UTC timezone.
+ """
if not self._created_on:
- self._created_on = datetime.now(tz=tz_util.utc)
+ self._created_on = datetime.datetime.now(tz=bson.tz_util.utc)
return self._created_on
@created_on.setter
def created_on(self, value):
+ """Set the creation date of the object."""
self._created_on = value
@property
def expires_on(self):
+ """When this token is supposed to expire."""
return self._expires_on
@expires_on.setter
def expires_on(self, value):
- self._expires_on = self.check_expires_date(value)
+ """Set the expiry date of the token.
+
+        Accepts a date string in the format: %Y-%m-%d
+ """
+ self._expires_on = check_expires_date(value)
@property
def ip_address(self):
+ """The list of IP addresses associated with this token."""
return self._ip_address
@ip_address.setter
def ip_address(self, value):
+ """Set the IP address for this token.
+
+        :param value: The IP address, or a list of addresses.
+ :type value: str or list
+ """
if value is not None:
- if not isinstance(value, ListType):
+ if not isinstance(value, types.ListType):
value = [value]
- value = self.check_ip_address(value)
+ value = check_ip_address(value)
self._ip_address = value
@property
- def expired(self):
- return self._expired
-
- @expired.setter
- def expired(self, value):
- self._expired = value
-
- @property
- def email(self):
- return self._email
-
- @email.setter
- def email(self, value):
- self._email = value
-
- @property
- def username(self):
- return self._username
-
- @username.setter
- def username(self, value):
- self._username = value
-
- @property
def is_admin(self):
+ """If the token is an admin one."""
return self._properties[0]
@is_admin.setter
def is_admin(self, value):
- value = self.check_attribute_value(value)
+ """Make this token an admin one.
+
+        This will also update other fields, turning it into a real admin token.
+ An admin token can perform GET, POST and DELETE and can create new
+ tokens.
+ """
+ value = check_attribute_value(value)
self._properties[0] = value
# Admin tokens can GET, POST and DELETE, are superuser and can create
@@ -185,11 +210,17 @@ class Token(BaseDocument):
@property
def is_superuser(self):
+ """If the token is a super user one."""
return self._properties[1]
@is_superuser.setter
def is_superuser(self, value):
- value = self.check_attribute_value(value)
+ """Make this token a superuser one.
+
+        This will also update other fields. A superuser token cannot create
+ new tokens.
+ """
+ value = check_attribute_value(value)
# Force admin to zero, and also if can create new tokens, regardless
# of what is passed. A super user cannot create new tokens.
@@ -203,49 +234,68 @@ class Token(BaseDocument):
@property
def is_get_token(self):
+ """If the token can perform GET requests."""
return self._properties[2]
@is_get_token.setter
def is_get_token(self, value):
- value = self.check_attribute_value(value)
+ """Set whether the token can perform GET requests."""
+ value = check_attribute_value(value)
self._properties[2] = value
@property
def is_post_token(self):
+ """If the token can perform POST requests."""
return self._properties[3]
@is_post_token.setter
def is_post_token(self, value):
- value = self.check_attribute_value(value)
+ """Sets whether the token can perform POST requests."""
+ value = check_attribute_value(value)
self._properties[3] = value
@property
def is_delete_token(self):
+ """If the token can perform DELETE requests."""
return self._properties[4]
@is_delete_token.setter
def is_delete_token(self, value):
- value = self.check_attribute_value(value)
+ """Set whether the token can perform DELETE requests."""
+ value = check_attribute_value(value)
self._properties[4] = value
@property
def is_ip_restricted(self):
+ """If the token is IP restricted."""
return self._properties[5]
@is_ip_restricted.setter
def is_ip_restricted(self, value):
- value = self.check_attribute_value(value)
+ """Set whether the token is IP restricted."""
+ value = check_attribute_value(value)
self._properties[5] = value
@property
def can_create_token(self):
+ """If with this token it is possible to create new tokens."""
return self._properties[6]
@can_create_token.setter
def can_create_token(self, value):
- value = self.check_attribute_value(value)
+ """Sets whether this token can create new tokens."""
+ value = check_attribute_value(value)
self._properties[6] = value
+ @property
+    def is_lab_token(self):
+        """If the token is a boot lab one."""
+        return self._properties[7]
+
+ @is_lab_token.setter
+    def is_lab_token(self, value):
+        """Set whether the token is a boot lab one."""
+        value = check_attribute_value(value)
+ self._properties[7] = value
+
def is_valid_ip(self, address):
"""Check if an IP address is valid for a token.
@@ -258,128 +308,149 @@ class Token(BaseDocument):
return_value = True
else:
try:
- address = self._convert_ip_address(address)
- if address in IPSet(self.ip_address):
+ address = convert_ip_address(address)
+ if address in netaddr.IPSet(self.ip_address):
return_value = True
- except AddrFormatError:
+ except netaddr.core.AddrFormatError:
# If we get an error converting the IP address, consider it
# not valid and force False.
return_value = False
return return_value
- @classmethod
- def check_ip_address(cls, addrlist):
- """Perform sanity check and conversion on the IP address list.
+ @property
+ def version(self):
+ """The schema version of this object."""
+ return self._version
- :return The address list converted with `IPaddress` and/or `IPNetwork`
- objects.
- """
- if not isinstance(addrlist, ListType):
- raise TypeError("Value must be a list of addresses")
+ @version.setter
+ def version(self, value):
+ """Set the schema version of this object.
- for idx, address in enumerate(addrlist):
- try:
- addrlist[idx] = cls._convert_ip_address(address)
- except AddrFormatError:
- raise ValueError(
- "Address %s is not a valid IP address or network", address
- )
- return addrlist
+ :param value: The schema string.
+    :type value: str
+ """
+ self._version = value
- @staticmethod
- def _convert_ip_address(address):
- """Convert a string into an IPAddress or IPNetwork.
+ def to_dict(self):
+ """Return a dictionary view of the object.
- :return An `IPAddress` or `IPNetwork` object.
+ :return The object as a dictionary.
"""
- if '/' in address:
- address = IPNetwork(address).ipv6(ipv4_compatible=True)
+ doc_dict = {
+ models.CREATED_KEY: self.created_on,
+ models.EMAIL_KEY: self.email,
+ models.EXPIRED_KEY: self.expired,
+ models.EXPIRES_KEY: self.expires_on,
+ models.NAME_KEY: self.name,
+ models.VERSION_KEY: self.version,
+ }
+ if self.ip_address is not None:
+ doc_dict[models.IP_ADDRESS_KEY] = \
+ [str(x) for x in self.ip_address if x]
else:
- address = IPAddress(address).ipv6(ipv4_compatible=True)
- return address
+ doc_dict[models.IP_ADDRESS_KEY] = None
+ doc_dict[models.PROPERTIES_KEY] = self.properties
+ doc_dict[models.TOKEN_KEY] = self.token
+ doc_dict[models.USERNAME_KEY] = self.username
+ if self.id:
+ doc_dict[models.ID_KEY] = self.id
- @staticmethod
- def check_attribute_value(value):
- """Make sure the value passed for the properties list is valid.
+ return doc_dict
- A properties value must be an integer or a boolean, either 0 or 1.
- Negative number will be converted into their absolute values.
+ # pylint: disable=maybe-no-member
+ @staticmethod
+ def from_json(json_obj):
+ """Build a Token object from a JSON string.
- :param value: The value to check.
- :return The value converted into an int.
- :raise TypeError if the value is not IntType or BooleanType; ValueError
- if it is not 0 or 1.
+ :param json_obj: The JSON object to start from.
+ :return An instance of `Token`.
"""
- if not isinstance(value, (IntType, BooleanType)):
- raise TypeError("Wrong value passed, must be int or bool")
+ token_doc = None
+ if json_obj:
+ token_doc = Token()
+ json_get = json_obj.get
+ token_doc.id = json_get(models.ID_KEY)
+ token_doc.name = json_get(models.NAME_KEY)
+ token_doc.email = json_get(models.EMAIL_KEY)
+ token_doc.username = json_get(models.USERNAME_KEY, None)
+ token_doc.token = json_get(models.TOKEN_KEY, None)
+ token_doc.created_on = json_get(models.CREATED_KEY, None)
+ token_doc.expired = json_get(models.EXPIRED_KEY, False)
+ token_doc.expires_on = json_get(models.EXPIRES_KEY, None)
+ token_doc.properties = json_get(
+ models.PROPERTIES_KEY, [0 for _ in range(0, PROPERTIES_SIZE)])
+ token_doc.ip_address = json_get(models.IP_ADDRESS_KEY, None)
+            token_doc.version = json_get(models.VERSION_KEY, "1.0")
+ return token_doc
+
+
+def check_attribute_value(value):
+ """Make sure the value passed for the properties list is valid.
+
+ A properties value must be an integer or a boolean, either 0 or 1.
+    Negative numbers will be converted into their absolute values.
+
+ :param value: The value to check.
+ :return The value converted into an int.
+ :raise TypeError if the value is not IntType or BooleanType; ValueError
+ if it is not 0 or 1.
+ """
+ if not isinstance(value, (types.IntType, types.BooleanType)):
+ raise TypeError("Wrong value passed, must be int or bool")
- value = abs(int(value))
- if 0 != value != 1:
- raise ValueError("Value must be 0 or 1")
+ value = abs(int(value))
+ if 0 != value != 1:
+ raise ValueError("Value must be 0 or 1")
- return value
+ return value
- @staticmethod
- def check_expires_date(value):
- """Check and convert the expiry date.
- Expiry date must follow this format: %Y-%m-%d.
-
- :param value: The date string.
- :return The converted date, or None if the passed value is None.
- :raise ValueError if the date string cannot be parsed accordingly to
- the predefined format.
- """
- try:
- if value:
- value = datetime.strptime(value, "%Y-%m-%d")
- except ValueError, ex:
- raise ex
- else:
- return value
+def check_expires_date(value):
+ """Check and convert the expiry date.
- def to_dict(self):
- """Return a dictionary view of the object.
+ Expiry date must follow this format: %Y-%m-%d.
- :return The object as a dictionary.
- """
- doc_dict = {}
- doc_dict[CREATED_KEY] = self.created_on
- doc_dict[EMAIL_KEY] = self.email
- doc_dict[EXPIRED_KEY] = self.expired
- doc_dict[EXPIRES_KEY] = self.expires_on
- if self.ip_address is not None:
- doc_dict[IP_ADDRESS_KEY] = [str(x) for x in self.ip_address if x]
- else:
- doc_dict[IP_ADDRESS_KEY] = None
- doc_dict[PROPERTIES_KEY] = self.properties
- doc_dict[TOKEN_KEY] = self.token
- doc_dict[USERNAME_KEY] = self.username
- if self._id:
- doc_dict[ID_KEY] = self._id
+ :param value: The date string.
+ :return The converted date, or None if the passed value is None.
+    :raise ValueError if the date string cannot be parsed according to
+ the predefined format.
+ """
+ try:
+ if value:
+ value = datetime.datetime.strptime(value, "%Y-%m-%d")
+ except ValueError, ex:
+ raise ex
+ else:
+ return value
- return doc_dict
- def to_json(self):
- """Return a JSON string for this object.
+def convert_ip_address(address):
+ """Convert a string into an IPAddress or IPNetwork.
- :return A JSON string.
- """
- return json.dumps(self.to_dict(), default=json_util.default)
+ :return An `IPAddress` or `IPNetwork` object.
+ """
+ if '/' in address:
+ address = netaddr.IPNetwork(address).ipv6(ipv4_compatible=True)
+ else:
+ address = netaddr.IPAddress(address).ipv6(ipv4_compatible=True)
+ return address
- @staticmethod
- def from_json(json_obj):
- """Build a Token object from a JSON string.
- :param json_obj: The JSON object to start from, or a JSON string.
- :return An instance of `Token`.
- """
- if isinstance(json_obj, StringTypes):
- json_obj = json_util.loads(json_obj)
+def check_ip_address(addrlist):
+ """Perform sanity check and conversion on the IP address list.
- token = Token()
- for key, value in json_obj.iteritems():
- setattr(token, key, value)
+    :return The address list converted with `IPAddress` and/or `IPNetwork`
+ objects.
+ """
+ if not isinstance(addrlist, types.ListType):
+ raise TypeError("Value must be a list of addresses")
- return token
+ for idx, address in enumerate(addrlist):
+ try:
+ addrlist[idx] = convert_ip_address(address)
+ except netaddr.core.AddrFormatError:
+ raise ValueError(
+ "Address %s is not a valid IP address or network", address
+ )
+ return addrlist
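A rough usage sketch of the reworked Token, assuming the import style used above (`models.token as modt`) and that `models.PROPERTIES_KEY` maps to "properties": permissions live in a 16-slot 0/1 list and IP restrictions are enforced through `is_valid_ip()`. Intended flow only, not an authoritative API reference.

    import models.token as modt

    token = modt.Token()
    token.is_get_token = True        # sets properties[2] to 1
    token.is_ip_restricted = True    # sets properties[5] to 1
    token.ip_address = ['192.0.4.0/25']

    print(token.is_valid_ip('192.0.4.1'))   # True: inside the network
    print(token.is_valid_ip('10.0.0.1'))    # False: outside the restriction
    print(token.to_dict()['properties'])    # [0, 0, 1, 0, 0, 1, 0, 0, ...]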
diff --git a/app/taskqueue/tasks.py b/app/taskqueue/tasks.py
index e882fa4..5d8131f 100644
--- a/app/taskqueue/tasks.py
+++ b/app/taskqueue/tasks.py
@@ -17,20 +17,16 @@
from __future__ import absolute_import
-from celery import group
+import celery
-from taskqueue.celery import app
-from utils.bootimport import import_and_save_boot
-from utils.docimport import import_and_save_job
-from utils.subscription import send
-from utils.batch.common import (
- execute_batch_operation,
-)
-from utils.bisect import execute_boot_bisection
-from utils import LOG
+import taskqueue.celery as taskc
+import utils.batch.common
+import utils.bisect
+import utils.bootimport
+import utils.docimport
-@app.task(name='send-emails', ignore_result=True)
+@taskc.app.task(name='send-emails', ignore_result=True)
def send_emails(job_id):
"""Just a wrapper around the real `send` function.
@@ -38,10 +34,13 @@ def send_emails(job_id):
:param job_id: The job ID to trigger notifications for.
"""
- send(job_id)
+ # send(job_id)
+ # XXX: This has been removed since the subscription handler is not used
+ # right now and will be completely reworked in the future.
+ pass
-@app.task(name='import-job', ignore_result=True)
+@taskc.app.task(name='import-job', ignore_result=True)
def import_job(json_obj, db_options):
"""Just a wrapper around the real import function.
@@ -53,10 +52,10 @@ def import_job(json_obj, db_options):
:param db_options: The mongodb database connection parameters.
:type db_options: dict
"""
- import_and_save_job(json_obj, db_options)
+ utils.docimport.import_and_save_job(json_obj, db_options)
-@app.task(name='import-boot', ignore_result=True)
+@taskc.app.task(name='import-boot', ignore_result=True)
def import_boot(json_obj, db_options):
"""Just a wrapper around the real boot import function.
@@ -68,10 +67,10 @@ def import_boot(json_obj, db_options):
:param db_options: The mongodb database connection parameters.
:type db_options: dict
"""
- import_and_save_boot(json_obj, db_options)
+ utils.bootimport.import_and_save_boot(json_obj, db_options)
-@app.task(name='batch-executor')
+@taskc.app.task(name='batch-executor')
def execute_batch(json_obj, db_options):
"""Run batch operations based on the passed JSON object.
@@ -81,10 +80,10 @@ def execute_batch(json_obj, db_options):
:type db_options: dict
:return The result of the batch operations.
"""
- return execute_batch_operation(json_obj, db_options)
+ return utils.batch.common.execute_batch_operation(json_obj, db_options)
-@app.task(name="boot-bisect")
+@taskc.app.task(name="boot-bisect")
def boot_bisect(doc_id, db_options, fields=None):
"""Run a boot bisect operation on the passed boot document id.
@@ -97,7 +96,23 @@ def boot_bisect(doc_id, db_options, fields=None):
:type fields: list or dict
:return The result of the boot bisect operation.
"""
- return execute_boot_bisection(doc_id, db_options, fields)
+ return utils.bisect.execute_boot_bisection(doc_id, db_options, fields)
+
+
+@taskc.app.task(name="defconfig-bisect")
+def defconfig_bisect(doc_id, db_options, fields=None):
+ """Run a defconfig bisect operation on the passed defconfig document id.
+
+    :param doc_id: The defconfig document ID.
+ :type doc_id: str
+ :param db_options: The mongodb database connection parameters.
+ :type db_options: dict
+ :param fields: A `fields` data structure with the fields to return or
+ exclude. Default to None.
+ :type fields: list or dict
+    :return The result of the defconfig bisect operation.
+ """
+ return utils.bisect.execute_defconfig_bisection(doc_id, db_options, fields)
def run_batch_group(batch_op_list, db_options):
@@ -109,7 +124,7 @@ def run_batch_group(batch_op_list, db_options):
:param db_options: The mongodb database connection parameters.
:type db_options: dict
"""
- job = group(
+ job = celery.group(
[
execute_batch.s(batch_op, db_options)
for batch_op in batch_op_list
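The tasks above keep the thin-wrapper pattern, and `run_batch_group` fans them out with `celery.group`. A hedged, self-contained sketch of that fan-out using generic Celery APIs (broker URL and task body assumed, not lifted from the patch):

    import celery

    app = celery.Celery('sketch', broker='redis://localhost')  # broker assumed

    @app.task
    def execute_batch(batch_op, db_options):
        # Stand-in for taskqueue.tasks.execute_batch.
        return batch_op

    def run_batch_group(batch_op_list, db_options):
        job = celery.group(
            execute_batch.s(batch_op, db_options)
            for batch_op in batch_op_list
        )
        # Schedule all the subtasks in parallel and wait for the results.
        return job.apply_async().get()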
diff --git a/app/tests/__init__.py b/app/tests/__init__.py
index 1a6c5e8..21e0867 100644
--- a/app/tests/__init__.py
+++ b/app/tests/__init__.py
@@ -28,18 +28,20 @@ def test_modules():
'handlers.tests.test_handler_response',
'handlers.tests.test_handlers_common',
'handlers.tests.test_job_handler',
- 'handlers.tests.test_subscription_handler',
+ 'handlers.tests.test_lab_handler',
'handlers.tests.test_token_handler',
+ 'handlers.tests.test_version_handler',
'models.tests.test_bisect_model',
'models.tests.test_boot_model',
- 'models.tests.test_models',
+ 'models.tests.test_defconfig_model',
+ 'models.tests.test_job_model',
+ 'models.tests.test_lab_model',
+ 'models.tests.test_subscription_model',
'models.tests.test_token_model',
'utils.batch.tests.test_batch_common',
'utils.bisect.tests.test_bisect',
'utils.tests.test_bootimport',
'utils.tests.test_docimport',
- 'utils.tests.test_meta_parser',
- 'utils.tests.test_subscribe',
'utils.tests.test_validator',
]
diff --git a/app/urls.py b/app/urls.py
index 4f2462b..269e15f 100644
--- a/app/urls.py
+++ b/app/urls.py
@@ -17,40 +17,54 @@
from tornado.web import url
-from handlers.bisect import BisectHandler
-from handlers.batch import BatchHandler
-from handlers.boot import BootHandler
-from handlers.count import CountHandler
-from handlers.defconf import DefConfHandler
-from handlers.job import JobHandler
-from handlers.subscription import SubscriptionHandler
-from handlers.token import TokenHandler
+import handlers.batch
+import handlers.bisect
+import handlers.boot
+import handlers.count
+import handlers.defconf
+import handlers.job
+import handlers.lab
+import handlers.subscription
+import handlers.token
+import handlers.version
-_JOB_URL = url(r'/job(?P<sl>/)?(?P<id>.*)', JobHandler, name='job')
+_JOB_URL = url(
+ r'/job(?P<sl>/)?(?P<id>.*)', handlers.job.JobHandler, name='job'
+)
_DEFCONF_URL = url(
- r'/defconfig(?P<sl>/)?(?P<id>.*)', DefConfHandler, name='defconf'
+ r'/defconfig(?P<sl>/)?(?P<id>.*)',
+ handlers.defconf.DefConfHandler,
+ name='defconf'
)
_SUBSCRIPTION_URL = url(
r'/subscription(?P<sl>/)?(?P<id>.*)',
- SubscriptionHandler,
+ handlers.subscription.SubscriptionHandler,
name='subscription',
)
-_BOOT_URL = url(r'/boot(?P<sl>/)?(?P<id>.*)', BootHandler, name='boot')
+_BOOT_URL = url(
+ r'/boot(?P<sl>/)?(?P<id>.*)', handlers.boot.BootHandler, name='boot'
+)
_COUNT_URL = url(
- r'/count(?P<sl>/)?(?P<id>.*)', CountHandler, name='count'
+ r'/count(?P<sl>/)?(?P<id>.*)', handlers.count.CountHandler, name='count'
)
_TOKEN_URL = url(
- r'/token(?P<sl>/)?(?P<id>.*)', TokenHandler, name='token'
+ r'/token(?P<sl>/)?(?P<id>.*)', handlers.token.TokenHandler, name='token'
)
_BATCH_URL = url(
- r'/batch', BatchHandler, name='batch'
+ r'/batch', handlers.batch.BatchHandler, name='batch'
)
_BISECT_URL = url(
r"/bisect/(?P<collection>.*)/(?P<id>.*)",
- BisectHandler,
+ handlers.bisect.BisectHandler,
name="bisect"
)
+_LAB_URL = url(
+ r"/lab(?P<sl>/)?(?P<id>.*)", handlers.lab.LabHandler, name="lab"
+)
+_VERSION_URL = url(
+ r"/version", handlers.version.VersionHandler, name="version"
+)
APP_URLS = [
_BATCH_URL,
@@ -59,6 +73,8 @@ APP_URLS = [
_COUNT_URL,
_DEFCONF_URL,
_JOB_URL,
+ _LAB_URL,
_SUBSCRIPTION_URL,
_TOKEN_URL,
+ _VERSION_URL,
]
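The two new routes follow the same tornado `url()` convention as the rest of the table: a regex, a handler class, and a reverse-lookup name. A minimal standalone sketch (the handler body and payload are hypothetical):

    import tornado.ioloop
    import tornado.web
    from tornado.web import url

    class VersionHandler(tornado.web.RequestHandler):
        # Stand-in for handlers.version.VersionHandler.
        def get(self):
            self.write({"version": "2014.12"})  # hypothetical payload

    application = tornado.web.Application([
        url(r"/version", VersionHandler, name="version"),
    ])

    if __name__ == "__main__":
        application.listen(8888)
        tornado.ioloop.IOLoop.instance().start()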
diff --git a/app/utils/__init__.py b/app/utils/__init__.py
index 7b8a500..53f366c 100644
--- a/app/utils/__init__.py
+++ b/app/utils/__init__.py
@@ -13,13 +13,13 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from utils.log import get_log
+import utils.log
BASE_PATH = '/var/www/images/kernel-ci'
DEFAULT_MONGODB_URL = "localhost"
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_POOL = 250
-LOG = get_log()
+LOG = utils.log.get_log()
def is_hidden(value):
diff --git a/app/utils/bisect/__init__.py b/app/utils/bisect/__init__.py
index ee7063a..7e60dc7 100644
--- a/app/utils/bisect/__init__.py
+++ b/app/utils/bisect/__init__.py
@@ -13,129 +13,140 @@
"""All the bisect operations that the app can perform."""
-from bson import tz_util
-from bson.json_util import default
-from datetime import datetime
-from json import (
- dumps as j_dump,
- loads as j_load,
-)
-from pymongo import DESCENDING
-from types import DictionaryType
-
-from models import (
- ARCHITECTURE_KEY,
- BISECT_BOOT_CREATED_KEY,
- BISECT_BOOT_METADATA_KEY,
- BISECT_BOOT_STATUS_KEY,
- BISECT_DEFCONFIG_ARCHITECTURE_KEY,
- BISECT_DEFCONFIG_CREATED_KEY,
- BISECT_DEFCONFIG_METADATA_KEY,
- BISECT_DEFCONFIG_STATUS_KEY,
- BOARD_KEY,
- BOOT_COLLECTION,
- CREATED_KEY,
- DEFCONFIG_COLLECTION,
- DEFCONFIG_KEY,
- DIRNAME_KEY,
- GIT_COMMIT_KEY,
- GIT_URL_KEY,
- ID_KEY,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_KEY,
- METADATA_KEY,
- PASS_STATUS,
- STATUS_KEY,
-)
-from models.bisect import BootBisectDocument
-from utils import LOG
-from utils.db import (
- find,
- find_one,
- get_db_connection,
- save,
-)
-
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import bson
+import bson.json_util
+import copy
+import datetime
+import pymongo
+import types
+
+import models
+import models.bisect as mbisect
+import utils
+import utils.db
BOOT_SEARCH_FIELDS = [
- BOARD_KEY,
- CREATED_KEY,
- DEFCONFIG_KEY,
- JOB_ID_KEY,
- JOB_KEY,
- KERNEL_KEY,
- METADATA_KEY,
- STATUS_KEY,
+ models.BOARD_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_ID_KEY,
+ models.DEFCONFIG_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.STATUS_KEY,
]
BOOT_DEFCONFIG_SEARCH_FIELDS = [
- ARCHITECTURE_KEY,
- CREATED_KEY,
- DIRNAME_KEY,
- METADATA_KEY,
- STATUS_KEY,
+ models.ARCHITECTURE_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_KEY,
+ models.GIT_BRANCH_KEY,
+ models.GIT_COMMIT_KEY,
+ models.GIT_DESCRIBE_KEY,
+ models.GIT_URL_KEY,
+ models.STATUS_KEY,
+]
+
+BOOT_SORT = [(models.CREATED_KEY, pymongo.DESCENDING)]
+
+DEFCONFIG_SEARCH_FIELDS = [
+ models.ARCHITECTURE_KEY,
+ models.CREATED_KEY,
+ models.DEFCONFIG_FULL_KEY,
+ models.DEFCONFIG_KEY,
+ models.GIT_COMMIT_KEY,
+ models.GIT_DESCRIBE_KEY,
+ models.GIT_URL_KEY,
+ models.ID_KEY,
+ models.JOB_ID_KEY,
+ models.JOB_KEY,
+ models.KERNEL_KEY,
+ models.STATUS_KEY,
]
-BOOT_SORT = [(CREATED_KEY, DESCENDING)]
+DEFCONFIG_SORT = [(models.CREATED_KEY, pymongo.DESCENDING)]
def _combine_defconfig_values(boot_doc, db_options):
"""Combine the boot document values with their own defconfing.
- It returns a list of dictionaries whose structure is a combination
+ It returns a dictionary whose structure is a combination
of the values from the boot document and its associated defconfing.
:param boot_doc: The boot document to retrieve the defconfig of.
:type boot_doc: dict
:param db_options: The mongodb database connection parameters.
:type db_options: dict
- :return A list of dictionaries.
+ :return A dictionary.
"""
- database = get_db_connection(db_options)
+ database = utils.db.get_db_connection(db_options)
boot_doc_get = boot_doc.get
- job = boot_doc_get(JOB_KEY)
- kernel = boot_doc_get(KERNEL_KEY)
- defconfig = boot_doc_get(DEFCONFIG_KEY)
+ job = boot_doc_get(models.JOB_KEY)
+ kernel = boot_doc_get(models.KERNEL_KEY)
+ defconfig = boot_doc_get(models.DEFCONFIG_KEY)
+ defconfig_id = boot_doc_get(models.DEFCONFIG_ID_KEY, None)
+ job_id = boot_doc_get(models.JOB_ID_KEY, None)
combined_values = {
- JOB_KEY: job,
- KERNEL_KEY: kernel,
- DEFCONFIG_KEY: defconfig,
- BISECT_BOOT_STATUS_KEY: boot_doc_get(STATUS_KEY),
- BISECT_BOOT_CREATED_KEY: boot_doc_get(CREATED_KEY),
- BISECT_BOOT_METADATA_KEY: boot_doc_get(METADATA_KEY),
- DIRNAME_KEY: "",
- BISECT_DEFCONFIG_CREATED_KEY: "",
- BISECT_DEFCONFIG_ARCHITECTURE_KEY: "",
- BISECT_DEFCONFIG_STATUS_KEY: "",
- BISECT_DEFCONFIG_METADATA_KEY: {}
+ models.BISECT_BOOT_CREATED_KEY: boot_doc_get(models.CREATED_KEY),
+ models.BISECT_BOOT_METADATA_KEY: boot_doc_get(models.METADATA_KEY),
+ models.BISECT_BOOT_STATUS_KEY: boot_doc_get(models.STATUS_KEY),
+ models.BISECT_DEFCONFIG_ARCHITECTURE_KEY: "",
+ models.BISECT_DEFCONFIG_CREATED_KEY: "",
+ models.BISECT_DEFCONFIG_STATUS_KEY: "",
+ models.DEFCONFIG_ID_KEY: defconfig_id,
+ models.DEFCONFIG_KEY: defconfig,
+ models.DIRNAME_KEY: "",
+ models.GIT_BRANCH_KEY: "",
+ models.GIT_COMMIT_KEY: "",
+ models.GIT_DESCRIBE_KEY: "",
+ models.GIT_URL_KEY: "",
+ models.JOB_ID_KEY: job_id,
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel,
}
- defconf_id = job + "-" + kernel + "-" + defconfig
- defconf_doc = find_one(
- database[DEFCONFIG_COLLECTION],
- defconf_id,
- fields=BOOT_DEFCONFIG_SEARCH_FIELDS
- )
+ if defconfig_id:
+ defconf_doc = utils.db.find_one(
+ database[models.DEFCONFIG_COLLECTION],
+ [defconfig_id],
+ fields=BOOT_DEFCONFIG_SEARCH_FIELDS
+ )
+ else:
+ defconfig_name = job + "-" + kernel + "-" + defconfig
+ defconf_doc = utils.db.find_one(
+ database[models.DEFCONFIG_COLLECTION],
+ [defconfig_name],
+ field=models.NAME_KEY,
+ fields=BOOT_DEFCONFIG_SEARCH_FIELDS
+ )
if defconf_doc:
defconf_doc_get = defconf_doc.get
- combined_values[DIRNAME_KEY] = defconf_doc_get(DIRNAME_KEY)
- combined_values[BISECT_DEFCONFIG_CREATED_KEY] = defconf_doc_get(
- CREATED_KEY
- )
- combined_values[BISECT_DEFCONFIG_ARCHITECTURE_KEY] = defconf_doc_get(
- ARCHITECTURE_KEY
- )
- combined_values[BISECT_DEFCONFIG_STATUS_KEY] = defconf_doc_get(
- STATUS_KEY
- )
- combined_values[BISECT_DEFCONFIG_METADATA_KEY] = defconf_doc_get(
- METADATA_KEY
- )
+ combined_values[models.DIRNAME_KEY] = defconf_doc_get(
+ models.DIRNAME_KEY)
+ combined_values[models.BISECT_DEFCONFIG_CREATED_KEY] = \
+ defconf_doc_get(models.CREATED_KEY)
+ combined_values[models.BISECT_DEFCONFIG_ARCHITECTURE_KEY] = \
+ defconf_doc_get(models.ARCHITECTURE_KEY)
+ combined_values[models.BISECT_DEFCONFIG_STATUS_KEY] = \
+ defconf_doc_get(models.STATUS_KEY)
+ combined_values[models.GIT_URL_KEY] = defconf_doc_get(
+ models.GIT_URL_KEY, None)
+ combined_values[models.GIT_BRANCH_KEY] = defconf_doc_get(
+ models.GIT_BRANCH_KEY, None)
+ combined_values[models.GIT_COMMIT_KEY] = defconf_doc_get(
+ models.GIT_COMMIT_KEY, None)
+ combined_values[models.GIT_DESCRIBE_KEY] = defconf_doc_get(
+ models.GIT_DESCRIBE_KEY, None)
return combined_values
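In short, the defconfig lookup now prefers the stored `defconfig_id` and only falls back to the legacy `job-kernel-defconfig` name. A hedged sketch of that fallback with plain pymongo (collection and key names assumed):

    def lookup_defconfig(database, boot_doc):
        # Prefer the document _id carried by newer boot reports.
        defconfig_id = boot_doc.get("defconfig_id")
        if defconfig_id:
            spec = {"_id": {"$in": [defconfig_id]}}
        else:
            # Legacy reports: reconstruct the document name.
            name = "-".join(
                [boot_doc["job"], boot_doc["kernel"], boot_doc["defconfig"]])
            spec = {"name": {"$in": [name]}}
        return database["defconfig"].find_one(spec)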
@@ -156,32 +167,37 @@ def execute_boot_bisection(doc_id, db_options, fields=None):
:type fields: list or dict
:return A numeric value for the result status and a list dictionaries.
"""
- database = get_db_connection(db_options)
+ database = utils.db.get_db_connection(db_options)
result = []
code = 200
- start_doc = find_one(
- database[BOOT_COLLECTION], doc_id, fields=BOOT_SEARCH_FIELDS
+ obj_id = bson.objectid.ObjectId(doc_id)
+ start_doc = utils.db.find_one(
+ database[models.BOOT_COLLECTION], [obj_id], fields=BOOT_SEARCH_FIELDS
)
- if all([start_doc, isinstance(start_doc, DictionaryType)]):
+ if all([start_doc, isinstance(start_doc, types.DictionaryType)]):
start_doc_get = start_doc.get
- if start_doc_get(STATUS_KEY) == PASS_STATUS:
+ if start_doc_get(models.STATUS_KEY) == models.PASS_STATUS:
code = 400
result = None
else:
- bisect_doc = BootBisectDocument(doc_id)
- bisect_doc.job = start_doc_get(JOB_ID_KEY)
- bisect_doc.created_on = datetime.now(tz=tz_util.utc)
- bisect_doc.board = start_doc_get(BOARD_KEY)
+ bisect_doc = mbisect.BootBisectDocument(obj_id)
+ bisect_doc.version = "1.0"
+ bisect_doc.job = start_doc_get(models.JOB_KEY, None)
+ bisect_doc.job_id = start_doc_get(models.JOB_ID_KEY, None)
+ bisect_doc.defconfig_id = start_doc_get(
+ models.DEFCONFIG_ID_KEY, None)
+ bisect_doc.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
+ bisect_doc.board = start_doc_get(models.BOARD_KEY)
spec = {
- BOARD_KEY: start_doc_get(BOARD_KEY),
- DEFCONFIG_KEY: start_doc_get(DEFCONFIG_KEY),
- JOB_KEY: start_doc_get(JOB_KEY),
- CREATED_KEY: {
- "$lt": start_doc_get(CREATED_KEY)
+ models.BOARD_KEY: start_doc_get(models.BOARD_KEY),
+ models.DEFCONFIG_KEY: start_doc_get(models.DEFCONFIG_KEY),
+ models.JOB_KEY: start_doc_get(models.JOB_KEY),
+ models.CREATED_KEY: {
+ "$lt": start_doc_get(models.CREATED_KEY)
}
}
@@ -190,18 +206,19 @@ def execute_boot_bisection(doc_id, db_options, fields=None):
func = _combine_defconfig_values
bad_doc = func(start_doc, db_options)
- bad_doc_meta = bad_doc[BISECT_DEFCONFIG_METADATA_KEY].get
+ bad_doc_get = bad_doc.get
- bisect_doc.bad_commit_date = bad_doc[BISECT_DEFCONFIG_CREATED_KEY]
- bisect_doc.bad_commit = bad_doc_meta(GIT_COMMIT_KEY)
- bisect_doc.bad_commit_url = bad_doc_meta(GIT_URL_KEY)
+ bisect_doc.bad_commit_date = bad_doc_get(
+ models.BISECT_DEFCONFIG_CREATED_KEY)
+ bisect_doc.bad_commit = bad_doc_get(models.GIT_COMMIT_KEY)
+ bisect_doc.bad_commit_url = bad_doc_get(models.GIT_URL_KEY)
all_valid_docs = [bad_doc]
# Search through all the previous boot reports, until one that
# passed is found, and combine them with their defconfig document.
- all_prev_docs = find(
- database[BOOT_COLLECTION],
+ all_prev_docs = utils.db.find(
+ database[models.BOOT_COLLECTION],
0,
0,
spec=spec,
@@ -219,24 +236,193 @@ def execute_boot_bisection(doc_id, db_options, fields=None):
# The last doc should be the good one, in case it is, add the
# values to the bisect_doc.
good_doc = all_valid_docs[-1]
- if good_doc[BISECT_BOOT_STATUS_KEY] == PASS_STATUS:
- good_doc_meta = good_doc[BISECT_DEFCONFIG_METADATA_KEY].get
- bisect_doc.good_commit = good_doc_meta(GIT_COMMIT_KEY)
- bisect_doc.good_commit_url = good_doc_meta(GIT_URL_KEY)
- bisect_doc.good_commit_date = \
- good_doc[BISECT_DEFCONFIG_CREATED_KEY]
+ if (good_doc[models.BISECT_BOOT_STATUS_KEY] ==
+ models.PASS_STATUS):
+ good_doc_get = good_doc.get
+ bisect_doc.good_commit = good_doc_get(
+ models.GIT_COMMIT_KEY)
+ bisect_doc.good_commit_url = good_doc_get(
+ models.GIT_URL_KEY)
+ bisect_doc.good_commit_date = good_doc_get(
+ models.BISECT_DEFCONFIG_CREATED_KEY)
# Store everything in the bisect_data list of the bisect_doc.
bisect_doc.bisect_data = all_valid_docs
- return_code, saved_id = save(database, bisect_doc, manipulate=True)
+ return_code, saved_id = utils.db.save(
+ database, bisect_doc, manipulate=True)
+ if return_code == 201:
+ bisect_doc.id = saved_id
+ else:
+ utils.LOG.error("Error saving bisect data %s", doc_id)
+
+ bisect_doc = _update_doc_fields(bisect_doc, fields)
+ result = [
+ json.loads(
+ json.dumps(
+ bisect_doc,
+ default=bson.json_util.default,
+ ensure_ascii=False
+ )
+ )
+ ]
+ else:
+ code = 404
+ result = None
+
+ return code, result
+
+
+def execute_defconfig_bisection(doc_id, db_options, fields=None):
+ """Calculate bisect data for the provided defconfig report.
+
+    It searches all the previous defconfig builds starting from the provided
+    one until it finds one that passed. After that, it combines the values
+    into a single data structure.
+
+    :param doc_id: The defconfig document ID.
+ :type doc_id: str
+ :param db_options: The mongodb database connection parameters.
+ :type db_options: dict
+ :param fields: A `fields` data structure with the fields to return or
+ exclude. Default to None.
+ :type fields: list or dict
+ :return A numeric value for the result status and a list of dictionaries.
+ """
+ database = utils.db.get_db_connection(db_options)
+ result = []
+ code = 200
+
+ obj_id = bson.objectid.ObjectId(doc_id)
+ start_doc = utils.db.find_one(
+ database[models.DEFCONFIG_COLLECTION],
+ [obj_id], fields=DEFCONFIG_SEARCH_FIELDS
+ )
+
+ if all([start_doc, isinstance(start_doc, types.DictionaryType)]):
+ start_doc_get = start_doc.get
+
+ if start_doc_get(models.STATUS_KEY) == models.PASS_STATUS:
+ code = 400
+ result = None
+ else:
+ bisect_doc = mbisect.DefconfigBisectDocument(obj_id)
+ bisect_doc.version = "1.0"
+ bisect_doc.arch = start_doc_get(models.ARCHITECTURE_KEY, None)
+ bisect_doc.job = start_doc_get(models.JOB_KEY, None)
+ bisect_doc.job_id = start_doc_get(models.JOB_ID_KEY, None)
+ bisect_doc.defconfig_id = start_doc_get(models.ID_KEY)
+ bisect_doc.defconfig = start_doc_get(models.DEFCONFIG_KEY, None)
+ bisect_doc.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
+ bisect_doc.bad_commit_date = start_doc_get(models.CREATED_KEY)
+ bisect_doc.bad_commit = start_doc_get(models.GIT_COMMIT_KEY)
+ bisect_doc.bad_commit_url = start_doc_get(models.GIT_URL_KEY)
+
+ spec = {
+ models.ARCHITECTURE_KEY: start_doc_get(
+ models.ARCHITECTURE_KEY),
+ models.DEFCONFIG_FULL_KEY: start_doc_get(
+ models.DEFCONFIG_FULL_KEY),
+ models.DEFCONFIG_KEY: start_doc_get(models.DEFCONFIG_KEY),
+ models.JOB_KEY: start_doc_get(models.JOB_KEY),
+ }
+
+ all_valid_docs = [start_doc]
+
+ # Search for the first passed defconfig so that we can limit the
+ # next search. Doing this to cut down search and load time on
+ # mongodb side: there are a lot of defconfig documents to search
+ # for and the mongodb Cursor can get quite big.
+ # Tweak the spec to search for PASS status and limit also the
+ # result found: we are only interested in the first found one.
+            # Need to use copy.deepcopy here since, for some strange
+            # reason, just adding and removing the keys from the spec
+            # does not work as expected.
+ pass_spec = copy.deepcopy(spec)
+ pass_spec[models.STATUS_KEY] = models.PASS_STATUS
+
+ passed_builds = utils.db.find(
+ database[models.DEFCONFIG_COLLECTION],
+ 10,
+ 0,
+ spec=pass_spec,
+ fields=DEFCONFIG_SEARCH_FIELDS,
+ sort=DEFCONFIG_SORT
+ )
+
+ # In case we have a passed doc, tweak the spec to search between
+ # the valid dates.
+ if passed_builds.count() > 0:
+ passed_build = passed_builds[0]
+
+ if passed_build.get(models.STATUS_KEY) != models.PASS_STATUS:
+ utils.LOG.warn(
+ "First result found is not a passed build for '%s'",
+ obj_id
+ )
+ for doc in passed_builds:
+ if doc.get(models.STATUS_KEY) == models.PASS_STATUS:
+ passed_build = doc
+ break
+
+ spec[models.CREATED_KEY] = {
+ "$gte": passed_build.get(models.CREATED_KEY),
+ "$lt": start_doc_get(models.CREATED_KEY)
+ }
+ else:
+ spec[models.CREATED_KEY] = {
+ "$lt": start_doc_get(models.CREATED_KEY)
+ }
+ utils.LOG.warn("No passed build found for '%s'", obj_id)
+
+ all_prev_docs = utils.db.find(
+ database[models.DEFCONFIG_COLLECTION],
+ 0,
+ 0,
+ spec=spec,
+ fields=DEFCONFIG_SEARCH_FIELDS,
+ sort=DEFCONFIG_SORT
+ )
+
+ if all_prev_docs:
+ all_valid_docs.extend(
+ [
+ doc for doc in _get_docs_until_pass(all_prev_docs)
+ ]
+ )
+
+ # The last doc should be the good one, in case it is, add the
+ # values to the bisect_doc.
+ good_doc = all_valid_docs[-1]
+            if good_doc[models.STATUS_KEY] == models.PASS_STATUS:
+ good_doc_get = good_doc.get
+ bisect_doc.good_commit = good_doc_get(
+ models.GIT_COMMIT_KEY)
+ bisect_doc.good_commit_url = good_doc_get(
+ models.GIT_URL_KEY)
+ bisect_doc.good_commit_date = good_doc_get(
+ models.CREATED_KEY)
+
+ # Store everything in the bisect data.
+ bisect_doc.bisect_data = all_valid_docs
+
+ return_code, saved_id = utils.db.save(
+ database, bisect_doc, manipulate=True)
if return_code == 201:
bisect_doc.id = saved_id
else:
- LOG.error("Error savind bisect data %s", doc_id)
+ utils.LOG.error("Error saving bisect data %s", doc_id)
bisect_doc = _update_doc_fields(bisect_doc, fields)
- result = [j_load(j_dump(bisect_doc, default=default))]
+ result = [
+ json.loads(
+ json.dumps(
+ bisect_doc,
+ default=bson.json_util.default,
+ ensure_ascii=False
+ )
+ )
+ ]
else:
code = 404
result = None
@@ -257,18 +443,18 @@ def _update_doc_fields(bisect_doc, fields):
:param fields: A `fields` data structure with the fields to return or
exclude. Default to None.
:type fields: list or dict
- :return The BisectDocument as a dictionary.
+    :return The BisectDocument as a dict, calling its `to_dict()` method.
"""
if fields:
if isinstance(fields, list):
bisect_doc = bisect_doc.to_dict()
to_remove = list(bisect_doc.viewkeys() - set(fields))
for field in to_remove:
- if field == ID_KEY:
+ if field == models.ID_KEY:
continue
else:
bisect_doc.pop(field)
- elif isinstance(fields, DictionaryType):
+ elif isinstance(fields, types.DictionaryType):
y_fields = [
field for field, val in fields.iteritems() if val
]
@@ -294,7 +480,7 @@ def _get_docs_until_pass(doc_list):
:type doc_list: list
"""
for doc in doc_list:
- if doc[STATUS_KEY] == PASS_STATUS:
+ if doc[models.STATUS_KEY] == models.PASS_STATUS:
yield doc
break
yield doc
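`_get_docs_until_pass` is the piece that bounds both bisections: it yields every report up to and including the first passed one. A self-contained illustration of that generator shape:

    def take_until_pass(doc_list):
        # Yield docs until the first PASS, including the PASS itself so
        # the "good" end of the bisect window is part of the result.
        for doc in doc_list:
            if doc["status"] == "PASS":
                yield doc
                break
            yield doc

    docs = [{"status": "FAIL"}, {"status": "FAIL"},
            {"status": "PASS"}, {"status": "FAIL"}]
    print([d["status"] for d in take_until_pass(docs)])
    # ['FAIL', 'FAIL', 'PASS']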
diff --git a/app/utils/bootimport.py b/app/utils/bootimport.py
index 2e609a9..c0a141b 100644
--- a/app/utils/bootimport.py
+++ b/app/utils/bootimport.py
@@ -15,58 +15,32 @@
"""Container for all the boot import related functions."""
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import bson
+import copy
+import datetime
import glob
-import json
import os
-import pymongo
import re
-from bson import tz_util
-from datetime import (
- datetime,
- timedelta
-)
-
-from models import (
- BOOT_LOG_HTML_KEY,
- BOOT_LOG_KEY,
- DB_NAME,
- DTB_ADDR_KEY,
- DTB_KEY,
- ENDIANNESS_KEY,
- FASTBOOT_KEY,
- INITRD_ADDR_KEY,
- JOB_KEY,
- KERNEL_IMAGE_KEY,
- KERNEL_KEY,
- UNKNOWN_STATUS,
-)
-from models.boot import BootDocument
-from utils import (
- BASE_PATH,
- LOG,
- is_hidden,
-)
-from utils.db import (
- get_db_connection,
- save,
-)
+import models
+import models.boot as modbt
+import utils
+import utils.db
# Pattern used for glob matching files on the filesystem.
BOOT_REPORT_PATTERN = 'boot-*.json'
-# Keys defined only for the boot report JSON format. We store them differently.
-BOOT_TIME_JSON = 'boot_time'
-LOAD_ADDR_JSON = 'loadaddr'
-BOOT_RESULT_JSON = 'boot_result'
-BOOT_WARNINGS_JSON = 'boot_warnings'
-
# Some dtb files appear to be in a temp directory like 'tmp', and will
# result in some weird names.
TMP_RE = re.compile(r'tmp')
-def import_and_save_boot(json_obj, db_options, base_path=BASE_PATH):
+def import_and_save_boot(json_obj, db_options, base_path=utils.BASE_PATH):
"""Wrapper function to be used as an external task.
This function should only be called by Celery or other task managers.
@@ -78,149 +52,339 @@ def import_and_save_boot(json_obj, db_options, base_path=BASE_PATH):
:type json_obj: dict
:param db_options: The mongodb database connection parameters.
:type db_options: dict
- :param base_path: The base path where to start looking for the boot log
- file. It defaults to: /var/www/images/kernel-ci.
"""
- database = get_db_connection(db_options)
- docs = parse_boot_from_json(json_obj, base_path)
-
- if docs:
- save(database, docs)
- else:
- LOG.info("No boot log imported")
-
+ database = utils.db.get_db_connection(db_options)
+ json_copy = copy.deepcopy(json_obj)
-def parse_boot_from_json(json_obj, base_path=BASE_PATH):
- """Parse boot log file from a JSON object.
+ doc = _parse_boot_from_json(json_copy, database)
+ doc_id = None
+ ret_code = None
- The provided JSON object, a dict-like object, should contain at least the
- `job` and `kernel` keys.
+ if doc:
+ ret_code, doc_id = save_or_update(doc, database)
+ save_to_disk(doc, json_obj, base_path)
+ else:
+ utils.LOG.info("Boot report not imported nor saved")
- :param json_obj: A dict-like object that should contain the keys `job` and
- :param base_path: The base path where to start looking for the boot log
- file. It defaults to: /var/www/images/kernel-ci.
- :return A list with all the `BootDocument`s.
- """
- job = json_obj[JOB_KEY]
- kernel = json_obj[KERNEL_KEY]
+ return ret_code, doc_id
- return _parse_boot(job, kernel, base_path)
+def save_or_update(boot_doc, database):
+ """Save or update the document in the database.
-def _parse_boot(job, kernel, base_path=BASE_PATH):
- """Traverse the kernel directory and look for boot report logs.
+    Check if we have a document available in the db, and in that case
+    perform an update on it.
- :param job: The name of the job.
- :param kernel: The name of the kernel.
- :param base_path: The base path where to start traversing.
- :return A list of documents to be saved, or an empty list.
+ :param boot_doc: The boot document to save.
+ :type boot_doc: BaseDocument
+ :param database: The database connection.
+ :return The save action return code and the doc ID.
"""
- docs = []
-
- job_dir = os.path.join(base_path, job)
+ spec = {
+ models.LAB_NAME_KEY: boot_doc.lab_name,
+ models.NAME_KEY: boot_doc.name,
+ }
+
+ fields = [
+ models.CREATED_KEY,
+ models.ID_KEY,
+ ]
+
+ found_doc = utils.db.find(
+ database[models.BOOT_COLLECTION], 1, 0, spec=spec, fields=fields)
+
+ prev_doc = None
+ doc_len = found_doc.count()
+ if doc_len == 1:
+ prev_doc = found_doc[0]
+
+ if prev_doc:
+ doc_get = prev_doc.get
+ doc_id = doc_get(models.ID_KEY)
+ boot_doc.id = doc_id
+ boot_doc.created_on = doc_get(models.CREATED_KEY)
+
+ utils.LOG.info("Updating boot document with id '%s'", doc_id)
+ ret_val, _ = utils.db.save(database, boot_doc)
+ else:
+ ret_val, doc_id = utils.db.save(database, boot_doc, manipulate=True)
- if not is_hidden(job) and os.path.isdir(job_dir):
- kernel_dir = os.path.join(job_dir, kernel)
+ return ret_val, doc_id
- if not is_hidden(kernel) and os.path.isdir(kernel_dir):
- for defconfig in os.listdir(kernel_dir):
- defconfig_dir = os.path.join(kernel_dir, defconfig)
- if not is_hidden(defconfig) and os.path.isdir(defconfig_dir):
- docs.extend([
- _parse_boot_log(boot_log, job, kernel, defconfig)
- for boot_log in glob.iglob(
- os.path.join(defconfig_dir, BOOT_REPORT_PATTERN)
- )
- if os.path.isfile(boot_log)
- ])
+def save_to_disk(boot_doc, json_obj, base_path):
+ """Save the provided boot report to disk.
- return docs
+ :param boot_doc: The document parsed.
+ :type boot_doc: models.boot.BootDocument
+ :param json_obj: The JSON object to save.
+ :type json_obj: dict
+ :param base_path: The base path where to save the document.
+ :type base_path: str
+ """
+ job = boot_doc.job
+ kernel = boot_doc.kernel
+ defconfig = boot_doc.defconfig_full
+ lab_name = boot_doc.lab_name
+ board = boot_doc.board
+ arch = boot_doc.arch
+
+ r_defconfig = "-".join([arch, defconfig])
+
+ dir_path = os.path.join(base_path, job, kernel, r_defconfig, lab_name)
+ file_path = os.path.join(dir_path, 'boot-%s.json' % board)
+
+ try:
+ if not os.path.isdir(dir_path):
+ os.makedirs(dir_path)
+
+ with open(file_path, mode="w") as write_json:
+ write_json.write(
+ json.dumps(json_obj, encoding="utf_8", ensure_ascii=False)
+ )
+ except (OSError, IOError), ex:
+ utils.LOG.error(
+ "Error saving document '%s' into '%s'",
+ boot_doc.name, dir_path
+ )
+ utils.LOG.exception(ex)
-def _parse_boot_log(boot_log, job, kernel, defconfig):
+def _parse_boot_from_file(boot_log):
"""Read and parse the actual boot report.
:param boot_log: The path to the boot report.
- :param job: The name of the job.
- :param kernel: The name of the kernel.
- :param defconfig: The name of the defconfig.
:return A `BootDocument` object.
"""
- LOG.info("Parsing boot log '%s'", os.path.basename(boot_log))
+ utils.LOG.info("Parsing boot log file '%s'", boot_log)
boot_json = None
boot_doc = None
- with open(boot_log) as read_f:
- boot_json = json.load(read_f)
-
- if boot_json:
- dtb = boot_json.pop(DTB_KEY, None)
-
- if dtb and not TMP_RE.findall(dtb):
- board = os.path.splitext(os.path.basename(dtb))[0]
- else:
- # If we do not have the dtb field we use the boot report file to
- # extract some kind of value for board.
- board = os.path.splitext(
- os.path.basename(boot_log).replace('boot-', ''))[0]
- LOG.info("Using boot report file name for board name: %s", board)
-
- boot_doc = BootDocument(board, job, kernel, defconfig)
- boot_doc.created_on = datetime.fromtimestamp(
- os.stat(boot_log).st_mtime, tz=tz_util.utc)
-
- time_d = timedelta(seconds=float(boot_json.pop(BOOT_TIME_JSON, 0.0)))
- boot_time = datetime(
- 1970, 1, 1,
- minute=time_d.seconds / 60,
- second=time_d.seconds % 60,
- microsecond=time_d.microseconds
- )
+ try:
+ with open(boot_log) as read_f:
+ boot_json = json.load(read_f)
+
+ json_pop_f = boot_json.pop
+
+ # Mandatory fields.
+ job = json_pop_f(models.JOB_KEY)
+ kernel = json_pop_f(models.KERNEL_KEY)
+ defconfig = json_pop_f(models.DEFCONFIG_KEY)
+ defconfig_full = json_pop_f(models.DEFCONFIG_FULL_KEY, defconfig)
+ lab_name = json_pop_f(models.LAB_NAME_KEY)
+ arch = json_pop_f(models.ARCHITECTURE_KEY, models.ARM_ARCHITECTURE_KEY)
+        # Even though board is mandatory, older reports might not set it.
+ board = json_pop_f(models.BOARD_KEY, None)
+ dtb = boot_json.get(models.DTB_KEY, None)
+
+ if not board:
+ utils.LOG.info("No board value specified in the boot report")
+ if dtb and not TMP_RE.findall(dtb):
+ board = os.path.splitext(os.path.basename(dtb))[0]
+ else:
+ # If we do not have the dtb field we use the boot report file to
+ # extract some kind of value for board.
+ board = os.path.splitext(
+ os.path.basename(boot_log).replace('boot-', ''))[0]
+ utils.LOG.info(
+ "Using boot report file name for board name: %s", board
+ )
+
+ boot_doc = modbt.BootDocument(
+ board, job, kernel, defconfig, lab_name, defconfig_full, arch)
+ _update_boot_doc_from_json(boot_doc, boot_json, json_pop_f)
+ # TODO: Find and add job_id and defconfig_id
+ except (OSError, TypeError, IOError), ex:
+ utils.LOG.error("Error opening the file '%s'", boot_log)
+ utils.LOG.exception(ex)
+ except KeyError, ex:
+ utils.LOG.error("Missing key in boot report: import failed")
+ utils.LOG.exception(ex)
- boot_doc.time = boot_time
- boot_doc.status = boot_json.pop(BOOT_RESULT_JSON, UNKNOWN_STATUS)
- boot_doc.warnings = boot_json.pop(BOOT_WARNINGS_JSON, "0")
- boot_doc.boot_log = boot_json.pop(BOOT_LOG_KEY, None)
- boot_doc.initrd_addr = boot_json.pop(INITRD_ADDR_KEY, None)
- boot_doc.load_addr = boot_json.pop(LOAD_ADDR_JSON, None)
- boot_doc.kernel_image = boot_json.pop(KERNEL_IMAGE_KEY, None)
- boot_doc.dtb_addr = boot_json.pop(DTB_ADDR_KEY, None)
- boot_doc.endianness = boot_json.pop(ENDIANNESS_KEY, None)
- boot_doc.boot_log_html = boot_json.pop(BOOT_LOG_HTML_KEY, None)
- boot_doc.fastboot = boot_json.pop(FASTBOOT_KEY, None)
- boot_doc.dtb = dtb
-
- boot_doc.metadata = boot_json
- else:
- LOG.error(
- "Boot log '%s' does not contain JSON data",
- os.path.basename(boot_log)
+ return boot_doc
+
+
+def _parse_boot_from_json(boot_json, database):
+ """Parse the boot report from a JSON object.
+
+ :param boot_json: The JSON object.
+    :type boot_json: dict
+    :param database: The database connection to use for ID lookups.
+ :return A `models.boot.BootDocument` instance, or None if the JSON cannot
+ be parsed correctly.
+ """
+ boot_doc = None
+
+ try:
+ json_pop_f = boot_json.pop
+ board = json_pop_f(models.BOARD_KEY)
+ job = json_pop_f(models.JOB_KEY)
+ kernel = json_pop_f(models.KERNEL_KEY)
+ defconfig = json_pop_f(models.DEFCONFIG_KEY)
+ defconfig_full = json_pop_f(models.DEFCONFIG_FULL_KEY, defconfig)
+ lab_name = json_pop_f(models.LAB_NAME_KEY)
+ arch = json_pop_f(models.ARCHITECTURE_KEY, models.ARM_ARCHITECTURE_KEY)
+
+ boot_doc = modbt.BootDocument(
+ board, job, kernel, defconfig, lab_name, defconfig_full, arch)
+ boot_doc.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
+ _update_boot_doc_from_json(boot_doc, boot_json, json_pop_f)
+ _update_boot_doc_ids(boot_doc, database)
+ except KeyError, ex:
+ utils.LOG.error(
+ "Missing key in boot report: import failed"
)
+ utils.LOG.exception(ex)
return boot_doc
-def _import_all(base_path=BASE_PATH):
- """Handy function to import all boot logs."""
+def _update_boot_doc_ids(boot_doc, database):
+ """Update boot document job and defconfig IDs references.
+
+ :param boot_doc: The boot document to update.
+ :type boot_doc: BootDocument
+ :param database: The database connection to use.
+ """
+ job = boot_doc.job
+ kernel = boot_doc.kernel
+ defconfig = boot_doc.defconfig
+
+ job_name = models.JOB_DOCUMENT_NAME % {
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel
+ }
+ defconfig_name = models.DEFCONFIG_DOCUMENT_NAME % {
+ models.JOB_KEY: job,
+ models.KERNEL_KEY: kernel,
+ models.DEFCONFIG_KEY: defconfig
+ }
+
+ job_doc = utils.db.find_one(
+ database[models.JOB_COLLECTION], [job_name], field="name",
+ fields=[models.ID_KEY]
+ )
+
+ defconfig_doc = utils.db.find_one(
+ database[models.DEFCONFIG_COLLECTION], [defconfig_name], field="name",
+ fields=[models.ID_KEY]
+ )
+
+ if job_doc:
+ boot_doc.job_id = job_doc.get(models.ID_KEY, None)
+ if defconfig_doc:
+ boot_doc.defconfig_id = defconfig_doc.get(models.ID_KEY, None)
+
+
+def _update_boot_doc_from_json(boot_doc, boot_json, json_pop_f):
+ """Update a BootDocument from the provided JSON boot object.
+
+ This function does not return anything, and the BootDocument passed is
+ updated from the values found in the provided JSON object.
+
+ :param boot_doc: The BootDocument to update.
+ :type boot_doc: `models.boot.BootDocument`.
+    :param boot_json: The JSON object from which to take the parameters.
+ :type boot_json: dict
+ :param json_pop_f: The function used to pop elements out of the JSON object.
+ :type json_pop_f: function
+ """
+ time_d = datetime.timedelta(
+ seconds=float(json_pop_f(models.BOOT_TIME_KEY, 0.0))
+ )
+ boot_doc.time = datetime.datetime(
+ 1970, 1, 1,
+ minute=time_d.seconds / 60,
+ second=time_d.seconds % 60,
+ microsecond=time_d.microseconds
+ )
+
+ boot_doc.status = json_pop_f(
+ models.BOOT_RESULT_KEY, models.UNKNOWN_STATUS
+ )
+ boot_doc.board_instance = json_pop_f(models.BOARD_INSTANCE_KEY, None)
+ boot_doc.boot_log = json_pop_f(models.BOOT_LOG_KEY, None)
+ boot_doc.boot_log_html = json_pop_f(models.BOOT_LOG_HTML_KEY, None)
+ boot_doc.boot_result_description = json_pop_f(
+ models.BOOT_RESULT_DESC_KEY, None
+ )
+ boot_doc.dtb = json_pop_f(models.DTB_KEY, None)
+ boot_doc.dtb_addr = json_pop_f(models.DTB_ADDR_KEY, None)
+ boot_doc.dtb_append = json_pop_f(models.DTB_APPEND_KEY, None)
+ boot_doc.endianness = json_pop_f(models.ENDIANNESS_KEY, None)
+ boot_doc.fastboot = json_pop_f(models.FASTBOOT_KEY, None)
+ boot_doc.fastboot_cmd = json_pop_f(models.FASTBOOT_CMD_KEY, None)
+ boot_doc.file_server_resource = json_pop_f(
+ models.FILE_SERVER_RESOURCE_KEY, None)
+ boot_doc.file_server_url = json_pop_f(models.FILE_SERVER_URL_KEY, None)
+ boot_doc.git_branch = json_pop_f(models.GIT_BRANCH_KEY, None)
+ boot_doc.git_commit = json_pop_f(models.GIT_COMMIT_KEY, None)
+ boot_doc.git_describe = json_pop_f(models.GIT_DESCRIBE_KEY, None)
+ boot_doc.git_url = json_pop_f(models.GIT_URL_KEY, None)
+ boot_doc.initrd_addr = json_pop_f(models.INITRD_ADDR_KEY, None)
+ boot_doc.kernel_image = json_pop_f(models.KERNEL_IMAGE_KEY, None)
+ boot_doc.load_addr = json_pop_f(models.BOOT_LOAD_ADDR_KEY, None)
+ boot_doc.retries = json_pop_f(models.BOOT_RETRIES_KEY, 0)
+ boot_doc.version = json_pop_f(models.VERSION_KEY, "1.0")
+ boot_doc.warnings = json_pop_f(models.BOOT_WARNINGS_KEY, 0)
+
+ boot_doc.metadata = boot_json
+
+
+def import_all_for_lab(lab_name, base_path=utils.BASE_PATH):
+ """Handy function to import all boot logs.
+
+ :param lab_name: The lab name whose boot reports should be imported.
+ :type lab_name: str
+ :param base_path: Where to start the scan on the hard disk.
+ :type base_path: str
+ :return A list of BootDocument documents.
+ """
boot_docs = []
for job in os.listdir(base_path):
job_dir = os.path.join(base_path, job)
for kernel in os.listdir(job_dir):
- boot_docs.extend(_parse_boot(job, kernel, base_path))
+ boot_docs.extend(
+ parse_boot_from_disk(job, kernel, lab_name, base_path)
+ )
return boot_docs
-if __name__ == '__main__':
- connection = pymongo.MongoClient()
- database = connection[DB_NAME]
+def parse_boot_from_disk(job, kernel, lab_name, base_path=utils.BASE_PATH):
+ """Traverse the kernel directory and look for boot report logs.
- all_docs = _import_all()
- save(database, all_docs)
+ :param job: The name of the job.
+ :param kernel: The name of the kernel.
+ :param lab_name: The name of the lab.
+ :param base_path: The base path where to start traversing.
+ :return A list of documents to be saved, or an empty list.
+ """
+ docs = []
+
+ job_dir = os.path.join(base_path, job)
+
+ if not utils.is_hidden(job) and os.path.isdir(job_dir):
+ kernel_dir = os.path.join(job_dir, kernel)
- connection.disconnect()
+ if not utils.is_hidden(kernel) and os.path.isdir(kernel_dir):
+ for defconfig in os.listdir(kernel_dir):
+ defconfig_dir = os.path.join(kernel_dir, defconfig)
+
+ if not utils.is_hidden(defconfig) and \
+ os.path.isdir(defconfig_dir):
+
+ lab_dir = os.path.join(defconfig_dir, lab_name)
+ if os.path.isdir(lab_dir):
+ docs.extend([
+ _parse_boot_from_file(boot_log)
+ for boot_log in glob.iglob(
+ os.path.join(lab_dir, BOOT_REPORT_PATTERN)
+ )
+ if os.path.isfile(boot_log)
+ ])
+
+ return docs
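With labs in the picture, `save_to_disk` and `parse_boot_from_disk` agree on a `job/kernel/arch-defconfig/lab` layout with `boot-<board>.json` files. A sketch of the resulting path (all values hypothetical):

    import os

    base_path = "/var/www/images/kernel-ci"   # the utils.BASE_PATH default
    job, kernel = "next", "next-20141205"     # hypothetical tree/kernel
    arch, defconfig = "arm", "multi_v7_defconfig"
    lab_name, board = "lab-01", "beaglebone-black"

    dir_path = os.path.join(
        base_path, job, kernel, "-".join([arch, defconfig]), lab_name)
    file_path = os.path.join(dir_path, "boot-%s.json" % board)
    print(file_path)
    # .../next/next-20141205/arm-multi_v7_defconfig/lab-01/boot-beaglebone-black.json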
diff --git a/app/utils/db.py b/app/utils/db.py
index bc1791f..3687e2d 100644
--- a/app/utils/db.py
+++ b/app/utils/db.py
@@ -15,19 +15,14 @@
"""Collection of mongodb database operations."""
-import types
import pymongo
+import types
from pymongo.errors import OperationFailure
-from models import DB_NAME
-from models.base import BaseDocument
-from utils import (
- DEFAULT_MONGODB_POOL,
- DEFAULT_MONGODB_PORT,
- DEFAULT_MONGODB_URL,
- LOG,
-)
+import models
+import models.base as mbase
+import utils
DB_CONNECTION = None
@@ -44,16 +39,16 @@ def get_db_connection(db_options):
if DB_CONNECTION is None:
db_options_get = db_options.get
- db_host = db_options_get("dbhost", DEFAULT_MONGODB_URL)
- db_port = db_options_get("dbport", DEFAULT_MONGODB_PORT)
- db_pool = db_options.get("dbpool", DEFAULT_MONGODB_POOL)
+ db_host = db_options_get("dbhost", utils.DEFAULT_MONGODB_URL)
+ db_port = db_options_get("dbport", utils.DEFAULT_MONGODB_PORT)
+ db_pool = db_options.get("dbpool", utils.DEFAULT_MONGODB_POOL)
db_user = db_options_get("dbuser", "")
db_pwd = db_options.get("dbpassword", "")
DB_CONNECTION = pymongo.MongoClient(
host=db_host, port=db_port, max_pool_size=db_pool
- )[DB_NAME]
+ )[models.DB_NAME]
if all([db_user, db_pwd]):
DB_CONNECTION.authenticate(db_user, password=db_pwd)
@@ -62,7 +57,7 @@ def get_db_connection(db_options):
def find_one(collection,
- values,
+ value,
field='_id',
operator='$in',
fields=None):
@@ -71,10 +66,11 @@ def find_one(collection,
The `field' value can be specified, and by default is `_id'.
The search executed is like:
- collection.find_one({"_id": {"$in": values}})
+ collection.find_one({"_id": {"$in": value}})
:param collection: The collection where to search.
- :param values: The values to search. Can be a list of multiple values.
+    :param value: The value to search for. It must be of the appropriate type
+    for the operator in use: with the default `$in` operator it must be a
+    list.
:param field: The field where the value should be searched. Defaults to
`_id`.
     :param operator: The operator used to perform the comparison. Defaults to
@@ -83,19 +79,20 @@ def find_one(collection,
result.
:return None or the search result as a dictionary.
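+
+    A minimal usage sketch (collection and value are illustrative):
+
+        doc = find_one(database[models.JOB_COLLECTION], ["job-kernel"], field="name")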
"""
-
- if not isinstance(values, types.ListType):
- if isinstance(values, types.StringTypes):
- values = [values]
- else:
- values = list(values)
-
- result = collection.find_one(
- {
- field: {operator: values}
- },
- fields=fields,
- )
+ result = None
+ if all([operator == '$in', not isinstance(value, types.ListType)]):
+ utils.LOG.error(
+ "Provided value (%s) is not of type list, got: %s",
+ value,
+ type(value)
+ )
+ else:
+ result = collection.find_one(
+ {
+ field: {operator: value}
+ },
+ fields=fields,
+ )
return result
@@ -167,8 +164,8 @@ def count(collection):
return collection.count()
-def save(database, documents, manipulate=False):
- """Save documents into the database.
+def save(database, document, manipulate=False):
+ """Save one document into the database.
:param database: The database where to save.
     :param document: The single `BaseDocument` document to save.
@@ -177,8 +174,55 @@ def save(database, documents, manipulate=False):
:param manipulate: If the passed documents have to be manipulated by
mongodb. Default to False.
:type manipulate: bool
- :return 201 if the save has success, 500 in case of an error. If manipulate
- is True, return also the mongodb created ID.
+    :return A tuple: the first element is the operation code (201 if the save
+    succeeded, 500 on error), the second is the mongodb created `_id` value
+    if manipulate is True, None otherwise.
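+
+    Usage sketch (assuming `job_doc` is a `BaseDocument` instance):
+
+        ret_val, doc_id = save(database, job_doc, manipulate=True)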
+ """
+ ret_value = 201
+ doc_id = None
+
+    if isinstance(document, mbase.BaseDocument):
+        to_save = document.to_dict()
+    else:
+        utils.LOG.warn(
+            "Cannot save document, it is not of type BaseDocument, got %s",
+            type(document)
+        )
+        # Not a valid document: do not touch the database.
+        return 500, None
+
+ try:
+ doc_id = database[document.collection].save(
+ to_save, manipulate=manipulate
+ )
+ utils.LOG.info(
+ "Document '%s' saved (%s)", document.name, document.collection
+ )
+ except OperationFailure, ex:
+ utils.LOG.error(
+ "Error saving the following document: %s (%s)",
+ document.name, document.collection
+ )
+ utils.LOG.exception(ex)
+ ret_value = 500
+
+ return ret_value, doc_id
+
+
+def save_all(database, documents, manipulate=False, fail_on_err=False):
+ """Save a list of documents.
+
+ :param database: The database where to save.
+ :param documents: The list of `BaseDocument` documents.
+ :type documents: list
+ :param manipulate: If the database has to create an _id attribute for each
+ document. Default False.
+ :type manipulate: bool
+    :param fail_on_err: Whether the save operation should stop immediately at
+    the first error. Default False.
+ :type fail_on_err: bool
+    :return A tuple: the first element is the operation code (201 if all the
+    saves succeeded, 500 on error), the second is the list of the mongodb
+    created `_id` values for each document if manipulate is True, or a list
+    of None values.
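+
+    Usage sketch (assuming `docs` is a list of `BaseDocument` instances):
+
+        ret_val, doc_ids = save_all(database, docs, fail_on_err=True)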
"""
ret_value = 201
doc_id = []
@@ -187,28 +231,26 @@ def save(database, documents, manipulate=False):
documents = [documents]
for document in documents:
- if isinstance(document, BaseDocument):
- to_save = document.to_dict()
- else:
- LOG.warn(
- "Cannot save document, it is not of type BaseDocument, got %s",
- type(to_save)
+ if isinstance(document, mbase.BaseDocument):
+ ret_value, save_id = save(
+ database, document, manipulate=manipulate
)
- continue
+ doc_id.append(save_id)
- try:
- doc_id = database[document.collection].save(
- to_save, manipulate=manipulate
+ if fail_on_err and ret_value == 500:
+ break
+ else:
+ utils.LOG.error(
+ "Cannot save document, it is not of type BaseDocument, got %s",
+ type(document)
)
- except OperationFailure, ex:
- LOG.exception(str(ex))
- ret_value = 500
- break
+ doc_id.append(None)
- if manipulate:
- ret_value = (ret_value, doc_id)
+ if fail_on_err:
+ ret_value = 500
+ break
- return ret_value
+ return ret_value, doc_id
def update(collection, spec, document, operation='$set'):
@@ -238,10 +280,10 @@ def update(collection, spec, document, operation='$set'):
}
)
except OperationFailure, ex:
- LOG.error(
+ utils.LOG.error(
"Error updating the following document: %s", str(document)
)
- LOG.exception(str(ex))
+ utils.LOG.exception(str(ex))
ret_val = 500
return ret_val
@@ -262,10 +304,10 @@ def delete(collection, spec_or_id):
try:
collection.remove(spec_or_id)
except OperationFailure, ex:
- LOG.error(
+ utils.LOG.error(
"Error removing the following document: %s", str(spec_or_id)
)
- LOG.exception(str(ex))
+ utils.LOG.exception(str(ex))
ret_val = 500
return ret_val
@@ -356,7 +398,7 @@ def aggregate(
'$limit': limit
})
- LOG.debug(pipeline)
+ utils.LOG.debug(pipeline)
result = collection.aggregate(pipeline)
diff --git a/app/utils/docimport.py b/app/utils/docimport.py
index f6ae929..c61f329 100644
--- a/app/utils/docimport.py
+++ b/app/utils/docimport.py
@@ -15,52 +15,26 @@
"""Container for all the import related functions."""
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import bson
+import datetime
+import glob
import os
import pymongo
+import types
-from bson import tz_util
-from glob import glob
-from datetime import datetime
-
-from models import (
- ARCHITECTURE_KEY,
- BUILD_FAIL_FILE,
- BUILD_META_FILE,
- BUILD_META_JSON_FILE,
- BUILD_PASS_FILE,
- BUILD_RESULT_KEY,
- BUILD_STATUS,
- DB_NAME,
- DEFCONFIG_KEY,
- DONE_FILE,
- DONE_FILE_PATTERN,
- ERRORS_KEY,
- FAIL_STATUS,
- JOB_KEY,
- KERNEL_KEY,
- PASS_STATUS,
- UNKNOWN_STATUS,
- WARNINGS_KEY,
-)
-from models.defconfig import DefConfigDocument
-from models.job import (
- JOB_COLLECTION,
- JobDocument,
-)
-from utils import (
- BASE_PATH,
- LOG,
- is_hidden,
-)
-from utils.db import (
- find_one,
- get_db_connection,
- save,
-)
-from utils.meta_parser import parse_metadata_file
-
-
-def import_and_save_job(json_obj, db_options, base_path=BASE_PATH):
+import models
+import models.defconfig as mdefconfig
+import models.job as mjob
+import utils
+import utils.db
+
+
+def import_and_save_job(json_obj, db_options, base_path=utils.BASE_PATH):
"""Wrapper function to be used as an external task.
This function should only be called by Celery or other task managers.
@@ -71,21 +45,21 @@ def import_and_save_job(json_obj, db_options, base_path=BASE_PATH):
:type db_options: dict
:return The ID of the created document.
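+
+    Usage sketch (job and kernel values are illustrative):
+
+        job_id = import_and_save_job(
+            {"job": "next", "kernel": "next-20141113"}, {})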
"""
- database = get_db_connection(db_options)
+ database = utils.db.get_db_connection(db_options)
docs, job_id = import_job_from_json(json_obj, database, base_path)
if docs:
- LOG.info(
+ utils.LOG.info(
"Importing %d documents with job ID: %s", len(docs), job_id
)
- save(database, docs)
+ utils.db.save_all(database, docs)
else:
- LOG.info("No jobs to save")
+ utils.LOG.info("No jobs to save")
return job_id
-def import_job_from_json(json_obj, database, base_path=BASE_PATH):
+def import_job_from_json(json_obj, database, base_path=utils.BASE_PATH):
"""Import a job based on the provided JSON object.
The provided JSON object, a dict-like object, should contain at least the
@@ -99,13 +73,13 @@ def import_job_from_json(json_obj, database, base_path=BASE_PATH):
directory. It defaults to: /var/www/images/kernel-ci.
:return The documents to be saved, and the job document ID.
"""
- job_dir = json_obj[JOB_KEY]
- kernel_dir = json_obj[KERNEL_KEY]
+ job_dir = json_obj[models.JOB_KEY]
+ kernel_dir = json_obj[models.KERNEL_KEY]
return _import_job(job_dir, kernel_dir, database, base_path)
-def _import_job(job, kernel, database, base_path=BASE_PATH):
+def _import_job(job, kernel, database, base_path=utils.BASE_PATH):
"""Traverse the job dir and create the documenst to save.
:param job: The name of the job.
@@ -117,45 +91,50 @@ def _import_job(job, kernel, database, base_path=BASE_PATH):
job_dir = os.path.join(base_path, job)
kernel_dir = os.path.join(job_dir, kernel)
- if is_hidden(job) or is_hidden(kernel):
+ if utils.is_hidden(job) or utils.is_hidden(kernel):
return docs
- job_id = JobDocument.ID_FORMAT % {JOB_KEY: job, KERNEL_KEY: kernel}
+ job_name = (
+ models.JOB_DOCUMENT_NAME %
+ {models.JOB_KEY: job, models.KERNEL_KEY: kernel}
+ )
- saved_doc = find_one(database[JOB_COLLECTION], [job_id])
+ saved_doc = utils.db.find_one(
+ database[models.JOB_COLLECTION], [job_name], field=models.NAME_KEY
+ )
if saved_doc:
- job_doc = JobDocument.from_json(saved_doc)
+ job_doc = mjob.JobDocument.from_json(saved_doc)
else:
- job_doc = JobDocument(job_id, job=job, kernel=kernel)
+ job_doc = mjob.JobDocument(job, kernel)
- job_doc.updated = datetime.now(tz=tz_util.utc)
docs.append(job_doc)
if os.path.isdir(kernel_dir):
- if (os.path.exists(os.path.join(kernel_dir, DONE_FILE)) or
- glob(os.path.join(kernel_dir, DONE_FILE_PATTERN))):
- job_doc.status = PASS_STATUS
+ if (os.path.exists(os.path.join(kernel_dir, models.DONE_FILE)) or
+ glob.glob(os.path.join(kernel_dir, models.DONE_FILE_PATTERN))):
+ job_doc.status = models.PASS_STATUS
else:
- job_doc.status = UNKNOWN_STATUS
+ job_doc.status = models.UNKNOWN_STATUS
# If the job dir exists, read the last modification time from the
# file system and use that as the creation date.
if not job_doc.created_on:
- job_doc.created_on = datetime.fromtimestamp(
- os.stat(kernel_dir).st_mtime, tz=tz_util.utc)
+ job_doc.created_on = datetime.datetime.fromtimestamp(
+ os.stat(kernel_dir).st_mtime, tz=bson.tz_util.utc)
docs.extend(
[
-                _traverse_defconf_dir(
-                    job_id, job, kernel, kernel_dir, defconf_dir
-                ) for defconf_dir in os.listdir(kernel_dir)
-                if os.path.isdir(os.path.join(kernel_dir, defconf_dir))
-                if not is_hidden(defconf_dir)
+                doc for doc in (
+                    _traverse_defconf_dir(job, kernel, kernel_dir, defconf)
+                    for defconf in os.listdir(kernel_dir)
+                    if os.path.isdir(os.path.join(kernel_dir, defconf))
+                    if not utils.is_hidden(defconf)
+                )
+                # _traverse_defconf_dir returns None when no build data
+                # file is found: skip those results.
+                if doc is not None
]
)
else:
- job_doc.status = BUILD_STATUS
- job_doc.created_on = datetime.now(tz=tz_util.utc)
+ job_doc.status = models.BUILD_STATUS
+ job_doc.created_on = datetime.datetime.now(tz=bson.tz_util.utc)
# Kind of a hack:
# We want to store some metadata at the job document level as well, like
@@ -167,91 +146,118 @@ def _import_job(job, kernel, database, base_path=BASE_PATH):
idx = 0
while idx < docs_len:
defconf_doc = docs[idx]
- if isinstance(defconf_doc, JobDocument):
+ if isinstance(defconf_doc, mjob.JobDocument):
idx += 1
- elif (isinstance(defconf_doc, DefConfigDocument) and
- defconf_doc.metadata):
- for key in job_doc.METADATA_KEYS:
- if key in defconf_doc.metadata.keys():
- job_doc.metadata[key] = \
- defconf_doc.metadata[key]
- break
+ elif isinstance(defconf_doc, mdefconfig.DefconfigDocument):
+ if (defconf_doc.job == job_doc.job and
+ defconf_doc.kernel == job_doc.kernel):
+ job_doc.git_commit = defconf_doc.git_commit
+ job_doc.git_describe = defconf_doc.git_describe
+ job_doc.git_url = defconf_doc.git_url
+ job_doc.git_branch = defconf_doc.git_branch
+ break
else:
idx += 1
- return (docs, job_id)
+ return (docs, job_name)
-def _traverse_defconf_dir(job_id, job, kernel, kernel_dir, defconf_dir):
+def _traverse_defconf_dir(job, kernel, kernel_dir, defconfig_dir):
"""Traverse the defconfig directory looking for files.
-    :param job_id: The ID of the parent job.
+    :param job: The name of the job.
+    :param kernel: The name of the kernel.
:param kernel_dir: The parent directory of this defconfig.
- :param defconf_dir: The actual defconfig directory to parse.
- :return A `DefConfigDocument` instance.
+ :param defconfig_dir: The actual defconfig directory to parse.
+    :return A `DefconfigDocument` instance, or None if no build data file is
+    found.
"""
- defconf_doc = DefConfigDocument(defconf_dir, job_id, job, kernel)
- # Default to the directory name and if we have the metadata file, get
- # the value from there.
- # Split on the + sign since some dirs are in the form 'defconfig+FRAGMENT'.
- defconf_doc.defconfig = defconf_dir.split('+')[0]
- defconf_doc.dirname = defconf_dir
-
- LOG.info("Traversing directory %s", defconf_dir)
-
- real_dir = os.path.join(kernel_dir, defconf_dir)
- defconf_doc.created_on = datetime.fromtimestamp(
- os.stat(real_dir).st_mtime, tz=tz_util.utc
- )
+ real_dir = os.path.join(kernel_dir, defconfig_dir)
+ utils.LOG.info("Traversing directory '%s'", real_dir)
+ defconfig_doc = None
for dirname, subdirs, files in os.walk(real_dir):
# Consider only the actual directory and its files.
subdirs[:] = []
- # Legacy: status was retrieved via the presence of a file.
- # Keep it for backward compatibility.
- if os.path.isfile(os.path.join(dirname, BUILD_PASS_FILE)):
- defconf_doc.status = PASS_STATUS
- elif os.path.isfile(os.path.join(dirname, BUILD_FAIL_FILE)):
- defconf_doc.status = FAIL_STATUS
- else:
- defconf_doc.status = UNKNOWN_STATUS
+ data_file = os.path.join(dirname, models.BUILD_META_JSON_FILE)
- json_meta_file = os.path.join(dirname, BUILD_META_JSON_FILE)
- default_meta_file = os.path.join(dirname, BUILD_META_FILE)
-
- if os.path.isfile(json_meta_file):
- _parse_build_metadata(json_meta_file, defconf_doc)
- elif os.path.isfile(default_meta_file):
- _parse_build_metadata(default_meta_file, defconf_doc)
+ if os.path.isfile(data_file):
+ defconfig_doc = _parse_build_data(data_file, job, kernel)
+ defconfig_doc.dirname = defconfig_dir
else:
- # If we do not have the metadata file, consider the build failed.
- defconf_doc.status = FAIL_STATUS
+ utils.LOG.warn("No build data file found in '%s'", real_dir)
- return defconf_doc
+ return defconfig_doc
-def _parse_build_metadata(metadata_file, defconf_doc):
+def _parse_build_data(data_file, job, kernel):
"""Parse the metadata file contained in thie build directory.
- :param metadata_file: The path to the metadata file.
- :param defconf_doc: The `DefConfigDocument` whose metadata will be updated.
+    :param data_file: The path to the build data file.
+    :param job: The name of the job.
+    :param kernel: The name of the kernel.
+    :return A `DefconfigDocument` instance, or None if the data could not be
+    parsed.
"""
- metadata = parse_metadata_file(metadata_file)
-
- if metadata:
- # Set some of the metadata values directly into the objet for easier
- # search.
- defconf_doc.status = metadata.get(BUILD_RESULT_KEY, None)
- defconf_doc.defconfig = metadata.get(DEFCONFIG_KEY, None)
- defconf_doc.warnings = metadata.get(WARNINGS_KEY, None)
- defconf_doc.errros = metadata.get(ERRORS_KEY, None)
- defconf_doc.arch = metadata.get(ARCHITECTURE_KEY, None)
-
- defconf_doc.metadata = metadata
-
-
-def _import_all(database, base_path=BASE_PATH):
+ build_data = None
+    # Keep `data_file` bound to the path: it is needed below by os.stat()
+    # and in the error message.
+    with open(data_file) as read_file:
+        build_data = json.load(read_file)
+
+ defconfig_doc = None
+
+ if all([build_data, isinstance(build_data, types.DictionaryType)]):
+ data_pop = build_data.pop
+
+ try:
+ defconfig = data_pop(models.DEFCONFIG_KEY)
+ defconfig_full = data_pop(models.DEFCONFIG_FULL_KEY, defconfig)
+ job = data_pop(models.JOB_KEY, None) or job
+ kernel = data_pop(models.KERNEL_KEY, None) or kernel
+
+ defconfig_doc = mdefconfig.DefconfigDocument(
+ job, kernel, defconfig, defconfig_full
+ )
+
+ defconfig_doc.created_on = datetime.datetime.fromtimestamp(
+ os.stat(data_file).st_mtime, tz=bson.tz_util.utc
+ )
+
+ defconfig_doc.arch = data_pop(models.ARCHITECTURE_KEY, None)
+ defconfig_doc.build_log = data_pop(models.BUILD_LOG_KEY, None)
+ defconfig_doc.build_platform = data_pop(
+ models.BUILD_PLATFORM_KEY, [])
+ defconfig_doc.build_time = data_pop(models.BUILD_TIME_KEY, 0)
+ defconfig_doc.dtb_dir = data_pop(models.DTB_DIR_KEY, None)
+            defconfig_doc.errors = data_pop(models.BUILD_ERRORS_KEY, 0)
+ defconfig_doc.file_server_resource = data_pop(
+ models.FILE_SERVER_RESOURCE_KEY, None)
+ defconfig_doc.file_server_url = data_pop(
+ models.FILE_SERVER_URL_KEY, None)
+ defconfig_doc.git_branch = data_pop(models.GIT_BRANCH_KEY, None)
+ defconfig_doc.git_commit = data_pop(models.GIT_COMMIT_KEY, None)
+ defconfig_doc.git_describe = data_pop(models.GIT_DESCRIBE_KEY, None)
+ defconfig_doc.git_url = data_pop(models.GIT_URL_KEY, None)
+ defconfig_doc.kconfig_fragments = data_pop(
+ models.KCONFIG_FRAGMENTS_KEY, None)
+ defconfig_doc.kernel_config = data_pop(
+ models.KERNEL_CONFIG_KEY, None)
+ defconfig_doc.kernel_image = data_pop(models.KERNEL_IMAGE_KEY, None)
+ defconfig_doc.modules = data_pop(models.MODULES_KEY, None)
+ defconfig_doc.modules_dir = data_pop(models.MODULES_DIR_KEY, None)
+ defconfig_doc.status = data_pop(
+ models.BUILD_RESULT_KEY, models.UNKNOWN_STATUS)
+ defconfig_doc.system_map = data_pop(models.SYSTEM_MAP_KEY, None)
+ defconfig_doc.text_offset = data_pop(models.TEXT_OFFSET_KEY, None)
+ defconfig_doc.version = data_pop(models.VERSION_KEY, "1.0")
+ defconfig_doc.warnings = data_pop(models.BUILD_WARNINGS_KEY, 0)
+
+ defconfig_doc.metadata = build_data
+ except KeyError, ex:
+ utils.LOG.exception(ex)
+ utils.LOG.error(
+ "Missing mandatory key in build data file '%s'",
+ data_file
+ )
+
+ return defconfig_doc
+
+
+def _import_all(database, base_path=utils.BASE_PATH):
"""This function is used only to trigger the import from the command line.
Do not use it elsewhere.
@@ -279,9 +285,9 @@ def _import_all(database, base_path=BASE_PATH):
if __name__ == '__main__':
connection = pymongo.MongoClient()
- database = connection[DB_NAME]
+ database = connection[models.DB_NAME]
documents = _import_all(database)
- save(database, documents)
+    utils.db.save_all(database, documents)
connection.disconnect()
diff --git a/app/utils/meta_parser.py b/app/utils/meta_parser.py
deleted file mode 100644
index 0724056..0000000
--- a/app/utils/meta_parser.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-"""Functions to parse build metadata files."""
-
-import ConfigParser
-import json
-import os
-
-from utils import LOG
-
-CONFIG_FILE_START = '[default]'
-CONFIG_FILE_DEFAULT = 'DEFAULT'
-
-
-def parse_metadata_file(metadata_file):
- """Parse the build metadata file.
-
- No checks are performed on the file: it must exists and be a valid file.
-
- :param metadata_file: Full path to the file to parse.
- :return A dictionary with the key-values found in the metadata file, None
- otherwise.
- """
- metadata = None
-
- LOG.info("Parsing metadata file %s", metadata_file)
-
- if os.path.basename(metadata_file).endswith('json'):
- metadata = _parse_json_metadata(metadata_file)
- else:
- with open(metadata_file, 'r') as r_file:
- first_line = r_file.readline()
-
- if first_line.strip().lower() == CONFIG_FILE_START:
- r_file.seek(0)
- metadata = _parse_config_metadata(r_file)
- else:
- r_file.seek(0)
- metadata = _parse_build_metadata(r_file)
-
- return metadata
-
-
-def _parse_json_metadata(metadata_file):
- """Parse a JSON based metadata file.
-
- :param metadata_file: The path to the metadata file.
- :return A dictionary containing the parsed metadata file.
- """
- LOG.info("Parsing JSON metadata file")
-
- metadata = {}
-
- with open(metadata_file, 'r') as r_file:
- metadata = json.load(r_file)
-
- return metadata
-
-
-def _parse_config_metadata(metadata_file):
- """Parse a INI-like metadata file.
-
- Only the default section in the file will be read.
-
- :param metadata_file: The open for reading metadata file.
- :return A dictionary containing the parsed lines in the file.
- """
- LOG.info("Parsing INI-like metadata file")
-
- config = ConfigParser.ConfigParser(allow_no_value=True)
- config.readfp(metadata_file)
-
- metadata = {
- k: v for k, v in config.items(CONFIG_FILE_DEFAULT)
- }
-
- return metadata
-
-
-def _parse_build_metadata(metadata_file):
- """Parse the metadata file contained in thie build directory.
-
- :param metadata_file: The open for reading metadata file.
- :return A dictionary containing the parsed lines in the file.
- """
- LOG.info("Parsing normal metadata file")
-
- metadata = {}
-
- LOG.info("Parsing metadata file %s", metadata_file)
-
- for line in metadata_file:
- line = line.strip()
- if line:
- if line[0] == '#':
- # Accept a sane char for commented lines.
- continue
-
- try:
- key, value = line.split(':', 1)
- value = value.strip()
- if value:
- metadata[key] = value
- else:
- metadata[key] = None
- except ValueError, ex:
- LOG.error("Error parsing metadata file line: %s", line)
- LOG.exception(str(ex))
-
- return metadata
diff --git a/app/utils/scripts/__init__.py b/app/utils/scripts/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/app/utils/scripts/__init__.py
diff --git a/app/utils/scripts/convert-models.py b/app/utils/scripts/convert-models.py
new file mode 100644
index 0000000..f1ee234
--- /dev/null
+++ b/app/utils/scripts/convert-models.py
@@ -0,0 +1,505 @@
+#!/usr/bin/python
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import argparse
+import datetime
+import time
+import sys
+
+import models
+import models.boot as mboot
+import models.job as mjob
+import models.defconfig as mdefconfig
+import utils
+import utils.db
+
+ZERO_TIME = datetime.datetime(1970, 1, 1, 0, 0, 0, 0)
+
+# Data structures with old ID as the key, and new ID the value.
+NEW_JOB_IDS = {}
+NEW_DEFCONFIG_IDS = {}
+DEFCONFIG_GIT_VAL = {}
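+
+# Invocation sketch (lab name and limit are illustrative):
+#
+#     python convert-models.py --lab-name lab-01 --limit 100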
+
+
+def convert_job_collection(db, limit=0):
+ count = db[models.JOB_COLLECTION].find().count()
+ utils.LOG.info("Processing %s job documents", count)
+ time.sleep(2)
+
+ doc_count = 0
+ for document in db[models.JOB_COLLECTION].find(limit=limit):
+ doc_get = document.get
+
+ if doc_get("version", None) == "1.0":
+ continue
+ else:
+ doc_count += 1
+ utils.LOG.info("Processing document #%s", doc_count)
+
+ job = doc_get("job")
+ kernel = doc_get("kernel")
+
+ job_doc = mjob.JobDocument(job, kernel)
+ job_doc.version = "1.0"
+ job_doc.status = doc_get("status", "UNKNOWN")
+ job_doc.created_on = doc_get("created_on")
+ job_doc.private = doc_get("private", False)
+
+ metadata = doc_get("metadata", None)
+ if metadata:
+ meta_get = metadata.get
+ job_doc.git_url = meta_get("git_url", None)
+ job_doc.git_commit = meta_get("git_commit", None)
+ job_doc.git_branch = meta_get("git_branch", None)
+ job_doc.git_describe = meta_get("git_describe", None)
+
+ # Delete and save the old doc.
+ ret_val = utils.db.delete(db[models.JOB_COLLECTION], doc_get("_id"))
+ if ret_val != 200:
+ utils.LOG.error(
+ "Error deleting job document %s", doc_get("_id")
+ )
+ time.sleep(3)
+ sys.exit(1)
+
+ ret_val, doc_id = utils.db.save(db, job_doc, manipulate=True)
+ if ret_val == 201:
+ NEW_JOB_IDS[job + "-" + kernel] = doc_id
+ else:
+ utils.LOG.error(
+ "Error saving new job document for %s", doc_get("_id"))
+ time.sleep(3)
+ sys.exit(1)
+
+ count = db[models.JOB_COLLECTION].find().count()
+ utils.LOG.info("Job documents at the end: %s (%s)", count, doc_count)
+ time.sleep(2)
+
+
+def convert_defconfig_collection(db, limit=0):
+
+ count = db[models.DEFCONFIG_COLLECTION].find().count()
+ utils.LOG.info("Processing %s defconfig documents", count)
+ time.sleep(2)
+
+ doc_count = 0
+ for document in db[models.DEFCONFIG_COLLECTION].find(limit=limit):
+ doc_get = document.get
+
+ if doc_get("version", None) == "1.0":
+ continue
+ else:
+ doc_count += 1
+ utils.LOG.info("Processing document #%s", doc_count)
+
+ metadata = doc_get("metadata", {})
+ meta_get = metadata.get
+ meta_pop = metadata.pop
+
+ arch = None
+ defconfig_full = None
+ kconfig_fragments = None
+ dirname = None
+
+ job = doc_get("job")
+ kernel = doc_get("kernel")
+ defconfig = doc_get("defconfig")
+ dirname = doc_get("dirname", None)
+
+ if defconfig.startswith("arm-"):
+ defconfig = defconfig.replace("arm-", "", 1)
+ arch = "arm"
+ elif defconfig.startswith("arm64-"):
+ defconfig = defconfig.replace("arm64-", "", 1)
+ arch = "arm64"
+ elif defconfig.startswith("x86-"):
+ defconfig = defconfig.replace("x86-", "", 1)
+ arch = "x86"
+
+ if arch is None and dirname is not None:
+ if dirname.startswith("arm-"):
+ arch = "arm"
+ elif dirname.startswith("arm64-"):
+ arch = "arm64"
+ elif dirname.startswith("x86-"):
+ arch = "x86"
+
+ if doc_get("arch", None) is not None:
+ arch = doc_get("arch")
+ if meta_get("arch", None) is not None:
+ if arch != meta_get("arch"):
+ arch = meta_pop("arch")
+ meta_pop("arch", None)
+ if arch is None:
+ utils.LOG.warn(
+ "arch is still None for %s-%s-%s", job, kernel, defconfig
+ )
+ arch = "arm"
+
+ if meta_get("kconfig_fragments", None):
+ kconfig_fragments = meta_pop("kconfig_fragments")
+ fragment = \
+ kconfig_fragments.replace(
+ "frag-", "").replace(".config", "")
+ if fragment not in defconfig:
+ defconfig_full = "+".join([defconfig, fragment])
+
+ if not defconfig_full:
+ defconfig_full = defconfig
+
+ def_doc = mdefconfig.DefconfigDocument(
+ job, kernel, defconfig, defconfig_full)
+
+ def_doc.version = "1.0"
+ def_doc.arch = arch
+ def_doc.dirname = dirname
+ def_doc.kconfig_fragments = kconfig_fragments
+ def_doc.defconfig_full = defconfig_full
+ def_doc.status = doc_get("status", models.UNKNOWN_STATUS)
+
+ if not NEW_JOB_IDS.get(job + "-" + kernel, None):
+ utils.LOG.error("No job ID for '%s-%s'", job, kernel)
+ def_doc.job_id = NEW_JOB_IDS.get(job + "-" + kernel, None)
+
+ def_doc.created_on = doc_get("created_on")
+
+ def_doc.errors = doc_get("errors", 0)
+ if def_doc.errors is None:
+ def_doc.errors = 0
+ else:
+ def_doc.errors = int(def_doc.errors)
+ def_doc.warnings = doc_get("warnings", 0)
+ if def_doc.warnings is None:
+ def_doc.warnings = 0
+ else:
+ def_doc.warnings = int(def_doc.warnings)
+ def_doc.build_time = doc_get("build_time", 0)
+ def_doc.modules_dir = doc_get("modules_dir", None)
+ def_doc.modules = doc_get("modules", None)
+ def_doc.build_log = doc_get("build_log", None)
+
+ if metadata:
+ if (str(def_doc.errors) != str(meta_get("build_errors")) and
+ meta_get("build_errors") is not None):
+ def_doc.errors = int(meta_pop("build_errors", 0))
+ meta_pop("build_errors", 0)
+
+ if (str(def_doc.warnings) != str(meta_get("build_warnings")) and
+ meta_get("build_warnings") is not None):
+ def_doc.warnings = int(meta_pop("build_warnings", 0))
+ meta_pop("build_warnings", 0)
+
+ def_doc.git_url = meta_pop("git_url", None)
+ def_doc.git_branch = meta_pop("git_branch", None)
+ def_doc.git_describe = meta_pop("git_describe", None)
+ def_doc.git_commit = meta_pop("git_commit", None)
+ def_doc.build_platform = meta_pop("build_platform", [])
+
+ if meta_get("build_log", None):
+ def_doc.build_log = meta_get("build_log", None)
+ meta_pop("build_log", None)
+
+ if meta_get("build_result", None):
+ result = meta_get("build_result")
+ if result != def_doc.status:
+ def_doc.status = meta_pop("build_result")
+ else:
+ meta_pop("build_result")
+
+ if str(meta_get("build_time")):
+ def_doc.build_time = meta_pop("build_time", 0)
+ meta_pop("build_time", None)
+
+ def_doc.dtb_dir = meta_pop("dtb_dir", None)
+ def_doc.kernel_config = meta_pop("kernel_config", None)
+ def_doc.kernel_image = meta_pop("kernel_image", None)
+ def_doc.modules = meta_pop("modules", None)
+ def_doc.system_map = meta_pop("system_map", None)
+ def_doc.text_offset = meta_pop("text_offset", None)
+
+ if meta_get("modules_dir", None):
+ def_doc.modules_dir = meta_pop("modules_dir")
+ meta_pop("modules_dir", None)
+
+ if meta_get("kconfig_fragments", None):
+ def_doc.kconfig_fragments = meta_pop("kconfig_fragments")
+ meta_pop("kconfig_fragments", None)
+
+ meta_pop("defconfig", None)
+ meta_pop("job", None)
+
+ def_doc.file_server_url = meta_pop("file_server_url", None)
+ def_doc.file_server_resource = meta_pop(
+ "file_server_resource", None)
+
+ if def_doc.file_server_resource is None:
+ def_doc.file_server_resource = (
+ "/" + job + "/" + kernel + "/" +
+ arch + "-" + defconfig_full
+ )
+
+ def_doc.metadata = metadata
+
+ ret_val = utils.db.delete(
+ db[models.DEFCONFIG_COLLECTION], doc_get("_id")
+ )
+ if ret_val != 200:
+ utils.LOG.error(
+ "Error deleting defconfig document %s", doc_get("_id")
+ )
+ time.sleep(3)
+ sys.exit(1)
+
+        # Skip documents with a known bogus defconfig value.
+        if defconfig != "lab-tbaker-00":
+ ret_val, doc_id = utils.db.save(db, def_doc, manipulate=True)
+ if ret_val == 201:
+ key = job + "-" + kernel + "-" + defconfig_full + "-" + arch
+ NEW_DEFCONFIG_IDS[key] = \
+ (doc_id, defconfig, defconfig_full, arch)
+ DEFCONFIG_GIT_VAL[doc_id] = (
+ def_doc.git_branch, def_doc.git_url, def_doc.git_commit,
+ def_doc.git_describe
+ )
+ else:
+ utils.LOG.error(
+ "Error saving new defconfig document for %s",
+ doc_get("_id")
+ )
+ time.sleep(3)
+ sys.exit(1)
+
+ count = db[models.DEFCONFIG_COLLECTION].find().count()
+ utils.LOG.info("Defconfig documents at the end: %s (%s)", count, doc_count)
+ time.sleep(2)
+
+
+def convert_boot_collection(db, lab_name, limit=0):
+
+ count = db[models.BOOT_COLLECTION].find().count()
+ utils.LOG.info("Processing %s boot documents", count)
+ time.sleep(2)
+
+ doc_count = 0
+ for document in db[models.BOOT_COLLECTION].find(limit=limit):
+
+ doc_get = document.get
+
+ if doc_get("version", None) == "1.0":
+ continue
+ else:
+ doc_count += 1
+ utils.LOG.info("Processing document #%s", doc_count)
+
+ board = doc_get("board")
+ job = doc_get("job")
+ kernel = doc_get("kernel")
+ defconfig = doc_get("defconfig")
+ metadata = doc_get("metadata", {})
+ meta_get = metadata.get
+ meta_pop = metadata.pop
+ arch = None
+
+ if defconfig.startswith("arm-"):
+ defconfig = defconfig.replace("arm-", "", 1)
+ arch = "arm"
+ elif defconfig.startswith("arm64-"):
+ defconfig = defconfig.replace("arm64-", "", 1)
+ arch = "arm64"
+ elif defconfig.startswith("x86-"):
+ defconfig = defconfig.replace("x86-", "", 1)
+ arch = "x86"
+ else:
+ arch = "arm"
+
+ pre_lab = meta_pop("lab_name", None)
+ if pre_lab:
+ lab_name = pre_lab
+
+ job_id = NEW_JOB_IDS.get(job + "-" + kernel, None)
+ if not job_id:
+ utils.LOG.error("No job ID found for %s-%s", job, kernel)
+
+ defconfig_id, build_defconfig, defconfig_full, build_arch = \
+ NEW_DEFCONFIG_IDS.get(
+ job + "-" + kernel + "-" + defconfig + "-" + arch,
+ [None, None, None, None]
+ )
+
+ def_full = meta_pop("defconfig_full", None)
+ if def_full:
+ utils.LOG.warn("Found defconfig_full")
+ defconfig_full = def_full
+
+ if build_arch is not None and arch != build_arch:
+ utils.LOG.warn("Using build architecture")
+ arch = build_arch
+
+ if build_defconfig and defconfig != build_defconfig:
+ defconfig = build_defconfig
+ if not defconfig_full:
+ defconfig_full = defconfig
+
+ if not defconfig_id:
+ utils.LOG.error(
+ "No defconfig ID found for %s-%s-%s (%s)",
+ job, kernel, defconfig, defconfig_full
+ )
+
+ boot_doc = mboot.BootDocument(
+ board, job, kernel, defconfig, lab_name, defconfig_full, arch
+ )
+
+ boot_doc.job_id = job_id
+ boot_doc.defconfig_id = defconfig_id
+ boot_doc.version = "1.0"
+
+ if defconfig_id:
+ git_branch, git_url, git_commit, git_describe = \
+ DEFCONFIG_GIT_VAL.get(
+ defconfig_id, [None, None, None, None])
+ boot_doc.git_branch = git_branch
+ boot_doc.git_commit = git_commit
+ boot_doc.git_describe = git_describe
+ boot_doc.git_url = git_url
+
+ boot_doc.created_on = doc_get("created_on", None)
+ if doc_get("warnings", None) is not None:
+ boot_doc.warnings = int(doc_get("warnings"))
+ boot_doc.status = doc_get("status", models.UNKNOWN_STATUS)
+ boot_doc.boot_log = doc_get("boot_log", None)
+ boot_doc.endianness = doc_get("endian", None)
+ boot_doc.dtb = doc_get("dtb", None)
+ boot_doc.dtb_addr = doc_get("dtb_addr", None)
+ boot_doc.initrd_addr = doc_get("initrd_addr", None)
+ boot_doc.load_addr = doc_get("load_addr", None)
+ if doc_get("retries", None) is not None:
+ boot_doc.retries = int(doc_get("retries"))
+ boot_doc.boot_log_html = doc_get("boot_log_html", None)
+ boot_doc.kernel_image = doc_get("kernel_image", None)
+ boot_doc.time = doc_get("time", ZERO_TIME)
+ boot_doc.dtb_append = doc_get("dtb_append", None)
+
+ if meta_get("fastboot", None) is not None:
+ boot_doc.fastboot = meta_pop("fastboot")
+ meta_pop("fastboot", None)
+
+ boot_doc.fastboot_cmd = meta_pop("fastboot_cmd", None)
+ boot_doc.boot_result_description = meta_pop(
+ "boot_result_description", None)
+ if not boot_doc.boot_log_html:
+ boot_doc.boot_log_html = meta_pop("boot_log_html", None)
+ if not boot_doc.boot_log:
+ boot_doc.boot_log = meta_pop("boot_log", None)
+ boot_doc.dtb_append = meta_pop("dtb_append", None)
+ boot_doc.git_commit = meta_pop("git_commit", None)
+        boot_doc.git_branch = meta_pop("git_branch", None)
+ boot_doc.git_describe = meta_pop("git_describe", None)
+ boot_doc.git_url = meta_pop("git_url", None)
+ if meta_get("retries", None) is not None:
+ boot_doc.retries = int(meta_pop("retries"))
+ meta_pop("retries", None)
+ meta_pop("version", None)
+
+ if meta_get("arch", None) and not boot_doc.arch:
+ boot_doc.arch = meta_pop("arch")
+
+ boot_doc.file_server_resource = meta_pop(
+ "file_server_resource", None)
+ if not pre_lab and boot_doc.file_server_resource is None:
+ boot_doc.file_server_resource = (
+ "/" + job + "/" + kernel + "/" +
+ arch + "-" + defconfig_full + "/"
+ )
+
+ boot_doc.board_instance = meta_pop("board_instance", None)
+ boot_doc.initrd = meta_pop("initrd", None)
+
+ boot_doc.file_server_url = meta_pop("file_server_url", None)
+
+ boot_doc.metadata = metadata
+
+ ret_val = utils.db.delete(
+ db[models.BOOT_COLLECTION], doc_get("_id"))
+ if ret_val != 200:
+ utils.LOG.error(
+ "Error deleting boot document %s", doc_get("_id")
+ )
+ time.sleep(3)
+ sys.exit(1)
+
+ ret_val, doc_id = utils.db.save(db, boot_doc, manipulate=True)
+ if ret_val != 201:
+ utils.LOG.error(
+ "Error saving new boot document for %s",
+ doc_get("_id")
+ )
+ time.sleep(3)
+ sys.exit(1)
+
+ count = db[models.BOOT_COLLECTION].find().count()
+ utils.LOG.info("Boot documents at the end: %s (%s)", count, doc_count)
+ time.sleep(2)
+
+
+def _check_func(db):
+ """Check some documents if they are ok."""
+ for document in db[models.JOB_COLLECTION].find(limit=3):
+ print document
+ for document in db[models.DEFCONFIG_COLLECTION].find(limit=3):
+ print document
+ for document in db[models.BOOT_COLLECTION].find(limit=3):
+ print document
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description="Convert mongodb data into new model",
+        version="0.1"
+ )
+ parser.add_argument(
+ "--lab-name", "-n",
+ type=str,
+ help="The lab name to use for boot reports",
+ required=True,
+ dest="lab_name"
+ )
+ parser.add_argument(
+ "--limit", "-l",
+ type=int,
+ default=0,
+ help="The number of documents to process",
+ dest="limit"
+ )
+ args = parser.parse_args()
+
+ lab_name = args.lab_name
+ limit = args.limit
+
+ try:
+ db = utils.db.get_db_connection({})
+ convert_job_collection(db, limit)
+ convert_defconfig_collection(db, limit)
+ convert_boot_collection(db, lab_name, limit)
+ _check_func(db)
+ except KeyboardInterrupt:
+ utils.LOG.info("User interrupted.")
+
+
+if __name__ == "__main__":
+ main()
diff --git a/app/utils/emails.py b/app/utils/scripts/operation-tests.py
index cce8302..8a9cab4 100644
--- a/app/utils/emails.py
+++ b/app/utils/scripts/operation-tests.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2014 Linaro Ltd.
+#!/usr/bin/python
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -13,29 +13,26 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-"""All email related utilities."""
+import models.boot as mboot
+import utils.bootimport
+import utils.db
-import smtplib
-from email.mime.text import MIMEText
+def main():
+ database = utils.db.get_db_connection({})
-FROM = 'noreply@linaro.org'
+ board = "fake-board"
+ job = "next"
+ kernel = "next-20141113"
+ defconfig = "u8500_defconfig"
+ lab_name = "lab-01"
+ boot_doc = mboot.BootDocument(board, job, kernel, defconfig, lab_name)
+ utils.bootimport._update_boot_doc_ids(boot_doc, database)
-def _create_email(job_id):
- msg = MIMEText('')
+ print boot_doc.defconfig_id
+ print boot_doc.job_id
+ print boot_doc.to_dict()
- msg['Subject'] = 'Results for job: %s' % (job_id)
- msg['From'] = FROM
-
- return msg
-
-
-def send(job_id, recipients):
-
- msg = _create_email(job_id)
- server = smtplib.SMTP('localhost')
-
- for recipient in recipients:
- msg['To'] = recipient
- server.sendmail(FROM, [recipient], msg.as_string(unixfrom=True))
+if __name__ == '__main__':
+ main()
diff --git a/app/utils/subscription.py b/app/utils/subscription.py
index e9fe3e0..83e1903 100644
--- a/app/utils/subscription.py
+++ b/app/utils/subscription.py
@@ -17,15 +17,8 @@
import pymongo
-from models import (
- DB_NAME,
- ID_KEY,
-)
-from models.job import JOB_COLLECTION
-from models.subscription import (
- SUBSCRIPTION_COLLECTION,
- SubscriptionDocument,
-)
+import models
+import models.subscription as mods
from utils import LOG
from utils.db import (
find_one,
@@ -53,23 +46,23 @@ def subscribe(database, json_obj):
job = json_obj['job']
emails = json_obj['email']
- job_doc = find_one(database[JOB_COLLECTION], job)
+ job_doc = find_one(database[models.JOB_COLLECTION], job)
if job_doc:
- job_id = job_doc[ID_KEY]
+ job_id = job_doc[models.ID_KEY]
subscription = find_one(
- database[SUBSCRIPTION_COLLECTION],
+ database[models.SUBSCRIPTION_COLLECTION],
job_id,
'job_id'
)
if subscription:
- sub_obj = SubscriptionDocument.from_json(subscription)
+ sub_obj = mods.SubscriptionDocument.from_json(subscription)
sub_obj.emails = emails
else:
sub_id = (
- SubscriptionDocument.SUBSCRIPTION_ID_FORMAT % (job_id)
+ models.SUBSCRIPTION_DOCUMENT_NAME % job_id
)
- sub_obj = SubscriptionDocument(sub_id, job_id)
+ sub_obj = mods.SubscriptionDocument(sub_id, job_id)
sub_obj.emails = emails
ret_val = save(database, sub_obj)
@@ -116,10 +109,10 @@ def send(job_id):
"""
# TODO: add logic to make sure we can send the notifications.
# We should store the job status.
- database = pymongo.MongoClient()[DB_NAME]
+ database = pymongo.MongoClient()[models.DB_NAME]
subscription = find_one(
- database[SUBSCRIPTION_COLLECTION], job_id, 'job_id'
+ database[models.SUBSCRIPTION_COLLECTION], job_id, 'job_id'
)
if subscription:
diff --git a/app/utils/tests/test_bootimport.py b/app/utils/tests/test_bootimport.py
index 1511284..f517f0d 100644
--- a/app/utils/tests/test_bootimport.py
+++ b/app/utils/tests/test_bootimport.py
@@ -18,20 +18,13 @@ import logging
import mongomock
import os
import tempfile
+import types
import unittest
-from mock import patch, MagicMock, Mock
+from mock import patch
-from datetime import (
- datetime,
- timedelta,
-)
-
-from models.boot import BootDocument
-from utils.bootimport import (
- _parse_boot_log,
- parse_boot_from_json,
-)
+import models.boot as modb
+import utils.bootimport
class TestParseBoot(unittest.TestCase):
@@ -39,112 +32,187 @@ class TestParseBoot(unittest.TestCase):
def setUp(self):
logging.disable(logging.CRITICAL)
self.db = mongomock.Database(mongomock.Connection(), 'kernel-ci')
+ self.base_path = tempfile.gettempdir()
self.boot_report = dict(
+ version="1.0",
+ board="board",
+ lab_name="lab_name",
+ kernel="kernel",
+ job="job",
+ defconfig="defconfig",
+ arch="arm",
boot_log='boot-board-name.log',
- boot_result='PASS',
+ boot_result="PASS",
+ boot_result_description="passed",
boot_time=28.07,
boot_warnings=0,
- dtb='dtb/board-name.dtb',
- dtb_addr='0x81f00000',
- initrd_addr='0x81f00001',
- kernel_image='zImage',
- loadaddr='0x80200000',
- endian='little',
+ dtb="dtb/board-name.dtb",
+ dtb_addr="0x81f00000",
+ initrd_addr="0x81f00001",
+ kernel_image="zImage",
+ loadaddr="0x80200000",
+ endian="little",
uImage=True,
- uimage_addr='xip'
+ uimage_addr="xip"
)
def tearDown(self):
logging.disable(logging.NOTSET)
- def test_parse_boot_log(self):
- temp_json_f = os.path.join(
- tempfile.gettempdir(), 'boot-board-name.json'
+ def test_parse_from_json_simple(self):
+ doc = utils.bootimport._parse_boot_from_json(self.boot_report, self.db)
+
+ self.assertIsInstance(doc, modb.BootDocument)
+ self.assertEqual(doc.name, "board-job-kernel-defconfig-arm")
+ self.assertEqual(doc.load_addr, "0x80200000")
+ self.assertEqual(doc.endianness, "little")
+ self.assertEqual(doc.version, "1.0")
+ self.assertIsInstance(doc.metadata, types.DictionaryType)
+
+ @patch("utils.db.get_db_connection")
+ def test_import_and_save_boot(self, mock_db):
+        mock_db.return_value = self.db
+
+ code, doc_id = utils.bootimport.import_and_save_boot(
+ self.boot_report, {}, base_path=self.base_path
+ )
+ lab_dir = os.path.join(
+ self.base_path, "job", "kernel", "arm-defconfig", "lab_name"
)
+ boot_file = os.path.join(lab_dir, "boot-board.json")
+
+ self.assertTrue(os.path.isdir(lab_dir))
+ self.assertTrue(os.path.isfile(boot_file))
+ self.assertEqual(code, 201)
try:
- with open(temp_json_f, 'w') as w_f:
- w_f.write(json.dumps(self.boot_report))
-
- doc = _parse_boot_log(temp_json_f, 'job', 'kernel', 'defconfig')
-
- time_d = timedelta(seconds=28.07)
- boot_time = datetime(
- 1970, 1, 1,
- minute=time_d.seconds / 60,
- second=time_d.seconds % 60,
- microsecond=time_d.microseconds
- )
-
- self.assertIsInstance(doc, BootDocument)
- self.assertEqual(doc.board, 'board-name')
- self.assertEqual(doc.job, 'job')
- self.assertEqual(doc.kernel, 'kernel')
- self.assertEqual(doc.defconfig, 'defconfig')
- self.assertIsInstance(doc.time, datetime)
- self.assertEqual(doc.time, boot_time)
- self.assertEqual(doc.boot_log, 'boot-board-name.log')
- self.assertEqual(doc.status, 'PASS')
- self.assertEqual(doc.load_addr, '0x80200000')
- self.assertEqual(doc.initrd_addr, '0x81f00001')
- self.assertEqual(doc.endianness, 'little')
- self.assertDictEqual(
- doc.metadata, {'uImage': True, 'uimage_addr': 'xip'}
- )
- finally:
- os.unlink(temp_json_f)
+ os.remove(boot_file)
+ os.rmdir(lab_dir)
+ except OSError:
+ pass
+
+ def test_parse_from_json_wrong_json(self):
+ boot_json = {
+ "foo": "bar"
+ }
+        # assertRaises needs the callable and its arguments passed
+        # separately, otherwise the function runs before the assertion.
+        self.assertRaises(
+            KeyError,
+            utils.bootimport._parse_boot_from_json, boot_json, self.db
+        )
+
+ @patch("utils.bootimport._parse_boot_from_json")
+ def test_import_and_save_no_doc(self, mock_parse):
+ mock_parse.return_value = None
+
+ code, doc_id = utils.bootimport.import_and_save_boot({}, {})
+ self.assertIsNone(code)
+ self.assertIsNone(doc_id)
+
+ def test_parse_from_file_no_file(self):
+ doc = utils.bootimport._parse_boot_from_file(None)
+ self.assertIsNone(doc)
- def test_parse_boot_log_without_dtb(self):
- temp_json_f = os.path.join(
- tempfile.gettempdir(), 'boot-board-name.json'
+ def test_parse_from_file_wrong_file(self):
+ doc = utils.bootimport._parse_boot_from_file('foobar.json')
+ self.assertIsNone(doc)
+
+ def test_parse_from_file_no_key(self):
+ boot_log = tempfile.NamedTemporaryFile(
+ mode='w+b', bufsize=-1, suffix="json", delete=False
)
+ boot_obj = {
+ "foo": "bar"
+ }
try:
- self.boot_report.pop('dtb')
-
- with open(temp_json_f, 'w') as w_f:
- w_f.write(json.dumps(self.boot_report))
+ with open(boot_log.name, mode="w") as boot_write:
+ boot_write.write(json.dumps(boot_obj))
- doc = _parse_boot_log(temp_json_f, 'job', 'kernel', 'defconfig')
+ doc = utils.bootimport._parse_boot_from_file(boot_log.name)
- self.assertIsInstance(doc, BootDocument)
- self.assertEqual(doc.board, 'board-name')
+ self.assertIsNone(doc)
finally:
- os.unlink(temp_json_f)
+ os.remove(boot_log.name)
- def test_parse_boot_log_with_tmp_dir(self):
- temp_json_f = os.path.join(
- tempfile.gettempdir(), 'boot-board-name.json'
+ def test_parse_from_file_valid(self):
+ boot_log = tempfile.NamedTemporaryFile(
+ mode='w+b', bufsize=-1, suffix="json", delete=False
)
+ boot_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "board": "board",
+ "dtb": "dtb",
+ "lab_name": "lab_name",
+ "boot_time": 0,
+ }
try:
- self.boot_report['dtb'] = '/tmp/tmpfoo-bar.dtb'
+ with open(boot_log.name, mode="w") as boot_write:
+ boot_write.write(json.dumps(boot_obj))
+
+ doc = utils.bootimport._parse_boot_from_file(boot_log.name)
- with open(temp_json_f, 'w') as w_f:
- w_f.write(json.dumps(self.boot_report))
+ self.assertEqual(doc.board, "board")
+ self.assertEqual(doc.job, "job")
+ self.assertEqual(doc.kernel, "kernel")
+ self.assertEqual(doc.defconfig, "defconfig")
+ self.assertEqual(doc.dtb, "dtb")
+ finally:
+ os.remove(boot_log.name)
- doc = _parse_boot_log(temp_json_f, 'job', 'kernel', 'defconfig')
+ def test_parse_from_file_no_board(self):
+ boot_log = tempfile.NamedTemporaryFile(
+ mode='w+b', bufsize=-1, prefix="boot-", suffix=".json", delete=False
+ )
+ boot_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "dtb": "dtbs/board.dtb",
+ "lab_name": "lab_name",
+ "boot_time": 0,
+ }
- self.assertIsInstance(doc, BootDocument)
- self.assertEqual(doc.board, 'board-name')
+ try:
+ with open(boot_log.name, mode="w") as boot_write:
+ boot_write.write(json.dumps(boot_obj))
+
+ doc = utils.bootimport._parse_boot_from_file(boot_log.name)
+
+ self.assertEqual(doc.board, "board")
+ self.assertEqual(doc.job, "job")
+ self.assertEqual(doc.kernel, "kernel")
+ self.assertEqual(doc.defconfig, "defconfig")
+ self.assertEqual(doc.dtb, "dtbs/board.dtb")
finally:
- os.unlink(temp_json_f)
+ os.remove(boot_log.name)
- @patch('utils.bootimport._parse_boot_log')
- @patch('os.path.isfile')
- @patch('glob.iglob', new=Mock(return_value=['boot-board.json']))
- @patch('os.path.isdir')
- @patch('os.listdir')
- def test_parse_from_json_simple(
- self, mock_listdir, mock_isdir, mock_isfile, mock_parse):
- json_obj = dict(job='job', kernel='kernel')
+ def test_parse_from_file_no_board_tmp_dtb(self):
+ boot_log = tempfile.NamedTemporaryFile(
+ mode='w+b', bufsize=-1, prefix="boot-", suffix=".json", delete=False
+ )
+ boot_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ "defconfig": "defconfig",
+ "dtb": "tmp/board.dtb",
+ "lab_name": "lab_name",
+ "boot_time": 0,
+ "arch": "arm"
+ }
+
+ board = os.path.splitext(
+ os.path.basename(boot_log.name).replace('boot-', ''))[0]
- mock_isfile.return_value = True
- mock_isdir.return_value = True
- mock_parse.side_effect = [MagicMock(), MagicMock()]
- mock_listdir.return_value = ('.hidden', 'defconfdir')
+ try:
+ with open(boot_log.name, mode="w") as boot_write:
+ boot_write.write(json.dumps(boot_obj))
- docs = parse_boot_from_json(json_obj, base_path=tempfile.gettempdir())
+ doc = utils.bootimport._parse_boot_from_file(boot_log.name)
- self.assertEqual(len(docs), 1)
+ self.assertEqual(doc.board, board)
+ self.assertEqual(doc.dtb, "tmp/board.dtb")
+ finally:
+ os.remove(boot_log.name)
diff --git a/app/utils/tests/test_docimport.py b/app/utils/tests/test_docimport.py
index 2170be1..ada4dde 100644
--- a/app/utils/tests/test_docimport.py
+++ b/app/utils/tests/test_docimport.py
@@ -13,30 +13,19 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import datetime
+import json
import logging
+import mock
import mongomock
import os
import tempfile
+import types
import unittest
from bson import tz_util
-from datetime import datetime
-from types import DictionaryType
-from mock import (
- MagicMock,
- Mock,
- patch,
-)
-from models.defconfig import DefConfigDocument
-
-from utils.docimport import (
- _import_all,
- _import_job,
- _parse_build_metadata,
- _traverse_defconf_dir,
- import_and_save_job,
-)
+import utils.docimport as docimport
class TestParseJob(unittest.TestCase):
@@ -48,23 +37,23 @@ class TestParseJob(unittest.TestCase):
def tearDown(self):
logging.disable(logging.NOTSET)
- @patch("os.stat")
- @patch("os.path.isdir")
- @patch("os.listdir")
+ @mock.patch("os.stat")
+ @mock.patch("os.path.isdir")
+ @mock.patch("os.listdir")
def test_import_all_simple(self, mock_os_listdir, mock_isdir, mock_stat):
mock_os_listdir.side_effect = [
['job'], ['kernel'], ['defconf'],
]
mock_isdir.side_effect = [True, True, True, True]
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
+ mock_stat.st_mtime.return_value = datetime.datetime.now(tz=tz_util.utc)
- docs = _import_all(self.db)
+ docs = docimport._import_all(self.db)
self.assertEqual(len(docs), 2)
- @patch("os.stat")
- @patch("os.path.isdir")
- @patch("os.walk")
- @patch("os.listdir")
+ @mock.patch("os.stat")
+ @mock.patch("os.path.isdir")
+ @mock.patch("os.walk")
+ @mock.patch("os.listdir")
def test_import_all_complex(
self, mock_os_listdir, mock_os_walk, mock_isdir, mock_stat):
mock_os_listdir.side_effect = [
@@ -77,14 +66,14 @@ class TestParseJob(unittest.TestCase):
]
mock_isdir.side_effect = list((True,) * 13)
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
+ mock_stat.st_mtime.return_value = datetime.datetime.now(tz=tz_util.utc)
- docs = _import_all(self.db)
+ docs = docimport._import_all(self.db)
self.assertEqual(len(docs), 8)
- @patch("os.stat")
- @patch("os.path.isdir")
- @patch("os.listdir")
+ @mock.patch("os.stat")
+ @mock.patch("os.path.isdir")
+ @mock.patch("os.listdir")
def test_import_all_documents_created(
self, mock_os_listdir, mock_isdir, mock_stat):
mock_os_listdir.side_effect = [
@@ -92,35 +81,39 @@ class TestParseJob(unittest.TestCase):
]
mock_isdir.side_effect = list((True,) * 4)
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
+ mock_stat.st_mtime.return_value = datetime.datetime.now(tz=tz_util.utc)
- docs = _import_all(self.db)
+ docs = docimport._import_all(self.db)
self.assertEqual(len(docs), 2)
self.assertEqual(docs[0].name, "job-kernel")
- self.assertEqual(docs[1].job_id, "job-kernel")
- @patch('pymongo.MongoClient')
- def test_import_and_save(self, mocked_client=mongomock.Connection()):
+ @mock.patch('pymongo.MongoClient')
+ @mock.patch("utils.db.find_one")
+ def test_import_and_save(
+ self, mock_find, mocked_client=mongomock.Connection()):
json_obj = dict(job='job', kernel='kernel')
+ mock_find.return_value = None
- self.assertEqual(import_and_save_job(json_obj, {}), 'job-kernel')
+ self.assertEqual(
+ docimport.import_and_save_job(json_obj, {}), 'job-kernel'
+ )
- @patch('utils.docimport.find_one')
+ @mock.patch('utils.db.find_one')
def test_import_job_building(self, mock_find_one):
mock_find_one.return_value = []
database = mongomock.Connection()
- docs, job_id = _import_job('job', 'kernel', database)
+ docs, job_id = docimport._import_job('job', 'kernel', database)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0].status, 'BUILD')
- @patch('os.stat')
- @patch('utils.docimport.find_one')
- @patch('utils.docimport._traverse_defconf_dir')
- @patch('os.listdir')
- @patch('os.path.exists')
- @patch('os.path.isdir')
+ @mock.patch('os.stat')
+ @mock.patch('utils.db.find_one')
+ @mock.patch('utils.docimport._traverse_defconf_dir')
+ @mock.patch('os.listdir')
+ @mock.patch('os.path.exists')
+ @mock.patch('os.path.isdir')
def test_import_job_done(
self, mock_isdir, mock_exists, mock_listdir, mock_traverse,
mock_find_one, mock_stat):
@@ -129,83 +122,47 @@ class TestParseJob(unittest.TestCase):
mock_listdir.return_value = []
mock_traverse.return_value = []
mock_find_one.return_value = []
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
+ mock_stat.st_mtime.return_value = datetime.datetime.now(tz=tz_util.utc)
database = mongomock.Connection()
- docs, job_id = _import_job('job', 'kernel', database)
+ docs, job_id = docimport._import_job('job', 'kernel', database)
self.assertEqual(len(docs), 1)
self.assertEqual(docs[0].status, 'PASS')
- def test_parse_and_update_build_metadata(self):
- meta_content = (
- '''
-# A comment.
-arch: arm
-git_url: git://git.example.org
-git_branch: test/branch
-git_describe: vfoo.bar
-git_commit: 1234567890
-defconfig: defoo_confbar
-kconfig_fragments:
-tree_name: foo_tree
-
-kernel_image: zImage
-kernel_config: kernel.config
-dtb_dir: dtbs
-modules_dir: foo/bar
-'''
- )
-
- defconf_doc = MagicMock()
+ @mock.patch("os.stat")
+ def test_parse_and_update_build_metadata(self, mock_stat):
+ mock_stat.st_mtime.return_value = datetime.datetime.now(tz=tz_util.utc)
+        meta_content = {
+            "arch": "arm",
+            "defconfig": "defoo_confbar",
+            "git_url": "git://git.example.org",
+            "git_branch": "test/branch",
+            "git_describe": "vfoo.bar",
+            "git_commit": "1234567890",
+            "kconfig_fragments": "fragment",
+            "kernel_image": "zImage",
+            "kernel_config": "kernel.config",
+            "dtb_dir": "dtbs",
+            "modules_dir": "foo/bar",
+            "build_log": "file.log"
+        }
try:
fake_meta = tempfile.NamedTemporaryFile(delete=False)
with open(fake_meta.name, 'w') as w_file:
- w_file.write(meta_content)
+ w_file.write(json.dumps(meta_content))
- _parse_build_metadata(fake_meta.name, defconf_doc)
+ defconf_doc = docimport._parse_build_data(
+ fake_meta.name, "job", "kernel"
+ )
finally:
os.unlink(fake_meta.name)
- self.assertIsInstance(defconf_doc.metadata, DictionaryType)
- self.assertEqual(None, defconf_doc.metadata['kconfig_fragments'])
- self.assertEqual('arm', defconf_doc.metadata['arch'])
-
- @patch('utils.docimport._parse_build_metadata')
- @patch('os.path.isfile')
- @patch('os.walk', new=Mock(return_value=[('defconf-dir', [], [])]))
- @patch('os.stat')
- def test_traverse_defconf_dir_json(
- self, mock_stat, mock_isfile, mock_parser):
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
- mock_isfile.side_effect = [False, False, True]
-
- defconf_doc = _traverse_defconf_dir(
- 'job-kernel', 'job', 'kernel', 'kernel-dir', 'defconf-dir'
- )
-
- self.assertIsInstance(defconf_doc, DefConfigDocument)
- self.assertEqual(defconf_doc.status, 'UNKNOWN')
- mock_parser.assert_called_once_with(
- 'defconf-dir/build.json', defconf_doc
- )
-
- @patch('utils.docimport._parse_build_metadata')
- @patch('os.path.isfile')
- @patch('os.walk', new=Mock(return_value=[('defconf-dir', [], [])]))
- @patch('os.stat')
- def test_traverse_defconf_dir_nornal(
- self, mock_stat, mock_isfile, mock_parser):
- mock_stat.st_mtime.return_value = datetime.now(tz=tz_util.utc)
- mock_isfile.side_effect = [False, False, False, True]
-
- defconf_doc = _traverse_defconf_dir(
- 'job-kernel', 'job', 'kernel', 'kernel-dir', 'defconf-dir'
- )
-
- self.assertIsInstance(defconf_doc, DefConfigDocument)
- self.assertEqual(defconf_doc.status, 'UNKNOWN')
- mock_parser.assert_called_once_with(
- 'defconf-dir/build.meta', defconf_doc
- )
+ self.assertIsInstance(defconf_doc.metadata, types.DictionaryType)
+ self.assertEqual("fragment", defconf_doc.kconfig_fragments)
+ self.assertEqual(defconf_doc.arch, "arm")
+ self.assertEqual(defconf_doc.git_commit, "1234567890")
+ self.assertEqual(defconf_doc.git_branch, "test/branch")
diff --git a/app/utils/tests/test_meta_parser.py b/app/utils/tests/test_meta_parser.py
deleted file mode 100644
index 2ddf93c..0000000
--- a/app/utils/tests/test_meta_parser.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as
-# published by the Free Software Foundation, either version 3 of the
-# License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-#
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-import json
-import logging
-import os
-import tempfile
-import types
-import unittest
-
-from utils.meta_parser import parse_metadata_file
-
-
-class TestMetaParser(unittest.TestCase):
-
- def setUp(self):
- logging.disable(logging.CRITICAL)
- self.temp_metadata = tempfile.NamedTemporaryFile(delete=False)
-
- def tearDown(self):
- logging.disable(logging.NOTSET)
- try:
- os.unlink(self.temp_metadata.name)
- except Exception:
- pass
-
- def test_parse_config_file(self):
- file_content = (
- '[DEFAULT]\nbuild_status: PASS\nbuild_log: build.log'
- )
-
- with open(self.temp_metadata.name, 'w') as w_file:
- w_file.write(file_content)
-
- expected = dict(build_status='PASS', build_log='build.log')
- metadata = parse_metadata_file(self.temp_metadata.name)
-
- self.assertEqual(expected, metadata)
-
- def test_parse_normal_file(self):
- file_content = (
- 'build_status: PASS\nbuild_log: build.log\n'
- )
-
- with open(self.temp_metadata.name, 'w') as w_file:
- w_file.write(file_content)
-
- expected = dict(build_status='PASS', build_log='build.log')
- metadata = parse_metadata_file(self.temp_metadata.name)
-
- self.assertEqual(expected, metadata)
-
- def test_parse_json_file(self):
- expected = dict(build_status='PASS', build_log='build.log')
-
- try:
- json_tmp = tempfile.NamedTemporaryFile(
- suffix='.json', delete=False
- )
-
- with open(json_tmp.name, 'w') as w_file:
- json.dump(expected, w_file)
-
- metadata = parse_metadata_file(json_tmp.name)
- self.assertIsInstance(metadata, types.DictionaryType)
- self.assertEqual(expected, metadata)
- finally:
- os.unlink(json_tmp.name)
diff --git a/app/utils/tests/test_validator.py b/app/utils/tests/test_validator.py
index 3db4252..f670feb 100644
--- a/app/utils/tests/test_validator.py
+++ b/app/utils/tests/test_validator.py
@@ -16,53 +16,145 @@
import json
import unittest
-from utils.validator import (
- is_valid_json,
- is_valid_batch_json,
-)
+import utils.validator as utilsv
class TestValidator(unittest.TestCase):
def test_valid_json_valid(self):
json_string = '{"job": "job", "kernel": "kernel"}'
- accepted_keys = ('job', 'kernel')
+ accepted_keys = ['job', 'kernel']
- self.assertTrue(
- is_valid_json(json.loads(json_string), accepted_keys)
- )
-
- def test_valid_json_valid_job(self):
- json_string = '{"job": "job"}'
- accepted_keys = ('job', 'kernel')
-
- self.assertTrue(
- is_valid_json(json.loads(json_string), accepted_keys)
+ valid, reason = utilsv.is_valid_json(
+ json.loads(json_string), accepted_keys
)
+ self.assertTrue(valid)
+ self.assertIsNone(reason)
- def test_valid_json_only_kernel(self):
+ def test_valid_json_with_more_valid_keys(self):
json_string = '{"kernel": "kernel"}'
- accepted_keys = ('job', 'kernel')
+ accepted_keys = ['job', 'kernel', "defconfig", "foo"]
- self.assertTrue(
- is_valid_json(json.loads(json_string), accepted_keys)
+ valid, reason = utilsv.is_valid_json(
+ json.loads(json_string), accepted_keys
)
+ self.assertTrue(valid)
+ self.assertIsNone(reason)
- def test_not_valid_json(self):
- json_string = '{"kernel": "foo", "foo": "bar"}'
- accepted_keys = ('job', 'kernel')
+ def test_valid_json_with_strange_keys(self):
+ json_obj = {
+ "kernel": "foo",
+ "foo": "bar",
+ "baz": "foo",
+ "job": "job",
+ }
+ expected = {
+ "kernel": "foo",
+ "job": "job",
+ }
+ accepted_keys = ['job', 'kernel']
- self.assertFalse(
- is_valid_json(json.loads(json_string), accepted_keys)
- )
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+
+ self.assertTrue(valid)
+ self.assertIsNotNone(reason)
+ self.assertDictEqual(expected, json_obj)
def test_no_accepted_keys(self):
- json_string = '{"kernel": "foo", "foo": "bar"}'
+ json_obj = {
+ "kernel": "foo",
+ "job": "job",
+ "foo": "bar"
+ }
accepted_keys = None
- self.assertFalse(
- is_valid_json(json.loads(json_string), accepted_keys)
- )
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+ self.assertFalse(valid)
+ self.assertIsNotNone(reason)
+
+ def test_remove_all_keys(self):
+ json_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ }
+
+ accepted_keys = ["foo", "bar"]
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+
+ self.assertFalse(valid)
+ self.assertIsNotNone(reason)
+
+ def test_validation_complex_valid_no_reason(self):
+ accepted_keys = {
+ "mandatory": [
+ "job",
+ "kernel"
+ ],
+ "accepted": [
+ "foo",
+ "bar",
+ "baz",
+ "job",
+ "kernel",
+ ]
+ }
+
+ json_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ "foo": "foo"
+ }
+
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+
+ self.assertTrue(valid)
+ self.assertIsNone(reason)
+
+ def test_validation_complex_valid_with_reason(self):
+ accepted_keys = {
+ "mandatory": [
+ "job",
+ "kernel"
+ ],
+ "accepted": [
+ "baz",
+ "job",
+ "kernel",
+ ]
+ }
+
+ json_obj = {
+ "job": "job",
+ "kernel": "kernel",
+ "foo": "foo"
+ }
+
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+
+ self.assertTrue(valid)
+ self.assertIsNotNone(reason)
+
+ def test_validation_complex_no_mandatory(self):
+ accepted_keys = {
+ "mandatory": [
+ "job",
+ "kernel"
+ ],
+ "accepted": [
+ "baz",
+ "job",
+ "kernel",
+ ]
+ }
+
+ json_obj = {
+ "foo": "foo"
+ }
+
+ valid, reason = utilsv.is_valid_json(json_obj, accepted_keys)
+
+ self.assertFalse(valid)
+ self.assertIsNotNone(reason)
class TestBatchValidator(unittest.TestCase):
@@ -73,7 +165,7 @@ class TestBatchValidator(unittest.TestCase):
accepted_keys = ()
self.assertFalse(
- is_valid_batch_json(
+ utilsv.is_valid_batch_json(
json.loads(json_string), batch_key, accepted_keys
)
)
@@ -98,7 +190,7 @@ class TestBatchValidator(unittest.TestCase):
}
self.assertTrue(
- is_valid_batch_json(json_obj, batch_key, accepted_keys)
+ utilsv.is_valid_batch_json(json_obj, batch_key, accepted_keys)
)
def test_valid_batch_json_from_string(self):
@@ -110,7 +202,9 @@ class TestBatchValidator(unittest.TestCase):
)
self.assertTrue(
- is_valid_batch_json(json.loads(json_str), batch_key, accepted_keys)
+ utilsv.is_valid_batch_json(
+ json.loads(json_str), batch_key, accepted_keys
+ )
)
def test_non_valid_batch_json_from_dict(self):
@@ -124,7 +218,7 @@ class TestBatchValidator(unittest.TestCase):
}
self.assertFalse(
- is_valid_batch_json(json_obj, batch_key, accepted_keys)
+ utilsv.is_valid_batch_json(json_obj, batch_key, accepted_keys)
)
json_obj = {
@@ -134,7 +228,7 @@ class TestBatchValidator(unittest.TestCase):
}
self.assertFalse(
- is_valid_batch_json(json_obj, batch_key, accepted_keys)
+ utilsv.is_valid_batch_json(json_obj, batch_key, accepted_keys)
)
def test_non_valid_batch_json_wrong_keys(self):
@@ -154,5 +248,88 @@ class TestBatchValidator(unittest.TestCase):
}
self.assertFalse(
- is_valid_batch_json(json_obj, batch_key, accepted_keys)
+ utilsv.is_valid_batch_json(json_obj, batch_key, accepted_keys)
)
+
+ def test_validate_contact_object_wrong(self):
+ json_obj = {
+ "contact": {}
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": ["a"]
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": "a"
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "foo": "bar",
+ "baz": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "name": "bar",
+ "surname": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "surname": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "name": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "email": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "name": "foo",
+ "email": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ json_obj = {
+ "contact": {
+ "surname": "foo",
+ "email": "foo"
+ }
+ }
+ self.assertFalse(utilsv.is_valid_lab_contact_data(json_obj)[0])
+
+ def test_validate_contact_object_correct(self):
+
+ json_obj = {
+ "contact": {
+ "name": "foo",
+ "surname": "foo",
+ "email": "foo",
+ }
+ }
+
+ validated = utilsv.is_valid_lab_contact_data(json_obj)
+ self.assertTrue(validated[0])
+ self.assertIsNone(validated[1])
diff --git a/app/utils/validator.py b/app/utils/validator.py
index be23f60..f131da8 100644
--- a/app/utils/validator.py
+++ b/app/utils/validator.py
@@ -1,5 +1,3 @@
-# Copyright (C) 2014 Linaro Ltd.
-#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
@@ -13,33 +11,126 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-from types import DictionaryType
+import types
+
+import models
def is_valid_json(json_obj, accepted_keys):
- """Validate JSON object for a request.
+ """Validate a JSON object from a request.
+
+ The JSON object will also be modified based on the valid keys for the
+ request being validated. All unrecognized keys will be removed.
- To be invalid, just one of the keys passed needs not to be found, or there
- are no keys passed.
+ To be invalid, the object has to either:
+ - Lack one of the mandatory keys (in a complex validation)
+ - Have no keys left at the end of the validation
:param json_obj: The JSON object to validate. It will be treated as a
Python dictionary.
:param accepted_keys: A list of keys that needs to be found in the JSON
object.
- :return True or False.
+ :return A tuple with True or False, and an optional error message.
"""
- is_valid = True
- json_keys = json_obj.keys()
+ valid_json = False
+ error_message = "No valid keys defined for this request"
if accepted_keys:
- for key in json_keys:
- if key not in accepted_keys:
- is_valid &= False
- break
+ if isinstance(accepted_keys, types.ListType):
+ # Simple case: accepted_keys is just a list of keys.
+ valid_json, error_message = _simple_json_validation(
+ json_obj, accepted_keys
+ )
+ elif isinstance(accepted_keys, types.DictionaryType):
+ # More complex case where accepted_keys is a dictionary with the
+ # mandatory and all the valid keys.
+ valid_json, error_message = _complex_json_validation(
+ json_obj, accepted_keys
+ )
+
+ return valid_json, error_message
+
+
+def _simple_json_validation(json_obj, accepted_keys):
+ """Perform JSON validation with simple logic.
+
+ The passed keys parameter is a list: keys not found in it are removed
+ from the JSON object, which becomes invalid only if no keys are left.
+
+ :param json_obj: The JSON object to analyze.
+ :type json_obj: dict
+ :param accepted_keys: The accepted keys for this JSON object.
+ :type accepted_keys: list
+ :return True or False, and an error message if False.
+ """
+ is_valid = True
+ error_message = None
+ json_keys = set(json_obj.keys())
+
+ strange_keys = json_keys - set(accepted_keys)
+ if strange_keys:
+ error_message = (
+ "Found non recognizable keys, they will not be considered: %s" %
+ ", ".join(strange_keys)
+ )
+ # If we have keys that are not defined in our model, remove them.
+ for key in strange_keys:
+ json_obj.pop(key, None)
+
+ if not json_obj:
+ # Did we remove everything from the JSON object?
+ is_valid = False
+ error_message = "No valid or acceptable keys in the JSON data"
+
+ return is_valid, error_message
+
+
+def _complex_json_validation(json_obj, accepted_keys):
+ """Perform JSON validation with a more complex logic.
+
+ The passed keys parameter is a dictionary that contains mandatory keys and
+ all the other accepted keys.
+
+ If one of the mandatory keys is not found, it is not valid.
+ If other keys are passed and are not in the accepted keys, they will be
+ discarded.
+
+ :param json_obj: The JSON object to analyze.
+ :type json_obj: dict
+ :param accepted_keys: The accepted keys for this JSON object.
+ :type accepted_keys: dict
+ :return True or False, and an error message if False (None otherwise).
+ """
+ is_valid = True
+ error_message = None
+
+ json_keys = set(json_obj.keys())
+ mandatory_keys = set(accepted_keys.get(models.MANDATORY_KEYS))
+ valid_keys = set(accepted_keys.get(models.ACCEPTED_KEYS))
+
+ missing_keys = list(mandatory_keys - json_keys)
+ if missing_keys:
+ is_valid = False
+ error_message = (
+ "One or more mandatory keys are missing: %s" % str(missing_keys)
+ )
else:
+ strange_keys = list(json_keys - valid_keys)
+ if strange_keys:
+ error_message = (
+ "Found non recognizable keys, they will not be considered: %s" %
+ ", ".join(strange_keys)
+ )
+ # If we have keys that are not defined in our model, remove them.
+ for key in strange_keys:
+ json_obj.pop(key, None)
+
+ if not json_obj:
+ # Did we remove everything from the JSON object?
is_valid = False
+ error_message = "No valid or acceptable keys in the JSON data"
- return is_valid
+ return is_valid, error_message
def is_valid_batch_json(json_obj, batch_key, accepted_keys):
@@ -57,7 +148,7 @@ def is_valid_batch_json(json_obj, batch_key, accepted_keys):
while is_valid:
for batch_op in batch_op_list:
- if isinstance(batch_op, DictionaryType):
+ if isinstance(batch_op, types.DictionaryType):
batch_op_keys = batch_op.keys()
for key in batch_op_keys:
@@ -74,3 +165,37 @@ def is_valid_batch_json(json_obj, batch_key, accepted_keys):
is_valid = False
return is_valid
+
+
+def is_valid_lab_contact_data(json_obj):
+ """Validate a `contact` data structure for the Lab model.
+
+ :param json_obj: The JSON object containing the `contact` data.
+ :type json_obj: dict
+ :return A tuple: True or False, and an error message if False (None otherwise).
+ """
+ is_valid = True
+ reason = None
+
+ contact = json_obj.get(models.CONTACT_KEY)
+ if all([contact, isinstance(contact, types.DictionaryType)]):
+ mandatory_keys = set(
+ [models.NAME_KEY, models.SURNAME_KEY, models.EMAIL_KEY]
+ )
+ provided_keys = set(contact.keys())
+ # Does the provided dict contain all the mandatory keys?
+ if not (provided_keys >= mandatory_keys):
+ missing_keys = list(mandatory_keys - provided_keys)
+ is_valid = False
+ reason = (
+ "Missing mandatory keys for '%s' JSON object: %s" %
+ (models.CONTACT_KEY, str(missing_keys))
+ )
+ else:
+ is_valid = False
+ reason = (
+ "Provided '%s' data structure is not a JSON object or "
+ "is empty" % models.CONTACT_KEY
+ )
+
+ return is_valid, reason
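
A minimal usage sketch of the new two-value return of ``is_valid_json`` above, covering both the simple (list) and the complex (mandatory/accepted dictionary) key definitions; the literal "mandatory" and "accepted" keys mirror the ones used in the tests, on the assumption that ``models.MANDATORY_KEYS`` and ``models.ACCEPTED_KEYS`` resolve to those strings::

    import utils.validator as utilsv

    # Simple case: accepted_keys is a plain list; unknown keys are
    # removed from the JSON object in place.
    json_obj = {"job": "next", "kernel": "next-20141118", "foo": "bar"}
    valid, reason = utilsv.is_valid_json(json_obj, ["job", "kernel"])
    print valid      # True
    print reason     # message noting that 'foo' was dropped
    print json_obj   # {'job': 'next', 'kernel': 'next-20141118'}

    # Complex case: a missing mandatory key makes the object invalid.
    accepted_keys = {
        "mandatory": ["job", "kernel"],
        "accepted": ["job", "kernel", "defconfig"],
    }
    valid, reason = utilsv.is_valid_json({"job": "next"}, accepted_keys)
    print valid      # False
    print reason     # message listing the missing 'kernel' key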
diff --git a/doc/batch-collection.rst b/doc/collection-batch.rst
index 1893b4b..1893b4b 100644
--- a/doc/batch-collection.rst
+++ b/doc/collection-batch.rst
diff --git a/doc/boot-collection.rst b/doc/collection-boot.rst
index 5bc3889..2ee0473 100644
--- a/doc/boot-collection.rst
+++ b/doc/collection-boot.rst
@@ -10,7 +10,7 @@ GET
Get all the available boot reports or a single one if ``boot_id`` is provided.
- :param boot_id: The ID of the boot report to retrieve. Usually in the form of: ``board``-``job``-``kernel``-``defconfig``.
+ :param boot_id: The ID of the boot report to retrieve.
:type boot_id: string
:reqheader Authorization: The token necessary to authorize the request.
@@ -29,13 +29,19 @@ GET
:query string field: The field that should be returned in the response. Can be
repeated multiple times.
:query string nfield: The field that should *not* be returned in the response. Can be repeated multiple times.
+ :query string _id: The internal ID of the boot report.
:query string job: The name of a job.
:query string job_id: The ID of a job.
:query string kernel: The name of a kernel.
:query string defconfig: The name of a defconfig.
+ :query string defconfig_full: The full name of a defconfig (with config fragments).
+ :query string defconfig_id: The ID of a defconfig.
+ :query string endian: The endianness of the board.
:query string board: The name of a board.
- :query string status: The status of the boot report. Can be one of: ``PASS``
- or ``FAIL``.
+ :query string lab_name: The name of the lab that created the boot report.
+ :query string name: The name of the boot report.
+ :query string status: The status of the boot report.
:query int warnings: The number of warnings in the boot report.
:status 200: Results found.
@@ -82,7 +88,7 @@ GET
"status": "PASS",
"kernel": "next-20140905",
"job": "next",
- "_id": "next-next-20140905",
+ "_id": "boot-id",
"fastboot": false,
"warnings": 0,
"defconfig": "arm-omap2plus_defconfig"
@@ -120,6 +126,8 @@ GET
.. note::
Results shown here do not include the full JSON response.
+.. _collection_boot_post:
+
POST
****
@@ -127,11 +135,16 @@ POST
Create or update a boot report as defined in the JSON data. The request will be accepted and it will begin to parse the available data.
- If the request has been accepted, it will always return ``202`` as the status code, even when not boot reports for the ``job`` and ``kernel`` combination
- have been found.
+ If the request has been accepted, it will always return ``202`` as the status code.
+
+ For more info on all the required JSON request fields, see the :ref:`boot schema for POST requests <schema_boot_post>`.
+ :reqjson string lab_name: The name of the boot tests lab.
:reqjson string job: The name of the job.
:reqjson string kernel: The name of the kernel.
+ :reqjson string defconfig: The name of the defconfig.
+ :reqjson string board: The name of the board.
+ :reqjson string version: The version number of the schema.
:reqheader Authorization: The token necessary to authorize the request.
:reqheader Content-Type: Content type of the transmitted data, must be ``application/json``.
@@ -157,7 +170,10 @@ POST
{
"job": "next",
- "kernel": "next-20140801"
+ "kernel": "next-20140801",
+ "defconfig": "all-noconfig",
+ "lab_name": "lab-01",
+ "board": "beagleboneblack"
}
DELETE
@@ -175,11 +191,15 @@ DELETE
:resheader Content-Type: Will be ``application/json; charset=UTF-8``.
- :query string job: The name of a job.
+ :query string _id: The ID of a boot report.
:query string job_id: The ID of a job.
+ :query string job: The name of a job.
:query string kernel: The name of a kernel.
+ :query string defconfig_id: The ID of a defconfig.
:query string defconfig: The name of a defconfig.
+ :query string defconfig_full: The full name of a defconfig (with config fragments).
:query string board: The name of a board.
+ :query string name: The name of a boot report.
:status 200: Resource deleted.
:status 403: Not authorized to perform the operation.
diff --git a/doc/count-collection.rst b/doc/collection-count.rst
index b91bb38..f4d587e 100644
--- a/doc/count-collection.rst
+++ b/doc/collection-count.rst
@@ -28,11 +28,15 @@ GET
:query string arch: A type of computer architecture (like ``arm``, ``arm64``).
:query string board: The name of a board.
:query string defconfig: A defconfig name.
+ :query int errors: The number of errors.
:query string job: A job name.
- :query string job_id: A job ID (in the form of ``job``-``kernel``).
+ :query string job_id: A job ID.
:query string kernel: A kernel name.
+ :query string name: The name of an object.
+ :query boolean private: The private status.
:query string status: The status of the elements to get the count of. Can be
one of: ``PASS`` or ``FAIL``.
+ :query int warnings: The number of warnings.
:status 200: Results found.
:status 403: Not authorized to perform the operation.
diff --git a/doc/defconfig-collection.rst b/doc/collection-defconfig.rst
index 6f67b6d..6cf2d66 100644
--- a/doc/defconfig-collection.rst
+++ b/doc/collection-defconfig.rst
@@ -10,7 +10,7 @@ GET
Get all the available defconfigs built or a single one if ``defconfig_id`` is provided.
- :param defconfig_id: The ID of the defconfig to retrieve. Usually in the form of: ``job``-``kernel``-``defconfig``.
+ :param defconfig_id: The ID of the defconfig to retrieve.
:type defconfig_id: string
:reqheader Authorization: The token necessary to authorize the request.
@@ -29,15 +29,20 @@ GET
:query string field: The field that should be returned in the response. Can be
repeated multiple times.
:query string nfield: The field that should *not* be returned in the response. Can be repeated multiple times.
+ :query string _id: The internal ID of the defconfig report.
:query string job: The name of a job.
:query string job_id: The ID of a job.
:query string kernel: The name of a kernel.
+ :query string defconfig_full: The full name of a defconfig (with config fragments).
:query string defconfig: The name of a defconfig.
+ :query string name: The name of the defconfig report.
:query string arch: The architecture on which the defconfig has been built.
- :query string status: The status of the defconfig report. Can be one of:
- ``PASS`` or ``FAIL``.
+ :query string status: The status of the defconfig report.
:query int warnings: The number of warnings in the defconfig built.
:query int errors: The number of errors in the defconfig built.
+ :query string git_branch: The name of the git branch.
+ :query string git_commit: The git commit SHA.
+ :query string git_describe: The git describe value.
:status 200: Results found.
:status 403: Not authorized to perform the operation.
diff --git a/doc/job-collection.rst b/doc/collection-job.rst
index d57c24f..293b6db 100644
--- a/doc/job-collection.rst
+++ b/doc/collection-job.rst
@@ -12,7 +12,7 @@ GET
Get all the available jobs or a single one if ``job_id`` is provided.
- :param job_id: The ID of the job to retrieve in the form of ``job``-``kernel``.
+ :param job_id: The ID of the job to retrieve.
:type job_id: string
:reqheader Authorization: The token necessary to authorize the request.
@@ -31,10 +31,11 @@ GET
:query string field: The field that should be returned in the response. Can be
repeated multiple times.
:query string nfield: The field that should *not* be returned in the response. Can be repeated multiple times.
+ :query string _id: The internal ID of the job report.
:query string job: A job name.
:query string kernel: A kernel name.
- :query string status: The status of the job report. Can be one of: ``PASS`` or
- ``FAIL``.
+ :query string name: The name of the job report.
+ :query string status: The status of the job report.
:status 200: Results found.
:status 403: Not authorized to perform the operation.
diff --git a/doc/collection-lab.rst b/doc/collection-lab.rst
new file mode 100644
index 0000000..af882ea
--- /dev/null
+++ b/doc/collection-lab.rst
@@ -0,0 +1,183 @@
+.. _collection_lab:
+
+lab
+---
+
+GET
+***
+
+.. http:get:: /lab/(string:lab_id)
+
+ Get all the available registered labs or a single one if ``lab_id`` is
+ provided.
+
+ :param lab_id: The ID of the lab to retrieve.
+ :type lab_id: string
+
+ :reqheader Authorization: The token necessary to authorize the request.
+ :reqheader Accept-Encoding: Accept the ``gzip`` coding.
+
+ :resheader Content-Type: Will be ``application/json; charset=UTF-8``.
+
+ :query int limit: Number of results to return. Default 0 (all results).
+ :query int skip: Number of results to skip. Default 0 (none).
+ :query string sort: Field to sort the results on. Can be repeated multiple times.
+ :query int sort_order: The sort order of the results: -1 (descending), 1
+ (ascending). This will be applied only to the first ``sort``
+ parameter passed. Default -1.
+ :query int date_range: Number of days to consider, starting from today
+ (:ref:`more info <intro_schema_time_date>`). By default consider all results.
+ :query string field: The field that should be returned in the response. Can be
+ repeated multiple times.
+ :query string nfield: The field that should *not* be returned in the response. Can be repeated multiple times.
+ :query string _id: The internal ID of the registered lab.
+ :query string name: The name of the lab.
+ :query boolean private: If the lab is private or not.
+ :query string token: The token ID associated with the lab.
+
+ :status 200: Results found.
+ :status 403: Not authorized to perform the operation.
+ :status 404: The provided resource has not been found.
+ :status 500: Internal database error.
+
+ **Example Requests**
+
+ .. sourcecode:: http
+
+ GET /lab/ HTTP/1.1
+ Host: api.armcloud.us
+ Accept: */*
+ Authorization: token
+
+ .. sourcecode:: http
+
+ GET /lab/lab-01 HTTP/1.1
+ Host: api.armcloud.us
+ Accept: */*
+ Authorization: token
+
+ **Example Responses**
+
+ .. sourcecode:: http
+
+ HTTP/1.1 200 OK
+ Vary: Accept-Encoding
+ Date: Tue, 10 Nov 2014 12:28:50 GMT
+ Content-Type: application/json; charset=UTF-8
+
+ {
+ "code": 200,
+ "result": [
+ {
+ "name": "lab-01",
+ "private": false,
+ "address": {
+ "street_1": "Example street"
+ },
+ "contact": {
+ "name": "Name",
+ "surname": "Surname",
+ "email": "example@example.net"
+ }
+ }
+ ]
+ }
+
+ .. note::
+ Results shown here do not include the full JSON response.
+
+POST
+****
+
+.. http:post:: /lab/(string:lab_id)
+
+ Create or update a lab document as defined in the JSON data. If ``lab_id`` is
+ provided, it will perform an update.
+
+ For more info on all the required JSON request fields, see the :ref:`lab
+ schema <schema_lab>`.
+
+ :reqjson string name: The name that should be given to the lab.
+ :reqjson object contact: The contact data associated with the lab.
+
+ :reqheader Authorization: The token necessary to authorize the request.
+ :reqheader Content-Type: Content type of the transmitted data, must be ``application/json``.
+ :reqheader Accept-Encoding: Accept the ``gzip`` coding.
+
+ :resheader Content-Type: Will be ``application/json; charset=UTF-8``.
+
+ :status 200: The request has been accepted and the lab updated.
+ :status 201: The request has been accepted and the lab created.
+ :status 400: JSON data not valid, or provided name for the lab already exists.
+ :status 403: Not authorized to perform the operation.
+ :status 404: The provided ``lab_id`` was not found.
+ :status 415: Wrong content type.
+ :status 422: No real JSON data provided.
+
+ **Example Requests**
+
+ .. sourcecode:: http
+
+ POST /lab HTTP/1.1
+ Host: api.armcloud.us
+ Content-Type: application/json
+ Accept: */*
+ Authorization: token
+
+ {
+ "name": "lab-01",
+ "contact": {
+ "name": "Name",
+ "surname": "Surname",
+ "email": "example@example.net"
+ }
+ }
+
+DELETE
+******
+
+.. http:delete:: /lab/(string:lab_id)
+
+ Delete the lab entry identified by ``lab_id``.
+
+ :param lab_id: The ID of the lab document to delete.
+ :type lab_id: string
+
+ :reqheader Authorization: The token necessary to authorize the request.
+ :reqheader Accept-Encoding: Accept the ``gzip`` coding.
+
+ :resheader Content-Type: Will be ``application/json; charset=UTF-8``.
+
+ :query string _id: The internal ID of the registered lab.
+ :query boolean private: If the lab is private or not.
+ :query string token: The token ID associated with the lab.
+
+ :status 200: Resource deleted.
+ :status 403: Not authorized to perform the operation.
+ :status 404: The provided resource has not been found.
+ :status 500: Internal database error.
+
+ **Example Requests**
+
+ .. sourcecode:: http
+
+ DELETE /lab/lab-01 HTTP/1.1
+ Host: api.armcloud.us
+ Accept: */*
+ Content-Type: application/json
+ Authorization: token
+
+ .. sourcecode:: http
+
+ DELETE /lab?private=true HTTP/1.1
+ Host: api.armcloud.us
+ Accept: */*
+ Content-Type: application/json
+ Authorization: token
+
+
+More Info
+*********
+
+* :ref:`Lab schema <schema_lab>`
+* :ref:`API results <intro_schema_results>`
diff --git a/doc/collection-version.rst b/doc/collection-version.rst
new file mode 100644
index 0000000..025295d
--- /dev/null
+++ b/doc/collection-version.rst
@@ -0,0 +1,62 @@
+version
+-------
+
+GET
+***
+
+.. http:get:: /version
+
+ Provide the version number of the running software.
+
+ :reqheader Accept-Encoding: Accept the ``gzip`` coding.
+
+ :resheader Content-Type: Will be ``application/json; charset=UTF-8``.
+
+ :status 200: Results found.
+
+ .. note::
+
+ This collection does not require authentication.
+
+ **Example Requests**
+
+ .. sourcecode:: http
+
+ GET /version HTTP/1.1
+ Host: api.armcloud.us
+ Accept: */*
+
+ **Example Responses**
+
+ .. sourcecode:: http
+
+ HTTP/1.1 200 OK
+ Vary: Accept-Encoding
+ Date: Mon, 24 Nov 2014 18:08:12 GMT
+ Content-Type: application/json; charset=UTF-8
+
+ {
+ "code": 200,
+ "result":
+ [
+ {
+ "version": "2014.11",
+ "full_version": "2014.11"
+ }
+ ]
+ }
+
+POST
+****
+
+.. caution::
+ Not implemented. Will return a :ref:`status code <http_status_code>`
+ of ``501``.
+
+
+DELETE
+******
+
+.. caution::
+ Not implemented. Will return a :ref:`status code <http_status_code>`
+ of ``501``.
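
Since the collection requires no authentication, querying it from Python takes only a few lines; a minimal sketch in the same style as the examples in doc/examples.rst (the backend URL is the usual placeholder)::

    #!/usr/bin/env python

    import requests
    import urlparse

    BACKEND_URL = 'http://api.armcloud.us'


    def main():
        # The version collection does not require an Authorization header.
        url = urlparse.urljoin(BACKEND_URL, '/version')
        response = requests.get(url)
        print response.content

    if __name__ == '__main__':
        main()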
diff --git a/doc/collections.rst b/doc/collections.rst
index 749431c..b1bc434 100644
--- a/doc/collections.rst
+++ b/doc/collections.rst
@@ -6,8 +6,10 @@ Collections
.. toctree::
:maxdepth: 2
- count-collection
- job-collection
- defconfig-collection
- boot-collection
- batch-collection
+ collection-count
+ collection-job
+ collection-defconfig
+ collection-boot
+ collection-batch
+ collection-lab
+ collection-version
diff --git a/doc/conf.py b/doc/conf.py
index 55f5b08..c1e1a10 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -24,7 +24,7 @@ sys.path.insert(
0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
)
-from app import __version__, __versionfull__
+from app.handlers import __version__, __versionfull__
# -- General configuration ------------------------------------------------
diff --git a/doc/examples.rst b/doc/examples.rst
index 704ab41..2c5d9e5 100644
--- a/doc/examples.rst
+++ b/doc/examples.rst
@@ -174,3 +174,111 @@ module automatically handles ``gzip`` and ``deflate`` compressions.
if __name__ == "__main__":
main()
+
+
+Creating a new lab
+------------------
+
+.. note::
+
+ Creation of a new lab that can send boot reports is permitted only with an
+ administrative token.
+
+The response object will contain:
+
+* The ``token`` that should be used to send boot reports.
+
+* The ``name`` of the lab that should be used when sending boot reports.
+
+* The lab internal ``_id`` value.
+
+
+::
+
+ #!/usr/bin/env python
+
+ try:
+ import simplejson as json
+ except ImportError:
+ import json
+
+ import requests
+ import urlparse
+
+ AUTHORIZATION_TOKEN = 'foo'
+ BACKEND_URL = 'http://api.armcloud.us'
+
+
+ def main():
+ headers = {
+ 'Authorization': AUTHORIZATION_TOKEN,
+ 'Content-Type': 'application/json'
+ }
+
+ payload = {
+ 'version': '1.0',
+ 'name': 'lab-enymton-00',
+ 'contact': {
+ 'name': 'Ema',
+ 'surname': 'Nymton',
+ 'email': 'ema.nymton@example.org'
+ }
+ }
+
+ url = urlparse.urljoin(BACKEND_URL, '/lab')
+ response = requests.post(url, data=json.dumps(payload), headers=headers)
+
+ print response.content
+
+ if __name__ == '__main__':
+ main()
+
+
+Sending a boot report
+---------------------
+
+::
+
+ #!/usr/bin/env python
+
+ try:
+ import simplejson as json
+ except ImportError:
+ import json
+
+ import requests
+ import urlparse
+
+ AUTHORIZATION_TOKEN = 'foo'
+ BACKEND_URL = 'http://api.armcloud.us'
+
+
+ def main():
+ headers = {
+ 'Authorization': AUTHORIZATION_TOKEN,
+ 'Content-Type': 'application/json'
+ }
+
+ payload = {
+ 'version': '1.0',
+ 'lab_name': 'lab-name-00',
+ 'kernel': 'next-20141118',
+ 'job': 'next',
+ 'defconfig': 'arm-omap2plus_defconfig',
+ 'board': 'omap4-panda',
+ 'boot_result': 'PASS',
+ 'boot_time': 10.4,
+ 'boot_warnings': 1,
+ 'endian': 'little',
+ 'git_branch': 'local/master',
+ 'git_commit': 'fad15b648058ee5ea4b352888afa9030e0092f1b',
+ 'git_describe': 'next-20141118'
+ }
+
+ url = urlparse.urljoin(BACKEND_URL, '/boot')
+ response = requests.post(url, data=json.dumps(payload), headers=headers)
+
+ print response.content
+
+ if __name__ == '__main__':
+ main()
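
The lab-creation example above only prints the raw response; a hedged sketch of extracting the ``token``, ``name`` and ``_id`` fields it documents, assuming the response uses the same ``{"code": ..., "result": [...]}`` envelope shown for the GET requests of the lab collection::

    import json

    def extract_lab_details(response):
        # Assumes the standard response envelope with a 'result' list;
        # 'token', 'name' and '_id' are the fields the lab-creation
        # documentation says are returned.
        result = json.loads(response.content)['result'][0]
        return result['token'], result['name'], result['_id']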
diff --git a/doc/schema-boot.rst b/doc/schema-boot.rst
index e7e103f..7d0ff81 100644
--- a/doc/schema-boot.rst
+++ b/doc/schema-boot.rst
@@ -3,11 +3,16 @@
boot
----
-A boot ID is composed from the name of the board, the job, kernel and
-defconfig: ``board``-``job``-``kernel``-``defconfig``.
+.. _schema_boot_get:
-The value of ``defconfig``, in this case, is the directory name containing the
-defconfig.
+GET
+***
+
+A boot ``name`` is composed of the board, job, kernel, defconfig and
+architecture values: ``board``-``job``-``kernel``-``defconfig``-``arch``.
+
+Boot report ``name`` values are not unique. To uniquely identify a boot report it is
+necessary to use its ``_id`` value.
::
@@ -16,9 +21,13 @@ defconfig.
"description": "A boot report object",
"type": "object",
"properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of this boot report (internally created)"
+ },
"_id": {
"type": "string",
- "description": "The ID associated with the object"
+ "description": "The ID associated with the object as provided by mongodb"
},
"created_on": {
"type": "object",
@@ -26,7 +35,8 @@ defconfig.
"properties": {
"$date": {
"type": "number",
- "description": "Milliseconds from epoch time"
+ "description": "Milliseconds from epoch time",
+ "format": "utc-millisec"
}
}
},
@@ -34,10 +44,24 @@ defconfig.
"type": "string",
"description": "The name of the board"
},
+ "board_instance": {
+ "type": "string",
+ "description": "The instance identifier for the board"
+ },
"job": {
"type": "string",
"description": "The job associated with this object"
},
+ "job_id": {
+ "type": "object",
+ "description": "The ID of the associated job",
+ "properties": {
+ "$oid": {
+ "type": "string",
+ "description": "The actual ID value"
+ }
+ }
+ },
"kernel": {
"type": "string",
"description": "The kernel associated with this object"
@@ -46,28 +70,62 @@ defconfig.
"type": "string",
"description": "The name of the defconfig as reported by the CI loop"
},
+ "defconfig_full": {
+ "type": "string",
+ "description": "The full name of the defconfig, can contain also config fragments information",
+ "default": "The defconfig value"
+ },
+ "defconfig_id": {
+ "type": "object",
+ "description": "The ID of the associated build report",
+ "properties": {
+ "$oid": {
+ "type": "string",
+ "description": "The actual ID value"
+ }
+ }
+ },
+ "arch" : {
+ "type": "string",
+ "description": "The architecture type of this board",
+ "enum": ["arm", "arm64", "x86"],
+ "default": "arm"
+ },
+ "git_branch": {
+ "type": "string",
+ "description": "The branch used for boot testing"
+ },
+ "git_commit": {
+ "type": "string",
+ "description": "The git SHA of the commit used for boot testing"
+ },
+ "git_describe": {
+ "type": "string",
+ "description": "The name of the git describe command"
+ },
+ "lab_name": {
+ "type": "string",
+ "description": "The name of the lab that is doing the boot tests"
+ },
"time": {
"type": "object",
- "description": "Time take to boot the board as milliseconds from epoch",
+ "description": "Time taken to boot the board",
"properties": {
"$date": {
"type": "number",
- "description": "Milliseconds from epoch time"
+ "description": "Milliseconds from epoch time",
+ "format": "utc-millisec"
}
}
},
"status": {
"type": "string",
"description": "The status of the boot report",
- "items": {
- "FAIL",
- "PASS",
- "OFFLINE"
- }
+ "enum": ["FAIL", "OFFLINE", "PASS", "UNTRIED"]
},
"warnings": {
"type": "number",
- "description": "Numbere of warnings in the boot phase"
+ "description": "Number of warnings in the boot phase"
},
"boot_log": {
"type": "string",
@@ -81,6 +139,10 @@ defconfig.
"type": "string",
"description": "Initrd address used"
},
+ "initrd": {
+ "type": "string",
+ "description": "Initrd path"
+ },
"load_addr": {
"type": "string",
"description": "Load address used"
@@ -105,6 +167,27 @@ defconfig.
"type": "boolean",
"description": "If it was a fastboot"
},
+ "boot_result_description": {
+ "type": "string",
+ "description": "The description of the boot result, useful to provide a cause of a failure"
+ },
+ "retries": {
+ "type": "integer",
+ "description": "The number of boot retries that have been performed",
+ "default": 0
+ },
+ "version": {
+ "type": "string",
+ "description": "The version of this JSON schema: depends on the POST request"
+ },
+ "file_server_url": {
+ "type": "string",
+ "description": "The URL where boot log files, or other related files, are stored"
+ },
+ "file_server_resource": {
+ "type": "string",
+ "description": "The server path where the boot related files are stored"
+ },
"metadata": {
"type": "object",
"description": "A free form object that can contain different properties"
@@ -112,6 +195,164 @@ defconfig.
}
}
+.. _schema_boot_post:
+
+POST
+****
+
+The following schema defines the valid fields that a boot report document should
+have when sent to the server.
+
+::
+
+ {
+ "title": "boot",
+ "description": "A boot POST request object",
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
+ "lab_name": {
+ "type": "string",
+ "description": "The name of the lab that is doing the boot tests"
+ },
+ "job": {
+ "type": "string",
+ "description": "The job associated with this boot report"
+ },
+ "kernel": {
+ "type": "string",
+ "description": "The kernel associated with this boot report"
+ },
+ "defconfig": {
+ "type": "string",
+ "description": "The name of the defconfig as reported by the CI loop"
+ },
+ "defconfig_full": {
+ "type": "string",
+ "description": "The full name of the defconfig with config fragments information",
+ "default": "The defconfig value"
+ },
+ "board": {
+ "type": "string",
+ "description": "The name of the board: it must be a valid and recognized name"
+ },
+ "board_instance": {
+ "type": "string",
+ "description": "The instance identifier for the board"
+ },
+ "arch" : {
+ "type": "string",
+ "description": "The architecture type of this board",
+ "enum": ["arm", "arm64", "x86"],
+ "default": "arm"
+ },
+ "git_branch": {
+ "type": "string",
+ "description": "The branch used for boot testing"
+ },
+ "git_commit": {
+ "type": "string",
+ "description": "The git SHA of the commit used for boot testing"
+ },
+ "git_describe": {
+ "type": "string",
+ "description": "The name of the git describe command"
+ },
+ "boot_retries": {
+ "type": "integer",
+ "description": "The number of boot retries that have been performed",
+ "default": 0
+ },
+ "boot_result": {
+ "type": "string",
+ "description": "The final status of the boot test",
+ "enum": ["FAIL", "OFFLINE", "PASS", "UNTRIED"]
+ },
+ "boot_result_description": {
+ "type": "string",
+ "description": "The description of the boot result, useful to provide a cause of a failure"
+ },
+ "boot_log": {
+ "type": "string",
+ "description": "The name of the boot log file in txt format"
+ },
+ "boot_log_html": {
+ "type": "string",
+ "description": "The name of the boot log file in html format"
+ },
+ "boot_time": {
+ "type": "number",
+ "description": "The number of seconds it took to boot the board: iternally it will be converted into milliseconds from the epoch time"
+ },
+ "boot_warnings": {
+ "type": "integer",
+ "description": "The number of warnings detected during the boot",
+ "default": 0
+ },
+ "dtb": {
+ "type": "string",
+ "description": "The DTB file or directory used"
+ },
+ "dtb_addr": {
+ "type": "string",
+ "description": "The DTB address used"
+ },
+ "dtb_append": {
+ "type": "boolean",
+ "default": "false"
+ },
+ "endian": {
+ "type": "string",
+ "description": "Endianness of the board"
+ },
+ "fastboot": {
+ "type": "boolean",
+ "description": "If it was a fastboot",
+ "default": "false"
+ },
+ "initrd_addr": {
+ "type": "string",
+ "description": "Initrd address used"
+ },
+ "initrd": {
+ "type": "string",
+ "description": "Initrd path"
+ },
+ "kernel_image": {
+ "type": "string",
+ "description": "The kernel image used to boot"
+ },
+ "loadaddr": {
+ "type": "string",
+ "description": "Load address used"
+ },
+ "email": {
+ "type": "string",
+ "description": "Optional email address to be notified if the boot report import fails"
+ },
+ "file_server_url": {
+ "type": "string",
+ "description": "The URL where boot log files, or other related files, are stored"
+ },
+ "file_server_resource": {
+ "type": "string",
+ "description": "The server path where the boot related files are stored"
+ }
+ },
+ "required": ["version", "lab_name", "job", "kernel", "defconfig", "board", "arch"]
+ }
+
+Notes
++++++
+
+* ``defconfig_full``: This field should be used to specify the full defconfig name if config fragments have been used. It should not contain the architecture (``arch``) value. If not defined, the ``defconfig`` value will be used. Its value should conform to: ``defconfig[+fragment[+fragment ... ]]``.
+
+* ``file_server_url``, ``file_server_resource``: These fields should be used to provide the base URL and the actual path where boot related files (i.e. boot logs) are stored. ``file_server_url`` defines the base path, like ``http://storage.armcloud.us/``, ``file_server_resource`` defines the path on the server, like ``kernel-ci/next/``. When both resources are available, they should be joined together with the file names to form the actual URL. Implementation and default values are left to the user or the visualization tool using the data.
+
More Info
*********
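
The notes above leave the joining of ``file_server_url`` and ``file_server_resource`` to the consumer; a minimal sketch of one way to build the final log URL (the ``boot-log.txt`` file name is purely illustrative)::

    import posixpath
    import urlparse

    def build_file_url(file_server_url, file_server_resource, file_name):
        # Join the server-side path and the file name first, then
        # resolve the result against the base URL.
        path = posixpath.join(file_server_resource, file_name)
        return urlparse.urljoin(file_server_url, path)

    print build_file_url(
        'http://storage.armcloud.us/', 'kernel-ci/next/', 'boot-log.txt')
    # http://storage.armcloud.us/kernel-ci/next/boot-log.txt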
diff --git a/doc/schema-defconfig.rst b/doc/schema-defconfig.rst
index 727d64e..af6c8ed 100644
--- a/doc/schema-defconfig.rst
+++ b/doc/schema-defconfig.rst
@@ -3,11 +3,15 @@
defconfig
---------
-A defconfig ID is composed of a job ID and the defconfig name as follows:
-``job``-``kernel``-``name``.
+A defconfig ``name`` is composed of the job, kernel and defconfig values:
+``job``-``kernel``-``defconfig_full``.
-At a lower level a defconfig is the directory resulting from a kernel build
-using a defconfig.
+.. _schema_defconfig_get:
+
+GET
+***
+
+The following schema covers the data that is available with a GET request.
::
@@ -16,6 +20,15 @@ using a defconfig.
"description": "A defconfig as built by the CI loop",
"type": "object",
"properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
+ "name": {
+ "type": "string",
+ "description": "The name of this object (internally created)"
+ },
"_id": {
"type": "string",
"description": "The ID associated with the object"
@@ -34,6 +47,16 @@ using a defconfig.
"type": "string",
"description": "The job associated with this object"
},
+ "job_id": {
+ "type": "object",
+ "description": "The ID of the associated job",
+ "properties": {
+ "$oid": {
+ "type": "string",
+ "description": "The actual ID value"
+ }
+ }
+ },
"kernel": {
"type": "string",
"description": "The kernel associated with this object"
@@ -42,6 +65,11 @@ using a defconfig.
"type": "string",
"description": "The name of the defconfig as reported by the CI loop"
},
+ "defconfig_full": {
+ "type": "string",
+ "description": "The full name of the defconfig, can contain also config fragments information",
+ "default": "The defconfig value"
+ },
"dirname": {
"type": "string",
"description": "The name of the directory of the defconfig built; it can be different from the actual defconfig name"
@@ -49,24 +77,89 @@ using a defconfig.
"status": {
"type": "string",
"description": "The status of the defconfig",
- "items": {
- "FAIL",
- "PASS",
- "UNKNOWN"
- }
+ "enum": ["FAIL", "PASS", "UNKNOWN"]
},
"errors": {
- "type": "number",
- "description": "Number of errors reported"
+ "type": "integer",
+ "description": "Number of errors reported",
+ "default": 0
},
"warnings": {
- "type": "number",
- "description": "Number of warnings reported"
+ "type": "integer",
+ "description": "Number of warnings reported",
+ "default": 0
},
"arch": {
"type": "string",
"description": "The architecture of the defconfig built"
},
+ "build_time": {
+ "type": "number",
+ "description": "The time taken to build this defconfig"
+ },
+ "git_url": {
+ "type": "string",
+ "description": "The URL of the git web interface where the code used to build can be found"
+ },
+ "git_commit": {
+ "type": "string",
+ "description": "The git SHA of the commit used for the build"
+ },
+ "git_branch": {
+ "type": "string",
+ "description": "The name of the branch"
+ },
+ "git_describe": {
+ "type": "string",
+ "description": "The name of the git describe command"
+ },
+ "build_platform": {
+ "type": "array",
+ "description": "An array with info about the build platform"
+ },
+ "modules_dir": {
+ "type": "string",
+ "description": "Name of the modules directory"
+ },
+ "modules": {
+ "type": "string",
+ "description": "Name of the modules file"
+ },
+ "dtb_dir": {
+ "type": "string",
+ "description": "Name of the dtb directory"
+ },
+ "build_log": {
+ "type": "string",
+ "description": "Name of the build log file in txt format"
+ },
+ "text_offset": {
+ "type": "string"
+ },
+ "system_map": {
+ "type": "string",
+ "description": "Name of the system map file"
+ },
+ "kernel_config": {
+ "type": "string",
+ "description": "Name of the kernel config file used"
+ },
+ "kernel_image": {
+ "type": "string",
+ "description": "Name of the kernel image created"
+ },
+ "kconfig_fragments": {
+ "type": "string",
+ "description": "The config fragment used"
+ },
+ "file_server_url": {
+ "type": "string",
+ "description": "The URL where boot log files, or other related files, are stored"
+ },
+ "file_server_resource": {
+ "type": "string",
+ "description": "The server path where the boot related files are stored"
+ },
"metadata": {
"type": "object",
"description": "A free form object that can contain different properties"
@@ -74,6 +167,157 @@ using a defconfig.
}
}
+.. _schema_defconfig_post:
+
+POST
+****
+
+The following schema covers the data that should be available in a build JSON
+data file sent to the server.
+
+The ``defconfig`` collection does not support POST requests. This schema is
+placed here as a reference document in order to provide correct data to the
+server.
+
+::
+
+ {
+ "title": "defconfig",
+ "description": "A defconfig as built by the CI loop",
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
+ "job": {
+ "type": "string",
+ "description": "The job associated with this object"
+ },
+ "kernel": {
+ "type": "string",
+ "description": "The kernel associated with this object"
+ },
+ "defconfig": {
+ "type": "string",
+ "description": "The name of the defconfig as reported by the CI loop"
+ },
+ "defconfig_full": {
+ "type": "string",
+ "description": "The full name of the defconfig with config fragments information",
+ "default": "The defconfig value"
+ },
+ "build_result": {
+ "type": "string",
+ "description": "The status of the defconfig",
+ "enum": ["FAIL", "PASS", "UNKNOWN"]
+ },
+ "build_errors": {
+ "type": "integer",
+ "description": "Number of errors reported",
+ "default": 0
+ },
+ "build_warnings": {
+ "type": "integer",
+ "description": "Number of warnings reported",
+ "default": 0
+ },
+ "arch": {
+ "type": "string",
+ "description": "The architecture of the defconfig built"
+ },
+ "build_time": {
+ "type": "number",
+ "description": "The time taken to build this defconfig",
+ "default": 0
+ },
+ "git_url": {
+ "type": "string",
+ "description": "The URL of the git web interface where the code used to build can be found"
+ },
+ "git_commit": {
+ "type": "string",
+ "description": "The git SHA of the commit used for the build"
+ },
+ "git_branch": {
+ "type": "string",
+ "description": "The name of the branch"
+ },
+ "git_describe": {
+ "type": "string",
+ "description": "The name of the git describe command"
+ },
+ "build_log": {
+ "type": "string",
+ "description": "Name of the build log file in txt format"
+ },
+ "build_platform": {
+ "type": "array",
+ "description": "An array with info about the build platform"
+ },
+ "dtb_dir": {
+ "type": "string",
+ "description": "Name of the dtb directory"
+ },
+ "compiler_version": {
+ "type": "string",
+ "description": "Description string of the compiler used"
+ },
+ "kconfig_fragments": {
+ "type": "string",
+ "description": "The config fragment used"
+ },
+ "kernel_config": {
+ "type": "string",
+ "description": "Name of the kernel config file used"
+ },
+ "kernel_image": {
+ "type": "string",
+ "description": "Name of the kernel image created"
+ },
+ "cross_compile": {
+ "type": "string",
+ "description": "The cross compiler used"
+ },
+ "modules": {
+ "type": "string",
+ "description": "Name of the modules file"
+ },
+ "modules_dir": {
+ "type": "string",
+ "description": "Name of the modules directory"
+ },
+ "system_map": {
+ "type": "string",
+ "description": "Name of the system map file"
+ },
+ "text_offset": {
+ "type": "string"
+ },
+ "kconfig_fragments": {
+ "type": "string",
+ "description": "The config fragment used"
+ },
+ "file_server_url": {
+ "type": "string",
+ "description": "The URL where boot log files, or other related files, are stored"
+ },
+ "file_server_resource": {
+ "type": "string",
+ "description": "The server path where the boot related files are stored"
+ }
+ },
+ "required": ["version", "job", "kernel", "defconfig"]
+ }
+
+Notes
++++++
+
+* ``defconfig_full``: This field should be used to specify the full defconfig name if config fragments have been used. It should not contain the architecture (``arch``) value. If not defined, the ``defconfig`` value will be used. Its value should conform to: ``defconfig[+fragment[+fragment ... ]]``.
+
+* ``file_server_url``, ``file_server_resource``: These fields should be used to provide the base URL and the actual path where boot related files (i.e. boot logs) are stored. ``file_server_url`` defines the base path, like ``http://storage.armcloud.us/``, ``file_server_resource`` defines the path on the server, like ``kernel-ci/next/``. When both resources are available, they should be joined together with the file names to form the actual URL. Implementation and default values are left to the user or the visualization tool using the data.
+
More Info
*********
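
Since ``defconfig_full`` should conform to ``defconfig[+fragment[+fragment ...]]``, a small sketch of splitting such a value back into its components (the fragment names are illustrative only)::

    def split_defconfig_full(defconfig_full):
        # 'multi_v7_defconfig+FRAG1+FRAG2' ->
        # ('multi_v7_defconfig', ['FRAG1', 'FRAG2'])
        parts = defconfig_full.split('+')
        return parts[0], parts[1:]

    print split_defconfig_full('multi_v7_defconfig+FRAG1')
    # ('multi_v7_defconfig', ['FRAG1'])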
diff --git a/doc/schema-job.rst b/doc/schema-job.rst
index 70425c8..d1059f2 100644
--- a/doc/schema-job.rst
+++ b/doc/schema-job.rst
@@ -3,8 +3,7 @@
job
---
-A job is composed of an actual job name and a kernel name. The ID is formed
-by concatenating these two values: ``job``-``kernel``.
+A job ``name`` is composed of an actual job name and a kernel name: ``job``-``kernel``.
At a lower level, a job is the top level directory of the results from a CI
build.
@@ -16,10 +15,19 @@ build.
"description": "A job as provided by the CI loop",
"type": "object",
"properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
"_id": {
"type": "string",
"description": "The ID associated with this object"
},
+ "name": {
+ "type": "string",
+ "description": "The name of the object"
+ },
"created_on": {
"type": "object",
"description": "Creation date of the object",
@@ -32,22 +40,13 @@ build.
},
"private": {
"type": "boolean",
- "description": "If the job is private or not, default false"
+ "description": "If the job is private or not",
+ "default": false
},
"kernel": {
"type": "string",
"description": "The name of the kernel"
},
- "updated": {
- "type": "object",
- "description": "Date the job was updated",
- "properties": {
- "$date": {
- "type": "number",
- "description": "Milliseconds from epoch time"
- }
- }
- },
"job": {
"type": "string",
"description": "The name of the job"
@@ -55,34 +54,7 @@ build.
"status": {
"type": "string",
"description": "The status of the job",
- "items": {
- "BUILD",
- "FAIL",
- "PASS",
- "UNKNOWN"
- }
- },
- "metadata": {
- "type": "object",
- "description": "A free form object that can contain different properties",
- "properties": {
- "git_branch": {
- "type": "string",
- "description": "The kernel branch name"
- },
- "git_commit": {
- "type": "string",
- "description": "The commit SHA"
- },
- "git_url": {
- "type": "string",
- "description": "URL of the git repository"
- },
- "git_describe": {
- "type": "string",
- "description": "Name of the repository"
- }
- }
+ "enum": ["BUILD", "FAIL", "PASS", "UNKNOWN"]
}
}
}
diff --git a/doc/schema-lab.rst b/doc/schema-lab.rst
new file mode 100644
index 0000000..b6c8c89
--- /dev/null
+++ b/doc/schema-lab.rst
@@ -0,0 +1,246 @@
+.. _schema_lab:
+
+lab
+---
+
+The ``name`` of the lab must be a unique value among all the registered labs. Use
+a short but descriptive name to identify the lab, since this value will be used
+to perform POST requests :ref:`on the boot collection <collection_boot_post>`.
+
+As a rule of thumb for creating a lab ``name``:
+
+* Start the lab name with ``lab-``.
+
+* Use some of the contact information as the next element (the ``name``, or ``affiliation``).
+
+* Add a progressive number at the end (``-00``, ``-01``, etc...).
+
+.. _schema_lab_get:
+
+GET
+***
+
+::
+
+ {
+ "title": "lab",
+ "description": "A lab object",
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "The version of this JSON schema: depends on the POST request"
+ },
+ "name": {
+ "type": "string",
+ "description": "The name associated with the object"
+ },
+ "_id": {
+ "type": "string",
+ "description": "The ID associated with the object as provided by mongodb"
+ },
+ "created_on": {
+ "type": "object",
+ "description": "Creation date of the object",
+ "properties": {
+ "$date": {
+ "type": "number",
+ "description": "Milliseconds from epoch time",
+ "format": "utc-millisec"
+ }
+ }
+ },
+ "updated_on": {
+ "type": "object",
+ "description": "Update date of the object",
+ "properties": {
+ "$date": {
+ "type": "number",
+ "description": "Milliseconds from epoch time",
+ "format": "utc-millisec"
+ }
+ }
+ },
+ "contact": {
+ "type": "object",
+ "description": "The contact details of the object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the contact"
+ },
+ "surname": {
+ "type": "string",
+ "description": "The surname of the contact"
+ },
+ "email": {
+ "type": "string",
+ "description": "The email of the contact"
+ },
+ "telephone": {
+ "type": "string",
+ "description": "The landline phone number"
+ },
+ "mobile": {
+ "type": "string",
+ "description": "The mobile phone number"
+ },
+ "affiliation": {
+ "type": "string",
+ "description": "The name of the company, or association this contact is part of"
+ }
+ }
+ },
+ "address": {
+ "type": "object",
+ "description": "The address where the lab is located",
+ "properties": {
+ "street_1": {
+ "type": "string",
+ "description": "First line for the address"
+ },
+ "street_2": {
+ "type": "string",
+ "description": "Second line for the address"
+ },
+ "city": {
+ "type": "string",
+ "description": "The city name"
+ },
+ "country": {
+ "type": "string",
+ "description": "The country name"
+ },
+ "zipcode": {
+ "type": "string",
+ "description": "The zip code"
+ },
+ "longitude": {
+ "type": "number",
+ "description": "Latitude of the lab location"
+ },
+ "longitude": {
+ "type": "number",
+ "description": "Longitude of the lab location"
+ }
+ }
+ },
+ "private": {
+ "type": "boolean",
+ "description": "If the lab is private or not",
+ "default": "false"
+ },
+ "token": {
+ "type": "string",
+ "description": "The ID of the token associated with this lab"
+ }
+ }
+ }
+
+.. _schema_lab_post:
+
+POST
+****
+
+
+::
+
+ {
+ "title": "lab",
+ "description": "A lab object",
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
+ "name": {
+ "type": "string",
+ "description": "The name associated with the object"
+ },
+ "contact": {
+ "type": "object",
+ "description": "The contact details of the object",
+ "properties": {
+ "name": {
+ "type": "string",
+ "description": "The name of the contact"
+ },
+ "surname": {
+ "type": "string",
+ "description": "The surname of the contact"
+ },
+ "email": {
+ "type": "string",
+ "description": "The email of the contact"
+ },
+ "telephone": {
+ "type": "string",
+ "description": "The landline phone number"
+ },
+ "mobile": {
+ "type": "string",
+ "description": "The mobile phone number"
+ },
+ "affiliation": {
+ "type": "string",
+ "description": "The name of the company, or association this contact is part of"
+ }
+ },
+ "required": ["name", "surname", "email"]
+ },
+ "address": {
+ "type": "object",
+ "description": "The address where the lab is located",
+ "properties": {
+ "street_1": {
+ "type": "string",
+ "description": "First line for the address"
+ },
+ "street_2": {
+ "type": "string",
+ "description": "Second line for the address"
+ },
+ "city": {
+ "type": "string",
+ "description": "The city name"
+ },
+ "country": {
+ "type": "string",
+ "description": "The country name"
+ },
+ "zipcode": {
+ "type": "string",
+ "description": "The zip code"
+ },
+ "longitude": {
+ "type": "number",
+ "description": "Latitude of the lab location"
+ },
+ "longitude": {
+ "type": "number",
+ "description": "Longitude of the lab location"
+ }
+ }
+ },
+ "private": {
+ "type": "boolean",
+ "description": "If the lab is private or not",
+ "default": "false"
+ },
+ "token": {
+ "type": "string",
+ "description": "The token to associated with this lab"
+ }
+ },
+ "required": ["version", "name", "contact"]
+ }
+
+More Info
+*********
+
+* :ref:`Lab collection <collection_lab>`
+* :ref:`Defconfig schema <schema_defconfig>`
+* :ref:`API results <intro_schema_results>`
+* :ref:`Schema time and date <intro_schema_time_date>`
diff --git a/doc/schema-token.rst b/doc/schema-token.rst
index b009cfc..3fb2f56 100644
--- a/doc/schema-token.rst
+++ b/doc/schema-token.rst
@@ -12,10 +12,19 @@ A token object as stored in the database.
"description": "A token used to interact with the API",
"type": "object",
"properties": {
+ "version": {
+ "type": "string",
+ "description": "The version number of this JSON schema",
+ "enum": ["1.0"]
+ },
"_id": {
"type": "string",
"description": "The ID associated with this object"
},
+ "name": {
+ "type": "string",
+ "description": "The name associated with this token"
+ },
"created_on": {
"type": "object",
"description": "Creation date of the object",
diff --git a/doc/schema.rst b/doc/schema.rst
index 34f1c19..e778af7 100644
--- a/doc/schema.rst
+++ b/doc/schema.rst
@@ -11,3 +11,4 @@ Schema
schema-boot
schema-token
schema-batch
+ schema-lab
diff --git a/requirements.txt b/requirements.txt
index ac9bfa8..2b8cbac 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,12 +1,13 @@
-futures==2.1.6
-pycares==0.5.0
-tornado==3.2.2
-pymongo
-mongomock==1.2.0
-mock>0.7.2
+Sphinx
celery[redis]==3.1.16
+futures==2.2.0
+mock>0.7.2
+mongomock==1.2.0
+netaddr==0.7.12
+pycares==0.6.3
+pymongo
redis==2.10.3
-netaddr==0.7.2
-Sphinx
-sphinxcontrib-httpdomain
+simplejson>=3.6.1
sphinx-bootstrap-theme
+sphinxcontrib-httpdomain
+tornado==3.2.2