Signed-off-by: Cleber Rosa <[email protected]>
---
documentation/Makefile | 153 ++++++++++
documentation/source/conf.py | 50 ++++
documentation/source/frontend/afe_models.rst | 139 +++++++++
documentation/source/frontend/index.rst | 21 ++
documentation/source/frontend/model_logic.rst | 14 +
documentation/source/frontend/models.rst | 22 ++
documentation/source/frontend/tko_models.rst | 49 +++
documentation/source/index.rst | 23 ++
frontend/afe/model_logic.py | 24 +-
frontend/afe/models.py | 416 +++++++++++++++++++-------
frontend/afe/rpc_utils.py | 18 ++
frontend/tko/models.py | 43 +++
12 files changed, 857 insertions(+), 115 deletions(-)
create mode 100644 documentation/Makefile
create mode 100644 documentation/build/.empty
create mode 100644 documentation/source/_static/.empty
create mode 100644 documentation/source/_templates/.empty
create mode 100644 documentation/source/conf.py
create mode 100644 documentation/source/frontend/afe_models.rst
create mode 100644 documentation/source/frontend/index.rst
create mode 100644 documentation/source/frontend/model_logic.rst
create mode 100644 documentation/source/frontend/models.rst
create mode 100644 documentation/source/frontend/tko_models.rst
create mode 100644 documentation/source/index.rst
diff --git a/documentation/Makefile b/documentation/Makefile
new file mode 100644
index 0000000..2b1d166
--- /dev/null
+++ b/documentation/Makefile
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/autotest.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/autotest.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/autotest"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/autotest"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
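
For reference, the "html" target above boils down to a single sphinx-build
invocation. A rough, hypothetical Python equivalent, assuming the Makefile
defaults (BUILDDIR=build, sources under "source") and sphinx-build available
on the PATH:

    # Rough Python equivalent of "make html"; the Makefile remains the
    # intended entry point, this is only an illustration.
    import subprocess

    ALLSPHINXOPTS = ['-d', 'build/doctrees', 'source']
    subprocess.check_call(['sphinx-build', '-b', 'html'] + ALLSPHINXOPTS
                          + ['build/html'])
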
diff --git a/documentation/build/.empty b/documentation/build/.empty
new file mode 100644
index 0000000..e69de29
diff --git a/documentation/source/_static/.empty b/documentation/source/_static/.empty
new file mode 100644
index 0000000..e69de29
diff --git a/documentation/source/_templates/.empty b/documentation/source/_templates/.empty
new file mode 100644
index 0000000..e69de29
diff --git a/documentation/source/conf.py b/documentation/source/conf.py
new file mode 100644
index 0000000..23611e8
--- /dev/null
+++ b/documentation/source/conf.py
@@ -0,0 +1,50 @@
+# -*- coding: utf-8 -*-
+
+import sys, os
+
+from autotest.client.shared.version import get_version
+from autotest.frontend import setup_django_environment
+
+extensions = ['sphinx.ext.autodoc',
+ 'sphinx.ext.doctest',
+ 'sphinx.ext.intersphinx',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.ifconfig',
+ 'sphinx.ext.viewcode']
+
+
+master_doc = 'index'
+project = u'autotest'
+copyright = u'2013, Autotest Team'
+
+v_parts = get_version().split('.')
+version = "%s.%s" % (v_parts[0], v_parts[1])
+release = '%s.%s.%s' % (v_parts[0], v_parts[1], v_parts[2])
+
+pygments_style = 'sphinx'
+
+latex_documents = [
+ ('index', 'autotest.tex', u'autotest Documentation',
+ u'Autotest Team', 'manual'),
+]
+
+man_pages = [
+ ('index', 'autotest', u'autotest Documentation',
+ [u'Autotest Team'], 1)
+]
+
+
+texinfo_documents = [
+ ('index', 'autotest', u'autotest Documentation',
+ u'Autotest Team', 'autotest', 'Framework for fully automated testing.',
+ 'Miscellaneous'),
+]
+
+epub_title = u'autotest'
+epub_author = u'Autotest Team'
+epub_publisher = u'Autotest Team'
+epub_copyright = u'2013, Autotest Team'
+
+# Example configuration for intersphinx: refer to the Python standard library.
+intersphinx_mapping = {'http://docs.python.org/': None}
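
For illustration, the version/release computation above splits the value
returned by get_version(); with a hypothetical version string it behaves
like this:

    # Hypothetical sketch of the version/release split done in conf.py;
    # '0.15.0' stands in for the real get_version() return value.
    v_parts = '0.15.0'.split('.')
    version = "%s.%s" % (v_parts[0], v_parts[1])                  # -> '0.15'
    release = '%s.%s.%s' % (v_parts[0], v_parts[1], v_parts[2])   # -> '0.15.0'
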
diff --git a/documentation/source/frontend/afe_models.rst b/documentation/source/frontend/afe_models.rst
new file mode 100644
index 0000000..978db1d
--- /dev/null
+++ b/documentation/source/frontend/afe_models.rst
@@ -0,0 +1,139 @@
+============
+ AFE Models
+============
+
+.. module:: autotest.frontend.afe.models
+
+AFE stands for Autotest Front End. It's an application that provides access
+to the core Autotest definitions, such as Hosts, Tests, Jobs, etc.
+
+For the classes that inherit from :class:`django.db.models.Model` some of the
+attributes documented here are instances from one of the many
+:mod:`django.db.models.fields` classes and will be mapped into a field on the
+relational database.
+
+:class:`AtomicGroup`
+====================
+
+.. autoclass:: AtomicGroup
+ :members:
+
+
+:class:`Job`
+============
+
+.. autoclass:: Job
+ :members:
+
+
+:class:`Label`
+==============
+
+.. autoclass:: Label
+ :members:
+
+
+:class:`Drone`
+==============
+
+.. autoclass:: Drone
+ :members:
+
+
+:class:`DroneSet`
+=================
+
+.. autoclass:: DroneSet
+ :members:
+
+
+:class:`User`
+=============
+
+.. autoclass:: User
+ :members:
+
+
+:class:`Host`
+=============
+
+.. autoclass:: Host
+ :members:
+
+:class:`HostAttribute`
+======================
+
+.. autoclass:: HostAttribute
+ :members:
+
+:class:`Test`
+=============
+
+.. autoclass:: Test
+ :members:
+
+
+:class:`TestParameter`
+======================
+
+.. autoclass:: TestParameter
+ :members:
+
+:class:`Profiler`
+=================
+
+.. autoclass:: Profiler
+ :members:
+
+:class:`AclGroup`
+=================
+
+.. autoclass:: AclGroup
+ :members:
+
+:class:`Kernel`
+===============
+
+.. autoclass:: Kernel
+ :members:
+
+:class:`ParameterizedJob`
+=========================
+
+.. autoclass:: ParameterizedJob
+ :members:
+
+:class:`ParameterizedJobProfiler`
+=================================
+
+.. autoclass:: ParameterizedJobProfiler
+ :members:
+
+
+:class:`ParameterizedJobProfilerParameter`
+==========================================
+
+.. autoclass:: ParameterizedJobProfilerParameter
+ :members:
+
+:class:`ParameterizedJobParameter`
+==================================
+
+.. autoclass:: ParameterizedJobParameter
+ :members:
+
+
+================
+ AFE Exceptions
+================
+
+Besides persistence, Models also provide some logic. And as such, some custom
+error conditions exist.
+
+.. autoexception:: AclAccessViolation
diff --git a/documentation/source/frontend/index.rst b/documentation/source/frontend/index.rst
new file mode 100644
index 0000000..22474e1
--- /dev/null
+++ b/documentation/source/frontend/index.rst
@@ -0,0 +1,21 @@
+============
+ RPC Server
+============
+
+The Autotest RPC Server, also known as the frontend, is a Django based
+application that provides:
+
+* The Database Objects (defined by Django :mod:`Models <django.db.models>`)
+* A remoting interface using the JSON-RPC protocol
+* The :mod:`Administration Web Interface <django.contrib.admin>` that Django
+ gives us for free
+
+We'll start by taking a look at the Database Models and the database
+structure that they generate.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ models
diff --git a/documentation/source/frontend/model_logic.rst b/documentation/source/frontend/model_logic.rst
new file mode 100644
index 0000000..472a163
--- /dev/null
+++ b/documentation/source/frontend/model_logic.rst
@@ -0,0 +1,14 @@
+=============
+ Model Logic
+=============
+
+.. module:: autotest.frontend.afe.model_logic
+
+Autotest extends the base Django Database models with some custom logic.
+
+:class:`ModelWithInvalid`
+-------------------------
+
+.. autoclass:: ModelWithInvalid
+ :members:
+
diff --git a/documentation/source/frontend/models.rst b/documentation/source/frontend/models.rst
new file mode 100644
index 0000000..8fe8118
--- /dev/null
+++ b/documentation/source/frontend/models.rst
@@ -0,0 +1,22 @@
+========
+ Models
+========
+
+The Database Models play a major role in the RPC server. The most important
+things they do:
+
+* Define and create the database structure on the Autotest Relational Database
+* Provide an object-like, uniform API for the database entries
+
+.. note:: For historical reasons, the RPC server is composed of two different
+ applications, AFE and TKO. Because of that, the models are also defined in
+ two different modules.
+
+ These may soon be united into a single application, especially their model
+ definition. For now, keep in mind that the model you are looking for may
+ be in one of two different places.
+
+.. toctree::
+ model_logic
+ afe_models
+ tko_models
diff --git a/documentation/source/frontend/tko_models.rst b/documentation/source/frontend/tko_models.rst
new file mode 100644
index 0000000..fed34d3
--- /dev/null
+++ b/documentation/source/frontend/tko_models.rst
@@ -0,0 +1,49 @@
+============
+ TKO Models
+============
+
+.. module:: autotest.frontend.tko.models
+
+TKO is the autotest application dedicated to storing and querying test results.
+
+:class:`Machine`
+================
+
+.. autoclass:: Machine
+ :members:
+
+:class:`Kernel`
+===============
+
+.. autoclass:: Kernel
+ :members:
+
+:class:`Patch`
+==============
+
+.. autoclass:: Patch
+ :members:
+
+:class:`Status`
+===============
+
+.. autoclass:: Status
+ :members:
+
+:class:`Job`
+===============
+
+.. autoclass:: Job
+ :members:
+
+:class:`JobKeyval`
+==================
+
+.. autoclass:: JobKeyval
+ :members:
+
+:class:`Test`
+=============
+
+.. autoclass:: Test
+ :members:
diff --git a/documentation/source/index.rst b/documentation/source/index.rst
new file mode 100644
index 0000000..1e20cb2
--- /dev/null
+++ b/documentation/source/index.rst
@@ -0,0 +1,23 @@
+Welcome to autotest's documentation!
+====================================
+
+Welcome! This is the Autotest documentation.
+
+Autotest is a framework for fully automated testing.
+
+It is designed primarily to test the Linux kernel, though it is useful for many other functions such as qualifying new hardware.
+
+Contents:
+
+.. toctree::
+ :maxdepth: 2
+
+ frontend/index
+
+
+Indices and tables
+==================
+
+* :ref:`genindex`
+* :ref:`modindex`
+* :ref:`search`
diff --git a/frontend/afe/model_logic.py b/frontend/afe/model_logic.py
index 57870ae..4b2dffe 100644
--- a/frontend/afe/model_logic.py
+++ b/frontend/afe/model_logic.py
@@ -1018,11 +1018,18 @@ class ModelExtensions(object):
class ModelWithInvalid(ModelExtensions):
"""
Overrides model methods save() and delete() to support invalidation in
- place of actual deletion. Subclasses must have a boolean "invalid"
- field.
+ place of actual deletion.
+
+ Subclasses must have a boolean "invalid" field.
"""
def save(self, *args, **kwargs):
+ '''
+ Attempts to resurrect a previously added and invalidated object first
+
+ This resurrection logic only applies when the object being saved has
+ never been added to the database before; otherwise a regular save is
+ performed.
+ '''
first_time = (self.id is None)
if first_time:
# see if this object was previously added and invalidated
@@ -1040,9 +1047,11 @@ class ModelWithInvalid(ModelExtensions):
def resurrect_object(self, old_object):
"""
- Called when self is about to be saved for the first time and is actually
- "undeleting" a previously deleted object. Can be overridden by
- subclasses to copy data as desired from the deleted entry (but this
+ Restores a previously invalidated object
+
+ Called when self is about to be saved for the first time and is
+ actually "undeleting" a previously deleted object. Can be overridden
+ by subclasses to copy data as desired from the deleted entry (but this
superclass implementation must normally be called).
"""
self.id = old_object.id
@@ -1050,6 +1059,8 @@ class ModelWithInvalid(ModelExtensions):
def clean_object(self):
"""
+ Method meant to be overridden by subclasses when invalidating objects
+
This method is called when an object is marked invalid.
Subclasses should override this to clean up relationships that
should no longer exist if the object were deleted.
@@ -1058,6 +1069,9 @@ class ModelWithInvalid(ModelExtensions):
def delete(self):
+ """
+ Fakes a deletion by marking the object as invalid instead of removing it
+ """
self.invalid = self.invalid
assert not self.invalid
self.invalid = True
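
The delete()/save() behaviour documented above can be summarised with a
small, hypothetical sketch; it assumes a ModelWithInvalid subclass such as
Label and an illustrative label name:

    # Hypothetical sketch of ModelWithInvalid invalidation/resurrection,
    # using Label (a ModelWithInvalid subclass); the name is illustrative.
    from autotest.frontend.afe.models import Label

    label = Label.objects.create(name='temp-label')
    label.delete()                       # marks the row invalid, keeps it in the DB
    assert Label.objects.get(name='temp-label').invalid

    revived = Label(name='temp-label')   # no id yet, same unique name
    revived.save()                       # resurrects the invalidated row
    assert revived.id == label.id and not revived.invalid
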
diff --git a/frontend/afe/models.py b/frontend/afe/models.py
index e24685e..7fa4f6b 100644
--- a/frontend/afe/models.py
+++ b/frontend/afe/models.py
@@ -19,35 +19,44 @@ DEFAULT_REBOOT_AFTER = model_attributes.RebootBefore.ALWAYS
class AclAccessViolation(Exception):
- """\
- Raised when an operation is attempted with proper permissions as
- dictated by ACLs.
+ """
+ Raised when an operation is attempted without the proper permissions as
+ dictated by ACLs.
"""
class AtomicGroup(model_logic.ModelWithInvalid, dbmodels.Model):
- """\
- An atomic group defines a collection of hosts which must only be scheduled
- all at once. Any host with a label having an atomic group will only be
- scheduled for a job at the same time as other hosts sharing that label.
+ """
+ A collection of hosts which must only be scheduled all at once.
- Required:
- name: A name for this atomic group. ex: 'rack23' or 'funky_net'
- max_number_of_machines: The maximum number of machines that will be
- scheduled at once when scheduling jobs to this atomic group.
- The job.synch_count is considered the minimum.
+ Any host with a label having an atomic group will only be scheduled for a
+ job at the same time as other hosts sharing that label.
- Optional:
- description: Arbitrary text description of this group's purpose.
+ Required Fields: :attr:`name`
+
+ Optional Fields: :attr:`description`, :attr:`max_number_of_machines`
+
+ Internal Fields: :attr:`invalid`
"""
+ #: This magic value is the default to simplify the scheduler logic.
+ #: It must be "large". The common use of atomic groups is to want all
+ #: machines in the group to be used, limits on which subset used are
+ #: often chosen via dependency labels.
+ INFINITE_MACHINES = 333333333
+
+ #: A descriptive name, such as "rack23" or "my_net"
name = dbmodels.CharField(max_length=255, unique=True)
+
+ #: Arbitrary text description of this group's purpose.
description = dbmodels.TextField(blank=True)
- # This magic value is the default to simplify the scheduler logic.
- # It must be "large". The common use of atomic groups is to want all
- # machines in the group to be used, limits on which subset used are
- # often chosen via dependency labels.
- INFINITE_MACHINES = 333333333
+
+ #: The maximum number of machines that will be scheduled at once when
+ #: scheduling jobs to this atomic group. The :attr:`Job.synch_count` is
+ #: considered the minimum. Default value is :attr:`INFINITE_MACHINES`
max_number_of_machines = dbmodels.IntegerField(default=INFINITE_MACHINES)
+
+ #: Internal field, used by
+ #: :class:`autotest.frontend.afe.model_logic.ModelWithInvalid`
invalid = dbmodels.BooleanField(default=False,
editable=frontend_settings.FULL_ADMIN)
@@ -57,13 +66,27 @@ class AtomicGroup(model_logic.ModelWithInvalid, dbmodels.Model):
def enqueue_job(self, job, is_template=False):
- """Enqueue a job on an associated atomic group of hosts."""
+ """
+ Enqueue a job on an associated atomic group of hosts
+
+ :param job: the :class:`Job` that will be sent to this atomic group
+ :type job: :class:`Job`
+ :param is_template: whether the job is a template (True) or a regular
+ job (False). Default is a regular job (False).
+ :type is_template: boolean
+ """
queue_entry = HostQueueEntry.create(atomic_group=self, job=job,
is_template=is_template)
queue_entry.save()
def clean_object(self):
+ """
+ Clears the labels set on this atomic group.
+
+ This method is required by
+ :class:`autotest.frontend.afe.model_logic.ModelWithInvalid`
+ """
self.label_set.clear()
@@ -76,38 +99,70 @@ class AtomicGroup(model_logic.ModelWithInvalid, dbmodels.Model):
class Label(model_logic.ModelWithInvalid, dbmodels.Model):
- """\
- Required:
- name: label name
+ """
+ Identifiers used to tag hosts, tests, jobs, etc.
- Optional:
- kernel_config: URL/path to kernel config for jobs run on this label.
- platform: If True, this is a platform label (defaults to False).
- only_if_needed: If True, a Host with this label can only be used if that
- label is requested by the job/test (either as the meta_host or
- in the job_dependencies).
- atomic_group: The atomic group associated with this label.
+ Required Fields: :attr:`name`
+
+ Optional Fields: :attr:`kernel_config`, :attr:`platform`,
+ :attr:`only_if_needed`, :attr:`atomic_group`
+
+ Internal Fields: :attr:`invalid`
"""
+ #: The name of the label. This is a required field and it must be unique
name = dbmodels.CharField(max_length=255, unique=True)
+
+ #: URL/path to kernel config for jobs run on this label
kernel_config = dbmodels.CharField(max_length=255, blank=True)
+
+ #: If True, this is a platform label (defaults to False)
platform = dbmodels.BooleanField(default=False)
+
+ #: Internal field, used by
+ #: :class:`autotest.frontend.afe.model_logic.ModelWithInvalid`
invalid = dbmodels.BooleanField(default=False,
editable=frontend_settings.FULL_ADMIN)
+
+ #: If True, a Host with this label can only be used if that label is
+ #: requested by the job/test (either as the meta_host or in the
+ #: job_dependencies).
only_if_needed = dbmodels.BooleanField(default=False)
+ #: The atomic group associated with this label.
+ atomic_group = dbmodels.ForeignKey(AtomicGroup, null=True, blank=True)
+
name_field = 'name'
objects = model_logic.ModelWithInvalidManager()
valid_objects = model_logic.ValidObjectsManager()
- atomic_group = dbmodels.ForeignKey(AtomicGroup, null=True, blank=True)
def clean_object(self):
+ """
+ Clears this label from all hosts and tests it's associated with
+
+ This method is required by
+ :class:`autotest.frontend.afe.model_logic.ModelWithInvalid`
+ """
+
self.host_set.clear()
self.test_set.clear()
def enqueue_job(self, job, profile, atomic_group=None, is_template=False):
- """Enqueue a job on any host of this label."""
+ """
+ Enqueue a job on any host of this label
+
+ :param job: the :class:`Job` that will be sent to any host having this
+ label
+ :type job: :class:`Job`
+ :param profile: a value for :attr:`HostQueueEntry.profile`
+ :type profile: string
+ :param atomic_group: The named collection of hosts to be scheduled all
+ at once
+ :type atomic_group: :class:`AtomicGroup`
+ :param is_template: whether the job is a template (True) or a regular
+ job (False). Default is a regular job (False).
+ :type is_template: boolean
+ """
queue_entry = HostQueueEntry.create(meta_host=self, job=job,
profile=profile,
is_template=is_template,
@@ -126,8 +181,9 @@ class Drone(dbmodels.Model, model_logic.ModelExtensions):
"""
A scheduler drone
- hostname: the drone's hostname
+ Required Field: :attr:`hostname`
"""
+ #: the drone's hostname
hostname = dbmodels.CharField(max_length=255, unique=True)
name_field = 'hostname'
@@ -155,13 +211,12 @@ class Drone(dbmodels.Model, model_logic.ModelExtensions):
class DroneSet(dbmodels.Model, model_logic.ModelExtensions):
"""
- A set of scheduler drones
+ A set of scheduler :class:`drones <Drone>`
These will be used by the scheduler to decide what drones a job is allowed
to run on.
- name: the drone set's name
- drones: the drones that are part of the set
+ Required Fields: :attr:`name`
"""
DRONE_SETS_ENABLED = settings.get_value('SCHEDULER', 'drone_sets_enabled',
type=bool, default=False)
@@ -169,7 +224,10 @@ class DroneSet(dbmodels.Model, model_logic.ModelExtensions):
'default_drone_set_name',
default=None)
+ #: the drone set's name
name = dbmodels.CharField(max_length=255, unique=True)
+
+ #: the :class:`drones <Drone>` that are part of the set
drones = dbmodels.ManyToManyField(Drone, db_table='afe_drone_sets_drones')
name_field = 'name'
@@ -190,25 +248,40 @@ class DroneSet(dbmodels.Model, model_logic.ModelExtensions):
@classmethod
def drone_sets_enabled(cls):
+ '''
+ Returns whether the drone set feature is enabled on the scheduler,
+ as set in the configuration file.
+ '''
return cls.DRONE_SETS_ENABLED
@classmethod
def default_drone_set_name(cls):
+ '''
+ Returns the default drone set name as set in the configuration file
+ '''
return cls.DEFAULT_DRONE_SET_NAME
@classmethod
def get_default(cls):
+ '''
+ Returns the default :class:`DroneSet` instance from the database
+ '''
return cls.smart_get(cls.DEFAULT_DRONE_SET_NAME)
@classmethod
def resolve_name(cls, drone_set_name):
"""
- Returns the name of one of these, if not None, in order of preference:
- 1) the drone set given,
- 2) the current user's default drone set, or
+ Returns the name of one of three possible :class:`drone sets <DroneSet>`
+
+ The following order of preference is used when looking up the
+ :class:`DroneSet` name:
+
+ 1) the drone set given
+ 2) the current user's default drone set
3) the global default drone set
or returns None if drone sets are disabled
@@ -237,12 +310,13 @@ class DroneSet(dbmodels.Model, model_logic.ModelExtensions):
class User(dbmodels.Model, model_logic.ModelExtensions):
- """\
- Required:
- login :user login name
+ """
+ A user account with a login name, privileges and preferences
- Optional:
- access_level: 0=User (default), 1=Admin, 100=Root
+ Required Fields: :attr:`login`
+
+ Optional Fields: :attr:`access_level`, :attr:`reboot_before`,
+ :attr:`reboot_after`, :attr:`drone_set`, :attr:`show_experimental`
"""
ACCESS_ROOT = 100
ACCESS_ADMIN = 1
@@ -250,17 +324,27 @@ class User(dbmodels.Model, model_logic.ModelExtensions):
AUTOTEST_SYSTEM = 'autotest_system'
+ #: user login name
login = dbmodels.CharField(max_length=255, unique=True)
+
+ #: a numeric privilege level that must be one of: 0=User (default),
+ #: 1=Admin, 100=Root
access_level = dbmodels.IntegerField(default=ACCESS_USER, blank=True)
- # user preferences
+ #: whether to reboot hosts before a job by default
reboot_before = dbmodels.SmallIntegerField(
choices=model_attributes.RebootBefore.choices(), blank=True,
default=DEFAULT_REBOOT_BEFORE)
+
+ #: whether to reboot hosts after a job by default
reboot_after = dbmodels.SmallIntegerField(
choices=model_attributes.RebootAfter.choices(), blank=True,
default=DEFAULT_REBOOT_AFTER)
+
+ #: a :class:`DroneSet` that will be used by default for this user's jobs
drone_set = dbmodels.ForeignKey(DroneSet, null=True, blank=True)
+
+ #: whether to show tests marked as experimental to this user
show_experimental = dbmodels.BooleanField(default=False)
name_field = 'login'
@@ -280,6 +364,12 @@ class User(dbmodels.Model, model_logic.ModelExtensions):
def is_superuser(self):
+ """
+ Returns whether the user is a super user
+
+ :return: True if the user is a super user, False otherwise
+ :rtype: boolean
+ """
return self.access_level >= self.ACCESS_ROOT
@@ -302,43 +392,58 @@ class User(dbmodels.Model, model_logic.ModelExtensions):
class Host(model_logic.ModelWithInvalid, dbmodels.Model,
model_logic.ModelWithAttributes):
- """\
- Required:
- hostname
-
- optional:
- locked: if true, host is locked and will not be queued
-
- Internal:
- synch_id: currently unused
- status: string describing status of host
- invalid: true if the host has been deleted
- protection: indicates what can be done to this host during repair
- locked_by: user that locked the host, or null if the host is unlocked
- lock_time: DateTime at which the host was locked
- dirty: true if the host has been used without being rebooted
+ """
+ A machine on which a :class:`job <Job>` will run
+
+ Required fields: :attr:`hostname`
+
+ Optional fields: :attr:`locked`
+
+ Internal fields: :attr:`synch_id`, :attr:`status`, :attr:`invalid`,
+ :attr:`protection`, :attr:`locked_by`, :attr:`lock_time`, :attr:`dirty`
"""
Status = enum.Enum('Verifying', 'Running', 'Ready', 'Repairing',
'Repair Failed', 'Cleaning', 'Pending',
string_values=True)
Protection = host_protections.Protection
+ #: the name of the machine, usually the FQDN or IP address
hostname = dbmodels.CharField(max_length=255, unique=True)
+
+ #: labels that are set on this host
labels = dbmodels.ManyToManyField(Label, blank=True,
db_table='afe_hosts_labels')
+
+ #: if true, host is locked and will not be queued
locked = dbmodels.BooleanField(default=False)
+
+ #: currently unused
synch_id = dbmodels.IntegerField(blank=True, null=True,
editable=frontend_settings.FULL_ADMIN)
+
+ #: string describing the status of the host
status = dbmodels.CharField(max_length=255, default=Status.READY,
choices=Status.choices(),
editable=frontend_settings.FULL_ADMIN)
+
+ #: true if the host has been deleted. Internal field, used by
+ #: :class:`autotest.frontend.afe.model_logic.ModelWithInvalid`
invalid = dbmodels.BooleanField(default=False,
editable=frontend_settings.FULL_ADMIN)
+
+ #: indicates what can be done to this host during repair
protection = dbmodels.SmallIntegerField(null=False, blank=True,
choices=host_protections.choices,
default=host_protections.default)
+
+ #: :class:`user <User>` that locked this host, or null if the host is
+ #: unlocked
locked_by = dbmodels.ForeignKey(User, null=True, blank=True,
editable=False)
+
+ #: Date and time at which the host was locked
lock_time = dbmodels.DateTimeField(null=True, blank=True, editable=False)
+
+ #: true if the host has been used without being rebooted
dirty = dbmodels.BooleanField(default=True,
editable=frontend_settings.FULL_ADMIN)
@@ -354,6 +459,16 @@ class Host(model_logic.ModelWithInvalid, dbmodels.Model,
@staticmethod
def create_one_time_host(hostname):
+ """
+ Creates a host that will be available for a single job run
+
+ Internally, a :class:`host <Host>` is created with the :attr:`invalid`
+ attribute set to True. This way, it will **not** be available to have
+ jobs queued to it.
+
+ :param hostname: the name of the host to create
+ :type hostname: string
+ :returns: the one-time, invalid, :class:`Host`
+ :rtype: :class:`Host`
+ """
query = Host.objects.filter(hostname=hostname)
if query.count() == 0:
host = Host(hostname=hostname, invalid=True)
@@ -420,7 +535,21 @@ class Host(model_logic.ModelWithInvalid, dbmodels.Model,
def enqueue_job(self, job, profile, atomic_group=None, is_template=False):
- """Enqueue a job on this host."""
+ """
+ Enqueue a job on this host
+
+ :param job: the :class:`Job` that will be sent to this host
+ :type job: :class:`Job`
+ :param profile: a value for :attr:`HostQueueEntry.profile`
+ :type profile: string
+ :param atomic_group: The named collection of hosts to be scheduled all
+ at once
+ :type atomic_group: :class:`AtomicGroup`
+ :param is_template: whether the job is a template (True) or a regular
+ job (False). Default is a regular job (False).
+ :type is_template: boolean
+ """
queue_entry = HostQueueEntry.create(host=self, job=job,
profile=profile,
is_template=is_template,
atomic_group=atomic_group)
@@ -484,9 +613,16 @@ class Host(model_logic.ModelWithInvalid, dbmodels.Model,
class HostAttribute(dbmodels.Model):
- """Arbitrary keyvals associated with hosts."""
+ """
+ Arbitrary keyvals associated with hosts
+
+ Required Fields: :attr:`host`, :attr:`attribute`, :attr:`value`
+ """
+ #: reference to a :class:`Host`
host = dbmodels.ForeignKey(Host)
+ #: name of the attribute to set on the :class:`Host`
attribute = dbmodels.CharField(max_length=90, blank=False)
+ #: value for the attribute
value = dbmodels.CharField(max_length=300, blank=False)
objects = model_logic.ExtendedManager()
@@ -497,49 +633,66 @@ class HostAttribute(dbmodels.Model):
class Test(dbmodels.Model, model_logic.ModelExtensions):
"""
- Required:
- author: author name
- description: description of the test
- name: test name
- time: short, medium, long
- test_class: This describes the class for your the test belongs in.
- test_category: This describes the category for your tests
- test_type: Client or Server
- path: path to pass to run_test()
- sync_count: is a number >=1 (1 being the default). If it's 1, then it's an
- async job. If it's >1 it's sync job for that number of machines
- i.e. if sync_count = 2 it is a sync job that requires two
- machines.
- Optional:
- dependencies: What the test requires to run. Comma deliminated list
- dependency_labels: many-to-many relationship with labels corresponding to
- test dependencies.
- experimental: If this is set to True production servers will ignore the test
- run_verify: Whether or not the scheduler should run the verify stage
+ A test that can be scheduled and run on a :class:`host <Host>`
+
+ Required Fields: :attr:`author`, :attr:`description`, :attr:`name`,
+ :attr:`test_time`, :attr:`test_class`, :attr:`test_category`,
+ :attr:`test_type`, :attr:`path`
+
+ Optional Fields: :attr:`sync_count`, :attr:`dependencies`,
+ :attr:`dependency_labels`, :attr:`experimental`, :attr:`run_verify`
"""
TestTime = enum.Enum('SHORT', 'MEDIUM', 'LONG', start_value=1)
TestTypes = model_attributes.TestTypes
# TODO(showard) - this should be merged with Job.ControlType (but right
# now they use opposite values)
+ #: test name
name = dbmodels.CharField(max_length=255, unique=True)
+
+ #: author name
author = dbmodels.CharField(max_length=255, blank=False)
+
+ #: The class this test belongs to
test_class = dbmodels.CharField(max_length=255, blank=False)
+
+ #: The category this test belongs to
test_category = dbmodels.CharField(max_length=255, blank=False)
+
+ #: What the test requires to run. Comma delimited list
dependencies = dbmodels.CharField(max_length=255, blank=True)
+
+ #: description of the test
description = dbmodels.TextField(blank=True)
+
+ #: If this is set to True production servers will ignore the test
experimental = dbmodels.BooleanField(default=True)
+
+ #: Whether or not the scheduler should run the verify stage
run_verify = dbmodels.BooleanField(default=True)
+
+ #: short, medium, long
test_time = dbmodels.SmallIntegerField(choices=TestTime.choices(),
default=TestTime.MEDIUM)
+
+ #: Client or Server
test_type = dbmodels.SmallIntegerField(choices=TestTypes.choices(),
default=TestTypes.CLIENT)
+
+ #: a number >= 1 (1 being the default). If it's 1, the job is
+ #: asynchronous. If it's > 1, the job is synchronous for that number of
+ #: machines, i.e. sync_count = 2 is a sync job that requires two
+ #: machines.
sync_count = dbmodels.PositiveIntegerField(default=1)
+
+ #: path to pass to run_test()
path = dbmodels.CharField(max_length=255, unique=True, blank=False)
+ #: many-to-many relationship with labels corresponding to test dependencies
dependency_labels = (
dbmodels.ManyToManyField(Label, blank=True,
db_table='afe_autotests_dependency_labels'))
+
name_field = 'name'
objects = model_logic.ExtendedManager()
@@ -561,6 +714,8 @@ class Test(dbmodels.Model, model_logic.ModelExtensions):
class TestParameter(dbmodels.Model):
"""
A declared parameter of a test
+
+ Required Fields: :attr:`test`, :attr:`name`
"""
test = dbmodels.ForeignKey(Test)
name = dbmodels.CharField(max_length=255)
@@ -619,6 +774,13 @@ class AclGroup(dbmodels.Model, model_logic.ModelExtensions):
@staticmethod
def check_for_acl_violation_hosts(hosts):
+ '''
+ Checks whether the current user is allowed to send a job to the
+ given hosts
+
+ :param hosts: a list of :class:`hosts <Host>`
+ :type hosts: list of :class:`Host`
+ :raises: AclAccessViolation if the user lacks access to any of the hosts
+ '''
user = User.current_user()
if user.is_superuser():
return
@@ -639,9 +801,9 @@ class AclGroup(dbmodels.Model, model_logic.ModelExtensions):
def check_abort_permissions(queue_entries):
"""
look for queue entries that aren't abortable, meaning
- * the job isn't owned by this user, and
- * the machine isn't ACL-accessible, or
- * the machine is in the "Everyone" ACL
+ * the job isn't owned by this user, and
+ * the machine isn't ACL-accessible, or
+ * the machine is in the "Everyone" ACL
"""
user = User.current_user()
if user.is_superuser():
@@ -749,10 +911,10 @@ class Kernel(dbmodels.Model):
"""
Creates all kernels in the kernel list
- @param kernel_list A list of dictionaries that describe the kernels, in
- the same format as the 'kernel' argument to
- rpc_interface.generate_control_file
- @returns a list of the created kernels
+ :param kernel_list: A list of dictionaries that describe the kernels, in
+ the same format as the 'kernel' argument to
+ rpc_interface.generate_control_file
+ :returns: a list of the created kernels
"""
if not kernel_list:
return None
@@ -894,27 +1056,8 @@ class JobManager(model_logic.ExtendedManager):
class Job(dbmodels.Model, model_logic.ModelExtensions):
- """\
- owner: username of job owner
- name: job name (does not have to be unique)
- priority: Low, Medium, High, Urgent (or 0-3)
- control_file: contents of control file
- control_type: Client or Server
- created_on: date of job creation
- submitted_on: date of job submission
- synch_count: how many hosts should be used per autoserv execution
- run_verify: Whether or not to run the verify phase
- timeout: hours from queuing time until job times out
- max_runtime_hrs: hours from job starting time until job times out
- email_list: list of people to email on completion delimited by any of:
- white space, ',', ':', ';'
- dependency_labels: many-to-many relationship with labels corresponding to
- job dependencies
- reboot_before: Never, If dirty, or Always
- reboot_after: Never, If all tests passed, or Always
- parse_failed_repair: if True, a failed repair launched by this job will have
- its results parsed as part of the job.
- drone_set: The set of drones to run this job on
+ """
+ A test job scheduled through the AFE application
"""
DEFAULT_TIMEOUT = settings.get_value('AUTOTEST_WEB', 'job_timeout_default',
default=240)
@@ -929,34 +1072,70 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
Priority = enum.Enum('Low', 'Medium', 'High', 'Urgent')
ControlType = enum.Enum('Server', 'Client', start_value=1)
+ #: username of job owner
owner = dbmodels.CharField(max_length=255)
+
+ #: job name (does not have to be unique)
name = dbmodels.CharField(max_length=255)
+
+ #: Low, Medium, High, Urgent (or 0-3)
priority = dbmodels.SmallIntegerField(choices=Priority.choices(),
blank=True, # to allow 0
default=Priority.MEDIUM)
+
+ #: contents of control file
control_file = dbmodels.TextField(null=True, blank=True)
+
+ #: Client or Server
control_type = dbmodels.SmallIntegerField(choices=ControlType.choices(),
blank=True, # to allow 0
default=ControlType.CLIENT)
+
+ #: date of job creation
created_on = dbmodels.DateTimeField()
+
+ #: how many hosts should be used per autoserv execution
synch_count = dbmodels.IntegerField(null=True, default=1)
+
+ #: hours from queuing time until job times out
timeout = dbmodels.IntegerField(default=DEFAULT_TIMEOUT)
+
+ #: Whether or not to run the verify phase
run_verify = dbmodels.BooleanField(default=True)
+
+
+ #: list of people to email on job completion. Delimited by one of:
+ #: white space, comma (``,``), colon (``:``) or semi-colon (``;``)
email_list = dbmodels.CharField(max_length=250, blank=True)
+
+ #: many-to-many relationship with labels corresponding to job dependencies
dependency_labels = (
dbmodels.ManyToManyField(Label, blank=True,
db_table='afe_jobs_dependency_labels'))
+
+ #: Never, If dirty, or Always
reboot_before = dbmodels.SmallIntegerField(
choices=model_attributes.RebootBefore.choices(), blank=True,
default=DEFAULT_REBOOT_BEFORE)
+
+ #: Never, If all tests passed, or Always
reboot_after = dbmodels.SmallIntegerField(
choices=model_attributes.RebootAfter.choices(), blank=True,
default=DEFAULT_REBOOT_AFTER)
+
+ #: if True, a failed repair launched by this job will have its results
+ #: parsed as part of the job.
parse_failed_repair = dbmodels.BooleanField(
default=DEFAULT_PARSE_FAILED_REPAIR)
+
+ #: hours from job starting time until job times out
max_runtime_hrs = dbmodels.IntegerField(default=DEFAULT_MAX_RUNTIME_HRS)
+
+ #: the set of drones to run this job on
drone_set = dbmodels.ForeignKey(DroneSet, null=True, blank=True)
+ #: a reference to a :class:`ParameterizedJob` object. Can be NULL if the
+ #: job is not parameterized
parameterized_job = dbmodels.ForeignKey(ParameterizedJob, null=True,
blank=True)
@@ -983,6 +1162,12 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
First, either control_file must be set, or parameterized_job must be
set, but not both. Second, parameterized_job must be set if and only if
the parameterized_jobs option in the global config is set to True.
+
+ :param control_file: the contents of the control file
+ :type control_file: string
+ :param parameterized_job: a :class:`ParameterizedJob`
+ :type parameterized_job: :class:`ParameterizedJob`
+ :return: None
"""
if not (bool(control_file) ^ bool(parameterized_job)):
raise Exception('Job must have either control file or '
@@ -999,9 +1184,20 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
@classmethod
def create(cls, owner, options, hosts):
- """\
+ """
Creates a job by taking some information (the listed args)
and filling in the rest of the necessary information.
+
+ :param owner: the username of the job owner. Sets the attribute
+ :attr:`owner`
+ :type owner: string
+ :param options: a dictionary with parameters to be passed to the
+ class method :meth:`add_object <Job.add_object>`
+ :type options: dict
+ :param hosts: a list of :class:`hosts <Host>` that will be used to
+ check whether the user is allowed to send a job to them (by means
+ of :meth:`AclGroup.check_for_acl_violation_hosts`)
+ :type hosts: list of :class:`Host`
"""
AclGroup.check_for_acl_violation_hosts(hosts)
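
As a usage illustration of the ACL check referenced in the Job.create()
docstring above (the hostnames are hypothetical):

    # Hypothetical sketch of the ACL check that precedes job creation.
    from autotest.frontend.afe.models import AclAccessViolation, AclGroup, Host

    hosts = list(Host.objects.filter(hostname__in=['host1.example.com',
                                                   'host2.example.com']))
    try:
        AclGroup.check_for_acl_violation_hosts(hosts)
    except AclAccessViolation as details:
        print('Current user may not queue jobs on these hosts: %s' % details)
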
diff --git a/frontend/afe/rpc_utils.py b/frontend/afe/rpc_utils.py
index ba1849a..2147603 100644
--- a/frontend/afe/rpc_utils.py
+++ b/frontend/afe/rpc_utils.py
@@ -462,6 +462,24 @@ def check_for_duplicate_hosts(host_objects):
def create_new_job(owner, options, host_objects, profiles, metahost_objects,
metahost_profiles, atomic_group=None):
+ '''
+ Create a new job
+
+ :param owner: the username of the job owner
+ :type owner: string
+ :param options: a dictionary with the job options (name, priority,
+ control file, etc)
+ :type options: dict
+ :param host_objects: hosts the job will be explicitly scheduled on
+ :type host_objects: list of :class:`Host` objects
+ :param profiles: profiles to be used by the given hosts
+ :type profiles: list of strings
+ :param metahost_objects: labels to be used as meta hosts
+ :type metahost_objects: list of :class:`Label` objects
+ :param metahost_profiles: profiles to be used by the meta hosts
+ :type metahost_profiles: list of strings
+ :param atomic_group: an optional atomic group to schedule the job on
+ :type atomic_group: :class:`AtomicGroup`
+ '''
labels_by_name = dict((label.name, label)
for label in models.Label.objects.all())
all_host_objects = host_objects + metahost_objects
diff --git a/frontend/tko/models.py b/frontend/tko/models.py
index 188a9cb..be23cf6 100644
--- a/frontend/tko/models.py
+++ b/frontend/tko/models.py
@@ -106,9 +106,19 @@ class TempManager(model_logic.ExtendedManager):
class Machine(dbmodels.Model):
+ '''
+ A machine used to run a test
+ '''
+ #: A numeric and automatic integer that uniquely identifies a given
+ #: machine. This is the primary key for the resulting table created
+ #: from this model.
machine_idx = dbmodels.AutoField(primary_key=True)
+ #: The name, such as an FQDN, of the machine that ran the test. Must be
+ #: unique.
hostname = dbmodels.CharField(unique=True, max_length=255)
+ #: the machine group
machine_group = dbmodels.CharField(blank=True, max_length=240)
+ #: the machine owner
owner = dbmodels.CharField(blank=True, max_length=240)
class Meta:
@@ -116,9 +126,18 @@ class Machine(dbmodels.Model):
class Kernel(dbmodels.Model):
+ '''
+ The Linux Kernel used during a test
+ '''
+ #: A numeric and automatic integer that uniquely identifies a given
+ #: kernel. This is the primary key for the resulting table created
+ #: from this model.
kernel_idx = dbmodels.AutoField(primary_key=True)
+ #: the kernel hash
kernel_hash = dbmodels.CharField(max_length=105, editable=False)
+ #: the kernel base version string
base = dbmodels.CharField(max_length=90)
+ #: a printable, human readable representation of the kernel version
printable = dbmodels.CharField(max_length=300)
class Meta:
@@ -126,9 +145,16 @@ class Kernel(dbmodels.Model):
class Patch(dbmodels.Model):
+ '''
+ A Patch applied to a Linux Kernel source during the build process
+ '''
+ #: A reference to a :class:`Kernel`
kernel = dbmodels.ForeignKey(Kernel, db_column='kernel_idx')
+ #: A descriptive name for the patch
name = dbmodels.CharField(blank=True, max_length=240)
+ #: The URL where the patch was fetched from
url = dbmodels.CharField(blank=True, max_length=900)
+ #: the patch hash
the_hash = dbmodels.CharField(blank=True, max_length=105, db_column='hash')
class Meta:
@@ -136,7 +162,18 @@ class Patch(dbmodels.Model):
class Status(dbmodels.Model):
+ '''
+ The possible results of a test
+
+ These objects are populated automatically from a
+ :ref:`fixture file <django:initial-data-via-fixtures>`
+ '''
+ #: A numeric and automatic integer that uniquely identifies a given
+ #: status. This is the primary key for the resulting table created
+ #: from this model.
status_idx = dbmodels.AutoField(primary_key=True)
+ #: A short descriptive name for the status. This exact name is searched for
+ #: while the TKO parser is reading and parsing status files
word = dbmodels.CharField(max_length=30)
class Meta:
@@ -144,6 +181,9 @@ class Status(dbmodels.Model):
class Job(dbmodels.Model, model_logic.ModelExtensions):
+ """
+ A test job, having one or more tests and their results
+ """
job_idx = dbmodels.AutoField(primary_key=True)
tag = dbmodels.CharField(unique=True, max_length=100)
label = dbmodels.CharField(max_length=300)
@@ -152,6 +192,9 @@ class Job(dbmodels.Model, model_logic.ModelExtensions):
queued_time = dbmodels.DateTimeField(null=True, blank=True)
started_time = dbmodels.DateTimeField(null=True, blank=True)
finished_time = dbmodels.DateTimeField(null=True, blank=True)
+
+ #: If this job was scheduled through the AFE application, this points
+ #: to the related :class:`autotest.frontend.afe.models.Job` object
afe_job_id = dbmodels.IntegerField(null=True, default=None)
objects = model_logic.ExtendedManager()
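
A small, hypothetical query sketch showing how the afe_job_id field above
relates a TKO job back to the AFE job that scheduled it:

    # Hypothetical sketch relating TKO results to the originating AFE job
    # through the afe_job_id field documented above.
    from autotest.frontend.afe import models as afe_models
    from autotest.frontend.tko import models as tko_models

    afe_job = afe_models.Job.objects.latest('created_on')
    for tko_job in tko_models.Job.objects.filter(afe_job_id=afe_job.id):
        print('%s -> %s' % (afe_job.name, tko_job.tag))
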
--
1.7.11.7