Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 1 addition & 7 deletions docs/developer_guide.rst
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,6 @@ Optional Tests
Some tests require additional tools to be installed and are not enabled by
default. You can enable them by passing additional flags to ``lit``:

``-Dmysql=1``
Enable mysql database support testing. This requires MySQL-python to be
installed and expects the ``mysqld`` and ``mysqladmin`` binaries in your path.
Note that you do not need to setup an actual server, the tests will create
temporary instances on demand.

``-Dtidylib=1``
Check generated html pages for errors using ``tidy-html5``. This requires
``pytidylib`` and ``tidy-html5`` to be installed.
Expand All @@ -68,7 +62,7 @@ default. You can enable them by passing additional flags to ``lit``:

Example::

lit -sv -Dmysql=1 -Dtidylib=1 ./tests
lit -sv -Dtidylib=1 ./tests

Publishing a new version of LNT
-------------------------------
Expand Down
9 changes: 4 additions & 5 deletions docs/intro.rst
Original file line number Diff line number Diff line change
Expand Up @@ -86,17 +86,16 @@ for the templating engine. My hope is to eventually move to a more AJAXy web
interface.

The database layer uses SQLAlchemy for its ORM, and is typically backed by
SQLite, although I have tested on MySQL in the past, and supporting other
databases should be trivial. My plan is to always support SQLite as this allows
the possibility of developers easily running their own LNT installation for
viewing nightly test results, and to run with whatever DB makes the most sense
SQLite for local installations and PostgreSQL in production. The plan is to
always support SQLite as this allows developers to easily run their own LNT
installation locally, and to run with whatever DB makes the most sense
on the server.

Running a LNT Server Locally
----------------------------

LNT can accommodate many more users in the production config. In production:
- Postgres or MySQL should be used as the database.
- Postgres should be used as the database.
- A proper wsgi server should be used, in front of a proxy like Nginx or Apache.

To install the extra packages for the server config::
Expand Down
3 changes: 0 additions & 3 deletions lnt/server/db/migrations/upgrade_0_to_1.py
Original file line number Diff line number Diff line change
Expand Up @@ -183,8 +183,6 @@ def get_base_for_testsuite(test_suite):

class Machine(UpdatedBase): # type: ignore[misc,valid-type]
__tablename__ = db_key_name + '_Machine'
# For case sensitive compare.
__table_args__ = {'mysql_collate': 'utf8_bin'}
id = Column("ID", Integer, primary_key=True)
name = Column("Name", String(256), index=True)

Expand Down Expand Up @@ -249,7 +247,6 @@ class Run(UpdatedBase): # type: ignore[misc,valid-type]

class Test(UpdatedBase): # type: ignore[misc,valid-type]
__tablename__ = db_key_name + '_Test'
__table_args__ = {'mysql_collate': 'utf8_bin'}
id = Column("ID", Integer, primary_key=True)
name = Column("Name", String(256), unique=True, index=True)

Expand Down
22 changes: 2 additions & 20 deletions lnt/server/db/migrations/upgrade_13_to_14.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# always the same so we have to rename from `Compile_XXXX` to `compile_XXX`.
import collections

from sqlalchemy import delete, select, update, func, and_
from sqlalchemy import delete, select, update, func

from lnt.server.db.migrations.util import introspect_table, rename_table

Expand All @@ -29,8 +29,7 @@ def _drop_suite(trans, name, engine):
drop_fields(engine, test_suite_id, 'TestSuiteOrderFields', trans)
drop_fields(engine, test_suite_id, 'TestSuiteMachineFields', trans)
drop_fields(engine, test_suite_id, 'TestSuiteRunFields', trans)

drop_samples_fields(engine, test_suite_id, trans)
drop_fields(engine, test_suite_id, 'TestSuiteSampleFields', trans)

trans.execute(delete(test_suite).where(test_suite.c.Name == name))

Expand All @@ -45,23 +44,6 @@ def drop_fields(engine, test_suite_id, name, trans):
return fields_table


def drop_samples_fields(engine, test_suite_id, trans):
"""In the TestSuiteSampleFields, drop entries related to the test_suite_id.

This extra function is needed because in MySQL it can't sort out the foreign
keys in the same table.
"""
samples_table = introspect_table(engine, 'TestSuiteSampleFields')
order_files = delete(samples_table,
and_(samples_table.c.TestSuiteID == test_suite_id,
samples_table.c.status_field.isnot(None)))
trans.execute(order_files)
order_files = delete(samples_table,
samples_table.c.TestSuiteID == test_suite_id)
trans.execute(order_files)
return samples_table


TableRename = collections.namedtuple('TableRename', 'old_name new_name')


Expand Down
5 changes: 0 additions & 5 deletions lnt/server/db/testsuitedb.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,6 @@ def set_fields_pop(self, data_dict):

class Machine(self.base, ParameterizedMixin):
__tablename__ = db_key_name + '_Machine'
__table_args__ = {'mysql_collate': 'utf8_bin'}
DEFAULT_BASELINE_REVISION = v4db.baseline_revision

fields = self.machine_fields
Expand Down Expand Up @@ -427,8 +426,6 @@ def __json__(self, flatten_order=True):

class Test(self.base, ParameterizedMixin):
__tablename__ = db_key_name + '_Test'
# utf8_bin for case sensitive compare
__table_args__ = {'mysql_collate': 'utf8_bin'}
id = Column("ID", Integer, primary_key=True)
name = Column("Name", String(256), unique=True, index=True)

Expand Down Expand Up @@ -748,7 +745,6 @@ def __repr__(self):
class Baseline(self.base, ParameterizedMixin):
"""Baselines to compare runs to."""
__tablename__ = db_key_name + '_Baseline'
__table_args__ = {'mysql_collate': 'utf8_bin'}

id = Column("ID", Integer, primary_key=True)
name = Column("Name", String(32), unique=True)
Expand Down Expand Up @@ -1033,7 +1029,6 @@ def _getOrCreateRun(self, session, run_data, machine, merge):
def _importSampleValues(self, session, tests_data, run, config):
# Load a map of all the tests, which we will extend when we find tests
# that need to be added.
# Downcast to str, so we match on MySQL.
test_cache = dict((test.name, test)
for test in session.query(self.Test))

Expand Down
86 changes: 0 additions & 86 deletions tests/SharedInputs/mysql_wrapper.sh

This file was deleted.

5 changes: 0 additions & 5 deletions tests/lit.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -53,11 +53,6 @@ postgres = shutil.which('postgres')
if postgres is not None:
config.available_features.add('postgres')

# Enable MySQL testing. This requires mysqld and mysqladmin binaries in PATH.
# (You do not need to start a server, the tests will create ad-hoc instances).
if lit_config.params.get('mysql', None):
config.available_features.add('mysql')

# Enable tidylib testing. This requires pytidylib and tidy-html5.
if lit_config.params.get('tidylib', None):
config.substitutions.append(('%{tidylib}', '--use-tidylib'))
Expand Down
15 changes: 0 additions & 15 deletions tests/lnttool/MySQLDB.shtest

This file was deleted.

12 changes: 12 additions & 0 deletions tests/lnttool/import.shtest
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
#!/bin/bash

# RUN: rm -rf %t.instance
# RUN: lnt create %t.instance

# Make sure we can import a basic test set
# RUN: lnt import %t.instance %{shared_inputs}/sample-a-small.plist

# Make sure we can import test sets where the only difference is the
# case used in the test name.
# RUN: lnt import %t.instance %{shared_inputs}/sample-arm.json --format=json
# RUN: lnt import %t.instance %{shared_inputs}/sample-arm2.json --format=json
Loading