public release 4.2.0 - see README.md and CHANGES.md for details
tests/database/test_common.py (new file, 232 lines)
@@ -0,0 +1,232 @@
"""
@package tests.database.test_common
unit tests for pmsco.database.common

the purpose of these tests is to help debug the code.

to run the tests, change to the directory which contains the tests directory, and execute =nosetests=.

@pre nose must be installed (python-nose package on Debian).

@author Matthias Muntwiler, matthias.muntwiler@psi.ch

@copyright (c) 2016 by Paul Scherrer Institut @n
Licensed under the Apache License, Version 2.0 (the "License"); @n
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
"""

import unittest
import datetime
import sqlalchemy.exc
import pmsco.database.access as db
import pmsco.database.common as db_common
import pmsco.database.orm as orm
import pmsco.dispatch as dispatch


def setup_sample_database(session):
    """
    populate the session with a small sample dataset:
    two projects with one job each, three parameter keys, two models,
    one result and three parameter values.
    """
    p1 = orm.Project(name="oldproject", code="oldcode")
    p2 = orm.Project(name="unittest", code="testcode")
    j1 = orm.Job(project=p1, name="oldjob", mode="oldmode", machine="oldhost", datetime=datetime.datetime.now())
    j2 = orm.Job(project=p2, name="testjob", mode="testmode", machine="testhost", datetime=datetime.datetime.now())
    pk1 = orm.Param(key='parA')
    pk2 = orm.Param(key='parB')
    pk3 = orm.Param(key='parC')
    m1 = orm.Model(job=j1, model=91)
    m2 = orm.Model(job=j2, model=92)
    r1 = orm.Result(calc_id=dispatch.CalcID(91, -1, -1, -1, -1), rfac=0.534, secs=37.9)
    r1.model = m1
    pv1 = orm.ParamValue(model=m1, param=pk1, value=1.234, delta=0.1234)
    pv2 = orm.ParamValue(model=m1, param=pk2, value=5.678, delta=-0.5678)
    pv3 = orm.ParamValue(model=m2, param=pk3, value=6.785, delta=0.6785)
    objects = {'p1': p1, 'p2': p2, 'j1': j1, 'j2': j2, 'm1': m1, 'm2': m2, 'r1': r1,
               'pv1': pv1, 'pv2': pv2, 'pv3': pv3, 'pk1': pk1, 'pk2': pk2, 'pk3': pk3}
    session.add_all(objects.values())
    session.commit()
    return objects


class TestDatabaseCommon(unittest.TestCase):
    def setUp(self):
        self.db = db.DatabaseAccess()
        self.db.connect(":memory:")

    def tearDown(self):
        pass

    @classmethod
    def setup_class(cls):
        # before any methods in this class
        pass

    @classmethod
    def teardown_class(cls):
        # teardown_class() after any methods in this class
        pass

    def test_setup_database(self):
        with self.db.session() as session:
            setup_sample_database(session)
            self.assertEqual(session.query(orm.Project).count(), 2)
            self.assertEqual(session.query(orm.Job).count(), 2)
            self.assertEqual(session.query(orm.Param).count(), 3)
            self.assertEqual(session.query(orm.Model).count(), 2)
            self.assertEqual(session.query(orm.Result).count(), 1)
            self.assertEqual(session.query(orm.ParamValue).count(), 3)

    def test_get_project(self):
        with self.db.session() as session:
            p1 = orm.Project(name="p1")
            p2 = orm.Project(name="p2")
            p3 = orm.Project(name="p3")
            p4 = orm.Project(name="p4")
            session.add_all([p1, p2, p3])
            session.commit()
            q1 = db_common.get_project(session, p1)
            q2 = db_common.get_project(session, p2.id)
            q3 = db_common.get_project(session, p3.name)
            q4 = db_common.get_project(session, p4)
            self.assertIs(q1, p1, "by object")
            self.assertIs(q2, p2, "by id")
            self.assertIs(q3, p3, "by name")
            self.assertIs(q4, p4, "detached object by object")
            with self.assertRaises(sqlalchemy.exc.InvalidRequestError, msg="detached object by name"):
                db_common.get_project(session, p4.name)

    def test_get_job(self):
        with self.db.session() as session:
            p1 = orm.Project(name="p1")
            p2 = orm.Project(name="p2")
            p3 = orm.Project(name="p3")
            p4 = orm.Project(name="p4")
            j1 = orm.Job(name="j1")
            j1.project = p1
            j2 = orm.Job(name="j2")
            j2.project = p2
            j3 = orm.Job(name="j1")
            j3.project = p3
            j4 = orm.Job(name="j4")
            j4.project = p4
            session.add_all([p1, p2, p3, j1, j2, j3])
            session.commit()

            self.assertIsNot(j3, j1, "jobs with same name")
            q1 = db_common.get_job(session, p1, j1)
            q2 = db_common.get_job(session, p2, j2.id)
            q3 = db_common.get_job(session, p3, j3.name)
            q4 = db_common.get_job(session, p4, j4)
            self.assertIs(q1, j1, "by object")
            self.assertIs(q2, j2, "by id")
            self.assertIs(q3, j3, "by name")
            self.assertIs(q4, j4, "detached object by object")
            with self.assertRaises(sqlalchemy.exc.InvalidRequestError, msg="detached object by name"):
                db_common.get_job(session, p4, j4.name)
            q5 = db_common.get_job(session, p1, j4)
            self.assertIs(q5, j4)

    def test_register_project(self):
        with self.db.session() as session:
            id1 = db_common.register_project(session, "unittest1", "Atest", allow_existing=True)
            self.assertIsInstance(id1, orm.Project)
            id2 = db_common.register_project(session, "unittest2", "Btest", allow_existing=True)
            self.assertIsInstance(id2, orm.Project)
            id3 = db_common.register_project(session, "unittest1", "Ctest", allow_existing=True)
            self.assertIsInstance(id3, orm.Project)
            self.assertNotEqual(id1, id2)
            self.assertEqual(id1, id3)
            session.commit()

            c = session.execute("select count(*) from Projects")
            row = c.fetchone()
            self.assertEqual(row[0], 2)
            c = session.execute("select name, code from Projects where id=:id", {'id': id1.id})
            row = c.fetchone()
            self.assertIsNotNone(row)
            self.assertEqual(len(row), 2)
            self.assertEqual(row[0], "unittest1")
            self.assertEqual(row[1], "Atest")
            self.assertEqual(row['name'], "unittest1")
            self.assertEqual(row['code'], "Atest")

            with self.assertRaises(ValueError):
                db_common.register_project(session, "unittest1", "Ctest")

    def test_register_job(self):
        with self.db.session() as session:
            pid1 = db_common.register_project(session, "unittest1", "Acode")
            pid2 = db_common.register_project(session, "unittest2", "Bcode")
            dt1 = datetime.datetime.now()

            # insert new job
            id1 = db_common.register_job(session, pid1, "Ajob", mode="Amode", machine="local", git_hash="Ahash",
                                         datetime=dt1, description="Adesc")
            self.assertIsInstance(id1, orm.Job)
            # insert another job
            id2 = db_common.register_job(session, pid1.id, "Bjob", mode="Bmode", machine="local", git_hash="Ahash",
                                         datetime=dt1, description="Adesc")
            self.assertIsInstance(id2, orm.Job)
            # update first job
            id3 = db_common.register_job(session, "unittest1", "Ajob", mode="Cmode", machine="local", git_hash="Chash",
                                         datetime=dt1, description="Cdesc",
                                         allow_existing=True)
            self.assertIsInstance(id3, orm.Job)
            # insert another job with same name but in other project
            id4 = db_common.register_job(session, pid2, "Ajob", mode="Dmode", machine="local", git_hash="Dhash",
                                         datetime=dt1, description="Ddesc")
            self.assertIsInstance(id4, orm.Job)
            # existing job
            with self.assertRaises(ValueError):
                db_common.register_job(session, pid1, "Ajob", mode="Emode", machine="local", git_hash="Dhash",
                                       datetime=dt1, description="Ddesc")

            self.assertIsNot(id1, id2)
            self.assertIs(id1, id3)
            self.assertIsNot(id1, id4)

            c = session.execute("select count(*) from Jobs")
            row = c.fetchone()
            self.assertEqual(row[0], 3)
            c = session.execute("select name, mode, machine, git_hash, datetime, description from Jobs where id=:id",
                                {'id': id1.id})
            row = c.fetchone()
            self.assertIsNotNone(row)
            self.assertEqual(len(row), 6)
            self.assertEqual(row[0], "Ajob")
            self.assertEqual(row[1], "Amode")
            self.assertEqual(row['machine'], "local")
            self.assertEqual(str(row['datetime']), str(dt1))
            self.assertEqual(row['git_hash'], "Ahash")
            self.assertEqual(row['description'], "Adesc")

    def test_register_params(self):
        with self.db.session() as session:
            setup_sample_database(session)
            model5 = {'parA': 2.341, 'parC': 6.785, '_model': 92, '_rfac': 0.453}
            db_common.register_params(session, model5)
            expected = ['parA', 'parB', 'parC']
            session.commit()

            c = session.execute("select * from Params order by key")
            results = c.fetchall()
            self.assertEqual(len(results), 3)
            result_params = [row['key'] for row in results]
            self.assertEqual(result_params, expected)

    def test_query_params(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            results = db_common.query_params(session, project=objs['p1'].id)
            expected = ['parA', 'parB']
            self.assertEqual(expected, sorted(list(results.keys())))
            self.assertIsInstance(results['parA'], orm.Param)
            self.assertIsInstance(results['parB'], orm.Param)
            results = db_common.query_params(session, project=objs['p2'].name)
            expected = ['parC']
            self.assertEqual(expected, sorted(list(results.keys())))
            self.assertIsInstance(results['parC'], orm.Param)


if __name__ == '__main__':
    unittest.main()
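Every test class in this commit uses the same fixture: a DatabaseAccess object connected to ":memory:" and then used through a session context manager. The snippet below sketches one plausible shape of that helper, an in-memory SQLite engine plus a commit-or-rollback session scope; it is an assumption for illustration, not the pmsco.database.access source.

from contextlib import contextmanager
import sqlalchemy
from sqlalchemy.orm import sessionmaker


class DatabaseAccess:
    # illustrative stand-in for pmsco.database.access.DatabaseAccess (assumed API)

    def connect(self, url=":memory:"):
        # ":memory:" maps to a private, throw-away SQLite database
        self.engine = sqlalchemy.create_engine("sqlite:///" + url)
        self._sessionmaker = sessionmaker(bind=self.engine)

    @contextmanager
    def session(self):
        # commit on clean exit, roll back on error, always close
        s = self._sessionmaker()
        try:
            yield s
            s.commit()
        except Exception:
            s.rollback()
            raise
        finally:
            s.close()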
tests/database/test_ingest.py (new file, 211 lines)
@@ -0,0 +1,211 @@
"""
@package tests.database.test_ingest
unit tests for pmsco.database.ingest

the purpose of these tests is to help debug the code.

to run the tests, change to the directory which contains the tests directory, and execute =nosetests=.

@pre nose must be installed (python-nose package on Debian).

@author Matthias Muntwiler, matthias.muntwiler@psi.ch

@copyright (c) 2016 by Paul Scherrer Institut @n
Licensed under the Apache License, Version 2.0 (the "License"); @n
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
"""

import unittest
import pmsco.database.access as db
import pmsco.database.ingest as db_ingest
import pmsco.database.orm as orm
import pmsco.dispatch as dispatch
from tests.database.test_common import setup_sample_database


class TestDatabase(unittest.TestCase):
    def setUp(self):
        self.db = db.DatabaseAccess()
        self.db.connect(":memory:")

    def tearDown(self):
        pass

    @classmethod
    def setup_class(cls):
        # before any methods in this class
        pass

    @classmethod
    def teardown_class(cls):
        # teardown_class() after any methods in this class
        pass

    def test_insert_result(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            index = dispatch.CalcID(15, 16, 17, 18, -1)
            result_data = {'parA': 4.123, 'parB': 8.567, '_rfac': 0.654, '_gen': 3, '_particle': 21, '_secs': 27.8}
            result_delta = {'parA': 0.4123, 'parB': 0.8567}
            model_obj, result_obj = db_ingest.insert_result(session, objs['j1'], index, result_data, result_delta)
            session.commit()

            # model
            q = session.query(orm.Model)
            q = q.filter(orm.Model.job_id == objs['j1'].id)
            q = q.filter(orm.Model.model == index.model)
            m = q.one()
            self.assertIsNot(model_obj, objs['m1'])
            self.assertIs(m, model_obj)
            self.assertEqual(m.id, model_obj.id)
            self.assertEqual(m.job_id, objs['j1'].id)
            self.assertEqual(m.model, index.model)
            self.assertEqual(m.gen, result_data['_gen'])
            self.assertEqual(m.particle, result_data['_particle'])

            # result
            q = session.query(orm.Result)
            q = q.filter(orm.Result.model_id == model_obj.id)
            r = q.one()
            self.assertIsNot(r, objs['r1'])
            self.assertIs(r, result_obj)
            self.assertEqual(r.id, result_obj.id)
            self.assertIs(r.model, model_obj)
            self.assertEqual(r.scan, index.scan)
            self.assertEqual(r.domain, index.domain)
            self.assertEqual(r.emit, index.emit)
            self.assertEqual(r.region, index.region)
            self.assertEqual(r.rfac, result_data['_rfac'])
            self.assertEqual(r.secs, result_data['_secs'])

            # param values
            q = session.query(orm.ParamValue)
            q = q.filter(orm.ParamValue.model_id == model_obj.id)
            pvs = q.all()
            values = {pv.param_key: pv.value for pv in pvs}
            deltas = {pv.param_key: pv.delta for pv in pvs}
            for k in result_data:
                if k[0] != '_':
                    self.assertAlmostEqual(values[k], result_data[k])
                    self.assertAlmostEqual(deltas[k], result_delta[k])
                    self.assertAlmostEqual(m.values[k], result_data[k])
                    self.assertAlmostEqual(m.deltas[k], result_delta[k])

    def test_update_result(self):
        """
        test update of an existing model and result

        update parameters parA and parB and rfac of result (91, -1, -1, -1, -1)

        @return: None
        """
        with self.db.session() as session:
            objs = setup_sample_database(session)
            index = dispatch.CalcID(91, -1, -1, -1, -1)
            result_data = {'parA': 4.123, 'parB': 8.567, '_rfac': 0.654, '_gen': 3, '_particle': 21, '_secs': 27.8}
            result_delta = {'parA': 0.4123, 'parB': 0.8567}
            model_obj, result_obj = db_ingest.insert_result(session, objs['j1'], index, result_data, result_delta)
            session.commit()

            # model
            q = session.query(orm.Model)
            q = q.filter(orm.Model.job_id == objs['j1'].id)
            q = q.filter(orm.Model.model == index.model)
            m = q.one()
            self.assertIs(model_obj, objs['m1'])
            self.assertIs(m, objs['m1'])
            self.assertEqual(m.id, model_obj.id)
            self.assertEqual(m.job_id, objs['j1'].id)
            self.assertEqual(m.model, index.model)
            self.assertEqual(m.gen, result_data['_gen'])
            self.assertEqual(m.particle, result_data['_particle'])

            # result
            q = session.query(orm.Result)
            q = q.filter(orm.Result.model_id == model_obj.id)
            r = q.one()
            self.assertIs(result_obj, objs['r1'])
            self.assertIs(r, objs['r1'])
            self.assertEqual(r.id, result_obj.id)
            self.assertIs(r.model, model_obj)
            self.assertEqual(r.scan, index.scan)
            self.assertEqual(r.domain, index.domain)
            self.assertEqual(r.emit, index.emit)
            self.assertEqual(r.region, index.region)
            self.assertEqual(r.rfac, result_data['_rfac'])
            self.assertEqual(r.secs, result_data['_secs'])

            # param values
            q = session.query(orm.ParamValue)
            q = q.filter(orm.ParamValue.model_id == model_obj.id)
            pvs = q.all()
            values = {pv.param_key: pv.value for pv in pvs}
            deltas = {pv.param_key: pv.delta for pv in pvs}
            for k in result_data:
                if k[0] != '_':
                    self.assertAlmostEqual(values[k], result_data[k])
                    self.assertAlmostEqual(deltas[k], result_delta[k])
                    self.assertAlmostEqual(m.values[k], result_data[k])
                    self.assertAlmostEqual(m.deltas[k], result_delta[k])

    def test_update_result_dict(self):
        """
        test update of an existing model and result with dictionary arguments

        update parameters parA and parB and rfac of result (91, -1, -1, -1, -1)

        @return: None
        """
        with self.db.session() as session:
            objs = setup_sample_database(session)
            result_data = {'_model': 91, '_scan': -1, '_domain': -1, '_emit': -1, '_region': -1,
                           'parA': 4.123, 'parB': 8.567, '_rfac': 0.654, '_gen': 3, '_particle': 21, '_secs': 27.8}
            result_delta = {'parA': 0.4123, 'parB': 0.8567}
            model_obj, result_obj = db_ingest.insert_result(session, objs['j1'], result_data, result_data, result_delta)
            session.commit()

            # model
            q = session.query(orm.Model)
            q = q.filter(orm.Model.job_id == objs['j1'].id)
            q = q.filter(orm.Model.model == result_data['_model'])
            m = q.one()
            self.assertIs(model_obj, objs['m1'])
            self.assertIs(m, objs['m1'])
            self.assertEqual(m.id, model_obj.id)
            self.assertEqual(m.job_id, objs['j1'].id)
            self.assertEqual(m.model, result_data['_model'])
            self.assertEqual(m.gen, result_data['_gen'])
            self.assertEqual(m.particle, result_data['_particle'])

            # result
            q = session.query(orm.Result)
            q = q.filter(orm.Result.model_id == model_obj.id)
            r = q.one()
            self.assertIs(result_obj, objs['r1'])
            self.assertIs(r, objs['r1'])
            self.assertEqual(r.id, result_obj.id)
            self.assertIs(r.model, model_obj)
            self.assertEqual(r.scan, result_data['_scan'])
            self.assertEqual(r.domain, result_data['_domain'])
            self.assertEqual(r.emit, result_data['_emit'])
            self.assertEqual(r.region, result_data['_region'])
            self.assertEqual(r.rfac, result_data['_rfac'])

            # param values
            q = session.query(orm.ParamValue)
            q = q.filter(orm.ParamValue.model_id == model_obj.id)
            pvs = q.all()
            values = {pv.param_key: pv.value for pv in pvs}
            deltas = {pv.param_key: pv.delta for pv in pvs}
            for k in result_data:
                if k[0] != '_':
                    self.assertAlmostEqual(values[k], result_data[k])
                    self.assertAlmostEqual(deltas[k], result_delta[k])
                    self.assertAlmostEqual(m.values[k], result_data[k])
                    self.assertAlmostEqual(m.deltas[k], result_delta[k])


if __name__ == '__main__':
    unittest.main()
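The ingest tests identify a result either by a dispatch.CalcID or by a dictionary carrying the same five indices as '_model', '_scan', '_domain', '_emit' and '_region', with -1 meaning "not resolved at this level". CalcID is presumably a named tuple over those five fields; a minimal stand-in for illustration (not the pmsco.dispatch source):

from collections import namedtuple

# assumed field order, consistent with how the tests read index.model ... index.region
CalcID = namedtuple('CalcID', ['model', 'scan', 'domain', 'emit', 'region'])

index = CalcID(15, 16, 17, 18, -1)
assert index.model == 15
assert index.region == -1   # region index not resolved for this result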
tests/database/test_orm.py (new file, 279 lines)
@@ -0,0 +1,279 @@
"""
@package tests.database.test_orm
unit tests for pmsco.database.orm

the purpose of these tests is to help debug the code.

to run the tests, change to the directory which contains the tests directory, and execute =nosetests=.

@pre nose must be installed (python-nose package on Debian).

@author Matthias Muntwiler, matthias.muntwiler@psi.ch

@copyright (c) 2021 by Paul Scherrer Institut @n
Licensed under the Apache License, Version 2.0 (the "License"); @n
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
"""

import unittest
import pmsco.database.access as db
import pmsco.database.orm as orm
import pmsco.database.util as util
import pmsco.dispatch as dispatch


class TestDatabase(unittest.TestCase):
    def setUp(self):
        self.db = db.DatabaseAccess()
        self.db.connect(":memory:")

    def tearDown(self):
        pass

    @classmethod
    def setup_class(cls):
        # before any methods in this class
        pass

    @classmethod
    def teardown_class(cls):
        # teardown_class() after any methods in this class
        pass

    def test_orm_1(self):
        with self.db.session() as session:
            prj = orm.Project(name="test1", code=__file__)
            session.add(prj)
            job = orm.Job(name="test_database")
            job.project = prj
            session.add(job)
            tag1 = orm.Tag(key="phase")
            tag2 = orm.Tag(key="scatter")
            session.add_all([tag1, tag2])
            jt1 = orm.JobTag()
            jt1.tag = tag1
            jt1.job = job
            jt1.value = 'phagen'
            jt2 = orm.JobTag()
            jt2.tag = tag2
            jt2.job = job
            jt2.value = 'edac'
            session.commit()

            qprj = session.query(orm.Project).filter_by(id=1).one()
            self.assertEqual(prj.name, qprj.name)
            qjob = session.query(orm.Job).filter_by(id=1).one()
            self.assertEqual(job.name, qjob.name)
            self.assertEqual(job.project.name, prj.name)
            self.assertEqual(len(qprj.jobs), 1)
            self.assertEqual(len(qjob.job_tags), 2)
            self.assertEqual(qjob.tags['phase'], 'phagen')
            self.assertEqual(qjob.tags['scatter'], 'edac')

    def test_orm_2(self):
        with self.db.session() as session:
            prj = orm.Project(name="project 1", code=__file__)
            session.add(prj)

            job = orm.Job(name="job 1")
            job.project = prj
            session.add(job)

            jt1 = orm.JobTag('phase', 'phagen')
            session.add(jt1)
            job.job_tags[jt1.tag_key] = jt1
            job.tags['scatter'] = 'edac'

            mod = orm.Model(model=1111, gen=111, particle=11)
            session.add(mod)

            pv1 = orm.ParamValue(key='dAB', value=123.456, delta=7.543)
            session.add(pv1)
            mod.param_values[pv1.param_key] = pv1
            mod.values['dBC'] = 234.567

            cid = dispatch.CalcID(1111, 2, 3, 4, 5)
            res = orm.Result(calc_id=cid, rfac=0.123)
            res.model = mod
            session.add(res)

            session.commit()

            qprj = session.query(orm.Project).filter_by(id=1).one()
            self.assertEqual(qprj.name, prj.name)
            self.assertEqual(len(qprj.jobs), 1)
            job_names = [k for k in qprj.jobs.keys()]
            self.assertEqual(job_names[0], job.name)
            self.assertEqual(qprj.jobs[job.name], job)

            qjob = session.query(orm.Job).filter_by(id=1).one()
            self.assertEqual(qjob.name, job.name)
            self.assertEqual(qjob.project.name, prj.name)
            self.assertEqual(len(qjob.job_tags), 2)
            self.assertEqual(qjob.job_tags['phase'].value, 'phagen')
            self.assertEqual(qjob.job_tags['scatter'].value, 'edac')
            self.assertEqual(len(qjob.tags), 2)
            self.assertEqual(qjob.tags['phase'], 'phagen')
            self.assertEqual(qjob.tags['scatter'], 'edac')

            qmod = session.query(orm.Model).filter_by(id=1).one()
            self.assertEqual(qmod.model, mod.model)
            self.assertEqual(len(qmod.param_values), 2)
            self.assertEqual(qmod.values['dAB'], 123.456)
            self.assertEqual(qmod.deltas['dAB'], 7.543)
            self.assertEqual(qmod.values['dBC'], 234.567)

            self.assertEqual(len(qmod.results), 1)
            self.assertEqual(qmod.results[0].rfac, 0.123)

    def test_job_tags(self):
        with self.db.session() as session:
            prj = orm.Project(name="project 1", code=__file__)
            session.add(prj)

            job1 = orm.Job(name="job 1")
            job1.project = prj
            session.add(job1)
            job2 = orm.Job(name="job 2")
            job2.project = prj
            session.add(job2)

            job1.tags['color'] = 'blue'
            job1.tags['shape'] = 'round'
            session.flush()
            job2.tags['color'] = 'red'
            job1.tags['color'] = 'green'

            session.commit()

            qjob1 = session.query(orm.Job).filter_by(name='job 1').one()
            self.assertEqual(qjob1.tags['color'], 'green')
            qjob2 = session.query(orm.Job).filter_by(name='job 2').one()
            self.assertEqual(qjob2.tags['color'], 'red')

    def test_job_jobtags(self):
        with self.db.session() as session:
            prj = orm.Project(name="project 1", code=__file__)
            session.add(prj)

            job1 = orm.Job(name="job 1")
            job1.project = prj
            session.add(job1)
            job2 = orm.Job(name="job 2")
            job2.project = prj
            session.add(job2)

            jt1 = orm.JobTag('color', 'blue')
            job1.job_tags[jt1.tag_key] = jt1
            session.flush()
            jt2 = orm.JobTag('color', 'red')
            job2.job_tags[jt2.tag_key] = jt2

            session.commit()

            qjob1 = session.query(orm.Job).filter_by(name='job 1').one()
            self.assertIsInstance(qjob1.job_tags['color'], orm.JobTag)
            self.assertEqual(qjob1.job_tags['color'].value, 'blue')
            qjob2 = session.query(orm.Job).filter_by(name='job 2').one()
            self.assertIsInstance(qjob2.job_tags['color'], orm.JobTag)
            self.assertEqual(qjob2.job_tags['color'].value, 'red')

    def test_param_values(self):
        with self.db.session() as session:
            prj = orm.Project(name="project 1", code=__file__)
            session.add(prj)
            job = orm.Job(name="job 1")
            job.project = prj
            session.add(job)

            mod1 = orm.Model(model=1, gen=11, particle=111)
            session.add(mod1)
            mod2 = orm.Model(model=2, gen=22, particle=222)
            session.add(mod2)

            mod1.values['dBC'] = 234.567
            # note: this flush is necessary before accessing the same param in another model
            session.flush()
            mod2.values['dBC'] = 345.678

            session.commit()

            qmod1 = session.query(orm.Model).filter_by(model=1).one()
            self.assertEqual(qmod1.values['dBC'], 234.567)
            qmod2 = session.query(orm.Model).filter_by(model=2).one()
            self.assertEqual(qmod2.values['dBC'], 345.678)

    def test_filter_job(self):
        """
        test sqlalchemy filter syntax

        @return: None
        """
        with self.db.session() as session:
            p1 = orm.Project(name="p1")
            p2 = orm.Project(name="p2")
            j11 = orm.Job(name="j1")
            j11.project = p1
            j12 = orm.Job(name="j2")
            j12.project = p1
            j21 = orm.Job(name="j1")
            j21.project = p2
            j22 = orm.Job(name="j2")
            j22.project = p2
            session.add_all([p1, p2, j11, j12, j21, j22])
            session.commit()

            q1 = session.query(orm.Job).join(orm.Project)
            q1 = q1.filter(orm.Project.name == 'p1')
            q1 = q1.filter(orm.Job.name == 'j1')
            jobs1 = q1.all()

            sql = """
            select Projects.name project_name, Jobs.name job_name
            from Projects join Jobs on Projects.id = Jobs.project_id
            where Jobs.name = 'j1' and Projects.name = 'p1'
            """
            jobs2 = session.execute(sql)

            n = 0
            for j in jobs2:
                self.assertEqual(j.project_name, 'p1')
                self.assertEqual(j.job_name, 'j1')
                n += 1
            self.assertEqual(n, 1)

            for j in jobs1:
                self.assertEqual(j.project.name, 'p1')
                self.assertEqual(j.name, 'j1')
            self.assertEqual(len(jobs1), 1)

    def test_filter_in(self):
        """
        test sqlalchemy filter syntax: in_ operator

        @return: None
        """
        with self.db.session() as session:
            p1 = orm.Project(name="p1")
            p2 = orm.Project(name="p2")
            j11 = orm.Job(name="j1")
            j11.project = p1
            j12 = orm.Job(name="j2")
            j12.project = p1
            j21 = orm.Job(name="j1")
            j21.project = p2
            j22 = orm.Job(name="j2")
            j22.project = p2
            session.add_all([p1, p2, j11, j12, j21, j22])
            session.commit()

            q1 = session.query(orm.Job)
            q1 = q1.filter(orm.Job.id.in_([2, 3, 7]))
            jobs1 = q1.all()
            self.assertEqual(len(jobs1), 2)


if __name__ == '__main__':
    unittest.main()
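The ORM tests assign job.tags['color'] = 'blue' and mod.values['dBC'] = 234.567 as if these were plain dictionaries, yet each entry ends up as a row in an association table (JobTag, ParamValue), which is also why test_param_values needs a flush before a second model reuses the same parameter key. The sketch below shows the general SQLAlchemy 1.x pattern behind such a dict-like attribute, an association proxy over a keyed relationship; it is a simplified illustration under assumed names, not the pmsco.database.orm mapping (which additionally keeps separate Tag and Param key tables).

from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.orm.collections import attribute_mapped_collection

Base = declarative_base()


class Job(Base):
    __tablename__ = 'Jobs'
    id = Column(Integer, primary_key=True)
    name = Column(String)
    # JobTag rows keyed by their tag_key ...
    job_tags = relationship('JobTag', back_populates='job',
                            collection_class=attribute_mapped_collection('tag_key'))
    # ... exposed as a plain {key: value} dictionary
    tags = association_proxy('job_tags', 'value',
                             creator=lambda k, v: JobTag(tag_key=k, value=v))


class JobTag(Base):
    __tablename__ = 'JobTags'
    id = Column(Integer, primary_key=True)
    job_id = Column(Integer, ForeignKey('Jobs.id'))
    tag_key = Column(String)
    value = Column(String)
    job = relationship('Job', back_populates='job_tags')


engine = create_engine('sqlite:///:memory:')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
job = Job(name='job 1')
session.add(job)
job.tags['color'] = 'blue'      # creates and links a JobTag row behind the scenes
session.commit()
assert session.query(JobTag).one().value == 'blue'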
tests/database/test_query.py (new file, 311 lines)
@@ -0,0 +1,311 @@
"""
@package tests.database.test_query
unit tests for pmsco.database.query

the purpose of these tests is to help debug the code.

to run the tests, change to the directory which contains the tests directory, and execute =nosetests=.

@pre nose must be installed (python-nose package on Debian).

@author Matthias Muntwiler, matthias.muntwiler@psi.ch

@copyright (c) 2016 by Paul Scherrer Institut @n
Licensed under the Apache License, Version 2.0 (the "License"); @n
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
"""

import unittest
import numpy as np
import pmsco.database.access as db
import pmsco.database.common as db_common
import pmsco.database.ingest as db_ingest
import pmsco.database.orm as db_orm
import pmsco.database.query as db_query
import pmsco.database.util as db_util
from tests.database.test_common import setup_sample_database


def pop_query_hook(query, gen):
    return query.filter(db_orm.Model.gen == gen)


class TestDatabase(unittest.TestCase):
    def setUp(self):
        self.db = db.DatabaseAccess()
        self.db.connect(":memory:")

    def tearDown(self):
        pass

    @classmethod
    def setup_class(cls):
        # before any methods in this class
        pass

    @classmethod
    def teardown_class(cls):
        # teardown_class() after any methods in this class
        pass

    def test_query_model_results_array(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            job = objs['j1']

            index = {'_scan': -1, '_domain': -1, '_emit': -1, '_region': -1}
            model2 = {'parA': 4.123, 'parB': 8.567, '_model': 92, '_rfac': 0.654, '_gen': 1, '_particle': 1, '_secs': 0.1}
            model3 = {'parA': 3.412, 'parB': 7.856, '_model': 93, '_rfac': 0.345, '_gen': 2, '_particle': 2, '_secs': 0.2}
            model4 = {'parA': 4.123, 'parB': 8.567, '_model': 94, '_rfac': 0.354, '_gen': 2, '_particle': 3, '_secs': 0.3}
            model5 = {'parA': 2.341, 'parC': 6.785, '_model': 95, '_rfac': 0.453}
            model6 = {'parA': 4.123, 'parB': 8.567, '_model': 96, '_rfac': 0.354, '_gen': 3, '_particle': 5, '_secs': 0.5}
            model2.update(index)
            model3.update(index)
            model4.update(index)
            model5.update(index)
            model6.update(index)
            m2, r2 = db_ingest.insert_result(session, job, model2, model2, model2)
            m3, r3 = db_ingest.insert_result(session, job, model3, model3, model3)
            m4, r4 = db_ingest.insert_result(session, job, model4, model4, model4)
            m5, r5 = db_ingest.insert_result(session, job, model5, model5, model5)
            m6, r6 = db_ingest.insert_result(session, job, model6, model6, model6)
            session.commit()

            models = [m3, m4, m5]
            result_values, result_deltas = db_query.query_model_results_array(session, models=models, include_params=True)

            template = ['parA', 'parB', 'parC', '_model', '_rfac', '_gen', '_particle', '_secs']
            dt = [(field, db_util.field_to_numpy_type(field)) for field in template]
            expected = np.zeros((len(models),), dtype=dt)
            expected['parA'] = np.array([3.412, 4.123, 2.341])
            expected['parB'] = np.array([7.856, 8.567, None])
            expected['parC'] = np.array([None, None, 6.785])
            expected['_model'] = np.array([93, 94, 95])
            expected['_rfac'] = np.array([0.345, 0.354, 0.453])
            expected['_gen'] = np.array([2, 2, 0])
            expected['_particle'] = np.array([2, 3, 0])
            expected['_secs'] = np.array([0.2, 0.3, None])

            self.assertEqual(result_values.shape, expected.shape)
            np.testing.assert_array_almost_equal(result_values['parA'], expected['parA'])
            np.testing.assert_array_almost_equal(result_values['parB'], expected['parB'])
            np.testing.assert_array_almost_equal(result_values['parC'], expected['parC'])
            np.testing.assert_array_almost_equal(result_values['_model'], expected['_model'])
            np.testing.assert_array_almost_equal(result_values['_gen'], expected['_gen'])
            np.testing.assert_array_almost_equal(result_values['_particle'], expected['_particle'])
            np.testing.assert_array_almost_equal(result_values['_rfac'], expected['_rfac'])
            np.testing.assert_array_almost_equal(result_values['_secs'], expected['_secs'])

            self.assertEqual(result_deltas.shape, expected.shape)
            np.testing.assert_array_almost_equal(result_deltas['parA'], expected['parA'])
            np.testing.assert_array_almost_equal(result_deltas['parB'], expected['parB'])
            np.testing.assert_array_almost_equal(result_deltas['parC'], expected['parC'])
            np.testing.assert_array_almost_equal(result_deltas['_model'], expected['_model'])
            np.testing.assert_array_almost_equal(result_deltas['_gen'], expected['_gen'])
            np.testing.assert_array_almost_equal(result_deltas['_particle'], expected['_particle'])

    def test_query_model_results_array_index(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            job = objs['j1']

            model = {'parA': 4.123, 'parB': 8.567, 'parC': 6.785}

            index1 = {'_model': 99, '_scan': -1, '_domain': -1, '_emit': -1, '_region': -1}
            index2 = {'_model': 99, '_scan': 1, '_domain': -1, '_emit': -1, '_region': -1}
            index3 = {'_model': 99, '_scan': 1, '_domain': 1, '_emit': -1, '_region': -1}
            index4 = {'_model': 99, '_scan': 1, '_domain': 1, '_emit': 1, '_region': -1}
            index5 = {'_model': 99, '_scan': 1, '_domain': 1, '_emit': 1, '_region': 1}

            result1 = {'_rfac': 0.154, '_gen': 1, '_particle': 1}
            result1.update(model)
            result2 = {'_rfac': 0.254, '_gen': 1, '_particle': 1}
            result2.update(model)
            result3 = {'_rfac': 0.354, '_gen': 1, '_particle': 1}
            result3.update(model)
            result4 = {'_rfac': 0.454, '_gen': 1, '_particle': 1}
            result4.update(model)
            result5 = {'_rfac': 0.554, '_gen': 1, '_particle': 1}
            result5.update(model)

            m1, r1 = db_ingest.insert_result(session, job, index1, result1, result1)
            m2, r2 = db_ingest.insert_result(session, job, index2, result2, result2)
            m3, r3 = db_ingest.insert_result(session, job, index3, result3, result3)
            m4, r4 = db_ingest.insert_result(session, job, index4, result4, result4)
            m5, r5 = db_ingest.insert_result(session, job, index5, result5, result5)
            session.commit()

            self.assertEqual(m1.id, m2.id)
            self.assertEqual(m1.id, m3.id)
            self.assertEqual(m1.id, m4.id)
            self.assertEqual(m1.id, m5.id)

            result_values, result_deltas = db_query.query_model_results_array(session,
                                                                              model=99, domain=1, include_params=True)

            pars = ['parA', 'parB', 'parC']
            dt = [(k, 'f8') for k in pars]
            controls = ['_model', '_scan', '_domain', '_emit', '_region', '_rfac']
            dt.extend(((k, db_util.field_to_numpy_type(k)) for k in controls))
            expected = np.zeros((3,), dtype=dt)
            expected['parA'] = np.array([4.123, 4.123, 4.123])
            expected['parB'] = np.array([8.567, 8.567, 8.567])
            expected['parC'] = np.array([6.785, 6.785, 6.785])
            expected['_model'] = np.array([99, 99, 99])
            expected['_scan'] = np.array([1, 1, 1])
            expected['_domain'] = np.array([1, 1, 1])
            expected['_emit'] = np.array([-1, 1, 1])
            expected['_region'] = np.array([-1, -1, 1])
            expected['_rfac'] = np.array([0.354, 0.454, 0.554])

            self.assertEqual(result_values.shape, expected.shape)
            np.testing.assert_array_almost_equal(result_values['parA'], expected['parA'])
            np.testing.assert_array_almost_equal(result_values['parB'], expected['parB'])
            np.testing.assert_array_almost_equal(result_values['parC'], expected['parC'])
            np.testing.assert_array_almost_equal(result_values['_model'], expected['_model'])
            np.testing.assert_array_almost_equal(result_values['_scan'], expected['_scan'])
            np.testing.assert_array_almost_equal(result_values['_domain'], expected['_domain'])
            np.testing.assert_array_almost_equal(result_values['_emit'], expected['_emit'])
            np.testing.assert_array_almost_equal(result_values['_region'], expected['_region'])
            np.testing.assert_array_almost_equal(result_values['_rfac'], expected['_rfac'])

    def test_query_model_results_hook(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            job = objs['j1']

            index = {'_scan': -1, '_domain': -1, '_emit': -1, '_region': -1}
            model2 = {'parA': 4.123, 'parB': 8.567, '_model': 92, '_rfac': 0.654, '_gen': 1, '_particle': 1}
            model3 = {'parA': 3.412, 'parB': 7.856, '_model': 93, '_rfac': 0.345, '_gen': 2, '_particle': 2}
            model4 = {'parA': 4.123, 'parB': 8.567, '_model': 94, '_rfac': 0.354, '_gen': 2, '_particle': 3}
            model5 = {'parA': 2.341, 'parC': 6.785, '_model': 95, '_rfac': 0.453}
            model6 = {'parA': 4.123, 'parB': 8.567, '_model': 96, '_rfac': 0.354, '_gen': 3, '_particle': 5}
            model2.update(index)
            model3.update(index)
            model4.update(index)
            model5.update(index)
            model6.update(index)
            m2, r2 = db_ingest.insert_result(session, job, model2, model2, model2)
            m3, r3 = db_ingest.insert_result(session, job, model3, model3, model3)
            m4, r4 = db_ingest.insert_result(session, job, model4, model4, model4)
            m5, r5 = db_ingest.insert_result(session, job, model5, model5, model5)
            m6, r6 = db_ingest.insert_result(session, job, model6, model6, model6)
            session.commit()

            models = [m3, m4]
            hd = {'gen': 2}
            result_values, result_deltas = db_query.query_model_results_array(session, include_params=True,
                                                                              query_hook=pop_query_hook, hook_data=hd)

            template = ['parA', 'parB', 'parC', '_model', '_rfac', '_gen', '_particle']
            dt = [(field, db_util.field_to_numpy_type(field)) for field in template]

            expected = np.zeros((len(models),), dtype=dt)
            expected['parA'] = np.array([3.412, 4.123])
            expected['parB'] = np.array([7.856, 8.567])
            expected['_model'] = np.array([93, 94])
            expected['_rfac'] = np.array([0.345, 0.354])
            expected['_gen'] = np.array([2, 2])
            expected['_particle'] = np.array([2, 3])

            self.assertEqual(result_values.shape, expected.shape)
            self.assertNotIn('parC', result_values.dtype.names)
            np.testing.assert_array_almost_equal(result_values['parA'], expected['parA'])
            np.testing.assert_array_almost_equal(result_values['parB'], expected['parB'])
            np.testing.assert_array_almost_equal(result_values['_model'], expected['_model'])
            np.testing.assert_array_almost_equal(result_values['_gen'], expected['_gen'])
            np.testing.assert_array_almost_equal(result_values['_particle'], expected['_particle'])

    def test_query_best_task_models(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            job = objs['j1']

            model0xxx = {'_model': 0, '_scan': -1, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4., 'parB': 8.567,
                         '_rfac': 0.01}
            model00xx = {'_model': 1, '_scan': 0, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4., 'parB': 8.567,
                         '_rfac': 0.02}
            model000x = {'_model': 2, '_scan': 0, '_domain': 0, '_emit': -1, '_region': -1, 'parA': 4., 'parB': 8.567,
                         '_rfac': 0.03}
            model01xx = {'_model': 3, '_scan': 1, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4., 'parB': 8.567,
                         '_rfac': 0.04}
            model010x = {'_model': 4, '_scan': 1, '_domain': 0, '_emit': -1, '_region': -1, 'parA': 4., 'parB': 8.567,
                         '_rfac': 0.05}

            model1xxx = {'_model': 5, '_scan': -1, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.09}
            model10xx = {'_model': 6, '_scan': 0, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.08}
            model100x = {'_model': 7, '_scan': 0, '_domain': 0, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.07}
            model11xx = {'_model': 8, '_scan': 1, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.06}
            model110x = {'_model': 9, '_scan': 1, '_domain': 0, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.05}

            model2xxx = {'_model': 10, '_scan': -1, '_domain': -1, '_emit': -1, '_region': -1, 'parA': 4.123,
                         'parB': 8.567, '_rfac': 0.01}

            db_ingest.insert_result(session, job, model0xxx, model0xxx)
            db_ingest.insert_result(session, job, model00xx, model00xx)
            db_ingest.insert_result(session, job, model000x, model000x)
            db_ingest.insert_result(session, job, model01xx, model01xx)
            db_ingest.insert_result(session, job, model010x, model010x)

            db_ingest.insert_result(session, job, model1xxx, model1xxx)
            db_ingest.insert_result(session, job, model10xx, model10xx)
            db_ingest.insert_result(session, job, model100x, model100x)
            db_ingest.insert_result(session, job, model11xx, model11xx)
            db_ingest.insert_result(session, job, model110x, model110x)

            db_ingest.insert_result(session, job, model2xxx, model2xxx)

            result = db_query.query_best_task_models(session, job.id, level=1, count=2)

            expected = {0, 1, 3, 6, 8, 10}
            self.assertEqual(result, expected)

    def test_query_best_models_per_job(self):
        with self.db.session() as session:
            objs = setup_sample_database(session)
            job = objs['j2']

            model2 = {'parA': 4.123, 'parB': 8.567, '_model': 92, '_rfac': 0.654, '_gen': 1, '_particle': 2}
            model3 = {'parA': 3.412, 'parB': 7.856, '_model': 93, '_rfac': 0.345, '_gen': 1, '_particle': 3}
            model4 = {'parA': 4.123, 'parB': 8.567, '_model': 94, '_rfac': 0.354, '_gen': 1, '_particle': 4}
            model5 = {'parA': 2.341, 'parC': 6.785, '_model': 95, '_rfac': 0.453, '_gen': 1, '_particle': 5}
            model6 = {'parA': 4.123, 'parB': 8.567, '_model': 96, '_rfac': 0.354, '_gen': 1, '_particle': 6}
            model7 = {'parA': 5.123, 'parB': 6.567, '_model': 97, '_rfac': 0.154, '_gen': 1, '_particle': 7}

            model2.update({'_scan': -1, '_domain': -1, '_emit': -1, '_region': -1})
            model3.update({'_scan': 1, '_domain': -1, '_emit': -1, '_region': -1})
            model4.update({'_scan': 2, '_domain': 11, '_emit': 23, '_region': 33})
            model5.update({'_scan': 3, '_domain': 11, '_emit': -1, '_region': -1})
            model6.update({'_scan': 4, '_domain': 11, '_emit': 25, '_region': -1})
            model7.update({'_scan': 5, '_domain': -1, '_emit': -1, '_region': -1})
            m2, r2 = db_ingest.insert_result(session, job, model2, model2)
            m3, r3 = db_ingest.insert_result(session, job, model3, model3)
            m4, r4 = db_ingest.insert_result(session, job, model4, model4)
            m5, r5 = db_ingest.insert_result(session, job, model5, model5)
            m6, r6 = db_ingest.insert_result(session, job, model6, model6)
            m7, r7 = db_ingest.insert_result(session, job, model7, model7)

            lim = 3
            query = db_query.query_best_models_per_job(session, task_level='domain', limit=lim)
            expected_models = [91, 97]
            self.assertEqual(len(query), len(expected_models))
            for model, result in query:
                self.assertIn(model.model, expected_models)

            lim = 3
            query = db_query.query_best_models_per_job(session, jobs=[job], task_level='domain', limit=lim)
            expected_models = [97]
            self.assertEqual(len(query), len(expected_models))
            for model, result in query:
                self.assertIn(model.model, expected_models)


if __name__ == '__main__':
    unittest.main()
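The expected values in these query tests are numpy structured arrays: one record per result, with a named and typed field per parameter or control column (the concrete dtypes come from db_util.field_to_numpy_type). A standalone refresher on that numpy feature, independent of pmsco:

import numpy as np

# two records with three named fields; 'f8' = float64, 'i8' = int64
dt = [('parA', 'f8'), ('_model', 'i8'), ('_rfac', 'f8')]
expected = np.zeros((2,), dtype=dt)
expected['parA'] = np.array([3.412, 4.123])     # fill one column across all records
expected['_model'] = np.array([93, 94])
expected['_rfac'] = np.array([0.345, 0.354])

print(expected['_rfac'])        # [0.345 0.354]
print(expected[0])              # (3.412, 93, 0.345)
print(expected.dtype.names)     # ('parA', '_model', '_rfac')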
tests/database/test_util.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""
@package tests.database.test_util
unit tests for pmsco.database.util

the purpose of these tests is to help debug the code.

to run the tests, change to the directory which contains the tests directory, and execute =nosetests=.

@pre nose must be installed (python-nose package on Debian).

@author Matthias Muntwiler, matthias.muntwiler@psi.ch

@copyright (c) 2016 by Paul Scherrer Institut @n
Licensed under the Apache License, Version 2.0 (the "License"); @n
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
"""

import numpy as np
import unittest
import pmsco.database.util as util
import pmsco.dispatch as dispatch


class TestDatabase(unittest.TestCase):
    def setUp(self):
        pass

    def tearDown(self):
        pass

    @classmethod
    def setup_class(cls):
        # before any methods in this class
        pass

    @classmethod
    def teardown_class(cls):
        # teardown_class() after any methods in this class
        pass

    def test_regular_params(self):
        d1 = {'parA': 1.234, 'par_B': 5.678, '_model': 91, '_rfac': 0.534}
        d2 = util.regular_params(d1)
        d3 = {'parA': d1['parA'], 'par_B': d1['par_B']}
        self.assertEqual(d2, d3)
        self.assertIsNot(d2, d1)

    def test_special_params(self):
        d1 = {'parA': 1.234, 'par_B': 5.678, '_model': 91, '_rfac': 0.534, '_db_model_id': 99}
        d2 = util.special_params(d1)
        d3 = {'model': d1['_model'], 'rfac': d1['_rfac']}
        self.assertEqual(d2, d3)
        self.assertIsNot(d2, d1)

        dt = [('parA', 'f4'), ('par_B', 'f4'), ('_model', 'i4'), ('_rfac', 'f4'), ('_db_model_id', 'f4')]
        arr = np.zeros(1, dtype=dt)
        for k, v in d1.items():
            arr[0][k] = v
        d4 = util.special_params(arr[0])
        self.assertEqual(d4.keys(), d3.keys())
        for k in d4:
            self.assertAlmostEqual(d4[k], d3[k])

        cid1 = dispatch.CalcID(1, 2, 3, 4, -1)
        cid2 = util.special_params(cid1)
        cid3 = {'model': 1, 'scan': 2, 'domain': 3, 'emit': 4, 'region': -1}
        self.assertEqual(cid2, cid3)

        l1 = d1.keys()
        l2 = util.special_params(l1)
        l3 = d3.keys()
        self.assertEqual(list(l2), list(l3))

        t1 = tuple(l1)
        t2 = util.special_params(t1)
        t3 = tuple(l3)
        self.assertEqual(t2, t3)


if __name__ == '__main__':
    unittest.main()
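These two tests pin down the dictionary convention used throughout the commit: keys without a leading underscore are model parameters, keys with a leading underscore are control fields (returned by special_params with the underscore stripped), and the internal '_db_model_id' is excluded. A dict-only sketch of that behaviour for orientation; the real helpers, as the test shows, also accept CalcID tuples, numpy records, key views and plain tuples, which this sketch does not cover.

def regular_params_sketch(d):
    # model parameters: keys that do not start with an underscore
    return {k: v for k, v in d.items() if not k.startswith('_')}


def special_params_sketch(d):
    # control fields: leading underscore stripped; '_db_model_id' stays internal
    return {k.lstrip('_'): v for k, v in d.items()
            if k.startswith('_') and k != '_db_model_id'}


row = {'parA': 1.234, 'par_B': 5.678, '_model': 91, '_rfac': 0.534, '_db_model_id': 99}
assert regular_params_sketch(row) == {'parA': 1.234, 'par_B': 5.678}
assert special_params_sketch(row) == {'model': 91, 'rfac': 0.534}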