Commit 842ae180 authored by sarsonl's avatar sarsonl
Browse files

Addition of logic so that parents always returned if no children

There is a specific case where if hydraulic query parameters are
used and these filter out all the hydraulic children, then nothing
was returned. We thought the expectation should be that if there
are no matching children, the parents should still be returned if
they are valid.

Validation on query parameters has also been updated in accordance
with the Ramsis core code so that these match.
parent f2b70095
......@@ -31,7 +31,7 @@ _iso8601_re = re.compile(
# tzinfo must not be available
def from_fdsnws_datetime(datestring, use_dateutil=True):
......@@ -140,6 +140,3 @@ def make_response(obj, mimetype):
response = _make_response(obj)
response.headers['Content-Type'] = mimetype
return response
......@@ -24,7 +24,7 @@ from hydws.db.orm import Borehole, BoreholeSection, HydraulicSample
# input comparison value can be list or a string.
# operator must belong in orm attr as op, op_, __op__
filter_hydraulics = [# capital.
('datetime_value', 'ge', 'starttime'),
('datetime_value', 'le', 'endtime'),
('toptemperature_value', 'ge', 'mintoptemperature'),
......@@ -46,11 +46,22 @@ filter_hydraulics = [# capital.
('fluidph_value', 'ge', 'minfluidph'),
('fluidph_value', 'le', 'maxfluidph')]
filter_sections = [
('starttime', 'le', 'endtime'),
('endtime', 'ge', 'starttime')]
filter_boreholes = [
[('toplatitude_value', 'ge', 'minlatitude'),
('bottomlatitude_value', 'ge', 'minlatitude')],
[('toplatitude_value', 'le', 'maxlatitude'),
('bottomlatitude_value', 'le', 'maxlatitude')],
[('toplongitude_value', 'ge', 'minlongitude'),
('bottomlongitude_value', 'ge', 'minlongitude')],
[('toplongitude_value', 'le', 'maxlongitude'),
('longitude_valuee', 'le', 'maxlongitude')]]
('latitude_value', 'ge', 'minlatitude'),
('latitude_value', 'le', 'maxlatitude'),
('longitude_value', 'ge', 'minlongitude'),
......@@ -65,7 +76,6 @@ filter_boreholes = [
class DynamicQuery(object):
Dynamic filtering of a query.
......@@ -75,12 +85,10 @@ class DynamicQuery(object):
:param query: sqlalchemy query to manipulate.
:type query: sqlalchemy.orm.query.Query()
def __init__(self, query):
self.query = query = 1
def return_all(self):
"""Returns all results from query.
......@@ -93,29 +101,32 @@ class DynamicQuery(object):
return None
def return_one(self):
"""Returns one result from query.
Returns one result from query.
:rtype: dict
#MultipleResultsFound from sqlalchemy.orm.exc
return self.query.one_or_none()
def paginate_query(self, limit, page=None, error_flag=False):
"""Paginate used to return a subset of results, starting from
offset*limit to offset*limit + limit.
To be used instead of self.return_all()
:returns: Pagination of query. Use .items to get similar
response to .return_all()
:rtype: Pagination object
def format_results(self, order_column=None, limit=None, offset=None):
Return a subset of results of size limit
and with an offset if required.
:param limit: Limit to number of results returned.
:type query: sqlalchemy.orm.query.Query()
if not page:
page =
return self.query.paginate(page, limit, error_flag)
if order_column:
self.query = self.query.order_by(order_column)
if limit:
self.query = self.query.limit(limit)
if offset:
self.query = self.query.offset(offset)
def operator_attr(self, obj, op):
"""Returns method associated with a comparison operator
Returns method associated with a comparison operator
If one of op, op_, __op__ do not exist, Exception raised
:param obj: Object used to find existing operator methods
......@@ -125,7 +136,6 @@ class DynamicQuery(object):
:return: Method that exists on obj associated with op
:rtype: str
:raises: Exception
obj_methods = [op, f"{op}_", f"__{op}__"]
existing_methods = [m for m in obj_methods
......@@ -137,14 +147,32 @@ class DynamicQuery(object):
raise Exception(f"Invalid operator: {op}")
def filter_section_epoch(self, column, attr, param_value):
# special case as have to deal with open epochs.
Special case for filtering to deal with open epochs.
This requires BoreholeSection starttime and endtime values to
include None values if no value has been set for them.
:param column: Attribute name of ORM table to filter on.
:type column: str
:params attr: Attribute name of operator to use in evaluation.
:type filter_level: str
:params param_value: Value of input query parameter to filter
column on.
:type filter_level: matches type of values stored in column.
:return: Method to evaluate ORM column
e.g. getattr(col, operator)(param value)
:type: Column evaluation method.
eq_attr = self.operator_attr(column, 'eq')
filt = getattr(column, attr)(param_value)
filt = or_((getattr(column, attr)(param_value)),
(getattr(column, eq_attr)(None)))
return filt
def filter_query(self, query_params, filter_level):
"""Update self.query with chained filters based
def filter_level(self, query_params, filter_level):
Update self.query with chained filters based
on query_params
:param query_params: values to filter query results
......@@ -153,47 +181,94 @@ class DynamicQuery(object):
one of ("hydraulic", "borehole")
:type filter_level: str
:raises: Exception
if filter_level == "hydraulic":
orm_class = HydraulicSample
filter_condition = filter_hydraulics
filter_condition = {"hydraulic": FILTER_HYDRAULICS}
elif filter_level == "borehole":
orm_class = Borehole
filter_condition = filter_boreholes
filter_condition = {"borehole": FILTER_BOREHOLES}
elif filter_level == "section":
orm_class = BoreholeSection
filter_condition = filter_sections
filter_condition = {"section_epoch": FILTER_SECTIONS_EPOCH,
raise Exception(f'filter level not handled: {filter_level}')
for filter_tuple in filter_condition:
key, op, param_name = filter_tuple
except ValueError as err:
raise Exception(f"Invalid filter input: {filter_tuple}")
param_value = query_params.get(param_name)
for filter_name, filter_tuples in filter_condition.items():
for filter_clause in filter_tuples:
if not param_value:
if isinstance(filter_clause, list):
filt_list = []
for clause in filter_clause:
filt = self.get_filter(clause, filter_name,
query_params, orm_class)
if filt is None:
column = getattr(orm_class, key)
except AttributeError:
raise Exception(f"Invalid filter column: {key}")
self.query = self.query.filter(or_(*filt_list))
if op == "in":
if isinstance(value, list):
filt = column.in_(param_value)
filt = column.in_(param_value.split(","))
filt = self.get_filter( filter_clause, filter_name, query_params,orm_class)
if filt is None:
self.query = self.query.filter(filt)
def get_filter(self, filter_clause, filter_name, query_params, orm_class):
"""Return evaluation clause for filtering query if a query param
value exists to do the evaluation on.
:param filter_clause: e.g. ('datetime_value', 'ge', 'starttime')
:type filter_clause: tuple
:params filter_name: name given to collection of filter clauses.
:type filter_level: str
:param query_params: values to filter query results
:type query_params: dict
:param orm_class: Name of ORM class that the column value belongs to.
:type query_params: str
:return: Method to evaluate ORM column
e.g. getattr(col, operator)(param value)
:type: Column evaluation method or None if no param value exists.
key, op, param_name, param_value = self.get_query_param(filter_clause, query_params)
if param_value:
return self.filter_query(query_params, filter_name, key, op, param_name, param_value, orm_class)
return None
def get_query_param(self, filter_clause, query_params):
key, op, param_name = filter_clause
except ValueError as err:
raise Exception(f"Invalid filter input")
param_value = query_params.get(param_name)
return key, op, param_name, param_value
def filter_query(self, query_params, filter_name, key, op, param_name, param_value, orm_class):
column = getattr(orm_class, key)
except AttributeError:
raise Exception(f"Invalid filter column: {key}")
if op == "in":
if isinstance(value, list):
filt = column.in_(param_value)
attr = self.operator_attr(column, op)
if filter_level == "section":
filt = self.filter_section_epoch(column, attr, param_value)
filt = getattr(column, attr)(param_value)
filt = column.in_(param_value.split(","))
attr = self.operator_attr(column, op)
if filter_name == "section_epoch":
filt = self.filter_section_epoch(column, attr, param_value)
filt = getattr(column, attr)(param_value)
self.query = self.query.filter(filt)
return filt
......@@ -64,14 +64,14 @@ bh1_section2 = orm.BoreholeSection(
starttime=datetime.datetime(2010, 1, 10),
.. module:: schema
:synopsis: HYDWS datamodel ORM entity de-/serialization facilities..
:synopsis: HYDWS datamodel ORM entity de-/serialization facilities.
.. moduleauthor:: Laura Sarson <>
......@@ -8,40 +8,33 @@
import datetime
import logging
from functools import partial
from marshmallow import Schema, fields, post_dump, pre_load, validate, validates_schema, post_load
from marshmallow import (Schema, fields, post_dump, pre_load,
validate, validates_schema, post_load)
from hydws.db.orm import Borehole, BoreholeSection, HydraulicSample
ValidateLatitude = validate.Range(min=-90., max=90.)
ValidateLongitude = validate.Range(min=-180., max=180.)
ValidatePositive = validate.Range(min=0.)
ValidateConfidenceLevel = validate.Range(min=0., max=100.)
ValidateCelcius = validate.Range(min=0)
VALIDATE_LATITUDE = validate.Range(min=-90., max=90.)
VALIDATE_LONGITUDE = validate.Range(min=-180., max=180.)
VALIDATE_POSITIVE = validate.Range(min=0.)
VALIDATE_CONFIDENCE_LEVEL = validate.Range(min=0., max=100.)
VALIDATE_KELVIN = validate.Range(min=0.)
VALIDATE_PH = validate.Range(min=0., max=14)
Datetime = partial(fields.DateTime, format='iso')
DatetimeRequired = partial(Datetime, required=True)
Degree = partial(fields.Float)
Latitude = partial(Degree, validate=ValidateLatitude)
FloatPositive = partial(fields.Float, validate=VALIDATE_POSITIVE)
Latitude = partial(Degree, validate=VALIDATE_LATITUDE)
RequiredLatitude = partial(Latitude, required=True)
Longitude = partial(Degree, validate=ValidateLongitude)
Longitude = partial(Degree, validate=VALIDATE_LONGITUDE)
RequiredLongitude = partial(Longitude, required=True)
Uncertainty = partial(fields.Float, validate=ValidatePositive)
ConfidenceLevel = partial(fields.Float, validate=ValidateConfidenceLevel)
Depth = partial(fields.Float, validate=ValidatePositive)
BedrockDepth = partial(fields.Float, validate=ValidatePositive)
MeasuredDepth = partial(fields.Float, validate=ValidatePositive)
Diameter = partial(fields.Float, validate=ValidatePositive)
Temperature = partial(fields.Float, validate=ValidateCelcius)
Pressure = partial(fields.Float, validate=ValidatePositive)
Flow = partial(fields.Float, validate=ValidatePositive)
FluidPh = partial(fields.Float, validate=ValidatePositive)
FluidViscosity = partial(fields.Float, validate=ValidatePositive)
FluidDensity = partial(fields.Float, validate=ValidatePositive)
ConfidenceLevel = partial(fields.Float, validate=VALIDATE_CONFIDENCE_LEVEL)
Temperature = partial(fields.Float, validate=VALIDATE_KELVIN)
FluidPh = partial(fields.Float, validate=VALIDATE_PH)
class SchemaBase(Schema):
"""Schema base class for object de-/serialization.
Schema base class for object de-/serialization.
def remove_empty(self, data):
......@@ -98,9 +91,9 @@ class SchemaBase(Schema):
class CreationInfoSchema(SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
creationinfo_author = fields.String()
creationinfo_authoruri_resourceid = fields.String()
......@@ -114,9 +107,9 @@ class CreationInfoSchema(SchemaBase):
class LSCreatorPersonSchema(SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
literaturesource_creator_person_name = fields.String()
literaturesource_creator_person_givenname = fields.String()
......@@ -131,9 +124,9 @@ class LSCreatorPersonSchema(SchemaBase):
class LSCreatorAffiliationSchema(SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
literaturesource_creator_affiliation_institution_name = fields.String()
literaturesource_creator_affiliation_institution_identifier_resourceid = fields.String()
......@@ -163,9 +156,9 @@ class LSCreatorAffiliationSchema(SchemaBase):
class LSCreatorAlternateAffiliationSchema(SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
literaturesource_creator_alternateaffiliation_institution_name = fields.String()
literaturesource_creator_alternateaffiliation_institution_identifier_resourceid = fields.String()
......@@ -195,9 +188,9 @@ class LSCreatorAlternateAffiliationSchema(SchemaBase):
class LSCreatorSchema(SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
literaturesource_creator_mbox_resourceid = fields.String()
literaturesource_creator_comment_comment = fields.String()
......@@ -219,9 +212,9 @@ class LiteratureSourceCreationInfoSchema(
CreationInfoSchema, SchemaBase):
"""Schema implementation of literature source and creation info
Schema implementation of literature source and creation info
defined levels.
literaturesource_identifier_resourceid = fields.String()
literaturesource_type_uri_resourceid = fields.String()
......@@ -253,67 +246,67 @@ class LiteratureSourceCreationInfoSchema(
class HydraulicSampleSchema(SchemaBase):
"""Schema implementation of an hydraulic data sample.
Schema implementation of an hydraulic data sample.
datetime_value = DatetimeRequired()
datetime_uncertainty = Uncertainty()
datetime_loweruncertainty = Uncertainty()
datetime_upperuncertainty = Uncertainty()
datetime_uncertainty = FloatPositive()
datetime_loweruncertainty = FloatPositive()
datetime_upperuncertainty = FloatPositive()
datetime_confidencelevel = ConfidenceLevel()
toptemperature_value = Temperature()
toptemperature_uncertainty = Uncertainty()
toptemperature_loweruncertainty = Uncertainty()
toptemperature_upperuncertainty = Uncertainty()
toptemperature_uncertainty = FloatPositive()
toptemperature_loweruncertainty = FloatPositive()
toptemperature_upperuncertainty = FloatPositive()
toptemperature_confidencelevel = ConfidenceLevel()
bottomtemperature_value = Temperature()
bottomtemperature_uncertainty = Uncertainty()
bottomtemperature_loweruncertainty = Uncertainty()
bottomtemperature_upperuncertainty = Uncertainty()
bottomtemperature_uncertainty = FloatPositive()
bottomtemperature_loweruncertainty = FloatPositive()
bottomtemperature_upperuncertainty = FloatPositive()
bottomtemperature_confidencelevel = ConfidenceLevel()
topflow_value = Flow()
topflow_uncertainty = Uncertainty()
topflow_loweruncertainty = Uncertainty()
topflow_upperuncertainty = Uncertainty()
topflow_value = fields.Float()
topflow_uncertainty = FloatPositive()
topflow_loweruncertainty = FloatPositive()
topflow_upperuncertainty = FloatPositive()
topflow_confidencelevel = ConfidenceLevel()
bottomflow_value = Flow()
bottomflow_uncertainty = Uncertainty()
bottomflow_loweruncertainty = Uncertainty()
bottomflow_upperuncertainty = Uncertainty()
bottomflow_value = fields.Float()
bottomflow_uncertainty = FloatPositive()
bottomflow_loweruncertainty = FloatPositive()
bottomflow_upperuncertainty = FloatPositive()
bottomflow_confidencelevel = ConfidenceLevel()
toppressure_value = Pressure()
toppressure_uncertainty = Uncertainty()
toppressure_loweruncertainty = Uncertainty()
toppressure_upperuncertainty = Uncertainty()
toppressure_value = FloatPositive()
toppressure_uncertainty = FloatPositive()
toppressure_loweruncertainty = FloatPositive()
toppressure_upperuncertainty = FloatPositive()
toppressure_confidencelevel = ConfidenceLevel()
bottompressure_value = Pressure()
bottompressure_uncertainty = Uncertainty()
bottompressure_loweruncertainty = Uncertainty()
bottompressure_upperuncertainty = Uncertainty()
bottompressure_value = FloatPositive()
bottompressure_uncertainty = FloatPositive()
bottompressure_loweruncertainty = FloatPositive()
bottompressure_upperuncertainty = FloatPositive()
bottompressure_confidencelevel = ConfidenceLevel()
fluiddensity_value = FluidDensity()
fluiddensity_uncertainty = Uncertainty()
fluiddensity_loweruncertainty = Uncertainty()
fluiddensity_upperuncertainty = Uncertainty()
fluiddensity_value = FloatPositive()
fluiddensity_uncertainty = FloatPositive()
fluiddensity_loweruncertainty = FloatPositive()
fluiddensity_upperuncertainty = FloatPositive()
fluiddensity_confidencelevel = ConfidenceLevel()
fluidviscosity_value = FluidViscosity()
fluidviscosity_uncertainty = Uncertainty()
fluidviscosity_loweruncertainty = Uncertainty()
fluidviscosity_upperuncertainty = Uncertainty()
fluidviscosity_value = FloatPositive()
fluidviscosity_uncertainty = FloatPositive()
fluidviscosity_loweruncertainty = FloatPositive()
fluidviscosity_upperuncertainty = FloatPositive()
fluidviscosity_confidencelevel = ConfidenceLevel()
fluidph_value = FluidPh()
fluidph_uncertainty = Uncertainty()
fluidph_loweruncertainty = Uncertainty()
fluidph_upperuncertainty = Uncertainty()
fluidph_uncertainty = FloatPositive()
fluidph_loweruncertainty = FloatPositive()
fluidph_upperuncertainty = FloatPositive()
fluidph_confidencelevel = ConfidenceLevel()
fluidcomposition = fields.String()
......@@ -322,61 +315,61 @@ class HydraulicSampleSchema(SchemaBase):
return HydraulicSample(**data)
class SectionSchema(SchemaBase):
"""Schema implementation of a borehole section.
Schema implementation of a borehole section.
publicid = fields.String(required=True)
starttime = DatetimeRequired()
endtime = Datetime()
toplongitude_value = Longitude()
toplongitude_uncertainty = Uncertainty()
toplongitude_loweruncertainty = Uncertainty()
toplongitude_upperuncertainty = Uncertainty()
toplongitude_uncertainty = FloatPositive()
toplongitude_loweruncertainty = FloatPositive()
toplongitude_upperuncertainty = FloatPositive()
toplongitude_confidencelevel = ConfidenceLevel()
bottomlongitude_value = Longitude()
bottomlongitude_uncertainty = Uncertainty()
bottomlongitude_loweruncertainty = Uncertainty()
bottomlongitude_upperuncertainty = Uncertainty()
bottomlongitude_uncertainty = FloatPositive()
bottomlongitude_loweruncertainty = FloatPositive()
bottomlongitude_upperuncertainty = FloatPositive()
bottomlongitude_confidencelevel = ConfidenceLevel()
toplatitude_value = Latitude()
toplatitude_uncertainty = Uncertainty()
toplatitude_loweruncertainty = Uncertainty()
toplatitude_upperuncertainty = Uncertainty()
toplatitude_uncertainty = FloatPositive()
toplatitude_loweruncertainty = FloatPositive()
toplatitude_upperuncertainty = FloatPositive()
toplatitude_confidencelevel = ConfidenceLevel()
bottomlatitude_value = Latitude()
bottomlatitude_uncertainty = Uncertainty()
bottomlatitude_loweruncertainty = Uncertainty()
bottomlatitude_upperuncertainty = Uncertainty()
bottomlatitude_uncertainty = FloatPositive()
bottomlatitude_loweruncertainty = FloatPositive()
bottomlatitude_upperuncertainty = FloatPositive()
bottomlatitude_confidencelevel = ConfidenceLevel()
topdepth_value = Depth()
topdepth_uncertainty = Uncertainty()
topdepth_loweruncertainty = Uncertainty()
topdepth_upperuncertainty = Uncertainty()
topdepth_value = FloatPositive()
topdepth_uncertainty = FloatPositive()
topdepth_loweruncertainty = FloatPositive()
topdepth_upperuncertainty = FloatPositive()
topdepth_confidencelevel = ConfidenceLevel()
bottomdepth_value = Depth()
bottomdepth_uncertainty = Uncertainty()
bottomdepth_loweruncertainty = Uncertainty()
bottomdepth_upperuncertainty = Uncertainty()
bottomdepth_value = FloatPositive()
bottomdepth_uncertainty = FloatPositive()
bottomdepth_loweruncertainty = FloatPositive()
bottomdepth_upperuncertainty = FloatPositive()
bottomdepth_confidencelevel = ConfidenceLevel()