From f1937ba3381468644c3b1b01d01a73eff0b91031 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 4 Nov 2014 08:51:00 +0100 Subject: [PATCH 001/209] Create v0.10 branch --- pgmapcss/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/version.py b/pgmapcss/version.py index a9211f73..8c776250 100644 --- a/pgmapcss/version.py +++ b/pgmapcss/version.py @@ -1,7 +1,7 @@ __all__ = 'VERSION', 'VERSION_INFO' #: (:class:`tuple`) The version tuple e.g. ``(0, 9, 2)``. -VERSION_INFO = (0, 9, 0) +VERSION_INFO = (0, 10, 'dev') #: (:class:`basestring`) The version string e.g. ``'0.9.2'``. if len(VERSION_INFO) == 4: From 30e5f8eb0d2978e1ab86e5dd8db41ad7a86f0a1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 4 Nov 2014 08:57:36 +0100 Subject: [PATCH 002/209] DB/Postgresql DB: base class for all pg based databases --- pgmapcss/db/osm2pgsql/db.py | 6 +++--- pgmapcss/db/osmosis/db.py | 6 +++--- pgmapcss/db/postgresql_db/__init__.py | 1 + pgmapcss/db/postgresql_db/db.py | 5 +++++ 4 files changed, 12 insertions(+), 6 deletions(-) create mode 100644 pgmapcss/db/postgresql_db/__init__.py create mode 100644 pgmapcss/db/postgresql_db/db.py diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index 741e4c83..b2e2b4d3 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -1,10 +1,10 @@ -from ..default import default +from ..postgresql_db import postgresql_db from ..pg import format from ..pg import ident -class db(default): +class db(postgresql_db): def __init__(self, conn, stat): - default.__init__(self, conn, stat) + postgresql_db.__init__(self, conn, stat) if not 'db.srs' in self.stat['config']: if stat['config'].get('offline', False): diff --git a/pgmapcss/db/osmosis/db.py b/pgmapcss/db/osmosis/db.py index 4fff0834..0b2104a2 100644 --- a/pgmapcss/db/osmosis/db.py +++ b/pgmapcss/db/osmosis/db.py @@ -1,12 +1,12 @@ from pkg_resources import * 
import postgresql -from ..default import default +from ..postgresql_db import postgresql_db from ..pg import format from ..pg import ident -class db(default): +class db(postgresql_db): def __init__(self, conn, stat): - default.__init__(self, conn, stat) + postgresql_db.__init__(self, conn, stat) if not 'db.multipolygons' in self.stat['config']: try: diff --git a/pgmapcss/db/postgresql_db/__init__.py b/pgmapcss/db/postgresql_db/__init__.py new file mode 100644 index 00000000..c15128e1 --- /dev/null +++ b/pgmapcss/db/postgresql_db/__init__.py @@ -0,0 +1 @@ +from .db import postgresql_db diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py new file mode 100644 index 00000000..40d7c37f --- /dev/null +++ b/pgmapcss/db/postgresql_db/db.py @@ -0,0 +1,5 @@ +from ..default import default + +# This class is the base class for all database using a postgresql database +class postgresql_db(default): + pass From 10b5fad58a266b9c8780c0b83652d48232060cd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 4 Nov 2014 09:17:08 +0100 Subject: [PATCH 003/209] Rename 'match_where' to 'db_selects' --- ...ile_function_get_where.py => compile_db_selects.py} | 10 +++++----- pgmapcss/compiler/compile_function_match.py | 10 +++++----- 2 files changed, 10 insertions(+), 10 deletions(-) rename pgmapcss/compiler/{compile_function_get_where.py => compile_db_selects.py} (92%) diff --git a/pgmapcss/compiler/compile_function_get_where.py b/pgmapcss/compiler/compile_db_selects.py similarity index 92% rename from pgmapcss/compiler/compile_function_get_where.py rename to pgmapcss/compiler/compile_db_selects.py index e3ed340b..1c22d41f 100644 --- a/pgmapcss/compiler/compile_function_get_where.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,7 +1,7 @@ import pgmapcss.db as db from .compile_sql import * -def get_where_selectors(filter, stat): +def filter_selectors(filter, stat): # where_selectors contains indexes of all selectors which we 
need for match queries where_selectors = [] @@ -44,7 +44,7 @@ def get_where_selectors(filter, stat): # uniq list return list(set(where_selectors)) -def compile_function_get_where(id, stat): +def compile_db_selects(id, stat): ret = '' scale_denominators = stat.all_scale_denominators() @@ -52,7 +52,7 @@ def compile_function_get_where(id, stat): max_scale = None for min_scale in scale_denominators: filter = { 'min_scale': min_scale, 'max_scale': max_scale or 10E+10} - where_selectors = get_where_selectors(filter, stat) + current_selectors = filter_selectors(filter, stat) # compile all selectors # TODO: define list of possible object_types @@ -61,7 +61,7 @@ def compile_function_get_where(id, stat): object_type, compile_selector_sql(stat['statements'][i], stat, prefix='', filter=filter, object_type=object_type) ) - for i in where_selectors + for i in current_selectors for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) } @@ -92,6 +92,6 @@ def compile_function_get_where(id, stat): ret += \ ' render_context[\'scale_denominator\'] >= ' + str(min_scale) + ':\n' +\ - ' match_where = ' + repr(conditions) + '\n' + ' db_selects = ' + repr(conditions) + '\n' return ret diff --git a/pgmapcss/compiler/compile_function_match.py b/pgmapcss/compiler/compile_function_match.py index a883fdf0..42611f1d 100644 --- a/pgmapcss/compiler/compile_function_match.py +++ b/pgmapcss/compiler/compile_function_match.py @@ -1,6 +1,6 @@ from pkg_resources import * import pgmapcss.db as db -from .compile_function_get_where import compile_function_get_where +from .compile_db_selects import compile_db_selects from .compile_function_check import compile_function_check from .compile_build_result import compile_build_result from ..includes import include_text @@ -57,7 +57,7 @@ def compile_function_match(stat): for k, v in stat['defines']['style_element_property'].items() }), 'scale_denominators': 
repr(scale_denominators), - 'match_where': compile_function_get_where(stat['id'], stat), + 'db_selects': compile_db_selects(stat['id'], stat), 'db_query': db.query_functions(stat), 'function_check': check_functions, 'check_chooser': check_chooser, @@ -108,8 +108,8 @@ def compile_function_match(stat): {db_query} {eval_functions} {function_check} -match_where = None -{match_where} +db_selects = None +{db_selects} counter = {{ 'rendered': 0, 'total': 0 }} {check_chooser} @@ -122,7 +122,7 @@ def compile_function_match(stat): '''.format(**replacement) - func = "objects(render_context.get('bbox'), match_where)" + func = "objects(render_context.get('bbox'), db_selects)" if stat['config'].get('debug.profiler', False): ret += "time_qry_start = datetime.datetime.now() # profiling\n" ret += "src = list(" + func + ")\n" From eebca3e35b2479317fd9a9d4e32716c0177bdcbb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 4 Nov 2014 13:56:00 +0100 Subject: [PATCH 004/209] DB/Postgresql DB: Move code from compile_sql (and similar) --- pgmapcss/compiler/compile_db_selects.py | 23 +- pgmapcss/compiler/compile_link_selector.py | 3 +- pgmapcss/compiler/compile_sql.py | 276 ------------------- pgmapcss/db/postgresql_db/db.py | 300 ++++++++++++++++++++- 4 files changed, 302 insertions(+), 300 deletions(-) delete mode 100644 pgmapcss/compiler/compile_sql.py diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 1c22d41f..ef445767 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,6 +1,3 @@ -import pgmapcss.db as db -from .compile_sql import * - def filter_selectors(filter, stat): # where_selectors contains indexes of all selectors which we need for match queries where_selectors = [] @@ -59,29 +56,13 @@ def compile_db_selects(id, stat): conditions = { ( object_type, - compile_selector_sql(stat['statements'][i], stat, prefix='', filter=filter, 
object_type=object_type) + stat['database'].compile_selector(stat['statements'][i], stat, prefix='', filter=filter, object_type=object_type) ) for i in current_selectors for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) } - types = [ t for t, cs in conditions if t != True ] - - conditions = { - t: - '(' + ') or ('.join([ - cs - for t2, cs in conditions - if t == t2 - if cs != 'false' - ]) + ')' - for t in types - } - conditions = { - t: cs - for t, cs in conditions.items() - if cs != '()' - } + conditions = stat['database'].merge_conditions(conditions) max_scale = min_scale diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index b88da310..e2d67926 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -1,12 +1,11 @@ from .compile_selector_part import compile_selector_part from .compile_conditions import compile_conditions -from .compile_sql import * from .compile_eval import compile_eval import pgmapcss.db as db def compile_link_selector(statement, stat): parent_conditions = ' and '.join([ - compile_condition_sql(c, statement, stat, prefix='') or 'true' + stat['database'].compile_condition(c, statement, stat, prefix='') or 'true' for c in statement['parent_selector']['conditions'] ]) diff --git a/pgmapcss/compiler/compile_sql.py b/pgmapcss/compiler/compile_sql.py deleted file mode 100644 index 59061dde..00000000 --- a/pgmapcss/compiler/compile_sql.py +++ /dev/null @@ -1,276 +0,0 @@ -import pgmapcss.db as db -from .CompileError import CompileError - -def value_format_default(key, value): - return db.format(value) - -# escape strings for "like" matches, see http://www.postgresql.org/docs/9.1/static/functions-matching.html -def pg_like_escape(s): - s = s.replace('\\', '\\\\') - s = s.replace('_', '\\_') - s = s.replace('%', '\\%') - return s - -def 
compile_condition_hstore_value(condition, statement, tag_type, stat, prefix, filter): - ret = None - negate = False - key = tag_type[1] - column = tag_type[2] - op = condition['op'] - - if op[0:2] == '! ': - op = op[2:] - negate = True - - # eval() statements - if op == 'eval': - return None - - # ignore pseudo classes - if op == 'pseudo_class': - return None - - # regexp on key of tag - if op in ('key_regexp', 'key_regexp_case'): - return None - - # value-eval() statements - if condition['value_type'] == 'eval': - # treat other conditions as has_key - ret = prefix + column + ' ? ' + db.format(key); - - # = - elif op == '=': - ret = prefix + column + ' @> ' + db.format({ key: condition['value'] }) - - # @= - elif op == '@=' and condition['value_type'] == 'value': - ret = '(' + ' or '.join([ - prefix + column + ' @> ' + db.format({ key: v }) - for v in condition['value'].split(';') - ]) + ')' - - # != - elif op == '!=': - ret = '( not ' + prefix + column + ' ? ' + db.format(key) +\ - 'or not ' + prefix + column + ' @> ' +\ - db.format({ key: condition['value'] }) + ')' - - # regexp match =~ - elif op == '=~': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) +\ - (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ - db.format(condition['value']) + ')' - - # negated regexp match !~ - elif op == '!~': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) +\ - (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ - db.format(condition['value']) + ')' - - # prefix match ^= - elif op == '^=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format(pg_like_escape(condition['value']) + '%') + ')' - - # suffix match $= - elif op == '$=': - ret = '(' + prefix + column + ' ? 
' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value'])) + ')' - - # substring match *= - elif op == '*=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value']) + '%') + ')' - - # list membership ~= - elif op == '~=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - db.format(condition['value']) + ' =any(string_to_array(' +\ - prefix + column + '->' + db.format(key) + ', \';\')))' - - else: - ret = prefix + column + ' ? ' + db.format(key) - - if ret is None: - return None - - if negate: - return '(not ' + prefix + column + ' ? ' + db.format(key) +\ - ' or not ' + ret + ')' - - return ret - -def compile_condition_column(condition, statement, tag_type, stat, prefix, filter): - ret = None - key = tag_type[1] - op = condition['op'] - negate = False - - value_format = value_format_default - if len(tag_type) > 2: - value_format = tag_type[2] - - if op[0:2] == '! 
': - op = op[2:] - negate = True - - # eval() statements - if op == 'eval': - return None - - # ignore pseudo classes - if op == 'pseudo_class': - return None - - # regexp on key of tag - if op in ('key_regexp', 'key_regexp_case'): - return None - - # value-eval() statements - if condition['value_type'] == 'eval': - # treat other conditions as has_key - ret = prefix + db.ident(key) + ' is not null' - - # = - elif op == '=': - # if value_format returns None -> can't resolve, discard condition - # if value_format returns False -> return false as result - f = value_format(key, condition['value']) - if f is None: - return None - elif f: - ret = prefix + db.ident(key) + ' = ' + f - else: - ret = 'false' - - # @= - elif op == '@=' and condition['value_type'] == 'value': - f = { - value_format(key, v) - for v in condition['value'].split(';') - } - # if value_format returns None -> can't resolve, discard condition - # if value_format returns False -> return false as result - if None in f: - return None - if False in f: - f.remove(None) - - if len(f): - ret = prefix + db.ident(key) + ' in (' + ', '.join(f) + ')' - else: - ret = 'false' - - # != - elif op == '!=': - ret = '(' + prefix + db.ident(key) + 'is null or ' +\ - prefix + db.ident(key) + '!=' +\ - db.format(condition['value']) + ')' - - # regexp match =~ - elif op == '=~': - ret = prefix + db.ident(key) +\ - (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ - db.format(condition['value']) - - # negated regexp match !~ - elif op == '!~': - ret = prefix + db.ident(key) +\ - (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ - db.format(condition['value']) - - # prefix match ^= - elif op == '^=': - ret = prefix + db.ident(key) + ' like ' +\ - db.format(pg_like_escape(condition['value']) + '%') - - # suffix match $= - elif op == '$=': - ret = prefix + db.ident(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value'])) - - # substring match *= - elif op == '*=': - ret = prefix + 
db.ident(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value']) + '%') - - # list membership ~= - elif op == '~=': - ret = \ - db.format(condition['value']) + ' =any(string_to_array(' +\ - prefix + db.ident(key) + ', \';\'))' - - else: - ret = prefix + db.ident(key) + ' is not null' - - if ret is None: - return None - - if negate: - return '(' + prefix + db.ident(key) + ' is null or not ' + ret + ')' - - return ret - -def compile_condition_sql(condition, statement, stat, prefix='current.', filter={}): - ret = set() - - # assignments: map conditions which are based on a (possible) set-statement - # back to their original selectors: - f = filter.copy() - f['has_set_tag'] = condition['key'] - f['max_id'] = statement['id'] - set_statements = stat.filter_statements(f) - - if len(set_statements) > 0: - ret.add('((' + ') or ('.join([ - compile_selector_sql(s, stat, prefix, filter) - for s in set_statements - ]) + '))') - - # ignore generated tags (identified by leading .) - if condition['key'][0] == '.': - if len(ret) == 0: - return 'false' - return ''.join(ret) - - # depending on the tag type compile the specified condition - tag_type = stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) - - if tag_type is None: - pass - elif tag_type[0] == 'hstore-value': - ret.add(compile_condition_hstore_value(condition, statement, tag_type, stat, prefix, filter)) - elif tag_type[0] == 'column': - ret.add(compile_condition_column(condition, statement, tag_type, stat, prefix, filter)) - else: - raise CompileError('unknown tag type {}'.format(tag_type)) - - if None in ret: - ret.remove(None) - if len(ret) == 0: - return None - - # merge conditions together, return - return '(' + ' or '.join(ret) + ')' - -def compile_selector_sql(statement, stat, prefix='current.', filter={}, object_type=None): - filter['object_type'] = object_type - - ret = { - compile_condition_sql(c, statement, stat, prefix, filter) or 'true' - for c in 
statement['selector']['conditions'] - } - - if len(ret) == 0: - return 'true' - - if 'false' in ret: - return 'false' - - return ' and '.join(ret) diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index 40d7c37f..eda672d9 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -1,5 +1,303 @@ from ..default import default +import pgmapcss.db.pg # This class is the base class for all database using a postgresql database class postgresql_db(default): - pass + def __init__(self, conn, stat): + default.__init__(self, conn, stat) + self.format = pgmapcss.db.pg.format + self.ident = pgmapcss.db.pg.ident + + def merge_conditions(self, conditions): + types = [ t for t, cs in conditions if t != True ] + + conditions = { + t: + '(' + ') or ('.join([ + cs + for t2, cs in conditions + if t == t2 + if cs != 'false' + ]) + ')' + for t in types + } + + return { + t: cs + for t, cs in conditions.items() + if cs != '()' + } + + def compile_condition_hstore_value(self, condition, statement, tag_type, stat, prefix, filter): + ret = None + negate = False + key = tag_type[1] + column = tag_type[2] + op = condition['op'] + + if op[0:2] == '! ': + op = op[2:] + negate = True + + # eval() statements + if op == 'eval': + return None + + # ignore pseudo classes + if op == 'pseudo_class': + return None + + # regexp on key of tag + if op in ('key_regexp', 'key_regexp_case'): + return None + + # value-eval() statements + if condition['value_type'] == 'eval': + # treat other conditions as has_key + ret = prefix + column + ' ? ' + self.format(key); + + # = + elif op == '=': + ret = prefix + column + ' @> ' + self.format({ key: condition['value'] }) + + # @= + elif op == '@=' and condition['value_type'] == 'value': + ret = '(' + ' or '.join([ + prefix + column + ' @> ' + self.format({ key: v }) + for v in condition['value'].split(';') + ]) + ')' + + # != + elif op == '!=': + ret = '( not ' + prefix + column + ' ? 
' + self.format(key) +\ + 'or not ' + prefix + column + ' @> ' +\ + self.format({ key: condition['value'] }) + ')' + + # regexp match =~ + elif op == '=~': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + prefix + column + '->' + self.format(key) +\ + (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ + self.format(condition['value']) + ')' + + # negated regexp match !~ + elif op == '!~': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + prefix + column + '->' + self.format(key) +\ + (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ + self.format(condition['value']) + ')' + + # prefix match ^= + elif op == '^=': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + prefix + column + '->' + self.format(key) + ' like ' +\ + self.format(self.pg_like_escape(condition['value']) + '%') + ')' + + # suffix match $= + elif op == '$=': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + prefix + column + '->' + self.format(key) + ' like ' +\ + self.format('%' + self.pg_like_escape(condition['value'])) + ')' + + # substring match *= + elif op == '*=': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + prefix + column + '->' + self.format(key) + ' like ' +\ + self.format('%' + self.pg_like_escape(condition['value']) + '%') + ')' + + # list membership ~= + elif op == '~=': + ret = '(' + prefix + column + ' ? ' + self.format(key) + ' and ' +\ + self.format(condition['value']) + ' =any(string_to_array(' +\ + prefix + column + '->' + self.format(key) + ', \';\')))' + + else: + ret = prefix + column + ' ? ' + self.format(key) + + if ret is None: + return None + + if negate: + return '(not ' + prefix + column + ' ? 
' + self.format(key) +\ + ' or not ' + ret + ')' + + return ret + + def compile_condition_column(self, condition, statement, tag_type, stat, prefix, filter): + ret = None + key = tag_type[1] + op = condition['op'] + negate = False + + value_format = self.value_format_default + if len(tag_type) > 2: + value_format = tag_type[2] + + if op[0:2] == '! ': + op = op[2:] + negate = True + + # eval() statements + if op == 'eval': + return None + + # ignore pseudo classes + if op == 'pseudo_class': + return None + + # regexp on key of tag + if op in ('key_regexp', 'key_regexp_case'): + return None + + # value-eval() statements + if condition['value_type'] == 'eval': + # treat other conditions as has_key + ret = prefix + self.ident(key) + ' is not null' + + # = + elif op == '=': + # if value_format returns None -> can't resolve, discard condition + # if value_format returns False -> return false as result + f = value_format(key, condition['value']) + if f is None: + return None + elif f: + ret = prefix + self.ident(key) + ' = ' + f + else: + ret = 'false' + + # @= + elif op == '@=' and condition['value_type'] == 'value': + f = { + value_format(key, v) + for v in condition['value'].split(';') + } + # if value_format returns None -> can't resolve, discard condition + # if value_format returns False -> return false as result + if None in f: + return None + if False in f: + f.remove(None) + + if len(f): + ret = prefix + self.ident(key) + ' in (' + ', '.join(f) + ')' + else: + ret = 'false' + + # != + elif op == '!=': + ret = '(' + prefix + self.ident(key) + 'is null or ' +\ + prefix + self.ident(key) + '!=' +\ + self.format(condition['value']) + ')' + + # regexp match =~ + elif op == '=~': + ret = prefix + self.ident(key) +\ + (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ + self.format(condition['value']) + + # negated regexp match !~ + elif op == '!~': + ret = prefix + self.ident(key) +\ + (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ + 
self.format(condition['value']) + + # prefix match ^= + elif op == '^=': + ret = prefix + self.ident(key) + ' like ' +\ + self.format(self.pg_like_escape(condition['value']) + '%') + + # suffix match $= + elif op == '$=': + ret = prefix + self.ident(key) + ' like ' +\ + self.format('%' + self.pg_like_escape(condition['value'])) + + # substring match *= + elif op == '*=': + ret = prefix + self.ident(key) + ' like ' +\ + self.format('%' + self.pg_like_escape(condition['value']) + '%') + + # list membership ~= + elif op == '~=': + ret = \ + self.format(condition['value']) + ' =any(string_to_array(' +\ + prefix + self.ident(key) + ', \';\'))' + + else: + ret = prefix + self.ident(key) + ' is not null' + + if ret is None: + return None + + if negate: + return '(' + prefix + self.ident(key) + ' is null or not ' + ret + ')' + + return ret + + def compile_condition(self, condition, statement, stat, prefix='current.', filter={}): + ret = set() + + # assignments: map conditions which are based on a (possible) set-statement + # back to their original selectors: + f = filter.copy() + f['has_set_tag'] = condition['key'] + f['max_id'] = statement['id'] + set_statements = stat.filter_statements(f) + + if len(set_statements) > 0: + ret.add('((' + ') or ('.join([ + self.compile_selector(s, stat, prefix, filter) + for s in set_statements + ]) + '))') + + # ignore generated tags (identified by leading .) 
+ if condition['key'][0] == '.': + if len(ret) == 0: + return 'false' + return ''.join(ret) + + # depending on the tag type compile the specified condition + tag_type = stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) + + if tag_type is None: + pass + elif tag_type[0] == 'hstore-value': + ret.add(self.compile_condition_hstore_value(condition, statement, tag_type, stat, prefix, filter)) + elif tag_type[0] == 'column': + ret.add(self.compile_condition_column(condition, statement, tag_type, stat, prefix, filter)) + else: + raise CompileError('unknown tag type {}'.format(tag_type)) + + if None in ret: + ret.remove(None) + if len(ret) == 0: + return None + + # merge conditions together, return + return '(' + ' or '.join(ret) + ')' + + def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None): + filter['object_type'] = object_type + + ret = { + self.compile_condition(c, statement, stat, prefix, filter) or 'true' + for c in statement['selector']['conditions'] + } + + if len(ret) == 0: + return 'true' + + if 'false' in ret: + return 'false' + + return ' and '.join(ret) + + def value_format_default(self, key, value): + return self.format(value) + + # escape strings for "like" matches, see http://www.postgresql.org/docs/9.1/static/functions-matching.html + def pg_like_escape(self, s): + s = s.replace('\\', '\\\\') + s = s.replace('_', '\\_') + s = s.replace('%', '\\%') + return s From b594fede63d352e9181eb387f70cf50aac88fe5f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 7 Nov 2014 10:53:44 +0100 Subject: [PATCH 005/209] DB Overpass: compiler, add db --- pgmapcss/compiler/compile_sql.py | 96 +++++++++ pgmapcss/db/db.py | 5 +- pgmapcss/db/overpass/__init__.py | 1 + pgmapcss/db/overpass/db.py | 30 +++ pgmapcss/db/overpass/db_functions.py | 295 +++++++++++++++++++++++++++ pgmapcss/db/overpass/init.sql | 14 ++ 6 files changed, 440 insertions(+), 1 deletion(-) create 
mode 100644 pgmapcss/db/overpass/__init__.py create mode 100644 pgmapcss/db/overpass/db.py create mode 100644 pgmapcss/db/overpass/db_functions.py create mode 100644 pgmapcss/db/overpass/init.sql diff --git a/pgmapcss/compiler/compile_sql.py b/pgmapcss/compiler/compile_sql.py index 59061dde..291695e1 100644 --- a/pgmapcss/compiler/compile_sql.py +++ b/pgmapcss/compiler/compile_sql.py @@ -217,6 +217,100 @@ def compile_condition_column(condition, statement, tag_type, stat, prefix, filte return ret +def compile_condition_overpass(condition, statement, tag_type, stat, prefix, filter): + ret = None + negate = False + key = tag_type[1] + op = condition['op'] + + if op[0:2] == '! ': + op = op[2:] + negate = True + + # eval() statements + if op == 'eval': + return None + + # ignore pseudo classes + if op == 'pseudo_class': + return None + + # regexp on key of tag + if op in ('key_regexp', 'key_regexp_case'): + return None + + # value-eval() statements + if condition['value_type'] == 'eval': + # treat other conditions as has_key + ret = prefix + column + ' ? ' + db.format(key); + + # = + elif op == '=': + ret = '__TYPE__[' + repr(key) + '=' + repr(condition['value']) + ']' + + # @= + elif op == '@=' and condition['value_type'] == 'value': + ret = '(' + ' or '.join([ + prefix + column + ' @> ' + db.format({ key: v }) + for v in condition['value'].split(';') + ]) + ')' + + # != + elif op == '!=': + ret = '( not ' + prefix + column + ' ? ' + db.format(key) +\ + 'or not ' + prefix + column + ' @> ' +\ + db.format({ key: condition['value'] }) + ')' + + # regexp match =~ + elif op == '=~': + ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ + prefix + column + '->' + db.format(key) +\ + (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ + db.format(condition['value']) + ')' + + # negated regexp match !~ + elif op == '!~': + ret = '(' + prefix + column + ' ? 
' + db.format(key) + ' and ' +\ + prefix + column + '->' + db.format(key) +\ + (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ + db.format(condition['value']) + ')' + + # prefix match ^= + elif op == '^=': + ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ + prefix + column + '->' + db.format(key) + ' like ' +\ + db.format(pg_like_escape(condition['value']) + '%') + ')' + + # suffix match $= + elif op == '$=': + ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ + prefix + column + '->' + db.format(key) + ' like ' +\ + db.format('%' + pg_like_escape(condition['value'])) + ')' + + # substring match *= + elif op == '*=': + ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ + prefix + column + '->' + db.format(key) + ' like ' +\ + db.format('%' + pg_like_escape(condition['value']) + '%') + ')' + + # list membership ~= + elif op == '~=': + ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ + db.format(condition['value']) + ' =any(string_to_array(' +\ + prefix + column + '->' + db.format(key) + ', \';\')))' + + else: + ret = '[' + repr(key) + ']' + + if ret is None: + return None + + if negate: + return '(not ' + prefix + column + ' ? 
' + db.format(key) +\ + ' or not ' + ret + ')' + + return ret + def compile_condition_sql(condition, statement, stat, prefix='current.', filter={}): ret = set() @@ -248,6 +342,8 @@ def compile_condition_sql(condition, statement, stat, prefix='current.', filter= ret.add(compile_condition_hstore_value(condition, statement, tag_type, stat, prefix, filter)) elif tag_type[0] == 'column': ret.add(compile_condition_column(condition, statement, tag_type, stat, prefix, filter)) + elif tag_type[0] == 'overpass': + ret.add(compile_condition_overpass(condition, statement, tag_type, stat, prefix, filter)) else: raise CompileError('unknown tag type {}'.format(tag_type)) diff --git a/pgmapcss/db/db.py b/pgmapcss/db/db.py index 4bcf95a3..34d78bcf 100644 --- a/pgmapcss/db/db.py +++ b/pgmapcss/db/db.py @@ -5,6 +5,7 @@ from .version import * import pgmapcss.db.osm2pgsql import pgmapcss.db.osmosis +import pgmapcss.db.overpass from pgmapcss.misc import strip_includes conn = None @@ -23,7 +24,7 @@ def __call__(self, param=[]): def connect(args, stat): global conn - if not args.database_type in ('osm2pgsql', 'osmosis'): + if not args.database_type in ('osm2pgsql', 'osmosis', 'overpass'): print('* Database type "{}" not supported right now'.format(args.database_type)) exit(1) @@ -43,6 +44,8 @@ def connect(args, stat): conn.database = pgmapcss.db.osm2pgsql.db(conn, stat) elif args.database_type == 'osmosis': conn.database = pgmapcss.db.osmosis.db(conn, stat) + elif args.database_type == 'overpass': + conn.database = pgmapcss.db.overpass.db(conn, stat) else: raise Exception('unknown database type {}'.format(args.database_type)) diff --git a/pgmapcss/db/overpass/__init__.py b/pgmapcss/db/overpass/__init__.py new file mode 100644 index 00000000..85eece32 --- /dev/null +++ b/pgmapcss/db/overpass/__init__.py @@ -0,0 +1 @@ +from .db import db diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py new file mode 100644 index 00000000..a334e9f2 --- /dev/null +++ 
b/pgmapcss/db/overpass/db.py @@ -0,0 +1,30 @@ +from pkg_resources import * +import postgresql +from ..default import default +from ..pg import format +from ..pg import ident + +class db(default): + def __init__(self, conn, stat): + default.__init__(self, conn, stat) + if not 'db.srs' in self.stat['config']: + self.stat['config']['db.srs'] = 4326 + + def tag_type(self, key, condition, selector, statement): + if key[0:4] == 'osm:': + if key == 'osm:id': + return ( 'column', 'id', self.compile_modify_id ) + elif key == 'osm:user': + return ( 'column', 'user_id', self.compile_user_id ) + elif key == 'osm:user_id': + return ( 'column', 'user_id' ) + elif key == 'osm:version': + return ( 'column', 'version' ) + elif key == 'osm:timestamp': + return ( 'column', 'tstamp' ) + elif key == 'osm:changeset': + return ( 'column', 'changeset_id' ) + else: + return None + + return ( 'overpass', key ) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py new file mode 100644 index 00000000..05b7efca --- /dev/null +++ b/pgmapcss/db/overpass/db_functions.py @@ -0,0 +1,295 @@ +#[out:json][bbox:{{bbox}}];(way[name=Marschnergasse];way[name=Erdbrustgasse]);out geom meta; + +global node_geom_plan +node_geom_plan = None +global way_geom_plan +way_geom_plan = None + +def node_geom(lat, lon): + global node_geom_plan + + if not node_geom_plan: + node_geom_plan = plpy.prepare('select ST_SetSRID(ST_Point($1, $2), 4326) as geom', [ 'float', 'float' ]) + + res = plpy.execute(node_geom_plan, [ lon, lat ]) + + return res[0]['geom'] + +def way_geom(l): + global way_geom_plan + + if not way_geom_plan: + way_geom_plan = plpy.prepare('select ST_GeomFromText($1, 4326) as geom', [ 'text' ]) + + res = plpy.execute(way_geom_plan, ['LINESTRING(' + ','.join([ + str(p['lon']) + ' ' + str(p['lat']) + for p in l + ]) + ')']) + + return res[0]['geom'] + +# Use this functions only with a database based on an import with osmosis +def objects(_bbox, where_clauses, add_columns={}, 
add_param_type=[], add_param_value=[]): + import urllib.request + import urllib.parse + import json + time_start = datetime.datetime.now() # profiling + + qry = '[out:json]' + + if _bbox: + plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' || ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) + res = plpy.execute(plan, [ _bbox ]) + qry += '[bbox:' + res[0]['bbox_string'] + ']' + plpy.warning(qry) + + qry += ';__QRY__;out meta geom;' + + # nodes + w = [] + for t in ('*', 'node', 'point'): + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + q = qry.replace('__QRY__', '(' + ');('.join(w) + ');') + q = q.replace('__TYPE__', 'node') + q = qry.replace('__QRY__', 'node[place]') + + #url = 'http://overpass.osm.rambler.ru/cgi/interpreter?' +\ + url = 'http://overpass-api.de/api/interpreter?' +\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + for r in res['elements']: + t = { + 'id': 'n' + str(r['id']), + 'types': ['node', 'point'], + 'tags': r['tags'], + 'geo': node_geom(r['lat'], r['lon']), + } + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = t['version'] if 'version' in t else '' + t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' + t['tags']['osm:user'] = t['user'] if 'user' in t else '' + t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' + t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + yield(t) + + #'http://overpass-turbo.eu/?Q=' + q).read() + + # ways + w = [] + for t in ('*', 'line', 'area', 'way'): + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + q = qry.replace('__QRY__', '(' + ');('.join(w) + ');') + q = q.replace('__TYPE__', 'way') + q = qry.replace('__QRY__', 'way[highway=residential]') + + url = 'http://overpass-api.de/api/interpreter?' 
+\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + for r in res['elements']: + t = { + 'id': 'n' + str(r['id']), + 'types': ['way', 'line'], + 'tags': r['tags'], + 'geo': way_geom(r['geometry']), + } + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = t['version'] if 'version' in t else '' + t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' + t['tags']['osm:user'] = t['user'] if 'user' in t else '' + t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' + t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + yield(t) + + time_stop = datetime.datetime.now() # profiling + plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) + +def objects_by_id(id_list): + _id_list = [ int(i[1:]) for i in id_list if i[0] == 'n' ] + plan = plpy.prepare('select id, tags, geom from nodes where id=any($1)', ['bigint[]']); + res = plpy.cursor(plan, [_id_list]) + for r in res: + yield { + 'id': 'n' + str(r['id']), + 'members': [], + 'tags': pghstore.loads(r['tags']), + 'geo': r['geom'], + 'types': ['node', 'point'] + } + + _id_list = [ int(i[1:]) for i in id_list if i[0] == 'w' ] + plan = plpy.prepare('select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, linestring as linestring, array_agg(node_id) as member_ids from (select ways.*, node_id from ways left join way_nodes on ways.id=way_nodes.way_id where ways.id=any($1) order by way_nodes.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id, linestring', ['bigint[]']); + res = plpy.cursor(plan, [_id_list]) + for r in res: + t = { + 'id': 'w' + str(r['id']), + 'members': [ { + 'member_id': 'n' + str(m), + 'sequence_id': str(i) + } + for i, m in enumerate(r['member_ids']) + ], + 'tags': pghstore.loads(r['tags']), + 'geo': r['linestring'], + 'types': ['way', 'line', 'area'] + } + t['tags']['osm:id'] = 
str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield(t) + + _id_list = [ int(i[1:]) for i in id_list if i[0] == 'r' ] + plan = plpy.prepare('select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, array_agg(lower(member_type) || member_id) as member_ids, array_agg(member_role) as member_roles from (select relations.*, member_type, member_id, member_role from relations left join relation_members on relations.id=relation_members.relation_id where relations.id=any($1) order by relation_members.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id', ['bigint[]']); + res = plpy.cursor(plan, [_id_list]) + for r in res: + t = { + 'id': 'r' + str(r['id']), + 'tags': pghstore.loads(r['tags']), + 'members': [ { + 'member_id': m[0], + 'role': m[1], + 'sequence_id': i + } + for i, m in enumerate(zip(r['member_ids'], r['member_roles'])) + ], + 'geo': None, + 'types': ['relation'] + } + t['tags']['osm:id'] = str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield(t) + +def objects_member_of(member_id, parent_type, parent_conditions): + if parent_type == 'relation': + plan = plpy.prepare('select *, (select name from users where id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2', ['bigint', 'text']); + res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()]) + for r in res: + t = { + 'id': 'r' + str(r['id']), + 'tags': pghstore.loads(r['tags']), + 'types': ['relation'], + 'geo': None, + 'link_tags': { + 'sequence_id': 
str(r['sequence_id']), + 'role': str(r['member_role']), + 'member_id': r['member_type'].lower() + str(r['member_id']), + } + } + t['tags']['osm:id'] = str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield(t) + + if parent_type == 'way' and member_id[0] == 'n': + num_id = int(member_id[1:]) + plan = plpy.prepare('select *, (select name from users where id=user_id) as user from way_nodes join ways on way_nodes.way_id=ways.id where node_id=$1', ['bigint']); + res = plpy.cursor(plan, [num_id]) + for r in res: + t = { + 'id': 'w' + str(r['id']), + 'tags': pghstore.loads(r['tags']), + 'types': ['way'], + 'geo': r['linestring'], + 'link_tags': { + 'member_id': member_id, + 'sequence_id': str(r['sequence_id']) + } + } + t['tags']['osm:id'] = str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield(t) + +def objects_members(relation_id, parent_type, parent_conditions): + ob = list(objects_by_id([relation_id])) + + if not len(ob): + return + + ob = ob[0] + + link_obs_ids = [ i['member_id'] for i in ob['members'] ] + link_obs = {} + for o in objects_by_id(link_obs_ids): + link_obs[o['id']] = o + + for member in ob['members']: + if not member['member_id'] in link_obs: + continue + + ret = link_obs[member['member_id']] + + if parent_type not in ret['types']: + continue + + ret['link_tags'] = member + yield ret + +def objects_near(max_distance, ob, parent_selector, where_clause, check_geo=None): + if ob: + geom = ob['geo'] + elif 'geo' in current['properties'][current['pseudo_element']]: + geom = current['properties'][current['pseudo_element']]['geo'] + else: + geom = 
current['object']['geo'] + + if where_clause == '': + where_clause = 'true' + + max_distance = to_float(eval_metric([ max_distance, 'u' ])) + if max_distance is None: + return [] + elif max_distance == 0: + bbox = geom + else: + plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ geom, max_distance ]) + bbox = res[0]['r'] + + if check_geo == 'within': + where_clause += " and ST_DWithin(way, $2, 0.0)" + elif check_geo == 'surrounds': + where_clause += " and ST_DWithin($2, way, 0.0)" + elif check_geo == 'overlaps': + where_clause += " and ST_Overlaps($2, way)" + + obs = [] + for ob in objects( + bbox, + { parent_selector: where_clause }, + { # add_columns + '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' + }, + [ 'geometry' ], + [ geom ] + ): + if ob['id'] != current['object']['id'] and ob['__distance'] <= max_distance: + ob['link_tags'] = { + 'distance': eval_metric([ str(ob['__distance']) + 'u', 'px' ]) + } + obs.append(ob) + + obs = sorted(obs, key=lambda ob: ob['__distance'] ) + return obs diff --git a/pgmapcss/db/overpass/init.sql b/pgmapcss/db/overpass/init.sql new file mode 100644 index 00000000..fca61fdd --- /dev/null +++ b/pgmapcss/db/overpass/init.sql @@ -0,0 +1,14 @@ +-- Create multicolumn way / tags indexes +do $$ +begin +if not exists ( + select 1 + from pg_class + where relname = 'nodes' + ) then + + raise notice E'\ncreating multicolumn indexes - please be patient ...'; + create index nodes_geom_tags on nodes using gist(geom, tags); + create index ways_linestring_tags on ways using gist(linestring, tags); +end if; +end$$; From dad1c1a8ec5361bef4ab9cacd1928e839700d6e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 10 Nov 2014 22:16:33 +0100 Subject: [PATCH 006/209] DB/Overpass: resolve set statements --- pgmapcss/db/overpass/db.py | 66 
+++++++++++++++++++++++++++----------- 1 file changed, 47 insertions(+), 19 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index a3793064..66b16f14 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -134,16 +134,14 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte set_statements = stat.filter_statements(f) if len(set_statements) > 0: - ret.add('((' + ') or ('.join([ - self.compile_selector(s, stat, prefix, filter) + set_statements = { + self.compile_selector(s, stat, prefix, filter, no_object_type=True) for s in set_statements - ]) + '))') + } # ignore generated tags (identified by leading .) if condition['key'][0] == '.': - if len(ret) == 0: - return 'false' - return ''.join(ret) + return set_statements # depending on the tag type compile the specified condition tag_type = stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) @@ -158,22 +156,28 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte if None in ret: ret.remove(None) if len(ret) == 0: - return None + return set_statements + + if len(set_statements): + return { + s + ''.join(ret) + for s in set_statements + } # merge conditions together, return - return '' + ''.join(ret) + '' + return ''.join(ret) def merge_conditions(self, conditions): types = [ t for t, cs in conditions if t != True ] conditions = { t: - '(\n' + ';\n'.join([ + '(\n' + '\n'.join([ cs for t2, cs in conditions if t == t2 if cs != 'false' - ]) + ';\n);' + ]) + '\n);' for t in types } @@ -183,18 +187,42 @@ def merge_conditions(self, conditions): if cs != '()' } - def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None): + def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, no_object_type=False): filter['object_type'] = object_type - ret = { - self.compile_condition(c, statement, stat, prefix, filter) or 
'true' + conditions = [ + self.compile_condition(c, statement, stat, prefix, filter) or None for c in statement['selector']['conditions'] - } + ] - if len(ret) == 0: - return 'true' + if no_object_type: + ret = { '' } + else: + ret = { '__TYPE__' } - if 'false' in ret: - return 'false' + for condition in conditions: + if condition is None: + continue + + if condition is None: + pass + + elif type(condition) == set: + ret = [ + r + c + for c in condition + for r in ret + ] + else: + ret = [ + r + condition + for r in ret + ] + + if False in ret: + return False + + if no_object_type: + return ''.join(ret) - return '__TYPE__' + ''.join(ret) + return ';'.join(ret) + ';' From 43fc2667d3ffc5fd5c8ad490c838c5ec7811f032 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 10 Nov 2014 22:25:21 +0100 Subject: [PATCH 007/209] DB/Overpass: improve db functions - fix union in queries - build either linestrings or polygons - bugfixes --- pgmapcss/db/overpass/db_functions.py | 47 +++++++++++++++------------- 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index dec33145..ae72a7ae 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -1,29 +1,33 @@ #[out:json][bbox:{{bbox}}];(way[name=Marschnergasse];way[name=Erdbrustgasse]);out geom meta; -global node_geom_plan -node_geom_plan = None -global way_geom_plan -way_geom_plan = None - def node_geom(lat, lon): - global node_geom_plan + global geom_plan - if not node_geom_plan: - node_geom_plan = plpy.prepare('select ST_SetSRID(ST_Point($1, $2), 4326) as geom', [ 'float', 'float' ]) + try: + geom_plan + except NameError: + geom_plan = plpy.prepare('select ST_GeomFromText($1, 4326) as geom', [ 'text' ]) - res = plpy.execute(node_geom_plan, [ lon, lat ]) + res = plpy.execute(geom_plan, [ 'POINT({} {})'.format(lon, lat) ]) return res[0]['geom'] -def way_geom(l): - global 
way_geom_plan +def way_geom(r, is_polygon): + global geom_plan + + try: + geom_plan + except NameError: + geom_plan = plpy.prepare('select ST_GeomFromText($1, 4326) as geom', [ 'text' ]) - if not way_geom_plan: - way_geom_plan = plpy.prepare('select ST_GeomFromText($1, 4326) as geom', [ 'text' ]) + if is_polygon: + t = 'POLYGON' + else: + t = 'LINESTRING' - res = plpy.execute(way_geom_plan, ['LINESTRING(' + ','.join([ + res = plpy.execute(geom_plan, [t + '(' + ','.join([ str(p['lon']) + ' ' + str(p['lat']) - for p in l + for p in r['geometry'] ]) + ')']) return res[0]['geom'] @@ -41,7 +45,6 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' || ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) qry += '[bbox:' + res[0]['bbox_string'] + ']' - plpy.warning(qry) qry += ';__QRY__;out meta geom;' @@ -52,7 +55,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '(' + ');('.join(w) + ')') + q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'node') #url = 'http://overpass.osm.rambler.ru/cgi/interpreter?' +\ @@ -85,9 +88,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '(' + ');('.join(w) + ')') + q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'way') - plpy.warning(q) url = 'http://overpass-api.de/api/interpreter?' 
+\ urllib.parse.urlencode({ 'data': q }) @@ -95,11 +97,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v res = json.loads(f) for r in res['elements']: + is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] t = { - 'id': 'n' + str(r['id']), - 'types': ['way', 'line'], + 'id': 'w' + str(r['id']), + 'types': ['way', 'line', 'area'] if is_polygon else ['way', 'line'], 'tags': r['tags'], - 'geo': way_geom(r['geometry']), + 'geo': way_geom(r, is_polygon), } t['tags']['osm:id'] = t['id'] t['tags']['osm:version'] = t['version'] if 'version' in t else '' From ec9ea3e4b2b81cc3ea9f5b12f7abb6ee54f4bb1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 11 Nov 2014 08:17:29 +0100 Subject: [PATCH 008/209] DB/Overpass: First attempt in implementing multipolygons --- pgmapcss/db/overpass/db_functions.py | 94 ++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index ae72a7ae..db9e6af6 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -32,6 +32,62 @@ def way_geom(r, is_polygon): return res[0]['geom'] +def linestring(geom): + return 'LINESTRING(' + ','.join([ + '{} {}'.format(g['lon'], g['lat']) + for g in geom + ]) + ')' + +def relation_geom(r): + global geom_plan + + try: + geom_plan_makepoly + except NameError: + geom_plan_makepoly = plpy.prepare('select ST_SetSRID(ST_MakePolygon(ST_GeomFromText($1)), 4326) as geom', [ 'text' ]) + geom_plan_collect = plpy.prepare('select ST_Collect($1) as geom', [ 'geometry[]' ]) + geom_plan_substract = plpy.prepare('select ST_Difference($1, $2) as geom', [ 'geometry', 'geometry' ]) + + if 'tags' in r and 'type' in r['tags'] and r['tags']['type'] in ('multipolygon', 'boundary'): + t = 'MULTIPOLYGON' + else: + return None + + polygons = [] + lines = [] + inner_polygons = [] + inner_lines = [] + + for m in r['members']: + 
if m['role'] in ('outer', ''): + if m['geometry'][0] == m['geometry'][-1]: + polygons.append(linestring(m['geometry'])) + else: + lines.append(linestring(m['geometry'])) + + elif m['role'] in ('inner'): + if m['geometry'][0] == m['geometry'][-1]: + inner_polygons.append(linestring(m['geometry'])) + else: + inner_lines.append(linestring(m['geometry'])) + + polygons = [ + plpy.execute(geom_plan_makepoly, [ p ])[0]['geom'] + for p in polygons + ] + polygons = plpy.execute(geom_plan_collect, [ polygons ])[0]['geom'] + inner_polygons = [ + plpy.execute(geom_plan_makepoly, [ p ])[0]['geom'] + for p in inner_polygons + ] + + for p in inner_polygons: + polygons = plpy.execute(geom_plan_substract, [ polygons, p ])[0]['geom'] + inner_polygons = None + + #plpy.warning(polygons, lines, inner_polygons, inner_lines) + return polygons + # Use this functions only with a database based on an import with osmosis def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): import urllib.request @@ -81,6 +137,44 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v #'http://overpass-turbo.eu/?Q=' + q).read() + # relations + w = [] + for t in ('*', 'relation', 'area'): + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') + q = q.replace('__TYPE__', 'relation') + + #url = 'http://overpass.osm.rambler.ru/cgi/interpreter?' +\ + url = 'http://overpass-api.de/api/interpreter?' 
+\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + for r in res['elements']: + g = relation_geom(r) + if not g or not 'tags' in r: + continue + plpy.warning(g) + t = { + 'id': 'n' + str(r['id']), + 'types': ['area', 'relation'], + 'tags': r['tags'], + 'geo': g + } + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = t['version'] if 'version' in t else '' + t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' + t['tags']['osm:user'] = t['user'] if 'user' in t else '' + t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' + t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + yield(t) + + #'http://overpass-turbo.eu/?Q=' + q).read() + + return # ways w = [] for t in ('*', 'line', 'area', 'way'): From 1bf6116ae9432f13ead2ede492a20ccf7f9a196d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 11 Nov 2014 10:07:33 +0100 Subject: [PATCH 009/209] DB/Overpass: multipolygons: construct from non-closed linestrings --- pgmapcss/db/overpass/db_functions.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index db9e6af6..13efac3a 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -47,6 +47,8 @@ def relation_geom(r): geom_plan_makepoly = plpy.prepare('select ST_SetSRID(ST_MakePolygon(ST_GeomFromText($1)), 4326) as geom', [ 'text' ]) geom_plan_collect = plpy.prepare('select ST_Collect($1) as geom', [ 'geometry[]' ]) geom_plan_substract = plpy.prepare('select ST_Difference($1, $2) as geom', [ 'geometry', 'geometry' ]) + # merge all lines together, return all closed rings (but remove unconnected lines) + geom_plan_linemerge = plpy.prepare('select geom from (select (ST_Dump((ST_LineMerge(ST_Collect(geom))))).geom as geom from (select ST_GeomFromText(unnest($1), 4326) geom) 
t offset 0) t where ST_NPoints(geom) > 3 and ST_IsClosed(geom)', [ 'text[]' ]) if 'tags' in r and 'type' in r['tags'] and r['tags']['type'] in ('multipolygon', 'boundary'): t = 'MULTIPOLYGON' @@ -75,17 +77,25 @@ def relation_geom(r): plpy.execute(geom_plan_makepoly, [ p ])[0]['geom'] for p in polygons ] + + lines = plpy.execute(geom_plan_linemerge, [ lines ]) + for r in lines: + polygons.append(r['geom']) + polygons = plpy.execute(geom_plan_collect, [ polygons ])[0]['geom'] inner_polygons = [ plpy.execute(geom_plan_makepoly, [ p ])[0]['geom'] for p in inner_polygons ] + inner_lines = plpy.execute(geom_plan_linemerge, [ inner_lines ]) + for r in inner_lines: + inner_polygons.append(r['geom']) + for p in inner_polygons: polygons = plpy.execute(geom_plan_substract, [ polygons, p ])[0]['geom'] inner_polygons = None - #plpy.warning(polygons, lines, inner_polygons, inner_lines) return polygons # Use this functions only with a database based on an import with osmosis @@ -157,7 +167,6 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v g = relation_geom(r) if not g or not 'tags' in r: continue - plpy.warning(g) t = { 'id': 'n' + str(r['id']), 'types': ['area', 'relation'], @@ -174,7 +183,6 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v #'http://overpass-turbo.eu/?Q=' + q).read() - return # ways w = [] for t in ('*', 'line', 'area', 'way'): From 3558a932e21e923ecd824858ecc2f7b77ebfb547 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 11 Nov 2014 10:14:23 +0100 Subject: [PATCH 010/209] DB/Overpass: bugfix arranging way areas --- pgmapcss/db/overpass/db_functions.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 13efac3a..a898cad9 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -20,15 +20,17 @@ def way_geom(r, 
is_polygon): except NameError: geom_plan = plpy.prepare('select ST_GeomFromText($1, 4326) as geom', [ 'text' ]) + geom_str = ','.join([ + str(p['lon']) + ' ' + str(p['lat']) + for p in r['geometry'] + ]) + if is_polygon: - t = 'POLYGON' + geom_str = 'POLYGON((' + geom_str + '))' else: - t = 'LINESTRING' + geom_str = 'LINESTRING('+ geom_str + ')' - res = plpy.execute(geom_plan, [t + '(' + ','.join([ - str(p['lon']) + ' ' + str(p['lat']) - for p in r['geometry'] - ]) + ')']) + res = plpy.execute(geom_plan, [ geom_str ]) return res[0]['geom'] From c70de393a8c7841f01a16bd8c0d7a749defb81b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 12 Nov 2014 14:40:25 +0100 Subject: [PATCH 011/209] DB/Overpass: handle tag-less multipolygons (use tags of outer ways) --- pgmapcss/db/overpass/db_functions.py | 138 ++++++++++++++++++++++----- 1 file changed, 115 insertions(+), 23 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index a898cad9..981d60dc 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -100,12 +100,14 @@ def relation_geom(r): return polygons -# Use this functions only with a database based on an import with osmosis def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): import urllib.request import urllib.parse import json time_start = datetime.datetime.now() # profiling + non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} + ways_done = [] + rels_done = [] qry = '[out:json]' @@ -126,7 +128,6 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'node') - #url = 
'http://overpass.osm.rambler.ru/cgi/interpreter?' +\ url = 'http://overpass-api.de/api/interpreter?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') @@ -149,31 +150,116 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v #'http://overpass-turbo.eu/?Q=' + q).read() - # relations + # way areas and multipolygons based on outer tags w = [] - for t in ('*', 'relation', 'area'): + for t in ('*', 'area'): + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + # query for ways which match query, also get their parent relations and + # again all child ways. if a way is outer way of a multipolygon, the + # multipolygon has no (relevant) tags and all outer ways share the same + # tags (save non relevant tags) the ways are discarded and the relation + # is used - as type 'multipolygon' and a 'm' prefixed to the ID + q = qry.replace('__QRY__', + 'relation[type=multipolygon] -> .rel;' + + '((' + ');('.join(w) + ');) -> .outer;relation(bw.outer)[type=multipolygon]') + '.outer out tags qt;' + q = q.replace('__TYPE__', 'way(r.rel:"outer")') + plpy.warning(q) + + url = 'http://overpass-api.de/api/interpreter?' 
+\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + _ways = {} + _rels = {} + + for r in res['elements']: + if r['type'] == 'way': + _ways[r['id']] = r + elif r['type'] == 'relation': + _rels[r['id']] = r + + for rid, r in _rels.items(): + if r['tags']['type'] in ('multipolygon', 'boundary') and len([ + v + for v in r['tags'] + if v not in non_relevant_tags + ]) == 0: + is_valid_mp = True + outer_tags = None + + for outer in r['members']: + if outer['role'] in ('', 'outer'): + outer_way = _ways[outer['ref']] + tags = { + vk: vv + for vk, vv in outer_way['tags'].items() + if vk not in non_relevant_tags + } if 'tags' in outer_way else {} + + if outer_tags is None: + outer_tags = tags + elif outer_tags != tags: + is_valid_mp = True + + if is_valid_mp: + rels_done.append(rid) + for outer in r['members']: + if outer['role'] in ('', 'outer'): + ways_done.append(outer['ref']) + + t = { + 'id': 'm' + str(r['id']), + 'types': ['multipolygon', 'area'], + # TODO: merge tags with relation tags and + # (non-relevant) tags of other outer ways + 'tags': outer_tags, + 'geo': relation_geom(r), + } + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = t['version'] if 'version' in t else '' + t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' + t['tags']['osm:user'] = t['user'] if 'user' in t else '' + t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' + t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + + yield(t) + else: + plpy.warning('tag-less multipolygon with non-similar outer ways: {}'.format(rid)) + + _ways = None + _rels = None + + # ways + w = [] + for t in ('*', 'line', 'way', 'area'): if t in where_clauses: w.append(where_clauses[t]) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') - q = q.replace('__TYPE__', 'relation') + q = q.replace('__TYPE__', 'way') + plpy.warning(q) - #url = 
'http://overpass.osm.rambler.ru/cgi/interpreter?' +\ url = 'http://overpass-api.de/api/interpreter?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) for r in res['elements']: - g = relation_geom(r) - if not g or not 'tags' in r: - continue + if r['id'] in ways_done: + pass + ways_done.append(r['id']) + + is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] t = { - 'id': 'n' + str(r['id']), - 'types': ['area', 'relation'], - 'tags': r['tags'], - 'geo': g + 'id': 'w' + str(r['id']), + 'types': ['way', 'line', 'area'] if is_polygon else ['way', 'line'], + 'tags': r['tags'] if 'tags' in r else {}, + 'geo': way_geom(r, is_polygon), } t['tags']['osm:id'] = t['id'] t['tags']['osm:version'] = t['version'] if 'version' in t else '' @@ -181,19 +267,19 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v t['tags']['osm:user'] = t['user'] if 'user' in t else '' t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' - yield(t) - #'http://overpass-turbo.eu/?Q=' + q).read() + yield(t) - # ways + # relations w = [] - for t in ('*', 'line', 'area', 'way'): + for t in ('*', 'relation', 'area'): if t in where_clauses: w.append(where_clauses[t]) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') - q = q.replace('__TYPE__', 'way') + q = q.replace('__TYPE__', 'relation') + plpy.warning(q) url = 'http://overpass-api.de/api/interpreter?' 
+\ urllib.parse.urlencode({ 'data': q }) @@ -201,12 +287,18 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v res = json.loads(f) for r in res['elements']: - is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] + if r['id'] in rels_done: + pass + rels_done.append(r['id']) + + g = relation_geom(r) + if not g or not 'tags' in r: + continue t = { - 'id': 'w' + str(r['id']), - 'types': ['way', 'line', 'area'] if is_polygon else ['way', 'line'], - 'tags': r['tags'], - 'geo': way_geom(r, is_polygon), + 'id': 'r' + str(r['id']), + 'types': ['area', 'relation'], + 'tags': r['tags'] if 'tags' in r else {}, + 'geo': g } t['tags']['osm:id'] = t['id'] t['tags']['osm:version'] = t['version'] if 'version' in t else '' From e68094441049e03e10b8586e268d47683a057142 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 12 Nov 2014 15:02:55 +0100 Subject: [PATCH 012/209] DB/Overpass: Bugfix, loading meta data of objects --- pgmapcss/db/overpass/db_functions.py | 40 ++++++++++++++-------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 981d60dc..721b3003 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -141,11 +141,11 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v 'geo': node_geom(r['lat'], r['lon']), } t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = t['version'] if 'version' in t else '' - t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' - t['tags']['osm:user'] = t['user'] if 'user' in t else '' - t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' - t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] 
= r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' yield(t) #'http://overpass-turbo.eu/?Q=' + q).read() @@ -220,11 +220,11 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v 'geo': relation_geom(r), } t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = t['version'] if 'version' in t else '' - t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' - t['tags']['osm:user'] = t['user'] if 'user' in t else '' - t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' - t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] = r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' yield(t) else: @@ -262,11 +262,11 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v 'geo': way_geom(r, is_polygon), } t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = t['version'] if 'version' in t else '' - t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' - t['tags']['osm:user'] = t['user'] if 'user' in t else '' - t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' - t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] = r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' yield(t) @@ -301,11 +301,11 @@ def 
objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v 'geo': g } t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = t['version'] if 'version' in t else '' - t['tags']['osm:user_id'] = t['uid'] if 'uid' in t else '' - t['tags']['osm:user'] = t['user'] if 'user' in t else '' - t['tags']['osm:timestamp'] = t['timestamp'] if 'timestamp' in t else '' - t['tags']['osm:changeset'] = t['changeset'] if 'changeset' in t else '' + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] = r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' yield(t) time_stop = datetime.datetime.now() # profiling From a6664cd9e86c01a3098aadb3243268f33038542a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 12 Nov 2014 20:09:56 +0100 Subject: [PATCH 013/209] DB/Overpass: include (all) areas which are larger than bbox --- pgmapcss/db/overpass/db_functions.py | 41 ++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 721b3003..57bf2b4e 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -308,6 +308,47 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' yield(t) + # areas + w = [] + for t in ('*', 'relation', 'area'): + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + plan = plpy.prepare("select ST_Y(ST_Centroid($1::geometry)) || ',' || ST_X(ST_Centroid($1::geometry)) as geom", [ 'geometry' ]) + res = plpy.execute(plan, [ _bbox ]) + + q = qry.replace('__QRY__', 'is_in({0});way(pivot);out meta 
geom;is_in({0});relation(pivot)'.format(res[0]['geom'])) + plpy.warning(q) + + url = 'http://overpass-api.de/api/interpreter?' +\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + for r in res['elements']: + if (r['type'] == 'way' and r['id'] in ways_done) or\ + (r['type'] == 'relation' and r['id'] in rels_done): + continue + + t = { + 'id': 'r' + str(r['id']), + 'tags': r['tags'] if 'tags' in r else {}, + } + if r['type'] == 'relation': + t['types'] = ['area', 'relation'] + t['geo'] = relation_geom(r) + elif r['type'] == 'way': + t['types'] = ['area', 'line', 'way'] + t['geo'] = way_geom(r, True) + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] = r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' + yield(t) + time_stop = datetime.datetime.now() # profiling plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) From 8560ed33bda7f89217549ba52fd54ce9b749e4a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 13 Nov 2014 10:30:09 +0100 Subject: [PATCH 014/209] DB/Overpass: Bugfix setting ID for areas --- pgmapcss/db/overpass/db_functions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 57bf2b4e..c2ad8c01 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -332,13 +332,14 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v continue t = { - 'id': 'r' + str(r['id']), 'tags': r['tags'] if 'tags' in r else {}, } if r['type'] == 'relation': + t['id'] = 'r' + str(r['id']) t['types'] = 
['area', 'relation'] t['geo'] = relation_geom(r) elif r['type'] == 'way': + t['id'] = 'w' + str(r['id']) t['types'] = ['area', 'line', 'way'] t['geo'] = way_geom(r, True) t['tags']['osm:id'] = t['id'] From b7745b5f24206f0f2332be5307dc7377ec336c3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 13 Nov 2014 10:44:47 +0100 Subject: [PATCH 015/209] DB/Overpass: re-implement objects_by_id() --- pgmapcss/db/overpass/db_functions.py | 114 +++++++++++++-------------- 1 file changed, 54 insertions(+), 60 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index c2ad8c01..f996c785 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -100,6 +100,34 @@ def relation_geom(r): return polygons +def assemble_object(r): + t = { + 'tags': r['tags'] if 'tags' in r else {}, + } + if r['type'] == 'node': + t['id'] = 'n' + str(r['id']) + t['types'] = ['area', 'line', 'way'] + t['geo'] = node_geom(r['lat'], r['lon']), + elif r['type'] == 'way': + is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] + t['id'] = 'w' + str(r['id']) + t['types'] = ['line', 'way'] + if is_polygon: + t['types'].append('area') + t['geo'] = way_geom(r, is_polygon) + elif r['type'] == 'relation': + t['id'] = 'r' + str(r['id']) + t['types'] = ['area', 'relation'] + t['geo'] = relation_geom(r) + t['tags']['osm:id'] = t['id'] + t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' + t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' + t['tags']['osm:user'] = r['user'] if 'user' in r else '' + t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' + t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' + + return t + def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): import urllib.request import urllib.parse @@ -354,66 +382,32 @@ def objects(_bbox, where_clauses, 
add_columns={}, add_param_type=[], add_param_v plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) def objects_by_id(id_list): - _id_list = [ int(i[1:]) for i in id_list if i[0] == 'n' ] - plan = plpy.prepare('select id, tags, geom from nodes where id=any($1)', ['bigint[]']); - res = plpy.cursor(plan, [_id_list]) - for r in res: - yield { - 'id': 'n' + str(r['id']), - 'members': [], - 'tags': pghstore.loads(r['tags']), - 'geo': r['geom'], - 'types': ['node', 'point'] - } - - _id_list = [ int(i[1:]) for i in id_list if i[0] == 'w' ] - plan = plpy.prepare('select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, linestring as linestring, array_agg(node_id) as member_ids from (select ways.*, node_id from ways left join way_nodes on ways.id=way_nodes.way_id where ways.id=any($1) order by way_nodes.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id, linestring', ['bigint[]']); - res = plpy.cursor(plan, [_id_list]) - for r in res: - t = { - 'id': 'w' + str(r['id']), - 'members': [ { - 'member_id': 'n' + str(m), - 'sequence_id': str(i) - } - for i, m in enumerate(r['member_ids']) - ], - 'tags': pghstore.loads(r['tags']), - 'geo': r['linestring'], - 'types': ['way', 'line', 'area'] - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) - t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) - - _id_list = [ int(i[1:]) for i in id_list if i[0] == 'r' ] - plan = plpy.prepare('select id, tags, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, array_agg(lower(member_type) || member_id) as member_ids, array_agg(member_role) as member_roles from (select relations.*, member_type, member_id, member_role from relations left join relation_members on 
relations.id=relation_members.relation_id where relations.id=any($1) order by relation_members.sequence_id) t group by id, tags, version, user_id, tstamp, changeset_id', ['bigint[]']); - res = plpy.cursor(plan, [_id_list]) - for r in res: - t = { - 'id': 'r' + str(r['id']), - 'tags': pghstore.loads(r['tags']), - 'members': [ { - 'member_id': m[0], - 'role': m[1], - 'sequence_id': i - } - for i, m in enumerate(zip(r['member_ids'], r['member_roles'])) - ], - 'geo': None, - 'types': ['relation'] - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) - t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) + import urllib.request + import urllib.parse + import json + q = '' + multipolygons = [] + for i in id_list: + if i[0:1] == 'n': + q += 'node({});out meta geom;'.format(i[1:]) + elif i[0:1] == 'w': + q += 'way({});out meta geom;'.format(i[1:]) + elif i[0:1] == 'r': + q += 'relation({});out meta geom;'.format(i[1:]) + + if q == '': + return + q = '[out:json];' + q + + plpy.warning(q) + + url = 'http://overpass-api.de/api/interpreter?' 
+\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + res = json.loads(f) + + for r in res['elements']: + yield(assemble_object(r)) def objects_member_of(member_id, parent_type, parent_conditions): if parent_type == 'relation': From 099da94a7796a0abac64898cc2c49905f518608b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 15 Nov 2014 10:18:33 +0100 Subject: [PATCH 016/209] DB/Overpass: add option to set a different overpass api url --- doc/config_options.md | 1 + pgmapcss/db/db.py | 2 +- pgmapcss/db/overpass/db.py | 3 +++ pgmapcss/db/overpass/db_functions.py | 10 +++++----- 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/doc/config_options.md b/doc/config_options.md index 587866b9..11c93d5e 100644 --- a/doc/config_options.md +++ b/doc/config_options.md @@ -10,6 +10,7 @@ The following config options are supported: | unit.srs | Spatial Reference System to use for distances. If other values than 900913 are used, unexpected behaviour might happen. | 900913 | | srs | Default Spatial Reference System to use on the frontend side | 900913 when using with renderer (mode 'database-function'), 4326 otherwise | | db.hstore-only | osm2pgsql only: Do not use the separate tag columns, only use the hstore 'tags' column. Might be faster on large databases in combination with a multicolumn index on way and tags: e.g. create index planet_osm_point_way_tags on planet_osm_point using gist(way, tags). Requires --hstore-all on osm2pgsql when importing the database. | true/**false** | +| db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api/interpreter | | | offline | When compiling standalone mode, do not make any requests to the database. | true/**false** | | debug.profiler | during execution, show some statistics about query/processing time and count of objects. 
| true/**false** | | debug.context | show bounding box and scale denominator of requests. | true/**false** | diff --git a/pgmapcss/db/db.py b/pgmapcss/db/db.py index 34d78bcf..3f58ed79 100644 --- a/pgmapcss/db/db.py +++ b/pgmapcss/db/db.py @@ -92,7 +92,7 @@ def query_functions(stat): ret = strip_includes(resource_stream(__name__, conn.database_type + '/db_functions.py'), stat) for k, v in stat['config'].items(): - if re.match('^[a-zA-Z\._0-9]+$', k): + if re.match('^[a-zA-Z\.\-_0-9]+$', k): ret = ret.replace('{' + k + '}', str(v)) return ret diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 66b16f14..9e09e609 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -10,6 +10,9 @@ def __init__(self, conn, stat): if not 'db.srs' in self.stat['config']: self.stat['config']['db.srs'] = 4326 + if not 'db.overpass-url' in self.stat['config']: + self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api/interpreter' + def tag_type(self, key, condition, selector, statement): if key[0:4] == 'osm:': if key == 'osm:id': diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 57bf2b4e..81449174 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -128,7 +128,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'node') - url = 'http://overpass-api.de/api/interpreter?' +\ + url = '{db.overpass-url}?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) @@ -168,7 +168,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = q.replace('__TYPE__', 'way(r.rel:"outer")') plpy.warning(q) - url = 'http://overpass-api.de/api/interpreter?' +\ + url = '{db.overpass-url}?' 
+\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) @@ -244,7 +244,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = q.replace('__TYPE__', 'way') plpy.warning(q) - url = 'http://overpass-api.de/api/interpreter?' +\ + url = '{db.overpass-url}?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) @@ -281,7 +281,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = q.replace('__TYPE__', 'relation') plpy.warning(q) - url = 'http://overpass-api.de/api/interpreter?' +\ + url = '{db.overpass-url}?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) @@ -321,7 +321,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', 'is_in({0});way(pivot);out meta geom;is_in({0});relation(pivot)'.format(res[0]['geom'])) plpy.warning(q) - url = 'http://overpass-api.de/api/interpreter?' +\ + url = '{db.overpass-url}?' +\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') res = json.loads(f) From b46f89ab0289b2636ad019696c76ba312fb77635 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 15 Nov 2014 12:54:44 +0100 Subject: [PATCH 017/209] DB/Overpass: handle error message when areas are not available in overpass api --- pgmapcss/db/overpass/db_functions.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 81449174..fbce1e76 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -324,7 +324,15 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v url = '{db.overpass-url}?' 
+\ urllib.parse.urlencode({ 'data': q }) f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) + + try: + res = json.loads(f) + except ValueError: + print(f) + if re.search('osm3s_v[0-9\.]+_areas', f): + res = { 'elements': [] } + else: + raise for r in res['elements']: if (r['type'] == 'way' and r['id'] in ways_done) or\ From 825a19c1a6e9d972ecb5bacfbcaec7cc23e545c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 15 Nov 2014 13:24:42 +0100 Subject: [PATCH 018/209] DB/Overpass: Bugfix, multipolygons: not all outer ways in bbox --- pgmapcss/db/overpass/db_functions.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index fbce1e76..d6851eb3 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -193,6 +193,9 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v for outer in r['members']: if outer['role'] in ('', 'outer'): + if not outer['ref'] in _ways: + continue + outer_way = _ways[outer['ref']] tags = { vk: vv From 3e5addfb73f4ccc5760a881620f7cff358a18807 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 10:28:53 +0100 Subject: [PATCH 019/209] Compile DB selects: change list of conditions to list instead of set --- pgmapcss/compiler/compile_db_selects.py | 4 ++-- pgmapcss/db/postgresql_db/db.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index ef445767..244dee4e 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -53,14 +53,14 @@ def compile_db_selects(id, stat): # compile all selectors # TODO: define list of possible object_types - conditions = { + conditions = [ ( object_type, stat['database'].compile_selector(stat['statements'][i], stat, prefix='', 
filter=filter, object_type=object_type) ) for i in current_selectors for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) - } + ] conditions = stat['database'].merge_conditions(conditions) diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index eda672d9..0eeaf6fe 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -9,6 +9,7 @@ def __init__(self, conn, stat): self.ident = pgmapcss.db.pg.ident def merge_conditions(self, conditions): + conditions = set(conditions) types = [ t for t, cs in conditions if t != True ] conditions = { From eefe90bcc6bb7170959b9db11108f9648767a7d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 20:58:59 +0100 Subject: [PATCH 020/209] DB/Overpass: first compile to tokens, then to strings; also compile other conditions --- pgmapcss/db/overpass/db.py | 148 +++++++++++++++++++++---------------- 1 file changed, 85 insertions(+), 63 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 9e09e609..2d975158 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -32,6 +32,16 @@ def tag_type(self, key, condition, selector, statement): return ( 'overpass', key ) + def value_to_regexp(self, s): + s = s.replace('\\', '\\\\') + s = s.replace('.', '\\.') + s = s.replace('|', '\\|') + s = s.replace('[', '\\[') + s = s.replace(']', '\\]') + s = s.replace('(', '\\(') + s = s.replace(')', '\\)') + return s + def compile_condition_overpass(self, condition, statement, tag_type, stat, prefix, filter): ret = None negate = False @@ -57,77 +67,66 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # value-eval() statements if condition['value_type'] == 'eval': # treat other conditions as has_key - ret = prefix + column + ' ? 
' + db.format(key); + ret = ( 'key', key ) # = elif op == '=': - ret = '[' + repr(key) + '=' + repr(condition['value']) + ']' + ret = ( 'is', key, condition['value'] ) # @= elif op == '@=' and condition['value_type'] == 'value': - ret = '(' + ' or '.join([ - prefix + column + ' @> ' + db.format({ key: v }) + ret = ( 'regexp', key, '^(' + '|'.join([ + self.value_to_regexp(v) for v in condition['value'].split(';') - ]) + ')' + ]) + ')$' ) # != elif op == '!=': - ret = '( not ' + prefix + column + ' ? ' + db.format(key) +\ - 'or not ' + prefix + column + ' @> ' +\ - db.format({ key: condition['value'] }) + ')' + ret = ( 'isnot', key, condition['value'] ) # regexp match =~ elif op == '=~': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) +\ - (' ~* ' if 'i' in condition['regexp_flags'] else ' ~ ') +\ - db.format(condition['value']) + ')' + ret = ( + ('iregexp' if condition['regexp_flags'] else 'regexp' ), + key, condition['value']) # negated regexp match !~ elif op == '!~': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) +\ - (' !~* ' if 'i' in condition['regexp_flags'] else ' !~ ') +\ - db.format(condition['value']) + ')' + ret = ( + ('notiregexp' if condition['regexp_flags'] else 'notregexp' ), + key, condition['value']) # prefix match ^= elif op == '^=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format(pg_like_escape(condition['value']) + '%') + ')' + ret = ( 'regexp', key, '^' + self.value_to_regexp(condition['value']) ) # suffix match $= elif op == '$=': - ret = '(' + prefix + column + ' ? 
' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value'])) + ')' + ret = ( 'regexp', key, self.value_to_regexp(condition['value']) + '$' ) # substring match *= elif op == '*=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - prefix + column + '->' + db.format(key) + ' like ' +\ - db.format('%' + pg_like_escape(condition['value']) + '%') + ')' + ret = ( 'regexp', key, self.value_to_regexp(condition['value']) ) # list membership ~= elif op == '~=': - ret = '(' + prefix + column + ' ? ' + db.format(key) + ' and ' +\ - db.format(condition['value']) + ' =any(string_to_array(' +\ - prefix + column + '->' + db.format(key) + ', \';\')))' + ret = ( 'regexp', key, self.value_to_regexp(condition['value']) ) else: - ret = '[' + repr(key) + ']' + ret = ( 'key', key ) if ret is None: return None if negate: - return '(not ' + prefix + column + ' ? ' + db.format(key) +\ - ' or not ' + ret + ')' + return None # TODO +# return '(not ' + prefix + column + ' ? ' + db.format(key) +\ +# ' or not ' + ret + ')' return ret def compile_condition(self, condition, statement, stat, prefix='current.', filter={}): - ret = set() + ret = [] # assignments: map conditions which are based on a (possible) set-statement # back to their original selectors: @@ -137,10 +136,10 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte set_statements = stat.filter_statements(f) if len(set_statements) > 0: - set_statements = { + set_statements = [ self.compile_selector(s, stat, prefix, filter, no_object_type=True) for s in set_statements - } + ] # ignore generated tags (identified by leading .) 
if condition['key'][0] == '.': @@ -152,7 +151,7 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte if tag_type is None: pass elif tag_type[0] == 'overpass': - ret.add(self.compile_condition_overpass(condition, statement, tag_type, stat, prefix, filter)) + ret = self.compile_condition_overpass(condition, statement, tag_type, stat, prefix, filter) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -162,32 +161,59 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte return set_statements if len(set_statements): - return { - s + ''.join(ret) + return [ + s + [ ret ] for s in set_statements - } + ] + + # return + return ret - # merge conditions together, return - return ''.join(ret) + def conditions_to_query(self, conditions): + ret = '__TYPE__'; + + for c in conditions: + if c[0] == 'type': + pass + elif c[0] == 'key': + ret += '[' + repr(c[1]) + ']' + elif c[0] == 'is': + ret += '[' + repr(c[1]) + '=' + repr(c[2]) + ']' + elif c[0] == 'isnot': + ret += '[' + repr(c[1]) + '!=' + repr(c[2]) + ']' + elif c[0] == 'regexp': + ret += '[' + repr(c[1]) + '~' + repr(c[2]) + ']' + elif c[0] == 'iregexp': + ret += '[' + repr(c[1]) + '~' + repr(c[2]) + ', i]' + elif c[0] == 'notregexp': + ret += '[' + repr(c[1]) + '!~' + repr(c[2]) + ']' + elif c[0] == 'notiregexp': + ret += '[' + repr(c[1]) + '!~' + repr(c[2]) + ', i]' + else: + print('Unknown Overpass operator "{}"'.format(c[0])) + + return ret def merge_conditions(self, conditions): types = [ t for t, cs in conditions if t != True ] conditions = { - t: - '(\n' + '\n'.join([ - cs + t: [ + c for t2, cs in conditions if t == t2 - if cs != 'false' - ]) + '\n);' + if cs != False + for c in cs + ] for t in types } return { - t: cs + t: ';\n'.join([ + self.conditions_to_query(c) + for c in cs + ]) + ';\n' for t, cs in conditions.items() - if cs != '()' } def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, 
no_object_type=False): @@ -198,27 +224,23 @@ def compile_selector(self, statement, stat, prefix='current.', filter={}, object for c in statement['selector']['conditions'] ] - if no_object_type: - ret = { '' } - else: - ret = { '__TYPE__' } + ret = [ [] ] for condition in conditions: if condition is None: continue - if condition is None: - pass - - elif type(condition) == set: + elif type(condition) == list: ret = [ - r + c - for c in condition - for r in ret - ] - else: + r + c + for r in ret + for cs in condition + for c in cs + ] + + elif type(condition) == tuple: ret = [ - r + condition + r + [ condition ] for r in ret ] @@ -226,6 +248,6 @@ def compile_selector(self, statement, stat, prefix='current.', filter={}, object return False if no_object_type: - return ''.join(ret) + return ret - return ';'.join(ret) + ';' + return ret From 7c24a0c8908518a20d639d1df19cecc67d59f38f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 21:20:11 +0100 Subject: [PATCH 021/209] DB/Overpass: merge conditions if they build subsets --- pgmapcss/db/overpass/db.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 2d975158..7f0fe5b2 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -194,6 +194,19 @@ def conditions_to_query(self, conditions): return ret + def simplify_conditions(self, conditions): + for i1, c1 in enumerate(conditions): + for i2, c2 in enumerate(conditions): + if i1 != i2 and c1 is not None and c2 is not None: + # check if query c1 is a subset of query c2 -> replace by c1 + if len([ e1 for e1 in c1 if e1 not in c2 ]) == 0: + conditions[i1] = c1 + conditions[i2] = None + + conditions = [ c for c in conditions if c is not None ] + + return conditions + def merge_conditions(self, conditions): types = [ t for t, cs in conditions if t != True ] @@ -208,6 +221,11 @@ def merge_conditions(self, conditions): for t in types } + 
conditions = { + t: self.simplify_conditions(cs) + for t, cs in conditions.items() + } + return { t: ';\n'.join([ self.conditions_to_query(c) From e8552908aa82790e81aaab83bbc67408f67a0062 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 22:01:09 +0100 Subject: [PATCH 022/209] DB/Overpass: merge conditions if one of them queries for key --- pgmapcss/db/overpass/db.py | 32 ++++++++++++++++++++++++++++---- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 7f0fe5b2..2a3a6f0f 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -42,6 +42,12 @@ def value_to_regexp(self, s): s = s.replace(')', '\\)') return s + def convert_to_regexp(self, s): + if s[0] in ('regexp', 'iregexp', 'isnot', 'notregexp', 'notiregexp'): + return s + if s[0] == 'is': + return ('regexp', s[1], '^' + self.value_to_regexp(s[2]) + '$') + def compile_condition_overpass(self, condition, statement, tag_type, stat, prefix, filter): ret = None negate = False @@ -194,17 +200,35 @@ def conditions_to_query(self, conditions): return ret + def is_subset(self, c1, c2): + merge = [] + + # check if query c1 is a subset of query c2 -> replace by c1 + if len([ e1 for e1 in c1 if e1 not in c2 ]) == 0: + return c1 + + # c1 and c2 only differ in one condition and it has the same key + d1 = [ e1 for e1 in c1 if e1 not in c2 ] + d2 = [ e2 for e2 in c2 if e2 not in c1 ] + if len(d1) == 1 and len(d2) == 1 and d1[0][1] == d2[0][1]: + # one of the differing conditions queries for key -> ignore other condition + if d1[0][0] == 'key' or d2[0][0] == 'key': + return [ + c + for c in c1 + if c != d1[0] + ] + [ ( 'key', d1[0][1] ) ] + def simplify_conditions(self, conditions): for i1, c1 in enumerate(conditions): for i2, c2 in enumerate(conditions): if i1 != i2 and c1 is not None and c2 is not None: - # check if query c1 is a subset of query c2 -> replace by c1 - if len([ e1 for e1 in c1 
if e1 not in c2 ]) == 0: - conditions[i1] = c1 + s = self.is_subset(c1, c2) + if s: + conditions[i1] = s conditions[i2] = None conditions = [ c for c in conditions if c is not None ] - return conditions def merge_conditions(self, conditions): From 0387f297e15e1581f3630b712f2c26af4a8d948c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 22:07:39 +0100 Subject: [PATCH 023/209] DB/Overpass: Bugfix simplifying conditions (check references) --- pgmapcss/db/overpass/db.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 2a3a6f0f..54cbe061 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -220,8 +220,10 @@ def is_subset(self, c1, c2): ] + [ ( 'key', d1[0][1] ) ] def simplify_conditions(self, conditions): - for i1, c1 in enumerate(conditions): - for i2, c2 in enumerate(conditions): + for i1 in range(0, len(conditions)): + for i2 in range(0, len(conditions)): + c1 = conditions[i1] + c2 = conditions[i2] if i1 != i2 and c1 is not None and c2 is not None: s = self.is_subset(c1, c2) if s: From 69437d9b791e9656f86012cac39ac2a985e39d66 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 16 Nov 2014 22:12:14 +0100 Subject: [PATCH 024/209] DB/Overpass: combine conditions to regular expressions --- pgmapcss/db/overpass/db.py | 67 ++++++++++++++++++++++++++++++++++++-- 1 file changed, 65 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 54cbe061..6d6ab64a 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -200,9 +200,42 @@ def conditions_to_query(self, conditions): return ret - def is_subset(self, c1, c2): - merge = [] + def merge_regexp(self, c1, c2): + if c1[0] != 'regexp' or c2[0] != 'regexp': + return None + if c1[1] != c2[1]: + return None + r1 = c1[2] + r2 = c2[2] + r = '' + r_end = '' + + if r1[0] == '^' or 
r2[0] == '^': + r += '^' + if r1[0] == '^': + r1 = r1[1:] + else: + r1 = '.*' + r1 + if r2[0] == '^': + r2 = r2[1:] + else: + r2 = '.*' + r2 + + if r1[-1] == '$' or r2[-1] == '$': + r_end = '$' + if r1[-1] == '$': + r1 = r1[:-1] + else: + r1 += '.*' + if r2[-1] == '$': + r2 = r2[:-1] + else: + r2 += '.*' + + return r + r1 + '|' + r2 + r_end + + def is_subset(self, c1, c2): # check if query c1 is a subset of query c2 -> replace by c1 if len([ e1 for e1 in c1 if e1 not in c2 ]) == 0: return c1 @@ -219,11 +252,27 @@ def is_subset(self, c1, c2): if c != d1[0] ] + [ ( 'key', d1[0][1] ) ] + def check_merge_regexp(self, c1, c2): + # c1 and c2 only differ in one condition and it has the same key + d1 = [ e1 for e1 in c1 if e1 not in c2 ] + d2 = [ e2 for e2 in c2 if e2 not in c1 ] + if len(d1) == 1 and len(d2) == 1 and d1[0][1] == d2[0][1]: +# check if we can merge the regular expressions + m = self.merge_regexp(self.convert_to_regexp(d1[0]), self.convert_to_regexp(d2[0])) + if m is not None: + x = [ + c + for c in c1 + if c != d1[0] + ] + [ ( 'regexp', d1[0][1], m ) ] + return x + def simplify_conditions(self, conditions): for i1 in range(0, len(conditions)): for i2 in range(0, len(conditions)): c1 = conditions[i1] c2 = conditions[i2] + if i1 != i2 and c1 is not None and c2 is not None: s = self.is_subset(c1, c2) if s: @@ -231,6 +280,20 @@ def simplify_conditions(self, conditions): conditions[i2] = None conditions = [ c for c in conditions if c is not None ] + + for i1 in range(0, len(conditions)): + for i2 in range(0, len(conditions)): + c1 = conditions[i1] + c2 = conditions[i2] + + if i1 != i2 and c1 is not None and c2 is not None: + s = self.check_merge_regexp(c1, c2) + if s: + conditions[i1] = s + conditions[i2] = None + + conditions = [ c for c in conditions if c is not None ] + return conditions def merge_conditions(self, conditions): From 6a9b8066a15e54cc998a39a0dee6623bdd669ae0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 
Nov 2014 03:46:15 +0100 Subject: [PATCH 025/209] DB/Overpass: compiling regexp: collect in set, compile in last step --- pgmapcss/db/overpass/db.py | 82 ++++++++++++++++---------------------- 1 file changed, 34 insertions(+), 48 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 6d6ab64a..b03ad5dc 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -46,7 +46,7 @@ def convert_to_regexp(self, s): if s[0] in ('regexp', 'iregexp', 'isnot', 'notregexp', 'notiregexp'): return s if s[0] == 'is': - return ('regexp', s[1], '^' + self.value_to_regexp(s[2]) + '$') + return ('regexp', s[1], { '^' + self.value_to_regexp(s[2]) + '$' }) def compile_condition_overpass(self, condition, statement, tag_type, stat, prefix, filter): ret = None @@ -81,10 +81,10 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # @= elif op == '@=' and condition['value_type'] == 'value': - ret = ( 'regexp', key, '^(' + '|'.join([ - self.value_to_regexp(v) + ret = ( 'regexp', key, { + '^' + self.value_to_regexp(v) + '$' for v in condition['value'].split(';') - ]) + ')$' ) + } ) # != elif op == '!=': @@ -92,31 +92,31 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # regexp match =~ elif op == '=~': - ret = ( + ret = ( ('iregexp' if condition['regexp_flags'] else 'regexp' ), - key, condition['value']) + key, { condition['value'] }) # negated regexp match !~ elif op == '!~': ret = ( ('notiregexp' if condition['regexp_flags'] else 'notregexp' ), - key, condition['value']) + key, { condition['value'] }) # prefix match ^= elif op == '^=': - ret = ( 'regexp', key, '^' + self.value_to_regexp(condition['value']) ) + ret = ( 'regexp', key, { '^' + self.value_to_regexp(condition['value']) } ) # suffix match $= elif op == '$=': - ret = ( 'regexp', key, self.value_to_regexp(condition['value']) + '$' ) + ret = ( 'regexp', key, { self.value_to_regexp(condition['value']) + '$' } ) # substring 
match *= elif op == '*=': - ret = ( 'regexp', key, self.value_to_regexp(condition['value']) ) + ret = ( 'regexp', key, { self.value_to_regexp(condition['value']) }) # list membership ~= elif op == '~=': - ret = ( 'regexp', key, self.value_to_regexp(condition['value']) ) + ret = ( 'regexp', key, { self.value_to_regexp(condition['value']) }) else: ret = ( 'key', key ) @@ -188,52 +188,37 @@ def conditions_to_query(self, conditions): elif c[0] == 'isnot': ret += '[' + repr(c[1]) + '!=' + repr(c[2]) + ']' elif c[0] == 'regexp': - ret += '[' + repr(c[1]) + '~' + repr(c[2]) + ']' + ret += '[' + repr(c[1]) + '~' + self.merge_regexp(c[2]) + ']' elif c[0] == 'iregexp': - ret += '[' + repr(c[1]) + '~' + repr(c[2]) + ', i]' + ret += '[' + repr(c[1]) + '~' + self.merge_regexp(c[2]) + ', i]' elif c[0] == 'notregexp': - ret += '[' + repr(c[1]) + '!~' + repr(c[2]) + ']' + ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ']' elif c[0] == 'notiregexp': - ret += '[' + repr(c[1]) + '!~' + repr(c[2]) + ', i]' + ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ', i]' else: print('Unknown Overpass operator "{}"'.format(c[0])) return ret - def merge_regexp(self, c1, c2): - if c1[0] != 'regexp' or c2[0] != 'regexp': - return None - if c1[1] != c2[1]: - return None - - r1 = c1[2] - r2 = c2[2] + def merge_regexp(self, regexps): r = '' r_end = '' - if r1[0] == '^' or r2[0] == '^': + if len([ r for r in regexps if r[0] == '^' ]): r += '^' - if r1[0] == '^': - r1 = r1[1:] - else: - r1 = '.*' + r1 - if r2[0] == '^': - r2 = r2[1:] - else: - r2 = '.*' + r2 + regexps = { + r[1:] if r[0] == '^' else '.*' + r + for r in regexps + } - if r1[-1] == '$' or r2[-1] == '$': + if len([ r for r in regexps if r[-1] == '$' ]): r_end = '$' - if r1[-1] == '$': - r1 = r1[:-1] - else: - r1 += '.*' - if r2[-1] == '$': - r2 = r2[:-1] - else: - r2 += '.*' + regexps = { + r[:-1] if r[-1] == '$' else r + '.*' + for r in regexps + } - return r + r1 + '|' + r2 + r_end + return repr(r + 
'|'.join(regexps) + r_end) def is_subset(self, c1, c2): # check if query c1 is a subset of query c2 -> replace by c1 @@ -257,15 +242,16 @@ def check_merge_regexp(self, c1, c2): d1 = [ e1 for e1 in c1 if e1 not in c2 ] d2 = [ e2 for e2 in c2 if e2 not in c1 ] if len(d1) == 1 and len(d2) == 1 and d1[0][1] == d2[0][1]: -# check if we can merge the regular expressions - m = self.merge_regexp(self.convert_to_regexp(d1[0]), self.convert_to_regexp(d2[0])) - if m is not None: - x = [ + # check if we can merge the regular expressions + m1 = self.convert_to_regexp(d1[0]) + m2 = self.convert_to_regexp(d2[0]) + + if m1[0] == m2[0]: + return [ c for c in c1 if c != d1[0] - ] + [ ( 'regexp', d1[0][1], m ) ] - return x + ] + [ ( m1[0], d1[0][1], m1[2].union(m2[2]) ) ] def simplify_conditions(self, conditions): for i1 in range(0, len(conditions)): From a07ac80c9e2853b8342c3faab3875b08a0ce5129 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 03:53:11 +0100 Subject: [PATCH 026/209] DB/Overpass: fix escaping values to regexp --- pgmapcss/db/overpass/db.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index b03ad5dc..dfa6aa0e 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -40,6 +40,13 @@ def value_to_regexp(self, s): s = s.replace(']', '\\]') s = s.replace('(', '\\(') s = s.replace(')', '\\)') + s = s.replace('{', '\\{') + s = s.replace('}', '\\}') + s = s.replace('?', '\\?') + s = s.replace('+', '\\+') + s = s.replace('*', '\\*') + s = s.replace('^', '\\^') + s = s.replace('$', '\\$') return s def convert_to_regexp(self, s): From 356d1e153e3d859ebcfb934c0450c3869614db44 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 10:09:04 +0100 Subject: [PATCH 027/209] DB/Overpass: Bugfix merging conditions --- pgmapcss/db/overpass/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index dfa6aa0e..338d4949 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -268,7 +268,7 @@ def simplify_conditions(self, conditions): if i1 != i2 and c1 is not None and c2 is not None: s = self.is_subset(c1, c2) - if s: + if s is not None: conditions[i1] = s conditions[i2] = None @@ -281,7 +281,7 @@ def simplify_conditions(self, conditions): if i1 != i2 and c1 is not None and c2 is not None: s = self.check_merge_regexp(c1, c2) - if s: + if s is not None: conditions[i1] = s conditions[i2] = None From fdc27dfe607709a438bafefea5d69c7ee30441c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 10:20:46 +0100 Subject: [PATCH 028/209] Compile Link Selector: use DB selector compiler --- pgmapcss/compiler/compile_link_selector.py | 9 +++++---- pgmapcss/db/postgresql_db/db.py | 4 ++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index e2d67926..8b1e8407 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -4,10 +4,11 @@ import pgmapcss.db as db def compile_link_selector(statement, stat): - parent_conditions = ' and '.join([ - stat['database'].compile_condition(c, statement, stat, prefix='') or 'true' - for c in statement['parent_selector']['conditions'] - ]) + parent_conditions = stat['database'].merge_conditions([( + statement['parent_selector']['type'], + stat['database'].compile_selector( + statement, stat, prefix='', selector='parent_selector') + )])[statement['parent_selector']['type']] if statement['link_selector']['type'] in ('>', ''): return "objects_member_of(object['id'], " +\ diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index eda672d9..6c162ee1 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -276,12 +276,12 
@@ def compile_condition(self, condition, statement, stat, prefix='current.', filte # merge conditions together, return return '(' + ' or '.join(ret) + ')' - def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None): + def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, selector='selector'): filter['object_type'] = object_type ret = { self.compile_condition(c, statement, stat, prefix, filter) or 'true' - for c in statement['selector']['conditions'] + for c in statement[selector]['conditions'] } if len(ret) == 0: From c70942ce568a9aeadf2178484cdadc7d701ed9fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 12:20:14 +0100 Subject: [PATCH 029/209] DB/Overpass: re-implement objects_member_of() --- pgmapcss/db/overpass/db_functions.py | 94 +++++++++++++++------------- 1 file changed, 50 insertions(+), 44 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 5d1f6c47..7d383b2f 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -421,51 +421,57 @@ def objects_by_id(id_list): yield(assemble_object(r)) def objects_member_of(member_id, parent_type, parent_conditions): - if parent_type == 'relation': - plan = plpy.prepare('select *, (select name from users where id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2', ['bigint', 'text']); - res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()]) - for r in res: - t = { - 'id': 'r' + str(r['id']), - 'tags': pghstore.loads(r['tags']), - 'types': ['relation'], - 'geo': None, - 'link_tags': { - 'sequence_id': str(r['sequence_id']), - 'role': str(r['member_role']), - 'member_id': r['member_type'].lower() + str(r['member_id']), - } - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) 
- t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) + import urllib.request + import urllib.parse + import json - if parent_type == 'way' and member_id[0] == 'n': - num_id = int(member_id[1:]) - plan = plpy.prepare('select *, (select name from users where id=user_id) as user from way_nodes join ways on way_nodes.way_id=ways.id where node_id=$1', ['bigint']); - res = plpy.cursor(plan, [num_id]) - for r in res: - t = { - 'id': 'w' + str(r['id']), - 'tags': pghstore.loads(r['tags']), - 'types': ['way'], - 'geo': r['linestring'], - 'link_tags': { - 'member_id': member_id, - 'sequence_id': str(r['sequence_id']) - } - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) - t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) + q = '[out:json];' + + if member_id[0] == 'n': + ob_type = 'node' + ob_id = int(member_id[1:]) + q += 'node(' + member_id[1:] + ')->.a;' + elif member_id[0] == 'w': + ob_type = 'way' + ob_id = int(member_id[1:]) + q += 'way(' + member_id[1:] + ')->.a;' + elif member_id[0] == 'r': + ob_type = 'relation' + ob_id = int(member_id[1:]) + q += 'relation(' + member_id[1:] + ')->.a;' + + q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(b' + + member_id[0] + '.a)') + ');' + q += 'out meta qt geom;' + + plpy.warning(q) + + url = 'http://overpass-api.de/api/interpreter?' 
+\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + plpy.warning(f) + res = json.loads(f) + + for r in res['elements']: + t = assemble_object(r) + if parent_type == 'relation': + for i, m in enumerate(r['members']): + if m['type'] == ob_type and m['ref'] == ob_id: + t['link_tags'] = { + 'sequence_id': str(i), + 'role': m['role'], + 'member_id': m['type'][0] + str(m['ref']), + } + yield(t) + + elif parent_type == 'way': + for i, m in enumerate(r['nodes']): + if m == ob_id: + t['link_tags'] = { + 'sequence_id': str(i), + 'member_id': 'n' + str(m), + } + yield(t) def objects_members(relation_id, parent_type, parent_conditions): ob = list(objects_by_id([relation_id])) From 6530f674dd0659de067ec39092bf9748c4d3f8cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 13:25:51 +0100 Subject: [PATCH 030/209] DB/Overpass: use assemble_objects() for all queries --- pgmapcss/db/overpass/db_functions.py | 83 +++------------------------- 1 file changed, 7 insertions(+), 76 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 7d383b2f..3dcb4b77 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -162,19 +162,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v res = json.loads(f) for r in res['elements']: - t = { - 'id': 'n' + str(r['id']), - 'types': ['node', 'point'], - 'tags': r['tags'], - 'geo': node_geom(r['lat'], r['lon']), - } - t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' - t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' - t['tags']['osm:user'] = r['user'] if 'user' in r else '' - t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' - t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' - yield(t) + yield(assemble_object(r)) 
#'http://overpass-turbo.eu/?Q=' + q).read() @@ -242,20 +230,9 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v if outer['role'] in ('', 'outer'): ways_done.append(outer['ref']) - t = { - 'id': 'm' + str(r['id']), - 'types': ['multipolygon', 'area'], - # TODO: merge tags with relation tags and - # (non-relevant) tags of other outer ways - 'tags': outer_tags, - 'geo': relation_geom(r), - } - t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' - t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' - t['tags']['osm:user'] = r['user'] if 'user' in r else '' - t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' - t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' + t = assemble_object(r) + t['types'] = ['multipolygon', 'area'] + t['tags'] = outer_tags yield(t) else: @@ -285,21 +262,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v pass ways_done.append(r['id']) - is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] - t = { - 'id': 'w' + str(r['id']), - 'types': ['way', 'line', 'area'] if is_polygon else ['way', 'line'], - 'tags': r['tags'] if 'tags' in r else {}, - 'geo': way_geom(r, is_polygon), - } - t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' - t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' - t['tags']['osm:user'] = r['user'] if 'user' in r else '' - t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' - t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' - - yield(t) + yield(assemble_object(r)) # relations w = [] @@ -322,22 +285,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v pass rels_done.append(r['id']) - g = relation_geom(r) - if not g or not 'tags' in r: - continue - t = { - 'id': 'r' + str(r['id']), - 'types': 
['area', 'relation'], - 'tags': r['tags'] if 'tags' in r else {}, - 'geo': g - } - t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' - t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' - t['tags']['osm:user'] = r['user'] if 'user' in r else '' - t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' - t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' - yield(t) + yield(assemble_object(r)) # areas w = [] @@ -370,24 +318,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v (r['type'] == 'relation' and r['id'] in rels_done): continue - t = { - 'tags': r['tags'] if 'tags' in r else {}, - } - if r['type'] == 'relation': - t['id'] = 'r' + str(r['id']) - t['types'] = ['area', 'relation'] - t['geo'] = relation_geom(r) - elif r['type'] == 'way': - t['id'] = 'w' + str(r['id']) - t['types'] = ['area', 'line', 'way'] - t['geo'] = way_geom(r, True) - t['tags']['osm:id'] = t['id'] - t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' - t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' - t['tags']['osm:user'] = r['user'] if 'user' in r else '' - t['tags']['osm:timestamp'] = r['timestamp'] if 'timestamp' in r else '' - t['tags']['osm:changeset'] = str(r['changeset']) if 'changeset' in r else '' - yield(t) + yield(assemble_object(r)) time_stop = datetime.datetime.now() # profiling plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) From fc86f17f284b91cb7975c2a96d44433005e9926c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 13:30:35 +0100 Subject: [PATCH 031/209] DB/Overpass: always add members list to objects --- pgmapcss/db/overpass/db_functions.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 3dcb4b77..9479d058 100644 --- 
a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -115,10 +115,25 @@ def assemble_object(r): if is_polygon: t['types'].append('area') t['geo'] = way_geom(r, is_polygon) + t['members'] = [ + { + 'member_id': 'n' + str(m), + 'sequence_id': str(i), + } + for i, m in enumerate(r['nodes']) + ] elif r['type'] == 'relation': t['id'] = 'r' + str(r['id']) t['types'] = ['area', 'relation'] t['geo'] = relation_geom(r) + t['members'] = [ + { + 'member_id': m['type'][0] + str(m['ref']), + 'role': m['role'], + 'sequence_id': str(i), + } + for i, m in enumerate(r['members']) + ] t['tags']['osm:id'] = t['id'] t['tags']['osm:version'] = str(r['version']) if 'version' in r else '' t['tags']['osm:user_id'] = str(r['uid']) if 'uid' in r else '' From 255024838178a66e6aa0e9ebe6f13e644431772f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 15:16:42 +0100 Subject: [PATCH 032/209] DB/Overpass: re-implement objects_members() --- pgmapcss/db/overpass/db_functions.py | 60 ++++++++++++++++++++-------- 1 file changed, 44 insertions(+), 16 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 9479d058..d846938f 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -420,29 +420,57 @@ def objects_member_of(member_id, parent_type, parent_conditions): yield(t) def objects_members(relation_id, parent_type, parent_conditions): - ob = list(objects_by_id([relation_id])) + import urllib.request + import urllib.parse + import json - if not len(ob): - return + q = '[out:json];' + + if relation_id[0] == 'n': + ob_type = 'node' + ob_id = int(relation_id[1:]) + q += 'node(' + relation_id[1:] + ')->.a;' + elif relation_id[0] == 'w': + ob_type = 'way' + ob_id = int(relation_id[1:]) + q += 'way(' + relation_id[1:] + ')->.a;' + elif relation_id[0] == 'r': + ob_type = 'relation' + ob_id = int(relation_id[1:]) + q += 'relation(' + 
relation_id[1:] + ')->.a;' + + q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(' + + relation_id[0] + '.a)') + ');' + q += '.a out meta qt geom;out meta qt geom;' + # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object - ob = ob[0] + url = 'http://overpass-api.de/api/interpreter?' +\ + urllib.parse.urlencode({ 'data': q }) + f = urllib.request.urlopen(url).read().decode('utf-8') + plpy.warning(f) + res = json.loads(f) - link_obs_ids = [ i['member_id'] for i in ob['members'] ] - link_obs = {} - for o in objects_by_id(link_obs_ids): - link_obs[o['id']] = o + relation = None + relation_type = None - for member in ob['members']: - if not member['member_id'] in link_obs: - continue + for r in res['elements']: + t = assemble_object(r) - ret = link_obs[member['member_id']] + if t['id'] == relation_id: + relation = t + relation_type = r['type'] - if parent_type not in ret['types']: - continue + else: + for m in relation['members']: + if m['member_id'] == t['id']: + t['link_tags'] = { + 'sequence_id': m['sequence_id'], + 'member_id': m['member_id'], + } + if 'role' in m: + t['link_tags']['role'] = m['role'] - ret['link_tags'] = member - yield ret + yield(t) def objects_near(max_distance, ob, parent_selector, where_clause, check_geo=None): if ob: From 19595ee223800d8bdae89d8dc41edb38d1d17dd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 15:44:10 +0100 Subject: [PATCH 033/209] DB/Overpass: always use regexp for is/isnot conditions --- pgmapcss/db/overpass/db.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 5c60c339..8da9acfb 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -84,7 +84,8 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # = elif op == '=': - ret = ( 'is', key, condition['value'] ) + #ret = ( 'is', key, condition['value'] ) + 
ret = ( 'regexp', key, { '^' + self.value_to_regexp(condition['value']) + '$' }) # @= elif op == '@=' and condition['value_type'] == 'value': @@ -95,7 +96,8 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # != elif op == '!=': - ret = ( 'isnot', key, condition['value'] ) + #ret = ( 'isnot', key, condition['value'] ) + ret = ( 'notregexp', key, { '^' + self.value_to_regexp(condition['value']) + '$' }) # regexp match =~ elif op == '=~': From e5a25d30b0390dffe0686918fecdfbc4b7697548 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 17:39:23 +0100 Subject: [PATCH 034/209] DB/*: pass child_conditions to link relations --- pgmapcss/compiler/compile_link_selector.py | 18 ++++++++++++++---- pgmapcss/db/osm2pgsql/db_functions.py | 6 +++--- pgmapcss/db/osmosis/db_functions.py | 6 +++--- pgmapcss/db/overpass/db_functions.py | 6 +++--- 4 files changed, 23 insertions(+), 13 deletions(-) diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 8b1e8407..ea978032 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -10,15 +10,23 @@ def compile_link_selector(statement, stat): statement, stat, prefix='', selector='parent_selector') )])[statement['parent_selector']['type']] + child_conditions = stat['database'].merge_conditions([( + statement['selector']['type'], + stat['database'].compile_selector( + statement, stat, prefix='') + )])[statement['selector']['type']] + if statement['link_selector']['type'] in ('>', ''): return "objects_member_of(object['id'], " +\ repr(statement['parent_selector']['type']) + ", " +\ - repr(parent_conditions) + ")" + repr(parent_conditions) + ", " +\ + repr(child_conditions) + ")" elif statement['link_selector']['type'] == '<': return "objects_members(object['id'], " +\ repr(statement['parent_selector']['type']) + ", " +\ - repr(parent_conditions) + ")" + 
repr(parent_conditions) + ", " +\ + repr(child_conditions) + ")" elif statement['link_selector']['type'] == 'near': distance = { 'value': '100' } @@ -37,12 +45,14 @@ def compile_link_selector(statement, stat): return "objects_near(" + distance + ", None, "+\ repr(statement['parent_selector']['type']) + ", " +\ - repr(parent_conditions) + ")" + repr(parent_conditions) + ", " +\ + repr(child_conditions) + ")" elif statement['link_selector']['type'] in ('within', 'surrounds', 'overlaps'): return "objects_near(\"0\", None, "+\ repr(statement['parent_selector']['type']) + ", " +\ - repr(parent_conditions) + ", check_geo=" +\ + repr(parent_conditions) + ", " +\ + repr(child_conditions) + ", check_geo=" +\ repr(statement['link_selector']['type']) + ")" else: diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index 5c02b8b2..5497be3c 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -365,7 +365,7 @@ def flatarray_to_members(arr): return ret -def objects_member_of(member_id, parent_type, parent_conditions): +def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): if parent_type == 'relation': plan = plpy.prepare('select * from planet_osm_rels where members @> Array[$1]', ['text']); res = plpy.cursor(plan, [member_id]) @@ -415,7 +415,7 @@ def objects_member_of(member_id, parent_type, parent_conditions): t['tags']['osm:id'] = t['id'] yield(t) -def objects_members(relation_id, parent_type, parent_conditions): +def objects_members(relation_id, parent_type, parent_conditions, child_conditions): ob = list(objects_by_id([relation_id])) if not len(ob): @@ -440,7 +440,7 @@ def objects_members(relation_id, parent_type, parent_conditions): ret['link_tags'] = member yield ret -def objects_near(max_distance, ob, parent_selector, where_clause, check_geo=None): +def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): if ob: geom = 
ob['geo'] elif 'geo' in current['properties'][current['pseudo_element']]: diff --git a/pgmapcss/db/osmosis/db_functions.py b/pgmapcss/db/osmosis/db_functions.py index 98770727..d83b7f04 100644 --- a/pgmapcss/db/osmosis/db_functions.py +++ b/pgmapcss/db/osmosis/db_functions.py @@ -236,7 +236,7 @@ def objects_by_id(id_list): t['tags']['osm:changeset'] = str(r['changeset_id']) yield(t) -def objects_member_of(member_id, parent_type, parent_conditions): +def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): if parent_type == 'relation': plan = plpy.prepare('select *, (select name from users where id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2', ['bigint', 'text']); res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()]) @@ -283,7 +283,7 @@ def objects_member_of(member_id, parent_type, parent_conditions): t['tags']['osm:changeset'] = str(r['changeset_id']) yield(t) -def objects_members(relation_id, parent_type, parent_conditions): +def objects_members(relation_id, parent_type, parent_conditions, child_conditions): ob = list(objects_by_id([relation_id])) if not len(ob): @@ -308,7 +308,7 @@ def objects_members(relation_id, parent_type, parent_conditions): ret['link_tags'] = member yield ret -def objects_near(max_distance, ob, parent_selector, where_clause, check_geo=None): +def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): if ob: geom = ob['geo'] elif 'geo' in current['properties'][current['pseudo_element']]: diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index d846938f..52656e0a 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -366,7 +366,7 @@ def objects_by_id(id_list): for r in res['elements']: yield(assemble_object(r)) -def objects_member_of(member_id, parent_type, parent_conditions): +def 
objects_member_of(member_id, parent_type, parent_conditions, child_conditions): import urllib.request import urllib.parse import json @@ -419,7 +419,7 @@ def objects_member_of(member_id, parent_type, parent_conditions): } yield(t) -def objects_members(relation_id, parent_type, parent_conditions): +def objects_members(relation_id, parent_type, parent_conditions, child_conditions): import urllib.request import urllib.parse import json @@ -472,7 +472,7 @@ def objects_members(relation_id, parent_type, parent_conditions): yield(t) -def objects_near(max_distance, ob, parent_selector, where_clause, check_geo=None): +def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): if ob: geom = ob['geo'] elif 'geo' in current['properties'][current['pseudo_element']]: From c3cacd35da05c3e11163205ccc9a5532d4893f8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 18:04:24 +0100 Subject: [PATCH 035/209] DB/Overpass: query api from function overpass_query() --- pgmapcss/db/overpass/db_functions.py | 110 ++++++++------------------- 1 file changed, 30 insertions(+), 80 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 52656e0a..6031f4db 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -1,5 +1,27 @@ #[out:json][bbox:{{bbox}}];(way[name=Marschnergasse];way[name=Erdbrustgasse]);out geom meta; +def overpass_query(query): + import urllib.request + import urllib.parse + import json + + plpy.warning(query) + url = '{db.overpass-url}?' 
+\ + urllib.parse.urlencode({ 'data': query }) + f = urllib.request.urlopen(url).read().decode('utf-8') + + try: + res = json.loads(f) + except ValueError: + # areas not initialized -> ignore + if re.search('osm3s_v[0-9\.]+_areas', f): + return + else: + raise + + for r in res['elements']: + yield(r) + def node_geom(lat, lon): global geom_plan @@ -144,9 +166,6 @@ def assemble_object(r): return t def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): - import urllib.request - import urllib.parse - import json time_start = datetime.datetime.now() # profiling non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} ways_done = [] @@ -171,12 +190,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'node') - url = '{db.overpass-url}?' +\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) - - for r in res['elements']: + for r in overpass_query(q): yield(assemble_object(r)) #'http://overpass-turbo.eu/?Q=' + q).read() @@ -197,17 +211,11 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v 'relation[type=multipolygon] -> .rel;' + '((' + ');('.join(w) + ');) -> .outer;relation(bw.outer)[type=multipolygon]') + '.outer out tags qt;' q = q.replace('__TYPE__', 'way(r.rel:"outer")') - plpy.warning(q) - - url = '{db.overpass-url}?' 
+\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) _ways = {} _rels = {} - for r in res['elements']: + for r in overpass_query(q): if r['type'] == 'way': _ways[r['id']] = r elif r['type'] == 'relation': @@ -265,14 +273,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'way') - plpy.warning(q) - - url = '{db.overpass-url}?' +\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) - for r in res['elements']: + for r in overpass_query(q): if r['id'] in ways_done: pass ways_done.append(r['id']) @@ -288,14 +290,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') q = q.replace('__TYPE__', 'relation') - plpy.warning(q) - - url = '{db.overpass-url}?' +\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) - for r in res['elements']: + for r in overpass_query(q): if r['id'] in rels_done: pass rels_done.append(r['id']) @@ -313,22 +309,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v res = plpy.execute(plan, [ _bbox ]) q = qry.replace('__QRY__', 'is_in({0});way(pivot);out meta geom;is_in({0});relation(pivot)'.format(res[0]['geom'])) - plpy.warning(q) - - url = '{db.overpass-url}?' 
+\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - - try: - res = json.loads(f) - except ValueError: - print(f) - if re.search('osm3s_v[0-9\.]+_areas', f): - res = { 'elements': [] } - else: - raise - for r in res['elements']: + for r in overpass_query(q): if (r['type'] == 'way' and r['id'] in ways_done) or\ (r['type'] == 'relation' and r['id'] in rels_done): continue @@ -339,9 +321,6 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) def objects_by_id(id_list): - import urllib.request - import urllib.parse - import json q = '' multipolygons = [] for i in id_list: @@ -356,21 +335,10 @@ def objects_by_id(id_list): return q = '[out:json];' + q - plpy.warning(q) - - url = 'http://overpass-api.de/api/interpreter?' +\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - res = json.loads(f) - - for r in res['elements']: + for r in overpass_query(q): yield(assemble_object(r)) def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): - import urllib.request - import urllib.parse - import json - q = '[out:json];' if member_id[0] == 'n': @@ -390,15 +358,7 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition member_id[0] + '.a)') + ');' q += 'out meta qt geom;' - plpy.warning(q) - - url = 'http://overpass-api.de/api/interpreter?' 
+\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - plpy.warning(f) - res = json.loads(f) - - for r in res['elements']: + for r in overpass_query(q): t = assemble_object(r) if parent_type == 'relation': for i, m in enumerate(r['members']): @@ -420,10 +380,6 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition yield(t) def objects_members(relation_id, parent_type, parent_conditions, child_conditions): - import urllib.request - import urllib.parse - import json - q = '[out:json];' if relation_id[0] == 'n': @@ -444,16 +400,10 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition q += '.a out meta qt geom;out meta qt geom;' # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object - url = 'http://overpass-api.de/api/interpreter?' +\ - urllib.parse.urlencode({ 'data': q }) - f = urllib.request.urlopen(url).read().decode('utf-8') - plpy.warning(f) - res = json.loads(f) - relation = None relation_type = None - for r in res['elements']: + for r in overpass_query(q): t = assemble_object(r) if t['id'] == relation_id: From 6c9154096289eb1f5d048555a47e149855497b78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 17 Nov 2014 18:38:04 +0100 Subject: [PATCH 036/209] DB/Overpass: member_of/members - do query once for bbox, cache results --- pgmapcss/db/overpass/db_functions.py | 133 ++++++++++++++++----------- 1 file changed, 80 insertions(+), 53 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 6031f4db..e23caa5d 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -165,6 +165,14 @@ def assemble_object(r): return t +def get_bbox(_bbox=None): + if _bbox is None: + _bbox = render_context['bbox'] + + plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' 
|| ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) + res = plpy.execute(plan, [ _bbox ]) + return '[bbox:' + res[0]['bbox_string'] + ']' + def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): time_start = datetime.datetime.now() # profiling non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} @@ -174,9 +182,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v qry = '[out:json]' if _bbox: - plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' || ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) - res = plpy.execute(plan, [ _bbox ]) - qry += '[bbox:' + res[0]['bbox_string'] + ']' + qry += get_bbox(_bbox) qry += ';__QRY__;out meta geom;' @@ -339,88 +345,109 @@ def objects_by_id(id_list): yield(assemble_object(r)) def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): - q = '[out:json];' + global member_of_cache + try: + member_of_cache + except: + member_of_cache = {} if member_id[0] == 'n': ob_type = 'node' ob_id = int(member_id[1:]) - q += 'node(' + member_id[1:] + ')->.a;' elif member_id[0] == 'w': ob_type = 'way' ob_id = int(member_id[1:]) - q += 'way(' + member_id[1:] + ')->.a;' elif member_id[0] == 'r': ob_type = 'relation' ob_id = int(member_id[1:]) - q += 'relation(' + member_id[1:] + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(b' + - member_id[0] + '.a)') + ');' - q += 'out meta qt geom;' + member_of_cache_id = parent_type + '|' + ob_type + '|' + repr(parent_conditions) + '|' + repr(child_conditions) - for r in overpass_query(q): - t = assemble_object(r) - if parent_type == 'relation': - for i, m in enumerate(r['members']): - if m['type'] == ob_type and 
m['ref'] == ob_id: - t['link_tags'] = { - 'sequence_id': str(i), - 'role': m['role'], - 'member_id': m['type'][0] + str(m['ref']), - } - yield(t) + if member_of_cache_id not in member_of_cache: + member_of_cache[member_of_cache_id] = [] + q = '[out:json]' + get_bbox() + ';' - elif parent_type == 'way': - for i, m in enumerate(r['nodes']): - if m == ob_id: - t['link_tags'] = { - 'sequence_id': str(i), - 'member_id': 'n' + str(m), - } - yield(t) + q += '(' + child_conditions.replace('__TYPE__', ob_type) + ')->.a;' + + q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(b' + + ob_type[0] + '.a)') + ');' + q += 'out meta qt geom;' + + for r in overpass_query(q): + t = assemble_object(r) + member_of_cache[member_of_cache_id].append(t) + + for t in member_of_cache[member_of_cache_id]: + for m in t['members']: + if m['member_id'] == member_id: + t['link_tags'] = { + 'sequence_id': m['sequence_id'], + 'member_id': m['member_id'], + } + if 'role' in m: + t['link_tags']['role'] = m['role'] + + yield(t) def objects_members(relation_id, parent_type, parent_conditions, child_conditions): + global members_cache + try: + members_cache + except: + members_cache = {} + q = '[out:json];' if relation_id[0] == 'n': ob_type = 'node' ob_id = int(relation_id[1:]) - q += 'node(' + relation_id[1:] + ')->.a;' elif relation_id[0] == 'w': ob_type = 'way' ob_id = int(relation_id[1:]) - q += 'way(' + relation_id[1:] + ')->.a;' elif relation_id[0] == 'r': ob_type = 'relation' ob_id = int(relation_id[1:]) - q += 'relation(' + relation_id[1:] + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(' + - relation_id[0] + '.a)') + ');' - q += '.a out meta qt geom;out meta qt geom;' - # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object + members_cache_id = parent_type + '|' + ob_type + '|' + repr(parent_conditions) + '|' + repr(child_conditions) - relation = None - relation_type = None + if members_cache_id not in members_cache: + 
members_cache[members_cache_id] = { 'parents': {}, 'children': [] } + q = '[out:json]' + get_bbox() + ';' - for r in overpass_query(q): - t = assemble_object(r) + q += '(' + child_conditions.replace('__TYPE__', ob_type) + ');' + q += 'out meta qt geom;' + # TODO: out body qt; would be sufficient, but need to adapt assemble_object - if t['id'] == relation_id: - relation = t - relation_type = r['type'] + for r in overpass_query(q): + t = assemble_object(r) + t['type'] = r['type'] + members_cache[members_cache_id]['parents'][t['id']] = t - else: - for m in relation['members']: - if m['member_id'] == t['id']: - t['link_tags'] = { - 'sequence_id': m['sequence_id'], - 'member_id': m['member_id'], - } - if 'role' in m: - t['link_tags']['role'] = m['role'] + q = '[out:json]' + get_bbox() + ';' - yield(t) + q += '(' + child_conditions.replace('__TYPE__', ob_type) + ')->.a;' + q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(' + + relation_id[0] + '.a)') + ');' + q += 'out meta qt geom;' + # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object + + for r in overpass_query(q): + t = assemble_object(r) + members_cache[members_cache_id]['children'].append(t) + + relation = members_cache[members_cache_id]['parents'][relation_id] + + for t in members_cache[members_cache_id]['children']: + for m in relation['members']: + if m['member_id'] == t['id']: + t['link_tags'] = { + 'sequence_id': m['sequence_id'], + 'member_id': m['member_id'], + } + if 'role' in m: + t['link_tags']['role'] = m['role'] + + yield(t) def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): if ob: From 2d983c68979be232c608d28c3eed4da04ee37f52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 04:45:57 +0100 Subject: [PATCH 037/209] Mode standalone: execute everything inside a transaction --- pgmapcss/mode/standalone/footer.inc | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/pgmapcss/mode/standalone/footer.inc b/pgmapcss/mode/standalone/footer.inc index 057f6403..a076f037 100644 --- a/pgmapcss/mode/standalone/footer.inc +++ b/pgmapcss/mode/standalone/footer.inc @@ -155,6 +155,7 @@ if __name__ == '__main__': parameters['lang'] = 'en' plpy = fake_plpy(args) + plpy.execute(plpy.prepare('begin', []), []) plan_to_geojson = plpy.prepare('select ST_asGeoJSON($1) as r', [ 'geometry' ]) def format_result(results): @@ -206,3 +207,5 @@ if __name__ == '__main__': else: print(format_result(results)) print("]}}\n") + + plpy.execute(plpy.prepare('commit', []), []) From d50e804e6c3dd50cb8fd81acef11cff02d1e7b5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 04:10:40 +0100 Subject: [PATCH 038/209] PGCache: create template class + database table --- pgmapcss/db/pgmapcss_types.sql | 11 +++++++++++ pgmapcss/db/version.py | 2 +- pgmapcss/misc/__init__.py | 8 ++++++++ pgmapcss/misc/pgcache.py | 27 +++++++++++++++++++++++++++ pgmapcss/mode/standalone/footer.inc | 1 + 5 files changed, 48 insertions(+), 1 deletion(-) create mode 100644 pgmapcss/misc/pgcache.py diff --git a/pgmapcss/db/pgmapcss_types.sql b/pgmapcss/db/pgmapcss_types.sql index 6331ac08..bceab712 100644 --- a/pgmapcss/db/pgmapcss_types.sql +++ b/pgmapcss/db/pgmapcss_types.sql @@ -23,3 +23,14 @@ create table _pgmapcss_left_right_hand_traffic ( geo geometry ); create index _pgmapcss_left_right_hand_traffic_geo on _pgmapcss_left_right_hand_traffic using gist(geo); + +drop table if exists _pgmapcss_PGCache cascade; +create table _pgmapcss_PGCache ( + cache_id int, + data bytea, + id text, + geo geometry +); +create index _pgmapcss_PGCache_cache_id on _pgmapcss_PGCache(cache_id); +create index _pgmapcss_PGCache_id on _pgmapcss_PGCache(id); +create index _pgmapcss_PGCache_geo on _pgmapcss_PGCache using gist(geo); diff --git a/pgmapcss/db/version.py b/pgmapcss/db/version.py index 5e44d79a..e2d77334 100644 --- a/pgmapcss/db/version.py +++ 
b/pgmapcss/db/version.py @@ -3,7 +3,7 @@ import postgresql from pkg_resources import parse_version -db_version_table_layout = 2 +db_version_table_layout = 3 def db_version(): try: diff --git a/pgmapcss/misc/__init__.py b/pgmapcss/misc/__init__.py index 4e8a8bcf..3877b56b 100644 --- a/pgmapcss/misc/__init__.py +++ b/pgmapcss/misc/__init__.py @@ -1 +1,9 @@ from .strip_includes import strip_includes +from .pgcache import PGCache +from .pgcache import get_PGCache + +from ..includes import register_includes +from pkg_resources import * +register_includes({ + 'pgcache': resource_string(__name__, 'pgcache.py').decode('utf-8'), +}) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py new file mode 100644 index 00000000..90ae245b --- /dev/null +++ b/pgmapcss/misc/pgcache.py @@ -0,0 +1,27 @@ +class PGCache: + def __init__(self, id, read_id=False, read_geo=False): + global PGCaches + try: + PGCaches + except: + PGCaches = {} + + PGCaches[id] = self + + def add(self, data, id=None, geo=None): + pass + + def get(self, id=None): + pass + + def query(self, qry): + pass + +def get_PGCache(id, read_id=False, read_geo=False): + global PGCaches + try: + PGCaches + except: + PGCaches = {} + + return PGCaches[id] diff --git a/pgmapcss/mode/standalone/footer.inc b/pgmapcss/mode/standalone/footer.inc index a076f037..6b97c307 100644 --- a/pgmapcss/mode/standalone/footer.inc +++ b/pgmapcss/mode/standalone/footer.inc @@ -208,4 +208,5 @@ if __name__ == '__main__': print(format_result(results)) print("]}}\n") + plpy.execute(plpy.prepare('truncate _pgmapcss_PGCache', []), []) plpy.execute(plpy.prepare('commit', []), []) From 22f93709b8531588d529aac817c93cf963ace910 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 04:51:47 +0100 Subject: [PATCH 039/209] Mode standalone::fake_plpy: execute() may not return rows (e.g. 
insert, delete) --- pgmapcss/mode/standalone/header.inc | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/pgmapcss/mode/standalone/header.inc b/pgmapcss/mode/standalone/header.inc index e4a3e1ac..8756bc76 100644 --- a/pgmapcss/mode/standalone/header.inc +++ b/pgmapcss/mode/standalone/header.inc @@ -46,10 +46,14 @@ class fake_plpy: self.explain_queries[plan.query]['count'] += 1 # END debug.explain_queries - return [ - dict(r) - for r in plan(*param) - ] + ret = [] + for r in plan(*param): + if type(r) != postgresql.types.Row: + return r + + ret.append(dict(r)) + + return ret def cursor(self, plan, param=[]): # START debug.explain_queries From ec5cc5d30b50e12c2e11f6206c220b4f2731e352 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 06:05:29 +0100 Subject: [PATCH 040/209] PGCache: implement class --- pgmapcss/misc/pgcache.py | 64 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 60 insertions(+), 4 deletions(-) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 90ae245b..570d976d 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -7,15 +7,71 @@ def __init__(self, id, read_id=False, read_geo=False): PGCaches = {} PGCaches[id] = self + self.id = id + self.read_id = read_id + self.read_geo = read_geo + self.cache_id = len(PGCaches) def add(self, data, id=None, geo=None): - pass + import pickle + try: + self.plan_add + except: + self.plan_add = plpy.prepare('insert into _pgmapcss_PGCache values (\'' + str(self.cache_id).replace("'", "''") + '\', $1, $2, $3)', [ 'bytea', 'text', 'geometry' ]) + + if id is None and self.read_id and 'id' in data: + id = data['id'] + if geo is None and self.read_geo and 'geo' in data: + geo = data['geo'] + + plpy.execute(self.plan_add, [ pickle.dumps(data), id, geo ]) def get(self, id=None): - pass + import pickle + if id is None: + try: + self.plan_get + except: + self.plan_get = plpy.prepare('select * from 
_pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''"), []) + + cursor = plpy.cursor(self.plan_get, []) + + else: + try: + self.plan_get_id + except: + self.plan_get_id = plpy.prepare('select * from _pgmapcss_PGCache where id=ANY($1) and cache_id=' + str(self.cache_id).replace("'", "''"), ['text[]']) + + if type(id) == str: + id = [ id ] + + cursor = plpy.cursor(self.plan_get_id, [id]) + + for r in cursor: + yield pickle.loads(r['data']) + + def prepare(self, query, param_type=[]): + return plpy.prepare(query.replace('{table}', '(select data, id, geo from _pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''") + ') t'), param_type) + + def execute(self, plan, param=[]): + import pickle + ret = [] + + for r in plpy.execute(plan, param): + if 'data' in r: + r['data'] = pickle.loads(r['data']) + ret.append(r) + + return ret + + def cursor(self, plan, param=[]): + import pickle + ret = [] - def query(self, qry): - pass + for r in plpy.cursor(plan, param): + if 'data' in r: + r['data'] = pickle.loads(r['data']) + yield r def get_PGCache(id, read_id=False, read_geo=False): global PGCaches From fd91819cb500b5046be93aba4765e85ad29b3489 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 06:05:38 +0100 Subject: [PATCH 041/209] PGCache: example code --- pgmapcss/misc/pgcache.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 570d976d..675e20c5 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -1,3 +1,19 @@ +# Example code: +# try: +# cache = get_PGCache('foo', read_id=True) +# except: +# cache = PGCache('foo', read_id=True) +# cache.add({{'id': '1', 'foo': 'bar' }}) +# cache.add({{'id': '2', 'foo': 'foo' }}) +# cache.add({{'id': '3', 'foo': 'bla' }}) +# +# for r in cache.get(['1', '2']): +# print(r) +# +# plan = cache.prepare('select * from {table}', []) +# for r in cache.cursor(plan): +# 
print(r) + class PGCache: def __init__(self, id, read_id=False, read_geo=False): global PGCaches From 692e4a08b988569dd26c7a31ec56c162d37a1a22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 14:14:07 +0100 Subject: [PATCH 042/209] PGCache: documentation --- pgmapcss/misc/pgcache.py | 42 +++++++++++++++++++++++++++++++++++++--- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 675e20c5..9fbdc9aa 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -15,6 +15,11 @@ # print(r) class PGCache: +### __init__(): initialize a new cache +# Parameters: +# id: identify cache +# read_id: (boolean) should the id of the object (if possible) should automatically be read (default False) +# read_geo: (boolean) should the geometry of the object (if possible) should automatically be read (default False) def __init__(self, id, read_id=False, read_geo=False): global PGCaches try: @@ -28,6 +33,11 @@ def __init__(self, id, read_id=False, read_geo=False): self.read_geo = read_geo self.cache_id = len(PGCaches) +### add(): add new data +# Parameters: +# data: some data, will be serialized (and on return unserialized) +# id: (optional, string) identify this object. If read_id is True and data a dict with a key 'id', this will be used +# geo: (optional, geometry) geometry of the object. 
If read_geo is True and data a dict with a key 'geo', this will be used def add(self, data, id=None, geo=None): import pickle try: @@ -35,13 +45,16 @@ def add(self, data, id=None, geo=None): except: self.plan_add = plpy.prepare('insert into _pgmapcss_PGCache values (\'' + str(self.cache_id).replace("'", "''") + '\', $1, $2, $3)', [ 'bytea', 'text', 'geometry' ]) - if id is None and self.read_id and 'id' in data: + if id is None and self.read_id and type(data) is dict and 'id' in data: id = data['id'] - if geo is None and self.read_geo and 'geo' in data: + if geo is None and self.read_geo and type(data) is dict and 'geo' in data: geo = data['geo'] plpy.execute(self.plan_add, [ pickle.dumps(data), id, geo ]) +### get(): a generator function returning data from cache +# Parameters: +# id: (optional, string or list of strings) only return data which matches the id. def get(self, id=None): import pickle if id is None: @@ -66,9 +79,21 @@ def get(self, id=None): for r in cursor: yield pickle.loads(r['data']) +### prepare(): prepare a SQL select statement +# Parameters: +# query: (string) a database query containing "{table}" as database source, e.g. "select * from {table} where id=$1" +# param_type: (optional, list of type identifiers) parameter types to the database query, references by $1, $2, ... from the query, e.g. ['text'] +# Return: +# a plan, which can be passed to execute() or cursor() def prepare(self, query, param_type=[]): return plpy.prepare(query.replace('{table}', '(select data, id, geo from _pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''") + ') t'), param_type) +### execute(): execute a plan and return a list of result rows +# Parameters: +# plan: a plan from prepare() +# param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] +# Return: +# a list with all result rows. if 'data' is in the result columns it will be unserialized. 
def execute(self, plan, param=[]): import pickle ret = [] @@ -80,6 +105,12 @@ def execute(self, plan, param=[]): return ret +### cursor(): execute a plan and yield result rows +# Parameters: +# plan: a plan from prepare() +# param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] +# Return: +# a generator generating result rows. if 'data' is in the result columns it will be unserialized. def cursor(self, plan, param=[]): import pickle ret = [] @@ -89,7 +120,12 @@ def cursor(self, plan, param=[]): r['data'] = pickle.loads(r['data']) yield r -def get_PGCache(id, read_id=False, read_geo=False): +### get_PGCache(): get an existing cache, will throw exception if it doesn't exist +# Parameters: +# id: id of the cache +# Return: +# return the existing cache +def get_PGCache(id): global PGCaches try: PGCaches From 77e496a85c12a1d4f12f35b296ded3959144774d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 14:14:07 +0100 Subject: [PATCH 043/209] PGCache: documentation --- pgmapcss/misc/pgcache.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 9fbdc9aa..4206a140 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -1,6 +1,6 @@ # Example code: # try: -# cache = get_PGCache('foo', read_id=True) +# cache = get_PGCache('foo') # except: # cache = PGCache('foo', read_id=True) # cache.add({{'id': '1', 'foo': 'bar' }}) @@ -13,6 +13,11 @@ # plan = cache.prepare('select * from {table}', []) # for r in cache.cursor(plan): # print(r) +# +# The cache database table has the following columns: +# * data: the data of the object in serialized form +# * id: id of the object (see add() for details) +# * geo: geometry of the object (see add() for details) class PGCache: ### __init__(): initialize a new cache From 51751c23cbecb1c96f4f7e16e1bf1480c895234f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: 
Tue, 18 Nov 2014 14:31:14 +0100 Subject: [PATCH 044/209] DB/Overpass: bugfixes --- pgmapcss/db/overpass/db_functions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index e23caa5d..37b96d74 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -128,8 +128,8 @@ def assemble_object(r): } if r['type'] == 'node': t['id'] = 'n' + str(r['id']) - t['types'] = ['area', 'line', 'way'] - t['geo'] = node_geom(r['lat'], r['lon']), + t['types'] = ['node', 'point'] + t['geo'] = node_geom(r['lat'], r['lon']) elif r['type'] == 'way': is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] t['id'] = 'w' + str(r['id']) From 92fe06fd856d3b1ddcc7a9bb170d052e2971baf4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 15:34:01 +0100 Subject: [PATCH 045/209] DB/Overpass: re-implement objects_near() using PGCache --- pgmapcss/db/overpass/db_functions.py | 58 +++++++++++++++------------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 37b96d74..a63c8a5f 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -449,7 +449,25 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition yield(t) -def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): +def objects_near(max_distance, ob, parent_type, parent_conditions, child_conditions, check_geo=None): + cache_id = 'objects_near' + '|' + parent_type + '|' + repr(parent_conditions) + + max_distance = to_float(eval_metric([ max_distance, 'u' ])) + if max_distance is None: + return + + try: + cache = get_PGCache(cache_id) + except: + cache = PGCache(cache_id, read_geo=True) + + plan = plpy.prepare('select 
ST_Transform(ST_Envelope(ST_Buffer(ST_Transform(ST_Envelope($1::geometry), {unit.srs}), $2)), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ render_context['bbox'], max_distance ]) + bbox = res[0]['r'] + + for t in objects(bbox, { parent_type: parent_conditions }): + cache.add(t) + if ob: geom = ob['geo'] elif 'geo' in current['properties'][current['pseudo_element']]: @@ -457,13 +475,7 @@ def objects_near(max_distance, ob, parent_selector, where_clause, child_conditio else: geom = current['object']['geo'] - if where_clause == '': - where_clause = 'true' - - max_distance = to_float(eval_metric([ max_distance, 'u' ])) - if max_distance is None: - return [] - elif max_distance == 0: + if max_distance == 0: bbox = geom else: plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) @@ -471,27 +483,19 @@ def objects_near(max_distance, ob, parent_selector, where_clause, child_conditio bbox = res[0]['r'] if check_geo == 'within': - where_clause += " and ST_DWithin(way, $2, 0.0)" + where_clause += " and ST_DWithin(geo, $1, 0.0)" elif check_geo == 'surrounds': - where_clause += " and ST_DWithin($2, way, 0.0)" + where_clause += " and ST_DWithin($1, geo, 0.0)" elif check_geo == 'overlaps': - where_clause += " and ST_Overlaps($2, way)" - - obs = [] - for ob in objects( - bbox, - { parent_selector: where_clause }, - { # add_columns - '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' - }, - [ 'geometry' ], - [ geom ] - ): - if ob['id'] != current['object']['id'] and ob['__distance'] <= max_distance: + where_clause += " and ST_Overlaps($1, geo)" + + plan = cache.prepare('select * from (select *, ST_Distance(ST_Transform($1, {unit.srs}), ST_Transform(geo, {unit.srs})) dist from {table} where geo && $2 offset 0) t order by dist asc', [ 'geometry', 'geometry' ]) + for t in cache.cursor(plan, [ geom, bbox ]): + ob = t['data'] + + 
if ob['id'] != current['object']['id'] and t['dist'] <= max_distance: ob['link_tags'] = { - 'distance': eval_metric([ str(ob['__distance']) + 'u', 'px' ]) + 'distance': eval_metric([ str(t['dist']) + 'u', 'px' ]) } - obs.append(ob) - obs = sorted(obs, key=lambda ob: ob['__distance'] ) - return obs + yield ob From 6c91a5afae2a305924d01d0873c88443d8613c46 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 15:52:16 +0100 Subject: [PATCH 046/209] DB/Overpass: remove last remnants of code, the module was cloned from --- pgmapcss/db/overpass/db.py | 15 +-------------- pgmapcss/db/overpass/init.sql | 14 -------------- 2 files changed, 1 insertion(+), 28 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 8da9acfb..b1f6a07d 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -15,20 +15,7 @@ def __init__(self, conn, stat): def tag_type(self, key, condition, selector, statement): if key[0:4] == 'osm:': - if key == 'osm:id': - return ( 'column', 'id', self.compile_modify_id ) - elif key == 'osm:user': - return ( 'column', 'user_id', self.compile_user_id ) - elif key == 'osm:user_id': - return ( 'column', 'user_id' ) - elif key == 'osm:version': - return ( 'column', 'version' ) - elif key == 'osm:timestamp': - return ( 'column', 'tstamp' ) - elif key == 'osm:changeset': - return ( 'column', 'changeset_id' ) - else: - return None + return None return ( 'overpass', key ) diff --git a/pgmapcss/db/overpass/init.sql b/pgmapcss/db/overpass/init.sql index fca61fdd..e69de29b 100644 --- a/pgmapcss/db/overpass/init.sql +++ b/pgmapcss/db/overpass/init.sql @@ -1,14 +0,0 @@ --- Create multicolumn way / tags indexes -do $$ -begin -if not exists ( - select 1 - from pg_class - where relname = 'nodes' - ) then - - raise notice E'\ncreating multicolumn indexes - please be patient ...'; - create index nodes_geom_tags on nodes using gist(geom, tags); - create index ways_linestring_tags on 
ways using gist(linestring, tags); -end if; -end$$; From 5b9da532524c3651b53a902a72c4ae5e491f8a49 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 16:20:23 +0100 Subject: [PATCH 047/209] DB/Overpass: Compile set statements: Bugfix --- pgmapcss/db/overpass/db.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index b1f6a07d..31cebdb4 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -157,14 +157,12 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte else: raise CompileError('unknown tag type {}'.format(tag_type)) - if None in ret: - ret.remove(None) - if len(ret) == 0: + if ret is None: return set_statements if len(set_statements): return [ - s + [ ret ] + s + [[ ret ]] for s in set_statements ] From b4a9f72005fc63d4a2237e8f9e8a9b15982994e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 16:20:53 +0100 Subject: [PATCH 048/209] Mode database-function/PGCache: db function must be volatile for inserting to table --- pgmapcss/mode/database-function/footer.inc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/mode/database-function/footer.inc b/pgmapcss/mode/database-function/footer.inc index 8755b707..8a330065 100644 --- a/pgmapcss/mode/database-function/footer.inc +++ b/pgmapcss/mode/database-function/footer.inc @@ -1 +1 @@ -$body$ language 'plpython3u' immutable; +$body$ language 'plpython3u' volatile; From 87de30e37f62e26fe6dfdc1195aa94d730bedd4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 18 Nov 2014 20:55:48 +0100 Subject: [PATCH 049/209] DB/Overpass: bugfix: multipolygons need outer_ways; also fix ID (m1234) --- pgmapcss/db/overpass/db_functions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py 
b/pgmapcss/db/overpass/db_functions.py index a63c8a5f..4a01f1aa 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -253,13 +253,14 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v elif outer_tags != tags: is_valid_mp = True - if is_valid_mp: + if is_valid_mp and outer_tags is not None: rels_done.append(rid) for outer in r['members']: if outer['role'] in ('', 'outer'): ways_done.append(outer['ref']) t = assemble_object(r) + t['id'] = 'm' + str(r['id']) t['types'] = ['multipolygon', 'area'] t['tags'] = outer_tags From 661fc1574501c6313f6cab91b21ec0363a359e8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 06:31:26 +0100 Subject: [PATCH 050/209] PGCache: move documentation to external file --- pgmapcss/misc/pgcache.md | 78 ++++++++++++++++++++++++++++++++++++++++ pgmapcss/misc/pgcache.py | 57 ----------------------------- 2 files changed, 78 insertions(+), 57 deletions(-) create mode 100644 pgmapcss/misc/pgcache.md diff --git a/pgmapcss/misc/pgcache.md b/pgmapcss/misc/pgcache.md new file mode 100644 index 00000000..b91eb5e1 --- /dev/null +++ b/pgmapcss/misc/pgcache.md @@ -0,0 +1,78 @@ +PGCache implements a database cache with a postgis geometry column and the possibility to do queries into the database: + +Example code: +```python + try: + cache = get_PGCache('foo') + except: + cache = PGCache('foo', read_id=True) + cache.add({'id': '1', 'foo': 'bar' }) + cache.add({'id': '2', 'foo': 'foo' }) + cache.add({'id': '3', 'foo': 'bla' }) + + for r in cache.get(['1', '2']): + print(r) + + plan = cache.prepare('select * from {table}', []) + for r in cache.cursor(plan): + print(r) +``` + +The cache database table has the following columns: +* data: the data of the object in serialized form +* id: id of the object (see add() for details) +* geo: geometry of the object (see add() for details) + +Functions Overview +================== +get_PGCache(): 
get an existing cache, will throw exception if it doesn't exist +------------------------------------------------------------------------------ +Parameters: +* id: id of the cache +Return: +* return the existing cache + +The following class functions are available: +constructor: initialize a new cache +---------------------------------- +Parameters: +* id: identify cache +* read_id: (boolean) should the id of the object (if possible) should automatically be read (default False) +* read_geo: (boolean) should the geometry of the object (if possible) should automatically be read (default False) + +add: add new data +------------------- +Parameters: +* data: some data, will be serialized (and on return unserialized) +* id: (optional, string) identify this object. If read_id is True and data a dict with a key 'id', this will be used +* geo: (optional, geometry) geometry of the object. If read_geo is True and data a dict with a key 'geo', this will be used + +get(): a generator function returning data from cache +----------------------------------------------------- +Parameters: +* id: (optional, string or list of strings) only return data which matches the id. + +prepare(): prepare a SQL select statement +----------------------------------------- +Parameters: +* query: (string) a database query containing "{table}" as database source, e.g. "select * from {table} where id=$1" +* param_type: (optional, list of type identifiers) parameter types to the database query, references by $1, $2, ... from the query, e.g. ['text'] +Return: +* a plan, which can be passed to execute() or cursor() + +execute(): execute a plan and return a list of result rows +---------------------------------------------------------- +Parameters: +* plan: a plan from prepare() +* param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] +Return: +* a list with all result rows. if 'data' is in the result columns it will be unserialized. 
+ +cursor(): execute a plan and yield result rows +---------------------------------------------- +Parameters: +* plan: a plan from prepare() +* param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] +Return: +* a generator generating result rows. if 'data' is in the result columns it will be unserialized. + diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 4206a140..544d62e4 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -1,30 +1,4 @@ -# Example code: -# try: -# cache = get_PGCache('foo') -# except: -# cache = PGCache('foo', read_id=True) -# cache.add({{'id': '1', 'foo': 'bar' }}) -# cache.add({{'id': '2', 'foo': 'foo' }}) -# cache.add({{'id': '3', 'foo': 'bla' }}) -# -# for r in cache.get(['1', '2']): -# print(r) -# -# plan = cache.prepare('select * from {table}', []) -# for r in cache.cursor(plan): -# print(r) -# -# The cache database table has the following columns: -# * data: the data of the object in serialized form -# * id: id of the object (see add() for details) -# * geo: geometry of the object (see add() for details) - class PGCache: -### __init__(): initialize a new cache -# Parameters: -# id: identify cache -# read_id: (boolean) should the id of the object (if possible) should automatically be read (default False) -# read_geo: (boolean) should the geometry of the object (if possible) should automatically be read (default False) def __init__(self, id, read_id=False, read_geo=False): global PGCaches try: @@ -38,11 +12,6 @@ def __init__(self, id, read_id=False, read_geo=False): self.read_geo = read_geo self.cache_id = len(PGCaches) -### add(): add new data -# Parameters: -# data: some data, will be serialized (and on return unserialized) -# id: (optional, string) identify this object. If read_id is True and data a dict with a key 'id', this will be used -# geo: (optional, geometry) geometry of the object. 
If read_geo is True and data a dict with a key 'geo', this will be used def add(self, data, id=None, geo=None): import pickle try: @@ -57,9 +26,6 @@ def add(self, data, id=None, geo=None): plpy.execute(self.plan_add, [ pickle.dumps(data), id, geo ]) -### get(): a generator function returning data from cache -# Parameters: -# id: (optional, string or list of strings) only return data which matches the id. def get(self, id=None): import pickle if id is None: @@ -84,21 +50,9 @@ def get(self, id=None): for r in cursor: yield pickle.loads(r['data']) -### prepare(): prepare a SQL select statement -# Parameters: -# query: (string) a database query containing "{table}" as database source, e.g. "select * from {table} where id=$1" -# param_type: (optional, list of type identifiers) parameter types to the database query, references by $1, $2, ... from the query, e.g. ['text'] -# Return: -# a plan, which can be passed to execute() or cursor() def prepare(self, query, param_type=[]): return plpy.prepare(query.replace('{table}', '(select data, id, geo from _pgmapcss_PGCache where cache_id=' + str(self.cache_id).replace("'", "''") + ') t'), param_type) -### execute(): execute a plan and return a list of result rows -# Parameters: -# plan: a plan from prepare() -# param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] -# Return: -# a list with all result rows. if 'data' is in the result columns it will be unserialized. def execute(self, plan, param=[]): import pickle ret = [] @@ -110,12 +64,6 @@ def execute(self, plan, param=[]): return ret -### cursor(): execute a plan and yield result rows -# Parameters: -# plan: a plan from prepare() -# param: (optional, list) parameters to the database query, e.g. [ 'w1234' ] -# Return: -# a generator generating result rows. if 'data' is in the result columns it will be unserialized. 
def cursor(self, plan, param=[]): import pickle ret = [] @@ -125,11 +73,6 @@ def cursor(self, plan, param=[]): r['data'] = pickle.loads(r['data']) yield r -### get_PGCache(): get an existing cache, will throw exception if it doesn't exist -# Parameters: -# id: id of the cache -# Return: -# return the existing cache def get_PGCache(id): global PGCaches try: From c2d0cfc440c8f3b825c24991c48f0592debc4a8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 06:35:09 +0100 Subject: [PATCH 051/209] PGCache: rename class to PGCache_table and create transparent creation function --- pgmapcss/misc/pgcache.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 544d62e4..e669a5af 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -1,4 +1,16 @@ -class PGCache: +def get_PGCache(id): + global PGCaches + try: + PGCaches + except: + PGCaches = {} + + return PGCaches[id] + +def PGCache(id, read_id=False, read_geo=False): + return PGCache_table(id, read_id, read_geo) + +class PGCache_table: def __init__(self, id, read_id=False, read_geo=False): global PGCaches try: @@ -72,12 +84,3 @@ def cursor(self, plan, param=[]): if 'data' in r: r['data'] = pickle.loads(r['data']) yield r - -def get_PGCache(id): - global PGCaches - try: - PGCaches - except: - PGCaches = {} - - return PGCaches[id] From 015a18fc9fec8c2ec4eb4e15e829459bf8c4e437 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 06:43:04 +0100 Subject: [PATCH 052/209] PGCache: create base class --- pgmapcss/misc/pgcache.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index e669a5af..26baca02 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -10,7 +10,7 @@ def get_PGCache(id): def PGCache(id, read_id=False, 
read_geo=False): return PGCache_table(id, read_id, read_geo) -class PGCache_table: +class PGCache_base: def __init__(self, id, read_id=False, read_geo=False): global PGCaches try: @@ -22,6 +22,26 @@ def __init__(self, id, read_id=False, read_geo=False): self.id = id self.read_id = read_id self.read_geo = read_geo + + def add(self, data, id=None, geo=None): + pass + + def get(self, id=None): + pass + + def prepare(self, query, param_type=[]): + pass + + def execute(self, plan, param=[]): + pass + + def cursor(self, plan, param=[]): + pass + +class PGCache_table(PGCache_base): + def __init__(self, id, read_id=False, read_geo=False): + global PGCaches + super().__init__(id, read_id, read_geo) self.cache_id = len(PGCaches) def add(self, data, id=None, geo=None): From a7605225d3e04eaf782a5df159b8c7392c20fe0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 06:57:54 +0100 Subject: [PATCH 053/209] PGCache: implement PGCache_virtual --- pgmapcss/misc/pgcache.py | 61 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 26baca02..50de5585 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -104,3 +104,64 @@ def cursor(self, plan, param=[]): if 'data' in r: r['data'] = pickle.loads(r['data']) yield r + +class PGCache_virtual(PGCache_base): + def __init__(self, id, read_id=False, read_geo=False): + super().__init__(id, read_id, read_geo) + self.cache = [] + self.db_param = None + + def add(self, data, id=None, geo=None): + if id is None and self.read_id and type(data) is dict and 'id' in data: + id = data['id'] + if geo is None and self.read_geo and type(data) is dict and 'geo' in data: + geo = data['geo'] + + self.cache.append(( data, id, geo, )) + self.db_param = None + + def get(self, id=None): + if id is None: + for r in self.cache: + yield r[0] + + else: + if type(id) == str: + id = [ id ] + + for r in self.cache: 
+ if r[1] in id: + yield r[0] + + def prepare(self, query, param_type=[]): + l = len(param_type) + return plpy.prepare(query.replace('{table}', '(select unnest(${}) as data, unnest(${}) as id, unnest(${}) as geo) t'.format(l+1, l+2, l+3)), param_type + [ 'int[]', 'text[]', 'geometry[]' ]) + + def get_db_param(self): + if self.db_param is None: + self.db_param = [ + list(range(0, len(self.cache))), + [ r[1] for r in self.cache ], + [ r[2] for r in self.cache ], + ] + + return self.db_param + + def execute(self, plan, param=[]): + param += self.get_db_param() + + ret = [] + for r in plpy.execute(plan, param): + if 'data' in r: + r['data'] = self.cache[r['data']][0] + ret.append(r) + + return ret + + def cursor(self, plan, param=[]): + param += self.get_db_param() + + for r in plpy.cursor(plan, param): + if 'data' in r: + r['data'] = self.cache[r['data']][0] + yield(r) From fec8246db942e7e749c9bf916af1bb97354ac193 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 07:10:35 +0100 Subject: [PATCH 054/209] PGCache: automatically decide on correct flavour + documentation --- pgmapcss/misc/pgcache.md | 2 +- pgmapcss/misc/pgcache.py | 19 ++++++++++++++++++- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/pgmapcss/misc/pgcache.md b/pgmapcss/misc/pgcache.md index b91eb5e1..98ec68c3 100644 --- a/pgmapcss/misc/pgcache.md +++ b/pgmapcss/misc/pgcache.md @@ -1,4 +1,4 @@ -PGCache implements a database cache with a postgis geometry column and the possibility to do queries into the database: +PGCache implements a database cache with a postgis geometry column and the possibility to do queries into the database. PGCache comes in two flavours: *table*, where the data is written into a real postgresql database table or *virtual*, where the data is kept in memory and - if necessary - a virtual database table is used. 
Which flavour is used is decided by PGCache (usually *table*, only if the database happens to be read-only, *virtual* is used). Example code: ```python diff --git a/pgmapcss/misc/pgcache.py b/pgmapcss/misc/pgcache.py index 50de5585..e1348e3e 100644 --- a/pgmapcss/misc/pgcache.py +++ b/pgmapcss/misc/pgcache.py @@ -8,7 +8,24 @@ def get_PGCache(id): return PGCaches[id] def PGCache(id, read_id=False, read_geo=False): - return PGCache_table(id, read_id, read_geo) + global PGCache_type + try: + PGCache_type + except: + try: + plan = plpy.prepare('insert into _pgmapcss_PGCache values (null, null, null, null)', []) + plpy.execute(plan) + plan = plpy.prepare('delete from _pgmapcss_PGCache where cache_id is null', []) + plpy.execute(plan) + + PGCache_type = 1 + except: + PGCache_type = 2 + + if PGCache_type == 1: + return PGCache_table(id, read_id, read_geo) + elif PGCache_type == 2: + return PGCache_virtual(id, read_id, read_geo) class PGCache_base: def __init__(self, id, read_id=False, read_geo=False): From 49b245cb1620d11ec1fba7cb4459d46400287148 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 15:50:00 +0100 Subject: [PATCH 055/209] DB/Overpass::relation_geom(): bugfixes --- pgmapcss/db/overpass/db_functions.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 4a01f1aa..5aa996d5 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -85,14 +85,17 @@ def relation_geom(r): inner_lines = [] for m in r['members']: + if not 'geometry' in m: + continue + if m['role'] in ('outer', ''): - if m['geometry'][0] == m['geometry'][-1]: + if len(m['geometry']) > 3 and m['geometry'][0] == m['geometry'][-1]: polygons.append(linestring(m['geometry'])) else: lines.append(linestring(m['geometry'])) elif m['role'] in ('inner'): - if m['geometry'][0] == m['geometry'][-1]: + if 
len(m['geometry']) > 3 and m['geometry'][0] == m['geometry'][-1]: inner_polygons.append(linestring(m['geometry'])) else: inner_lines.append(linestring(m['geometry'])) @@ -117,7 +120,12 @@ def relation_geom(r): inner_polygons.append(r['geom']) for p in inner_polygons: - polygons = plpy.execute(geom_plan_substract, [ polygons, p ])[0]['geom'] + try: + polygons = plpy.execute(geom_plan_substract, [ polygons, p ])[0]['geom'] + except: + plpy.warning('DB/Overpass::relation_geom({}): error substracting inner polygons'.format(r['id'])) + pass + inner_polygons = None return polygons From 240cf1aca00c77e73bbf1f943bf365c1d5275cc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 19 Nov 2014 14:47:27 +0100 Subject: [PATCH 056/209] DB/Overpass: also load outer way MPs with role "" --- pgmapcss/db/overpass/db_functions.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 5aa996d5..859adfc8 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -221,10 +221,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # multipolygon has no (relevant) tags and all outer ways share the same # tags (save non relevant tags) the ways are discarded and the relation # is used - as type 'multipolygon' and a 'm' prefixed to the ID + q1 = ');('.join(w).replace('__TYPE__', 'way(r.rel:"outer")') + q2 = ');('.join(w).replace('__TYPE__', 'way(r.rel:"")') + q = qry.replace('__QRY__', 'relation[type=multipolygon] -> .rel;' + - '((' + ');('.join(w) + ');) -> .outer;relation(bw.outer)[type=multipolygon]') + '.outer out tags qt;' - q = q.replace('__TYPE__', 'way(r.rel:"outer")') + '((' + q1 + q2 + ');) -> .outer;relation(bw.outer)[type=multipolygon]') + '.outer out tags qt;' _ways = {} _rels = {} From 4e4b0bfcd186144b1ce347ef1a37217e822f3d56 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 07:09:04 +0100 Subject: [PATCH 057/209] DB/Overpass: loading api result: process results incrementally --- pgmapcss/db/overpass/db_functions.py | 46 ++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 12 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 859adfc8..ba77797c 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -8,19 +8,41 @@ def overpass_query(query): plpy.warning(query) url = '{db.overpass-url}?' +\ urllib.parse.urlencode({ 'data': query }) - f = urllib.request.urlopen(url).read().decode('utf-8') - try: - res = json.loads(f) - except ValueError: - # areas not initialized -> ignore - if re.search('osm3s_v[0-9\.]+_areas', f): - return - else: - raise - - for r in res['elements']: - yield(r) + f = urllib.request.urlopen(url) + + block = '' + mode = 0 + while True: + r = f.readline().decode('utf-8') + if r == '': + raise Exception('Connection closed early from Overpass API') + + if mode == 0: + if re.search('"elements":', r): + mode = 1 + + # areas not initialized -> ignore + if re.search('osm3s_v[0-9\.]+_areas', r): + f.close() + return + + elif mode == 1: + if re.match('}', r): + block += '}' + yield json.loads(block) + + block = '' + + elif re.match('\s*$', block) and re.match('.*\]', r): + f.close() + return + + else: + block += r + + if mode == 0: + raise Exception('Could not parse Overpass API result') def node_geom(lat, lon): global geom_plan From 8b1acdf5a60cdab43d79574b405e49eebe940cd5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 07:14:32 +0100 Subject: [PATCH 058/209] DB/Overpass: query areas: filter for interesting results --- pgmapcss/db/overpass/db_functions.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 
ba77797c..5624ecf6 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -347,7 +347,10 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v plan = plpy.prepare("select ST_Y(ST_Centroid($1::geometry)) || ',' || ST_X(ST_Centroid($1::geometry)) as geom", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) - q = qry.replace('__QRY__', 'is_in({0});way(pivot);out meta geom;is_in({0});relation(pivot)'.format(res[0]['geom'])) + q1 = ');('.join(w).replace('__TYPE__', 'relation(pivot.a)') + q2 = ');('.join(w).replace('__TYPE__', 'way(pivot.a)') + + q = ('[out:json];is_in({})->.a;(' + q1 + q2 + ');out meta geom;').format(res[0]['geom']) for r in overpass_query(q): if (r['type'] == 'way' and r['id'] in ways_done) or\ From f81e0472ef65095b3c88b343ecaeeb05c63e436c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 12:07:27 +0100 Subject: [PATCH 059/209] DB/Overpass: URL does no longer include '/interpreter' --- doc/config_options.md | 2 +- pgmapcss/db/overpass/db.py | 2 +- pgmapcss/db/overpass/db_functions.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/config_options.md b/doc/config_options.md index 11c93d5e..f3124f1a 100644 --- a/doc/config_options.md +++ b/doc/config_options.md @@ -10,7 +10,7 @@ The following config options are supported: | unit.srs | Spatial Reference System to use for distances. If other values than 900913 are used, unexpected behaviour might happen. | 900913 | | srs | Default Spatial Reference System to use on the frontend side | 900913 when using with renderer (mode 'database-function'), 4326 otherwise | | db.hstore-only | osm2pgsql only: Do not use the separate tag columns, only use the hstore 'tags' column. Might be faster on large databases in combination with a multicolumn index on way and tags: e.g. create index planet_osm_point_way_tags on planet_osm_point using gist(way, tags). 
Requires --hstore-all on osm2pgsql when importing the database. | true/**false** | -| db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api/interpreter | | +| db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api | | | offline | When compiling standalone mode, do not make any requests to the database. | true/**false** | | debug.profiler | during execution, show some statistics about query/processing time and count of objects. | true/**false** | | debug.context | show bounding box and scale denominator of requests. | true/**false** | diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 31cebdb4..7768edba 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -11,7 +11,7 @@ def __init__(self, conn, stat): self.stat['config']['db.srs'] = 4326 if not 'db.overpass-url' in self.stat['config']: - self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api/interpreter' + self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api' def tag_type(self, key, condition, selector, statement): if key[0:4] == 'osm:': diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 5624ecf6..e7f4fa0b 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -6,7 +6,7 @@ def overpass_query(query): import json plpy.warning(query) - url = '{db.overpass-url}?' +\ + url = '{db.overpass-url}/interpreter?' 
+\ urllib.parse.urlencode({ 'data': query }) f = urllib.request.urlopen(url) From 3fa012f6934b9a267ee31e85065e6c1e4e12946c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 12:10:35 +0100 Subject: [PATCH 060/209] DB/Overpass: new config option 'debug.overpass_queries' --- doc/config_options.md | 1 + pgmapcss/db/overpass/db_functions.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/doc/config_options.md b/doc/config_options.md index f3124f1a..472be091 100644 --- a/doc/config_options.md +++ b/doc/config_options.md @@ -17,6 +17,7 @@ The following config options are supported: | debug.counter | Count of rendered map features in comparison to map features loaded from the database. If 'verbose', print each map feature which was not rendered. | true/**false**/verbose | | debug.rusage | show resource usage at end of processing. | true/**false** | | debug.explain_queries | Print queries, their plans and count of executions to stderr (standalone mode only). | true/**false** | +| debug.overpass_queries | overpass only: Print a debug message for each query posted to the Overpass API | true/**false** | Advances options: diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index e7f4fa0b..93448e55 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -5,7 +5,9 @@ def overpass_query(query): import urllib.parse import json +# START debug.overpass_queries plpy.warning(query) +# END debug.overpass_queries url = '{db.overpass-url}/interpreter?' 
+\ urllib.parse.urlencode({ 'data': query }) From d7a1c9dd050ea32fd6111975eab40cfcdccbc2c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 12:26:14 +0100 Subject: [PATCH 061/209] DB/Overpass: documentation --- doc/database.md | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/doc/database.md b/doc/database.md index e7b96bcb..e66b034d 100644 --- a/doc/database.md +++ b/doc/database.md @@ -1,4 +1,4 @@ -pgmapcss supports different kind of database layouts, currently osm2pgsql and osmosis pgsnapshot. Here's a short description of advantages and disadvantages. +pgmapcss supports different kind of database layouts, currently osm2pgsql, osmosis pgsnapshot and overpass. Here's a short description of advantages and disadvantages. osm2pgsql ========= @@ -50,3 +50,27 @@ Behaviour can be influenced with the following config options: |------------------|-------------|----------------- | db.srs | Spatial Reference System used in the database. Autodetected. | Usual values: 4326 (WGS-84), 900913 resp. 3857 (Spherical Mercator for Web Maps) | | db.multipolygons | Specify whether the multipolygons table is present and should be used. Usually autodected. Needed when using offline mode (default: false) | true/false + +Overpass API (short: overpass) +============================== +In contrast to osm2pgsql and osmosis, Overpass API is an external database which is queried by HTTP requests. Also, the query language is very different from SQL. By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. You still need local PostgreSQL database, as it is used for connecting to Mapnik and for accessing the PostGIS functions. 
+ +* Overpass API is faster then PostgreSQL/PostGIS on larger areas +* Full multipolygon support (handled similar to Osmosis pgsnapshot) +* In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. + +Options +------- +Behaviour can be influenced with the following config options: + +| Config option | Description | Possible values +|------------------|-------------|----------------- +| db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api | | +| debug.overpass_queries | overpass only: Print a debug message for each query posted to the Overpass API | true/**false** | + +Example usage: +```sh +pgmapcss --database-type=overpass -c db.overpass-url=http://overpass.osm.rambler.ru/cgi -d LOCAL_DB -u USER -p PASSWORD test.mapcss +``` + +* -d, -u and -p are the parameters of your local PostgreSQL database From 9f13767e45cbc3dc30dc86633542ce805688ada1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 13:27:44 +0100 Subject: [PATCH 062/209] DB/Overpass: improve documentation --- doc/database.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/doc/database.md b/doc/database.md index e66b034d..78fddd75 100644 --- a/doc/database.md +++ b/doc/database.md @@ -53,10 +53,12 @@ Behaviour can be influenced with the following config options: Overpass API (short: overpass) ============================== -In contrast to osm2pgsql and osmosis, Overpass API is an external database which is queried by HTTP requests. Also, the query language is very different from SQL. By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. 
For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. You still need local PostgreSQL database, as it is used for connecting to Mapnik and for accessing the PostGIS functions. +In contrast to osm2pgsql and osmosis, Overpass API is an external database which is queried by HTTP requests. Also, the query language is very different from SQL. Overpass API is faster then PostgreSQL/PostGIS on large viewports. -* Overpass API is faster then PostgreSQL/PostGIS on larger areas -* Full multipolygon support (handled similar to Osmosis pgsnapshot) +By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. Additionally, you still need a local PostgreSQL database, as it is used for connecting to Mapnik and accessing the PostGIS functions. + +* In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead. +* Full multipolygon support (handled similar to Osmosis pgsnapshot). * In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. 
Options From 62d1eab772739b72c7cafdb11f9850c5c5c7f49f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 15:56:44 +0100 Subject: [PATCH 063/209] DB/Overpass: Documentation, about meta tags (osm:id, and similar) --- doc/database.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/database.md b/doc/database.md index 78fddd75..f4bee296 100644 --- a/doc/database.md +++ b/doc/database.md @@ -60,6 +60,7 @@ By default, the API on overpass-api.de will be used, therefore it is not necessa * In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead. * Full multipolygon support (handled similar to Osmosis pgsnapshot). * In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. +* Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). 
Options ------- From cf1cf0e33135ca82d90044cca60ee0f2950f7dce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 17:23:04 +0100 Subject: [PATCH 064/209] DB/Overpass: ways can no longer be area and line - depend on condition --- pgmapcss/db/overpass/db_functions.py | 49 +++++++++++++++++----------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 93448e55..96095548 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -154,7 +154,7 @@ def relation_geom(r): return polygons -def assemble_object(r): +def assemble_object(r, way_polygon=None): t = { 'tags': r['tags'] if 'tags' in r else {}, } @@ -163,11 +163,15 @@ def assemble_object(r): t['types'] = ['node', 'point'] t['geo'] = node_geom(r['lat'], r['lon']) elif r['type'] == 'way': - is_polygon = len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] + is_polygon = way_polygon in (True, None) and len(r['nodes']) > 3 and r['nodes'][0] == r['nodes'][-1] + if way_polygon is True and not is_polygon: + return None t['id'] = 'w' + str(r['id']) - t['types'] = ['line', 'way'] + t['types'] = ['way'] if is_polygon: t['types'].append('area') + else: + t['types'].append('line') t['geo'] = way_geom(r, is_polygon) t['members'] = [ { @@ -305,22 +309,29 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v _ways = None _rels = None - # ways - w = [] - for t in ('*', 'line', 'way', 'area'): - if t in where_clauses: - w.append(where_clauses[t]) - - if len(w): - q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') - q = q.replace('__TYPE__', 'way') - - for r in overpass_query(q): - if r['id'] in ways_done: - pass - ways_done.append(r['id']) - - yield(assemble_object(r)) + # ways - will be run 3 times, first for areas, then lines and finally for not specified ways + for types in [ + { 'types': ('area',), 'way_polygon': True 
}, + { 'types': ('line',), 'way_polygon': False }, + { 'types': ('*', 'way'), 'way_polygon': None }, + ]: + w = [] + for t in types['types']: + if t in where_clauses: + w.append(where_clauses[t]) + + if len(w): + q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') + q = q.replace('__TYPE__', 'way') + + for r in overpass_query(q): + if r['id'] in ways_done: + continue + + t = assemble_object(r, way_polygon=types['way_polygon']) + if t: + ways_done.append(r['id']) + yield t # relations w = [] From 3bf0b83c38fd38d545b2b86147dc6da7562a86c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 17:32:00 +0100 Subject: [PATCH 065/209] DB/Overpass: check outer ways from multipolygons make sure, that a way which was part of a multipolygon (with tags from outer ways) is not returned as an area - it may still be a line --- pgmapcss/db/overpass/db_functions.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 96095548..83b3311c 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -214,6 +214,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} ways_done = [] rels_done = [] + area_ways_done = [] qry = '[out:json]' @@ -295,7 +296,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v rels_done.append(rid) for outer in r['members']: if outer['role'] in ('', 'outer'): - ways_done.append(outer['ref']) + area_ways_done.append(outer['ref']) t = assemble_object(r) t['id'] = 'm' + str(r['id']) @@ -328,7 +329,16 @@ def objects(_bbox, where_clauses, add_columns={}, 
add_param_type=[], add_param_v if r['id'] in ways_done: continue - t = assemble_object(r, way_polygon=types['way_polygon']) + # check, if way was part of multipolygon (with tags from outer + # ways) -> may not be area + way_polygon = types['way_polygon'] + if r['id'] in area_ways_done: + if types['way_polygon'] == True: + continue + else: + way_polygon = False + + t = assemble_object(r, way_polygon=way_polygon) if t: ways_done.append(r['id']) yield t From c895db8ab65dfbf5ad768b00754e51b7fcbae3da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 20 Nov 2014 17:58:01 +0100 Subject: [PATCH 066/209] DB/Overpass: bugfix, do not re-use variable --- pgmapcss/db/overpass/db_functions.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 93448e55..ff67a4d1 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -130,8 +130,8 @@ def relation_geom(r): ] lines = plpy.execute(geom_plan_linemerge, [ lines ]) - for r in lines: - polygons.append(r['geom']) + for p in lines: + polygons.append(p['geom']) polygons = plpy.execute(geom_plan_collect, [ polygons ])[0]['geom'] inner_polygons = [ @@ -140,8 +140,8 @@ def relation_geom(r): ] inner_lines = plpy.execute(geom_plan_linemerge, [ inner_lines ]) - for r in inner_lines: - inner_polygons.append(r['geom']) + for p in inner_lines: + inner_polygons.append(p['geom']) for p in inner_polygons: try: From fbc32ef7cb9c78d38e05e6d249ae710afface489 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 21 Nov 2014 09:12:02 +0100 Subject: [PATCH 067/209] DB/Overpass: Multipolygons: relation parent may have same tags as children --- pgmapcss/db/overpass/db_functions.py | 76 ++++++++++++++-------------- 1 file changed, 39 insertions(+), 37 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py 
index 83b3311c..49f7ad11 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -267,45 +267,47 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v _rels[r['id']] = r for rid, r in _rels.items(): - if r['tags']['type'] in ('multipolygon', 'boundary') and len([ - v - for v in r['tags'] - if v not in non_relevant_tags - ]) == 0: - is_valid_mp = True - outer_tags = None + mp_tags = { + vk: vv + for vk, vv in r['tags'].items() + if vk not in non_relevant_tags + } + is_valid_mp = True + outer_tags = None + + for outer in r['members']: + if outer['role'] in ('', 'outer'): + if not outer['ref'] in _ways: + continue + outer_way = _ways[outer['ref']] + tags = { + vk: vv + for vk, vv in outer_way['tags'].items() + if vk not in non_relevant_tags + } if 'tags' in outer_way else {} + + if outer_tags is None: + outer_tags = tags + elif outer_tags != tags: + is_valid_mp = True + + if (len(mp_tags) == 0 or mp_tags == outer_tags) and \ + is_valid_mp and outer_tags is not None: + rels_done.append(rid) for outer in r['members']: if outer['role'] in ('', 'outer'): - if not outer['ref'] in _ways: - continue - - outer_way = _ways[outer['ref']] - tags = { - vk: vv - for vk, vv in outer_way['tags'].items() - if vk not in non_relevant_tags - } if 'tags' in outer_way else {} - - if outer_tags is None: - outer_tags = tags - elif outer_tags != tags: - is_valid_mp = True - - if is_valid_mp and outer_tags is not None: - rels_done.append(rid) - for outer in r['members']: - if outer['role'] in ('', 'outer'): - area_ways_done.append(outer['ref']) - - t = assemble_object(r) - t['id'] = 'm' + str(r['id']) - t['types'] = ['multipolygon', 'area'] - t['tags'] = outer_tags - - yield(t) - else: - plpy.warning('tag-less multipolygon with non-similar outer ways: {}'.format(rid)) + area_ways_done.append(outer['ref']) + + t = assemble_object(r) + t['id'] = 'm' + str(r['id']) + t['types'] = ['multipolygon', 'area'] + t['tags'] = 
outer_tags + t['tags']['osm:id'] = t['id'] + + yield(t) + else: + plpy.warning('tag-less multipolygon with non-similar outer ways: {}'.format(rid)) _ways = None _rels = None @@ -355,7 +357,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v for r in overpass_query(q): if r['id'] in rels_done: - pass + continue rels_done.append(r['id']) yield(assemble_object(r)) From 9612aa69aeff6471a015237e75d797a50321a4b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 21 Nov 2014 10:37:46 +0100 Subject: [PATCH 068/209] DB/Overpass: improve multipolygon support - set 'osm:has_outer_tags' to 'yes', when multipolygon has tags from outer ways - when relation and outer ways have the same (relevant) tags, the structure comes from relation; the ways are no areas - improve documentaiton - remove debug message --- doc/database.md | 11 +++++++++-- pgmapcss/db/overpass/db_functions.py | 12 ++++++------ 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/doc/database.md b/doc/database.md index f4bee296..dc34a81d 100644 --- a/doc/database.md +++ b/doc/database.md @@ -57,11 +57,18 @@ In contrast to osm2pgsql and osmosis, Overpass API is an external database which By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. Additionally, you still need a local PostgreSQL database, as it is used for connecting to Mapnik and accessing the PostGIS functions. -* In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead. -* Full multipolygon support (handled similar to Osmosis pgsnapshot). 
+* In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead; including full multipolygon support (see below) * In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. * Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). +Multipolygon support +-------------------- +There are two "types" of multipolygons, those that have their tags bound to the relation (the standard) and multipolygons which inherit their tags from their outer members (when the relation has no relevant tags and the outer members have exactly the same relevant tags, or the relation and the outer members have the same relevant tags). + +* "Standard" multipolygons get their ID prefixed by 'r' (as they are relations). +* Multipolygons with tags from their outer members get their ID prefixed by 'm' (for multipolygon) and an additional tag 'osm:has_outer_tags' (set to 'yes'). On the other hand closed ways which are an outer member of a multipolygon relation do not count as 'area', whereas the multipolygon itself does not count as 'relation'. +* When the relation and the outer members have the same relevant tags, the feature is handled as in the "standard" multipolygon way, but the outer ways do not match 'area'.
+ Options ------- Behaviour can be influenced with the following config options: diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 49f7ad11..9313757c 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -300,14 +300,14 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v area_ways_done.append(outer['ref']) t = assemble_object(r) - t['id'] = 'm' + str(r['id']) - t['types'] = ['multipolygon', 'area'] - t['tags'] = outer_tags - t['tags']['osm:id'] = t['id'] + if len(mp_tags) == 0: + t['id'] = 'm' + str(r['id']) + t['types'] = ['multipolygon', 'area'] + t['tags'] = outer_tags + t['tags']['osm:id'] = t['id'] + t['tags']['osm:has_outer_tags'] = 'yes' yield(t) - else: - plpy.warning('tag-less multipolygon with non-similar outer ways: {}'.format(rid)) _ways = None _rels = None From 8ba7ac164da7c7a9537b0d1e0b5673c8683bd235 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 06:25:33 +0100 Subject: [PATCH 069/209] DB/Overpass: query for areas (multipolygons): include [type=multipolygon] condition --- pgmapcss/db/overpass/db_functions.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index ff67a4d1..59cf0478 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -324,13 +324,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # relations w = [] - for t in ('*', 'relation', 'area'): + for t, type_condition in {'*': '', 'relation': '', 'area': '[type=multipolygon]'}.items(): if t in where_clauses: - w.append(where_clauses[t]) + w.append(where_clauses[t].replace('__TYPE__', 'relation' + type_condition)) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') - q = q.replace('__TYPE__', 'relation') for r in overpass_query(q): if 
r['id'] in rels_done: From 1e45c6df212aeac592995b573843ceec0276978d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 08:00:54 +0100 Subject: [PATCH 070/209] Misc/strip_includes: also accept 'ELSE' --- pgmapcss/misc/strip_includes.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/pgmapcss/misc/strip_includes.py b/pgmapcss/misc/strip_includes.py index 66703d1d..63bdecab 100644 --- a/pgmapcss/misc/strip_includes.py +++ b/pgmapcss/misc/strip_includes.py @@ -2,6 +2,7 @@ def strip_includes(stream, stat): import re ret = '' selectors = set() + else_selectors = set() include = True while True: @@ -10,17 +11,30 @@ def strip_includes(stream, stat): return ret r = r.decode('utf-8') - m = re.match('# (START|END) (.*)', r) + m = re.match('# (START|ELSE|END) (.*)', r) if m: if m.group(1) == 'END': - selectors.remove(m.group(2)) + if m.group(2) in selectors: + selectors.remove(m.group(2)) + if m.group(2) in else_selectors: + else_selectors.remove(m.group(2)) elif m.group(1) == 'START': selectors.add(m.group(2)) + if m.group(2) in else_selectors: + else_selectors.remove(m.group(2)) + elif m.group(1) == 'ELSE': + else_selectors.add(m.group(2)) + if m.group(2) in selectors: + selectors.remove(m.group(2)) include = not len({ True for s in selectors if not s in stat['config'] or stat['config'][s] in ('false', False, 'no') + }) + len({ + True + for s in else_selectors + if s in stat['config'] and stat['config'][s] not in ('false', False, 'no') }) elif include: From 2f84445b7593a51c233ced0e8297682c4ba2102f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 08:02:00 +0100 Subject: [PATCH 071/209] DB/Overpass: debug.profiler for overpass queries --- pgmapcss/db/overpass/db_functions.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 59cf0478..cb89753b 100644 
--- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -8,6 +8,10 @@ def overpass_query(query): # START debug.overpass_queries plpy.warning(query) # END debug.overpass_queries +# START debug.profiler + ret = [] + time_start = datetime.datetime.now() +# END debug.profiler url = '{db.overpass-url}/interpreter?' +\ urllib.parse.urlencode({ 'data': query }) @@ -32,12 +36,23 @@ def overpass_query(query): elif mode == 1: if re.match('}', r): block += '}' +# START debug.profiler + ret.append(json.loads(block)) +# ELSE debug.profiler yield json.loads(block) +# END debug.profiler block = '' elif re.match('\s*$', block) and re.match('.*\]', r): f.close() + +# START debug.profiler + plpy.warning('%s\nquery took %.2fs for %d features' % (query, (datetime.datetime.now() - time_start).total_seconds(), len(ret))) + for r in ret: + yield r +# END debug.profiler + return else: From 2186155a6c3aca4b3470c3ff8eb554c0bd8c019b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 08:04:23 +0100 Subject: [PATCH 072/209] DB/Overpass: queries for relations type=multipolygon also include type=boundary --- pgmapcss/db/overpass/db_functions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index cb89753b..12cfda6f 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -264,8 +264,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q2 = ');('.join(w).replace('__TYPE__', 'way(r.rel:"")') q = qry.replace('__QRY__', - 'relation[type=multipolygon] -> .rel;' + - '((' + q1 + q2 + ');) -> .outer;relation(bw.outer)[type=multipolygon]') + '.outer out tags qt;' + "relation[type~'^multipolygon|boundary$'] -> .rel;" + + '((' + q1 + q2 + ");) -> .outer;relation(bw.outer)[type~'^multipolygon|boundary$']") + '.outer out tags qt;' _ways = {} _rels = {} @@ 
-339,7 +339,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # relations w = [] - for t, type_condition in {'*': '', 'relation': '', 'area': '[type=multipolygon]'}.items(): + for t, type_condition in {'*': '', 'relation': '', 'area': "[type~'^multipolygon|boundary$']"}.items(): if t in where_clauses: w.append(where_clauses[t].replace('__TYPE__', 'relation' + type_condition)) From d76d4382942e1b457081dc543a9ad78ab305634a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 09:21:14 +0100 Subject: [PATCH 073/209] DB/Overpass: if a relation is not a multipolygon, construct a geometry collection --- pgmapcss/db/overpass/db_functions.py | 32 ++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 12cfda6f..3c67ba5a 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -101,7 +101,7 @@ def linestring(geom): for g in geom ]) + ')' -def relation_geom(r): +def multipolygon_geom(r): global geom_plan try: @@ -113,10 +113,7 @@ def relation_geom(r): # merge all lines together, return all closed rings (but remove unconnected lines) geom_plan_linemerge = plpy.prepare('select geom from (select (ST_Dump((ST_LineMerge(ST_Collect(geom))))).geom as geom from (select ST_GeomFromText(unnest($1), 4326) geom) t offset 0) t where ST_NPoints(geom) > 3 and ST_IsClosed(geom)', [ 'text[]' ]) - if 'tags' in r and 'type' in r['tags'] and r['tags']['type'] in ('multipolygon', 'boundary'): - t = 'MULTIPOLYGON' - else: - return None + t = 'MULTIPOLYGON' polygons = [] lines = [] @@ -169,6 +166,26 @@ def relation_geom(r): return polygons +def relation_geom(r): + global geom_plan_collect + + try: + geom_plan_collect + except NameError: + geom_plan_collect = plpy.prepare('select ST_Collect($1) as geom', [ 'geometry[]' ]) + + l = [] + + for m in r['members']: + if 
m['type'] == 'node': + l.append(node_geom(m['lat'], m['lon'])) + if m['type'] == 'way': + l.append(way_geom(m, None)) + + res = plpy.execute(geom_plan_collect, [ l ]) + + return res[0]['geom'] + def assemble_object(r): t = { 'tags': r['tags'] if 'tags' in r else {}, @@ -194,7 +211,10 @@ def assemble_object(r): elif r['type'] == 'relation': t['id'] = 'r' + str(r['id']) t['types'] = ['area', 'relation'] - t['geo'] = relation_geom(r) + if 'tags' in r and 'type' in r['tags'] and r['tags']['type'] in ('multipolygon', 'boundary'): + t['geo'] = multipolygon_geom(r) + else: + t['geo'] = relation_geom(r) t['members'] = [ { 'member_id': m['type'][0] + str(m['ref']), From 28e04f418c3ab4a6e286ecf5170c1d4a9d06ee0a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 22 Nov 2014 10:26:44 +0100 Subject: [PATCH 074/209] DB/Overpass: bugfix, query overpass areas only for area-conditions --- pgmapcss/db/overpass/db_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 3c67ba5a..f54a7b53 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -375,7 +375,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # areas w = [] - for t in ('*', 'relation', 'area'): + for t in ('*', 'area'): if t in where_clauses: w.append(where_clauses[t]) From dca8c4e9e370147bb2027d237d4e6d1b8fa91e8f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 10:02:01 +0100 Subject: [PATCH 075/209] DB/Overpass: compiling queries, create dict with 'query' --- pgmapcss/db/overpass/db.py | 4 ++-- pgmapcss/db/overpass/db_functions.py | 24 ++++++++++++------------ 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 7768edba..5811c36b 100644 --- a/pgmapcss/db/overpass/db.py +++ 
b/pgmapcss/db/overpass/db.py @@ -296,10 +296,10 @@ def merge_conditions(self, conditions): } return { - t: ';\n'.join([ + t: { 'query': ';\n'.join([ self.conditions_to_query(c) for c in cs - ]) + ';\n' + ]) + ';\n' } for t, cs in conditions.items() } diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 57742292..389347e1 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -265,7 +265,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') + q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'node') for r in overpass_query(q): @@ -285,8 +285,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # multipolygon has no (relevant) tags and all outer ways share the same # tags (save non relevant tags) the ways are discarded and the relation # is used - as type 'multipolygon' and a 'm' prefixed to the ID - q1 = ');('.join(w).replace('__TYPE__', 'way(r.rel:"outer")') - q2 = ');('.join(w).replace('__TYPE__', 'way(r.rel:"")') + q1 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(r.rel:"outer")') + q2 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(r.rel:"")') q = qry.replace('__QRY__', "relation[type~'^multipolygon|boundary$'] -> .rel;" + @@ -359,7 +359,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') + q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'way') for r in overpass_query(q): @@ -387,7 +387,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t].replace('__TYPE__', 'relation' + 
type_condition)) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') + q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') for r in overpass_query(q): if r['id'] in rels_done: @@ -406,8 +406,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v plan = plpy.prepare("select ST_Y(ST_Centroid($1::geometry)) || ',' || ST_X(ST_Centroid($1::geometry)) as geom", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) - q1 = ');('.join(w).replace('__TYPE__', 'relation(pivot.a)') - q2 = ');('.join(w).replace('__TYPE__', 'way(pivot.a)') + q1 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'relation(pivot.a)') + q2 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(pivot.a)') q = ('[out:json];is_in({})->.a;(' + q1 + q2 + ');out meta geom;').format(res[0]['geom']) @@ -462,9 +462,9 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition member_of_cache[member_of_cache_id] = [] q = '[out:json]' + get_bbox() + ';' - q += '(' + child_conditions.replace('__TYPE__', ob_type) + ')->.a;' + q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(b' + + q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(b' + ob_type[0] + '.a)') + ');' q += 'out meta qt geom;' @@ -509,7 +509,7 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition members_cache[members_cache_id] = { 'parents': {}, 'children': [] } q = '[out:json]' + get_bbox() + ';' - q += '(' + child_conditions.replace('__TYPE__', ob_type) + ');' + q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ');' q += 'out meta qt geom;' # TODO: out body qt; would be sufficient, but need to adapt assemble_object @@ -520,8 +520,8 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition q = '[out:json]' + get_bbox() + ';' - q += '(' + 
child_conditions.replace('__TYPE__', ob_type) + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(' + + q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' + q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(' + relation_id[0] + '.a)') + ');' q += 'out meta qt geom;' # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object From abc30815264d8514d6648c26484a905bc097f7ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 11:27:54 +0100 Subject: [PATCH 076/209] DB/Overpass: compiling queries, return dict from conditions_to_query() --- pgmapcss/db/overpass/db.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 5811c36b..2cdd5639 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -192,7 +192,7 @@ def conditions_to_query(self, conditions): else: print('Unknown Overpass operator "{}"'.format(c[0])) - return ret + return { 'query': ret } def merge_regexp(self, regexps): r = '' @@ -295,13 +295,24 @@ def merge_conditions(self, conditions): for t, cs in conditions.items() } - return { - t: { 'query': ';\n'.join([ - self.conditions_to_query(c) - for c in cs - ]) + ';\n' } - for t, cs in conditions.items() - } + ret = {} + for t, cs in conditions.items(): + if not t in ret: + ret[t] = {} + + for c in cs: + c = self.conditions_to_query(c) + for c1, c2 in c.items(): + if not c1 in ret[t]: + ret[t][c1] = [] + + ret[t][c1].append(c2) + + for t in ret: + if 'query' in ret[t]: + ret[t]['query'] = ';\n'.join(ret[t]['query']) + ';\n' + + return ret def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, selector='selector', no_object_type=False): filter['object_type'] = object_type From 22a660601f1d31ad8ad588d01db4c9732e4a656e Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 11:43:50 +0100 Subject: [PATCH 077/209] DB/Overpass: implement parent>child queries --- pgmapcss/db/overpass/db.py | 55 +++++++++++++++++++++++++++- pgmapcss/db/overpass/db_functions.py | 28 ++++++++++++-- 2 files changed, 77 insertions(+), 6 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 2cdd5639..72fcf457 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -170,7 +170,14 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte return ret def conditions_to_query(self, conditions): - ret = '__TYPE__'; + parent_ret = '' + parent = [ c for c in conditions if c[0] == 'parent' ] + if len(parent): + parent_ret = parent[0][1] + ret = '__TYPE__(r.a)' + + else: + ret = '__TYPE__'; for c in conditions: if c[0] == 'type': @@ -189,10 +196,33 @@ def conditions_to_query(self, conditions): ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ']' elif c[0] == 'notiregexp': ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ', i]' + + elif c[0] == 'parent_key': + parent_ret += '[' + repr(c[1]) + ']' + elif c[0] == 'parent_is': + parent_ret += '[' + repr(c[1]) + '=' + repr(c[2]) + ']' + elif c[0] == 'parent_isnot': + parent_ret += '[' + repr(c[1]) + '!=' + repr(c[2]) + ']' + elif c[0] == 'parent_regexp': + parent_ret += '[' + repr(c[1]) + '~' + self.merge_regexp(c[2]) + ']' + elif c[0] == 'parent_iregexp': + parent_ret += '[' + repr(c[1]) + '~' + self.merge_regexp(c[2]) + ', i]' + elif c[0] == 'parent_notregexp': + parent_ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ']' + elif c[0] == 'parent_notiregexp': + parent_ret += '[' + repr(c[1]) + '!~' + self.merge_regexp(c[2]) + ', i]' + elif c[0] == 'parent': + pass + else: print('Unknown Overpass operator "{}"'.format(c[0])) - return { 'query': ret } + r = { 'query': ret } + + if parent_ret != '': + r['parent_query'] = parent_ret + '->.a' + + return r 
def merge_regexp(self, regexps): r = '' @@ -311,6 +341,8 @@ def merge_conditions(self, conditions): for t in ret: if 'query' in ret[t]: ret[t]['query'] = ';\n'.join(ret[t]['query']) + ';\n' + if 'parent_query' in ret[t]: + ret[t]['parent_query'] = ';\n'.join(ret[t]['parent_query']) + ';\n' return ret @@ -322,6 +354,25 @@ def compile_selector(self, statement, stat, prefix='current.', filter={}, object for c in statement[selector]['conditions'] ] + parent_conditions = None + if 'parent_selector' in statement and selector == 'selector' and statement['link_selector']['type'] in ('', '>'): + parent_conditions = [ + self.compile_condition(c, statement, stat, prefix, filter) or None + for c in statement['parent_selector']['conditions'] + ] + + conditions.append(( 'parent', statement['parent_selector']['type'] )) + for condition in parent_conditions: + if condition is None: + continue + t = tuple() + for i, c in enumerate(condition): + if i == 0: + c = 'parent_' + c + t += ( c ,) + + conditions.append(t) + ret = [ [] ] for condition in conditions: diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 389347e1..039b2032 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -265,7 +265,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') + parent_query = '' + for w1 in w: + if 'parent_query' in w1: + parent_query += w1['parent_query'] + + q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'node') for r in overpass_query(q): @@ -285,10 +290,15 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # multipolygon has no (relevant) tags and all outer ways share the same # tags (save non relevant tags) the ways are discarded and the relation # 
is used - as type 'multipolygon' and a 'm' prefixed to the ID + parent_query = '' + for w1 in w: + if 'parent_query' in w1: + parent_query += w1['parent_query'] + q1 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(r.rel:"outer")') q2 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(r.rel:"")') - q = qry.replace('__QRY__', + q = qry.replace('__QRY__', parent_query +\ "relation[type~'^multipolygon|boundary$'] -> .rel;" + '((' + q1 + q2 + ");) -> .outer;relation(bw.outer)[type~'^multipolygon|boundary$']") + '.outer out tags qt;' @@ -359,7 +369,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t]) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') + parent_query = '' + for w1 in w: + if 'parent_query' in w1: + parent_query += w1['parent_query'] + + q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'way') for r in overpass_query(q): @@ -387,7 +402,12 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v w.append(where_clauses[t].replace('__TYPE__', 'relation' + type_condition)) if len(w): - q = qry.replace('__QRY__', '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') + parent_query = '' + for w1 in w: + if 'parent_query' in w1: + parent_query += w1['parent_query'] + + q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') for r in overpass_query(q): if r['id'] in rels_done: From 6cfc4ddf5254fb67de533798ff8e5476fb724ad5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 11:50:56 +0100 Subject: [PATCH 078/209] DB/Overpass: collect parent queries in list to assign unique id --- pgmapcss/db/overpass/db.py | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/pgmapcss/db/overpass/db.py 
b/pgmapcss/db/overpass/db.py index 72fcf457..aa8222c6 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -13,6 +13,8 @@ def __init__(self, conn, stat): if not 'db.overpass-url' in self.stat['config']: self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api' + self.parent_queries = [] + def tag_type(self, key, condition, selector, statement): if key[0:4] == 'osm:': return None @@ -171,13 +173,7 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte def conditions_to_query(self, conditions): parent_ret = '' - parent = [ c for c in conditions if c[0] == 'parent' ] - if len(parent): - parent_ret = parent[0][1] - ret = '__TYPE__(r.a)' - - else: - ret = '__TYPE__'; + ret = '' for c in conditions: if c[0] == 'type': @@ -217,10 +213,25 @@ def conditions_to_query(self, conditions): else: print('Unknown Overpass operator "{}"'.format(c[0])) - r = { 'query': ret } + r = { } + + parent = [ c for c in conditions if c[0] == 'parent' ] + if len(parent): + parent_ret = parent[0][1] + parent_ret + + try: + pq = self.parent_queries.index(parent_ret) + except ValueError: + pq = len(self.parent_queries) + self.parent_queries.append(parent_ret) + + ret = '__TYPE__(r.pq' + str(pq) + ')' + ret + r['parent_query'] = parent_ret + '->.pq' + str(pq) + + else: + ret = '__TYPE__' + ret - if parent_ret != '': - r['parent_query'] = parent_ret + '->.a' + r['query'] = ret return r From 7efa3642651b4cfa5a806292a2932f68bdda4f13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 11:53:01 +0100 Subject: [PATCH 079/209] DB/Overpass: collect (parent-)queries in set to remove duplicates --- pgmapcss/db/overpass/db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index aa8222c6..2b7c0619 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -345,9 +345,9 @@ def merge_conditions(self, 
conditions): c = self.conditions_to_query(c) for c1, c2 in c.items(): if not c1 in ret[t]: - ret[t][c1] = [] + ret[t][c1] = set() - ret[t][c1].append(c2) + ret[t][c1].add(c2) for t in ret: if 'query' in ret[t]: From a5927f968788138c457fbf7d1913dbf77165629d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 11:56:28 +0100 Subject: [PATCH 080/209] DB/Overpass: get_bbox() returns only coordinates without [bbox:...] --- pgmapcss/db/overpass/db_functions.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 039b2032..a7fd23c1 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -242,7 +242,7 @@ def get_bbox(_bbox=None): plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' || ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) - return '[bbox:' + res[0]['bbox_string'] + ']' + return res[0]['bbox_string'] def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): time_start = datetime.datetime.now() # profiling @@ -254,7 +254,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v qry = '[out:json]' if _bbox: - qry += get_bbox(_bbox) + qry += '[bbox:' + get_bbox(_bbox) + ']' qry += ';__QRY__;out meta geom;' @@ -480,7 +480,7 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition if member_of_cache_id not in member_of_cache: member_of_cache[member_of_cache_id] = [] - q = '[out:json]' + get_bbox() + ';' + q = '[out:json][bbox:' + get_bbox() + '];' q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' @@ -527,7 +527,7 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition if members_cache_id not in members_cache: 
members_cache[members_cache_id] = { 'parents': {}, 'children': [] } - q = '[out:json]' + get_bbox() + ';' + q = '[out:json][bbox:' + get_bbox() + '];' q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ');' q += 'out meta qt geom;' @@ -538,7 +538,7 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition t['type'] = r['type'] members_cache[members_cache_id]['parents'][t['id']] = t - q = '[out:json]' + get_bbox() + ';' + q = '[out:json][bbox:' + get_bbox() + '];' q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(' + From c38cc14e18104cf0d77513986a2ea53213f2b506 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 12:10:31 +0100 Subject: [PATCH 081/209] DB/Overpass: parent queries also for members() and member_of() --- pgmapcss/db/overpass/db_functions.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index a7fd23c1..9403fd61 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -482,6 +482,11 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition member_of_cache[member_of_cache_id] = [] q = '[out:json][bbox:' + get_bbox() + '];' + if 'parent_query' in child_conditions: + q += child_conditions['parent_query'] + if 'parent_query' in parent_conditions: + q += parent_conditions['parent_query'] + q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(b' + @@ -529,6 +534,8 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition members_cache[members_cache_id] = { 'parents': {}, 'children': [] } q = '[out:json][bbox:' + get_bbox() + '];' + if 'parent_query' in child_conditions: + q += 
child_conditions['parent_query'] q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ');' q += 'out meta qt geom;' # TODO: out body qt; would be sufficient, but need to adapt assemble_object @@ -540,6 +547,11 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition q = '[out:json][bbox:' + get_bbox() + '];' + if 'parent_query' in child_conditions: + q += child_conditions['parent_query'] + if 'parent_query' in parent_conditions: + q += parent_conditions['parent_query'] + q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ')->.a;' q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(' + relation_id[0] + '.a)') + ');' From 88d703985fbcdaf889644c3dcd15cc00191bd0e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 12:13:29 +0100 Subject: [PATCH 082/209] DB/Overpass: when querying parent->child relations, add another BBOX query --- pgmapcss/db/overpass/db.py | 2 +- pgmapcss/db/overpass/db_functions.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 2b7c0619..35a493d4 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -225,7 +225,7 @@ def conditions_to_query(self, conditions): pq = len(self.parent_queries) self.parent_queries.append(parent_ret) - ret = '__TYPE__(r.pq' + str(pq) + ')' + ret + ret = '__TYPE__(r.pq' + str(pq) + ')__BBOX__' + ret r['parent_query'] = parent_ret + '->.pq' + str(pq) else: diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 9403fd61..73f94cfd 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -252,9 +252,13 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v area_ways_done = [] qry = '[out:json]' + replacements = {} if _bbox: qry += '[bbox:' + get_bbox(_bbox) + ']' + 
replacements['__BBOX__'] = '(' + get_bbox(_bbox) + ')' + else: + replacements['__BBOX__'] = '' qry += ';__QRY__;out meta geom;' @@ -272,6 +276,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'node') + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): yield(assemble_object(r)) @@ -376,6 +382,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') q = q.replace('__TYPE__', 'way') + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): if r['id'] in ways_done: @@ -408,6 +416,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v parent_query += w1['parent_query'] q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): if r['id'] in rels_done: @@ -430,6 +440,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v q2 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'way(pivot.a)') q = ('[out:json];is_in({})->.a;(' + q1 + q2 + ');out meta geom;').format(res[0]['geom']) + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): if (r['type'] == 'way' and r['id'] in ways_done) or\ @@ -480,6 +492,7 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition if member_of_cache_id not in member_of_cache: member_of_cache[member_of_cache_id] = [] + replacements = { '__BBOX__': '(' + get_bbox() + ')' } q = '[out:json][bbox:' + get_bbox() + '];' if 'parent_query' in child_conditions: @@ -492,6 +505,8 @@ def objects_member_of(member_id, parent_type, parent_conditions, 
child_condition q += '(' + parent_conditions['query'].replace('__TYPE__', parent_type + '(b' + ob_type[0] + '.a)') + ');' q += 'out meta qt geom;' + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): t = assemble_object(r) @@ -532,6 +547,7 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition if members_cache_id not in members_cache: members_cache[members_cache_id] = { 'parents': {}, 'children': [] } + replacements = { '__BBOX__': '(' + get_bbox() + ')' } q = '[out:json][bbox:' + get_bbox() + '];' if 'parent_query' in child_conditions: @@ -539,6 +555,8 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition q += '(' + child_conditions['query'].replace('__TYPE__', ob_type) + ');' q += 'out meta qt geom;' # TODO: out body qt; would be sufficient, but need to adapt assemble_object + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): t = assemble_object(r) @@ -557,6 +575,8 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition relation_id[0] + '.a)') + ');' q += 'out meta qt geom;' # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object + for r1, r2 in replacements.items(): + q = q.replace(r1, r2) for r in overpass_query(q): t = assemble_object(r) From fea399a9534420d3240f160049dc737723b56c3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 15:56:56 +0100 Subject: [PATCH 083/209] DB/Overpass: enable more relationship queries - way node - node < way, member < relation --- pgmapcss/db/overpass/db.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 35a493d4..320414dd 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -217,7 +217,8 @@ def conditions_to_query(self, conditions): parent = [ c for c in conditions if c[0] == 
'parent' ] if len(parent): - parent_ret = parent[0][1] + parent_ret + parent_selector = parent[0] + parent_ret = parent_selector[1] + parent_ret try: pq = self.parent_queries.index(parent_ret) @@ -225,7 +226,12 @@ def conditions_to_query(self, conditions): pq = len(self.parent_queries) self.parent_queries.append(parent_ret) - ret = '__TYPE__(r.pq' + str(pq) + ')__BBOX__' + ret + if parent_selector[2] in ('>', ''): + parent_sel = parent_selector[1][0] + elif parent_selector[2] in ('<'): + parent_sel = 'b' + parent_selector[1][0] + + ret = '__TYPE__(' + parent_sel + '.pq' + str(pq) + ')__BBOX__' + ret r['parent_query'] = parent_ret + '->.pq' + str(pq) else: @@ -366,13 +372,13 @@ def compile_selector(self, statement, stat, prefix='current.', filter={}, object ] parent_conditions = None - if 'parent_selector' in statement and selector == 'selector' and statement['link_selector']['type'] in ('', '>'): + if 'parent_selector' in statement and selector == 'selector' and statement['link_selector']['type'] in ('', '>', '<'): parent_conditions = [ self.compile_condition(c, statement, stat, prefix, filter) or None for c in statement['parent_selector']['conditions'] ] - conditions.append(( 'parent', statement['parent_selector']['type'] )) + conditions.append(( 'parent', statement['parent_selector']['type'], statement['link_selector']['type'] )) for condition in parent_conditions: if condition is None: continue From 3a7c76eefedd876700f25926ba0c714799014ee0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 23 Nov 2014 16:15:37 +0100 Subject: [PATCH 084/209] DB/Overpass: bugfix adding parent_queries for relations --- pgmapcss/db/overpass/db_functions.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 73f94cfd..23b3e3b7 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -405,17 +405,15 @@ def 
objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # relations w = [] + parent_query = '' for t, type_condition in {'*': '', 'relation': '', 'area': "[type~'^multipolygon|boundary$']"}.items(): if t in where_clauses: - w.append(where_clauses[t].replace('__TYPE__', 'relation' + type_condition)) + w.append(where_clauses[t]['query'].replace('__TYPE__', 'relation' + type_condition)) + if 'parent_query' in where_clauses[t]: + parent_query += where_clauses[t]['parent_query'] if len(w): - parent_query = '' - for w1 in w: - if 'parent_query' in w1: - parent_query += w1['parent_query'] - - q = qry.replace('__QRY__', parent_query + '((' + ');('.join([ w1['query'] for w1 in w ]) + ');)') + q = qry.replace('__QRY__', parent_query + '((' + ');('.join(w) + ');)') for r1, r2 in replacements.items(): q = q.replace(r1, r2) From bbf9e9b198ce19df579c60fbdf790b0fd9fff6c4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 27 Nov 2014 11:54:43 +0100 Subject: [PATCH 085/209] Compile Link Selector: Bugfix when parent conditions can't be compiled --- pgmapcss/compiler/compile_link_selector.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index ea978032..21a38a59 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -8,7 +8,12 @@ def compile_link_selector(statement, stat): statement['parent_selector']['type'], stat['database'].compile_selector( statement, stat, prefix='', selector='parent_selector') - )])[statement['parent_selector']['type']] + )]) + + if statement['parent_selector']['type'] in parent_conditions: + parent_conditions = parent_conditions[statement['parent_selector']['type']] + else: + parent_conditions = None child_conditions = stat['database'].merge_conditions([( statement['selector']['type'], From f6966173c9f6d926b7b9d37f195f60f04987fa85 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 5 Dec 2014 07:02:42 +0100 Subject: [PATCH 086/209] Compile Link Selector: Bugfix, no child conditions compiled --- pgmapcss/compiler/compile_link_selector.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 21a38a59..32857fc0 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -19,7 +19,11 @@ def compile_link_selector(statement, stat): statement['selector']['type'], stat['database'].compile_selector( statement, stat, prefix='') - )])[statement['selector']['type']] + )]) + if statement['selector']['type'] in child_conditions: + child_conditions = child_conditions[statement['selector']['type']] + else: + child_conditions = None if statement['link_selector']['type'] in ('>', ''): return "objects_member_of(object['id'], " +\ From 74d589fc0cfa694b9af12fe7e4c03b82c1a96ac6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 5 Dec 2014 07:07:51 +0100 Subject: [PATCH 087/209] DB/Overpass: Bugfix compiling non-evaluating set statements - if a condition for a class has no statements which set this class, it always evaluates to False (before, this would always be True for db select statements) --- pgmapcss/db/overpass/db.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 7768edba..e3ef9102 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -127,6 +127,8 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi return ret + # returns None if it's not possible to query for condition (e.g. 
osm:user) + # returns False if query always evaluates negative def compile_condition(self, condition, statement, stat, prefix='current.', filter={}): ret = [] @@ -145,6 +147,8 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte # ignore generated tags (identified by leading .) if condition['key'][0] == '.': + if len(set_statements) == 0: + return False return set_statements # depending on the tag type compile the specified condition @@ -161,6 +165,9 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte return set_statements if len(set_statements): + if False in set_statements: + return False + return [ s + [[ ret ]] for s in set_statements @@ -301,13 +308,14 @@ def merge_conditions(self, conditions): for c in cs ]) + ';\n' for t, cs in conditions.items() + if len(cs) } def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, selector='selector', no_object_type=False): filter['object_type'] = object_type conditions = [ - self.compile_condition(c, statement, stat, prefix, filter) or None + self.compile_condition(c, statement, stat, prefix, filter) for c in statement[selector]['conditions'] ] @@ -331,8 +339,8 @@ def compile_selector(self, statement, stat, prefix='current.', filter={}, object for r in ret ] - if False in ret: - return False + elif condition == False: + return False if no_object_type: return ret From fb30a6a40df2ed869a77bb77fdc3c1df9eef53c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 9 Dec 2014 22:09:46 +0100 Subject: [PATCH 088/209] DB backend, improve & doc: start documenting + think about standardization --- doc/database-API.md | 97 +++++++++++++++++++++++++ pgmapcss/compiler/compile_db_selects.py | 2 + 2 files changed, 99 insertions(+) create mode 100644 doc/database-API.md diff --git a/doc/database-API.md b/doc/database-API.md new file mode 100644 index 00000000..d76537f5 --- /dev/null +++ b/doc/database-API.md 
@@ -0,0 +1,97 @@ +This is a documentation how support for a database backend is implemented in pgmapcss. It basically consists of a directory under pgmapcss/db (e.g. pgmapcss/db/osm2pgsql for the osm2pgsql backend) with two files: db.py and db_functions.py . + +pgmapcss/db/BACKEND/db.py +========================= +The minimal file looks like this: +```python +from ..default import default + +# This class implements the database backend for the TEMPLATE database layout. +class db(default): + # function to initialize the database backend + def __init__(self, conn, stat): + # sets self.conn (the database connection) and self.stat (contains + # everything about the current compile process, including the parsed + # MapCSS tree, config options, ...) + default.__init__(self, conn, stat) + + # this function may check and set config options in + # self.stat['config'], e.g. self.stat['config']['my_opt'] = True + # this might come in handy later-on in db_functions.py + + # compile the selector (e.g. `node[amenity=bar][name]`) to a select + # condition without selecting the object type + # (e.g. `"amenity"='bar' and "name" is not null` or + # `tags @> 'amenity=>bar' and tags ? 'name'`). + # see below for the structure of the statement argument + # You may define the datatype of the return value + def compile_selector(self, statement): + pass + + # merge several compiled selectors together (the argument conditions is a + # list) + e.g. `[ '"amenity"=\'bar\' and "name" is not null', '"foo"=\'bar\'' ]` + # => `'("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')'` + # You may define the datatype of the return value + def merge_conditions(self, conditions): + pass +``` + +pgmapcss/db/BACKEND/db_functions.py +=================================== +This file will be included in the compiled executable / database function. 
All the functions may return more objects as actually needed, the objects will be checked again later-on during processing (though this will reduce performace. you can set the config option `db.counter=verbose` to see which objects were returned but not rendered in the output). + +An object should look like this: +```python +{ + 'id': 'w1234', # identifier + 'types': [ 'way', 'area' ], # types this object might match + 'geo': '01010000...', # geometry in Well-Known Binary representation + 'tags': { 'foo': 'bar' }, # tags of the object + 'members': [ # (optional) list of members, with link tags + { 'member_id': 'n234', 'sequence_id': '0' }, + { 'member_id': 'n235', 'sequence_id': '1' } + ] +} +``` + +The minimal file looks like this: +```python +# objects() yields all objects which match the query/queries in the current +# bounding box. +# Arguments: +# bbox: a bounding box as WKT or None (ignore bounding box ; return all objects +# in database) +# db_selects: a dict, with the object types and the compiled conditions from db.py, e.g.: `{ 'area': '("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')' }`. objects() need to match the object types to the respective openstreetmap objects, e.g. 'area' => closed ways and multipolygons. +def objects(bbox, db_selects): + pass + +# objects_by_id() yields the specified objects from the database +# an id is a string with the object type identifier and the id, e.g. 'n1234' +def objects_by_id(id_list): + pass + +# objects_member_of(). For each object in the `objects` list, return all parent +# objects (which match the db_selects). +# As yielded values, tuples are expected with: +# ( child_object, parent_object, link_tags ) +# +# link_tags (dict) should contain: +# * sequence_id: members are consecutively numbered, the child is the nth entry (counting from 0) +# * role: role as specified in osm data (when the parent is a relation) +def objects_member_of(objects, db_selects): + pass + +# objects_members(). 
For each object in the `objects` list, return all child +# objects (which match the db_selects). +# As yielded values, tuples are expected with: +# ( parent_object, child_object, link_tags ) +# link_tags (dict) should contain: +# * sequence_id: members are consecutively numbered, the child is the nth entry (counting from 0) +# * role: role as specified in osm data (when the parent is a relation) +def objects_members(objects, db_selects): + pass + +def objects_near(objects, db_selects, options): + pass +``` diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 244dee4e..3d1b9fd8 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -62,6 +62,8 @@ def compile_db_selects(id, stat): for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) ] +# TODO: call merge_conditions() for each object_type individially, replace list +# of tuples by list of compiled selectors conditions = stat['database'].merge_conditions(conditions) max_scale = min_scale From 332cada4ebdba8696c1a5c634be99500bbdfd250 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 10 Dec 2014 07:45:15 +0100 Subject: [PATCH 089/209] DB backend, improve & doc: improve new documentation --- doc/database-API.md | 107 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 99 insertions(+), 8 deletions(-) diff --git a/doc/database-API.md b/doc/database-API.md index d76537f5..941790ad 100644 --- a/doc/database-API.md +++ b/doc/database-API.md @@ -1,8 +1,14 @@ -This is a documentation how support for a database backend is implemented in pgmapcss. It basically consists of a directory under pgmapcss/db (e.g. pgmapcss/db/osm2pgsql for the osm2pgsql backend) with two files: db.py and db_functions.py . +This is a documentation how support for a database backend is implemented in pgmapcss. 
It basically consists of a directory under pgmapcss/db (e.g. pgmapcss/db/osm2pgsql for the osm2pgsql backend) with three files: __init__.py, db.py and db_functions.py . -pgmapcss/db/BACKEND/db.py +pgmapcss/db/TEMPLATE/__init__.py ========================= -The minimal file looks like this: +```python +from .db import db +``` + +pgmapcss/db/TEMPLATE/db.py +========================= +The template file looks like this: ```python from ..default import default @@ -19,25 +25,26 @@ class db(default): # self.stat['config'], e.g. self.stat['config']['my_opt'] = True # this might come in handy later-on in db_functions.py - # compile the selector (e.g. `node[amenity=bar][name]`) to a select - # condition without selecting the object type + # compile the conditions of the selector (e.g. `node[amenity=bar][name]`) + # to a select condition without selecting the object type # (e.g. `"amenity"='bar' and "name" is not null` or # `tags @> 'amenity=>bar' and tags ? 'name'`). # see below for the structure of the statement argument + # for good performance it would be advisable to also compile relationships # You may define the datatype of the return value def compile_selector(self, statement): pass # merge several compiled selectors together (the argument conditions is a # list) - e.g. `[ '"amenity"=\'bar\' and "name" is not null', '"foo"=\'bar\'' ]` + # e.g. `[ '"amenity"=\'bar\' and "name" is not null', '"foo"=\'bar\'' ]` # => `'("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')'` # You may define the datatype of the return value def merge_conditions(self, conditions): pass ``` -pgmapcss/db/BACKEND/db_functions.py +pgmapcss/db/TEMPLATE/db_functions.py =================================== This file will be included in the compiled executable / database function. All the functions may return more objects as actually needed, the objects will be checked again later-on during processing (though this will reduce performace. 
you can set the config option `db.counter=verbose` to see which objects were returned but not rendered in the output). @@ -55,7 +62,7 @@ An object should look like this: } ``` -The minimal file looks like this: +The template file looks like this: ```python # objects() yields all objects which match the query/queries in the current # bounding box. @@ -92,6 +99,90 @@ def objects_member_of(objects, db_selects): def objects_members(objects, db_selects): pass +# objects_near(). For each object in the `objects` list, return all nearby objects (which match the db_selects). +# Argument options (dict): +# * distance: maximum distance in pixels +# * check_geo: (optional) one of: +# * 'within': if child object is within certain distance around parent +# * 'surrounds': if parent object is within certain distance around child +# * 'overlaps': if parent object and child object overlap (distance=0) +# As yielded values, tuples are expected with: +# ( parent_object, child_object, link_tags ) +# link_tags (dict) should contain: +# * distance: distance between objects in pixels def objects_near(objects, db_selects, options): pass ``` + +APPENDIX +======== +Structure of parsed statements +------------------------------ +An example statement structure looks like this: +```css +node[amenity=bar][name] { + text: "name"; + text-color: #ff0000; +} +``` + +```python +{ + 'id': 5, # sequential numbering of statements/properties + 'selector': { + 'conditions': [ + { + 'key': 'amenity', + 'op': '=', + 'value': 'bar', + 'value_type': 'value' # one of (value, eval) + }, { + 'key': 'name', + 'op': 'has_tag' + } + ], + 'type': 'node' # selected type; True if any type (*) + }, + 'properties': .... 
+} +``` + +An example using relationships: +```css +relation[type=route] >[role=stop] node { + text: parent_tag('ref'); +} +``` + +```python +{ + 'id': 7, # sequential numbering of statements/properties + 'selector': { + 'conditions': [], # no conditions on node + 'type': 'node' + }, + 'link_selector': { # (optional) when using relationship selector + 'type': '>' # '', '>', '<', 'near', 'within', 'overlaps' or 'surrounds' + 'conditions': [ + { + 'key': 'role', + 'op': '=', + 'value': 'stop', + 'value_type': 'value' + } + ] + } + 'parent_selector': { # (optional) when using relationship selector + 'type': 'relation' + 'conditions': [ + { + 'key': 'type', + 'op': '=', + 'value': 'route', + 'value_type': 'value' + } + ] + } + 'properties': .... +} +``` From e53ed4c4b5ff5027cf1693bcf733262b274ff0ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 10 Dec 2014 22:18:43 +0100 Subject: [PATCH 090/209] DB backend, improve & doc: reduce parameters on compile_selector() --- pgmapcss/compiler/compile_db_selects.py | 7 ++--- pgmapcss/compiler/compile_link_selector.py | 12 ++++++--- pgmapcss/db/overpass/db.py | 19 ++++++------- pgmapcss/db/postgresql_db/db.py | 31 ++++++++++++---------- 4 files changed, 39 insertions(+), 30 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 3d1b9fd8..e933081e 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -53,13 +53,14 @@ def compile_db_selects(id, stat): # compile all selectors # TODO: define list of possible object_types + # TODO: how to handle wildcard object type? 
conditions = [ ( - object_type, - stat['database'].compile_selector(stat['statements'][i], stat, prefix='', filter=filter, object_type=object_type) + stat['statements'][i]['selector']['type'], + stat['database'].compile_selector(stat['statements'][i]) ) for i in current_selectors - for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) + #for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) ] # TODO: call merge_conditions() for each object_type individially, replace list diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 32857fc0..7d90971a 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -4,10 +4,15 @@ import pgmapcss.db as db def compile_link_selector(statement, stat): + # create statement where selector is build from parent_selector for compiling + _statement = statement.copy() + _statement['selector'] = _statement['parent_selector'] + del _statement['link_selector'] + del _statement['parent_selector'] + parent_conditions = stat['database'].merge_conditions([( statement['parent_selector']['type'], - stat['database'].compile_selector( - statement, stat, prefix='', selector='parent_selector') + stat['database'].compile_selector(_statement) )]) if statement['parent_selector']['type'] in parent_conditions: @@ -17,8 +22,7 @@ def compile_link_selector(statement, stat): child_conditions = stat['database'].merge_conditions([( statement['selector']['type'], - stat['database'].compile_selector( - statement, stat, prefix='') + stat['database'].compile_selector(statement) )]) if statement['selector']['type'] in child_conditions: child_conditions = child_conditions[statement['selector']['type']] diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 
e3ef9102..6595b45f 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -42,7 +42,7 @@ def convert_to_regexp(self, s): if s[0] == 'is': return ('regexp', s[1], { '^' + self.value_to_regexp(s[2]) + '$' }) - def compile_condition_overpass(self, condition, statement, tag_type, stat, prefix, filter): + def compile_condition_overpass(self, condition, statement, tag_type, filter): ret = None negate = False key = tag_type[1] @@ -129,7 +129,7 @@ def compile_condition_overpass(self, condition, statement, tag_type, stat, prefi # returns None if it's not possible to query for condition (e.g. osm:user) # returns False if query always evaluates negative - def compile_condition(self, condition, statement, stat, prefix='current.', filter={}): + def compile_condition(self, condition, statement, filter={}): ret = [] # assignments: map conditions which are based on a (possible) set-statement @@ -137,7 +137,7 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte f = filter.copy() f['has_set_tag'] = condition['key'] f['max_id'] = statement['id'] - set_statements = stat.filter_statements(f) + set_statements = self.stat.filter_statements(f) if len(set_statements) > 0: set_statements = [ @@ -152,12 +152,12 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte return set_statements # depending on the tag type compile the specified condition - tag_type = stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) + tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) if tag_type is None: pass elif tag_type[0] == 'overpass': - ret = self.compile_condition_overpass(condition, statement, tag_type, stat, prefix, filter) + ret = self.compile_condition_overpass(condition, statement, tag_type, filter) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -311,12 +311,13 @@ def merge_conditions(self, conditions): if 
len(cs) } - def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, selector='selector', no_object_type=False): - filter['object_type'] = object_type + def compile_selector(self, statement, no_object_type=False): + filter = {} + filter['object_type'] = statement['selector']['type'] conditions = [ - self.compile_condition(c, statement, stat, prefix, filter) - for c in statement[selector]['conditions'] + self.compile_condition(c, statement, filter) + for c in statement['selector']['conditions'] ] ret = [ [] ] diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index cac4d273..9ee553e9 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -29,12 +29,13 @@ def merge_conditions(self, conditions): if cs != '()' } - def compile_condition_hstore_value(self, condition, statement, tag_type, stat, prefix, filter): + def compile_condition_hstore_value(self, condition, statement, tag_type, filter): ret = None negate = False key = tag_type[1] column = tag_type[2] op = condition['op'] + prefix = '' if op[0:2] == '! ': op = op[2:] @@ -61,7 +62,7 @@ def compile_condition_hstore_value(self, condition, statement, tag_type, stat, p elif op == '=': ret = prefix + column + ' @> ' + self.format({ key: condition['value'] }) - if 'db.hstore_key_index' in stat['config'] and key in stat['config']['db.hstore_key_index']: + if 'db.hstore_key_index' in self.stat['config'] and key in self.stat['config']['db.hstore_key_index']: ret += ' and ' + prefix + column + ' ? ' + self.format(key) # @= @@ -71,7 +72,7 @@ def compile_condition_hstore_value(self, condition, statement, tag_type, stat, p for v in condition['value'].split(';') ]) + ')' - if 'db.hstore_key_index' in stat['config'] and key in stat['config']['db.hstore_key_index']: + if 'db.hstore_key_index' in self.stat['config'] and key in self.stat['config']['db.hstore_key_index']: ret += ' and ' + prefix + column + ' ? 
' + self.format(key) # != @@ -130,11 +131,12 @@ def compile_condition_hstore_value(self, condition, statement, tag_type, stat, p return ret - def compile_condition_column(self, condition, statement, tag_type, stat, prefix, filter): + def compile_condition_column(self, condition, statement, tag_type, filter): ret = None key = tag_type[1] op = condition['op'] negate = False + prefix = '' value_format = self.value_format_default if len(tag_type) > 2: @@ -241,7 +243,7 @@ def compile_condition_column(self, condition, statement, tag_type, stat, prefix, return ret - def compile_condition(self, condition, statement, stat, prefix='current.', filter={}): + def compile_condition(self, condition, statement, filter={}): ret = set() # assignments: map conditions which are based on a (possible) set-statement @@ -249,11 +251,11 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte f = filter.copy() f['has_set_tag'] = condition['key'] f['max_id'] = statement['id'] - set_statements = stat.filter_statements(f) + set_statements = self.stat.filter_statements(f) if len(set_statements) > 0: ret.add('((' + ') or ('.join([ - self.compile_selector(s, stat, prefix, filter) + self.compile_selector(s) for s in set_statements ]) + '))') @@ -264,14 +266,14 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte return ''.join(ret) # depending on the tag type compile the specified condition - tag_type = stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) + tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) if tag_type is None: pass elif tag_type[0] == 'hstore-value': - ret.add(self.compile_condition_hstore_value(condition, statement, tag_type, stat, prefix, filter)) + ret.add(self.compile_condition_hstore_value(condition, statement, tag_type, filter)) elif tag_type[0] == 'column': - ret.add(self.compile_condition_column(condition, statement, 
tag_type, stat, prefix, filter)) + ret.add(self.compile_condition_column(condition, statement, tag_type, filter)) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -283,12 +285,13 @@ def compile_condition(self, condition, statement, stat, prefix='current.', filte # merge conditions together, return return '(' + ' or '.join(ret) + ')' - def compile_selector(self, statement, stat, prefix='current.', filter={}, object_type=None, selector='selector'): - filter['object_type'] = object_type + def compile_selector(self, statement): + filter = {} + filter['object_type'] = statement['selector']['type'] ret = { - self.compile_condition(c, statement, stat, prefix, filter) or 'true' - for c in statement[selector]['conditions'] + self.compile_condition(c, statement, filter) or 'true' + for c in statement['selector']['conditions'] } if len(ret) == 0: From c5e540f0ff0bbdb19d440970bb1a640bb7f7d572 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 11 Dec 2014 11:54:08 +0100 Subject: [PATCH 091/209] DB backend, improve & doc: move handling of set statements to compile_db_selects() --- pgmapcss/compiler/compile_db_selects.py | 62 +++++++++++++++++++++---- pgmapcss/db/overpass/db.py | 31 ------------- pgmapcss/db/postgresql_db/db.py | 19 -------- 3 files changed, 54 insertions(+), 58 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index e933081e..f31e4b75 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,3 +1,44 @@ +# takes a list of conditions as input and returns several condition combinations +def resolve_set_statements(statement, stat): + ret = [ [] ] + + # iterate over all conditions in the statement + for condition in statement['selector']['conditions']: + last_ret = ret + ret = [] + + # check if there are any statements which assign the current condition key + filter = { + 'has_set_tag': condition['key'], + 
'max_id': statement['id'] + } + set_statements = stat.filter_statements(filter) + + # recurse into resolve_set_statements, to also resolve conditions in + # the statements where set statements happened + set_statements = [ + resolve_set_statements(s, stat) + for s in set_statements + ] + + # for all set statements create a new set of conditions + ret = [ + r + s1 + for r in last_ret + for s in set_statements + for s1 in s + ] + + # for each set of conditions add the current condition + # unless the condition's key does not start with a '.' + if condition['key'][0] != '.': + ret += [ + r + [ condition ] + for r in last_ret + ] + + return ret + def filter_selectors(filter, stat): # where_selectors contains indexes of all selectors which we need for match queries where_selectors = [] @@ -51,17 +92,22 @@ def compile_db_selects(id, stat): filter = { 'min_scale': min_scale, 'max_scale': max_scale or 10E+10} current_selectors = filter_selectors(filter, stat) + conditions = [] + for i in current_selectors: + _statement = stat['statements'][i].copy() + for c in resolve_set_statements(stat['statements'][i], stat): + _statement['selector']['conditions'] = c + + conditions.append( + ( + _statement['selector']['type'], + stat['database'].compile_selector(_statement) + ) + ) + # compile all selectors # TODO: define list of possible object_types # TODO: how to handle wildcard object type? 
- conditions = [ - ( - stat['statements'][i]['selector']['type'], - stat['database'].compile_selector(stat['statements'][i]) - ) - for i in current_selectors - #for object_type in ({'node', 'way', 'area'} if stat['statements'][i]['selector']['type'] == True else { stat['statements'][i]['selector']['type'] }) - ] # TODO: call merge_conditions() for each object_type individially, replace list # of tuples by list of compiled selectors diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 6595b45f..f4224725 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -132,25 +132,6 @@ def compile_condition_overpass(self, condition, statement, tag_type, filter): def compile_condition(self, condition, statement, filter={}): ret = [] - # assignments: map conditions which are based on a (possible) set-statement - # back to their original selectors: - f = filter.copy() - f['has_set_tag'] = condition['key'] - f['max_id'] = statement['id'] - set_statements = self.stat.filter_statements(f) - - if len(set_statements) > 0: - set_statements = [ - self.compile_selector(s, stat, prefix, filter, no_object_type=True) - for s in set_statements - ] - - # ignore generated tags (identified by leading .) 
- if condition['key'][0] == '.': - if len(set_statements) == 0: - return False - return set_statements - # depending on the tag type compile the specified condition tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) @@ -161,18 +142,6 @@ def compile_condition(self, condition, statement, filter={}): else: raise CompileError('unknown tag type {}'.format(tag_type)) - if ret is None: - return set_statements - - if len(set_statements): - if False in set_statements: - return False - - return [ - s + [[ ret ]] - for s in set_statements - ] - # return return ret diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index 9ee553e9..d102d8ba 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -246,25 +246,6 @@ def compile_condition_column(self, condition, statement, tag_type, filter): def compile_condition(self, condition, statement, filter={}): ret = set() - # assignments: map conditions which are based on a (possible) set-statement - # back to their original selectors: - f = filter.copy() - f['has_set_tag'] = condition['key'] - f['max_id'] = statement['id'] - set_statements = self.stat.filter_statements(f) - - if len(set_statements) > 0: - ret.add('((' + ') or ('.join([ - self.compile_selector(s) - for s in set_statements - ]) + '))') - - # ignore generated tags (identified by leading .) 
- if condition['key'][0] == '.': - if len(ret) == 0: - return 'false' - return ''.join(ret) - # depending on the tag type compile the specified condition tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) From 8b261ff9cce1787143f2ed0f830440b538659c60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 11 Dec 2014 21:58:50 +0100 Subject: [PATCH 092/209] DB backend, improve & doc: redesign - redesign representation of parent selectors - db.compile_selector() does not accept 'statement', but 'selector' instead - remove unnecessary function arguments --- doc/database-API.md | 54 +++++++++------------- pgmapcss/compiler/compile_db_selects.py | 2 +- pgmapcss/compiler/compile_link_selector.py | 37 +++++++-------- pgmapcss/compiler/compile_statement.py | 8 ++-- pgmapcss/db/osm2pgsql/db.py | 2 +- pgmapcss/db/osmosis/db.py | 2 +- pgmapcss/db/overpass/db.py | 18 ++++---- pgmapcss/db/postgresql_db/db.py | 20 ++++---- pgmapcss/parser/parse_selectors.py | 12 ++--- 9 files changed, 71 insertions(+), 84 deletions(-) diff --git a/doc/database-API.md b/doc/database-API.md index 941790ad..6a0b974a 100644 --- a/doc/database-API.md +++ b/doc/database-API.md @@ -29,10 +29,10 @@ class db(default): # to a select condition without selecting the object type # (e.g. `"amenity"='bar' and "name" is not null` or # `tags @> 'amenity=>bar' and tags ? 'name'`). 
- # see below for the structure of the statement argument + # see below for the structure of the selector argument # for good performance it would be advisable to also compile relationships # You may define the datatype of the return value - def compile_selector(self, statement): + def compile_selector(self, selector): pass # merge several compiled selectors together (the argument conditions is a @@ -116,9 +116,9 @@ def objects_near(objects, db_selects, options): APPENDIX ======== -Structure of parsed statements ------------------------------- -An example statement structure looks like this: +Structure of parsed selector +---------------------------- +An example selector structure looks like this: ```css node[amenity=bar][name] { text: "name"; @@ -128,22 +128,18 @@ node[amenity=bar][name] { ```python { - 'id': 5, # sequential numbering of statements/properties - 'selector': { - 'conditions': [ - { - 'key': 'amenity', - 'op': '=', - 'value': 'bar', - 'value_type': 'value' # one of (value, eval) - }, { - 'key': 'name', - 'op': 'has_tag' - } - ], - 'type': 'node' # selected type; True if any type (*) - }, - 'properties': .... 
+ 'type': 'node', # selected type; True if any type (*) + 'conditions': [ + { + 'key': 'amenity', + 'op': '=', + 'value': 'bar', + 'value_type': 'value' # one of (value, eval) + }, { + 'key': 'name', + 'op': 'has_tag' + } + ] } ``` @@ -156,13 +152,10 @@ relation[type=route] >[role=stop] node { ```python { - 'id': 7, # sequential numbering of statements/properties - 'selector': { - 'conditions': [], # no conditions on node - 'type': 'node' - }, - 'link_selector': { # (optional) when using relationship selector - 'type': '>' # '', '>', '<', 'near', 'within', 'overlaps' or 'surrounds' + 'type': 'node', + 'conditions': [], # no conditions on node + 'link': { # (optional) when using relationship selector + 'type': '>' # '', '>', '<', 'near', 'within', 'overlaps' or 'surrounds' 'conditions': [ { 'key': 'role', @@ -171,8 +164,8 @@ relation[type=route] >[role=stop] node { 'value_type': 'value' } ] - } - 'parent_selector': { # (optional) when using relationship selector + }, + 'parent': { # (optional) when using relationship selector 'type': 'relation' 'conditions': [ { @@ -183,6 +176,5 @@ relation[type=route] >[role=stop] node { } ] } - 'properties': .... 
} ``` diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index f31e4b75..d988d4a7 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -101,7 +101,7 @@ def compile_db_selects(id, stat): conditions.append( ( _statement['selector']['type'], - stat['database'].compile_selector(_statement) + stat['database'].compile_selector(_statement['selector']) ) ) diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 7d90971a..3c756ea0 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -5,46 +5,41 @@ def compile_link_selector(statement, stat): # create statement where selector is build from parent_selector for compiling - _statement = statement.copy() - _statement['selector'] = _statement['parent_selector'] - del _statement['link_selector'] - del _statement['parent_selector'] - parent_conditions = stat['database'].merge_conditions([( - statement['parent_selector']['type'], - stat['database'].compile_selector(_statement) + statement['selector']['parent']['type'], + stat['database'].compile_selector(statement['selector']['parent']) )]) - if statement['parent_selector']['type'] in parent_conditions: - parent_conditions = parent_conditions[statement['parent_selector']['type']] + if statement['selector']['parent']['type'] in parent_conditions: + parent_conditions = parent_conditions[statement['selector']['parent']['type']] else: parent_conditions = None child_conditions = stat['database'].merge_conditions([( statement['selector']['type'], - stat['database'].compile_selector(statement) + stat['database'].compile_selector(statement['selector']) )]) if statement['selector']['type'] in child_conditions: child_conditions = child_conditions[statement['selector']['type']] else: child_conditions = None - if statement['link_selector']['type'] in ('>', ''): + if 
statement['selector']['link']['type'] in ('>', ''): return "objects_member_of(object['id'], " +\ - repr(statement['parent_selector']['type']) + ", " +\ + repr(statement['selector']['parent']['type']) + ", " +\ repr(parent_conditions) + ", " +\ repr(child_conditions) + ")" - elif statement['link_selector']['type'] == '<': + elif statement['selector']['link']['type'] == '<': return "objects_members(object['id'], " +\ - repr(statement['parent_selector']['type']) + ", " +\ + repr(statement['selector']['parent']['type']) + ", " +\ repr(parent_conditions) + ", " +\ repr(child_conditions) + ")" - elif statement['link_selector']['type'] == 'near': + elif statement['selector']['link']['type'] == 'near': distance = { 'value': '100' } - for r in statement['link_selector']['conditions']: + for r in statement['selector']['link']['conditions']: if r['key'] == 'distance' and r['op'] in ('<', '<=', '='): distance = r @@ -57,16 +52,16 @@ def compile_link_selector(statement, stat): distance = repr(distance['value']) return "objects_near(" + distance + ", None, "+\ - repr(statement['parent_selector']['type']) + ", " +\ + repr(statement['selector']['parent']['type']) + ", " +\ repr(parent_conditions) + ", " +\ repr(child_conditions) + ")" - elif statement['link_selector']['type'] in ('within', 'surrounds', 'overlaps'): + elif statement['selector']['link']['type'] in ('within', 'surrounds', 'overlaps'): return "objects_near(\"0\", None, "+\ - repr(statement['parent_selector']['type']) + ", " +\ + repr(statement['selector']['parent']['type']) + ", " +\ repr(parent_conditions) + ", " +\ repr(child_conditions) + ", check_geo=" +\ - repr(statement['link_selector']['type']) + ")" + repr(statement['selector']['link']['type']) + ")" else: - raise Exception('Unknown link selector "{type}"'.format(**selector['link_selector'])) + raise Exception('Unknown link selector "{type}"'.format(**selector['selector']['link'])) diff --git a/pgmapcss/compiler/compile_statement.py 
b/pgmapcss/compiler/compile_statement.py index cb59f173..c820a215 100644 --- a/pgmapcss/compiler/compile_statement.py +++ b/pgmapcss/compiler/compile_statement.py @@ -54,7 +54,7 @@ def compile_statement(statement, stat, indent=''): for c in object_selector['conditions'] ]) + '\n' - if 'link_selector' in statement: + if 'link' in statement['selector']: ret['body'] += indent + 'for parent_index, parent_object in enumerate(' + compile_link_selector(statement, stat) + '):\n' indent += ' ' @@ -62,9 +62,9 @@ def compile_statement(statement, stat, indent=''): ret['body'] += indent + "current['link_object'] = { 'tags': parent_object['link_tags'] }\n" ret['body'] += indent + "current['link_object']['tags']['index'] = str(parent_index)\n" ret['body'] += indent + 'if (' +\ - and_join(compile_conditions(statement['parent_selector']['conditions'], stat, "current['parent_object']['tags']")) +\ + and_join(compile_conditions(statement['selector']['parent']['conditions'], stat, "current['parent_object']['tags']")) +\ ') and (' +\ - and_join(compile_conditions(statement['link_selector']['conditions'], stat, "current['link_object']['tags']")) + '):\n' + and_join(compile_conditions(statement['selector']['link']['conditions'], stat, "current['link_object']['tags']")) + '):\n' indent += ' ' ret['body'] += indent + 'current[\'parent_object\'] = parent_object\n' @@ -97,7 +97,7 @@ def compile_statement(statement, stat, indent=''): if object_selector['pseudo_element'] == '*': indent = indent[4:] - if 'link_selector' in statement: + if 'link' in statement['selector']: ret['body'] += indent + "current['parent_object'] = None\n" indent = indent[8:] diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index edbf12b0..a46071f5 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -65,7 +65,7 @@ def __init__(self, conn, stat): if 'db.hstore_key_index' in stat['config']: stat['config']['db.hstore_key_index'] = 
stat['config']['db.hstore_key_index'].split(',') - def tag_type(self, key, condition, selector, statement): + def tag_type(self, key, condition, selector): if key[0:4] == 'osm:': if key == 'osm:id': return ( 'column', 'osm_id', self.compile_modify_id ) diff --git a/pgmapcss/db/osmosis/db.py b/pgmapcss/db/osmosis/db.py index 9d7c4de7..d2e14e33 100644 --- a/pgmapcss/db/osmosis/db.py +++ b/pgmapcss/db/osmosis/db.py @@ -30,7 +30,7 @@ def __init__(self, conn, stat): if 'db.hstore_key_index' in stat['config']: stat['config']['db.hstore_key_index'] = stat['config']['db.hstore_key_index'].split(',') - def tag_type(self, key, condition, selector, statement): + def tag_type(self, key, condition, selector): if key[0:4] == 'osm:': if key == 'osm:id': return ( 'column', 'id', self.compile_modify_id ) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index f4224725..85527c69 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -13,7 +13,7 @@ def __init__(self, conn, stat): if not 'db.overpass-url' in self.stat['config']: self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api' - def tag_type(self, key, condition, selector, statement): + def tag_type(self, key, condition): if key[0:4] == 'osm:': return None @@ -42,7 +42,7 @@ def convert_to_regexp(self, s): if s[0] == 'is': return ('regexp', s[1], { '^' + self.value_to_regexp(s[2]) + '$' }) - def compile_condition_overpass(self, condition, statement, tag_type, filter): + def compile_condition_overpass(self, condition, tag_type, filter): ret = None negate = False key = tag_type[1] @@ -129,16 +129,16 @@ def compile_condition_overpass(self, condition, statement, tag_type, filter): # returns None if it's not possible to query for condition (e.g. 
osm:user) # returns False if query always evaluates negative - def compile_condition(self, condition, statement, filter={}): + def compile_condition(self, condition, filter={}): ret = [] # depending on the tag type compile the specified condition - tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) + tag_type = self.stat['database'].tag_type(condition['key'], condition) if tag_type is None: pass elif tag_type[0] == 'overpass': - ret = self.compile_condition_overpass(condition, statement, tag_type, filter) + ret = self.compile_condition_overpass(condition, tag_type, filter) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -280,13 +280,13 @@ def merge_conditions(self, conditions): if len(cs) } - def compile_selector(self, statement, no_object_type=False): + def compile_selector(self, selector, no_object_type=False): filter = {} - filter['object_type'] = statement['selector']['type'] + filter['object_type'] = selector['type'] conditions = [ - self.compile_condition(c, statement, filter) - for c in statement['selector']['conditions'] + self.compile_condition(c, filter) + for c in selector['conditions'] ] ret = [ [] ] diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index d102d8ba..c0509b13 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -29,7 +29,7 @@ def merge_conditions(self, conditions): if cs != '()' } - def compile_condition_hstore_value(self, condition, statement, tag_type, filter): + def compile_condition_hstore_value(self, condition, tag_type, filter): ret = None negate = False key = tag_type[1] @@ -131,7 +131,7 @@ def compile_condition_hstore_value(self, condition, statement, tag_type, filter) return ret - def compile_condition_column(self, condition, statement, tag_type, filter): + def compile_condition_column(self, condition, tag_type, filter): ret = None key = tag_type[1] op = condition['op'] @@ -243,18 +243,18 @@ def 
compile_condition_column(self, condition, statement, tag_type, filter): return ret - def compile_condition(self, condition, statement, filter={}): + def compile_condition(self, condition, selector, filter={}): ret = set() # depending on the tag type compile the specified condition - tag_type = self.stat['database'].tag_type(condition['key'], condition, statement['selector'], statement) + tag_type = self.stat['database'].tag_type(condition['key'], condition, selector) if tag_type is None: pass elif tag_type[0] == 'hstore-value': - ret.add(self.compile_condition_hstore_value(condition, statement, tag_type, filter)) + ret.add(self.compile_condition_hstore_value(condition, tag_type, filter)) elif tag_type[0] == 'column': - ret.add(self.compile_condition_column(condition, statement, tag_type, filter)) + ret.add(self.compile_condition_column(condition, tag_type, filter)) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -266,13 +266,13 @@ def compile_condition(self, condition, statement, filter={}): # merge conditions together, return return '(' + ' or '.join(ret) + ')' - def compile_selector(self, statement): + def compile_selector(self, selector): filter = {} - filter['object_type'] = statement['selector']['type'] + filter['object_type'] = selector['type'] ret = { - self.compile_condition(c, statement, filter) or 'true' - for c in statement['selector']['conditions'] + self.compile_condition(c, selector, filter) or 'true' + for c in selector['conditions'] } if len(ret) == 0: diff --git a/pgmapcss/parser/parse_selectors.py b/pgmapcss/parser/parse_selectors.py index 217ebe85..262b9f08 100644 --- a/pgmapcss/parser/parse_selectors.py +++ b/pgmapcss/parser/parse_selectors.py @@ -117,20 +117,20 @@ def parse_selectors(selectors, to_parse): raise if sel1 and sel2 and sel3: - selector['parent_selector'] = sel1 - selector['link_selector'] = sel2 selector['selector'] = sel3 + selector['selector']['parent'] = sel1 + selector['selector']['link'] = sel2 elif sel1 
and not sel2 and sel3: - selector['parent_selector'] = sel1 - selector['link_selector'] = { + selector['selector'] = sel3 + selector['selector']['parent'] = sel1 + selector['selector']['link'] = { 'type': '', 'conditions': [] } - selector['selector'] = sel3 else: selector['selector'] = sel1 if not to_parse.match('\s*,'): - return + return From 104648e311ee1426559618c9d44a71a35e95cdbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Thu, 11 Dec 2014 22:34:58 +0100 Subject: [PATCH 093/209] DB backend, improve & doc: re-design db.merge_conditions() - db.merge_conditions() is called separately for each object type - object type is no longer passed, only list of conditions --- doc/database-API.md | 6 ++-- pgmapcss/compiler/compile_db_selects.py | 27 ++++++++++++++++-- pgmapcss/compiler/compile_link_selector.py | 19 +++---------- pgmapcss/db/overpass/db.py | 33 ++++++++-------------- pgmapcss/db/postgresql_db/db.py | 27 +++++++----------- 5 files changed, 53 insertions(+), 59 deletions(-) diff --git a/doc/database-API.md b/doc/database-API.md index 6a0b974a..82c287dd 100644 --- a/doc/database-API.md +++ b/doc/database-API.md @@ -31,7 +31,8 @@ class db(default): # `tags @> 'amenity=>bar' and tags ? 'name'`). # see below for the structure of the selector argument # for good performance it would be advisable to also compile relationships - # You may define the datatype of the return value + # You may define the datatype of the return value, with the exception of + # False -> the condition will be dropped def compile_selector(self, selector): pass @@ -39,7 +40,8 @@ class db(default): # list) # e.g. 
`[ '"amenity"=\'bar\' and "name" is not null', '"foo"=\'bar\'' ]` # => `'("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')'` - # You may define the datatype of the return value + # You may define the datatype of the return value, with the exception of + # False -> the conditions will be dropped def merge_conditions(self, conditions): pass ``` diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index d988d4a7..0b229710 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -109,9 +109,30 @@ def compile_db_selects(id, stat): # TODO: define list of possible object_types # TODO: how to handle wildcard object type? -# TODO: call merge_conditions() for each object_type individially, replace list -# of tuples by list of compiled selectors - conditions = stat['database'].merge_conditions(conditions) + # get list of types and make list of conditions of each type + types = [ t for t, cs in conditions if t != True ] + conditions = { + t: [ + cs + for t2, cs in conditions + if t == t2 + if cs != False + ] + for t in types + } + + # merge all conditions for each types together + conditions = { + t: stat['database'].merge_conditions(cs) + for t, cs in conditions.items() + } + + # remove False entries + conditions = { + t: cs + for t, cs in conditions.items() + if cs is not False + } max_scale = min_scale diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 3c756ea0..8c3845b4 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -5,24 +5,13 @@ def compile_link_selector(statement, stat): # create statement where selector is build from parent_selector for compiling - parent_conditions = stat['database'].merge_conditions([( - statement['selector']['parent']['type'], + parent_conditions = stat['database'].merge_conditions([ 
stat['database'].compile_selector(statement['selector']['parent']) - )]) + ]) - if statement['selector']['parent']['type'] in parent_conditions: - parent_conditions = parent_conditions[statement['selector']['parent']['type']] - else: - parent_conditions = None - - child_conditions = stat['database'].merge_conditions([( - statement['selector']['type'], + child_conditions = stat['database'].merge_conditions([ stat['database'].compile_selector(statement['selector']) - )]) - if statement['selector']['type'] in child_conditions: - child_conditions = child_conditions[statement['selector']['type']] - else: - child_conditions = None + ]) if statement['selector']['link']['type'] in ('>', ''): return "objects_member_of(object['id'], " +\ diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 85527c69..0db9852e 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -253,32 +253,21 @@ def simplify_conditions(self, conditions): return conditions def merge_conditions(self, conditions): - types = [ t for t, cs in conditions if t != True ] - - conditions = { - t: [ - c - for t2, cs in conditions - if t == t2 - if cs != False - for c in cs - ] - for t in types - } + conditions = [ + c + for cs in conditions + for c in cs + ] + + conditions = self.simplify_conditions(conditions) - conditions = { - t: self.simplify_conditions(cs) - for t, cs in conditions.items() - } + if len(conditions) == 0: + return False - return { - t: ';\n'.join([ + return ';\n'.join([ self.conditions_to_query(c) - for c in cs + for c in conditions ]) + ';\n' - for t, cs in conditions.items() - if len(cs) - } def compile_selector(self, selector, no_object_type=False): filter = {} diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index c0509b13..c363f3ed 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -10,24 +10,17 @@ def __init__(self, conn, stat): def merge_conditions(self, conditions): conditions = 
set(conditions) - types = [ t for t, cs in conditions if t != True ] - - conditions = { - t: - '(' + ') or ('.join([ - cs - for t2, cs in conditions - if t == t2 - if cs != 'false' - ]) + ')' - for t in types - } - return { - t: cs - for t, cs in conditions.items() - if cs != '()' - } + conditions = '(' + ') or ('.join([ + cs + for cs in conditions + if cs != 'false' + ]) + ')' + + if conditions == '()': + return False + + return conditions def compile_condition_hstore_value(self, condition, tag_type, filter): ret = None From a8d1af55d764369a800b1e5dfcf45190699f183f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 14 Dec 2014 09:27:09 +0100 Subject: [PATCH 094/209] Compile DB Selects: export function compile_selectors_db --- pgmapcss/compiler/compile_db_selects.py | 95 ++++++++++++++----------- 1 file changed, 54 insertions(+), 41 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 0b229710..ecaad088 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -82,6 +82,59 @@ def filter_selectors(filter, stat): # uniq list return list(set(where_selectors)) +def compile_selectors_db(statements, selector_index, stat): + conditions = [] + for i in statements: + if type(i) == int: + _statement = stat['statements'][i].copy() + else: + _statement = i.copy() + + for c in resolve_set_statements(_statement, stat): + _statement['selector']['conditions'] = c + if selector_index is None: + selector = _statement['selector'] + else: + selector = _statement['selector'][selector_index] + + conditions.append( + ( + selector['type'], + stat['database'].compile_selector(selector) + ) + ) + + # compile all selectors + # TODO: define list of possible object_types + # TODO: how to handle wildcard object type? 
+ + # get list of types and make list of conditions of each type + types = [ t for t, cs in conditions if t != True ] + conditions = { + t: [ + cs + for t2, cs in conditions + if t == t2 + if cs != False + ] + for t in types + } + + # merge all conditions for each types together + conditions = { + t: stat['database'].merge_conditions(cs) + for t, cs in conditions.items() + } + + # remove False entries + conditions = { + t: cs + for t, cs in conditions.items() + if cs is not False + } + + return conditions + def compile_db_selects(id, stat): ret = '' @@ -92,47 +145,7 @@ def compile_db_selects(id, stat): filter = { 'min_scale': min_scale, 'max_scale': max_scale or 10E+10} current_selectors = filter_selectors(filter, stat) - conditions = [] - for i in current_selectors: - _statement = stat['statements'][i].copy() - for c in resolve_set_statements(stat['statements'][i], stat): - _statement['selector']['conditions'] = c - - conditions.append( - ( - _statement['selector']['type'], - stat['database'].compile_selector(_statement['selector']) - ) - ) - - # compile all selectors - # TODO: define list of possible object_types - # TODO: how to handle wildcard object type? - - # get list of types and make list of conditions of each type - types = [ t for t, cs in conditions if t != True ] - conditions = { - t: [ - cs - for t2, cs in conditions - if t == t2 - if cs != False - ] - for t in types - } - - # merge all conditions for each types together - conditions = { - t: stat['database'].merge_conditions(cs) - for t, cs in conditions.items() - } - - # remove False entries - conditions = { - t: cs - for t, cs in conditions.items() - if cs is not False - } + conditions = compile_selectors_db(current_selectors, None, stat) max_scale = min_scale From 87c0cc5a67bdd1e2c5e22f4c427b3f5f1cb96ddb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 14 Dec 2014 10:49:08 +0100 Subject: [PATCH 095/209] DB/osm2pgsql: bugfix objects_by_id(): don't include add. 
columns in tags --- pgmapcss/db/osm2pgsql/db_functions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index 5497be3c..ebef9f4e 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -288,7 +288,7 @@ def objects_by_id(id_list): t['tags'] = { k: r[k] for k in r - if k not in ['id', 'geo', 'types', 'tags'] + if k not in ['id', 'geo', 'types', 'tags', 'way', 'osm_id'] if r[k] is not None } # START db.has-hstore @@ -320,7 +320,7 @@ def objects_by_id(id_list): t['tags'] = { k: r[k] for k in r - if k not in ['id', 'geo', 'types', 'tags'] + if k not in ['osm_id', 'geo', 'types', 'tags', 'nodes', '_type', 'way'] if r[k] is not None } # START db.has-hstore From a0d1f0538d111eb6e75612fb268427a4f2b5ec21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 14 Dec 2014 11:07:44 +0100 Subject: [PATCH 096/209] DB/osm2pgsql: fix objects_member_of, correct config key --- pgmapcss/db/osm2pgsql/db_functions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index ebef9f4e..bedf4a55 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -398,7 +398,7 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition 'sequence_id': str(i) } } -# START db.columns +# START db.columns.way t['tags'] = { k: r[k] for k in r @@ -408,7 +408,7 @@ def objects_member_of(member_id, parent_type, parent_conditions, child_condition # START db.has-hstore t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) # END db.has-hstore -# END db.columns +# END db.columns.way # START db.hstore-only t['tags'] = pghstore.loads(r['tags']) # END db.hstore-only From 8e8ab96a066b145d2918e5d2602bf240939a41e9 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 15 Dec 2014 07:15:10 +0100 Subject: [PATCH 097/209] DB/*: make objects_* functions ready for bulk processing; follow new docu --- doc/database-API.md | 44 +++- pgmapcss/compiler/compile_function_match.py | 2 +- pgmapcss/compiler/compile_link_selector.py | 50 ++-- pgmapcss/compiler/compile_statement.py | 4 +- pgmapcss/db/osm2pgsql/db_functions.py | 225 +++++++++--------- pgmapcss/db/osmosis/db_functions.py | 229 ++++++++++--------- pgmapcss/db/overpass/db_functions.py | 239 +++++++++++--------- 7 files changed, 422 insertions(+), 371 deletions(-) diff --git a/doc/database-API.md b/doc/database-API.md index 82c287dd..fa1597b4 100644 --- a/doc/database-API.md +++ b/doc/database-API.md @@ -66,53 +66,79 @@ An object should look like this: The template file looks like this: ```python -# objects() yields all objects which match the query/queries in the current +# objects_bbox() yields all objects which match the query/queries in the current # bounding box. +# # Arguments: # bbox: a bounding box as WKT or None (ignore bounding box ; return all objects # in database) -# db_selects: a dict, with the object types and the compiled conditions from db.py, e.g.: `{ 'area': '("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')' }`. objects() need to match the object types to the respective openstreetmap objects, e.g. 'area' => closed ways and multipolygons. -def objects(bbox, db_selects): +# db_selects: a dict, with the object types and the compiled conditions from db.py, e.g.: `{ 'area': '("amenity"=\'bar\' and "name" is not null) or ("foo"=\'bar\')' }`. objects_bbox() need to match the object types to the respective openstreetmap objects, e.g. 'area' => closed ways and multipolygons. 
+# options: a dict, with additional settings (currently: none) +def objects_bbox(bbox, db_selects, options): pass # objects_by_id() yields the specified objects from the database # an id is a string with the object type identifier and the id, e.g. 'n1234' -def objects_by_id(id_list): +# options: a dict, with additional settings (currently: none) +def objects_by_id(id_list, options): pass # objects_member_of(). For each object in the `objects` list, return all parent # objects (which match the db_selects). +# +# Arguments: +# objects: a list of objects +# other_selects: a query/queries how to select parent objects (see db_selects on objects_bbox()) +# self_selects: a query/queries how to select child objects (this might be useful if you want to query all objects in the bounding box for caching) +# options: a dict, with additional settings (currently: none) +# +# Yields: # As yielded values, tuples are expected with: # ( child_object, parent_object, link_tags ) # # link_tags (dict) should contain: # * sequence_id: members are consecutively numbered, the child is the nth entry (counting from 0) # * role: role as specified in osm data (when the parent is a relation) -def objects_member_of(objects, db_selects): +def objects_member_of(objects, other_selects, self_selects, options): pass # objects_members(). For each object in the `objects` list, return all child # objects (which match the db_selects). 
-# As yielded values, tuples are expected with: +# +# Arguments: +# objects: a list of objects +# other_selects: a query/queries how to select child objects (this might be useful if you want to query all objects in the bounding box for caching) +# self_selects: a query/queries how to select parent objects (see db_selects on objects_bbox()) +# options: a dict, with additional settings (currently: none) +# +# Yields: +## As yielded values, tuples are expected with: # ( parent_object, child_object, link_tags ) # link_tags (dict) should contain: # * sequence_id: members are consecutively numbered, the child is the nth entry (counting from 0) # * role: role as specified in osm data (when the parent is a relation) -def objects_members(objects, db_selects): +def objects_members(objects, other_selects, self_selects, options): pass # objects_near(). For each object in the `objects` list, return all nearby objects (which match the db_selects). -# Argument options (dict): +# +# Arguments: +# objects: a list of objects +# other_selects: a query/queries how to select the other objects (see db_selects on objects_bbox()) +# self_selects: a query/queries how to select the objects from which we query (this might be useful if you want to query all objects in the bounding box for caching) +# options: a dict, with additional settings: # * distance: maximum distance in pixels # * check_geo: (optional) one of: # * 'within': if child object is within certain distance around parent # * 'surrounds': if parent object is within certain distance around child # * 'overlaps': if parent object and child object overlap (distance=0) +# +# Yields: # As yielded values, tuples are expected with: # ( parent_object, child_object, link_tags ) # link_tags (dict) should contain: # * distance: distance between objects in pixels -def objects_near(objects, db_selects, options): +def objects_near(objects, other_selects, self_selects, options): pass ``` diff --git a/pgmapcss/compiler/compile_function_match.py 
b/pgmapcss/compiler/compile_function_match.py index 93957559..495fbfd4 100644 --- a/pgmapcss/compiler/compile_function_match.py +++ b/pgmapcss/compiler/compile_function_match.py @@ -126,7 +126,7 @@ def compile_function_match(stat): '''.format(**replacement) - func = "objects(render_context.get('bbox'), db_selects)" + func = "objects_bbox(render_context.get('bbox'), db_selects, {})" if stat['config'].get('debug.profiler', False): ret += "time_qry_start = datetime.datetime.now() # profiling\n" ret += "src = list(" + func + ")\n" diff --git a/pgmapcss/compiler/compile_link_selector.py b/pgmapcss/compiler/compile_link_selector.py index 8c3845b4..60f29991 100644 --- a/pgmapcss/compiler/compile_link_selector.py +++ b/pgmapcss/compiler/compile_link_selector.py @@ -1,29 +1,25 @@ from .compile_selector_part import compile_selector_part from .compile_conditions import compile_conditions +from .compile_db_selects import compile_selectors_db from .compile_eval import compile_eval import pgmapcss.db as db def compile_link_selector(statement, stat): # create statement where selector is build from parent_selector for compiling - parent_conditions = stat['database'].merge_conditions([ - stat['database'].compile_selector(statement['selector']['parent']) - ]) - - child_conditions = stat['database'].merge_conditions([ - stat['database'].compile_selector(statement['selector']) - ]) + other_selects = compile_selectors_db([statement], 'parent', stat) + self_selects = compile_selectors_db([statement], None, stat) if statement['selector']['link']['type'] in ('>', ''): - return "objects_member_of(object['id'], " +\ - repr(statement['selector']['parent']['type']) + ", " +\ - repr(parent_conditions) + ", " +\ - repr(child_conditions) + ")" + return "objects_member_of([object], " +\ + repr(other_selects) + ", " +\ + repr(self_selects) + ", " +\ + repr({}) + ")" elif statement['selector']['link']['type'] == '<': - return "objects_members(object['id'], " +\ - 
repr(statement['selector']['parent']['type']) + ", " +\ - repr(parent_conditions) + ", " +\ - repr(child_conditions) + ")" + return "objects_members([object], " +\ + repr(other_selects) + ", " +\ + repr(self_selects) + ", " +\ + repr({}) + ")" elif statement['selector']['link']['type'] == 'near': distance = { 'value': '100' } @@ -38,19 +34,23 @@ def compile_link_selector(statement, stat): 'id': statement['id'] }, stat) else: - distance = repr(distance['value']) + distance = distance['value'] - return "objects_near(" + distance + ", None, "+\ - repr(statement['selector']['parent']['type']) + ", " +\ - repr(parent_conditions) + ", " +\ - repr(child_conditions) + ")" + return "objects_near([object], " +\ + repr(other_selects) + ", " +\ + repr(self_selects) + ", " +\ + repr({ + 'distance': distance + }) + ")" elif statement['selector']['link']['type'] in ('within', 'surrounds', 'overlaps'): - return "objects_near(\"0\", None, "+\ - repr(statement['selector']['parent']['type']) + ", " +\ - repr(parent_conditions) + ", " +\ - repr(child_conditions) + ", check_geo=" +\ - repr(statement['selector']['link']['type']) + ")" + return "objects_near([object], " +\ + repr(other_selects) + ", " +\ + repr(self_selects) + ", " +\ + repr({ + 'distance': 0, + 'check_geo': statement['selector']['link']['type'], + }) + ")" else: raise Exception('Unknown link selector "{type}"'.format(**selector['selector']['link'])) diff --git a/pgmapcss/compiler/compile_statement.py b/pgmapcss/compiler/compile_statement.py index c820a215..0f263104 100644 --- a/pgmapcss/compiler/compile_statement.py +++ b/pgmapcss/compiler/compile_statement.py @@ -55,11 +55,11 @@ def compile_statement(statement, stat, indent=''): ]) + '\n' if 'link' in statement['selector']: - ret['body'] += indent + 'for parent_index, parent_object in enumerate(' + compile_link_selector(statement, stat) + '):\n' + ret['body'] += indent + 'for parent_index, (o, parent_object, link_tags) in enumerate(' + compile_link_selector(statement, 
stat) + '):\n' indent += ' ' ret['body'] += indent + "current['parent_object'] = parent_object\n" - ret['body'] += indent + "current['link_object'] = { 'tags': parent_object['link_tags'] }\n" + ret['body'] += indent + "current['link_object'] = { 'tags': link_tags }\n" ret['body'] += indent + "current['link_object']['tags']['index'] = str(parent_index)\n" ret['body'] += indent + 'if (' +\ and_join(compile_conditions(statement['selector']['parent']['conditions'], stat, "current['parent_object']['tags']")) +\ diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index bedf4a55..ca1a8560 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -1,5 +1,5 @@ # Use this functions only with a database based on an import with osm2pgsql -def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): +def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], add_param_value=[]): import pghstore qry = '' @@ -26,8 +26,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # planet_osm_point w = [] for t in ('*', 'node', 'point'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -76,8 +76,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # planet_osm_line - ways w = [] for t in ('*', 'line', 'way'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -126,8 +126,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # planet_osm_line - relations w = [] for t in ('*', 'line', 'relation'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -176,8 +176,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # 
planet_osm_polygon - ways w = [] for t in ('*', 'area', 'way'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -226,8 +226,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # planet_osm_polygon - relations w = [] for t in ('*', 'area', 'relation'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -273,7 +273,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v yield(t) -def objects_by_id(id_list): +def objects_by_id(id_list, options): _id_list = [ int(i[1:]) for i in id_list if i[0] == 'n' ] plan = plpy.prepare('select * from planet_osm_point where osm_id=any($1)', ['bigint[]']); res = plpy.cursor(plan, [_id_list]) @@ -365,124 +365,127 @@ def flatarray_to_members(arr): return ret -def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): - if parent_type == 'relation': +def objects_member_of(objects, other_selects, self_selects, options): + if 'relation' in other_selects: plan = plpy.prepare('select * from planet_osm_rels where members @> Array[$1]', ['text']); - res = plpy.cursor(plan, [member_id]) - for r in res: - for member in flatarray_to_members(r['members']): - if member['member_id'] == member_id: - t = { - 'id': 'r' + str(r['id']), - 'tags': flatarray_to_tags(r['tags']) if r['tags'] else {}, - 'types': ['relation'], - 'geo': None, - 'link_tags': member - } - t['tags']['osm:id'] = t['id'] - yield(t) - - if parent_type == 'way': - num_id = int(member_id[1:]) + for o in objects: + member_id = o['id'] + + res = plpy.cursor(plan, [member_id]) + for r in res: + for member in flatarray_to_members(r['members']): + if member['member_id'] == member_id: + t = { + 'id': 'r' + str(r['id']), + 'tags': flatarray_to_tags(r['tags']) if r['tags'] else {}, + 'types': ['relation'], + 'geo': None, + } + t['tags']['osm:id'] = 
t['id'] + yield (o, t, member) + + if 'way' in other_selects: plan = plpy.prepare('select id, nodes, planet_osm_line.tags, way as geo from planet_osm_ways left join planet_osm_line on planet_osm_ways.id=planet_osm_line.osm_id where nodes::bigint[] @> Array[$1]', ['bigint']); - res = plpy.cursor(plan, [num_id]) - for r in res: - for i, member in enumerate(r['nodes']): - if member == num_id: - t = { - 'id': 'w' + str(r['id']), - 'types': ['way'], - 'geo': r['geo'], - 'link_tags': { + for o in objects: + member_id = o['id'] + num_id = int(member_id[1:]) + + res = plpy.cursor(plan, [num_id]) + for r in res: + for i, member in enumerate(r['nodes']): + if member == num_id: + t = { + 'id': 'w' + str(r['id']), + 'types': ['way'], + 'geo': r['geo'], + } + + link_tags = { 'member_id': member_id, 'sequence_id': str(i) } - } # START db.columns.way - t['tags'] = { - k: r[k] - for k in r - if k not in ['id', 'geo', 'types', 'tags'] - if r[k] is not None - } + t['tags'] = { + k: r[k] + for k in r + if k not in ['id', 'geo', 'types', 'tags', 'nodes'] + if r[k] is not None + } # START db.has-hstore - t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) + t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) # END db.has-hstore # END db.columns.way # START db.hstore-only - t['tags'] = pghstore.loads(r['tags']) + t['tags'] = pghstore.loads(r['tags']) # END db.hstore-only - t['tags']['osm:id'] = t['id'] - yield(t) - -def objects_members(relation_id, parent_type, parent_conditions, child_conditions): - ob = list(objects_by_id([relation_id])) + t['tags']['osm:id'] = t['id'] + yield(o, t, link_tags) - if not len(ob): - return +def objects_members(objects, other_selects, self_selects, options): + for _ob in objects: + # relation don't get 'members' from objects_bbox(), therefore reload object + ob = list(objects_by_id([ _ob['id'] ], {})) - ob = ob[0] + if not len(ob): + continue - link_obs_ids = [ i['member_id'] for i in ob['members'] ] - link_obs = {} 
- for o in objects_by_id(link_obs_ids): - link_obs[o['id']] = o + ob = ob[0] - for member in ob['members']: - if not member['member_id'] in link_obs: - continue + link_obs_ids = [ i['member_id'] for i in ob['members'] ] + link_obs = {} + for o in objects_by_id(link_obs_ids, {}): + link_obs[o['id']] = o - ret = link_obs[member['member_id']] + for member in ob['members']: + if not member['member_id'] in link_obs: + continue - if parent_type not in ret['types']: - continue + ret = link_obs[member['member_id']] - ret['link_tags'] = member - yield ret + if len(other_selects.keys() - ret['types']): + yield (_ob, ret, member ) -def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): - if ob: +def objects_near(objects, other_selects, self_selects, options): + # TODO: how to check properties of object (e.g. when geometry has been modified) + for ob in objects: geom = ob['geo'] - elif 'geo' in current['properties'][current['pseudo_element']]: - geom = current['properties'][current['pseudo_element']]['geo'] - else: - geom = current['object']['geo'] - if where_clause == '': - where_clause = 'true' - - max_distance = to_float(eval_metric([ max_distance, 'u' ])) - if max_distance is None: - return [] - elif max_distance == 0: - bbox = geom - else: - plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) - res = plpy.execute(plan, [ geom, max_distance ]) - bbox = res[0]['r'] - - if check_geo == 'within': - where_clause += " and ST_DWithin(way, $2, 0.0)" - elif check_geo == 'surrounds': - where_clause += " and ST_DWithin($2, way, 0.0)" - elif check_geo == 'overlaps': - where_clause += " and ST_Overlaps($2, way)" - - obs = [] - for ob in objects( - bbox, - { parent_selector: where_clause }, - { # add_columns - '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(way, {unit.srs}))' - }, - [ 'geometry' ], - [ geom ] - ): - if 
ob['id'] != current['object']['id'] and ob['__distance'] <= max_distance: - ob['link_tags'] = { - 'distance': eval_metric([ str(ob['__distance']) + 'u', 'px' ]) - } - obs.append(ob) + max_distance = to_float(eval_metric([ options['distance'], 'u' ])) + if max_distance is None: + return + elif max_distance == 0: + bbox = geom + else: + plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ geom, max_distance ]) + bbox = res[0]['r'] + + if not 'check_geo' in options: + pass + elif options['check_geo'] == 'within': + where_clause += " and ST_DWithin(way, $2, 0.0)" + elif options['check_geo'] == 'surrounds': + where_clause += " and ST_DWithin($2, way, 0.0)" + elif options['check_geo'] == 'overlaps': + where_clause += " and ST_Overlaps($2, way)" + + obs = [] + for o in objects_bbox( + bbox, + other_selects, + options, + { # add_columns + '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(way, {unit.srs}))' + }, + [ 'geometry' ], + [ geom ] + ): + if o['id'] != ob['id'] and o['__distance'] <= max_distance: + link_tags = { + 'distance': eval_metric([ str(o['__distance']) + 'u', 'px' ]) + } + obs.append((o, link_tags)) - obs = sorted(obs, key=lambda ob: ob['__distance'] ) - return obs + obs = sorted(obs, key=lambda o: o[0]['__distance'] ) + for o in obs: + yield((ob, o[0], o[1])) diff --git a/pgmapcss/db/osmosis/db_functions.py b/pgmapcss/db/osmosis/db_functions.py index d83b7f04..2b04e9d0 100644 --- a/pgmapcss/db/osmosis/db_functions.py +++ b/pgmapcss/db/osmosis/db_functions.py @@ -1,5 +1,5 @@ # Use this functions only with a database based on an import with osmosis -def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): +def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], add_param_value=[]): import pghstore time_start = datetime.datetime.now() # profiling @@ -23,8 
+23,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # nodes w = [] for t in ('*', 'node', 'point'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): bbox = '' @@ -56,8 +56,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # ways w = [] for t in ('*', 'line', 'area', 'way'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): bbox = '' @@ -104,8 +104,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # multipolygons w = [] for t in ('*', 'relation', 'area'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): bbox = '' @@ -145,8 +145,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # relations - (no bbox match!) w = [] for t in ('*', 'relation'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): qry = ''' @@ -174,7 +174,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v time_stop = datetime.datetime.now() # profiling plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) -def objects_by_id(id_list): +def objects_by_id(id_list, options): _id_list = [ int(i[1:]) for i in id_list if i[0] == 'n' ] plan = plpy.prepare('select id, tags, geom from nodes where id=any($1)', ['bigint[]']); res = plpy.cursor(plan, [_id_list]) @@ -236,121 +236,128 @@ def objects_by_id(id_list): t['tags']['osm:changeset'] = str(r['changeset_id']) yield(t) -def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): - if parent_type == 'relation': +def objects_member_of(objects, other_selects, self_selects, options): + if 'relation' in other_selects: plan = plpy.prepare('select *, (select name from users where 
id=user_id) as user from relation_members join relations on relation_members.relation_id=relations.id where member_id=$1 and member_type=$2', ['bigint', 'text']); - res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()]) - for r in res: - t = { - 'id': 'r' + str(r['id']), - 'tags': pghstore.loads(r['tags']), - 'types': ['relation'], - 'geo': None, - 'link_tags': { + for ob in objects: + member_id = ob['id'] + + res = plpy.cursor(plan, [int(member_id[1:]), member_id[0:1].upper()]) + for r in res: + t = { + 'id': 'r' + str(r['id']), + 'tags': pghstore.loads(r['tags']), + 'types': ['relation'], + 'geo': None, + } + link_tags = { 'sequence_id': str(r['sequence_id']), 'role': str(r['member_role']), 'member_id': r['member_type'].lower() + str(r['member_id']), } - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) - t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) - - if parent_type == 'way' and member_id[0] == 'n': - num_id = int(member_id[1:]) + t['tags']['osm:id'] = str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield((ob, t, link_tags)) + + if 'way' in other_selects: plan = plpy.prepare('select *, (select name from users where id=user_id) as user from way_nodes join ways on way_nodes.way_id=ways.id where node_id=$1', ['bigint']); - res = plpy.cursor(plan, [num_id]) - for r in res: - t = { - 'id': 'w' + str(r['id']), - 'tags': pghstore.loads(r['tags']), - 'types': ['way'], - 'geo': r['linestring'], - 'link_tags': { + for o in objects: + member_id = o['id'] + + if member_id[0] != 'n': + continue + + num_id = int(member_id[1:]) + res = plpy.cursor(plan, [num_id]) + for r in res: + t = 
{ + 'id': 'w' + str(r['id']), + 'tags': pghstore.loads(r['tags']), + 'types': ['way'], + 'geo': r['linestring'], + } + link_tags = { 'member_id': member_id, 'sequence_id': str(r['sequence_id']) } - } - t['tags']['osm:id'] = str(t['id']) - t['tags']['osm:version'] = str(r['version']) - t['tags']['osm:user_id'] = str(r['user_id']) - t['tags']['osm:user'] = r['user'] - t['tags']['osm:timestamp'] = str(r['tstamp']) - t['tags']['osm:changeset'] = str(r['changeset_id']) - yield(t) - -def objects_members(relation_id, parent_type, parent_conditions, child_conditions): - ob = list(objects_by_id([relation_id])) - - if not len(ob): - return - - ob = ob[0] - - link_obs_ids = [ i['member_id'] for i in ob['members'] ] - link_obs = {} - for o in objects_by_id(link_obs_ids): - link_obs[o['id']] = o - - for member in ob['members']: - if not member['member_id'] in link_obs: + t['tags']['osm:id'] = str(t['id']) + t['tags']['osm:version'] = str(r['version']) + t['tags']['osm:user_id'] = str(r['user_id']) + t['tags']['osm:user'] = r['user'] + t['tags']['osm:timestamp'] = str(r['tstamp']) + t['tags']['osm:changeset'] = str(r['changeset_id']) + yield((ob, t, link_tags)) + +def objects_members(objects, other_selects, self_selects, options): + for _ob in objects: + # relation don't get 'members' from objects_bbox(), therefore reload object + ob = list(objects_by_id([ _ob['id'] ], {})) + + if not len(ob): continue - ret = link_obs[member['member_id']] + ob = ob[0] - if parent_type not in ret['types']: - continue + link_obs_ids = [ i['member_id'] for i in ob['members'] ] + link_obs = {} + for o in objects_by_id(link_obs_ids, {}): + link_obs[o['id']] = o + + for member in ob['members']: + if not member['member_id'] in link_obs: + continue - ret['link_tags'] = member - yield ret + ret = link_obs[member['member_id']] -def objects_near(max_distance, ob, parent_selector, where_clause, child_conditions, check_geo=None): - if ob: + if len(other_selects.keys() - ret['types']): + continue + + yield 
(( _ob, ret, member )) + +def objects_near(objects, other_selects, self_selects, options): + for ob in objects: geom = ob['geo'] - elif 'geo' in current['properties'][current['pseudo_element']]: - geom = current['properties'][current['pseudo_element']]['geo'] - else: - geom = current['object']['geo'] - if where_clause == '': - where_clause = 'true' + max_distance = to_float(eval_metric([ options['distance'], 'u' ])) + if max_distance is None: + return + elif max_distance == 0: + bbox = geom + else: + plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ geom, max_distance ]) + bbox = res[0]['r'] + + if not 'check_geo' in options: + pass + elif options['check_geo'] == 'within': + where_clause += " and ST_DWithin(way, $2, 0.0)" + elif options['check_geo'] == 'surrounds': + where_clause += " and ST_DWithin($2, way, 0.0)" + elif options['check_geo'] == 'overlaps': + where_clause += " and ST_Overlaps($2, way)" + + obs = [] + for o in objects_bbox( + bbox, + other_selects, + {}, + { # add_columns + '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' + }, + [ 'geometry' ], + [ geom ] + ): + if o['id'] != ob['id'] and o['__distance'] <= max_distance: + link_tags = { + 'distance': eval_metric([ str(o['__distance']) + 'u', 'px' ]) + } + obs.append((o, link_tags)) - max_distance = to_float(eval_metric([ max_distance, 'u' ])) - if max_distance is None: - return [] - elif max_distance == 0: - bbox = geom - else: - plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) - res = plpy.execute(plan, [ geom, max_distance ]) - bbox = res[0]['r'] - - if check_geo == 'within': - where_clause += " and ST_DWithin(way, $2, 0.0)" - elif check_geo == 'surrounds': - where_clause += " and ST_DWithin($2, way, 0.0)" - elif check_geo == 'overlaps': - 
where_clause += " and ST_Overlaps($2, way)" - - obs = [] - for ob in objects( - bbox, - { parent_selector: where_clause }, - { # add_columns - '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' - }, - [ 'geometry' ], - [ geom ] - ): - if ob['id'] != current['object']['id'] and ob['__distance'] <= max_distance: - ob['link_tags'] = { - 'distance': eval_metric([ str(ob['__distance']) + 'u', 'px' ]) - } - obs.append(ob) - - obs = sorted(obs, key=lambda ob: ob['__distance'] ) - return obs + obs = sorted(obs, key=lambda o: o[0]['__distance'] ) + for o in obs: + yield((ob, o[0], o[1])) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 57742292..875a298e 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -244,7 +244,7 @@ def get_bbox(_bbox=None): res = plpy.execute(plan, [ _bbox ]) return '[bbox:' + res[0]['bbox_string'] + ']' -def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_value=[]): +def objects_bbox(_bbox, db_selects, options): time_start = datetime.datetime.now() # profiling non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} ways_done = [] @@ -261,8 +261,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # nodes w = [] for t in ('*', 'node', 'point'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') @@ -276,8 +276,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # way areas and multipolygons based on outer tags w = [] for t in ('*', 'area'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in 
db_selects: + w.append(db_selects[t]) if len(w): # query for ways which match query, also get their parent relations and @@ -355,8 +355,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v ]: w = [] for t in types['types']: - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') @@ -383,8 +383,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # relations w = [] for t, type_condition in {'*': '', 'relation': '', 'area': "[type~'^multipolygon|boundary$']"}.items(): - if t in where_clauses: - w.append(where_clauses[t].replace('__TYPE__', 'relation' + type_condition)) + if t in db_selects: + w.append(db_selects[t].replace('__TYPE__', 'relation' + type_condition)) if len(w): q = qry.replace('__QRY__', '((' + ');('.join(w) + ');)') @@ -399,8 +399,8 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v # areas w = [] for t in ('*', 'area'): - if t in where_clauses: - w.append(where_clauses[t]) + if t in db_selects: + w.append(db_selects[t]) if len(w): plan = plpy.prepare("select ST_Y(ST_Centroid($1::geometry)) || ',' || ST_X(ST_Centroid($1::geometry)) as geom", [ 'geometry' ]) @@ -421,7 +421,7 @@ def objects(_bbox, where_clauses, add_columns={}, add_param_type=[], add_param_v time_stop = datetime.datetime.now() # profiling plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) -def objects_by_id(id_list): +def objects_by_id(id_list, options): q = '' multipolygons = [] for i in id_list: @@ -439,52 +439,59 @@ def objects_by_id(id_list): for r in overpass_query(q): yield(assemble_object(r)) -def objects_member_of(member_id, parent_type, parent_conditions, child_conditions): +def objects_member_of(objects, other_selects, self_selects, options): global member_of_cache try: member_of_cache except: member_of_cache = {} - if member_id[0] 
== 'n': - ob_type = 'node' - ob_id = int(member_id[1:]) - elif member_id[0] == 'w': - ob_type = 'way' - ob_id = int(member_id[1:]) - elif member_id[0] == 'r': - ob_type = 'relation' - ob_id = int(member_id[1:]) + for ob in objects: + if ob['id'][0] == 'n': + ob_type = 'node' + ob_id = int(ob['id'][1:]) + elif ob['id'][0] == 'w': + ob_type = 'way' + ob_id = int(ob['id'][1:]) + elif ob['id'][0] == 'r': + ob_type = 'relation' + ob_id = int(ob['id'][1:]) - member_of_cache_id = parent_type + '|' + ob_type + '|' + repr(parent_conditions) + '|' + repr(child_conditions) + member_of_cache_id = ob_type + '|' + repr(other_selects) + '|' + repr(self_selects) - if member_of_cache_id not in member_of_cache: - member_of_cache[member_of_cache_id] = [] - q = '[out:json]' + get_bbox() + ';' + if member_of_cache_id not in member_of_cache: + member_of_cache[member_of_cache_id] = [] + q = '[out:json]' + get_bbox() + ';' - q += '(' + child_conditions.replace('__TYPE__', ob_type) + ')->.a;' + q += '(' + ''.join([ + ss.replace('__TYPE__', ob_type) + for si, ss in self_selects.items() + ]) + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(b' + - ob_type[0] + '.a)') + ');' - q += 'out meta qt geom;' + q += '(' + ''.join([ + ss.replace('__TYPE__', si + '(b' + ob_type[0] + '.a)') + for si, ss in other_selects.items() + ]) + ');' - for r in overpass_query(q): - t = assemble_object(r) - member_of_cache[member_of_cache_id].append(t) + q += 'out meta qt geom;' - for t in member_of_cache[member_of_cache_id]: - for m in t['members']: - if m['member_id'] == member_id: - t['link_tags'] = { - 'sequence_id': m['sequence_id'], - 'member_id': m['member_id'], - } - if 'role' in m: - t['link_tags']['role'] = m['role'] + for r in overpass_query(q): + t = assemble_object(r) + member_of_cache[member_of_cache_id].append(t) - yield(t) + for t in member_of_cache[member_of_cache_id]: + for m in t['members']: + if m['member_id'] == ob['id']: + link_tags = { + 'sequence_id': 
m['sequence_id'], + 'member_id': m['member_id'], + } + if 'role' in m: + link_tags['role'] = m['role'] -def objects_members(relation_id, parent_type, parent_conditions, child_conditions): + yield((ob, t, link_tags)) + +def objects_members(objects, other_selects, self_selects, options): global members_cache try: members_cache @@ -493,61 +500,71 @@ def objects_members(relation_id, parent_type, parent_conditions, child_condition q = '[out:json];' - if relation_id[0] == 'n': - ob_type = 'node' - ob_id = int(relation_id[1:]) - elif relation_id[0] == 'w': - ob_type = 'way' - ob_id = int(relation_id[1:]) - elif relation_id[0] == 'r': - ob_type = 'relation' - ob_id = int(relation_id[1:]) + for ob in objects: + if ob['id'][0] == 'n': + ob_type = 'node' + ob_id = int(ob['id'][1:]) + elif ob['id'][0] == 'w': + ob_type = 'way' + ob_id = int(ob['id'][1:]) + elif ob['id'][0] == 'r': + ob_type = 'relation' + ob_id = int(ob['id'][1:]) + + members_cache_id = ob_type + '|' + repr(other_selects) + '|' + repr(self_selects) + + if members_cache_id not in members_cache: + members_cache[members_cache_id] = { 'self': {}, 'other': [] } + q = '[out:json]' + get_bbox() + ';' + q += '(' + ''.join([ + ss.replace('__TYPE__', ob_type) + for si, ss in self_selects.items() + ]) + ')->.a;' + q += 'out meta qt geom;' + # TODO: out body qt; would be sufficient, but need to adapt assemble_object - members_cache_id = parent_type + '|' + ob_type + '|' + repr(parent_conditions) + '|' + repr(child_conditions) + for r in overpass_query(q): + t = assemble_object(r) + t['type'] = r['type'] + members_cache[members_cache_id]['self'][t['id']] = t - if members_cache_id not in members_cache: - members_cache[members_cache_id] = { 'parents': {}, 'children': [] } - q = '[out:json]' + get_bbox() + ';' + q = '[out:json]' + get_bbox() + ';' - q += '(' + child_conditions.replace('__TYPE__', ob_type) + ');' - q += 'out meta qt geom;' - # TODO: out body qt; would be sufficient, but need to adapt assemble_object + q += '(' 
+ ''.join([ + ss.replace('__TYPE__', ob_type) + for si, ss in self_selects.items() + ]) + ')->.a;' - for r in overpass_query(q): - t = assemble_object(r) - t['type'] = r['type'] - members_cache[members_cache_id]['parents'][t['id']] = t + q += '(' + ''.join([ + ss.replace('__TYPE__', si + '(' + ob_type[0] + '.a)') + for si, ss in other_selects.items() + ]) + ')->.a;' - q = '[out:json]' + get_bbox() + ';' + q += 'out meta qt geom;' + # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object - q += '(' + child_conditions.replace('__TYPE__', ob_type) + ')->.a;' - q += '(' + parent_conditions.replace('__TYPE__', parent_type + '(' + - relation_id[0] + '.a)') + ');' - q += 'out meta qt geom;' - # TODO: .a out body qt; would be sufficient, but need to adapt assemble_object - - for r in overpass_query(q): - t = assemble_object(r) - members_cache[members_cache_id]['children'].append(t) + for r in overpass_query(q): + t = assemble_object(r) + members_cache[members_cache_id]['other'].append(t) - relation = members_cache[members_cache_id]['parents'][relation_id] + relation = members_cache[members_cache_id]['self'][ob['id']] - for t in members_cache[members_cache_id]['children']: - for m in relation['members']: - if m['member_id'] == t['id']: - t['link_tags'] = { + for t in members_cache[members_cache_id]['other']: + for m in relation['members']: + if m['member_id'] == t['id']: + link_tags = { 'sequence_id': m['sequence_id'], 'member_id': m['member_id'], - } - if 'role' in m: - t['link_tags']['role'] = m['role'] + } + if 'role' in m: + link_tags['role'] = m['role'] - yield(t) + yield((ob, t, link_tags)) -def objects_near(max_distance, ob, parent_type, parent_conditions, child_conditions, check_geo=None): - cache_id = 'objects_near' + '|' + parent_type + '|' + repr(parent_conditions) +def objects_near(objects, other_selects, self_selects, options): + cache_id = 'objects_near' + '|' + repr(other_selects) + '|' + repr(self_selects) + '|' + repr(options) - 
max_distance = to_float(eval_metric([ max_distance, 'u' ])) + max_distance = to_float(eval_metric([ options['distance'], 'u' ])) if max_distance is None: return @@ -560,37 +577,35 @@ def objects_near(max_distance, ob, parent_type, parent_conditions, child_conditi res = plpy.execute(plan, [ render_context['bbox'], max_distance ]) bbox = res[0]['r'] - for t in objects(bbox, { parent_type: parent_conditions }): + for t in objects_bbox(bbox, other_selects, options): cache.add(t) - if ob: - geom = ob['geo'] - elif 'geo' in current['properties'][current['pseudo_element']]: - geom = current['properties'][current['pseudo_element']]['geo'] - else: - geom = current['object']['geo'] - - if max_distance == 0: - bbox = geom - else: - plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) - res = plpy.execute(plan, [ geom, max_distance ]) - bbox = res[0]['r'] - - if check_geo == 'within': + if not 'check_geo' in options: + pass + elif options['check_geo'] == 'within': where_clause += " and ST_DWithin(geo, $1, 0.0)" - elif check_geo == 'surrounds': + elif options['check_geo'] == 'surrounds': where_clause += " and ST_DWithin($1, geo, 0.0)" - elif check_geo == 'overlaps': + elif options['check_geo'] == 'overlaps': where_clause += " and ST_Overlaps($1, geo)" - plan = cache.prepare('select * from (select *, ST_Distance(ST_Transform($1, {unit.srs}), ST_Transform(geo, {unit.srs})) dist from {table} where geo && $2 offset 0) t order by dist asc', [ 'geometry', 'geometry' ]) - for t in cache.cursor(plan, [ geom, bbox ]): - ob = t['data'] + for ob in objects: + geom = ob['geo'] + + if max_distance == 0: + bbox = geom + else: + plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ geom, max_distance ]) + bbox = res[0]['r'] + + plan = cache.prepare('select * from (select *, 
ST_Distance(ST_Transform($1, {unit.srs}), ST_Transform(geo, {unit.srs})) dist from {table} where geo && $2 offset 0) t order by dist asc', [ 'geometry', 'geometry' ]) + for t in cache.cursor(plan, [ geom, bbox ]): + o = t['data'] - if ob['id'] != current['object']['id'] and t['dist'] <= max_distance: - ob['link_tags'] = { - 'distance': eval_metric([ str(t['dist']) + 'u', 'px' ]) - } + if o['id'] != ob['id'] and t['dist'] <= max_distance: + link_tags = { + 'distance': eval_metric([ str(t['dist']) + 'u', 'px' ]) + } - yield ob + yield (ob, o, link_tags) From 30f09cd4cb2e65558f78ff4838d60229064265c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 15 Dec 2014 11:14:16 +0100 Subject: [PATCH 098/209] Compile DB Selects: bugfix, create selector clones via deepcopy --- pgmapcss/compiler/compile_db_selects.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index ecaad088..0aac0a35 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,3 +1,5 @@ +import copy + # takes a list of conditions as input and returns several condition combinations def resolve_set_statements(statement, stat): ret = [ [] ] @@ -86,9 +88,9 @@ def compile_selectors_db(statements, selector_index, stat): conditions = [] for i in statements: if type(i) == int: - _statement = stat['statements'][i].copy() + _statement = copy.deepcopy(stat['statements'][i]) else: - _statement = i.copy() + _statement = copy.deepcopy(i) for c in resolve_set_statements(_statement, stat): _statement['selector']['conditions'] = c From 3d49f1869ed1f693024c3740575084bd7cd55401 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 15 Dec 2014 10:42:55 +0100 Subject: [PATCH 099/209] Compile DB Selects: when resolving statements, prohibit duplicate statements --- pgmapcss/compiler/compile_db_selects.py | 9 ++++++--- 
1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 0aac0a35..bff94d2c 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,8 +1,11 @@ import copy # takes a list of conditions as input and returns several condition combinations -def resolve_set_statements(statement, stat): +def resolve_set_statements(statement, done, stat): ret = [ [] ] + if statement['id'] in done: + return [ [] ] + done.append(statement['id']) # iterate over all conditions in the statement for condition in statement['selector']['conditions']: @@ -19,7 +22,7 @@ def resolve_set_statements(statement, stat): # recurse into resolve_set_statements, to also resolve conditions in # the statements where set statements happened set_statements = [ - resolve_set_statements(s, stat) + resolve_set_statements(s, done, stat) for s in set_statements ] @@ -92,7 +95,7 @@ def compile_selectors_db(statements, selector_index, stat): else: _statement = copy.deepcopy(i) - for c in resolve_set_statements(_statement, stat): + for c in resolve_set_statements(_statement, [], stat): _statement['selector']['conditions'] = c if selector_index is None: selector = _statement['selector'] From 2d7123b7f9fa607eeed572a9a24ebf679bcd1038 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Fri, 19 Dec 2014 09:52:04 +0100 Subject: [PATCH 100/209] Eval functions: catch errors from postgis functions - when error occurs, print warning and return none --- pgmapcss/eval/eval_area.py | 11 ++++++++-- pgmapcss/eval/eval_azimuth.py | 21 +++++++++++++------- pgmapcss/eval/eval_buffer.py | 8 ++++++-- pgmapcss/eval/eval_centroid.py | 8 ++++++-- pgmapcss/eval/eval_convex_hull.py | 8 ++++++-- pgmapcss/eval/eval_intersection.py | 8 ++++++-- pgmapcss/eval/eval_is_closed.py | 8 ++++++-- pgmapcss/eval/eval_is_left_hand_traffic.py | 8 ++++++-- 
pgmapcss/eval/eval_is_right_hand_traffic.py | 8 ++++++-- pgmapcss/eval/eval_line.py | 6 +++++- pgmapcss/eval/eval_line_interpolate_point.py | 8 ++++++-- pgmapcss/eval/eval_line_length.py | 10 +++++++--- pgmapcss/eval/eval_line_locate_azimuth.py | 8 ++++++-- pgmapcss/eval/eval_line_locate_point.py | 8 ++++++-- pgmapcss/eval/eval_line_merge.py | 16 +++++++++------ pgmapcss/eval/eval_line_part.py | 8 ++++++-- pgmapcss/eval/eval_rotate.py | 8 ++++++-- pgmapcss/eval/eval_translate.py | 8 ++++++-- 18 files changed, 123 insertions(+), 45 deletions(-) diff --git a/pgmapcss/eval/eval_area.py b/pgmapcss/eval/eval_area.py index 7c88a481..408cd469 100644 --- a/pgmapcss/eval/eval_area.py +++ b/pgmapcss/eval/eval_area.py @@ -5,14 +5,21 @@ def eval_area(param): if len(param) == 0: return '' - plan = plpy.prepare('select ST_Area(ST_Transform($1, 900913)) as area', ['geometry']) - res = plpy.execute(plan, param) + try: + plan = plpy.prepare('select ST_Area(ST_Transform($1, 900913)) as area', ['geometry']) + res = plpy.execute(plan, param) + except Exception as err: + plpy.warning('{} | Eval::area({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' zoom = eval_metric(['1u']) if zoom == '': return '' + if res[0]['area'] is None: + return '' + ret = res[0]['area'] * float(zoom) ** 2 return float_to_str(ret) diff --git a/pgmapcss/eval/eval_azimuth.py b/pgmapcss/eval/eval_azimuth.py index ac0e1179..879ac978 100644 --- a/pgmapcss/eval/eval_azimuth.py +++ b/pgmapcss/eval/eval_azimuth.py @@ -2,16 +2,23 @@ class config_eval_azimuth(config_base): mutable = 2 def eval_azimuth(param): - if len(param) < 2: - return '' + if len(param) < 2: + return '' - if param[0] is None or param[0] == '' or param[1] is None or param[1] == '': - return '' + if param[0] is None or param[0] == '' or param[1] is None or param[1] == '': + return '' - plan = plpy.prepare('select ST_Azimuth($1, $2) as r', ['geometry', 'geometry']) - res = plpy.execute(plan, param) + try: + plan = 
plpy.prepare('select ST_Azimuth($1, $2) as r', ['geometry', 'geometry']) + res = plpy.execute(plan, param) + except Exception as err: + plpy.warning('{} | Eval::azimuth({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' + + if res[0]['r'] is None: + return '' - return float_to_str(FROM_RADIANS(res[0]['r'])) + return float_to_str(FROM_RADIANS(res[0]['r'])) # TESTS # IN ['010100002031BF0D0033333333F4EE2F41E17A14DE3A8A5641', '010100002031BF0D0052B81E8583EF2F417B14AE77FC895641'] diff --git a/pgmapcss/eval/eval_buffer.py b/pgmapcss/eval/eval_buffer.py index f460d007..00c22d1f 100644 --- a/pgmapcss/eval/eval_buffer.py +++ b/pgmapcss/eval/eval_buffer.py @@ -10,8 +10,12 @@ def eval_buffer(param): if radius == '': return '' - plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform($1, 900913), $2), {db.srs}) as r', ['geometry', 'float']) - res = plpy.execute(plan, [ param[0], float(radius) ]) + try: + plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform($1, 900913), $2), {db.srs}) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ param[0], float(radius) ]) + except Exception as err: + plpy.warning('{} | Eval::buffer({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_centroid.py b/pgmapcss/eval/eval_centroid.py index 34e22532..a3c73b5b 100644 --- a/pgmapcss/eval/eval_centroid.py +++ b/pgmapcss/eval/eval_centroid.py @@ -5,8 +5,12 @@ def eval_centroid(param): if not len(param): return '' - plan = plpy.prepare('select ST_Centroid($1) as r', ['geometry']) - res = plpy.execute(plan, param) + try: + plan = plpy.prepare('select ST_Centroid($1) as r', ['geometry']) + res = plpy.execute(plan, param) + except Exception as err: + plpy.warning('{} | Eval::centroid({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_convex_hull.py b/pgmapcss/eval/eval_convex_hull.py index 
097f92ab..45bd91a9 100644 --- a/pgmapcss/eval/eval_convex_hull.py +++ b/pgmapcss/eval/eval_convex_hull.py @@ -5,8 +5,12 @@ def eval_convex_hull(param): if len(param) == 0: return '' - plan = plpy.prepare('select ST_ConvexHull($1) as r', ['geometry']) - res = plpy.execute(plan, param) + try: + plan = plpy.prepare('select ST_ConvexHull($1) as r', ['geometry']) + res = plpy.execute(plan, param) + except Exception as err: + plpy.warning('{} | Eval::convex_hull({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_intersection.py b/pgmapcss/eval/eval_intersection.py index 3a736cd5..b9946209 100644 --- a/pgmapcss/eval/eval_intersection.py +++ b/pgmapcss/eval/eval_intersection.py @@ -5,8 +5,12 @@ def eval_intersection(param): if len(param) < 2: return '' - plan = plpy.prepare('select ST_Intersection($1, $2) as geo', ['geometry', 'geometry']) - res = plpy.execute(plan, param) + try: + plan = plpy.prepare('select ST_Intersection($1, $2) as geo', ['geometry', 'geometry']) + res = plpy.execute(plan, param) + except Exception as err: + plpy.warning('{} | Eval::intersection({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['geo'] diff --git a/pgmapcss/eval/eval_is_closed.py b/pgmapcss/eval/eval_is_closed.py index 54078ea7..533b8388 100644 --- a/pgmapcss/eval/eval_is_closed.py +++ b/pgmapcss/eval/eval_is_closed.py @@ -11,7 +11,11 @@ def eval_is_closed(param): else: geo = current['object']['geo'] - plan = plpy.prepare('select ST_GeometryType($1) in (\'ST_Polygon\', \'ST_MultiPolygon\') or (ST_GeometryType($1) in (\'ST_Line\') and ST_Line_Interpolate_Point($1, 0.0) = ST_Line_Interpolate_Point($1, 1.0)) as r', ['geometry']) - res = plpy.execute(plan, [ geo ]) + try: + plan = plpy.prepare('select ST_GeometryType($1) in (\'ST_Polygon\', \'ST_MultiPolygon\') or (ST_GeometryType($1) in (\'ST_Line\') and ST_Line_Interpolate_Point($1, 0.0) = ST_Line_Interpolate_Point($1, 
1.0)) as r', ['geometry']) + res = plpy.execute(plan, [ geo ]) + except Exception as err: + plpy.warning('{} | Eval::is_closed({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return 'true' if res[0]['r'] else 'false' diff --git a/pgmapcss/eval/eval_is_left_hand_traffic.py b/pgmapcss/eval/eval_is_left_hand_traffic.py index 6460d7c5..8ca96fe5 100644 --- a/pgmapcss/eval/eval_is_left_hand_traffic.py +++ b/pgmapcss/eval/eval_is_left_hand_traffic.py @@ -26,8 +26,12 @@ def eval_is_left_hand_traffic(param): if not geo: return 'partly' - plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) - res = plpy.execute(plan, [ geo ]) + try: + plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) + res = plpy.execute(plan, [ geo ]) + except Exception as err: + plpy.warning('{} | Eval::is_left_hand_traffic({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' if len(res) == 0: diff --git a/pgmapcss/eval/eval_is_right_hand_traffic.py b/pgmapcss/eval/eval_is_right_hand_traffic.py index d3fddaa2..c799e745 100644 --- a/pgmapcss/eval/eval_is_right_hand_traffic.py +++ b/pgmapcss/eval/eval_is_right_hand_traffic.py @@ -23,8 +23,12 @@ def eval_is_right_hand_traffic(param): if not geo: return 'partly' - plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) - res = plpy.execute(plan, [ geo ]) + try: + plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) + res = plpy.execute(plan, [ geo ]) + except Exception as err: + plpy.warning('{} | Eval::is_right_hand_traffic({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' if len(res) == 0: diff --git a/pgmapcss/eval/eval_line.py 
b/pgmapcss/eval/eval_line.py index 3f6a5929..7d729f51 100644 --- a/pgmapcss/eval/eval_line.py +++ b/pgmapcss/eval/eval_line.py @@ -15,7 +15,11 @@ def eval_line(param): else: plan = plpy.prepare('select ST_MakeLine($1) as r', ['geometry[]']) - res = plpy.execute(plan, [param]) + try: + res = plpy.execute(plan, [param]) + except Exception as err: + plpy.warning('{} | Eval::line({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_interpolate_point.py b/pgmapcss/eval/eval_line_interpolate_point.py index 966cc54e..935514a1 100644 --- a/pgmapcss/eval/eval_line_interpolate_point.py +++ b/pgmapcss/eval/eval_line_interpolate_point.py @@ -21,8 +21,12 @@ def eval_line_interpolate_point(param): elif f > 1.0: f = 1.0 - plan = plpy.prepare('select ST_Line_Interpolate_Point($1, $2) as r', ['geometry', 'float']) - res = plpy.execute(plan, [ param[0], float(f) ]) + try: + plan = plpy.prepare('select ST_Line_Interpolate_Point($1, $2) as r', ['geometry', 'float']) + res = plpy.execute(plan, [ param[0], float(f) ]) + except Exception as err: + plpy.warning('{} | Eval::line_interpolate_point({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_length.py b/pgmapcss/eval/eval_line_length.py index af445a11..655a3cb6 100644 --- a/pgmapcss/eval/eval_line_length.py +++ b/pgmapcss/eval/eval_line_length.py @@ -8,9 +8,13 @@ def eval_line_length(param): if not param[0]: return '' - plan = plpy.prepare('select ST_Length(ST_Transform($1, {unit.srs})) as r', ['geometry']) - res = plpy.execute(plan, [param[0]]) - l = res[0]['r'] + try: + plan = plpy.prepare('select ST_Length(ST_Transform($1, {unit.srs})) as r', ['geometry']) + res = plpy.execute(plan, [param[0]]) + l = res[0]['r'] + except Exception as err: + plpy.warning('{} | Eval::line_length({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return eval_metric([ 
repr(l) + 'u' ]) diff --git a/pgmapcss/eval/eval_line_locate_azimuth.py b/pgmapcss/eval/eval_line_locate_azimuth.py index d54a7a88..7c98c716 100644 --- a/pgmapcss/eval/eval_line_locate_azimuth.py +++ b/pgmapcss/eval/eval_line_locate_azimuth.py @@ -35,8 +35,12 @@ def eval_line_locate_azimuth(param): if f2 > l: f2 = l - plan = plpy.prepare('select degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $2), ST_Line_Interpolate_Point($1, $3))) as r1, degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $3), ST_Line_Interpolate_Point($1, $4))) as r2', ['geometry', 'float', 'float', 'float']) - res = plpy.execute(plan, [ param[0], f1 / l, f / l, f2 / l ]) + try: + plan = plpy.prepare('select degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $2), ST_Line_Interpolate_Point($1, $3))) as r1, degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $3), ST_Line_Interpolate_Point($1, $4))) as r2', ['geometry', 'float', 'float', 'float']) + res = plpy.execute(plan, [ param[0], f1 / l, f / l, f2 / l ]) + except Exception as err: + plpy.warning('{} | Eval::line_locate_azimuth({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' r1 = res[0]['r1'] r2 = res[0]['r2'] diff --git a/pgmapcss/eval/eval_line_locate_point.py b/pgmapcss/eval/eval_line_locate_point.py index 59059ac1..fc183164 100644 --- a/pgmapcss/eval/eval_line_locate_point.py +++ b/pgmapcss/eval/eval_line_locate_point.py @@ -8,8 +8,12 @@ def eval_line_locate_point(param): if not param[0] or not param[1]: return '' - plan = plpy.prepare('select ST_Line_Locate_Point($1, $2) * ST_Length(ST_Transform($1, {unit.srs})) as r', ['geometry', 'geometry']) - res = plpy.execute(plan, [ param[0], param[1] ]) + try: + plan = plpy.prepare('select ST_Line_Locate_Point($1, $2) * ST_Length(ST_Transform($1, {unit.srs})) as r', ['geometry', 'geometry']) + res = plpy.execute(plan, [ param[0], param[1] ]) + except Exception as err: + plpy.warning('{} | Eval::line_locate_point({}): Exception: {}'.format(current['object']['id'], param, err)) 
+ return '' return eval_metric([ repr(res[0]['r']) + 'u' ]) diff --git a/pgmapcss/eval/eval_line_merge.py b/pgmapcss/eval/eval_line_merge.py index 32d9476e..7fc21a29 100644 --- a/pgmapcss/eval/eval_line_merge.py +++ b/pgmapcss/eval/eval_line_merge.py @@ -8,13 +8,17 @@ def eval_line_merge(param): if len(param) == 1: param = param[0].split(';'); - if len(param) == 1: - plan = plpy.prepare('select ST_LineMerge($1) as r', ['geometry']) - res = plpy.execute(plan, [param[0]]) + try: + if len(param) == 1: + plan = plpy.prepare('select ST_LineMerge($1) as r', ['geometry']) + res = plpy.execute(plan, [param[0]]) - else: - plan = plpy.prepare('select ST_LineMerge(ST_Collect($1)) as r', ['geometry[]']) - res = plpy.execute(plan, [param]) + else: + plan = plpy.prepare('select ST_LineMerge(ST_Collect($1)) as r', ['geometry[]']) + res = plpy.execute(plan, [param]) + except Exception as err: + plpy.warning('{} | Eval::line_merge({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_part.py b/pgmapcss/eval/eval_line_part.py index d3c1bee1..8244d4e9 100644 --- a/pgmapcss/eval/eval_line_part.py +++ b/pgmapcss/eval/eval_line_part.py @@ -49,8 +49,12 @@ def eval_line_part(param): if pos1 > length: pos1 = length - plan = plpy.prepare('select ST_Transform(ST_Line_Substring(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float' ]) - res = plpy.execute(plan, [ param[0], pos0 / length, pos1 / length ]) + try: + plan = plpy.prepare('select ST_Transform(ST_Line_Substring(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float' ]) + res = plpy.execute(plan, [ param[0], pos0 / length, pos1 / length ]) + except Exception as err: + plpy.warning('{} | Eval::line_part({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_rotate.py b/pgmapcss/eval/eval_rotate.py index e0e59366..0d6fe3a3 
100644 --- a/pgmapcss/eval/eval_rotate.py +++ b/pgmapcss/eval/eval_rotate.py @@ -19,8 +19,12 @@ def eval_rotate(param): angle = TO_RADIANS(angle) - plan = plpy.prepare('select ST_Translate(ST_Rotate(ST_Translate($1, -ST_X($3), -ST_Y($3)), $2), ST_X($3), ST_Y($3)) as r', ['geometry', 'float', 'geometry']) - res = plpy.execute(plan, [ param[0], angle, center ]) + try: + plan = plpy.prepare('select ST_Translate(ST_Rotate(ST_Translate($1, -ST_X($3), -ST_Y($3)), $2), ST_X($3), ST_Y($3)) as r', ['geometry', 'float', 'geometry']) + res = plpy.execute(plan, [ param[0], angle, center ]) + except Exception as err: + plpy.warning('{} | Eval::rotate({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_translate.py b/pgmapcss/eval/eval_translate.py index 2322d522..f4755c7f 100644 --- a/pgmapcss/eval/eval_translate.py +++ b/pgmapcss/eval/eval_translate.py @@ -10,8 +10,12 @@ def eval_translate(param): x = to_float(eval_metric([param[1], 'u'])) y = to_float(eval_metric([param[2], 'u'])) - plan = plpy.prepare('select ST_Transform(ST_Translate(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float']) - res = plpy.execute(plan, [param[0], x, y ]) + try: + plan = plpy.prepare('select ST_Transform(ST_Translate(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float']) + res = plpy.execute(plan, [param[0], x, y ]) + except Exception as err: + plpy.warning('{} | Eval::translate({}): Exception: {}'.format(current['object']['id'], param, err)) + return '' return res[0]['r'] From eaa8524852d7dc15fbc7bf2201ee0b29e96f1bb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 09:47:59 +0100 Subject: [PATCH 101/209] Compile DB Selects: first build list of selectors, then compile --- pgmapcss/compiler/compile_db_selects.py | 36 ++++++++++++++----------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git 
a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index bff94d2c..9d4c61fc 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -88,7 +88,8 @@ def filter_selectors(filter, stat): return list(set(where_selectors)) def compile_selectors_db(statements, selector_index, stat): - conditions = [] + selectors = {} + for i in statements: if type(i) == int: _statement = copy.deepcopy(stat['statements'][i]) @@ -102,27 +103,32 @@ def compile_selectors_db(statements, selector_index, stat): else: selector = _statement['selector'][selector_index] - conditions.append( - ( - selector['type'], - stat['database'].compile_selector(selector) - ) - ) + if not selector['type'] in selectors: + selectors[selector['type']] = [] + + selectors[selector['type']].append(selector) + + # compile each selector + conditions = { + t: [ + stat['database'].compile_selector(selector) + for selector in s + ] + for t, s in selectors.items() + } # compile all selectors # TODO: define list of possible object_types # TODO: how to handle wildcard object type? 
- # get list of types and make list of conditions of each type - types = [ t for t, cs in conditions if t != True ] + # remove all invalid conditions from list conditions = { t: [ - cs - for t2, cs in conditions - if t == t2 - if cs != False - ] - for t in types + c + for c in cs + if c != False + ] + for t, cs in conditions.items() } # merge all conditions for each types together From df1e238d17c80050b5b7f3c1e22663c887cfdd2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 10:49:18 +0100 Subject: [PATCH 102/209] Compile DB Selects: check if next selector is sub selector of any other --- pgmapcss/compiler/compile_db_selects.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 9d4c61fc..6eb8fb89 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -87,6 +87,15 @@ def filter_selectors(filter, stat): # uniq list return list(set(where_selectors)) +def check_is_sub_selector(selector, master_selector): + is_sub = True + for c in master_selector['conditions']: + if not c in selector['conditions']: + is_sub = False + break + + return is_sub + def compile_selectors_db(statements, selector_index, stat): selectors = {} @@ -106,7 +115,16 @@ def compile_selectors_db(statements, selector_index, stat): if not selector['type'] in selectors: selectors[selector['type']] = [] - selectors[selector['type']].append(selector) + # check if the current selector is a sub selector of any other -> + # then we don't need to add it + is_sub = False + for s in selectors[selector['type']]: + if check_is_sub_selector(selector, s): + is_sub = True + break + + if not is_sub: + selectors[selector['type']].append(selector) # compile each selector conditions = { From 7972ec850b6639e9fee963cddf6bb648405f0378 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= 
Date: Sun, 21 Dec 2014 10:54:21 +0100 Subject: [PATCH 103/209] Compile DB Selects: check if next selector is master selector of others --- pgmapcss/compiler/compile_db_selects.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 6eb8fb89..00288e8b 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -124,6 +124,14 @@ def compile_selectors_db(statements, selector_index, stat): break if not is_sub: + # check if the current selector is a master selector of others + # -> remove those + selectors[selector['type']] = [ + s + for s in selectors[selector['type']] + if not check_is_sub_selector(s, selector) + ] + selectors[selector['type']].append(selector) # compile each selector From 97225dc581832b38cd2d4fb989f308e7e1000e8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 11:17:24 +0100 Subject: [PATCH 104/209] Compile DB Selects: check sub selectors: handle has_tag conditions --- pgmapcss/compiler/compile_db_selects.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index 00288e8b..b7833ca9 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -91,8 +91,17 @@ def check_is_sub_selector(selector, master_selector): is_sub = True for c in master_selector['conditions']: if not c in selector['conditions']: - is_sub = False - break + # also check for has_tag conditions + has_tag = False + if c['op'] == 'has_tag': + for oc in selector['conditions']: + if oc['op'] not in ('key_regexp', 'eval') and \ + oc['key'] == c['key']: + has_tag = True + + if not has_tag: + is_sub = False + break return is_sub From 759e803e062be02e3e63e17488a85b43b1d1f7fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 
Dec 2014 17:07:06 +0100 Subject: [PATCH 105/209] DB/Overpass: Bugfix, object_members/_member_of: add parent queries --- pgmapcss/db/overpass/db_functions.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 1c6e2a4a..1c550037 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -494,8 +494,9 @@ def objects_member_of(objects, other_selects, self_selects, options): replacements = { '__BBOX__': '(' + get_bbox() + ')' } q = '[out:json][bbox:' + get_bbox() + '];' - if 'parent_query' in self_selects[ob_type]: - q += self_selects[ob_type]['parent_query'] + for si, ss in self_selects.items(): + if 'parent_query' in ss: + q += ss['parent_query'] for oi, os in other_selects.items(): if 'parent_query' in os: q += os['parent_query'] @@ -558,8 +559,9 @@ def objects_members(objects, other_selects, self_selects, options): replacements = { '__BBOX__': '(' + get_bbox() + ')' } q = '[out:json][bbox:' + get_bbox() + '];' - if 'parent_query' in self_selects[ob_type]: - q += self_selects[ob_type]['parent_query'] + for si, ss in self_selects.items(): + if 'parent_query' in ss: + q += ss['parent_query'] q += '(' + ''.join([ ss['query'].replace('__TYPE__', ob_type) for si, ss in self_selects.items() @@ -576,8 +578,9 @@ def objects_members(objects, other_selects, self_selects, options): q = '[out:json][bbox:' + get_bbox() + '];' - if 'parent_query' in self_selects[ob_type]: - q += self_selects[ob_type]['parent_query'] + for si, ss in self_selects.items(): + if 'parent_query' in ss: + q += ss['parent_query'] for oi, os in other_selects.items(): if 'parent_query' in os: q += os['parent_query'] From 2b761a28ef6bcd42fabd8133975375e87ae94d81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 17:46:32 +0100 Subject: [PATCH 106/209] Compile DB Selects: when checking sub selectors, check links --- 
pgmapcss/compiler/compile_db_selects.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index b7833ca9..b87e195c 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -88,6 +88,19 @@ def filter_selectors(filter, stat): return list(set(where_selectors)) def check_is_sub_selector(selector, master_selector): + # if master_selector has relationship conditions, check if these match with + # the current selectors conditions + if 'parent' in master_selector: + if not 'parent' in selector: + return False + + if selector['link']['type'] != master_selector['link']['type'] or\ + not check_is_sub_selector(selector['link'], master_selector['link']) or\ + not check_is_sub_selector(selector['parent'], master_selector['parent']): + return False + + # check if all the master_conditions are also in current selector's + # condition is_sub = True for c in master_selector['conditions']: if not c in selector['conditions']: From 8e8caec92330b99aa6e5c7933dde9d47415d4e72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 17:51:16 +0100 Subject: [PATCH 107/209] DB/Overpass: bugfix, handle parent conditions in convert_to_regexp --- pgmapcss/db/overpass/db.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index ca097a9d..228361a7 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -39,10 +39,12 @@ def value_to_regexp(self, s): return s def convert_to_regexp(self, s): - if s[0] in ('regexp', 'iregexp', 'isnot', 'notregexp', 'notiregexp'): + if s[0] in ('regexp', 'iregexp', 'isnot', 'notregexp', 'notiregexp', 'parent_regexp', 'parent_iregexp', 'parent_isnot', 'parent_notregexp', 'parent_notiregexp'): return s if s[0] == 'is': return ('regexp', s[1], { '^' + self.value_to_regexp(s[2]) + '$' }) + if s[0] == 
'parent_is': + return ('parent_regexp', s[1], { '^' + self.value_to_regexp(s[2]) + '$' }) def compile_condition_overpass(self, condition, tag_type, filter): ret = None From b661bac637393b5cca1fffa86b127941d732d572 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 21 Dec 2014 19:12:12 +0100 Subject: [PATCH 108/209] Compile DB Select: Bugfix, make sure that selector does not get modified --- pgmapcss/compiler/compile_db_selects.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index b87e195c..f8de2977 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -134,6 +134,9 @@ def compile_selectors_db(statements, selector_index, stat): else: selector = _statement['selector'][selector_index] + # make sure that selector does not get modified + selector = copy.deepcopy(selector) + if not selector['type'] in selectors: selectors[selector['type']] = [] From 588896c0ee0fbf0ae70daca4947d87b8d96590cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 22 Dec 2014 12:02:31 +0100 Subject: [PATCH 109/209] DB/osm2pgsql: type=route/multipolygon is not set in planet_osm_line for relations --- pgmapcss/db/osm2pgsql/db.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index a46071f5..b87595f0 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -78,6 +78,11 @@ def tag_type(self, key, condition, selector): if selector['type'] in ('way', 'line', 'area'): type = 'way' + # type=route, type=multipolygon is not set for relations + # TODO: a relation can also be an area -> how to handle this? + if selector['type'] in ('relation') and key in ('type'): + return None + if type and self.stat['config']['db.columns.' + type]: if key in self.stat['config']['db.columns.' 
+ type]: return ( 'column', key ) From 0ab11befd033a4787d509ebcd50c4f2577286881 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 27 Dec 2014 15:27:19 +0100 Subject: [PATCH 110/209] DB/osm2pgsql: first attempt in optimizing parent relationship queries --- pgmapcss/db/osm2pgsql/db.py | 9 +++++++++ pgmapcss/db/osm2pgsql/db_functions.py | 8 ++++++++ pgmapcss/db/postgresql_db/db.py | 15 +++++++-------- 3 files changed, 24 insertions(+), 8 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index b87595f0..2a22a9c8 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -97,3 +97,12 @@ def compile_modify_id(self, key, value): return format(-int(value[1:])) else: return format(value[1:]) + + def compile_selector(self, selector, prefix=''): + ret = postgresql_db.compile_selector(self, selector, prefix=prefix) + + if 'parent' in selector and selector['link']['type'] in ('', '>'): + parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') + ret += ' and osm_id in (select cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'w\')'; + + return ret diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index ca1a8560..88f42daa 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -5,8 +5,13 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], qry = '' bbox = '' + replacements = { + 'parent_bbox': '', + } + if _bbox is not None: bbox = 'way && $1 and ST_Intersects(way, $1::geometry) and' + replacements['parent_bbox'] = 'way && $1 and ST_Intersects(way, $1::geometry) and' if 
len(add_columns): add_columns_qry = ', ' + ', '.join([ @@ -91,6 +96,8 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], from planet_osm_line where osm_id>0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) + qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) + plpy.warning(qry) plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) @@ -141,6 +148,7 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], from planet_osm_line where osm_id<0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) + qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index c363f3ed..3b50ff08 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -22,13 +22,12 @@ def merge_conditions(self, conditions): return conditions - def compile_condition_hstore_value(self, condition, tag_type, filter): + def compile_condition_hstore_value(self, condition, tag_type, filter, prefix=''): ret = None negate = False key = tag_type[1] column = tag_type[2] op = condition['op'] - prefix = '' if op[0:2] == '! 
': op = op[2:] @@ -124,7 +123,7 @@ def compile_condition_hstore_value(self, condition, tag_type, filter): return ret - def compile_condition_column(self, condition, tag_type, filter): + def compile_condition_column(self, condition, tag_type, filter, prefix=''): ret = None key = tag_type[1] op = condition['op'] @@ -236,7 +235,7 @@ def compile_condition_column(self, condition, tag_type, filter): return ret - def compile_condition(self, condition, selector, filter={}): + def compile_condition(self, condition, selector, filter={}, prefix=''): ret = set() # depending on the tag type compile the specified condition @@ -245,9 +244,9 @@ def compile_condition(self, condition, selector, filter={}): if tag_type is None: pass elif tag_type[0] == 'hstore-value': - ret.add(self.compile_condition_hstore_value(condition, tag_type, filter)) + ret.add(self.compile_condition_hstore_value(condition, tag_type, filter, prefix=prefix)) elif tag_type[0] == 'column': - ret.add(self.compile_condition_column(condition, tag_type, filter)) + ret.add(self.compile_condition_column(condition, tag_type, filter, prefix=prefix)) else: raise CompileError('unknown tag type {}'.format(tag_type)) @@ -259,12 +258,12 @@ def compile_condition(self, condition, selector, filter={}): # merge conditions together, return return '(' + ' or '.join(ret) + ')' - def compile_selector(self, selector): + def compile_selector(self, selector, prefix=''): filter = {} filter['object_type'] = selector['type'] ret = { - self.compile_condition(c, selector, filter) or 'true' + self.compile_condition(c, selector, filter, prefix=prefix) or 'true' for c in selector['conditions'] } From 91b990fef474b2818718df866f3ea768eb6a0093 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 17:40:09 +0100 Subject: [PATCH 111/209] DOC: Remove install instructions for Ubuntu 12.04 --- ...Install_on_Ubuntu_12.04_with_Mapnik_2.0.md | 60 ------------------- ...Install_on_Ubuntu_12.04_with_Mapnik_2.2.md 
| 60 ------------------- 2 files changed, 120 deletions(-) delete mode 100644 doc/Install_on_Ubuntu_12.04_with_Mapnik_2.0.md delete mode 100644 doc/Install_on_Ubuntu_12.04_with_Mapnik_2.2.md diff --git a/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.0.md b/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.0.md deleted file mode 100644 index cfc36a94..00000000 --- a/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.0.md +++ /dev/null @@ -1,60 +0,0 @@ -Installation on a plain Ubuntu 12.04.3 Server: - -I recommend [installing Mapnik 2.2 instead](Install pgmapcss with Mapnik_2.2 on Ubuntu_12.04.md). - -Install additional packages: -```sh -sudo apt-get install postgresql postgresql-contrib postgresql-9.1-postgis python-mapnik2 git osm2pgsql python3 python3-setuptools python3-postgresql python3-dev postgresql-plpython3 python3-wand -``` - -More dependencies: -* pghstore - -```sh -git clone https://github.com/plepe/pghstore.git -cd pghstore -python3 setup.py build -sudo python3 setup.py install -``` - -Create database: -```sh -sudo su - postgres -createdb test -createuser -P user -# you may say 'y' for superuser -exit -``` -Initialize database: -```sh -psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create extension hstore" -psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create language plpython3u" -psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql -psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/postgresql/9.1/contrib/postgis-1.5/spatial_ref_sys.sql -``` - -Download an OSM file and import to database: -```sh -osm2pgsql -dtest -Uuser -Hlocalhost -W -s -S /usr/share/osm2pgsql/default.style --hstore -G azores-latest.osm.bz2 -``` - -Warning! The osm2pgsql program packaged with Ubuntu 12.04 still uses 32bit ID space for OSM objects. 
Finally it will no longer be possible to store all objects; also some queries to the database assume 64bit and PostgreSQL can't use database indexes - expect slow behaviour. See [the installing Mapnik 2.2 guide](Install pgmapcss with Mapnik_2.2 on Ubuntu_12.04.md) how to install a new osm2pgsql. - -Clone pgmapcss: -```sh -git clone https://github.com/plepe/pgmapcss.git -cd pgmapcss -python3 setup.py build -sudo python3 setup.py install -``` - -Compile 'test.mapcss' file and install database functions: -``` -pgmapcss -dtest -uuser -pPASSWORD -tmapnik-2.0 test -``` - -You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): -* [Render an image](https://github.com/plepe/mapnik-render-image) -* [Run as WMS (Web Map Service)](https://github.com/mapbox/landspeed.js) -* [Run as TMS (Tile Map Service) with Apache2 and mod_tile](https://github.com/openstreetmap/mod_tile) -* [View in GUI](https://github.com/mapnik/mapnik/wiki/MapnikViewer) diff --git a/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.2.md b/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.2.md deleted file mode 100644 index 8b951255..00000000 --- a/doc/Install_on_Ubuntu_12.04_with_Mapnik_2.2.md +++ /dev/null @@ -1,60 +0,0 @@ -Installation on a plain Ubuntu 12.04.3 Server: - -Install additional packages: -```sh -sudo apt-get install python-software-properties -sudo add-apt-repository ppa:mapnik/v2.2.0 -sudo add-apt-repository ppa:kakrueger/openstreetmap -sudo apt-get update -sudo apt-get install postgresql postgresql-contrib postgresql-9.1-postgis python-mapnik git osm2pgsql python3 python3-setuptools python3-postgresql python3-dev postgresql-plpython3 python3-wand -``` - -More dependencies: -* pghstore - -```sh -git clone https://github.com/plepe/pghstore.git -cd pghstore -python3 setup.py build -sudo python3 setup.py install -``` - -Create database: -```sh -sudo su - postgres -createdb test -createuser -P user -# you may say 'y' for superuser -exit -``` -Initialize 
database: -```sh -psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create extension hstore" -psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create language plpython3u" -psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/postgresql/9.1/contrib/postgis-1.5/postgis.sql -psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/postgresql/9.1/contrib/postgis-1.5/spatial_ref_sys.sql -``` - -Download an OSM file and import to database: -```sh -osm2pgsql -dtest -Uuser -Hlocalhost -W -s -S /usr/share/osm2pgsql/default.style --hstore -G azores-latest.osm.bz2 -``` - -Clone pgmapcss: -```sh -git clone https://github.com/plepe/pgmapcss.git -cd pgmapcss -python3 setup.py build -sudo python3 setup.py install -``` - -Compile 'test.mapcss' file and install database functions: -``` -pgmapcss -dtest -uuser -pPASSWORD -tmapnik-2.2 test -``` - -You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): -* [Render an image](https://github.com/plepe/mapnik-render-image) -* [Run as WMS (Web Map Service)](https://github.com/mapbox/landspeed.js) -* [Run as TMS (Tile Map Service) with Apache2 and mod_tile](https://github.com/openstreetmap/mod_tile) -* [View in GUI](https://github.com/mapnik/mapnik/wiki/MapnikViewer) From 31e169690a69733589f42f3ec42b517f71371319 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 17:41:30 +0100 Subject: [PATCH 112/209] DOC: Fix install instructions for Ubuntu 14.04/Mapnik 3.0 - add missing "apt-get update" - command line for compiling needs to include 'mapnik-3.0' --- doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md b/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md index 45e77837..ce784066 100644 --- a/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md +++ 
b/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md @@ -4,6 +4,7 @@ Install additional packages: ```sh sudo apt-get install python-software-properties sudo add-apt-repository ppa:mapnik/nightly-trunk +sudo apt-get update sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik osm2pgsql postgresql-plpython3 python3-postgresql ttf-unifont mapnik-input-plugin-postgis libmapnik libmapnik-dev mapnik-utils python3-wand ``` @@ -46,7 +47,7 @@ sudo python3 setup.py install Compile 'test.mapcss' file and install database functions: ``` -pgmapcss -dtest -uuser -pPASSWORD -tmapnik-2.2 test +pgmapcss -dtest -uuser -pPASSWORD -tmapnik-3.0 test ``` You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): From 8ac0728a30c4c9b287489e688749420576f02ecd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 18:22:49 +0100 Subject: [PATCH 113/209] DOC: adhere database type in install instructions --- ...Install_on_Ubuntu_14.04_with_Mapnik_2.2.md | 24 +++++++++++++++--- ...Install_on_Ubuntu_14.04_with_Mapnik_3.0.md | 25 ++++++++++++++++--- 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md b/doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md index a3fb5845..2eec6ae2 100644 --- a/doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md +++ b/doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md @@ -2,7 +2,7 @@ Installation on a plain Ubuntu 14.04 Server: Install additional packages: ```sh -sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik osm2pgsql postgresql-plpython3 python3-postgresql ttf-unifont python3-wand +sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont python3-wand ``` More 
dependencies: @@ -36,11 +36,27 @@ for i in /usr/share/fonts/truetype/dejavu/* ; do sudo ln -s $i /usr/lib/mapnik/f sudo ln -s /usr/share/fonts/truetype/unifont/unifont.ttf /usr/lib/mapnik/fonts/ ``` -Download an OSM file and import to database: +For the next step you can decide, whether you want to use osm2pgsql, osmosis or overpass as database backend. + +Case 'osm2pgsql': Download an OSM file and import to database: ```sh +sudo apt-get install osm2pgsql osm2pgsql -dtest -Uuser -Hlocalhost -W -s -S /usr/share/osm2pgsql/default.style --hstore -G azores-latest.osm.bz2 ``` +Case 'osmosis': Download an OSM file and import to database: +```sh +sudo apt-get install osmosis +mkdir pgimport +osmosis --read-xml azores-latest.osm.bz2 --write-pgsql-dump +cd pgimport +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6.sql +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_linestring.sql +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_load_0.6.sql +``` + +Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). + Clone pgmapcss: ```sh git clone https://github.com/plepe/pgmapcss.git @@ -51,9 +67,11 @@ sudo python3 setup.py install Compile 'test.mapcss' file and install database functions: ``` -pgmapcss -dtest -uuser -pPASSWORD -tmapnik-2.2 test +pgmapcss --database-type=TYPE -dtest -uuser -pPASSWORD -tmapnik-2.2 test ``` +Replace TYPE by 'osm2pgsql' (default), 'osmosis' or 'overpass'. See [config_options.md](./config_options.md) for advanced options. 
+ You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): * [Render an image](https://github.com/plepe/mapnik-render-image) * [Run as WMS (Web Map Service)](https://github.com/mapbox/landspeed.js) diff --git a/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md b/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md index ce784066..0f7ac68c 100644 --- a/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md +++ b/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md @@ -5,7 +5,7 @@ Install additional packages: sudo apt-get install python-software-properties sudo add-apt-repository ppa:mapnik/nightly-trunk sudo apt-get update -sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik osm2pgsql postgresql-plpython3 python3-postgresql ttf-unifont mapnik-input-plugin-postgis libmapnik libmapnik-dev mapnik-utils python3-wand +sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont mapnik-input-plugin-postgis libmapnik libmapnik-dev mapnik-utils python3-wand ``` More dependencies: @@ -32,11 +32,27 @@ psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create exte psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create language plpython3u" ``` -Download an OSM file and import to database: +For the next step you can decide, whether you want to use osm2pgsql, osmosis or overpass as database backend. 
+ +Case 'osm2pgsql': Download an OSM file and import to database: ```sh +sudo apt-get install osm2pgsql osm2pgsql -dtest -Uuser -Hlocalhost -W -s -S /usr/share/osm2pgsql/default.style --hstore -G azores-latest.osm.bz2 ``` +Case 'osmosis': Download an OSM file and import to database: +```sh +sudo apt-get install osmosis +mkdir pgimport +osmosis --read-xml azores-latest.osm.bz2 --write-pgsql-dump +cd pgimport +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6.sql +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_schema_0.6_linestring.sql +psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_load_0.6.sql +``` + +Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). + Clone pgmapcss: ```sh git clone https://github.com/plepe/pgmapcss.git @@ -47,9 +63,12 @@ sudo python3 setup.py install Compile 'test.mapcss' file and install database functions: ``` -pgmapcss -dtest -uuser -pPASSWORD -tmapnik-3.0 test +pgmapcss --database-type=TYPE -dtest -uuser -pPASSWORD -tmapnik-3.0 test ``` +Replace TYPE by 'osm2pgsql' (default), 'osmosis' or 'overpass'. See [config_options.md](./config_options.md) for advanced options. 
+ + You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): * [Render an image](https://github.com/plepe/mapnik-render-image) * [Run as WMS (Web Map Service)](https://github.com/mapbox/landspeed.js) From d0c72482f86a16ffd60873194e01b02d230abca9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 18:25:54 +0100 Subject: [PATCH 114/209] DOC: shorten file names of install instructions --- README.md | 2 +- ...untu_14.04_with_Mapnik_2.2.md => Install_with_Mapnik_2.2.md} | 0 ...untu_14.04_with_Mapnik_3.0.md => Install_with_Mapnik_3.0.md} | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename doc/{Install_on_Ubuntu_14.04_with_Mapnik_2.2.md => Install_with_Mapnik_2.2.md} (100%) rename doc/{Install_on_Ubuntu_14.04_with_Mapnik_3.0.md => Install_with_Mapnik_3.0.md} (100%) diff --git a/README.md b/README.md index f2237373..9531aaf4 100644 --- a/README.md +++ b/README.md @@ -184,7 +184,7 @@ Two possible uses for the standalone mode: ### Easy to install: ### -Find installation instructions in [Install pgmapcss with Mapnik 3.0 on Ubuntu 14.04](doc/Install pgmapcss with Mapnik_3.0 on Ubuntu_14.04.md). +Find installation instructions in [Install with Mapnik 3.0](doc/Install_with_Mapnik_3.0.md). There's a file [test.mapcss](./test.mapcss) which you can use to build upon. 
You can [try it online!](http://pgmapcss.openstreetbrowser.org/?style=f457f&zoom=14&lat=48.2098&lon=16.3725) diff --git a/doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md b/doc/Install_with_Mapnik_2.2.md similarity index 100% rename from doc/Install_on_Ubuntu_14.04_with_Mapnik_2.2.md rename to doc/Install_with_Mapnik_2.2.md diff --git a/doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md b/doc/Install_with_Mapnik_3.0.md similarity index 100% rename from doc/Install_on_Ubuntu_14.04_with_Mapnik_3.0.md rename to doc/Install_with_Mapnik_3.0.md From 855742a8bebe42ebf8b578d9b59a99878d0e7537 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 22:19:31 +0100 Subject: [PATCH 115/209] DB/osm2pgsql: optimize queries only when object relation[type=route] --- pgmapcss/db/osm2pgsql/db.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index 2a22a9c8..f065d5e4 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -98,11 +98,27 @@ def compile_modify_id(self, key, value): else: return format(value[1:]) + def has_condition(self, conditions, key, values): + for condition in conditions: + if 'key' in condition and condition['key'] == key: + if condition['op'] == '=' and condition['value_type'] == 'value': + if condition['value'] in values: + return True + + if condition['op'] == '@=' and condition['value_type'] == 'value': + print(condition) + if len(set(condition['value'].split(';')) - values) == 0: + return True + + return False + def compile_selector(self, selector, prefix=''): ret = postgresql_db.compile_selector(self, selector, prefix=prefix) if 'parent' in selector and selector['link']['type'] in ('', '>'): - parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') - ret += ' and osm_id in (select cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, 
generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'w\')'; + if selector['parent']['type'] == 'relation' and \ + self.has_condition(selector['parent']['conditions'], 'type', { 'route' }): + parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') + ret += ' and osm_id in (select cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'w\')'; return ret From deafd2a70381a461c1a8866583066cef34464b1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 22:33:04 +0100 Subject: [PATCH 116/209] DB/osm2pgsql: add __TYPE_SHORT__ and __TYPE_MODIFY__ replacement patterns --- pgmapcss/db/osm2pgsql/db.py | 2 +- pgmapcss/db/osm2pgsql/db_functions.py | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index f065d5e4..00a57ff8 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -119,6 +119,6 @@ def compile_selector(self, selector, prefix=''): if selector['parent']['type'] == 'relation' and \ self.has_condition(selector['parent']['conditions'], 'type', { 'route' }): parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') - ret += ' and osm_id in (select cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + 
') t where is_member_id=1 and substr(member_id, 1, 1) = \'w\')'; + ret += ' and osm_id in (select __TYPE_MODIFY__cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'__TYPE_SHORT__\')'; return ret diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index 88f42daa..74a15947 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -47,6 +47,10 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], where {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) + qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) + qry = qry.replace('__TYPE_SHORT__', 'n') + qry = qry.replace('__TYPE_MODIFY__', '') + plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) @@ -97,7 +101,8 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], where osm_id>0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) - plpy.warning(qry) + qry = qry.replace('__TYPE_SHORT__', 'w') + qry = qry.replace('__TYPE_MODIFY__', '') plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) @@ -149,6 +154,8 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], where osm_id<0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) + qry = qry.replace('__TYPE_SHORT__', 'w') + qry = qry.replace('__TYPE_MODIFY__', '') plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) @@ -200,6 
+207,10 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], where osm_id>0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) + qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) + qry = qry.replace('__TYPE_SHORT__', 'r') + qry = qry.replace('__TYPE_MODIFY__', '-') + plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) @@ -250,6 +261,10 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], where osm_id<0 and {bbox} ( {w} ) '''.format(bbox=bbox, w=' or '.join(w), add_columns=add_columns_qry) + qry = qry.replace('__PARENT_BBOX__', replacements['parent_bbox']) + qry = qry.replace('__TYPE_SHORT__', 'r') + qry = qry.replace('__TYPE_MODIFY__', '-') + plan = plpy.prepare(qry, param_type ) res = plpy.cursor(plan, param_value ) From 66eaca7c9e87ea69783c2d2d545ac7aea9ef11af Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 23:01:11 +0100 Subject: [PATCH 117/209] DB/osm2pgsql: bugfix objects_member_of: tags may be None --- pgmapcss/db/osm2pgsql/db_functions.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index 74a15947..6cd5bd96 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -435,11 +435,13 @@ def objects_member_of(objects, other_selects, self_selects, options): if r[k] is not None } # START db.has-hstore - t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) + if r['tags'] is not None: + t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) # END db.has-hstore # END db.columns.way # START db.hstore-only - t['tags'] = pghstore.loads(r['tags']) + if r['tags'] is not None: + t['tags'] = pghstore.loads(r['tags']) # END db.hstore-only t['tags']['osm:id'] = t['id'] yield(o, t, link_tags) From 
80a0feb1681c35f2a2e8e1cbdfd44a04ccf7c924 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 23:04:28 +0100 Subject: [PATCH 118/209] DB/osm2pgsql: objects_member_of: bugfix resolving way->node relations --- pgmapcss/db/osm2pgsql/db_functions.py | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index 6cd5bd96..ef76fd37 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -371,6 +371,9 @@ def objects_by_id(id_list, options): yield t def flatarray_to_tags(arr): + if arr is None: + return {} + ret = {} for i in range(0, len(arr), 2): ret[arr[i]] = arr[i + 1] @@ -408,7 +411,7 @@ def objects_member_of(objects, other_selects, self_selects, options): yield (o, t, member) if 'way' in other_selects: - plan = plpy.prepare('select id, nodes, planet_osm_line.tags, way as geo from planet_osm_ways left join planet_osm_line on planet_osm_ways.id=planet_osm_line.osm_id where nodes::bigint[] @> Array[$1]', ['bigint']); + plan = plpy.prepare('select id, nodes, tags, (select way from planet_osm_line where id=osm_id union select way from planet_osm_polygon where id=osm_id) as geo from planet_osm_ways where nodes::bigint[] @> Array[$1]', ['bigint']); for o in objects: member_id = o['id'] num_id = int(member_id[1:]) @@ -427,22 +430,7 @@ def objects_member_of(objects, other_selects, self_selects, options): 'member_id': member_id, 'sequence_id': str(i) } -# START db.columns.way - t['tags'] = { - k: r[k] - for k in r - if k not in ['id', 'geo', 'types', 'tags', 'nodes'] - if r[k] is not None - } -# START db.has-hstore - if r['tags'] is not None: - t['tags'] = dict(pghstore.loads(r['tags']).items() | t['tags'].items()) -# END db.has-hstore -# END db.columns.way -# START db.hstore-only - if r['tags'] is not None: - t['tags'] = pghstore.loads(r['tags']) -# END db.hstore-only + t['tags'] = 
flatarray_to_tags(r['tags']) t['tags']['osm:id'] = t['id'] yield(o, t, link_tags) From d437a2803c4be7d5f8af9b6586842c31117fc928 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 29 Dec 2014 23:05:10 +0100 Subject: [PATCH 119/209] DB/osm2pgsql: parent relations for relation(polygon)->member, way->node --- pgmapcss/db/osm2pgsql/db.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pgmapcss/db/osm2pgsql/db.py b/pgmapcss/db/osm2pgsql/db.py index 00a57ff8..6d4c9211 100644 --- a/pgmapcss/db/osm2pgsql/db.py +++ b/pgmapcss/db/osm2pgsql/db.py @@ -121,4 +121,13 @@ def compile_selector(self, selector, prefix=''): parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') ret += ' and osm_id in (select __TYPE_MODIFY__cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_line parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'__TYPE_SHORT__\')'; + if selector['parent']['type'] == 'relation' and \ + self.has_condition(selector['parent']['conditions'], 'type', { 'multipolygon', 'boundary' }): + parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') + ret += ' and osm_id in (select __TYPE_MODIFY__cast(substr(member_id, 2) as bigint) member_ids from (select unnest(r.members) member_id, generate_series(1, array_upper(r.members, 1)) % 2 is_member_id from planet_osm_polygon parent join planet_osm_rels r on r.id=-parent.osm_id where __PARENT_BBOX__ ' + parent_conditions + ') t where is_member_id=1 and substr(member_id, 1, 1) = \'__TYPE_SHORT__\')'; + + if selector['parent']['type'] == 'way' and selector['type'] == 'node': + parent_conditions = self.compile_selector(selector['parent'], prefix='parent.') + ret += ' and osm_id in (select __TYPE_MODIFY__member_id member_ids from 
(select unnest(r.nodes) member_id from (select * from planet_osm_line union select * from planet_osm_polygon) parent join planet_osm_ways r on r.id=parent.osm_id where parent.osm_id>0 and __PARENT_BBOX__ ' + parent_conditions + ') t)'; + return ret From 6d0473cb09a5e90c506d5cbf3f8be356872894ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 00:04:08 +0100 Subject: [PATCH 120/209] Main loop: bugfix postgis 2.0: add ST_ prefixes --- pgmapcss/compiler/compile_function_match.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/compiler/compile_function_match.py b/pgmapcss/compiler/compile_function_match.py index 495fbfd4..2167ac02 100644 --- a/pgmapcss/compiler/compile_function_match.py +++ b/pgmapcss/compiler/compile_function_match.py @@ -92,7 +92,7 @@ def compile_function_match(stat): parameters['srs' ] = {srs} if type(bbox) == list and len(bbox) == 4: - plan = plpy.prepare('select SetSRID(MakeBox2D(ST_Point($1, $2), ST_Point($3, $4)), $5) as bounds', ['float', 'float', 'float', 'float', 'int']) + plan = plpy.prepare('select ST_SetSRID(ST_MakeBox2D(ST_Point($1, $2), ST_Point($3, $4)), $5) as bounds', ['float', 'float', 'float', 'float', 'int']) res = plpy.execute(plan, [float(b) for b in bbox] + [ parameters['in.srs'] if 'in.srs' in parameters else parameters['srs'] ]) _bbox = res[0]['bounds'] else: From fa1d9b7893b72245b4bbbefd8e351b53b875cc14 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 00:45:15 +0100 Subject: [PATCH 121/209] DB/osm2pgsql: document optimized parental queries --- doc/database.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/database.md b/doc/database.md index fd1b2593..8178fdd4 100644 --- a/doc/database.md +++ b/doc/database.md @@ -6,8 +6,8 @@ osm2pgsql * Objects matching 'line' depend on the osm2pgsql style file; also relation=route are included in 'line' * Objects matching 'area' depend on the 
osm2pgsql style file; also multipolygons and boundaries are included in 'area'. * As direct query, only relations of type=route, type=boundary and type=multipolygon are available. On the other hand the 'type' tag is removed by osm2pgsql, therefore a query for `relation[type=route]` will not work. -* Queries of the type `relation node|way|relation` do work for all relation types as parents; even the type-tag is available. Currently those queries are inefficient, as for every possible member all available relations are queried. -* Queries of the type `way node` work. Currently those queries are inefficient, as for every possible member all available relations are queried. +* Queries of the type `relation node|way|relation` do work for all relation types as parents; even the type-tag is available. Currently those queries are inefficient, as for every possible member all available relations are queried, except when the parent contains a query for the tag 'type' with the values 'route', 'multipolygon' or 'boundary' (as these object are added to planet_osm_line or planet_osm_polygon). E.g. `relation[type=route][route=tram] way`. +* Queries of the type `way node` work, and will be optimized, as first all ways in the bouding box are queried and then all associated nodes. * Additionally the tag "osm:id" will be set (e.g. 'n1234'), but it will not be available for querying (see below at osmosis pgsnapshot for additional tags). * The osm2pgsql mode will by default use the tag columns of the database tables, and for other tags the "tags" column (type hstore), if available. 
From 855ede3581ec48079cc361c957e46c30dd1f10f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 00:52:48 +0100 Subject: [PATCH 122/209] DB/Overpass: document optimized parental conditions --- doc/database.md | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/database.md b/doc/database.md index 8178fdd4..b73fc585 100644 --- a/doc/database.md +++ b/doc/database.md @@ -62,6 +62,7 @@ By default, the API on overpass-api.de will be used, therefore it is not necessa * In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead; including full multipolygon support (see below) * In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. +* Queries for `relation member` (where member is any type) and `way node` will be optimzed, as only objects matching the member conditions and the parents conditions will be queried from Overpass API. * Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). 
Multipolygon support From 637be0456ea4f9f999b3479a74c9b4062cab45da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 01:09:17 +0100 Subject: [PATCH 123/209] DB/Overpass: remove obsolete profiling code --- pgmapcss/db/overpass/db_functions.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 1c550037..73d37298 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -245,7 +245,6 @@ def get_bbox(_bbox=None): return res[0]['bbox_string'] def objects_bbox(_bbox, db_selects, options): - time_start = datetime.datetime.now() # profiling non_relevant_tags = {'type', 'source', 'source:ref', 'source_ref', 'note', 'comment', 'created_by', 'converted_by', 'fixme', 'FIXME', 'description', 'attribution', 'osm:id', 'osm:version', 'osm:user_id', 'osm:user', 'osm:timestamp', 'osm:changeset'} ways_done = [] rels_done = [] @@ -448,9 +447,6 @@ def objects_bbox(_bbox, db_selects, options): yield(assemble_object(r)) - time_stop = datetime.datetime.now() # profiling - plpy.notice('querying db objects took %.2fs' % (time_stop - time_start).total_seconds()) - def objects_by_id(id_list, options): q = '' multipolygons = [] From ee638affedcaa063c5040bf75e6a7bfbeeb65195 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 02:18:49 +0100 Subject: [PATCH 124/209] Compile DB Selects: re-design resolve_set_statements: return list of possible selectors --- pgmapcss/compiler/compile_db_selects.py | 47 +++++++++++++------------ 1 file changed, 25 insertions(+), 22 deletions(-) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index f8de2977..ab5572aa 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -1,10 +1,16 @@ import copy -# takes a list of conditions as input and returns several condition 
combinations +# takes a statement as input and returns several selectors with condition +# combinations from set statements. also, conditions with a key prefixed by '.' +# are removed. def resolve_set_statements(statement, done, stat): - ret = [ [] ] + # initialize return selector(s) with empty conditions + ret = copy.deepcopy(statement['selector']) + ret['conditions'] = [] + ret = [ ret ] + if statement['id'] in done: - return [ [] ] + return ret done.append(statement['id']) # iterate over all conditions in the statement @@ -27,20 +33,20 @@ def resolve_set_statements(statement, done, stat): ] # for all set statements create a new set of conditions - ret = [ - r + s1 - for r in last_ret - for s in set_statements - for s1 in s - ] + for lr in last_ret: + for s1 in set_statements: + for s in s1: + r = copy.deepcopy(lr) + r['conditions'] += s['conditions'] + ret.append(r) # for each set of conditions add the current condition # unless the condition's key does not start with a '.' if condition['key'][0] != '.': - ret += [ - r + [ condition ] - for r in last_ret - ] + for r in last_ret: + c = copy.deepcopy(r) + r['conditions'] += [ condition ] + ret.append(r) return ret @@ -123,16 +129,13 @@ def compile_selectors_db(statements, selector_index, stat): for i in statements: if type(i) == int: - _statement = copy.deepcopy(stat['statements'][i]) + _statement = stat['statements'][i] else: - _statement = copy.deepcopy(i) - - for c in resolve_set_statements(_statement, [], stat): - _statement['selector']['conditions'] = c - if selector_index is None: - selector = _statement['selector'] - else: - selector = _statement['selector'][selector_index] + _statement = i + + for selector in resolve_set_statements(_statement, [], stat): + if selector_index is not None: + selector = selector[selector_index] # make sure that selector does not get modified selector = copy.deepcopy(selector) From 3f7822f12f741af8ea5b8c67b654ab3f4764e318 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 03:10:30 +0100 Subject: [PATCH 125/209] Compile DB Selects: copy parent selectors for set statements --- pgmapcss/compiler/compile_db_selects.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/pgmapcss/compiler/compile_db_selects.py b/pgmapcss/compiler/compile_db_selects.py index ab5572aa..fdf9f845 100644 --- a/pgmapcss/compiler/compile_db_selects.py +++ b/pgmapcss/compiler/compile_db_selects.py @@ -38,6 +38,14 @@ def resolve_set_statements(statement, done, stat): for s in s1: r = copy.deepcopy(lr) r['conditions'] += s['conditions'] + + # also copy parent selector from set statements (but only, + # if there's not a parent selector on the child yet) + # TODO: allow several parent selectors for a single selector + if 'parent' in s and not 'parent' in r: + r['parent'] = s['parent'] + r['link'] = s['link'] + ret.append(r) # for each set of conditions add the current condition From a6c34bec57c0caf01356c3730e1de22ea33d5a1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 07:37:23 +0100 Subject: [PATCH 126/209] DB/Overpass: new option 'db.serial_requests' - closes #89 --- doc/config_options.md | 1 + doc/database.md | 1 + pgmapcss/db/overpass/db.py | 3 +++ pgmapcss/db/overpass/db_functions.py | 14 +++++++++----- 4 files changed, 14 insertions(+), 5 deletions(-) diff --git a/doc/config_options.md b/doc/config_options.md index 218f970e..83f7c659 100644 --- a/doc/config_options.md +++ b/doc/config_options.md @@ -30,3 +30,4 @@ Advances options: | db.has-hstore | osm2pgsql only: Additional tags can be read from the 'tags' column (of type hstore). Usually autodetected. Needed when using offline mode. | | | db.multipolygons | osmosis only: Specify whether the multipolygons table is present and should be used. Usually autodected. 
Needed when using offline mode (default: false) | true/false | db.hstore_key_index| osm2pgsql/osmosis: Assume that there's a btree index on the specified keys (,-separated) and therefore add a tag-exists condition into the SQL query. E.g.: db.hstore_key_index=sport,amenity . | | +| db.serial_requests | overpass only: Always finish parsing result data, before sending a parallel request (e.g. for handling relationships). Necessary on some APIs which disallow multiple requests at once. Impacts memory usage, as results need to be cached. | true/**false** diff --git a/doc/database.md b/doc/database.md index b73fc585..d5c6918e 100644 --- a/doc/database.md +++ b/doc/database.md @@ -80,6 +80,7 @@ Behaviour can be influenced with the following config options: | Config option | Description | Possible values |------------------|-------------|----------------- | db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api | | +| db.serial_requests | overpass only: Always finish parsing result data, before sending a parallel request (e.g. for handling relationships). Necessary on some APIs which disallow multiple requests at once. Impacts memory usage, as results need to be cached. 
| true/**false** | debug.overpass_queries | overpass only: Print a debug message for each query posted to the Overpass API | true/**false** | Example usage: diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 228361a7..666c6554 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -13,6 +13,9 @@ def __init__(self, conn, stat): if not 'db.overpass-url' in self.stat['config']: self.stat['config']['db.overpass-url'] = 'http://overpass-api.de/api' + if 'debug.profiler' in self.stat['config']: + self.stat['config']['db.serial_requests'] = True + self.parent_queries = [] def tag_type(self, key, condition): diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 73d37298..e9ee2956 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -8,8 +8,10 @@ def overpass_query(query): # START debug.overpass_queries plpy.warning(query) # END debug.overpass_queries -# START debug.profiler +# START db.serial_requests ret = [] +# END db.serial_requests +# START debug.profiler time_start = datetime.datetime.now() # END debug.profiler url = '{db.overpass-url}/interpreter?' 
+\ @@ -36,11 +38,11 @@ def overpass_query(query): elif mode == 1: if re.match('}', r): block += '}' -# START debug.profiler +# START db.serial_requests ret.append(json.loads(block)) -# ELSE debug.profiler +# ELSE db.serial_requests yield json.loads(block) -# END debug.profiler +# END db.serial_requests block = '' @@ -49,9 +51,11 @@ def overpass_query(query): # START debug.profiler plpy.warning('%s\nquery took %.2fs for %d features' % (query, (datetime.datetime.now() - time_start).total_seconds(), len(ret))) +# END debug.profiler +# START db.serial_requests for r in ret: yield r -# END debug.profiler +# END db.serial_requests return From b2045f9e8de4367e01a64872371805f3862035c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 07:39:40 +0100 Subject: [PATCH 127/209] DB/Overpass: remove option debug.overpass_queries (replaced by debug.profiler) --- doc/database.md | 2 +- pgmapcss/db/overpass/db_functions.py | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/database.md b/doc/database.md index d5c6918e..f088d6e6 100644 --- a/doc/database.md +++ b/doc/database.md @@ -81,7 +81,7 @@ Behaviour can be influenced with the following config options: |------------------|-------------|----------------- | db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api | | | db.serial_requests | overpass only: Always finish parsing result data, before sending a parallel request (e.g. for handling relationships). Necessary on some APIs which disallow multiple requests at once. Impacts memory usage, as results need to be cached. | true/**false** -| debug.overpass_queries | overpass only: Print a debug message for each query posted to the Overpass API | true/**false** | +| debug.profiler | during execution, show some statistics about query/processing time and count of objects. 
| true/**false** | Example usage: ```sh diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index e9ee2956..d37e0d1c 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -5,9 +5,6 @@ def overpass_query(query): import urllib.parse import json -# START debug.overpass_queries - plpy.warning(query) -# END debug.overpass_queries # START db.serial_requests ret = [] # END db.serial_requests From 23492dc8cb844b842882592706751a96eddd47ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 07:46:20 +0100 Subject: [PATCH 128/209] DB/Overpass: when a query fails, print failing query --- pgmapcss/db/overpass/db_functions.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index d37e0d1c..443facfd 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -14,7 +14,11 @@ def overpass_query(query): url = '{db.overpass-url}/interpreter?' 
+\ urllib.parse.urlencode({ 'data': query }) - f = urllib.request.urlopen(url) + try: + f = urllib.request.urlopen(url) + except urllib.error.HTTPError as err: + plpy.warning('Overpass query failed:\n' + query) + raise block = '' mode = 0 From 990a006aa5c4f422607844b59385bc3718fbd889 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 07:52:36 +0100 Subject: [PATCH 129/209] DB/Overpass: instead of GET use POST requests to query data --- pgmapcss/db/overpass/db_functions.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 443facfd..6731cdd6 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -11,11 +11,12 @@ def overpass_query(query): # START debug.profiler time_start = datetime.datetime.now() # END debug.profiler - url = '{db.overpass-url}/interpreter?' +\ - urllib.parse.urlencode({ 'data': query }) + url = '{db.overpass-url}/interpreter' + data = urllib.parse.urlencode({ 'data': query }) + data = data.encode('utf-8') try: - f = urllib.request.urlopen(url) + f = urllib.request.urlopen(url, data) except urllib.error.HTTPError as err: plpy.warning('Overpass query failed:\n' + query) raise From 8cbd9b3b09543c4319d8ae17ec546da0f220cb71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 08:03:54 +0100 Subject: [PATCH 130/209] Make DB/Overpass the new default database backend --- README.md | 2 + doc/Install_with_Mapnik_2.2.md | 8 ++-- doc/Install_with_Mapnik_3.0.md | 8 ++-- doc/database.md | 72 +++++++++++++++++----------------- pgmapcss/main.py | 4 +- 5 files changed, 48 insertions(+), 46 deletions(-) diff --git a/README.md b/README.md index 9531aaf4..f2dbffec 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,8 @@ pgmapcss compiles MapCSS styles into a database function. 
Mapnik just needs to c Alternatively there's a standalone mode, where the MapCSS style is compiled into an executable, which can be run from the command line. The executable will not render an image but create GeoJSON output instead. See below for details. +Since version 0.10 you don't even need a local database import (a local database is needed nonetheless), as the new default data source is Overpass API. Alternatively, osm2pgsql and osmosis pgsnapshot are supported. See [doc/database.md](doc/database.md) for details. + Stable version: [0.9.2](https://github.com/plepe/pgmapcss), development version: [0.10-dev](https://github.com/plepe/pgmapcss/tree/branch-0.10), see [open issues](https://github.com/plepe/pgmapcss/milestones/Version%200.10) Features diff --git a/doc/Install_with_Mapnik_2.2.md b/doc/Install_with_Mapnik_2.2.md index 2eec6ae2..72474e71 100644 --- a/doc/Install_with_Mapnik_2.2.md +++ b/doc/Install_with_Mapnik_2.2.md @@ -36,7 +36,9 @@ for i in /usr/share/fonts/truetype/dejavu/* ; do sudo ln -s $i /usr/lib/mapnik/f sudo ln -s /usr/share/fonts/truetype/unifont/unifont.ttf /usr/lib/mapnik/fonts/ ``` -For the next step you can decide, whether you want to use osm2pgsql, osmosis or overpass as database backend. +For the next step you can decide, whether you want to use overpass (default), osm2pgsql or osmosis as database backend. + +Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). Case 'osm2pgsql': Download an OSM file and import to database: ```sh @@ -55,8 +57,6 @@ psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/d psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_load_0.6.sql ``` -Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). 
- Clone pgmapcss: ```sh git clone https://github.com/plepe/pgmapcss.git @@ -70,7 +70,7 @@ Compile 'test.mapcss' file and install database functions: pgmapcss --database-type=TYPE -dtest -uuser -pPASSWORD -tmapnik-2.2 test ``` -Replace TYPE by 'osm2pgsql' (default), 'osmosis' or 'overpass'. See [config_options.md](./config_options.md) for advanced options. +Replace TYPE by 'overpass' (default), 'osm2pgsql' or 'osmosis'. See [config_options.md](./config_options.md) for advanced options. You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): * [Render an image](https://github.com/plepe/mapnik-render-image) diff --git a/doc/Install_with_Mapnik_3.0.md b/doc/Install_with_Mapnik_3.0.md index 0f7ac68c..0c1a1cec 100644 --- a/doc/Install_with_Mapnik_3.0.md +++ b/doc/Install_with_Mapnik_3.0.md @@ -32,7 +32,9 @@ psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create exte psql -d "dbname=test user=user host=localhost password=PASSWORD" -c "create language plpython3u" ``` -For the next step you can decide, whether you want to use osm2pgsql, osmosis or overpass as database backend. +For the next step you can decide, whether you want to use overpass, osm2pgsql or osmosis as database backend. + +Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). Case 'osm2pgsql': Download an OSM file and import to database: ```sh @@ -51,8 +53,6 @@ psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/d psql -d "dbname=test user=user host=localhost password=PASSWORD" -f /usr/share/doc/osmosis/examples/pgsnapshot_load_0.6.sql ``` -Case 'overpass': You can use one of the public Overpass APIs (default), or [install your own](http://wiki.openstreetmap.org/wiki/Overpass_API/install). 
- Clone pgmapcss: ```sh git clone https://github.com/plepe/pgmapcss.git @@ -66,7 +66,7 @@ Compile 'test.mapcss' file and install database functions: pgmapcss --database-type=TYPE -dtest -uuser -pPASSWORD -tmapnik-3.0 test ``` -Replace TYPE by 'osm2pgsql' (default), 'osmosis' or 'overpass'. See [config_options.md](./config_options.md) for advanced options. +Replace TYPE by 'overpass' (default), 'osm2pgsql' or 'osmosis'. See [config_options.md](./config_options.md) for advanced options. You get a file `test.mapnik` which you can use with your preferred render front-end (these are just examples): diff --git a/doc/database.md b/doc/database.md index f088d6e6..29e1ef11 100644 --- a/doc/database.md +++ b/doc/database.md @@ -1,5 +1,41 @@ pgmapcss supports different kind of database layouts, currently osm2pgsql, osmosis pgsnapshot and overpass. Here's a short description of advantages and disadvantages. +Overpass API (short: overpass) +============================== +In contrast to osm2pgsql and osmosis, Overpass API is an external database which is queried by HTTP requests. Also, the query language is very different from SQL. Overpass API is faster then PostgreSQL/PostGIS on large viewports. + +By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. Additionally, you still need a local PostgreSQL database, as it is used for connecting to Mapnik and accessing the PostGIS functions. + +* In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead; including full multipolygon support (see below) +* In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). 
Therefore very large objects might be missing in the output. +* Queries for `relation member` (where member is any type) and `way node` will be optimzed, as only objects matching the member conditions and the parents conditions will be queried from Overpass API. +* Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). + +Multipolygon support +-------------------- +There are two "types" of multipolygons, those that have their tags bound to the relation (the standard) and multipolygons which inherit their tags from their outer members (when the relation has no relevant tags and the outer members have exactly the same relevant tags, or the relation and the outer members have the same relevant tags). + +* "Standard" multipolygons get their ID prefixed by 'r' (as they are relations). +* Multipolygons with tags from their outer members get their ID prefixed by 'm' (for multipolygon) and an additional tag 'osm:has_outer_tags' (set to 'yes'). On the other hand closed ways which are an outer member of a multipolygon relation do not count as 'area', whereas the multipolygon itself does not count as 'relation'. +* When the the relation and the outer members have the same relevant tags, the feature is handled as in the "standard" multipolygon way, but the outer ways do not match 'area'. + +Options +------- +Behaviour can be influenced with the following config options: + +| Config option | Description | Possible values +|------------------|-------------|----------------- +| db.overpass-url | overpass only: Use this alternative Overpass API url. 
default: http://overpass-api.de/api | | +| db.serial_requests | overpass only: Always finish parsing result data, before sending a parallel request (e.g. for handling relationships). Necessary on some APIs which disallow multiple requests at once. Impacts memory usage, as results need to be cached. | true/**false** +| debug.profiler | during execution, show some statistics about query/processing time and count of objects. | true/**false** | + +Example usage: +```sh +pgmapcss --database-type=overpass -c db.overpass-url=http://overpass.osm.rambler.ru/cgi -d LOCAL_DB -u USER -p PASSWORD test.mapcss +``` + +* -d, -u and -p are the parameters of your local PostgreSQL database + osm2pgsql ========= * Only nodes, ways and relations which are considered tagged by osm2pgsql can be used - as only those are added to the database tables. You can influence this list by change the osm2pgsql style file. @@ -53,39 +89,3 @@ Behaviour can be influenced with the following config options: | db.srs | Spatial Reference System used in the database. Autodetected. | Usual values: 4326 (WGS-84), 900913 resp. 3857 (Spherical Mercator for Web Maps) | | db.multipolygons | Specify whether the multipolygons table is present and should be used. Usually autodected. Needed when using offline mode (default: false) | true/false | db.multipolygons-v0.2 | osmosis-multipolygon compatibility with version 0.1 (before the hide_outer_ways column had been added). | true/false - -Overpass API (short: overpass) -============================== -In contrast to osm2pgsql and osmosis, Overpass API is an external database which is queried by HTTP requests. Also, the query language is very different from SQL. Overpass API is faster then PostgreSQL/PostGIS on large viewports. - -By default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. 
Additionally, you still need a local PostgreSQL database, as it is used for connecting to Mapnik and accessing the PostGIS functions. - -* In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead; including full multipolygon support (see below) -* In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. -* Queries for `relation member` (where member is any type) and `way node` will be optimzed, as only objects matching the member conditions and the parents conditions will be queried from Overpass API. -* Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). - -Multipolygon support --------------------- -There are two "types" of multipolygons, those that have their tags bound to the relation (the standard) and multipolygons which inherit their tags from their outer members (when the relation has no relevant tags and the outer members have exactly the same relevant tags, or the relation and the outer members have the same relevant tags). - -* "Standard" multipolygons get their ID prefixed by 'r' (as they are relations). -* Multipolygons with tags from their outer members get their ID prefixed by 'm' (for multipolygon) and an additional tag 'osm:has_outer_tags' (set to 'yes'). 
On the other hand closed ways which are an outer member of a multipolygon relation do not count as 'area', whereas the multipolygon itself does not count as 'relation'. -* When the the relation and the outer members have the same relevant tags, the feature is handled as in the "standard" multipolygon way, but the outer ways do not match 'area'. - -Options -------- -Behaviour can be influenced with the following config options: - -| Config option | Description | Possible values -|------------------|-------------|----------------- -| db.overpass-url | overpass only: Use this alternative Overpass API url. default: http://overpass-api.de/api | | -| db.serial_requests | overpass only: Always finish parsing result data, before sending a parallel request (e.g. for handling relationships). Necessary on some APIs which disallow multiple requests at once. Impacts memory usage, as results need to be cached. | true/**false** -| debug.profiler | during execution, show some statistics about query/processing time and count of objects. | true/**false** | - -Example usage: -```sh -pgmapcss --database-type=overpass -c db.overpass-url=http://overpass.osm.rambler.ru/cgi -d LOCAL_DB -u USER -p PASSWORD test.mapcss -``` - -* -d, -u and -p are the parameters of your local PostgreSQL database diff --git a/pgmapcss/main.py b/pgmapcss/main.py index 56c67e48..59063bf9 100755 --- a/pgmapcss/main.py +++ b/pgmapcss/main.py @@ -26,8 +26,8 @@ help='Name of database (default: username)') parser.add_argument('--database-type', dest='database_type', - default='osm2pgsql', - help='Type of database, see doc/database.md for details. (currently supported: osm2pgsql (default), osmosis)') + default='overpass', + help='Type of database, see doc/database.md for details. 
(currently supported: overpass (default), osm2pgsql, osmosis)') parser.add_argument('-u', '--user', dest='user', default=getpass.getuser(), From d77660bfb3a1194f87158af2ed77b22e6ca307c2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 16:03:55 +0100 Subject: [PATCH 131/209] Mode standalone: Move fake_plpy to separate module pgmapcss.misc.fake_plpy --- pgmapcss/compiler/compile_function_match.py | 3 +- pgmapcss/misc/fake_plpy.py | 68 ++++++++++++++++++++ pgmapcss/mode/standalone/header.inc | 70 +-------------------- 3 files changed, 71 insertions(+), 70 deletions(-) create mode 100644 pgmapcss/misc/fake_plpy.py diff --git a/pgmapcss/compiler/compile_function_match.py b/pgmapcss/compiler/compile_function_match.py index 2167ac02..0b68af6d 100644 --- a/pgmapcss/compiler/compile_function_match.py +++ b/pgmapcss/compiler/compile_function_match.py @@ -63,8 +63,9 @@ def compile_function_match(stat): 'eval_functions': \ resource_string(pgmapcss.eval.__name__, 'base.py').decode('utf-8') +\ pgmapcss.eval.functions().print(indent='') +\ -include_text() +include_text(), } + replacement['fake_plpy'] = strip_includes(resource_stream(pgmapcss.misc.__name__, 'fake_plpy.py'), stat).format(**replacement) # add all config options as replacement patterns, in the form # 'config|foo|bar', were 'foo.bar' was the config option ('.' 
not allowed # in patterns) diff --git a/pgmapcss/misc/fake_plpy.py b/pgmapcss/misc/fake_plpy.py new file mode 100644 index 00000000..ac0efa7c --- /dev/null +++ b/pgmapcss/misc/fake_plpy.py @@ -0,0 +1,68 @@ +class fake_plpy: + def __init__(self, args=None): + self.conn = postgresql.open( + host=(args.host if args and args.host else '{host}'), + password=(args.password if args and args.password else '{password}'), + database=(args.database if args and args.database else '{database}'), + user=(args.user if args and args.user else '{user}') + ) + self.conn.settings.update({{ +# START db.search_path + 'search_path': "{config|db|search_path}" +# END db.search_path + }}) +# START debug.explain_queries + self.explain_queries = {{ }} +# END debug.explain_queries + + def notice(self, *arg): + sys.stderr.write('NOTICE: ' + ' '.join([repr(a) for a in arg]) + '\n') + + def warning(self, *arg): + sys.stderr.write('WARNING: ' + ' '.join([repr(a) for a in arg]) + '\n') + + def prepare(self, query, param_type): + for (i, t) in enumerate(param_type): + i1 = i + 1 + if t == 'geometry': + t = 'text' + elif t == 'geometry[]': + t = 'text[]' + query = query.replace('$' + str(i1), '$' + str(i1) + '::' + t) + + plan = self.conn.prepare(query) + plan.query = query + + return plan + + def execute(self, plan, param=[]): +# START debug.explain_queries + if not plan.query in self.explain_queries: + self.explain_queries[plan.query] = {{ 'count': 0 }} + explain = self.conn.prepare('explain ' + plan.query) + sys.stderr.write(plan.query) + self.explain_queries[plan.query]['explain'] = explain(*param) + + self.explain_queries[plan.query]['count'] += 1 +# END debug.explain_queries + ret = [] + for r in plan(*param): + if type(r) != postgresql.types.Row: + return r + + ret.append(dict(r)) + + return ret + + def cursor(self, plan, param=[]): +# START debug.explain_queries + if not plan.query in self.explain_queries: + self.explain_queries[plan.query] = {{ 'count': 0 }} + explain = 
self.conn.prepare('explain ' + plan.query) + sys.stderr.write(plan.query) + self.explain_queries[plan.query]['explain'] = explain(*param) + + self.explain_queries[plan.query]['count'] += 1 +# END debug.explain_queries + for r in plan(*param): + yield dict(r) diff --git a/pgmapcss/mode/standalone/header.inc b/pgmapcss/mode/standalone/header.inc index 4d1dc8e2..0c0bd873 100644 --- a/pgmapcss/mode/standalone/header.inc +++ b/pgmapcss/mode/standalone/header.inc @@ -4,75 +4,7 @@ import postgresql import sys import math -class fake_plpy: - def __init__(self, args=None): - self.conn = postgresql.open( - host=(args.host if args and args.host else '{host}'), - password=(args.password if args and args.password else '{password}'), - database=(args.database if args and args.database else '{database}'), - user=(args.user if args and args.user else '{user}') - ) - self.conn.settings.update({{ -# START db.search_path - 'search_path': "{config|db|search_path}" -# END db.search_path - }}) -# START debug.explain_queries - self.explain_queries = {{ }} -# END debug.explain_queries - - def notice(self, *arg): - sys.stderr.write('NOTICE: ' + ' '.join([repr(a) for a in arg]) + '\n') - - def warning(self, *arg): - sys.stderr.write('WARNING: ' + ' '.join([repr(a) for a in arg]) + '\n') - - def prepare(self, query, param_type): - for (i, t) in enumerate(param_type): - i1 = i + 1 - if t == 'geometry': - t = 'text' - elif t == 'geometry[]': - t = 'text[]' - query = query.replace('$' + str(i1), '$' + str(i1) + '::' + t) - - plan = self.conn.prepare(query) - plan.query = query - - return plan - - def execute(self, plan, param=[]): -# START debug.explain_queries - if not plan.query in self.explain_queries: - self.explain_queries[plan.query] = {{ 'count': 0 }} - explain = self.conn.prepare('explain ' + plan.query) - sys.stderr.write(plan.query) - self.explain_queries[plan.query]['explain'] = explain(*param) - - self.explain_queries[plan.query]['count'] += 1 -# END debug.explain_queries - ret = [] 
- for r in plan(*param): - if type(r) != postgresql.types.Row: - return r - - ret.append(dict(r)) - - return ret - - def cursor(self, plan, param=[]): -# START debug.explain_queries - if not plan.query in self.explain_queries: - self.explain_queries[plan.query] = {{ 'count': 0 }} - explain = self.conn.prepare('explain ' + plan.query) - sys.stderr.write(plan.query) - self.explain_queries[plan.query]['explain'] = explain(*param) - - self.explain_queries[plan.query]['count'] += 1 -# END debug.explain_queries - for r in plan(*param): - yield dict(r) +{fake_plpy} def pgmapcss_{style_id}(bbox=None, scale_denominator=2000, parameters={{}}, _all_style_elements={all_style_elements}): import pghstore - plpy = fake_plpy() From a1b039d1af6a6a12b08030631140b69e3ec96dc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 16:15:24 +0100 Subject: [PATCH 132/209] Eval: make fake_plpy available for eval functions --- pgmapcss/eval/functions.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index e6a509cb..fcbdde0a 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -1,6 +1,7 @@ from pkg_resources import * from ..includes import * from .base import config_base +import pgmapcss.misc import re class Functions: @@ -60,18 +61,29 @@ def eval(self, statement, additional_code=''): if not self._eval or additional_code != '': self._eval_global_data = repr(self.stat['global_data']) + + replacement = { + 'host': self.stat['args'].host, + 'password': self.stat['args'].password, + 'database': self.stat['args'].database, + 'user': self.stat['args'].user, + } + content = \ 'def _eval(statement):\n' +\ ' import re\n' +\ ' import math\n' +\ + ' import postgresql\n' +\ ' global_data = ' + repr(self.stat['global_data']) + '\n' +\ ' ' + resource_string(__name__, 'base.py').decode('utf-8').replace('\n', '\n ') +\ '\n' +\ - ' ' + 
include_text().replace('\n', '\n ') +\ + ' ' + include_text().replace('\n', '\n ') + '\n' +\ + ' ' + pgmapcss.misc.strip_includes(resource_stream(pgmapcss.misc.__name__, 'fake_plpy.py'), self.stat).format(**replacement).replace('\n', '\n ') + '\n' +\ '\n' +\ additional_code.replace('\n', '\n ') +\ '\n' +\ - self.print(indent=' ') + '\n'\ + self.print(indent=' ') + '\n' +\ + ' plpy = fake_plpy()\n' +\ ' return eval(statement)' eval_code = compile(content, '', 'exec') From f19d081416da90c57d2d2a74ea466b7a1cc94160 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 10:08:56 +0100 Subject: [PATCH 133/209] Eval: eval tests: rewrite result check - get list of returns, compare outside db fun - also accept multiple possible result values --- pgmapcss/db/pgmapcss_types.sql | 2 + pgmapcss/eval/functions.py | 68 ++++++++++++++++++++-------------- 2 files changed, 43 insertions(+), 27 deletions(-) diff --git a/pgmapcss/db/pgmapcss_types.sql b/pgmapcss/db/pgmapcss_types.sql index bceab712..68db5269 100644 --- a/pgmapcss/db/pgmapcss_types.sql +++ b/pgmapcss/db/pgmapcss_types.sql @@ -34,3 +34,5 @@ create table _pgmapcss_PGCache ( create index _pgmapcss_PGCache_cache_id on _pgmapcss_PGCache(cache_id); create index _pgmapcss_PGCache_id on _pgmapcss_PGCache(id); create index _pgmapcss_PGCache_geo on _pgmapcss_PGCache using gist(geo); + +drop function if exists __eval_test__(); diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index fcbdde0a..af944246 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -3,6 +3,7 @@ from .base import config_base import pgmapcss.misc import re +import postgresql class Functions: def __init__(self, stat): @@ -146,7 +147,7 @@ def test(self, func, src): config = self.eval_functions[func] ret = ''' -create or replace function __eval_test__() returns text +create or replace function __eval_test__() returns setof text as $body$ import re import math @@ -160,9 +161,10 @@ 
def test(self, func, src): render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} ''' ret += self.print() - ret += "result = ''\n" - param_in = None + list_param_in = [] + list_return_possibilities = [] + list_shall_round = [] for r in rows: m = re.match('# IN (.*)$', r) if m: @@ -173,6 +175,10 @@ def test(self, func, src): for p in param_in ] + list_param_in.append(param_in) + list_return_possibilities.append(set()) + list_shall_round.append(False) + m = re.match('# OUT(_ROUND)? (.*)$', r) if m: return_out = eval(m.group(2)) @@ -180,32 +186,40 @@ def test(self, func, src): if len(return_out) > 16 and re.match('[0-9A-F]+$', return_out): return_out = self.convert_srs(return_out, self.stat['config']['db.srs']) - shall_round = m.group(1) == '_ROUND' - - ret += 'ret = ' + config.compiler([ repr(p) for p in param_in ], '', {}) + '\n' - ret += 'result += "IN %s\\n"\n' % repr(param_in) - ret += 'result += "EXP %s\\n"\n' % repr(return_out) - ret += 'result += "OUT %s\\n" % repr(ret)\n' - - ret += 'if type(ret) != str:\n result += "ERROR not a string: " + repr(ret) + "\\n"\n' - if shall_round: - ret += 'elif round(float(ret), 5) != %s:\n result += "ERROR return value wrong!\\n"\n' % repr(round(float(return_out), 5)) + if m.group(1) == '_ROUND': + list_shall_round[-1] = True + + list_return_possibilities[-1].add(return_out) + + if len(list_param_in): + for i, param_in in enumerate(list_param_in): + ret += 'yield ' + config.compiler([ repr(p) for p in param_in ], '', {}) + '\n' + + ret += "$body$ language 'plpython3u' immutable;" + conn = db.connection() + conn.execute(ret) + + res = conn.prepare('select * from __eval_test__()') + error = False + for i, r in enumerate(res()): + print('IN', repr(list_param_in[i])) + print('EXP', '\n '.join([ + repr(r) + for r in 
list_return_possibilities[i] + ])) + print('OUT', repr(r[0])) + + if list_shall_round[i]: + if round(float(r[0]), 5) not in [ float(q) for q in list_return_possibilities[i] ]: + error = True + print('ERROR return value wrong!') else: - ret += 'elif ret != %s:\n result += "ERROR return value wrong!\\n"\n' % repr(return_out) - - ret += 'return result\n' - ret += "$body$ language 'plpython3u' immutable;" - #print(ret) - conn = db.connection() - conn.execute(ret) - - r = conn.prepare('select __eval_test__()'); - res = r()[0][0] - - print(res) + if r[0] not in list_return_possibilities[i]: + error = True + print('ERROR return value wrong!') - if(re.search("^ERROR", res, re.MULTILINE)): - raise Exception("eval-test failed!") + if error: + raise Exception("eval-test failed!") def test_all(self): if not self.eval_functions: From 3643cad28a6b54f77dc50e47cec3bab403444b58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 10:14:43 +0100 Subject: [PATCH 134/209] Eval: eval tests accept un-ordered lists (OUT_SET) --- pgmapcss/eval/eval_keys_of_tags.py | 2 +- pgmapcss/eval/eval_unique.py | 2 +- pgmapcss/eval/functions.py | 11 ++++++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/pgmapcss/eval/eval_keys_of_tags.py b/pgmapcss/eval/eval_keys_of_tags.py index 99089202..9a9edd79 100644 --- a/pgmapcss/eval/eval_keys_of_tags.py +++ b/pgmapcss/eval/eval_keys_of_tags.py @@ -5,4 +5,4 @@ def eval_keys_of_tags(param): return ';'.join(current['tags']) # IN [] -# OUT 'cuisine;amenity;name:en;name;name:de' +# OUT_SET 'cuisine;amenity;name:en;name;name:de' diff --git a/pgmapcss/eval/eval_unique.py b/pgmapcss/eval/eval_unique.py index 6d0ee9cd..c4d04327 100644 --- a/pgmapcss/eval/eval_unique.py +++ b/pgmapcss/eval/eval_unique.py @@ -12,4 +12,4 @@ def eval_unique(param): # TESTS # IN ['foo;bar;foo'] -# OUT 'foo;bar' +# OUT_SET 'foo;bar' diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index af944246..70bb4d54 
100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -165,6 +165,8 @@ def test(self, func, src): list_param_in = [] list_return_possibilities = [] list_shall_round = [] + list_set = [] + for r in rows: m = re.match('# IN (.*)$', r) if m: @@ -178,8 +180,9 @@ def test(self, func, src): list_param_in.append(param_in) list_return_possibilities.append(set()) list_shall_round.append(False) + list_set.append(False) - m = re.match('# OUT(_ROUND)? (.*)$', r) + m = re.match('# OUT(_ROUND|_SET)? (.*)$', r) if m: return_out = eval(m.group(2)) @@ -188,6 +191,8 @@ def test(self, func, src): if m.group(1) == '_ROUND': list_shall_round[-1] = True + if m.group(1) == '_SET': + list_set[-1] = True list_return_possibilities[-1].add(return_out) @@ -213,6 +218,10 @@ def test(self, func, src): if round(float(r[0]), 5) not in [ float(q) for q in list_return_possibilities[i] ]: error = True print('ERROR return value wrong!') + elif list_set[i]: + if ';'.split(r[0]) not in [ ';'.split(q) for q in list_return_possibilities[i] ]: + error = True + print('ERROR return value wrong!') else: if r[0] not in list_return_possibilities[i]: error = True From be67a420398c4a432c22d19e8c34c0889d78c164 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 16:39:29 +0100 Subject: [PATCH 135/209] Eval: move parsing test parameters to separate function --- pgmapcss/eval/functions.py | 92 ++++++++++++++++++++------------------ 1 file changed, 49 insertions(+), 43 deletions(-) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 70bb4d54..ffc9431a 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -138,12 +138,50 @@ def call(self, func, param, stat): statement = config.compiler([ repr(p) for p in param ], '', stat) return self.eval(statement) + def get_tests(self, src): + ret = { + 'param_in': [], + 'return_possibilities': [], + 'shall_round': [], + 'set': [], + } + + for r in src.split('\n'): + m = 
re.match('# IN (.*)$', r) + if m: + param_in = eval(m.group(1)) + + param_in = [ + p if len(p) < 16 or not re.match('[0-9A-F]+$', p) else self.convert_srs(p, self.stat['config']['db.srs']) + for p in param_in + ] + + ret['param_in'].append(param_in) + ret['return_possibilities'].append(set()) + ret['shall_round'].append(False) + ret['set'].append(False) + + m = re.match('# OUT(_ROUND|_SET)? (.*)$', r) + if m: + return_out = eval(m.group(2)) + + if len(return_out) > 16 and re.match('[0-9A-F]+$', return_out): + return_out = self.convert_srs(return_out, self.stat['config']['db.srs']) + + if m.group(1) == '_ROUND': + ret['shall_round'][-1] = True + if m.group(1) == '_SET': + ret['set'][-1] = True + + ret['return_possibilities'][-1].add(return_out) + + return ret + def test(self, func, src): print('* Testing %s' % func) import re import pgmapcss.db as db - rows = src.split('\n') config = self.eval_functions[func] ret = ''' @@ -162,42 +200,10 @@ def test(self, func, src): ''' ret += self.print() - list_param_in = [] - list_return_possibilities = [] - list_shall_round = [] - list_set = [] - - for r in rows: - m = re.match('# IN (.*)$', r) - if m: - param_in = eval(m.group(1)) - - param_in = [ - p if len(p) < 16 or not re.match('[0-9A-F]+$', p) else self.convert_srs(p, self.stat['config']['db.srs']) - for p in param_in - ] - - list_param_in.append(param_in) - list_return_possibilities.append(set()) - list_shall_round.append(False) - list_set.append(False) - - m = re.match('# OUT(_ROUND|_SET)? 
(.*)$', r) - if m: - return_out = eval(m.group(2)) - - if len(return_out) > 16 and re.match('[0-9A-F]+$', return_out): - return_out = self.convert_srs(return_out, self.stat['config']['db.srs']) - - if m.group(1) == '_ROUND': - list_shall_round[-1] = True - if m.group(1) == '_SET': - list_set[-1] = True - - list_return_possibilities[-1].add(return_out) + tests = self.get_tests(src) - if len(list_param_in): - for i, param_in in enumerate(list_param_in): + if len(tests['param_in']): + for i, param_in in enumerate(tests['param_in']): ret += 'yield ' + config.compiler([ repr(p) for p in param_in ], '', {}) + '\n' ret += "$body$ language 'plpython3u' immutable;" @@ -207,23 +213,23 @@ def test(self, func, src): res = conn.prepare('select * from __eval_test__()') error = False for i, r in enumerate(res()): - print('IN', repr(list_param_in[i])) + print('IN', repr(tests['param_in'][i])) print('EXP', '\n '.join([ repr(r) - for r in list_return_possibilities[i] + for r in tests['return_possibilities'][i] ])) print('OUT', repr(r[0])) - if list_shall_round[i]: - if round(float(r[0]), 5) not in [ float(q) for q in list_return_possibilities[i] ]: + if tests['shall_round'][i]: + if round(float(r[0]), 5) not in [ float(q) for q in tests['return_possibilities'][i] ]: error = True print('ERROR return value wrong!') - elif list_set[i]: - if ';'.split(r[0]) not in [ ';'.split(q) for q in list_return_possibilities[i] ]: + elif tests['set'][i]: + if ';'.split(r[0]) not in [ ';'.split(q) for q in tests['return_possibilities'][i] ]: error = True print('ERROR return value wrong!') else: - if r[0] not in list_return_possibilities[i]: + if r[0] not in tests['return_possibilities'][i]: error = True print('ERROR return value wrong!') From 485d6ee823bc20cb06be3bc14d6b5d13c9c2b665 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 18:13:26 +0100 Subject: [PATCH 136/209] Eval: Tests: separate test function and result checking --- 
pgmapcss/eval/functions.py | 84 ++++++++++++++++++++++---------------- 1 file changed, 48 insertions(+), 36 deletions(-) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index ffc9431a..fe292e2b 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -177,9 +177,7 @@ def get_tests(self, src): return ret - def test(self, func, src): - print('* Testing %s' % func) - + def test_dbfun(self, func, tests, add_code): import re import pgmapcss.db as db config = self.eval_functions[func] @@ -190,17 +188,11 @@ def test(self, func, src): import re import math ''' +\ +add_code +\ resource_string(__name__, 'base.py').decode('utf-8') +\ -include_text() +\ -''' -global_data = {'icon-image': {'crossing.svg': (11, 7)}} -parameters = {'lang': 'en', 'foo': 'bar'} -current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } } } -render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} -''' - ret += self.print() +include_text() - tests = self.get_tests(src) + ret += self.print() if len(tests['param_in']): for i, param_in in enumerate(tests['param_in']): @@ -211,30 +203,50 @@ def test(self, func, src): conn.execute(ret) res = conn.prepare('select * from __eval_test__()') - error = False - for i, r in enumerate(res()): - print('IN', repr(tests['param_in'][i])) - print('EXP', 
'\n '.join([ - repr(r) - for r in tests['return_possibilities'][i] - ])) - print('OUT', repr(r[0])) - - if tests['shall_round'][i]: - if round(float(r[0]), 5) not in [ float(q) for q in tests['return_possibilities'][i] ]: - error = True - print('ERROR return value wrong!') - elif tests['set'][i]: - if ';'.split(r[0]) not in [ ';'.split(q) for q in tests['return_possibilities'][i] ]: - error = True - print('ERROR return value wrong!') - else: - if r[0] not in tests['return_possibilities'][i]: - error = True - print('ERROR return value wrong!') - - if error: - raise Exception("eval-test failed!") + return [ r[0] for r in res() ] + + def test(self, func, src): + print('* Testing %s' % func) + + add_code = \ +''' +global_data = {'icon-image': {'crossing.svg': (11, 7)}} +parameters = {'lang': 'en', 'foo': 'bar'} +current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } } } +render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} +''' + + tests = self.get_tests(src) + + error = False + results = self.test_dbfun(func, tests, add_code) + + if results is None: + return + + for i, res in enumerate(results): + print('IN', repr(tests['param_in'][i])) + print('EXP', '\n '.join([ + repr(r) + for r in tests['return_possibilities'][i] + ])) + print('OUT', repr(res)) + + if tests['shall_round'][i]: + if 
round(float(res), 5) not in [ float(q) for q in tests['return_possibilities'][i] ]: + error = True + print('ERROR return value wrong!') + elif tests['set'][i]: + if ';'.split(res) not in [ ';'.split(q) for q in tests['return_possibilities'][i] ]: + error = True + print('ERROR return value wrong!') + else: + if res not in tests['return_possibilities'][i]: + error = True + print('ERROR return value wrong!') + + if error: + raise Exception("eval-test failed!") def test_all(self): if not self.eval_functions: From c285a8c3774f2c271b6dc7ef049a59571d888093 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 19:30:16 +0100 Subject: [PATCH 137/209] Eval: optionally test eval func in standalone mode --- pgmapcss/eval/functions.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index fe292e2b..37202f1a 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -177,6 +177,19 @@ def get_tests(self, src): return ret + def test_standalone(self, func, tests, add_code): + if len(tests) == 0: + return None + + config = self.eval_functions[func] + + code = '[ ' + ', '.join([ + config.compiler([ repr(p) for p in t ], '', {}) + for t in tests['param_in'] + ]) + ' ]' + + return self.eval(code, additional_code=add_code) + def test_dbfun(self, func, tests, add_code): import re import pgmapcss.db as db @@ -220,6 +233,7 @@ def test(self, func, src): error = False results = self.test_dbfun(func, tests, add_code) + #results = self.test_standalone(func, tests, add_code) if results is None: return From fdc37cf3e8be5e78507845c4acba7e5f4179d8b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 19:33:12 +0100 Subject: [PATCH 138/209] Eval: test both dbfun and standalone mode --- pgmapcss/eval/functions.py | 38 +++++++++++++++++++++----------------- 1 file changed, 21 insertions(+), 17 deletions(-) diff --git 
a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 37202f1a..004154d9 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -218,26 +218,11 @@ def test_dbfun(self, func, tests, add_code): res = conn.prepare('select * from __eval_test__()') return [ r[0] for r in res() ] - def test(self, func, src): - print('* Testing %s' % func) - - add_code = \ -''' -global_data = {'icon-image': {'crossing.svg': (11, 7)}} -parameters = {'lang': 'en', 'foo': 'bar'} -current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } } } -render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} -''' - - tests = self.get_tests(src) - - error = False - results = self.test_dbfun(func, tests, add_code) - #results = self.test_standalone(func, tests, add_code) - + def analyze_results(self, tests, results): if results is None: return + error = False for i, res in enumerate(results): print('IN', repr(tests['param_in'][i])) print('EXP', '\n '.join([ @@ -262,6 +247,25 @@ def test(self, func, src): if error: raise Exception("eval-test failed!") + def test(self, func, src): + add_code = \ +''' +global_data = {'icon-image': {'crossing.svg': (11, 7)}} +parameters = {'lang': 'en', 'foo': 'bar'} +current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 
'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } } } +render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} +''' + + tests = self.get_tests(src) + + print('* Testing %s (DB Function)' % func) + results = self.test_dbfun(func, tests, add_code) + self.analyze_results(tests, results) + + print('* Testing %s (Standalone)' % func) + results = self.test_standalone(func, tests, add_code) + self.analyze_results(tests, results) + def test_all(self): if not self.eval_functions: self.resolve_config() From b6fa1fc292902f2a7c4ea3c6fd6297356a0f3db9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 19:34:56 +0100 Subject: [PATCH 139/209] Fake PLPY / DB/*: fix geometry parameters -> no need in db functions - fake_plpy no correctly converts geometry parameters - DB/* functions had '::geometry' casts in sql queries -> remove --- pgmapcss/db/osm2pgsql/db_functions.py | 6 +++--- pgmapcss/db/osmosis/db_functions.py | 8 ++++---- pgmapcss/db/overpass/db_functions.py | 6 +++--- pgmapcss/misc/fake_plpy.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pgmapcss/db/osm2pgsql/db_functions.py b/pgmapcss/db/osm2pgsql/db_functions.py index ef76fd37..5e3bcee2 100644 --- a/pgmapcss/db/osm2pgsql/db_functions.py +++ b/pgmapcss/db/osm2pgsql/db_functions.py @@ -10,8 +10,8 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, 
add_param_type=[], } if _bbox is not None: - bbox = 'way && $1 and ST_Intersects(way, $1::geometry) and' - replacements['parent_bbox'] = 'way && $1 and ST_Intersects(way, $1::geometry) and' + bbox = 'way && $1 and ST_Intersects(way, $1) and' + replacements['parent_bbox'] = 'way && $1 and ST_Intersects(way, $1) and' if len(add_columns): add_columns_qry = ', ' + ', '.join([ @@ -488,7 +488,7 @@ def objects_near(objects, other_selects, self_selects, options): other_selects, options, { # add_columns - '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(way, {unit.srs}))' + '__distance': 'ST_Distance(ST_Transform($2, {unit.srs}), ST_Transform(way, {unit.srs}))' }, [ 'geometry' ], [ geom ] diff --git a/pgmapcss/db/osmosis/db_functions.py b/pgmapcss/db/osmosis/db_functions.py index 307374d4..4e37f3f4 100644 --- a/pgmapcss/db/osmosis/db_functions.py +++ b/pgmapcss/db/osmosis/db_functions.py @@ -28,7 +28,7 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], if len(w): bbox = '' if _bbox is not None: - bbox = 'geom && $1 and ST_Intersects(geom, $1::geometry) and' + bbox = 'geom && $1 and ST_Intersects(geom, $1) and' qry = ''' select 'n' || cast(id as text) as id, version, user_id, (select name from users where id=user_id) as user, tstamp, changeset_id, @@ -61,7 +61,7 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], if len(w): bbox = '' if _bbox is not None: - bbox = 'linestring && $1 and (ST_NPoints(linestring) = 1 or ST_Intersects(linestring, $1::geometry)) and' + bbox = 'linestring && $1 and (ST_NPoints(linestring) = 1 or ST_Intersects(linestring, $1)) and' qry = ''' select * {add_columns} from ( @@ -117,7 +117,7 @@ def objects_bbox(_bbox, db_selects, options, add_columns={}, add_param_type=[], if len(w): bbox = '' if _bbox is not None: - bbox = 'geom && $1 and ST_Intersects(geom, $1::geometry) and' + bbox = 'geom && $1 and ST_Intersects(geom, $1) and' qry = ''' select * 
{add_columns} from ( @@ -349,7 +349,7 @@ def objects_near(objects, other_selects, self_selects, options): other_selects, {}, { # add_columns - '__distance': 'ST_Distance(ST_Transform($2::geometry, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' + '__distance': 'ST_Distance(ST_Transform($2, {unit.srs}), ST_Transform(__geo__, {unit.srs}))' }, [ 'geometry' ], [ geom ] diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 6731cdd6..7e395322 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -246,7 +246,7 @@ def get_bbox(_bbox=None): if _bbox is None: _bbox = render_context['bbox'] - plan = plpy.prepare("select ST_YMin($1::geometry) || ',' || ST_XMIN($1::geometry) || ',' || ST_YMAX($1::geometry) || ',' || ST_XMAX($1::geometry) as bbox_string", [ 'geometry' ]) + plan = plpy.prepare("select ST_YMin($1) || ',' || ST_XMIN($1) || ',' || ST_YMAX($1) || ',' || ST_XMAX($1) as bbox_string", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) return res[0]['bbox_string'] @@ -436,7 +436,7 @@ def objects_bbox(_bbox, db_selects, options): w.append(db_selects[t]) if len(w): - plan = plpy.prepare("select ST_Y(ST_Centroid($1::geometry)) || ',' || ST_X(ST_Centroid($1::geometry)) as geom", [ 'geometry' ]) + plan = plpy.prepare("select ST_Y(ST_Centroid($1)) || ',' || ST_X(ST_Centroid($1)) as geom", [ 'geometry' ]) res = plpy.execute(plan, [ _bbox ]) q1 = ');('.join([ w1['query'] for w1 in w ]).replace('__TYPE__', 'relation(pivot.a)') @@ -635,7 +635,7 @@ def objects_near(objects, other_selects, self_selects, options): except: cache = PGCache(cache_id, read_geo=True) - plan = plpy.prepare('select ST_Transform(ST_Envelope(ST_Buffer(ST_Transform(ST_Envelope($1::geometry), {unit.srs}), $2)), {db.srs}) as r', ['geometry', 'float']) + plan = plpy.prepare('select ST_Transform(ST_Envelope(ST_Buffer(ST_Transform(ST_Envelope($1), {unit.srs}), $2)), {db.srs}) as r', ['geometry', 'float']) res = plpy.execute(plan, [ 
render_context['bbox'], max_distance ]) bbox = res[0]['r'] diff --git a/pgmapcss/misc/fake_plpy.py b/pgmapcss/misc/fake_plpy.py index ac0efa7c..ce94eff0 100644 --- a/pgmapcss/misc/fake_plpy.py +++ b/pgmapcss/misc/fake_plpy.py @@ -25,9 +25,9 @@ def prepare(self, query, param_type): for (i, t) in enumerate(param_type): i1 = i + 1 if t == 'geometry': - t = 'text' + t = 'text::geometry' elif t == 'geometry[]': - t = 'text[]' + t = 'text[]::geometry[]' query = query.replace('$' + str(i1), '$' + str(i1) + '::' + t) plan = self.conn.prepare(query) From be5c518e0e14bda8305d5f3e2010a2092b71e065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 19:49:53 +0100 Subject: [PATCH 140/209] Eval: opt. pass list of eval-tests to be executed as argument option --- pgmapcss/eval/functions.py | 11 +++++++++-- pgmapcss/main.py | 14 +++++++++----- 2 files changed, 18 insertions(+), 7 deletions(-) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 004154d9..00e45630 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -266,8 +266,15 @@ def test(self, func, src): results = self.test_standalone(func, tests, add_code) self.analyze_results(tests, results) - def test_all(self): + def test_all(self, tests=None): if not self.eval_functions: self.resolve_config() - [ self.test(func, src) for func, src in self.eval_functions_source.items() ] + if tests is None: + tests = self.eval_functions_source.keys() + + for func in tests: + if not func in self.eval_functions_source: + print('* No such function "{}"'.format(func)) + else: + self.test(func, self.eval_functions_source[func]) diff --git a/pgmapcss/main.py b/pgmapcss/main.py index 59063bf9..808bb394 100755 --- a/pgmapcss/main.py +++ b/pgmapcss/main.py @@ -45,9 +45,9 @@ required=True, help='mapcss/renderer base style for the correct renderer and renderer version, e.g. 
"mapnik-2.0"') -parser.add_argument('--eval-tests', dest='eval_tests', action='store_const', - const=True, default=False, - help='Test all eval functions.') +parser.add_argument('--eval-tests', dest='eval_tests', + default=False, nargs='*', + help='Test eval functions. Pass list of functions to be tested as parameters. Default: all functions.') parser.add_argument('-r', '--database-update', dest='database_update', default='auto', @@ -166,8 +166,12 @@ def main(): else: print('* Current DB version: {version}'.format(**db_version)) - if args.eval_tests: - pgmapcss.eval.functions(stat).test_all() + if args.eval_tests is not False: + if len(args.eval_tests): + pgmapcss.eval.functions(stat).test_all(args.eval_tests) + else: + pgmapcss.eval.functions(stat).test_all() + print('* All tests completed successfully.') try: From b779ca40ec7ee251dce458eeebc27280ffa21b2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 20:09:09 +0100 Subject: [PATCH 141/209] Eval::tr: bugfix eval tests, add condition-keys to global data --- pgmapcss/eval/eval_tr.py | 2 ++ pgmapcss/eval/functions.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pgmapcss/eval/eval_tr.py b/pgmapcss/eval/eval_tr.py index 599a2344..c62b63a6 100644 --- a/pgmapcss/eval/eval_tr.py +++ b/pgmapcss/eval/eval_tr.py @@ -33,5 +33,7 @@ def eval_tr(param): # OUT 'barfoo' # IN ['{1}{}', 'foo', 'bar'] # OUT 'barfoo' +# IN ['{} {} {}', '{0.key}', '{0.value}', '{0.tag}'] +# OUT 'amenity restaurant amenity=restaurant' # IN [] # OUT '' diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 00e45630..61ae6c9b 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -252,7 +252,7 @@ def test(self, func, src): ''' global_data = {'icon-image': {'crossing.svg': (11, 7)}} parameters = {'lang': 'en', 'foo': 'bar'} -current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 
'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } } } +current = { 'object': { 'id': 'n123', 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }}, 'pseudo_element': 'default', 'pseudo_elements': ['default', 'test'], 'tags': { 'amenity': 'restaurant', 'name': 'Foobar', 'name:en': 'English Foobar', 'name:de': 'German Foobar', 'cuisine': 'pizza;kebab;noodles' }, 'properties': { 'default': { 'width': '2', 'color': '#ff0000' }, 'test': { 'fill-color': '#00ff00', 'icon-image': 'crossing.svg', 'text': 'Test' } }, 'condition-keys': [ 'amenity' ] } render_context = {'bbox': '010300002031BF0D000100000005000000DBF1839BB5DC3B41E708549B2B705741DBF1839BB5DC3B41118E9739B171574182069214CCE23B41118E9739B171574182069214CCE23B41E708549B2B705741DBF1839BB5DC3B41E708549B2B705741', 'scale_denominator': 8536.77} ''' From 1eea3137756e944b9dbf4a7cb080268f373663e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 20:11:15 +0100 Subject: [PATCH 142/209] Eval::area: bugfix eval test, Postgis 2.0 returns slightly different values --- pgmapcss/eval/eval_area.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pgmapcss/eval/eval_area.py b/pgmapcss/eval/eval_area.py index 408cd469..feeba9c3 100644 --- a/pgmapcss/eval/eval_area.py +++ b/pgmapcss/eval/eval_area.py @@ -27,3 +27,4 @@ def eval_area(param): # TESTS # IN ['010300002031BF0D000100000004000000AE47E1BA1F52354185EB51B83EAE5641C3F528DC1F5235413D0AD7D33FAE5641295C8F4224523541000000B03EAE5641AE47E1BA1F52354185EB51B83EAE5641'] # OUT_ROUND 
'1.75767' +# OUT_ROUND '1.75766' From f9133acb706c0319b5fe35868380547bd2ab9b16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 3 Jan 2015 14:14:04 +0100 Subject: [PATCH 143/209] Eval: bugfix, import sys, so that plpy.warning works in standalone mode --- pgmapcss/eval/functions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 61ae6c9b..8ad81fba 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -75,6 +75,7 @@ def eval(self, statement, additional_code=''): ' import re\n' +\ ' import math\n' +\ ' import postgresql\n' +\ + ' import sys\n' +\ ' global_data = ' + repr(self.stat['global_data']) + '\n' +\ ' ' + resource_string(__name__, 'base.py').decode('utf-8').replace('\n', '\n ') +\ '\n' +\ From 83d1aa901890427086c78cf8e1b0ca8cdab06b0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 3 Jan 2015 14:30:22 +0100 Subject: [PATCH 144/209] Eval::debug: prepend object id (if available) --- pgmapcss/eval/base.py | 7 ++++++- pgmapcss/eval/eval_debug.py | 9 +++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pgmapcss/eval/base.py b/pgmapcss/eval/base.py index f8779596..e0768f1b 100644 --- a/pgmapcss/eval/base.py +++ b/pgmapcss/eval/base.py @@ -18,7 +18,12 @@ def float_to_str(v, default=None): r = r[:-2] return r def debug(text): - plpy.warning(text) + try: + prefix = current['object']['id'] + ' | ' + except: + prefix = '' + + plpy.warning(prefix + text) class config_base: math_level = None diff --git a/pgmapcss/eval/eval_debug.py b/pgmapcss/eval/eval_debug.py index e2facca3..4dc147d3 100644 --- a/pgmapcss/eval/eval_debug.py +++ b/pgmapcss/eval/eval_debug.py @@ -6,9 +6,14 @@ def possible_values(self, param_values, prop, stat): return (param_values[0], 0) def eval_debug(param): + prefix = '' + if len(param) == 1: - plpy.warning(param[0]) + debug(param[0]) else: - plpy.warning(param) + debug(param) 
return param[0] + +# IN ['foo'] +# OUT 'foo' From 1807f933185bb2002496d726f8af1a8d27efb7bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 3 Jan 2015 14:30:46 +0100 Subject: [PATCH 145/209] Eval-functions: use debug() for printing warnings --- pgmapcss/eval/eval_area.py | 2 +- pgmapcss/eval/eval_azimuth.py | 2 +- pgmapcss/eval/eval_buffer.py | 2 +- pgmapcss/eval/eval_centroid.py | 2 +- pgmapcss/eval/eval_convex_hull.py | 2 +- pgmapcss/eval/eval_intersection.py | 2 +- pgmapcss/eval/eval_is_closed.py | 2 +- pgmapcss/eval/eval_is_left_hand_traffic.py | 2 +- pgmapcss/eval/eval_is_right_hand_traffic.py | 2 +- pgmapcss/eval/eval_line.py | 2 +- pgmapcss/eval/eval_line_interpolate_point.py | 2 +- pgmapcss/eval/eval_line_length.py | 2 +- pgmapcss/eval/eval_line_locate_azimuth.py | 2 +- pgmapcss/eval/eval_line_locate_point.py | 2 +- pgmapcss/eval/eval_line_merge.py | 2 +- pgmapcss/eval/eval_line_part.py | 2 +- pgmapcss/eval/eval_rotate.py | 2 +- pgmapcss/eval/eval_to_dashes.py | 2 +- pgmapcss/eval/eval_translate.py | 2 +- 19 files changed, 19 insertions(+), 19 deletions(-) diff --git a/pgmapcss/eval/eval_area.py b/pgmapcss/eval/eval_area.py index feeba9c3..5029fe07 100644 --- a/pgmapcss/eval/eval_area.py +++ b/pgmapcss/eval/eval_area.py @@ -9,7 +9,7 @@ def eval_area(param): plan = plpy.prepare('select ST_Area(ST_Transform($1, 900913)) as area', ['geometry']) res = plpy.execute(plan, param) except Exception as err: - plpy.warning('{} | Eval::area({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::area({}): Exception: {}'.format(param, err)) return '' zoom = eval_metric(['1u']) diff --git a/pgmapcss/eval/eval_azimuth.py b/pgmapcss/eval/eval_azimuth.py index 879ac978..7d0b788a 100644 --- a/pgmapcss/eval/eval_azimuth.py +++ b/pgmapcss/eval/eval_azimuth.py @@ -12,7 +12,7 @@ def eval_azimuth(param): plan = plpy.prepare('select ST_Azimuth($1, $2) as r', ['geometry', 'geometry']) res = plpy.execute(plan, param) except 
Exception as err: - plpy.warning('{} | Eval::azimuth({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::azimuth({}): Exception: {}'.format(param, err)) return '' if res[0]['r'] is None: diff --git a/pgmapcss/eval/eval_buffer.py b/pgmapcss/eval/eval_buffer.py index 00c22d1f..d193c8b0 100644 --- a/pgmapcss/eval/eval_buffer.py +++ b/pgmapcss/eval/eval_buffer.py @@ -14,7 +14,7 @@ def eval_buffer(param): plan = plpy.prepare('select ST_Transform(ST_Buffer(ST_Transform($1, 900913), $2), {db.srs}) as r', ['geometry', 'float']) res = plpy.execute(plan, [ param[0], float(radius) ]) except Exception as err: - plpy.warning('{} | Eval::buffer({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::buffer({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_centroid.py b/pgmapcss/eval/eval_centroid.py index a3c73b5b..29dfeb58 100644 --- a/pgmapcss/eval/eval_centroid.py +++ b/pgmapcss/eval/eval_centroid.py @@ -9,7 +9,7 @@ def eval_centroid(param): plan = plpy.prepare('select ST_Centroid($1) as r', ['geometry']) res = plpy.execute(plan, param) except Exception as err: - plpy.warning('{} | Eval::centroid({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::centroid({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_convex_hull.py b/pgmapcss/eval/eval_convex_hull.py index 45bd91a9..465eb980 100644 --- a/pgmapcss/eval/eval_convex_hull.py +++ b/pgmapcss/eval/eval_convex_hull.py @@ -9,7 +9,7 @@ def eval_convex_hull(param): plan = plpy.prepare('select ST_ConvexHull($1) as r', ['geometry']) res = plpy.execute(plan, param) except Exception as err: - plpy.warning('{} | Eval::convex_hull({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::convex_hull({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_intersection.py 
b/pgmapcss/eval/eval_intersection.py index b9946209..05694613 100644 --- a/pgmapcss/eval/eval_intersection.py +++ b/pgmapcss/eval/eval_intersection.py @@ -9,7 +9,7 @@ def eval_intersection(param): plan = plpy.prepare('select ST_Intersection($1, $2) as geo', ['geometry', 'geometry']) res = plpy.execute(plan, param) except Exception as err: - plpy.warning('{} | Eval::intersection({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::intersection({}): Exception: {}'.format(param, err)) return '' return res[0]['geo'] diff --git a/pgmapcss/eval/eval_is_closed.py b/pgmapcss/eval/eval_is_closed.py index 533b8388..1c8fbf63 100644 --- a/pgmapcss/eval/eval_is_closed.py +++ b/pgmapcss/eval/eval_is_closed.py @@ -15,7 +15,7 @@ def eval_is_closed(param): plan = plpy.prepare('select ST_GeometryType($1) in (\'ST_Polygon\', \'ST_MultiPolygon\') or (ST_GeometryType($1) in (\'ST_Line\') and ST_Line_Interpolate_Point($1, 0.0) = ST_Line_Interpolate_Point($1, 1.0)) as r', ['geometry']) res = plpy.execute(plan, [ geo ]) except Exception as err: - plpy.warning('{} | Eval::is_closed({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::is_closed({}): Exception: {}'.format(param, err)) return '' return 'true' if res[0]['r'] else 'false' diff --git a/pgmapcss/eval/eval_is_left_hand_traffic.py b/pgmapcss/eval/eval_is_left_hand_traffic.py index 8ca96fe5..c5b66329 100644 --- a/pgmapcss/eval/eval_is_left_hand_traffic.py +++ b/pgmapcss/eval/eval_is_left_hand_traffic.py @@ -30,7 +30,7 @@ def eval_is_left_hand_traffic(param): plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) res = plpy.execute(plan, [ geo ]) except Exception as err: - plpy.warning('{} | Eval::is_left_hand_traffic({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::is_left_hand_traffic({}): Exception: {}'.format(param, err)) return '' diff --git 
a/pgmapcss/eval/eval_is_right_hand_traffic.py b/pgmapcss/eval/eval_is_right_hand_traffic.py index c799e745..4e4ec310 100644 --- a/pgmapcss/eval/eval_is_right_hand_traffic.py +++ b/pgmapcss/eval/eval_is_right_hand_traffic.py @@ -27,7 +27,7 @@ def eval_is_right_hand_traffic(param): plan = plpy.prepare('select ST_Within($1, geo) as r from _pgmapcss_left_right_hand_traffic where ST_Intersects($1, geo)', ['geometry']) res = plpy.execute(plan, [ geo ]) except Exception as err: - plpy.warning('{} | Eval::is_right_hand_traffic({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::is_right_hand_traffic({}): Exception: {}'.format(param, err)) return '' diff --git a/pgmapcss/eval/eval_line.py b/pgmapcss/eval/eval_line.py index 7d729f51..9e5992ed 100644 --- a/pgmapcss/eval/eval_line.py +++ b/pgmapcss/eval/eval_line.py @@ -18,7 +18,7 @@ def eval_line(param): try: res = plpy.execute(plan, [param]) except Exception as err: - plpy.warning('{} | Eval::line({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_interpolate_point.py b/pgmapcss/eval/eval_line_interpolate_point.py index 935514a1..294e0da7 100644 --- a/pgmapcss/eval/eval_line_interpolate_point.py +++ b/pgmapcss/eval/eval_line_interpolate_point.py @@ -25,7 +25,7 @@ def eval_line_interpolate_point(param): plan = plpy.prepare('select ST_Line_Interpolate_Point($1, $2) as r', ['geometry', 'float']) res = plpy.execute(plan, [ param[0], float(f) ]) except Exception as err: - plpy.warning('{} | Eval::line_interpolate_point({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_interpolate_point({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_length.py b/pgmapcss/eval/eval_line_length.py index 655a3cb6..f8a76304 100644 --- a/pgmapcss/eval/eval_line_length.py +++ 
b/pgmapcss/eval/eval_line_length.py @@ -13,7 +13,7 @@ def eval_line_length(param): res = plpy.execute(plan, [param[0]]) l = res[0]['r'] except Exception as err: - plpy.warning('{} | Eval::line_length({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_length({}): Exception: {}'.format(param, err)) return '' return eval_metric([ repr(l) + 'u' ]) diff --git a/pgmapcss/eval/eval_line_locate_azimuth.py b/pgmapcss/eval/eval_line_locate_azimuth.py index 7c98c716..5df33159 100644 --- a/pgmapcss/eval/eval_line_locate_azimuth.py +++ b/pgmapcss/eval/eval_line_locate_azimuth.py @@ -39,7 +39,7 @@ def eval_line_locate_azimuth(param): plan = plpy.prepare('select degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $2), ST_Line_Interpolate_Point($1, $3))) as r1, degrees(ST_Azimuth(ST_Line_Interpolate_Point($1, $3), ST_Line_Interpolate_Point($1, $4))) as r2', ['geometry', 'float', 'float', 'float']) res = plpy.execute(plan, [ param[0], f1 / l, f / l, f2 / l ]) except Exception as err: - plpy.warning('{} | Eval::line_locate_azimuth({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_locate_azimuth({}): Exception: {}'.format(param, err)) return '' r1 = res[0]['r1'] diff --git a/pgmapcss/eval/eval_line_locate_point.py b/pgmapcss/eval/eval_line_locate_point.py index fc183164..3d4b0795 100644 --- a/pgmapcss/eval/eval_line_locate_point.py +++ b/pgmapcss/eval/eval_line_locate_point.py @@ -12,7 +12,7 @@ def eval_line_locate_point(param): plan = plpy.prepare('select ST_Line_Locate_Point($1, $2) * ST_Length(ST_Transform($1, {unit.srs})) as r', ['geometry', 'geometry']) res = plpy.execute(plan, [ param[0], param[1] ]) except Exception as err: - plpy.warning('{} | Eval::line_locate_point({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_locate_point({}): Exception: {}'.format(param, err)) return '' return eval_metric([ repr(res[0]['r']) + 'u' ]) diff --git a/pgmapcss/eval/eval_line_merge.py 
b/pgmapcss/eval/eval_line_merge.py index 7fc21a29..092a01b2 100644 --- a/pgmapcss/eval/eval_line_merge.py +++ b/pgmapcss/eval/eval_line_merge.py @@ -17,7 +17,7 @@ def eval_line_merge(param): plan = plpy.prepare('select ST_LineMerge(ST_Collect($1)) as r', ['geometry[]']) res = plpy.execute(plan, [param]) except Exception as err: - plpy.warning('{} | Eval::line_merge({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_merge({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_line_part.py b/pgmapcss/eval/eval_line_part.py index 8244d4e9..626e9565 100644 --- a/pgmapcss/eval/eval_line_part.py +++ b/pgmapcss/eval/eval_line_part.py @@ -53,7 +53,7 @@ def eval_line_part(param): plan = plpy.prepare('select ST_Transform(ST_Line_Substring(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float' ]) res = plpy.execute(plan, [ param[0], pos0 / length, pos1 / length ]) except Exception as err: - plpy.warning('{} | Eval::line_part({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::line_part({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_rotate.py b/pgmapcss/eval/eval_rotate.py index 0d6fe3a3..0c42f5c0 100644 --- a/pgmapcss/eval/eval_rotate.py +++ b/pgmapcss/eval/eval_rotate.py @@ -23,7 +23,7 @@ def eval_rotate(param): plan = plpy.prepare('select ST_Translate(ST_Rotate(ST_Translate($1, -ST_X($3), -ST_Y($3)), $2), ST_X($3), ST_Y($3)) as r', ['geometry', 'float', 'geometry']) res = plpy.execute(plan, [ param[0], angle, center ]) except Exception as err: - plpy.warning('{} | Eval::rotate({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::rotate({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] diff --git a/pgmapcss/eval/eval_to_dashes.py b/pgmapcss/eval/eval_to_dashes.py index f1d46d6c..6608dac4 100644 --- a/pgmapcss/eval/eval_to_dashes.py +++ 
b/pgmapcss/eval/eval_to_dashes.py @@ -18,7 +18,7 @@ def eval_to_dashes(params): for t in ret.split(',') if not re.match('[0-9]+$', t) ]): - # plpy.warning("invalid dashes value '{}'".format(params[0])) + # debug("invalid dashes value '{}'".format(params[0])) return 'none' return ret diff --git a/pgmapcss/eval/eval_translate.py b/pgmapcss/eval/eval_translate.py index f4755c7f..ac22e00a 100644 --- a/pgmapcss/eval/eval_translate.py +++ b/pgmapcss/eval/eval_translate.py @@ -14,7 +14,7 @@ def eval_translate(param): plan = plpy.prepare('select ST_Transform(ST_Translate(ST_Transform($1, {unit.srs}), $2, $3), {db.srs}) as r', ['geometry', 'float', 'float']) res = plpy.execute(plan, [param[0], x, y ]) except Exception as err: - plpy.warning('{} | Eval::translate({}): Exception: {}'.format(current['object']['id'], param, err)) + debug('Eval::translate({}): Exception: {}'.format(param, err)) return '' return res[0]['r'] From 2a250e8d4e192ea6607c440e5d711f2edf6c540b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sat, 3 Jan 2015 15:00:04 +0100 Subject: [PATCH 146/209] Fake PLPY: improve debug messages (use str(), not repr()) --- pgmapcss/misc/fake_plpy.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pgmapcss/misc/fake_plpy.py b/pgmapcss/misc/fake_plpy.py index ce94eff0..fe2a23c1 100644 --- a/pgmapcss/misc/fake_plpy.py +++ b/pgmapcss/misc/fake_plpy.py @@ -16,10 +16,10 @@ def __init__(self, args=None): # END debug.explain_queries def notice(self, *arg): - sys.stderr.write('NOTICE: ' + ' '.join([repr(a) for a in arg]) + '\n') + sys.stderr.write('NOTICE: ' + '\n '.join([str(a) for a in arg]) + '\n') def warning(self, *arg): - sys.stderr.write('WARNING: ' + ' '.join([repr(a) for a in arg]) + '\n') + sys.stderr.write('WARNING: ' + '\n '.join([str(a) for a in arg]) + '\n') def prepare(self, query, param_type): for (i, t) in enumerate(param_type): From b1bec413c4a6072ed51700f82311d5d43fc9d975 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 4 Jan 2015 07:50:01 +0100 Subject: [PATCH 147/209] Eval: optimize calling eval, initialize functions once --- pgmapcss/eval/functions.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/pgmapcss/eval/functions.py b/pgmapcss/eval/functions.py index 8ad81fba..39233001 100644 --- a/pgmapcss/eval/functions.py +++ b/pgmapcss/eval/functions.py @@ -71,21 +71,18 @@ def eval(self, statement, additional_code=''): } content = \ + 'import re\n' +\ + 'import math\n' +\ + 'import postgresql\n' +\ + 'import sys\n' +\ + 'global_data = ' + repr(self.stat['global_data']) + '\n' +\ + resource_string(__name__, 'base.py').decode('utf-8') + '\n' +\ + include_text() + '\n' +\ + pgmapcss.misc.strip_includes(resource_stream(pgmapcss.misc.__name__, 'fake_plpy.py'), self.stat).format(**replacement) + '\n' +\ + additional_code + '\n' +\ + self.print() + '\n' +\ + 'plpy = fake_plpy()\n' +\ 'def _eval(statement):\n' +\ - ' import re\n' +\ - ' import math\n' +\ - ' import postgresql\n' +\ - ' import sys\n' +\ - ' global_data = ' + repr(self.stat['global_data']) + '\n' +\ - ' ' + resource_string(__name__, 'base.py').decode('utf-8').replace('\n', '\n ') +\ - '\n' +\ - ' ' + include_text().replace('\n', '\n ') + '\n' +\ - ' ' + pgmapcss.misc.strip_includes(resource_stream(pgmapcss.misc.__name__, 'fake_plpy.py'), self.stat).format(**replacement).replace('\n', '\n ') + '\n' +\ - '\n' +\ - additional_code.replace('\n', '\n ') +\ - '\n' +\ - self.print(indent=' ') + '\n' +\ - ' plpy = fake_plpy()\n' +\ ' return eval(statement)' eval_code = compile(content, '', 'exec') From ae15bea84f83f9c663649bf324942eed5950191a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Tue, 30 Dec 2014 20:55:33 +0100 Subject: [PATCH 148/209] Update CHANGELOG --- CHANGELOG.creole | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.creole b/CHANGELOG.creole index 184156d5..2936f30d 
100644 --- a/CHANGELOG.creole +++ b/CHANGELOG.creole @@ -1,3 +1,12 @@ +0.10.0 release 2014-01-?? +* New database backend 'overpass', which is also the new default database backend, including multipolygon support, relationship queries, ... +* DB/osm2pgsql can optimize some queries with relationships. (e.g. relation[type=route] member) +* Re-structured and simplified code for database backends, see [[doc/database-API.md|doc/database-API.md]] for details. +* new internal module PGCache which can be used to cache data (with different backends, depending on writeability of the database) +* improved installation instructions ([[doc/Install_with_Mapnik_3.0.md|doc/Install_with_Mapnik_3.0.md]] or [[doc/Install_with_Mapnik_2.2.md|doc/Install_with_Mapnik_2.2.md]]) +* improved eval tests (also using standalone mode; choose which tests to run) +* geometric eval functions now catch and print errors + 0.9.3 release 2015-01-03 * bugfixes From 5b9fcbfef0f9442fab86788f6d343db7c2d5a308 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 4 Jan 2015 11:37:20 +0100 Subject: [PATCH 149/209] Bump to version 0.10.0 --- CHANGELOG.creole | 2 +- README.md | 2 +- pgmapcss/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.creole b/CHANGELOG.creole index 2936f30d..89640129 100644 --- a/CHANGELOG.creole +++ b/CHANGELOG.creole @@ -1,4 +1,4 @@ -0.10.0 release 2014-01-?? +0.10.0 release 2014-01-04 * New database backend 'overpass', which is also the new default database backend, including multipolygon support, relationship queries, ... * DB/osm2pgsql can optimize some queries with relationships. (e.g. relation[type=route] member) * Re-structured and simplified code for database backends, see [[doc/database-API.md|doc/database-API.md]] for details. 
diff --git a/README.md b/README.md index 960f93f4..6cf61dbd 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Alternatively there's a standalone mode, where the MapCSS style is compiled into Since version 0.10 you don't even need a local database import (a local database is needed nonetheless), as the new default data source is Overpass API. Alternatively, osm2pgsql and osmosis pgsnapshot are supported. See [doc/database.md](doc/database.md) for details. -Stable version: [0.9.3](https://github.com/plepe/pgmapcss), development version: [0.10-dev](https://github.com/plepe/pgmapcss/tree/branch-0.10), see [open issues](https://github.com/plepe/pgmapcss/milestones/Version%200.10) +Stable version: [0.10.0](https://github.com/plepe/pgmapcss) ([CHANGELOG](https://github.com/plepe/pgmapcss/blob/master/CHANGELOG.creole)), development version: [0.11-dev](https://github.com/plepe/pgmapcss/tree/branch-0.11), see [open issues](https://github.com/plepe/pgmapcss/milestones/Version%200.11) Features -------- diff --git a/pgmapcss/version.py b/pgmapcss/version.py index 8c776250..c09ad2ef 100644 --- a/pgmapcss/version.py +++ b/pgmapcss/version.py @@ -1,7 +1,7 @@ __all__ = 'VERSION', 'VERSION_INFO' #: (:class:`tuple`) The version tuple e.g. ``(0, 9, 2)``. -VERSION_INFO = (0, 10, 'dev') +VERSION_INFO = (0, 10, 0) #: (:class:`basestring`) The version string e.g. ``'0.9.2'``. 
if len(VERSION_INFO) == 4: From 4f171b83a618b6ba7880d91872fd77f83d181ac9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Mon, 5 Jan 2015 11:52:12 +0100 Subject: [PATCH 150/209] DB/Overpass: Multipolygons: when merging lines convert them to polygons - fixes #103 --- pgmapcss/db/overpass/db_functions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/db/overpass/db_functions.py b/pgmapcss/db/overpass/db_functions.py index 7e395322..cf3d5e3f 100644 --- a/pgmapcss/db/overpass/db_functions.py +++ b/pgmapcss/db/overpass/db_functions.py @@ -117,7 +117,7 @@ def multipolygon_geom(r): geom_plan_collect = plpy.prepare('select ST_Collect($1) as geom', [ 'geometry[]' ]) geom_plan_substract = plpy.prepare('select ST_Difference($1, $2) as geom', [ 'geometry', 'geometry' ]) # merge all lines together, return all closed rings (but remove unconnected lines) - geom_plan_linemerge = plpy.prepare('select geom from (select (ST_Dump((ST_LineMerge(ST_Collect(geom))))).geom as geom from (select ST_GeomFromText(unnest($1), 4326) geom) t offset 0) t where ST_NPoints(geom) > 3 and ST_IsClosed(geom)', [ 'text[]' ]) + geom_plan_linemerge = plpy.prepare('select ST_MakePolygon(geom) geom from (select (ST_Dump((ST_LineMerge(ST_Collect(geom))))).geom as geom from (select ST_GeomFromText(unnest($1), 4326) geom) t offset 0) t where ST_NPoints(geom) > 3 and ST_IsClosed(geom)', [ 'text[]' ]) t = 'MULTIPOLYGON' From 6984d65fb53319cf0ffd8fd8a9685ceedb3f634c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 7 Jan 2015 15:17:50 +0100 Subject: [PATCH 151/209] DB/Overpass: correct documentation about querying large polygons --- doc/database.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/database.md b/doc/database.md index 29e1ef11..9e1b3b99 100644 --- a/doc/database.md +++ b/doc/database.md @@ -7,7 +7,7 @@ In contrast to osm2pgsql and osmosis, Overpass API is an external database which By 
default, the API on overpass-api.de will be used, therefore it is not necessary to import a local copy. For sure, if you want to render on a regular base the admins of overpass-api.de will be happy if you change to a local copy. Additionally, you still need a local PostgreSQL database, as it is used for connecting to Mapnik and accessing the PostGIS functions. * In contrast to osm2pgsql/osmosis the geometries need to be constructed on the fly which causes some additional overhead; including full multipolygon support (see below) -* In Overpass API, bounding box queries do not include ways and relations which cross the bounding box without having a node inside the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas). Therefore very large objects might be missing in the output. +* In Overpass API, bounding box queries do not include ways and relations which enclose the bounding box, with the exception of [areas](http://wiki.openstreetmap.org/wiki/Overpass_API/Areas) which are queried additionally. Therefore large polygons might be missing in the output. * Queries for `relation member` (where member is any type) and `way node` will be optimzed, as only objects matching the member conditions and the parents conditions will be queried from Overpass API. * Additionally the tags "osm:id", "osm:version", "osm:user_id", "osm:user", "osm:timestamp", "osm:changeset" will be set from OpenStreetMap meta info. Filtering for meta information is currently not possible on Overpass API, therefore these filters will not be applied to queries (in short: a condition for forests of user abc will be compiled into a query of all forests in the current viewport regardless of the user). 
From 17adaf0eb84a4c8c79aa1fb4c942e33a3571393e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 7 Jan 2015 17:35:32 +0100 Subject: [PATCH 152/209] Stat::property_values: adhere include_none: '' is synomymous with none --- pgmapcss/compiler/stat.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/compiler/stat.py b/pgmapcss/compiler/stat.py index cab2d7ea..873371c3 100644 --- a/pgmapcss/compiler/stat.py +++ b/pgmapcss/compiler/stat.py @@ -164,7 +164,7 @@ def property_values(self, prop, pseudo_element=None, include_illegal_values=Fals values = { v for v in values - if v != None + if v != None and v != '' } if warn_unresolvable and True in values: From a01450d343c94261f1a3e122b4054b89056145bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Wed, 7 Jan 2015 17:54:09 +0100 Subject: [PATCH 153/209] DB/*: compile_condition: 'value_type' might not be set --- pgmapcss/db/overpass/db.py | 2 +- pgmapcss/db/postgresql_db/db.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pgmapcss/db/overpass/db.py b/pgmapcss/db/overpass/db.py index 666c6554..05ea67f8 100644 --- a/pgmapcss/db/overpass/db.py +++ b/pgmapcss/db/overpass/db.py @@ -72,7 +72,7 @@ def compile_condition_overpass(self, condition, tag_type, filter): return None # value-eval() statements - if condition['value_type'] == 'eval': + if 'value_type' in condition and condition['value_type'] == 'eval': # treat other conditions as has_key ret = ( 'key', key ) diff --git a/pgmapcss/db/postgresql_db/db.py b/pgmapcss/db/postgresql_db/db.py index 3b50ff08..9b88f5d9 100644 --- a/pgmapcss/db/postgresql_db/db.py +++ b/pgmapcss/db/postgresql_db/db.py @@ -46,7 +46,7 @@ def compile_condition_hstore_value(self, condition, tag_type, filter, prefix='') return None # value-eval() statements - if condition['value_type'] == 'eval': + if 'value_type' in condition and condition['value_type'] == 'eval': # treat other conditions as 
has_key ret = prefix + column + ' ? ' + self.format(key); @@ -151,7 +151,7 @@ def compile_condition_column(self, condition, tag_type, filter, prefix=''): return None # value-eval() statements - if condition['value_type'] == 'eval': + if 'value_type' in condition and condition['value_type'] == 'eval': # treat other conditions as has_key ret = prefix + self.ident(key) + ' is not null' From 25c93a5ea112389e6095f33a15d6c8785d739102 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 11 Jan 2015 20:38:01 +0100 Subject: [PATCH 154/209] Type image_png: Bugfix, import 'Image' from wand --- pgmapcss/types/image_png.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pgmapcss/types/image_png.py b/pgmapcss/types/image_png.py index 55fdab45..bd96c113 100644 --- a/pgmapcss/types/image_png.py +++ b/pgmapcss/types/image_png.py @@ -32,6 +32,7 @@ def stat_value(self, prop): return prop['value'] if os.path.exists(prop['value']): + from wand.image import Image img = Image(filename=prop['value']) self.data[prop['value']] = img.size if not prop['key'] in self.stat['global_data']: From 2398a6b528aeffda894e4d96c490143e176cf800 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 11 Jan 2015 20:45:01 +0100 Subject: [PATCH 155/209] DOC/Install: wand needs imagemagick to work correctly --- doc/Install_with_Mapnik_2.2.md | 2 +- doc/Install_with_Mapnik_3.0.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/Install_with_Mapnik_2.2.md b/doc/Install_with_Mapnik_2.2.md index 72474e71..1b718bf0 100644 --- a/doc/Install_with_Mapnik_2.2.md +++ b/doc/Install_with_Mapnik_2.2.md @@ -2,7 +2,7 @@ Installation on a plain Ubuntu 14.04 Server: Install additional packages: ```sh -sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont python3-wand +sudo apt-get install git postgresql 
postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont python3-wand imagemagick ``` More dependencies: diff --git a/doc/Install_with_Mapnik_3.0.md b/doc/Install_with_Mapnik_3.0.md index 0c1a1cec..8b33b690 100644 --- a/doc/Install_with_Mapnik_3.0.md +++ b/doc/Install_with_Mapnik_3.0.md @@ -5,7 +5,7 @@ Install additional packages: sudo apt-get install python-software-properties sudo add-apt-repository ppa:mapnik/nightly-trunk sudo apt-get update -sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont mapnik-input-plugin-postgis libmapnik libmapnik-dev mapnik-utils python3-wand +sudo apt-get install git postgresql postgresql-contrib postgresql-9.3-postgis-2.1 python3-setuptools python3-dev python-mapnik postgresql-plpython3 python3-postgresql ttf-unifont mapnik-input-plugin-postgis libmapnik libmapnik-dev mapnik-utils python3-wand imagemagick ``` More dependencies: From e6ad0a3f65d9dc7e166e4acb7a2111778ac6aa85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 18 Jan 2015 07:44:58 +0100 Subject: [PATCH 156/209] Colors: Bugfix, accessing color_names during execution --- pgmapcss/colors/to_color.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pgmapcss/colors/to_color.py b/pgmapcss/colors/to_color.py index 9838a095..7059c8cf 100644 --- a/pgmapcss/colors/to_color.py +++ b/pgmapcss/colors/to_color.py @@ -20,7 +20,7 @@ def to_color(value): try: from .color_names import color_names - except ValueError: + except SystemError: global color_names if value in color_names: From a7674cb7272976f9f3839a1c774ff3e6e61b778c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 18 Jan 2015 13:05:44 +0100 Subject: [PATCH 157/209] Colors: bugfix last commit, Py3.2 uses different Exception as Py3.4 
--- pgmapcss/colors/to_color.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pgmapcss/colors/to_color.py b/pgmapcss/colors/to_color.py index 7059c8cf..2ecf76cb 100644 --- a/pgmapcss/colors/to_color.py +++ b/pgmapcss/colors/to_color.py @@ -20,6 +20,8 @@ def to_color(value): try: from .color_names import color_names + except ValueError: + global color_names except SystemError: global color_names From 92f42dd0aee026c17ef26920ca4a770b2cf3ac77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Stephan=20B=C3=B6sch-Plepelits?= Date: Sun, 18 Jan 2015 13:40:38 +0100 Subject: [PATCH 158/209] Update Maki icons --- pgmapcss/icons/maki/airfield-12.svg | 106 +- pgmapcss/icons/maki/airfield-18.svg | 109 +- pgmapcss/icons/maki/airfield-24.png | Bin 423 -> 415 bytes pgmapcss/icons/maki/airfield-24.svg | 104 +- pgmapcss/icons/maki/airport-12.svg | 110 +- pgmapcss/icons/maki/airport-18.svg | 113 +- pgmapcss/icons/maki/airport-24.png | Bin 462 -> 455 bytes pgmapcss/icons/maki/airport-24.svg | 114 +- pgmapcss/icons/maki/alcohol-shop-12.svg | 110 +- pgmapcss/icons/maki/alcohol-shop-18.svg | 110 +- pgmapcss/icons/maki/alcohol-shop-24.png | Bin 432 -> 424 bytes pgmapcss/icons/maki/alcohol-shop-24.svg | 116 +- pgmapcss/icons/maki/america-football-12.svg | 108 +- pgmapcss/icons/maki/america-football-18.svg | 111 +- pgmapcss/icons/maki/america-football-24.png | Bin 454 -> 445 bytes pgmapcss/icons/maki/america-football-24.svg | 120 +- pgmapcss/icons/maki/art-gallery-12.svg | 107 +- pgmapcss/icons/maki/art-gallery-18.svg | 119 +- pgmapcss/icons/maki/art-gallery-24.png | Bin 572 -> 564 bytes pgmapcss/icons/maki/art-gallery-24.svg | 120 +- pgmapcss/icons/maki/bakery-12.svg | 98 +- pgmapcss/icons/maki/bakery-18.svg | 97 +- pgmapcss/icons/maki/bakery-24.png | Bin 531 -> 528 bytes pgmapcss/icons/maki/bakery-24.svg | 113 +- pgmapcss/icons/maki/bank-12.svg | 108 +- pgmapcss/icons/maki/bank-18.svg | 108 +- pgmapcss/icons/maki/bank-24.png | Bin 345 -> 337 bytes pgmapcss/icons/maki/bank-24.svg | 120 +- 
pgmapcss/icons/maki/bar-12.svg | 108 +- pgmapcss/icons/maki/bar-18.svg | 108 +- pgmapcss/icons/maki/bar-24.png | Bin 444 -> 436 bytes pgmapcss/icons/maki/bar-24.svg | 116 +- pgmapcss/icons/maki/baseball-12.svg | 106 +- pgmapcss/icons/maki/baseball-18.svg | 109 +- pgmapcss/icons/maki/baseball-24.png | Bin 515 -> 508 bytes pgmapcss/icons/maki/baseball-24.svg | 110 +- pgmapcss/icons/maki/basketball-12.svg | 104 +- pgmapcss/icons/maki/basketball-18.svg | 107 +- pgmapcss/icons/maki/basketball-24.png | Bin 441 -> 433 bytes pgmapcss/icons/maki/basketball-24.svg | 108 +- pgmapcss/icons/maki/beer-12.svg | 113 +- pgmapcss/icons/maki/beer-18.svg | 113 +- pgmapcss/icons/maki/beer-24.png | Bin 452 -> 444 bytes pgmapcss/icons/maki/beer-24.svg | 119 +- pgmapcss/icons/maki/bicycle-12.svg | 106 +- pgmapcss/icons/maki/bicycle-18.svg | 113 +- pgmapcss/icons/maki/bicycle-24.png | Bin 589 -> 582 bytes pgmapcss/icons/maki/bicycle-24.svg | 108 +- pgmapcss/icons/maki/building-12.svg | 117 +- pgmapcss/icons/maki/building-18.svg | 106 +- pgmapcss/icons/maki/building-24.png | Bin 419 -> 411 bytes pgmapcss/icons/maki/building-24.svg | 116 +- pgmapcss/icons/maki/bus-12.svg | 104 +- pgmapcss/icons/maki/bus-18.svg | 122 +- pgmapcss/icons/maki/bus-24.png | Bin 344 -> 336 bytes pgmapcss/icons/maki/bus-24.svg | 126 +- pgmapcss/icons/maki/cafe-12.svg | 108 +- pgmapcss/icons/maki/cafe-18.svg | 108 +- pgmapcss/icons/maki/cafe-24.png | Bin 373 -> 365 bytes pgmapcss/icons/maki/cafe-24.svg | 116 +- pgmapcss/icons/maki/camera-12.svg | 144 +- pgmapcss/icons/maki/camera-18.svg | 137 +- pgmapcss/icons/maki/camera-24.png | Bin 441 -> 433 bytes pgmapcss/icons/maki/camera-24.svg | 100 +- pgmapcss/icons/maki/campsite-12.svg | 104 +- pgmapcss/icons/maki/campsite-18.svg | 107 +- pgmapcss/icons/maki/campsite-24.png | Bin 478 -> 471 bytes pgmapcss/icons/maki/campsite-24.svg | 120 +- pgmapcss/icons/maki/car-12.svg | 127 +- pgmapcss/icons/maki/car-18.svg | 150 +- pgmapcss/icons/maki/car-24.png | Bin 437 -> 429 bytes 
pgmapcss/icons/maki/car-24.svg | 163 +- pgmapcss/icons/maki/cemetery-12.svg | 102 +- pgmapcss/icons/maki/cemetery-18.svg | 105 +- pgmapcss/icons/maki/cemetery-24.png | Bin 380 -> 372 bytes pgmapcss/icons/maki/cemetery-24.svg | 108 +- pgmapcss/icons/maki/chemist-12.svg | 103 +- pgmapcss/icons/maki/chemist-18.svg | 94 +- pgmapcss/icons/maki/chemist-24.png | Bin 419 -> 411 bytes pgmapcss/icons/maki/chemist-24.svg | 119 +- pgmapcss/icons/maki/cinema-12.svg | 110 +- pgmapcss/icons/maki/cinema-18.svg | 112 +- pgmapcss/icons/maki/cinema-24.png | Bin 464 -> 456 bytes pgmapcss/icons/maki/cinema-24.svg | 116 +- pgmapcss/icons/maki/circle-12.svg | 119 +- pgmapcss/icons/maki/circle-18.svg | 118 +- pgmapcss/icons/maki/circle-24.png | Bin 460 -> 452 bytes pgmapcss/icons/maki/circle-24.svg | 125 +- pgmapcss/icons/maki/circle-stroked-12.svg | 106 +- pgmapcss/icons/maki/circle-stroked-18.svg | 104 +- pgmapcss/icons/maki/circle-stroked-24.png | Bin 545 -> 537 bytes pgmapcss/icons/maki/circle-stroked-24.svg | 110 +- pgmapcss/icons/maki/city-12.svg | 102 +- pgmapcss/icons/maki/city-18.svg | 102 +- pgmapcss/icons/maki/city-24.png | Bin 337 -> 329 bytes pgmapcss/icons/maki/city-24.svg | 110 +- pgmapcss/icons/maki/clothing-store-12.svg | 101 +- pgmapcss/icons/maki/clothing-store-18.svg | 107 +- pgmapcss/icons/maki/clothing-store-24.png | Bin 444 -> 435 bytes pgmapcss/icons/maki/clothing-store-24.svg | 128 +- pgmapcss/icons/maki/college-12.svg | 108 +- pgmapcss/icons/maki/college-18.svg | 107 +- pgmapcss/icons/maki/college-24.png | Bin 526 -> 518 bytes pgmapcss/icons/maki/college-24.svg | 118 +- pgmapcss/icons/maki/commercial-12.svg | 109 +- pgmapcss/icons/maki/commercial-18.svg | 107 +- pgmapcss/icons/maki/commercial-24.png | Bin 326 -> 318 bytes pgmapcss/icons/maki/commercial-24.svg | 116 +- pgmapcss/icons/maki/cricket-12.svg | 106 +- pgmapcss/icons/maki/cricket-18.svg | 109 +- pgmapcss/icons/maki/cricket-24.png | Bin 437 -> 431 bytes pgmapcss/icons/maki/cricket-24.svg | 110 +- 
pgmapcss/icons/maki/cross-12.svg | 100 +- pgmapcss/icons/maki/cross-18.svg | 107 +- pgmapcss/icons/maki/cross-24.png | Bin 494 -> 485 bytes pgmapcss/icons/maki/cross-24.svg | 108 +- pgmapcss/icons/maki/dam-12.svg | 108 +- pgmapcss/icons/maki/dam-18.svg | 108 +- pgmapcss/icons/maki/dam-24.png | Bin 448 -> 440 bytes pgmapcss/icons/maki/dam-24.svg | 118 +- pgmapcss/icons/maki/danger-12.svg | 106 +- pgmapcss/icons/maki/danger-18.svg | 106 +- pgmapcss/icons/maki/danger-24.png | Bin 583 -> 575 bytes pgmapcss/icons/maki/danger-24.svg | 116 +- pgmapcss/icons/maki/disability-12.svg | 108 +- pgmapcss/icons/maki/disability-18.svg | 105 +- pgmapcss/icons/maki/disability-24.png | Bin 600 -> 591 bytes pgmapcss/icons/maki/disability-24.svg | 111 +- pgmapcss/icons/maki/dog-park-12.svg | 52 +- pgmapcss/icons/maki/dog-park-18.svg | 52 +- pgmapcss/icons/maki/dog-park-24.png | Bin 547 -> 539 bytes pgmapcss/icons/maki/dog-park-24.svg | 45 +- pgmapcss/icons/maki/embassy-12.svg | 106 +- pgmapcss/icons/maki/embassy-18.svg | 105 +- pgmapcss/icons/maki/embassy-24.png | Bin 417 -> 409 bytes pgmapcss/icons/maki/embassy-24.svg | 114 +- .../icons/maki/emergency-telephone-12.svg | 109 +- .../icons/maki/emergency-telephone-18.svg | 105 +- .../icons/maki/emergency-telephone-24.png | Bin 511 -> 502 bytes .../icons/maki/emergency-telephone-24.svg | 102 +- pgmapcss/icons/maki/entrance-12.svg | 109 +- pgmapcss/icons/maki/entrance-18.svg | 131 +- pgmapcss/icons/maki/entrance-24.png | Bin 414 -> 406 bytes pgmapcss/icons/maki/entrance-24.svg | 141 +- pgmapcss/icons/maki/farm-12.svg | 105 +- pgmapcss/icons/maki/farm-18.svg | 100 +- pgmapcss/icons/maki/farm-24.png | Bin 416 -> 408 bytes pgmapcss/icons/maki/farm-24.svg | 113 +- pgmapcss/icons/maki/fast-food-12.svg | 110 +- pgmapcss/icons/maki/fast-food-18.svg | 106 +- pgmapcss/icons/maki/fast-food-24.png | Bin 410 -> 402 bytes pgmapcss/icons/maki/fast-food-24.svg | 118 +- pgmapcss/icons/maki/ferry-12.svg | 108 +- pgmapcss/icons/maki/ferry-18.svg | 109 +- 
pgmapcss/icons/maki/ferry-24.png | Bin 517 -> 510 bytes pgmapcss/icons/maki/ferry-24.svg | 114 +- pgmapcss/icons/maki/fire-station-12.svg | 124 +- pgmapcss/icons/maki/fire-station-18.svg | 122 +- pgmapcss/icons/maki/fire-station-24.png | Bin 596 -> 587 bytes pgmapcss/icons/maki/fire-station-24.svg | 127 +- pgmapcss/icons/maki/fuel-12.svg | 106 +- pgmapcss/icons/maki/fuel-18.svg | 109 +- pgmapcss/icons/maki/fuel-24.png | Bin 402 -> 394 bytes pgmapcss/icons/maki/fuel-24.svg | 114 +- pgmapcss/icons/maki/garden-12.svg | 102 +- pgmapcss/icons/maki/garden-18.svg | 111 +- pgmapcss/icons/maki/garden-24.png | Bin 519 -> 511 bytes pgmapcss/icons/maki/garden-24.svg | 110 +- pgmapcss/icons/maki/gift-12.svg | 109 +- pgmapcss/icons/maki/gift-18.svg | 122 +- pgmapcss/icons/maki/gift-24.png | Bin 433 -> 426 bytes pgmapcss/icons/maki/gift-24.svg | 101 +- pgmapcss/icons/maki/golf-12.svg | 104 +- pgmapcss/icons/maki/golf-18.svg | 107 +- pgmapcss/icons/maki/golf-24.png | Bin 430 -> 422 bytes pgmapcss/icons/maki/golf-24.svg | 108 +- pgmapcss/icons/maki/grocery-12.svg | 108 +- pgmapcss/icons/maki/grocery-18.svg | 108 +- pgmapcss/icons/maki/grocery-24.png | Bin 406 -> 398 bytes pgmapcss/icons/maki/grocery-24.svg | 116 +- pgmapcss/icons/maki/hairdresser-12.svg | 100 +- pgmapcss/icons/maki/hairdresser-18.svg | 114 +- pgmapcss/icons/maki/hairdresser-24.png | Bin 561 -> 552 bytes pgmapcss/icons/maki/hairdresser-24.svg | 120 +- pgmapcss/icons/maki/harbor-12.svg | 104 +- pgmapcss/icons/maki/harbor-18.svg | 109 +- pgmapcss/icons/maki/harbor-24.png | Bin 519 -> 509 bytes pgmapcss/icons/maki/harbor-24.svg | 106 +- pgmapcss/icons/maki/heart-12.svg | 98 +- pgmapcss/icons/maki/heart-18.svg | 113 +- pgmapcss/icons/maki/heart-24.png | Bin 512 -> 503 bytes pgmapcss/icons/maki/heart-24.svg | 123 +- pgmapcss/icons/maki/heliport-12.svg | 106 +- pgmapcss/icons/maki/heliport-18.svg | 105 +- pgmapcss/icons/maki/heliport-24.png | Bin 417 -> 409 bytes pgmapcss/icons/maki/heliport-24.svg | 106 +- 
pgmapcss/icons/maki/hospital-12.svg | 114 +- pgmapcss/icons/maki/hospital-18.svg | 116 +- pgmapcss/icons/maki/hospital-24.png | Bin 350 -> 342 bytes pgmapcss/icons/maki/hospital-24.svg | 126 +- pgmapcss/icons/maki/ice-cream-12.svg | 90 +- pgmapcss/icons/maki/ice-cream-18.svg | 77 +- pgmapcss/icons/maki/ice-cream-24.png | Bin 504 -> 496 bytes pgmapcss/icons/maki/ice-cream-24.svg | 78 +- pgmapcss/icons/maki/industrial-12.svg | 108 +- pgmapcss/icons/maki/industrial-18.svg | 108 +- pgmapcss/icons/maki/industrial-24.png | Bin 376 -> 368 bytes pgmapcss/icons/maki/industrial-24.svg | 116 +- pgmapcss/icons/maki/land-use-12.svg | 108 +- pgmapcss/icons/maki/land-use-18.svg | 106 +- pgmapcss/icons/maki/land-use-24.png | Bin 461 -> 453 bytes pgmapcss/icons/maki/land-use-24.svg | 113 +- pgmapcss/icons/maki/laundry-12.svg | 119 +- pgmapcss/icons/maki/laundry-18.svg | 131 +- pgmapcss/icons/maki/laundry-24.png | Bin 463 -> 455 bytes pgmapcss/icons/maki/laundry-24.svg | 145 +- pgmapcss/icons/maki/library-12.svg | 112 +- pgmapcss/icons/maki/library-18.svg | 111 +- pgmapcss/icons/maki/library-24.png | Bin 448 -> 441 bytes pgmapcss/icons/maki/library-24.svg | 116 +- pgmapcss/icons/maki/lighthouse-12.svg | 88 +- pgmapcss/icons/maki/lighthouse-18.svg | 98 +- pgmapcss/icons/maki/lighthouse-24.png | Bin 443 -> 434 bytes pgmapcss/icons/maki/lighthouse-24.svg | 100 +- pgmapcss/icons/maki/lodging-12.svg | 104 +- pgmapcss/icons/maki/lodging-18.svg | 107 +- pgmapcss/icons/maki/lodging-24.png | Bin 411 -> 403 bytes pgmapcss/icons/maki/lodging-24.svg | 118 +- pgmapcss/icons/maki/logging-12.svg | 108 +- pgmapcss/icons/maki/logging-18.svg | 112 +- pgmapcss/icons/maki/logging-24.png | Bin 494 -> 487 bytes pgmapcss/icons/maki/logging-24.svg | 120 +- pgmapcss/icons/maki/london-underground-12.svg | 106 +- pgmapcss/icons/maki/london-underground-18.svg | 110 +- pgmapcss/icons/maki/london-underground-24.png | Bin 527 -> 520 bytes pgmapcss/icons/maki/london-underground-24.svg | 106 +- 
pgmapcss/icons/maki/maki-12-base.svg | 7 + pgmapcss/icons/maki/maki-18-base.svg | 7 + pgmapcss/icons/maki/maki-24-base.svg | 7 + pgmapcss/icons/maki/maki-icons.svg | 6928 ++++++----------- pgmapcss/icons/maki/marker-12.svg | 102 +- pgmapcss/icons/maki/marker-18.svg | 107 +- pgmapcss/icons/maki/marker-24.png | Bin 498 -> 490 bytes pgmapcss/icons/maki/marker-24.svg | 111 +- pgmapcss/icons/maki/marker-stroked-12.svg | 104 +- pgmapcss/icons/maki/marker-stroked-18.svg | 105 +- pgmapcss/icons/maki/marker-stroked-24.png | Bin 595 -> 588 bytes pgmapcss/icons/maki/marker-stroked-24.svg | 119 +- pgmapcss/icons/maki/minefield-12.svg | 104 +- pgmapcss/icons/maki/minefield-18.svg | 110 +- pgmapcss/icons/maki/minefield-24.png | Bin 520 -> 512 bytes pgmapcss/icons/maki/minefield-24.svg | 106 +- pgmapcss/icons/maki/mobilephone-12.svg | 155 +- pgmapcss/icons/maki/mobilephone-18.svg | 155 +- pgmapcss/icons/maki/mobilephone-24.png | Bin 379 -> 371 bytes pgmapcss/icons/maki/mobilephone-24.svg | 112 +- pgmapcss/icons/maki/monument-12.svg | 102 +- pgmapcss/icons/maki/monument-18.svg | 105 +- pgmapcss/icons/maki/monument-24.png | Bin 367 -> 359 bytes pgmapcss/icons/maki/monument-24.svg | 110 +- pgmapcss/icons/maki/museum-12.svg | 108 +- pgmapcss/icons/maki/museum-18.svg | 123 +- pgmapcss/icons/maki/museum-24.png | Bin 489 -> 481 bytes pgmapcss/icons/maki/museum-24.svg | 128 +- pgmapcss/icons/maki/music-12.svg | 111 +- pgmapcss/icons/maki/music-18.svg | 117 +- pgmapcss/icons/maki/music-24.png | Bin 373 -> 363 bytes pgmapcss/icons/maki/music-24.svg | 119 +- pgmapcss/icons/maki/oil-well-12.svg | 108 +- pgmapcss/icons/maki/oil-well-18.svg | 108 +- pgmapcss/icons/maki/oil-well-24.png | Bin 574 -> 567 bytes pgmapcss/icons/maki/oil-well-24.svg | 120 +- pgmapcss/icons/maki/park-12.svg | 102 +- pgmapcss/icons/maki/park-18.svg | 109 +- pgmapcss/icons/maki/park-24.png | Bin 445 -> 438 bytes pgmapcss/icons/maki/park-24.svg | 113 +- pgmapcss/icons/maki/park2-12.svg | 104 +- 
pgmapcss/icons/maki/park2-18.svg | 105 +- pgmapcss/icons/maki/park2-24.png | Bin 468 -> 460 bytes pgmapcss/icons/maki/park2-24.svg | 110 +- pgmapcss/icons/maki/parking-12.svg | 104 +- pgmapcss/icons/maki/parking-18.svg | 110 +- pgmapcss/icons/maki/parking-24.png | Bin 418 -> 410 bytes pgmapcss/icons/maki/parking-24.svg | 116 +- pgmapcss/icons/maki/parking-garage-12.svg | 107 +- pgmapcss/icons/maki/parking-garage-18.svg | 116 +- pgmapcss/icons/maki/parking-garage-24.png | Bin 506 -> 498 bytes pgmapcss/icons/maki/parking-garage-24.svg | 106 +- pgmapcss/icons/maki/pharmacy-12.svg | 109 +- pgmapcss/icons/maki/pharmacy-18.svg | 112 +- pgmapcss/icons/maki/pharmacy-24.png | Bin 457 -> 449 bytes pgmapcss/icons/maki/pharmacy-24.svg | 116 +- pgmapcss/icons/maki/pitch-12.svg | 108 +- pgmapcss/icons/maki/pitch-18.svg | 107 +- pgmapcss/icons/maki/pitch-24.png | Bin 567 -> 561 bytes pgmapcss/icons/maki/pitch-24.svg | 112 +- pgmapcss/icons/maki/place-of-worship-12.svg | 111 +- pgmapcss/icons/maki/place-of-worship-18.svg | 114 +- pgmapcss/icons/maki/place-of-worship-24.png | Bin 386 -> 378 bytes pgmapcss/icons/maki/place-of-worship-24.svg | 115 +- pgmapcss/icons/maki/playground-12.svg | 98 +- pgmapcss/icons/maki/playground-18.svg | 99 +- pgmapcss/icons/maki/playground-24.png | Bin 618 -> 611 bytes pgmapcss/icons/maki/playground-24.svg | 128 +- pgmapcss/icons/maki/police-12.svg | 110 +- pgmapcss/icons/maki/police-18.svg | 107 +- pgmapcss/icons/maki/police-24.png | Bin 489 -> 481 bytes pgmapcss/icons/maki/police-24.svg | 109 +- pgmapcss/icons/maki/polling-place-12.svg | 114 +- pgmapcss/icons/maki/polling-place-18.svg | 152 +- pgmapcss/icons/maki/polling-place-24.png | Bin 510 -> 502 bytes pgmapcss/icons/maki/polling-place-24.svg | 125 +- pgmapcss/icons/maki/post-12.svg | 108 +- pgmapcss/icons/maki/post-18.svg | 107 +- pgmapcss/icons/maki/post-24.png | Bin 474 -> 466 bytes pgmapcss/icons/maki/post-24.svg | 110 +- pgmapcss/icons/maki/prison-12.svg | 110 +- 
pgmapcss/icons/maki/prison-18.svg | 109 +- pgmapcss/icons/maki/prison-24.png | Bin 336 -> 328 bytes pgmapcss/icons/maki/prison-24.svg | 110 +- pgmapcss/icons/maki/rail-12.svg | 121 +- pgmapcss/icons/maki/rail-18.svg | 139 +- pgmapcss/icons/maki/rail-24.png | Bin 424 -> 416 bytes pgmapcss/icons/maki/rail-24.svg | 129 +- pgmapcss/icons/maki/rail-above-12.svg | 106 +- pgmapcss/icons/maki/rail-above-18.svg | 109 +- pgmapcss/icons/maki/rail-above-24.png | Bin 434 -> 426 bytes pgmapcss/icons/maki/rail-above-24.svg | 106 +- pgmapcss/icons/maki/rail-light-12.svg | 119 +- pgmapcss/icons/maki/rail-light-18.svg | 137 +- pgmapcss/icons/maki/rail-light-24.png | Bin 510 -> 501 bytes pgmapcss/icons/maki/rail-light-24.svg | 129 +- pgmapcss/icons/maki/rail-metro-12.svg | 119 +- pgmapcss/icons/maki/rail-metro-18.svg | 139 +- pgmapcss/icons/maki/rail-metro-24.png | Bin 445 -> 437 bytes pgmapcss/icons/maki/rail-metro-24.svg | 129 +- pgmapcss/icons/maki/rail-underground-12.svg | 108 +- pgmapcss/icons/maki/rail-underground-18.svg | 109 +- pgmapcss/icons/maki/rail-underground-24.png | Bin 438 -> 430 bytes pgmapcss/icons/maki/rail-underground-24.svg | 108 +- .../icons/maki/religious-christian-12.svg | 108 +- .../icons/maki/religious-christian-18.svg | 111 +- .../icons/maki/religious-christian-24.png | Bin 349 -> 341 bytes .../icons/maki/religious-christian-24.svg | 112 +- pgmapcss/icons/maki/religious-jewish-12.svg | 108 +- pgmapcss/icons/maki/religious-jewish-18.svg | 109 +- pgmapcss/icons/maki/religious-jewish-24.png | Bin 468 -> 458 bytes pgmapcss/icons/maki/religious-jewish-24.svg | 112 +- pgmapcss/icons/maki/religious-muslim-12.svg | 104 +- pgmapcss/icons/maki/religious-muslim-18.svg | 107 +- pgmapcss/icons/maki/religious-muslim-24.png | Bin 569 -> 561 bytes pgmapcss/icons/maki/religious-muslim-24.svg | 110 +- pgmapcss/icons/maki/restaurant-12.svg | 110 +- pgmapcss/icons/maki/restaurant-18.svg | 110 +- pgmapcss/icons/maki/restaurant-24.png | Bin 467 -> 459 bytes 
pgmapcss/icons/maki/restaurant-24.svg | 118 +- pgmapcss/icons/maki/roadblock-12.svg | 103 +- pgmapcss/icons/maki/roadblock-18.svg | 109 +- pgmapcss/icons/maki/roadblock-24.png | Bin 469 -> 460 bytes pgmapcss/icons/maki/roadblock-24.svg | 103 +- pgmapcss/icons/maki/rocket-12.svg | 95 +- pgmapcss/icons/maki/rocket-18.svg | 94 +- pgmapcss/icons/maki/rocket-24.png | Bin 520 -> 512 bytes pgmapcss/icons/maki/rocket-24.svg | 104 +- pgmapcss/icons/maki/school-12.svg | 108 +- pgmapcss/icons/maki/school-18.svg | 109 +- pgmapcss/icons/maki/school-24.png | Bin 654 -> 646 bytes pgmapcss/icons/maki/school-24.svg | 120 +- pgmapcss/icons/maki/scooter-12.svg | 88 +- pgmapcss/icons/maki/scooter-18.svg | 90 +- pgmapcss/icons/maki/scooter-24.png | Bin 505 -> 497 bytes pgmapcss/icons/maki/scooter-24.svg | 108 +- pgmapcss/icons/maki/shop-12.svg | 108 +- pgmapcss/icons/maki/shop-18.svg | 108 +- pgmapcss/icons/maki/shop-24.png | Bin 454 -> 447 bytes pgmapcss/icons/maki/shop-24.svg | 116 +- pgmapcss/icons/maki/skiing-12.svg | 102 +- pgmapcss/icons/maki/skiing-18.svg | 107 +- pgmapcss/icons/maki/skiing-24.png | Bin 611 -> 603 bytes pgmapcss/icons/maki/skiing-24.svg | 114 +- pgmapcss/icons/maki/slaughterhouse-12.svg | 110 +- pgmapcss/icons/maki/slaughterhouse-18.svg | 108 +- pgmapcss/icons/maki/slaughterhouse-24.png | Bin 471 -> 461 bytes pgmapcss/icons/maki/slaughterhouse-24.svg | 116 +- pgmapcss/icons/maki/soccer-12.svg | 104 +- pgmapcss/icons/maki/soccer-18.svg | 108 +- pgmapcss/icons/maki/soccer-24.png | Bin 660 -> 652 bytes pgmapcss/icons/maki/soccer-24.svg | 113 +- pgmapcss/icons/maki/square-12.svg | 104 +- pgmapcss/icons/maki/square-18.svg | 114 +- pgmapcss/icons/maki/square-24.png | Bin 327 -> 319 bytes pgmapcss/icons/maki/square-24.svg | 111 +- pgmapcss/icons/maki/square-stroked-12.svg | 110 +- pgmapcss/icons/maki/square-stroked-18.svg | 107 +- pgmapcss/icons/maki/square-stroked-24.png | Bin 343 -> 335 bytes pgmapcss/icons/maki/square-stroked-24.svg | 118 +- 
pgmapcss/icons/maki/star-12.svg | 108 +- pgmapcss/icons/maki/star-18.svg | 111 +- pgmapcss/icons/maki/star-24.png | Bin 484 -> 477 bytes pgmapcss/icons/maki/star-24.svg | 123 +- pgmapcss/icons/maki/star-stroked-12.svg | 104 +- pgmapcss/icons/maki/star-stroked-18.svg | 107 +- pgmapcss/icons/maki/star-stroked-24.png | Bin 568 -> 562 bytes pgmapcss/icons/maki/star-stroked-24.svg | 120 +- pgmapcss/icons/maki/suitcase-12.svg | 141 +- pgmapcss/icons/maki/suitcase-18.svg | 123 +- pgmapcss/icons/maki/suitcase-24.png | Bin 335 -> 327 bytes pgmapcss/icons/maki/suitcase-24.svg | 129 +- pgmapcss/icons/maki/swimming-12.svg | 108 +- pgmapcss/icons/maki/swimming-18.svg | 107 +- pgmapcss/icons/maki/swimming-24.png | Bin 555 -> 549 bytes pgmapcss/icons/maki/swimming-24.svg | 108 +- pgmapcss/icons/maki/telephone-12.svg | 112 +- pgmapcss/icons/maki/telephone-18.svg | 107 +- pgmapcss/icons/maki/telephone-24.png | Bin 481 -> 471 bytes pgmapcss/icons/maki/telephone-24.svg | 106 +- pgmapcss/icons/maki/tennis-12.svg | 108 +- pgmapcss/icons/maki/tennis-18.svg | 107 +- pgmapcss/icons/maki/tennis-24.png | Bin 470 -> 463 bytes pgmapcss/icons/maki/tennis-24.svg | 112 +- pgmapcss/icons/maki/theatre-12.svg | 109 +- pgmapcss/icons/maki/theatre-18.svg | 109 +- pgmapcss/icons/maki/theatre-24.png | Bin 518 -> 509 bytes pgmapcss/icons/maki/theatre-24.svg | 124 +- pgmapcss/icons/maki/toilets-12.svg | 112 +- pgmapcss/icons/maki/toilets-18.svg | 103 +- pgmapcss/icons/maki/toilets-24.png | Bin 473 -> 465 bytes pgmapcss/icons/maki/toilets-24.svg | 108 +- pgmapcss/icons/maki/town-12.svg | 104 +- pgmapcss/icons/maki/town-18.svg | 106 +- pgmapcss/icons/maki/town-24.png | Bin 378 -> 370 bytes pgmapcss/icons/maki/town-24.svg | 114 +- pgmapcss/icons/maki/town-hall-12.svg | 124 +- pgmapcss/icons/maki/town-hall-18.svg | 123 +- pgmapcss/icons/maki/town-hall-24.png | Bin 437 -> 430 bytes pgmapcss/icons/maki/town-hall-24.svg | 123 +- pgmapcss/icons/maki/triangle-12.svg | 102 +- pgmapcss/icons/maki/triangle-18.svg | 
105 +- pgmapcss/icons/maki/triangle-24.png | Bin 463 -> 455 bytes pgmapcss/icons/maki/triangle-24.svg | 108 +- pgmapcss/icons/maki/triangle-stroked-12.svg | 106 +- pgmapcss/icons/maki/triangle-stroked-18.svg | 107 +- pgmapcss/icons/maki/triangle-stroked-24.png | Bin 555 -> 547 bytes pgmapcss/icons/maki/triangle-stroked-24.svg | 108 +- pgmapcss/icons/maki/village-12.svg | 100 +- pgmapcss/icons/maki/village-18.svg | 106 +- pgmapcss/icons/maki/village-24.png | Bin 422 -> 417 bytes pgmapcss/icons/maki/village-24.svg | 114 +- pgmapcss/icons/maki/warehouse-12.svg | 106 +- pgmapcss/icons/maki/warehouse-18.svg | 106 +- pgmapcss/icons/maki/warehouse-24.png | Bin 428 -> 420 bytes pgmapcss/icons/maki/warehouse-24.svg | 118 +- pgmapcss/icons/maki/waste-basket-12.svg | 110 +- pgmapcss/icons/maki/waste-basket-18.svg | 109 +- pgmapcss/icons/maki/waste-basket-24.png | Bin 443 -> 435 bytes pgmapcss/icons/maki/waste-basket-24.svg | 106 +- pgmapcss/icons/maki/water-12.svg | 102 +- pgmapcss/icons/maki/water-18.svg | 105 +- pgmapcss/icons/maki/water-24.png | Bin 482 -> 474 bytes pgmapcss/icons/maki/water-24.svg | 106 +- pgmapcss/icons/maki/wetland-12.svg | 104 +- pgmapcss/icons/maki/wetland-18.svg | 107 +- pgmapcss/icons/maki/wetland-24.png | Bin 566 -> 558 bytes pgmapcss/icons/maki/wetland-24.svg | 104 +- pgmapcss/icons/maki/zoo-12.svg | 100 +- pgmapcss/icons/maki/zoo-18.svg | 105 +- pgmapcss/icons/maki/zoo-24.png | Bin 514 -> 506 bytes pgmapcss/icons/maki/zoo-24.svg | 120 +- 468 files changed, 9163 insertions(+), 36309 deletions(-) diff --git a/pgmapcss/icons/maki/airfield-12.svg b/pgmapcss/icons/maki/airfield-12.svg index 74c726a0..75268517 100644 --- a/pgmapcss/icons/maki/airfield-12.svg +++ b/pgmapcss/icons/maki/airfield-12.svg @@ -7,76 +7,13 @@ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:svg="http://www.w3.org/2000/svg" xmlns="http://www.w3.org/2000/svg" - xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" 
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" + version="1.1" width="12" height="12" - id="svg4764" - version="1.1" - inkscape:version="0.48.2 r9819" - sodipodi:docname="parking-garage-12.svg"> + id="svg4764"> - - - - - - - - - - - @@ -85,45 +22,38 @@ image/svg+xml - + + transform="translate(0,-1040.3622)" + id="layer1"> + style="display:inline"> + inkscape:connector-curvature="0" + id="path6224" + style="opacity:0.3;color:#000000;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:#ffffff;stroke-width:2;stroke-linecap:butt;stroke-linejoin:round;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate" /> + id="path6200" + style="fill:#444444;fill-opacity:1;stroke:none;display:inline" /> - +