id
int64
0
458k
file_name
stringlengths
4
119
file_path
stringlengths
14
227
content
stringlengths
24
9.96M
size
int64
24
9.96M
language
stringclasses
1 value
extension
stringclasses
14 values
total_lines
int64
1
219k
avg_line_length
float64
2.52
4.63M
max_line_length
int64
5
9.91M
alphanum_fraction
float64
0
1
repo_name
stringlengths
7
101
repo_stars
int64
100
139k
repo_forks
int64
0
26.4k
repo_open_issues
int64
0
2.27k
repo_license
stringclasses
12 values
repo_extraction_date
stringclasses
433 values
14,600
__init__.py
jrnl-org_jrnl/jrnl/plugins/__init__.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

from typing import Type

from jrnl.plugins.calendar_heatmap_exporter import CalendarHeatmapExporter
from jrnl.plugins.dates_exporter import DatesExporter
from jrnl.plugins.fancy_exporter import FancyExporter
from jrnl.plugins.jrnl_importer import JRNLImporter
from jrnl.plugins.json_exporter import JSONExporter
from jrnl.plugins.markdown_exporter import MarkdownExporter
from jrnl.plugins.tag_exporter import TagExporter
from jrnl.plugins.text_exporter import TextExporter
from jrnl.plugins.xml_exporter import XMLExporter
from jrnl.plugins.yaml_exporter import YAMLExporter

# Registry of all built-in plugin classes.
__exporters = [
    CalendarHeatmapExporter,
    DatesExporter,
    FancyExporter,
    JSONExporter,
    MarkdownExporter,
    TagExporter,
    TextExporter,
    XMLExporter,
    YAMLExporter,
]
__importers = [JRNLImporter]

# Map every format name a plugin registers to its class. "pretty" and
# "short" are valid format names that have no backing exporter class.
__exporter_types = {name: plugin for plugin in __exporters for name in plugin.names}
__exporter_types["pretty"] = None
__exporter_types["short"] = None
__importer_types = {name: plugin for plugin in __importers for name in plugin.names}

EXPORT_FORMATS = sorted(__exporter_types.keys())
IMPORT_FORMATS = sorted(__importer_types.keys())


def get_exporter(format: str) -> Type[TextExporter] | None:
    """Return the exporter class registered under ``format``, or None."""
    return next(
        (plugin for plugin in __exporters if format in getattr(plugin, "names", ())),
        None,
    )


def get_importer(format: str) -> Type[JRNLImporter] | None:
    """Return the importer class registered under ``format``, or None."""
    return next(
        (plugin for plugin in __importers if format in getattr(plugin, "names", ())),
        None,
    )
1,649
Python
.py
41
36.756098
84
0.774234
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,601
yaml_exporter.py
jrnl-org_jrnl/jrnl/plugins/yaml_exporter.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

import os
import re
from typing import TYPE_CHECKING

from jrnl.exception import JrnlException
from jrnl.messages import Message
from jrnl.messages import MsgStyle
from jrnl.messages import MsgText
from jrnl.output import print_msg
from jrnl.plugins.text_exporter import TextExporter

if TYPE_CHECKING:
    from jrnl.journals import Entry
    from jrnl.journals import Journal


class YAMLExporter(TextExporter):
    """This Exporter converts entries and journals into Markdown formatted text with YAML front matter."""

    names = ["yaml"]
    extension = "md"

    @classmethod
    def export_entry(cls, entry: "Entry", to_multifile: bool = True) -> str:
        """Returns a markdown representation of an entry, with YAML front matter.

        Raises JrnlException when called for single-file output: this format
        only supports exporting each entry to its own file in a directory.
        """
        if to_multifile is False:
            raise JrnlException(Message(MsgText.YamlMustBeDirectory, MsgStyle.ERROR))

        date_str = entry.date.strftime(entry.journal.config["timeformat"])
        # Separate title from body with a newline only when a body exists.
        body_wrapper = "\n" if entry.body else ""
        body = body_wrapper + entry.body

        tagsymbols = entry.journal.config["tagsymbols"]
        # Matches lines consisting only of tags; see also Entry's tag regex.
        multi_tag_regex = re.compile(rf"(?u)^\s*([{tagsymbols}][-+*#/\w]+\s*)+$")

        """Increase heading levels in body text"""
        # The loop below shifts every Markdown heading down one level (the
        # entry title becomes H1) and converts Setext (underline) headings to
        # ATX form. It processes lines one step behind via previous_line so a
        # Setext underline can consume the line above it.
        newbody = ""
        heading = "#"
        previous_line = ""
        warn_on_heading_level = False
        for line in body.splitlines(True):
            if re.match(r"^#+ ", line):
                """ATX style headings"""
                newbody = newbody + previous_line + heading + line
                if re.match(r"^#######+ ", heading + line):
                    # Shifting would exceed H6, which Markdown can't render.
                    warn_on_heading_level = True
                line = ""
            elif re.match(r"^=+$", line.rstrip()) and not re.match(
                r"^$", previous_line.strip()
            ):
                """Setext style H1"""
                newbody = newbody + heading + "# " + previous_line
                line = ""
            elif re.match(r"^-+$", line.rstrip()) and not re.match(
                r"^$", previous_line.strip()
            ):
                """Setext style H2"""
                newbody = newbody + heading + "## " + previous_line
                line = ""
            elif multi_tag_regex.match(line):
                """Tag only lines"""
                line = ""
            else:
                newbody = newbody + previous_line
            previous_line = line
        newbody = newbody + previous_line  # add very last line

        # make sure the export ends with a blank line
        if previous_line not in ["\r", "\n", "\r\n", "\n\r"]:
            newbody = newbody + os.linesep

        # set indentation for YAML body block
        spacebody = "\t"
        for line in newbody.splitlines(True):
            spacebody = spacebody + "\t" + line

        if warn_on_heading_level is True:
            print_msg(
                Message(
                    MsgText.HeadingsPastH6,
                    MsgStyle.WARNING,
                    {"date": date_str, "title": entry.title},
                )
            )

        # DayOne-imported entries may carry extra metadata attributes;
        # emit them into the front matter only when present.
        dayone_attributes = ""
        if hasattr(entry, "uuid"):
            dayone_attributes += "uuid: " + entry.uuid + "\n"
        if (
            hasattr(entry, "creator_device_agent")
            or hasattr(entry, "creator_generation_date")
            or hasattr(entry, "creator_host_name")
            or hasattr(entry, "creator_os_agent")
            or hasattr(entry, "creator_software_agent")
        ):
            dayone_attributes += "creator:\n"
            if hasattr(entry, "creator_device_agent"):
                dayone_attributes += f" device agent: {entry.creator_device_agent}\n"
            if hasattr(entry, "creator_generation_date"):
                dayone_attributes += " generation date: {}\n".format(
                    str(entry.creator_generation_date)
                )
            if hasattr(entry, "creator_host_name"):
                dayone_attributes += f" host name: {entry.creator_host_name}\n"
            if hasattr(entry, "creator_os_agent"):
                dayone_attributes += f" os agent: {entry.creator_os_agent}\n"
            if hasattr(entry, "creator_software_agent"):
                dayone_attributes += (
                    f" software agent: {entry.creator_software_agent}\n"
                )

        # TODO: copy over pictures, if present
        # source directory is entry.journal.config['journal']
        # output directory is...?

        return (
            "{start}\n"
            "title: {title}\n"
            "date: {date}\n"
            "starred: {starred}\n"
            "tags: {tags}\n"
            "{dayone}body: |{body}{end}"
        ).format(
            start="---",
            date=date_str,
            title=entry.title,
            starred=entry.starred,
            tags=", ".join([tag[1:] for tag in entry.tags]),
            dayone=dayone_attributes,
            body=spacebody,
            end="...",
        )

    @classmethod
    def export_journal(cls, journal: "Journal"):
        """Returns an error, as YAML export requires a directory as a target."""
        raise JrnlException(Message(MsgText.YamlMustBeDirectory, MsgStyle.ERROR))
5,324
Python
.py
125
31.248
88
0.552489
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,602
xml_exporter.py
jrnl-org_jrnl/jrnl/plugins/xml_exporter.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

from typing import TYPE_CHECKING

from xml.dom import minidom

from jrnl.plugins.json_exporter import JSONExporter
from jrnl.plugins.util import get_tags_count

if TYPE_CHECKING:
    from jrnl.journals import Entry
    from jrnl.journals import Journal


class XMLExporter(JSONExporter):
    """This Exporter can convert entries and journals into XML."""

    names = ["xml"]
    extension = "xml"

    @classmethod
    def export_entry(
        cls, entry: "Entry", doc: minidom.Document | None = None
    ) -> minidom.Element | str:
        """Returns an XML representation of a single entry.

        When ``doc`` is None a fresh document is created and the serialized
        XML string is returned; otherwise the bare element is returned for
        the caller to embed in its own document.
        """
        doc_el = doc or minidom.Document()
        entry_el = doc_el.createElement("entry")
        # Reuse the JSON exporter's dict form: each key becomes a child
        # element holding a single text node.
        for key, value in cls.entry_to_dict(entry).items():
            elem = doc_el.createElement(key)
            elem.appendChild(doc_el.createTextNode(value))
            entry_el.appendChild(elem)
        if not doc:
            doc_el.appendChild(entry_el)
            return doc_el.toprettyxml()
        else:
            return entry_el

    @classmethod
    def entry_to_xml(cls, entry: "Entry", doc: minidom.Document) -> minidom.Element:
        # Alternative element form used by export_journal: metadata as
        # attributes, one <tag> child per tag, full text as a text node.
        entry_el = doc.createElement("entry")
        entry_el.setAttribute("date", entry.date.isoformat())
        if hasattr(entry, "uuid"):
            entry_el.setAttribute("uuid", entry.uuid)
        # NOTE(review): minidom attribute values are expected to be strings;
        # entry.starred looks like it may be a bool here — confirm upstream.
        entry_el.setAttribute("starred", entry.starred)
        tags = entry.tags
        for tag in tags:
            tag_el = doc.createElement("tag")
            tag_el.setAttribute("name", tag)
            entry_el.appendChild(tag_el)
        entry_el.appendChild(doc.createTextNode(entry.fulltext))
        return entry_el

    @classmethod
    def export_journal(cls, journal: "Journal") -> str:
        """Returns an XML representation of an entire journal."""
        tags = get_tags_count(journal)
        doc = minidom.Document()
        xml = doc.createElement("journal")
        tags_el = doc.createElement("tags")
        entries_el = doc.createElement("entries")
        # Tag frequency summary: <tag name="...">count</tag>
        for count, tag in tags:
            tag_el = doc.createElement("tag")
            tag_el.setAttribute("name", tag)
            count_node = doc.createTextNode(str(count))
            tag_el.appendChild(count_node)
            tags_el.appendChild(tag_el)
        for entry in journal.entries:
            entries_el.appendChild(cls.entry_to_xml(entry, doc))
        xml.appendChild(entries_el)
        xml.appendChild(tags_el)
        doc.appendChild(xml)
        return doc.toprettyxml()
2,605
Python
.py
63
32.968254
84
0.642716
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,603
tag_exporter.py
jrnl-org_jrnl/jrnl/plugins/tag_exporter.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html from typing import TYPE_CHECKING from jrnl.plugins.text_exporter import TextExporter from jrnl.plugins.util import get_tags_count if TYPE_CHECKING: from jrnl.journals import Entry from jrnl.journals import Journal class TagExporter(TextExporter): """This Exporter lists the tags for entries and journals.""" names = ["tags"] extension = "tags" @classmethod def export_entry(cls, entry: "Entry") -> str: """Returns a list of tags for a single entry.""" return ", ".join(entry.tags) @classmethod def export_journal(cls, journal: "Journal") -> str: """Returns a list of tags and their frequency for an entire journal.""" tag_counts = get_tags_count(journal) result = "" if not tag_counts: return "[No tags found in journal.]" elif min(tag_counts)[0] == 0: tag_counts = filter(lambda x: x[0] > 1, tag_counts) result += "[Removed tags that appear only once.]\n" result += "\n".join( "{:20} : {}".format(tag, n) for n, tag in sorted(tag_counts, reverse=True) ) return result
1,237
Python
.py
30
34.5
86
0.64387
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,604
calendar_heatmap_exporter.py
jrnl-org_jrnl/jrnl/plugins/calendar_heatmap_exporter.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

import calendar
from datetime import datetime
from typing import TYPE_CHECKING

from rich import box
from rich.align import Align
from rich.columns import Columns
from rich.console import Console
from rich.table import Table
from rich.text import Text

from jrnl.plugins.text_exporter import TextExporter
from jrnl.plugins.util import get_journal_frequency_nested

if TYPE_CHECKING:
    from jrnl.journals import Entry
    from jrnl.journals import Journal
    from jrnl.plugins.util import NestedDict


class CalendarHeatmapExporter(TextExporter):
    """This Exporter displays a calendar heatmap of the journaling frequency."""

    names = ["calendar", "heatmap"]
    extension = "cal"

    @classmethod
    def export_entry(cls, entry: "Entry"):
        # A heatmap only makes sense for a whole journal, not a single entry.
        raise NotImplementedError

    @classmethod
    def print_calendar_heatmap(cls, journal_frequency: "NestedDict") -> str:
        """Returns a string representation of the calendar heatmap.

        journal_frequency is a nested mapping year -> month -> day -> count.
        Rendering is captured from a rich Console and returned as plain text.
        """
        console = Console()
        cal = calendar.Calendar()
        curr_year = datetime.now().year
        curr_month = datetime.now().month
        curr_day = datetime.now().day
        # Flips to True at the first month with any entries; earlier empty
        # months are skipped entirely so the output starts at real data.
        hit_first_entry = False
        with console.capture() as capture:
            for year, month_journaling_freq in journal_frequency.items():
                year_calendar = []
                for month in range(1, 13):
                    # Don't render months in the future of the current year.
                    if month > curr_month and year == curr_year:
                        break
                    entries_this_month = sum(month_journaling_freq[month].values())
                    if not hit_first_entry and entries_this_month > 0:
                        hit_first_entry = True
                    if entries_this_month == 0 and not hit_first_entry:
                        continue
                    elif entries_this_month == 0:
                        entry_msg = "No entries"
                    elif entries_this_month == 1:
                        entry_msg = "1 entry"
                    else:
                        entry_msg = f"{entries_this_month} entries"
                    table = Table(
                        title=f"{calendar.month_name[month]} {year} ({entry_msg})",
                        title_style="bold green",
                        box=box.SIMPLE_HEAVY,
                        padding=0,
                    )
                    # One column per weekday, three-letter abbreviation.
                    for week_day in cal.iterweekdays():
                        table.add_column(
                            "{:.3}".format(calendar.day_name[week_day]), justify="right"
                        )
                    month_days = cal.monthdayscalendar(year, month)
                    for weekdays in month_days:
                        days = []
                        for _, day in enumerate(weekdays):
                            if day == 0:
                                # Not a part of this month, just filler.
                                day_label = Text(str(day or ""), style="white")
                            elif (
                                day > curr_day
                                and month == curr_month
                                and year == curr_year
                            ):
                                # Stop at today's date in the current month.
                                break
                            else:
                                journal_frequency_for_day = (
                                    month_journaling_freq[month][day] or 0
                                )
                                day = str(day)
                                # TODO: Make colors configurable?
                                if journal_frequency_for_day == 0:
                                    day_label = Text(day, style="red on black")
                                elif journal_frequency_for_day == 1:
                                    day_label = Text(day, style="black on yellow")
                                elif journal_frequency_for_day == 2:
                                    day_label = Text(day, style="black on green")
                                else:
                                    day_label = Text(day, style="black on white")
                            days.append(day_label)
                        table.add_row(*days)
                    year_calendar.append(Align.center(table))
                # Print year header line
                console.rule(str(year))
                console.print()
                # Print calendar
                console.print(Columns(year_calendar, padding=1, expand=True))
        return capture.get()

    @classmethod
    def export_journal(cls, journal: "Journal"):
        """Returns dates and their frequencies for an entire journal."""
        journal_entry_date_frequency = get_journal_frequency_nested(journal)
        return cls.print_calendar_heatmap(journal_entry_date_frequency)
4,860
Python
.py
100
30.83
88
0.504322
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,605
dates_exporter.py
jrnl-org_jrnl/jrnl/plugins/dates_exporter.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

from typing import TYPE_CHECKING

from jrnl.plugins.text_exporter import TextExporter
from jrnl.plugins.util import get_journal_frequency_one_level

if TYPE_CHECKING:
    from jrnl.journals import Entry
    from jrnl.journals import Journal


class DatesExporter(TextExporter):
    """This Exporter lists dates and their respective entry counts, e.g. for heatmapping."""

    names = ["dates"]
    extension = "dates"

    @classmethod
    def export_entry(cls, entry: "Entry"):
        # Per-entry export is meaningless for a frequency listing.
        raise NotImplementedError

    @classmethod
    def export_journal(cls, journal: "Journal") -> str:
        """Return one "date, count" line per date present in the journal."""
        frequencies = get_journal_frequency_one_level(journal)
        lines = [f"{date}, {count}" for date, count in frequencies.items()]
        return "\n".join(lines)
924
Python
.py
21
39
88
0.727374
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,606
BaseEncryption.py
jrnl-org_jrnl/jrnl/encryption/BaseEncryption.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

import logging
from abc import ABC
from abc import abstractmethod

from jrnl.exception import JrnlException
from jrnl.messages import Message
from jrnl.messages import MsgStyle
from jrnl.messages import MsgText


class BaseEncryption(ABC):
    """Abstract base for journal encryption back ends.

    Subclasses implement ``_encrypt``/``_decrypt``; the public
    ``encrypt``/``decrypt`` wrappers add logging and failure handling.
    """

    def __init__(self, journal_name: str, config: dict):
        logging.debug("start")
        self._encoding: str = "utf-8"
        self._journal_name: str = journal_name
        self._config: dict = config

    def clear(self) -> None:
        """Forget any cached secrets; no-op by default."""
        pass

    def encrypt(self, text: str) -> bytes:
        """Encrypt ``text`` and return the ciphertext bytes."""
        logging.debug("encrypting")
        return self._encrypt(text)

    def decrypt(self, text: bytes) -> str:
        """Decrypt ``text``; raise JrnlException when decryption fails."""
        logging.debug("decrypting")
        result = self._decrypt(text)
        if result is None:
            raise JrnlException(
                Message(MsgText.DecryptionFailedGeneric, MsgStyle.ERROR)
            )
        return result

    @abstractmethod
    def _encrypt(self, text: str) -> bytes:
        """
        Scheme-specific encryption. Separate from ``encrypt`` because an
        implementation may need to act first (e.g. prompt for a password).
        """

    @abstractmethod
    def _decrypt(self, text: bytes) -> str | None:
        """
        Scheme-specific decryption; returns None on failure. Separate from
        ``decrypt`` for the same reason as ``_encrypt``.
        """
1,480
Python
.py
43
27.418605
72
0.650315
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,607
BaseKeyEncryption.py
jrnl-org_jrnl/jrnl/encryption/BaseKeyEncryption.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html from .BaseEncryption import BaseEncryption class BaseKeyEncryption(BaseEncryption): pass
192
Python
.py
5
36
52
0.815217
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,608
BasePasswordEncryption.py
jrnl-org_jrnl/jrnl/encryption/BasePasswordEncryption.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html import logging from jrnl.encryption.BaseEncryption import BaseEncryption from jrnl.exception import JrnlException from jrnl.keyring import get_keyring_password from jrnl.messages import Message from jrnl.messages import MsgStyle from jrnl.messages import MsgText from jrnl.prompt import create_password from jrnl.prompt import prompt_password class BasePasswordEncryption(BaseEncryption): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) logging.debug("start") self._attempts: int = 0 self._max_attempts: int = 3 self._password: str = "" self._check_keyring: bool = True @property def check_keyring(self) -> bool: return self._check_keyring @check_keyring.setter def check_keyring(self, value: bool) -> None: self._check_keyring = value @property def password(self) -> str | None: return self._password @password.setter def password(self, value: str) -> None: self._password = value def clear(self): self.password = None self.check_keyring = False def encrypt(self, text: str) -> bytes: logging.debug("encrypting") if not self.password: if self.check_keyring and ( keyring_pw := get_keyring_password(self._journal_name) ): self.password = keyring_pw if not self.password: self.password = create_password(self._journal_name) return self._encrypt(text) def decrypt(self, text: bytes) -> str: logging.debug("decrypting") if not self.password: if self.check_keyring and ( keyring_pw := get_keyring_password(self._journal_name) ): self.password = keyring_pw if not self.password: self._prompt_password() while (result := self._decrypt(text)) is None: self._prompt_password() return result def _prompt_password(self) -> None: if self._attempts >= self._max_attempts: raise JrnlException( Message(MsgText.PasswordMaxTriesExceeded, MsgStyle.ERROR) ) first_try = self._attempts == 0 self.password = prompt_password(first_try=first_try) self._attempts += 1
2,447
Python
.py
64
29.828125
73
0.632558
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,609
__init__.py
jrnl-org_jrnl/jrnl/encryption/__init__.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html from enum import Enum from importlib import import_module from typing import TYPE_CHECKING from typing import Type if TYPE_CHECKING: from .BaseEncryption import BaseEncryption class EncryptionMethods(str, Enum): def __str__(self) -> str: return self.value NONE = "NoEncryption" JRNLV1 = "Jrnlv1Encryption" JRNLV2 = "Jrnlv2Encryption" def determine_encryption_method(config: str | bool) -> Type["BaseEncryption"]: ENCRYPTION_METHODS = { True: EncryptionMethods.JRNLV2, # the default False: EncryptionMethods.NONE, "jrnlv1": EncryptionMethods.JRNLV1, "jrnlv2": EncryptionMethods.JRNLV2, } key = config if isinstance(config, str): key = config.lower() my_class = ENCRYPTION_METHODS[key] return getattr(import_module(f"jrnl.encryption.{my_class}"), my_class)
954
Python
.py
26
31.769231
78
0.718954
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,610
NoEncryption.py
jrnl-org_jrnl/jrnl/encryption/NoEncryption.py
# Copyright © 2012-2023 jrnl contributors
# License: https://www.gnu.org/licenses/gpl-3.0.html

import logging

from jrnl.encryption.BaseEncryption import BaseEncryption


class NoEncryption(BaseEncryption):
    """Pass-through back end: stores journal text as plain encoded bytes."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        logging.debug("start")

    def _encrypt(self, text: str) -> bytes:
        """Encode the text with the configured encoding; no actual encryption."""
        logging.debug("encrypting")
        encoded = text.encode(self._encoding)
        return encoded

    def _decrypt(self, text: bytes) -> str:
        """Decode the stored bytes back into text."""
        logging.debug("decrypting")
        decoded = text.decode(self._encoding)
        return decoded
571
Python
.py
14
35.071429
57
0.684211
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,611
Jrnlv2Encryption.py
jrnl-org_jrnl/jrnl/encryption/Jrnlv2Encryption.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html import base64 import logging from cryptography.fernet import Fernet from cryptography.fernet import InvalidToken from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC from .BasePasswordEncryption import BasePasswordEncryption class Jrnlv2Encryption(BasePasswordEncryption): def __init__(self, *args, **kwargs) -> None: # Salt is hard-coded self._salt: bytes = b"\xf2\xd5q\x0e\xc1\x8d.\xde\xdc\x8e6t\x89\x04\xce\xf8" self._key: bytes = b"" super().__init__(*args, **kwargs) logging.debug("start") @property def password(self): return self._password @password.setter def password(self, value: str | None): self._password = value self._make_key() def _make_key(self) -> None: if self._password is None: # Password was removed after being set self._key = None return password = self.password.encode(self._encoding) kdf = PBKDF2HMAC( algorithm=hashes.SHA256(), length=32, salt=self._salt, iterations=100_000, backend=default_backend(), ) key = kdf.derive(password) self._key = base64.urlsafe_b64encode(key) def _encrypt(self, text: str) -> bytes: logging.debug("encrypting") return Fernet(self._key).encrypt(text.encode(self._encoding)) def _decrypt(self, text: bytes) -> str | None: logging.debug("decrypting") try: return Fernet(self._key).decrypt(text).decode(self._encoding) except (InvalidToken, IndexError): return None
1,850
Python
.py
48
30.979167
83
0.656058
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,612
Jrnlv1Encryption.py
jrnl-org_jrnl/jrnl/encryption/Jrnlv1Encryption.py
# Copyright © 2012-2023 jrnl contributors # License: https://www.gnu.org/licenses/gpl-3.0.html import hashlib import logging from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import padding from cryptography.hazmat.primitives.ciphers import Cipher from cryptography.hazmat.primitives.ciphers import algorithms from cryptography.hazmat.primitives.ciphers import modes from jrnl.encryption.BasePasswordEncryption import BasePasswordEncryption class Jrnlv1Encryption(BasePasswordEncryption): def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) logging.debug("start") def _encrypt(self, _: str) -> bytes: raise NotImplementedError def _decrypt(self, text: bytes) -> str | None: logging.debug("decrypting") iv, cipher = text[:16], text[16:] password = self._password or "" decryption_key = hashlib.sha256(password.encode(self._encoding)).digest() decryptor = Cipher( algorithms.AES(decryption_key), modes.CBC(iv), default_backend() ).decryptor() try: plain_padded = decryptor.update(cipher) + decryptor.finalize() if plain_padded[-1] in (" ", 32): # Ancient versions of jrnl. Do not judge me. return plain_padded.decode(self._encoding).rstrip(" ") else: unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() plain = unpadder.update(plain_padded) + unpadder.finalize() return plain.decode(self._encoding) except ValueError: return None
1,657
Python
.py
35
39.285714
81
0.676161
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,613
allow_all_python_version.py
jrnl-org_jrnl/.build/allow_all_python_version.py
# Build helper: relax the Python version constraint in pyproject.toml to "*"
# so the package metadata allows installation on any interpreter version.
import toml

pyproject = toml.load("pyproject.toml")

pyproject["tool"]["poetry"]["dependencies"]["python"] = "*"

# Rewrite the file in place with the loosened constraint.
with open("pyproject.toml", "w") as toml_file:
    toml.dump(pyproject, toml_file)
198
Python
.pyt
5
37.2
59
0.705263
jrnl-org/jrnl
6,434
519
140
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,614
.pylintrc
osm-search_Nominatim/.pylintrc
[MASTER] extension-pkg-whitelist=osmium,falcon ignored-modules=icu,datrie [MESSAGES CONTROL] [TYPECHECK] # closing added here because it sometimes triggers a false positive with # 'with' statements. ignored-classes=NominatimArgs,closing # 'too-many-ancestors' is triggered already by deriving from UserDict # 'not-context-manager' disabled because it causes false positives once # typed Python is enabled. See also https://github.com/PyCQA/pylint/issues/5273 disable=too-few-public-methods,duplicate-code,too-many-ancestors,bad-option-value,no-self-use,not-context-manager,use-dict-literal,chained-comparison,attribute-defined-outside-init,too-many-boolean-expressions,contextmanager-generator-missing-cleanup,too-many-positional-arguments good-names=i,j,x,y,m,t,fd,db,cc,x1,x2,y1,y2,pt,k,v,nr [DESIGN] max-returns=7
826
Python
.py
15
53.6
280
0.824627
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,615
.mypy.ini
osm-search_Nominatim/.mypy.ini
[mypy] plugins = sqlalchemy.ext.mypy.plugin [mypy-sanic_cors.*] ignore_missing_imports = True [mypy-icu.*] ignore_missing_imports = True [mypy-asyncpg.*] ignore_missing_imports = True [mypy-datrie.*] ignore_missing_imports = True [mypy-dotenv.*] ignore_missing_imports = True [mypy-falcon.*] ignore_missing_imports = True [mypy-geoalchemy2.*] ignore_missing_imports = True
380
Python
.py
16
22.3125
36
0.778711
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,616
nominatim-cli.py
osm-search_Nominatim/nominatim-cli.py
#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Helper script for development to run nominatim from the source directory.
"""
from pathlib import Path
import sys

# Make the in-tree package importable ahead of any installed copy.
sys.path.insert(1, str((Path(__file__) / '..' / 'src').resolve()))

from nominatim_db import cli

# Use sys.exit rather than the site-module `exit` builtin: `exit` is only
# installed by the site module and is absent under `python -S` or embedded
# interpreters, while sys.exit is always available.
sys.exit(cli.nominatim(module_dir=None, osm2pgsql_path=None))
509
Python
.py
15
32.733333
73
0.747454
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,617
conftest.py
osm-search_Nominatim/test/python/conftest.py
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import itertools
import sys
from pathlib import Path

import psycopg
from psycopg import sql as pysql
import pytest

# always test against the source
SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
sys.path.insert(0, str(SRC_DIR / 'src'))

from nominatim_db.config import Configuration
from nominatim_db.db import connection
from nominatim_db.db.sql_preprocessor import SQLPreprocessor
import nominatim_db.tokenizer.factory

import dummy_tokenizer
import mocks
from cursor import CursorForTesting


@pytest.fixture
def src_dir():
    # Root of the checked-out source tree (see SRC_DIR above).
    return SRC_DIR


@pytest.fixture
def temp_db(monkeypatch):
    """ Create an empty database for the test. The database name is also
        exported into NOMINATIM_DATABASE_DSN.
    """
    name = 'test_nominatim_python_unittest'

    # NOTE(review): pysql composition joins parts with no separator, so the
    # SQL literals presumably carry a trailing space before the quoted
    # identifier — confirm against the original file.
    with psycopg.connect(dbname='postgres', autocommit=True) as conn:
        with conn.cursor() as cur:
            cur.execute(pysql.SQL('DROP DATABASE IF EXISTS') + pysql.Identifier(name))
            cur.execute(pysql.SQL('CREATE DATABASE') + pysql.Identifier(name))

    monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=' + name)

    with psycopg.connect(dbname=name) as conn:
        with conn.cursor() as cur:
            cur.execute('CREATE EXTENSION hstore')

    yield name

    # Teardown: drop the scratch database again after the test.
    with psycopg.connect(dbname='postgres', autocommit=True) as conn:
        with conn.cursor() as cur:
            cur.execute('DROP DATABASE IF EXISTS {}'.format(name))


@pytest.fixture
def dsn(temp_db):
    # DSN string pointing at the scratch database.
    return 'dbname=' + temp_db


@pytest.fixture
def temp_db_with_extensions(temp_db):
    # Scratch database with the PostGIS extension enabled.
    with psycopg.connect(dbname=temp_db) as conn:
        with conn.cursor() as cur:
            cur.execute('CREATE EXTENSION postgis')

    return temp_db


@pytest.fixture
def temp_db_conn(temp_db):
    """ Connection to the test database.
    """
    with connection.connect('', autocommit=True, dbname=temp_db) as conn:
        connection.register_hstore(conn)
        yield conn


@pytest.fixture
def temp_db_cursor(temp_db):
    """ Connection and cursor towards the test database. The connection will
        be in auto-commit mode.
    """
    with psycopg.connect(dbname=temp_db, autocommit=True,
                         cursor_factory=CursorForTesting) as conn:
        connection.register_hstore(conn)
        with conn.cursor() as cur:
            yield cur


@pytest.fixture
def table_factory(temp_db_conn):
    """ A fixture that creates new SQL tables, potentially filled with
        content.
    """
    def mk_table(name, definition='id INT', content=None):
        with psycopg.ClientCursor(temp_db_conn) as cur:
            cur.execute('CREATE TABLE {} ({})'.format(name, definition))
            if content:
                # One placeholder per column of the first row.
                sql = pysql.SQL("INSERT INTO {} VALUES ({})")\
                           .format(pysql.Identifier(name),
                                   pysql.SQL(',').join([pysql.Placeholder() for _ in range(len(content[0]))]))
                cur.executemany(sql , content)

    return mk_table


@pytest.fixture
def def_config():
    # Default configuration without a project directory.
    cfg = Configuration(None)
    cfg.set_libdirs(osm2pgsql=None)
    return cfg


@pytest.fixture
def project_env(tmp_path):
    # Configuration rooted in a throwaway project directory.
    projdir = tmp_path / 'project'
    projdir.mkdir()
    cfg = Configuration(projdir)
    cfg.set_libdirs(osm2pgsql=None)
    return cfg


@pytest.fixture
def property_table(table_factory, temp_db_conn):
    # nominatim_properties table wrapped in the mock helper object.
    table_factory('nominatim_properties', 'property TEXT, value TEXT')

    return mocks.MockPropertyTable(temp_db_conn)


@pytest.fixture
def status_table(table_factory):
    """ Create an empty version of the status table and
        the status logging table.
    """
    table_factory('import_status',
                  """lastimportdate timestamp with time zone NOT NULL,
                     sequence_id integer,
                     indexed boolean""")
    table_factory('import_osmosis_log',
                  """batchend timestamp,
                     batchseq integer,
                     batchsize bigint,
                     starttime timestamp,
                     endtime timestamp,
                     event text""")


@pytest.fixture
def place_table(temp_db_with_extensions, table_factory):
    """ Create an empty version of the place table.
    """
    table_factory('place',
                  """osm_id int8 NOT NULL,
                     osm_type char(1) NOT NULL,
                     class text NOT NULL,
                     type text NOT NULL,
                     name hstore,
                     admin_level smallint,
                     address hstore,
                     extratags hstore,
                     geometry Geometry(Geometry,4326) NOT NULL""")


@pytest.fixture
def place_row(place_table, temp_db_cursor):
    """ A factory for rows in the place table. The table is created as a
        prerequisite to the fixture.
    """
    # Auto-incrementing fallback OSM id when the caller passes none.
    idseq = itertools.count(1001)
    def _insert(osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None,
                admin_level=None, address=None, extratags=None, geom=None):
        temp_db_cursor.execute("INSERT INTO place VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
                               (osm_id or next(idseq), osm_type, cls, typ, names,
                                admin_level, address, extratags,
                                geom or 'SRID=4326;POINT(0 0)'))

    return _insert


@pytest.fixture
def placex_table(temp_db_with_extensions, temp_db_conn):
    """ Create an empty version of the place table.
    """
    return mocks.MockPlacexTable(temp_db_conn)


@pytest.fixture
def osmline_table(temp_db_with_extensions, table_factory):
    # Interpolation-line table used by address interpolation tests.
    table_factory('location_property_osmline',
                  """place_id BIGINT,
                     osm_id BIGINT,
                     parent_place_id BIGINT,
                     geometry_sector INTEGER,
                     indexed_date TIMESTAMP,
                     startnumber INTEGER,
                     endnumber INTEGER,
                     partition SMALLINT,
                     indexed_status SMALLINT,
                     linegeo GEOMETRY,
                     interpolationtype TEXT,
                     address HSTORE,
                     postcode TEXT,
                     country_code VARCHAR(2)""")


@pytest.fixture
def sql_preprocessor_cfg(tmp_path, table_factory, temp_db_with_extensions):
    # Minimal setup required by the SQL preprocessor (partition lookup table).
    table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
    cfg = Configuration(None)
    cfg.set_libdirs(osm2pgsql=None, sql=tmp_path)
    return cfg


@pytest.fixture
def sql_preprocessor(sql_preprocessor_cfg, temp_db_conn):
    # Ready-to-use preprocessor bound to the scratch database.
    return SQLPreprocessor(temp_db_conn, sql_preprocessor_cfg)


@pytest.fixture
def tokenizer_mock(monkeypatch, property_table):
    """ Sets up the configuration so that the test dummy tokenizer will be
        loaded when the tokenizer factory is used. Also returns a factory
        with which a new dummy tokenizer may be created.
    """
    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')

    def _import_dummy(*args, **kwargs):
        # Replaces the factory's module import with the dummy module.
        return dummy_tokenizer

    monkeypatch.setattr(nominatim_db.tokenizer.factory,
                        "_import_tokenizer", _import_dummy)
    property_table.set('tokenizer', 'dummy')

    def _create_tokenizer():
        return dummy_tokenizer.DummyTokenizer(None, None)

    return _create_tokenizer
7,517
Python
.py
187
31.540107
110
0.63429
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,618
cursor.py
osm-search_Nominatim/test/python/cursor.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Specialised psycopg cursor with shortcut functions useful for testing. """ import psycopg class CursorForTesting(psycopg.Cursor): """ Extension to the DictCursor class that provides execution short-cuts that simplify writing assertions. """ def scalar(self, sql, params=None): """ Execute a query with a single return value and return this value. Raises an assertion when not exactly one row is returned. """ self.execute(sql, params) assert self.rowcount == 1 return self.fetchone()[0] def row_set(self, sql, params=None): """ Execute a query and return the result as a set of tuples. Fails when the SQL command returns duplicate rows. """ self.execute(sql, params) result = set((tuple(row) for row in self)) assert len(result) == self.rowcount return result def table_exists(self, table): """ Check that a table with the given name exists in the database. """ num = self.scalar("""SELECT count(*) FROM pg_tables WHERE tablename = %s""", (table, )) return num == 1 def index_exists(self, table, index): """ Check that an indexwith the given name exists on the given table. """ num = self.scalar("""SELECT count(*) FROM pg_indexes WHERE tablename = %s and indexname = %s""", (table, index)) return num == 1 def table_rows(self, table, where=None): """ Return the number of rows in the given table. """ if where is None: return self.scalar('SELECT count(*) FROM ' + table) return self.scalar('SELECT count(*) FROM {} WHERE {}'.format(table, where))
2,016
Python
.py
48
33.645833
83
0.617391
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,619
mocks.py
osm-search_Nominatim/test/python/mocks.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Custom mocks for testing. """ import itertools from nominatim_db.db import properties # This must always point to the mock word table for the default tokenizer. from mock_icu_word_table import MockIcuWordTable as MockWordTable class MockPlacexTable: """ A placex table for testing. """ def __init__(self, conn): self.idseq = itertools.count(10000) self.conn = conn with conn.cursor() as cur: cur.execute("""CREATE TABLE placex ( place_id BIGINT, parent_place_id BIGINT, linked_place_id BIGINT, importance FLOAT, indexed_date TIMESTAMP, geometry_sector INTEGER, rank_address SMALLINT, rank_search SMALLINT, partition SMALLINT, indexed_status SMALLINT, osm_id int8, osm_type char(1), class text, type text, name hstore, admin_level smallint, address hstore, extratags hstore, token_info jsonb, geometry Geometry(Geometry,4326), wikipedia TEXT, country_code varchar(2), housenumber TEXT, postcode TEXT, centroid GEOMETRY(Geometry, 4326))""") cur.execute("CREATE SEQUENCE IF NOT EXISTS seq_place") conn.commit() def add(self, osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None, admin_level=None, address=None, extratags=None, geom='POINT(10 4)', country=None, housenumber=None, rank_search=30): with self.conn.cursor() as cur: cur.execute("""INSERT INTO placex (place_id, osm_type, osm_id, class, type, name, admin_level, address, housenumber, rank_search, extratags, geometry, country_code) VALUES(nextval('seq_place'), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)""", (osm_type, osm_id or next(self.idseq), cls, typ, names, admin_level, address, housenumber, rank_search, extratags, 'SRID=4326;' + geom, country)) self.conn.commit() class MockPropertyTable: """ A property table for testing. 
""" def __init__(self, conn): self.conn = conn def set(self, name, value): """ Set a property in the table to the given value. """ properties.set_property(self.conn, name, value) def get(self, name): """ Set a property in the table to the given value. """ return properties.get_property(self.conn, name)
3,479
Python
.py
75
28.68
108
0.482027
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,620
mock_icu_word_table.py
osm-search_Nominatim/test/python/mock_icu_word_table.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Legacy word table for testing with functions to prefil and test contents of the table. """ from nominatim_db.db.connection import execute_scalar class MockIcuWordTable: """ A word table for testing using legacy word table structure. """ def __init__(self, conn): self.conn = conn with conn.cursor() as cur: cur.execute("""CREATE TABLE word (word_id INTEGER, word_token text NOT NULL, type text NOT NULL, word text, info jsonb)""") conn.commit() def add_full_word(self, word_id, word, word_token=None): with self.conn.cursor() as cur: cur.execute("""INSERT INTO word (word_id, word_token, type, word, info) VALUES(%s, %s, 'W', %s, '{}'::jsonb)""", (word_id, word or word_token, word)) self.conn.commit() def add_special(self, word_token, word, cls, typ, oper): with self.conn.cursor() as cur: cur.execute("""INSERT INTO word (word_token, type, word, info) VALUES (%s, 'S', %s, json_build_object('class', %s::text, 'type', %s::text, 'op', %s::text)) """, (word_token, word, cls, typ, oper)) self.conn.commit() def add_country(self, country_code, word_token): with self.conn.cursor() as cur: cur.execute("""INSERT INTO word (word_token, type, word) VALUES(%s, 'C', %s)""", (word_token, country_code)) self.conn.commit() def add_postcode(self, word_token, postcode): with self.conn.cursor() as cur: cur.execute("""INSERT INTO word (word_token, type, word) VALUES (%s, 'P', %s) """, (word_token, postcode)) self.conn.commit() def add_housenumber(self, word_id, word_tokens, word=None): with self.conn.cursor() as cur: if isinstance(word_tokens, str): # old style without analyser cur.execute("""INSERT INTO word (word_id, word_token, type) VALUES (%s, %s, 'H') """, (word_id, word_tokens)) else: if word is None: word = word_tokens[0] for token in word_tokens: cur.execute("""INSERT INTO 
word (word_id, word_token, type, word, info) VALUES (%s, %s, 'H', %s, jsonb_build_object('lookup', %s::text)) """, (word_id, token, word, word_tokens[0])) self.conn.commit() def count(self): return execute_scalar(self.conn, "SELECT count(*) FROM word") def count_special(self): return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'S'") def count_housenumbers(self): return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'H'") def get_special(self): with self.conn.cursor() as cur: cur.execute("SELECT word_token, info, word FROM word WHERE type = 'S'") result = set(((row[0], row[2], row[1]['class'], row[1]['type'], row[1]['op']) for row in cur)) assert len(result) == cur.rowcount, "Word table has duplicates." return result def get_country(self): with self.conn.cursor() as cur: cur.execute("SELECT word, word_token FROM word WHERE type = 'C'") result = set((tuple(row) for row in cur)) assert len(result) == cur.rowcount, "Word table has duplicates." return result def get_postcodes(self): with self.conn.cursor() as cur: cur.execute("SELECT word FROM word WHERE type = 'P'") return set((row[0] for row in cur)) def get_partial_words(self): with self.conn.cursor() as cur: cur.execute("SELECT word_token, info FROM word WHERE type ='w'") return set(((row[0], row[1]['count']) for row in cur))
4,617
Python
.py
92
35.25
102
0.514896
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,621
dummy_tokenizer.py
osm-search_Nominatim/test/python/dummy_tokenizer.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tokenizer for testing. """ from nominatim_db.data.place_info import PlaceInfo from nominatim_db.config import Configuration def create(dsn, data_dir): """ Create a new instance of the tokenizer provided by this module. """ return DummyTokenizer(dsn, data_dir) class DummyTokenizer: def __init__(self, dsn, data_dir): self.dsn = dsn self.data_dir = data_dir self.init_state = None self.analyser_cache = {} def init_new_db(self, *args, **kwargs): assert self.init_state is None self.init_state = "new" def init_from_project(self, config): assert isinstance(config, Configuration) assert self.init_state is None self.init_state = "loaded" @staticmethod def finalize_import(_): pass def name_analyzer(self): return DummyNameAnalyzer(self.analyser_cache) class DummyNameAnalyzer: def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.close() def __init__(self, cache): self.analyser_cache = cache cache['countries'] = [] def close(self): pass @staticmethod def normalize_postcode(postcode): return postcode @staticmethod def update_postcodes_from_db(): pass def update_special_phrases(self, phrases, should_replace): self.analyser_cache['special_phrases'] = phrases def add_country_names(self, code, names): self.analyser_cache['countries'].append((code, names)) @staticmethod def process_place(place): assert isinstance(place, PlaceInfo) return {}
1,879
Python
.py
57
26.982456
71
0.667408
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,622
test_country_info.py
osm-search_Nominatim/test/python/data/test_country_info.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for function that handle country properties. """ from textwrap import dedent import pytest from nominatim_db.data import country_info @pytest.fixture def loaded_country(def_config): country_info.setup_country_config(def_config) @pytest.fixture def env_with_country_config(project_env): def _mk_config(cfg): (project_env.project_dir / 'country_settings.yaml').write_text(dedent(cfg)) return project_env return _mk_config @pytest.mark.parametrize("no_partitions", (True, False)) def test_setup_country_tables(src_dir, temp_db_with_extensions, dsn, temp_db_cursor, loaded_country, no_partitions): country_info.setup_country_tables(dsn, src_dir / 'data', no_partitions) assert temp_db_cursor.table_exists('country_name') assert temp_db_cursor.table_rows('country_name') == \ temp_db_cursor.scalar( 'SELECT count(DISTINCT country_code) FROM country_name') partitions = temp_db_cursor.row_set( "SELECT DISTINCT partition FROM country_name") if no_partitions: assert partitions == {(0, )} else: assert len(partitions) > 10 assert temp_db_cursor.table_exists('country_osm_grid') assert temp_db_cursor.table_rows('country_osm_grid') > 100 @pytest.mark.parametrize("languages", (None, ['fr', 'en'])) def test_create_country_names(temp_db_with_extensions, temp_db_conn, temp_db_cursor, table_factory, tokenizer_mock, languages, loaded_country): table_factory('country_name', 'country_code varchar(2), name hstore', content=(('us', '"name"=>"us1","name:af"=>"us2"'), ('fr', '"name"=>"Fra", "name:en"=>"Fren"'))) assert temp_db_cursor.scalar("SELECT count(*) FROM country_name") == 2 tokenizer = tokenizer_mock() country_info.create_country_names(temp_db_conn, tokenizer, languages) assert len(tokenizer.analyser_cache['countries']) == 2 result_set = {k: set(v.values()) for k, v in 
tokenizer.analyser_cache['countries']} if languages: assert result_set == {'us': set(('us', 'us1')), 'fr': set(('fr', 'Fra', 'Fren'))} else: assert result_set == {'us': set(('us', 'us1', 'us2')), 'fr': set(('fr', 'Fra', 'Fren'))} def test_setup_country_names_prefixes(env_with_country_config): config = env_with_country_config("""\ es: names: name: en: Spain de: Spanien default: Espagñe us: names: short_name: default: USA name: default: United States en: United States """) info = country_info._CountryInfo() info.load(config) assert info.get('es')['names'] == {"name": "Espagñe", "name:en": "Spain", "name:de": "Spanien"} assert info.get('us')['names'] == {"name": "United States", "name:en": "United States", "short_name": "USA"} assert 'names' not in info.get('xx') def test_setup_country_config_languages_not_loaded(env_with_country_config): config = env_with_country_config("""\ de: partition: 3 names: name: default: Deutschland """) info = country_info._CountryInfo() info.load(config) assert dict(info.items()) == {'de': {'partition': 3, 'languages': [], 'names': {'name': 'Deutschland'}}} def test_setup_country_config_name_not_loaded(env_with_country_config): config = env_with_country_config("""\ de: partition: 3 languages: de names: """) info = country_info._CountryInfo() info.load(config) assert dict(info.items()) == {'de': {'partition': 3, 'languages': ['de'], 'names': {} }} def test_setup_country_config_names_not_loaded(env_with_country_config): config = env_with_country_config(""" de: partition: 3 languages: de """) info = country_info._CountryInfo() info.load(config) assert dict(info.items()) == {'de': {'partition': 3, 'languages': ['de'], 'names': {} }} def test_setup_country_config_special_character(env_with_country_config): config = env_with_country_config(""" bq: partition: 250 languages: nl names: name: default: "\\N" """) info = country_info._CountryInfo() info.load(config) assert dict(info.items()) == {'bq': {'partition': 250, 'languages': ['nl'], 'names': 
{'name': '\x85'} }}
6,507
Python
.py
133
29.142857
88
0.444743
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,623
test_utils.py
osm-search_Nominatim/test/python/db/test_utils.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for DB utility functions in db.utils """ import json import pytest import nominatim_db.db.utils as db_utils from nominatim_db.errors import UsageError def test_execute_file_success(dsn, temp_db_cursor, tmp_path): tmpfile = tmp_path / 'test.sql' tmpfile.write_text('CREATE TABLE test (id INT);\nINSERT INTO test VALUES(56);') db_utils.execute_file(dsn, tmpfile) assert temp_db_cursor.row_set('SELECT * FROM test') == {(56, )} def test_execute_file_bad_file(dsn, tmp_path): with pytest.raises(FileNotFoundError): db_utils.execute_file(dsn, tmp_path / 'test2.sql') def test_execute_file_bad_sql(dsn, tmp_path): tmpfile = tmp_path / 'test.sql' tmpfile.write_text('CREATE STABLE test (id INT)') with pytest.raises(UsageError): db_utils.execute_file(dsn, tmpfile) def test_execute_file_bad_sql_ignore_errors(dsn, tmp_path): tmpfile = tmp_path / 'test.sql' tmpfile.write_text('CREATE STABLE test (id INT)') db_utils.execute_file(dsn, tmpfile, ignore_errors=True) def test_execute_file_with_pre_code(dsn, tmp_path, temp_db_cursor): tmpfile = tmp_path / 'test.sql' tmpfile.write_text('INSERT INTO test VALUES(4)') db_utils.execute_file(dsn, tmpfile, pre_code='CREATE TABLE test (id INT)') assert temp_db_cursor.row_set('SELECT * FROM test') == {(4, )} def test_execute_file_with_post_code(dsn, tmp_path, temp_db_cursor): tmpfile = tmp_path / 'test.sql' tmpfile.write_text('CREATE TABLE test (id INT)') db_utils.execute_file(dsn, tmpfile, post_code='INSERT INTO test VALUES(23)') assert temp_db_cursor.row_set('SELECT * FROM test') == {(23, )}
1,867
Python
.py
40
42.875
83
0.71057
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,624
test_status.py
osm-search_Nominatim/test/python/db/test_status.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for status table manipulation. """ import datetime as dt import pytest import nominatim_db.db.status from nominatim_db.errors import UsageError OSM_NODE_DATA = """\ <osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/"> <node id="45673" visible="true" version="1" changeset="2047" timestamp="2006-01-27T22:09:10Z" user="Foo" uid="111" lat="48.7586670" lon="8.1343060"> </node> </osm> """ def iso_date(date): return dt.datetime.strptime(date, nominatim_db.db.status.ISODATE_FORMAT)\ .replace(tzinfo=dt.timezone.utc) @pytest.fixture(autouse=True) def setup_status_table(status_table): pass @pytest.mark.parametrize('offline', [True, False]) def test_compute_database_date_from_osm2pgsql(table_factory, temp_db_conn, offline): table_factory('osm2pgsql_properties', 'property TEXT, value TEXT', content=(('current_timestamp', '2024-01-03T23:45:54Z'), )) date = nominatim_db.db.status.compute_database_date(temp_db_conn, offline=offline) assert date == iso_date('2024-01-03T23:45:54') def test_compute_database_date_from_osm2pgsql_nodata(table_factory, temp_db_conn): table_factory('osm2pgsql_properties', 'property TEXT, value TEXT') with pytest.raises(UsageError, match='Cannot determine database date from data in offline mode'): nominatim_db.db.status.compute_database_date(temp_db_conn, offline=True) def test_compute_database_date_place_empty(place_table, temp_db_conn): with pytest.raises(UsageError): nominatim_db.db.status.compute_database_date(temp_db_conn) def test_compute_database_date_valid(monkeypatch, place_row, temp_db_conn): place_row(osm_type='N', osm_id=45673) requested_url = [] def mock_url(url): 
requested_url.append(url) return OSM_NODE_DATA monkeypatch.setattr(nominatim_db.db.status, "get_url", mock_url) date = nominatim_db.db.status.compute_database_date(temp_db_conn) assert requested_url == ['https://www.openstreetmap.org/api/0.6/node/45673/1'] assert date == iso_date('2006-01-27T22:09:10') def test_compute_database_broken_api(monkeypatch, place_row, temp_db_conn): place_row(osm_type='N', osm_id=45673) requested_url = [] def mock_url(url): requested_url.append(url) return '<osm version="0.6" generator="OpenStre' monkeypatch.setattr(nominatim_db.db.status, "get_url", mock_url) with pytest.raises(UsageError): nominatim_db.db.status.compute_database_date(temp_db_conn) def test_set_status_empty_table(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date) assert temp_db_cursor.row_set("SELECT * FROM import_status") == \ {(date, None, True)} def test_set_status_filled_table(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date) assert temp_db_cursor.table_rows('import_status') == 1 date = dt.datetime.fromordinal(1000100).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=456, indexed=False) assert temp_db_cursor.row_set("SELECT * FROM import_status") == \ {(date, 456, False)} def test_set_status_missing_date(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date) assert temp_db_cursor.table_rows('import_status') == 1 nominatim_db.db.status.set_status(temp_db_conn, date=None, seq=456, indexed=False) assert temp_db_cursor.row_set("SELECT * FROM import_status") == \ {(date, 456, False)} def test_get_status_empty_table(temp_db_conn): assert nominatim_db.db.status.get_status(temp_db_conn) == (None, None, 
None) def test_get_status_success(temp_db_conn): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=667, indexed=False) assert nominatim_db.db.status.get_status(temp_db_conn) == \ (date, 667, False) @pytest.mark.parametrize("old_state", [True, False]) @pytest.mark.parametrize("new_state", [True, False]) def test_set_indexed(temp_db_conn, temp_db_cursor, old_state, new_state): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) nominatim_db.db.status.set_status(temp_db_conn, date=date, indexed=old_state) nominatim_db.db.status.set_indexed(temp_db_conn, new_state) assert temp_db_cursor.scalar("SELECT indexed FROM import_status") == new_state def test_set_indexed_empty_status(temp_db_conn, temp_db_cursor): nominatim_db.db.status.set_indexed(temp_db_conn, True) assert temp_db_cursor.table_rows("import_status") == 0 def test_log_status(temp_db_conn, temp_db_cursor): date = dt.datetime.fromordinal(1000000).replace(tzinfo=dt.timezone.utc) start = dt.datetime.now() - dt.timedelta(hours=1) nominatim_db.db.status.set_status(temp_db_conn, date=date, seq=56) nominatim_db.db.status.log_status(temp_db_conn, start, 'index') temp_db_conn.commit() assert temp_db_cursor.table_rows("import_osmosis_log") == 1 assert temp_db_cursor.scalar("SELECT batchseq FROM import_osmosis_log") == 56 assert temp_db_cursor.scalar("SELECT event FROM import_osmosis_log") == 'index'
5,857
Python
.py
103
51.951456
204
0.725263
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,625
test_connection.py
osm-search_Nominatim/test/python/db/test_connection.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for specialised connection and cursor classes. """ import pytest import psycopg import nominatim_db.db.connection as nc @pytest.fixture def db(dsn): with nc.connect(dsn) as conn: yield conn def test_connection_table_exists(db, table_factory): assert not nc.table_exists(db, 'foobar') table_factory('foobar') assert nc.table_exists(db, 'foobar') def test_has_column_no_table(db): assert not nc.table_has_column(db, 'sometable', 'somecolumn') @pytest.mark.parametrize('name,result', [('tram', True), ('car', False)]) def test_has_column(db, table_factory, name, result): table_factory('stuff', 'tram TEXT') assert nc.table_has_column(db, 'stuff', name) == result def test_connection_index_exists(db, table_factory, temp_db_cursor): assert not nc.index_exists(db, 'some_index') table_factory('foobar') temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)') assert nc.index_exists(db, 'some_index') assert nc.index_exists(db, 'some_index', table='foobar') assert not nc.index_exists(db, 'some_index', table='bar') def test_drop_table_existing(db, table_factory): table_factory('dummy') assert nc.table_exists(db, 'dummy') nc.drop_tables(db, 'dummy') assert not nc.table_exists(db, 'dummy') def test_drop_table_non_existing(db): nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre') def test_drop_many_tables(db, table_factory): tables = [f'table{n}' for n in range(5)] for t in tables: table_factory(t) assert nc.table_exists(db, t) nc.drop_tables(db, *tables) for t in tables: assert not nc.table_exists(db, t) def test_drop_table_non_existing_force(db): with pytest.raises(psycopg.ProgrammingError, match='.*does not exist.*'): nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre', if_exists=False) def test_connection_server_version_tuple(db): ver = nc.server_version_tuple(db) 
assert isinstance(ver, tuple) assert len(ver) == 2 assert ver[0] > 8 def test_connection_postgis_version_tuple(db, temp_db_with_extensions): ver = nc.postgis_version_tuple(db) assert isinstance(ver, tuple) assert len(ver) == 2 assert ver[0] >= 2 def test_cursor_scalar(db, table_factory): table_factory('dummy') assert nc.execute_scalar(db, 'SELECT count(*) FROM dummy') == 0 def test_cursor_scalar_many_rows(db): with pytest.raises(RuntimeError, match='Query did not return a single row.'): nc.execute_scalar(db, 'SELECT * FROM pg_tables') def test_cursor_scalar_no_rows(db, table_factory): table_factory('dummy') with pytest.raises(RuntimeError, match='Query did not return a single row.'): nc.execute_scalar(db, 'SELECT id FROM dummy') def test_get_pg_env_add_variable(monkeypatch): monkeypatch.delenv('PGPASSWORD', raising=False) env = nc.get_pg_env('user=fooF') assert env['PGUSER'] == 'fooF' assert 'PGPASSWORD' not in env def test_get_pg_env_overwrite_variable(monkeypatch): monkeypatch.setenv('PGUSER', 'some default') env = nc.get_pg_env('user=overwriter') assert env['PGUSER'] == 'overwriter' def test_get_pg_env_ignore_unknown(): env = nc.get_pg_env('client_encoding=stuff', base_env={}) assert env == {}
3,495
Python
.py
83
37.686747
81
0.705113
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,626
test_properties.py
osm-search_Nominatim/test/python/db/test_properties.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for property table manpulation. """ import pytest from nominatim_db.db import properties @pytest.fixture def property_factory(property_table, temp_db_cursor): """ A function fixture that adds a property into the property table. """ def _add_property(name, value): temp_db_cursor.execute("INSERT INTO nominatim_properties VALUES(%s, %s)", (name, value)) return _add_property def test_get_property_existing(property_factory, temp_db_conn): property_factory('foo', 'bar') assert properties.get_property(temp_db_conn, 'foo') == 'bar' def test_get_property_unknown(property_factory, temp_db_conn): property_factory('other', 'bar') assert properties.get_property(temp_db_conn, 'foo') is None @pytest.mark.parametrize("prefill", (True, False)) def test_set_property_new(property_factory, temp_db_conn, temp_db_cursor, prefill): if prefill: property_factory('something', 'bar') properties.set_property(temp_db_conn, 'something', 'else') assert temp_db_cursor.scalar("""SELECT value FROM nominatim_properties WHERE property = 'something'""") == 'else' assert properties.get_property(temp_db_conn, 'something') == 'else'
1,477
Python
.py
33
39.363636
83
0.695804
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,627
test_sql_preprocessor.py
osm-search_Nominatim/test/python/db/test_sql_preprocessor.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for SQL preprocessing. """ import pytest import pytest_asyncio from nominatim_db.db.sql_preprocessor import SQLPreprocessor @pytest.fixture def sql_factory(tmp_path): def _mk_sql(sql_body): (tmp_path / 'test.sql').write_text(""" CREATE OR REPLACE FUNCTION test() RETURNS TEXT AS $$ BEGIN {} END; $$ LANGUAGE plpgsql IMMUTABLE;""".format(sql_body)) return 'test.sql' return _mk_sql @pytest.mark.parametrize("expr,ret", [ ("'a'", 'a'), ("'{{db.partitions|join}}'", '012'), ("{% if 'country_name' in db.tables %}'yes'{% else %}'no'{% endif %}", "yes"), ("{% if 'xxx' in db.tables %}'yes'{% else %}'no'{% endif %}", "no"), ("'{{db.tablespace.address_data}}'", ""), ("'{{db.tablespace.search_data}}'", 'TABLESPACE "dsearch"'), ("'{{db.tablespace.address_index}}'", 'TABLESPACE "iaddress"'), ("'{{db.tablespace.aux_data}}'", 'TABLESPACE "daux"') ]) def test_load_file_simple(sql_preprocessor_cfg, sql_factory, temp_db_conn, temp_db_cursor, monkeypatch, expr, ret): monkeypatch.setenv('NOMINATIM_TABLESPACE_SEARCH_DATA', 'dsearch') monkeypatch.setenv('NOMINATIM_TABLESPACE_ADDRESS_INDEX', 'iaddress') monkeypatch.setenv('NOMINATIM_TABLESPACE_AUX_DATA', 'daux') sqlfile = sql_factory("RETURN {};".format(expr)) SQLPreprocessor(temp_db_conn, sql_preprocessor_cfg).run_sql_file(temp_db_conn, sqlfile) assert temp_db_cursor.scalar('SELECT test()') == ret def test_load_file_with_params(sql_preprocessor, sql_factory, temp_db_conn, temp_db_cursor): sqlfile = sql_factory("RETURN '{{ foo }} {{ bar }}';") sql_preprocessor.run_sql_file(temp_db_conn, sqlfile, bar='XX', foo='ZZ') assert temp_db_cursor.scalar('SELECT test()') == 'ZZ XX' @pytest.mark.asyncio async def test_load_parallel_file(dsn, sql_preprocessor, tmp_path, temp_db_cursor): (tmp_path / 'test.sql').write_text(""" CREATE TABLE 
foo (a TEXT); CREATE TABLE foo2(a TEXT);""" + "\n---\nCREATE TABLE bar (b INT);") await sql_preprocessor.run_parallel_sql_file(dsn, 'test.sql', num_threads=4) assert temp_db_cursor.table_exists('foo') assert temp_db_cursor.table_exists('foo2') assert temp_db_cursor.table_exists('bar')
2,555
Python
.py
57
39.105263
92
0.64211
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,628
conftest.py
osm-search_Nominatim/test/python/tools/conftest.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. import pytest @pytest.fixture def osm2pgsql_options(temp_db, tmp_path): """ A standard set of options for osm2pgsql together with a osm2pgsql mock that just reflects the command line. """ osm2pgsql_exec = tmp_path / 'osm2pgsql_mock' osm2pgsql_exec.write_text("""#!/bin/sh if [ "$*" = "--version" ]; then >&2 echo "2024-08-09 11:16:23 osm2pgsql version 11.7.2 (11.7.2)" else echo "$@" fi """) osm2pgsql_exec.chmod(0o777) return dict(osm2pgsql=str(osm2pgsql_exec), osm2pgsql_cache=10, osm2pgsql_style='style.file', threads=1, dsn='dbname=' + temp_db, flatnode_file='', tablespaces=dict(slim_data='', slim_index='', main_data='', main_index=''))
1,028
Python
.py
29
28.482759
75
0.61005
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,629
test_check_database.py
osm-search_Nominatim/test/python/tools/test_check_database.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for database integrity checks. """ import pytest from nominatim_db.tools import check_database as chkdb import nominatim_db.version def test_check_database_unknown_db(def_config, monkeypatch): monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=fjgkhughwgh2423gsags') assert chkdb.check_database(def_config) == 1 def test_check_database_fatal_test(def_config, temp_db): assert chkdb.check_database(def_config) == 1 def test_check_connection_good(temp_db_conn, def_config): assert chkdb.check_connection(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_connection_bad(def_config): badconn = chkdb._BadConnection('Error') assert chkdb.check_connection(badconn, def_config) == chkdb.CheckState.FATAL def test_check_database_version_good(property_table, temp_db_conn, def_config): property_table.set('database_version', str(nominatim_db.version.NOMINATIM_VERSION)) assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_database_version_bad(property_table, temp_db_conn, def_config): property_table.set('database_version', '3.9.9-9') assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.FATAL def test_check_placex_table_good(table_factory, temp_db_conn, def_config): table_factory('placex') assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_placex_table_bad(temp_db_conn, def_config): assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL def test_check_placex_table_size_good(table_factory, temp_db_conn, def_config): table_factory('placex', content=((1, ), (2, ))) assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_placex_table_size_bad(table_factory, 
temp_db_conn, def_config): table_factory('placex') assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL def test_check_tokenizer_missing(temp_db_conn, def_config, tmp_path): def_config.project_dir = tmp_path assert chkdb.check_tokenizer(temp_db_conn, def_config) == chkdb.CheckState.FAIL @pytest.mark.parametrize("check_result,state", [(None, chkdb.CheckState.OK), ("Something wrong", chkdb.CheckState.FAIL)]) def test_check_tokenizer(temp_db_conn, def_config, monkeypatch, check_result, state): class _TestTokenizer: @staticmethod def check_database(_): return check_result monkeypatch.setattr(chkdb.tokenizer_factory, 'get_tokenizer_for_db', lambda *a, **k: _TestTokenizer()) assert chkdb.check_tokenizer(temp_db_conn, def_config) == state def test_check_indexing_good(table_factory, temp_db_conn, def_config): table_factory('placex', 'place_id int, indexed_status smallint', content=((1, 0), (2, 0))) assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_indexing_bad(table_factory, temp_db_conn, def_config): table_factory('placex', 'place_id int, indexed_status smallint', content=((1, 0), (2, 2))) assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.WARN def test_check_database_indexes_bad(temp_db_conn, def_config): assert chkdb.check_database_indexes(temp_db_conn, def_config) == chkdb.CheckState.FAIL def test_check_database_indexes_valid(temp_db_conn, def_config): assert chkdb.check_database_index_valid(temp_db_conn, def_config) == chkdb.CheckState.OK def test_check_tiger_table_disabled(temp_db_conn, def_config, monkeypatch): monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'no') assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.NOT_APPLICABLE def test_check_tiger_table_enabled(temp_db_cursor, temp_db_conn, def_config, monkeypatch): monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes') assert chkdb.check_tiger_table(temp_db_conn, def_config) == 
chkdb.CheckState.FAIL temp_db_cursor.execute('CREATE TABLE location_property_tiger (place_id int)') assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.FAIL temp_db_cursor.execute('INSERT INTO location_property_tiger VALUES (1), (2)') assert chkdb.check_tiger_table(temp_db_conn, def_config) == chkdb.CheckState.OK
4,682
Python
.py
76
55.789474
95
0.726138
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,630
test_database_import.py
osm-search_Nominatim/test/python/tools/test_database_import.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for functions to import a new database. """ from pathlib import Path import pytest import pytest_asyncio import psycopg from psycopg import sql as pysql from nominatim_db.tools import database_import from nominatim_db.errors import UsageError class TestDatabaseSetup: DBNAME = 'test_nominatim_python_unittest' @pytest.fixture(autouse=True) def setup_nonexistant_db(self): with psycopg.connect(dbname='postgres', autocommit=True) as conn: with conn.cursor() as cur: cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}') yield True with conn.cursor() as cur: cur.execute(f'DROP DATABASE IF EXISTS {self.DBNAME}') @pytest.fixture def cursor(self): with psycopg.connect(dbname=self.DBNAME) as conn: with conn.cursor() as cur: yield cur def conn(self): return psycopg.connect(dbname=self.DBNAME) def test_setup_skeleton(self): database_import.setup_database_skeleton(f'dbname={self.DBNAME}') # Check that all extensions are set up. 
with self.conn() as conn: with conn.cursor() as cur: cur.execute('CREATE TABLE t (h HSTORE, geom GEOMETRY(Geometry, 4326))') def test_unsupported_pg_version(self, monkeypatch): monkeypatch.setattr(database_import, 'POSTGRESQL_REQUIRED_VERSION', (100, 4)) with pytest.raises(UsageError, match='PostgreSQL server is too old.'): database_import.setup_database_skeleton(f'dbname={self.DBNAME}') def test_create_db_explicit_ro_user(self): database_import.setup_database_skeleton(f'dbname={self.DBNAME}', rouser='postgres') def test_create_db_missing_ro_user(self): with pytest.raises(UsageError, match='Missing read-only user.'): database_import.setup_database_skeleton(f'dbname={self.DBNAME}', rouser='sdfwkjkjgdugu2;jgsafkljas;') def test_setup_extensions_old_postgis(self, monkeypatch): monkeypatch.setattr(database_import, 'POSTGIS_REQUIRED_VERSION', (50, 50)) with pytest.raises(UsageError, match='PostGIS is too old.'): database_import.setup_database_skeleton(f'dbname={self.DBNAME}') def test_setup_skeleton_already_exists(temp_db): with pytest.raises(UsageError): database_import.setup_database_skeleton(f'dbname={temp_db}') def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd): table_factory('place', content=((1, ), )) database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options) captured = capfd.readouterr() assert '--create' in captured.out assert '--output gazetteer' in captured.out assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out assert f'--number-processes {osm2pgsql_options["threads"]}' in captured.out assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out assert 'file.pbf' in captured.out def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd): table_factory('place', content=((1, ), )) osm2pgsql_options['osm2pgsql_cache'] = 0 files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm'] for f in files: f.write_text('test') database_import.import_osm_data(files, osm2pgsql_options) 
captured = capfd.readouterr() assert 'file1.osm' in captured.out assert 'file2.osm' in captured.out def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options): table_factory('place') with pytest.raises(UsageError, match='No data imported'): database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options) def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options): table_factory('place') database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, ignore_errors=True) def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options): table_factory('place', content=((1, ), )) table_factory('planet_osm_nodes') flatfile = tmp_path / 'flatfile' flatfile.write_text('touch') osm2pgsql_options['flatnode_file'] = str(flatfile.resolve()) database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True) assert not flatfile.exists() assert not temp_db_cursor.table_exists('planet_osm_nodes') def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd): table_factory('place', content=((1, ), )) osm2pgsql_options['osm2pgsql_cache'] = 0 database_import.import_osm_data(Path(__file__), osm2pgsql_options) captured = capfd.readouterr() assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out @pytest.mark.parametrize("with_search", (True, False)) def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory, with_search): tables = ['placex', 'place_addressline', 'location_area', 'location_area_country', 'location_property_tiger', 'location_property_osmline', 'location_postcode', 'location_road_23'] if with_search: tables.append('search_name') for table in tables: table_factory(table, content=((1, ), (2, ), (3, ))) assert temp_db_cursor.table_rows(table) == 3 database_import.truncate_data_tables(temp_db_conn) for table in tables: assert temp_db_cursor.table_rows(table) == 0 @pytest.mark.parametrize("threads", (1, 5)) @pytest.mark.asyncio async def 
test_load_data(dsn, place_row, placex_table, osmline_table, temp_db_cursor, threads): for func in ('precompute_words', 'getorcreate_housenumber_id', 'make_standard_name'): temp_db_cursor.execute(pysql.SQL("""CREATE FUNCTION {} (src TEXT) RETURNS TEXT AS $$ SELECT 'a'::TEXT $$ LANGUAGE SQL """).format(pysql.Identifier(func))) for oid in range(100, 130): place_row(osm_id=oid) place_row(osm_type='W', osm_id=342, cls='place', typ='houses', geom='SRID=4326;LINESTRING(0 0, 10 10)') await database_import.load_data(dsn, threads) assert temp_db_cursor.table_rows('placex') == 30 assert temp_db_cursor.table_rows('location_property_osmline') == 1 class TestSetupSQL: @pytest.fixture(autouse=True) def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg): def_config.lib_dir.sql = tmp_path / 'sql' def_config.lib_dir.sql.mkdir() self.config = def_config def write_sql(self, fname, content): (self.config.lib_dir.sql / fname).write_text(content) @pytest.mark.parametrize("reverse", [True, False]) def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse): self.write_sql('tables.sql', """CREATE FUNCTION test() RETURNS bool AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""") database_import.create_tables(temp_db_conn, self.config, reverse) temp_db_cursor.scalar('SELECT test()') == reverse def test_create_table_triggers(self, temp_db_conn, temp_db_cursor): self.write_sql('table-triggers.sql', """CREATE FUNCTION test() RETURNS TEXT AS $$ SELECT 'a'::text $$ LANGUAGE SQL""") database_import.create_table_triggers(temp_db_conn, self.config) temp_db_cursor.scalar('SELECT test()') == 'a' def test_create_partition_tables(self, temp_db_conn, temp_db_cursor): self.write_sql('partition-tables.src.sql', """CREATE FUNCTION test() RETURNS TEXT AS $$ SELECT 'b'::text $$ LANGUAGE SQL""") database_import.create_partition_tables(temp_db_conn, self.config) temp_db_cursor.scalar('SELECT test()') == 'b' @pytest.mark.parametrize("drop", [True, False]) @pytest.mark.asyncio async def 
test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop): self.write_sql('indices.sql', """CREATE FUNCTION test() RETURNS bool AS $$ SELECT {{drop}} $$ LANGUAGE SQL""") await database_import.create_search_indices(temp_db_conn, self.config, drop) temp_db_cursor.scalar('SELECT test()') == drop
8,741
Python
.py
164
44.384146
95
0.658387
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,631
test_postcodes.py
osm-search_Nominatim/test/python/tools/test_postcodes.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for functions to maintain the artificial postcode table. """ import subprocess import pytest from nominatim_db.tools import postcodes from nominatim_db.data import country_info import dummy_tokenizer class MockPostcodeTable: """ A location_postcode table for testing. """ def __init__(self, conn): self.conn = conn with conn.cursor() as cur: cur.execute("""CREATE TABLE location_postcode ( place_id BIGINT, parent_place_id BIGINT, rank_search SMALLINT, rank_address SMALLINT, indexed_status SMALLINT, indexed_date TIMESTAMP, country_code varchar(2), postcode TEXT, geometry GEOMETRY(Geometry, 4326))""") cur.execute("""CREATE OR REPLACE FUNCTION token_normalized_postcode(postcode TEXT) RETURNS TEXT AS $$ BEGIN RETURN postcode; END; $$ LANGUAGE plpgsql; CREATE OR REPLACE FUNCTION get_country_code(place geometry) RETURNS TEXT AS $$ BEGIN RETURN null; END; $$ LANGUAGE plpgsql; """) conn.commit() def add(self, country, postcode, x, y): with self.conn.cursor() as cur: cur.execute("""INSERT INTO location_postcode (place_id, indexed_status, country_code, postcode, geometry) VALUES (nextval('seq_place'), 1, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326))""", (country, postcode, x, y)) self.conn.commit() @property def row_set(self): with self.conn.cursor() as cur: cur.execute("""SELECT country_code, postcode, ST_X(geometry), ST_Y(geometry) FROM location_postcode""") return set((tuple(row) for row in cur)) @pytest.fixture def tokenizer(): return dummy_tokenizer.DummyTokenizer(None, None) @pytest.fixture def postcode_table(def_config, temp_db_conn, placex_table): country_info.setup_country_config(def_config) return MockPostcodeTable(temp_db_conn) @pytest.fixture def insert_implicit_postcode(placex_table, place_row): """ Inserts data into the placex and place table 
which can then be used to compute one postcode. """ def _insert_implicit_postcode(osm_id, country, geometry, address): placex_table.add(osm_id=osm_id, country=country, geom=geometry) place_row(osm_id=osm_id, geom='SRID=4326;'+geometry, address=address) return _insert_implicit_postcode def test_postcodes_empty(dsn, postcode_table, place_table, tmp_path, tokenizer): postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert not postcode_table.row_set def test_postcodes_add_new(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='9486')) postcode_table.add('yy', '9486', 99, 34) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', '9486', 10, 12), } def test_postcodes_replace_coordinates(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) postcode_table.add('xx', 'AB 4511', 99, 34) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)} def test_postcodes_replace_coordinates_close(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) postcode_table.add('xx', 'AB 4511', 10, 11.99999) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 11.99999)} def test_postcodes_remove(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) postcode_table.add('xx', 'badname', 10, 12) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)} def test_postcodes_ignore_empty_country(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, None, 'POINT(10 12)', dict(postcode='AB 4511')) 
postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert not postcode_table.row_set def test_postcodes_remove_all(dsn, postcode_table, place_table, tmp_path, tokenizer): postcode_table.add('ch', '5613', 10, 12) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert not postcode_table.row_set def test_postcodes_multi_country(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'de', 'POINT(10 12)', dict(postcode='54451')) insert_implicit_postcode(2, 'cc', 'POINT(100 56)', dict(postcode='DD23 T')) insert_implicit_postcode(3, 'de', 'POINT(10.3 11.0)', dict(postcode='54452')) insert_implicit_postcode(4, 'cc', 'POINT(10.3 11.0)', dict(postcode='54452')) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('de', '54451', 10, 12), ('de', '54452', 10.3, 11.0), ('cc', '54452', 10.3, 11.0), ('cc', 'DD23 T', 100, 56)} @pytest.mark.parametrize("gzipped", [True, False]) def test_postcodes_extern(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer, gzipped): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) extfile = tmp_path / 'xx_postcodes.csv' extfile.write_text("postcode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10") if gzipped: subprocess.run(['gzip', str(extfile)]) assert not extfile.is_file() postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12), ('xx', 'CD 4511', -10, -5)} def test_postcodes_extern_bad_column(dsn, postcode_table, tmp_path, insert_implicit_postcode, tokenizer): insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) extfile = tmp_path / 'xx_postcodes.csv' extfile.write_text("postode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10") postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12)} def test_postcodes_extern_bad_number(dsn, insert_implicit_postcode, postcode_table, tmp_path, tokenizer): 
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', dict(postcode='AB 4511')) extfile = tmp_path / 'xx_postcodes.csv' extfile.write_text("postcode,lat,lon\nXX 4511,-4,NaN\nCD 4511,-5, -10\n34,200,0") postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('xx', 'AB 4511', 10, 12), ('xx', 'CD 4511', -10, -5)} def test_can_compute(dsn, table_factory): assert not postcodes.can_compute(dsn) table_factory('place') assert postcodes.can_compute(dsn) def test_no_placex_entry(dsn, tmp_path, temp_db_cursor, place_row, postcode_table, tokenizer): #Rewrite the get_country_code function to verify its execution. temp_db_cursor.execute(""" CREATE OR REPLACE FUNCTION get_country_code(place geometry) RETURNS TEXT AS $$ BEGIN RETURN 'yy'; END; $$ LANGUAGE plpgsql; """) place_row(geom='SRID=4326;POINT(10 12)', address=dict(postcode='AB 4511')) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert postcode_table.row_set == {('yy', 'AB 4511', 10, 12)} def test_discard_badly_formatted_postcodes(dsn, tmp_path, temp_db_cursor, place_row, postcode_table, tokenizer): #Rewrite the get_country_code function to verify its execution. temp_db_cursor.execute(""" CREATE OR REPLACE FUNCTION get_country_code(place geometry) RETURNS TEXT AS $$ BEGIN RETURN 'fr'; END; $$ LANGUAGE plpgsql; """) place_row(geom='SRID=4326;POINT(10 12)', address=dict(postcode='AB 4511')) postcodes.update_postcodes(dsn, tmp_path, tokenizer) assert not postcode_table.row_set
9,289
Python
.py
173
42.052023
112
0.606563
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,632
test_import_special_phrases.py
osm-search_Nominatim/test/python/tools/test_import_special_phrases.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for import special phrases methods of the class SPImporter. """ from shutil import copyfile import pytest from nominatim_db.tools.special_phrases.sp_importer import SPImporter from nominatim_db.tools.special_phrases.sp_wiki_loader import SPWikiLoader from nominatim_db.tools.special_phrases.special_phrase import SpecialPhrase from nominatim_db.errors import UsageError from cursor import CursorForTesting @pytest.fixture def sp_importer(temp_db_conn, def_config, monkeypatch): """ Return an instance of SPImporter. """ monkeypatch.setenv('NOMINATIM_LANGUAGES', 'en') loader = SPWikiLoader(def_config) return SPImporter(def_config, temp_db_conn, loader) @pytest.fixture def xml_wiki_content(src_dir): """ return the content of the static xml test file. """ xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt' return xml_test_content.read_text() @pytest.fixture def default_phrases(table_factory): table_factory('place_classtype_testclasstypetable_to_delete') table_factory('place_classtype_testclasstypetable_to_keep') def test_fetch_existing_place_classtype_tables(sp_importer, table_factory): """ Check for the fetch_existing_place_classtype_tables() method. It should return the table just created. """ table_factory('place_classtype_testclasstypetable') sp_importer._fetch_existing_place_classtype_tables() contained_table = sp_importer.table_phrases_to_delete.pop() assert contained_table == 'place_classtype_testclasstypetable' def test_check_sanity_class(sp_importer): """ Check for _check_sanity() method. If a wrong class or type is given, an UsageError should raise. If a good class and type are given, nothing special happens. 
""" assert not sp_importer._check_sanity(SpecialPhrase('en', '', 'type', '')) assert not sp_importer._check_sanity(SpecialPhrase('en', 'class', '', '')) assert sp_importer._check_sanity(SpecialPhrase('en', 'class', 'type', '')) def test_load_white_and_black_lists(sp_importer): """ Test that _load_white_and_black_lists() well return black list and white list and that they are of dict type. """ black_list, white_list = sp_importer._load_white_and_black_lists() assert isinstance(black_list, dict) and isinstance(white_list, dict) def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn, temp_db_cursor, table_factory, sp_importer): """ Test that _create_place_classtype_indexes() create the place_id index and centroid index on the right place_class_type table. """ phrase_class = 'class' phrase_type = 'type' table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type) table_factory(table_name, 'place_id BIGINT, centroid GEOMETRY') sp_importer._create_place_classtype_indexes('', phrase_class, phrase_type) temp_db_conn.commit() assert check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type) def test_create_place_classtype_table(temp_db_conn, temp_db_cursor, placex_table, sp_importer): """ Test that _create_place_classtype_table() create the right place_classtype table. """ phrase_class = 'class' phrase_type = 'type' sp_importer._create_place_classtype_table('', phrase_class, phrase_type) temp_db_conn.commit() assert check_table_exist(temp_db_cursor, phrase_class, phrase_type) def test_grant_access_to_web_user(temp_db_conn, temp_db_cursor, table_factory, def_config, sp_importer): """ Test that _grant_access_to_webuser() give right access to the web user. 
""" phrase_class = 'class' phrase_type = 'type' table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type) table_factory(table_name) sp_importer._grant_access_to_webuser(phrase_class, phrase_type) temp_db_conn.commit() assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, phrase_class, phrase_type) def test_create_place_classtype_table_and_indexes( temp_db_cursor, def_config, placex_table, sp_importer, temp_db_conn): """ Test that _create_place_classtype_table_and_indexes() create the right place_classtype tables and place_id indexes and centroid indexes and grant access to the web user for the given set of pairs. """ pairs = set([('class1', 'type1'), ('class2', 'type2')]) sp_importer._create_classtype_table_and_indexes(pairs) temp_db_conn.commit() for pair in pairs: assert check_table_exist(temp_db_cursor, pair[0], pair[1]) assert check_placeid_and_centroid_indexes(temp_db_cursor, pair[0], pair[1]) assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, pair[0], pair[1]) def test_remove_non_existent_tables_from_db(sp_importer, default_phrases, temp_db_conn, temp_db_cursor): """ Check for the remove_non_existent_phrases_from_db() method. It should removed entries from the word table which are contained in the words_phrases_to_delete set and not those also contained in the words_phrases_still_exist set. place_classtype tables contained in table_phrases_to_delete should be deleted. 
""" sp_importer.table_phrases_to_delete = { 'place_classtype_testclasstypetable_to_delete' } query_tables = """ SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_name like 'place_classtype_%'; """ sp_importer._remove_non_existent_tables_from_db() temp_db_conn.commit() assert temp_db_cursor.row_set(query_tables) \ == {('place_classtype_testclasstypetable_to_keep', )} @pytest.mark.parametrize("should_replace", [(True), (False)]) def test_import_phrases(monkeypatch, temp_db_cursor, def_config, sp_importer, placex_table, table_factory, tokenizer_mock, xml_wiki_content, should_replace): """ Check that the main import_phrases() method is well executed. It should create the place_classtype table, the place_id and centroid indexes, grand access to the web user and executing the SQL functions for amenities. It should also update the database well by deleting or preserving existing entries of the database. """ #Add some data to the database before execution in order to test #what is deleted and what is preserved. 
table_factory('place_classtype_amenity_animal_shelter') table_factory('place_classtype_wrongclass_wrongtype') monkeypatch.setattr('nominatim_db.tools.special_phrases.sp_wiki_loader._get_wiki_content', lambda lang: xml_wiki_content) tokenizer = tokenizer_mock() sp_importer.import_phrases(tokenizer, should_replace) assert len(tokenizer.analyser_cache['special_phrases']) == 18 class_test = 'aerialway' type_test = 'zip_line' assert check_table_exist(temp_db_cursor, class_test, type_test) assert check_placeid_and_centroid_indexes(temp_db_cursor, class_test, type_test) assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, class_test, type_test) assert check_table_exist(temp_db_cursor, 'amenity', 'animal_shelter') if should_replace: assert not check_table_exist(temp_db_cursor, 'wrong_class', 'wrong_type') assert temp_db_cursor.table_exists('place_classtype_amenity_animal_shelter') if should_replace: assert not temp_db_cursor.table_exists('place_classtype_wrongclass_wrongtype') def check_table_exist(temp_db_cursor, phrase_class, phrase_type): """ Verify that the place_classtype table exists for the given phrase_class and phrase_type. """ return temp_db_cursor.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type)) def check_grant_access(temp_db_cursor, user, phrase_class, phrase_type): """ Check that the web user has been granted right access to the place_classtype table of the given phrase_class and phrase_type. """ table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type) temp_db_cursor.execute(""" SELECT * FROM information_schema.role_table_grants WHERE table_name='{}' AND grantee='{}' AND privilege_type='SELECT'""".format(table_name, user)) return temp_db_cursor.fetchone() def check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type): """ Check that the place_id index and centroid index exist for the place_classtype table of the given phrase_class and phrase_type. 
""" table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type) index_prefix = 'idx_place_classtype_{}_{}_'.format(phrase_class, phrase_type) return ( temp_db_cursor.index_exists(table_name, index_prefix + 'centroid') and temp_db_cursor.index_exists(table_name, index_prefix + 'place_id') )
9,537
Python
.py
198
41.333333
101
0.690668
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,633
test_tiger_data.py
osm-search_Nominatim/test/python/tools/test_tiger_data.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for tiger data function """ import tarfile from textwrap import dedent import pytest import pytest_asyncio from nominatim_db.db.connection import execute_scalar from nominatim_db.tools import tiger_data, freeze from nominatim_db.errors import UsageError class MockTigerTable: def __init__(self, conn): self.conn = conn with conn.cursor() as cur: cur.execute("""CREATE TABLE tiger (linegeo GEOMETRY, start INTEGER, stop INTEGER, interpol TEXT, token_info JSONB, postcode TEXT)""") # We need this table to determine if the database is frozen or not cur.execute("CREATE TABLE place (number INTEGER)") def count(self): return execute_scalar(self.conn, "SELECT count(*) FROM tiger") def row(self): with self.conn.cursor() as cur: cur.execute("SELECT * FROM tiger LIMIT 1") return cur.fetchone() @pytest.fixture def tiger_table(def_config, temp_db_conn, sql_preprocessor, temp_db_with_extensions, tmp_path): def_config.lib_dir.sql = tmp_path / 'sql' def_config.lib_dir.sql.mkdir() (def_config.lib_dir.sql / 'tiger_import_start.sql').write_text( """CREATE OR REPLACE FUNCTION tiger_line_import(linegeo GEOMETRY, start INTEGER, stop INTEGER, interpol TEXT, token_info JSONB, postcode TEXT) RETURNS INTEGER AS $$ INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode) RETURNING 1 $$ LANGUAGE SQL;""") (def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text( """DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER, in_endnumber INTEGER, interpolationtype TEXT, token_info JSONB, in_postcode TEXT);""") return MockTigerTable(temp_db_conn) @pytest.fixture def csv_factory(tmp_path): def _mk_file(fname, hnr_from=1, hnr_to=9, interpol='odd', street='Main St', city='Newtown', state='AL', postcode='12345', geometry='LINESTRING(-86.466995 
32.428956,-86.466923 32.428933)'): (tmp_path / (fname + '.csv')).write_text(dedent("""\ from;to;interpolation;street;city;state;postcode;geometry {};{};{};{};{};{};{};{} """.format(hnr_from, hnr_to, interpol, street, city, state, postcode, geometry))) return _mk_file @pytest.mark.parametrize("threads", (1, 5)) @pytest.mark.asyncio async def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads): await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'), def_config, threads, tokenizer_mock()) assert tiger_table.count() == 6213 @pytest.mark.asyncio async def test_add_tiger_data_database_frozen(def_config, temp_db_conn, tiger_table, tokenizer_mock, tmp_path): freeze.drop_update_tables(temp_db_conn) with pytest.raises(UsageError) as excinfo: await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock()) assert "database frozen" in str(excinfo.value) assert tiger_table.count() == 0 @pytest.mark.asyncio async def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock, tmp_path): await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock()) assert tiger_table.count() == 0 @pytest.mark.asyncio async def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock, tmp_path): sqlfile = tmp_path / '1010.csv' sqlfile.write_text("""Random text""") await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock()) assert tiger_table.count() == 0 @pytest.mark.asyncio async def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock, csv_factory, tmp_path): csv_factory('file1', hnr_from=99) csv_factory('file2', hnr_from='L12') csv_factory('file3', hnr_to='12.4') await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock()) assert tiger_table.count() == 1 assert tiger_table.row().start == 99 @pytest.mark.parametrize("threads", (1, 5)) @pytest.mark.asyncio async def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path, 
src_dir, threads): tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz") tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv')) tar.close() await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads, tokenizer_mock()) assert tiger_table.count() == 6213 @pytest.mark.asyncio async def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path): tarfile = tmp_path / 'sample.tar.gz' tarfile.write_text("""Random text""") with pytest.raises(UsageError): await tiger_data.add_tiger_data(str(tarfile), def_config, 1, tokenizer_mock()) @pytest.mark.asyncio async def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock, tmp_path): tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz") tar.add(__file__) tar.close() await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1, tokenizer_mock()) assert tiger_table.count() == 0
6,173
Python
.py
124
38.782258
100
0.604895
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,634
test_refresh.py
osm-search_Nominatim/test/python/tools/test_refresh.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for various refresh functions. """ from pathlib import Path import pytest from nominatim_db.tools import refresh from nominatim_db.db.connection import postgis_version_tuple def test_refresh_import_wikipedia_not_existing(dsn): assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1 def test_refresh_import_secondary_importance_non_existing(dsn): assert refresh.import_secondary_importance(dsn, Path('.')) == 1 def test_refresh_import_secondary_importance_testdb(dsn, src_dir, temp_db_conn, temp_db_cursor): temp_db_cursor.execute('CREATE EXTENSION postgis') if postgis_version_tuple(temp_db_conn)[0] < 3: assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') > 0 else: temp_db_cursor.execute('CREATE EXTENSION postgis_raster') assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') == 0 assert temp_db_cursor.table_exists('secondary_importance') @pytest.mark.parametrize("replace", (True, False)) def test_refresh_import_wikipedia(dsn, src_dir, table_factory, temp_db_cursor, replace): if replace: table_factory('wikimedia_importance') # use the small wikipedia file for the API testdb assert refresh.import_wikipedia_articles(dsn, src_dir / 'test' / 'testdb') == 0 assert temp_db_cursor.table_rows('wikipedia_article') > 0 assert temp_db_cursor.table_rows('wikipedia_redirect') > 0 def test_recompute_importance(placex_table, table_factory, temp_db_conn, temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION compute_importance(extratags HSTORE, country_code varchar(2), rank_search SMALLINT, centroid GEOMETRY, OUT importance FLOAT, OUT wikipedia TEXT) AS $$ SELECT 0.1::float, 'foo'::text $$ LANGUAGE SQL""") refresh.recompute_importance(temp_db_conn) @pytest.mark.parametrize('osm_type', ('N', 'W', 'R')) 
def test_invalidate_osm_object_simple(placex_table, osm_type, temp_db_conn, temp_db_cursor): placex_table.add(osm_type=osm_type, osm_id=57283) refresh.invalidate_osm_object(osm_type, 57283, temp_db_conn, recursive=False) temp_db_conn.commit() assert 2 == temp_db_cursor.scalar("""SELECT indexed_status FROM placex WHERE osm_type = %s and osm_id = %s""", (osm_type, 57283)) def test_invalidate_osm_object_nonexisting_simple(placex_table, temp_db_conn, temp_db_cursor): placex_table.add(osm_type='W', osm_id=57283) refresh.invalidate_osm_object('N', 57283, temp_db_conn, recursive=False) temp_db_conn.commit() assert 0 == temp_db_cursor.scalar("""SELECT count(*) FROM placex WHERE indexed_status > 0""") @pytest.mark.parametrize('osm_type', ('N', 'W', 'R')) def test_invalidate_osm_object_recursive(placex_table, osm_type, temp_db_conn, temp_db_cursor): placex_table.add(osm_type=osm_type, osm_id=57283) temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION place_force_update(placeid BIGINT) RETURNS BOOLEAN AS $$ BEGIN UPDATE placex SET indexed_status = 522 WHERE place_id = placeid; RETURN TRUE; END; $$ LANGUAGE plpgsql;""") refresh.invalidate_osm_object(osm_type, 57283, temp_db_conn) temp_db_conn.commit() assert 522 == temp_db_cursor.scalar("""SELECT indexed_status FROM placex WHERE osm_type = %s and osm_id = %s""", (osm_type, 57283))
4,216
Python
.py
73
44.726027
96
0.608505
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,635
test_sp_csv_loader.py
osm-search_Nominatim/test/python/tools/test_sp_csv_loader.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for methods of the SPCsvLoader class. """ import pytest from nominatim_db.errors import UsageError from nominatim_db.tools.special_phrases.sp_csv_loader import SPCsvLoader from nominatim_db.tools.special_phrases.special_phrase import SpecialPhrase @pytest.fixture def sp_csv_loader(src_dir): """ Return an instance of SPCsvLoader. """ csv_path = (src_dir / 'test' / 'testdata' / 'sp_csv_test.csv').resolve() loader = SPCsvLoader(csv_path) return loader def test_generate_phrases(sp_csv_loader): """ Test method parse_csv() Should return the right SpecialPhrase objects. """ phrases = list(sp_csv_loader.generate_phrases()) assert len(phrases) == 42 assert len(set(phrases)) == 41 assert SpecialPhrase('Billboard', 'advertising', 'billboard', '-') in phrases assert SpecialPhrase('Zip Lines', 'aerialway', 'zip_line', '-') in phrases def test_invalid_cvs_file(): """ Test method check_csv_validity() It should raise an exception when file with a different exception than .csv is given. """ loader = SPCsvLoader('test.wrong') with pytest.raises(UsageError, match='not a csv file'): next(loader.generate_phrases())
1,466
Python
.py
40
32.325
81
0.701482
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,636
test_admin.py
osm-search_Nominatim/test/python/tools/test_admin.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for maintenance and analysis functions. """ import pytest from nominatim_db.errors import UsageError from nominatim_db.tools import admin from nominatim_db.tokenizer import factory from nominatim_db.db.sql_preprocessor import SQLPreprocessor @pytest.fixture(autouse=True) def create_placex_table(project_env, tokenizer_mock, temp_db_cursor, placex_table): """ All tests in this module require the placex table to be set up. """ temp_db_cursor.execute("DROP TYPE IF EXISTS prepare_update_info CASCADE") temp_db_cursor.execute("""CREATE TYPE prepare_update_info AS ( name HSTORE, address HSTORE, rank_address SMALLINT, country_code TEXT, class TEXT, type TEXT, linked_place_id BIGINT )""") temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION placex_indexing_prepare(p placex, OUT result prepare_update_info) AS $$ BEGIN result.address := p.address; result.name := p.name; result.class := p.class; result.type := p.type; result.country_code := p.country_code; result.rank_address := p.rank_address; END; $$ LANGUAGE plpgsql STABLE; """) factory.create_tokenizer(project_env) def test_analyse_indexing_no_objects(project_env): with pytest.raises(UsageError): admin.analyse_indexing(project_env) @pytest.mark.parametrize("oid", ['1234', 'N123a', 'X123']) def test_analyse_indexing_bad_osmid(project_env, oid): with pytest.raises(UsageError): admin.analyse_indexing(project_env, osm_id=oid) def test_analyse_indexing_unknown_osmid(project_env): with pytest.raises(UsageError): admin.analyse_indexing(project_env, osm_id='W12345674') def test_analyse_indexing_with_place_id(project_env, temp_db_cursor): temp_db_cursor.execute("INSERT INTO placex (place_id) VALUES(12345)") admin.analyse_indexing(project_env, place_id=12345) def test_analyse_indexing_with_osm_id(project_env, 
temp_db_cursor): temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_type, osm_id) VALUES(9988, 'N', 10000)""") admin.analyse_indexing(project_env, osm_id='N10000') class TestAdminCleanDeleted: @pytest.fixture(autouse=True) def setup_polygon_delete(self, project_env, table_factory, place_table, osmline_table, temp_db_cursor, temp_db_conn, def_config, src_dir): """ Set up place_force_delete function and related tables """ self.project_env = project_env self.temp_db_cursor = temp_db_cursor table_factory('import_polygon_delete', """osm_id BIGINT, osm_type CHAR(1), class TEXT NOT NULL, type TEXT NOT NULL""", ((100, 'N', 'boundary', 'administrative'), (145, 'N', 'boundary', 'administrative'), (175, 'R', 'landcover', 'grass'))) temp_db_cursor.execute("""INSERT INTO placex (place_id, osm_id, osm_type, class, type, indexed_date, indexed_status) VALUES(1, 100, 'N', 'boundary', 'administrative', current_date - INTERVAL '1 month', 1), (2, 145, 'N', 'boundary', 'administrative', current_date - INTERVAL '3 month', 1), (3, 175, 'R', 'landcover', 'grass', current_date - INTERVAL '3 months', 1)""") # set up tables and triggers for utils function table_factory('place_to_be_deleted', """osm_id BIGINT, osm_type CHAR(1), class TEXT NOT NULL, type TEXT NOT NULL, deferred BOOLEAN""") table_factory('country_name', 'partition INT') table_factory('import_polygon_error', """osm_id BIGINT, osm_type CHAR(1), class TEXT NOT NULL, type TEXT NOT NULL""") temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION place_delete() RETURNS TRIGGER AS $$ BEGIN RETURN NULL; END; $$ LANGUAGE plpgsql;""") temp_db_cursor.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place FOR EACH ROW EXECUTE PROCEDURE place_delete();""") orig_sql = def_config.lib_dir.sql def_config.lib_dir.sql = src_dir / 'lib-sql' sqlproc = SQLPreprocessor(temp_db_conn, def_config) sqlproc.run_sql_file(temp_db_conn, 'functions/utils.sql') def_config.lib_dir.sql = orig_sql def test_admin_clean_deleted_no_records(self): 
admin.clean_deleted_relations(self.project_env, age='1 year') assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 1), (145, 'N', 'boundary', 'administrative', 1), (175, 'R', 'landcover', 'grass', 1)} assert self.temp_db_cursor.table_rows('import_polygon_delete') == 3 @pytest.mark.parametrize('test_age', ['T week', '1 welk', 'P1E']) def test_admin_clean_deleted_bad_age(self, test_age): with pytest.raises(UsageError): admin.clean_deleted_relations(self.project_env, age = test_age) def test_admin_clean_deleted_partial(self): admin.clean_deleted_relations(self.project_env, age = '2 months') assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 1), (145, 'N', 'boundary', 'administrative', 100), (175, 'R', 'landcover', 'grass', 100)} assert self.temp_db_cursor.table_rows('import_polygon_delete') == 1 @pytest.mark.parametrize('test_age', ['1 week', 'P3D', '5 hours']) def test_admin_clean_deleted(self, test_age): admin.clean_deleted_relations(self.project_env, age = test_age) assert self.temp_db_cursor.row_set('SELECT osm_id, osm_type, class, type, indexed_status FROM placex') == {(100, 'N', 'boundary', 'administrative', 100), (145, 'N', 'boundary', 'administrative', 100), (175, 'R', 'landcover', 'grass', 100)} assert self.temp_db_cursor.table_rows('import_polygon_delete') == 0
7,696
Python
.py
124
43.137097
161
0.531432
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,637
test_replication.py
osm-search_Nominatim/test/python/tools/test_replication.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for replication functionality. """ import datetime as dt import time import pytest from osmium.replication.server import OsmosisState import nominatim_db.tools.replication import nominatim_db.db.status as status from nominatim_db.errors import UsageError OSM_NODE_DATA = """\ <osm version="0.6" generator="OpenStreetMap server" copyright="OpenStreetMap and contributors" attribution="http://www.openstreetmap.org/copyright" license="http://opendatacommons.org/licenses/odbl/1-0/"> <node id="100" visible="true" version="1" changeset="2047" timestamp="2006-01-27T22:09:10Z" user="Foo" uid="111" lat="48.7586670" lon="8.1343060"> </node> </osm> """ @pytest.fixture(autouse=True) def setup_status_table(status_table): pass ### init replication def test_init_replication_bad_base_url(monkeypatch, place_row, temp_db_conn): place_row(osm_type='N', osm_id=100) monkeypatch.setattr(status, "get_url", lambda u: OSM_NODE_DATA) with pytest.raises(UsageError, match="Failed to reach replication service"): nominatim_db.tools.replication.init_replication(temp_db_conn, 'https://test.io') def test_init_replication_success(monkeypatch, place_row, temp_db_conn, temp_db_cursor): place_row(osm_type='N', osm_id=100) monkeypatch.setattr(status, "get_url", lambda u: OSM_NODE_DATA) monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer, "timestamp_to_sequence", lambda self, date: 234) nominatim_db.tools.replication.init_replication(temp_db_conn, 'https://test.io') expected_date = dt.datetime.strptime('2006-01-27T19:09:10', status.ISODATE_FORMAT)\ .replace(tzinfo=dt.timezone.utc) assert temp_db_cursor.row_set("SELECT * FROM import_status") \ == {(expected_date, 234, True)} ### checking for updates def test_check_for_updates_empty_status_table(temp_db_conn): assert 
nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254 def test_check_for_updates_seq_not_set(temp_db_conn): status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc)) assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 254 def test_check_for_updates_no_state(monkeypatch, temp_db_conn): status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=345) monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer, "get_state_info", lambda self: None) assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == 253 @pytest.mark.parametrize("server_sequence,result", [(344, 2), (345, 2), (346, 0)]) def test_check_for_updates_no_new_data(monkeypatch, temp_db_conn, server_sequence, result): date = dt.datetime.now(dt.timezone.utc) status.set_status(temp_db_conn, date, seq=345) monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer, "get_state_info", lambda self: OsmosisState(server_sequence, date)) assert nominatim_db.tools.replication.check_for_updates(temp_db_conn, 'https://test.io') == result ### updating @pytest.fixture def update_options(tmpdir): return dict(base_url='https://test.io', indexed_only=False, update_interval=3600, import_file=tmpdir / 'foo.osm', max_diff_size=1) def test_update_empty_status_table(dsn): with pytest.raises(UsageError): nominatim_db.tools.replication.update(dsn, {}) def test_update_already_indexed(temp_db_conn, dsn): status.set_status(temp_db_conn, dt.datetime.now(dt.timezone.utc), seq=34, indexed=False) assert nominatim_db.tools.replication.update(dsn, dict(indexed_only=True)) \ == nominatim_db.tools.replication.UpdateState.MORE_PENDING def test_update_no_data_no_sleep(monkeypatch, temp_db_conn, dsn, update_options): date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(days=1) status.set_status(temp_db_conn, date, seq=34) monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer, 
"apply_diffs", lambda *args, **kwargs: None) sleeptime = [] monkeypatch.setattr(time, 'sleep', sleeptime.append) assert nominatim_db.tools.replication.update(dsn, update_options) \ == nominatim_db.tools.replication.UpdateState.NO_CHANGES assert not sleeptime def test_update_no_data_sleep(monkeypatch, temp_db_conn, dsn, update_options): date = dt.datetime.now(dt.timezone.utc) - dt.timedelta(minutes=30) status.set_status(temp_db_conn, date, seq=34) monkeypatch.setattr(nominatim_db.tools.replication.ReplicationServer, "apply_diffs", lambda *args, **kwargs: None) sleeptime = [] monkeypatch.setattr(time, 'sleep', sleeptime.append) assert nominatim_db.tools.replication.update(dsn, update_options) \ == nominatim_db.tools.replication.UpdateState.NO_CHANGES assert len(sleeptime) == 1 assert sleeptime[0] < 3600 assert sleeptime[0] > 0
5,439
Python
.py
101
46.732673
204
0.698734
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,638
test_exec_utils.py
osm-search_Nominatim/test/python/tools/test_exec_utils.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for tools.exec_utils module. """ from pathlib import Path import subprocess import pytest from nominatim_db.config import Configuration import nominatim_db.tools.exec_utils as exec_utils def test_run_osm2pgsql(osm2pgsql_options): osm2pgsql_options['append'] = False osm2pgsql_options['import_file'] = 'foo.bar' osm2pgsql_options['tablespaces']['slim_data'] = 'extra' exec_utils.run_osm2pgsql(osm2pgsql_options) def test_run_osm2pgsql_disable_jit(osm2pgsql_options): osm2pgsql_options['append'] = True osm2pgsql_options['import_file'] = 'foo.bar' osm2pgsql_options['disable_jit'] = True exec_utils.run_osm2pgsql(osm2pgsql_options)
888
Python
.py
24
34.458333
59
0.76135
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,639
test_add_osm_data.py
osm-search_Nominatim/test/python/tools/test_add_osm_data.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for functions to add additional data to the database. """ from pathlib import Path import pytest from nominatim_db.tools import add_osm_data class CaptureGetUrl: def __init__(self, monkeypatch): self.url = None monkeypatch.setattr(add_osm_data, 'get_url', self) def __call__(self, url): self.url = url return '<xml></xml>' @pytest.fixture(autouse=True) def setup_delete_postprocessing(temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION flush_deleted_places() RETURNS INTEGER AS $$ SELECT 1 $$ LANGUAGE SQL""") def test_import_osm_file_simple(dsn, table_factory, osm2pgsql_options, capfd): assert add_osm_data.add_data_from_file(dsn, Path('change.osm'), osm2pgsql_options) == 0 captured = capfd.readouterr() assert '--append' in captured.out assert '--output gazetteer' in captured.out assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out assert f'--number-processes {osm2pgsql_options["threads"]}' in captured.out assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out assert 'change.osm' in captured.out @pytest.mark.parametrize("osm_type", ['node', 'way', 'relation']) @pytest.mark.parametrize("main_api,url", [(True, 'https://www.openstreetmap.org/api'), (False, 'https://overpass-api.de/api/interpreter?')]) def test_import_osm_object_main_api(dsn, osm2pgsql_options, monkeypatch, capfd, osm_type, main_api, url): get_url_mock = CaptureGetUrl(monkeypatch) add_osm_data.add_osm_object(dsn, osm_type, 4536, main_api, osm2pgsql_options) captured = capfd.readouterr() assert get_url_mock.url.startswith(url) assert '--append' in captured.out assert '--output gazetteer' in captured.out assert f'--style {osm2pgsql_options["osm2pgsql_style"]}' in captured.out assert f'--number-processes {osm2pgsql_options["threads"]}' in 
captured.out assert f'--cache {osm2pgsql_options["osm2pgsql_cache"]}' in captured.out assert captured.out.endswith(' -\n')
2,362
Python
.py
47
44.170213
95
0.688261
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,640
test_refresh_address_levels.py
osm-search_Nominatim/test/python/tools/test_refresh_address_levels.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for function for importing address ranks. """ import json from pathlib import Path import pytest from nominatim_db.tools.refresh import load_address_levels, load_address_levels_from_config def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config): load_address_levels_from_config(temp_db_conn, def_config) assert temp_db_cursor.table_rows('address_levels') > 0 def test_load_ranks_from_project_dir(project_env, temp_db_conn, temp_db_cursor): test_file = project_env.project_dir / 'address-levels.json' test_file.write_text('[{"tags":{"place":{"sea":2}}}]') load_address_levels_from_config(temp_db_conn, project_env) assert temp_db_cursor.table_rows('address_levels') == 1 def test_load_ranks_from_broken_file(project_env, temp_db_conn): test_file = project_env.project_dir / 'address-levels.json' test_file.write_text('[{"tags":"place":{"sea":2}}}]') with pytest.raises(json.decoder.JSONDecodeError): load_address_levels_from_config(temp_db_conn, project_env) def test_load_ranks_country(temp_db_conn, temp_db_cursor): load_address_levels(temp_db_conn, 'levels', [{"tags": {"place": {"village": 14}}}, {"countries": ['de'], "tags": {"place": {"village": 15}}}, {"countries": ['uk', 'us'], "tags": {"place": {"village": 16}}} ]) assert temp_db_cursor.row_set('SELECT * FROM levels') == \ set([(None, 'place', 'village', 14, 14), ('de', 'place', 'village', 15, 15), ('uk', 'place', 'village', 16, 16), ('us', 'place', 'village', 16, 16), ]) def test_load_ranks_default_value(temp_db_conn, temp_db_cursor): load_address_levels(temp_db_conn, 'levels', [{"tags": {"boundary": {"": 28}}}, {"countries": ['hu'], "tags": {"boundary": {"": 29}}} ]) assert temp_db_cursor.row_set('SELECT * FROM levels') == \ set([(None, 'boundary', None, 28, 28), ('hu', 'boundary', None, 
29, 29), ]) def test_load_ranks_multiple_keys(temp_db_conn, temp_db_cursor): load_address_levels(temp_db_conn, 'levels', [{"tags": {"place": {"city": 14}, "boundary": {"administrative2" : 4}} }]) assert temp_db_cursor.row_set('SELECT * FROM levels') == \ set([(None, 'place', 'city', 14, 14), (None, 'boundary', 'administrative2', 4, 4), ]) def test_load_ranks_address(temp_db_conn, temp_db_cursor): load_address_levels(temp_db_conn, 'levels', [{"tags": {"place": {"city": 14, "town" : [14, 13]}} }]) assert temp_db_cursor.row_set('SELECT * FROM levels') == \ set([(None, 'place', 'city', 14, 14), (None, 'place', 'town', 14, 13), ])
3,358
Python
.py
68
37.720588
91
0.538862
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,641
test_migration.py
osm-search_Nominatim/test/python/tools/test_migration.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for migration functions """ import pytest from nominatim_db.tools import migration from nominatim_db.errors import UsageError from nominatim_db.db.connection import server_version_tuple import nominatim_db.version class DummyTokenizer: def update_sql_functions(self, config): pass @pytest.fixture def postprocess_mock(monkeypatch): monkeypatch.setattr(migration.refresh, 'create_functions', lambda *args: args) monkeypatch.setattr(migration.tokenizer_factory, 'get_tokenizer_for_db', lambda *args: DummyTokenizer()) def test_no_migration_old_versions(temp_db_with_extensions, def_config, property_table): property_table.set('database_version', '4.2.99-0') with pytest.raises(UsageError, match='Migration not possible'): migration.migrate(def_config, {}) def test_already_at_version(temp_db_with_extensions, def_config, property_table): property_table.set('database_version', str(nominatim_db.version.NOMINATIM_VERSION)) assert migration.migrate(def_config, {}) == 0 def test_run_single_migration(temp_db_with_extensions, def_config, temp_db_cursor, property_table, monkeypatch, postprocess_mock): oldversion = [4, 4, 99, 0] property_table.set('database_version', str(nominatim_db.version.NominatimVersion(*oldversion))) done = {'old': False, 'new': False} def _migration(**_): """ Dummy migration""" done['new'] = True def _old_migration(**_): """ Dummy migration""" done['old'] = True oldversion[1] = 0 monkeypatch.setattr(migration, '_MIGRATION_FUNCTIONS', [(tuple(oldversion), _old_migration), (nominatim_db.version.NOMINATIM_VERSION, _migration)]) assert migration.migrate(def_config, {}) == 0 assert done['new'] assert not done['old'] assert property_table.get('database_version') == str(nominatim_db.version.NOMINATIM_VERSION) ###### Tests for specific migrations # # Each 
migration should come with two tests: # 1. Test that migration from old to new state works as expected. # 2. Test that the migration can be rerun on the new state without side effects.
2,477
Python
.py
55
38.763636
96
0.691379
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,642
test_refresh_wiki_data.py
osm-search_Nominatim/test/python/tools/test_refresh_wiki_data.py
# SPDX-License-Identifier: GPL-2.0-only # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2022 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for correctly assigning wikipedia pages to places. """ import gzip import csv import pytest from nominatim_db.tools.refresh import import_wikipedia_articles, recompute_importance, create_functions @pytest.fixture def wiki_csv(tmp_path, sql_preprocessor): def _import(data): with gzip.open(tmp_path / 'wikimedia-importance.csv.gz', mode='wt') as fd: writer = csv.DictWriter(fd, fieldnames=['language', 'type', 'title', 'importance', 'wikidata_id'], delimiter='\t', quotechar='|') writer.writeheader() for lang, title, importance, wd in data: writer.writerow({'language': lang, 'type': 'a', 'title': title, 'importance': str(importance), 'wikidata_id' : wd}) return tmp_path return _import @pytest.mark.parametrize('extra', [{'wikipedia:en': 'Test'}, {'wikipedia': 'en:Test'}, {'wikidata': 'Q123'}]) def test_wikipedia(dsn, temp_db_conn, temp_db_cursor, def_config, wiki_csv, placex_table, extra): import_wikipedia_articles(dsn, wiki_csv([('en', 'Test', 0.3, 'Q123')])) create_functions(temp_db_conn, def_config) content = temp_db_cursor.row_set( 'SELECT language, title, importance, wikidata FROM wikimedia_importance') assert content == set([('en', 'Test', 0.3, 'Q123')]) placex_table.add(osm_id=12, extratags=extra) recompute_importance(temp_db_conn) content = temp_db_cursor.row_set('SELECT wikipedia, importance FROM placex') assert content == set([('en:Test', 0.3)]) def test_wikipedia_no_match(dsn, temp_db_conn, temp_db_cursor, def_config, wiki_csv, placex_table): import_wikipedia_articles(dsn, wiki_csv([('de', 'Test', 0.3, 'Q123')])) create_functions(temp_db_conn, def_config) placex_table.add(osm_id=12, extratags={'wikipedia': 'en:Test'}, rank_search=10) recompute_importance(temp_db_conn) content = temp_db_cursor.row_set('SELECT wikipedia, importance FROM placex') 
assert list(content) == [(None, pytest.approx(0.26667666))]
2,462
Python
.py
48
41.8125
104
0.620675
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,643
test_sp_wiki_loader.py
osm-search_Nominatim/test/python/tools/test_sp_wiki_loader.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for methods of the SPWikiLoader class. """ import pytest from nominatim_db.tools.special_phrases.sp_wiki_loader import SPWikiLoader @pytest.fixture def sp_wiki_loader(src_dir, monkeypatch, def_config): """ Return an instance of SPWikiLoader. """ monkeypatch.setenv('NOMINATIM_LANGUAGES', 'en') loader = SPWikiLoader(def_config) def _mock_wiki_content(lang): xml_test_content = src_dir / 'test' / 'testdata' / 'special_phrases_test_content.txt' return xml_test_content.read_text() monkeypatch.setattr('nominatim_db.tools.special_phrases.sp_wiki_loader._get_wiki_content', _mock_wiki_content) return loader def test_generate_phrases(sp_wiki_loader): """ Test objects returned from the next() method. It should return all SpecialPhrases objects of the 'en' special phrases. """ phrases = list(sp_wiki_loader.generate_phrases()) assert set((p.p_label, p.p_class, p.p_type, p.p_operator) for p in phrases) ==\ {('Zip Line', 'aerialway', 'zip_line', '-'), ('Zip Lines', 'aerialway', 'zip_line', '-'), ('Zip Line in', 'aerialway', 'zip_line', 'in'), ('Zip Lines in', 'aerialway', 'zip_line', 'in'), ('Zip Line near', 'aerialway', 'zip_line', 'near'), ('Animal shelter', 'amenity', 'animal_shelter', '-'), ('Animal shelters', 'amenity', 'animal_shelter', '-'), ('Animal shelter in', 'amenity', 'animal_shelter', 'in'), ('Animal shelters in', 'amenity', 'animal_shelter', 'in'), ('Animal shelter near', 'amenity', 'animal_shelter', 'near'), ('Animal shelters near', 'amenity', 'animal_shelter', 'near'), ('Drinking Water near', 'amenity', 'drinking_water', 'near'), ('Water', 'amenity', 'drinking_water', '-'), ('Water in', 'amenity', 'drinking_water', 'in'), ('Water near', 'amenity', 'drinking_water', 'near'), ('Embassy', 'amenity', 'embassy', '-'), ('Embassys', 'amenity', 'embassy', 
'-'), ('Embassies', 'amenity', 'embassy', '-')}
2,444
Python
.py
50
39.96
94
0.585253
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,644
test_refresh_create_functions.py
osm-search_Nominatim/test/python/tools/test_refresh_create_functions.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for creating PL/pgSQL functions for Nominatim. """ import pytest from nominatim_db.tools.refresh import create_functions class TestCreateFunctions: @pytest.fixture(autouse=True) def init_env(self, sql_preprocessor, temp_db_conn, def_config, tmp_path): self.conn = temp_db_conn self.config = def_config def_config.lib_dir.sql = tmp_path def write_functions(self, content): sqlfile = self.config.lib_dir.sql / 'functions.sql' sqlfile.write_text(content) def test_create_functions(self, temp_db_cursor): self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER AS $$ BEGIN RETURN 43; END; $$ LANGUAGE plpgsql IMMUTABLE; """) create_functions(self.conn, self.config) assert temp_db_cursor.scalar('SELECT test()') == 43 @pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22))) def test_create_functions_with_template(self, temp_db_cursor, dbg, ret): self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER AS $$ BEGIN {% if debug %} RETURN 43; {% else %} RETURN 22; {% endif %} END; $$ LANGUAGE plpgsql IMMUTABLE; """) create_functions(self.conn, self.config, enable_debug=dbg) assert temp_db_cursor.scalar('SELECT test()') == ret
1,998
Python
.py
45
29.844444
81
0.528078
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,645
test_freeze.py
osm-search_Nominatim/test/python/tools/test_freeze.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for freeze functions (removing unused database parts). """ from nominatim_db.tools import freeze NOMINATIM_RUNTIME_TABLES = [ 'country_name', 'country_osm_grid', 'location_postcode', 'location_property_osmline', 'location_property_tiger', 'placex', 'place_adressline', 'search_name', 'word' ] NOMINATIM_DROP_TABLES = [ 'address_levels', 'location_area', 'location_area_country', 'location_area_large_100', 'location_road_1', 'place', 'planet_osm_nodes', 'planet_osm_rels', 'planet_osm_ways', 'search_name_111', 'wikipedia_article', 'wikipedia_redirect' ] def test_drop_tables(temp_db_conn, temp_db_cursor, table_factory): for table in NOMINATIM_RUNTIME_TABLES + NOMINATIM_DROP_TABLES: table_factory(table) assert not freeze.is_frozen(temp_db_conn) freeze.drop_update_tables(temp_db_conn) for table in NOMINATIM_RUNTIME_TABLES: assert temp_db_cursor.table_exists(table) for table in NOMINATIM_DROP_TABLES: assert not temp_db_cursor.table_exists(table) assert freeze.is_frozen(temp_db_conn) def test_drop_flatnode_file_no_file(): freeze.drop_flatnode_file(None) def test_drop_flatnode_file_file_already_gone(tmp_path): freeze.drop_flatnode_file(tmp_path / 'something.store') def test_drop_flatnode_file_delete(tmp_path): flatfile = tmp_path / 'flatnode.store' flatfile.write_text('Some content') freeze.drop_flatnode_file(flatfile) assert not flatfile.exists()
1,711
Python
.py
44
34.909091
80
0.725787
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,646
test_config_load_module.py
osm-search_Nominatim/test/python/config/test_config_load_module.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for loading extra Python modules. """ from pathlib import Path import sys import pytest from nominatim_db.config import Configuration @pytest.fixture def test_config(src_dir, tmp_path): """ Create a configuration object with project and config directories in a temporary directory. """ (tmp_path / 'project').mkdir() (tmp_path / 'config').mkdir() conf = Configuration(tmp_path / 'project') conf.config_dir = tmp_path / 'config' return conf def test_load_default_module(test_config): module = test_config.load_plugin_module('version', 'nominatim_db') assert isinstance(module.NOMINATIM_VERSION, tuple) def test_load_default_module_with_hyphen(test_config): module = test_config.load_plugin_module('place-info', 'nominatim_db.data') assert isinstance(module.PlaceInfo, object) def test_load_plugin_module(test_config, tmp_path): (tmp_path / 'project' / 'testpath').mkdir() (tmp_path / 'project' / 'testpath' / 'mymod.py')\ .write_text("def my_test_function():\n return 'gjwitlsSG42TG%'") module = test_config.load_plugin_module('testpath/mymod.py', 'private.something') assert module.my_test_function() == 'gjwitlsSG42TG%' # also test reloading module (tmp_path / 'project' / 'testpath' / 'mymod.py')\ .write_text("def my_test_function():\n return 'hjothjorhj'") module = test_config.load_plugin_module('testpath/mymod.py', 'private.something') assert module.my_test_function() == 'gjwitlsSG42TG%' def test_load_external_library_module(test_config, tmp_path, monkeypatch): MODULE_NAME = 'foogurenqodr4' pythonpath = tmp_path / 'priv-python' pythonpath.mkdir() (pythonpath / MODULE_NAME).mkdir() (pythonpath / MODULE_NAME / '__init__.py').write_text('') (pythonpath / MODULE_NAME / 'tester.py')\ .write_text("def my_test_function():\n return 'gjwitlsSG42TG%'") 
monkeypatch.syspath_prepend(pythonpath) module = test_config.load_plugin_module(f'{MODULE_NAME}.tester', 'private.something') assert module.my_test_function() == 'gjwitlsSG42TG%' # also test reloading module (pythonpath / MODULE_NAME / 'tester.py')\ .write_text("def my_test_function():\n return 'dfigjreigj'") module = test_config.load_plugin_module(f'{MODULE_NAME}.tester', 'private.something') assert module.my_test_function() == 'gjwitlsSG42TG%' del sys.modules[f'{MODULE_NAME}.tester']
2,669
Python
.py
57
42.385965
89
0.703246
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,647
test_config.py
osm-search_Nominatim/test/python/config/test_config.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for loading dotenv configuration. """ from pathlib import Path import pytest from nominatim_db.config import Configuration, flatten_config_list from nominatim_db.errors import UsageError @pytest.fixture def make_config(): """ Create a configuration object from the given project directory. """ def _mk_config(project_dir=None): return Configuration(project_dir) return _mk_config @pytest.fixture def make_config_path(tmp_path): """ Create a configuration object with project and config directories in a temporary directory. """ def _mk_config(): (tmp_path / 'project').mkdir() (tmp_path / 'config').mkdir() conf = Configuration(tmp_path / 'project') conf.config_dir = tmp_path / 'config' return conf return _mk_config def test_no_project_dir(make_config): config = make_config() assert config.DATABASE_WEBUSER == 'www-data' @pytest.mark.parametrize("val", ('apache', '"apache"')) def test_prefer_project_setting_over_default(make_config, val, tmp_path): envfile = tmp_path / '.env' envfile.write_text('NOMINATIM_DATABASE_WEBUSER={}\n'.format(val)) config = make_config(tmp_path) assert config.DATABASE_WEBUSER == 'apache' def test_prefer_os_environ_over_project_setting(make_config, monkeypatch, tmp_path): envfile = tmp_path / '.env' envfile.write_text('NOMINATIM_DATABASE_WEBUSER=apache\n') monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody') config = make_config(tmp_path) assert config.DATABASE_WEBUSER == 'nobody' def test_prefer_os_environ_can_unset_project_setting(make_config, monkeypatch, tmp_path): envfile = tmp_path / '.env' envfile.write_text('NOMINATIM_DATABASE_WEBUSER=apache\n') monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', '') config = make_config(tmp_path) assert config.DATABASE_WEBUSER == '' def test_get_os_env_add_defaults(make_config, 
monkeypatch): config = make_config() monkeypatch.delenv('NOMINATIM_DATABASE_WEBUSER', raising=False) assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'www-data' def test_get_os_env_prefer_os_environ(make_config, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_DATABASE_WEBUSER', 'nobody') assert config.get_os_env()['NOMINATIM_DATABASE_WEBUSER'] == 'nobody' def test_get_libpq_dsn_convert_default(make_config): config = make_config() assert config.get_libpq_dsn() == 'dbname=nominatim' def test_get_libpq_dsn_convert_php(make_config, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=gis;password=foo;host=localhost') assert config.get_libpq_dsn() == 'dbname=gis password=foo host=localhost' @pytest.mark.parametrize("val,expect", [('foo bar', "'foo bar'"), ("xy'z", "xy\\'z"), ]) def test_get_libpq_dsn_convert_php_special_chars(make_config, monkeypatch, val, expect): config = make_config() monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'pgsql:dbname=gis;password={}'.format(val)) assert config.get_libpq_dsn() == "dbname=gis password={}".format(expect) def test_get_libpq_dsn_convert_libpq(make_config, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'host=localhost dbname=gis password=foo') assert config.get_libpq_dsn() == 'host=localhost dbname=gis password=foo' @pytest.mark.parametrize("value,result", [(x, True) for x in ('1', 'true', 'True', 'yes', 'YES')] + [(x, False) for x in ('0', 'false', 'no', 'NO', 'x')]) def test_get_bool(make_config, monkeypatch, value, result): config = make_config() monkeypatch.setenv('NOMINATIM_FOOBAR', value) assert config.get_bool('FOOBAR') == result def test_get_bool_empty(make_config): config = make_config() assert config.TOKENIZER_CONFIG == '' assert not config.get_bool('TOKENIZER_CONFIG') @pytest.mark.parametrize("value,result", [('0', 0), ('1', 1), ('85762513444', 85762513444)]) def test_get_int_success(make_config, monkeypatch, 
value, result): config = make_config() monkeypatch.setenv('NOMINATIM_FOOBAR', value) assert config.get_int('FOOBAR') == result @pytest.mark.parametrize("value", ['1b', 'fg', '0x23']) def test_get_int_bad_values(make_config, monkeypatch, value): config = make_config() monkeypatch.setenv('NOMINATIM_FOOBAR', value) with pytest.raises(UsageError): config.get_int('FOOBAR') def test_get_int_empty(make_config): config = make_config() assert config.TOKENIZER_CONFIG == '' with pytest.raises(UsageError): config.get_int('TOKENIZER_CONFIG') @pytest.mark.parametrize("value,outlist", [('sd', ['sd']), ('dd,rr', ['dd', 'rr']), (' a , b ', ['a', 'b'])]) def test_get_str_list_success(make_config, monkeypatch, value, outlist): config = make_config() monkeypatch.setenv('NOMINATIM_MYLIST', value) assert config.get_str_list('MYLIST') == outlist def test_get_str_list_empty(make_config): config = make_config() assert config.get_str_list('LANGUAGES') is None def test_get_path_empty(make_config): config = make_config() assert config.TOKENIZER_CONFIG == '' assert not config.get_path('TOKENIZER_CONFIG') def test_get_path_absolute(make_config, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_FOOBAR', '/dont/care') result = config.get_path('FOOBAR') assert isinstance(result, Path) assert str(result) == '/dont/care' def test_get_path_relative(make_config, monkeypatch, tmp_path): config = make_config(tmp_path) monkeypatch.setenv('NOMINATIM_FOOBAR', 'an/oyster') result = config.get_path('FOOBAR') assert isinstance(result, Path) assert str(result) == str(tmp_path / 'an/oyster') def test_get_import_style_intern(make_config, src_dir, monkeypatch): config = make_config() monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'street') expected = src_dir / 'settings' / 'import-street.lua' assert config.get_import_style_file() == expected def test_get_import_style_extern_relative(make_config_path, monkeypatch): config = make_config_path() (config.project_dir / 'custom.style').write_text('x') 
monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', 'custom.style') assert str(config.get_import_style_file()) == str(config.project_dir / 'custom.style') def test_get_import_style_extern_absolute(make_config, tmp_path, monkeypatch): config = make_config() cfgfile = tmp_path / 'test.style' cfgfile.write_text('x') monkeypatch.setenv('NOMINATIM_IMPORT_STYLE', str(cfgfile)) assert str(config.get_import_style_file()) == str(cfgfile) def test_load_subconf_from_project_dir(make_config_path): config = make_config_path() testfile = config.project_dir / 'test.yaml' testfile.write_text('cow: muh\ncat: miau\n') testfile = config.config_dir / 'test.yaml' testfile.write_text('cow: miau\ncat: muh\n') rules = config.load_sub_configuration('test.yaml') assert rules == dict(cow='muh', cat='miau') def test_load_subconf_from_settings_dir(make_config_path): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text('cow: muh\ncat: miau\n') rules = config.load_sub_configuration('test.yaml') assert rules == dict(cow='muh', cat='miau') def test_load_subconf_empty_env_conf(make_config_path, monkeypatch): monkeypatch.setenv('NOMINATIM_MY_CONFIG', '') config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text('cow: muh\ncat: miau\n') rules = config.load_sub_configuration('test.yaml', config='MY_CONFIG') assert rules == dict(cow='muh', cat='miau') def test_load_subconf_env_absolute_found(make_config_path, monkeypatch, tmp_path): monkeypatch.setenv('NOMINATIM_MY_CONFIG', str(tmp_path / 'other.yaml')) config = make_config_path() (config.config_dir / 'test.yaml').write_text('cow: muh\ncat: miau\n') (tmp_path / 'other.yaml').write_text('dog: muh\nfrog: miau\n') rules = config.load_sub_configuration('test.yaml', config='MY_CONFIG') assert rules == dict(dog='muh', frog='miau') def test_load_subconf_env_absolute_not_found(make_config_path, monkeypatch, tmp_path): monkeypatch.setenv('NOMINATIM_MY_CONFIG', str(tmp_path / 'other.yaml')) config = 
make_config_path() (config.config_dir / 'test.yaml').write_text('cow: muh\ncat: miau\n') with pytest.raises(UsageError, match='Config file not found.'): rules = config.load_sub_configuration('test.yaml', config='MY_CONFIG') @pytest.mark.parametrize("location", ['project_dir', 'config_dir']) def test_load_subconf_env_relative_found(make_config_path, monkeypatch, location): monkeypatch.setenv('NOMINATIM_MY_CONFIG', 'other.yaml') config = make_config_path() (config.config_dir / 'test.yaml').write_text('cow: muh\ncat: miau\n') (getattr(config, location) / 'other.yaml').write_text('dog: bark\n') rules = config.load_sub_configuration('test.yaml', config='MY_CONFIG') assert rules == dict(dog='bark') def test_load_subconf_env_relative_not_found(make_config_path, monkeypatch): monkeypatch.setenv('NOMINATIM_MY_CONFIG', 'other.yaml') config = make_config_path() (config.config_dir / 'test.yaml').write_text('cow: muh\ncat: miau\n') with pytest.raises(UsageError, match='Config file not found.'): rules = config.load_sub_configuration('test.yaml', config='MY_CONFIG') def test_load_subconf_json(make_config_path): config = make_config_path() (config.project_dir / 'test.json').write_text('{"cow": "muh", "cat": "miau"}') rules = config.load_sub_configuration('test.json') assert rules == dict(cow='muh', cat='miau') def test_load_subconf_not_found(make_config_path): config = make_config_path() with pytest.raises(UsageError, match='Config file not found.'): config.load_sub_configuration('test.yaml') def test_load_subconf_env_unknown_format(make_config_path): config = make_config_path() (config.project_dir / 'test.xml').write_text('<html></html>') with pytest.raises(UsageError, match='unknown format'): config.load_sub_configuration('test.xml') def test_load_subconf_include_absolute(make_config_path, tmp_path): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text(f'base: !include {tmp_path}/inc.yaml\n') (tmp_path / 'inc.yaml').write_text('first: 
1\nsecond: 2\n') rules = config.load_sub_configuration('test.yaml') assert rules == dict(base=dict(first=1, second=2)) @pytest.mark.parametrize("location", ['project_dir', 'config_dir']) def test_load_subconf_include_relative(make_config_path, tmp_path, location): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text(f'base: !include inc.yaml\n') (getattr(config, location) / 'inc.yaml').write_text('first: 1\nsecond: 2\n') rules = config.load_sub_configuration('test.yaml') assert rules == dict(base=dict(first=1, second=2)) def test_load_subconf_include_bad_format(make_config_path): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text(f'base: !include inc.txt\n') (config.config_dir / 'inc.txt').write_text('first: 1\nsecond: 2\n') with pytest.raises(UsageError, match='Cannot handle config file format.'): rules = config.load_sub_configuration('test.yaml') def test_load_subconf_include_not_found(make_config_path): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text(f'base: !include inc.txt\n') with pytest.raises(UsageError, match='Config file not found.'): rules = config.load_sub_configuration('test.yaml') def test_load_subconf_include_recursive(make_config_path): config = make_config_path() testfile = config.config_dir / 'test.yaml' testfile.write_text(f'base: !include inc.yaml\n') (config.config_dir / 'inc.yaml').write_text('- !include more.yaml\n- upper\n') (config.config_dir / 'more.yaml').write_text('- the end\n') rules = config.load_sub_configuration('test.yaml') assert rules == dict(base=[['the end'], 'upper']) @pytest.mark.parametrize("content", [[], None]) def test_flatten_config_list_empty(content): assert flatten_config_list(content) == [] @pytest.mark.parametrize("content", [{'foo': 'bar'}, 'hello world', 3]) def test_flatten_config_list_no_list(content): with pytest.raises(UsageError): flatten_config_list(content) def 
test_flatten_config_list_allready_flat(): assert flatten_config_list([1, 2, 456]) == [1, 2, 456] def test_flatten_config_list_nested(): content = [ 34, [{'first': '1st', 'second': '2nd'}, {}], [[2, 3], [45, [56, 78], 66]], 'end' ] assert flatten_config_list(content) == \ [34, {'first': '1st', 'second': '2nd'}, {}, 2, 3, 45, 56, 78, 66, 'end']
13,650
Python
.py
269
44.687732
90
0.67941
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,648
test_json_writer.py
osm-search_Nominatim/test/python/utils/test_json_writer.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the streaming JSON writer. """ import json import pytest from nominatim_api.utils.json_writer import JsonWriter @pytest.mark.parametrize("inval,outstr", [(None, 'null'), (True, 'true'), (False, 'false'), (23, '23'), (0, '0'), (-1.3, '-1.3'), ('g\nä', '"g\\nä"'), ('"', '"\\\""'), ({}, '{}'), ([], '[]')]) def test_simple_value(inval, outstr): writer = JsonWriter() writer.value(inval) assert writer() == outstr json.loads(writer()) def test_empty_array(): writer = JsonWriter().start_array().end_array() assert writer() == '[]' json.loads(writer()) def test_array_with_single_value(): writer = JsonWriter().start_array().value(None).end_array() assert writer() == '[null]' json.loads(writer()) @pytest.mark.parametrize("invals,outstr", [((1, ), '[1]'), (('a', 'b'), '["a","b"]')]) def test_array_with_data(invals, outstr): writer = JsonWriter() writer.start_array() for ival in invals: writer.value(ival).next() writer.end_array() assert writer() == outstr json.loads(writer()) def test_empty_object(): writer = JsonWriter().start_object().end_object() assert writer() == '{}' json.loads(writer()) def test_object_single_entry(): writer = JsonWriter()\ .start_object()\ .key('something')\ .value(5)\ .end_object() assert writer() == '{"something":5}' json.loads(writer()) def test_object_many_values(): writer = JsonWriter()\ .start_object()\ .keyval('foo', None)\ .keyval('bar', {})\ .keyval('baz', 'b\taz')\ .end_object() assert writer() == '{"foo":null,"bar":{},"baz":"b\\taz"}' json.loads(writer()) def test_object_many_values_without_none(): writer = JsonWriter()\ .start_object()\ .keyval_not_none('foo', 0)\ .keyval_not_none('bar', None)\ .keyval_not_none('baz', '')\ .keyval_not_none('eve', False, transform = lambda v: 'yes' if v else 'no')\ .end_object() assert 
writer() == '{"foo":0,"baz":"","eve":"no"}' json.loads(writer()) def test_raw_output(): writer = JsonWriter()\ .start_array()\ .raw('{ "nicely": "formatted here" }').next()\ .value(1)\ .end_array() assert writer() == '[{ "nicely": "formatted here" },1]'
3,004
Python
.py
79
27.493671
81
0.498273
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,649
test_centroid.py
osm-search_Nominatim/test/python/utils/test_centroid.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for centroid computation. """ import pytest from nominatim_db.utils.centroid import PointsCentroid def test_empty_set(): c = PointsCentroid() with pytest.raises(ValueError, match='No points'): c.centroid() @pytest.mark.parametrize("centroid", [(0,0), (-1, 3), [0.0000032, 88.4938]]) def test_one_point_centroid(centroid): c = PointsCentroid() c += centroid assert len(c.centroid()) == 2 assert c.centroid() == (pytest.approx(centroid[0]), pytest.approx(centroid[1])) def test_multipoint_centroid(): c = PointsCentroid() c += (20.0, -10.0) assert c.centroid() == (pytest.approx(20.0), pytest.approx(-10.0)) c += (20.2, -9.0) assert c.centroid() == (pytest.approx(20.1), pytest.approx(-9.5)) c += (20.2, -9.0) assert c.centroid() == (pytest.approx(20.13333), pytest.approx(-9.333333)) def test_manypoint_centroid(): c = PointsCentroid() for _ in range(10000): c += (4.564732, -0.000034) assert c.centroid() == (pytest.approx(4.564732), pytest.approx(-0.000034)) @pytest.mark.parametrize("param", ["aa", None, 5, [1, 2, 3], (3, None), ("a", 3.9)]) def test_add_non_tuple(param): c = PointsCentroid() with pytest.raises(ValueError, match='2-element tuples'): c += param
1,500
Python
.py
39
34.564103
84
0.659972
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,650
conftest.py
osm-search_Nominatim/test/python/api/conftest.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Helper fixtures for API call tests. """ import pytest import pytest_asyncio import time import datetime as dt import sqlalchemy as sa import nominatim_api as napi from nominatim_db.db.sql_preprocessor import SQLPreprocessor from nominatim_api.search.query_analyzer_factory import make_query_analyzer from nominatim_db.tools import convert_sqlite import nominatim_api.logging as loglib class APITester: def __init__(self): self.api = napi.NominatimAPI() self.async_to_sync(self.api._async_api.setup_database()) def async_to_sync(self, func): """ Run an asynchronous function until completion using the internal loop of the API. """ return self.api._loop.run_until_complete(func) def add_data(self, table, data): """ Insert data into the given table. """ sql = getattr(self.api._async_api._tables, table).insert() self.async_to_sync(self.exec_async(sql, data)) def add_placex(self, **kw): name = kw.get('name') if isinstance(name, str): name = {'name': name} centroid = kw.get('centroid', (23.0, 34.0)) geometry = kw.get('geometry', 'POINT(%f %f)' % centroid) self.add_data('placex', {'place_id': kw.get('place_id', 1000), 'osm_type': kw.get('osm_type', 'W'), 'osm_id': kw.get('osm_id', 4), 'class_': kw.get('class_', 'highway'), 'type': kw.get('type', 'residential'), 'name': name, 'address': kw.get('address'), 'extratags': kw.get('extratags'), 'parent_place_id': kw.get('parent_place_id'), 'linked_place_id': kw.get('linked_place_id'), 'admin_level': kw.get('admin_level', 15), 'country_code': kw.get('country_code'), 'housenumber': kw.get('housenumber'), 'postcode': kw.get('postcode'), 'wikipedia': kw.get('wikipedia'), 'rank_search': kw.get('rank_search', 30), 'rank_address': kw.get('rank_address', 30), 'importance': kw.get('importance'), 'centroid': 'POINT(%f %f)' % centroid, 
'indexed_status': kw.get('indexed_status', 0), 'indexed_date': kw.get('indexed_date', dt.datetime(2022, 12, 7, 14, 14, 46, 0)), 'geometry': geometry}) def add_address_placex(self, object_id, **kw): self.add_placex(**kw) self.add_data('addressline', {'place_id': object_id, 'address_place_id': kw.get('place_id', 1000), 'distance': kw.get('distance', 0.0), 'cached_rank_address': kw.get('rank_address', 30), 'fromarea': kw.get('fromarea', False), 'isaddress': kw.get('isaddress', True)}) def add_osmline(self, **kw): self.add_data('osmline', {'place_id': kw.get('place_id', 10000), 'osm_id': kw.get('osm_id', 4004), 'parent_place_id': kw.get('parent_place_id'), 'indexed_date': kw.get('indexed_date', dt.datetime(2022, 12, 7, 14, 14, 46, 0)), 'startnumber': kw.get('startnumber', 2), 'endnumber': kw.get('endnumber', 6), 'step': kw.get('step', 2), 'address': kw.get('address'), 'postcode': kw.get('postcode'), 'country_code': kw.get('country_code'), 'linegeo': kw.get('geometry', 'LINESTRING(1.1 -0.2, 1.09 -0.22)')}) def add_tiger(self, **kw): self.add_data('tiger', {'place_id': kw.get('place_id', 30000), 'parent_place_id': kw.get('parent_place_id'), 'startnumber': kw.get('startnumber', 2), 'endnumber': kw.get('endnumber', 6), 'step': kw.get('step', 2), 'postcode': kw.get('postcode'), 'linegeo': kw.get('geometry', 'LINESTRING(1.1 -0.2, 1.09 -0.22)')}) def add_postcode(self, **kw): self.add_data('postcode', {'place_id': kw.get('place_id', 1000), 'parent_place_id': kw.get('parent_place_id'), 'country_code': kw.get('country_code'), 'postcode': kw.get('postcode'), 'rank_search': kw.get('rank_search', 20), 'rank_address': kw.get('rank_address', 22), 'indexed_date': kw.get('indexed_date', dt.datetime(2022, 12, 7, 14, 14, 46, 0)), 'geometry': kw.get('geometry', 'POINT(23 34)')}) def add_country(self, country_code, geometry): self.add_data('country_grid', {'country_code': country_code, 'area': 0.1, 'geometry': geometry}) def add_country_name(self, country_code, names, partition=0): 
self.add_data('country_name', {'country_code': country_code, 'name': names, 'partition': partition}) def add_search_name(self, place_id, **kw): centroid = kw.get('centroid', (23.0, 34.0)) self.add_data('search_name', {'place_id': place_id, 'importance': kw.get('importance', 0.00001), 'search_rank': kw.get('search_rank', 30), 'address_rank': kw.get('address_rank', 30), 'name_vector': kw.get('names', []), 'nameaddress_vector': kw.get('address', []), 'country_code': kw.get('country_code', 'xx'), 'centroid': 'POINT(%f %f)' % centroid}) def add_class_type_table(self, cls, typ): self.async_to_sync( self.exec_async(sa.text(f"""CREATE TABLE place_classtype_{cls}_{typ} AS (SELECT place_id, centroid FROM placex WHERE class = '{cls}' AND type = '{typ}') """))) def add_word_table(self, content): data = [dict(zip(['word_id', 'word_token', 'type', 'word', 'info'], c)) for c in content] async def _do_sql(): async with self.api._async_api.begin() as conn: if 'word' not in conn.t.meta.tables: await make_query_analyzer(conn) word_table = conn.t.meta.tables['word'] await conn.connection.run_sync(word_table.create) if data: await conn.execute(conn.t.meta.tables['word'].insert(), data) self.async_to_sync(_do_sql()) async def exec_async(self, sql, *args, **kwargs): async with self.api._async_api.begin() as conn: return await conn.execute(sql, *args, **kwargs) async def create_tables(self): async with self.api._async_api._engine.begin() as conn: await conn.run_sync(self.api._async_api._tables.meta.create_all) @pytest.fixture def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch): """ Create an asynchronous SQLAlchemy engine for the test DB. 
""" monkeypatch.setenv('NOMINATIM_USE_US_TIGER_DATA', 'yes') testapi = APITester() testapi.async_to_sync(testapi.create_tables()) proc = SQLPreprocessor(temp_db_conn, testapi.api.config) proc.run_sql_file(temp_db_conn, 'functions/ranking.sql') loglib.set_log_output('text') yield testapi print(loglib.get_and_disable()) testapi.api.close() @pytest.fixture(params=['postgres_db', 'sqlite_db']) def frontend(request, event_loop, tmp_path): testapis = [] if request.param == 'sqlite_db': db = str(tmp_path / 'test_nominatim_python_unittest.sqlite') def mkapi(apiobj, options={'reverse'}): apiobj.add_data('properties', [{'property': 'tokenizer', 'value': 'icu'}, {'property': 'tokenizer_import_normalisation', 'value': ':: lower();'}, {'property': 'tokenizer_import_transliteration', 'value': "'1' > '/1/'; 'ä' > 'ä '"}, ]) async def _do_sql(): async with apiobj.api._async_api.begin() as conn: if 'word' in conn.t.meta.tables: return await make_query_analyzer(conn) word_table = conn.t.meta.tables['word'] await conn.connection.run_sync(word_table.create) apiobj.async_to_sync(_do_sql()) event_loop.run_until_complete(convert_sqlite.convert(None, db, options)) outapi = napi.NominatimAPI(environ={'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}", 'NOMINATIM_USE_US_TIGER_DATA': 'yes'}) testapis.append(outapi) return outapi elif request.param == 'postgres_db': def mkapi(apiobj, options=None): return apiobj.api yield mkapi for api in testapis: api.close() @pytest_asyncio.fixture async def api(temp_db): async with napi.NominatimAPIAsync() as api: yield api
9,984
Python
.py
198
35.373737
110
0.512639
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,651
test_api_connection.py
osm-search_Nominatim/test/python/api/test_api_connection.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for enhanced connection class for API functions. """ from pathlib import Path import pytest import sqlalchemy as sa @pytest.mark.asyncio async def test_run_scalar(api, table_factory): table_factory('foo', definition='that TEXT', content=(('a', ),)) async with api.begin() as conn: assert await conn.scalar(sa.text('SELECT * FROM foo')) == 'a' @pytest.mark.asyncio async def test_run_execute(api, table_factory): table_factory('foo', definition='that TEXT', content=(('a', ),)) async with api.begin() as conn: result = await conn.execute(sa.text('SELECT * FROM foo')) assert result.fetchone()[0] == 'a' @pytest.mark.asyncio async def test_get_property_existing_cached(api, table_factory): table_factory('nominatim_properties', definition='property TEXT, value TEXT', content=(('dbv', '96723'), )) async with api.begin() as conn: assert await conn.get_property('dbv') == '96723' await conn.execute(sa.text('TRUNCATE nominatim_properties')) assert await conn.get_property('dbv') == '96723' @pytest.mark.asyncio async def test_get_property_existing_uncached(api, table_factory): table_factory('nominatim_properties', definition='property TEXT, value TEXT', content=(('dbv', '96723'), )) async with api.begin() as conn: assert await conn.get_property('dbv') == '96723' await conn.execute(sa.text("UPDATE nominatim_properties SET value = '1'")) assert await conn.get_property('dbv', cached=False) == '1' @pytest.mark.asyncio @pytest.mark.parametrize('param', ['foo', 'DB:server_version']) async def test_get_property_missing(api, table_factory, param): table_factory('nominatim_properties', definition='property TEXT, value TEXT') async with api.begin() as conn: with pytest.raises(ValueError): await conn.get_property(param) @pytest.mark.asyncio async def 
test_get_db_property_existing(api): async with api.begin() as conn: assert await conn.get_db_property('server_version') > 0 @pytest.mark.asyncio async def test_get_db_property_existing(api): async with api.begin() as conn: with pytest.raises(ValueError): await conn.get_db_property('dfkgjd.rijg')
2,551
Python
.py
58
38.12069
82
0.680032
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,652
test_api_polygons_v1.py
osm-search_Nominatim/test/python/api/test_api_polygons_v1.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the deletable v1 API call. """ import json import datetime as dt from pathlib import Path import pytest from fake_adaptor import FakeAdaptor, FakeError, FakeResponse import nominatim_api.v1.server_glue as glue class TestPolygonsEndPoint: @pytest.fixture(autouse=True) def setup_deletable_table(self, temp_db_cursor, table_factory, temp_db_with_extensions): self.now = dt.datetime.now() self.recent = dt.datetime.now() - dt.timedelta(days=3) table_factory('import_polygon_error', definition="""osm_id bigint, osm_type character(1), class text, type text, name hstore, country_code character varying(2), updated timestamp without time zone, errormessage text, prevgeometry geometry(Geometry,4326), newgeometry geometry(Geometry,4326)""", content=[(345, 'N', 'boundary', 'administrative', {'name': 'Foo'}, 'xx', self.recent, 'some text', None, None), (781, 'R', 'landuse', 'wood', None, 'ds', self.now, 'Area reduced by lots', None, None)]) @pytest.mark.asyncio async def test_polygons_simple(self, api): a = FakeAdaptor() resp = await glue.polygons_endpoint(api, a) results = json.loads(resp.output) results.sort(key=lambda r: (r['osm_type'], r['osm_id'])) assert results == [{'osm_type': 'N', 'osm_id': 345, 'class': 'boundary', 'type': 'administrative', 'name': 'Foo', 'country_code': 'xx', 'errormessage': 'some text', 'updated': self.recent.isoformat(sep=' ', timespec='seconds')}, {'osm_type': 'R', 'osm_id': 781, 'class': 'landuse', 'type': 'wood', 'name': None, 'country_code': 'ds', 'errormessage': 'Area reduced by lots', 'updated': self.now.isoformat(sep=' ', timespec='seconds')}] @pytest.mark.asyncio async def test_polygons_days(self, api): a = FakeAdaptor() a.params['days'] = '2' resp = await glue.polygons_endpoint(api, a) results = json.loads(resp.output) 
assert [r['osm_id'] for r in results] == [781] @pytest.mark.asyncio async def test_polygons_class(self, api): a = FakeAdaptor() a.params['class'] = 'landuse' resp = await glue.polygons_endpoint(api, a) results = json.loads(resp.output) assert [r['osm_id'] for r in results] == [781] @pytest.mark.asyncio async def test_polygons_reduced(self, api): a = FakeAdaptor() a.params['reduced'] = '1' resp = await glue.polygons_endpoint(api, a) results = json.loads(resp.output) assert [r['osm_id'] for r in results] == [781]
3,527
Python
.py
74
32.959459
92
0.519977
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,653
test_results.py
osm-search_Nominatim/test/python/api/test_results.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for result datatype helper functions. """ import struct from binascii import hexlify import pytest import pytest_asyncio import sqlalchemy as sa from nominatim_api import SourceTable, DetailedResult, Point import nominatim_api.results as nresults def mkpoint(x, y): return hexlify(struct.pack("=biidd", 1, 0x20000001, 4326, x, y)).decode('utf-8') class FakeRow: def __init__(self, **kwargs): if 'parent_place_id' not in kwargs: kwargs['parent_place_id'] = None for k, v in kwargs.items(): setattr(self, k, v) self._mapping = kwargs def test_minimal_detailed_result(): res = DetailedResult(SourceTable.PLACEX, ('amenity', 'post_box'), Point(23.1, 0.5)) assert res.lon == 23.1 assert res.lat == 0.5 assert res.calculated_importance() == pytest.approx(0.00001) def test_detailed_result_custom_importance(): res = DetailedResult(SourceTable.PLACEX, ('amenity', 'post_box'), Point(23.1, 0.5), importance=0.4563) assert res.calculated_importance() == 0.4563 @pytest.mark.parametrize('func', (nresults.create_from_placex_row, nresults.create_from_osmline_row, nresults.create_from_tiger_row, nresults.create_from_postcode_row)) def test_create_row_none(func): assert func(None, DetailedResult) is None @pytest.mark.parametrize('func', (nresults.create_from_osmline_row, nresults.create_from_tiger_row)) def test_create_row_with_housenumber(func): row = FakeRow(place_id=2345, osm_type='W', osm_id=111, housenumber=4, address=None, postcode='99900', country_code='xd', centroid=mkpoint(0, 0)) res = func(row, DetailedResult) assert res.housenumber == '4' assert res.extratags is None assert res.category == ('place', 'house') @pytest.mark.parametrize('func', (nresults.create_from_osmline_row, nresults.create_from_tiger_row)) def test_create_row_without_housenumber(func): row = 
FakeRow(place_id=2345, osm_type='W', osm_id=111, startnumber=1, endnumber=11, step=2, address=None, postcode='99900', country_code='xd', centroid=mkpoint(0, 0)) res = func(row, DetailedResult) assert res.housenumber is None assert res.extratags == {'startnumber': '1', 'endnumber': '11', 'step': '2'} assert res.category == ('place', 'houses')
2,866
Python
.py
65
34.953846
84
0.622438
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,654
fake_adaptor.py
osm-search_Nominatim/test/python/api/fake_adaptor.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Provides dummy implementations of ASGIAdaptor for testing. """ from collections import namedtuple import nominatim_api.v1.server_glue as glue from nominatim_api.v1.format import dispatch as formatting from nominatim_api.config import Configuration class FakeError(BaseException): def __init__(self, msg, status): self.msg = msg self.status = status def __str__(self): return f'{self.status} -- {self.msg}' FakeResponse = namedtuple('FakeResponse', ['status', 'output', 'content_type']) class FakeAdaptor(glue.ASGIAdaptor): def __init__(self, params=None, headers=None, config=None): self.params = params or {} self.headers = headers or {} self._config = config or Configuration(None) def get(self, name, default=None): return self.params.get(name, default) def get_header(self, name, default=None): return self.headers.get(name, default) def error(self, msg, status=400): return FakeError(msg, status) def create_response(self, status, output, num_results): return FakeResponse(status, output, self.content_type) def base_uri(self): return 'http://test' def config(self): return self._config def formatting(self): return formatting
1,512
Python
.py
39
33.538462
79
0.699725
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,655
test_api_search.py
osm-search_Nominatim/test/python/api/test_api_search.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for search API calls. These tests make sure that all Python code is correct and executable. Functional tests can be found in the BDD test suite. """ import json import pytest import sqlalchemy as sa import nominatim_api as napi import nominatim_api.logging as loglib API_OPTIONS = {'search'} @pytest.fixture(autouse=True) def setup_icu_tokenizer(apiobj): """ Setup the properties needed for using the ICU tokenizer. """ apiobj.add_data('properties', [{'property': 'tokenizer', 'value': 'icu'}, {'property': 'tokenizer_import_normalisation', 'value': ':: lower();'}, {'property': 'tokenizer_import_transliteration', 'value': "'1' > '/1/'; 'ä' > 'ä '"}, ]) def test_search_no_content(apiobj, frontend): apiobj.add_word_table([]) api = frontend(apiobj, options=API_OPTIONS) assert api.search('foo') == [] def test_search_simple_word(apiobj, frontend): apiobj.add_word_table([(55, 'test', 'W', 'test', None), (2, 'test', 'w', 'test', None)]) apiobj.add_placex(place_id=444, class_='place', type='village', centroid=(1.3, 0.7)) apiobj.add_search_name(444, names=[2, 55]) api = frontend(apiobj, options=API_OPTIONS) results = api.search('TEST') assert [r.place_id for r in results] == [444] @pytest.mark.parametrize('logtype', ['text', 'html']) def test_search_with_debug(apiobj, frontend, logtype): apiobj.add_word_table([(55, 'test', 'W', 'test', None), (2, 'test', 'w', 'test', None)]) apiobj.add_placex(place_id=444, class_='place', type='village', centroid=(1.3, 0.7)) apiobj.add_search_name(444, names=[2, 55]) api = frontend(apiobj, options=API_OPTIONS) loglib.set_log_output(logtype) results = api.search('TEST') assert loglib.get_and_disable() def test_address_no_content(apiobj, frontend): apiobj.add_word_table([]) api = frontend(apiobj, options=API_OPTIONS) assert 
api.search_address(amenity='hotel', street='Main St 34', city='Happyville', county='Wideland', state='Praerie', postalcode='55648', country='xx') == [] @pytest.mark.parametrize('atype,address,search', [('street', 26, 26), ('city', 16, 18), ('county', 12, 12), ('state', 8, 8)]) def test_address_simple_places(apiobj, frontend, atype, address, search): apiobj.add_word_table([(55, 'test', 'W', 'test', None), (2, 'test', 'w', 'test', None)]) apiobj.add_placex(place_id=444, rank_address=address, rank_search=search, centroid=(1.3, 0.7)) apiobj.add_search_name(444, names=[2, 55], address_rank=address, search_rank=search) api = frontend(apiobj, options=API_OPTIONS) results = api.search_address(**{atype: 'TEST'}) assert [r.place_id for r in results] == [444] def test_address_country(apiobj, frontend): apiobj.add_word_table([(None, 'ro', 'C', 'ro', None)]) apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country_name('ro', {'name': 'România'}) api = frontend(apiobj, options=API_OPTIONS) assert len(api.search_address(country='ro')) == 1 def test_category_no_categories(apiobj, frontend): apiobj.add_word_table([]) api = frontend(apiobj, options=API_OPTIONS) assert api.search_category([], near_query='Berlin') == [] def test_category_no_content(apiobj, frontend): apiobj.add_word_table([]) api = frontend(apiobj, options=API_OPTIONS) assert api.search_category([('amenity', 'restaurant')]) == [] def test_category_simple_restaurant(apiobj, frontend): apiobj.add_word_table([]) apiobj.add_placex(place_id=444, class_='amenity', type='restaurant', centroid=(1.3, 0.7)) apiobj.add_search_name(444, names=[2, 55], address_rank=16, search_rank=18) api = frontend(apiobj, options=API_OPTIONS) results = api.search_category([('amenity', 'restaurant')], near=(1.3, 0.701), near_radius=0.015) assert [r.place_id for r in results] == [444] def test_category_with_search_phrase(apiobj, frontend): apiobj.add_word_table([(55, 'test', 'W', 'test', None), (2, 'test', 'w', 'test', 
None)]) apiobj.add_placex(place_id=444, class_='place', type='village', rank_address=16, rank_search=18, centroid=(1.3, 0.7)) apiobj.add_search_name(444, names=[2, 55], address_rank=16, search_rank=18) apiobj.add_placex(place_id=95, class_='amenity', type='restaurant', centroid=(1.3, 0.7003)) api = frontend(apiobj, options=API_OPTIONS) results = api.search_category([('amenity', 'restaurant')], near_query='TEST') assert [r.place_id for r in results] == [95]
5,436
Python
.py
109
39.816514
106
0.588291
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,656
test_warm.py
osm-search_Nominatim/test/python/api/test_warm.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for warm-up CLI function. """ import pytest import nominatim_db.cli @pytest.fixture(autouse=True) def setup_database_with_context(apiobj, table_factory): table_factory('word', definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB', content=[(55, 'test', 'W', 'test', None), (2, 'test', 'w', 'test', None)]) apiobj.add_data('properties', [{'property': 'tokenizer', 'value': 'icu'}, {'property': 'tokenizer_import_normalisation', 'value': ':: lower();'}, {'property': 'tokenizer_import_transliteration', 'value': "'1' > '/1/'; 'ä' > 'ä '"}, ]) @pytest.mark.parametrize('args', [['--search-only'], ['--reverse-only']]) def test_warm_all(tmp_path, args): assert 0 == nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE', cli_args=['admin', '--project-dir', str(tmp_path), '--warm'] + args)
1,303
Python
.py
27
37.62963
106
0.555556
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,657
test_api_deletable_v1.py
osm-search_Nominatim/test/python/api/test_api_deletable_v1.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the deletable v1 API call. """ import json from pathlib import Path import pytest from fake_adaptor import FakeAdaptor, FakeError, FakeResponse import nominatim_api.v1.server_glue as glue class TestDeletableEndPoint: @pytest.fixture(autouse=True) def setup_deletable_table(self, temp_db_cursor, table_factory, temp_db_with_extensions): table_factory('import_polygon_delete', definition='osm_id bigint, osm_type char(1), class text, type text', content=[(345, 'N', 'boundary', 'administrative'), (781, 'R', 'landuse', 'wood'), (781, 'R', 'landcover', 'grass')]) table_factory('placex', definition="""place_id bigint, osm_id bigint, osm_type char(1), class text, type text, name HSTORE, country_code char(2)""", content=[(1, 345, 'N', 'boundary', 'administrative', {'old_name': 'Former'}, 'ab'), (2, 781, 'R', 'landuse', 'wood', {'name': 'Wood'}, 'cd'), (3, 781, 'R', 'landcover', 'grass', None, 'cd')]) @pytest.mark.asyncio async def test_deletable(self, api): a = FakeAdaptor() resp = await glue.deletable_endpoint(api, a) results = json.loads(resp.output) results.sort(key=lambda r: r['place_id']) assert results == [{'place_id': 1, 'country_code': 'ab', 'name': None, 'osm_id': 345, 'osm_type': 'N', 'class': 'boundary', 'type': 'administrative'}, {'place_id': 2, 'country_code': 'cd', 'name': 'Wood', 'osm_id': 781, 'osm_type': 'R', 'class': 'landuse', 'type': 'wood'}, {'place_id': 3, 'country_code': 'cd', 'name': None, 'osm_id': 781, 'osm_type': 'R', 'class': 'landcover', 'type': 'grass'}]
2,275
Python
.py
43
39.046512
105
0.520937
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,658
test_api_details.py
osm-search_Nominatim/test/python/api/test_api_details.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for details API call. """ import datetime as dt import pytest import nominatim_api as napi @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4), napi.OsmID('W', 4, 'highway'))) def test_lookup_in_placex(apiobj, frontend, idobj): import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name={'name': 'Road'}, address={'city': 'Barrow'}, extratags={'surface': 'paved'}, parent_place_id=34, linked_place_id=55, admin_level=15, country_code='gb', housenumber='4', postcode='34425', wikipedia='en:Faa', rank_search=27, rank_address=26, importance=0.01, centroid=(23, 34), indexed_date=import_date, geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') api = frontend(apiobj, options={'details'}) result = api.details(idobj) assert result is not None assert result.source_table.name == 'PLACEX' assert result.category == ('highway', 'residential') assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0)) assert result.place_id == 332 assert result.parent_place_id == 34 assert result.linked_place_id == 55 assert result.osm_object == ('W', 4) assert result.admin_level == 15 assert result.names == {'name': 'Road'} assert result.address == {'city': 'Barrow'} assert result.extratags == {'surface': 'paved'} assert result.housenumber == '4' assert result.postcode == '34425' assert result.wikipedia == 'en:Faa' assert result.rank_search == 27 assert result.rank_address == 26 assert result.importance == pytest.approx(0.01) assert result.country_code == 'gb' assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc) assert result.address_rows is None assert result.linked_rows is None assert result.parented_rows is None assert 
result.name_keywords is None assert result.address_keywords is None assert result.geometry == {'type': 'ST_LineString'} def test_lookup_in_placex_minimal_info(apiobj, frontend): import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', admin_level=15, rank_search=27, rank_address=26, centroid=(23, 34), indexed_date=import_date, geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332)) assert result is not None assert result.source_table.name == 'PLACEX' assert result.category == ('highway', 'residential') assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0)) assert result.place_id == 332 assert result.parent_place_id is None assert result.linked_place_id is None assert result.osm_object == ('W', 4) assert result.admin_level == 15 assert result.names is None assert result.address is None assert result.extratags is None assert result.housenumber is None assert result.postcode is None assert result.wikipedia is None assert result.rank_search == 27 assert result.rank_address == 26 assert result.importance is None assert result.country_code is None assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc) assert result.address_rows is None assert result.linked_rows is None assert result.parented_rows is None assert result.name_keywords is None assert result.address_keywords is None assert result.geometry == {'type': 'ST_LineString'} def test_lookup_in_placex_with_geometry(apiobj, frontend): apiobj.add_placex(place_id=332, geometry='LINESTRING(23 34, 23.1 34)') api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON) assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'} def test_lookup_placex_with_address_details(apiobj, frontend): 
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', rank_search=27, rank_address=26) apiobj.add_address_placex(332, fromarea=False, isaddress=False, distance=0.0034, place_id=1000, osm_type='N', osm_id=3333, class_='place', type='suburb', name='Smallplace', country_code='pl', admin_level=13, rank_search=24, rank_address=23) apiobj.add_address_placex(332, fromarea=True, isaddress=True, place_id=1001, osm_type='N', osm_id=3334, class_='place', type='city', name='Bigplace', country_code='pl', rank_search=17, rank_address=16) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), address_details=True) assert result.address_rows == [ napi.AddressLine(place_id=332, osm_object=('W', 4), category=('highway', 'residential'), names={'name': 'Street'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=26, distance=0.0, local_name='Street'), napi.AddressLine(place_id=1000, osm_object=('N', 3333), category=('place', 'suburb'), names={'name': 'Smallplace'}, extratags={}, admin_level=13, fromarea=False, isaddress=True, rank_address=23, distance=0.0034, local_name='Smallplace'), napi.AddressLine(place_id=1001, osm_object=('N', 3334), category=('place', 'city'), names={'name': 'Bigplace'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=16, distance=0.0, local_name='Bigplace'), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'country_code'), names={'ref': 'pl'}, extratags={}, admin_level=None, fromarea=True, isaddress=False, rank_address=4, distance=0.0) ] def test_lookup_place_with_linked_places_none_existing(apiobj, frontend): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', linked_place_id=45, rank_search=27, rank_address=26) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), linked_places=True) 
assert result.linked_rows == [] def test_lookup_place_with_linked_places_existing(apiobj, frontend): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', linked_place_id=45, rank_search=27, rank_address=26) apiobj.add_placex(place_id=1001, osm_type='W', osm_id=5, class_='highway', type='residential', name='Street', country_code='pl', linked_place_id=332, rank_search=27, rank_address=26) apiobj.add_placex(place_id=1002, osm_type='W', osm_id=6, class_='highway', type='residential', name='Street', country_code='pl', linked_place_id=332, rank_search=27, rank_address=26) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), linked_places=True) assert result.linked_rows == [ napi.AddressLine(place_id=1001, osm_object=('W', 5), category=('highway', 'residential'), names={'name': 'Street'}, extratags={}, admin_level=15, fromarea=False, isaddress=True, rank_address=26, distance=0.0), napi.AddressLine(place_id=1002, osm_object=('W', 6), category=('highway', 'residential'), names={'name': 'Street'}, extratags={}, admin_level=15, fromarea=False, isaddress=True, rank_address=26, distance=0.0), ] def test_lookup_place_with_parented_places_not_existing(apiobj, frontend): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', parent_place_id=45, rank_search=27, rank_address=26) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), parented_places=True) assert result.parented_rows == [] def test_lookup_place_with_parented_places_existing(apiobj, frontend): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', parent_place_id=45, rank_search=27, rank_address=26) apiobj.add_placex(place_id=1001, osm_type='N', osm_id=5, class_='place', type='house', housenumber='23', country_code='pl', parent_place_id=332, 
rank_search=30, rank_address=30) apiobj.add_placex(place_id=1002, osm_type='W', osm_id=6, class_='highway', type='residential', name='Street', country_code='pl', parent_place_id=332, rank_search=27, rank_address=26) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(332), parented_places=True) assert result.parented_rows == [ napi.AddressLine(place_id=1001, osm_object=('N', 5), category=('place', 'house'), names={'housenumber': '23'}, extratags={}, admin_level=15, fromarea=False, isaddress=True, rank_address=30, distance=0.0), ] @pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928))) def test_lookup_in_osmline(apiobj, frontend, idobj): import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) apiobj.add_osmline(place_id=4924, osm_id=9928, parent_place_id=12, startnumber=1, endnumber=4, step=1, country_code='gb', postcode='34425', address={'city': 'Big'}, indexed_date=import_date, geometry='LINESTRING(23 34, 23 35)') api = frontend(apiobj, options={'details'}) result = api.details(idobj) assert result is not None assert result.source_table.name == 'OSMLINE' assert result.category == ('place', 'houses') assert result.centroid == (pytest.approx(23.0), pytest.approx(34.5)) assert result.place_id == 4924 assert result.parent_place_id == 12 assert result.linked_place_id is None assert result.osm_object == ('W', 9928) assert result.admin_level == 15 assert result.names is None assert result.address == {'city': 'Big'} assert result.extratags == {'startnumber': '1', 'endnumber': '4', 'step': '1'} assert result.housenumber is None assert result.postcode == '34425' assert result.wikipedia is None assert result.rank_search == 30 assert result.rank_address == 30 assert result.importance is None assert result.country_code == 'gb' assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc) assert result.address_rows is None assert result.linked_rows is None assert result.parented_rows is None assert result.name_keywords 
is None assert result.address_keywords is None assert result.geometry == {'type': 'ST_LineString'} def test_lookup_in_osmline_split_interpolation(apiobj, frontend): apiobj.add_osmline(place_id=1000, osm_id=9, startnumber=2, endnumber=4, step=1) apiobj.add_osmline(place_id=1001, osm_id=9, startnumber=6, endnumber=9, step=1) apiobj.add_osmline(place_id=1002, osm_id=9, startnumber=11, endnumber=20, step=1) api = frontend(apiobj, options={'details'}) for i in range(1, 6): result = api.details(napi.OsmID('W', 9, str(i))) assert result.place_id == 1000 for i in range(7, 11): result = api.details(napi.OsmID('W', 9, str(i))) assert result.place_id == 1001 for i in range(12, 22): result = api.details(napi.OsmID('W', 9, str(i))) assert result.place_id == 1002 def test_lookup_osmline_with_address_details(apiobj, frontend): apiobj.add_osmline(place_id=9000, osm_id=9, startnumber=2, endnumber=4, step=1, parent_place_id=332) apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', rank_search=27, rank_address=26) apiobj.add_address_placex(332, fromarea=False, isaddress=False, distance=0.0034, place_id=1000, osm_type='N', osm_id=3333, class_='place', type='suburb', name='Smallplace', country_code='pl', admin_level=13, rank_search=24, rank_address=23) apiobj.add_address_placex(332, fromarea=True, isaddress=True, place_id=1001, osm_type='N', osm_id=3334, class_='place', type='city', name='Bigplace', country_code='pl', rank_search=17, rank_address=16) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(9000), address_details=True) assert result.address_rows == [ napi.AddressLine(place_id=332, osm_object=('W', 4), category=('highway', 'residential'), names={'name': 'Street'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=26, distance=0.0, local_name='Street'), napi.AddressLine(place_id=1000, osm_object=('N', 3333), category=('place', 'suburb'), names={'name': 
'Smallplace'}, extratags={}, admin_level=13, fromarea=False, isaddress=True, rank_address=23, distance=0.0034, local_name='Smallplace'), napi.AddressLine(place_id=1001, osm_object=('N', 3334), category=('place', 'city'), names={'name': 'Bigplace'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=16, distance=0.0, local_name='Bigplace'), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'country_code'), names={'ref': 'pl'}, extratags={}, admin_level=None, fromarea=True, isaddress=False, rank_address=4, distance=0.0) ] def test_lookup_in_tiger(apiobj, frontend): apiobj.add_tiger(place_id=4924, parent_place_id=12, startnumber=1, endnumber=4, step=1, postcode='34425', geometry='LINESTRING(23 34, 23 35)') apiobj.add_placex(place_id=12, category=('highway', 'residential'), osm_type='W', osm_id=6601223, geometry='LINESTRING(23 34, 23 35)') api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(4924)) assert result is not None assert result.source_table.name == 'TIGER' assert result.category == ('place', 'houses') assert result.centroid == (pytest.approx(23.0), pytest.approx(34.5)) assert result.place_id == 4924 assert result.parent_place_id == 12 assert result.linked_place_id is None assert result.osm_object == ('W', 6601223) assert result.admin_level == 15 assert result.names is None assert result.address is None assert result.extratags == {'startnumber': '1', 'endnumber': '4', 'step': '1'} assert result.housenumber is None assert result.postcode == '34425' assert result.wikipedia is None assert result.rank_search == 30 assert result.rank_address == 30 assert result.importance is None assert result.country_code == 'us' assert result.indexed_date is None assert result.address_rows is None assert result.linked_rows is None assert result.parented_rows is None assert result.name_keywords is None assert result.address_keywords is None assert result.geometry == {'type': 'ST_LineString'} def 
test_lookup_tiger_with_address_details(apiobj, frontend): apiobj.add_tiger(place_id=9000, startnumber=2, endnumber=4, step=1, parent_place_id=332) apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='us', rank_search=27, rank_address=26) apiobj.add_address_placex(332, fromarea=False, isaddress=False, distance=0.0034, place_id=1000, osm_type='N', osm_id=3333, class_='place', type='suburb', name='Smallplace', country_code='us', admin_level=13, rank_search=24, rank_address=23) apiobj.add_address_placex(332, fromarea=True, isaddress=True, place_id=1001, osm_type='N', osm_id=3334, class_='place', type='city', name='Bigplace', country_code='us', rank_search=17, rank_address=16) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(9000), address_details=True) assert result.address_rows == [ napi.AddressLine(place_id=332, osm_object=('W', 4), category=('highway', 'residential'), names={'name': 'Street'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=26, distance=0.0, local_name='Street'), napi.AddressLine(place_id=1000, osm_object=('N', 3333), category=('place', 'suburb'), names={'name': 'Smallplace'}, extratags={}, admin_level=13, fromarea=False, isaddress=True, rank_address=23, distance=0.0034, local_name='Smallplace'), napi.AddressLine(place_id=1001, osm_object=('N', 3334), category=('place', 'city'), names={'name': 'Bigplace'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=16, distance=0.0, local_name='Bigplace'), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'country_code'), names={'ref': 'us'}, extratags={}, admin_level=None, fromarea=True, isaddress=False, rank_address=4, distance=0.0) ] def test_lookup_in_postcode(apiobj, frontend): import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) apiobj.add_postcode(place_id=554, parent_place_id=152, postcode='34 425', country_code='gb', rank_search=20, 
rank_address=22, indexed_date=import_date, geometry='POINT(-9.45 5.6)') api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(554)) assert result is not None assert result.source_table.name == 'POSTCODE' assert result.category == ('place', 'postcode') assert result.centroid == (pytest.approx(-9.45), pytest.approx(5.6)) assert result.place_id == 554 assert result.parent_place_id == 152 assert result.linked_place_id is None assert result.osm_object is None assert result.admin_level == 15 assert result.names == {'ref': '34 425'} assert result.address is None assert result.extratags is None assert result.housenumber is None assert result.postcode is None assert result.wikipedia is None assert result.rank_search == 20 assert result.rank_address == 22 assert result.importance is None assert result.country_code == 'gb' assert result.indexed_date == import_date.replace(tzinfo=dt.timezone.utc) assert result.address_rows is None assert result.linked_rows is None assert result.parented_rows is None assert result.name_keywords is None assert result.address_keywords is None assert result.geometry == {'type': 'ST_Point'} def test_lookup_postcode_with_address_details(apiobj, frontend): apiobj.add_postcode(place_id=9000, parent_place_id=332, postcode='34 425', country_code='gb', rank_search=25, rank_address=25) apiobj.add_placex(place_id=332, osm_type='N', osm_id=3333, class_='place', type='suburb', name='Smallplace', country_code='gb', admin_level=13, rank_search=24, rank_address=23) apiobj.add_address_placex(332, fromarea=True, isaddress=True, place_id=1001, osm_type='N', osm_id=3334, class_='place', type='city', name='Bigplace', country_code='gb', rank_search=17, rank_address=16) api = frontend(apiobj, options={'details'}) result = api.details(napi.PlaceID(9000), address_details=True) assert result.address_rows == [ napi.AddressLine(place_id=9000, osm_object=None, category=('place', 'postcode'), names={'ref': '34 425'}, extratags={}, admin_level=15, 
fromarea=True, isaddress=True, rank_address=25, distance=0.0, local_name='34 425'), napi.AddressLine(place_id=332, osm_object=('N', 3333), category=('place', 'suburb'), names={'name': 'Smallplace'}, extratags={}, admin_level=13, fromarea=True, isaddress=True, rank_address=23, distance=0.0, local_name='Smallplace'), napi.AddressLine(place_id=1001, osm_object=('N', 3334), category=('place', 'city'), names={'name': 'Bigplace'}, extratags={}, admin_level=15, fromarea=True, isaddress=True, rank_address=16, distance=0.0, local_name='Bigplace'), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'country_code'), names={'ref': 'gb'}, extratags={}, admin_level=None, fromarea=True, isaddress=False, rank_address=4, distance=0.0) ] @pytest.mark.parametrize('objid', [napi.PlaceID(1736), napi.OsmID('W', 55), napi.OsmID('N', 55, 'amenity')]) def test_lookup_missing_object(apiobj, frontend, objid): apiobj.add_placex(place_id=1, osm_type='N', osm_id=55, class_='place', type='suburb') api = frontend(apiobj, options={'details'}) assert api.details(objid) is None @pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT)) def test_lookup_unsupported_geometry(apiobj, frontend, gtype): apiobj.add_placex(place_id=332) api = frontend(apiobj, options={'details'}) with pytest.raises(ValueError): api.details(napi.PlaceID(332), geometry_output=gtype)
26,488
Python
.py
488
38.64959
100
0.547133
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,659
test_helpers_v1.py
osm-search_Nominatim/test/python/api/test_helpers_v1.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the helper functions for v1 API. """ import pytest import nominatim_api.v1.helpers as helper @pytest.mark.parametrize('inp', ['', 'abc', '12 23', 'abc -78.90, 12.456 def', '40 N 60 W']) def test_extract_coords_no_coords(inp): query, x, y = helper.extract_coords_from_query(inp) assert query == inp assert x is None assert y is None def test_extract_coords_null_island(): assert ('', 0.0, 0.0) == helper.extract_coords_from_query('0.0 -0.0') def test_extract_coords_with_text_before(): assert ('abc', 12.456, -78.90) == helper.extract_coords_from_query('abc -78.90, 12.456') def test_extract_coords_with_text_after(): assert ('abc', 12.456, -78.90) == helper.extract_coords_from_query('-78.90, 12.456 abc') @pytest.mark.parametrize('inp', [' [12.456,-78.90] ', ' 12.456,-78.90 ']) def test_extract_coords_with_spaces(inp): assert ('', -78.90, 12.456) == helper.extract_coords_from_query(inp) @pytest.mark.parametrize('inp', ['40 26.767 N 79 58.933 W', '40° 26.767′ N 79° 58.933′ W', "40° 26.767' N 79° 58.933' W", "40° 26.767'\n" " N 79° 58.933' W", 'N 40 26.767, W 79 58.933', 'N 40°26.767′, W 79°58.933′', ' N 40°26.767′, W 79°58.933′', "N 40°26.767', W 79°58.933'", '40 26 46 N 79 58 56 W', '40° 26′ 46″ N 79° 58′ 56″ W', '40° 26′ 46.00″ N 79° 58′ 56.00″ W', '40°26′46″N 79°58′56″W', 'N 40 26 46 W 79 58 56', 'N 40° 26′ 46″, W 79° 58′ 56″', 'N 40° 26\' 46", W 79° 58\' 56"', 'N 40° 26\' 46", W 79° 58\' 56"', '40.446 -79.982', '40.446,-79.982', '40.446° N 79.982° W', 'N 40.446° W 79.982°', '[40.446 -79.982]', '[40.446, -79.982]', ' 40.446 , -79.982 ', ' 40.446 , -79.982 ', ' 40.446 , -79.982 ', ' 40.446 , -79.982 ']) def test_extract_coords_formats(inp): query, x, y = helper.extract_coords_from_query(inp) assert query == '' assert pytest.approx(x, abs=0.001) == 
-79.982 assert pytest.approx(y, abs=0.001) == 40.446 query, x, y = helper.extract_coords_from_query('foo bar ' + inp) assert query == 'foo bar' assert pytest.approx(x, abs=0.001) == -79.982 assert pytest.approx(y, abs=0.001) == 40.446 query, x, y = helper.extract_coords_from_query(inp + ' x') assert query == 'x' assert pytest.approx(x, abs=0.001) == -79.982 assert pytest.approx(y, abs=0.001) == 40.446 def test_extract_coords_formats_southeast(): query, x, y = helper.extract_coords_from_query('S 40 26.767, E 79 58.933') assert query == '' assert pytest.approx(x, abs=0.001) == 79.982 assert pytest.approx(y, abs=0.001) == -40.446 @pytest.mark.parametrize('inp', ['[shop=fish] foo bar', 'foo [shop=fish] bar', 'foo [shop=fish]bar', 'foo bar [shop=fish]']) def test_extract_category_good(inp): query, cls, typ = helper.extract_category_from_query(inp) assert query == 'foo bar' assert cls == 'shop' assert typ == 'fish' def test_extract_category_only(): assert helper.extract_category_from_query('[shop=market]') == ('', 'shop', 'market') @pytest.mark.parametrize('inp', ['house []', 'nothing', '[352]']) def test_extract_category_no_match(inp): assert helper.extract_category_from_query(inp) == (inp, None, None)
4,260
Python
.py
89
35.640449
94
0.511428
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,660
test_api_reverse.py
osm-search_Nominatim/test/python/api/test_api_reverse.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for reverse API call. These tests make sure that all Python code is correct and executable. Functional tests can be found in the BDD test suite. """ import json import pytest import nominatim_api as napi API_OPTIONS = {'reverse'} def test_reverse_rank_30(apiobj, frontend): apiobj.add_placex(place_id=223, class_='place', type='house', housenumber='1', centroid=(1.3, 0.7), geometry='POINT(1.3 0.7)') api = frontend(apiobj, options=API_OPTIONS) result = api.reverse((1.3, 0.7)) assert result is not None assert result.place_id == 223 @pytest.mark.parametrize('country', ['de', 'us']) def test_reverse_street(apiobj, frontend, country): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), country_code=country, geometry='LINESTRING(9.995 10, 10.005 10)') api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((9.995, 10)).place_id == 990 def test_reverse_ignore_unindexed(apiobj, frontend): apiobj.add_placex(place_id=223, class_='place', type='house', housenumber='1', indexed_status=2, centroid=(1.3, 0.7), geometry='POINT(1.3 0.7)') api = frontend(apiobj, options=API_OPTIONS) result = api.reverse((1.3, 0.7)) assert result is None @pytest.mark.parametrize('y,layer,place_id', [(0.7, napi.DataLayer.ADDRESS, 223), (0.70001, napi.DataLayer.POI, 224), (0.7, napi.DataLayer.ADDRESS | napi.DataLayer.POI, 224), (0.70001, napi.DataLayer.ADDRESS | napi.DataLayer.POI, 223), (0.7, napi.DataLayer.MANMADE, 225), (0.7, napi.DataLayer.RAILWAY, 226), (0.7, napi.DataLayer.NATURAL, 227), (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225), (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225), (5, napi.DataLayer.ADDRESS, 229)]) def 
test_reverse_rank_30_layers(apiobj, frontend, y, layer, place_id): apiobj.add_placex(place_id=223, osm_type='N', class_='place', type='house', housenumber='1', rank_address=30, rank_search=30, centroid=(1.3, 0.70001)) apiobj.add_placex(place_id=224, osm_type='N', class_='amenity', type='toilet', rank_address=30, rank_search=30, centroid=(1.3, 0.7)) apiobj.add_placex(place_id=225, osm_type='N', class_='man_made', type='tower', rank_address=0, rank_search=30, centroid=(1.3, 0.70003)) apiobj.add_placex(place_id=226, osm_type='N', class_='railway', type='station', rank_address=0, rank_search=30, centroid=(1.3, 0.70004)) apiobj.add_placex(place_id=227, osm_type='N', class_='natural', type='cave', rank_address=0, rank_search=30, centroid=(1.3, 0.70005)) apiobj.add_placex(place_id=229, class_='place', type='house', name={'addr:housename': 'Old Cottage'}, rank_address=30, rank_search=30, centroid=(1.3, 5)) api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((1.3, y), layers=layer).place_id == place_id def test_reverse_poi_layer_with_no_pois(apiobj, frontend): apiobj.add_placex(place_id=223, class_='place', type='house', housenumber='1', rank_address=30, rank_search=30, centroid=(1.3, 0.70001)) api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((1.3, 0.70001), max_rank=29, layers=napi.DataLayer.POI) is None @pytest.mark.parametrize('with_geom', [True, False]) def test_reverse_housenumber_on_street(apiobj, frontend, with_geom): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_placex(place_id=991, class_='place', type='house', parent_place_id=990, rank_search=30, rank_address=30, housenumber='23', centroid=(10.0, 10.00001)) apiobj.add_placex(place_id=1990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'Other Street'}, centroid=(10.0, 1.0), 
geometry='LINESTRING(9.995 1, 10.005 1)') apiobj.add_placex(place_id=1991, class_='place', type='house', parent_place_id=1990, rank_search=30, rank_address=30, housenumber='23', centroid=(10.0, 1.00001)) params = {'geometry_output': napi.GeometryFormat.TEXT} if with_geom else {} api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((10.0, 10.0), max_rank=30, **params).place_id == 991 assert api.reverse((10.0, 10.0), max_rank=27).place_id == 990 assert api.reverse((10.0, 10.00001), max_rank=30).place_id == 991 assert api.reverse((10.0, 1.0), **params).place_id == 1991 @pytest.mark.parametrize('with_geom', [True, False]) def test_reverse_housenumber_interpolation(apiobj, frontend, with_geom): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_placex(place_id=991, class_='place', type='house', parent_place_id=990, rank_search=30, rank_address=30, housenumber='23', centroid=(10.0, 10.00002)) apiobj.add_osmline(place_id=992, parent_place_id=990, startnumber=1, endnumber=3, step=1, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') apiobj.add_placex(place_id=1990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'Other Street'}, centroid=(10.0, 20.0), geometry='LINESTRING(9.995 20, 10.005 20)') apiobj.add_osmline(place_id=1992, parent_place_id=1990, startnumber=1, endnumber=3, step=1, centroid=(10.0, 20.00001), geometry='LINESTRING(9.995 20.00001, 10.005 20.00001)') params = {'geometry_output': napi.GeometryFormat.TEXT} if with_geom else {} api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((10.0, 10.0), **params).place_id == 992 assert api.reverse((10.0, 20.0), **params).place_id == 1992 def test_reverse_housenumber_point_interpolation(apiobj, frontend): apiobj.add_placex(place_id=990, class_='highway', type='service', 
rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_osmline(place_id=992, parent_place_id=990, startnumber=42, endnumber=42, step=1, centroid=(10.0, 10.00001), geometry='POINT(10.0 10.00001)') api = frontend(apiobj, options=API_OPTIONS) res = api.reverse((10.0, 10.0)) assert res.place_id == 992 assert res.housenumber == '42' def test_reverse_tiger_number(apiobj, frontend): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), country_code='us', geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_tiger(place_id=992, parent_place_id=990, startnumber=1, endnumber=3, step=1, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((10.0, 10.0)).place_id == 992 assert api.reverse((10.0, 10.00001)).place_id == 992 def test_reverse_point_tiger(apiobj, frontend): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), country_code='us', geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_tiger(place_id=992, parent_place_id=990, startnumber=1, endnumber=1, step=1, centroid=(10.0, 10.00001), geometry='POINT(10.0 10.00001)') api = frontend(apiobj, options=API_OPTIONS) res = api.reverse((10.0, 10.0)) assert res.place_id == 992 assert res.housenumber == '1' def test_reverse_low_zoom_address(apiobj, frontend): apiobj.add_placex(place_id=1001, class_='place', type='house', housenumber='1', rank_address=30, rank_search=30, centroid=(59.3, 80.70001)) apiobj.add_placex(place_id=1002, class_='place', type='town', name={'name': 'Town'}, rank_address=16, rank_search=16, centroid=(59.3, 80.70001), geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001, 59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""") api = 
frontend(apiobj, options=API_OPTIONS) assert api.reverse((59.30005, 80.7005)).place_id == 1001 assert api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002 def test_reverse_place_node_in_area(apiobj, frontend): apiobj.add_placex(place_id=1002, class_='place', type='town', name={'name': 'Town Area'}, rank_address=16, rank_search=16, centroid=(59.3, 80.70001), geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001, 59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""") apiobj.add_placex(place_id=1003, class_='place', type='suburb', name={'name': 'Suburb Point'}, osm_type='N', rank_address=18, rank_search=18, centroid=(59.30004, 80.70055)) api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((59.30004, 80.70055)).place_id == 1003 @pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225), (napi.DataLayer.RAILWAY, 226), (napi.DataLayer.NATURAL, 227), (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225), (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)]) def test_reverse_larger_area_layers(apiobj, frontend, layer, place_id): apiobj.add_placex(place_id=225, class_='man_made', type='dam', name={'name': 'Dam'}, rank_address=0, rank_search=25, centroid=(1.3, 0.70003)) apiobj.add_placex(place_id=226, class_='railway', type='yard', name={'name': 'Dam'}, rank_address=0, rank_search=20, centroid=(1.3, 0.70004)) apiobj.add_placex(place_id=227, class_='natural', type='spring', name={'name': 'Dam'}, rank_address=0, rank_search=16, centroid=(1.3, 0.70005)) api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((1.3, 0.7), layers=layer).place_id == place_id def test_reverse_country_lookup_no_objects(apiobj, frontend): apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((0.5, 0.5)) is None @pytest.mark.parametrize('rank', [4, 30]) @pytest.mark.parametrize('with_geom', [True, False]) def test_reverse_country_lookup_country_only(apiobj, frontend, rank, with_geom): 
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('yy', 'POLYGON((10 0, 10 1, 11 1, 11 0, 10 0))') apiobj.add_placex(place_id=225, class_='place', type='country', name={'name': 'My Country'}, rank_address=4, rank_search=4, country_code='xx', centroid=(0.7, 0.7)) params = {'max_rank': rank} if with_geom: params['geometry_output'] = napi.GeometryFormat.TEXT api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((0.5, 0.5), **params).place_id == 225 assert api.reverse((10.5, 0.5), **params) is None @pytest.mark.parametrize('with_geom', [True, False]) def test_reverse_country_lookup_place_node_inside(apiobj, frontend, with_geom): apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('yy', 'POLYGON((10 0, 10 1, 11 1, 11 0, 10 0))') apiobj.add_placex(place_id=225, class_='place', type='state', osm_type='N', name={'name': 'My State'}, rank_address=6, rank_search=6, country_code='xx', centroid=(0.5, 0.505)) apiobj.add_placex(place_id=425, class_='place', type='state', osm_type='N', name={'name': 'Other State'}, rank_address=6, rank_search=6, country_code='yy', centroid=(10.5, 0.505)) params = {'geometry_output': napi.GeometryFormat.KML} if with_geom else {} api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((0.5, 0.5), **params).place_id == 225 assert api.reverse((10.5, 0.5), **params).place_id == 425 @pytest.mark.parametrize('gtype', list(napi.GeometryFormat)) def test_reverse_geometry_output_placex(apiobj, frontend, gtype): apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_placex(place_id=1001, class_='place', type='house', housenumber='1', rank_address=30, rank_search=30, centroid=(59.3, 80.70001)) apiobj.add_placex(place_id=1003, class_='place', type='suburb', name={'name': 'Suburb Point'}, osm_type='N', rank_address=18, rank_search=18, country_code='xx', centroid=(0.5, 0.5)) api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((59.3, 80.70001), 
geometry_output=gtype).place_id == 1001 assert api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003 def test_reverse_simplified_geometry(apiobj, frontend): apiobj.add_placex(place_id=1001, class_='place', type='house', housenumber='1', rank_address=30, rank_search=30, centroid=(59.3, 80.70001)) api = frontend(apiobj, options=API_OPTIONS) details = dict(geometry_output=napi.GeometryFormat.GEOJSON, geometry_simplification=0.1) assert api.reverse((59.3, 80.70001), **details).place_id == 1001 def test_reverse_interpolation_geometry(apiobj, frontend): apiobj.add_osmline(place_id=992, parent_place_id=990, startnumber=1, endnumber=3, step=1, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') api = frontend(apiobj, options=API_OPTIONS) assert api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\ .geometry['text'] == 'POINT(10 10.00001)' def test_reverse_tiger_geometry(apiobj, frontend): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(10.0, 10.0), country_code='us', geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_tiger(place_id=992, parent_place_id=990, startnumber=1, endnumber=3, step=1, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') apiobj.add_placex(place_id=1000, class_='highway', type='service', rank_search=27, rank_address=27, name = {'name': 'My Street'}, centroid=(11.0, 11.0), country_code='us', geometry='LINESTRING(10.995 11, 11.005 11)') apiobj.add_tiger(place_id=1001, parent_place_id=1000, startnumber=1, endnumber=3, step=1, centroid=(11.0, 11.00001), geometry='LINESTRING(10.995 11.00001, 11.005 11.00001)') api = frontend(apiobj, options=API_OPTIONS) params = {'geometry_output': napi.GeometryFormat.GEOJSON} output = api.reverse((10.0, 10.0), **params) assert json.loads(output.geometry['geojson']) == {'coordinates': [10, 10.00001], 'type': 'Point'} output = api.reverse((11.0, 
11.0), **params) assert json.loads(output.geometry['geojson']) == {'coordinates': [11, 11.00001], 'type': 'Point'}
19,317
Python
.py
360
37.977778
110
0.527642
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,661
test_localization.py
osm-search_Nominatim/test/python/api/test_localization.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test functions for adapting results to the user's locale. """ import pytest from nominatim_api import Locales def test_display_name_empty_names(): l = Locales(['en', 'de']) assert l.display_name(None) == '' assert l.display_name({}) == '' def test_display_name_none_localized(): l = Locales() assert l.display_name({}) == '' assert l.display_name({'name:de': 'DE', 'name': 'ALL'}) == 'ALL' assert l.display_name({'ref': '34', 'name:de': 'DE'}) == '34' def test_display_name_localized(): l = Locales(['en', 'de']) assert l.display_name({}) == '' assert l.display_name({'name:de': 'DE', 'name': 'ALL'}) == 'DE' assert l.display_name({'ref': '34', 'name:de': 'DE'}) == 'DE' def test_display_name_preference(): l = Locales(['en', 'de']) assert l.display_name({}) == '' assert l.display_name({'name:de': 'DE', 'name:en': 'EN'}) == 'EN' assert l.display_name({'official_name:en': 'EN', 'name:de': 'DE'}) == 'DE' @pytest.mark.parametrize('langstr,langlist', [('fr', ['fr']), ('fr-FR', ['fr-FR', 'fr']), ('de,fr-FR', ['de', 'fr-FR', 'fr']), ('fr,de,fr-FR', ['fr', 'de', 'fr-FR']), ('en;q=0.5,fr', ['fr', 'en']), ('en;q=0.5,fr,en-US', ['fr', 'en-US', 'en']), ('en,fr;garbage,de', ['en', 'de'])]) def test_from_language_preferences(langstr, langlist): assert Locales.from_accept_languages(langstr).languages == langlist
1,783
Python
.py
40
37.125
78
0.542775
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,662
test_api_lookup.py
osm-search_Nominatim/test/python/api/test_api_lookup.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for lookup API call. """ import json import pytest import nominatim_api as napi def test_lookup_empty_list(apiobj, frontend): api = frontend(apiobj, options={'details'}) assert api.lookup([]) == [] def test_lookup_non_existing(apiobj, frontend): api = frontend(apiobj, options={'details'}) assert api.lookup((napi.PlaceID(332), napi.OsmID('W', 4), napi.OsmID('W', 4, 'highway'))) == [] @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4), napi.OsmID('W', 4, 'highway'))) def test_lookup_single_placex(apiobj, frontend, idobj): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name={'name': 'Road'}, address={'city': 'Barrow'}, extratags={'surface': 'paved'}, parent_place_id=34, linked_place_id=55, admin_level=15, country_code='gb', housenumber='4', postcode='34425', wikipedia='en:Faa', rank_search=27, rank_address=26, importance=0.01, centroid=(23, 34), geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') api = frontend(apiobj, options={'details'}) result = api.lookup([idobj]) assert len(result) == 1 result = result[0] assert result.source_table.name == 'PLACEX' assert result.category == ('highway', 'residential') assert result.centroid == (pytest.approx(23.0), pytest.approx(34.0)) assert result.place_id == 332 assert result.osm_object == ('W', 4) assert result.names == {'name': 'Road'} assert result.address == {'city': 'Barrow'} assert result.extratags == {'surface': 'paved'} assert result.housenumber == '4' assert result.postcode == '34425' assert result.wikipedia == 'en:Faa' assert result.rank_search == 27 assert result.rank_address == 26 assert result.importance == pytest.approx(0.01) assert result.country_code == 'gb' assert result.address_rows is None assert result.linked_rows is 
None assert result.parented_rows is None assert result.name_keywords is None assert result.address_keywords is None assert result.geometry == {} def test_lookup_multiple_places(apiobj, frontend): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name={'name': 'Road'}, address={'city': 'Barrow'}, extratags={'surface': 'paved'}, parent_place_id=34, linked_place_id=55, admin_level=15, country_code='gb', housenumber='4', postcode='34425', wikipedia='en:Faa', rank_search=27, rank_address=26, importance=0.01, centroid=(23, 34), geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') apiobj.add_osmline(place_id=4924, osm_id=9928, parent_place_id=12, startnumber=1, endnumber=4, step=1, country_code='gb', postcode='34425', address={'city': 'Big'}, geometry='LINESTRING(23 34, 23 35)') api = frontend(apiobj, options={'details'}) result = api.lookup((napi.OsmID('W', 1), napi.OsmID('W', 4), napi.OsmID('W', 9928))) assert len(result) == 2 assert set(r.place_id for r in result) == {332, 4924} @pytest.mark.parametrize('gtype', list(napi.GeometryFormat)) def test_simple_place_with_geometry(apiobj, frontend, gtype): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name={'name': 'Road'}, address={'city': 'Barrow'}, extratags={'surface': 'paved'}, parent_place_id=34, linked_place_id=55, admin_level=15, country_code='gb', housenumber='4', postcode='34425', wikipedia='en:Faa', rank_search=27, rank_address=26, importance=0.01, centroid=(23, 34), geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))') api = frontend(apiobj, options={'details'}) result = api.lookup([napi.OsmID('W', 4)], geometry_output=gtype) assert len(result) == 1 assert result[0].place_id == 332 if gtype == napi.GeometryFormat.NONE: assert list(result[0].geometry.keys()) == [] else: assert list(result[0].geometry.keys()) == [gtype.name.lower()] def test_simple_place_with_geometry_simplified(apiobj, frontend): 
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name={'name': 'Road'}, address={'city': 'Barrow'}, extratags={'surface': 'paved'}, parent_place_id=34, linked_place_id=55, admin_level=15, country_code='gb', housenumber='4', postcode='34425', wikipedia='en:Faa', rank_search=27, rank_address=26, importance=0.01, centroid=(23, 34), geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))') api = frontend(apiobj, options={'details'}) result = api.lookup([napi.OsmID('W', 4)], geometry_output=napi.GeometryFormat.GEOJSON, geometry_simplification=0.1) assert len(result) == 1 assert result[0].place_id == 332 geom = json.loads(result[0].geometry['geojson']) assert geom['type'] == 'Polygon' assert geom['coordinates'] == [[[23, 34], [23.1, 34], [23.1, 34.1], [23, 34]]]
6,247
Python
.py
128
36.476563
87
0.553254
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,663
test_api_types.py
osm-search_Nominatim/test/python/api/test_api_types.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for loading of parameter dataclasses. """ import pytest from nominatim_api.errors import UsageError import nominatim_api.types as typ def test_no_params_defaults(): params = typ.LookupDetails.from_kwargs({}) assert not params.parented_places assert params.geometry_simplification == 0.0 @pytest.mark.parametrize('k,v', [('geometry_output', 'a'), ('linked_places', 0), ('geometry_simplification', 'NaN')]) def test_bad_format_reverse(k, v): with pytest.raises(UsageError): params = typ.ReverseDetails.from_kwargs({k: v}) @pytest.mark.parametrize('rin,rout', [(-23, 0), (0, 0), (1, 1), (15, 15), (30, 30), (31, 30)]) def test_rank_params(rin, rout): params = typ.ReverseDetails.from_kwargs({'max_rank': rin}) assert params.max_rank == rout
1,104
Python
.py
27
34.555556
69
0.643592
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,664
test_api_status.py
osm-search_Nominatim/test/python/api/test_api_status.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the status API call. """ import datetime as dt import pytest from nominatim_db.version import NominatimVersion from nominatim_api.version import NOMINATIM_API_VERSION import nominatim_api as napi def test_status_no_extra_info(apiobj, frontend): api = frontend(apiobj) result = api.status() assert result.status == 0 assert result.message == 'OK' assert result.software_version == NOMINATIM_API_VERSION assert result.database_version is None assert result.data_updated is None def test_status_full(apiobj, frontend): import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc) apiobj.add_data('import_status', [{'lastimportdate': import_date}]) apiobj.add_data('properties', [{'property': 'database_version', 'value': '99.5.4-2'}]) api = frontend(apiobj) result = api.status() assert result.status == 0 assert result.message == 'OK' assert result.software_version == NOMINATIM_API_VERSION assert result.database_version == '99.5.4-2' assert result.data_updated == import_date def test_status_database_not_found(monkeypatch): monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=rgjdfkgjedkrgdfkngdfkg') api = napi.NominatimAPI() result = api.status() assert result.status == 700 assert result.message == 'Database connection failed' assert result.software_version == NOMINATIM_API_VERSION assert result.database_version is None assert result.data_updated is None
1,757
Python
.py
44
35.477273
81
0.720165
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,665
test_server_glue_v1.py
osm-search_Nominatim/test/python/api/test_server_glue_v1.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the Python web frameworks adaptor, v1 API. """ import json import xml.etree.ElementTree as ET import pytest from fake_adaptor import FakeAdaptor, FakeError, FakeResponse import nominatim_api.v1.server_glue as glue import nominatim_api as napi import nominatim_api.logging as loglib # ASGIAdaptor.get_int/bool() @pytest.mark.parametrize('func', ['get_int', 'get_bool']) def test_adaptor_get_int_missing_but_required(func): with pytest.raises(FakeError, match='^400 -- .*missing'): getattr(FakeAdaptor(), func)('something') @pytest.mark.parametrize('func, val', [('get_int', 23), ('get_bool', True)]) def test_adaptor_get_int_missing_with_default(func, val): assert getattr(FakeAdaptor(), func)('something', val) == val @pytest.mark.parametrize('inp', ['0', '234', '-4566953498567934876']) def test_adaptor_get_int_success(inp): assert FakeAdaptor(params={'foo': inp}).get_int('foo') == int(inp) assert FakeAdaptor(params={'foo': inp}).get_int('foo', 4) == int(inp) @pytest.mark.parametrize('inp', ['rs', '4.5', '6f']) def test_adaptor_get_int_bad_number(inp): with pytest.raises(FakeError, match='^400 -- .*must be a number'): FakeAdaptor(params={'foo': inp}).get_int('foo') @pytest.mark.parametrize('inp', ['1', 'true', 'whatever', 'false']) def test_adaptor_get_bool_trueish(inp): assert FakeAdaptor(params={'foo': inp}).get_bool('foo') def test_adaptor_get_bool_falsish(): assert not FakeAdaptor(params={'foo': '0'}).get_bool('foo') # ASGIAdaptor.parse_format() def test_adaptor_parse_format_use_default(): adaptor = FakeAdaptor() assert glue.parse_format(adaptor, napi.StatusResult, 'text') == 'text' assert adaptor.content_type == 'text/plain; charset=utf-8' def test_adaptor_parse_format_use_configured(): adaptor = FakeAdaptor(params={'format': 'json'}) assert 
glue.parse_format(adaptor, napi.StatusResult, 'text') == 'json' assert adaptor.content_type == 'application/json; charset=utf-8' def test_adaptor_parse_format_invalid_value(): adaptor = FakeAdaptor(params={'format': '@!#'}) with pytest.raises(FakeError, match='^400 -- .*must be one of'): glue.parse_format(adaptor, napi.StatusResult, 'text') # ASGIAdaptor.get_accepted_languages() def test_accepted_languages_from_param(): a = FakeAdaptor(params={'accept-language': 'de'}) assert glue.get_accepted_languages(a) == 'de' def test_accepted_languages_from_header(): a = FakeAdaptor(headers={'accept-language': 'de'}) assert glue.get_accepted_languages(a) == 'de' def test_accepted_languages_from_default(monkeypatch): monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'de') a = FakeAdaptor() assert glue.get_accepted_languages(a) == 'de' def test_accepted_languages_param_over_header(): a = FakeAdaptor(params={'accept-language': 'de'}, headers={'accept-language': 'en'}) assert glue.get_accepted_languages(a) == 'de' def test_accepted_languages_header_over_default(monkeypatch): monkeypatch.setenv('NOMINATIM_DEFAULT_LANGUAGE', 'en') a = FakeAdaptor(headers={'accept-language': 'de'}) assert glue.get_accepted_languages(a) == 'de' # ASGIAdaptor.raise_error() class TestAdaptorRaiseError: @pytest.fixture(autouse=True) def init_adaptor(self): self.adaptor = FakeAdaptor() glue.setup_debugging(self.adaptor) def run_raise_error(self, msg, status): with pytest.raises(FakeError) as excinfo: self.adaptor.raise_error(msg, status=status) return excinfo.value def test_without_content_set(self): err = self.run_raise_error('TEST', 404) assert self.adaptor.content_type == 'text/plain; charset=utf-8' assert err.msg == 'ERROR 404: TEST' assert err.status == 404 def test_json(self): self.adaptor.content_type = 'application/json; charset=utf-8' err = self.run_raise_error('TEST', 501) content = json.loads(err.msg)['error'] assert content['code'] == 501 assert content['message'] == 'TEST' def test_xml(self): 
self.adaptor.content_type = 'text/xml; charset=utf-8' err = self.run_raise_error('this!', 503) content = ET.fromstring(err.msg) assert content.tag == 'error' assert content.find('code').text == '503' assert content.find('message').text == 'this!' def test_raise_error_during_debug(): a = FakeAdaptor(params={'debug': '1'}) glue.setup_debugging(a) loglib.log().section('Ongoing') with pytest.raises(FakeError) as excinfo: a.raise_error('badstate') content = ET.fromstring(excinfo.value.msg) assert content.tag == 'html' assert '>Ongoing<' in excinfo.value.msg assert 'badstate' in excinfo.value.msg # ASGIAdaptor.build_response def test_build_response_without_content_type(): resp = glue.build_response(FakeAdaptor(), 'attention') assert isinstance(resp, FakeResponse) assert resp.status == 200 assert resp.output == 'attention' assert resp.content_type == 'text/plain; charset=utf-8' def test_build_response_with_status(): a = FakeAdaptor(params={'format': 'json'}) glue.parse_format(a, napi.StatusResult, 'text') resp = glue.build_response(a, 'stuff\nmore stuff', status=404) assert isinstance(resp, FakeResponse) assert resp.status == 404 assert resp.output == 'stuff\nmore stuff' assert resp.content_type == 'application/json; charset=utf-8' def test_build_response_jsonp_with_json(): a = FakeAdaptor(params={'format': 'json', 'json_callback': 'test.func'}) glue.parse_format(a, napi.StatusResult, 'text') resp = glue.build_response(a, '{}') assert isinstance(resp, FakeResponse) assert resp.status == 200 assert resp.output == 'test.func({})' assert resp.content_type == 'application/javascript; charset=utf-8' def test_build_response_jsonp_without_json(): a = FakeAdaptor(params={'format': 'text', 'json_callback': 'test.func'}) glue.parse_format(a, napi.StatusResult, 'text') resp = glue.build_response(a, '{}') assert isinstance(resp, FakeResponse) assert resp.status == 200 assert resp.output == '{}' assert resp.content_type == 'text/plain; charset=utf-8' @pytest.mark.parametrize('param', 
['alert(); func', '\\n', '', 'a b']) def test_build_response_jsonp_bad_format(param): a = FakeAdaptor(params={'format': 'json', 'json_callback': param}) glue.parse_format(a, napi.StatusResult, 'text') with pytest.raises(FakeError, match='^400 -- .*Invalid'): glue.build_response(a, '{}') # status_endpoint() class TestStatusEndpoint: @pytest.fixture(autouse=True) def patch_status_func(self, monkeypatch): async def _status(*args, **kwargs): return self.status monkeypatch.setattr(napi.NominatimAPIAsync, 'status', _status) @pytest.mark.asyncio async def test_status_without_params(self): a = FakeAdaptor() self.status = napi.StatusResult(0, 'foo') resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a) assert isinstance(resp, FakeResponse) assert resp.status == 200 assert resp.content_type == 'text/plain; charset=utf-8' @pytest.mark.asyncio async def test_status_with_error(self): a = FakeAdaptor() self.status = napi.StatusResult(405, 'foo') resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a) assert isinstance(resp, FakeResponse) assert resp.status == 500 assert resp.content_type == 'text/plain; charset=utf-8' @pytest.mark.asyncio async def test_status_json_with_error(self): a = FakeAdaptor(params={'format': 'json'}) self.status = napi.StatusResult(405, 'foo') resp = await glue.status_endpoint(napi.NominatimAPIAsync(), a) assert isinstance(resp, FakeResponse) assert resp.status == 200 assert resp.content_type == 'application/json; charset=utf-8' @pytest.mark.asyncio async def test_status_bad_format(self): a = FakeAdaptor(params={'format': 'foo'}) self.status = napi.StatusResult(0, 'foo') with pytest.raises(FakeError): await glue.status_endpoint(napi.NominatimAPIAsync(), a) # details_endpoint() class TestDetailsEndpoint: @pytest.fixture(autouse=True) def patch_lookup_func(self, monkeypatch): self.result = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) self.lookup_args = [] async def _lookup(*args, **kwargs): 
self.lookup_args.extend(args[1:]) return self.result monkeypatch.setattr(napi.NominatimAPIAsync, 'details', _lookup) @pytest.mark.asyncio async def test_details_no_params(self): a = FakeAdaptor() with pytest.raises(FakeError, match='^400 -- .*Missing'): await glue.details_endpoint(napi.NominatimAPIAsync(), a) @pytest.mark.asyncio async def test_details_by_place_id(self): a = FakeAdaptor(params={'place_id': '4573'}) await glue.details_endpoint(napi.NominatimAPIAsync(), a) assert self.lookup_args[0].place_id == 4573 @pytest.mark.asyncio async def test_details_by_osm_id(self): a = FakeAdaptor(params={'osmtype': 'N', 'osmid': '45'}) await glue.details_endpoint(napi.NominatimAPIAsync(), a) assert self.lookup_args[0].osm_type == 'N' assert self.lookup_args[0].osm_id == 45 assert self.lookup_args[0].osm_class is None @pytest.mark.asyncio async def test_details_with_debugging(self): a = FakeAdaptor(params={'osmtype': 'N', 'osmid': '45', 'debug': '1'}) resp = await glue.details_endpoint(napi.NominatimAPIAsync(), a) content = ET.fromstring(resp.output) assert resp.content_type == 'text/html; charset=utf-8' assert content.tag == 'html' @pytest.mark.asyncio async def test_details_no_result(self): a = FakeAdaptor(params={'place_id': '4573'}) self.result = None with pytest.raises(FakeError, match='^404 -- .*found'): await glue.details_endpoint(napi.NominatimAPIAsync(), a) # reverse_endpoint() class TestReverseEndPoint: @pytest.fixture(autouse=True) def patch_reverse_func(self, monkeypatch): self.result = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) async def _reverse(*args, **kwargs): return self.result monkeypatch.setattr(napi.NominatimAPIAsync, 'reverse', _reverse) @pytest.mark.asyncio @pytest.mark.parametrize('params', [{}, {'lat': '3.4'}, {'lon': '6.7'}]) async def test_reverse_no_params(self, params): a = FakeAdaptor() a.params = params a.params['format'] = 'xml' with pytest.raises(FakeError, match='^400 -- (?s:.*)missing'): await 
glue.reverse_endpoint(napi.NominatimAPIAsync(), a) @pytest.mark.asyncio @pytest.mark.parametrize('params', [{'lat': '45.6', 'lon': '4563'}]) async def test_reverse_success(self, params): a = FakeAdaptor() a.params = params a.params['format'] = 'json' res = await glue.reverse_endpoint(napi.NominatimAPIAsync(), a) assert res == '' @pytest.mark.asyncio async def test_reverse_success(self): a = FakeAdaptor() a.params['lat'] = '56.3' a.params['lon'] = '6.8' assert await glue.reverse_endpoint(napi.NominatimAPIAsync(), a) @pytest.mark.asyncio async def test_reverse_from_search(self): a = FakeAdaptor() a.params['q'] = '34.6 2.56' a.params['format'] = 'json' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 # lookup_endpoint() class TestLookupEndpoint: @pytest.fixture(autouse=True) def patch_lookup_func(self, monkeypatch): self.results = [napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0))] async def _lookup(*args, **kwargs): return napi.SearchResults(self.results) monkeypatch.setattr(napi.NominatimAPIAsync, 'lookup', _lookup) @pytest.mark.asyncio async def test_lookup_no_params(self): a = FakeAdaptor() a.params['format'] = 'json' res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a) assert res.output == '[]' @pytest.mark.asyncio @pytest.mark.parametrize('param', ['w', 'bad', '']) async def test_lookup_bad_params(self, param): a = FakeAdaptor() a.params['format'] = 'json' a.params['osm_ids'] = f'W34,{param},N33333' res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 @pytest.mark.asyncio @pytest.mark.parametrize('param', ['p234234', '4563']) async def test_lookup_bad_osm_type(self, param): a = FakeAdaptor() a.params['format'] = 'json' a.params['osm_ids'] = f'W34,{param},N33333' res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 @pytest.mark.asyncio async def test_lookup_working(self): 
a = FakeAdaptor() a.params['format'] = 'json' a.params['osm_ids'] = 'N23,W34' res = await glue.lookup_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 # search_endpoint() class TestSearchEndPointSearch: @pytest.fixture(autouse=True) def patch_lookup_func(self, monkeypatch): self.results = [napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0))] async def _search(*args, **kwargs): return napi.SearchResults(self.results) monkeypatch.setattr(napi.NominatimAPIAsync, 'search', _search) @pytest.mark.asyncio async def test_search_free_text(self): a = FakeAdaptor() a.params['q'] = 'something' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 @pytest.mark.asyncio async def test_search_free_text_xml(self): a = FakeAdaptor() a.params['q'] = 'something' a.params['format'] = 'xml' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert res.status == 200 assert res.output.index('something') > 0 @pytest.mark.asyncio async def test_search_free_and_structured(self): a = FakeAdaptor() a.params['q'] = 'something' a.params['city'] = 'ignored' with pytest.raises(FakeError, match='^400 -- .*cannot be used together'): res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) @pytest.mark.asyncio @pytest.mark.parametrize('dedupe,numres', [(True, 1), (False, 2)]) async def test_search_dedupe(self, dedupe, numres): self.results = self.results * 2 a = FakeAdaptor() a.params['q'] = 'something' if not dedupe: a.params['dedupe'] = '0' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == numres class TestSearchEndPointSearchAddress: @pytest.fixture(autouse=True) def patch_lookup_func(self, monkeypatch): self.results = [napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0))] async def _search(*args, **kwargs): return napi.SearchResults(self.results) monkeypatch.setattr(napi.NominatimAPIAsync, 
'search_address', _search) @pytest.mark.asyncio async def test_search_structured(self): a = FakeAdaptor() a.params['street'] = 'something' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1 class TestSearchEndPointSearchCategory: @pytest.fixture(autouse=True) def patch_lookup_func(self, monkeypatch): self.results = [napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0))] async def _search(*args, **kwargs): return napi.SearchResults(self.results) monkeypatch.setattr(napi.NominatimAPIAsync, 'search_category', _search) @pytest.mark.asyncio async def test_search_category(self): a = FakeAdaptor() a.params['q'] = '[shop=fog]' res = await glue.search_endpoint(napi.NominatimAPIAsync(), a) assert len(json.loads(res.output)) == 1
17,422
Python
.py
372
38.75
81
0.64376
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,666
test_export.py
osm-search_Nominatim/test/python/api/test_export.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for export CLI function. """ import pytest import nominatim_db.cli @pytest.fixture def run_export(tmp_path, capsys): def _exec(args): assert 0 == nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE', cli_args=['export', '--project-dir', str(tmp_path)] + args) return capsys.readouterr().out.split('\r\n') return _exec @pytest.fixture(autouse=True) def setup_database_with_context(apiobj): apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, class_='highway', type='residential', name='Street', country_code='pl', postcode='55674', rank_search=27, rank_address=26) apiobj.add_address_placex(332, fromarea=False, isaddress=False, distance=0.0034, place_id=1000, osm_type='N', osm_id=3333, class_='place', type='suburb', name='Smallplace', country_code='pl', admin_level=13, rank_search=24, rank_address=23) apiobj.add_address_placex(332, fromarea=True, isaddress=True, place_id=1001, osm_type='N', osm_id=3334, class_='place', type='city', name='Bigplace', country_code='pl', rank_search=17, rank_address=16) def test_export_default(run_export): csv = run_export([]) assert csv == ['street,suburb,city,county,state,country', 'Street,,Bigplace,,,', ''] def test_export_output_type(run_export): csv = run_export(['--output-type', 'city']) assert csv == ['street,suburb,city,county,state,country', ',,Bigplace,,,', ''] def test_export_output_format(run_export): csv = run_export(['--output-format', 'placeid;street;nothing;postcode']) assert csv == ['placeid,street,nothing,postcode', '332,Street,,55674', ''] def test_export_restrict_to_node_good(run_export): csv = run_export(['--restrict-to-osm-node', '3334']) assert csv == ['street,suburb,city,county,state,country', 'Street,,Bigplace,,,', ''] def test_export_restrict_to_node_not_address(run_export): csv = 
run_export(['--restrict-to-osm-node', '3333']) assert csv == ['street,suburb,city,county,state,country', '']
2,640
Python
.py
51
40.333333
98
0.58622
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,667
test_result_formatting_v1_reverse.py
osm-search_Nominatim/test/python/api/test_result_formatting_v1_reverse.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for formatting reverse results for the V1 API. These test only ensure that the Python code is correct. For functional tests see BDD test suite. """ import json import xml.etree.ElementTree as ET import pytest from nominatim_api.v1.format import dispatch as v1_format import nominatim_api as napi FORMATS = ['json', 'jsonv2', 'geojson', 'geocodejson', 'xml'] @pytest.mark.parametrize('fmt', FORMATS) def test_format_reverse_minimal(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('amenity', 'post_box'), napi.Point(0.3, -8.9)) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {}) if fmt == 'xml': root = ET.fromstring(raw) assert root.tag == 'reversegeocode' else: result = json.loads(raw) assert isinstance(result, dict) @pytest.mark.parametrize('fmt', FORMATS) def test_format_reverse_no_result(fmt): raw = v1_format.format_result(napi.ReverseResults(), fmt, {}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('error').text == 'Unable to geocode' else: assert json.loads(raw) == {'error': 'Unable to geocode'} @pytest.mark.parametrize('fmt', FORMATS) def test_format_reverse_with_osm_id(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('amenity', 'post_box'), napi.Point(0.3, -8.9), place_id=5564, osm_object=('N', 23)) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {}) if fmt == 'xml': root = ET.fromstring(raw).find('result') assert root.attrib['osm_type'] == 'node' assert root.attrib['osm_id'] == '23' else: result = json.loads(raw) if fmt == 'geocodejson': props = result['features'][0]['properties']['geocoding'] elif fmt == 'geojson': props = result['features'][0]['properties'] else: props = result assert props['osm_type'] == 'node' assert props['osm_id'] == 23 
@pytest.mark.parametrize('fmt', FORMATS) def test_format_reverse_with_address(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), country_code='fe', address_rows=napi.AddressLines([ napi.AddressLine(place_id=None, osm_object=None, category=('place', 'county'), names={'name': 'Hello'}, extratags=None, admin_level=5, fromarea=False, isaddress=True, rank_address=10, distance=0.0), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'county'), names={'name': 'ByeBye'}, extratags=None, admin_level=5, fromarea=False, isaddress=False, rank_address=10, distance=0.0) ])) reverse.localize(napi.Locales()) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'addressdetails': True}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('addressparts').find('county').text == 'Hello' else: result = json.loads(raw) assert isinstance(result, dict) if fmt == 'geocodejson': props = result['features'][0]['properties']['geocoding'] assert 'admin' in props assert props['county'] == 'Hello' else: if fmt == 'geojson': props = result['features'][0]['properties'] else: props = result assert 'address' in props def test_format_reverse_geocodejson_special_parts(): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'house'), napi.Point(1.0, 2.0), place_id=33, country_code='fe', address_rows=napi.AddressLines([ napi.AddressLine(place_id=None, osm_object=None, category=('place', 'house_number'), names={'ref': '1'}, extratags=None, admin_level=15, fromarea=False, isaddress=True, rank_address=10, distance=0.0), napi.AddressLine(place_id=None, osm_object=None, category=('place', 'postcode'), names={'ref': '99446'}, extratags=None, admin_level=11, fromarea=False, isaddress=True, rank_address=10, distance=0.0), napi.AddressLine(place_id=33, osm_object=None, category=('place', 'county'), names={'name': 'Hello'}, extratags=None, admin_level=5, fromarea=False, isaddress=True, rank_address=10, 
distance=0.0) ])) reverse.localize(napi.Locales()) raw = v1_format.format_result(napi.ReverseResults([reverse]), 'geocodejson', {'addressdetails': True}) props = json.loads(raw)['features'][0]['properties']['geocoding'] assert props['housenumber'] == '1' assert props['postcode'] == '99446' assert 'county' not in props @pytest.mark.parametrize('fmt', FORMATS) def test_format_reverse_with_address_none(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), address_rows=napi.AddressLines()) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'addressdetails': True}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('addressparts') is None else: result = json.loads(raw) assert isinstance(result, dict) if fmt == 'geocodejson': props = result['features'][0]['properties']['geocoding'] print(props) assert 'admin' in props else: if fmt == 'geojson': props = result['features'][0]['properties'] else: props = result assert 'address' in props @pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml']) def test_format_reverse_with_extratags(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), extratags={'one': 'A', 'two':'B'}) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'extratags': True}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('extratags').find('tag').attrib['key'] == 'one' else: result = json.loads(raw) if fmt == 'geojson': extra = result['features'][0]['properties']['extratags'] else: extra = result['extratags'] assert extra == {'one': 'A', 'two':'B'} @pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml']) def test_format_reverse_with_extratags_none(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'extratags': True}) if fmt == 'xml': root = ET.fromstring(raw) assert 
root.find('extratags') is not None else: result = json.loads(raw) if fmt == 'geojson': extra = result['features'][0]['properties']['extratags'] else: extra = result['extratags'] assert extra is None @pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml']) def test_format_reverse_with_namedetails_with_name(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), names={'name': 'A', 'ref':'1'}) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'namedetails': True}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('namedetails').find('name').text == 'A' else: result = json.loads(raw) if fmt == 'geojson': extra = result['features'][0]['properties']['namedetails'] else: extra = result['namedetails'] assert extra == {'name': 'A', 'ref':'1'} @pytest.mark.parametrize('fmt', ['json', 'jsonv2', 'geojson', 'xml']) def test_format_reverse_with_namedetails_without_name(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) raw = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'namedetails': True}) if fmt == 'xml': root = ET.fromstring(raw) assert root.find('namedetails') is not None else: result = json.loads(raw) if fmt == 'geojson': extra = result['features'][0]['properties']['namedetails'] else: extra = result['namedetails'] assert extra is None @pytest.mark.parametrize('fmt', ['json', 'jsonv2']) def test_search_details_with_icon_available(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('amenity', 'restaurant'), napi.Point(1.0, 2.0)) result = v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'icon_base_url': 'foo'}) js = json.loads(result) assert js['icon'] == 'foo/food_restaurant.p.20.png' @pytest.mark.parametrize('fmt', ['json', 'jsonv2']) def test_search_details_with_icon_not_available(fmt): reverse = napi.ReverseResult(napi.SourceTable.PLACEX, ('amenity', 'tree'), napi.Point(1.0, 2.0)) result = 
v1_format.format_result(napi.ReverseResults([reverse]), fmt, {'icon_base_url': 'foo'}) assert 'icon' not in json.loads(result)
13,028
Python
.py
262
29.805344
87
0.450689
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,668
test_result_formatting_v1.py
osm-search_Nominatim/test/python/api/test_result_formatting_v1.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for formatting results for the V1 API. These test only ensure that the Python code is correct. For functional tests see BDD test suite. """ import datetime as dt import json import pytest from nominatim_api.v1.format import dispatch as v1_format import nominatim_api as napi STATUS_FORMATS = {'text', 'json'} # StatusResult def test_status_format_list(): assert set(v1_format.list_formats(napi.StatusResult)) == STATUS_FORMATS @pytest.mark.parametrize('fmt', list(STATUS_FORMATS)) def test_status_supported(fmt): assert v1_format.supports_format(napi.StatusResult, fmt) def test_status_unsupported(): assert not v1_format.supports_format(napi.StatusResult, 'gagaga') def test_status_format_text(): assert v1_format.format_result(napi.StatusResult(0, 'message here'), 'text', {}) == 'OK' def test_status_format_text(): assert v1_format.format_result(napi.StatusResult(500, 'message here'), 'text', {}) == 'ERROR: message here' def test_status_format_json_minimal(): status = napi.StatusResult(700, 'Bad format.') result = v1_format.format_result(status, 'json', {}) assert result == \ f'{{"status":700,"message":"Bad format.","software_version":"{napi.__version__}"}}' def test_status_format_json_full(): status = napi.StatusResult(0, 'OK') status.data_updated = dt.datetime(2010, 2, 7, 20, 20, 3, 0, tzinfo=dt.timezone.utc) status.database_version = '5.6' result = v1_format.format_result(status, 'json', {}) assert result == \ f'{{"status":0,"message":"OK","data_updated":"2010-02-07T20:20:03+00:00","software_version":"{napi.__version__}","database_version":"5.6"}}' # DetailedResult def test_search_details_minimal(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) result = v1_format.format_result(search, 'json', {}) assert 
json.loads(result) == \ {'category': 'place', 'type': 'thing', 'admin_level': 15, 'names': {}, 'localname': '', 'calculated_importance': pytest.approx(0.00001), 'rank_address': 30, 'rank_search': 30, 'isarea': False, 'addresstags': {}, 'extratags': {}, 'centroid': {'type': 'Point', 'coordinates': [1.0, 2.0]}, 'geometry': {'type': 'Point', 'coordinates': [1.0, 2.0]}, } def test_search_details_full(): import_date = dt.datetime(2010, 2, 7, 20, 20, 3, 0, tzinfo=dt.timezone.utc) search = napi.DetailedResult( source_table=napi.SourceTable.PLACEX, category=('amenity', 'bank'), centroid=napi.Point(56.947, -87.44), place_id=37563, parent_place_id=114, linked_place_id=55693, osm_object=('W', 442100), admin_level=14, names={'name': 'Bank', 'name:fr': 'Banque'}, address={'city': 'Niento', 'housenumber': ' 3'}, extratags={'atm': 'yes'}, housenumber='3', postcode='556 X23', wikipedia='en:Bank', rank_address=29, rank_search=28, importance=0.0443, country_code='ll', indexed_date = import_date ) search.localize(napi.Locales()) result = v1_format.format_result(search, 'json', {}) assert json.loads(result) == \ {'place_id': 37563, 'parent_place_id': 114, 'osm_type': 'W', 'osm_id': 442100, 'category': 'amenity', 'type': 'bank', 'admin_level': 14, 'localname': 'Bank', 'names': {'name': 'Bank', 'name:fr': 'Banque'}, 'addresstags': {'city': 'Niento', 'housenumber': ' 3'}, 'housenumber': '3', 'calculated_postcode': '556 X23', 'country_code': 'll', 'indexed_date': '2010-02-07T20:20:03+00:00', 'importance': pytest.approx(0.0443), 'calculated_importance': pytest.approx(0.0443), 'extratags': {'atm': 'yes'}, 'calculated_wikipedia': 'en:Bank', 'rank_address': 29, 'rank_search': 28, 'isarea': False, 'centroid': {'type': 'Point', 'coordinates': [56.947, -87.44]}, 'geometry': {'type': 'Point', 'coordinates': [56.947, -87.44]}, } @pytest.mark.parametrize('gtype,isarea', [('ST_Point', False), ('ST_LineString', False), ('ST_Polygon', True), ('ST_MultiPolygon', True)]) def 
test_search_details_no_geometry(gtype, isarea): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), geometry={'type': gtype}) result = v1_format.format_result(search, 'json', {}) js = json.loads(result) assert js['geometry'] == {'type': 'Point', 'coordinates': [1.0, 2.0]} assert js['isarea'] == isarea def test_search_details_with_geometry(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), geometry={'geojson': '{"type":"Point","coordinates":[56.947,-87.44]}'}) result = v1_format.format_result(search, 'json', {}) js = json.loads(result) assert js['geometry'] == {'type': 'Point', 'coordinates': [56.947, -87.44]} assert js['isarea'] == False def test_search_details_with_icon_available(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('amenity', 'restaurant'), napi.Point(1.0, 2.0)) result = v1_format.format_result(search, 'json', {'icon_base_url': 'foo'}) js = json.loads(result) assert js['icon'] == 'foo/food_restaurant.p.20.png' def test_search_details_with_icon_not_available(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('amenity', 'tree'), napi.Point(1.0, 2.0)) result = v1_format.format_result(search, 'json', {'icon_base_url': 'foo'}) js = json.loads(result) assert 'icon' not in js def test_search_details_with_address_minimal(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), address_rows=[ napi.AddressLine(place_id=None, osm_object=None, category=('bnd', 'note'), names={}, extratags=None, admin_level=None, fromarea=False, isaddress=False, rank_address=10, distance=0.0) ]) result = v1_format.format_result(search, 'json', {}) js = json.loads(result) assert js['address'] == [{'localname': '', 'class': 'bnd', 'type': 'note', 'rank_address': 10, 'distance': 0.0, 'isaddress': False}] @pytest.mark.parametrize('field,outfield', [('address_rows', 'address'), ('linked_rows', 'linked_places'), ('parented_rows', 
'hierarchy') ]) def test_search_details_with_further_infos(field, outfield): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0)) setattr(search, field, [napi.AddressLine(place_id=3498, osm_object=('R', 442), category=('bnd', 'note'), names={'name': 'Trespass'}, extratags={'access': 'no', 'place_type': 'spec'}, admin_level=4, fromarea=True, isaddress=True, rank_address=10, distance=0.034) ]) result = v1_format.format_result(search, 'json', {}) js = json.loads(result) assert js[outfield] == [{'localname': 'Trespass', 'place_id': 3498, 'osm_id': 442, 'osm_type': 'R', 'place_type': 'spec', 'class': 'bnd', 'type': 'note', 'admin_level': 4, 'rank_address': 10, 'distance': 0.034, 'isaddress': True}] def test_search_details_grouped_hierarchy(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), parented_rows = [napi.AddressLine(place_id=3498, osm_object=('R', 442), category=('bnd', 'note'), names={'name': 'Trespass'}, extratags={'access': 'no', 'place_type': 'spec'}, admin_level=4, fromarea=True, isaddress=True, rank_address=10, distance=0.034) ]) result = v1_format.format_result(search, 'json', {'group_hierarchy': True}) js = json.loads(result) assert js['hierarchy'] == {'note': [{'localname': 'Trespass', 'place_id': 3498, 'osm_id': 442, 'osm_type': 'R', 'place_type': 'spec', 'class': 'bnd', 'type': 'note', 'admin_level': 4, 'rank_address': 10, 'distance': 0.034, 'isaddress': True}]} def test_search_details_keywords_name(): search = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), name_keywords=[ napi.WordInfo(23, 'foo', 'mefoo'), napi.WordInfo(24, 'foo', 'bafoo')]) result = v1_format.format_result(search, 'json', {'keywords': True}) js = json.loads(result) assert js['keywords'] == {'name': [{'id': 23, 'token': 'foo'}, {'id': 24, 'token': 'foo'}], 'address': []} def test_search_details_keywords_address(): search = 
napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, 2.0), address_keywords=[ napi.WordInfo(23, 'foo', 'mefoo'), napi.WordInfo(24, 'foo', 'bafoo')]) result = v1_format.format_result(search, 'json', {'keywords': True}) js = json.loads(result) assert js['keywords'] == {'address': [{'id': 23, 'token': 'foo'}, {'id': 24, 'token': 'foo'}], 'name': []}
13,150
Python
.py
259
31.061776
151
0.439947
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,669
test_token_assignment.py
osm-search_Nominatim/test/python/api/search/test_token_assignment.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for creation of token assignments from tokenized queries. """ import pytest from nominatim_api.search.query import QueryStruct, Phrase, PhraseType, BreakType, TokenType, TokenRange, Token from nominatim_api.search.token_assignment import yield_token_assignments, TokenAssignment, PENALTY_TOKENCHANGE class MyToken(Token): def get_category(self): return 'this', 'that' def make_query(*args): q = QueryStruct([Phrase(args[0][1], '')]) dummy = MyToken(penalty=3.0, token=45, count=1, addr_count=1, lookup_word='foo') for btype, ptype, _ in args[1:]: q.add_node(btype, ptype) q.add_node(BreakType.END, PhraseType.NONE) for start, t in enumerate(args): for end, ttype in t[2]: q.add_token(TokenRange(start, end), ttype, dummy) return q def check_assignments(actual, *expected): todo = list(expected) for assignment in actual: assert assignment in todo, f"Unexpected assignment: {assignment}" todo.remove(assignment) assert not todo, f"Missing assignments: {expected}" def test_query_with_missing_tokens(): q = QueryStruct([Phrase(PhraseType.NONE, '')]) q.add_node(BreakType.END, PhraseType.NONE) assert list(yield_token_assignments(q)) == [] def test_one_word_query(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL), (1, TokenType.WORD), (1, TokenType.HOUSENUMBER)])) res = list(yield_token_assignments(q)) assert res == [TokenAssignment(name=TokenRange(0, 1))] def test_single_postcode(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.POSTCODE)])) res = list(yield_token_assignments(q)) assert res == [TokenAssignment(postcode=TokenRange(0, 1))] def test_single_country_name(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.COUNTRY)])) res = list(yield_token_assignments(q)) assert res == 
[TokenAssignment(country=TokenRange(0, 1))] def test_single_word_poi_search(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.NEAR_ITEM), (1, TokenType.QUALIFIER)])) res = list(yield_token_assignments(q)) assert res == [TokenAssignment(near_item=TokenRange(0, 1))] @pytest.mark.parametrize('btype', [BreakType.WORD, BreakType.PART, BreakType.TOKEN]) def test_multiple_simple_words(btype): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (btype, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (btype, PhraseType.NONE, [(3, TokenType.PARTIAL)])) penalty = PENALTY_TOKENCHANGE[btype] check_assignments(yield_token_assignments(q), TokenAssignment(name=TokenRange(0, 3)), TokenAssignment(penalty=penalty, name=TokenRange(0, 2), address=[TokenRange(2, 3)]), TokenAssignment(penalty=penalty, name=TokenRange(0, 1), address=[TokenRange(1, 3)]), TokenAssignment(penalty=penalty, name=TokenRange(1, 3), address=[TokenRange(0, 1)]), TokenAssignment(penalty=penalty, name=TokenRange(2, 3), address=[TokenRange(0, 2)]) ) def test_multiple_words_respect_phrase_break(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(name=TokenRange(0, 1), address=[TokenRange(1, 2)]), TokenAssignment(name=TokenRange(1, 2), address=[TokenRange(0, 1)])) def test_housenumber_and_street(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(name=TokenRange(1, 2), housenumber=TokenRange(0, 1)), TokenAssignment(address=[TokenRange(1, 2)], housenumber=TokenRange(0, 1))) def test_housenumber_and_street_backwards(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)])) 
check_assignments(yield_token_assignments(q), TokenAssignment(name=TokenRange(0, 1), housenumber=TokenRange(1, 2)), TokenAssignment(address=[TokenRange(0, 1)], housenumber=TokenRange(1, 2))) def test_housenumber_and_postcode(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(4, TokenType.POSTCODE)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=pytest.approx(0.3), name=TokenRange(0, 1), housenumber=TokenRange(1, 2), address=[TokenRange(2, 3)], postcode=TokenRange(3, 4)), TokenAssignment(penalty=pytest.approx(0.3), housenumber=TokenRange(1, 2), address=[TokenRange(0, 1), TokenRange(2, 3)], postcode=TokenRange(3, 4))) def test_postcode_and_housenumber(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.POSTCODE)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(4, TokenType.HOUSENUMBER)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=pytest.approx(0.3), name=TokenRange(2, 3), housenumber=TokenRange(3, 4), address=[TokenRange(0, 1)], postcode=TokenRange(1, 2)), TokenAssignment(penalty=pytest.approx(0.3), housenumber=TokenRange(3, 4), address=[TokenRange(0, 1), TokenRange(2, 3)], postcode=TokenRange(1, 2))) def test_country_housenumber_postcode(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.COUNTRY)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(4, TokenType.POSTCODE)])) check_assignments(yield_token_assignments(q)) @pytest.mark.parametrize('ttype', [TokenType.POSTCODE, TokenType.COUNTRY, TokenType.NEAR_ITEM, TokenType.QUALIFIER]) def 
test_housenumber_with_only_special_terms(ttype): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(2, ttype)])) check_assignments(yield_token_assignments(q)) @pytest.mark.parametrize('ttype', [TokenType.POSTCODE, TokenType.HOUSENUMBER, TokenType.COUNTRY]) def test_multiple_special_tokens(ttype): q = make_query((BreakType.START, PhraseType.NONE, [(1, ttype)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(3, ttype)])) check_assignments(yield_token_assignments(q)) def test_housenumber_many_phrases(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(3, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(4, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(5, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(4, 5), housenumber=TokenRange(3, 4),\ address=[TokenRange(0, 1), TokenRange(1, 2), TokenRange(2, 3)]), TokenAssignment(penalty=0.1, housenumber=TokenRange(3, 4),\ address=[TokenRange(0, 1), TokenRange(1, 2), TokenRange(2, 3), TokenRange(4, 5)])) def test_country_at_beginning(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.COUNTRY)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(1, 2), country=TokenRange(0, 1))) def test_country_at_end(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.COUNTRY)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(0, 1), country=TokenRange(1, 2))) def test_country_in_middle(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), 
(BreakType.WORD, PhraseType.NONE, [(2, TokenType.COUNTRY)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q)) def test_postcode_with_designation(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.POSTCODE)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(1, 2), postcode=TokenRange(0, 1)), TokenAssignment(postcode=TokenRange(0, 1), address=[TokenRange(1, 2)])) def test_postcode_with_designation_backwards(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.POSTCODE)])) check_assignments(yield_token_assignments(q), TokenAssignment(name=TokenRange(0, 1), postcode=TokenRange(1, 2)), TokenAssignment(penalty=0.1, postcode=TokenRange(1, 2), address=[TokenRange(0, 1)])) def test_near_item_at_beginning(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.NEAR_ITEM)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(1, 2), near_item=TokenRange(0, 1))) def test_near_item_at_end(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.NEAR_ITEM)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(0, 1), near_item=TokenRange(1, 2))) def test_near_item_in_middle(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.NEAR_ITEM)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q)) def test_qualifier_at_beginning(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.QUALIFIER)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]), 
(BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.1, name=TokenRange(1, 3), qualifier=TokenRange(0, 1)), TokenAssignment(penalty=0.2, name=TokenRange(1, 2), qualifier=TokenRange(0, 1), address=[TokenRange(2, 3)])) def test_qualifier_after_name(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.QUALIFIER)]), (BreakType.WORD, PhraseType.NONE, [(4, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(5, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q), TokenAssignment(penalty=0.2, name=TokenRange(0, 2), qualifier=TokenRange(2, 3), address=[TokenRange(3, 5)]), TokenAssignment(penalty=0.2, name=TokenRange(3, 5), qualifier=TokenRange(2, 3), address=[TokenRange(0, 2)])) def test_qualifier_before_housenumber(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.QUALIFIER)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q)) def test_qualifier_after_housenumber(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.HOUSENUMBER)]), (BreakType.WORD, PhraseType.NONE, [(2, TokenType.QUALIFIER)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q)) def test_qualifier_in_middle_of_phrase(): q = make_query((BreakType.START, PhraseType.NONE, [(1, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(2, TokenType.PARTIAL)]), (BreakType.WORD, PhraseType.NONE, [(3, TokenType.QUALIFIER)]), (BreakType.WORD, PhraseType.NONE, [(4, TokenType.PARTIAL)]), (BreakType.PHRASE, PhraseType.NONE, [(5, TokenType.PARTIAL)])) check_assignments(yield_token_assignments(q))
15,714
Python
.py
254
45.452756
111
0.573
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,670
test_search_poi.py
osm-search_Nominatim/test/python/api/search/test_search_poi.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the POI searcher. """ import pytest import nominatim_api as napi from nominatim_api.types import SearchDetails from nominatim_api.search.db_searches import PoiSearch from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories def run_search(apiobj, frontend, global_penalty, poitypes, poi_penalties=None, ccodes=[], details=SearchDetails()): if poi_penalties is None: poi_penalties = [0.0] * len(poitypes) class MySearchData: penalty = global_penalty qualifiers = WeightedCategories(poitypes, poi_penalties) countries = WeightedStrings(ccodes, [0.0] * len(ccodes)) search = PoiSearch(MySearchData()) api = frontend(apiobj, options=['search']) async def run(): async with api._async_api.begin() as conn: return await search.lookup(conn, details) return api._loop.run_until_complete(run()) @pytest.mark.parametrize('coord,pid', [('34.3, 56.100021', 2), ('5.0, 4.59933', 1)]) def test_simple_near_search_in_placex(apiobj, frontend, coord, pid): apiobj.add_placex(place_id=1, class_='highway', type='bus_stop', centroid=(5.0, 4.6)) apiobj.add_placex(place_id=2, class_='highway', type='bus_stop', centroid=(34.3, 56.1)) details = SearchDetails.from_kwargs({'near': coord, 'near_radius': 0.001}) results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5], details=details) assert [r.place_id for r in results] == [pid] @pytest.mark.parametrize('coord,pid', [('34.3, 56.100021', 2), ('34.3, 56.4', 2), ('5.0, 4.59933', 1)]) def test_simple_near_search_in_classtype(apiobj, frontend, coord, pid): apiobj.add_placex(place_id=1, class_='highway', type='bus_stop', centroid=(5.0, 4.6)) apiobj.add_placex(place_id=2, class_='highway', type='bus_stop', centroid=(34.3, 56.1)) apiobj.add_class_type_table('highway', 'bus_stop') details 
= SearchDetails.from_kwargs({'near': coord, 'near_radius': 0.5}) results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5], details=details) assert [r.place_id for r in results] == [pid] class TestPoiSearchWithRestrictions: @pytest.fixture(autouse=True, params=["placex", "classtype"]) def fill_database(self, apiobj, request): apiobj.add_placex(place_id=1, class_='highway', type='bus_stop', country_code='au', centroid=(34.3, 56.10003)) apiobj.add_placex(place_id=2, class_='highway', type='bus_stop', country_code='nz', centroid=(34.3, 56.1)) if request.param == 'classtype': apiobj.add_class_type_table('highway', 'bus_stop') self.args = {'near': '34.3, 56.4', 'near_radius': 0.5} else: self.args = {'near': '34.3, 56.100021', 'near_radius': 0.001} def test_unrestricted(self, apiobj, frontend): results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5], details=SearchDetails.from_kwargs(self.args)) assert [r.place_id for r in results] == [1, 2] def test_restict_country(self, apiobj, frontend): results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5], ccodes=['de', 'nz'], details=SearchDetails.from_kwargs(self.args)) assert [r.place_id for r in results] == [2] def test_restrict_by_viewbox(self, apiobj, frontend): args = {'bounded_viewbox': True, 'viewbox': '34.299,56.0,34.3001,56.10001'} args.update(self.args) results = run_search(apiobj, frontend, 0.1, [('highway', 'bus_stop')], [0.5], ccodes=['de', 'nz'], details=SearchDetails.from_kwargs(args)) assert [r.place_id for r in results] == [2]
4,337
Python
.py
80
43.575
98
0.596167
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,671
test_query_analyzer_factory.py
osm-search_Nominatim/test/python/api/search/test_query_analyzer_factory.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for query analyzer creation. """ from pathlib import Path import pytest from nominatim_api.search.query_analyzer_factory import make_query_analyzer from nominatim_api.search.icu_tokenizer import ICUQueryAnalyzer @pytest.mark.asyncio async def test_import_icu_tokenizer(table_factory, api): table_factory('nominatim_properties', definition='property TEXT, value TEXT', content=(('tokenizer', 'icu'), ('tokenizer_import_normalisation', ':: lower();'), ('tokenizer_import_transliteration', "'1' > '/1/'; 'ä' > 'ä '"))) async with api.begin() as conn: ana = await make_query_analyzer(conn) assert isinstance(ana, ICUQueryAnalyzer) @pytest.mark.asyncio async def test_import_missing_property(table_factory, api): table_factory('nominatim_properties', definition='property TEXT, value TEXT') async with api.begin() as conn: with pytest.raises(ValueError, match='Property.*not found'): await make_query_analyzer(conn) @pytest.mark.asyncio async def test_import_missing_module(table_factory, api): table_factory('nominatim_properties', definition='property TEXT, value TEXT', content=(('tokenizer', 'missing'),)) async with api.begin() as conn: with pytest.raises(RuntimeError, match='Tokenizer not found'): await make_query_analyzer(conn)
1,690
Python
.py
38
37.236842
92
0.673581
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,672
test_search_near.py
osm-search_Nominatim/test/python/api/search/test_search_near.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the near searcher. """ import pytest import nominatim_api as napi from nominatim_api.types import SearchDetails from nominatim_api.search.db_searches import NearSearch, PlaceSearch from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories,\ FieldLookup, FieldRanking, RankedTokens from nominatim_api.search.db_search_lookups import LookupAll def run_search(apiobj, frontend, global_penalty, cat, cat_penalty=None, ccodes=[], details=SearchDetails()): class PlaceSearchData: penalty = 0.0 postcodes = WeightedStrings([], []) countries = WeightedStrings(ccodes, [0.0] * len(ccodes)) housenumbers = WeightedStrings([], []) qualifiers = WeightedStrings([], []) lookups = [FieldLookup('name_vector', [56], LookupAll)] rankings = [] if ccodes is not None: details.countries = ccodes place_search = PlaceSearch(0.0, PlaceSearchData(), 2) if cat_penalty is None: cat_penalty = [0.0] * len(cat) near_search = NearSearch(0.1, WeightedCategories(cat, cat_penalty), place_search) api = frontend(apiobj, options=['search']) async def run(): async with api._async_api.begin() as conn: return await near_search.lookup(conn, details) results = api._loop.run_until_complete(run()) results.sort(key=lambda r: r.accuracy) return results def test_no_results_inner_query(apiobj, frontend): assert not run_search(apiobj, frontend, 0.4, [('this', 'that')]) def test_no_appropriate_results_inner_query(apiobj, frontend): apiobj.add_placex(place_id=100, country_code='us', centroid=(5.6, 4.3), geometry='POLYGON((0.0 0.0, 10.0 0.0, 10.0 2.0, 0.0 2.0, 0.0 0.0))') apiobj.add_search_name(100, names=[56], country_code='us', centroid=(5.6, 4.3)) apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6001, 4.2994)) assert not run_search(apiobj, 
frontend, 0.4, [('amenity', 'bank')]) class TestNearSearch: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=100, country_code='us', centroid=(5.6, 4.3)) apiobj.add_search_name(100, names=[56], country_code='us', centroid=(5.6, 4.3)) apiobj.add_placex(place_id=101, country_code='mx', centroid=(-10.3, 56.9)) apiobj.add_search_name(101, names=[56], country_code='mx', centroid=(-10.3, 56.9)) def test_near_in_placex(self, apiobj, frontend): apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6001, 4.2994)) apiobj.add_placex(place_id=23, class_='amenity', type='bench', centroid=(5.6001, 4.2994)) results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')]) assert [r.place_id for r in results] == [22] def test_multiple_types_near_in_placex(self, apiobj, frontend): apiobj.add_placex(place_id=22, class_='amenity', type='bank', importance=0.002, centroid=(5.6001, 4.2994)) apiobj.add_placex(place_id=23, class_='amenity', type='bench', importance=0.001, centroid=(5.6001, 4.2994)) results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank'), ('amenity', 'bench')]) assert [r.place_id for r in results] == [22, 23] def test_near_in_classtype(self, apiobj, frontend): apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6, 4.34)) apiobj.add_placex(place_id=23, class_='amenity', type='bench', centroid=(5.6, 4.34)) apiobj.add_class_type_table('amenity', 'bank') apiobj.add_class_type_table('amenity', 'bench') results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')]) assert [r.place_id for r in results] == [22] @pytest.mark.parametrize('cc,rid', [('us', 22), ('mx', 23)]) def test_restrict_by_country(self, apiobj, frontend, cc, rid): apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6001, 4.2994), country_code='us') apiobj.add_placex(place_id=122, class_='amenity', type='bank', centroid=(5.6001, 4.2994), country_code='mx') apiobj.add_placex(place_id=23, 
class_='amenity', type='bank', centroid=(-10.3001, 56.9), country_code='mx') apiobj.add_placex(place_id=123, class_='amenity', type='bank', centroid=(-10.3001, 56.9), country_code='us') results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')], ccodes=[cc, 'fr']) assert [r.place_id for r in results] == [rid] @pytest.mark.parametrize('excluded,rid', [(22, 122), (122, 22)]) def test_exclude_place_by_id(self, apiobj, frontend, excluded, rid): apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6001, 4.2994), country_code='us') apiobj.add_placex(place_id=122, class_='amenity', type='bank', centroid=(5.6001, 4.2994), country_code='us') results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')], details=SearchDetails(excluded=[excluded])) assert [r.place_id for r in results] == [rid] @pytest.mark.parametrize('layer,rids', [(napi.DataLayer.POI, [22]), (napi.DataLayer.MANMADE, [])]) def test_with_layer(self, apiobj, frontend, layer, rids): apiobj.add_placex(place_id=22, class_='amenity', type='bank', centroid=(5.6001, 4.2994), country_code='us') results = run_search(apiobj, frontend, 0.1, [('amenity', 'bank')], details=SearchDetails(layers=layer)) assert [r.place_id for r in results] == rids
6,716
Python
.py
123
41.504065
93
0.566585
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,673
test_search_places.py
osm-search_Nominatim/test/python/api/search/test_search_places.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the generic place searcher. """ import json import pytest import nominatim_api as napi from nominatim_api.types import SearchDetails from nominatim_api.search.db_searches import PlaceSearch from nominatim_api.search.db_search_fields import WeightedStrings, WeightedCategories,\ FieldLookup, FieldRanking, RankedTokens from nominatim_api.search.db_search_lookups import LookupAll, LookupAny, Restrict APIOPTIONS = ['search'] def run_search(apiobj, frontend, global_penalty, lookup, ranking, count=2, hnrs=[], pcs=[], ccodes=[], quals=[], details=SearchDetails()): class MySearchData: penalty = global_penalty postcodes = WeightedStrings(pcs, [0.0] * len(pcs)) countries = WeightedStrings(ccodes, [0.0] * len(ccodes)) housenumbers = WeightedStrings(hnrs, [0.0] * len(hnrs)) qualifiers = WeightedCategories(quals, [0.0] * len(quals)) lookups = lookup rankings = ranking search = PlaceSearch(0.0, MySearchData(), count) if frontend is None: api = apiobj else: api = frontend(apiobj, options=APIOPTIONS) async def run(): async with api._async_api.begin() as conn: return await search.lookup(conn, details) results = api._loop.run_until_complete(run()) results.sort(key=lambda r: r.accuracy) return results class TestNameOnlySearches: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=100, country_code='us', centroid=(5.6, 4.3)) apiobj.add_search_name(100, names=[1,2,10,11], country_code='us', centroid=(5.6, 4.3)) apiobj.add_placex(place_id=101, country_code='mx', centroid=(-10.3, 56.9)) apiobj.add_search_name(101, names=[1,2,20,21], country_code='mx', centroid=(-10.3, 56.9)) @pytest.mark.parametrize('lookup_type', [LookupAll, Restrict]) @pytest.mark.parametrize('rank,res', [([10], [100, 101]), ([20], [101, 100])]) def 
test_lookup_all_match(self, apiobj, frontend, lookup_type, rank, res): lookup = FieldLookup('name_vector', [1,2], lookup_type) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, rank)]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking]) assert [r.place_id for r in results] == res @pytest.mark.parametrize('lookup_type', [LookupAll, Restrict]) def test_lookup_all_partial_match(self, apiobj, frontend, lookup_type): lookup = FieldLookup('name_vector', [1,20], lookup_type) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking]) assert len(results) == 1 assert results[0].place_id == 101 @pytest.mark.parametrize('rank,res', [([10], [100, 101]), ([20], [101, 100])]) def test_lookup_any_match(self, apiobj, frontend, rank, res): lookup = FieldLookup('name_vector', [11,21], LookupAny) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, rank)]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking]) assert [r.place_id for r in results] == res def test_lookup_any_partial_match(self, apiobj, frontend): lookup = FieldLookup('name_vector', [20], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking]) assert len(results) == 1 assert results[0].place_id == 101 @pytest.mark.parametrize('cc,res', [('us', 100), ('mx', 101)]) def test_lookup_restrict_country(self, apiobj, frontend, cc, res): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], ccodes=[cc]) assert [r.place_id for r in results] == [res] def test_lookup_restrict_placeid(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], 
details=SearchDetails(excluded=[101])) assert [r.place_id for r in results] == [100] @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) def test_return_geometries(self, apiobj, frontend, geom): lookup = FieldLookup('name_vector', [20], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], details=SearchDetails(geometry_output=geom)) assert geom.name.lower() in results[0].geometry @pytest.mark.parametrize('factor,npoints', [(0.0, 3), (1.0, 2)]) def test_return_simplified_geometry(self, apiobj, frontend, factor, npoints): apiobj.add_placex(place_id=333, country_code='us', centroid=(9.0, 9.0), geometry='LINESTRING(8.9 9.0, 9.0 9.0, 9.1 9.0)') apiobj.add_search_name(333, names=[55], country_code='us', centroid=(5.6, 4.3)) lookup = FieldLookup('name_vector', [55], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], details=SearchDetails(geometry_output=napi.GeometryFormat.GEOJSON, geometry_simplification=factor)) assert len(results) == 1 result = results[0] geom = json.loads(result.geometry['geojson']) assert result.place_id == 333 assert len(geom['coordinates']) == npoints @pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.7,4.0,6.0,5.0']) @pytest.mark.parametrize('wcount,rids', [(2, [100, 101]), (20000, [100])]) def test_prefer_viewbox(self, apiobj, frontend, viewbox, wcount, rids): lookup = FieldLookup('name_vector', [1, 2], LookupAll) ranking = FieldRanking('name_vector', 0.2, [RankedTokens(0.0, [21])]) api = frontend(apiobj, options=APIOPTIONS) results = run_search(api, None, 0.1, [lookup], [ranking]) assert [r.place_id for r in results] == [101, 100] results = run_search(api, None, 0.1, [lookup], [ranking], count=wcount, details=SearchDetails.from_kwargs({'viewbox': viewbox})) assert 
[r.place_id for r in results] == rids @pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.55,4.27,5.62,4.31']) def test_force_viewbox(self, apiobj, frontend, viewbox): lookup = FieldLookup('name_vector', [1, 2], LookupAll) details=SearchDetails.from_kwargs({'viewbox': viewbox, 'bounded_viewbox': True}) results = run_search(apiobj, frontend, 0.1, [lookup], [], details=details) assert [r.place_id for r in results] == [100] def test_prefer_near(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1, 2], LookupAll) ranking = FieldRanking('name_vector', 0.4, [RankedTokens(0.0, [21])]) api = frontend(apiobj, options=APIOPTIONS) results = run_search(api, None, 0.1, [lookup], [ranking]) assert [r.place_id for r in results] == [101, 100] results = run_search(api, None, 0.1, [lookup], [ranking], details=SearchDetails.from_kwargs({'near': '5.6,4.3'})) results.sort(key=lambda r: -r.importance) assert [r.place_id for r in results] == [100, 101] @pytest.mark.parametrize('radius', [0.09, 0.11]) def test_force_near(self, apiobj, frontend, radius): lookup = FieldLookup('name_vector', [1, 2], LookupAll) details=SearchDetails.from_kwargs({'near': '5.6,4.3', 'near_radius': radius}) results = run_search(apiobj, frontend, 0.1, [lookup], [], details=details) assert [r.place_id for r in results] == [100] class TestStreetWithHousenumber: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=1, class_='place', type='house', parent_place_id=1000, housenumber='20 a', country_code='es') apiobj.add_placex(place_id=2, class_='place', type='house', parent_place_id=1000, housenumber='21;22', country_code='es') apiobj.add_placex(place_id=1000, class_='highway', type='residential', rank_search=26, rank_address=26, country_code='es') apiobj.add_search_name(1000, names=[1,2,10,11], search_rank=26, address_rank=26, country_code='es') apiobj.add_placex(place_id=91, class_='place', type='house', parent_place_id=2000, housenumber='20', country_code='pt') 
apiobj.add_placex(place_id=92, class_='place', type='house', parent_place_id=2000, housenumber='22', country_code='pt') apiobj.add_placex(place_id=93, class_='place', type='house', parent_place_id=2000, housenumber='24', country_code='pt') apiobj.add_placex(place_id=2000, class_='highway', type='residential', rank_search=26, rank_address=26, country_code='pt') apiobj.add_search_name(2000, names=[1,2,20,21], search_rank=26, address_rank=26, country_code='pt') @pytest.mark.parametrize('hnr,res', [('20', [91, 1]), ('20 a', [1]), ('21', [2]), ('22', [2, 92]), ('24', [93]), ('25', [])]) def test_lookup_by_single_housenumber(self, apiobj, frontend, hnr, res): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=[hnr]) assert [r.place_id for r in results] == res + [1000, 2000] @pytest.mark.parametrize('cc,res', [('es', [2, 1000]), ('pt', [92, 2000])]) def test_lookup_with_country_restriction(self, apiobj, frontend, cc, res): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], ccodes=[cc]) assert [r.place_id for r in results] == res def test_lookup_exclude_housenumber_placeid(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], details=SearchDetails(excluded=[92])) assert [r.place_id for r in results] == [2, 1000, 2000] def test_lookup_exclude_street_placeid(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], 
details=SearchDetails(excluded=[1000])) assert [r.place_id for r in results] == [2, 92, 2000] def test_lookup_only_house_qualifier(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], quals=[('place', 'house')]) assert [r.place_id for r in results] == [2, 92] def test_lookup_only_street_qualifier(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], quals=[('highway', 'residential')]) assert [r.place_id for r in results] == [1000, 2000] @pytest.mark.parametrize('rank,found', [(26, True), (27, False), (30, False)]) def test_lookup_min_rank(self, apiobj, frontend, rank, found): lookup = FieldLookup('name_vector', [1,2], LookupAll) ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, [lookup], [ranking], hnrs=['22'], details=SearchDetails(min_rank=rank)) assert [r.place_id for r in results] == ([2, 92, 1000, 2000] if found else [2, 92]) @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) def test_return_geometries(self, apiobj, frontend, geom): lookup = FieldLookup('name_vector', [1, 2], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['20', '21', '22'], details=SearchDetails(geometry_output=geom)) assert results assert all(geom.name.lower() in r.geometry for r in results) def test_very_large_housenumber(apiobj, frontend): apiobj.add_placex(place_id=93, class_='place', type='house', parent_place_id=2000, housenumber='2467463524544', country_code='pt') apiobj.add_placex(place_id=2000, class_='highway', type='residential', rank_search=26, rank_address=26, 
country_code='pt') apiobj.add_search_name(2000, names=[1,2], search_rank=26, address_rank=26, country_code='pt') lookup = FieldLookup('name_vector', [1, 2], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['2467463524544'], details=SearchDetails()) assert results assert [r.place_id for r in results] == [93, 2000] @pytest.mark.parametrize('wcount,rids', [(2, [990, 991]), (30000, [990])]) def test_name_and_postcode(apiobj, frontend, wcount, rids): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, postcode='11225', centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_search_name(990, names=[111], centroid=(10.0, 10.0), search_rank=27, address_rank=27) apiobj.add_placex(place_id=991, class_='highway', type='service', rank_search=27, rank_address=27, postcode='11221', centroid=(10.3, 10.3), geometry='LINESTRING(9.995 10.3, 10.005 10.3)') apiobj.add_search_name(991, names=[111], centroid=(10.3, 10.3), search_rank=27, address_rank=27) apiobj.add_postcode(place_id=100, country_code='ch', postcode='11225', geometry='POINT(10 10)') lookup = FieldLookup('name_vector', [111], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], pcs=['11225'], count=wcount, details=SearchDetails()) assert results assert [r.place_id for r in results] == rids class TestInterpolations: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_search_name(990, names=[111], search_rank=27, address_rank=27) apiobj.add_placex(place_id=991, class_='place', type='house', parent_place_id=990, rank_search=30, rank_address=30, housenumber='23', centroid=(10.0, 10.00002)) apiobj.add_osmline(place_id=992, parent_place_id=990, startnumber=21, endnumber=29, step=2, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 
10.00001, 10.005 10.00001)') @pytest.mark.parametrize('hnr,res', [('21', [992]), ('22', []), ('23', [991])]) def test_lookup_housenumber(self, apiobj, frontend, hnr, res): lookup = FieldLookup('name_vector', [111], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=[hnr]) assert [r.place_id for r in results] == res + [990] @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) def test_osmline_with_geometries(self, apiobj, frontend, geom): lookup = FieldLookup('name_vector', [111], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['21'], details=SearchDetails(geometry_output=geom)) assert results[0].place_id == 992 assert geom.name.lower() in results[0].geometry class TestTiger: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=990, class_='highway', type='service', rank_search=27, rank_address=27, country_code='us', centroid=(10.0, 10.0), geometry='LINESTRING(9.995 10, 10.005 10)') apiobj.add_search_name(990, names=[111], country_code='us', search_rank=27, address_rank=27) apiobj.add_placex(place_id=991, class_='place', type='house', parent_place_id=990, rank_search=30, rank_address=30, housenumber='23', country_code='us', centroid=(10.0, 10.00002)) apiobj.add_tiger(place_id=992, parent_place_id=990, startnumber=21, endnumber=29, step=2, centroid=(10.0, 10.00001), geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') @pytest.mark.parametrize('hnr,res', [('21', [992]), ('22', []), ('23', [991])]) def test_lookup_housenumber(self, apiobj, frontend, hnr, res): lookup = FieldLookup('name_vector', [111], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=[hnr]) assert [r.place_id for r in results] == res + [990] @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) def 
test_tiger_with_geometries(self, apiobj, frontend, geom): lookup = FieldLookup('name_vector', [111], LookupAll) results = run_search(apiobj, frontend, 0.1, [lookup], [], hnrs=['21'], details=SearchDetails(geometry_output=geom)) assert results[0].place_id == 992 assert geom.name.lower() in results[0].geometry class TestLayersRank30: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=223, class_='place', type='house', housenumber='1', rank_address=30, rank_search=30) apiobj.add_search_name(223, names=[34], importance=0.0009, address_rank=30, search_rank=30) apiobj.add_placex(place_id=224, class_='amenity', type='toilet', rank_address=30, rank_search=30) apiobj.add_search_name(224, names=[34], importance=0.0008, address_rank=30, search_rank=30) apiobj.add_placex(place_id=225, class_='man_made', type='tower', rank_address=0, rank_search=30) apiobj.add_search_name(225, names=[34], importance=0.0007, address_rank=0, search_rank=30) apiobj.add_placex(place_id=226, class_='railway', type='station', rank_address=0, rank_search=30) apiobj.add_search_name(226, names=[34], importance=0.0006, address_rank=0, search_rank=30) apiobj.add_placex(place_id=227, class_='natural', type='cave', rank_address=0, rank_search=30) apiobj.add_search_name(227, names=[34], importance=0.0005, address_rank=0, search_rank=30) @pytest.mark.parametrize('layer,res', [(napi.DataLayer.ADDRESS, [223]), (napi.DataLayer.POI, [224]), (napi.DataLayer.ADDRESS | napi.DataLayer.POI, [223, 224]), (napi.DataLayer.MANMADE, [225]), (napi.DataLayer.RAILWAY, [226]), (napi.DataLayer.NATURAL, [227]), (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, [225, 227]), (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, [225, 226])]) def test_layers_rank30(self, apiobj, frontend, layer, res): lookup = FieldLookup('name_vector', [34], LookupAny) results = run_search(apiobj, frontend, 0.1, [lookup], [], details=SearchDetails(layers=layer)) assert [r.place_id for r in results] == res
23,598
Python
.py
394
44.484772
106
0.553691
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,674
test_icu_query_analyzer.py
osm-search_Nominatim/test/python/api/search/test_icu_query_analyzer.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for query analyzer for ICU tokenizer. """ import pytest import pytest_asyncio from nominatim_api import NominatimAPIAsync from nominatim_api.search.query import Phrase, PhraseType, TokenType, BreakType import nominatim_api.search.icu_tokenizer as tok from nominatim_api.logging import set_log_output, get_and_disable async def add_word(conn, word_id, word_token, wtype, word, info = None): t = conn.t.meta.tables['word'] await conn.execute(t.insert(), {'word_id': word_id, 'word_token': word_token, 'type': wtype, 'word': word, 'info': info}) def make_phrase(query): return [Phrase(PhraseType.NONE, s) for s in query.split(',')] @pytest_asyncio.fixture async def conn(table_factory): """ Create an asynchronous SQLAlchemy engine for the test DB. """ table_factory('nominatim_properties', definition='property TEXT, value TEXT', content=(('tokenizer_import_normalisation', ':: lower();'), ('tokenizer_import_transliteration', "'1' > '/1/'; '채' > '채 '"))) table_factory('word', definition='word_id INT, word_token TEXT, type TEXT, word TEXT, info JSONB') async with NominatimAPIAsync() as api: async with api.begin() as conn: yield conn @pytest.mark.asyncio async def test_empty_phrase(conn): ana = await tok.create_query_analyzer(conn) query = await ana.analyze_query([]) assert len(query.source) == 0 assert query.num_token_slots() == 0 @pytest.mark.asyncio async def test_single_phrase_with_unknown_terms(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'foo', 'w', 'FOO') query = await ana.analyze_query(make_phrase('foo BAR')) assert len(query.source) == 1 assert query.source[0].ptype == PhraseType.NONE assert query.source[0].text == 'foo bar' assert query.num_token_slots() == 2 assert len(query.nodes[0].starting) == 1 assert not 
query.nodes[1].starting @pytest.mark.asyncio async def test_multiple_phrases(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'one', 'w', 'one') await add_word(conn, 2, 'two', 'w', 'two') await add_word(conn, 100, 'one two', 'W', 'one two') await add_word(conn, 3, 'three', 'w', 'three') query = await ana.analyze_query(make_phrase('one two,three')) assert len(query.source) == 2 @pytest.mark.asyncio async def test_splitting_in_transliteration(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'm채', 'W', 'ma') await add_word(conn, 2, 'fo', 'W', 'fo') query = await ana.analyze_query(make_phrase('m채fo')) assert query.num_token_slots() == 2 assert query.nodes[0].starting assert query.nodes[1].starting assert query.nodes[1].btype == BreakType.TOKEN @pytest.mark.asyncio @pytest.mark.parametrize('term,order', [('23456', ['POSTCODE', 'HOUSENUMBER', 'WORD', 'PARTIAL']), ('3', ['HOUSENUMBER', 'POSTCODE', 'WORD', 'PARTIAL']) ]) async def test_penalty_postcodes_and_housenumbers(conn, term, order): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, term, 'P', None) await add_word(conn, 2, term, 'H', term) await add_word(conn, 3, term, 'w', term) await add_word(conn, 4, term, 'W', term) query = await ana.analyze_query(make_phrase(term)) assert query.num_token_slots() == 1 torder = [(tl.tokens[0].penalty, tl.ttype.name) for tl in query.nodes[0].starting] torder.sort() assert [t[1] for t in torder] == order @pytest.mark.asyncio async def test_category_words_only_at_beginning(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': 'in'}) await add_word(conn, 2, 'bar', 'w', 'BAR') query = await ana.analyze_query(make_phrase('foo BAR foo')) assert query.num_token_slots() == 3 assert len(query.nodes[0].starting) == 1 assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM assert not query.nodes[2].starting @pytest.mark.asyncio async def 
test_freestanding_qualifier_words_become_category(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'foo', 'S', 'FOO', {'op': '-'}) query = await ana.analyze_query(make_phrase('foo')) assert query.num_token_slots() == 1 assert len(query.nodes[0].starting) == 1 assert query.nodes[0].starting[0].ttype == TokenType.NEAR_ITEM @pytest.mark.asyncio async def test_qualifier_words(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'foo', 'S', None, {'op': '-'}) await add_word(conn, 2, 'bar', 'w', None) query = await ana.analyze_query(make_phrase('foo BAR foo BAR foo')) assert query.num_token_slots() == 5 assert set(t.ttype for t in query.nodes[0].starting) == {TokenType.QUALIFIER} assert set(t.ttype for t in query.nodes[2].starting) == {TokenType.QUALIFIER} assert set(t.ttype for t in query.nodes[4].starting) == {TokenType.QUALIFIER} @pytest.mark.asyncio async def test_add_unknown_housenumbers(conn): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, '23', 'H', '23') query = await ana.analyze_query(make_phrase('466 23 99834 34a')) assert query.num_token_slots() == 4 assert query.nodes[0].starting[0].ttype == TokenType.HOUSENUMBER assert len(query.nodes[0].starting[0].tokens) == 1 assert query.nodes[0].starting[0].tokens[0].token == 0 assert query.nodes[1].starting[0].ttype == TokenType.HOUSENUMBER assert len(query.nodes[1].starting[0].tokens) == 1 assert query.nodes[1].starting[0].tokens[0].token == 1 assert not query.nodes[2].starting assert not query.nodes[3].starting @pytest.mark.asyncio @pytest.mark.parametrize('logtype', ['text', 'html']) async def test_log_output(conn, logtype): ana = await tok.create_query_analyzer(conn) await add_word(conn, 1, 'foo', 'w', 'FOO') set_log_output(logtype) await ana.analyze_query(make_phrase('foo')) assert get_and_disable()
6,526
Python
.py
138
41.028986
98
0.658236
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,675
test_query.py
osm-search_Nominatim/test/python/api/search/test_query.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test data types for search queries. """ import pytest import nominatim_api.search.query as nq def test_token_range_equal(): assert nq.TokenRange(2, 3) == nq.TokenRange(2, 3) assert not (nq.TokenRange(2, 3) != nq.TokenRange(2, 3)) @pytest.mark.parametrize('lop,rop', [((1, 2), (3, 4)), ((3, 4), (3, 5)), ((10, 12), (11, 12))]) def test_token_range_unequal(lop, rop): assert not (nq.TokenRange(*lop) == nq.TokenRange(*rop)) assert nq.TokenRange(*lop) != nq.TokenRange(*rop) def test_token_range_lt(): assert nq.TokenRange(1, 3) < nq.TokenRange(10, 12) assert nq.TokenRange(5, 6) < nq.TokenRange(7, 8) assert nq.TokenRange(1, 4) < nq.TokenRange(4, 5) assert not(nq.TokenRange(5, 6) < nq.TokenRange(5, 6)) assert not(nq.TokenRange(10, 11) < nq.TokenRange(4, 5)) def test_token_rankge_gt(): assert nq.TokenRange(3, 4) > nq.TokenRange(1, 2) assert nq.TokenRange(100, 200) > nq.TokenRange(10, 11) assert nq.TokenRange(10, 11) > nq.TokenRange(4, 10) assert not(nq.TokenRange(5, 6) > nq.TokenRange(5, 6)) assert not(nq.TokenRange(1, 2) > nq.TokenRange(3, 4)) assert not(nq.TokenRange(4, 10) > nq.TokenRange(3, 5)) def test_token_range_unimplemented_ops(): with pytest.raises(TypeError): nq.TokenRange(1, 3) <= nq.TokenRange(10, 12) with pytest.raises(TypeError): nq.TokenRange(1, 3) >= nq.TokenRange(10, 12)
1,680
Python
.py
38
38.842105
59
0.645833
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,676
test_search_postcode.py
osm-search_Nominatim/test/python/api/search/test_search_postcode.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the postcode searcher. """ import pytest import nominatim_api as napi from nominatim_api.types import SearchDetails from nominatim_api.search.db_searches import PostcodeSearch from nominatim_api.search.db_search_fields import WeightedStrings, FieldLookup, \ FieldRanking, RankedTokens def run_search(apiobj, frontend, global_penalty, pcs, pc_penalties=None, ccodes=[], lookup=[], ranking=[], details=SearchDetails()): if pc_penalties is None: pc_penalties = [0.0] * len(pcs) class MySearchData: penalty = global_penalty postcodes = WeightedStrings(pcs, pc_penalties) countries = WeightedStrings(ccodes, [0.0] * len(ccodes)) lookups = lookup rankings = ranking search = PostcodeSearch(0.0, MySearchData()) api = frontend(apiobj, options=['search']) async def run(): async with api._async_api.begin() as conn: return await search.lookup(conn, details) return api._loop.run_until_complete(run()) def test_postcode_only_search(apiobj, frontend): apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345') apiobj.add_postcode(place_id=101, country_code='pl', postcode='12 345') results = run_search(apiobj, frontend, 0.3, ['12345', '12 345'], [0.0, 0.1]) assert len(results) == 2 assert [r.place_id for r in results] == [100, 101] def test_postcode_with_country(apiobj, frontend): apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345') apiobj.add_postcode(place_id=101, country_code='pl', postcode='12 345') results = run_search(apiobj, frontend, 0.3, ['12345', '12 345'], [0.0, 0.1], ccodes=['de', 'pl']) assert len(results) == 1 assert results[0].place_id == 101 def test_postcode_area(apiobj, frontend): apiobj.add_postcode(place_id=100, country_code='ch', postcode='12345') apiobj.add_placex(place_id=200, country_code='ch', 
postcode='12345', osm_type='R', osm_id=34, class_='boundary', type='postal_code', geometry='POLYGON((0 0, 1 0, 1 1, 0 1, 0 0))') results = run_search(apiobj, frontend, 0.3, ['12345'], [0.0]) assert len(results) == 1 assert results[0].place_id == 200 assert results[0].bbox.area == 1 class TestPostcodeSearchWithAddress: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_postcode(place_id=100, country_code='ch', parent_place_id=1000, postcode='12345', geometry='POINT(17 5)') apiobj.add_postcode(place_id=101, country_code='pl', parent_place_id=2000, postcode='12345', geometry='POINT(-45 7)') apiobj.add_placex(place_id=1000, class_='place', type='village', rank_search=22, rank_address=22, country_code='ch') apiobj.add_search_name(1000, names=[1,2,10,11], search_rank=22, address_rank=22, country_code='ch') apiobj.add_placex(place_id=2000, class_='place', type='village', rank_search=22, rank_address=22, country_code='pl') apiobj.add_search_name(2000, names=[1,2,20,21], search_rank=22, address_rank=22, country_code='pl') def test_lookup_both(self, apiobj, frontend): lookup = FieldLookup('name_vector', [1,2], 'restrict') ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, ['12345'], lookup=[lookup], ranking=[ranking]) assert [r.place_id for r in results] == [100, 101] def test_restrict_by_name(self, apiobj, frontend): lookup = FieldLookup('name_vector', [10], 'restrict') results = run_search(apiobj, frontend, 0.1, ['12345'], lookup=[lookup]) assert [r.place_id for r in results] == [100] @pytest.mark.parametrize('coord,place_id', [((16.5, 5), 100), ((-45.1, 7.004), 101)]) def test_lookup_near(self, apiobj, frontend, coord, place_id): lookup = FieldLookup('name_vector', [1,2], 'restrict') ranking = FieldRanking('name_vector', 0.3, [RankedTokens(0.0, [10])]) results = run_search(apiobj, frontend, 0.1, ['12345'], lookup=[lookup], ranking=[ranking], details=SearchDetails(near=napi.Point(*coord), 
near_radius=0.6)) assert [r.place_id for r in results] == [place_id] @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) def test_return_geometries(self, apiobj, frontend, geom): results = run_search(apiobj, frontend, 0.1, ['12345'], details=SearchDetails(geometry_output=geom)) assert results assert all(geom.name.lower() in r.geometry for r in results) @pytest.mark.parametrize('viewbox, rids', [('-46,6,-44,8', [101,100]), ('16,4,18,6', [100,101])]) def test_prefer_viewbox(self, apiobj, frontend, viewbox, rids): results = run_search(apiobj, frontend, 0.1, ['12345'], details=SearchDetails.from_kwargs({'viewbox': viewbox})) assert [r.place_id for r in results] == rids @pytest.mark.parametrize('viewbox, rid', [('-46,6,-44,8', 101), ('16,4,18,6', 100)]) def test_restrict_to_viewbox(self, apiobj, frontend, viewbox, rid): results = run_search(apiobj, frontend, 0.1, ['12345'], details=SearchDetails.from_kwargs({'viewbox': viewbox, 'bounded_viewbox': True})) assert [r.place_id for r in results] == [rid] @pytest.mark.parametrize('coord,rids', [((17.05, 5), [100, 101]), ((-45, 7.1), [101, 100])]) def test_prefer_near(self, apiobj, frontend, coord, rids): results = run_search(apiobj, frontend, 0.1, ['12345'], details=SearchDetails(near=napi.Point(*coord))) assert [r.place_id for r in results] == rids @pytest.mark.parametrize('pid,rid', [(100, 101), (101, 100)]) def test_exclude(self, apiobj, frontend, pid, rid): results = run_search(apiobj, frontend, 0.1, ['12345'], details=SearchDetails(excluded=[pid])) assert [r.place_id for r in results] == [rid]
7,222
Python
.py
126
43.738095
98
0.572158
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,677
test_search_country.py
osm-search_Nominatim/test/python/api/search/test_search_country.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the country searcher. """ import pytest import nominatim_api as napi from nominatim_api.types import SearchDetails from nominatim_api.search.db_searches import CountrySearch from nominatim_api.search.db_search_fields import WeightedStrings def run_search(apiobj, frontend, global_penalty, ccodes, country_penalties=None, details=SearchDetails()): if country_penalties is None: country_penalties = [0.0] * len(ccodes) class MySearchData: penalty = global_penalty countries = WeightedStrings(ccodes, country_penalties) search = CountrySearch(MySearchData()) api = frontend(apiobj, options=['search']) async def run(): async with api._async_api.begin() as conn: return await search.lookup(conn, details) return api._loop.run_until_complete(run()) def test_find_from_placex(apiobj, frontend): apiobj.add_placex(place_id=55, class_='boundary', type='administrative', rank_search=4, rank_address=4, name={'name': 'Lolaland'}, country_code='yw', centroid=(10, 10), geometry='POLYGON((9.5 9.5, 9.5 10.5, 10.5 10.5, 10.5 9.5, 9.5 9.5))') results = run_search(apiobj, frontend, 0.5, ['de', 'yw'], [0.0, 0.3]) assert len(results) == 1 assert results[0].place_id == 55 assert results[0].accuracy == 0.8 def test_find_from_fallback_countries(apiobj, frontend): apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country_name('ro', {'name': 'Rom창nia'}) results = run_search(apiobj, frontend, 0.0, ['ro']) assert len(results) == 1 assert results[0].names == {'name': 'Rom창nia'} def test_find_none(apiobj, frontend): assert len(run_search(apiobj, frontend, 0.0, ['xx'])) == 0 @pytest.mark.parametrize('coord,numres', [((0.5, 1), 1), ((10, 10), 0)]) def test_find_near(apiobj, frontend, coord, numres): apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 
0))') apiobj.add_country_name('ro', {'name': 'Rom창nia'}) results = run_search(apiobj, frontend, 0.0, ['ro'], details=SearchDetails(near=napi.Point(*coord), near_radius=0.1)) assert len(results) == numres class TestCountryParameters: @pytest.fixture(autouse=True) def fill_database(self, apiobj): apiobj.add_placex(place_id=55, class_='boundary', type='administrative', rank_search=4, rank_address=4, name={'name': 'Lolaland'}, country_code='yw', centroid=(10, 10), geometry='POLYGON((9.5 9.5, 9.5 10.5, 10.5 10.5, 10.5 9.5, 9.5 9.5))') apiobj.add_country('ro', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country_name('ro', {'name': 'Rom창nia'}) @pytest.mark.parametrize('geom', [napi.GeometryFormat.GEOJSON, napi.GeometryFormat.KML, napi.GeometryFormat.SVG, napi.GeometryFormat.TEXT]) @pytest.mark.parametrize('cc', ['yw', 'ro']) def test_return_geometries(self, apiobj, frontend, geom, cc): results = run_search(apiobj, frontend, 0.5, [cc], details=SearchDetails(geometry_output=geom)) assert len(results) == 1 assert geom.name.lower() in results[0].geometry @pytest.mark.parametrize('pid,rids', [(76, [55]), (55, [])]) def test_exclude_place_id(self, apiobj, frontend, pid, rids): results = run_search(apiobj, frontend, 0.5, ['yw', 'ro'], details=SearchDetails(excluded=[pid])) assert [r.place_id for r in results] == rids @pytest.mark.parametrize('viewbox,rids', [((9, 9, 11, 11), [55]), ((-10, -10, -3, -3), [])]) def test_bounded_viewbox_in_placex(self, apiobj, frontend, viewbox, rids): results = run_search(apiobj, frontend, 0.5, ['yw'], details=SearchDetails.from_kwargs({'viewbox': viewbox, 'bounded_viewbox': True})) assert [r.place_id for r in results] == rids @pytest.mark.parametrize('viewbox,numres', [((0, 0, 1, 1), 1), ((-10, -10, -3, -3), 0)]) def test_bounded_viewbox_in_fallback(self, apiobj, frontend, viewbox, numres): results = run_search(apiobj, frontend, 0.5, ['ro'], details=SearchDetails.from_kwargs({'viewbox': viewbox, 'bounded_viewbox': True})) assert len(results) 
== numres
5,092
Python
.py
94
41.638298
96
0.569066
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,678
test_db_search_builder.py
osm-search_Nominatim/test/python/api/search/test_db_search_builder.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2023 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for creating abstract searches from token assignments. """ import pytest from nominatim_api.search.query import Token, TokenRange, BreakType, PhraseType, TokenType, QueryStruct, Phrase from nominatim_api.search.db_search_builder import SearchBuilder from nominatim_api.search.token_assignment import TokenAssignment from nominatim_api.types import SearchDetails import nominatim_api.search.db_searches as dbs class MyToken(Token): def get_category(self): return 'this', 'that' def make_query(*args): q = QueryStruct([Phrase(PhraseType.NONE, '')]) for _ in range(max(inner[0] for tlist in args for inner in tlist)): q.add_node(BreakType.WORD, PhraseType.NONE) q.add_node(BreakType.END, PhraseType.NONE) for start, tlist in enumerate(args): for end, ttype, tinfo in tlist: for tid, word in tinfo: q.add_token(TokenRange(start, end), ttype, MyToken(penalty=0.5 if ttype == TokenType.PARTIAL else 0.0, token=tid, count=1, addr_count=1, lookup_word=word)) return q def test_country_search(): q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.CountrySearch) assert set(search.countries.values) == {'de', 'en'} def test_country_search_with_country_restriction(): q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'en,fr'})) searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.CountrySearch) assert set(search.countries.values) == {'en'} def 
test_country_search_with_conflicting_country_restriction(): q = make_query([(1, TokenType.COUNTRY, [(2, 'de'), (3, 'en')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'fr'})) searches = list(builder.build(TokenAssignment(country=TokenRange(0, 1)))) assert len(searches) == 0 def test_postcode_search_simple(): q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PostcodeSearch) assert search.postcodes.values == ['2367'] assert not search.countries.values assert not search.lookups assert not search.rankings def test_postcode_with_country(): q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])], [(2, TokenType.COUNTRY, [(1, 'xx')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1), country=TokenRange(1, 2)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PostcodeSearch) assert search.postcodes.values == ['2367'] assert search.countries.values == ['xx'] assert not search.lookups assert not search.rankings def test_postcode_with_address(): q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])], [(2, TokenType.PARTIAL, [(100, 'word')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(postcode=TokenRange(0, 1), address=[TokenRange(1, 2)]))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PostcodeSearch) assert search.postcodes.values == ['2367'] assert not search.countries assert search.lookups assert not search.rankings def test_postcode_with_address_with_full_word(): q = make_query([(1, TokenType.POSTCODE, [(34, '2367')])], [(2, TokenType.PARTIAL, [(100, 'word')]), (2, TokenType.WORD, [(1, 'full')])]) builder = SearchBuilder(q, SearchDetails()) searches = 
list(builder.build(TokenAssignment(postcode=TokenRange(0, 1), address=[TokenRange(1, 2)]))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PostcodeSearch) assert search.postcodes.values == ['2367'] assert not search.countries assert search.lookups assert len(search.rankings) == 1 @pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1', 'bounded_viewbox': True}, {'near': '10,10'}]) def test_near_item_only(kwargs): q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs)) searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PoiSearch) assert search.qualifiers.values == [('this', 'that')] @pytest.mark.parametrize('kwargs', [{'viewbox': '0,0,1,1'}, {}]) def test_near_item_skipped(kwargs): q = make_query([(1, TokenType.NEAR_ITEM, [(2, 'foo')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs(kwargs)) searches = list(builder.build(TokenAssignment(near_item=TokenRange(0, 1)))) assert len(searches) == 0 def test_name_only_search(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert not search.countries.values assert not search.housenumbers.values assert not search.qualifiers.values assert len(search.lookups) == 1 assert len(search.rankings) == 1 def test_name_with_qualifier(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])], [(2, TokenType.QUALIFIER, [(55, 'hotel')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1), qualifier=TokenRange(1, 2)))) assert len(searches) == 1 search = searches[0] 
assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert not search.countries.values assert not search.housenumbers.values assert search.qualifiers.values == [('this', 'that')] assert len(search.lookups) == 1 assert len(search.rankings) == 1 def test_name_with_housenumber_search(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])], [(2, TokenType.HOUSENUMBER, [(66, '66')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1), housenumber=TokenRange(1, 2)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert not search.countries.values assert search.housenumbers.values == ['66'] assert len(search.lookups) == 1 assert len(search.rankings) == 1 def test_name_and_address(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])], [(2, TokenType.PARTIAL, [(2, 'b')]), (2, TokenType.WORD, [(101, 'b')])], [(3, TokenType.PARTIAL, [(3, 'c')]), (3, TokenType.WORD, [(102, 'c')])] ) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1), address=[TokenRange(1, 2), TokenRange(2, 3)]))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert not search.countries.values assert not search.housenumbers.values assert len(search.lookups) == 2 assert len(search.rankings) == 3 def test_name_and_complex_address(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])], [(2, TokenType.PARTIAL, [(2, 'b')]), (3, TokenType.WORD, [(101, 'bc')])], [(3, TokenType.PARTIAL, [(3, 'c')])], [(4, TokenType.PARTIAL, [(4, 'd')]), (4, TokenType.WORD, [(103, 'd')])] ) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1), address=[TokenRange(1, 2), 
TokenRange(2, 4)]))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert not search.countries.values assert not search.housenumbers.values assert len(search.lookups) == 2 assert len(search.rankings) == 2 def test_name_only_near_search(): q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])], [(2, TokenType.PARTIAL, [(1, 'a')]), (2, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails()) searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2), near_item=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.NearSearch) assert isinstance(search.search, dbs.PlaceSearch) def test_name_only_search_with_category(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]})) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert search.qualifiers.values == [('foo', 'bar')] def test_name_with_near_item_search_with_category_mismatch(): q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])], [(2, TokenType.PARTIAL, [(1, 'a')]), (2, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]})) searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2), near_item=TokenRange(0, 1)))) assert len(searches) == 0 def test_name_with_near_item_search_with_category_match(): q = make_query([(1, TokenType.NEAR_ITEM, [(88, 'g')])], [(2, TokenType.PARTIAL, [(1, 'a')]), (2, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'), ('this', 'that')]})) searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2), near_item=TokenRange(0, 1)))) assert len(searches) == 1 search = 
searches[0] assert isinstance(search, dbs.NearSearch) assert isinstance(search.search, dbs.PlaceSearch) def test_name_with_qualifier_search_with_category_mismatch(): q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])], [(2, TokenType.PARTIAL, [(1, 'a')]), (2, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar')]})) searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2), qualifier=TokenRange(0, 1)))) assert len(searches) == 0 def test_name_with_qualifier_search_with_category_match(): q = make_query([(1, TokenType.QUALIFIER, [(88, 'g')])], [(2, TokenType.PARTIAL, [(1, 'a')]), (2, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'categories': [('foo', 'bar'), ('this', 'that')]})) searches = list(builder.build(TokenAssignment(name=TokenRange(1, 2), qualifier=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert search.qualifiers.values == [('this', 'that')] def test_name_only_search_with_countries(): q = make_query([(1, TokenType.PARTIAL, [(1, 'a')]), (1, TokenType.WORD, [(100, 'a')])]) builder = SearchBuilder(q, SearchDetails.from_kwargs({'countries': 'de,en'})) searches = list(builder.build(TokenAssignment(name=TokenRange(0, 1)))) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert not search.postcodes.values assert set(search.countries.values) == {'de', 'en'} assert not search.housenumbers.values def make_counted_searches(name_part, name_full, address_part, address_full, num_address_parts=1): q = QueryStruct([Phrase(PhraseType.NONE, '')]) for i in range(1 + num_address_parts): q.add_node(BreakType.WORD, PhraseType.NONE) q.add_node(BreakType.END, PhraseType.NONE) q.add_token(TokenRange(0, 1), TokenType.PARTIAL, MyToken(0.5, 1, name_part, 1, 'name_part')) q.add_token(TokenRange(0, 1), TokenType.WORD, MyToken(0, 101, name_full, 1, 'name_full')) for 
i in range(num_address_parts): q.add_token(TokenRange(i + 1, i + 2), TokenType.PARTIAL, MyToken(0.5, 2, address_part, 1, 'address_part')) q.add_token(TokenRange(i + 1, i + 2), TokenType.WORD, MyToken(0, 102, address_full, 1, 'address_full')) builder = SearchBuilder(q, SearchDetails()) return list(builder.build(TokenAssignment(name=TokenRange(0, 1), address=[TokenRange(1, 1 + num_address_parts)]))) def test_infrequent_partials_in_name(): searches = make_counted_searches(1, 1, 1, 1) assert len(searches) == 1 search = searches[0] assert isinstance(search, dbs.PlaceSearch) assert len(search.lookups) == 2 assert len(search.rankings) == 2 assert set((l.column, l.lookup_type.__name__) for l in search.lookups) == \ {('name_vector', 'LookupAll'), ('nameaddress_vector', 'Restrict')} def test_frequent_partials_in_name_and_address(): searches = make_counted_searches(9999, 1, 9999, 1) assert len(searches) == 2 assert all(isinstance(s, dbs.PlaceSearch) for s in searches) searches.sort(key=lambda s: s.penalty) assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \ {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')} assert set((l.column, l.lookup_type.__name__) for l in searches[1].lookups) == \ {('nameaddress_vector', 'LookupAll'), ('name_vector', 'LookupAll')} def test_too_frequent_partials_in_name_and_address(): searches = make_counted_searches(20000, 1, 10000, 1) assert len(searches) == 1 assert all(isinstance(s, dbs.PlaceSearch) for s in searches) searches.sort(key=lambda s: s.penalty) assert set((l.column, l.lookup_type.__name__) for l in searches[0].lookups) == \ {('name_vector', 'LookupAny'), ('nameaddress_vector', 'Restrict')}
16,504
Python
.py
320
41.484375
111
0.604722
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,679
test_api_search_query.py
osm-search_Nominatim/test/python/api/search/test_api_search_query.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for tokenized query data structures. """ import pytest from nominatim_api.search import query class MyToken(query.Token): def get_category(self): return 'this', 'that' def mktoken(tid: int): return MyToken(penalty=3.0, token=tid, count=1, addr_count=1, lookup_word='foo') @pytest.mark.parametrize('ptype,ttype', [('NONE', 'WORD'), ('AMENITY', 'QUALIFIER'), ('STREET', 'PARTIAL'), ('CITY', 'WORD'), ('COUNTRY', 'COUNTRY'), ('POSTCODE', 'POSTCODE')]) def test_phrase_compatible(ptype, ttype): assert query.PhraseType[ptype].compatible_with(query.TokenType[ttype], False) @pytest.mark.parametrize('ptype', ['COUNTRY', 'POSTCODE']) def test_phrase_incompatible(ptype): assert not query.PhraseType[ptype].compatible_with(query.TokenType.PARTIAL, True) def test_query_node_empty(): qn = query.QueryNode(query.BreakType.PHRASE, query.PhraseType.NONE) assert not qn.has_tokens(3, query.TokenType.PARTIAL) assert qn.get_tokens(3, query.TokenType.WORD) is None def test_query_node_with_content(): qn = query.QueryNode(query.BreakType.PHRASE, query.PhraseType.NONE) qn.starting.append(query.TokenList(2, query.TokenType.PARTIAL, [mktoken(100), mktoken(101)])) qn.starting.append(query.TokenList(2, query.TokenType.WORD, [mktoken(1000)])) assert not qn.has_tokens(3, query.TokenType.PARTIAL) assert not qn.has_tokens(2, query.TokenType.COUNTRY) assert qn.has_tokens(2, query.TokenType.PARTIAL) assert qn.has_tokens(2, query.TokenType.WORD) assert qn.get_tokens(3, query.TokenType.PARTIAL) is None assert qn.get_tokens(2, query.TokenType.COUNTRY) is None assert len(qn.get_tokens(2, query.TokenType.PARTIAL)) == 2 assert len(qn.get_tokens(2, query.TokenType.WORD)) == 1 def test_query_struct_empty(): q = query.QueryStruct([]) assert q.num_token_slots() == 0 def 
test_query_struct_with_tokens(): q = query.QueryStruct([query.Phrase(query.PhraseType.NONE, 'foo bar')]) q.add_node(query.BreakType.WORD, query.PhraseType.NONE) q.add_node(query.BreakType.END, query.PhraseType.NONE) assert q.num_token_slots() == 2 q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1)) q.add_token(query.TokenRange(1, 2), query.TokenType.PARTIAL, mktoken(2)) q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(99)) q.add_token(query.TokenRange(1, 2), query.TokenType.WORD, mktoken(98)) assert q.get_tokens(query.TokenRange(0, 2), query.TokenType.WORD) == [] assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.WORD)) == 2 partials = q.get_partials_list(query.TokenRange(0, 2)) assert len(partials) == 2 assert [t.token for t in partials] == [1, 2] assert q.find_lookup_word_by_id(4) == 'None' assert q.find_lookup_word_by_id(99) == '[W]foo' def test_query_struct_incompatible_token(): q = query.QueryStruct([query.Phrase(query.PhraseType.COUNTRY, 'foo bar')]) q.add_node(query.BreakType.WORD, query.PhraseType.COUNTRY) q.add_node(query.BreakType.END, query.PhraseType.NONE) q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1)) q.add_token(query.TokenRange(1, 2), query.TokenType.COUNTRY, mktoken(100)) assert q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL) == [] assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.COUNTRY)) == 1 def test_query_struct_amenity_single_word(): q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'bar')]) q.add_node(query.BreakType.END, query.PhraseType.NONE) q.add_token(query.TokenRange(0, 1), query.TokenType.PARTIAL, mktoken(1)) q.add_token(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM, mktoken(2)) q.add_token(query.TokenRange(0, 1), query.TokenType.QUALIFIER, mktoken(3)) assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1 assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 1 assert 
len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 0 def test_query_struct_amenity_two_words(): q = query.QueryStruct([query.Phrase(query.PhraseType.AMENITY, 'foo bar')]) q.add_node(query.BreakType.WORD, query.PhraseType.AMENITY) q.add_node(query.BreakType.END, query.PhraseType.NONE) for trange in [(0, 1), (1, 2)]: q.add_token(query.TokenRange(*trange), query.TokenType.PARTIAL, mktoken(1)) q.add_token(query.TokenRange(*trange), query.TokenType.NEAR_ITEM, mktoken(2)) q.add_token(query.TokenRange(*trange), query.TokenType.QUALIFIER, mktoken(3)) assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.PARTIAL)) == 1 assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.NEAR_ITEM)) == 0 assert len(q.get_tokens(query.TokenRange(0, 1), query.TokenType.QUALIFIER)) == 1 assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.PARTIAL)) == 1 assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.NEAR_ITEM)) == 0 assert len(q.get_tokens(query.TokenRange(1, 2), query.TokenType.QUALIFIER)) == 1
5,559
Python
.py
94
52.425532
97
0.691003
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,680
conftest.py
osm-search_Nominatim/test/python/cli/conftest.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. import pytest import nominatim_db.cli class MockParamCapture: """ Mock that records the parameters with which a function was called as well as the number of calls. """ def __init__(self, retval=0): self.called = 0 self.return_value = retval self.last_args = None self.last_kwargs = None def __call__(self, *args, **kwargs): self.called += 1 self.last_args = args self.last_kwargs = kwargs return self.return_value class AsyncMockParamCapture: """ Mock that records the parameters with which a function was called as well as the number of calls. """ def __init__(self, retval=0): self.called = 0 self.return_value = retval self.last_args = None self.last_kwargs = None async def __call__(self, *args, **kwargs): self.called += 1 self.last_args = args self.last_kwargs = kwargs return self.return_value class DummyTokenizer: def __init__(self, *args, **kwargs): self.update_sql_functions_called = False self.finalize_import_called = False self.update_statistics_called = False self.update_word_tokens_called = False def update_sql_functions(self, *args, **kwargs): self.update_sql_functions_called = True def finalize_import(self, *args, **kwargs): self.finalize_import_called = True def update_statistics(self, *args, **kwargs): self.update_statistics_called = True def update_word_tokens(self, *args, **kwargs): self.update_word_tokens_called = True @pytest.fixture def cli_call(): """ Call the nominatim main function with the correct paths set. Returns a function that can be called with the desired CLI arguments. 
""" def _call_nominatim(*args): return nominatim_db.cli.nominatim(osm2pgsql_path='OSM2PGSQL NOT AVAILABLE', cli_args=args) return _call_nominatim @pytest.fixture def mock_func_factory(monkeypatch): def get_mock(module, func): mock = MockParamCapture() mock.func_name = func monkeypatch.setattr(module, func, mock) return mock return get_mock @pytest.fixture def async_mock_func_factory(monkeypatch): def get_mock(module, func): mock = AsyncMockParamCapture() mock.func_name = func monkeypatch.setattr(module, func, mock) return mock return get_mock @pytest.fixture def cli_tokenizer_mock(monkeypatch): tok = DummyTokenizer() monkeypatch.setattr(nominatim_db.tokenizer.factory, 'get_tokenizer_for_db', lambda *args: tok) monkeypatch.setattr(nominatim_db.tokenizer.factory, 'create_tokenizer', lambda *args: tok) return tok
3,052
Python
.py
83
29.722892
83
0.656706
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,681
test_cli.py
osm-search_Nominatim/test/python/cli/test_cli.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for command line interface wrapper. These tests just check that the various command line parameters route to the correct functionality. They use a lot of monkeypatching to avoid executing the actual functions. """ import importlib import pytest import nominatim_db.indexer.indexer import nominatim_db.tools.add_osm_data import nominatim_db.tools.freeze import nominatim_db.tools.tiger_data def test_cli_help(cli_call, capsys): """ Running nominatim tool without arguments prints help. """ assert cli_call() == 1 captured = capsys.readouterr() assert captured.out.startswith('usage:') def test_cli_version(cli_call, capsys): """ Running nominatim tool --version prints a version string. """ assert cli_call('--version') == 0 captured = capsys.readouterr() assert captured.out.startswith('Nominatim version') class TestCliWithDb: @pytest.fixture(autouse=True) def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock, table_factory): self.call_nominatim = cli_call self.tokenizer_mock = cli_tokenizer_mock # Make sure tools.freeze.is_frozen doesn't report database as frozen. 
Monkeypatching failed table_factory('place') @pytest.mark.parametrize("name,oid", [('file', 'foo.osm'), ('diff', 'foo.osc')]) def test_cli_add_data_file_command(self, cli_call, mock_func_factory, name, oid): mock_run_legacy = mock_func_factory(nominatim_db.tools.add_osm_data, 'add_data_from_file') assert cli_call('add-data', '--' + name, str(oid)) == 0 assert mock_run_legacy.called == 1 @pytest.mark.parametrize("name,oid", [('node', 12), ('way', 8), ('relation', 32)]) def test_cli_add_data_object_command(self, cli_call, mock_func_factory, name, oid): mock_run_legacy = mock_func_factory(nominatim_db.tools.add_osm_data, 'add_osm_object') assert cli_call('add-data', '--' + name, str(oid)) == 0 assert mock_run_legacy.called == 1 def test_cli_add_data_tiger_data(self, cli_call, cli_tokenizer_mock, async_mock_func_factory): mock = async_mock_func_factory(nominatim_db.tools.tiger_data, 'add_tiger_data') assert cli_call('add-data', '--tiger-data', 'somewhere') == 0 assert mock.called == 1 def test_freeze_command(self, mock_func_factory): mock_drop = mock_func_factory(nominatim_db.tools.freeze, 'drop_update_tables') mock_flatnode = mock_func_factory(nominatim_db.tools.freeze, 'drop_flatnode_file') assert self.call_nominatim('freeze') == 0 assert mock_drop.called == 1 assert mock_flatnode.called == 1 @pytest.mark.parametrize("params,do_bnds,do_ranks", [ ([], 2, 2), (['--boundaries-only'], 2, 0), (['--no-boundaries'], 0, 2), (['--boundaries-only', '--no-boundaries'], 0, 0)]) def test_index_command(self, monkeypatch, async_mock_func_factory, table_factory, params, do_bnds, do_ranks): table_factory('import_status', 'indexed bool') bnd_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_boundaries') rank_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_by_rank') postcode_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_postcodes') monkeypatch.setattr(nominatim_db.indexer.indexer.Indexer, 'has_pending', 
[False, True].pop) assert self.call_nominatim('index', *params) == 0 assert bnd_mock.called == do_bnds assert rank_mock.called == do_ranks assert postcode_mock.called == do_ranks def test_special_phrases_wiki_command(self, mock_func_factory): func = mock_func_factory(nominatim_db.clicmd.special_phrases.SPImporter, 'import_phrases') self.call_nominatim('special-phrases', '--import-from-wiki', '--no-replace') assert func.called == 1 def test_special_phrases_csv_command(self, src_dir, mock_func_factory): func = mock_func_factory(nominatim_db.clicmd.special_phrases.SPImporter, 'import_phrases') testdata = src_dir / 'test' / 'testdb' csv_path = str((testdata / 'full_en_phrases_test.csv').resolve()) self.call_nominatim('special-phrases', '--import-from-csv', csv_path) assert func.called == 1 def test_special_phrases_csv_bad_file(self, src_dir): testdata = src_dir / 'something349053905.csv' self.call_nominatim('special-phrases', '--import-from-csv', str(testdata.resolve())) == 1
4,920
Python
.py
88
47.727273
104
0.662563
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,682
test_cmd_api.py
osm-search_Nominatim/test/python/cli/test_cmd_api.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for API access commands of command-line interface wrapper. """ import json import pytest import nominatim_db.clicmd.api import nominatim_api as napi @pytest.mark.parametrize('call', ['search', 'reverse', 'lookup', 'details', 'status']) def test_list_format(cli_call, call): assert 0 == cli_call(call, '--list-formats') @pytest.mark.parametrize('call', ['search', 'reverse', 'lookup', 'details', 'status']) def test_bad_format(cli_call, call): assert 1 == cli_call(call, '--format', 'rsdfsdfsdfsaefsdfsd') class TestCliStatusCall: @pytest.fixture(autouse=True) def setup_status_mock(self, monkeypatch): monkeypatch.setattr(napi.NominatimAPI, 'status', lambda self: napi.StatusResult(200, 'OK')) def test_status_simple(self, cli_call, tmp_path): result = cli_call('status', '--project-dir', str(tmp_path)) assert result == 0 def test_status_json_format(self, cli_call, tmp_path, capsys): result = cli_call('status', '--project-dir', str(tmp_path), '--format', 'json') assert result == 0 json.loads(capsys.readouterr().out) class TestCliDetailsCall: @pytest.fixture(autouse=True) def setup_status_mock(self, monkeypatch): result = napi.DetailedResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, -3.0)) monkeypatch.setattr(napi.NominatimAPI, 'details', lambda *args, **kwargs: result) @pytest.mark.parametrize("params", [('--node', '1'), ('--way', '1'), ('--relation', '1'), ('--place_id', '10001')]) def test_details_json_format(self, cli_call, tmp_path, capsys, params): result = cli_call('details', '--project-dir', str(tmp_path), *params) assert result == 0 json.loads(capsys.readouterr().out) class TestCliReverseCall: @pytest.fixture(autouse=True) def setup_reverse_mock(self, monkeypatch): result = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 
'thing'), napi.Point(1.0, -3.0), names={'name':'Name', 'name:fr': 'Nom'}, extratags={'extra':'Extra'}, locale_name='Name', display_name='Name') monkeypatch.setattr(napi.NominatimAPI, 'reverse', lambda *args, **kwargs: result) def test_reverse_simple(self, cli_call, tmp_path, capsys): result = cli_call('reverse', '--project-dir', str(tmp_path), '--lat', '34', '--lon', '34') assert result == 0 out = json.loads(capsys.readouterr().out) assert out['name'] == 'Name' assert 'address' not in out assert 'extratags' not in out assert 'namedetails' not in out @pytest.mark.parametrize('param,field', [('--addressdetails', 'address'), ('--extratags', 'extratags'), ('--namedetails', 'namedetails')]) def test_reverse_extra_stuff(self, cli_call, tmp_path, capsys, param, field): result = cli_call('reverse', '--project-dir', str(tmp_path), '--lat', '34', '--lon', '34', param) assert result == 0 out = json.loads(capsys.readouterr().out) assert field in out def test_reverse_format(self, cli_call, tmp_path, capsys): result = cli_call('reverse', '--project-dir', str(tmp_path), '--lat', '34', '--lon', '34', '--format', 'geojson') assert result == 0 out = json.loads(capsys.readouterr().out) assert out['type'] == 'FeatureCollection' class TestCliLookupCall: @pytest.fixture(autouse=True) def setup_lookup_mock(self, monkeypatch): result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, -3.0), names={'name':'Name', 'name:fr': 'Nom'}, extratags={'extra':'Extra'}, locale_name='Name', display_name='Name') monkeypatch.setattr(napi.NominatimAPI, 'lookup', lambda *args, **kwargs: napi.SearchResults([result])) def test_lookup_simple(self, cli_call, tmp_path, capsys): result = cli_call('lookup', '--project-dir', str(tmp_path), '--id', 'N34') assert result == 0 out = json.loads(capsys.readouterr().out) assert len(out) == 1 assert out[0]['name'] == 'Name' assert 'address' not in out[0] assert 'extratags' not in out[0] assert 'namedetails' not in out[0] 
@pytest.mark.parametrize('endpoint, params', [('search', ('--query', 'Berlin')), ('search_address', ('--city', 'Berlin')) ]) def test_search(cli_call, tmp_path, capsys, monkeypatch, endpoint, params): result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), napi.Point(1.0, -3.0), names={'name':'Name', 'name:fr': 'Nom'}, extratags={'extra':'Extra'}, locale_name='Name', display_name='Name') monkeypatch.setattr(napi.NominatimAPI, endpoint, lambda *args, **kwargs: napi.SearchResults([result])) result = cli_call('search', '--project-dir', str(tmp_path), *params) assert result == 0 out = json.loads(capsys.readouterr().out) assert len(out) == 1 assert out[0]['name'] == 'Name' assert 'address' not in out[0] assert 'extratags' not in out[0] assert 'namedetails' not in out[0]
6,357
Python
.py
123
37.747967
86
0.538175
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,683
test_cmd_admin.py
osm-search_Nominatim/test/python/cli/test_cmd_admin.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Test for the command line interface wrapper admin subcommand. These tests just check that the various command line parameters route to the correct functionality. They use a lot of monkeypatching to avoid executing the actual functions. """ import pytest import nominatim_db.tools.admin import nominatim_db.tools.check_database import nominatim_db.tools.migration import nominatim_db.clicmd.admin def test_admin_command_check_database(cli_call, mock_func_factory): mock = mock_func_factory(nominatim_db.tools.check_database, 'check_database') assert cli_call('admin', '--check-database') == 0 assert mock.called == 1 def test_admin_migrate(cli_call, mock_func_factory): mock = mock_func_factory(nominatim_db.tools.migration, 'migrate') assert cli_call('admin', '--migrate') == 0 assert mock.called == 1 def test_admin_clean_deleted_relations(cli_call, mock_func_factory): mock = mock_func_factory(nominatim_db.tools.admin, 'clean_deleted_relations') assert cli_call('admin', '--clean-deleted', '1 month') == 0 assert mock.called == 1 def test_admin_clean_deleted_relations_no_age(cli_call, mock_func_factory): mock = mock_func_factory(nominatim_db.tools.admin, 'clean_deleted_relations') assert cli_call('admin', '--clean-deleted') == 1 class TestCliAdminWithDb: @pytest.fixture(autouse=True) def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock): self.call_nominatim = cli_call self.tokenizer_mock = cli_tokenizer_mock @pytest.mark.parametrize("func, params", [('analyse_indexing', ('--analyse-indexing', ))]) def test_analyse_indexing(self, mock_func_factory, func, params): mock = mock_func_factory(nominatim_db.tools.admin, func) assert self.call_nominatim('admin', *params) == 0 assert mock.called == 1
2,050
Python
.py
42
45
94
0.736181
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,684
test_cmd_replication.py
osm-search_Nominatim/test/python/cli/test_cmd_replication.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2023 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for replication command of command-line interface wrapper. """ import datetime as dt import time import pytest import nominatim_db.cli import nominatim_db.indexer.indexer import nominatim_db.tools.replication import nominatim_db.tools.refresh from nominatim_db.db import status @pytest.fixture def tokenizer_mock(monkeypatch): class DummyTokenizer: def __init__(self, *args, **kwargs): self.update_sql_functions_called = False self.finalize_import_called = False def update_sql_functions(self, *args): self.update_sql_functions_called = True def finalize_import(self, *args): self.finalize_import_called = True tok = DummyTokenizer() monkeypatch.setattr(nominatim_db.tokenizer.factory, 'get_tokenizer_for_db', lambda *args: tok) monkeypatch.setattr(nominatim_db.tokenizer.factory, 'create_tokenizer', lambda *args: tok) return tok @pytest.fixture def init_status(temp_db_conn, status_table): status.set_status(temp_db_conn, date=dt.datetime.now(dt.timezone.utc), seq=1) @pytest.fixture def index_mock(async_mock_func_factory, tokenizer_mock, init_status): return async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full') @pytest.fixture def update_mock(mock_func_factory, init_status, tokenizer_mock): return mock_func_factory(nominatim_db.tools.replication, 'update') class TestCliReplication: @pytest.fixture(autouse=True) def setup_cli_call(self, cli_call, temp_db): self.call_nominatim = lambda *args: cli_call('replication', *args) @pytest.fixture(autouse=True) def setup_update_function(self, monkeypatch): def _mock_updates(states): monkeypatch.setattr(nominatim_db.tools.replication, 'update', lambda *args, **kwargs: states.pop()) self.update_states = _mock_updates @pytest.mark.parametrize("params,func", [ (('--init',), 'init_replication'), 
(('--init', '--no-update-functions'), 'init_replication'), (('--check-for-updates',), 'check_for_updates') ]) def test_replication_command(self, mock_func_factory, params, func): func_mock = mock_func_factory(nominatim_db.tools.replication, func) if params == ('--init',): umock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions') assert self.call_nominatim(*params) == 0 assert func_mock.called == 1 if params == ('--init',): assert umock.called == 1 def test_replication_update_bad_interval(self, monkeypatch): monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', 'xx') assert self.call_nominatim() == 1 def test_replication_update_bad_interval_for_geofabrik(self, monkeypatch): monkeypatch.setenv('NOMINATIM_REPLICATION_URL', 'https://download.geofabrik.de/europe/italy-updates') assert self.call_nominatim() == 1 def test_replication_update_continuous_no_index(self): assert self.call_nominatim('--no-index') == 1 def test_replication_update_once_no_index(self, update_mock): assert self.call_nominatim('--once', '--no-index') == 0 assert str(update_mock.last_args[1]['osm2pgsql']).endswith('OSM2PGSQL NOT AVAILABLE') def test_replication_update_custom_osm2pgsql(self, monkeypatch, update_mock): monkeypatch.setenv('NOMINATIM_OSM2PGSQL_BINARY', '/secret/osm2pgsql') assert self.call_nominatim('--once', '--no-index') == 0 assert str(update_mock.last_args[1]['osm2pgsql']) == '/secret/osm2pgsql' @pytest.mark.parametrize("update_interval", [60, 3600]) def test_replication_catchup(self, placex_table, monkeypatch, index_mock, update_interval): monkeypatch.setenv('NOMINATIM_REPLICATION_UPDATE_INTERVAL', str(update_interval)) self.update_states([nominatim_db.tools.replication.UpdateState.NO_CHANGES]) assert self.call_nominatim('--catch-up') == 0 def test_replication_update_custom_threads(self, update_mock): assert self.call_nominatim('--once', '--no-index', '--threads', '4') == 0 assert update_mock.last_args[1]['threads'] == 4 def 
test_replication_update_continuous(self, index_mock): self.update_states([nominatim_db.tools.replication.UpdateState.UP_TO_DATE, nominatim_db.tools.replication.UpdateState.UP_TO_DATE]) with pytest.raises(IndexError): self.call_nominatim() assert index_mock.called == 2 def test_replication_update_continuous_no_change(self, mock_func_factory, index_mock): self.update_states([nominatim_db.tools.replication.UpdateState.NO_CHANGES, nominatim_db.tools.replication.UpdateState.UP_TO_DATE]) sleep_mock = mock_func_factory(time, 'sleep') with pytest.raises(IndexError): self.call_nominatim() assert index_mock.called == 1 assert sleep_mock.called == 1 assert sleep_mock.last_args[0] == 60
5,502
Python
.py
105
43.171429
95
0.66305
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,685
test_cmd_refresh.py
osm-search_Nominatim/test/python/cli/test_cmd_refresh.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for command line interface wrapper for refresk command. """ import pytest import nominatim_db.tools.refresh import nominatim_db.tools.postcodes import nominatim_db.indexer.indexer class TestRefresh: @pytest.fixture(autouse=True) def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock): self.call_nominatim = cli_call self.tokenizer_mock = cli_tokenizer_mock @pytest.mark.parametrize("command,func", [ ('address-levels', 'load_address_levels_from_config'), ('wiki-data', 'import_wikipedia_articles'), ('importance', 'recompute_importance'), ]) def test_refresh_command(self, mock_func_factory, command, func): mock_func_factory(nominatim_db.tools.refresh, 'create_functions') func_mock = mock_func_factory(nominatim_db.tools.refresh, func) assert self.call_nominatim('refresh', '--' + command) == 0 assert func_mock.called == 1 def test_refresh_word_count(self): assert self.call_nominatim('refresh', '--word-count') == 0 assert self.tokenizer_mock.update_statistics_called def test_refresh_word_tokens(self): assert self.call_nominatim('refresh', '--word-tokens') == 0 assert self.tokenizer_mock.update_word_tokens_called def test_refresh_postcodes(self, async_mock_func_factory, mock_func_factory, place_table): func_mock = mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes') idx_mock = async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_postcodes') assert self.call_nominatim('refresh', '--postcodes') == 0 assert func_mock.called == 1 assert idx_mock.called == 1 def test_refresh_postcodes_no_place_table(self): # Do nothing without the place table assert self.call_nominatim('refresh', '--postcodes') == 0 def test_refresh_create_functions(self, mock_func_factory): func_mock = mock_func_factory(nominatim_db.tools.refresh, 
'create_functions') assert self.call_nominatim('refresh', '--functions') == 0 assert func_mock.called == 1 assert self.tokenizer_mock.update_sql_functions_called def test_refresh_wikidata_file_not_found(self, monkeypatch): monkeypatch.setenv('NOMINATIM_WIKIPEDIA_DATA_PATH', 'gjoiergjeroi345Q') assert self.call_nominatim('refresh', '--wiki-data') == 1 def test_refresh_secondary_importance_file_not_found(self): assert self.call_nominatim('refresh', '--secondary-importance') == 1 def test_refresh_secondary_importance_new_table(self, mock_func_factory): mocks = [mock_func_factory(nominatim_db.tools.refresh, 'import_secondary_importance'), mock_func_factory(nominatim_db.tools.refresh, 'create_functions')] assert self.call_nominatim('refresh', '--secondary-importance') == 0 assert mocks[0].called == 1 assert mocks[1].called == 1 def test_refresh_importance_computed_after_wiki_import(self, monkeypatch, mock_func_factory): calls = [] monkeypatch.setattr(nominatim_db.tools.refresh, 'import_wikipedia_articles', lambda *args, **kwargs: calls.append('import') or 0) monkeypatch.setattr(nominatim_db.tools.refresh, 'recompute_importance', lambda *args, **kwargs: calls.append('update')) func_mock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions') assert self.call_nominatim('refresh', '--importance', '--wiki-data') == 0 assert calls == ['import', 'update'] assert func_mock.called == 1 @pytest.mark.parametrize('params', [('--data-object', 'w234'), ('--data-object', 'N23', '--data-object', 'N24'), ('--data-area', 'R7723'), ('--data-area', 'r7723', '--data-area', 'r2'), ('--data-area', 'R9284425', '--data-object', 'n1234567894567')]) def test_refresh_objects(self, params, mock_func_factory): func_mock = mock_func_factory(nominatim_db.tools.refresh, 'invalidate_osm_object') assert self.call_nominatim('refresh', *params) == 0 assert func_mock.called == len(params)/2 @pytest.mark.parametrize('func', ('--data-object', '--data-area')) @pytest.mark.parametrize('param', 
('234', 'a55', 'R 453', 'Rel')) def test_refresh_objects_bad_param(self, func, param, mock_func_factory): func_mock = mock_func_factory(nominatim_db.tools.refresh, 'invalidate_osm_object') self.call_nominatim('refresh', func, param) == 1 assert func_mock.called == 0
5,038
Python
.py
84
49.630952
104
0.643148
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,686
test_cmd_import.py
osm-search_Nominatim/test/python/cli/test_cmd_import.py
# SPDX-License-Identifier: GPL-2.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for import command of the command-line interface wrapper. """ import pytest import nominatim_db.tools.database_import import nominatim_db.data.country_info import nominatim_db.tools.refresh import nominatim_db.tools.postcodes import nominatim_db.indexer.indexer import nominatim_db.db.properties class TestCliImportWithDb: @pytest.fixture(autouse=True) def setup_cli_call(self, cli_call, temp_db, cli_tokenizer_mock): self.call_nominatim = cli_call self.tokenizer_mock = cli_tokenizer_mock def test_import_missing_file(self): assert self.call_nominatim('import', '--osm-file', 'sfsafegwedgw.reh.erh') == 1 def test_import_bad_file(self): assert self.call_nominatim('import', '--osm-file', '.') == 1 @pytest.mark.parametrize('with_updates', [True, False]) def test_import_full(self, mock_func_factory, async_mock_func_factory, with_updates, place_table, property_table): mocks = [ mock_func_factory(nominatim_db.tools.database_import, 'setup_database_skeleton'), mock_func_factory(nominatim_db.data.country_info, 'setup_country_tables'), mock_func_factory(nominatim_db.tools.database_import, 'import_osm_data'), mock_func_factory(nominatim_db.tools.refresh, 'import_wikipedia_articles'), mock_func_factory(nominatim_db.tools.refresh, 'import_secondary_importance'), mock_func_factory(nominatim_db.tools.database_import, 'truncate_data_tables'), async_mock_func_factory(nominatim_db.tools.database_import, 'load_data'), mock_func_factory(nominatim_db.tools.database_import, 'create_tables'), mock_func_factory(nominatim_db.tools.database_import, 'create_table_triggers'), mock_func_factory(nominatim_db.tools.database_import, 'create_partition_tables'), async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'), 
mock_func_factory(nominatim_db.data.country_info, 'create_country_names'), mock_func_factory(nominatim_db.tools.refresh, 'load_address_levels_from_config'), mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes'), async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'), ] params = ['import', '--osm-file', __file__] if with_updates: mocks.append(mock_func_factory(nominatim_db.tools.freeze, 'drop_update_tables')) params.append('--no-updates') cf_mock = mock_func_factory(nominatim_db.tools.refresh, 'create_functions') assert self.call_nominatim(*params) == 0 assert self.tokenizer_mock.finalize_import_called assert cf_mock.called > 1 for mock in mocks: assert mock.called == 1, "Mock '{}' not called".format(mock.func_name) def test_import_continue_load_data(self, mock_func_factory, async_mock_func_factory): mocks = [ mock_func_factory(nominatim_db.tools.database_import, 'truncate_data_tables'), async_mock_func_factory(nominatim_db.tools.database_import, 'load_data'), async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'), mock_func_factory(nominatim_db.data.country_info, 'create_country_names'), mock_func_factory(nominatim_db.tools.postcodes, 'update_postcodes'), async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'), mock_func_factory(nominatim_db.db.properties, 'set_property') ] assert self.call_nominatim('import', '--continue', 'load-data') == 0 assert self.tokenizer_mock.finalize_import_called for mock in mocks: assert mock.called == 1, "Mock '{}' not called".format(mock.func_name) def test_import_continue_indexing(self, mock_func_factory, async_mock_func_factory, placex_table, temp_db_conn): mocks = [ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'), mock_func_factory(nominatim_db.data.country_info, 'create_country_names'), async_mock_func_factory(nominatim_db.indexer.indexer.Indexer, 'index_full'), mock_func_factory(nominatim_db.db.properties, 
'set_property') ] assert self.call_nominatim('import', '--continue', 'indexing') == 0 for mock in mocks: assert mock.called == 1, "Mock '{}' not called".format(mock.func_name) # Calling it again still works for the index assert self.call_nominatim('import', '--continue', 'indexing') == 0 def test_import_continue_postprocess(self, mock_func_factory, async_mock_func_factory): mocks = [ async_mock_func_factory(nominatim_db.tools.database_import, 'create_search_indices'), mock_func_factory(nominatim_db.data.country_info, 'create_country_names'), mock_func_factory(nominatim_db.db.properties, 'set_property') ] assert self.call_nominatim('import', '--continue', 'db-postprocess') == 0 assert self.tokenizer_mock.finalize_import_called for mock in mocks: assert mock.called == 1, "Mock '{}' not called".format(mock.func_name)
5,563
Python
.py
92
50.967391
97
0.673897
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,687
test_indexing.py
osm-search_Nominatim/test/python/indexer/test_indexing.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for running the indexing. """ import itertools import pytest import pytest_asyncio from nominatim_db.indexer import indexer from nominatim_db.tokenizer import factory class IndexerTestDB: def __init__(self, conn): self.placex_id = itertools.count(100000) self.osmline_id = itertools.count(500000) self.postcode_id = itertools.count(700000) self.conn = conn self.conn.autocimmit = True with self.conn.cursor() as cur: cur.execute("""CREATE TABLE placex (place_id BIGINT, name HSTORE, class TEXT, type TEXT, linked_place_id BIGINT, rank_address SMALLINT, rank_search SMALLINT, indexed_status SMALLINT, indexed_date TIMESTAMP, partition SMALLINT, admin_level SMALLINT, country_code TEXT, address HSTORE, token_info JSONB, geometry_sector INTEGER)""") cur.execute("""CREATE TABLE location_property_osmline ( place_id BIGINT, osm_id BIGINT, address HSTORE, token_info JSONB, indexed_status SMALLINT, indexed_date TIMESTAMP, geometry_sector INTEGER)""") cur.execute("""CREATE TABLE location_postcode ( place_id BIGINT, indexed_status SMALLINT, indexed_date TIMESTAMP, country_code varchar(2), postcode TEXT)""") cur.execute("""CREATE OR REPLACE FUNCTION date_update() RETURNS TRIGGER AS $$ BEGIN IF NEW.indexed_status = 0 and OLD.indexed_status != 0 THEN NEW.indexed_date = now(); END IF; RETURN NEW; END; $$ LANGUAGE plpgsql;""") cur.execute("DROP TYPE IF EXISTS prepare_update_info CASCADE") cur.execute("""CREATE TYPE prepare_update_info AS ( name HSTORE, address HSTORE, rank_address SMALLINT, country_code TEXT, class TEXT, type TEXT, linked_place_id BIGINT )""") cur.execute("""CREATE OR REPLACE FUNCTION placex_indexing_prepare(p placex, OUT result prepare_update_info) AS $$ BEGIN result.address := p.address; result.name := p.name; result.class := p.class; result.type := 
p.type; result.country_code := p.country_code; result.rank_address := p.rank_address; END; $$ LANGUAGE plpgsql STABLE; """) cur.execute("""CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT) RETURNS HSTORE AS $$ BEGIN RETURN in_address; END; $$ LANGUAGE plpgsql STABLE; """) for table in ('placex', 'location_property_osmline', 'location_postcode'): cur.execute("""CREATE TRIGGER {0}_update BEFORE UPDATE ON {0} FOR EACH ROW EXECUTE PROCEDURE date_update() """.format(table)) def scalar(self, query): with self.conn.cursor() as cur: cur.execute(query) return cur.fetchone()[0] def add_place(self, cls='place', typ='locality', rank_search=30, rank_address=30, sector=20): next_id = next(self.placex_id) with self.conn.cursor() as cur: cur.execute("""INSERT INTO placex (place_id, class, type, rank_search, rank_address, indexed_status, geometry_sector) VALUES (%s, %s, %s, %s, %s, 1, %s)""", (next_id, cls, typ, rank_search, rank_address, sector)) return next_id def add_admin(self, **kwargs): kwargs['cls'] = 'boundary' kwargs['typ'] = 'administrative' return self.add_place(**kwargs) def add_osmline(self, sector=20): next_id = next(self.osmline_id) with self.conn.cursor() as cur: cur.execute("""INSERT INTO location_property_osmline (place_id, osm_id, indexed_status, geometry_sector) VALUES (%s, %s, 1, %s)""", (next_id, next_id, sector)) return next_id def add_postcode(self, country, postcode): next_id = next(self.postcode_id) with self.conn.cursor() as cur: cur.execute("""INSERT INTO location_postcode (place_id, indexed_status, country_code, postcode) VALUES (%s, 1, %s, %s)""", (next_id, country, postcode)) return next_id def placex_unindexed(self): return self.scalar('SELECT count(*) from placex where indexed_status > 0') def osmline_unindexed(self): return self.scalar("""SELECT count(*) from location_property_osmline WHERE indexed_status > 0""") @pytest.fixture def test_db(temp_db_conn): yield IndexerTestDB(temp_db_conn) @pytest.fixture def 
test_tokenizer(tokenizer_mock, project_env): return factory.create_tokenizer(project_env) @pytest.mark.parametrize("threads", [1, 15]) @pytest.mark.asyncio async def test_index_all_by_rank(test_db, threads, test_tokenizer): for rank in range(31): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() assert test_db.placex_unindexed() == 31 assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) await idx.index_by_rank(0, 30) assert test_db.placex_unindexed() == 0 assert test_db.osmline_unindexed() == 0 assert test_db.scalar("""SELECT count(*) from placex WHERE indexed_status = 0 and indexed_date is null""") == 0 # ranks come in order of rank address assert test_db.scalar(""" SELECT count(*) FROM placex p WHERE rank_address > 0 AND indexed_date >= (SELECT min(indexed_date) FROM placex o WHERE p.rank_address < o.rank_address)""") == 0 # placex address ranked objects come before interpolations assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address > 0 AND indexed_date > (SELECT min(indexed_date) FROM location_property_osmline)""") == 0 # rank 0 comes after all other placex objects assert test_db.scalar( """SELECT count(*) FROM placex WHERE rank_address > 0 AND indexed_date > (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @pytest.mark.asyncio async def test_index_partial_without_30(test_db, threads, test_tokenizer): for rank in range(31): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() assert test_db.placex_unindexed() == 31 assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) await idx.index_by_rank(4, 15) assert test_db.placex_unindexed() == 19 assert test_db.osmline_unindexed() == 1 assert test_db.scalar(""" SELECT count(*) FROM placex WHERE indexed_status = 0 AND not rank_address 
between 4 and 15""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @pytest.mark.asyncio async def test_index_partial_with_30(test_db, threads, test_tokenizer): for rank in range(31): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() assert test_db.placex_unindexed() == 31 assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) await idx.index_by_rank(28, 30) assert test_db.placex_unindexed() == 27 assert test_db.osmline_unindexed() == 0 assert test_db.scalar(""" SELECT count(*) FROM placex WHERE indexed_status = 0 AND rank_address between 1 and 27""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @pytest.mark.asyncio async def test_index_boundaries(test_db, threads, test_tokenizer): for rank in range(4, 10): test_db.add_admin(rank_address=rank, rank_search=rank) for rank in range(31): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() assert test_db.placex_unindexed() == 37 assert test_db.osmline_unindexed() == 1 idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) await idx.index_boundaries(0, 30) assert test_db.placex_unindexed() == 31 assert test_db.osmline_unindexed() == 1 assert test_db.scalar(""" SELECT count(*) FROM placex WHERE indexed_status = 0 AND class != 'boundary'""") == 0 @pytest.mark.parametrize("threads", [1, 15]) @pytest.mark.asyncio async def test_index_postcodes(test_db, threads, test_tokenizer): for postcode in range(1000): test_db.add_postcode('de', postcode) for postcode in range(32000, 33000): test_db.add_postcode('us', postcode) idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads) await idx.index_postcodes() assert test_db.scalar("""SELECT count(*) FROM location_postcode WHERE indexed_status != 0""") == 0 @pytest.mark.parametrize("analyse", [True, False]) @pytest.mark.asyncio async def test_index_full(test_db, analyse, test_tokenizer): for 
rank in range(4, 10): test_db.add_admin(rank_address=rank, rank_search=rank) for rank in range(31): test_db.add_place(rank_address=rank, rank_search=rank) test_db.add_osmline() for postcode in range(1000): test_db.add_postcode('de', postcode) idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4) await idx.index_full(analyse=analyse) assert test_db.placex_unindexed() == 0 assert test_db.osmline_unindexed() == 0 assert test_db.scalar("""SELECT count(*) FROM location_postcode WHERE indexed_status != 0""") == 0
12,052
Python
.py
243
34.078189
93
0.538089
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,688
test_icu_rule_loader.py
osm-search_Nominatim/test/python/tokenizer/test_icu_rule_loader.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for converting a config file to ICU rules. """ from textwrap import dedent import pytest import yaml from nominatim_db.tokenizer.icu_rule_loader import ICURuleLoader from nominatim_db.errors import UsageError from icu import Transliterator CONFIG_SECTIONS = ('normalization', 'transliteration', 'token-analysis') class TestIcuRuleLoader: @pytest.fixture(autouse=True) def init_env(self, project_env): self.project_env = project_env def write_config(self, content): (self.project_env.project_dir / 'icu_tokenizer.yaml').write_text(dedent(content)) def config_rules(self, *variants): content = dedent("""\ normalization: - ":: NFD ()" - "[[:Nonspacing Mark:] [:Cf:]] >" - ":: lower ()" - "[[:Punctuation:][:Space:]]+ > ' '" - ":: NFC ()" transliteration: - ":: Latin ()" - "[[:Punctuation:][:Space:]]+ > ' '" token-analysis: - analyzer: generic variants: - words: """) content += '\n'.join((" - " + s for s in variants)) + '\n' self.write_config(content) def get_replacements(self, *variants): self.config_rules(*variants) loader = ICURuleLoader(self.project_env) rules = loader.analysis[None].config['replacements'] return sorted((k, sorted(v)) for k,v in rules) def test_empty_rule_set(self): self.write_config("""\ normalization: transliteration: token-analysis: - analyzer: generic variants: """) rules = ICURuleLoader(self.project_env) assert rules.get_search_rules() == '' assert rules.get_normalization_rules() == '' assert rules.get_transliteration_rules() == '' @pytest.mark.parametrize("section", CONFIG_SECTIONS) def test_missing_section(self, section): rule_cfg = { s: [] for s in CONFIG_SECTIONS if s != section} self.write_config(yaml.dump(rule_cfg)) with pytest.raises(UsageError): ICURuleLoader(self.project_env) def test_get_search_rules(self): 
self.config_rules() loader = ICURuleLoader(self.project_env) rules = loader.get_search_rules() trans = Transliterator.createFromRules("test", rules) assert trans.transliterate(" Baum straße ") == " baum straße " assert trans.transliterate(" Baumstraße ") == " baumstraße " assert trans.transliterate(" Baumstrasse ") == " baumstrasse " assert trans.transliterate(" Baumstr ") == " baumstr " assert trans.transliterate(" Baumwegstr ") == " baumwegstr " assert trans.transliterate(" Αθήνα ") == " athēna " assert trans.transliterate(" проспект ") == " prospekt " def test_get_normalization_rules(self): self.config_rules() loader = ICURuleLoader(self.project_env) rules = loader.get_normalization_rules() trans = Transliterator.createFromRules("test", rules) assert trans.transliterate(" проспект-Prospekt ") == " проспект prospekt " def test_get_transliteration_rules(self): self.config_rules() loader = ICURuleLoader(self.project_env) rules = loader.get_transliteration_rules() trans = Transliterator.createFromRules("test", rules) assert trans.transliterate(" проспект-Prospekt ") == " prospekt Prospekt " def test_transliteration_rules_from_file(self): self.write_config("""\ normalization: transliteration: - "'ax' > 'b'" - !include transliteration.yaml token-analysis: - analyzer: generic variants: """) transpath = self.project_env.project_dir / ('transliteration.yaml') transpath.write_text('- "x > y"') loader = ICURuleLoader(self.project_env) rules = loader.get_transliteration_rules() trans = Transliterator.createFromRules("test", rules) assert trans.transliterate(" axxt ") == " byt " def test_search_rules(self): self.config_rules('~street => s,st', 'master => mstr') proc = ICURuleLoader(self.project_env).make_token_analysis() assert proc.search.transliterate('Master Street').strip() == 'master street' assert proc.search.transliterate('Earnes St').strip() == 'earnes st' assert proc.search.transliterate('Nostreet').strip() == 'nostreet' @pytest.mark.parametrize("variant", ['foo > 
bar', 'foo -> bar -> bar', '~foo~ -> bar', 'fo~ o -> bar']) def test_invalid_variant_description(self, variant): self.config_rules(variant) with pytest.raises(UsageError): ICURuleLoader(self.project_env) def test_add_full(self): repl = self.get_replacements("foo -> bar") assert repl == [(' foo ', [' bar', ' foo'])] def test_replace_full(self): repl = self.get_replacements("foo => bar") assert repl == [(' foo ', [' bar'])] def test_add_suffix_no_decompose(self): repl = self.get_replacements("~berg |-> bg") assert repl == [(' berg ', [' berg', ' bg']), ('berg ', ['berg', 'bg'])] def test_replace_suffix_no_decompose(self): repl = self.get_replacements("~berg |=> bg") assert repl == [(' berg ', [' bg']),('berg ', ['bg'])] def test_add_suffix_decompose(self): repl = self.get_replacements("~berg -> bg") assert repl == [(' berg ', [' berg', ' bg', 'berg', 'bg']), ('berg ', [' berg', ' bg', 'berg', 'bg'])] def test_replace_suffix_decompose(self): repl = self.get_replacements("~berg => bg") assert repl == [(' berg ', [' bg', 'bg']), ('berg ', [' bg', 'bg'])] def test_add_prefix_no_compose(self): repl = self.get_replacements("hinter~ |-> hnt") assert repl == [(' hinter', [' hinter', ' hnt']), (' hinter ', [' hinter', ' hnt'])] def test_replace_prefix_no_compose(self): repl = self.get_replacements("hinter~ |=> hnt") assert repl == [(' hinter', [' hnt']), (' hinter ', [' hnt'])] def test_add_prefix_compose(self): repl = self.get_replacements("hinter~-> h") assert repl == [(' hinter', [' h', ' h ', ' hinter', ' hinter ']), (' hinter ', [' h', ' h', ' hinter', ' hinter'])] def test_replace_prefix_compose(self): repl = self.get_replacements("hinter~=> h") assert repl == [(' hinter', [' h', ' h ']), (' hinter ', [' h', ' h'])] def test_add_beginning_only(self): repl = self.get_replacements("^Premier -> Pr") assert repl == [('^ premier ', ['^ pr', '^ premier'])] def test_replace_beginning_only(self): repl = self.get_replacements("^Premier => Pr") assert repl == [('^ premier ', ['^ 
pr'])] def test_add_final_only(self): repl = self.get_replacements("road$ -> rd") assert repl == [(' road ^', [' rd ^', ' road ^'])] def test_replace_final_only(self): repl = self.get_replacements("road$ => rd") assert repl == [(' road ^', [' rd ^'])] def test_decompose_only(self): repl = self.get_replacements("~foo -> foo") assert repl == [(' foo ', [' foo', 'foo']), ('foo ', [' foo', 'foo'])] def test_add_suffix_decompose_end_only(self): repl = self.get_replacements("~berg |-> bg", "~berg$ -> bg") assert repl == [(' berg ', [' berg', ' bg']), (' berg ^', [' berg ^', ' bg ^', 'berg ^', 'bg ^']), ('berg ', ['berg', 'bg']), ('berg ^', [' berg ^', ' bg ^', 'berg ^', 'bg ^'])] def test_replace_suffix_decompose_end_only(self): repl = self.get_replacements("~berg |=> bg", "~berg$ => bg") assert repl == [(' berg ', [' bg']), (' berg ^', [' bg ^', 'bg ^']), ('berg ', ['bg']), ('berg ^', [' bg ^', 'bg ^'])] def test_add_multiple_suffix(self): repl = self.get_replacements("~berg,~burg -> bg") assert repl == [(' berg ', [' berg', ' bg', 'berg', 'bg']), (' burg ', [' bg', ' burg', 'bg', 'burg']), ('berg ', [' berg', ' bg', 'berg', 'bg']), ('burg ', [' bg', ' burg', 'bg', 'burg'])]
8,951
Python
.py
185
37.816216
89
0.545233
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,689
test_factory.py
osm-search_Nominatim/test/python/tokenizer/test_factory.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for creating new tokenizers. """ import pytest from nominatim_db.db import properties from nominatim_db.tokenizer import factory from nominatim_db.errors import UsageError from dummy_tokenizer import DummyTokenizer def test_setup_bad_tokenizer_name(project_env, monkeypatch): monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy') with pytest.raises(UsageError): factory.create_tokenizer(project_env) class TestFactory: @pytest.fixture(autouse=True) def init_env(self, project_env, property_table, tokenizer_mock): self.config = project_env def test_setup_dummy_tokenizer(self, temp_db_conn): tokenizer = factory.create_tokenizer(self.config) assert isinstance(tokenizer, DummyTokenizer) assert tokenizer.init_state == "new" assert (self.config.project_dir / 'tokenizer').is_dir() assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy' def test_setup_tokenizer_dir_exists(self): (self.config.project_dir / 'tokenizer').mkdir() tokenizer = factory.create_tokenizer(self.config) assert isinstance(tokenizer, DummyTokenizer) assert tokenizer.init_state == "new" def test_setup_tokenizer_dir_failure(self): (self.config.project_dir / 'tokenizer').write_text("foo") with pytest.raises(UsageError): factory.create_tokenizer(self.config) def test_load_tokenizer(self): factory.create_tokenizer(self.config) tokenizer = factory.get_tokenizer_for_db(self.config) assert isinstance(tokenizer, DummyTokenizer) assert tokenizer.init_state == "loaded" def test_load_repopulate_tokenizer_dir(self): factory.create_tokenizer(self.config) self.config.project_dir = self.config.project_dir factory.get_tokenizer_for_db(self.config) assert (self.config.project_dir / 'tokenizer').exists() def test_load_missing_property(self, temp_db_cursor): factory.create_tokenizer(self.config) 
temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties") with pytest.raises(UsageError): factory.get_tokenizer_for_db(self.config)
2,415
Python
.py
52
39.961538
76
0.720223
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,690
test_icu.py
osm-search_Nominatim/test/python/tokenizer/test_icu.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for ICU tokenizer. """ import yaml import itertools import pytest from nominatim_db.tokenizer import icu_tokenizer import nominatim_db.tokenizer.icu_rule_loader from nominatim_db.db import properties from nominatim_db.db.sql_preprocessor import SQLPreprocessor from nominatim_db.data.place_info import PlaceInfo from mock_icu_word_table import MockIcuWordTable @pytest.fixture def word_table(temp_db_conn): return MockIcuWordTable(temp_db_conn) @pytest.fixture def test_config(project_env, tmp_path): sqldir = tmp_path / 'sql' sqldir.mkdir() (sqldir / 'tokenizer').mkdir() (sqldir / 'tokenizer' / 'icu_tokenizer.sql').write_text("SELECT 'a'") project_env.lib_dir.sql = sqldir return project_env @pytest.fixture def tokenizer_factory(dsn, tmp_path, property_table, sql_preprocessor, place_table, word_table): (tmp_path / 'tokenizer').mkdir() def _maker(): return icu_tokenizer.create(dsn, tmp_path / 'tokenizer') return _maker @pytest.fixture def db_prop(temp_db_conn): def _get_db_property(name): return properties.get_property(temp_db_conn, name) return _get_db_property @pytest.fixture def analyzer(tokenizer_factory, test_config, monkeypatch, temp_db_with_extensions, tmp_path): sql = tmp_path / 'sql' / 'tokenizer' / 'icu_tokenizer.sql' sql.write_text("SELECT 'a';") monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();') tok = tokenizer_factory() tok.init_new_db(test_config) monkeypatch.undo() def _mk_analyser(norm=("[[:Punctuation:][:Space:]]+ > ' '",), trans=(':: upper()',), variants=('~gasse -> gasse', 'street => st', ), sanitizers=[], with_housenumber=False, with_postcode=False): cfgstr = {'normalization': list(norm), 'sanitizers': sanitizers, 'transliteration': list(trans), 'token-analysis': [{'analyzer': 'generic', 'variants': [{'words': 
list(variants)}]}]} if with_housenumber: cfgstr['token-analysis'].append({'id': '@housenumber', 'analyzer': 'housenumbers'}) if with_postcode: cfgstr['token-analysis'].append({'id': '@postcode', 'analyzer': 'postcodes'}) (test_config.project_dir / 'icu_tokenizer.yaml').write_text(yaml.dump(cfgstr)) tok.loader = nominatim_db.tokenizer.icu_rule_loader.ICURuleLoader(test_config) return tok.name_analyzer() return _mk_analyser @pytest.fixture def sql_functions(temp_db_conn, def_config, src_dir): orig_sql = def_config.lib_dir.sql def_config.lib_dir.sql = src_dir / 'lib-sql' sqlproc = SQLPreprocessor(temp_db_conn, def_config) sqlproc.run_sql_file(temp_db_conn, 'functions/utils.sql') sqlproc.run_sql_file(temp_db_conn, 'tokenizer/icu_tokenizer.sql') def_config.lib_dir.sql = orig_sql @pytest.fixture def getorcreate_full_word(temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_full_word( norm_term TEXT, lookup_terms TEXT[], OUT full_token INT, OUT partial_tokens INT[]) AS $$ DECLARE partial_terms TEXT[] = '{}'::TEXT[]; term TEXT; term_id INTEGER; term_count INTEGER; BEGIN SELECT min(word_id) INTO full_token FROM word WHERE info->>'word' = norm_term and type = 'W'; IF full_token IS NULL THEN full_token := nextval('seq_word'); INSERT INTO word (word_id, word_token, type, info) SELECT full_token, lookup_term, 'W', json_build_object('word', norm_term, 'count', 0) FROM unnest(lookup_terms) as lookup_term; END IF; FOR term IN SELECT unnest(string_to_array(unnest(lookup_terms), ' ')) LOOP term := trim(term); IF NOT (ARRAY[term] <@ partial_terms) THEN partial_terms := partial_terms || term; END IF; END LOOP; partial_tokens := '{}'::INT[]; FOR term IN SELECT unnest(partial_terms) LOOP SELECT min(word_id), max(info->>'count') INTO term_id, term_count FROM word WHERE word_token = term and type = 'w'; IF term_id IS NULL THEN term_id := nextval('seq_word'); term_count := 0; INSERT INTO word (word_id, word_token, type, info) VALUES (term_id, term, 'w', 
json_build_object('count', term_count)); END IF; IF NOT (ARRAY[term_id] <@ partial_tokens) THEN partial_tokens := partial_tokens || term_id; END IF; END LOOP; END; $$ LANGUAGE plpgsql; """) def test_init_new(tokenizer_factory, test_config, db_prop): tok = tokenizer_factory() tok.init_new_db(test_config) assert db_prop(nominatim_db.tokenizer.icu_rule_loader.DBCFG_IMPORT_NORM_RULES) \ .startswith(':: lower ();') def test_init_word_table(tokenizer_factory, test_config, place_row, temp_db_cursor): place_row(names={'name' : 'Test Area', 'ref' : '52'}) place_row(names={'name' : 'No Area'}) place_row(names={'name' : 'Holzstrasse'}) tok = tokenizer_factory() tok.init_new_db(test_config) assert temp_db_cursor.table_exists('word') def test_init_from_project(test_config, tokenizer_factory): tok = tokenizer_factory() tok.init_new_db(test_config) tok = tokenizer_factory() tok.init_from_project(test_config) assert tok.loader is not None def test_update_sql_functions(db_prop, temp_db_cursor, tokenizer_factory, test_config, table_factory, monkeypatch): tok = tokenizer_factory() tok.init_new_db(test_config) table_factory('test', 'txt TEXT') func_file = test_config.lib_dir.sql / 'tokenizer' / 'icu_tokenizer.sql' func_file.write_text("""INSERT INTO test VALUES (1133)""") tok.update_sql_functions(test_config) test_content = temp_db_cursor.row_set('SELECT * FROM test') assert test_content == set((('1133', ), )) def test_finalize_import(tokenizer_factory, temp_db_cursor, test_config, sql_preprocessor_cfg): tok = tokenizer_factory() tok.init_new_db(test_config) assert not temp_db_cursor.index_exists('word', 'idx_word_word_id') tok.finalize_import(test_config) assert temp_db_cursor.index_exists('word', 'idx_word_word_id') def test_check_database(test_config, tokenizer_factory, temp_db_cursor, sql_preprocessor_cfg): tok = tokenizer_factory() tok.init_new_db(test_config) assert tok.check_database(test_config) is None def test_update_statistics_reverse_only(word_table, tokenizer_factory, 
test_config): tok = tokenizer_factory() tok.update_statistics(test_config) def test_update_statistics(word_table, table_factory, temp_db_cursor, tokenizer_factory, test_config): word_table.add_full_word(1000, 'hello') word_table.add_full_word(1001, 'bye') table_factory('search_name', 'place_id BIGINT, name_vector INT[], nameaddress_vector INT[]', [(12, [1000], [1001])]) tok = tokenizer_factory() tok.update_statistics(test_config) assert temp_db_cursor.scalar("""SELECT count(*) FROM word WHERE type = 'W' and word_id = 1000 and (info->>'count')::int > 0""") == 1 assert temp_db_cursor.scalar("""SELECT count(*) FROM word WHERE type = 'W' and word_id = 1001 and (info->>'addr_count')::int > 0""") == 1 def test_normalize_postcode(analyzer): with analyzer() as anl: anl.normalize_postcode('123') == '123' anl.normalize_postcode('ab-34 ') == 'AB-34' anl.normalize_postcode('38 Б') == '38 Б' class TestPostcodes: @pytest.fixture(autouse=True) def setup(self, analyzer, sql_functions): sanitizers = [{'step': 'clean-postcodes'}] with analyzer(sanitizers=sanitizers, with_postcode=True) as anl: self.analyzer = anl yield anl def process_postcode(self, cc, postcode): return self.analyzer.process_place(PlaceInfo({'country_code': cc, 'address': {'postcode': postcode}})) def test_update_postcodes_from_db_empty(self, table_factory, word_table): table_factory('location_postcode', 'country_code TEXT, postcode TEXT', content=(('de', '12345'), ('se', '132 34'), ('bm', 'AB23'), ('fr', '12345'))) self.analyzer.update_postcodes_from_db() assert word_table.count() == 5 assert word_table.get_postcodes() == {'12345', '132 34@132 34', 'AB 23@AB 23'} def test_update_postcodes_from_db_ambigious(self, table_factory, word_table): table_factory('location_postcode', 'country_code TEXT, postcode TEXT', content=(('in', '123456'), ('sg', '123456'))) self.analyzer.update_postcodes_from_db() assert word_table.count() == 3 assert word_table.get_postcodes() == {'123456', '123456@123 456'} def 
test_update_postcodes_from_db_add_and_remove(self, table_factory, word_table): table_factory('location_postcode', 'country_code TEXT, postcode TEXT', content=(('ch', '1234'), ('bm', 'BC 45'), ('bm', 'XX45'))) word_table.add_postcode(' 1234', '1234') word_table.add_postcode(' 5678', '5678') self.analyzer.update_postcodes_from_db() assert word_table.count() == 5 assert word_table.get_postcodes() == {'1234', 'BC 45@BC 45', 'XX 45@XX 45'} def test_process_place_postcode_simple(self, word_table): info = self.process_postcode('de', '12345') assert info['postcode'] == '12345' assert word_table.get_postcodes() == {'12345', } def test_process_place_postcode_with_space(self, word_table): info = self.process_postcode('in', '123 567') assert info['postcode'] == '123567' assert word_table.get_postcodes() == {'123567@123 567', } def test_update_special_phrase_empty_table(analyzer, word_table): with analyzer() as anl: anl.update_special_phrases([ ("König bei", "amenity", "royal", "near"), ("Könige ", "amenity", "royal", "-"), ("street", "highway", "primary", "in") ], True) assert word_table.get_special() \ == {('KÖNIG BEI', 'König bei', 'amenity', 'royal', 'near'), ('KÖNIGE', 'Könige', 'amenity', 'royal', None), ('STREET', 'street', 'highway', 'primary', 'in')} def test_update_special_phrase_delete_all(analyzer, word_table): word_table.add_special('FOO', 'foo', 'amenity', 'prison', 'in') word_table.add_special('BAR', 'bar', 'highway', 'road', None) assert word_table.count_special() == 2 with analyzer() as anl: anl.update_special_phrases([], True) assert word_table.count_special() == 0 def test_update_special_phrases_no_replace(analyzer, word_table): word_table.add_special('FOO', 'foo', 'amenity', 'prison', 'in') word_table.add_special('BAR', 'bar', 'highway', 'road', None) assert word_table.count_special() == 2 with analyzer() as anl: anl.update_special_phrases([], False) assert word_table.count_special() == 2 def test_update_special_phrase_modify(analyzer, word_table): 
word_table.add_special('FOO', 'foo', 'amenity', 'prison', 'in') word_table.add_special('BAR', 'bar', 'highway', 'road', None) assert word_table.count_special() == 2 with analyzer() as anl: anl.update_special_phrases([ ('prison', 'amenity', 'prison', 'in'), ('bar', 'highway', 'road', '-'), ('garden', 'leisure', 'garden', 'near') ], True) assert word_table.get_special() \ == {('PRISON', 'prison', 'amenity', 'prison', 'in'), ('BAR', 'bar', 'highway', 'road', None), ('GARDEN', 'garden', 'leisure', 'garden', 'near')} def test_add_country_names_new(analyzer, word_table): with analyzer() as anl: anl.add_country_names('es', {'name': 'Espagña', 'name:en': 'Spain'}) assert word_table.get_country() == {('es', 'ESPAGÑA'), ('es', 'SPAIN')} def test_add_country_names_extend(analyzer, word_table): word_table.add_country('ch', 'SCHWEIZ') with analyzer() as anl: anl.add_country_names('ch', {'name': 'Schweiz', 'name:fr': 'Suisse'}) assert word_table.get_country() == {('ch', 'SCHWEIZ'), ('ch', 'SUISSE')} class TestPlaceNames: @pytest.fixture(autouse=True) def setup(self, analyzer, sql_functions): sanitizers = [{'step': 'split-name-list'}, {'step': 'strip-brace-terms'}] with analyzer(sanitizers=sanitizers) as anl: self.analyzer = anl yield anl def expect_name_terms(self, info, *expected_terms): tokens = self.analyzer.get_word_token_info(expected_terms) for token in tokens: assert token[2] is not None, "No token for {0}".format(token) assert eval(info['names']) == set((t[2] for t in tokens)) def process_named_place(self, names): return self.analyzer.process_place(PlaceInfo({'name': names})) def test_simple_names(self): info = self.process_named_place({'name': 'Soft bAr', 'ref': '34'}) self.expect_name_terms(info, '#Soft bAr', '#34', 'Soft', 'bAr', '34') @pytest.mark.parametrize('sep', [',' , ';']) def test_names_with_separator(self, sep): info = self.process_named_place({'name': sep.join(('New York', 'Big Apple'))}) self.expect_name_terms(info, '#New York', '#Big Apple', 'new', 'york', 
'big', 'apple') def test_full_names_with_bracket(self): info = self.process_named_place({'name': 'Houseboat (left)'}) self.expect_name_terms(info, '#Houseboat (left)', '#Houseboat', 'houseboat', 'left') def test_country_name(self, word_table): place = PlaceInfo({'name' : {'name': 'Norge'}, 'country_code': 'no', 'rank_address': 4, 'class': 'boundary', 'type': 'administrative'}) info = self.analyzer.process_place(place) self.expect_name_terms(info, '#norge', 'norge') assert word_table.get_country() == {('no', 'NORGE')} class TestPlaceAddress: @pytest.fixture(autouse=True) def setup(self, analyzer, sql_functions): hnr = {'step': 'clean-housenumbers', 'filter-kind': ['housenumber', 'conscriptionnumber', 'streetnumber']} with analyzer(trans=(":: upper()", "'🜵' > ' '"), sanitizers=[hnr]) as anl: self.analyzer = anl yield anl @pytest.fixture def getorcreate_hnr_id(self, temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION getorcreate_hnr_id(lookup_term TEXT) RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") def process_address(self, **kwargs): return self.analyzer.process_place(PlaceInfo({'address': kwargs})) def name_token_set(self, *expected_terms): tokens = self.analyzer.get_word_token_info(expected_terms) for token in tokens: assert token[2] is not None, "No token for {0}".format(token) return set((t[2] for t in tokens)) @pytest.mark.parametrize('pcode', ['12345', 'AB 123', '34-345']) def test_process_place_postcode(self, word_table, pcode): self.process_address(postcode=pcode) assert word_table.get_postcodes() == {pcode, } @pytest.mark.parametrize('hnr', ['123a', '1', '101']) def test_process_place_housenumbers_simple(self, hnr, getorcreate_hnr_id): info = self.process_address(housenumber=hnr) assert info['hnr'] == hnr.upper() assert info['hnr_tokens'] == "{-1}" def test_process_place_housenumbers_duplicates(self, getorcreate_hnr_id): info = self.process_address(housenumber='134', conscriptionnumber='134', 
streetnumber='99a') assert set(info['hnr'].split(';')) == set(('134', '99A')) assert info['hnr_tokens'] == "{-1,-2}" def test_process_place_housenumbers_cached(self, getorcreate_hnr_id): info = self.process_address(housenumber="45") assert info['hnr_tokens'] == "{-1}" info = self.process_address(housenumber="46") assert info['hnr_tokens'] == "{-2}" info = self.process_address(housenumber="41;45") assert eval(info['hnr_tokens']) == {-1, -3} info = self.process_address(housenumber="41") assert eval(info['hnr_tokens']) == {-3} def test_process_place_street(self): self.analyzer.process_place(PlaceInfo({'name': {'name' : 'Grand Road'}})) info = self.process_address(street='Grand Road') assert eval(info['street']) == self.name_token_set('#Grand Road') def test_process_place_nonexisting_street(self): info = self.process_address(street='Grand Road') assert info['street'] == '{}' def test_process_place_multiple_street_tags(self): self.analyzer.process_place(PlaceInfo({'name': {'name' : 'Grand Road', 'ref': '05989'}})) info = self.process_address(**{'street': 'Grand Road', 'street:sym_ul': '05989'}) assert eval(info['street']) == self.name_token_set('#Grand Road', '#05989') def test_process_place_street_empty(self): info = self.process_address(street='🜵') assert info['street'] == '{}' def test_process_place_street_from_cache(self): self.analyzer.process_place(PlaceInfo({'name': {'name' : 'Grand Road'}})) self.process_address(street='Grand Road') # request address again info = self.process_address(street='Grand Road') assert eval(info['street']) == self.name_token_set('#Grand Road') def test_process_place_place(self): info = self.process_address(place='Honu Lulu') assert eval(info['place']) == self.name_token_set('HONU', 'LULU', '#HONU LULU') def test_process_place_place_extra(self): info = self.process_address(**{'place:en': 'Honu Lulu'}) assert 'place' not in info def test_process_place_place_empty(self): info = self.process_address(place='🜵') assert 'place' not in info def 
test_process_place_address_terms(self): info = self.process_address(country='de', city='Zwickau', state='Sachsen', suburb='Zwickau', street='Hauptstr', full='right behind the church') city = self.name_token_set('ZWICKAU', '#ZWICKAU') state = self.name_token_set('SACHSEN', '#SACHSEN') result = {k: eval(v) for k,v in info['addr'].items()} assert result == {'city': city, 'suburb': city, 'state': state} def test_process_place_multiple_address_terms(self): info = self.process_address(**{'city': 'Bruxelles', 'city:de': 'Brüssel'}) result = {k: eval(v) for k,v in info['addr'].items()} assert result == {'city': self.name_token_set('Bruxelles', '#Bruxelles')} def test_process_place_address_terms_empty(self): info = self.process_address(country='de', city=' ', street='Hauptstr', full='right behind the church') assert 'addr' not in info class TestPlaceHousenumberWithAnalyser: @pytest.fixture(autouse=True) def setup(self, analyzer, sql_functions): hnr = {'step': 'clean-housenumbers', 'filter-kind': ['housenumber', 'conscriptionnumber', 'streetnumber']} with analyzer(trans=(":: upper()", "'🜵' > ' '"), sanitizers=[hnr], with_housenumber=True) as anl: self.analyzer = anl yield anl @pytest.fixture def getorcreate_hnr_id(self, temp_db_cursor): temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION create_analyzed_hnr_id(norm_term TEXT, lookup_terms TEXT[]) RETURNS INTEGER AS $$ SELECT -nextval('seq_word')::INTEGER; $$ LANGUAGE SQL""") def process_address(self, **kwargs): return self.analyzer.process_place(PlaceInfo({'address': kwargs})) def name_token_set(self, *expected_terms): tokens = self.analyzer.get_word_token_info(expected_terms) for token in tokens: assert token[2] is not None, "No token for {0}".format(token) return set((t[2] for t in tokens)) @pytest.mark.parametrize('hnr', ['123 a', '1', '101']) def test_process_place_housenumbers_simple(self, hnr, getorcreate_hnr_id): info = self.process_address(housenumber=hnr) assert info['hnr'] == hnr.upper() assert info['hnr_tokens'] 
== "{-1}" def test_process_place_housenumbers_duplicates(self, getorcreate_hnr_id): info = self.process_address(housenumber='134', conscriptionnumber='134', streetnumber='99a') assert set(info['hnr'].split(';')) == set(('134', '99 A')) assert info['hnr_tokens'] == "{-1,-2}" def test_process_place_housenumbers_cached(self, getorcreate_hnr_id): info = self.process_address(housenumber="45") assert info['hnr_tokens'] == "{-1}" info = self.process_address(housenumber="46") assert info['hnr_tokens'] == "{-2}" info = self.process_address(housenumber="41;45") assert eval(info['hnr_tokens']) == {-1, -3} info = self.process_address(housenumber="41") assert eval(info['hnr_tokens']) == {-3} class TestUpdateWordTokens: @pytest.fixture(autouse=True) def setup(self, tokenizer_factory, table_factory, placex_table, word_table): table_factory('search_name', 'place_id BIGINT, name_vector INT[]') self.tok = tokenizer_factory() @pytest.fixture def search_entry(self, temp_db_cursor): place_id = itertools.count(1000) def _insert(*args): temp_db_cursor.execute("INSERT INTO search_name VALUES (%s, %s)", (next(place_id), list(args))) return _insert @pytest.fixture(params=['simple', 'analyzed']) def add_housenumber(self, request, word_table): if request.param == 'simple': def _make(hid, hnr): word_table.add_housenumber(hid, hnr) elif request.param == 'analyzed': def _make(hid, hnr): word_table.add_housenumber(hid, [hnr]) return _make @pytest.mark.parametrize('hnr', ('1a', '1234567', '34 5')) def test_remove_unused_housenumbers(self, add_housenumber, word_table, hnr): word_table.add_housenumber(1000, hnr) assert word_table.count_housenumbers() == 1 self.tok.update_word_tokens() assert word_table.count_housenumbers() == 0 def test_keep_unused_numeral_housenumbers(self, add_housenumber, word_table): add_housenumber(1000, '5432') assert word_table.count_housenumbers() == 1 self.tok.update_word_tokens() assert word_table.count_housenumbers() == 1 def 
test_keep_housenumbers_from_search_name_table(self, add_housenumber, word_table, search_entry): add_housenumber(9999, '5432a') add_housenumber(9991, '9 a') search_entry(123, 9999, 34) assert word_table.count_housenumbers() == 2 self.tok.update_word_tokens() assert word_table.count_housenumbers() == 1 def test_keep_housenumbers_from_placex_table(self, add_housenumber, word_table, placex_table): add_housenumber(9999, '5432a') add_housenumber(9990, '34z') placex_table.add(housenumber='34z') placex_table.add(housenumber='25432a') assert word_table.count_housenumbers() == 2 self.tok.update_word_tokens() assert word_table.count_housenumbers() == 1 def test_keep_housenumbers_from_placex_table_hnr_list(self, add_housenumber, word_table, placex_table): add_housenumber(9991, '9 b') add_housenumber(9990, '34z') placex_table.add(housenumber='9 a;9 b;9 c') assert word_table.count_housenumbers() == 2 self.tok.update_word_tokens() assert word_table.count_housenumbers() == 1
25,317
Python
.py
495
41.183838
120
0.605452
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,691
test_place_sanitizer.py
osm-search_Nominatim/test/python/tokenizer/test_place_sanitizer.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for execution of the sanitztion step. """ import pytest from nominatim_db.errors import UsageError import nominatim_db.tokenizer.place_sanitizer as sanitizer from nominatim_db.data.place_info import PlaceInfo def test_placeinfo_clone_new_name(): place = sanitizer.PlaceName('foo', 'ki', 'su') newplace = place.clone(name='bar') assert place.name == 'foo' assert newplace.name == 'bar' assert newplace.kind == 'ki' assert newplace.suffix == 'su' def test_placeinfo_clone_merge_attr(): place = sanitizer.PlaceName('foo', 'ki', 'su') place.set_attr('a1', 'v1') place.set_attr('a2', 'v2') newplace = place.clone(attr={'a2': 'new', 'b2': 'foo'}) assert place.get_attr('a2') == 'v2' assert place.get_attr('b2') is None assert newplace.get_attr('a1') == 'v1' assert newplace.get_attr('a2') == 'new' assert newplace.get_attr('b2') == 'foo' def test_placeinfo_has_attr(): place = sanitizer.PlaceName('foo', 'ki', 'su') place.set_attr('a1', 'v1') assert place.has_attr('a1') assert not place.has_attr('whatever') def test_sanitizer_default(def_config): san = sanitizer.PlaceSanitizer([{'step': 'split-name-list'}], def_config) name, address = san.process_names(PlaceInfo({'name': {'name:de:de': '1;2;3'}, 'address': {'street': 'Bald'}})) assert len(name) == 3 assert all(isinstance(n, sanitizer.PlaceName) for n in name) assert all(n.kind == 'name' for n in name) assert all(n.suffix == 'de:de' for n in name) assert len(address) == 1 assert all(isinstance(n, sanitizer.PlaceName) for n in address) @pytest.mark.parametrize('rules', [None, []]) def test_sanitizer_empty_list(def_config, rules): san = sanitizer.PlaceSanitizer(rules, def_config) name, address = san.process_names(PlaceInfo({'name': {'name:de:de': '1;2;3'}})) assert len(name) == 1 assert all(isinstance(n, 
sanitizer.PlaceName) for n in name) def test_sanitizer_missing_step_definition(def_config): with pytest.raises(UsageError): san = sanitizer.PlaceSanitizer([{'id': 'split-name-list'}], def_config)
2,392
Python
.py
54
39.425926
84
0.667819
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,692
test_clean_tiger_tags.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_clean_tiger_tags.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for sanitizer that clean up TIGER tags. """ import pytest from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer from nominatim_db.data.place_info import PlaceInfo class TestCleanTigerTags: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self, addr): place = PlaceInfo({'address': addr}) _, outaddr = PlaceSanitizer([{'step': 'clean-tiger-tags'}], self.config).process_names(place) return sorted([(p.name, p.kind, p.suffix) for p in outaddr]) @pytest.mark.parametrize('inname,outname', [('Hamilton, AL', 'Hamilton'), ('Little, Borough, CA', 'Little, Borough')]) def test_well_formatted(self, inname, outname): assert self.run_sanitizer_on({'tiger:county': inname})\ == [(outname, 'county', 'tiger')] @pytest.mark.parametrize('name', ('Hamilton', 'Big, Road', '')) def test_badly_formatted(self, name): assert self.run_sanitizer_on({'tiger:county': name})\ == [(name, 'county', 'tiger')] def test_unmatched(self): assert self.run_sanitizer_on({'tiger:country': 'US'})\ == [('US', 'tiger', 'country')]
1,480
Python
.py
32
39.53125
101
0.645094
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,693
test_tag_analyzer_by_language.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_tag_analyzer_by_language.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the sanitizer that enables language-dependent analyzers. """ import pytest from nominatim_db.data.place_info import PlaceInfo from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer from nominatim_db.data.country_info import setup_country_config class TestWithDefaults: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self, country, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}, 'country_code': country}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language'}], self.config).process_names(place) return sorted([(p.name, p.kind, p.suffix, p.attr) for p in name]) def test_no_names(self): assert self.run_sanitizer_on('de') == [] def test_simple(self): res = self.run_sanitizer_on('fr', name='Foo',name_de='Zoo', ref_abc='M') assert res == [('Foo', 'name', None, {}), ('M', 'ref', 'abc', {'analyzer': 'abc'}), ('Zoo', 'name', 'de', {'analyzer': 'de'})] @pytest.mark.parametrize('suffix', ['DE', 'asbc']) def test_illegal_suffix(self, suffix): assert self.run_sanitizer_on('fr', **{'name_' + suffix: 'Foo'}) \ == [('Foo', 'name', suffix, {})] class TestFilterKind: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self, filt, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}, 'country_code': 'de'}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'filter-kind': filt}], self.config).process_names(place) return sorted([(p.name, p.kind, p.suffix, p.attr) for p in name]) def test_single_exact_name(self): res = self.run_sanitizer_on(['name'], name_fr='A', ref_fr='12', shortname_fr='C', name='D') assert res == [('12', 'ref', 
'fr', {}), ('A', 'name', 'fr', {'analyzer': 'fr'}), ('C', 'shortname', 'fr', {}), ('D', 'name', None, {})] def test_single_pattern(self): res = self.run_sanitizer_on(['.*name'], name_fr='A', ref_fr='12', namexx_fr='B', shortname_fr='C', name='D') assert res == [('12', 'ref', 'fr', {}), ('A', 'name', 'fr', {'analyzer': 'fr'}), ('B', 'namexx', 'fr', {}), ('C', 'shortname', 'fr', {'analyzer': 'fr'}), ('D', 'name', None, {})] def test_multiple_patterns(self): res = self.run_sanitizer_on(['.*name', 'ref'], name_fr='A', ref_fr='12', oldref_fr='X', namexx_fr='B', shortname_fr='C', name='D') assert res == [('12', 'ref', 'fr', {'analyzer': 'fr'}), ('A', 'name', 'fr', {'analyzer': 'fr'}), ('B', 'namexx', 'fr', {}), ('C', 'shortname', 'fr', {'analyzer': 'fr'}), ('D', 'name', None, {}), ('X', 'oldref', 'fr', {})] class TestDefaultCountry: @pytest.fixture(autouse=True) def setup_country(self, def_config): setup_country_config(def_config) self.config = def_config def run_sanitizer_append(self, mode, country, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}, 'country_code': country}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'use-defaults': mode, 'mode': 'append'}], self.config).process_names(place) assert all(isinstance(p.attr, dict) for p in name) assert all(len(p.attr) <= 1 for p in name) assert all(not p.attr or ('analyzer' in p.attr and p.attr['analyzer']) for p in name) return sorted([(p.name, p.attr.get('analyzer', '')) for p in name]) def run_sanitizer_replace(self, mode, country, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}, 'country_code': country}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'use-defaults': mode, 'mode': 'replace'}], self.config).process_names(place) assert all(isinstance(p.attr, dict) for p in name) assert all(len(p.attr) <= 1 for p in name) assert all(not p.attr or ('analyzer' in p.attr and p.attr['analyzer']) for p in name) 
return sorted([(p.name, p.attr.get('analyzer', '')) for p in name]) def test_missing_country(self): place = PlaceInfo({'name': {'name': 'something'}}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'use-defaults': 'all', 'mode': 'replace'}], self.config).process_names(place) assert len(name) == 1 assert name[0].name == 'something' assert name[0].suffix is None assert 'analyzer' not in name[0].attr def test_mono_unknown_country(self): expect = [('XX', '')] assert self.run_sanitizer_replace('mono', 'xx', name='XX') == expect assert self.run_sanitizer_append('mono', 'xx', name='XX') == expect def test_mono_monoling_replace(self): res = self.run_sanitizer_replace('mono', 'de', name='Foo') assert res == [('Foo', 'de')] def test_mono_monoling_append(self): res = self.run_sanitizer_append('mono', 'de', name='Foo') assert res == [('Foo', ''), ('Foo', 'de')] def test_mono_multiling(self): expect = [('XX', '')] assert self.run_sanitizer_replace('mono', 'ch', name='XX') == expect assert self.run_sanitizer_append('mono', 'ch', name='XX') == expect def test_all_unknown_country(self): expect = [('XX', '')] assert self.run_sanitizer_replace('all', 'xx', name='XX') == expect assert self.run_sanitizer_append('all', 'xx', name='XX') == expect def test_all_monoling_replace(self): res = self.run_sanitizer_replace('all', 'de', name='Foo') assert res == [('Foo', 'de')] def test_all_monoling_append(self): res = self.run_sanitizer_append('all', 'de', name='Foo') assert res == [('Foo', ''), ('Foo', 'de')] def test_all_multiling_append(self): res = self.run_sanitizer_append('all', 'ch', name='XX') assert res == [('XX', ''), ('XX', 'de'), ('XX', 'fr'), ('XX', 'it'), ('XX', 'rm')] def test_all_multiling_replace(self): res = self.run_sanitizer_replace('all', 'ch', name='XX') assert res == [('XX', 'de'), ('XX', 'fr'), ('XX', 'it'), ('XX', 'rm')] class TestCountryWithWhitelist: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def 
run_sanitizer_on(self, mode, country, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}, 'country_code': country}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'use-defaults': mode, 'mode': 'replace', 'whitelist': ['de', 'fr', 'ru']}], self.config).process_names(place) assert all(isinstance(p.attr, dict) for p in name) assert all(len(p.attr) <= 1 for p in name) assert all(not p.attr or ('analyzer' in p.attr and p.attr['analyzer']) for p in name) return sorted([(p.name, p.attr.get('analyzer', '')) for p in name]) def test_mono_monoling(self): assert self.run_sanitizer_on('mono', 'de', name='Foo') == [('Foo', 'de')] assert self.run_sanitizer_on('mono', 'pt', name='Foo') == [('Foo', '')] def test_mono_multiling(self): assert self.run_sanitizer_on('mono', 'ca', name='Foo') == [('Foo', '')] def test_all_monoling(self): assert self.run_sanitizer_on('all', 'de', name='Foo') == [('Foo', 'de')] assert self.run_sanitizer_on('all', 'pt', name='Foo') == [('Foo', '')] def test_all_multiling(self): assert self.run_sanitizer_on('all', 'ca', name='Foo') == [('Foo', 'fr')] assert self.run_sanitizer_on('all', 'ch', name='Foo') \ == [('Foo', 'de'), ('Foo', 'fr')] class TestWhiteList: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self, whitelist, **kwargs): place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()}}) name, _ = PlaceSanitizer([{'step': 'tag-analyzer-by-language', 'mode': 'replace', 'whitelist': whitelist}], self.config).process_names(place) assert all(isinstance(p.attr, dict) for p in name) assert all(len(p.attr) <= 1 for p in name) assert all(not p.attr or ('analyzer' in p.attr and p.attr['analyzer']) for p in name) return sorted([(p.name, p.attr.get('analyzer', '')) for p in name]) def test_in_whitelist(self): assert self.run_sanitizer_on(['de', 'xx'], ref_xx='123') == [('123', 'xx')] def test_not_in_whitelist(self): assert 
self.run_sanitizer_on(['de', 'xx'], ref_yy='123') == [('123', '')] def test_empty_whitelist(self): assert self.run_sanitizer_on([], ref_yy='123') == [('123', '')]
10,512
Python
.py
191
41.748691
88
0.513009
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,694
test_sanitizer_config.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_sanitizer_config.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for sanitizer configuration helper functions. """ import pytest from nominatim_db.errors import UsageError from nominatim_db.tokenizer.sanitizers.config import SanitizerConfig def test_string_list_default_empty(): assert SanitizerConfig().get_string_list('op') == [] def test_string_list_default_something(): assert SanitizerConfig().get_string_list('op', default=['a', 'b']) == ['a', 'b'] def test_string_list_value_string(): assert SanitizerConfig({'op': 't'}).get_string_list('op', default=['a', 'b']) == ['t'] def test_string_list_value_list(): assert SanitizerConfig({'op': ['1', '2']}).get_string_list('op') == ['1', '2'] def test_string_list_value_empty(): assert SanitizerConfig({'op': ''}).get_string_list('op', default=['a', 'b']) == [] def test_string_list_value_dict(): with pytest.raises(UsageError): SanitizerConfig({'op': {'1': 'a'}}).get_string_list('op') def test_string_list_value_int_list(): with pytest.raises(UsageError): SanitizerConfig({'op': [1, 2]}).get_string_list('op') @pytest.mark.parametrize('inp', ('fg34', 'f\\f', 'morning [glory]', '56.78')) def test_create_split_regex_no_params_unsplit(inp): regex = SanitizerConfig().get_delimiter() assert list(regex.split(inp)) == [inp] @pytest.mark.parametrize('inp,outp', [('here,there', ['here', 'there']), ('ying;;yang', ['ying', 'yang']), (';a; ;c;d,', ['', 'a', '', 'c', 'd', '']), ('1, 3 ,5', ['1', '3', '5']) ]) def test_create_split_regex_no_params_split(inp, outp): regex = SanitizerConfig().get_delimiter() assert list(regex.split(inp)) == outp @pytest.mark.parametrize('delimiter', ['.', '\\', '[]', ' ', '/.*+']) def test_create_split_regex_custom(delimiter): regex = SanitizerConfig({'delimiters': delimiter}).get_delimiter() assert list(regex.split(f'out{delimiter}house')) == ['out', 
'house'] assert list(regex.split('out,house')) == ['out,house'] def test_create_split_regex_empty_delimiter(): with pytest.raises(UsageError): regex = SanitizerConfig({'delimiters': ''}).get_delimiter() @pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', '')) def test_create_name_filter_no_param_no_default(inp): filt = SanitizerConfig({'filter-kind': 'place'}).get_filter('name') assert filt(inp) @pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', '')) def test_create_name_filter_no_param_default_pass_all(inp): filt = SanitizerConfig().get_filter('name', 'PASS_ALL') assert filt(inp) @pytest.mark.parametrize('inp', ('name', 'name:de', 'na\\me', '.*', '')) def test_create_name_filter_no_param_default_fail_all(inp): filt = SanitizerConfig().get_filter('name', 'FAIL_ALL') assert not filt(inp) def test_create_name_filter_no_param_default_invalid_string(): with pytest.raises(ValueError): filt = SanitizerConfig().get_filter('name', 'abc') def test_create_name_filter_no_param_default_empty_list(): with pytest.raises(ValueError): filt = SanitizerConfig().get_filter('name', []) @pytest.mark.parametrize('kind', ('de', 'name:de', 'ende')) def test_create_kind_filter_default_positive(kind): filt = SanitizerConfig().get_filter('filter-kind', ['.*de']) assert filt(kind) @pytest.mark.parametrize('kind', ('de', 'name:de', 'ende')) def test_create_kind_filter_default_negetive(kind): filt = SanitizerConfig().get_filter('filter-kind', ['.*fr']) assert not filt(kind) @pytest.mark.parametrize('kind', ('lang', 'lang:de', 'langxx')) def test_create_kind_filter_custom_regex_positive(kind): filt = SanitizerConfig({'filter-kind': 'lang.*'} ).get_filter('filter-kind', ['.*fr']) assert filt(kind) @pytest.mark.parametrize('kind', ('de ', '123', '', 'bedece')) def test_create_kind_filter_custom_regex_negative(kind): filt = SanitizerConfig({'filter-kind': '.*de'}).get_filter('filter-kind') assert not filt(kind) @pytest.mark.parametrize('kind', ('name', 'fr', 
'name:fr', 'frfr', '34')) def test_create_kind_filter_many_positive(kind): filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']} ).get_filter('filter-kind') assert filt(kind) @pytest.mark.parametrize('kind', ('name:de', 'fridge', 'a34', '.*', '\\')) def test_create_kind_filter_many_negative(kind): filt = SanitizerConfig({'filter-kind': ['.*fr', 'name', r'\d+']} ).get_filter('filter-kind') assert not filt(kind)
4,812
Python
.py
93
46.451613
90
0.639931
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,695
test_split_name_list.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_split_name_list.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the sanitizer that splits multivalue lists. """ import pytest from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer from nominatim_db.data.place_info import PlaceInfo from nominatim_db.errors import UsageError class TestSplitName: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self, **kwargs): place = PlaceInfo({'name': kwargs}) name, _ = PlaceSanitizer([{'step': 'split-name-list'}], self.config).process_names(place) return sorted([(p.name, p.kind, p.suffix) for p in name]) def sanitize_with_delimiter(self, delimiter, name): place = PlaceInfo({'name': {'name': name}}) san = PlaceSanitizer([{'step': 'split-name-list', 'delimiters': delimiter}], self.config) name, _ = san.process_names(place) return sorted([p.name for p in name]) def test_simple(self): assert self.run_sanitizer_on(name='ABC') == [('ABC', 'name', None)] assert self.run_sanitizer_on(name='') == [('', 'name', None)] def test_splits(self): assert self.run_sanitizer_on(name='A;B;C') == [('A', 'name', None), ('B', 'name', None), ('C', 'name', None)] assert self.run_sanitizer_on(short_name=' House, boat ') == [('House', 'short_name', None), ('boat', 'short_name', None)] def test_empty_fields(self): assert self.run_sanitizer_on(name='A;;B') == [('A', 'name', None), ('B', 'name', None)] assert self.run_sanitizer_on(name='A; ,B') == [('A', 'name', None), ('B', 'name', None)] assert self.run_sanitizer_on(name=' ;B') == [('B', 'name', None)] assert self.run_sanitizer_on(name='B,') == [('B', 'name', None)] def test_custom_delimiters(self): assert self.sanitize_with_delimiter(':', '12:45,3') == ['12', '45,3'] assert self.sanitize_with_delimiter('\\', 'a;\\b!#@ \\') == ['a;', 'b!#@'] assert 
self.sanitize_with_delimiter('[]', 'foo[to]be') == ['be', 'foo', 'to'] assert self.sanitize_with_delimiter(' ', 'morning sun') == ['morning', 'sun'] def test_empty_delimiter_set(self): with pytest.raises(UsageError): self.sanitize_with_delimiter('', 'abc') def test_no_name_list(def_config): place = PlaceInfo({'address': {'housenumber': '3'}}) name, address = PlaceSanitizer([{'step': 'split-name-list'}], def_config).process_names(place) assert not name assert len(address) == 1
3,026
Python
.py
56
42.821429
99
0.559891
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,696
test_clean_housenumbers.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_clean_housenumbers.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. """ Tests for the sanitizer that normalizes housenumbers. """ import pytest from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer from nominatim_db.data.place_info import PlaceInfo @pytest.fixture def sanitize(request, def_config): sanitizer_args = {'step': 'clean-housenumbers'} for mark in request.node.iter_markers(name="sanitizer_params"): sanitizer_args.update({k.replace('_', '-') : v for k,v in mark.kwargs.items()}) def _run(**kwargs): place = PlaceInfo({'address': kwargs}) _, address = PlaceSanitizer([sanitizer_args], def_config).process_names(place) return sorted([(p.kind, p.name) for p in address]) return _run def test_simple_number(sanitize): assert sanitize(housenumber='34') == [('housenumber', '34')] @pytest.mark.parametrize('number', ['1;2;3', '1,2,3', '1; 3 ,2', '2,,3,1', '1;2;3;;', ';3;2;1']) def test_housenumber_lists(sanitize, number): assert sanitize(housenumber=number) == \ [('housenumber', '1'), ('housenumber', '2'), ('housenumber', '3')] @pytest.mark.sanitizer_params(filter_kind=('number', 'streetnumber')) def test_filter_kind(sanitize): assert sanitize(housenumber='34', number='4', badnumber='65') == \ [('badnumber', '65'), ('housenumber', '34'), ('housenumber', '4')] @pytest.mark.parametrize('number', ('6523', 'n/a', '4')) def test_convert_to_name_converted(def_config, number): sanitizer_args = {'step': 'clean-housenumbers', 'convert-to-name': (r'\d+', 'n/a')} place = PlaceInfo({'address': {'housenumber': number}}) names, address = PlaceSanitizer([sanitizer_args], def_config).process_names(place) assert ('housenumber', number) in set((p.kind, p.name) for p in names) assert 'housenumber' not in set(p.kind for p in address) @pytest.mark.parametrize('number', ('a54', 'n.a', 'bow')) def 
test_convert_to_name_unconverted(def_config, number): sanitizer_args = {'step': 'clean-housenumbers', 'convert-to-name': (r'\d+', 'n/a')} place = PlaceInfo({'address': {'housenumber': number}}) names, address = PlaceSanitizer([sanitizer_args], def_config).process_names(place) assert 'housenumber' not in set(p.kind for p in names) assert ('housenumber', number) in set((p.kind, p.name) for p in address)
2,583
Python
.py
49
47.183673
87
0.658449
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,697
test_tag_japanese.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_tag_japanese.py
# SPDX-License-Identifier: GPL-3.0-or-later # # This file is part of Nominatim. (https://nominatim.org) # # Copyright (C) 2024 by the Nominatim developer community. # For a full list of authors see the git log. from typing import Mapping, Optional, List import pytest from nominatim_db.data.place_info import PlaceInfo from nominatim_db.data.place_name import PlaceName from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer class TestTagJapanese: @pytest.fixture(autouse=True) def setup_country(self, def_config): self.config = def_config def run_sanitizer_on(self,type, **kwargs): place = PlaceInfo({ 'address': kwargs, 'country_code': 'jp' }) sanitizer_args = {'step': 'tag-japanese'} _, address = PlaceSanitizer([sanitizer_args], self.config).process_names(place) tmp_list = [(p.name,p.kind) for p in address] return sorted(tmp_list) def test_on_address(self): res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz') assert res == [('bar','ref'),('baz','ref_abc'),('foo','name')] def test_housenumber(self): res = self.run_sanitizer_on('address', housenumber='2') assert res == [('2','housenumber')] def test_blocknumber(self): res = self.run_sanitizer_on('address', block_number='6') assert res == [('6','housenumber')] def test_neighbourhood(self): res = self.run_sanitizer_on('address', neighbourhood='8') assert res == [('8','place')] def test_quarter(self): res = self.run_sanitizer_on('address', quarter='kase') assert res==[('kase','place')] def test_housenumber_blocknumber(self): res = self.run_sanitizer_on('address', housenumber='2', block_number='6') assert res == [('6-2','housenumber')] def test_quarter_neighbourhood(self): res = self.run_sanitizer_on('address', quarter='kase', neighbourhood='8') assert res == [('kase8','place')] def test_blocknumber_housenumber_quarter(self): res = self.run_sanitizer_on('address', block_number='6', housenumber='2', quarter='kase') assert res == [('6-2','housenumber'),('kase','place')] def 
test_blocknumber_housenumber_quarter_neighbourhood(self): res = self.run_sanitizer_on('address', block_number='6', housenumber='2', neighbourhood='8') assert res == [('6-2','housenumber'),('8','place')] def test_blocknumber_quarter_neighbourhood(self): res = self.run_sanitizer_on('address',block_number='6', quarter='kase', neighbourhood='8') assert res == [('6','housenumber'),('kase8','place')] def test_blocknumber_quarter(self): res = self.run_sanitizer_on('address',block_number='6', quarter='kase') assert res == [('6','housenumber'),('kase','place')] def test_blocknumber_neighbourhood(self): res = self.run_sanitizer_on('address',block_number='6', neighbourhood='8') assert res == [('6','housenumber'),('8','place')] def test_housenumber_quarter_neighbourhood(self): res = self.run_sanitizer_on('address',housenumber='2', quarter='kase', neighbourhood='8') assert res == [('2','housenumber'),('kase8','place')] def test_housenumber_quarter(self): res = self.run_sanitizer_on('address',housenumber='2', quarter='kase') assert res == [('2','housenumber'),('kase','place')] def test_housenumber_blocknumber_neighbourhood_quarter(self): res = self.run_sanitizer_on('address', block_number='6', housenumber='2', quarter='kase', neighbourhood='8') assert res == [('6-2','housenumber'),('kase8','place')]
3,697
Python
.py
69
46.637681
116
0.64626
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,698
test_delete_tags.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_delete_tags.py
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the 'delete-tags' sanitizer which removes name and address
entries matching configurable type, kind, suffix, rank-address and
country-code filters.

(The previous docstring claimed this file tests housenumber
normalization - a copy-paste error from another test module.)
"""
import pytest

from nominatim_db.data.place_info import PlaceInfo
from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer


class TestWithDefault:
    """ Default behaviour without extra parameters: all name entries are
        deleted, address entries are kept.
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, type, **kwargs):
        # Underscores in kwarg names stand in for ':' in OSM tag suffixes.
        place = PlaceInfo({type: {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {'step': 'delete-tags'}

        name, address = PlaceSanitizer([sanitizer_args],
                                       self.config).process_names(place)

        return {
            'name': sorted([(p.name, p.kind, p.suffix or '') for p in name]),
            'address': sorted([(p.name, p.kind, p.suffix or '') for p in address])
        }

    def test_on_name(self):
        res = self.run_sanitizer_on('name', name='foo', ref='bar', ref_abc='baz')
        assert res.get('name') == []

    def test_on_address(self):
        res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz')
        assert res.get('address') == [('bar', 'ref', ''), ('baz', 'ref', 'abc'),
                                      ('foo', 'name', '')]


class TestTypeField:
    """ The 'type' parameter restricts deletion to names or addresses. """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, type, **kwargs):
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'type': type,
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind, p.suffix or '') for p in name])

    def test_name_type(self):
        res = self.run_sanitizer_on('name', name='foo', ref='bar', ref_abc='baz')
        assert res == []

    def test_address_type(self):
        # type='address' leaves the name list untouched.
        res = self.run_sanitizer_on('address', name='foo', ref='bar', ref_abc='baz')
        assert res == [('bar', 'ref', ''), ('baz', 'ref', 'abc'),
                       ('foo', 'name', '')]


class TestFilterKind:
    """ 'filter-kind' selects which kinds are deleted, by exact name
        or regular expression.
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, filt, **kwargs):
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'filter-kind': filt,
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind, p.suffix or '') for p in name])

    def test_single_exact_name(self):
        res = self.run_sanitizer_on(['name'], ref='foo', name='foo',
                                    name_abc='bar', ref_abc='bar')
        assert res == [('bar', 'ref', 'abc'), ('foo', 'ref', '')]

    def test_single_pattern(self):
        res = self.run_sanitizer_on(['.*name'],
                                    name_fr='foo', ref_fr='foo', namexx_fr='bar',
                                    shortname_fr='bar', name='bar')
        assert res == [('bar', 'namexx', 'fr'), ('foo', 'ref', 'fr')]

    def test_multiple_patterns(self):
        res = self.run_sanitizer_on(['.*name', 'ref'],
                                    name_fr='foo', ref_fr='foo', oldref_fr='foo',
                                    namexx_fr='bar', shortname_fr='baz', name='baz')
        assert res == [('bar', 'namexx', 'fr'), ('foo', 'oldref', 'fr')]


class TestRankAddress:
    """ 'rank_address' limits deletion to places with matching address
        rank; single values and ranges ('26-30') are supported.
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, rank_addr, **kwargs):
        # The test place always has rank_address 30.
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'rank_address': rank_addr
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind, p.suffix or '') for p in name])

    def test_single_rank(self):
        res = self.run_sanitizer_on('30', name='foo', ref='bar')
        assert res == []

    def test_single_rank_fail(self):
        res = self.run_sanitizer_on('28', name='foo', ref='bar')
        assert res == [('bar', 'ref', ''), ('foo', 'name', '')]

    def test_ranged_rank_pass(self):
        res = self.run_sanitizer_on('26-30', name='foo', ref='bar')
        assert res == []

    def test_ranged_rank_fail(self):
        res = self.run_sanitizer_on('26-29', name='foo', ref='bar')
        assert res == [('bar', 'ref', ''), ('foo', 'name', '')]

    def test_mixed_rank_pass(self):
        res = self.run_sanitizer_on(['4', '20-28', '30', '10-12'], name='foo', ref='bar')
        assert res == []

    def test_mixed_rank_fail(self):
        res = self.run_sanitizer_on(['4-8', '10', '26-29', '18'], name='foo', ref='bar')
        assert res == [('bar', 'ref', ''), ('foo', 'name', '')]


class TestSuffix:
    """ 'suffix' restricts deletion to entries whose suffix matches the
        given pattern(s).
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, suffix, **kwargs):
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'suffix': suffix,
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind, p.suffix or '') for p in name])

    def test_single_suffix(self):
        res = self.run_sanitizer_on('abc', name='foo', name_abc='foo',
                                    name_pqr='bar', ref='bar', ref_abc='baz')
        assert res == [('bar', 'name', 'pqr'), ('bar', 'ref', ''),
                       ('foo', 'name', '')]

    def test_multiple_suffix(self):
        res = self.run_sanitizer_on(['abc.*', 'pqr'], name='foo', name_abcxx='foo',
                                    ref_pqr='bar', name_pqrxx='baz')
        assert res == [('baz', 'name', 'pqrxx'), ('foo', 'name', '')]


class TestCountryCodes:
    """ 'country_code' restricts deletion to places in the given
        country or countries.
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, country_code, **kwargs):
        # The test place is always located in 'de'.
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'country_code': country_code,
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind) for p in name])

    def test_single_country_code_pass(self):
        res = self.run_sanitizer_on('de', name='foo', ref='bar')
        assert res == []

    def test_single_country_code_fail(self):
        res = self.run_sanitizer_on('in', name='foo', ref='bar')
        assert res == [('bar', 'ref'), ('foo', 'name')]

    def test_empty_country_code_list(self):
        # An empty filter list matches no country, so nothing is deleted.
        res = self.run_sanitizer_on([], name='foo', ref='bar')
        assert res == [('bar', 'ref'), ('foo', 'name')]

    def test_multiple_country_code_pass(self):
        res = self.run_sanitizer_on(['in', 'de', 'fr'], name='foo', ref='bar')
        assert res == []

    def test_multiple_country_code_fail(self):
        res = self.run_sanitizer_on(['in', 'au', 'fr'], name='foo', ref='bar')
        assert res == [('bar', 'ref'), ('foo', 'name')]


class TestAllParameters:
    """ Combinations of all filter parameters; an entry is only deleted
        when every filter matches.
    """

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        self.config = def_config

    def run_sanitizer_on(self, country_code, rank_addr, suffix, **kwargs):
        place = PlaceInfo({'name': {k.replace('_', ':'): v for k, v in kwargs.items()},
                           'country_code': 'de', 'rank_address': 30})

        sanitizer_args = {
            'step': 'delete-tags',
            'type': 'name',
            'filter-kind': ['name', 'ref'],
            'country_code': country_code,
            'rank_address': rank_addr,
            'suffix': suffix,
            'name': r'[\s\S]*',
        }

        name, _ = PlaceSanitizer([sanitizer_args], self.config).process_names(place)

        return sorted([(p.name, p.kind, p.suffix or '') for p in name])

    def test_string_arguments_pass(self):
        res = self.run_sanitizer_on('de', '25-30', r'[\s\S]*',
                                    name='foo', ref='foo', name_abc='bar', ref_abc='baz')
        assert res == []

    def test_string_arguments_fail(self):
        res = self.run_sanitizer_on('in', '25-30', r'[\s\S]*',
                                    name='foo', ref='foo', name_abc='bar', ref_abc='baz')
        assert res == [('bar', 'name', 'abc'), ('baz', 'ref', 'abc'),
                       ('foo', 'name', ''), ('foo', 'ref', '')]

    def test_list_arguments_pass(self):
        res = self.run_sanitizer_on(['de', 'in'], ['20-28', '30'],
                                    [r'abc.*', r'[\s\S]*'],
                                    name='foo', ref='foo', name_abcxx='bar', ref_pqr='baz')
        assert res == []

    def test_list_arguments_fail(self):
        res = self.run_sanitizer_on(['de', 'in'], ['14', '20-29'], [r'abc.*', r'pqr'],
                                    name='foo', ref_abc='foo',
                                    name_abcxx='bar', ref_pqr='baz')
        assert res == [('bar', 'name', 'abcxx'), ('baz', 'ref', 'pqr'),
                       ('foo', 'name', ''), ('foo', 'ref', 'abc')]

    def test_mix_arguments_pass(self):
        res = self.run_sanitizer_on('de', ['10', '20-28', '30'], r'[\s\S]*',
                                    name_abc='foo', ref_abc='foo',
                                    name_abcxx='bar', ref_pqr='baz')
        assert res == []

    def test_mix_arguments_fail(self):
        res = self.run_sanitizer_on(['de', 'in'], ['10', '20-28', '30'], r'abc.*',
                                    name='foo', ref='foo',
                                    name_pqr='bar', ref_pqr='baz')
        assert res == [('bar', 'name', 'pqr'), ('baz', 'ref', 'pqr'),
                       ('foo', 'name', ''), ('foo', 'ref', '')]
11,497
Python
.py
211
39.554502
100
0.490051
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)
14,699
test_strip_brace_terms.py
osm-search_Nominatim/test/python/tokenizer/sanitizers/test_strip_brace_terms.py
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for the sanitizer that handles braced suffixes.
"""
import pytest

from nominatim_db.tokenizer.place_sanitizer import PlaceSanitizer
from nominatim_db.data.place_info import PlaceInfo


class TestStripBrace:

    @pytest.fixture(autouse=True)
    def setup_country(self, def_config):
        # Make the default project configuration available to the tests.
        self.config = def_config

    def run_sanitizer_on(self, **kwargs):
        """ Apply the 'strip-brace-terms' sanitizer to a place whose names
            are given by the keyword arguments and return the sorted
            (name, kind, suffix) triples of the resulting name list.
        """
        info = PlaceInfo({'name': kwargs})
        sanitizer = PlaceSanitizer([{'step': 'strip-brace-terms'}], self.config)
        names, _ = sanitizer.process_names(info)
        return sorted((entry.name, entry.kind, entry.suffix) for entry in names)

    def test_no_braces(self):
        result = self.run_sanitizer_on(name='foo', ref='23')
        assert result == [('23', 'ref', None), ('foo', 'name', None)]

    def test_simple_braces(self):
        # A braced suffix yields an additional variant without the braces.
        result = self.run_sanitizer_on(name='Halle (Saale)', ref='3')
        assert result == [('3', 'ref', None), ('Halle', 'name', None),
                          ('Halle (Saale)', 'name', None)]
        # An unclosed brace is still treated as the start of a suffix.
        result = self.run_sanitizer_on(name='ack ( bar')
        assert result == [('ack', 'name', None), ('ack ( bar', 'name', None)]

    def test_only_braces(self):
        # A name consisting only of a braced term is left untouched.
        assert self.run_sanitizer_on(name='(maybe)') == [('(maybe)', 'name', None)]

    def test_double_braces(self):
        result = self.run_sanitizer_on(name='a((b))')
        assert result == [('a', 'name', None), ('a((b))', 'name', None)]
        result = self.run_sanitizer_on(name='a (b) (c)')
        assert result == [('a', 'name', None), ('a (b) (c)', 'name', None)]


def test_no_names(def_config):
    # A place without names keeps an empty name list; the address is untouched.
    info = PlaceInfo({'address': {'housenumber': '3'}})
    names, address = PlaceSanitizer([{'step': 'strip-brace-terms'}],
                                    def_config).process_names(info)

    assert not names
    assert len(address) == 1
2,034
Python
.py
40
41.4
100
0.57634
osm-search/Nominatim
3,062
711
96
GPL-3.0
9/5/2024, 5:11:58 PM (Europe/Amsterdam)