diff --git a/codegen/allapigen.py b/codegen/allapigen.py
index dfedf169521..a33df8a1766 100644
--- a/codegen/allapigen.py
+++ b/codegen/allapigen.py
@@ -4,7 +4,7 @@
 from ansys.fluent.core import CODEGEN_OUTDIR, FluentMode, FluentVersion, launch_fluent
 from ansys.fluent.core.codegen import StaticInfoType, allapigen, print_fluent_version
-from ansys.fluent.core.search import _search
+from ansys.fluent.core.search import _generate_api_data
 from ansys.fluent.core.utils.fluent_version import get_version_for_file_name
 
 if __name__ == "__main__":
@@ -66,4 +66,4 @@
     allapigen.generate(version, static_infos)
     t2 = time()
     print(f"Time to generate APIs: {t2 - t1:.2f} seconds")
-    _search("", version=version, write_api_tree_data=True)
+    _generate_api_data(version=version)
diff --git a/src/ansys/fluent/core/__init__.py b/src/ansys/fluent/core/__init__.py
index 5ee31d5a345..3ddace72dba 100644
--- a/src/ansys/fluent/core/__init__.py
+++ b/src/ansys/fluent/core/__init__.py
@@ -128,5 +128,8 @@ def version_info() -> str:
 # Whether to skip health check
 CHECK_HEALTH = True
 
+# Whether to print search results
+PRINT_SEARCH_RESULTS = True
+
 # Whether to clear environment variables related to Fluent parallel mode
 CLEAR_FLUENT_PARA_ENVS = False
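A minimal usage sketch (not part of the patch) of the new flag: it switches ``search()`` between printing matches and returning them as a list (see the rewritten ``_print_search_results`` further below), assuming codegen has already produced API tree data.

import ansys.fluent.core as pyfluent

pyfluent.PRINT_SEARCH_RESULTS = False   # collect matches instead of printing them
results = pyfluent.search("font")       # e.g. ['<solver_session>...Font (Object)', ...]
pyfluent.PRINT_SEARCH_RESULTS = True    # restore the default printing behavior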
diff --git a/src/ansys/fluent/core/search.py b/src/ansys/fluent/core/search.py
index 32411f4b358..1cf1c8b5fbe 100644
--- a/src/ansys/fluent/core/search.py
+++ b/src/ansys/fluent/core/search.py
@@ -8,25 +8,17 @@
 from pathlib import Path
 import pickle
 import re
-import sys
-from typing import Any
 import warnings
 
-from ansys.fluent.core.solver import flobject
+import ansys.fluent.core as pyfluent
 from ansys.fluent.core.solver.error_message import closest_allowed_names
 from ansys.fluent.core.utils.fluent_version import (
     FluentVersion,
     get_version_for_file_name,
 )
-from ansys.fluent.core.workflow import (
-    BaseTask,
-    ClassicWorkflow,
-    TaskContainer,
-    Workflow,
-)
 
 
-def _get_api_tree_data_file():
-    """Get API tree data file."""
+def _get_api_tree_data_file_path():
+    """Get the API tree data file path."""
     from ansys.fluent.core import CODEGEN_OUTDIR
@@ -62,142 +54,24 @@ def _remove_suffix(input: str, suffix):
 _meshing_rules = ["workflow", "meshing", "PartManagement", "PMFileManagement"]
 
 
-def _get_version_path_prefix_from_obj(obj: Any):
-    from ansys.fluent.core.services.datamodel_se import PyMenu, PyNamedObjectContainer
-    from ansys.fluent.core.services.datamodel_tui import TUIMenu
-    from ansys.fluent.core.session_pure_meshing import PureMeshing
-    from ansys.fluent.core.session_solver import Solver
-
-    path = None
-    version = None
-    prefix = None
-    if isinstance(obj, PureMeshing):
-        path = ["<meshing_session>"]
-        version = get_version_for_file_name(obj.get_fluent_version().value)
-        prefix = "<search_root>"
-    elif isinstance(obj, Solver):
-        path = ["<solver_session>"]
-        version = get_version_for_file_name(obj.get_fluent_version().value)
-        prefix = "<search_root>"
-    elif isinstance(obj, TUIMenu):
-        module = obj.__class__.__module__
-        path = [
-            (
-                "<meshing_session>"
-                if module.startswith("meshing")
-                else "<solver_session>"
-            ),
-            "tui",
-        ]
-        path.extend(obj._path)
-        version = module.rsplit("_", 1)[-1]
-        prefix = "<search_root>"
-    elif isinstance(obj, (ClassicWorkflow, Workflow)):
-        path = ["<meshing_session>", obj.rules]
-        module = obj._workflow.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = "<search_root>"
-    elif isinstance(obj, BaseTask):
-        path = ["<meshing_session>", obj.rules]
-        path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path])
-        module = obj._workflow.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = "<search_root>"
-    elif isinstance(obj, TaskContainer):
-        path = ["<meshing_session>", obj.rules]
-        path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path])
-        path[-1] = f"{path[-1]}:"
-        module = obj._container._workflow.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = '<search_root>["<name>"]'
-    elif isinstance(obj, PyMenu):
-        rules = obj.rules
-        path = ["<meshing_session>" if rules in _meshing_rules else "<solver_session>"]
-        path.append(rules)
-        path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path])
-        module = obj.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = "<search_root>"
-    elif isinstance(obj, PyNamedObjectContainer):
-        rules = obj.rules
-        path = ["<meshing_session>" if rules in _meshing_rules else "<solver_session>"]
-        path.append(rules)
-        path.extend([f"{k[0]}:" if k[1] else k[0] for k in obj.path])
-        path[-1] = f"{path[-1]}:"
-        module = obj.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = '<search_root>["<name>"]'
-    elif isinstance(obj, flobject.Group):
-        module = obj.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = "<search_root>"
-        path = ["<solver_session>"]
-        # Cannot deduce the whole path without api_tree
-    elif isinstance(obj, flobject.NamedObject):
-        module = obj.__class__.__module__
-        version = module.rsplit("_", 1)[-1]
-        prefix = '<search_root>["<name>"]'
-        path = ["<solver_session>"]
-        # Cannot deduce the whole path without api_tree
-    return version, path, prefix
-
-
-def _search(
-    word: str,
-    match_whole_word: bool = False,
-    match_case: bool = False,
+def _generate_api_data(
     version: str | None = None,
-    search_root: Any | None = None,
-    write_api_tree_data: bool | None = False,
 ):
-    """Search for a word through the Fluent's object hierarchy.
+    """Generate API tree data.
 
     Parameters
     ----------
-    word : str
-        Word to search for.
-    match_whole_word : bool, optional
-        Whether to match whole word, by default False
-    match_case : bool, optional
-        Whether to match case, by default False
     version : str, optional
-        Fluent version to search in. The default is ``None``. If ``None``,
-        it searches in the latest version for which codegen was run.
-    search_root : Any, optional
-        The root object within which the search is performed.
-        It can be a session object or any API object within a session.
-        The default is ``None``. If ``None``, it searches everything.
-    write_api_tree_data: bool, optional
-        Whether to write the API tree data.
+        Fluent version for which API data is generated. The default is
+        ``None``, in which case data is generated for the latest version
+        for which codegen was run.
-
-    Examples
-    --------
-    >>> import ansys.fluent.core as pyfluent
-    >>> pyfluent.search("geometry")
-    <meshing_session>.tui.file.import_.cad_geometry (Command)
-    <meshing_session>.tui.display.update_scene.select_geometry (Command)
-    <meshing_session>.meshing.ImportGeometry (Command)
-    <meshing_session>.meshing.LoadCADGeometry (Command)
-    <solver_session>.tui.solve.initialize.compute_defaults.geometry (Command)
-    <solver_session>.tui.report.reference_values.compute.geometry (Command)
-    <solver_session>.tui.define.geometry (Command)
-    <solver_session>.tui.mesh.geometry (Object)
-    <solver_session>.setup.boundary_conditions.geometry["<name>"] (Object)
-    <solver_session>.setup.geometry (Object)
-    <solver_session>.solution.report_definitions.surface["<name>"].geometry (Parameter)
-    <solver_session>.solution.report_definitions.volume["<name>"].geometry (Parameter)
-    <solver_session>.results.graphics.mesh["<name>"].geometry (Parameter)
-    <solver_session>.results.graphics.contour["<name>"].geometry (Parameter)
     """
-    api_objects = []
-    api_tui_objects = []
-    api_object_names = []
-    results = []
+    api_objects = set()
+    api_tui_objects = set()
+    api_object_names = set()
     if version:
         version = get_version_for_file_name(version)
-    root_version, root_path, prefix = _get_version_path_prefix_from_obj(search_root)
-    if search_root and not prefix:
-        return
     if not version:
         for fluent_version in FluentVersion:
            version = get_version_for_file_name(fluent_version.value)
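For orientation, a sketch of the record that ``_generate_api_data`` assembles; the keys mirror the hunks below, while the sample entries are invented.

# Hypothetical contents of the JSON file written by _generate_api_data():
api_tree_data = {
    "api_objects": ['<solver_session>.setup.geometry (Object)'],
    "api_tui_objects": ['<solver_session>.tui.define.geometry (Command)'],
    "all_api_object_names": ["geometry"],
    "all_api_object_name_synsets": {"geometry": ["geometry.n.01"]},
}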
@@ -207,35 +81,7 @@ def _search(
         with open(api_tree_file, "rb") as f:
             api_tree = pickle.load(f)
 
-    if isinstance(search_root, (flobject.Group, flobject.NamedObject)):
-        path = root_path + [
-            flobject.to_python_name(x) for x in search_root.path.split("/")
-        ]
-        root_path = []
-        tree = api_tree
-        while path:
-            p = path.pop(0)
-            if p in tree:
-                tree = tree[p]
-                root_path.append(p)
-            elif f"{p}:" in tree:
-                tree = tree[f"{p}:"]
-                root_path.append(f"{p}:")
-                if path:
-                    path.pop(0)
-            else:
-                return
-
-    def inner(tree, path, root_path):
-        if root_path:
-            path = prefix
-            while root_path:
-                p = root_path.pop(0)
-                if p in tree:
-                    tree = tree[p]
-                else:
-                    return
-
+    def inner(tree, path):
         for k, v in tree.items():
             if k in ("<meshing_session>", "<solver_session>"):
                 next_path = k
@@ -249,21 +95,20 @@ def inner(tree, path, root_path):
             else:
                 next_path = f"{path}.{k}"
             type_ = "Object" if isinstance(v, Mapping) else v
-            api_object_names.append(k)
+            api_object_names.add(k)
             if "tui" in next_path:
-                api_tui_objects.append(f"{next_path} ({type_})")
+                api_tui_objects.add(f"{next_path} ({type_})")
             else:
-                api_objects.append(f"{next_path} ({type_})")
-            if _match(k, word, match_whole_word, match_case):
-                results.append(f"{next_path} ({type_})")
+                api_objects.add(f"{next_path} ({type_})")
             if isinstance(v, Mapping):
-                inner(v, next_path, root_path)
+                inner(v, next_path)
 
-    inner(api_tree, "", root_path)
+    inner(api_tree, "")
 
     api_tree_data = dict()
 
-    api_tree_data["api_objects"] = sorted(api_objects)
-    api_tree_data["api_tui_objects"] = sorted(api_tui_objects)
+    api_tree_data["api_objects"] = sorted(list(api_objects))
+    api_tree_data["api_tui_objects"] = sorted(list(api_tui_objects))
+    api_tree_data["all_api_object_names"] = sorted(list(api_object_names))
 
     def _write_api_tree_file(api_tree_data: dict, api_object_names: list):
         from nltk.corpus import wordnet as wn
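A minimal sketch of the WordNet lookups used in the hunk below (requires ``nltk`` with its ``wordnet`` corpus downloaded); the results shown are illustrative.

from nltk.corpus import wordnet as wn

synsets = wn.synsets("font", lang="eng")       # e.g. [Synset('font.n.01'), ...]
names = sorted({s.name() for s in synsets})    # full synset names such as 'font.n.01'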
@@ -276,35 +121,31 @@ def _write_api_tree_file(api_tree_data: dict, api_object_names: list):
         all_api_object_name_synsets = dict()
 
         for name in api_object_names:
-            api_object_name_synsets = (
-                wn.synsets(name.decode("utf-8"), lang="eng")
-                if sys.version_info[0] < 3
-                else wn.synsets(name, lang="eng")
-            )
-            synset_names = []
+            api_object_name_synsets = wn.synsets(name, lang="eng")
+            synset_names = set()
             for api_object_name_synset in api_object_name_synsets:
-                synset_names.append(api_object_name_synset.name().split(".")[0])
-            all_api_object_name_synsets[name] = synset_names
+                synset_names.add(api_object_name_synset.name())
+            if synset_names:
+                all_api_object_name_synsets[name] = sorted(list(synset_names))
         api_tree_data["all_api_object_name_synsets"] = all_api_object_name_synsets
 
-        api_tree_file = _get_api_tree_data_file()
-        api_tree_file.touch()
-        with open(api_tree_file, "w") as json_file:
+        api_tree_file_path = _get_api_tree_data_file_path()
+        api_tree_file_path.touch()
+        with open(api_tree_file_path, "w") as json_file:
             json.dump(api_tree_data, json_file)
 
-    if write_api_tree_data:
-        _write_api_tree_file(
-            api_tree_data=api_tree_data, api_object_names=list(api_object_names)
-        )
-    return results
+    _write_api_tree_file(
+        api_tree_data=api_tree_data, api_object_names=list(api_object_names)
+    )
+    api_tree_file.unlink()
 
 
 @functools.cache
 def _get_api_tree_data():
     """Get API tree data."""
-    api_tree_data_file = _get_api_tree_data_file()
-    if api_tree_data_file.exists():
-        json_file = open(api_tree_data_file, "r")
+    api_tree_data_file_path = _get_api_tree_data_file_path()
+    if api_tree_data_file_path.exists():
+        json_file = open(api_tree_data_file_path, "r")
         api_tree_data = json.load(json_file)
         return api_tree_data
@@ -319,13 +160,19 @@ def _print_search_results(queries: list, api_tree_data: dict):
     api_tree_data: dict
         All API object data.
     """
+    results = []
     api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data()
     api_tree_datas = [api_tree_data["api_objects"], api_tree_data["api_tui_objects"]]
     for api_tree_data in api_tree_datas:
         for query in queries:
             for api_object in api_tree_data:
-                if query in api_object:
-                    print(api_object)
+                if api_object.split()[0].endswith(query):
+                    results.append(api_object)
+    if pyfluent.PRINT_SEARCH_RESULTS:
+        for result in results:
+            print(result)
+    elif results:
+        return results
 
 
 def _get_wildcard_matches_for_word_from_names(word: str, names: list):
@@ -364,10 +211,10 @@ def _search_wildcard(search_string: str, api_tree_data: dict):
     """
     api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data()
     queries = _get_wildcard_matches_for_word_from_names(
-        search_string, names=list(api_tree_data["all_api_object_name_synsets"].keys())
+        search_string, names=api_tree_data["all_api_object_names"]
     )
     if queries:
-        _print_search_results(queries, api_tree_data=api_tree_data)
+        return _print_search_results(queries, api_tree_data=api_tree_data)
 
 
 def _get_exact_match_for_word_from_names(
@@ -387,7 +234,7 @@ def _get_exact_match_for_word_from_names(
     -------
     List of exact match.
     """
-    return [name for name in names if word == name]
+    return list({name for name in names if word == name or word in name})
 
 
 def _get_capitalize_match_for_word_from_names(
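The rewritten ``_print_search_results`` above matches queries against the dotted path rather than the whole line; a sketch of the new predicate with a made-up entry:

api_object = '<solver_session>.parent.child (Parameter)'
query = "child"
# Old behavior: substring test over the whole line ("child" in api_object).
# New behavior: the dotted path (the first token) must end with the query.
assert api_object.split()[0].endswith(query)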
@@ -459,7 +306,7 @@ def _get_close_matches_for_word_from_names(
 def _search_whole_word(
     search_string: str,
     match_case: bool = False,
-    match_whole_word: bool = False,
+    match_whole_word: bool = True,
     api_tree_data: dict = None,
 ):
     """Perform exact search for a word through the Fluent's object hierarchy.
@@ -483,43 +330,43 @@ def _search_whole_word(
     """
     api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data()
     queries = []
-    if match_case and match_whole_word:
+    if not match_case and not match_whole_word:
         queries.extend(
-            _get_exact_match_for_word_from_names(
+            _get_capitalize_match_for_word_from_names(
                 search_string,
-                names=list(api_tree_data["all_api_object_name_synsets"].keys()),
+                names=api_tree_data["all_api_object_names"],
             )
         )
-    elif match_case:
         queries.extend(
             _get_match_case_for_word_from_names(
                 search_string,
-                names=list(api_tree_data["all_api_object_name_synsets"].keys()),
+                names=api_tree_data["all_api_object_names"],
             )
         )
-    elif match_whole_word:
-        for word in [search_string, search_string.capitalize()]:
-            queries.extend(
-                _get_exact_match_for_word_from_names(
-                    word,
-                    names=list(api_tree_data["all_api_object_name_synsets"].keys()),
-                )
-            )
-    elif not match_case and not match_whole_word:
+    elif match_case and match_whole_word:
         queries.extend(
-            _get_capitalize_match_for_word_from_names(
+            _get_exact_match_for_word_from_names(
                 search_string,
-                names=list(api_tree_data["all_api_object_name_synsets"].keys()),
+                names=api_tree_data["all_api_object_names"],
             )
         )
+    elif match_case:
         queries.extend(
             _get_match_case_for_word_from_names(
                 search_string,
-                names=list(api_tree_data["all_api_object_name_synsets"].keys()),
+                names=api_tree_data["all_api_object_names"],
             )
         )
+    elif match_whole_word:
+        for word in [search_string, search_string.capitalize()]:
+            queries.extend(
+                _get_exact_match_for_word_from_names(
+                    word,
+                    names=api_tree_data["all_api_object_names"],
+                )
+            )
     if queries:
-        _print_search_results(queries, api_tree_data=api_tree_data)
+        return _print_search_results(queries, api_tree_data=api_tree_data)
 
 
 def _download_nltk_data():
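A sketch of how the reordered branches above dispatch on the two flags (hypothetical call sites; actual matches depend on the generated API data):

from ansys.fluent.core.search import _search_whole_word

_search_whole_word("font", match_case=True, match_whole_word=True)   # exact match, case-sensitive
_search_whole_word("font", match_case=True, match_whole_word=False)  # case-sensitive match
_search_whole_word("font", match_whole_word=True)                    # whole word; tries "font" and "Font"
_search_whole_word("font", match_case=False, match_whole_word=False) # loosest: capitalize/case-insensitive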
@@ -567,32 +414,31 @@ def _search_semantic(search_string: str, language: str, api_tree_data: dict):
     api_tree_data = api_tree_data if api_tree_data else _get_api_tree_data()
     similar_keys = set()
-    search_string_synsets = (
-        wn.synsets(search_string.decode("utf-8"), lang=language)
-        if sys.version_info[0] < 3
-        else wn.synsets(search_string, lang=language)
-    )
+    search_string_synsets = set(wn.synsets(search_string, lang=language))
     for api_object_name, api_object_synset_names in list(
         api_tree_data["all_api_object_name_synsets"].items()
     ):
-        for search_string_synset in search_string_synsets:
-            for api_object_synset_name in api_object_synset_names:
-                search_string_synset_name = search_string_synset.name().split(".")[0]
-                if (
-                    search_string in api_object_synset_name
-                    or search_string_synset_name in api_object_synset_name
-                ):
-                    similar_keys.add(api_object_synset_name + "*")
+        api_object_synsets = {
+            wn.synset(api_object_synset_name)
+            for api_object_synset_name in api_object_synset_names
+        }
+        if search_string_synsets & api_object_synsets:
+            similar_keys.add(api_object_name + "*")
     if similar_keys:
+        results = []
         for key in similar_keys:
-            _search_wildcard(key, api_tree_data)
+            result = _search_wildcard(key, api_tree_data)
+            if result:
+                results.extend(result)
+        if results:
+            return results
     else:
         queries = _get_close_matches_for_word_from_names(
             search_string,
-            names=list(api_tree_data["all_api_object_name_synsets"].keys()),
+            names=api_tree_data["all_api_object_names"],
         )
         if queries:
-            _print_search_results(queries, api_tree_data=api_tree_data)
+            return _print_search_results(queries, api_tree_data=api_tree_data)
 
 
 def search(
@@ -646,47 +492,35 @@
             "``wildcard=True`` matches wildcard pattern.",
             UserWarning,
         )
-    elif language and match_whole_word:
-        warnings.warn(
-            "``match_whole_word=True`` matches the whole word (case insensitive).",
-            UserWarning,
-        )
-    elif match_whole_word:
-        warnings.warn(
-            "``match_whole_word=True`` matches the whole word (case insensitive).",
-            UserWarning,
-        )
-    elif match_case:
-        warnings.warn(
-            "``match_case=True`` matches the whole word (case sensitive).",
-            UserWarning,
-        )
 
     api_tree_data = _get_api_tree_data()
 
-    try:
-        _search_semantic(search_string, language, api_tree_data=api_tree_data)
-    except ModuleNotFoundError:
-        pass
-    except LookupError:
-        _download_nltk_data()
-        _search_semantic(search_string, language, api_tree_data=api_tree_data)
     if wildcard:
-        _search_wildcard(
+        return _search_wildcard(
             search_string,
             api_tree_data=api_tree_data,
         )
     elif match_whole_word:
         if not match_case:
-            _search_whole_word(
+            return _search_whole_word(
                 search_string, match_whole_word=True, api_tree_data=api_tree_data
             )
         else:
-            _search_whole_word(
-                search_string, match_case=True, api_tree_data=api_tree_data
+            return _search_whole_word(
+                search_string,
+                match_case=True,
+                match_whole_word=True,
+                api_tree_data=api_tree_data,
             )
     else:
-        _search_whole_word(
-            search_string, match_whole_word=True, api_tree_data=api_tree_data
-        )
+        try:
+            return _search_semantic(
+                search_string, language, api_tree_data=api_tree_data
+            )
+        except ModuleNotFoundError:
+            pass
+        except LookupError:
+            _download_nltk_data()
+            return _search_semantic(
+                search_string, language, api_tree_data=api_tree_data
+            )
diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py
index 1feab0d59c6..f8b3137b58f 100644
--- a/src/ansys/fluent/core/solver/flobject.py
+++ b/src/ansys/fluent/core/solver/flobject.py
@@ -54,7 +54,7 @@
 from .error_message import allowed_name_error_message, allowed_values_error
 from .flunits import UnhandledQuantity, get_si_unit_for_fluent_quantity
-from .settings_external import expand_api_file_argument, use_search
+from .settings_external import expand_api_file_argument
 
 
 def _ansys_units():
@@ -1135,27 +1135,18 @@ def __getattribute__(self, name):
                 attr._check_stable()
             return attr
         except AttributeError as ex:
-            modified_search_results = []
-            if use_search(
-                codegen_outdir=pyfluent.CODEGEN_OUTDIR,
-                version=super().__getattribute__("version"),
-            ):
-                search_results = pyfluent.utils._search(
-                    word=name,
-                    search_root=self,
-                    match_case=False,
-                    match_whole_word=False,
-                )
-                if search_results:
-                    for search_result in search_results:
-                        search_result = search_result.replace(
-                            "<search_root>", self.__class__.__name__
-                        )
-                        modified_search_results.append(search_result)
+            pyfluent.PRINT_SEARCH_RESULTS = False
+            search_results = pyfluent.utils.search(
+                search_string=name,
+                match_case=False,
+                match_whole_word=False,
+            )
+            pyfluent.PRINT_SEARCH_RESULTS = True
+            results = search_results if search_results else []
             error_msg = allowed_name_error_message(
                 trial_name=name,
                 message=ex.args[0],
-                search_results=modified_search_results,
+                search_results=results,
             )
             ex.args = (error_msg,)
             raise
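One hedged refinement of the flag toggling above, not part of the patch: if ``search()`` raised here, ``PRINT_SEARCH_RESULTS`` would remain ``False``. A ``try``/``finally`` guard keeps the flag balanced:

pyfluent.PRINT_SEARCH_RESULTS = False
try:
    search_results = pyfluent.utils.search(
        search_string=name, match_case=False, match_whole_word=False
    )
finally:
    pyfluent.PRINT_SEARCH_RESULTS = True   # restored even if search() raises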
diff --git a/src/ansys/fluent/core/solver/settings_external.py b/src/ansys/fluent/core/solver/settings_external.py
index 89940e4e3e1..1ebd512944e 100644
--- a/src/ansys/fluent/core/solver/settings_external.py
+++ b/src/ansys/fluent/core/solver/settings_external.py
@@ -1,8 +1,5 @@
 """Miscellaneous utility functions."""
 
-import os
-import re
-
 
 def expand_api_file_argument(command_name, value, kwargs):
     """Expand API file argument."""
@@ -14,28 +11,3 @@ def expand_api_file_argument(command_name, value, kwargs):
         return [value, data_file]
     else:
         return [value]
-
-
-def use_search(codegen_outdir: str, version: str):
-    """Whether to use ``_search()`` in the error handling.
-
-    Parameters
-    ----------
-    codegen_outdir: str
-        Codegen directory.
-    version: str
-        Fluent version.
-    """
-    fluent_version_str = version
-    fluent_version_int = int(fluent_version_str.replace(".", "")[0:3])
-    api_tree_files = [
-        file for file in os.listdir(codegen_outdir) if file.endswith("pickle")
-    ]
-    api_tree_file_versions = [
-        int(re.findall(r"\d+", file)[0]) for file in api_tree_files
-    ]
-    latest_api_tree_version = max(api_tree_file_versions)
-    if len(api_tree_files) == 1 and fluent_version_int == latest_api_tree_version:
-        return True
-    else:
-        return False
diff --git a/src/ansys/fluent/core/utils/__init__.py b/src/ansys/fluent/core/utils/__init__.py
index ee9f5549fea..b6e5bdd73d2 100644
--- a/src/ansys/fluent/core/utils/__init__.py
+++ b/src/ansys/fluent/core/utils/__init__.py
@@ -5,7 +5,7 @@
 from pathlib import Path
 import sys
 
-from ansys.fluent.core.search import _search  # noqa: F401
+from ansys.fluent.core.search import search  # noqa: F401
 
 logger = logging.getLogger("pyfluent.general")
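With ``search`` re-exported here, typical public calls look like the following sketch (modes taken from the ``search()`` dispatch above; output abridged, and the ``"jpn"`` language code is an assumption based on the Japanese semantic-search test below):

import ansys.fluent.core as pyfluent

pyfluent.search("font")                         # semantic search is the default path
pyfluent.search("Font", match_whole_word=True)  # whole-word match
pyfluent.search("font", match_case=True)        # case-sensitive match
pyfluent.search("*font*", wildcard=True)        # wildcard pattern
pyfluent.search("フォント", language="jpn")     # WordNet language code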
diff --git a/tests/test_search.py b/tests/test_search.py
index ac541d1c38d..da1894682e7 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -1,3 +1,5 @@
+import sys
+
 import pytest
 
 import ansys.fluent.core as pyfluent
@@ -7,9 +9,7 @@
     _get_close_matches_for_word_from_names,
     _get_exact_match_for_word_from_names,
     _get_match_case_for_word_from_names,
-    _get_version_path_prefix_from_obj,
     _get_wildcard_matches_for_word_from_names,
-    _search,
     _search_semantic,
     _search_whole_word,
     _search_wildcard,
@@ -33,7 +33,7 @@ def test_nltk_data_download():
 @pytest.mark.codegen_required
 def test_get_exact_match_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     exact_match = _get_exact_match_for_word_from_names(
         "VideoResoutionY",
         names=api_object_names,
     )
@@ -46,7 +46,7 @@ def test_get_exact_match_for_word_from_names():
 @pytest.mark.codegen_required
 def test_get_capitalize_match_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     capitalize_match_cases = _get_capitalize_match_for_word_from_names(
         "font",
         names=api_object_names,
     )
@@ -73,7 +73,7 @@ def test_get_capitalize_match_for_word_from_names():
 @pytest.mark.codegen_required
 def test_get_match_case_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     match_cases = _get_match_case_for_word_from_names(
         "font",
         names=api_object_names,
     )
@@ -106,7 +106,7 @@ def test_get_match_case_for_word_from_names():
 @pytest.mark.codegen_required
 def test_get_wildcard_matches_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     wildcard_matches = _get_wildcard_matches_for_word_from_names(
         "iter*",
         names=api_object_names,
     )
@@ -134,7 +134,7 @@ def test_get_wildcard_matches_for_word_from_names():
 @pytest.mark.codegen_required
 def test_get_close_matches_for_word_from_names():
     api_tree_data = _get_api_tree_data()
-    api_object_names = list(api_tree_data["all_api_object_name_synsets"].keys())
+    api_object_names = api_tree_data["all_api_object_names"]
     close_matches = _get_close_matches_for_word_from_names(
         "font",
         names=api_object_names,
     )
@@ -229,10 +229,7 @@ def test_whole_word_search(capsys):
     lines = capsys.readouterr().out.splitlines()
     assert "font" not in lines
     assert "<meshing_session>.preferences.Appearance.Charts.Font (Object)" in lines
-    assert (
-        "<meshing_session>.preferences.Graphics.ColormapSettings.TextFontAutomaticUnits (Parameter)"
-        in lines
-    )
+    assert "<solver_session>.preferences.Appearance.Charts.Font (Object)" in lines
 
 
 @pytest.mark.fluent_version("==24.2")
@@ -243,14 +240,8 @@ def test_match_case_search(capsys):
     for line in lines:
         assert "Font" not in line
         assert "font" in line
-    assert (
-        '<solver_session>.results.graphics.pathline["<name>"].color_map.font_name (Parameter)'
-        in lines
-    )
-    assert (
-        '<solver_session>.results.graphics.vector["<name>"].color_map.font_automatic (Parameter)'
-        in lines
-    )
+    assert "<meshing_session>.tui.preferences.appearance.charts.font (Object)" in lines
+    assert "<solver_session>.tui.preferences.appearance.charts.font (Object)" in lines
 
 
 @pytest.mark.fluent_version("==24.2")
@@ -266,10 +257,7 @@ def test_match_whole_word_and_case_search(capsys):
     assert (
         "<meshing_session>.preferences.Graphics.ColormapSettings.TextFontAutomaticUnits (Parameter)"
         not in lines
     )
-    assert (
-        '<solver_session>.results.graphics.lic["<name>"].color_map.font_name (Parameter)'
-        in lines
-    )
+    assert "<meshing_session>.tui.display.set_grid.label_font (Command)" in lines
 
 
 @pytest.mark.fluent_version("==24.2")
"meshing"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.workflow) == ( - version, - ["", "workflow"], - "", - ) - assert _get_version_path_prefix_from_obj(solver.workflow) == ( - version, - ["", "workflow"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.workflow.TaskObject) == ( - version, - ["", "workflow", "TaskObject:"], - '[""]', - ) - assert _get_version_path_prefix_from_obj( - meshing.workflow.TaskObject["Import Geometry"] - ) == ( - version, - ["", "workflow", "TaskObject:"], - "", - ) - assert _get_version_path_prefix_from_obj(meshing.preferences.Appearance.Charts) == ( - version, - ["", "preferences", "Appearance", "Charts"], - "", - ) - assert _get_version_path_prefix_from_obj(solver.setup.models) == ( - version, - [""], - "", - ) - assert _get_version_path_prefix_from_obj(solver.file.cff_files) == ( - None, - None, - None, - ) - - -@pytest.mark.codegen_required -@pytest.mark.fluent_version("latest") -def test_search_from_root(watertight_workflow_session): - meshing = watertight_workflow_session - results = _search("display", search_root=meshing) - assert ".tui.display (Object)" in results - results = _search("display", search_root=meshing.tui) - assert ".display (Object)" in results - results = _search("display", search_root=meshing.tui.display) - assert ".update_scene.display (Command)" in results - assert ".display_states (Object)" in results - results = _search("cad", search_root=meshing.meshing) - assert ".GlobalSettings.EnableCleanCAD (Parameter)" in results - assert ".LoadCADGeometry (Command)" in results - results = _search("next", search_root=meshing.workflow) - assert '.TaskObject[""].InsertNextTask (Command)' in results - results = _search("next", search_root=meshing.workflow.TaskObject) - assert '[""].InsertNextTask (Command)' in results - results = _search( - "next", search_root=meshing.workflow.TaskObject["Import Geometry"] - ) - assert ".InsertNextTask (Command)" in results - results = _search("timeout", search_root=meshing.preferences) - assert ".General.IdleTimeout (Parameter)" in results - results = _search("timeout", search_root=meshing.preferences.General) - assert ".IdleTimeout (Parameter)" in results - - -@pytest.mark.codegen_required -@pytest.mark.fluent_version(">=25.1") -def test_search_settings_from_root(capsys, static_mixer_settings_session): - solver = static_mixer_settings_session - results = _search("conduction", search_root=solver) - assert ".tui.define.models.shell_conduction (Object)" in results - assert ( - '.setup.boundary_conditions.wall[""].phase[""].thermal.enable_shell_conduction (Parameter)' - in results - ) - results = _search("conduction", search_root=solver.setup.boundary_conditions) - assert ( - '.wall[""].phase[""].thermal.conduction_layers[] (Object)' - in results - ) - results = _search("conduction", search_root=solver.setup.boundary_conditions.wall) - assert ( - '[""].phase[""].thermal.conduction_layers[] (Object)' - in results - ) - results = _search( - "conduction", search_root=solver.setup.boundary_conditions.wall["wall"] - ) - assert ( - '.phase[""].thermal.conduction_layers[] (Object)' - in results - ) - results = _search( - "conduction", search_root=solver.setup.boundary_conditions.wall["wall"].phase - ) - assert ( - '[""].thermal.conduction_layers[] (Object)' in results - ) +def test_match_whole_word(monkeypatch): + monkeypatch.setattr(pyfluent, "PRINT_SEARCH_RESULTS", False) + api_tree_data = { + "api_objects": [ + ".parent (Object)", + ".parent.child (Parameter)", + ".first_last (Object)", + 
".none (Object)", + ], + "api_tui_objects": [], + "all_api_object_name_synsets": { + "parent": ["parent"], + "child": ["child"], + "first_last": ["first_last"], + "none": ["none"], + }, + "all_api_object_names": ["parent", "child", "first_last", "none"], + } + + search_module = sys.modules["ansys.fluent.core.search"] + monkeypatch.setattr(search_module, "_get_api_tree_data", lambda: api_tree_data) + + assert _search_whole_word("parent", api_tree_data=api_tree_data) == [ + ".parent (Object)" + ] + assert _search_whole_word("child", api_tree_data=api_tree_data) == [ + ".parent.child (Parameter)" + ] + assert pyfluent.search("parent", match_whole_word=True) == [ + ".parent (Object)" + ] + + assert pyfluent.search("first", match_whole_word=True) == [ + ".first_last (Object)" + ] + assert pyfluent.search("last", match_whole_word=True) == [ + ".first_last (Object)" + ] + + assert pyfluent.search("first_last", match_whole_word=True) == [ + ".first_last (Object)" + ]