diff --git a/.github/workflows/CI-linter.yml b/.github/workflows/CI-linter.yml new file mode 100644 index 0000000000..0ab540f48b --- /dev/null +++ b/.github/workflows/CI-linter.yml @@ -0,0 +1,136 @@ +--- +name: CI-linting + +on: + pull_request: + types: [opened, synchronize] + schedule: + - cron: "0 0 * * *" + +jobs: + + linting: + runs-on: ubuntu-latest + defaults: + run: + shell: bash -leo pipefail {0} + + permissions: + contents: read + packages: read + statuses: write + + steps: + - name: checkout + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.11" + + - name: Check for non-ASCII characters + run: | + output=$(find . -type f \ + \( -name "*.py" -o -name "*.rst" -o -name "*.yml" -o -name "*.toml" \) \ + -exec perl -ne 'print if /[^[:ascii:]]/' {} \;) + if [ -n "$output" ]; then + echo "Non-ASCII characters found in documentation." + exit 1 + fi + + - name: install packages not included in super-linter + run: | + pip install validate-pyproject restructuredtext-lint + + - name: pyproject.toml + run: | + validate-pyproject pyproject.toml + + # RST linter + # Note: unclear how to suppress error messages + # (use grep -v in this case) + # - name: restructuredtext-lint + # run: | + # rst-lint README.rst docs/source | + # grep -v "Unknown directive type" | + # grep -v "Unknown interpreted text role" | + # grep -v "Cannot analyze code. Pygments package not found." + + - name: Check whether the citation metadata from CITATION.cff is valid + uses: citation-file-format/cffconvert-github-action@2.0.0 + with: + args: "--validate" + + - name: yaml_config used by super-linter + run: | + # TODO - very large line length + echo 'rules:' > yaml_config.yaml + echo ' line-length:' >> yaml_config.yaml + echo ' max: 250' >> yaml_config.yaml + + # Dependencies required to avoid errors + # reported by linters + - name: Install mamba dependencies + uses: mamba-org/setup-micromamba@v1 + with: + environment-file: environment.yml + init-shell: bash + + - name: Python dependencies + run: | + pip install -e '.[tests,dev,doc]' + pip install pylint + + - name: pylint + run: | + pylint $(git ls-files 'simtools/*.py') + + + - name: Lint Code Base + uses: super-linter/super-linter@v5 + env: + VALIDATE_ALL_CODEBASE: false + # github actions + VALIDATE_GITHUB_ACTIONS: true + # yaml + VALIDATE_YAML: true + YAML_CONFIG_FILE: yaml_config.yaml + YAML_ERROR_ON_WARNING: false + # isort + VALIDATE_PYTHON_ISORT: true + PYTHON_ISORT_CONFIG_FILE: pyproject.toml + # flake8 + VALIDATE_PYTHON_FLAKE8: true + # black + VALIDATE_PYTHON_BLACK: true + PYTHON_BLACK_CONFIG_FILE: pyproject.toml + # markdown + VALIDATE_MARKDOWN: true + # docker + VALIDATE_DOCKERFILE_HADOLINT: true + # copy and paste + VALIDATE_JSCPD_ALL_CODEBASE: true + # .env file + VALIDATE_ENV: true + # language + VALIDATE_NATURAL_LANGUAGE: true + # bash + VALIDATE_BASH: true + # path for linter rules + LINTER_RULES_PATH: ./ + # create a log file + CREATE_LOG_FILE: true + LOG_FILE: superlinter.log + DEFAULT_BRANCH: main + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Upload super-linter log file + # and keep it for 5 days + - name: Archive production artifacts + uses: actions/upload-artifact@v3 + with: + name: super-linter reports + path: | + superlinter.log + retention-days: 5 diff --git a/.github/workflows/CI-unittests.yml b/.github/workflows/CI-unittests.yml index 3390f2bdde..38c9e45df4 100644 --- a/.github/workflows/CI-unittests.yml +++ b/.github/workflows/CI-unittests.yml @@ -46,7 +46,7 @@ jobs: - name: 
checkout uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ec51a875c0..8d7931c637 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ repos: - id: isort args: ["--profile", "black", "--filter-files"] - repo: https://github.com/psf/black - rev: 23.7.0 + rev: 23.11.0 hooks: - id: black args: ["--line-length=100"] @@ -18,7 +18,7 @@ args: ["--max-line-length=100"] # https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -28,3 +28,8 @@ hooks: - id: docstr-coverage args: ["--verbose", "2", "--fail-under", "70.", "simtools"] + # github actions linting + - repo: https://github.com/rhysd/actionlint + rev: v1.6.26 + hooks: + - id: actionlint diff --git a/README.rst b/README.rst index 25f4bfa9dd..5f4c75ea67 100644 --- a/README.rst +++ b/README.rst @@ -101,4 +101,4 @@ Please cite this software if it use used for a publication, see the `Zenodo reco Acknowledgements ================ -This project is supported by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation) – project number 460248186 (PUNCH4NFDI). +This project is supported by the Deutsche Forschungsgemeinschaft (DFG, German Research Foundation) - project number 460248186 (PUNCH4NFDI). diff --git a/docker/README.md b/docker/README.md index 3b38c68eb2..d054a62335 100644 --- a/docker/README.md +++ b/docker/README.md @@ -8,7 +8,7 @@ Types of dockerfiles and containers available: - [simtools users](#container-for-simtools-users): a container with all software installed (CORSIKA, sim\_telarray, simtools python environment, simtools). Pull latest release with: `docker pull ghcr.io/gammasim/simtools-prod:latest` - [simtools developers](#container-for-simtools-developers): a container with CORSIKA, sim\_telarray, and simtools conda environment installed. Pull latest release with: `docker pull ghcr.io/gammasim/simtools-dev:latest` -- [sim\_telarray](#container-for-simulation-software-corsika--sim_telarray): provides a container with the CORSIKA and sim\_telarray installed. This provides the base image for the previously listed containers. Pull latest release with: `docker pull ghcr.io/gammasim/simtools-simtelarray:latest` +- [sim_telarray](#container-for-corsika-and-simtelarray): provides a container with the CORSIKA and sim\_telarray installed. This provides the base image for the previously listed containers. Pull latest release with: `docker pull ghcr.io/gammasim/simtools-simtelarray:latest` ## Container for simtools Users @@ -95,7 +95,7 @@ docker build -f Dockerfile-dev -t simtools-dev . Use the docker container in the same way as above, replacing `ghcr.io/gammasim/simtools-dev:latest` by `simtools-dev`. -## Container for simulation software CORSIKA / sim\_telarray +## Container for CORSIKA and sim\_telarray Provide a container including the following the CORSIKA and sim\_telarray simulation software packages. @@ -118,7 +118,7 @@ docker build -f Dockerfile-simtelarray -t simtelarray . ``` Building expects that a tar ball of corsika/sim\_telarray (named corsika7.7\_simtelarray.tar.gz) is available in the building directory.
-Download the tar package from the MPIK website (password applies) with +Download the tar package from MPIK (password applies) with ```bash ./download_simulationsoftware.sh diff --git a/docs/source/coding_guidelines.rst b/docs/source/coding_guidelines.rst index 033e8e0eb3..c5b0912da4 100644 --- a/docs/source/coding_guidelines.rst +++ b/docs/source/coding_guidelines.rst @@ -37,6 +37,16 @@ In rare cases, one might want to skip pre-commit checks with git commit --no-verify +Code Linting +============ + +Linters of almost all file types are run by the CI-linter workflow. + +To run pylint locally, use: + +.. code-block:: + + pylint $(git ls-files 'simtools/*.py') API documentation ----------------- diff --git a/docs/source/conf.py b/docs/source/conf.py index d54dc041ab..d0f28652f4 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -13,12 +13,11 @@ import os import sys +from pathlib import Path import toml import yaml -from pathlib import Path - import simtools.version sys.path.insert(0, os.path.abspath("../../simtools")) @@ -31,20 +30,17 @@ def get_authors_from_citation_file(): Read list of authors from CITATION.cff file """ - try: - with open(Path(__file__).parent / "../../CITATION.cff") as file: - citation = yaml.safe_load(file) - except FileNotFoundError: - raise + with open(Path(__file__).parent / "../../CITATION.cff", encoding="utf-8") as file: + citation = yaml.safe_load(file) - author = "" + tmp_author = "" try: for person in citation["authors"]: - author = author + person["given-names"] + " " + person["family-names"] - author += " (" + person["affiliation"] + "), " + tmp_author = tmp_author + person["given-names"] + " " + person["family-names"] + tmp_author += " (" + person["affiliation"] + "), " except KeyError: pass - return author[:-2] + return tmp_author[:-2] def get_python_version_from_pyproject(): diff --git a/pyproject.toml b/pyproject.toml index d94dc95655..44ca20cad1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,543 +88,56 @@ simtools-validate-file-using-schema = "simtools.applications.validate_file_using simtools-validate-optics = "simtools.applications.validate_optics:main" [tool.pylint.main] -# Explicit list of all pylint options (generated with --generate-toml-config) -# Analyse import fallback blocks. This can be used to support both Python 2 and 3 -# compatible code, which means that the block might have code that exists only in -# one or another interpreter, leading to false positives when analysed. -# analyse-fallback-blocks = - -# Clear in-memory caches upon conclusion of linting. Useful if running pylint in -# a server-like mode. -# clear-cache-post-run = - -# Always return a 0 (non-error) status code, even if lint errors are found. This -# is primarily useful in continuous integration scripts. -# exit-zero = - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -# extension-pkg-allow-list = - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -# extension-pkg-whitelist = - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. 
Messages -# specified are enabled, while categories only check already-enabled messages. -# fail-on = - -# Specify a score threshold under which the program will exit with error. -fail-under = 10 - -# Interpret the stdin as a python script, whose filename needs to be passed as -# the module_or_package argument. -# from-stdin = - -# Files or directories to be skipped. They should be base names, not paths. -ignore = ["CVS"] - -# Add files or directories matching the regular expressions patterns to the -# ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\\' represents the directory delimiter on Windows systems, it -# can't be used as an escape character. -# ignore-paths = - -# Files or directories matching the regular expression patterns are skipped. The -# regex matches against base names, not paths. The default value ignores Emacs -# file locks -ignore-patterns = ["^\\.#"] - -# List of module names for which member attributes should not be checked (useful -# for modules/projects where namespaces are manipulated during runtime and thus -# existing member attributes cannot be deduced by static analysis). It supports -# qualified module names, as well as Unix pattern matching. -# ignored-modules = - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -# init-hook = - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use, and will cap the count on Windows to -# avoid hangs. -jobs = 0 - -# Control the amount of potential inferred values when inferring a single object. -# This can help the performance when dealing with large functions or complex, -# nested conditions. -limit-inference-results = 100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -# load-plugins = - -# Pickle collected data for later comparisons. -persistent = true - -# Minimum Python version to use for version dependent checks. Will default to the -# version used to run pylint. -py-version = "3.11" - -# Discover python modules and packages in the file system subtree. -# recursive = - -# Add paths to the list of the source roots. Supports globbing patterns. The -# source root is an absolute path or a path relative to the current working -# directory used to determine a package namespace for modules located under the -# source root. -# source-roots = - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode = true - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -# unsafe-load-any-extension = - -[tool.pylint.basic] -# Naming style matching correct argument names. -argument-naming-style = "snake_case" - -# Regular expression matching correct argument names. Overrides argument-naming- -# style. If left empty, argument names will be checked with the set naming style. -# argument-rgx = - -# Naming style matching correct attribute names. -attr-naming-style = "snake_case" - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. If left empty, attribute names will be checked with the set naming -# style. -# attr-rgx = - -# Bad variable names which should always be refused, separated by a comma. 
-bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -# bad-names-rgxs = - -# Naming style matching correct class attribute names. -class-attribute-naming-style = "any" - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. If left empty, class attribute names will be checked -# with the set naming style. -# class-attribute-rgx = - -# Naming style matching correct class constant names. -class-const-naming-style = "UPPER_CASE" - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. If left empty, class constant names will be checked with -# the set naming style. -# class-const-rgx = - -# Naming style matching correct class names. -class-naming-style = "PascalCase" - -# Regular expression matching correct class names. Overrides class-naming-style. -# If left empty, class names will be checked with the set naming style. -# class-rgx = - -# Naming style matching correct constant names. -const-naming-style = "UPPER_CASE" - -# Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming style. -# const-rgx = - -# Minimum line length for functions/classes that require docstrings, shorter ones -# are exempt. -docstring-min-length = -1 - -# Naming style matching correct function names. -function-naming-style = "snake_case" - -# Regular expression matching correct function names. Overrides function-naming- -# style. If left empty, function names will be checked with the set naming style. -# function-rgx = - # Good variable names which should always be accepted, separated by a comma. -good-names = ["e", "i", "j", "k", "x", "y", "n", "f", "r", "ex", "db", "im", "sh", "ax", "ce", "xx", "yy", "zz"] - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -# good-names-rgxs = - -# Include a hint for the correct naming format with invalid-name. -# include-naming-hint = - -# Naming style matching correct inline iteration names. -inlinevar-naming-style = "any" - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. If left empty, inline iteration names will be checked -# with the set naming style. -# inlinevar-rgx = - -# Naming style matching correct method names. -method-naming-style = "snake_case" - -# Regular expression matching correct method names. Overrides method-naming- -# style. If left empty, method names will be checked with the set naming style. -# method-rgx = - -# Naming style matching correct module names. -module-naming-style = "snake_case" - -# Regular expression matching correct module names. Overrides module-naming- -# style. If left empty, module names will be checked with the set naming style. -# module-rgx = - -# Colon-delimited sets of names that determine each other's naming style when the -# name regexes allow several styles. -# name-group = - -# Regular expression which should only match function or class names that do not -# require a docstring. -no-docstring-rgx = "main" - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. These -# decorators are taken in consideration only for invalid-name. 
-property-classes = ["abc.abstractproperty"] - -# Regular expression matching correct type alias names. If left empty, type alias -# names will be checked with the set naming style. -# typealias-rgx = - -# Regular expression matching correct type variable names. If left empty, type -# variable names will be checked with the set naming style. -# typevar-rgx = - -# Naming style matching correct variable names. -variable-naming-style = "snake_case" - -# Regular expression matching correct variable names. Overrides variable-naming- -# style. If left empty, variable names will be checked with the set naming style. -# variable-rgx = - -[tool.pylint.classes] -# Warn about protected attribute access inside special methods -# check-protected-access-in-special-methods = - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"] - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"] - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg = ["cls"] - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg = ["mcs"] - -[tool.pylint.design] -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -# exclude-too-few-public-methods = - -# List of qualified class names to ignore when counting class parents (see R0901) -# ignored-parents = - -# Maximum number of arguments for function / method. -max-args = 5 - -# Maximum number of attributes for a class (see R0902). -max-attributes = 7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr = 5 - -# Maximum number of branch for function / method body. -max-branches = 12 - -# Maximum number of locals for function / method body. -max-locals = 15 - -# Maximum number of parents for a class (see R0901). -max-parents = 7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods = 20 - -# Maximum number of return / yield for function / method body. -max-returns = 6 - -# Maximum number of statements in function / method body. -max-statements = 50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods = 2 - -[tool.pylint.exceptions] -# Exceptions that will emit a warning when caught. -overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"] - -[tool.pylint.format] -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -# expected-line-ending-format = - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines = "^\\s*(# )??$" - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren = 4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string = " " - -# Maximum number of characters on a single line. -max-line-length = 100 - -# Maximum number of lines in a module. -max-module-lines = 1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -# single-line-class-stmt = - -# Allow the body of an if to be on the same line as the test if there is no else. -# single-line-if-stmt = - -[tool.pylint.imports] -# List of modules that can be imported at any level, not just the top level one. 
-# allow-any-import-level = - -# Allow explicit reexports by alias from a package __init__. -# allow-reexport-from-package = - -# Allow wildcard imports from modules that define __all__. -# allow-wildcard-with-all = - -# Deprecated modules which should not be used, separated by a comma. -# deprecated-modules = - -# Output a graph (.gv or any supported image format) of external dependencies to -# the given file (report RP0402 must not be disabled). -# ext-import-graph = - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be disabled). -# import-graph = - -# Output a graph (.gv or any supported image format) of internal dependencies to -# the given file (report RP0402 must not be disabled). -# int-import-graph = - -# Force import order to recognize a module as part of the standard compatibility -# libraries. -# known-standard-library = - -# Force import order to recognize a module as part of a third party library. -known-third-party = ["enchant"] - -# Couples of modules and preferred modules, separated by a comma. -# preferred-modules = - -[tool.pylint.logging] -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style = "new" - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules = ["logging"] - -[tool.pylint."messages control"] -# Only show warnings with the listed confidence levels. Leave empty to show all. -# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] - -# Disable the message, report, category or checker with the given id(s). You can -# either give multiple identifiers separated by comma (,) or put this option -# multiple times (only on the command line, not in the configuration file where -# it should appear only once). You can also use "--disable=all" to disable -# everything first and then re-enable specific checks. For example, if you want -# to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "invalid-name", "missing-module-docstring", "import-error", "too-many-instance-attributes", "too-many-arguments", "too-many-locals", "logging-fstring-interpolation"] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where it -# should appear only once). See also the "--disable" option for examples. -enable = ["c-extension-no-member"] - -[tool.pylint.method_args] -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] - -[tool.pylint.miscellaneous] -# List of note tags to take in consideration, separated by a comma. 
-notes = ["FIXME", "XXX", "TODO"] - -# Regular expression of note tags to take in consideration. -# notes-rgx = - -[tool.pylint.refactoring] -# Maximum number of nested blocks for function / method body -max-nested-blocks = 5 - -# Complete name of functions that never returns. When checking for inconsistent- -# return-statements if a never returning function is called then it will be -# considered as an explicit return statement and no message will be printed. -never-returning-functions = ["sys.exit", "argparse.parse_error"] - -[tool.pylint.reports] -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each category, -# as well as 'statement' which is the total number of statements analyzed. This -# score is used by the global evaluation report (RP0004). -evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -# msg-template = - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -# output-format = - -# Tells whether to display a full report or only the messages. -# reports = - -# Activate the evaluation score. -score = true - -[tool.pylint.similarities] -# Comments are removed from the similarity computation -ignore-comments = true - -# Docstrings are removed from the similarity computation -ignore-docstrings = true - -# Imports are removed from the similarity computation -# ignore-imports = - -# Signatures are removed from the similarity computation -ignore-signatures = true - -# Minimum lines number of a similarity. -min-similarity-lines = 4 - -[tool.pylint.spelling] -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions = 4 - -# Spelling dictionary name. No available dictionaries : You need to install both -# the python package and the system dependency for enchant to work.. -# spelling-dict = - -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. -spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:" - -# List of comma separated words that should not be checked. -# spelling-ignore-words = - -# A path to a file that contains the private dictionary; one word per line. -# spelling-private-dict-file = - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -# spelling-store-unknown-words = - -[tool.pylint.typecheck] -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. 
-contextmanager-decorators = ["contextlib.contextmanager"] +good-names = [ + "e", "i", "j", "k", "x", "y", "n", "f", "r", "ex", "db", "im", "sh", "ax", "ce", "xx", "yy", "zz", + "C1", "C2", "C3", "C4", "C4x", "N1", "N2", "N3", "N4", "N4x", + "lst", "mst", "sst", "sct", "hess", "magic", "veritas", + ] +# Ignore files +ignore = [ + "__init__.py", "scm_version.py", "_version.py", + "legend_handlers.py", + "version.py", +] -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. +# See discussion in issue 521 +# Agreed to not have module docstring (missing-module-docstring) +# Pylint cannot handle overload (used in pyproj; unpacking-non-sequence) +disable = [ + "missing-module-docstring", + "unpacking-non-sequence", + "logging-format-interpolation", + "fixme", + "missing-function-docstring", + "R0801", # duplications + "logging-fstring-interpolation", +] # see https://github.com/pylint-dev/pylint/issues/2289 generated-members = ["gist_heat_r"] -# Tells whether missing members accessed in mixin class should be ignored. A -# class is considered mixin if its name matches the mixin-class-rgx option. -# Tells whether to warn about missing members when the owner of the attribute is -# inferred to be None. -ignore-none = true - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference can -# return multiple potential results while evaluating a Python object, but some -# branches might not be evaluated, which results in partial inference. In that -# case, it might be useful to still emit no-member and other checks for the rest -# of the inferred objects. -ignore-on-opaque-inference = true - -# List of symbolic message names to ignore for Mixin members. -ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"] - -# Show a hint with possible names when a member name was not found. The aspect of -# finding the hint is based on edit distance. -missing-member-hint = true - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance = 1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices = 1 - -# Regex pattern to define which classes are considered mixins. -mixin-class-rgx = ".*[Mm]ixin" - -# List of decorators that change the signature of a decorated function. -# signature-mutators = - -[tool.pylint.variables] -# List of additional names supposed to be defined in builtins. Remember that you -# should avoid defining new builtins when possible. -# additional-builtins = - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables = true - -# List of names allowed to shadow builtins -# allowed-redefined-builtins = - -# List of strings which can identify a callback function by name. A callback name -# must start or end with one of those strings. 
-callbacks = ["cb_", "_cb"] - -# A regular expression matching the name of dummy variables (i.e. expected to not -# be used). -dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" - -# Argument names that match this expression will be ignored. -ignored-argument-names = "_.*|^ignored_|^unused_" - -# Tells whether we should check for unused import in __init__ files. -# init-import = - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] +# Maximum number of characters on a single line. +max-line-length = 100 +# Maximum number of lines in a module. (default 1000) +max-module-lines = 2000 +# Maximum number of branch for function / method body. (default=12) +max-branches = 25 +# Maximum number of arguments for function / method (default=5). +max-args = 12 +# Maximum number of locals for function / method body (default=15). +max-locals = 50 +# Maximum number of attributes for a class (see R0902). (default=7) +max-attributes = 50 +# Maximum number of statements in function / method body. (default=50) +max-statements = 80 +# Maximum number of public methods for a class (see R0904). (default=20) +max-public-methods = 50 + +[tool.black] +line-length = 100 + +[tool.isort] +profile = "black" [tool.pytest.ini_options] minversion="6.0" diff --git a/simtools/applications/add_file_to_db.py b/simtools/applications/add_file_to_db.py index bc10636338..781074d2c0 100644 --- a/simtools/applications/add_file_to_db.py +++ b/simtools/applications/add_file_to_db.py @@ -66,6 +66,7 @@ def _user_confirm(): return answer == "y" except EOFError: return False + return False def main(): diff --git a/simtools/applications/compare_cumulative_psf.py b/simtools/applications/compare_cumulative_psf.py index ce119e2d89..c949fcbc56 100644 --- a/simtools/applications/compare_cumulative_psf.py +++ b/simtools/applications/compare_cumulative_psf.py @@ -121,9 +121,7 @@ def main(): type=float, default=10, ) - config.parser.add_argument( - "--zenith", help="Zenith angle in deg", type=float, default=20.0 - ) + config.parser.add_argument("--zenith", help="Zenith angle in deg", type=float, default=20.0) config.parser.add_argument( "--data", help="Data file name with the measured PSF vs radius [cm]", type=str ) @@ -181,7 +179,7 @@ def main(): # Simulated cumulative PSF data_to_plot[r"sim$\_$telarray"] = im.get_cumulative_data(radius * u.cm) - fig = visualize.plot_1D(data_to_plot) + fig = visualize.plot_1d(data_to_plot) fig.gca().set_ylim(0, 1.05) plot_file_name = label + "_" + tel_model.name + "_cumulative_PSF" @@ -192,7 +190,7 @@ def main(): # Plotting image data_to_plot = im.get_image_data() - fig = visualize.plot_hist_2D(data_to_plot, bins=80) + fig = visualize.plot_hist_2d(data_to_plot, bins=80) circle = plt.Circle((0, 0), im.get_psf(0.8) / 2, color="k", fill=False, lw=2, ls="--") fig.gca().add_artist(circle) fig.gca().set_aspect("equal") diff --git a/simtools/applications/generate_corsika_histograms.py b/simtools/applications/generate_corsika_histograms.py index 38271a7474..9822608176 100644 --- a/simtools/applications/generate_corsika_histograms.py +++ b/simtools/applications/generate_corsika_histograms.py @@ -25,7 +25,7 @@ - Number of photons per event. Histograms for the distribution of CORSIKA event header elements can also be generated by using - the `--event_1D_histograms` and `--event_2D_histograms` arguments. 
The accepted arguments (keys) + the `--event_1d_histograms` and `--event_2d_histograms` arguments. The accepted arguments (keys) are to be found in the CORSIKA manual, e.g., "total_energy", "zenith", "azimuth". Command line arguments @@ -111,10 +111,10 @@ If set, histograms are saved into pdf files. One pdf file contains all the histograms for the Cherenkov photons. The name of the file is controlled via `hdf5_file_name`. - If event_1D_histograms and event_2D_histograms are used, two separate pdf files might be + If event_1d_histograms and event_2d_histograms are used, two separate pdf files might be created to accommodate the histograms for the CORSIKA event header elements. The core names of these output pdf files are also given by `hdf5_file_name` argument with the addition of - 'event_1D_histograms' and 'event_2D_histograms'. + 'event_1d_histograms' and 'event_2d_histograms'. hdf5 (bool, optional) @@ -125,23 +125,23 @@ It requires the `--hdf5` flag. If not given, `hdf5_file_name` takes the name from the input IACT file (`input_file`). If the output `hdf5_file_name` file already exists, the tables associated to the chosen - flags (e.g. `hdf5`, `event_1D_histograms`, `event_2D_histograms`) will be overwritten. The + flags (e.g. `hdf5`, `event_1d_histograms`, `event_2d_histograms`) will be overwritten. The remaining tables, if any, will stay untouched. - event_1D_histograms (str, optional) - Produce 1D histograms for elements given in `--event_1D_histograms` from the CORSIKA event + event_1d_histograms (str, optional) + Produce 1D histograms for elements given in `--event_1d_histograms` from the CORSIKA event header and save into hdf5/pdf files. It allows more than one argument, separated by simple spaces. - Usage: `--event_1D_histograms first_interaction_height total_energy`. + Usage: `--event_1d_histograms first_interaction_height total_energy`. - event_2D_histograms (str, optional) - Produce 2D histograms for elements given in `--event_2D_histograms` from the CORSIKA event + event_2d_histograms (str, optional) + Produce 2D histograms for elements given in `--event_2d_histograms` from the CORSIKA event header and save into hdf5/pdf files. It allows more than one argument, separated by simple spaces. The elements are grouped into pairs and the 2D histograms are produced always for two subsequent elements. - For example, `--event_2D_histograms first_interaction_height total_energy zenith azimuth` + For example, `--event_2d_histograms first_interaction_height total_energy zenith azimuth` will produce one 2D histogram for `first_interaction_height` `total_energy` and another 2D histogram for `zenith` and `azimuth`. @@ -153,7 +153,7 @@ simtools-generate-corsika-histograms --IACT_file /workdir/external/simtools/tests/\ resources/tel_output_10GeV-2-gamma-20deg-CTAO-South.corsikaio --pdf --hdf5 - --event_2D_histograms zenith azimuth --event_1D_histograms total_energy + --event_2d_histograms zenith azimuth --event_1d_histograms total_energy Expected final print-out message: @@ -249,7 +249,7 @@ def _parse(label, description): ) config.parser.add_argument( - "--event_1D_histograms", + "--event_1d_histograms", help="The keys from the CORSIKA event header to be used for the generation of 1D " "histograms. 
The available choices can been found in the `all_event_keys` attribute of" "the CorsikaHistograms.", @@ -259,7 +259,7 @@ def _parse(label, description): ) config.parser.add_argument( - "--event_2D_histograms", + "--event_2d_histograms", help="The keys from the CORSIKA event header to be used for the generation of 2D " "histograms. The available choices can been found in the `all_event_keys` attribute of" "the CorsikaHistograms.", @@ -273,12 +273,12 @@ def _parse(label, description): if not config_parser["pdf"]: if ( not config_parser["hdf5"] - and not config_parser["event_1D_histograms"] - and not config_parser["event_2D_histograms"] + and not config_parser["event_1d_histograms"] + and not config_parser["event_2d_histograms"] ): config.parser.error( - "At least one argument is required: `--pdf`, `--hdf5`, `--event_1D_histograms`, or " - "`--event_2D_histograms`." + "At least one argument is required: `--pdf`, `--hdf5`, `--event_1d_histograms`, or " + "`--event_2d_histograms`." ) return config_parser, _ @@ -314,18 +314,18 @@ def _plot_figures(corsika_histograms_instance): corsika_histograms_visualize.save_figs_to_pdf(figure_list, output_file_name) -def _derive_event_1D_histograms( - corsika_histograms_instance, event_1D_header_keys, pdf, hdf5, overwrite=False +def _derive_event_1d_histograms( + corsika_histograms_instance, event_1d_header_keys, pdf, hdf5, overwrite=False ): """ - Auxiliary function to derive the histograms for the arguments given by event_1D_histograms. + Auxiliary function to derive the histograms for the arguments given by event_1d_histograms. Parameters ---------- corsika_histograms_instance: `CorsikaHistograms` instance. The CorsikaHistograms instance created in main. - event_1D_header_keys: str - Produce 1D histograms for elements given in `event_1D_header_keys` from the CORSIKA event + event_1d_header_keys: str + Produce 1D histograms for elements given in `event_1d_header_keys` from the CORSIKA event header and save into hdf5/pdf files. pdf: bool If true, histograms are saved into a pdf file. @@ -335,37 +335,37 @@ def _derive_event_1D_histograms( If true, overwrites the current output hdf5 file. """ figure_list = [] - for event_header_element in event_1D_header_keys: + for event_header_element in event_1d_header_keys: if pdf: - figure = corsika_histograms_visualize.plot_1D_event_header_distribution( + figure = corsika_histograms_visualize.plot_1d_event_header_distribution( corsika_histograms_instance, event_header_element ) figure_list.append(figure) if hdf5: - corsika_histograms_instance.export_event_header_1D_histogram( + corsika_histograms_instance.export_event_header_1d_histogram( event_header_element, bins=50, hist_range=None, overwrite=overwrite ) if pdf: figures_list = np.array(figure_list).flatten() output_file_name = Path(corsika_histograms_instance.output_path).joinpath( - f"{corsika_histograms_instance.hdf5_file_name}_event_1D_histograms.pdf" + f"{corsika_histograms_instance.hdf5_file_name}_event_1d_histograms.pdf" ) corsika_histograms_visualize.save_figs_to_pdf(figures_list, output_file_name) -def _derive_event_2D_histograms( - corsika_histograms_instance, event_2D_header_keys, pdf, hdf5, overwrite=False +def _derive_event_2d_histograms( + corsika_histograms_instance, event_2d_header_keys, pdf, hdf5, overwrite=False ): """ - Auxiliary function to derive the histograms for the arguments given by event_1D_histograms. + Auxiliary function to derive the histograms for the arguments given by event_2d_histograms.
If an odd number of event header keys are given, the last one is discarded. Parameters ---------- corsika_histograms_instance: `CorsikaHistograms` instance. The CorsikaHistograms instance created in main. - event_2D_header_keys: str - Produce 1D histograms for elements given in `event_1D_header_keys` from the CORSIKA event + event_2d_header_keys: str + Produce 2D histograms for elements given in `event_2d_header_keys` from the CORSIKA event header and save into hdf5/pdf files. pdf: bool If true, histograms are saved into a pdf file. @@ -375,10 +375,10 @@ def _derive_event_2D_histograms( If true, overwrites the current output hdf5 file. """ figure_list = [] - for i_event_header_element, _ in enumerate(event_2D_header_keys[::2]): + for i_event_header_element, _ in enumerate(event_2d_header_keys[::2]): # [::2] to discard the last one in case an odd number of keys are passed - if len(event_2D_header_keys) % 2 == 1: # if odd number of keys + if len(event_2d_header_keys) % 2 == 1: # if odd number of keys msg = ( "An odd number of keys was passed to produce 2D histograms." "The last key is being ignored." ) logger.warning(msg) if pdf: - figure = corsika_histograms_visualize.plot_2D_event_header_distribution( + figure = corsika_histograms_visualize.plot_2d_event_header_distribution( corsika_histograms_instance, - event_2D_header_keys[i_event_header_element], - event_2D_header_keys[i_event_header_element + 1], + event_2d_header_keys[i_event_header_element], + event_2d_header_keys[i_event_header_element + 1], ) figure_list.append(figure) if hdf5: - corsika_histograms_instance.export_event_header_2D_histogram( - event_2D_header_keys[i_event_header_element], - event_2D_header_keys[i_event_header_element + 1], + corsika_histograms_instance.export_event_header_2d_histogram( + event_2d_header_keys[i_event_header_element], + event_2d_header_keys[i_event_header_element + 1], bins=50, hist_range=None, overwrite=overwrite, ) if pdf: figures_list = np.array(figure_list).flatten() output_file_name = Path(corsika_histograms_instance.output_path).joinpath( - f"{corsika_histograms_instance.hdf5_file_name}_event_2D_histograms.pdf" + f"{corsika_histograms_instance.hdf5_file_name}_event_2d_histograms.pdf" ) corsika_histograms_visualize.save_figs_to_pdf(figures_list, output_file_name) @@ -437,7 +437,7 @@ def main(): indices = None # If the hdf5 output file already exists, the results are appended to it. if (Path(corsika_histograms_instance.hdf5_file_name).exists()) and ( - args_dict["hdf5"] or args_dict["event_1D_histograms"] or args_dict["event_2D_histograms"] + args_dict["hdf5"] or args_dict["event_1d_histograms"] or args_dict["event_2d_histograms"] ): msg = ( f"Output hdf5 file {corsika_histograms_instance.hdf5_file_name} already exists. 
" @@ -460,21 +460,21 @@ def main(): corsika_histograms_instance.export_histograms(overwrite=overwrite) # Event information - if args_dict["event_1D_histograms"] is not None: - _derive_event_1D_histograms( + if args_dict["event_1d_histograms"] is not None: + _derive_event_1d_histograms( corsika_histograms_instance, - args_dict["event_1D_histograms"], + args_dict["event_1d_histograms"], args_dict["pdf"], args_dict["hdf5"], - overwrite=False if args_dict["hdf5"] else True, + overwrite=not args_dict["hdf5"], ) - if args_dict["event_2D_histograms"] is not None: - _derive_event_2D_histograms( + if args_dict["event_2d_histograms"] is not None: + _derive_event_2d_histograms( corsika_histograms_instance, - args_dict["event_2D_histograms"], + args_dict["event_2d_histograms"], args_dict["pdf"], args_dict["hdf5"], - overwrite=False if args_dict["hdf5"] or args_dict["event_1D_histograms"] else True, + overwrite=not (args_dict["hdf5"] or args_dict["event_1d_histograms"]), ) final_time = time.time() diff --git a/simtools/applications/generate_default_metadata.py b/simtools/applications/generate_default_metadata.py index 115e6529dc..23d0e52be0 100644 --- a/simtools/applications/generate_default_metadata.py +++ b/simtools/applications/generate_default_metadata.py @@ -28,9 +28,9 @@ import yaml -import simtools.data_model.metadata_model as metadata_model import simtools.utils.general as gen from simtools.configuration import configurator +from simtools.data_model import metadata_model def _parse(label, description): diff --git a/simtools/applications/generate_simtel_array_histograms.py b/simtools/applications/generate_simtel_array_histograms.py index d8124d8c23..16efc0ae0c 100644 --- a/simtools/applications/generate_simtel_array_histograms.py +++ b/simtools/applications/generate_simtel_array_histograms.py @@ -136,10 +136,10 @@ def main(): for line in file: # Removing '\n' from filename, in case it is left there. histogram_files.append(line.replace("\n", "")) - except FileNotFoundError: + except FileNotFoundError as exc: msg = f"{one_file} is not a file." 
logger.error(msg) - raise FileNotFoundError + raise FileNotFoundError from exc # If no output name is passed, the tool gets the name of the first histogram of the list if config_parser["output_file_name"] is None: @@ -159,7 +159,7 @@ def main(): # Building SimtelHistograms simtel_histograms = SimtelHistograms(histogram_files) - simtel_histograms._combine_histogram_files() + simtel_histograms.combine_histogram_files() if config_parser["pdf"]: logger.debug(f"Creating the pdf file {output_file_name}.pdf") diff --git a/simtools/applications/sim_showers_for_trigger_rates.py b/simtools/applications/sim_showers_for_trigger_rates.py index 1741b6fc43..7f6e1d07a5 100644 --- a/simtools/applications/sim_showers_for_trigger_rates.py +++ b/simtools/applications/sim_showers_for_trigger_rates.py @@ -105,18 +105,10 @@ def _parse(label=None, description=None): type=str, required=True, ) - config.parser.add_argument( - "--nruns", help="Number of runs", type=int, default=100 - ) - config.parser.add_argument( - "--nevents", help="Number of events/run", type=int, default=100000 - ) - config.parser.add_argument( - "--zenith", help="Zenith angle in deg", type=float, default=20 - ) - config.parser.add_argument( - "--azimuth", help="Azimuth angle in deg", type=float, default=0 - ) + config.parser.add_argument("--nruns", help="Number of runs", type=int, default=100) + config.parser.add_argument("--nevents", help="Number of events/run", type=int, default=100000) + config.parser.add_argument("--zenith", help="Zenith angle in deg", type=float, default=20) + config.parser.add_argument("--azimuth", help="Azimuth angle in deg", type=float, default=0) # TODO confusing with output_path? config.parser.add_argument( "--output", diff --git a/simtools/applications/tune_psf.py b/simtools/applications/tune_psf.py index 673379218f..5f076d3812 100644 --- a/simtools/applications/tune_psf.py +++ b/simtools/applications/tune_psf.py @@ -131,9 +131,7 @@ def main(): type=float, default=10, ) - config.parser.add_argument( - "--zenith", help="Zenith angle in deg", type=float, default=20 - ) + config.parser.add_argument("--zenith", help="Zenith angle in deg", type=float, default=20) config.parser.add_argument( "--data", help="Data file name with the measured PSF vs radius [cm]", type=str ) @@ -211,11 +209,11 @@ def add_parameters( mar_0 = split_simtel_parameter(raw_par)[0] logger.debug( - "Previous parameter values: \n" - "MRRA = " + str(mrra_0) + "\n" - "MRF = " + str(mfr_0) + "\n" - "MRRA2 = " + str(mrra2_0) + "\n" - "MAR = " + str(mar_0) + "\n" + "Previous parameter values:\n" + f"MRRA = {str(mrra_0)}\n" + f"MRF = {str(mfr_0)}\n" + f"MRRA2 = {str(mrra2_0)}\n" + f"MAR = {str(mar_0)}\n" ) if args_dict["fixed"]: @@ -224,8 +222,8 @@ def add_parameters( # Drawing parameters randonly # Range around the previous values are hardcoded # Number of runs is hardcoded - N_RUNS = 50 - for _ in range(N_RUNS): + n_runs = 50 + for _ in range(n_runs): mrra_range = 0.004 if not args_dict["fixed"] else 0 mrf_range = 0.1 mrra2_range = 0.03 @@ -285,7 +283,7 @@ def run_pars(pars, plot=True): ) if plot: - fig = visualize.plot_1D( + fig = visualize.plot_1d( data_to_plot, plot_difference=True, no_markers=True, diff --git a/simtools/applications/validate_camera_fov.py b/simtools/applications/validate_camera_fov.py index 3e1c36ab30..f081391989 100644 --- a/simtools/applications/validate_camera_fov.py +++ b/simtools/applications/validate_camera_fov.py @@ -126,14 +126,14 @@ def main(): pixel_ids_to_print = int(args_dict["print_pixels_id"]) if pixel_ids_to_print == 0: 
pixel_ids_to_print = -1 # so not print the zero pixel - except ValueError: + except ValueError as exc: if args_dict["print_pixels_id"].lower() == "all": pixel_ids_to_print = camera.get_number_of_pixels() else: raise ValueError( f"The value provided to --print_pixels_id ({args_dict['print_pixels_id']}) " "should be an integer or All" - ) + ) from exc fig = camera.plot_pixel_layout(args_dict["camera_in_sky_coor"], pixel_ids_to_print) plot_file_prefix = output_dir.joinpath(f"{label}_{tel_model.name}_pixel_layout") for suffix in ["pdf", "png"]: diff --git a/simtools/applications/validate_file_using_schema.py b/simtools/applications/validate_file_using_schema.py index fc4778051e..90092bf193 100644 --- a/simtools/applications/validate_file_using_schema.py +++ b/simtools/applications/validate_file_using_schema.py @@ -32,9 +32,9 @@ import yaml -import simtools.data_model.metadata_model as metadata_model import simtools.utils.general as gen from simtools.configuration import configurator +from simtools.data_model import metadata_model def _parse(label, description): diff --git a/simtools/applications/validate_optics.py b/simtools/applications/validate_optics.py index 25139fbca7..61f7dd611c 100644 --- a/simtools/applications/validate_optics.py +++ b/simtools/applications/validate_optics.py @@ -103,9 +103,7 @@ def _parse(label): type=float, default=10, ) - config.parser.add_argument( - "--zenith", help="Zenith angle in deg", type=float, default=20 - ) + config.parser.add_argument("--zenith", help="Zenith angle in deg", type=float, default=20) config.parser.add_argument( "--max_offset", help="Maximum offset angle in deg", diff --git a/simtools/corsika/corsika_config.py b/simtools/corsika/corsika_config.py index 47be5b93fd..7b934856e5 100644 --- a/simtools/corsika/corsika_config.py +++ b/simtools/corsika/corsika_config.py @@ -91,7 +91,7 @@ def __init__( self.site = names.validate_site_name(site) self.primary = None self.eslope = None - self._config_file_path = None + self.config_file_path = None self._output_generic_file_name = None self._simtel_source_path = simtel_source_path @@ -357,7 +357,7 @@ def export_input_file(self, use_multipipe=False): sub_dir = "corsika_simtel" if use_multipipe else "corsika" self._set_output_file_and_directory(sub_dir) - self._logger.debug(f"Exporting CORSIKA input file to {self._config_file_path}") + self._logger.debug(f"Exporting CORSIKA input file to {self.config_file_path}") def _get_text_single_line(pars): text = "" @@ -377,7 +377,7 @@ def _get_text_multiple_lines(pars): text += _get_text_single_line(new_pars) return text - with open(self._config_file_path, "w", encoding="utf-8") as file: + with open(self.config_file_path, "w", encoding="utf-8") as file: file.write("\n* [ RUN PARAMETERS ]\n") # Removing AZM entry first _user_pars_temp = copy.copy(self._user_parameters) @@ -424,7 +424,7 @@ def _get_text_multiple_lines(pars): file.write("\n* [ OUTUPUT FILE ]\n") if use_multipipe: - run_cta_script = Path(self._config_file_path.parent).joinpath("run_cta_multipipe") + run_cta_script = Path(self.config_file_path.parent).joinpath("run_cta_multipipe") file.write(f"TELFIL |{str(run_cta_script)}\n") else: file.write(f"TELFIL {self._output_generic_file_name}\n") @@ -504,7 +504,7 @@ def _set_output_file_and_directory(self, sub_dir="corsika"): file_directory = self.io_handler.get_output_directory(label=self.label, sub_dir=sub_dir) self._logger.info(f"Creating directory {file_directory}, if needed.") file_directory.mkdir(parents=True, exist_ok=True) - self._config_file_path = 
file_directory.joinpath(config_file_name) + self.config_file_path = file_directory.joinpath(config_file_name) self._output_generic_file_name = self.get_file_name(file_type="output_generic") @@ -535,4 +535,4 @@ def get_input_file(self, use_multipipe=False): """ if not self._is_file_updated: self.export_input_file(use_multipipe) - return self._config_file_path + return self.config_file_path diff --git a/simtools/corsika/corsika_histograms.py b/simtools/corsika/corsika_histograms.py index 458a11c190..3b283d6f87 100644 --- a/simtools/corsika/corsika_histograms.py +++ b/simtools/corsika/corsika_histograms.py @@ -18,7 +18,7 @@ from simtools.io_operations import io_handler from simtools.io_operations.hdf5_handler import fill_hdf5_table from simtools.utils.general import collect_data_from_yaml_or_dict -from simtools.utils.geometry import convert_2D_to_radial_distr, rotate +from simtools.utils.geometry import convert_2d_to_radial_distr, rotate from simtools.utils.names import sanitize_name @@ -72,14 +72,6 @@ def __init__(self, input_file, label=None, output_path=None, hdf5_file_name=None else: self.hdf5_file_name = hdf5_file_name - self._initialize_attributes() - self.read_event_information() - self._initialize_header() - - def _initialize_attributes(self): - """ - Initializes the class attributes. - """ self._telescope_indices = None self._telescope_positions = None self.num_events = None @@ -89,8 +81,8 @@ def _initialize_attributes(self): self._num_photons_per_event = None self._num_photons_per_telescope = None self.__meta_dict = None - self.__dict_2D_distributions = None - self.__dict_1D_distributions = None + self._dict_2d_distributions = None + self._dict_1d_distributions = None self._event_azimuth_angles = None self._event_zenith_angles = None self._hist_config = None @@ -103,9 +95,15 @@ def _initialize_attributes(self): self.event_information = None self._individual_telescopes = None self._allowed_histograms = {"hist_position", "hist_direction", "hist_time_altitude"} - self._allowed_1D_labels = {"wavelength", "time", "altitude"} - self._allowed_2D_labels = {"counts", "density", "direction", "time_altitude"} + self._allowed_1d_labels = {"wavelength", "time", "altitude"} + self._allowed_2d_labels = {"counts", "density", "direction", "time_altitude"} self._header = None + self.hist_position = None + self.hist_direction = None + self.hist_time_altitude = None + + self.read_event_information() + self._initialize_header() @property def hdf5_file_name(self): @@ -637,7 +635,7 @@ def set_histograms(self, telescope_indices=None, individual_telescopes=None, his num_photons_per_event_per_telescope_to_set = [] start_time = time.time() - self._logger.debug("Starting reading the file at {}.".format(start_time)) + self._logger.debug(f"Starting reading the file at {start_time}.") with IACTFile(self.input_file) as f: event_counter = 0 for event in f: @@ -699,7 +697,7 @@ def _raise_if_no_histogram(self): """ for histogram in self._allowed_histograms: - if not hasattr(self, histogram): + if not hasattr(self, histogram) or getattr(self, histogram) is None: msg = ( "The histograms were not created. Please, use `create_histograms` to create " "histograms from the CORSIKA output file." @@ -707,7 +705,7 @@ def _raise_if_no_histogram(self): self._logger.error(msg) raise HistogramNotCreated - def _get_hist_2D_projection(self, label): + def _get_hist_2d_projection(self, label): """ Helper function to get 2D distributions. @@ -731,8 +729,8 @@ def _get_hist_2D_projection(self, label): if label is not valid. 
""" - if label not in self._allowed_2D_labels: - msg = f"label is not valid. Valid entries are {self._allowed_2D_labels}" + if label not in self._allowed_2d_labels: + msg = f"label is not valid. Valid entries are {self._allowed_2d_labels}" self._logger.error(msg) raise ValueError self._raise_if_no_histogram() @@ -761,7 +759,7 @@ def _get_hist_2D_projection(self, label): return np.array(hist_values), np.array(x_bin_edges), np.array(y_bin_edges) - def get_2D_photon_position_distr(self): + def get_2d_photon_position_distr(self): """ Get 2D histograms of position of the Cherenkov photons on the ground. @@ -774,9 +772,9 @@ def get_2D_photon_position_distr(self): numpy.array The y bin edges of the count histograms in y, usually in meters. """ - return self._get_hist_2D_projection("counts") + return self._get_hist_2d_projection("counts") - def get_2D_photon_density_distr(self): + def get_2d_photon_density_distr(self): """ Get 2D histograms of position of the Cherenkov photons on the ground. It returns the photon density per square meter. @@ -790,9 +788,9 @@ def get_2D_photon_density_distr(self): numpy.array The y bin edges of the density/count histograms in y, usually in meters. """ - return self._get_hist_2D_projection("density") + return self._get_hist_2d_projection("density") - def get_2D_photon_direction_distr(self): + def get_2d_photon_direction_distr(self): """ Get 2D histograms of incoming direction of the Cherenkov photons on the ground. @@ -805,9 +803,9 @@ def get_2D_photon_direction_distr(self): numpy.array The y bin edges of the direction histograms in cos(y) """ - return self._get_hist_2D_projection("direction") + return self._get_hist_2d_projection("direction") - def get_2D_photon_time_altitude_distr(self): + def get_2d_photon_time_altitude_distr(self): """ Get 2D histograms of the time and altitude of the photon production. @@ -820,9 +818,9 @@ def get_2D_photon_time_altitude_distr(self): numpy.array The y bin edges of the time_altitude histograms, usually in km. """ - return self._get_hist_2D_projection("time_altitude") + return self._get_hist_2d_projection("time_altitude") - def get_2D_num_photons_distr(self): + def get_2d_num_photons_distr(self): """ Get the distribution of Cherenkov photons per event per telescope. It returns the 2D array accounting for the events from the telescopes given by `self.telescope_indices`. @@ -842,11 +840,11 @@ def get_2D_num_photons_distr(self): telescope_counter = np.arange(len(self.telescope_indices) + 1).reshape( 1, len(self.telescope_indices) + 1 ) - hist_2D = np.array(self.num_photons_per_event_per_telescope) - hist_2D = hist_2D.reshape((1, len(self.telescope_indices), self.num_events)) - return (hist_2D, num_events_array, telescope_counter) + hist_2d = np.array(self.num_photons_per_event_per_telescope) + hist_2d = hist_2d.reshape((1, len(self.telescope_indices), self.num_events)) + return (hist_2d, num_events_array, telescope_counter) - def _get_hist_1D_projection(self, label): + def _get_hist_1d_projection(self, label): """ Helper function to get 1D distributions. @@ -868,13 +866,13 @@ def _get_hist_1D_projection(self, label): if label is not valid. """ - if label not in self._allowed_1D_labels: - msg = f"{label} is not valid. Valid entries are {self._allowed_1D_labels}" + if label not in self._allowed_1d_labels: + msg = f"{label} is not valid. 
Valid entries are {self._allowed_1d_labels}" self._logger.error(msg) raise ValueError self._raise_if_no_histogram() - x_bin_edges_list, hist_1D_list = [], [] + x_bin_edges_list, hist_1d_list = [], [] for i_hist, _ in enumerate(self.hist_position): if label == "wavelength": mini_hist = self.hist_position[i_hist][sum, sum, :] @@ -884,8 +882,8 @@ def _get_hist_1D_projection(self, label): mini_hist = self.hist_time_altitude[i_hist][sum, :] x_bin_edges_list.append(mini_hist.axes.edges.T.flatten()[0]) - hist_1D_list.append(mini_hist.view().T) - return np.array(hist_1D_list), np.array(x_bin_edges_list) + hist_1d_list.append(mini_hist.view().T) + return np.array(hist_1d_list), np.array(x_bin_edges_list) def _get_bins_max_dist(self, bins=None, max_dist=None): """Auxiliary function to get the number of bins and the max distance to generate the @@ -942,21 +940,21 @@ def get_photon_radial_distr(self, bins=None, max_dist=None): """ bins, max_dist = self._get_bins_max_dist(bins=bins, max_dist=max_dist) - bin_edges_1D_list, hist1D_list = [], [] + bin_edges_1d_list, hist_1d_list = [], [] - hist2D_values_list, x_position_list, y_position_list = self.get_2D_photon_position_distr() + hist_2d_values_list, x_position_list, y_position_list = self.get_2d_photon_position_distr() for i_hist, _ in enumerate(x_position_list): - hist1D, bin_edges_1D = convert_2D_to_radial_distr( - hist2D_values_list[i_hist], - x_position_list[i_hist], + hist_1d, bin_edges_1d = convert_2d_to_radial_distr( + hist_2d_values_list[i_hist], + x_position_list[i_hist], # pylint: disable=unnecessary-list-index-lookup y_position_list[i_hist], bins=bins, max_dist=max_dist, ) - bin_edges_1D_list.append(bin_edges_1D) - hist1D_list.append(hist1D) - return np.array(hist1D_list), np.array(bin_edges_1D_list) + bin_edges_1d_list.append(bin_edges_1d) + hist_1d_list.append(hist_1d) + return np.array(hist_1d_list), np.array(bin_edges_1d_list) def get_photon_density_distr(self, bins=None, max_dist=None): """ @@ -980,21 +978,21 @@ def get_photon_density_distr(self, bins=None, max_dist=None): usually in meter. """ bins, max_dist = self._get_bins_max_dist(bins=bins, max_dist=max_dist) - bin_edges_1D_list, hist1D_list = [], [] + bin_edges_1d_list, hist_1d_list = [], [] - hist2D_values_list, x_position_list, y_position_list = self.get_2D_photon_density_distr() + hist_2d_values_list, x_position_list, y_position_list = self.get_2d_photon_density_distr() for i_hist, _ in enumerate(x_position_list): - hist1D, bin_edges_1D = convert_2D_to_radial_distr( - hist2D_values_list[i_hist], - x_position_list[i_hist], + hist_1d, bin_edges_1d = convert_2d_to_radial_distr( + hist_2d_values_list[i_hist], + x_position_list[i_hist], # pylint: disable=unnecessary-list-index-lookup y_position_list[i_hist], bins=bins, max_dist=max_dist, ) - bin_edges_1D_list.append(bin_edges_1D) - hist1D_list.append(hist1D) - return np.array(hist1D_list), np.array(bin_edges_1D_list) + bin_edges_1d_list.append(bin_edges_1d) + hist_1d_list.append(hist_1d) + return np.array(hist_1d_list), np.array(bin_edges_1d_list) def get_photon_wavelength_distr(self): """ @@ -1008,7 +1006,7 @@ def get_photon_wavelength_distr(self): The bin edges of the wavelength histogram in nanometers. """ - return self._get_hist_1D_projection("wavelength") + return self._get_hist_1d_projection("wavelength") def get_photon_time_of_emission_distr(self): """ @@ -1024,7 +1022,7 @@ def get_photon_time_of_emission_distr(self): The bin edges of the time histograms in ns. 
""" - return self._get_hist_1D_projection("time") + return self._get_hist_1d_projection("time") def get_photon_altitude_distr(self): """ @@ -1038,7 +1036,7 @@ def get_photon_altitude_distr(self): The bin edges of the photon altitude histograms in km. """ - return self._get_hist_1D_projection("altitude") + return self._get_hist_1d_projection("altitude") @property def num_photons_per_event_per_telescope(self): @@ -1124,8 +1122,8 @@ def export_histograms(self, overwrite=False): overwrite: bool If True overwrites the histograms already saved in the hdf5 file. """ - self._export_1D_histograms(overwrite=overwrite) - self._export_2D_histograms(overwrite=False) + self._export_1d_histograms(overwrite=overwrite) + self._export_2d_histograms(overwrite=False) @property def _meta_dict(self): @@ -1150,7 +1148,7 @@ def _meta_dict(self): return self.__meta_dict @property - def _dict_1D_distributions(self): + def dict_1d_distributions(self): """ Dictionary to label the 1D distributions according to the class methods. @@ -1159,60 +1157,60 @@ def _dict_1D_distributions(self): dict: The dictionary with information about the 1D distributions. """ - self.__dict_1D_distributions = { + self._dict_1d_distributions = { "wavelength": { "function": "get_photon_wavelength_distr", - "file name": "hist_1D_photon_wavelength_distr", + "file name": "hist_1d_photon_wavelength_distr", "title": "Photon wavelength distribution", "bin edges": "wavelength", "axis unit": self.hist_config["hist_position"]["z axis"]["start"].unit, }, "counts": { "function": "get_photon_radial_distr", - "file name": "hist_1D_photon_radial_distr", + "file name": "hist_1d_photon_radial_distr", "title": "Radial photon distribution on the ground", "bin edges": "Distance to center", "axis unit": self.hist_config["hist_position"]["x axis"]["start"].unit, }, "density": { "function": "get_photon_density_distr", - "file name": "hist_1D_photon_density_distr", + "file name": "hist_1d_photon_density_distr", "title": "Photon density distribution on the ground", "bin edges": "Distance to center", "axis unit": self.hist_config["hist_position"]["x axis"]["start"].unit, }, "time": { "function": "get_photon_time_of_emission_distr", - "file name": "hist_1D_photon_time_distr", + "file name": "hist_1d_photon_time_distr", "title": "Photon time of arrival distribution", "bin edges": "Time of arrival", "axis unit": self.hist_config["hist_time_altitude"]["x axis"]["start"].unit, }, "altitude": { "function": "get_photon_altitude_distr", - "file name": "hist_1D_photon_altitude_distr", + "file name": "hist_1d_photon_altitude_distr", "title": "Photon altitude of emission distribution", "bin edges": "Altitude of emission", "axis unit": self.hist_config["hist_time_altitude"]["y axis"]["start"].unit, }, "num_photons_per_event": { "function": "get_num_photons_per_event_distr", - "file name": "hist_1D_photon_per_event_distr", + "file name": "hist_1d_photon_per_event_distr", "title": "Photons per event distribution", "bin edges": "Event counter", "axis unit": u.dimensionless_unscaled, }, "num_photons_per_telescope": { "function": "get_num_photons_per_telescope_distr", - "file name": "hist_1D_photon_per_telescope_distr", + "file name": "hist_1d_photon_per_telescope_distr", "title": "Photons per telescope distribution", "bin edges": "Telescope counter", "axis unit": u.dimensionless_unscaled, }, } - return self.__dict_1D_distributions + return self._dict_1d_distributions - def _export_1D_histograms(self, overwrite=False): + def _export_1d_histograms(self, overwrite=False): """ Auxiliary 
function to export only the 1D histograms. @@ -1222,16 +1220,16 @@ def _export_1D_histograms(self, overwrite=False): If True overwrites the histograms already saved in the hdf5 file. """ - for _, function_dict in self._dict_1D_distributions.items(): + for _, function_dict in self.dict_1d_distributions.items(): self._meta_dict["Title"] = sanitize_name(function_dict["title"]) function = getattr(self, function_dict["function"]) - hist_1D_list, x_bin_edges_list = function() + hist_1d_list, x_bin_edges_list = function() x_bin_edges_list = x_bin_edges_list * function_dict["axis unit"] if function_dict["function"] == "get_photon_density_distr": histogram_value_unit = 1 / (function_dict["axis unit"] ** 2) else: histogram_value_unit = u.dimensionless_unscaled - hist_1D_list = hist_1D_list * histogram_value_unit + hist_1d_list = hist_1d_list * histogram_value_unit for i_histogram, _ in enumerate(x_bin_edges_list): if self.individual_telescopes: hdf5_table_name = ( @@ -1242,7 +1240,7 @@ def _export_1D_histograms(self, overwrite=False): hdf5_table_name = f"/{function_dict['file name']}_all_tels" table = fill_hdf5_table( - hist=hist_1D_list[i_histogram], + hist=hist_1d_list[i_histogram], x_bin_edges=x_bin_edges_list[i_histogram], y_bin_edges=None, x_label=function_dict["bin edges"], @@ -1263,25 +1261,20 @@ def _export_1D_histograms(self, overwrite=False): ) @property - def _dict_2D_distributions(self, overwrite=False): + def dict_2d_distributions(self): """ Dictionary to label the 2D distributions according to the class methods. - Parameters - ---------- - overwrite: bool - If True overwrites the histograms already saved in the hdf5 file. - Returns ------- dict: The dictionary with information about the 2D distributions. """ - if self.__dict_2D_distributions is None: - self.__dict_2D_distributions = { + if self._dict_2d_distributions is None: + self._dict_2d_distributions = { "counts": { - "function": "get_2D_photon_position_distr", - "file name": "hist_2D_photon_count_distr", + "function": "get_2d_photon_position_distr", + "file name": "hist_2d_photon_count_distr", "title": "Photon count distribution on the ground", "x bin edges": "x position on the ground", "x axis unit": self.hist_config["hist_position"]["x axis"]["start"].unit, @@ -1289,8 +1282,8 @@ def _dict_2D_distributions(self, overwrite=False): "y axis unit": self.hist_config["hist_position"]["y axis"]["start"].unit, }, "density": { - "function": "get_2D_photon_density_distr", - "file name": "hist_2D_photon_density_distr", + "function": "get_2d_photon_density_distr", + "file name": "hist_2d_photon_density_distr", "title": "Photon density distribution on the ground", "x bin edges": "x position on the ground", "x axis unit": self.hist_config["hist_position"]["x axis"]["start"].unit, @@ -1298,8 +1291,8 @@ def _dict_2D_distributions(self, overwrite=False): "y axis unit": self.hist_config["hist_position"]["y axis"]["start"].unit, }, "direction": { - "function": "get_2D_photon_direction_distr", - "file name": "hist_2D_photon_direction_distr", + "function": "get_2d_photon_direction_distr", + "file name": "hist_2d_photon_direction_distr", "title": "Photon arrival direction", "x bin edges": "x direction cosine", "x axis unit": u.dimensionless_unscaled, @@ -1307,8 +1300,8 @@ def _dict_2D_distributions(self, overwrite=False): "y axis unit": u.dimensionless_unscaled, }, "time_altitude": { - "function": "get_2D_photon_time_altitude_distr", - "file name": "hist_2D_photon_time_altitude_distr", + "function": "get_2d_photon_time_altitude_distr", + "file name": 
"hist_2d_photon_time_altitude_distr", "title": "Time of arrival vs altitude of emission", "x bin edges": "Time of arrival", "x axis unit": self.hist_config["hist_time_altitude"]["x axis"]["start"].unit, @@ -1316,8 +1309,8 @@ def _dict_2D_distributions(self, overwrite=False): "y axis unit": self.hist_config["hist_time_altitude"]["y axis"]["start"].unit, }, "num_photons_per_telescope": { - "function": "get_2D_num_photons_distr", - "file name": "hist_2D_photon_telescope_event_distr", + "function": "get_2d_num_photons_distr", + "file name": "hist_2d_photon_telescope_event_distr", "title": "Number of photons per telescope and per event", "x bin edges": "Telescope counter", "x axis unit": u.dimensionless_unscaled, @@ -1325,9 +1318,9 @@ def _dict_2D_distributions(self, overwrite=False): "y axis unit": u.dimensionless_unscaled, }, } - return self.__dict_2D_distributions + return self._dict_2d_distributions - def _export_2D_histograms(self, overwrite): + def _export_2d_histograms(self, overwrite): """ Auxiliary function to export only the 2D histograms. @@ -1336,37 +1329,37 @@ def _export_2D_histograms(self, overwrite): overwrite: bool If True overwrites the histograms already saved in the hdf5 file. """ - for property_name, function_dict in self._dict_2D_distributions.items(): + for property_name, function_dict in self.dict_2d_distributions.items(): self._meta_dict["Title"] = sanitize_name(function_dict["title"]) function = getattr(self, function_dict["function"]) - hist_2D_list, x_bin_edges_list, y_bin_edges_list = function() - if function_dict["function"] == "get_2D_photon_density_distr": + hist_2d_list, x_bin_edges_list, y_bin_edges_list = function() + if function_dict["function"] == "get_2d_photon_density_distr": histogram_value_unit = 1 / ( - self._dict_2D_distributions[property_name]["x axis unit"] - * self._dict_2D_distributions[property_name]["y axis unit"] + self.dict_2d_distributions[property_name]["x axis unit"] + * self.dict_2d_distributions[property_name]["y axis unit"] ) else: histogram_value_unit = u.dimensionless_unscaled - hist_2D_list, x_bin_edges_list, y_bin_edges_list = ( - hist_2D_list * histogram_value_unit, - x_bin_edges_list * self._dict_2D_distributions[property_name]["x axis unit"], - y_bin_edges_list * self._dict_2D_distributions[property_name]["y axis unit"], + hist_2d_list, x_bin_edges_list, y_bin_edges_list = ( + hist_2d_list * histogram_value_unit, + x_bin_edges_list * self.dict_2d_distributions[property_name]["x axis unit"], + y_bin_edges_list * self.dict_2d_distributions[property_name]["y axis unit"], ) for i_histogram, _ in enumerate(x_bin_edges_list): if self.individual_telescopes: hdf5_table_name = ( - f"/{self._dict_2D_distributions[property_name]['file name']}" + f"/{self.dict_2d_distributions[property_name]['file name']}" f"_tel_index_{self.telescope_indices[i_histogram]}" ) else: hdf5_table_name = ( - f"/{self._dict_2D_distributions[property_name]['file name']}" f"_all_tels" + f"/{self.dict_2d_distributions[property_name]['file name']}" f"_all_tels" ) table = fill_hdf5_table( - hist=hist_2D_list[i_histogram], + hist=hist_2d_list[i_histogram], x_bin_edges=x_bin_edges_list[i_histogram], y_bin_edges=y_bin_edges_list[i_histogram], x_label=function_dict["x bin edges"], @@ -1379,12 +1372,12 @@ def _export_2D_histograms(self, overwrite): f"{self.hdf5_file_name}." ) # Always appending to table due to the file previously created - # by self._export_1D_histograms. + # by self._export_1d_histograms. 
write_table( table, self.hdf5_file_name, hdf5_table_name, append=True, overwrite=overwrite ) - def export_event_header_1D_histogram( + def export_event_header_1d_histogram( self, event_header_element, bins=50, hist_range=None, overwrite=False ): """ @@ -1404,7 +1397,7 @@ def export_event_header_1D_histogram( If True overwrites the histograms already saved in the hdf5 file. """ - hist, bin_edges = self.event_1D_histogram( + hist, bin_edges = self.event_1d_histogram( event_header_element, bins=bins, hist_range=hist_range ) bin_edges *= self.event_information[event_header_element].unit @@ -1416,7 +1409,7 @@ def export_event_header_1D_histogram( y_label=None, meta_data=self._meta_dict, ) - hdf5_table_name = f"/event_2D_histograms_{event_header_element}" + hdf5_table_name = f"/event_2d_histograms_{event_header_element}" self._logger.info( f"Exporting histogram with name {hdf5_table_name} to {self.hdf5_file_name}." @@ -1428,7 +1421,7 @@ def export_event_header_1D_histogram( append = True write_table(table, self.hdf5_file_name, hdf5_table_name, append=append, overwrite=overwrite) - def export_event_header_2D_histogram( + def export_event_header_2d_histogram( self, event_header_element_1, event_header_element_2, @@ -1455,7 +1448,7 @@ def export_event_header_2D_histogram( overwrite: bool If True overwrites the histograms already saved in the hdf5 file. """ - hist, x_bin_edges, y_bin_edges = self.event_2D_histogram( + hist, x_bin_edges, y_bin_edges = self.event_2d_histogram( event_header_element_1, event_header_element_2, bins=bins, hist_range=hist_range ) x_bin_edges *= self.event_information[event_header_element_1].unit @@ -1470,7 +1463,7 @@ def export_event_header_2D_histogram( meta_data=self._meta_dict, ) - hdf5_table_name = f"/event_2D_histograms_{event_header_element_1}_{event_header_element_2}" + hdf5_table_name = f"/event_2d_histograms_{event_header_element_1}_{event_header_element_2}" self._logger.info( f"Exporting histogram with name {hdf5_table_name} to {self.hdf5_file_name}." @@ -1685,7 +1678,7 @@ def get_run_info(self, parameter): raise KeyError return self.header[parameter] - def event_1D_histogram(self, key, bins=50, hist_range=None): + def event_1d_histogram(self, key, bins=50, hist_range=None): """ Create a histogram for all events using `key` as parameter. Valid keys are stored in `self.all_event_keys` (CORSIKA defined). @@ -1724,7 +1717,7 @@ def event_1D_histogram(self, key, bins=50, hist_range=None): ) return hist, bin_edges - def event_2D_histogram(self, key_1, key_2, bins=50, hist_range=None): + def event_2d_histogram(self, key_1, key_2, bins=50, hist_range=None): """ Create a 2D histogram for all events using `key_1` and `key_2` as parameters. Valid keys are stored in `self.all_event_keys` (CORSIKA defined).
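For orientation, a minimal usage sketch of the renamed snake_case CorsikaHistograms interface (illustrative only, not part of the patch; the input file name is the test resource used in the integration tests, and the module path is inferred from the file location):

from simtools.corsika.corsika_histograms import CorsikaHistograms

# Build the Cherenkov-photon histograms for a few telescopes of a CORSIKA IACT output file.
histograms = CorsikaHistograms("tests/resources/tel_output_10GeV-2-gamma-20deg-CTAO-South.corsikaio")
histograms.set_histograms(telescope_indices=[0, 1, 2])

# 2D photon positions on the ground and their radial (1D) projection.
counts, x_edges, y_edges = histograms.get_2d_photon_position_distr()
radial_hist, radial_edges = histograms.get_photon_radial_distr(bins=100, max_dist=1200)

# Event-header quantities go through the renamed event_1d_histogram / event_2d_histogram.
energy_hist, energy_edges = histograms.event_1d_histogram("total_energy", bins=50)

# Write the 1D and 2D histograms to the configured HDF5 file.
histograms.export_histograms(overwrite=True)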
diff --git a/simtools/corsika/corsika_histograms_visualize.py b/simtools/corsika/corsika_histograms_visualize.py index 0974d5ea3e..97a713d1b0 100644 --- a/simtools/corsika/corsika_histograms_visualize.py +++ b/simtools/corsika/corsika_histograms_visualize.py @@ -1,16 +1,16 @@ import logging from pathlib import Path -import matplotlib.colors as colors import matplotlib.pyplot as plt import numpy as np from astropy import units as u +from matplotlib import colors from matplotlib.backends.backend_pdf import PdfPages _logger = logging.getLogger(__name__) -def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): +def _kernel_plot_2d_photons(histograms_instance, property_name, log_z=False): """ The functions below are used by the CorsikaHistograms class to plot all sorts of information from the saved Cherenkov photons. @@ -38,16 +38,16 @@ def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): ValueError if `property` is not allowed. """ - if property_name not in histograms_instance._dict_2D_distributions: + if property_name not in histograms_instance.dict_2d_distributions: msg = ( f"This property does not exist. The valid entries are " - f"{histograms_instance._dict_2D_distributions}" + f"{histograms_instance.dict_2d_distributions}" ) _logger.error(msg) raise ValueError function = getattr( histograms_instance, - histograms_instance._dict_2D_distributions[property_name]["function"], + histograms_instance.dict_2d_distributions[property_name]["function"], ) hist_values, x_bin_edges, y_bin_edges = function() @@ -62,28 +62,28 @@ def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): x_bin_edges[i_hist], y_bin_edges[i_hist], hist_values[i_hist], norm=norm ) if ( - histograms_instance._dict_2D_distributions[property_name]["x axis unit"] + histograms_instance.dict_2d_distributions[property_name]["x axis unit"] is not u.dimensionless_unscaled ): ax.set_xlabel( - f"{histograms_instance._dict_2D_distributions[property_name]['x bin edges']} " - f"({histograms_instance._dict_2D_distributions[property_name]['x axis unit']})" + f"{histograms_instance.dict_2d_distributions[property_name]['x bin edges']} " + f"({histograms_instance.dict_2d_distributions[property_name]['x axis unit']})" ) else: ax.set_xlabel( - f"{histograms_instance._dict_2D_distributions[property_name]['x bin edges']} " + f"{histograms_instance.dict_2d_distributions[property_name]['x bin edges']} " ) if ( - histograms_instance._dict_2D_distributions[property_name]["y axis unit"] + histograms_instance.dict_2d_distributions[property_name]["y axis unit"] is not u.dimensionless_unscaled ): ax.set_ylabel( - f"{histograms_instance._dict_2D_distributions[property_name]['y bin edges']} " - f"({histograms_instance._dict_2D_distributions[property_name]['y axis unit']})" + f"{histograms_instance.dict_2d_distributions[property_name]['y bin edges']} " + f"({histograms_instance.dict_2d_distributions[property_name]['y axis unit']})" ) else: ax.set_ylabel( - f"{histograms_instance._dict_2D_distributions[property_name]['y bin edges']} " + f"{histograms_instance.dict_2d_distributions[property_name]['y bin edges']} " ) ax.set_xlim(np.amin(x_bin_edges[i_hist]), np.amax(x_bin_edges[i_hist])) ax.set_ylim(np.amin(y_bin_edges[i_hist]), np.amax(y_bin_edges[i_hist])) @@ -92,7 +92,7 @@ def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): all_figs.append(fig) if histograms_instance.individual_telescopes is False: ax.set_title( -
f"{histograms_instance._dict_2D_distributions[property_name]['file name']}" + f"{histograms_instance.dict_2d_distributions[property_name]['file name']}" "_all_tels" ) else: @@ -106,7 +106,7 @@ def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): color="white", ) ax.set_title( - f"{histograms_instance._dict_2D_distributions[property_name]['file name']}" + f"{histograms_instance.dict_2d_distributions[property_name]['file name']}" f"_tel_index_{histograms_instance.telescope_indices[i_hist]}", ) plt.close() @@ -114,7 +114,7 @@ def _kernel_plot_2D_photons(histograms_instance, property_name, log_z=False): return all_figs -def plot_2D_counts(histograms_instance, log_z=True): +def plot_2d_counts(histograms_instance, log_z=True): """ Plot the 2D histogram of the photon positions on the ground. @@ -130,10 +130,10 @@ def plot_2D_counts(histograms_instance, log_z=True): list List of figures for the given telescopes. """ - return _kernel_plot_2D_photons(histograms_instance, "counts", log_z=log_z) + return _kernel_plot_2d_photons(histograms_instance, "counts", log_z=log_z) -def plot_2D_density(histograms_instance, log_z=True): +def plot_2d_density(histograms_instance, log_z=True): """ Plot the 2D histogram of the photon density distribution on the ground. @@ -150,10 +150,10 @@ def plot_2D_density(histograms_instance, log_z=True): List of figures for the given telescopes. """ - return _kernel_plot_2D_photons(histograms_instance, "density", log_z=log_z) + return _kernel_plot_2d_photons(histograms_instance, "density", log_z=log_z) -def plot_2D_direction(histograms_instance, log_z=True): +def plot_2d_direction(histograms_instance, log_z=True): """ Plot the 2D histogram of the incoming direction of photons. @@ -170,10 +170,10 @@ def plot_2D_direction(histograms_instance, log_z=True): List of figures for the given telescopes. """ - return _kernel_plot_2D_photons(histograms_instance, "direction", log_z=log_z) + return _kernel_plot_2d_photons(histograms_instance, "direction", log_z=log_z) -def plot_2D_time_altitude(histograms_instance, log_z=True): +def plot_2d_time_altitude(histograms_instance, log_z=True): """ Plot the 2D histogram of the time and altitude where the photon was produced. @@ -190,10 +190,10 @@ def plot_2D_time_altitude(histograms_instance, log_z=True): List of figures for the given telescopes. """ - return _kernel_plot_2D_photons(histograms_instance, "time_altitude", log_z=log_z) + return _kernel_plot_2d_photons(histograms_instance, "time_altitude", log_z=log_z) -def plot_2D_num_photons_per_telescope(histograms_instance, log_z=True): +def plot_2d_num_photons_per_telescope(histograms_instance, log_z=True): """ Plot the 2D histogram of the number of photons per event and per telescope. @@ -210,10 +210,10 @@ def plot_2D_num_photons_per_telescope(histograms_instance, log_z=True): List of figures for the given telescopes. """ - return _kernel_plot_2D_photons(histograms_instance, "num_photons_per_telescope", log_z=log_z) + return _kernel_plot_2d_photons(histograms_instance, "num_photons_per_telescope", log_z=log_z) -def _kernel_plot_1D_photons(histograms_instance, property_name, log_y=True): +def _kernel_plot_1d_photons(histograms_instance, property_name, log_y=True): """ Create the figure of a 1D plot. The parameter `property` indicate which plot. @@ -238,17 +238,17 @@ def _kernel_plot_1D_photons(histograms_instance, property_name, log_y=True): ValueError if `property` is not allowed. 
""" - if property_name not in histograms_instance._dict_1D_distributions: + if property_name not in histograms_instance.dict_1d_distributions: msg = ( f"This property does not exist. The valid entries are " - f"{histograms_instance._dict_1D_distributions}" + f"{histograms_instance.dict_1d_distributions}" ) _logger.error(msg) raise ValueError function = getattr( histograms_instance, - histograms_instance._dict_1D_distributions[property_name]["function"], + histograms_instance.dict_1d_distributions[property_name]["function"], ) hist_values, bin_edges = function() all_figs = [] @@ -261,20 +261,20 @@ def _kernel_plot_1D_photons(histograms_instance, property_name, log_y=True): width=np.abs(np.diff(bin_edges[i_hist])), ) if ( - histograms_instance._dict_1D_distributions[property_name]["axis unit"] + histograms_instance.dict_1d_distributions[property_name]["axis unit"] is not u.dimensionless_unscaled ): ax.set_xlabel( - f"{histograms_instance._dict_1D_distributions[property_name]['bin edges']} " - f"({histograms_instance._dict_1D_distributions[property_name]['axis unit']})" + f"{histograms_instance.dict_1d_distributions[property_name]['bin edges']} " + f"({histograms_instance.dict_1d_distributions[property_name]['axis unit']})" ) else: ax.set_xlabel( - f"{histograms_instance._dict_1D_distributions[property_name]['bin edges']} " + f"{histograms_instance.dict_1d_distributions[property_name]['bin edges']} " ) if property_name == "density": ax.set_ylabel( - f"Density ({histograms_instance._dict_1D_distributions[property_name]['axis unit']}" + f"Density ({histograms_instance.dict_1d_distributions[property_name]['axis unit']}" r"$^{-2}$)" ) else: @@ -284,12 +284,12 @@ def _kernel_plot_1D_photons(histograms_instance, property_name, log_y=True): ax.set_yscale("log") if histograms_instance.individual_telescopes is False: ax.set_title( - f"{histograms_instance._dict_1D_distributions[property_name]['file name']}" + f"{histograms_instance.dict_1d_distributions[property_name]['file name']}" "_all_tels" ) else: ax.set_title( - f"{histograms_instance._dict_1D_distributions[property_name]['file name']}" + f"{histograms_instance.dict_1d_distributions[property_name]['file name']}" f"_tel_index_{histograms_instance.telescope_indices[i_hist]}", ) all_figs.append(fig) @@ -313,7 +313,7 @@ def plot_wavelength_distr(histograms_instance, log_y=True): list List of figures for the given telescopes. """ - return _kernel_plot_1D_photons(histograms_instance, "wavelength", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "wavelength", log_y=log_y) def plot_counts_distr(histograms_instance, log_y=True): @@ -332,7 +332,7 @@ def plot_counts_distr(histograms_instance, log_y=True): list List of figures for the given telescopes. """ - return _kernel_plot_1D_photons(histograms_instance, "counts", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "counts", log_y=log_y) def plot_density_distr(histograms_instance, log_y=True): @@ -351,7 +351,7 @@ def plot_density_distr(histograms_instance, log_y=True): list List of figures for the given telescopes. """ - return _kernel_plot_1D_photons(histograms_instance, "density", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "density", log_y=log_y) def plot_time_distr(histograms_instance, log_y=True): @@ -370,7 +370,7 @@ def plot_time_distr(histograms_instance, log_y=True): list List of figures for the given telescopes. 
""" - return _kernel_plot_1D_photons(histograms_instance, "time", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "time", log_y=log_y) def plot_altitude_distr(histograms_instance, log_y=True): @@ -389,7 +389,7 @@ def plot_altitude_distr(histograms_instance, log_y=True): list List of figures for the given telescopes. """ - return _kernel_plot_1D_photons(histograms_instance, "altitude", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "altitude", log_y=log_y) def plot_photon_per_event_distr(histograms_instance, log_y=True): @@ -410,7 +410,7 @@ def plot_photon_per_event_distr(histograms_instance, log_y=True): """ - return _kernel_plot_1D_photons(histograms_instance, "num_photons_per_event", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "num_photons_per_event", log_y=log_y) def plot_photon_per_telescope_distr(histograms_instance, log_y=True): @@ -431,10 +431,10 @@ def plot_photon_per_telescope_distr(histograms_instance, log_y=True): """ - return _kernel_plot_1D_photons(histograms_instance, "num_photons_per_telescope", log_y=log_y) + return _kernel_plot_1d_photons(histograms_instance, "num_photons_per_telescope", log_y=log_y) -def plot_1D_event_header_distribution( +def plot_1d_event_header_distribution( histograms_instance, event_header_element, log_y=True, bins=50, hist_range=None ): """ @@ -459,7 +459,7 @@ def plot_1D_event_header_distribution( List of figures for the given telescopes. """ - hist_values, bin_edges = histograms_instance.event_1D_histogram( + hist_values, bin_edges = histograms_instance.event_1d_histogram( event_header_element, bins=bins, hist_range=hist_range ) fig, ax = plt.subplots() @@ -483,11 +483,11 @@ def plot_1D_event_header_distribution( if log_y is True: ax.set_yscale("log") - ax.set_title(f"hist_1D_{event_header_element}") + ax.set_title(f"hist_1d_{event_header_element}") return fig -def plot_2D_event_header_distribution( +def plot_2d_event_header_distribution( histograms_instance, event_header_element_1, event_header_element_2, @@ -519,7 +519,7 @@ def plot_2D_event_header_distribution( List of figures for the given telescopes. 
""" - hist_values, x_bin_edges, y_bin_edges = histograms_instance.event_2D_histogram( + hist_values, x_bin_edges, y_bin_edges = histograms_instance.event_2d_histogram( event_header_element_1, event_header_element_2, bins=bins, hist_range=hist_range ) fig, ax = plt.subplots() @@ -551,7 +551,7 @@ def plot_2D_event_header_distribution( ax.set_ylabel(f"{event_header_element_2}") ax.set_facecolor("black") - ax.set_title(f"hist_2D_{event_header_element_1}_{event_header_element_2}") + ax.set_title(f"hist_2d_{event_header_element_1}_{event_header_element_2}") fig.colorbar(mesh) return fig diff --git a/simtools/corsika_simtel/corsika_simtel_runner.py b/simtools/corsika_simtel/corsika_simtel_runner.py index 69777dfdc2..1efb9b3c98 100644 --- a/simtools/corsika_simtel/corsika_simtel_runner.py +++ b/simtools/corsika_simtel/corsika_simtel_runner.py @@ -90,7 +90,7 @@ def export_multipipe_script(self, **kwargs): run_number=run_number, input_file="-", # Tell sim_telarray to take the input from standard output ) - multipipe_file = Path(self.corsika_config._config_file_path.parent).joinpath( + multipipe_file = Path(self.corsika_config.config_file_path.parent).joinpath( self.corsika_config.get_file_name("multipipe") ) with open(multipipe_file, "w", encoding="utf-8") as file: @@ -107,7 +107,7 @@ def _export_multipipe_executable(self, multipipe_file): The name of the multipipe file which contains all of the multipipe commands. """ - multipipe_executable = Path(self.corsika_config._config_file_path.parent).joinpath( + multipipe_executable = Path(self.corsika_config.config_file_path.parent).joinpath( "run_cta_multipipe" ) with open(multipipe_executable, "w", encoding="utf-8") as file: diff --git a/simtools/data_model/metadata_collector.py b/simtools/data_model/metadata_collector.py index 1fc358d2cb..ed5eb37de3 100644 --- a/simtools/data_model/metadata_collector.py +++ b/simtools/data_model/metadata_collector.py @@ -114,6 +114,7 @@ def get_data_model_schema_file_name(self): pass self._logger.warning("No schema file found.") + return None def get_data_model_schema_dict(self): """ diff --git a/simtools/data_model/model_data_writer.py b/simtools/data_model/model_data_writer.py index c9a68a4974..28288bae6c 100644 --- a/simtools/data_model/model_data_writer.py +++ b/simtools/data_model/model_data_writer.py @@ -60,7 +60,6 @@ def dump(args_dict, metadata=None, product_data=None, validate_schema_file=False ) if validate_schema_file: product_data = writer.validate_and_transform( - metadata=metadata, product_data=product_data, validate_schema_file=validate_schema_file, ) @@ -84,15 +83,13 @@ def write(self, metadata=None, product_data=None): if product_data is not None: self.write_data(product_data=product_data) - def validate_and_transform(self, metadata=None, product_data=None, validate_schema_file=None): + def validate_and_transform(self, product_data=None, validate_schema_file=None): """ Validate product data using jsonschema given in metadata. If necessary, transform product data to match schema. Parameters ---------- - metadata: dict - Metadata to be written. 
product_data: astropy Table Model data to be validated validate_schema_file: str diff --git a/simtools/db_handler.py b/simtools/db_handler.py index 36dbccf26f..81a540f90e 100644 --- a/simtools/db_handler.py +++ b/simtools/db_handler.py @@ -94,7 +94,8 @@ def _set_up_connection(self): """ if self.mongo_db_config: if DatabaseHandler.db_client is None: - with Lock(): + lock = Lock() + with lock: DatabaseHandler.db_client = self._open_mongo_db() def _open_mongo_db(self): @@ -279,7 +280,7 @@ def _write_model_file_yaml(self, file_name, dest_dir, no_file_ok=False): raise - dest_file.write_text(file.read_text()) + dest_file.write_text(file.read_text(encoding="utf-8"), encoding="utf-8") def _get_model_parameters_yaml( self, telescope_model_name, model_version, only_applicable=False @@ -881,7 +882,7 @@ def copy_telescope( try: collection.insert_many(db_entries) except BulkWriteError as exc: - raise BulkWriteError.details from exc + raise BulkWriteError(str(exc.details)) from exc def copy_documents(self, db_name, collection, query, db_to_copy_to, collection_to_copy_to=None): """ @@ -931,7 +932,7 @@ def copy_documents(self, db_name, collection, query, db_to_copy_to, collection_t try: _collection.insert_many(db_entries) except BulkWriteError as exc: - raise BulkWriteError.details from exc + raise BulkWriteError(str(exc.details)) from exc def delete_query(self, db_name, collection, query): """ @@ -1385,9 +1386,9 @@ def insert_file_to_db(self, file_name, db_name=DB_CTA_SIMULATION_MODEL, **kwargs self._logger.warning( f"The file {kwargs['filename']} exists in the DB. Returning its ID" ) - return file_system.find_one( + return file_system.find_one( # pylint: disable=protected-access {"filename": kwargs["filename"]} - )._id # pylint: disable=protected-access; + )._id with open(file_name, "rb") as data_file: return file_system.put(data_file, **kwargs) diff --git a/simtools/io_operations/hdf5_handler.py b/simtools/io_operations/hdf5_handler.py index 4d33cbbbb6..761bc31dc0 100644 --- a/simtools/io_operations/hdf5_handler.py +++ b/simtools/io_operations/hdf5_handler.py @@ -106,7 +106,7 @@ def read_hdf5(hdf5_file_name): with tables.open_file(hdf5_file_name, mode="r") as file: for node in file.walk_nodes("/", "Table"): - table_path = node._v_pathname + table_path = node._v_pathname # pylint: disable=protected-access table = read_table(hdf5_file_name, table_path) tables_list.append(table) return tables_list diff --git a/simtools/model/telescope_model.py b/simtools/model/telescope_model.py index 44ae917eef..7608d70342 100644 --- a/simtools/model/telescope_model.py +++ b/simtools/model/telescope_model.py @@ -713,7 +713,7 @@ def _load_simtel_config_writer(self): label=self.label, ) - def is_file_2D(self, par): + def is_file_2d(self, par): """ Check if the file referenced by par is a 2D table. 
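The hunk that follows completes the renamed is_file_2d helper, which reduces to a check for the sim_telarray "@RPOL@" marker. A self-contained sketch of the same check (the standalone function name and file path are hypothetical, not part of the patch):

from pathlib import Path

def is_simtel_table_2d(file_path):
    # Mirrors TelescopeModel.is_file_2d: a sim_telarray table file is treated as
    # a 2D table when it contains the "@RPOL@" marker.
    with open(Path(file_path), "r", encoding="utf-8") as f:
        return "@RPOL@" in f.read()

# Callers use the model method directly, e.g. the camera-efficiency runner collapses a
# camera filter table to 1D only if telescope_model.is_file_2d("camera_filter") is True.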
@@ -734,8 +734,8 @@ def is_file_2D(self, par): file_name = self.get_parameter_value(par) file = self.get_config_directory().joinpath(file_name) with open(file, "r", encoding="utf-8") as f: - is2D = "@RPOL@" in f.read() - return is2D + is_2d = "@RPOL@" in f.read() + return is_2d def read_two_dim_wavelength_angle(self, file_name): """ diff --git a/simtools/psf_analysis.py b/simtools/psf_analysis.py index 2320727f86..1aa75327bd 100644 --- a/simtools/psf_analysis.py +++ b/simtools/psf_analysis.py @@ -51,7 +51,7 @@ def __init__(self, focal_length=None, total_scattered_area=None): self.centroid_x = None self.centroid_y = None self._total_area = total_scattered_area - self._stored_PSF = {} + self._stored_psf = {} if focal_length is not None: self._cm_to_deg = 180.0 / pi / focal_length self._has_focal_length = True @@ -76,7 +76,7 @@ def read_photon_list_from_simtel_file(self, photons_file): self._logger.info(f"Reading sim_telarray file {photons_file}") self._total_photons = 0 if Path(photons_file).suffix == ".gz": - import gzip + import gzip # pylint: disable=import-outside-toplevel file_open_function = gzip.open else: @@ -197,10 +197,10 @@ def get_psf(self, fraction=0.8, unit="cm"): if unit == "deg" and not self._has_focal_length: self._logger.error("PSF cannot be computed in deg because focal length is not set") return None - if fraction not in self._stored_PSF: + if fraction not in self._stored_psf: self._compute_psf(fraction) unit_factor = 1 if unit == "cm" else self._cm_to_deg - return self._stored_PSF[fraction] * unit_factor + return self._stored_psf[fraction] * unit_factor def set_psf(self, value, fraction=0.8, unit="cm"): """ @@ -219,7 +219,7 @@ def set_psf(self, value, fraction=0.8, unit="cm"): self._logger.error("PSF cannot be set in deg because focal length is not set") return unit_factor = 1 if unit == "cm" else 1.0 / self._cm_to_deg - self._stored_PSF[fraction] = value * unit_factor + self._stored_psf[fraction] = value * unit_factor def _compute_psf(self, fraction): """ @@ -230,7 +230,7 @@ def _compute_psf(self, fraction): fraction: float Fraction of photons within the containing radius """ - self._stored_PSF[fraction] = self._find_psf(fraction) + self._stored_psf[fraction] = self._find_psf(fraction) def _find_psf(self, fraction): """ @@ -259,21 +259,21 @@ def _find_psf(self, fraction): target_number = fraction * self._number_of_detected_photons current_radius = 1.5 * radius_sig start_number = self._sum_photons_in_radius(current_radius) - SCALE = 0.5 * sqrt(current_radius * current_radius / start_number) + scale = 0.5 * sqrt(current_radius * current_radius / start_number) delta_number = start_number - target_number n_iter = 0 - MAX_ITER = 100 - TOLERANCE = self._number_of_detected_photons / 1000.0 + max_iter = 100 + tolerance = self._number_of_detected_photons / 1000.0 found_radius = False - while not found_radius and n_iter < MAX_ITER: + while not found_radius and n_iter < max_iter: n_iter += 1 - dr = -delta_number * SCALE / sqrt(target_number) + dr = -delta_number * scale / sqrt(target_number) while current_radius + dr < 0: dr *= 0.5 current_radius += dr current_number = self._sum_photons_in_radius(current_radius) delta_number = current_number - target_number - found_radius = fabs(delta_number) < TOLERANCE + found_radius = fabs(delta_number) < tolerance if found_radius: # Diameter = 2 * radius @@ -392,7 +392,7 @@ def plot_image(self, centralized=True, **kwargs): psf_ls="--", ) kwargs_for_image = collect_kwargs("image", kwargs) - kwargs_for_PSF = collect_kwargs("psf", kwargs) + 
kwargs_for_psf = collect_kwargs("psf", kwargs) ax = plt.gca() # Image histogram @@ -401,7 +401,7 @@ def plot_image(self, centralized=True, **kwargs): # PSF circle center = (0, 0) if centralized else (self.centroid_x, self.centroid_y) - circle = plt.Circle(center, self.get_psf(0.8) / 2, **kwargs_for_PSF) + circle = plt.Circle(center, self.get_psf(0.8) / 2, **kwargs_for_psf) ax.add_artist(circle) ax.axhline(0, color="k", linestyle="--", zorder=3, linewidth=0.5) diff --git a/simtools/simtel/simtel_histograms.py b/simtools/simtel/simtel_histograms.py index 45a3a11e2d..e662e0eded 100644 --- a/simtools/simtel/simtel_histograms.py +++ b/simtools/simtel/simtel_histograms.py @@ -53,14 +53,14 @@ def plot_and_save_figures(self, fig_name): fig_name: str Name of the output figure file. """ - self._combine_histogram_files() + self.combine_histogram_files() self._plot_combined_histograms(fig_name) @property def number_of_histograms(self): """Returns number of histograms.""" if self.combined_hists is None: - self._combine_histogram_files() + self.combine_histogram_files() return len(self.combined_hists) def get_histogram_title(self, i_hist): @@ -78,10 +78,10 @@ def get_histogram_title(self, i_hist): Histogram title. """ if self.combined_hists is None: - self._combine_histogram_files() + self.combine_histogram_files() return self.combined_hists[i_hist]["title"] - def _combine_histogram_files(self): + def combine_histogram_files(self): """Combine histograms from all files into one single list of histograms.""" # Processing and combining histograms from multiple files self.combined_hists = [] @@ -92,7 +92,7 @@ def _combine_histogram_files(self): for o in yield_toplevel_of_type(f, Histograms): try: hists = o.parse() - except Exception: + except Exception: # pylint: disable=broad-except self._logger.warning(f"Problematic file {file}") count_file = False continue diff --git a/simtools/simtel/simtel_runner.py b/simtools/simtel/simtel_runner.py index 11f9d81c79..32ef4abe8a 100644 --- a/simtools/simtel/simtel_runner.py +++ b/simtools/simtel/simtel_runner.py @@ -45,7 +45,7 @@ def __init__(self, simtel_source_path, label=None): self._script_dir = None self._script_file = None - self.RUNS_PER_SET = 1 + self.runs_per_set = 1 def __repr__(self): return f"SimtelRunner(label={self.label})\n" @@ -140,8 +140,8 @@ def prepare_run_script(self, test=False, input_file=None, run_number=None, extra file.write(f"{line}\n") file.write("# End of extras\n\n") - N = 1 if test else self.RUNS_PER_SET - for _ in range(N): + n = 1 if test else self.runs_per_set + for _ in range(n): file.write(f"{command}\n\n") # Printing out runtime @@ -178,10 +178,10 @@ def run(self, test=False, force=False, input_file=None, run_number=None): self._logger.info(f"Running (test) with command: {command}") self._run_simtel_and_check_output(command) else: - self._logger.debug(f"Running ({self.RUNS_PER_SET}x) with command: {command}") + self._logger.debug(f"Running ({self.runs_per_set}x) with command: {command}") self._run_simtel_and_check_output(command) - for _ in range(self.RUNS_PER_SET - 1): + for _ in range(self.runs_per_set - 1): self._run_simtel_and_check_output(command) self._check_run_result(run_number=run_number) diff --git a/simtools/simtel/simtel_runner_array.py b/simtools/simtel/simtel_runner_array.py index 32b3133794..127f076cc1 100644 --- a/simtools/simtel/simtel_runner_array.py +++ b/simtools/simtel/simtel_runner_array.py @@ -226,7 +226,7 @@ def get_resources(self, run_number): _resources = {} _resources["runtime"] = None - with 
open(sub_log_file, "r") as file: + with open(sub_log_file, "r", encoding="utf-8") as file: for line in reversed(list(file)): if "RUNTIME" in line: _resources["runtime"] = int(line.split()[1]) diff --git a/simtools/simtel/simtel_runner_camera_efficiency.py b/simtools/simtel/simtel_runner_camera_efficiency.py index fe3df5d9f1..9546cbbc92 100644 --- a/simtools/simtel/simtel_runner_camera_efficiency.py +++ b/simtools/simtel/simtel_runner_camera_efficiency.py @@ -83,7 +83,7 @@ def _make_run_command(self, **kwargs): # pylint: disable=unused-argument # Processing camera filter # A special case is testeff does not support 2D distributions camera_filter_file = self._telescope_model.get_parameter_value("camera_filter") - if self._telescope_model.is_file_2D("camera_filter"): + if self._telescope_model.is_file_2d("camera_filter"): camera_filter_file = self._get_one_dim_distribution( "camera_filter", "camera_filter_incidence_angle" ) @@ -93,7 +93,7 @@ def _make_run_command(self, **kwargs): # pylint: disable=unused-argument mirror_reflectivity = self._telescope_model.get_parameter_value("mirror_reflectivity") if mirror_class == 2: mirror_reflectivity_secondary = mirror_reflectivity - if self._telescope_model.is_file_2D("mirror_reflectivity"): + if self._telescope_model.is_file_2d("mirror_reflectivity"): mirror_reflectivity = self._get_one_dim_distribution( "mirror_reflectivity", "primary_mirror_incidence_angle" ) diff --git a/simtools/simtel/simtel_runner_ray_tracing.py b/simtools/simtel/simtel_runner_ray_tracing.py index 288602c3fa..6eda80e62d 100644 --- a/simtools/simtel/simtel_runner_ray_tracing.py +++ b/simtools/simtel/simtel_runner_ray_tracing.py @@ -88,8 +88,8 @@ def __init__( # RayTracing - default parameters self._rep_number = 0 - self.RUNS_PER_SET = 1 if self.config.single_mirror_mode else 20 - self.PHOTONS_PER_RUN = 100000 + self.runs_per_set = 1 if self.config.single_mirror_mode else 20 + self.photons_per_run = 100000 self._load_required_files(force_simulate) @@ -172,7 +172,7 @@ def _make_run_command(self, **kwargs): # pylint: disable=unused-argument command += super()._config_option( "telescope_theta", self.config.zenith_angle + self.config.off_axis_angle ) - command += super()._config_option("star_photons", str(self.PHOTONS_PER_RUN)) + command += super()._config_option("star_photons", str(self.photons_per_run)) command += super()._config_option("telescope_phi", "0") command += super()._config_option("camera_transmission", "1.0") command += super()._config_option("nightsky_background", "all:0.") diff --git a/simtools/utils/geometry.py b/simtools/utils/geometry.py index 4198f2f476..66fb4c4ba3 100644 --- a/simtools/utils/geometry.py +++ b/simtools/utils/geometry.py @@ -9,20 +9,20 @@ from astropy.coordinates.errors import UnitsError __all__ = [ - "convert_2D_to_radial_distr", + "convert_2d_to_radial_distr", "rotate", ] _logger = logging.getLogger(__name__) -def convert_2D_to_radial_distr(hist2d, xaxis, yaxis, bins=50, max_dist=1000): +def convert_2d_to_radial_distr(hist_2d, xaxis, yaxis, bins=50, max_dist=1000): """ - Convert a 2D histogram of positions, e.g. photon positions on the ground, to a 1D distribution. + Convert a 2d histogram of positions, e.g. photon positions on the ground, to a 1D distribution. Parameters ---------- - hist2d: numpy.ndarray + hist_2d: numpy.ndarray The histogram counts. xaxis: numpy.array The values of the x axis (histogram bin edges) on the ground. 
@@ -43,7 +43,7 @@ def convert_2D_to_radial_distr(hist2d, xaxis, yaxis, bins=50, max_dist=1000): """ # Check if the histogram will make sense - bins_step = 2 * max_dist / bins # in the 2D array, the positive and negative direction count. + bins_step = 2 * max_dist / bins # in the 2d array, the positive and negative direction count. for axis in [xaxis, yaxis]: if (bins_step < np.diff(axis)).any(): msg = ( @@ -60,14 +60,15 @@ def convert_2D_to_radial_distr(hist2d, xaxis, yaxis, bins=50, max_dist=1000): radial_distance_map = np.sqrt(grid_2d_x**2 + grid_2d_y**2) # The sorting and unravel_index give us the two indices for the position of the sorted element # in the original 2d matrix - x_indices_sorted, y_indices_sorted = np.unravel_index( + sorted_indices = np.unravel_index( np.argsort(radial_distance_map, axis=None), np.shape(radial_distance_map) ) + x_indices_sorted, y_indices_sorted = sorted_indices[0], sorted_indices[1] # We construct a 1D array with the histogram counts sorted according to the distance to the # center. hist_sorted = np.array( - [hist2d[i_x, i_y] for i_x, i_y in zip(x_indices_sorted, y_indices_sorted)] + [hist_2d[i_x, i_y] for i_x, i_y in zip(x_indices_sorted, y_indices_sorted)] ) distance_sorted = np.sort(radial_distance_map, axis=None) @@ -75,7 +76,7 @@ def convert_2D_to_radial_distr(hist2d, xaxis, yaxis, bins=50, max_dist=1000): # acount for it using weights below. weights, radial_bin_edges = np.histogram(distance_sorted, bins=bins, range=(0, max_dist)) - histogram_1D = np.empty_like(weights, dtype=float) + histogram_1d = np.empty_like(weights, dtype=float) for i_radial, _ in enumerate(radial_bin_edges[:-1]): # Here we sum all the events within a radial interval 'dr' and then divide by the number of @@ -84,10 +85,10 @@ def convert_2D_to_radial_distr(hist2d, xaxis, yaxis, bins=50, max_dist=1000): distance_sorted < radial_bin_edges[i_radial + 1] ) if weights[i_radial] != 0: - histogram_1D[i_radial] = np.sum(hist_sorted[indices_to_sum]) / weights[i_radial] + histogram_1d[i_radial] = np.sum(hist_sorted[indices_to_sum]) / weights[i_radial] else: - histogram_1D[i_radial] = 0 - return histogram_1D, radial_bin_edges + histogram_1d[i_radial] = 0 + return histogram_1d, radial_bin_edges @u.quantity_input(rotation_angle_phi=u.rad, rotation_angle_theta=u.rad) diff --git a/simtools/visualization/visualize.py b/simtools/visualization/visualize.py index ff83e16d16..d194b86427 100644 --- a/simtools/visualization/visualize.py +++ b/simtools/visualization/visualize.py @@ -22,9 +22,9 @@ "get_lines", "get_markers", "get_telescope_patch", - "plot_1D", + "plot_1d", "plot_array", - "plot_hist_2D", + "plot_hist_2d", "plot_table", "set_style", ] @@ -272,7 +272,7 @@ def get_lines(): return LINES -def plot_1D(data, **kwargs): +def plot_1d(data, **kwargs): """ Produce a high contrast one dimensional plot from multiple data sets. A ratio plot can be \ added at the bottom to allow easy comparison. Additional options, such as plot title, plot @@ -481,10 +481,10 @@ def plot_table(table, y_title, **kwargs): for column in table.keys()[1:]: data_dict[column] = QTable([table[x_axis], table[column]], names=[x_axis, y_title]) - return plot_1D(data_dict, **kwargs) + return plot_1d(data_dict, **kwargs) -def plot_hist_2D(data, **kwargs): +def plot_hist_2d(data, **kwargs): """ Produce a two dimensional histogram plot. Any option that can be changed after plotting (e.g.,\ axes limits, log scale, etc.) should be done using the returned plt instance. 
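Stepping back to simtools/utils/geometry.py above: a toy sketch of how the renamed convert_2d_to_radial_distr collapses a 2D position histogram into a radial profile (illustrative only, not part of the patch; the Gaussian test data and binning are made up, chosen so the radial bins are not finer than the 2D bins, which is the condition checked at the top of the function):

import numpy as np
from simtools.utils.geometry import convert_2d_to_radial_distr

# Toy photon positions on the ground, in meters.
rng = np.random.default_rng(0)
x = rng.normal(0.0, 150.0, 100_000)
y = rng.normal(0.0, 150.0, 100_000)

# 100 bins over +-500 m -> 10 m wide 2D bins; edges are passed on, as in the docstring.
hist_2d, x_edges, y_edges = np.histogram2d(x, y, bins=100, range=[[-500, 500], [-500, 500]])

# 50 radial bins up to 500 m -> 20 m wide radial bins (2 * max_dist / bins).
histogram_1d, radial_bin_edges = convert_2d_to_radial_distr(
    hist_2d, x_edges, y_edges, bins=50, max_dist=500
)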
@@ -511,7 +511,7 @@ def plot_hist_2D(data, **kwargs): else: title = "" - # Set default style since the usual options do not affect 2D plots (for now). + # Set default style since the usual options do not affect 2d plots (for now). set_style() gs = gridspec.GridSpec(1, 1) diff --git a/tests/integration_tests/test_applications.py b/tests/integration_tests/test_applications.py index 81db653d8e..851dd68bd9 100644 --- a/tests/integration_tests/test_applications.py +++ b/tests/integration_tests/test_applications.py @@ -421,18 +421,18 @@ "TESTOUTPUTDIR/", ] ], - "generate_corsika_histograms::1D_and_2D_run_header": [ + "generate_corsika_histograms::1d_and_2d_run_header": [ [ "--IACT_file", "./tests/resources/tel_output_10GeV-2-gamma-20deg-CTAO-South." "corsikaio", "--pdf", "--hdf5", "--hdf5_file_name", - "test_file_1D_and_2D_run_header.hdf5", - "--event_2D_histograms", + "test_file_1d_and_2d_run_header.hdf5", + "--event_2d_histograms", "zenith", "azimuth", - "--event_1D_histograms", + "--event_1d_histograms", "total_energy", "--output_path", "TESTOUTPUTDIR/", @@ -446,7 +446,7 @@ "--hdf5", "--hdf5_file_name", "test_file_1D_run_header.hdf5", - "--event_1D_histograms", + "--event_1d_histograms", "total_energy", "--output_path", "TESTOUTPUTDIR/", @@ -460,7 +460,7 @@ "--hdf5", "--hdf5_file_name", "test_file_2D_run_header.hdf5", - "--event_2D_histograms", + "--event_2d_histograms", "zenith", "azimuth", "--output_path", diff --git a/tests/unit_tests/corsika/test_corsika_histograms.py b/tests/unit_tests/corsika/test_corsika_histograms.py index 1b8cf4264f..8d3fd8bb9e 100644 --- a/tests/unit_tests/corsika/test_corsika_histograms.py +++ b/tests/unit_tests/corsika/test_corsika_histograms.py @@ -184,9 +184,9 @@ def test_fill_histograms_no_rotation(corsika_output_file_name): assert np.count_nonzero(corsika_histograms_instance_fill.hist_direction[0].values()) > 0 -def test_get_hist_1D_projection(corsika_histograms_instance_set_histograms, caplog): +def test_get_hist_1d_projection(corsika_histograms_instance_set_histograms, caplog): with pytest.raises(ValueError): - corsika_histograms_instance_set_histograms._get_hist_1D_projection("label_not_valid") + corsika_histograms_instance_set_histograms._get_hist_1d_projection("label_not_valid") assert "label_not_valid is not valid." 
in caplog.text labels = ["wavelength", "time", "altitude"] @@ -196,13 +196,13 @@ def test_get_hist_1D_projection(corsika_histograms_instance_set_histograms, capl expected_std = [153.4, 378.2, 483.8] for i_hist, hist_label in enumerate(labels): ( - hist_1D_list, + hist_1d_list, x_bin_edges_list, - ) = corsika_histograms_instance_set_histograms._get_hist_1D_projection(hist_label) + ) = corsika_histograms_instance_set_histograms._get_hist_1d_projection(hist_label) assert np.shape(x_bin_edges_list) == expected_shape_of_bin_edges[i_hist] - assert np.shape(hist_1D_list) == expected_shape_of_values[i_hist] - assert pytest.approx(np.mean(hist_1D_list), 1e-2) == expected_mean[i_hist] - assert pytest.approx(np.std(hist_1D_list), 1e-2) == expected_std[i_hist] + assert np.shape(hist_1d_list) == expected_shape_of_values[i_hist] + assert pytest.approx(np.mean(hist_1d_list), 1e-2) == expected_mean[i_hist] + assert pytest.approx(np.std(hist_1d_list), 1e-2) == expected_std[i_hist] def test_set_histograms_all_telescopes_1_histogram(corsika_histograms_instance): @@ -278,26 +278,26 @@ def test_set_histograms_passing_config(corsika_histograms_instance): assert corsika_histograms_instance.hist_position[0][:, :, sum].axes[0].edges[-1] == 500 -def test_raise_if_no_histogram(corsika_output_file_name, caplog): +def test_raise_if_no_histogram(corsika_output_file_name, caplog, io_handler): corsika_histograms_instance_not_hist = CorsikaHistograms(corsika_output_file_name) with pytest.raises(HistogramNotCreated): corsika_histograms_instance_not_hist._raise_if_no_histogram() assert "The histograms were not created." in caplog -def test_get_hist_2D_projection(corsika_histograms_instance, caplog): +def test_get_hist_2d_projection(corsika_histograms_instance, caplog): corsika_histograms_instance.set_histograms() label = "hist_non_existent" with pytest.raises(ValueError): - corsika_histograms_instance._get_hist_2D_projection(label) + corsika_histograms_instance._get_hist_2d_projection(label) assert "label is not valid." 
in caplog.text labels = ["counts", "density", "direction", "time_altitude"] hist_sums = [11633, 29.1, 11634, 11634] # sum of photons are approximately the same # (except for the density hist, which is divided by the area) for i_label, label in enumerate(labels): - hist_values, x_bin_edges, y_bin_edges = corsika_histograms_instance._get_hist_2D_projection( + hist_values, x_bin_edges, y_bin_edges = corsika_histograms_instance._get_hist_2d_projection( label ) assert np.shape(x_bin_edges) == (1, 101) @@ -309,18 +309,18 @@ def test_get_hist_2D_projection(corsika_histograms_instance, caplog): corsika_histograms_instance.set_histograms(telescope_indices=[0, 1, 2]) hist_sums = [3677, 9.2, 3677, 3677] for i_label, label in enumerate(labels): - hist_values, x_bin_edges, y_bin_edges = corsika_histograms_instance._get_hist_2D_projection( + hist_values, x_bin_edges, y_bin_edges = corsika_histograms_instance._get_hist_2d_projection( label ) assert pytest.approx(np.sum(hist_values), 1e-2) == hist_sums[i_label] -def test_get_2D_photon_position_distr(corsika_histograms_instance_set_histograms): - density = corsika_histograms_instance_set_histograms.get_2D_photon_density_distr() +def test_get_2d_photon_position_distr(corsika_histograms_instance_set_histograms): + density = corsika_histograms_instance_set_histograms.get_2d_photon_density_distr() # Test the values of the histogram assert pytest.approx(np.sum(density[0]), 1e-2) == 29 - counts = corsika_histograms_instance_set_histograms.get_2D_photon_position_distr() + counts = corsika_histograms_instance_set_histograms.get_2d_photon_position_distr() assert pytest.approx(np.sum(counts[0]), 1e-2) == 11633 # The bin edges should be the same @@ -328,37 +328,37 @@ def test_get_2D_photon_position_distr(corsika_histograms_instance_set_histograms assert (counts[2] == density[2]).all() -def test_get_2D_photon_direction_distr(corsika_histograms_instance_set_histograms): +def test_get_2d_photon_direction_distr(corsika_histograms_instance_set_histograms): for returned_variable in range(3): assert ( - corsika_histograms_instance_set_histograms.get_2D_photon_direction_distr()[ + corsika_histograms_instance_set_histograms.get_2d_photon_direction_distr()[ returned_variable ] - == corsika_histograms_instance_set_histograms._get_hist_2D_projection("direction")[ + == corsika_histograms_instance_set_histograms._get_hist_2d_projection("direction")[ returned_variable ] ).all() -def test_get_2D_photon_time_altitude_distr(corsika_histograms_instance_set_histograms): +def test_get_2d_photon_time_altitude_distr(corsika_histograms_instance_set_histograms): for returned_variable in range(3): assert ( - corsika_histograms_instance_set_histograms.get_2D_photon_time_altitude_distr()[ + corsika_histograms_instance_set_histograms.get_2d_photon_time_altitude_distr()[ returned_variable ] - == corsika_histograms_instance_set_histograms._get_hist_2D_projection("time_altitude")[ + == corsika_histograms_instance_set_histograms._get_hist_2d_projection("time_altitude")[ returned_variable ] ).all() -def test_get_2D_num_photons_distr(corsika_histograms_instance_set_histograms): +def test_get_2d_num_photons_distr(corsika_histograms_instance_set_histograms): corsika_histograms_instance_set_histograms.set_histograms(telescope_indices=[0, 4, 10]) ( num_photons_per_event_per_telescope, num_events_array, telescope_indices_array, - ) = corsika_histograms_instance_set_histograms.get_2D_num_photons_distr() + ) = corsika_histograms_instance_set_histograms.get_2d_num_photons_distr() assert 
     assert np.shape(num_events_array) == (1, 3)  # number of events in this output file + 1
     # (bin edges of hist)
     assert (telescope_indices_array == [0, 1, 2, 3]).all()
@@ -379,7 +379,7 @@ def test_get_2D_num_photons_distr(corsika_histograms_instance_set_histograms):
 def test_get_photon_altitude_distr(corsika_histograms_instance_set_histograms):
     for returned_variable in range(2):
         assert (
-            corsika_histograms_instance_set_histograms._get_hist_1D_projection("altitude")[
+            corsika_histograms_instance_set_histograms._get_hist_1d_projection("altitude")[
                 returned_variable
             ]
             == corsika_histograms_instance_set_histograms.get_photon_altitude_distr()[
@@ -391,7 +391,7 @@ def test_get_photon_altitude_distr(corsika_histograms_instance_set_histograms):
 def test_get_photon_time_of_emission_distr(corsika_histograms_instance_set_histograms):
     for returned_variable in range(2):
         assert (
-            corsika_histograms_instance_set_histograms._get_hist_1D_projection("time")[
+            corsika_histograms_instance_set_histograms._get_hist_1d_projection("time")[
                 returned_variable
             ]
             == corsika_histograms_instance_set_histograms.get_photon_time_of_emission_distr()[
@@ -403,7 +403,7 @@ def test_get_photon_time_of_emission_distr(corsika_histograms_instance_set_histo
 def test_get_photon_wavelength_distr(corsika_histograms_instance_set_histograms):
     for returned_variable in range(2):
         assert (
-            corsika_histograms_instance_set_histograms._get_hist_1D_projection("wavelength")[
+            corsika_histograms_instance_set_histograms._get_hist_1d_projection("wavelength")[
                 returned_variable
             ]
             == corsika_histograms_instance_set_histograms.get_photon_wavelength_distr()[
@@ -442,7 +442,7 @@ def test_get_photon_radial_distr_input_some_tel_and_density(
     )

     (
-        hist_1D_list,
+        hist_1d_list,
         x_bin_edges_list,
     ) = corsika_histograms_instance_set_histograms.get_photon_radial_distr(bins=100, max_dist=1200)
     assert np.amax(x_bin_edges_list) == 1200
@@ -450,7 +450,7 @@ def test_get_photon_radial_distr_input_some_tel_and_density(
     # Test if the keyword density changes the output histogram but not the bin_edges
     (
-        hist_1D_list_dens,
+        hist_1d_list_dens,
         x_bin_edges_list_dens,
     ) = corsika_histograms_instance_set_histograms.get_photon_density_distr(
         bins=100,
@@ -459,10 +459,10 @@ def test_get_photon_radial_distr_input_some_tel_and_density(
     assert (x_bin_edges_list_dens == x_bin_edges_list).all()
     assert (
-        pytest.approx(np.sum(hist_1D_list_dens), 1e-2) == 1.86
+        pytest.approx(np.sum(hist_1d_list_dens), 1e-2) == 1.86
     )  # density smaller because it divides
     # by the area (not counts per bin)
-    assert pytest.approx(np.sum(hist_1D_list), 1e-2) == 744.17
+    assert pytest.approx(np.sum(hist_1d_list), 1e-2) == 744.17


 def test_get_photon_radial_distr_input_all_tel(corsika_histograms_instance):
@@ -723,8 +723,8 @@ def test_get_run_info(corsika_histograms_instance_set_histograms, caplog):
     )


-def test_event_1D_histogram(corsika_histograms_instance_set_histograms):
-    hist, bin_edges = corsika_histograms_instance_set_histograms.event_1D_histogram(
+def test_event_1d_histogram(corsika_histograms_instance_set_histograms):
+    hist, bin_edges = corsika_histograms_instance_set_histograms.event_1d_histogram(
         "total_energy", bins=5, hist_range=(5, 15)
     )
     assert np.size(bin_edges) == 6
@@ -732,8 +732,8 @@ def test_event_1D_histogram(corsika_histograms_instance_set_histograms):
     assert hist[2] == 2


-def test_event_2D_histogram(corsika_histograms_instance_set_histograms):
-    hist, x_bin_edges, _ = corsika_histograms_instance_set_histograms.event_2D_histogram(
+def test_event_2d_histogram(corsika_histograms_instance_set_histograms):
+    hist, x_bin_edges, _ = corsika_histograms_instance_set_histograms.event_2d_histogram(
         "total_energy", "first_interaction_height", bins=(5, 5), hist_range=[[5, 15], [-60e5, -5e5]]
     )
     assert np.size(x_bin_edges) == 6
@@ -775,11 +775,11 @@ def test_meta_dict(corsika_histograms_instance_set_histograms):
     assert corsika_histograms_instance_set_histograms._meta_dict == expected_meta_dict


-def test_dict_1D_distributions(corsika_histograms_instance_set_histograms):
-    expected_dict_1D_distributions = {
+def test_dict_1d_distributions(corsika_histograms_instance_set_histograms):
+    expected_dict_1d_distributions = {
         "wavelength": {
             "function": "get_photon_wavelength_distr",
-            "file name": "hist_1D_photon_wavelength_distr",
+            "file name": "hist_1d_photon_wavelength_distr",
             "title": "Photon wavelength distribution",
             "bin edges": "wavelength",
             "axis unit": corsika_histograms_instance_set_histograms.hist_config["hist_position"][
@@ -788,8 +788,8 @@ def test_dict_1D_distributions(corsika_histograms_instance_set_histograms):
         }
     }
     assert (
-        corsika_histograms_instance_set_histograms._dict_1D_distributions["wavelength"]
-        == expected_dict_1D_distributions["wavelength"]
+        corsika_histograms_instance_set_histograms.dict_1d_distributions["wavelength"]
+        == expected_dict_1d_distributions["wavelength"]
     )
@@ -820,11 +820,11 @@ def test_export_and_read_histograms(corsika_histograms_instance_set_histograms,
     )


-def test_dict_2D_distributions(corsika_histograms_instance_set_histograms):
-    expected_dict_2D_distributions = {
+def test_dict_2d_distributions(corsika_histograms_instance_set_histograms):
+    expected_dict_2d_distributions = {
         "counts": {
-            "function": "get_2D_photon_position_distr",
-            "file name": "hist_2D_photon_count_distr",
+            "function": "get_2d_photon_position_distr",
+            "file name": "hist_2d_photon_count_distr",
             "title": "Photon count distribution on the ground",
             "x bin edges": "x position on the ground",
             "x axis unit": corsika_histograms_instance_set_histograms.hist_config["hist_position"][
@@ -837,20 +837,20 @@ def test_dict_2D_distributions(corsika_histograms_instance_set_histograms):
         }
     }
     assert (
-        corsika_histograms_instance_set_histograms._dict_2D_distributions["counts"]
-        == expected_dict_2D_distributions["counts"]
+        corsika_histograms_instance_set_histograms.dict_2d_distributions["counts"]
+        == expected_dict_2d_distributions["counts"]
     )


-def test_export_event_header_1D_histogram(corsika_histograms_instance_set_histograms, io_handler):
+def test_export_event_header_1d_histogram(corsika_histograms_instance_set_histograms, io_handler):
     corsika_event_header_example = {
-        "total_energy": "event_1D_histograms_total_energy",
-        "azimuth": "event_1D_histograms_azimuth",
-        "zenith": "event_1D_histograms_zenith",
-        "first_interaction_height": "event_1D_histograms_first_interaction_height",
+        "total_energy": "event_1d_histograms_total_energy",
+        "azimuth": "event_1d_histograms_azimuth",
+        "zenith": "event_1d_histograms_zenith",
+        "first_interaction_height": "event_1d_histograms_first_interaction_height",
     }
     for event_header_element, _ in corsika_event_header_example.items():
-        corsika_histograms_instance_set_histograms.export_event_header_1D_histogram(
+        corsika_histograms_instance_set_histograms.export_event_header_1d_histogram(
            event_header_element, bins=50, hist_range=None
         )
@@ -858,19 +858,19 @@ def test_export_event_header_1D_histogram(corsika_histograms_instance_set_histog
     assert len(tables) == 4


-def test_export_event_header_2D_histogram(corsika_histograms_instance_set_histograms, io_handler):
+def test_export_event_header_2d_histogram(corsika_histograms_instance_set_histograms, io_handler):
     # Test writing the default photon histograms as well
     corsika_histograms_instance_set_histograms.export_histograms()
     tables = read_hdf5(corsika_histograms_instance_set_histograms.hdf5_file_name)
     assert len(tables) == 12

     corsika_event_header_example = {
-        ("azimuth", "zenith"): "event_2D_histograms_azimuth_zenith",
+        ("azimuth", "zenith"): "event_2d_histograms_azimuth_zenith",
     }

     # Test writing (appending) event header histograms
     for event_header_element, file_name in corsika_event_header_example.items():
-        corsika_histograms_instance_set_histograms.export_event_header_2D_histogram(
+        corsika_histograms_instance_set_histograms.export_event_header_2d_histogram(
             event_header_element[0], event_header_element[1], bins=50, hist_range=None
         )
     tables = read_hdf5(corsika_histograms_instance_set_histograms.hdf5_file_name)
diff --git a/tests/unit_tests/corsika/test_corsika_histograms_visualize.py b/tests/unit_tests/corsika/test_corsika_histograms_visualize.py
index cea3a9565c..536591e9c6 100644
--- a/tests/unit_tests/corsika/test_corsika_histograms_visualize.py
+++ b/tests/unit_tests/corsika/test_corsika_histograms_visualize.py
@@ -5,7 +5,7 @@ from simtools.corsika import corsika_histograms_visualize
-def test_kernel_plot_2D_photons(corsika_histograms_instance_set_histograms, caplog):
+def test_kernel_plot_2d_photons(corsika_histograms_instance_set_histograms, caplog):
     corsika_histograms_instance_set_histograms.set_histograms(
         individual_telescopes=False, telescope_indices=[0, 1, 2]
     )
@@ -16,7 +16,7 @@ def test_kernel_plot_2D_photons(corsika_histograms_instance_set_histograms, capl
         "time_altitude",
         "num_photons_per_telescope",
     ]:
-        all_figs = corsika_histograms_visualize._kernel_plot_2D_photons(
+        all_figs = corsika_histograms_visualize._kernel_plot_2d_photons(
             corsika_histograms_instance_set_histograms, property_name
         )
         assert np.size(all_figs) == 1
@@ -32,26 +32,26 @@ def test_kernel_plot_2D_photons(corsika_histograms_instance_set_histograms, capl
         "time_altitude",
         "num_photons_per_telescope",
     ]:
-        all_figs = corsika_histograms_visualize._kernel_plot_2D_photons(
+        all_figs = corsika_histograms_visualize._kernel_plot_2d_photons(
             corsika_histograms_instance_set_histograms, property_name
         )
         for _, _ in enumerate(corsika_histograms_instance_set_histograms.telescope_indices):
             assert isinstance(all_figs[0], plt.Figure)

     with pytest.raises(ValueError):
-        corsika_histograms_visualize._kernel_plot_2D_photons(
+        corsika_histograms_visualize._kernel_plot_2d_photons(
             corsika_histograms_instance_set_histograms, "this_property_does_not_exist"
         )
     msg = "This property does not exist. "
     assert msg in caplog.text


-def test_plot_2Ds(corsika_histograms_instance_set_histograms):
+def test_plot_2ds(corsika_histograms_instance_set_histograms):
     for function_label in [
-        "plot_2D_counts",
-        "plot_2D_density",
-        "plot_2D_direction",
-        "plot_2D_num_photons_per_telescope",
+        "plot_2d_counts",
+        "plot_2d_density",
+        "plot_2d_direction",
+        "plot_2d_num_photons_per_telescope",
     ]:
         function = getattr(corsika_histograms_visualize, function_label)
         figs = function(corsika_histograms_instance_set_histograms)
@@ -59,7 +59,7 @@ def test_plot_2Ds(corsika_histograms_instance_set_histograms):
         assert all(isinstance(fig, plt.Figure) for fig in figs)


-def test_kernel_plot_1D_photons(corsika_histograms_instance_set_histograms, caplog):
+def test_kernel_plot_1d_photons(corsika_histograms_instance_set_histograms, caplog):
     corsika_histograms_instance_set_histograms.set_histograms(
         individual_telescopes=False, telescope_indices=[0, 1, 2]
     )
@@ -74,7 +74,7 @@ def test_kernel_plot_1D_photons(corsika_histograms_instance_set_histograms, capl
     ]

     for property_name in labels:
-        all_figs = corsika_histograms_visualize._kernel_plot_1D_photons(
+        all_figs = corsika_histograms_visualize._kernel_plot_1d_photons(
             corsika_histograms_instance_set_histograms, property_name
         )
         assert np.size(all_figs) == 1
@@ -84,7 +84,7 @@ def test_kernel_plot_1D_photons(corsika_histograms_instance_set_histograms, capl
         individual_telescopes=True, telescope_indices=[0, 1, 2]
     )
     for property_name in labels:
-        all_figs = corsika_histograms_visualize._kernel_plot_1D_photons(
+        all_figs = corsika_histograms_visualize._kernel_plot_1d_photons(
             corsika_histograms_instance_set_histograms, property_name
         )
         for i_hist, _ in enumerate(corsika_histograms_instance_set_histograms.telescope_indices):
@@ -94,14 +94,14 @@ def test_kernel_plot_1D_photons(corsika_histograms_instance_set_histograms, capl
             assert isinstance(all_figs[i_hist], plt.Figure)

     with pytest.raises(ValueError):
-        corsika_histograms_visualize._kernel_plot_1D_photons(
+        corsika_histograms_visualize._kernel_plot_1d_photons(
             corsika_histograms_instance_set_histograms, "this_property_does_not_exist"
         )
     msg = "This property does not exist. "
     assert msg in caplog.text


-def test_plot_1Ds(corsika_histograms_instance_set_histograms):
+def test_plot_1ds(corsika_histograms_instance_set_histograms):
     for function_label in [
         "plot_wavelength_distr",
         "plot_counts_distr",
@@ -118,12 +118,12 @@ def test_plot_event_headers(corsika_histograms_instance_set_histograms):
-    fig = corsika_histograms_visualize.plot_1D_event_header_distribution(
+    fig = corsika_histograms_visualize.plot_1d_event_header_distribution(
         corsika_histograms_instance_set_histograms, "total_energy"
     )
     assert isinstance(fig, plt.Figure)

-    fig = corsika_histograms_visualize.plot_2D_event_header_distribution(
+    fig = corsika_histograms_visualize.plot_2d_event_header_distribution(
         corsika_histograms_instance_set_histograms, "zenith", "azimuth"
     )
     assert isinstance(fig, plt.Figure)
diff --git a/tests/unit_tests/corsika_simtel/test_corsika_simtel_runner.py b/tests/unit_tests/corsika_simtel/test_corsika_simtel_runner.py
index 79420da103..a474280165 100644
--- a/tests/unit_tests/corsika_simtel/test_corsika_simtel_runner.py
+++ b/tests/unit_tests/corsika_simtel/test_corsika_simtel_runner.py
@@ -97,7 +97,7 @@ def test_prepare_run_script_with_invalid_run(corsika_simtel_runner):
 def test_export_multipipe_script(corsika_simtel_runner):
     corsika_simtel_runner.export_multipipe_script()

-    script = Path(corsika_simtel_runner.corsika_config._config_file_path.parent).joinpath(
+    script = Path(corsika_simtel_runner.corsika_config.config_file_path.parent).joinpath(
         corsika_simtel_runner.corsika_config.get_file_name("multipipe")
     )
@@ -112,11 +112,11 @@ def test_export_multipipe_executable(corsika_simtel_runner):
     corsika_simtel_runner.export_multipipe_script()

-    multipipe_file = Path(corsika_simtel_runner.corsika_config._config_file_path.parent).joinpath(
+    multipipe_file = Path(corsika_simtel_runner.corsika_config.config_file_path.parent).joinpath(
         corsika_simtel_runner.corsika_config.get_file_name("multipipe")
     )
     corsika_simtel_runner._export_multipipe_executable(multipipe_file)

-    script = Path(corsika_simtel_runner.corsika_config._config_file_path.parent).joinpath(
+    script = Path(corsika_simtel_runner.corsika_config.config_file_path.parent).joinpath(
         "run_cta_multipipe"
     )
diff --git a/tests/unit_tests/simtel/test_simtel_histograms.py b/tests/unit_tests/simtel/test_simtel_histograms.py
index cc9c8a2391..0fb867b6bb 100644
--- a/tests/unit_tests/simtel/test_simtel_histograms.py
+++ b/tests/unit_tests/simtel/test_simtel_histograms.py
@@ -23,7 +23,7 @@ def simtel_array_histograms_file(io_handler, corsika_output_file_name):
 @pytest.fixture
 def simtel_array_histograms_instance(simtel_array_histograms_file):
     instance = SimtelHistograms(histogram_files=simtel_array_histograms_file, test=True)
-    instance._combine_histogram_files()
+    instance.combine_histogram_files()
     return instance
diff --git a/tests/unit_tests/utils/test_geometry.py b/tests/unit_tests/utils/test_geometry.py
index a13ee9c52a..ad03b989c2 100644
--- a/tests/unit_tests/utils/test_geometry.py
+++ b/tests/unit_tests/utils/test_geometry.py
@@ -63,7 +63,7 @@ def check_results(x_to_test, y_to_test, x_right, y_right, angle, theta=0 * u.deg
     transf.rotate(x_new_array, y_new_array, 30 * u.m)


-def test_convert_2D_to_radial_distr(caplog) -> None:
+def test_convert_2d_to_radial_distr(caplog) -> None:
     # Test normal functioning
     max_dist = 100
     bins = 100
@@ -73,14 +73,14 @@ def test_convert_2D_to_radial_distr(caplog) -> None:
     x2d, y2d = np.meshgrid(xaxis, yaxis)
     distance_to_center_2D = np.sqrt((x2d) ** 2 + (y2d) ** 2)

-    distance_to_center_1D, radial_bin_edges = transf.convert_2D_to_radial_distr(
+    distance_to_center_1D, radial_bin_edges = transf.convert_2d_to_radial_distr(
         distance_to_center_2D, xaxis, yaxis, bins=bins, max_dist=max_dist
     )
     difference = radial_bin_edges[:-1] - distance_to_center_1D
     assert pytest.approx(difference[:-1], abs=1) == 0  # last value deviates

     # Test warning in caplog
-    transf.convert_2D_to_radial_distr(
+    transf.convert_2d_to_radial_distr(
         distance_to_center_2D, xaxis, yaxis, bins=4 * bins, max_dist=max_dist
     )
     msg = "The histogram with number of bins"
diff --git a/tests/unit_tests/visualization/test_visualize.py b/tests/unit_tests/visualization/test_visualize.py
index 3cde258820..0b239aa528 100644
--- a/tests/unit_tests/visualization/test_visualize.py
+++ b/tests/unit_tests/visualization/test_visualize.py
@@ -16,8 +16,8 @@ logger.setLevel(logging.DEBUG)
-def test_plot_1D(db, io_handler):
-    logger.debug("Testing plot_1D")
+def test_plot_1d(db, io_handler):
+    logger.debug("Testing plot_1d")

     x_title = "Wavelength [nm]"
     y_title = "Mirror reflectivity [%]"
@@ -47,10 +47,10 @@ def test_plot_1D(db, io_handler):
         new_data[y_title] = new_data[y_title] * (1 - 0.1 * (i + 1))
         data[f"{100 * (1 - 0.1 * (i + 1))}%% reflectivity"] = new_data

-    plt = visualize.plot_1D(data, title=title, palette="autumn")
+    plt = visualize.plot_1d(data, title=title, palette="autumn")

     plot_file = io_handler.get_output_file(
-        file_name="plot_1D.pdf", sub_dir="plots", dir_type="test"
+        file_name="plot_1d.pdf", sub_dir="plots", dir_type="test"
     )
     if plot_file.exists():
         plot_file.unlink()