diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cfb0b92..2d9961b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,17 +15,60 @@ on: - setup.py jobs: + lint: + name: Lint + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ['3.12'] + steps: + - uses: actions/checkout@v4 + - name: Get history and tags for SCM versioning to work + run: | + git fetch --prune --unshallow + git fetch --depth=1 origin +refs/tags/*:refs/tags/* + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: 'pip' + cache-dependency-path: 'lint_requirements.txt' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + - name: Install project + run: | + pip install -e . + - name: Install lint dependencies + run: | + pip install -U -r lint_requirements.txt + - name: Formatting + if: always() + run: | + ruff format pyclibrary tests --check + - name: Linting + if: always() + run: | + ruff check pyclibrary tests + # - name: Typing + # if: always() + # run: | + # mypy pyclibrary tests: name: Unit tests runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python-version: ['3.10', '3.11', '3.12', '3.13'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 + - name: Get history and tags for SCM versioning to work + run: | + git fetch --prune --unshallow + git fetch --depth=1 origin +refs/tags/*:refs/tags/* - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -39,7 +82,7 @@ jobs: pip install pytest-cov python -m pytest tests --cov pyclibrary --cov-report xml -v - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v5 with: token: ${{ 
secrets.CODECOV_TOKEN }} flags: unittests diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index bf53a6e..b7525e0 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -19,15 +19,19 @@ jobs: name: Docs building runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 + - name: Get history and tags for SCM versioning to work + run: | + git fetch --prune --unshallow + git fetch --depth=1 origin +refs/tags/*:refs/tags/* - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 - name: Install dependencies run: | python -m pip install --upgrade pip - name: Install project run: | - python setup.py develop + pip install . - name: Install graphviz uses: kamiazya/setup-graphviz@v1 - name: Install doc building tools diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 091e8cc..f63dc56 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,11 +13,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: '3.8' + python-version: '3.12' - name: Build sdist run: | pip install --upgrade pip diff --git a/.gitignore b/.gitignore index 35842a4..06b3526 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,6 @@ __pycache__ *.egg-info .mypy_cache -*.pyclibcache \ No newline at end of file +*.pyclibcache + +version.py \ No newline at end of file diff --git a/CHANGES b/CHANGES index 0b7ad2f..5a22e30 100644 --- a/CHANGES +++ b/CHANGES @@ -1,9 +1,12 @@ PyCLibrary Changelog ==================== -0.2.3 - unreleased +0.3.0 - unreleased ------------------ +- officially support Python 3.10+ (up to 3.13) PR #78 +- drop support for Python < 3.10 PR #78 +- move installation to pyproject base installation procedure PR #78 - allow selection of encoding when loading files (issue 
#51) 0.2.2 - 22/01/2024 diff --git a/docs/source/conf.py b/docs/source/conf.py index f6d080e..139c9d9 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -12,58 +12,51 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('..')) -(root, _) = os.path.split(os.path.abspath('..')) -sys.path.insert(0, root) +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +from pyclibrary.version import version_info # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '2.0' +needs_sphinx = "2.0" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.intersphinx', - 'sphinx.ext.viewcode', - 'sphinx.ext.napoleon' + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.viewcode", + "sphinx.ext.napoleon", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. 
-project = u'PyCLibrary' -copyright = u'2015-2020, Luke Campagnola, Matthieu Dartiailh' -author = u'Luke Campagnola, Matthieu Dartiailh' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -from pyclibrary.version import version_info +project = "PyCLibrary" +copyright = "2015-2025, Luke Campagnola, Matthieu C. Dartiailh" +author = "Luke Campagnola, Matthieu C. Dartiailh" # The short X.Y version. -version = '{0}.{1}'.format(*version_info) +version = "{0}.{1}".format(*version_info) # The full version, including alpha/beta/rc tags. -release = ('{0}.{1}.{2}.{3}'.format(*version_info) if version_info[3] - else '{0}.{1}.{2}'.format(*version_info)) +release = ( + "{0}.{1}.{2}.{3}".format(*version_info) + if version_info[3] + else "{0}.{1}.{2}".format(*version_info) +) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -74,9 +67,9 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -84,47 +77,47 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. 
-#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # Autodoc configuration. -autodoc_member_order = 'bysource' +autodoc_member_order = "bysource" autodoc_default_flags = [ - 'members', - 'show-inheritance', - ] -auto_class_content = 'class' + "members", + "show-inheritance", +] +auto_class_content = "class" # Inheritance diagram configuration. #: rankdir ['LR', 'TB', 'BT', 'RL'] -inheritance_graph_attrs = dict( - rankdir="LR", - width="8.0", - fontsize=14, - ratio='compress' - ) +inheritance_graph_attrs = { + "rankdir": "LR", + "width": "8.0", + "fontsize": 14, + "ratio": "compress", +} autosummary_generate = True @@ -134,39 +127,33 @@ # -- Options for HTML output ---------------------------------------------- -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -if not on_rtd: # only import and set the theme if we're building docs locally - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -176,122 +163,121 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. 
-#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'PyCLibrarydoc' +htmlhelp_basename = "PyCLibrarydoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). 
+ #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'PyCLibrary.tex', u'PyCLibrary Documentation', - u'Luke Campagnola, Matthieu Dartiailh', 'manual'), + ( + master_doc, + "PyCLibrary.tex", + "PyCLibrary Documentation", + "Luke Campagnola, Matthieu Dartiailh", + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'pyclibrary', u'PyCLibrary Documentation', - [author], 1) -] +man_pages = [(master_doc, "pyclibrary", "PyCLibrary Documentation", [author], 1)] # If true, show URL addresses after external links. 
-#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -300,24 +286,29 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'PyCLibrary', u'PyCLibrary Documentation', - author, 'PyCLibrary', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "PyCLibrary", + "PyCLibrary Documentation", + author, + "PyCLibrary", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = \ - {'python': ('https://docs.python.org/', None)} +intersphinx_mapping = {"python": ("https://docs.python.org/", None)} diff --git a/lint_requirements.txt b/lint_requirements.txt new file mode 100644 index 0000000..e3b95ac --- /dev/null +++ b/lint_requirements.txt @@ -0,0 +1,2 @@ +ruff +mypy \ No newline at end of file diff --git a/pyclibrary/__init__.py b/pyclibrary/__init__.py index 5b19e9e..3bff72e 100644 --- a/pyclibrary/__init__.py +++ b/pyclibrary/__init__.py @@ -1,14 +1,26 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # # The full license is in the file LICENCE, distributed with this software. 
# ----------------------------------------------------------------------------- import logging -logging.getLogger('pyclibrary').addHandler(logging.NullHandler()) -from .c_parser import win_defs, CParser from .c_library import CLibrary, build_array, cast_to +from .c_parser import CParser, win_defs from .errors import DefinitionError -from .init import init, auto_init +from .init import auto_init, init + +logging.getLogger("pyclibrary").addHandler(logging.NullHandler()) + +__all__ = ( + "CLibrary", + "CParser", + "DefinitionError", + "auto_init", + "build_array", + "cast_to", + "init", + "win_defs", +) diff --git a/pyclibrary/backends/__init__.py b/pyclibrary/backends/__init__.py index 01f6598..0d18090 100644 --- a/pyclibrary/backends/__init__.py +++ b/pyclibrary/backends/__init__.py @@ -1,33 +1,31 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -""" -""" -from .ctypes import (init_clibrary as c_init, - identify_library as c_iden, - get_library_path as cpath) +""" """ -lib_types = {'ctypes': c_iden} -lib_path = {'ctypes': cpath} +from .ctypes import ( + get_library_path as cpath, + identify_library as c_iden, + init_clibrary as c_init, +) +lib_types = {"ctypes": c_iden} +lib_path = {"ctypes": cpath} -def identify_library(lib): - """Identify a library backend. - """ +def identify_library(lib): + """Identify a library backend.""" for typ, check in lib_types.items(): if check(lib): return typ def get_library_path(lib, backend=None): - """Retrieve the path to the dynamic library file. 
- - """ + """Retrieve the path to the dynamic library file.""" if not backend or backend not in lib_path: backend = identify_library(lib) @@ -35,7 +33,5 @@ def get_library_path(lib, backend=None): def init_libraries(extra_types): - """Run the initialiser of each backend. - - """ + """Run the initialiser of each backend.""" c_init(extra_types) diff --git a/pyclibrary/backends/ctypes.py b/pyclibrary/backends/ctypes.py index 0256568..c5955a8 100644 --- a/pyclibrary/backends/ctypes.py +++ b/pyclibrary/backends/ctypes.py @@ -1,5 +1,5 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # @@ -10,30 +10,60 @@ function calling based on C header definitions. """ + import logging import os import sys +from ctypes import ( + CDLL, + CFUNCTYPE, + POINTER, + Structure, + Union, + c_bool, + c_char, + c_char_p, + c_double, + c_float, + c_int, + c_int8, + c_int16, + c_int32, + c_int64, + c_long, + c_longdouble, + c_longlong, + c_short, + c_ubyte, + c_uint, + c_uint8, + c_uint16, + c_uint32, + c_uint64, + c_ulong, + c_ulonglong, + c_ushort, + c_void_p, + c_wchar, + c_wchar_p, + cast, + cdll, + pointer, +) from inspect import cleandoc -from ctypes import (c_char, c_wchar, c_ubyte, c_short, c_ushort, - c_int, c_uint, c_long, c_ulong, c_longlong, c_ulonglong, - c_float, c_double, c_longdouble, c_int8, c_uint8, c_int16, - c_uint16, c_int32, c_uint32, c_int64, c_uint64, c_bool, - c_char_p, c_wchar_p, c_void_p, - pointer, Union, Structure, cast, - cdll, POINTER, CFUNCTYPE, CDLL) -if sys.platform == 'win32': - from ctypes import windll, oledll, WINFUNCTYPE, HRESULT +if sys.platform == "win32": + from ctypes import HRESULT, WINFUNCTYPE, oledll, windll -from ..errors import DefinitionError from ..c_library import CLibrary +from ..errors import 
DefinitionError from ..utils import find_library logger = logging.getLogger(__name__) def make_mess(mess): - return cleandoc(mess).replace('\n', ' ') + return cleandoc(mess).replace("\n", " ") class CTypesCLibrary(CLibrary): @@ -88,6 +118,7 @@ class CTypesCLibrary(CLibrary): names. """ + #: Private flag allowing to know if the class has been initiliased. _init = False @@ -95,33 +126,32 @@ class CTypesCLibrary(CLibrary): Null = object() #: Id of the backend - backend = 'ctypes' + backend = "ctypes" #: Types (filled by init_clibrary) _types_ = {} #: Types for which ctypes provides a special pointer type. - _ptr_types_ = {'char': c_char_p, - 'wchar': c_wchar_p, - 'wchar_t': c_wchar_p, - 'void': c_void_p - } + _ptr_types_ = { + "char": c_char_p, + "wchar": c_wchar_p, + "wchar_t": c_wchar_p, + "void": c_void_p, + } def __repr__(self): return "" % str(self._lib_) def _link_library(self, lib_path, convention): - """Find and link the external librairy if only a path was provided. - - """ - if convention == 'cdll': + """Find and link the external librairy if only a path was provided.""" + if convention == "cdll": return cdll.LoadLibrary(lib_path) - elif convention == 'windll': + elif convention == "windll": return windll.LoadLibrary(lib_path) - elif convention == 'oledll': + elif convention == "oledll": return oledll.LoadLibrary(lib_path) else: - raise ValueError('Convention cannot be {}'.format(convention)) + raise ValueError("Convention cannot be {}".format(convention)) def _extract_val_(self, obj): """Extract a Python value from a ctype object. @@ -130,12 +160,12 @@ def _extract_val_(self, obj): it wrong (a pointer being often an array). """ - if not hasattr(obj, 'value'): + if not hasattr(obj, "value"): return obj return obj.value - def _get_type(self, typ, pointers=True): + def _get_type(self, typ, pointers=True): # noqa """Return a ctype object representing the named type. 
If pointers is True, the class returned includes all pointer/array @@ -150,8 +180,12 @@ def _get_type(self, typ, pointers=True): # Create the initial type # Some types like ['char', '*'] have a specific ctype (c_char_p) # (but only do this if pointers == True) - if (pointers and len(typ) > 1 and typ[1] == '*' and - typ[0] in self._ptr_types_): + if ( + pointers + and len(typ) > 1 + and typ[1] == "*" + and typ[0] in self._ptr_types_ + ): cls = self._ptr_types_[typ[0]] mods = typ[2:] @@ -160,17 +194,15 @@ def _get_type(self, typ, pointers=True): cls = self._types_[typ[0]] # structs, unions, enums: - elif typ[0][:7] == 'struct ': - cls = self._get_struct('structs', - self._defs_['types'][typ[0]][1]) - elif typ[0][:6] == 'union ': - cls = self._get_struct('unions', - self._defs_['types'][typ[0]][1]) - elif typ[0][:5] == 'enum ': + elif typ[0][:7] == "struct ": + cls = self._get_struct("structs", self._defs_["types"][typ[0]][1]) + elif typ[0][:6] == "union ": + cls = self._get_struct("unions", self._defs_["types"][typ[0]][1]) + elif typ[0][:5] == "enum ": cls = c_int # void - elif typ[0] == 'void': + elif typ[0] == "void": cls = None else: raise KeyError("Can't find base type for {}".format(typ)) @@ -178,7 +210,6 @@ def _get_type(self, typ, pointers=True): if not pointers: return cls - n_mods = [] # Go through the modifier looking for array modifiers. 
# Array modifiers are list and if we find consecutive modifiers we merge @@ -199,11 +230,11 @@ def _get_type(self, typ, pointers=True): while len(mods) > 0: m = mods.pop(0) if isinstance(m, str): # pointer or reference - if m[0] == '*' or m[0] == '&': + if m[0] == "*" or m[0] == "&": for i in m: cls = POINTER(cls) - elif isinstance(m, list): # array + elif isinstance(m, list): # array # Go in reverse order to get nd array to work properly for i in reversed(m): # -1 indicates an 'incomplete type' like "int @@ -218,17 +249,17 @@ def _get_type(self, typ, pointers=True): elif isinstance(m, tuple): # Find pointer and calling convention is_ptr = False - conv = '__cdecl' + conv = "__cdecl" if len(mods) == 0: mess = "Function signature with no pointer:" raise DefinitionError(mess, m, mods) for i in [0, 1]: if len(mods) < 1: break - if mods[0] == '*': + if mods[0] == "*": mods.pop(0) is_ptr = True - elif mods[0] in ['__stdcall', '__cdecl']: + elif mods[0] in ["__stdcall", "__cdecl"]: conv = mods.pop(0) else: break @@ -237,7 +268,7 @@ def _get_type(self, typ, pointers=True): (function without single pointer): {}""") raise DefinitionError(mess.format(typ)) - if conv == '__stdcall': + if conv == "__stdcall": mkfn = WINFUNCTYPE else: @@ -257,19 +288,20 @@ def _get_type(self, typ, pointers=True): def _get_struct(self, str_type, str_name): if str_name not in self._structs_: - str_name = self._resolve_struct_alias(str_type, str_name) # Pull struct definition defn = self._defs_[str_type][str_name] # create ctypes class - defs = defn['members'][:] - if str_type == 'structs': + defs = defn["members"][:] + if str_type == "structs": + class s(Structure): def __repr__(self): return "" % str_name - elif str_type == 'unions': + elif str_type == "unions": + class s(Union): def __repr__(self): return "" % str_name @@ -277,8 +309,8 @@ def __repr__(self): # Must register struct here to allow recursive definitions. 
self._structs_[str_name] = s - if defn['pack'] is not None: - s._pack_ = defn['pack'] + if defn["pack"] is not None: + s._pack_ = defn["pack"] # Assign names to anonymous members members = [] @@ -287,7 +319,7 @@ def __repr__(self): if d[0] is None: c = 0 while True: - name = 'anon_member%d' % c + name = "anon_member%d" % c if name not in members: d = (name,) + d[1:] defs[i] = d @@ -300,23 +332,23 @@ def __repr__(self): # Handle bit field specifications, ctypes only supports bit fields # for integer but I am not sure how to test for it in a nice # fashion. - s._fields_ = [(m[0], self._get_type(m[1])) if m[2] is None else - (m[0], self._get_type(m[1]), m[2]) for m in defs] + s._fields_ = [ + (m[0], self._get_type(m[1])) + if m[2] is None + else (m[0], self._get_type(m[1]), m[2]) + for m in defs + ] s._defaults_ = [m[2] for m in defs] return self._structs_[str_name] def _get_pointer(self, arg_type, sig): - """Build an uninitialised pointer for the given type. - - """ + """Build an uninitialised pointer for the given type.""" # Must be 2-part type, second part must be '*' or '**' - assert 2 <= len(arg_type) <= 3 and set(arg_type[1:]) == {'*'} + assert 2 <= len(arg_type) <= 3 and set(arg_type[1:]) == {"*"} arg_type_list = list(arg_type) cls = self._get_type(sig, pointers=False) - special_pointer_types = {None: c_void_p, - c_char: c_char_p, - c_wchar: c_wchar_p} + special_pointer_types = {None: c_void_p, c_char: c_char_p, c_wchar: c_wchar_p} if cls in special_pointer_types: cls = special_pointer_types[cls] del arg_type_list[1] @@ -325,23 +357,19 @@ def _get_pointer(self, arg_type, sig): return pointer(cls()) def _cast_to(self, obj, typ): - """Cast an object to a new type (new type must be a pointer). - - """ + """Cast an object to a new type (new type must be a pointer).""" if not isinstance(typ, type): typ = self._get_type((typ,)) return cast(obj, typ) def _get_array(self, typ, size, vals): - """Build an array. 
- - """ + """Build an array.""" if not isinstance(typ, type): typ = self._get_type((typ,)) if not isinstance(size, tuple): - size = (size, ) + size = (size,) new = typ for s in size[::-1]: @@ -353,55 +381,53 @@ def _get_array(self, typ, size, vals): return new() def _init_function(self, function): - """Overrided here to declare the arguments types and return type. - - """ + """Overrided here to declare the arguments types and return type.""" function.func.argtypes = function.arg_types function.func.restype = function.res_type -if sys.platform == 'win32': - WIN_TYPES = {'__int64': c_longlong, 'HRESULT': HRESULT} +if sys.platform == "win32": + WIN_TYPES = {"__int64": c_longlong, "HRESULT": HRESULT} def init_clibrary(extra_types={}): # First load all standard types CTypesCLibrary._types_ = { - 'bool': c_bool, - 'char': c_char, - 'wchar': c_wchar, - 'unsigned char': c_ubyte, - 'short': c_short, - 'short int': c_short, - 'unsigned short': c_ushort, - 'unsigned short int': c_ushort, - 'int': c_int, - 'unsigned': c_uint, - 'unsigned int': c_uint, - 'long': c_long, - 'long int': c_long, - 'unsigned long': c_ulong, - 'unsigned long int': c_ulong, - 'long unsigned int': c_ulong, - 'long long': c_longlong, - 'long long int': c_longlong, - 'unsigned __int64': c_ulonglong, - 'unsigned long long': c_ulonglong, - 'unsigned long long int': c_ulonglong, - 'float': c_float, - 'double': c_double, - 'long double': c_longdouble, - 'uint8_t': c_uint8, - 'int8_t': c_int8, - 'uint16_t': c_uint16, - 'int16_t': c_int16, - 'uint32_t': c_uint32, - 'int32_t': c_int32, - 'uint64_t': c_uint64, - 'int64_t': c_int64 + "bool": c_bool, + "char": c_char, + "wchar": c_wchar, + "unsigned char": c_ubyte, + "short": c_short, + "short int": c_short, + "unsigned short": c_ushort, + "unsigned short int": c_ushort, + "int": c_int, + "unsigned": c_uint, + "unsigned int": c_uint, + "long": c_long, + "long int": c_long, + "unsigned long": c_ulong, + "unsigned long int": c_ulong, + "long unsigned int": c_ulong, 
+ "long long": c_longlong, + "long long int": c_longlong, + "unsigned __int64": c_ulonglong, + "unsigned long long": c_ulonglong, + "unsigned long long int": c_ulonglong, + "float": c_float, + "double": c_double, + "long double": c_longdouble, + "uint8_t": c_uint8, + "int8_t": c_int8, + "uint16_t": c_uint16, + "int16_t": c_int16, + "uint32_t": c_uint32, + "int32_t": c_int32, + "uint64_t": c_uint64, + "int64_t": c_int64, } - if sys.platform == 'win32': + if sys.platform == "win32": for k in extra_types: if k in WIN_TYPES: extra_types[k] = WIN_TYPES[k] diff --git a/pyclibrary/c_library.py b/pyclibrary/c_library.py index ecdebf8..b3466ba 100644 --- a/pyclibrary/c_library.py +++ b/pyclibrary/c_library.py @@ -1,5 +1,5 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # @@ -10,21 +10,22 @@ function calling based on C header definitions. """ + import logging -import sys import os +import sys from inspect import cleandoc -from weakref import WeakValueDictionary from threading import RLock +from weakref import WeakValueDictionary -from .utils import find_library, LibraryPath from .c_parser import CParser +from .utils import LibraryPath, find_library logger = logging.getLogger(__name__) def make_mess(mess): - return cleandoc(mess).replace('\n', ' ') + return cleandoc(mess).replace("\n", " ") class CLibraryMeta(type): @@ -32,15 +33,16 @@ class CLibraryMeta(type): duplicates libraries exists. 
""" + backends = {} libs = WeakValueDictionary() def __new__(meta, name, bases, dct): - if name == 'CLibrary': + if name == "CLibrary": return super(CLibraryMeta, meta).__new__(meta, name, bases, dct) - if 'backend' not in dct: - mess = make_mess('''{} does not declare a backend name, it cannot - be registered.''') + if "backend" not in dct: + mess = make_mess("""{} does not declare a backend name, it cannot + be registered.""") logger.warning(mess.format(name)) return None @@ -50,16 +52,16 @@ def __new__(meta, name, bases, dct): return cls def __call__(cls, lib, *args, **kwargs): - # Identify the library path. if isinstance(lib, str): if os.sep not in lib: lib_path = find_library(lib).path else: lib_path = os.path.realpath(lib) - assert os.path.isfile(lib_path),\ - 'Provided path does not point to a file' - backend_cls = cls.backends[kwargs.get('backend', 'ctypes')] + assert os.path.isfile( + lib_path + ), "Provided path does not point to a file" + backend_cls = cls.backends[kwargs.get("backend", "ctypes")] lib_arch = LibraryPath(lib_path).arch py_bitness = 64 if sys.maxsize > 2**32 else 32 @@ -67,7 +69,8 @@ def __call__(cls, lib, *args, **kwargs): raise OSError("Library bitness does not match Python's") lib = lib_path else: - from .backends import identify_library, get_library_path + from .backends import get_library_path, identify_library + backend = identify_library(lib) backend_cls = cls.backends[backend] lib_path = get_library_path(lib, backend) @@ -77,8 +80,7 @@ def __call__(cls, lib, *args, **kwargs): return cls.libs[lib_path] else: - obj = super(CLibraryMeta, backend_cls).__call__(lib, *args, - **kwargs) + obj = super(CLibraryMeta, backend_cls).__call__(lib, *args, **kwargs) cls.libs[lib_path] = obj return obj @@ -149,14 +151,23 @@ class CLibrary(object, metaclass=CLibraryMeta): is created. """ + #: Private flag allowing to know if the class has been initiliased. 
_init = False #: Balise to use when a NULL pointer is needed Null = object() - def __init__(self, lib, headers, prefix=None, lock_calls=False, - convention='cdll', backend='ctypes', **kwargs): + def __init__( + self, + lib, + headers, + prefix=None, + lock_calls=False, + convention="cdll", + backend="ctypes", + **kwargs, + ): # name everything using underscores to avoid name collisions with # library @@ -166,7 +177,7 @@ def __init__(self, lib, headers, prefix=None, lock_calls=False, elif isinstance(headers, CParser): self._headers_ = headers else: - msg = 'Expected a CParser instance or list for headers, not {}' + msg = "Expected a CParser instance or list for headers, not {}" raise ValueError(msg.format(type(headers))) self._defs_ = self._headers_.defs @@ -189,8 +200,7 @@ def __init__(self, lib, headers, prefix=None, lock_calls=False, self._lock_ = RLock() self._objs_ = {} - for k in ['values', 'functions', 'types', 'structs', 'unions', - 'enums']: + for k in ["values", "functions", "types", "structs", "unions", "enums"]: self._objs_[k] = {} self._all_objs_ = {} self._structs_ = {} @@ -215,8 +225,15 @@ def __getattr__(self, name): """ if name not in self._all_objs_: names = self._all_names_(name) - for k in ['values', 'functions', 'types', 'structs', 'unions', - 'enums', None]: + for k in [ + "values", + "functions", + "types", + "structs", + "unions", + "enums", + None, + ]: if k is None: raise NameError(name) obj = None @@ -230,9 +247,7 @@ def __getattr__(self, name): return self._all_objs_[name] def __getitem__(self, name): - """Used to retrieve a specific dictionary from the headers. - - """ + """Used to retrieve a specific dictionary from the headers.""" return self._defs_[name] # --- Private API --------------------------------------------------------- @@ -245,9 +260,7 @@ def _all_names_(self, name): return [name] + [p + name for p in self._prefix_] def _make_obj_(self, typ, name): - """Build the correct C-like object from the header definitions. 
- - """ + """Build the correct C-like object from the header definitions.""" names = self._all_names_(name) objs = self._objs_[typ] @@ -256,33 +269,33 @@ def _make_obj_(self, typ, name): return self.objs[n] for n in names: # try with and without prefix - if (n not in self._defs_[typ] and - not (typ in ['structs', 'unions', 'enums'] and - n in self._defs_['types'])): + if n not in self._defs_[typ] and not ( + typ in ["structs", "unions", "enums"] and n in self._defs_["types"] + ): continue - if typ == 'values': + if typ == "values": return self._defs_[typ][n] - elif typ == 'functions': + elif typ == "functions": return self._get_function(n) - elif typ == 'types': + elif typ == "types": obj = self._defs_[typ][n] return self._get_type(obj) - elif typ == 'structs': - return self._get_struct('structs', n) - elif typ == 'unions': - return self._get_struct('unions', n) - elif typ == 'enums': + elif typ == "structs": + return self._get_struct("structs", n) + elif typ == "unions": + return self._get_struct("unions", n) + elif typ == "enums": # Allow automatic resolving of typedefs that alias enums - if n not in self._defs_['enums']: - if n not in self._defs_['types']: + if n not in self._defs_["enums"]: + if n not in self._defs_["types"]: raise KeyError('No enums named "{}"'.format(n)) typ = self._headers_.eval_type([n])[0] - if typ[:5] != 'enum ': + if typ[:5] != "enum ": raise KeyError('No enums named "{}"'.format(n)) # Look up internal name of enum - n = self._defs_['types'][typ][1] - obj = self._defs_['enums'][n] + n = self._defs_["types"][typ][1] + obj = self._defs_["enums"][n] return obj else: @@ -294,9 +307,7 @@ def __repr__(self): return "" % str(self._lib_) def _build_parser(self, headers, kwargs): - """Find the headers and parse them to extract the definitions. 
- - """ + """Find the headers and parse them to extract the definitions.""" return CParser(headers, **kwargs) def _link_library(self, lib_path, convention): @@ -314,23 +325,24 @@ def _link_library(self, lib_path, convention): raise NotImplementedError() def _extract_val_(self, obj): - """Extract a python representation from a function return value. - - """ + """Extract a python representation from a function return value.""" raise NotImplementedError() def _get_function(self, func_name): - """Return a CFuntion instance. - - """ + """Return a CFuntion instance.""" try: func = getattr(self._lib_, func_name) - except: + except Exception: mess = "Function name '{}' appears in headers but not in library!" raise KeyError(mess.format(func_name)) - return CFunction(self, func, self._defs_['functions'][func_name], - func_name, self._lock_calls_) + return CFunction( + self, + func, + self._defs_["functions"][func_name], + func_name, + self._lock_calls_, + ) def _init_function(self, function): """Finish the function wrapper initialisation. @@ -352,50 +364,39 @@ def _get_type(self, typ, pointers=True): raise NotImplementedError() def _get_struct(self, str_type, str_name): - """Return an object representing the named structure or union. - - """ + """Return an object representing the named structure or union.""" raise NotImplementedError() def _get_pointer(self, arg_type): - """Build an uninitialised pointer for the given type. - - """ + """Build an uninitialised pointer for the given type.""" raise NotImplementedError() def _get_array(self, typ, size, obj): - """Build an array of the specified type and size. - - """ + """Build an array of the specified type and size.""" raise NotImplementedError() def _resolve_struct_alias(self, str_type, str_name): - """Resolve struct name--typedef aliases. 
- - """ + """Resolve struct name--typedef aliases.""" if str_name not in self._defs_[str_type]: - - if str_name not in self._defs_['types']: + if str_name not in self._defs_["types"]: mess = 'No struct/union named "{}"' raise KeyError(mess.format(str_name)) typ = self._headers_.eval_type([str_name])[0] - if typ[:7] != 'struct ' and typ[:6] != 'union ': + if typ[:7] != "struct " and typ[:6] != "union ": mess = 'No struct/union named "{}"' raise KeyError(mess.format(str_name)) - return self._defs_['types'][typ][1] + return self._defs_["types"][typ][1] else: return str_name class CFunction(object): - """Wrapper object for a function from the library. + """Wrapper object for a function from the library.""" - """ def __init__(self, lib, func, sig, name, lock_call): - self.lock_call = lock_call self.lib = lib self.func = func @@ -405,8 +406,8 @@ def __init__(self, lib, func, sig, name, lock_call): self.sig = list(sig) # remove void args from list - self.sig[1] = [s for s in sig[1] if s[1] != ('void',)] - for conv in ['__stdcall', '__cdecl']: + self.sig[1] = [s for s in sig[1] if s[1] != ("void",)] + for conv in ["__stdcall", "__cdecl"]: if conv in self.sig[0]: self.sig[0].remove(conv) self.name = name @@ -459,8 +460,11 @@ def __call__(self, *args, **kwargs): # Finally, fill in remaining arguments if they are pointers to # int/float/void*/struct values (we assume these are to be modified by # the function and their initial value is not important) - missings = {i: arg for i, arg in enumerate(arg_list) - if arg is None or arg is self.lib.Null} + missings = { + i: arg + for i, arg in enumerate(arg_list) + if arg is None or arg is self.lib.Null + } for i, arg in missings.items(): try: sig = self.sig[1][i][1] @@ -482,8 +486,7 @@ def __call__(self, *args, **kwargs): if sys.exc_info()[0] is not AssertionError: raise mess = "Function call '{}' missing required argument {} {}" - raise TypeError(mess.format(self.name, i, - self.sig[1][i][0])) + raise 
TypeError(mess.format(self.name, i, self.sig[1][i][0])) try: if self.lock_call: @@ -492,14 +495,14 @@ def __call__(self, *args, **kwargs): else: res = self.func(*arg_list) except Exception: - logger.error("Function call failed. Signature is: {}".format( - self.pretty_signature())) + logger.error( + "Function call failed. Signature is: {}".format(self.pretty_signature()) + ) logger.error("Arguments: {}".format(arg_list)) logger.error("Argtypes: {}".format(self.func.argtypes)) raise - cr = CallResult(self.lib, res, arg_list, self.sig, - guessed=guessed_args) + cr = CallResult(self.lib, res, arg_list, self.sig, guessed=guessed_args) return cr def arg_c_type(self, arg): @@ -516,10 +519,11 @@ def arg_c_type(self, arg): return self.lib._get_type(self.sig[1][arg][1]) def pretty_signature(self): - args = (''.join(self.sig[0]), self.name, - ', '.join(["{} {}".format(s[1], s[0]) - for s in self.sig[1]]) - ) + args = ( + "".join(self.sig[0]), + self.name, + ", ".join(["{} {}".format(s[1], s[0]) for s in self.sig[1]]), + ) return "{} {}({})".format(*args) @@ -563,15 +567,16 @@ class CallResult(object): Pointers that were created on the fly. """ + def __init__(self, lib, rval, args, sig, guessed): self.lib = lib - self.rval = rval # return value of function call - self.args = args # list of arguments to function call - self.sig = sig # function signature + self.rval = rval # return value of function call + self.args = args # list of arguments to function call + self.sig = sig # function signature self.guessed = guessed # list of arguments that were auto-generated def __call__(self): - if self.sig[0] == ['void']: + if self.sig[0] == ["void"]: return None return self.lib._extract_val_(self.rval) @@ -599,9 +604,7 @@ def __setitem__(self, n, val): raise ValueError("Index must be int or str.") def find_arg(self, arg): - """Find argument based on name. 
- - """ + """Find argument based on name.""" for i, a in enumerate(self.sig[1]): if a[0] == arg: return i @@ -611,7 +614,7 @@ def find_arg(self, arg): def __iter__(self): yield self() - yield(self[i] for i in range(len(self.args))) + yield (self[i] for i in range(len(self.args))) def auto(self): """Return a list of all the auto-generated values. diff --git a/pyclibrary/c_parser.py b/pyclibrary/c_parser.py index cce1aab..902eec1 100644 --- a/pyclibrary/c_parser.py +++ b/pyclibrary/c_parser.py @@ -10,29 +10,53 @@ function signatures from C header files. """ -import sys -import re -import os + import logging +import os +import re +import sys from inspect import cleandoc from traceback import format_exc +# Import parsing elements +from pyparsing import ( + Forward, + Group, + Keyword, + LineEnd, + Literal, + OneOrMore, + Optional, + ParserElement, + ParseResults, + Regex, + SkipTo, + Suppress, + Word, + WordEnd, + WordStart, + ZeroOrMore, + alphanums, + alphas, + cStyleComment, + delimitedList, + hexnums, + lineno, + nestedExpr, + oneOf, + quotedString, + restOfLine, +) + from .errors import DefinitionError from .utils import find_header -# Import parsing elements -from pyparsing import \ - (ParserElement, ParseResults, Forward, Optional, Word, WordStart, - WordEnd, Keyword, Regex, Literal, SkipTo, ZeroOrMore, OneOrMore, - Group, LineEnd, quotedString, oneOf, nestedExpr, - delimitedList, restOfLine, cStyleComment, alphas, alphanums, hexnums, - lineno, Suppress) ParserElement.enablePackrat() logger = logging.getLogger(__name__) -__all__ = ['win_defs', 'CParser'] +__all__ = ["CParser", "win_defs"] class Type(tuple): @@ -88,17 +112,17 @@ class Type(tuple): >>> '*', [2]) """ + # Cannot slot a subclass of tuple. 
def __new__(cls, type_spec, *declarators, **argv): - return super(Type, cls).__new__(cls, (type_spec,) + declarators) + return super(Type, cls).__new__(cls, (type_spec, *declarators)) def __init__(self, type_spec, *declarators, **argv): super(Type, self).__init__() - self.type_quals = (argv.pop('type_quals', None) or - ((),) * (1 + len(declarators))) + self.type_quals = argv.pop("type_quals", None) or ((),) * (1 + len(declarators)) if len(self.type_quals) != 1 + len(declarators): raise ValueError("wrong number of type qualifiers") - assert len(argv) == 0, 'Invalid Parameter' + assert len(argv) == 0, "Invalid Parameter" def __eq__(self, other): if isinstance(other, Type): @@ -111,16 +135,12 @@ def __ne__(self, other): @property def declarators(self): - """Return a tuple of all declarators. - - """ + """Return a tuple of all declarators.""" return tuple(self[1:]) @property def type_spec(self): - """Return the base type of this type. - - """ + """Return the base type of this type.""" return self[0] def is_fund_type(self): @@ -129,12 +149,16 @@ def is_fund_type(self): """ - if (self[0].startswith('struct ') or self[0].startswith('union ') or - self[0].startswith('enum ')): + if ( + self[0].startswith("struct ") + or self[0].startswith("union ") + or self[0].startswith("enum ") + ): return True - names = (num_types + nonnum_types + size_modifiers + sign_modifiers + - extra_type_list) + names = ( + num_types + nonnum_types + size_modifiers + sign_modifiers + extra_type_list + ) for w in self[0].split(): if w not in names: return False @@ -159,27 +183,32 @@ def eval(self, type_map, used=None): if self.is_fund_type(): # Remove 'signed' before returning evaluated type - return Type(re.sub(r'\bsigned\b', '', self.type_spec).strip(), - *self.declarators, - type_quals=self.type_quals) + return Type( + re.sub(r"\bsigned\b", "", self.type_spec).strip(), + *self.declarators, + type_quals=self.type_quals, + ) parent = self.type_spec if parent in used: - m = 'Recursive loop while 
evaluating types. (typedefs are {})' - raise DefinitionError(m.format(' -> '.join(used+[parent]))) + m = "Recursive loop while evaluating types. (typedefs are {})" + raise DefinitionError(m.format(" -> ".join([*used, parent]))) used.append(parent) if parent not in type_map: m = 'Unknown type "{}" (typedefs are {})' - raise DefinitionError(m.format(parent, ' -> '.join(used))) + raise DefinitionError(m.format(parent, " -> ".join(used))) pt = type_map[parent] - evaled_type = Type(pt.type_spec, *(pt.declarators + self.declarators), - type_quals=(pt.type_quals[:-1] + - (pt.type_quals[-1] + - self.type_quals[0],) + - self.type_quals[1:]) - ) + evaled_type = Type( + pt.type_spec, + *(pt.declarators + self.declarators), + type_quals=( + pt.type_quals[:-1] + + (pt.type_quals[-1] + self.type_quals[0],) + + self.type_quals[1:] + ), + ) return evaled_type.eval(type_map, used) @@ -195,10 +224,12 @@ def add_compatibility_hack(self): Type(Type('int', '*'), ()) """ - if type(self[-1]) == tuple: - return Type(Type(*self[:-1], type_quals=self.type_quals[:-1]), - self[-1], - type_quals=((), self.type_quals[-1])) + if type(self[-1]) is tuple: + return Type( + Type(*self[:-1], type_quals=self.type_quals[:-1]), + self[-1], + type_quals=((), self.type_quals[-1]), + ) else: return self @@ -213,40 +244,46 @@ def remove_compatibility_hack(self): return self def __repr__(self): - type_qual_str = ('' if not any(self.type_quals) else - ', type_quals='+repr(self.type_quals)) - return (type(self).__name__ + '(' + - ', '.join(map(repr, self)) + type_qual_str + ')') + type_qual_str = ( + "" if not any(self.type_quals) else ", type_quals=" + repr(self.type_quals) + ) + return ( + type(self).__name__ + "(" + ", ".join(map(repr, self)) + type_qual_str + ")" + ) def __getnewargs__(self): - return (self.type_spec,) + self.declarators + return (self.type_spec, *self.declarators) class Compound(dict): - """Base class for representing object using a dict-like interface. 
+ """Base class for representing object using a dict-like interface.""" - """ __slots__ = () def __init__(self, *members, **argv): members = list(members) - pack = argv.pop('pack', None) + pack = argv.pop("pack", None) assert len(argv) == 0 - super(Compound, self).__init__(dict(members=members, pack=pack)) + super(Compound, self).__init__({"members": members, "pack": pack}) def __repr__(self): - packParam = ', pack='+repr(self.pack) if self.pack is not None else '' - return (type(self).__name__ + '(' + - ', '.join(map(repr, self.members)) + packParam + ')') + packParam = ", pack=" + repr(self.pack) if self.pack is not None else "" + return ( + type(self).__name__ + + "(" + + ", ".join(map(repr, self.members)) + + packParam + + ")" + ) @property def members(self): - return self['members'] + return self["members"] @property def pack(self): - return self['pack'] + return self["pack"] class Struct(Compound): @@ -257,6 +294,7 @@ class Struct(Compound): from dict and can be seen as the dicts from 0.1.0. In future this might change to a dict-like object!!! """ + __slots__ = () @@ -268,6 +306,7 @@ class Union(Compound): from dict and can be seen as the dicts from 0.1.0. In future this might change to a dict-like object!!! """ + __slots__ = () @@ -279,19 +318,22 @@ class Enum(dict): from dict and can be seen as the dicts from 0.1.0. In future this might change to a dict-like object!!! """ + __slots__ = () def __init__(self, **args): super(Enum, self).__init__(args) def __repr__(self): - return (type(self).__name__ + '(' + - ', '.join(nm + '=' + repr(val) - for nm, val in sorted(self.items())) + - ')') + return ( + type(self).__name__ + + "(" + + ", ".join(nm + "=" + repr(val) for nm, val in sorted(self.items())) + + ")" + ) -def win_defs(version='1500'): +def win_defs(version="1500"): """Loads selection of windows headers included with PyCLibrary. 
These definitions can either be accessed directly or included before @@ -314,21 +356,33 @@ def win_defs(version='1500'): CParser containing all the infos from te windows headers. """ - header_files = ['WinNt.h', 'WinDef.h', 'WinBase.h', 'BaseTsd.h', - 'WTypes.h', 'WinUser.h'] + header_files = [ + "WinNt.h", + "WinDef.h", + "WinBase.h", + "BaseTsd.h", + "WTypes.h", + "WinUser.h", + ] if not CParser._init: - logger.info('Automatic initialisation : OS is assumed to be win32') + logger.info("Automatic initialisation : OS is assumed to be win32") from .init import auto_init + auto_init() d = os.path.dirname(__file__) p = CParser( header_files, - macros={'_WIN32': '', '_MSC_VER': version, 'CONST': 'const', - 'NO_STRICT': None, 'MS_WIN32': ''}, - process_all=False - ) - - p.process_all(cache=os.path.join(d, 'headers', 'WinDefs.cache')) + macros={ + "_WIN32": "", + "_MSC_VER": version, + "CONST": "const", + "NO_STRICT": None, + "MS_WIN32": "", + }, + process_all=False, + ) + + p.process_all(cache=os.path.join(d, "headers", "WinDefs.cache")) return p @@ -401,6 +455,7 @@ class CParser(object): print s """ + #: Increment every time cache structure or parsing changes to invalidate #: old cache files. # 2 : add C99 integers @@ -409,12 +464,21 @@ class CParser(object): #: Private flag allowing to know if the parser has been initiliased. 
_init = False - def __init__(self, files=None, copy_from=None, replace=None, process_all=True, - cache=None, check_cache_validity=True, encoding="utf-8", **kwargs): - + def __init__( + self, + files=None, + copy_from=None, + replace=None, + process_all=True, + cache=None, + check_cache_validity=True, + encoding="utf-8", + **kwargs, + ): if not self._init: - logger.info('Automatic initialisation based on OS detection') + logger.info("Automatic initialisation based on OS detection") from .init import auto_init + auto_init() # Holds all definitions @@ -425,11 +489,20 @@ def __init__(self, files=None, copy_from=None, replace=None, process_all=True, self.pack_list = {} self.init_opts = kwargs.copy() - self.init_opts['files'] = [] - self.init_opts['replace'] = {} - - self.data_list = ['types', 'variables', 'fnmacros', 'macros', - 'structs', 'unions', 'enums', 'functions', 'values'] + self.init_opts["files"] = [] + self.init_opts["replace"] = {} + + self.data_list = [ + "types", + "variables", + "fnmacros", + "macros", + "structs", + "unions", + "enums", + "functions", + "values", + ] self.file_order = [] self.files = {} @@ -466,9 +539,14 @@ def __init__(self, files=None, copy_from=None, replace=None, process_all=True, if process_all: self.process_all(cache=cache, check_cache_validity=check_cache_validity) - def process_all(self, cache=None, return_unparsed=False, - print_after_preprocess=False, check_cache_validity=True): - """ Remove comments, preprocess, and parse declarations from all files. + def process_all( + self, + cache=None, + return_unparsed=False, + print_after_preprocess=False, + check_cache_validity=True, + ): + """Remove comments, preprocess, and parse declarations from all files. This operates in memory, and thus does not alter the original files. @@ -490,16 +568,19 @@ def process_all(self, cache=None, return_unparsed=False, List of the results from parse_defs. 
""" - if cache is not None and self.load_cache(cache, check_validity=check_cache_validity): + if cache is not None and self.load_cache( + cache, check_validity=check_cache_validity + ): logger.debug("Loaded cached definitions; will skip parsing.") # Cached values loaded successfully, nothing left to do here return results = [] - logger.debug(cleandoc('''Parsing C header files (no valid cache found). - This could take several minutes...''')) + logger.debug( + cleandoc("""Parsing C header files (no valid cache found). + This could take several minutes...""") + ) for f in self.file_order: - if self.files[f] is None: # This means the file could not be loaded and there was no # cache. @@ -574,29 +655,30 @@ def load_cache(self, cache_file, check_validity=False): try: # Read cache file import pickle - cache = pickle.load(open(cache_file, 'rb')) + + cache = pickle.load(open(cache_file, "rb")) # Make sure __init__ options match if check_validity: - if cache['opts'] != self.init_opts: + if cache["opts"] != self.init_opts: db = logger.debug db("Cache file is not valid") db("It was created using different initialization options") - db('{}'.format(cache['opts'])) - db('{}'.format(self.init_opts)) + db("{}".format(cache["opts"])) + db("{}".format(self.init_opts)) return False else: logger.debug("Cache init opts are OK:") - logger.debug('{}'.format(cache['opts'])) + logger.debug("{}".format(cache["opts"])) - if cache['version'] < self.cache_version: + if cache["version"] < self.cache_version: mess = "Cache file is not valid--cache format has changed." logger.debug(mess) return False # Import all parse results - self.import_dict(cache['file_defs']) + self.import_dict(cache["file_defs"]) return True except Exception: @@ -617,21 +699,18 @@ def import_dict(self, data): self.add_def(k, n, data[f][k][n]) def write_cache(self, cache_file): - """Store all parsed declarations to cache. Used internally. - - """ + """Store all parsed declarations to cache. 
Used internally.""" cache = {} - cache['opts'] = self.init_opts - cache['file_defs'] = self.file_defs - cache['version'] = self.cache_version + cache["opts"] = self.init_opts + cache["file_defs"] = self.file_defs + cache["version"] = self.cache_version import pickle - with open(cache_file, 'wb') as f: + + with open(cache_file, "wb") as f: pickle.dump(cache, f) def find_headers(self, headers): - """Try to find the specified headers. - - """ + """Try to find the specified headers.""" hs = [] for header in headers: if os.path.isfile(header): @@ -639,7 +718,7 @@ def find_headers(self, headers): else: h = find_header(header) if not h: - raise OSError('Cannot find header: {}'.format(header)) + raise OSError("Cannot find header: {}".format(header)) hs.append(h) return hs @@ -682,14 +761,15 @@ def load_file(self, path, replace=None, encoding=None): self.file_order.append(path) bn = os.path.basename(path) - self.init_opts['replace'][bn] = replace + self.init_opts["replace"][bn] = replace # Only interested in the file names, the directory may change between # systems. - self.init_opts['files'].append(bn) + self.init_opts["files"].append(bn) return True def _format_parsed_file(self, filename=None): from pprint import pformat + s = "" for k in self.data_list: s += "============== {} ==================\n".format(k) @@ -726,13 +806,14 @@ def remove_comments(self, path): text = self.files[path] cplusplus_line_comment = Literal("//") + restOfLine # match quoted strings first to prevent matching comments inside quotes - comment_remover = (quotedString | cStyleComment.suppress() | - cplusplus_line_comment.suppress()) + comment_remover = ( + quotedString | cStyleComment.suppress() | cplusplus_line_comment.suppress() + ) self.files[path] = comment_remover.transformString(text) # --- Pre processing - def preprocess(self, path): + def preprocess(self, path): # noqa """Scan named file for preprocessor directives, removing them while expanding macros. 
@@ -757,26 +838,28 @@ def preprocess(self, path): text = self.files[path] # First join together lines split by \\n - text = Literal('\\\n').suppress().transformString(text) + text = Literal("\\\n").suppress().transformString(text) # Define the structure of a macro definition - name = Word(alphas+'_', alphanums+'_')('name') + name = Word(alphas + "_", alphanums + "_")("name") deli_list = Optional(lparen + delimitedList(name) + rparen) - self.pp_define = (name.setWhitespaceChars(' \t')("macro") + - deli_list.setWhitespaceChars(' \t')('args') + - SkipTo(LineEnd())('value')) + self.pp_define = ( + name.setWhitespaceChars(" \t")("macro") + + deli_list.setWhitespaceChars(" \t")("args") + + SkipTo(LineEnd())("value") + ) self.pp_define.setParseAction(self.process_macro_defn) # Comb through lines, process all directives - lines = text.split('\n') + lines = text.split("\n") result = [] - directive = re.compile(r'\s*#\s*([a-zA-Z]+)(.*)$') + directive = re.compile(r"\s*#\s*([a-zA-Z]+)(.*)$") if_true = [True] if_hit = [] for i, line in enumerate(lines): - new_line = '' + new_line = "" m = directive.match(line) # Regular code line @@ -791,82 +874,93 @@ def preprocess(self, path): d = m.groups()[0] rest = m.groups()[1] - if d == 'ifdef': - d = 'if' - rest = 'defined ' + rest - elif d == 'ifndef': - d = 'if' - rest = '!defined ' + rest + if d == "ifdef": + d = "if" + rest = "defined " + rest + elif d == "ifndef": + d = "if" + rest = "!defined " + rest # Evaluate 'defined' operator before expanding macros - if d in ['if', 'elif']: - def pa(t): - is_macro = t['name'] in self.defs['macros'] - is_macro_func = t['name'] in self.defs['fnmacros'] - return ['0', '1'][is_macro or is_macro_func] - - rest = (Keyword('defined') + - (name | lparen + name + rparen) - ).setParseAction(pa).transformString(rest) + if d in ["if", "elif"]: - elif d in ['define', 'undef']: - match = re.match(r'\s*([a-zA-Z_][a-zA-Z0-9_]*)(.*)$', rest) + def pa(t): + is_macro = t["name"] in self.defs["macros"] + 
is_macro_func = t["name"] in self.defs["fnmacros"] + return ["0", "1"][is_macro or is_macro_func] + + rest = ( + (Keyword("defined") + (name | lparen + name + rparen)) + .setParseAction(pa) + .transformString(rest) + ) + + elif d in ["define", "undef"]: + match = re.match(r"\s*([a-zA-Z_][a-zA-Z0-9_]*)(.*)$", rest) macroName, rest = match.groups() # Expand macros if needed - if rest is not None and (all(if_true) or d in ['if', 'elif']): + if rest is not None and (all(if_true) or d in ["if", "elif"]): rest = self.expand_macros(rest) - if d == 'elif': + if d == "elif": if if_hit[-1] or not all(if_true[:-1]): ev = False else: ev = self.eval_preprocessor_expr(rest) - logger.debug(" "*(len(if_true)-2) + line + - '{}, {}'.format(rest, ev)) + logger.debug( + " " * (len(if_true) - 2) + line + "{}, {}".format(rest, ev) + ) if_true[-1] = ev if_hit[-1] = if_hit[-1] or ev - elif d == 'else': - logger.debug(" "*(len(if_true)-2) + line + - '{}'.format(not if_hit[-1])) + elif d == "else": + logger.debug( + " " * (len(if_true) - 2) + line + "{}".format(not if_hit[-1]) + ) if_true[-1] = (not if_hit[-1]) and all(if_true[:-1]) if_hit[-1] = True - elif d == 'endif': + elif d == "endif": if_true.pop() if_hit.pop() - logger.debug(" "*(len(if_true)-1) + line) + logger.debug(" " * (len(if_true) - 1) + line) - elif d == 'if': + elif d == "if": if all(if_true): ev = self.eval_preprocessor_expr(rest) else: ev = False - logger.debug(" "*(len(if_true)-1) + line + - '{}, {}'.format(rest, ev)) + logger.debug( + " " * (len(if_true) - 1) + line + "{}, {}".format(rest, ev) + ) if_true.append(ev) if_hit.append(ev) - elif d == 'define': + elif d == "define": if not if_true[-1]: continue - logger.debug(" "*(len(if_true)-1) + "define: " + - '{}, {}'.format(macroName, rest)) + logger.debug( + " " * (len(if_true) - 1) + + "define: " + + "{}, {}".format(macroName, rest) + ) try: # Macro is registered here - self.pp_define.parseString(macroName + ' ' + rest) + self.pp_define.parseString(macroName + " " + 
rest) except Exception: - logger.exception("Error processing macro definition:" + - '{}, {}'.format(macroName, rest)) + logger.exception( + "Error processing macro definition:" + + "{}, {}".format(macroName, rest) + ) - elif d == 'undef': + elif d == "undef": if not if_true[-1]: continue try: - self.rem_def('macros', macroName.strip()) + self.rem_def("macros", macroName.strip()) except Exception: if sys.exc_info()[0] is not KeyError: mess = "Error removing macro definition '{}'" @@ -880,18 +974,18 @@ def pa(t): # https://gcc.gnu.org/onlinedocs/gcc/Structure-Packing-Pragmas.html # http://msdn.microsoft.com/fr-fr/library/2e70t5y1.aspx # The current implementation follows the MSVC doc. - elif d == 'pragma': + elif d == "pragma": if not if_true[-1]: continue - m = re.match(r'\s+pack\s*\(([^\)]*)\)', rest) + m = re.match(r"\s+pack\s*\(([^\)]*)\)", rest) if not m: continue if m.groups(): - opts = [s.strip() for s in m.groups()[0].split(',')] + opts = [s.strip() for s in m.groups()[0].split(",")] pushpop = id = val = None for o in opts: - if o in ['push', 'pop']: + if o in ["push", "pop"]: pushpop = o elif o.isdigit(): val = int(o) @@ -900,9 +994,9 @@ def pa(t): packing = val - if pushpop == 'push': + if pushpop == "push": pack_stack.append((packing, id)) - elif opts[0] == 'pop': + elif opts[0] == "pop": if id is None: pack_stack.pop() else: @@ -921,19 +1015,20 @@ def pa(t): self.pack_list[path].append((i, packing)) else: # Ignore any other directives - mess = 'Ignored directive {} at line {}' + mess = "Ignored directive {} at line {}" logger.debug(mess.format(d, i)) result.append(new_line) - self.files[path] = '\n'.join(result) + self.files[path] = "\n".join(result) def eval_preprocessor_expr(self, expr): # Make a few alterations so the expression can be eval'd macro_diffs = ( - Literal('!').setParseAction(lambda: ' not ') | - Literal('&&').setParseAction(lambda: ' and ') | - Literal('||').setParseAction(lambda: ' or ') | - Word(alphas + '_', alphanums + 
'_').setParseAction(lambda: '0')) + Literal("!").setParseAction(lambda: " not ") + | Literal("&&").setParseAction(lambda: " and ") + | Literal("||").setParseAction(lambda: " or ") + | Word(alphas + "_", alphanums + "_").setParseAction(lambda: "0") + ) expr2 = macro_diffs.transformString(expr).strip() try: @@ -945,40 +1040,38 @@ def eval_preprocessor_expr(self, expr): return ev def process_macro_defn(self, t): - """Parse a #define macro and register the definition. - - """ + """Parse a #define macro and register the definition.""" logger.debug("Processing MACRO: {}".format(t)) macro_val = t.value.strip() - if macro_val in self.defs['fnmacros']: - self.add_def('fnmacros', t.macro, self.defs['fnmacros'][macro_val]) + if macro_val in self.defs["fnmacros"]: + self.add_def("fnmacros", t.macro, self.defs["fnmacros"][macro_val]) logger.debug(" Copy fn macro {} => {}".format(macro_val, t.macro)) else: - if t.args == '': + if t.args == "": val = self.eval_expr(macro_val) - self.add_def('macros', t.macro, macro_val) - self.add_def('values', t.macro, val) + self.add_def("macros", t.macro, macro_val) + self.add_def("values", t.macro, val) mess = " Add macro: {} ({}); {}" - logger.debug(mess.format(t.macro, val, - self.defs['macros'][t.macro])) + logger.debug(mess.format(t.macro, val, self.defs["macros"][t.macro])) else: - self.add_def('fnmacros', t.macro, - self.compile_fn_macro(macro_val, - [x for x in t.args])) + self.add_def( + "fnmacros", + t.macro, + self.compile_fn_macro(macro_val, list(t.args)), + ) mess = " Add fn macro: {} ({}); {}" - logger.debug(mess.format(t.macro, t.args, - self.defs['fnmacros'][t.macro])) + logger.debug( + mess.format(t.macro, t.args, self.defs["fnmacros"][t.macro]) + ) return "#define " + t.macro + " " + macro_val def compile_fn_macro(self, text, args): - """Turn a function macro spec into a compiled description. - - """ + """Turn a function macro spec into a compiled description.""" # Find all instances of each arg in text. 
- args_str = '|'.join(args) + args_str = "|".join(args) arg_regex = re.compile(r'("(\\"|[^"])*")|(\b({})\b)'.format(args_str)) start = 0 parts = [] @@ -988,11 +1081,11 @@ def compile_fn_macro(self, text, args): for m in arg_regex.finditer(text): arg = m.groups()[N] if arg is not None: - parts.append(text[start:m.start(N)] + '{}') + parts.append(text[start : m.start(N)] + "{}") start = m.end(N) arg_order.append(args.index(arg)) parts.append(text[start:]) - return (''.join(parts), arg_order) + return ("".join(parts), arg_order) def expand_macros(self, line): """Expand all the macro expressions in a string. @@ -1004,30 +1097,29 @@ def expand_macros(self, line): parts = [] # The group number to check for macro names N = 3 - macros = self.defs['macros'] - fnmacros = self.defs['fnmacros'] + macros = self.defs["macros"] + fnmacros = self.defs["fnmacros"] while True: m = reg.search(line) if not m: break name = m.groups()[N] if name in macros: - parts.append(line[:m.start(N)]) - line = line[m.end(N):] + parts.append(line[: m.start(N)]) + line = line[m.end(N) :] parts.append(macros[name]) elif name in fnmacros: # If function macro expansion fails, just ignore it. try: - exp, end = self.expand_fn_macro(name, line[m.end(N):]) + exp, end = self.expand_fn_macro(name, line[m.end(N) :]) except Exception: exp = name - end = line[m.end(N):] + end = line[m.end(N) :] mess = "Function macro expansion failed: {}, {}\n {}" - logger.error(mess.format(name, line[m.end(N):], - format_exc())) + logger.error(mess.format(name, line[m.end(N) :], format_exc())) - parts.append(line[:m.start(N)]) + parts.append(line[: m.start(N)]) line = end parts.append(exp) @@ -1037,22 +1129,20 @@ def expand_macros(self, line): line = line[start:] parts.append(line) - return ''.join(parts) + return "".join(parts) def expand_fn_macro(self, name, text): - """Replace a function macro. 
- - """ + """Replace a function macro.""" # defn looks like ('%s + %s / %s', (0, 0, 1)) - defn = self.defs['fnmacros'][name] + defn = self.defs["fnmacros"][name] try: - args, end = text.split(')', 1) - _, args = args.split('(', 1) - args = [a.strip() for a in args.split(',')] + args, end = text.split(")", 1) + _, args = args.split("(", 1) + args = [a.strip() for a in args.split(",")] except Exception: mess = "Function macro {} argument analysis failed :\n{}" - raise DefinitionError(0, mess.format(name, format_exc())) + raise DefinitionError(0, mess.format(name, format_exc())) args = [self.expand_macros(arg) for arg in args] new_str = defn[0].format(*[args[i] for i in defn[1]]) @@ -1085,7 +1175,7 @@ def parse_defs(self, path, return_unparsed=False): parser = self.build_parser() if return_unparsed: text = parser.suppress().transformString(self.files[path]) - return re.sub(r'\n\s*\n', '\n', text) + return re.sub(r"\n\s*\n", "\n", text) else: return [x[0] for x in parser.scanString(self.files[path])] @@ -1094,19 +1184,21 @@ def build_parser(self): bits we support, anyway). """ - if hasattr(self, 'parser'): + if hasattr(self, "parser"): return self.parser self.struct_type = Forward() self.enum_type = Forward() - type_ = (fund_type | - Optional(kwl(size_modifiers + sign_modifiers)) + ident | - self.struct_type | - self.enum_type) + type_ = ( + fund_type + | Optional(kwl(size_modifiers + sign_modifiers)) + ident + | self.struct_type + | self.enum_type + ) if extra_modifier is not None: type_ += extra_modifier type_.setParseAction(recombine) - self.type_spec = Group(type_qualifier('pre_qual') + type_("name")) + self.type_spec = Group(type_qualifier("pre_qual") + type_("name")) # --- Abstract declarators for use in function pointer arguments # Thus begins the extremely hairy business of parsing C declarators. @@ -1127,20 +1219,31 @@ def build_parser(self): # *( )(int, int)[10] # ...etc... 
self.abstract_declarator << Group( - type_qualifier('first_typequal') + - Group(ZeroOrMore(Group(Suppress('*') + type_qualifier)))('ptrs') + - ((Optional('&')('ref')) | - (lparen + self.abstract_declarator + rparen)('center')) + - Optional(lparen + - Optional(delimitedList(Group( - self.type_spec('type') + - self.abstract_declarator('decl') + - Optional(Literal('=').suppress() + expression, - default=None)('val') - )), default=None) + - rparen)('args') + - Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + - rbrack))('arrays') + type_qualifier("first_typequal") + + Group(ZeroOrMore(Group(Suppress("*") + type_qualifier)))("ptrs") + + ( + (Optional("&")("ref")) + | (lparen + self.abstract_declarator + rparen)("center") + ) + + Optional( + lparen + + Optional( + delimitedList( + Group( + self.type_spec("type") + + self.abstract_declarator("decl") + + Optional( + Literal("=").suppress() + expression, default=None + )("val") + ) + ), + default=None, + ) + + rparen + )("args") + + Group(ZeroOrMore(lbrack + Optional(expression, default="-1") + rbrack))( + "arrays" + ) ) # Declarators look like: @@ -1151,97 +1254,129 @@ def build_parser(self): # * fnName(int arg1=0)[10] # ...etc... 
self.declarator << Group( - type_qualifier('first_typequal') + call_conv + - Group(ZeroOrMore(Group(Suppress('*') + type_qualifier)))('ptrs') + - ((Optional('&')('ref') + ident('name')) | - (lparen + self.declarator + rparen)('center')) + - Optional(lparen + - Optional(delimitedList( - Group(self.type_spec('type') + - (self.declarator | - self.abstract_declarator)('decl') + - Optional(Literal('=').suppress() + - expression, default=None)('val') - )), - default=None) + - rparen)('args') + - Group(ZeroOrMore(lbrack + Optional(expression, default='-1') + - rbrack))('arrays') + type_qualifier("first_typequal") + + call_conv + + Group(ZeroOrMore(Group(Suppress("*") + type_qualifier)))("ptrs") + + ( + (Optional("&")("ref") + ident("name")) + | (lparen + self.declarator + rparen)("center") + ) + + Optional( + lparen + + Optional( + delimitedList( + Group( + self.type_spec("type") + + (self.declarator | self.abstract_declarator)("decl") + + Optional( + Literal("=").suppress() + expression, default=None + )("val") + ) + ), + default=None, + ) + + rparen + )("args") + + Group(ZeroOrMore(lbrack + Optional(expression, default="-1") + rbrack))( + "arrays" + ) ) self.declarator_list = Group(delimitedList(self.declarator)) # Typedef - self.type_decl = (Keyword('typedef') + self.type_spec('type') + - self.declarator_list('decl_list') + semi) + self.type_decl = ( + Keyword("typedef") + + self.type_spec("type") + + self.declarator_list("decl_list") + + semi + ) self.type_decl.setParseAction(self.process_typedef) # Variable declaration self.variable_decl = ( - Group(storage_class_spec + - self.type_spec('type') + - Optional(self.declarator_list('decl_list')) + - Optional(Literal('=').suppress() + - (expression('value') | - (lbrace + - Group(delimitedList(expression))('array_values') + - rbrace - ) - ) - ) - ) + - semi) + Group( + storage_class_spec + + self.type_spec("type") + + Optional(self.declarator_list("decl_list")) + + Optional( + Literal("=").suppress() + + ( + 
expression("value") + | ( + lbrace + + Group(delimitedList(expression))("array_values") + + rbrace + ) + ) + ) + ) + + semi + ) self.variable_decl.setParseAction(self.process_variable) # Function definition - self.typeless_function_decl = (self.declarator('decl') + - nestedExpr('{', '}').suppress()) - self.function_decl = (storage_class_spec + - self.type_spec('type') + - self.declarator('decl') + - nestedExpr('{', '}').suppress()) + self.typeless_function_decl = ( + self.declarator("decl") + nestedExpr("{", "}").suppress() + ) + self.function_decl = ( + storage_class_spec + + self.type_spec("type") + + self.declarator("decl") + + nestedExpr("{", "}").suppress() + ) self.function_decl.setParseAction(self.process_function) # Struct definition self.struct_decl = Forward() - struct_kw = (Keyword('struct') | Keyword('union')) + struct_kw = Keyword("struct") | Keyword("union") self.struct_member = ( - Group(self.variable_decl.copy().setParseAction(lambda: None)) | + Group(self.variable_decl.copy().setParseAction(lambda: None)) + | # Hack to handle bit width specification. 
- Group(Group(self.type_spec('type') + - Optional(self.declarator_list('decl_list')) + - colon + integer('bit') + semi)) | - (self.type_spec + self.declarator + - nestedExpr('{', '}')).suppress() | - (self.declarator + nestedExpr('{', '}')).suppress() + Group( + Group( + self.type_spec("type") + + Optional(self.declarator_list("decl_list")) + + colon + + integer("bit") + + semi + ) ) + | (self.type_spec + self.declarator + nestedExpr("{", "}")).suppress() + | (self.declarator + nestedExpr("{", "}")).suppress() + ) - self.decl_list = (lbrace + - Group(OneOrMore(self.struct_member))('members') + - rbrace) - self.struct_type << (struct_kw('struct_type') + - ((Optional(ident)('name') + - self.decl_list) | ident('name')) - ) + self.decl_list = ( + lbrace + Group(OneOrMore(self.struct_member))("members") + rbrace + ) + self.struct_type << ( + struct_kw("struct_type") + + ((Optional(ident)("name") + self.decl_list) | ident("name")) + ) self.struct_type.setParseAction(self.process_struct) self.struct_decl = self.struct_type + semi # Enum definition - enum_var_decl = Group(ident('name') + - Optional(Literal('=').suppress() + - expression('value'))) - - self.enum_type << (Keyword('enum') + - (Optional(ident)('name') + - lbrace + - Group(delimitedList(enum_var_decl))('members') + - Optional(comma) + rbrace | ident('name')) - ) + enum_var_decl = Group( + ident("name") + Optional(Literal("=").suppress() + expression("value")) + ) + + self.enum_type << ( + Keyword("enum") + + ( + Optional(ident)("name") + + lbrace + + Group(delimitedList(enum_var_decl))("members") + + Optional(comma) + + rbrace + | ident("name") + ) + ) self.enum_type.setParseAction(self.process_enum) self.enum_decl = self.enum_type + semi - self.parser = (self.type_decl | self.variable_decl | - self.function_decl) + self.parser = self.type_decl | self.variable_decl | self.function_decl return self.parser def process_declarator(self, decl): @@ -1252,46 +1387,49 @@ def process_declarator(self, decl): """ toks = 
[] - quals = [tuple(decl.get('first_typequal', []))] + quals = [tuple(decl.get("first_typequal", []))] name = None logger.debug("DECL: {}".format(decl)) - if 'call_conv' in decl and len(decl['call_conv']) > 0: - toks.append(decl['call_conv']) + if "call_conv" in decl and len(decl["call_conv"]) > 0: + toks.append(decl["call_conv"]) quals.append(None) - if 'ptrs' in decl and len(decl['ptrs']) > 0: - toks += ('*',) * len(decl['ptrs']) - quals += map(tuple, decl['ptrs']) + if "ptrs" in decl and len(decl["ptrs"]) > 0: + toks += ("*",) * len(decl["ptrs"]) + quals += map(tuple, decl["ptrs"]) - if 'arrays' in decl and len(decl['arrays']) > 0: - toks.extend([self.eval_expr(x)] for x in decl['arrays']) - quals += [()] * len(decl['arrays']) + if "arrays" in decl and len(decl["arrays"]) > 0: + toks.extend([self.eval_expr(x)] for x in decl["arrays"]) + quals += [()] * len(decl["arrays"]) - if 'args' in decl and len(decl['args']) > 0: - if decl['args'][0] is None: + if "args" in decl and len(decl["args"]) > 0: + if decl["args"][0] is None: toks.append(()) else: - ex = lambda x: (x[0],) if len(x)!=0 else (None,) - toks.append(tuple([self.process_type(a['type'], - a['decl'][0]) + - ex(a['val']) for a in decl['args']] - ) - ) + ex = lambda x: (x[0],) if len(x) != 0 else (None,) # noqa + toks.append( + tuple( + [ + self.process_type(a["type"], a["decl"][0]) + ex(a["val"]) + for a in decl["args"] + ] + ) + ) quals.append(()) - if 'ref' in decl: - toks.append('&') + if "ref" in decl: + toks.append("&") quals.append(()) - if 'center' in decl: - (n, t, q) = self.process_declarator(decl['center'][0]) + if "center" in decl: + (n, t, q) = self.process_declarator(decl["center"][0]) if n is not None: name = n toks.extend(t) quals = quals[:-1] + [quals[-1] + q[0]] + list(q[1:]) - if 'name' in decl: - name = decl['name'] + if "name" in decl: + name = decl["name"] return (name, toks, tuple(quals)) @@ -1323,22 +1461,23 @@ def process_type(self, typ, decl): (None, ["struct s", ((None, ['int']), 
(None, ['int', '*'])), '*']) """ - logger.debug("PROCESS TYPE/DECL: {}/{}".format(typ['name'], decl)) + logger.debug("PROCESS TYPE/DECL: {}/{}".format(typ["name"], decl)) (name, decl, quals) = self.process_declarator(decl) - pre_typequal = tuple(typ.get('pre_qual', [])) - return (name, Type(typ['name'], *decl, - type_quals=(pre_typequal + quals[0],) + quals[1:])) + pre_typequal = tuple(typ.get("pre_qual", [])) + return ( + name, + Type(typ["name"], *decl, type_quals=(pre_typequal + quals[0],) + quals[1:]), + ) - def process_enum(self, s, l, t): - """ - """ + def process_enum(self, s, line, t): + """ """ try: logger.debug("ENUM: {}".format(t)) - if t.name == '': + if t.name == "": n = 0 while True: - name = 'anon_enum{}'.format(n) - if name not in self.defs['enums']: + name = "anon_enum{}".format(n) + if name not in self.defs["enums"]: break n += 1 else: @@ -1346,48 +1485,44 @@ def process_enum(self, s, l, t): logger.debug(" name: {}".format(name)) - if name not in self.defs['enums']: + if name not in self.defs["enums"]: i = 0 enum = {} for v in t.members: - if v.value != '': + if v.value != "": try: i = self.eval_expr(v.value) except Exception: pass enum[v.name] = i - self.add_def('values', v.name, i) + self.add_def("values", v.name, i) i += 1 logger.debug(" members: {}".format(enum)) - self.add_def('enums', name, enum) - self.add_def('types', 'enum '+name, Type('enum', name)) - return ('enum ' + name) - except: + self.add_def("enums", name, enum) + self.add_def("types", "enum " + name, Type("enum", name)) + return "enum " + name + except Exception: logger.exception("Error processing enum: {}".format(t)) - def process_function(self, s, l, t): - """Build a function definition from the parsing tokens. 
- - """ + def process_function(self, s, line, t): + """Build a function definition from the parsing tokens.""" logger.debug("FUNCTION {} : {}".format(t, t.keys())) try: name, decl = self.process_type(t.type, t.decl[0]) - if len(decl) == 0 or type(decl[-1]) != tuple: - logger.error('{}'.format(t)) + if len(decl) == 0 or type(decl[-1]) is not tuple: + logger.error("{}".format(t)) mess = "Incorrect declarator type for function definition." raise DefinitionError(mess) logger.debug(" name: {}".format(name)) logger.debug(" sig: {}".format(decl)) - self.add_def('functions', name, decl.add_compatibility_hack()) + self.add_def("functions", name, decl.add_compatibility_hack()) except Exception: logger.exception("Error processing function: {}".format(t)) def packing_at(self, line): - """Return the structure packing value at the given line number. - - """ + """Return the structure packing value at the given line number.""" packing = None for p in self.pack_list[self.current_file]: if p[0] <= line: @@ -1396,21 +1531,20 @@ def packing_at(self, line): break return packing - def process_struct(self, s, l, t): - """ - """ + def process_struct(self, s, line, t): + """ """ try: str_typ = t.struct_type # struct or union # Check for extra packing rules - packing = self.packing_at(lineno(l, s)) + packing = self.packing_at(lineno(line, s)) - logger.debug('{} {} {}'.format(str_typ.upper(), t.name, t)) - if t.name == '': + logger.debug("{} {} {}".format(str_typ.upper(), t.name, t)) + if t.name == "": n = 0 while True: - sname = 'anon_{}{}'.format(str_typ, n) - if sname not in self.defs[str_typ+'s']: + sname = "anon_{}{}".format(str_typ, n) + if sname not in self.defs[str_typ + "s"]: break n += 1 else: @@ -1420,15 +1554,19 @@ def process_struct(self, s, l, t): sname = t.name[0] logger.debug(" NAME: {}".format(sname)) - if (len(t.members) > 0 or sname not in self.defs[str_typ+'s'] or - self.defs[str_typ+'s'][sname] == {}): + if ( + len(t.members) > 0 + or sname not in self.defs[str_typ + 
"s"] + or self.defs[str_typ + "s"][sname] == {} + ): logger.debug(" NEW " + str_typ.upper()) struct = [] for m in t.members: typ = m[0].type val = self.eval_expr(m[0].value) - logger.debug(" member: {}, {}, {}".format( - m, m[0].keys(), m[0].decl_list)) + logger.debug( + " member: {}, {}, {}".format(m, m[0].keys(), m[0].decl_list) + ) if len(m[0].decl_list) == 0: # anonymous member member = [None, Type(typ[0]), None] @@ -1442,21 +1580,20 @@ def process_struct(self, s, l, t): if m[0].bit: member.append(int(m[0].bit)) struct.append(tuple(member)) - logger.debug(" {} {} {} {}".format(name, decl, - val, m[0].bit)) + logger.debug( + " {} {} {} {}".format(name, decl, val, m[0].bit) + ) - str_cls = (Struct if str_typ == 'struct' else Union) - self.add_def(str_typ + 's', sname, - str_cls(*struct, pack=packing)) - self.add_def('types', str_typ+' '+sname, Type(str_typ, sname)) - return str_typ + ' ' + sname + str_cls = Struct if str_typ == "struct" else Union + self.add_def(str_typ + "s", sname, str_cls(*struct, pack=packing)) + self.add_def("types", str_typ + " " + sname, Type(str_typ, sname)) + return str_typ + " " + sname except Exception: - logger.exception('Error processing struct: {}'.format(t)) + logger.exception("Error processing struct: {}".format(t)) - def process_variable(self, s, l, t): - """ - """ + def process_variable(self, s, line, t): + """ """ logger.debug("VARIABLE: {}".format(t)) try: val = self.eval_expr(t[0]) @@ -1464,29 +1601,27 @@ def process_variable(self, s, l, t): (name, typ) = self.process_type(t[0].type, d) # This is a function prototype if type(typ[-1]) is tuple: - logger.debug(" Add function prototype: {} {} {}".format( - name, typ, val)) - self.add_def('functions', name, - typ.add_compatibility_hack()) + logger.debug( + " Add function prototype: {} {} {}".format(name, typ, val) + ) + self.add_def("functions", name, typ.add_compatibility_hack()) # This is a variable else: - logger.debug(" Add variable: {} {} {}".format(name, - typ, val)) - 
self.add_def('variables', name, (val, typ)) - self.add_def('values', name, val) + logger.debug(" Add variable: {} {} {}".format(name, typ, val)) + self.add_def("variables", name, (val, typ)) + self.add_def("values", name, val) except Exception: - logger.exception('Error processing variable: {}'.format(t)) + logger.exception("Error processing variable: {}".format(t)) - def process_typedef(self, s, l, t): - """ - """ + def process_typedef(self, s, line, t): + """ """ logger.debug("TYPE: {}".format(t)) typ = t.type for d in t.decl_list: (name, decl) = self.process_type(typ, d) logger.debug(" {} {}".format(name, decl)) - self.add_def('types', name, decl) + self.add_def("types", name, decl) # --- Utility methods @@ -1500,12 +1635,13 @@ def eval_expr(self, toks): logger.debug("Eval: {}".format(toks)) try: if isinstance(toks, str): - val = self.eval(toks, None, self.defs['values']) - elif toks.array_values != '': - val = [self.eval(x, None, self.defs['values']) - for x in toks.array_values] - elif toks.value != '': - val = self.eval(toks.value, None, self.defs['values']) + val = self.eval(toks, None, self.defs["values"]) + elif toks.array_values != "": + val = [ + self.eval(x, None, self.defs["values"]) for x in toks.array_values + ] + elif toks.value != "": + val = self.eval(toks.value, None, self.defs["values"]) else: val = None return val @@ -1517,10 +1653,9 @@ def eval_expr(self, toks): def eval(self, expr, *args): """Just eval with a little extra robustness.""" expr = expr.strip() - cast = (lparen + self.type_spec + self.abstract_declarator + - rparen).suppress() + cast = (lparen + self.type_spec + self.abstract_declarator + rparen).suppress() expr = (quotedString | number | cast).transformString(expr) - if expr == '': + if expr == "": return None return eval(expr, *args) @@ -1571,12 +1706,10 @@ def eval_type(self, typ): """ if not isinstance(typ, Type): typ = Type(*typ) - return typ.eval(self.defs['types']) + return typ.eval(self.defs["types"]) def find(self, name): 
- """Search all definitions for the given name. - - """ + """Search all definitions for the given name.""" res = [] for f in self.file_defs: fd = self.file_defs[f] @@ -1592,56 +1725,51 @@ def find(self, name): return res def find_text(self, text): - """Search all file strings for text, return matching lines. - - """ + """Search all file strings for text, return matching lines.""" res = [] for f in self.files: - l = self.files[f].split('\n') - for i in range(len(l)): - if text in l[i]: - res.append((f, i, l[i])) + lines = self.files[f].split("\n") + for i, line in enumerate(lines): + if text in line: + res.append((f, i, line)) return res # --- Basic parsing elements. + def kwl(strs): """Generate a match-first list of keywords given a list of strings.""" - return Regex(r'\b({})\b'.format('|'.join(strs))) + return Regex(r"\b({})\b".format("|".join(strs))) def flatten(lst): - res = [] - for i in lst: - if isinstance(i, (list, tuple)): - res.extend(flatten(i)) - else: - res.append(str(i)) - return res + res = [] + for i in lst: + if isinstance(i, (list, tuple)): + res.extend(flatten(i)) + else: + res.append(str(i)) + return res def recombine(tok): - """Flattens a tree of tokens and joins into one big string. - - """ + """Flattens a tree of tokens and joins into one big string.""" return " ".join(flatten(tok.asList())) -def print_parse_results(pr, depth=0, name=''): - """For debugging; pretty-prints parse result objects. - - """ - start = name + " " * (20 - len(name)) + ':' + '..' * depth +def print_parse_results(pr, depth=0, name=""): + """For debugging; pretty-prints parse result objects.""" + start = name + " " * (20 - len(name)) + ":" + ".." 
* depth if isinstance(pr, ParseResults): print(start) for i in pr: - name = '' + name = "" for k in pr.keys(): if pr[k] is i: name = k break - print_parse_results(i, depth+1, name) + print_parse_results(i, depth + 1, name) else: print(start + str(pr)) @@ -1658,24 +1786,23 @@ def print_parse_results(pr, depth=0, name=''): rparen = Literal(")").ignore(quotedString).suppress() # Numbers -int_strip = lambda t: t[0].rstrip('UL') -hexint = Regex(r'[+-]?\s*0[xX][{}]+[UL]*'.format(hexnums)).setParseAction(int_strip) -decint = Regex(r'[+-]?\s*[0-9]+[UL]*').setParseAction(int_strip) -integer = (hexint | decint) +int_strip = lambda t: t[0].rstrip("UL") # noqa +hexint = Regex(r"[+-]?\s*0[xX][{}]+[UL]*".format(hexnums)).setParseAction(int_strip) +decint = Regex(r"[+-]?\s*[0-9]+[UL]*").setParseAction(int_strip) +integer = hexint | decint # The floating regex is ugly but it is because we do not want to match # integer to it. -floating = Regex(r'[+-]?\s*((((\d(\.\d*)?)|(\.\d+))[eE][+-]?\d+)|((\d\.\d*)|(\.\d+)))') -number = (floating | integer) +floating = Regex(r"[+-]?\s*((((\d(\.\d*)?)|(\.\d+))[eE][+-]?\d+)|((\d\.\d*)|(\.\d+)))") +number = floating | integer # Miscelaneous bi_operator = oneOf("+ - / * | & || && ! ~ ^ % == != > < >= <= -> . :: << >> = ? :") uni_right_operator = oneOf("++ --") uni_left_operator = oneOf("++ -- - + * sizeof new") -wordchars = alphanums+'_$' -name = (WordStart(wordchars) + Word(alphas+"_", alphanums+"_$") + - WordEnd(wordchars)) -size_modifiers = ['short', 'long'] -sign_modifiers = ['signed', 'unsigned'] +wordchars = alphanums + "_$" +name = WordStart(wordchars) + Word(alphas + "_", alphanums + "_$") + WordEnd(wordchars) +size_modifiers = ["short", "long"] +sign_modifiers = ["signed", "unsigned"] # Syntax elements defined by _init_parser. 
expression = Forward() @@ -1689,10 +1816,18 @@ def print_parse_results(pr, depth=0, name=''): fund_type = None extra_type_list = [] -c99_int_types = ['int8_t', 'uint8_t', 'int16_t', 'uint16_t', - 'int32_t', 'uint32_t', 'int64_t', 'uint64_t'] -num_types = ['int', 'float', 'double'] + c99_int_types -nonnum_types = ['char', 'bool', 'void'] +c99_int_types = [ + "int8_t", + "uint8_t", + "int16_t", + "uint16_t", + "int32_t", + "uint32_t", + "int64_t", + "uint64_t", +] +num_types = ["int", "float", "double", *c99_int_types] +nonnum_types = ["char", "bool", "void"] # Define some common language elements when initialising. @@ -1707,61 +1842,90 @@ def _init_cparser(extra_types=None, extra_modifiers=None): # Some basic definitions extra_type_list = [] if extra_types is None else list(extra_types) base_types = nonnum_types + num_types + extra_type_list - storage_classes = ['inline', 'static', 'extern'] - qualifiers = ['const', 'volatile', 'restrict', 'near', 'far'] - - keywords = (['struct', 'enum', 'union', '__stdcall', '__cdecl'] + - qualifiers + base_types + size_modifiers + sign_modifiers) + storage_classes = ["inline", "static", "extern"] + qualifiers = ["const", "volatile", "restrict", "near", "far"] + + keywords = [ + "struct", + "enum", + "union", + "__stdcall", + "__cdecl", + *qualifiers, + *base_types, + *size_modifiers, + *sign_modifiers, + ] keyword = kwl(keywords) - wordchars = alphanums+'_$' - ident = (WordStart(wordchars) + ~keyword + - Word(alphas + "_", alphanums + "_$") + - WordEnd(wordchars)).setParseAction(lambda t: t[0]) + wordchars = alphanums + "_$" + ident = ( + WordStart(wordchars) + + ~keyword + + Word(alphas + "_", alphanums + "_$") + + WordEnd(wordchars) + ).setParseAction(lambda t: t[0]) - call_conv = Optional(Keyword('__cdecl') | - Keyword('__stdcall'))('call_conv') + call_conv = Optional(Keyword("__cdecl") | Keyword("__stdcall"))("call_conv") # Removes '__name' from all type specs. may cause trouble. 
- underscore_2_ident = (WordStart(wordchars) + ~keyword + '__' + - Word(alphanums, alphanums+"_$") + - WordEnd(wordchars)).setParseAction(lambda t: t[0]) - type_qualifier = ZeroOrMore((underscore_2_ident + Optional(nestedExpr())) | - kwl(qualifiers)) + underscore_2_ident = ( + WordStart(wordchars) + + ~keyword + + "__" + + Word(alphanums, alphanums + "_$") + + WordEnd(wordchars) + ).setParseAction(lambda t: t[0]) + type_qualifier = ZeroOrMore( + (underscore_2_ident + Optional(nestedExpr())) | kwl(qualifiers) + ) storage_class_spec = Optional(kwl(storage_classes)) if extra_modifiers: - extra_modifier = ZeroOrMore(kwl(extra_modifiers) + - Optional(nestedExpr())).suppress() + extra_modifier = ZeroOrMore( + kwl(extra_modifiers) + Optional(nestedExpr()) + ).suppress() else: extra_modifier = None # Language elements - fund_type = OneOrMore(kwl(sign_modifiers + size_modifiers + - base_types)).setParseAction(lambda t: ' '.join(t)) + fund_type = OneOrMore( + kwl(sign_modifiers + size_modifiers + base_types) + ).setParseAction(lambda t: " ".join(t)) # Is there a better way to process expressions with cast operators?? 
cast_atom = ( - ZeroOrMore(uni_left_operator) + Optional('('+ident+')').suppress() + - ((ident + '(' + Optional(delimitedList(expression)) + ')' | - ident + OneOrMore('[' + expression + ']') | - ident | number | quotedString - ) | - ('(' + expression + ')')) + - ZeroOrMore(uni_right_operator) + ZeroOrMore(uni_left_operator) + + Optional("(" + ident + ")").suppress() + + ( + ( + ident + "(" + Optional(delimitedList(expression)) + ")" + | ident + OneOrMore("[" + expression + "]") + | ident + | number + | quotedString + ) + | ("(" + expression + ")") ) + + ZeroOrMore(uni_right_operator) + ) uncast_atom = ( - ZeroOrMore(uni_left_operator) + - ((ident + '(' + Optional(delimitedList(expression)) + ')' | - ident + OneOrMore('[' + expression + ']') | - ident | number | quotedString - ) | - ('(' + expression + ')')) + - ZeroOrMore(uni_right_operator) + ZeroOrMore(uni_left_operator) + + ( + ( + ident + "(" + Optional(delimitedList(expression)) + ")" + | ident + OneOrMore("[" + expression + "]") + | ident + | number + | quotedString + ) + | ("(" + expression + ")") ) + + ZeroOrMore(uni_right_operator) + ) atom = cast_atom | uncast_atom diff --git a/pyclibrary/errors.py b/pyclibrary/errors.py index daa6fe2..5f5ae28 100644 --- a/pyclibrary/errors.py +++ b/pyclibrary/errors.py @@ -1,18 +1,16 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -"""Errors that can happen during parsing or binding. +"""Errors that can happen during parsing or binding.""" -""" class PyCLibError(Exception): - """Base exception for all PyCLibrary exceptions. 
+ """Base exception for all PyCLibrary exceptions.""" - """ pass @@ -21,4 +19,5 @@ class DefinitionError(PyCLibError): or meaningless. """ + pass diff --git a/pyclibrary/headers/update_WinDefs.py b/pyclibrary/headers/update_WinDefs.py index 1422e26..a446544 100644 --- a/pyclibrary/headers/update_WinDefs.py +++ b/pyclibrary/headers/update_WinDefs.py @@ -1,5 +1,5 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # @@ -16,15 +16,16 @@ * another Visual C/C++ compiler version is used * the object model of the parser was updated. """ + from pyclibrary.c_parser import win_defs from pyclibrary.utils import add_header_locations -SDK_DIR = r'c:\program files\microsoft sdks\windows\v6.0a\include' +SDK_DIR = r"c:\program files\microsoft sdks\windows\v6.0a\include" -if __name__ == '__main__': - print('parsing windows definitions (may take some while)') +if __name__ == "__main__": + print("parsing windows definitions (may take some while)") add_header_locations([SDK_DIR]) parser = win_defs() - print('parsed:') - for objcls in ['variables', 'functions', 'types', 'macros', 'fnmacros']: - print(' ', len(parser.defs[objcls]), objcls) + print("parsed:") + for objcls in ["variables", "functions", "types", "macros", "fnmacros"]: + print(" ", len(parser.defs[objcls]), objcls) diff --git a/pyclibrary/init.py b/pyclibrary/init.py index 00aa089..ef58c67 100644 --- a/pyclibrary/init.py +++ b/pyclibrary/init.py @@ -1,5 +1,5 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. 
# @@ -11,10 +11,12 @@ are used to declare additional types and modifiers for the parser. """ + import sys -from .c_parser import _init_cparser, CParser -from .c_library import CLibrary + from .backends import init_libraries +from .c_library import CLibrary +from .c_parser import CParser, _init_cparser def init(extra_types=None, extra_modifiers=None): @@ -29,7 +31,7 @@ def init(extra_types=None, extra_modifiers=None): """ if CParser._init or CLibrary._init: - raise RuntimeError('Can only initialise the parser once') + raise RuntimeError("Can only initialise the parser once") extra_types = extra_types if extra_types else {} extra_modifiers = extra_modifiers if extra_modifiers else [] @@ -41,10 +43,18 @@ def init(extra_types=None, extra_modifiers=None): CLibrary._init = True -WIN_TYPES = {'__int64': None} -WIN_MODIFIERS = ['__based', '__declspec', '__fastcall', - '__restrict', '__sptr', '__uptr', '__w64', - '__unaligned', '__nullterminated'] +WIN_TYPES = {"__int64": None} +WIN_MODIFIERS = [ + "__based", + "__declspec", + "__fastcall", + "__restrict", + "__sptr", + "__uptr", + "__w64", + "__unaligned", + "__nullterminated", +] def auto_init(extra_types=None, extra_modifiers=None, os=None): @@ -64,7 +74,7 @@ def auto_init(extra_types=None, extra_modifiers=None, os=None): extra_types = extra_types if extra_types else {} extra_modifiers = extra_modifiers if extra_modifiers else [] - if os == 'win32' or sys.platform == 'win32': + if os == "win32" or sys.platform == "win32": extra_types.update(WIN_TYPES) extra_modifiers += WIN_MODIFIERS diff --git a/pyclibrary/thirdparty/__init__.py b/pyclibrary/thirdparty/__init__.py index 139597f..8b13789 100644 --- a/pyclibrary/thirdparty/__init__.py +++ b/pyclibrary/thirdparty/__init__.py @@ -1,2 +1 @@ - diff --git a/pyclibrary/thirdparty/find_library.py b/pyclibrary/thirdparty/find_library.py index 749c2ac..1687c14 100644 --- a/pyclibrary/thirdparty/find_library.py +++ b/pyclibrary/thirdparty/find_library.py @@ -3,24 +3,27 @@ import os 
import sys +__all__ = ("find_library",) + # On Linux, find Library returns the name not the path. # This excerpt provides a modified find_library. # noinspection PyUnresolvedReferences -if os.name == "posix" and sys.platform.startswith('linux'): - +if os.name == "posix" and sys.platform.startswith("linux"): # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump def define_find_libary(): + import errno import re import tempfile - import errno def _findlib_gcc(name): - expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name) + expr = r"[^\(\)\s]*lib%s\.[^\(\)\s]*" % re.escape(name) fdout, ccout = tempfile.mkstemp() os.close(fdout) - cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; else CC=cc; fi;' \ - '$CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name - trace = '' + cmd = ( + "if type gcc >/dev/null 2>&1; then CC=gcc; else CC=cc; fi;" + "$CC -Wl,-t -o " + ccout + " 2>&1 -l" + name + ) + trace = "" try: f = os.popen(cmd) trace = f.read() @@ -38,12 +41,11 @@ def _findlib_gcc(name): def _findlib_ldconfig(name): # XXX assuming GLIBC's ldconfig (with option -p) - expr = r'/[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name) - res = re.search(expr, - os.popen('/sbin/ldconfig -p 2>/dev/null').read()) + expr = r"/[^\(\)\s]*lib%s\.[^\(\)\s]*" % re.escape(name) + res = re.search(expr, os.popen("/sbin/ldconfig -p 2>/dev/null").read()) if not res: # Hm, this works only for libs needed by the python executable. - cmd = 'ldd %s 2>/dev/null' % sys.executable + cmd = "ldd %s 2>/dev/null" % sys.executable res = re.search(expr, os.popen(cmd).read()) if not res: return None diff --git a/pyclibrary/utils.py b/pyclibrary/utils.py index 1223fe3..b89f0e2 100644 --- a/pyclibrary/utils.py +++ b/pyclibrary/utils.py @@ -1,5 +1,5 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. 
# # Distributed under the terms of the MIT/X11 license. # @@ -16,29 +16,28 @@ find_library : Find the path to a shared library from its name. """ -import os -import sys -import logging + import io +import logging +import os import struct import subprocess +import sys from .thirdparty.find_library import find_library as find_lib logger = logging.getLogger(__name__) -HEADER_DIRS = [os.path.join(os.path.dirname(__file__), 'headers')] +HEADER_DIRS = [os.path.join(os.path.dirname(__file__), "headers")] def add_header_locations(dir_list): - """Add directories in which to look for header files. - - """ + """Add directories in which to look for header files.""" dirs = [d for d in dir_list if os.path.isdir(d)] rejected = [d for d in dir_list if d not in dirs] if rejected: - msg = 'The following directories are invalid: {}' + msg = "The following directories are invalid: {}" logging.warning(msg.format(rejected)) HEADER_DIRS.extend(dirs) @@ -73,16 +72,21 @@ def find_header(h_name, dirs=None): else: dirs = HEADER_DIRS[::-1] - if sys.platform == 'win32': + if sys.platform == "win32": pass - if sys.platform == 'darwin': - dirs.extend(('/usr/local/include', '/usr/include', - '/System/Library/Frameworks', '/Library/Frameworks')) + if sys.platform == "darwin": + dirs.extend( + ( + "/usr/local/include", + "/usr/include", + "/System/Library/Frameworks", + "/Library/Frameworks", + ) + ) - if sys.platform == 'linux2': - dirs.extend(('/usr/local/include', '/usr/target/include', - '/usr/include')) + if sys.platform == "linux2": + dirs.extend(("/usr/local/include", "/usr/target/include", "/usr/include")) for d in dirs: path = os.path.join(d, h_name) @@ -96,13 +100,11 @@ def find_header(h_name, dirs=None): def add_library_locations(dir_list): - """Add directories in which to look for libraries. 
- - """ + """Add directories in which to look for libraries.""" dirs = [d for d in dir_list if os.path.isdir(d)] rejected = [d for d in dir_list if d not in dirs] if rejected: - msg = 'The following directories are invalid: {}' + msg = "The following directories are invalid: {}" logging.warning(msg.format(rejected)) LIBRARY_DIRS.extend(dirs) @@ -151,12 +153,12 @@ def find_library(name, dirs=None): # --- Private API ------------------------------------------------------------- -class LibraryPath(str): +class LibraryPath(str): #: Architectural information (32, ) or (64, ) or (32, 64) _arch = None - def __new__(cls, path, found_by='auto'): + def __new__(cls, path, found_by="auto"): obj = super(LibraryPath, cls).__new__(cls, path) obj.path = path obj.found_by = found_by @@ -168,112 +170,112 @@ def arch(self): if self._arch is None: try: self._arch = get_arch(self.path) - except: - self._arch = tuple() + except Exception: + self._arch = () return self._arch @property def is_32bit(self): if not self.arch: - return 'n/a' + return "n/a" return 32 in self.arch @property def is_64bit(self): if not self.arch: - return 'n/a' + return "n/a" return 64 in self.arch @property def bitness(self): if not self.arch: - return 'n/a' - return ', '.join(str(a) for a in self.arch) + return "n/a" + return ", ".join(str(a) for a in self.arch) def get_arch(filename): this_platform = sys.platform - if this_platform.startswith('win'): + if this_platform.startswith("win"): machine_type = get_shared_library_arch(filename) - if machine_type == 'I386': - return 32, - elif machine_type in ('IA64', 'AMD64'): - return 64, + if machine_type == "I386": + return (32,) + elif machine_type in ("IA64", "AMD64"): + return (64,) else: return () - elif this_platform not in ('linux2', 'linux3', 'linux', 'darwin'): - raise OSError('') + elif this_platform not in ("linux2", "linux3", "linux", "darwin"): + raise OSError("") out = check_output(["file", filename], stderr=subprocess.STDOUT) - out = 
out.decode('ascii') + out = out.decode("ascii") ret = [] - if this_platform.startswith('linux'): - if '32-bit' in out: + if this_platform.startswith("linux"): + if "32-bit" in out: ret.append(32) - if '64-bit' in out: + if "64-bit" in out: ret.append(64) - elif this_platform == 'darwin': - if '(for architecture i386)' in out: + elif this_platform == "darwin": + if "(for architecture i386)" in out: ret.append(32) - if '(for architecture x86_64)' in out: + if "(for architecture x86_64)" in out: ret.append(64) return tuple(ret) machine_types = { - 0: 'UNKNOWN', - 0x014c: 'I386', - 0x0162: 'R3000', - 0x0166: 'R4000', - 0x0168: 'R10000', - 0x0169: 'WCEMIPSV2', - 0x0184: 'ALPHA', - 0x01a2: 'SH3', - 0x01a3: 'SH3DSP', - 0x01a4: 'SH3E', - 0x01a6: 'SH4', - 0x01a8: 'SH5', - 0x01c0: 'ARM', - 0x01c2: 'THUMB', - 0x01c4: 'ARMNT', - 0x01d3: 'AM33', - 0x01f0: 'POWERPC', - 0x01f1: 'POWERPCFP', - 0x0200: 'IA64', - 0x0266: 'MIPS16', - 0x0284: 'ALPHA64', - 0x0366: 'MIPSFPU', - 0x0466: 'MIPSFPU16', - 0x0520: 'TRICORE', - 0x0cef: 'CEF', - 0x0ebc: 'EBC', - 0x8664: 'AMD64', - 0x9041: 'M32R', - 0xc0ee: 'CEE', + 0: "UNKNOWN", + 0x014C: "I386", + 0x0162: "R3000", + 0x0166: "R4000", + 0x0168: "R10000", + 0x0169: "WCEMIPSV2", + 0x0184: "ALPHA", + 0x01A2: "SH3", + 0x01A3: "SH3DSP", + 0x01A4: "SH3E", + 0x01A6: "SH4", + 0x01A8: "SH5", + 0x01C0: "ARM", + 0x01C2: "THUMB", + 0x01C4: "ARMNT", + 0x01D3: "AM33", + 0x01F0: "POWERPC", + 0x01F1: "POWERPCFP", + 0x0200: "IA64", + 0x0266: "MIPS16", + 0x0284: "ALPHA64", + 0x0366: "MIPSFPU", + 0x0466: "MIPSFPU16", + 0x0520: "TRICORE", + 0x0CEF: "CEF", + 0x0EBC: "EBC", + 0x8664: "AMD64", + 0x9041: "M32R", + 0xC0EE: "CEE", } def get_shared_library_arch(filename): - with io.open(filename, 'rb') as fp: + with io.open(filename, "rb") as fp: dos_headers = fp.read(64) fp.read(4) - magic, skip, offset = struct.unpack(str('2s58sl'), dos_headers) + magic, skip, offset = struct.unpack(str("2s58sl"), dos_headers) - if magic != b'MZ': - raise Exception('Not an executable') 
+ if magic != b"MZ": + raise Exception("Not an executable") fp.seek(offset, io.SEEK_SET) pe_header = fp.read(6) - sig, skip, machine = struct.unpack(str('2s2sH'), pe_header) + sig, skip, machine = struct.unpack(str("2s2sH"), pe_header) - if sig != b'PE': - raise Exception('Not a PE executable') + if sig != b"PE": + raise Exception("Not a PE executable") - return machine_types.get(machine, 'UNKNOWN') + return machine_types.get(machine, "UNKNOWN") def check_output(*popenargs, **kwargs): diff --git a/pyclibrary/version.py b/pyclibrary/version.py deleted file mode 100644 index 457fdb5..0000000 --- a/pyclibrary/version.py +++ /dev/null @@ -1,37 +0,0 @@ -# ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. -# -# Distributed under the terms of the MIT/X11 license. -# -# The full license is in the file LICENCE, distributed with this software. -# ----------------------------------------------------------------------------- -"""The version information for this release of PyCLibrary. - -""" -from collections import namedtuple - -# The major release number. Differences in the major number indicate -# possibly large differences in API. -MAJOR = 0 - -# The minor release number. Differences in the minor number indicate -# possibly small differences in the API, but these changes will come -# backwards compatibility support when possible. Minor releases are -# typically used for large feature additions. -MINOR = 2 - -# The micro release number. The micro release number is incremented -# for bug fix releases and small feature additions. -MICRO = 2 - -# The status indicate if this is a development or pre-release version -STATUS = '' - -#: A namedtuple of the version info for the current release. -version_info = namedtuple('version_info', 'major minor micro status') -version_info = version_info(MAJOR, MINOR, MICRO, STATUS) -# Remove everything but the 'version_info' from this module. 
-del namedtuple, MAJOR, MINOR, MICRO, STATUS - -__version__ = ('{0}.{1}.{2}'.format(*version_info) if not version_info.status - else '{0}.{1}.{2}.{3}'.format(*version_info)) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ab7d663 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,117 @@ +# ----------------------------------------------------------------------------- +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. +# +# Distributed under the terms of the MIT/X11 license. +# +# The full license is in the file LICENCE, distributed with this software. +# ----------------------------------------------------------------------------- + +[project] + name = "pyclibrary" + description = "C binding automation" + readme = "README.rst" + requires-python = ">=3.10" + license = { file = "LICENSE" } + authors = [{ name = "Matthieu C. Dartiailh", email = "m.dartiailh@gmail.com" }] + classifiers = [ + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python Modules", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: Implementation :: CPython", + ] + dependencies = ["pyparsing>=2.3.1,<4"] + dynamic = ["version"] + + [project.urls] + homepage = "https://github.com/MatthieuDartiailh/pyclibrary" + documentation = "https://pyclibrary.readthedocs.io/en/latest/" + repository = "https://github.com/MatthieuDartiailh/pyclibrary" + changelog = "https://github.com/MatthieuDartiailh/pyclibrary/blob/main/CHANGES" + +[build-system] + requires = ["setuptools>=61.2", "wheel", "setuptools_scm[toml]>=3.4.3"] + build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] + write_to = "pyclibrary/version.py" + write_to_template = 
""" +# ----------------------------------------------------------------------------- +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. +# +# Distributed under the terms of the MIT/X11 license. +# +# The full license is in the file LICENCE, distributed with this software. +# ----------------------------------------------------------------------------- +# This file is auto-generated by setuptools-scm do NOT edit it. + +from collections import namedtuple + +#: A namedtuple of the version info for the current release. +_version_info = namedtuple("_version_info", "major minor micro status") + +parts = "{version}".split(".", 3) +version_info = _version_info( + int(parts[0]), + int(parts[1]), + int(parts[2]), + parts[3] if len(parts) == 4 else "", +) + +# Remove everything but the 'version_info' from this module. +del namedtuple, _version_info, parts + +__version__ = "{version}" +""" + +[tool.ruff] + src = ["src"] + line-length = 88 + + [tool.ruff.lint] + select = ["C", "E", "F", "W", "I", "C90", "RUF"] + extend-ignore = ["E501", "RUF012"] + + [tool.ruff.lint.isort] + combine-as-imports = true + known-first-party = ["atom"] + + [tool.ruff.lint.mccabe] + max-complexity = 20 + +[tool.mypy] + follow_imports = "normal" + strict_optional = true + +[tool.pytest.ini_options] + minversion = "6.0" + +[tool.coverage] + [tool.coverage.run] + branch = true + source = ["atom"] + + [tool.coverage.report] + # Regexes for lines to exclude from consideration + exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", + + # Don't complain if tests don't hit defensive assertion code: + "raise NotImplementedError", + "pass", + + # Don't complain about abstract methods, they aren't run: + "@(abc\\.)?abstractmethod", + + # Don't complain about type checking + "if TYPE_CHECKING:", + + # Don't complain about ellipsis in overload + "\\.\\.\\.", + ] diff --git a/setup.py b/setup.py deleted file mode 100644 index 42c15fe..0000000 --- a/setup.py +++ 
/dev/null @@ -1,50 +0,0 @@ -#!/usr/bin/env python -from setuptools import setup - -import os.path - -version_py = os.path.join(os.path.dirname(__file__), 'pyclibrary', - 'version.py') -with open(version_py, 'r') as f: - d = dict() - exec(f.read(), d) - version = d['__version__'] - -setup( - name = 'pyclibrary', - description = 'C binding automation', - version = version, - long_description = '''PyCLibrary includes 1) a pure-python C parser and -2) an automation library that uses C header file definitions to simplify the -use of c bindings. The C parser currently processes all macros, typedefs, -structs, unions, enums, function prototypes, and global variable declarations, -and can evaluate typedefs down to their fundamental C types + -pointers/arrays/function signatures. Pyclibrary can automatically build c -structs/unions and perform type conversions when calling functions via -cdll/windll.''', - long_description_content_type="text/x-rst", - author = 'PyCLibrary Developers', - author_email = 'm.dartiailh@gmail.com', - url = 'http://github.com/MatthieuDartiailh/pyclibrary', - download_url = 'http://github.com/MatthieuDartiailh/pyclibrary/tarball/master', - keywords = 'C binding automation', - license = 'MIT', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Environment :: Console', - 'License :: OSI Approved :: MIT License', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10' - ], - zip_safe = False, - packages = ['pyclibrary', 'pyclibrary.backends', 'pyclibrary.thirdparty'], - package_data = {'pyclibrary': ['headers/*']}, - install_requires = ['pyparsing>=2.3.1,<4'], -) - diff --git a/tests/backends/__init__.py b/tests/backends/__init__.py index 
139597f..8b13789 100644 --- a/tests/backends/__init__.py +++ b/tests/backends/__init__.py @@ -1,2 +1 @@ - diff --git a/tests/backends/test_ctypes.py b/tests/backends/test_ctypes.py index 942c81b..6e27d59 100644 --- a/tests/backends/test_ctypes.py +++ b/tests/backends/test_ctypes.py @@ -1,4 +1,3 @@ - # ----------------------------------------------------------------------------- # Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. # @@ -6,21 +5,20 @@ # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -""" Test of the generic wrapper library capabilities. +"""Test of the generic wrapper library capabilities. This actually needs ctypes to make sense but it affects all wrappers. """ -import os -import ctypes + import _ctypes_test +import ctypes +import os import pytest - -from pyclibrary.utils import (add_header_locations, HEADER_DIRS) -from pyclibrary.c_library import CLibrary, cast_to, build_array +from pyclibrary.c_library import CLibrary, build_array, cast_to from pyclibrary.c_parser import CParser - +from pyclibrary.utils import HEADER_DIRS, add_header_locations BACKUPS = () @@ -28,8 +26,7 @@ def setup_module(): global BACKUPS BACKUPS = HEADER_DIRS[:] - add_header_locations([os.path.join(os.path.dirname(__file__), - '..', 'headers')]) + add_header_locations([os.path.join(os.path.dirname(__file__), "..", "headers")]) def teardown_module(): @@ -39,24 +36,23 @@ def teardown_module(): class TestCTypesCLibrary(object): - """Test the ctypes wrapper functionality. 
+ """Test the ctypes wrapper functionality.""" - """ def setup_method(self): - self.library = CLibrary(_ctypes_test.__file__, ['ctypes_test.h']) + self.library = CLibrary(_ctypes_test.__file__, ["ctypes_test.h"]) def test_call(self): - point_cls = self.library('structs', 'tagpoint') + point_cls = self.library("structs", "tagpoint") point_cls(x=1, y=2) with pytest.raises(KeyError): - self.library('r', 't') + self.library("r", "t") def test_getattr(self): assert self.library.an_integer == 42 def test_getitem(self): - assert self.library['values']['an_integer'] == 42 + assert self.library["values"]["an_integer"] == 42 def test_make_struct(self): self.library.BITS @@ -74,12 +70,12 @@ def test_function_call2(self): _, (res,) = self.library.getSPAMANDEGGS() egg = res[0] # we get a pointer of pointer. # Needed because this is a byte array in python 3 - assert egg.name.decode('utf8') == 'first egg' + assert egg.name.decode("utf8") == "first egg" assert egg.num_spams == 1 - assert egg.spams[0].name.decode('utf8') == 'name1' - assert egg.spams[0].value.decode('utf8') == 'value1' - assert egg.spams[1].name.decode('utf8') == 'name2' - assert egg.spams[1].value.decode('utf8') == 'value2' + assert egg.spams[0].name.decode("utf8") == "name1" + assert egg.spams[0].value.decode("utf8") == "value1" + assert egg.spams[1].name.decode("utf8") == "name2" + assert egg.spams[1].value.decode("utf8") == "value2" def test_function_call3(self): # Test calling a function with an argument and a missing pointer. @@ -95,39 +91,35 @@ def test_function_call4(self): Will fail if restype is not properly set. """ - test_str = 'Test'.encode('utf-8') + test_str = "Test".encode("utf-8") copy = self.library.my_strdup(test_str)() assert copy == test_str assert copy is not test_str def test_cast_to(self): - """Test casting. 
- - """ + """Test casting.""" a = 10 - assert (cast_to(self.library, a, ctypes.c_void_p).value == - ctypes.cast(a, ctypes.c_void_p).value) + assert ( + cast_to(self.library, a, ctypes.c_void_p).value + == ctypes.cast(a, ctypes.c_void_p).value + ) def test_build_array(self): - """Test array building. - - """ + """Test array building.""" pyc_array = build_array(self.library, ctypes.c_void_p, 2, [1, 2]) - c_array = (ctypes.c_void_p*2)(1, 2) - assert type(pyc_array) == type(c_array) + c_array = (ctypes.c_void_p * 2)(1, 2) + assert type(pyc_array) is type(c_array) assert pyc_array[0] == c_array[0] class TestCachedCTypesLibrary(TestCTypesCLibrary): - """Run test on cached headers. + """Run test on cached headers.""" - """ def setup(self): - path = os.path.join(os.path.dirname(__file__), 'test.pyclibcache') - parser = CParser(['ctypes_test.h']) + path = os.path.join(os.path.dirname(__file__), "test.pyclibcache") + parser = CParser(["ctypes_test.h"]) parser.write_cache(path) - self.library = CLibrary(_ctypes_test.__file__, ['ctypes_test.h'], - cache=path) + self.library = CLibrary(_ctypes_test.__file__, ["ctypes_test.h"], cache=path) def teardown(self): - type(self.library).libs.clear() \ No newline at end of file + type(self.library).libs.clear() diff --git a/tests/test_init.py b/tests/test_init.py index 799d1ea..e493dd4 100644 --- a/tests/test_init.py +++ b/tests/test_init.py @@ -5,14 +5,12 @@ # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -"""Test init mechanisms. 
+"""Test init mechanisms.""" -""" -import pytest - -from pyclibrary.init import init, auto_init -import pyclibrary.c_parser as cp import pyclibrary.c_library as cl +import pyclibrary.c_parser as cp +import pytest +from pyclibrary.init import auto_init, init @pytest.fixture @@ -23,27 +21,26 @@ def init_fixture(): def test_init(init_fixture): - - init({'new_type': int}, ['__modifier']) - assert 'new_type' in cp.base_types + init({"new_type": int}, ["__modifier"]) + assert "new_type" in cp.base_types assert cp.extra_modifier is not None from pyclibrary.backends.ctypes import CTypesCLibrary - assert 'new_type' in CTypesCLibrary._types_ + assert "new_type" in CTypesCLibrary._types_ -def test_reinit_attempt(init_fixture): +def test_reinit_attempt(init_fixture): init() with pytest.raises(RuntimeError): init() def test_auto_init(init_fixture): - - auto_init({'new_type': int}, ['__modifier'], 'win32') - assert 'new_type' in cp.base_types - assert '__int64' in cp.base_types + auto_init({"new_type": int}, ["__modifier"], "win32") + assert "new_type" in cp.base_types + assert "__int64" in cp.base_types assert cp.extra_modifier is not None from pyclibrary.backends.ctypes import CTypesCLibrary - assert 'new_type' in CTypesCLibrary._types_ - assert '__int64' in CTypesCLibrary._types_ + + assert "new_type" in CTypesCLibrary._types_ + assert "__int64" in CTypesCLibrary._types_ diff --git a/tests/test_library.py b/tests/test_library.py index eb22075..d2c1634 100644 --- a/tests/test_library.py +++ b/tests/test_library.py @@ -1,25 +1,28 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # # The full license is in the file LICENCE, distributed with this software. 
# ----------------------------------------------------------------------------- -""" Test of the generic wrapper library capabilities. +"""Test of the generic wrapper library capabilities. This actually needs ctypes to make sense but it affects all wrappers. """ -import os + import _ctypes_test import ctypes +import os import pytest - -from pyclibrary.utils import (add_library_locations, add_header_locations, - LIBRARY_DIRS, HEADER_DIRS) from pyclibrary.c_library import CLibrary - +from pyclibrary.utils import ( + HEADER_DIRS, + LIBRARY_DIRS, + add_header_locations, + add_library_locations, +) HEADERS_DIR = os.path.dirname(__file__) BACKUPS = () @@ -28,8 +31,7 @@ def setup_module(): global BACKUPS BACKUPS = HEADER_DIRS[:] - add_header_locations([os.path.join(os.path.dirname(__file__), - 'headers')]) + add_header_locations([os.path.join(os.path.dirname(__file__), "headers")]) def teardown_module(): @@ -48,41 +50,32 @@ def library_location_fixture(): class TestCLibrary(object): - """Test the basic CLibrary object functionalities. 
+ """Test the basic CLibrary object functionalities.""" - """ def test_accessing_library_by_name(self, library_location_fixture): - - library = CLibrary(os.path.basename(_ctypes_test.__file__), - ['ctypes_test.h']) + library = CLibrary(os.path.basename(_ctypes_test.__file__), ["ctypes_test.h"]) assert library._lib_ def test_accessing_library_by_path(self): - - library = CLibrary(_ctypes_test.__file__, ['ctypes_test.h']) + library = CLibrary(_ctypes_test.__file__, ["ctypes_test.h"]) assert library._lib_ def test_accessing_library_by_object(self): - + CLibrary.libs.clear() lib = ctypes.CDLL(_ctypes_test.__file__) - library = CLibrary(lib, ['ctypes_test.h']) + library = CLibrary(lib, ["ctypes_test.h"]) assert library._lib_ is lib def test_already_opened_library(self): - lib = ctypes.CDLL(_ctypes_test.__file__) - library = CLibrary(lib, ['ctypes_test.h']) - assert library is CLibrary(_ctypes_test.__file__, ['ctypes_test.h']) + library = CLibrary(lib, ["ctypes_test.h"]) + assert library is CLibrary(_ctypes_test.__file__, ["ctypes_test.h"]) def test_accessing_prefixed_value(self): pass def test_function_pretty_signature(): - """Test building the pretty signature of a function. - - """ - library = CLibrary(os.path.basename(_ctypes_test.__file__), - ['ctypes_test.h']) + """Test building the pretty signature of a function.""" + library = CLibrary(os.path.basename(_ctypes_test.__file__), ["ctypes_test.h"]) library.my_strdup.pretty_signature() - diff --git a/tests/test_parser.py b/tests/test_parser.py index 0cfbba3..619d942 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -5,121 +5,127 @@ # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -"""Test parser functionalities. 
+"""Test parser functionalities.""" -""" import os import sys from pickle import dumps, loads -import pytest - -from pyclibrary.c_parser import CParser, Type, Struct, Union, Enum import pyclibrary.utils +import pytest +from pyclibrary.c_parser import CParser, Enum, Struct, Type, Union - -H_DIRECTORY = os.path.join(os.path.dirname(__file__), 'headers') +H_DIRECTORY = os.path.join(os.path.dirname(__file__), "headers") def compare_lines(lines, lines2): - """Compare lines striped from whitespaces characters. - - """ - for l, l_test in zip(lines, lines2): - assert l.strip() == l_test.strip() + """Compare lines striped from whitespaces characters.""" + for line, line_test in zip(lines, lines2): + assert line.strip() == line_test.strip() class TestType(object): - def test_init(self): with pytest.raises(ValueError): - Type('int', '*', type_quals=(('volatile',),)) + Type("int", "*", type_quals=(("volatile",),)) def test_tuple_equality(self): - assert Type('int') == ('int',) - assert ('int',) == Type('int') + assert Type("int") == ("int",) + assert ("int",) == Type("int") - assert Type('int', '*', type_quals=[['const'], ['volatile']]) == \ - ('int', '*') + assert Type("int", "*", type_quals=[["const"], ["volatile"]]) == ("int", "*") assert issubclass(Type, tuple) def test_Type_equality(self): - assert Type('int', '*', type_quals=(('const',), ('volatile',))) == \ - Type('int', '*', type_quals=(('const',), ('volatile',))) - assert Type('int', '*', type_quals=(('const',), ())) != \ - Type('int', '*', type_quals=(('const',), ('volatile',))) + assert Type("int", "*", type_quals=(("const",), ("volatile",))) == Type( + "int", "*", type_quals=(("const",), ("volatile",)) + ) + assert Type("int", "*", type_quals=(("const",), ())) != Type( + "int", "*", type_quals=(("const",), ("volatile",)) + ) def test_getters(self): - assert Type('int', '*').type_spec == 'int' - assert Type('int', '*', [1]).declarators == ('*', [1]) - assert Type('int', '*', type_quals=(('volatile',), 
())).type_quals == \ - (('volatile',), ()) + assert Type("int", "*").type_spec == "int" + assert Type("int", "*", [1]).declarators == ("*", [1]) + assert Type("int", "*", type_quals=(("volatile",), ())).type_quals == ( + ("volatile",), + (), + ) def test_is_fund_type(self): - assert not Type('custom_typedef').is_fund_type() + assert not Type("custom_typedef").is_fund_type() - assert Type('int').is_fund_type() - assert Type('int', '*').is_fund_type() - assert Type('int', [1]).is_fund_type() - assert Type('int', ()).is_fund_type() - assert Type('int', type_quals=(('volatile',))).is_fund_type() + assert Type("int").is_fund_type() + assert Type("int", "*").is_fund_type() + assert Type("int", [1]).is_fund_type() + assert Type("int", ()).is_fund_type() + assert Type("int", type_quals=(("volatile",))).is_fund_type() - assert Type('unsigned').is_fund_type() - assert Type('short').is_fund_type() - assert Type('unsigned short int').is_fund_type() - assert Type('struct test').is_fund_type() + assert Type("unsigned").is_fund_type() + assert Type("short").is_fund_type() + assert Type("unsigned short int").is_fund_type() + assert Type("struct test").is_fund_type() def test_eval(self): type_map = { - 'tq_parent_type': Type('int', '*', - type_quals=(('__tq1',), ('__tq2',))), - 'parent_type': Type('int', '*', '*', [2]), - 'child_type': Type('parent_type', '*', [3]) } - - assert Type('parent_type', '*', [1]).eval(type_map) == \ - Type('int', '*', '*', [2], '*', [1]) - assert Type('child_type', (), '*').eval(type_map) == \ - Type('int', '*', '*', [2], '*', [3], (), '*') - assert Type('tq_parent_type', [1], - type_quals=(('__tq3',), ('__tq4',))).eval(type_map) == \ - Type('int', '*', [1], - type_quals=(('__tq1',), ('__tq2', '__tq3'), ('__tq4',))) + "tq_parent_type": Type("int", "*", type_quals=(("__tq1",), ("__tq2",))), + "parent_type": Type("int", "*", "*", [2]), + "child_type": Type("parent_type", "*", [3]), + } + + assert Type("parent_type", "*", [1]).eval(type_map) == Type( + 
"int", "*", "*", [2], "*", [1] + ) + assert Type("child_type", (), "*").eval(type_map) == Type( + "int", "*", "*", [2], "*", [3], (), "*" + ) + assert Type("tq_parent_type", [1], type_quals=(("__tq3",), ("__tq4",))).eval( + type_map + ) == Type( + "int", "*", [1], type_quals=(("__tq1",), ("__tq2", "__tq3"), ("__tq4",)) + ) def test_compatibility_hack(self): - assert Type('int', '*', ()).add_compatibility_hack() == \ - Type(Type('int', '*'), ()) - assert Type('int', '*', (), '*').add_compatibility_hack() == \ - Type('int', '*', (), '*') - assert Type('int', (), type_quals=(('const',), ('__interrupt',)))\ - .add_compatibility_hack() == \ - Type(Type('int', type_quals=(('const',),)), (), - type_quals=((), ('__interrupt',),)) - - assert Type(Type('int', '*'), ()).remove_compatibility_hack() == \ - Type('int', '*', ()) - assert Type('int', '*', ()).remove_compatibility_hack() == \ - Type('int', '*', ()) + assert Type("int", "*", ()).add_compatibility_hack() == Type( + Type("int", "*"), () + ) + assert Type("int", "*", (), "*").add_compatibility_hack() == Type( + "int", "*", (), "*" + ) + assert Type( + "int", (), type_quals=(("const",), ("__interrupt",)) + ).add_compatibility_hack() == Type( + Type("int", type_quals=(("const",),)), + (), + type_quals=( + (), + ("__interrupt",), + ), + ) + + assert Type(Type("int", "*"), ()).remove_compatibility_hack() == Type( + "int", "*", () + ) + assert Type("int", "*", ()).remove_compatibility_hack() == Type("int", "*", ()) def test_repr(self): - assert repr(Type('int', '*')) == "Type({!r}, {!r})".format('int', '*') - assert repr(Type('int', '*', type_quals=(('volatile',), ()))) == \ - ('Type({!r}, {!r}, type_quals=(({!r},), ()))' - .format('int', '*', 'volatile')) + assert repr(Type("int", "*")) == "Type({!r}, {!r})".format("int", "*") + assert repr(Type("int", "*", type_quals=(("volatile",), ()))) == ( + "Type({!r}, {!r}, type_quals=(({!r},), ()))".format("int", "*", "volatile") + ) def test_persistence(self): - t = 
Type('tq_parent_type', [1], type_quals=(('__tq3',), ('__tq4',))) + t = Type("tq_parent_type", [1], type_quals=(("__tq3",), ("__tq4",))) assert repr(t) == repr(loads(dumps(t))) class TestStructUnion(object): - - TEST_MEMBERS = [ ('a', Type('int'), None), - ('b', Type('char', '*'), None)] + TEST_MEMBERS = [("a", Type("int"), None), ("b", Type("char", "*"), None)] def test_init(self): assert Struct().members == [] - assert Struct().pack == None + assert Struct().pack is None assert Struct(*self.TEST_MEMBERS).members == self.TEST_MEMBERS assert Struct(pack=2).pack == 2 @@ -127,315 +133,341 @@ def test_init(self): def test_list_equality(self): assert Struct(*self.TEST_MEMBERS, pack=2) == { - 'members': [ ('a', Type('int'), None), - ('b', Type('char', '*'), None)], - 'pack': 2 } + "members": [("a", Type("int"), None), ("b", Type("char", "*"), None)], + "pack": 2, + } assert issubclass(Struct, dict) - assert Union(*self.TEST_MEMBERS)['members'] == self.TEST_MEMBERS + assert Union(*self.TEST_MEMBERS)["members"] == self.TEST_MEMBERS def test_repr(self): - assert repr(Struct()) == 'Struct()' - assert repr(Struct(*self.TEST_MEMBERS, pack=2)) == \ - ( 'Struct(' + repr(self.TEST_MEMBERS[0]) + ', ' + - repr(self.TEST_MEMBERS[1]) + ', pack=2)' ) - assert repr(Union()) == 'Union()' + assert repr(Struct()) == "Struct()" + assert repr(Struct(*self.TEST_MEMBERS, pack=2)) == ( + "Struct(" + + repr(self.TEST_MEMBERS[0]) + + ", " + + repr(self.TEST_MEMBERS[1]) + + ", pack=2)" + ) + assert repr(Union()) == "Union()" class TestEnum(object): - def test_dict_equality(self): - assert Enum(a=1, b=2) == {'a':1, 'b':2} + assert Enum(a=1, b=2) == {"a": 1, "b": 2} assert issubclass(Enum, dict) - def test_repr(self): - assert repr(Enum(a=1, b=2)) == 'Enum(a=1, b=2)' + assert repr(Enum(a=1, b=2)) == "Enum(a=1, b=2)" class TestFileHandling(object): - """Test parser basic file operations. 
- - """ + """Test parser basic file operations.""" - h_dir = os.path.join(H_DIRECTORY, 'file_handling') + h_dir = os.path.join(H_DIRECTORY, "file_handling") def setup_method(self): self.parser = CParser(process_all=False) def test_init(self): - parser = CParser(os.path.join(self.h_dir, 'replace.h')) + parser = CParser(os.path.join(self.h_dir, "replace.h")) assert parser.files is not None def test_find_file(self): - saved_headers = pyclibrary.utils.HEADER_DIRS try: pyclibrary.utils.add_header_locations([self.h_dir]) assert self.h_dir in pyclibrary.utils.HEADER_DIRS - assert self.parser.find_headers(['replace.h']) == \ - [os.path.join(self.h_dir, 'replace.h')] + assert self.parser.find_headers(["replace.h"]) == [ + os.path.join(self.h_dir, "replace.h") + ] finally: pyclibrary.utils.HEADER_DIRS = saved_headers - abs_hdr_path = os.path.join(self.h_dir, 'replace.h') + abs_hdr_path = os.path.join(self.h_dir, "replace.h") assert self.parser.find_headers([abs_hdr_path]) == [abs_hdr_path] - abs_hdr_path2 = os.path.join(self.h_dir, 'c_comments.h') + abs_hdr_path2 = os.path.join(self.h_dir, "c_comments.h") assert len(self.parser.find_headers([abs_hdr_path, abs_hdr_path2])) == 2 - def test_load_file(self): - - path = os.path.join(self.h_dir, 'replace.h') + path = os.path.join(self.h_dir, "replace.h") assert self.parser.load_file(path) assert self.parser.files[path] is not None assert self.parser.file_order == [path] - assert self.parser.init_opts['replace']['replace.h'] is None - assert self.parser.init_opts['files'] == ['replace.h'] + assert self.parser.init_opts["replace"]["replace.h"] is None + assert self.parser.init_opts["files"] == ["replace.h"] def test_load_file_and_replace(self): - - path = os.path.join(self.h_dir, 'replace.h') - rep = {'{placeholder}': '1', 'placeholder2': '2'} + path = os.path.join(self.h_dir, "replace.h") + rep = {"{placeholder}": "1", "placeholder2": "2"} assert self.parser.load_file(path, rep) - lines = self.parser.files[path].split('\n') - 
assert lines[3] == '# define MACRO 1' - assert lines[6] == ' # define MACRO2 2' + lines = self.parser.files[path].split("\n") + assert lines[3] == "# define MACRO 1" + assert lines[6] == " # define MACRO2 2" - lines[3] = '# define MACRO {placeholder}' - lines[6] = ' # define MACRO2 placeholder2' + lines[3] = "# define MACRO {placeholder}" + lines[6] = " # define MACRO2 placeholder2" with open(path) as f: compare_lines(lines, f.readlines()) assert self.parser.file_order == [path] - assert self.parser.init_opts['replace']['replace.h'] == rep - assert self.parser.init_opts['files'] == ['replace.h'] + assert self.parser.init_opts["replace"]["replace.h"] == rep + assert self.parser.init_opts["files"] == ["replace.h"] def test_load_non_existing_file(self): - - path = os.path.join(self.h_dir, 'no.h') + path = os.path.join(self.h_dir, "no.h") assert not self.parser.load_file(path) assert self.parser.files[path] is None def test_removing_c_comments(self): - - path = os.path.join(self.h_dir, 'c_comments.h') + path = os.path.join(self.h_dir, "c_comments.h") self.parser.load_file(path) self.parser.remove_comments(path) - with open(os.path.join(self.h_dir, 'c_comments_removed.h'), 'r') as f: - compare_lines(self.parser.files[path].split('\n'), f.readlines()) + with open(os.path.join(self.h_dir, "c_comments_removed.h"), "r") as f: + compare_lines(self.parser.files[path].split("\n"), f.readlines()) def test_removing_cpp_comments(self): - - path = os.path.join(self.h_dir, 'cpp_comments.h') + path = os.path.join(self.h_dir, "cpp_comments.h") self.parser.load_file(path) self.parser.remove_comments(path) - with open(os.path.join(self.h_dir, - 'cpp_comments_removed.h'), 'r') as f: - compare_lines(self.parser.files[path].split('\n'), f.readlines()) + with open(os.path.join(self.h_dir, "cpp_comments_removed.h"), "r") as f: + compare_lines(self.parser.files[path].split("\n"), f.readlines()) class TestPreprocessing(object): - """Test preprocessing. 
+ """Test preprocessing.""" - """ - h_dir = os.path.join(H_DIRECTORY, 'macros') + h_dir = os.path.join(H_DIRECTORY, "macros") def setup_method(self): - self.parser = CParser(process_all=False) def test_values(self): - - path = os.path.join(self.h_dir, 'macro_values.h') + path = os.path.join(self.h_dir, "macro_values.h") self.parser.load_file(path) self.parser.remove_comments(path) self.parser.preprocess(path) - macros = self.parser.defs['macros'] - values = self.parser.defs['values'] + macros = self.parser.defs["macros"] + values = self.parser.defs["values"] - assert 'M' in macros and macros['M'] == '' - assert 'N' in macros and macros['N'] == 'n' and values['N'] is None + assert "M" in macros and macros["M"] == "" + assert "N" in macros and macros["N"] == "n" and values["N"] is None # Decimal integer - assert ('MACRO_D1' in macros and macros['MACRO_D1'] == '1' and - values['MACRO_D1'] == 1) - assert ('MACRO_D2' in macros and macros['MACRO_D2'] == '-2U' and - values['MACRO_D2'] == -2) - assert ('MACRO_D3' in macros and macros['MACRO_D3'] == '+ 3UL' and - values['MACRO_D3'] == 3) + assert ( + "MACRO_D1" in macros + and macros["MACRO_D1"] == "1" + and values["MACRO_D1"] == 1 + ) + assert ( + "MACRO_D2" in macros + and macros["MACRO_D2"] == "-2U" + and values["MACRO_D2"] == -2 + ) + assert ( + "MACRO_D3" in macros + and macros["MACRO_D3"] == "+ 3UL" + and values["MACRO_D3"] == 3 + ) # Bit shifted decimal integer - assert ('MACRO_SD1' in macros and - macros['MACRO_SD1'] == '(1 << 1)' and - values['MACRO_SD1'] == 2) - assert ('MACRO_SD2' in macros and - macros['MACRO_SD2'] == '(2U << 2)' and - values['MACRO_SD2'] == 8) - assert ('MACRO_SD3' in macros and - macros['MACRO_SD3'] == '(3UL << 3)' and - values['MACRO_SD3'] == 24) + assert ( + "MACRO_SD1" in macros + and macros["MACRO_SD1"] == "(1 << 1)" + and values["MACRO_SD1"] == 2 + ) + assert ( + "MACRO_SD2" in macros + and macros["MACRO_SD2"] == "(2U << 2)" + and values["MACRO_SD2"] == 8 + ) + assert ( + "MACRO_SD3" in 
macros + and macros["MACRO_SD3"] == "(3UL << 3)" + and values["MACRO_SD3"] == 24 + ) # Hexadecimal integer - assert ('MACRO_H1' in macros and - macros['MACRO_H1'] == '+0x000000' and - values['MACRO_H1'] == 0) - assert ('MACRO_H2' in macros and - macros['MACRO_H2'] == '- 0x000001U' and - values['MACRO_H2'] == -1) - assert ('MACRO_H3' in macros and - macros['MACRO_H3'] == '0X000002UL' and - values['MACRO_H3'] == 2) + assert ( + "MACRO_H1" in macros + and macros["MACRO_H1"] == "+0x000000" + and values["MACRO_H1"] == 0 + ) + assert ( + "MACRO_H2" in macros + and macros["MACRO_H2"] == "- 0x000001U" + and values["MACRO_H2"] == -1 + ) + assert ( + "MACRO_H3" in macros + and macros["MACRO_H3"] == "0X000002UL" + and values["MACRO_H3"] == 2 + ) # Bit shifted hexadecimal integer - assert ('MACRO_SH1' in macros and - macros['MACRO_SH1'] == '(0x000000 << 1)' and - values['MACRO_SH1'] == 0) - assert ('MACRO_SH2' in macros and - macros['MACRO_SH2'] == '(0x000001U << 2)' and - values['MACRO_SH2'] == 4) - assert ('MACRO_H3' in macros and - macros['MACRO_SH3'] == '(0X000002UL << 3)' and - values['MACRO_SH3'] == 16) + assert ( + "MACRO_SH1" in macros + and macros["MACRO_SH1"] == "(0x000000 << 1)" + and values["MACRO_SH1"] == 0 + ) + assert ( + "MACRO_SH2" in macros + and macros["MACRO_SH2"] == "(0x000001U << 2)" + and values["MACRO_SH2"] == 4 + ) + assert ( + "MACRO_H3" in macros + and macros["MACRO_SH3"] == "(0X000002UL << 3)" + and values["MACRO_SH3"] == 16 + ) # Floating point value - assert ('MACRO_F1' in macros and - macros['MACRO_F1'] == '1.0' and - values['MACRO_F1'] == 1.0) - assert ('MACRO_F2' in macros and - macros['MACRO_F2'] == '1.1e1' and - values['MACRO_F2'] == 11.) 
- assert ('MACRO_F3' in macros and - macros['MACRO_F3'] == '-1.1E-1' and - values['MACRO_F3'] == -0.11) + assert ( + "MACRO_F1" in macros + and macros["MACRO_F1"] == "1.0" + and values["MACRO_F1"] == 1.0 + ) + assert ( + "MACRO_F2" in macros + and macros["MACRO_F2"] == "1.1e1" + and values["MACRO_F2"] == 11.0 + ) + assert ( + "MACRO_F3" in macros + and macros["MACRO_F3"] == "-1.1E-1" + and values["MACRO_F3"] == -0.11 + ) # String macro - assert ('MACRO_S' in macros and macros['MACRO_S'] == '"test"' and - values['MACRO_S'] == 'test') + assert ( + "MACRO_S" in macros + and macros["MACRO_S"] == '"test"' + and values["MACRO_S"] == "test" + ) # Nested macros - assert ('NESTED' in macros and macros['NESTED'] == '1' and - values['NESTED'] == 1) - assert ('NESTED2' in macros and macros['NESTED2'] == '1' and - values['NESTED2'] == 1) - assert ('MACRO_N' in macros and macros['MACRO_N'] == '1 + 2' and - values['MACRO_N'] == 3) + assert "NESTED" in macros and macros["NESTED"] == "1" and values["NESTED"] == 1 + assert ( + "NESTED2" in macros and macros["NESTED2"] == "1" and values["NESTED2"] == 1 + ) + assert ( + "MACRO_N" in macros + and macros["MACRO_N"] == "1 + 2" + and values["MACRO_N"] == 3 + ) # Muliline macro - assert 'MACRO_ML' in macros and values['MACRO_ML'] == 2 + assert "MACRO_ML" in macros and values["MACRO_ML"] == 2 def test_conditionals(self): - - path = os.path.join(self.h_dir, 'macro_conditionals.h') + path = os.path.join(self.h_dir, "macro_conditionals.h") self.parser.load_file(path) self.parser.remove_comments(path) self.parser.preprocess(path) self.parser.parse_defs(path) - macros = self.parser.defs['macros'] + macros = self.parser.defs["macros"] stream = self.parser.files[path] # Test if defined conditional - assert 'DEFINE_IF' in macros - assert ' int DECLARE_IF;\n' in stream - assert 'NO_DEFINE_IF' not in macros - assert ' int NO_DECLARE_IF;\n' not in stream + assert "DEFINE_IF" in macros + assert " int DECLARE_IF;\n" in stream + assert "NO_DEFINE_IF" not 
in macros + assert " int NO_DECLARE_IF;\n" not in stream # Test ifdef conditional - assert 'DEFINE_IFDEF' in macros - assert ' int DECLARE_IFDEF;\n' in stream - assert 'NO_DEFINE_IFDEF' not in macros - assert ' int NO_DECLARE_IFDEF;\n' not in stream + assert "DEFINE_IFDEF" in macros + assert " int DECLARE_IFDEF;\n" in stream + assert "NO_DEFINE_IFDEF" not in macros + assert " int NO_DECLARE_IFDEF;\n" not in stream # Test if !defined - assert 'DEFINE_IFN' in macros - assert ' int DECLARE_IFN;\n' in stream - assert 'NO_DEFINE_IFN' not in macros - assert ' int NO_DECLARE_IFN;\n' not in stream + assert "DEFINE_IFN" in macros + assert " int DECLARE_IFN;\n" in stream + assert "NO_DEFINE_IFN" not in macros + assert " int NO_DECLARE_IFN;\n" not in stream # Test ifndef - assert 'DEFINE_IFNDEF' in macros - assert ' int DECLARE_IFNDEF;\n' in stream - assert 'NO_DEFINE_IFNDEF' not in macros - assert ' int NO_DECLARE_IFNDEF;\n' not in stream + assert "DEFINE_IFNDEF" in macros + assert " int DECLARE_IFNDEF;\n" in stream + assert "NO_DEFINE_IFNDEF" not in macros + assert " int NO_DECLARE_IFNDEF;\n" not in stream # Test elif - assert 'DEFINE_ELIF' in macros - assert ' int DECLARE_ELIF;\n' in stream - assert 'NO_DEFINE_ELIF' not in macros - assert ' int NO_DECLARE_ELIF;\n' not in stream + assert "DEFINE_ELIF" in macros + assert " int DECLARE_ELIF;\n" in stream + assert "NO_DEFINE_ELIF" not in macros + assert " int NO_DECLARE_ELIF;\n" not in stream # Test else - assert 'DEFINE_ELSE' in macros - assert ' int DECLARE_ELSE;\n' in stream - assert 'NO_DEFINE_ELSE' not in macros - assert ' int NO_DECLARE_ELSE;\n' not in stream + assert "DEFINE_ELSE" in macros + assert " int DECLARE_ELSE;\n" in stream + assert "NO_DEFINE_ELSE" not in macros + assert " int NO_DECLARE_ELSE;\n" not in stream # Test nested - assert 'DEFINE_N1' in macros - assert ' int DECLARE_N1;\n' in stream - assert 'NO_DEFINE_N2' not in macros - assert 'DEFINE_N2' not in macros + assert "DEFINE_N1" in macros + assert " int 
DECLARE_N1;\n" in stream + assert "NO_DEFINE_N2" not in macros + assert "DEFINE_N2" not in macros - assert 'DEFINE_N3' in macros - assert 'NO_DEFINE_N3' not in macros - assert ' int NO_DECLARE_N3;\n' not in stream + assert "DEFINE_N3" in macros + assert "NO_DEFINE_N3" not in macros + assert " int NO_DECLARE_N3;\n" not in stream # Test logical - assert 'DEFINE_LOG' in macros - assert ' int DECLARE_LOG;\n' in stream - assert 'NO_DEFINE_LOG' not in macros - assert 'NO_DEFINE_LOG' not in macros + assert "DEFINE_LOG" in macros + assert " int DECLARE_LOG;\n" in stream + assert "NO_DEFINE_LOG" not in macros + assert "NO_DEFINE_LOG" not in macros # Test undef - assert 'DEFINE_UNDEF' in macros - assert 'UNDEF' not in macros + assert "DEFINE_UNDEF" in macros + assert "UNDEF" not in macros def test_macro_function(self): - - path = os.path.join(self.h_dir, 'macro_functions.h') + path = os.path.join(self.h_dir, "macro_functions.h") self.parser.load_file(path) self.parser.remove_comments(path) self.parser.preprocess(path) self.parser.parse_defs(path) - values = self.parser.defs['values'] - fnmacros = self.parser.defs['fnmacros'] + values = self.parser.defs["values"] + fnmacros = self.parser.defs["fnmacros"] stream = self.parser.files[path] # Test macro declaration. - assert 'CARRE' in fnmacros - assert 'int carre = 2*2;' in stream + assert "CARRE" in fnmacros + assert "int carre = 2*2;" in stream - assert 'int __declspec(dllexport) function2()' in stream - assert '__declspec(dllexport) int function3()' in stream - assert '__declspec(dllexport) int * function4()' in stream + assert "int __declspec(dllexport) function2()" in stream + assert "__declspec(dllexport) int function3()" in stream + assert "__declspec(dllexport) int * function4()" in stream # Test defining a macro function as an alias for another one. 
- assert 'MAKEINTRESOURCEA' in fnmacros - assert 'MAKEINTRESOURCEW' in fnmacros - assert 'MAKEINTRESOURCE' in fnmacros - assert fnmacros['MAKEINTRESOURCE'] == fnmacros['MAKEINTRESOURCEA'] - assert 'int x = ((LPSTR)((ULONG_PTR)((WORD)(4))))' + assert "MAKEINTRESOURCEA" in fnmacros + assert "MAKEINTRESOURCEW" in fnmacros + assert "MAKEINTRESOURCE" in fnmacros + assert fnmacros["MAKEINTRESOURCE"] == fnmacros["MAKEINTRESOURCEA"] + assert "int x = ((LPSTR)((ULONG_PTR)((WORD)(4))))" # Test using a macro value in a macro function call - assert 'BIT' in values and values['BIT'] == 1 - assert '((y) |= (0x01))' in stream + assert "BIT" in values and values["BIT"] == 1 + assert "((y) |= (0x01))" in stream # Test defining a macro function calling other macros (values and # functions) - assert 'SETBITS' in fnmacros - assert 'int z1, z2 = (((1) |= (0x01)), ((2) |= (0x01)));' in stream + assert "SETBITS" in fnmacros + assert "int z1, z2 = (((1) |= (0x01)), ((2) |= (0x01)));" in stream # Test defining a macro function calling nested macro functions - assert 'SETBIT_AUTO' in fnmacros - assert 'int z3 = ((((3) |= (0x01)), ((3) |= (0x01))));' in stream + assert "SETBIT_AUTO" in fnmacros + assert "int z3 = ((((3) |= (0x01)), ((3) |= (0x01))));" in stream def test_pragmas(self): - - path = os.path.join(self.h_dir, 'pragmas.h') + path = os.path.join(self.h_dir, "pragmas.h") self.parser.load_file(path) self.parser.remove_comments(path) self.parser.preprocess(path) @@ -445,7 +477,7 @@ def test_pragmas(self): packings = self.parser.pack_list[path] # Check all pragmas instructions have been removed. - assert stream.strip() == '' + assert stream.strip() == "" assert packings[1][1] is None assert packings[2][1] == 4 @@ -458,352 +490,409 @@ def test_pragmas(self): class TestParsing(object): - """Test parsing. 
- - """ + """Test parsing.""" h_dir = H_DIRECTORY def setup_method(self): - self.parser = CParser(process_all=False) def test_variables(self): - - path = os.path.join(self.h_dir, 'variables.h') + path = os.path.join(self.h_dir, "variables.h") self.parser.load_file(path) self.parser.process_all() - variables = self.parser.defs['variables'] + variables = self.parser.defs["variables"] # Integers - assert ('short1' in variables and - variables['short1'] == (1, Type('signed short'))) - assert ('short_int' in variables and - variables['short_int'] == (1, Type('short int'))) - assert ('short_un' in variables and - variables['short_un'] == (1, Type('unsigned short'))) - assert ('short_int_un' in variables and - variables['short_int_un'] == (1, Type('unsigned short int'))) - assert ('int1' in variables and - variables['int1'] == (1, Type('int'))) - assert ('un' in variables and - variables['un'] == (1, Type('unsigned'))) - assert ('int_un' in variables and - variables['int_un'] == (1, Type('unsigned int'))) - assert ('long1' in variables and - variables['long1'] == (1, Type('long'))) - assert ('long_int' in variables and - variables['long_int'] == (1, Type('long int'))) - assert ('long_un' in variables and - variables['long_un'] == (1, Type('unsigned long'))) - assert ('long_int_un' in variables and - variables['long_int_un'] == (1, Type('unsigned long int'))) - if sys.platform == 'win32': - assert ('int64' in variables and - variables['int64'] == (1, Type('__int64'))) - assert ('int64_un' in variables and - variables['int64_un'] == (1, Type('unsigned __int64'))) - assert ('long_long' in variables and - variables['long_long'] == (1, Type('long long'))) - assert ('long_long_int' in variables and - variables['long_long_int'] == (1, Type('long long int'))) - assert ('long_long_un' in variables and - variables['long_long_un'] == (1, Type('unsigned long long'))) - assert ('long_long_int_un' in variables and - variables['long_long_int_un'] == (1, Type('unsigned long ' - 'long 
int'))) + assert "short1" in variables and variables["short1"] == ( + 1, + Type("signed short"), + ) + assert "short_int" in variables and variables["short_int"] == ( + 1, + Type("short int"), + ) + assert "short_un" in variables and variables["short_un"] == ( + 1, + Type("unsigned short"), + ) + assert "short_int_un" in variables and variables["short_int_un"] == ( + 1, + Type("unsigned short int"), + ) + assert "int1" in variables and variables["int1"] == (1, Type("int")) + assert "un" in variables and variables["un"] == (1, Type("unsigned")) + assert "int_un" in variables and variables["int_un"] == ( + 1, + Type("unsigned int"), + ) + assert "long1" in variables and variables["long1"] == (1, Type("long")) + assert "long_int" in variables and variables["long_int"] == ( + 1, + Type("long int"), + ) + assert "long_un" in variables and variables["long_un"] == ( + 1, + Type("unsigned long"), + ) + assert "long_int_un" in variables and variables["long_int_un"] == ( + 1, + Type("unsigned long int"), + ) + if sys.platform == "win32": + assert "int64" in variables and variables["int64"] == (1, Type("__int64")) + assert "int64_un" in variables and variables["int64_un"] == ( + 1, + Type("unsigned __int64"), + ) + assert "long_long" in variables and variables["long_long"] == ( + 1, + Type("long long"), + ) + assert "long_long_int" in variables and variables["long_long_int"] == ( + 1, + Type("long long int"), + ) + assert "long_long_un" in variables and variables["long_long_un"] == ( + 1, + Type("unsigned long long"), + ) + assert "long_long_int_un" in variables and variables["long_long_int_un"] == ( + 1, + Type("unsigned long " "long int"), + ) # C99 integers for i in (8, 16, 32, 64): - assert ('i%d' % i in variables and - variables['i%d' % i] == (1, Type('int%d_t' % i))) - assert ('u%d' % i in variables and - variables['u%d' % i] == (1, Type('uint%d_t' % i))) + assert "i%d" % i in variables and variables["i%d" % i] == ( + 1, + Type("int%d_t" % i), + ) + assert "u%d" % i in 
variables and variables["u%d" % i] == ( + 1, + Type("uint%d_t" % i), + ) # Floating point number - assert ('fl' in variables and variables['fl'] == - (1., Type('float'))) - assert ('db' in variables and variables['db'] == - (0.1, Type('double'))) - assert ('dbl' in variables and - variables['dbl'] == (-10., Type('long double'))) + assert "fl" in variables and variables["fl"] == (1.0, Type("float")) + assert "db" in variables and variables["db"] == (0.1, Type("double")) + assert "dbl" in variables and variables["dbl"] == (-10.0, Type("long double")) # Const and static modif - assert ('int_const' in variables and - variables['int_const'] == (4, Type('int', - type_quals=(('const',),)))) - assert ('int_stat' in variables and - variables['int_stat'] == (4, Type('int'))) - assert ('int_con_stat' in variables and - variables['int_con_stat'] == (4, Type('int', - type_quals=(('const',),)))) - assert ('int_extern' in variables and - variables['int_extern'] == (4, Type('int'))) + assert "int_const" in variables and variables["int_const"] == ( + 4, + Type("int", type_quals=(("const",),)), + ) + assert "int_stat" in variables and variables["int_stat"] == (4, Type("int")) + assert "int_con_stat" in variables and variables["int_con_stat"] == ( + 4, + Type("int", type_quals=(("const",),)), + ) + assert "int_extern" in variables and variables["int_extern"] == (4, Type("int")) # String - assert ('str1' in variables and - variables['str1'] == ("normal string", Type('char', '*'))) - assert ('str2' in variables and - variables['str2'] == ("string with macro: INT", - Type('char', '*', '*'))) - assert ('str3' in variables and - variables['str3'] == ("string with comment: /*comment inside string*/", - Type('char', '*', type_quals=(('const',), ('const',))))) - assert ('str4' in variables and - variables['str4'] == ("string with define #define MACRO5 macro5_in_string ", - Type('char', '*'))) - assert ('str5' in variables and - variables['str5'] == ("string with \"escaped quotes\" ", - 
Type('char', '*'))) + assert "str1" in variables and variables["str1"] == ( + "normal string", + Type("char", "*"), + ) + assert "str2" in variables and variables["str2"] == ( + "string with macro: INT", + Type("char", "*", "*"), + ) + assert "str3" in variables and variables["str3"] == ( + "string with comment: /*comment inside string*/", + Type("char", "*", type_quals=(("const",), ("const",))), + ) + assert "str4" in variables and variables["str4"] == ( + "string with define #define MACRO5 macro5_in_string ", + Type("char", "*"), + ) + assert "str5" in variables and variables["str5"] == ( + 'string with "escaped quotes" ', + Type("char", "*"), + ) # Test complex evaluation - assert ('x1' in variables and - variables['x1'] == (1., Type('float'))) + assert "x1" in variables and variables["x1"] == (1.0, Type("float")) # Test type casting handling. - assert ('x2' in variables and - variables['x2'] == (88342528, Type('int'))) + assert "x2" in variables and variables["x2"] == (88342528, Type("int")) # Test array handling - assert ('array' in variables and - variables['array'] == ([1, 3141500.0], Type('float', [2]))) + assert "array" in variables and variables["array"] == ( + [1, 3141500.0], + Type("float", [2]), + ) # assert ('array2d' in variables and # variables['array2d'] == ([[1, 2, 3], [4, 5, 6]], Type('float', [2, 3]))) - assert ('intJunk' in variables and - variables['intJunk'] == ( - None, - Type('int', '*', '*', '*', [4], - type_quals=(('const',), ('const',), (), (), ())) ) ) + assert "intJunk" in variables and variables["intJunk"] == ( + None, + Type( + "int", + "*", + "*", + "*", + [4], + type_quals=(("const",), ("const",), (), (), ()), + ), + ) # test type qualifiers - assert variables.get('typeQualedIntPtrPtr') == \ - (None, Type('int', '*', '*', - type_quals=(('const',), ('volatile',), ())) ) - assert variables.get('typeQualedIntPtr') == \ - (None, Type('int', '*', type_quals=(('const', 'volatile',), ()))) + assert variables.get("typeQualedIntPtrPtr") == 
( + None, + Type("int", "*", "*", type_quals=(("const",), ("volatile",), ())), + ) + assert variables.get("typeQualedIntPtr") == ( + None, + Type( + "int", + "*", + type_quals=( + ( + "const", + "volatile", + ), + (), + ), + ), + ) # test type definition precedence - assert variables.get('prec_ptr_of_arr') == \ - (None, Type('int', [1], '*')) - assert variables.get('prec_arr_of_ptr') == \ - (None, Type('int', '*', [1])) - assert variables.get('prec_arr_of_ptr2') == \ - (None, Type('int', '*', [1])) + assert variables.get("prec_ptr_of_arr") == (None, Type("int", [1], "*")) + assert variables.get("prec_arr_of_ptr") == (None, Type("int", "*", [1])) + assert variables.get("prec_arr_of_ptr2") == (None, Type("int", "*", [1])) # No structure, no unions, no enum def test_typedef(self): - - path = os.path.join(self.h_dir, 'typedefs.h') + path = os.path.join(self.h_dir, "typedefs.h") self.parser.load_file(path) self.parser.process_all() - types = self.parser.defs['types'] - variables = self.parser.defs['variables'] + types = self.parser.defs["types"] + variables = self.parser.defs["variables"] # Test defining types from base types. 
- assert ('typeChar' in types and types['typeChar'] == - Type('char', '*', '*')) - assert ('typeInt' in types and types['typeInt'] == - Type('int')) - assert ('typeIntPtr' in types and types['typeIntPtr'] == - Type('int', '*')) - assert ('typeIntArr' in types and types['typeIntArr'] == - Type('int', [10])) - assert ('typeIntDArr' in types and types['typeIntDArr'] == - Type('int', [5], [6])) - assert ('typeTypeInt' in types and - types['typeTypeInt'] == Type('typeInt')) - assert not self.parser.is_fund_type('typeTypeInt') - assert self.parser.eval_type(['typeTypeInt']) == Type('int') - assert ('ULONG' in types and types['ULONG'] == Type('unsigned long')) + assert "typeChar" in types and types["typeChar"] == Type("char", "*", "*") + assert "typeInt" in types and types["typeInt"] == Type("int") + assert "typeIntPtr" in types and types["typeIntPtr"] == Type("int", "*") + assert "typeIntArr" in types and types["typeIntArr"] == Type("int", [10]) + assert "typeIntDArr" in types and types["typeIntDArr"] == Type("int", [5], [6]) + assert "typeTypeInt" in types and types["typeTypeInt"] == Type("typeInt") + assert not self.parser.is_fund_type("typeTypeInt") + assert self.parser.eval_type(["typeTypeInt"]) == Type("int") + assert "ULONG" in types and types["ULONG"] == Type("unsigned long") # Test annotated types - assert ('voidpc' in types and types['voidpc'] == - Type('void', '*', type_quals=(('const',), ()))) - assert ('charf' in types and types['charf'] == - Type('char', type_quals=(('far',),))) + assert "voidpc" in types and types["voidpc"] == Type( + "void", "*", type_quals=(("const",), ()) + ) + assert "charf" in types and types["charf"] == Type( + "char", type_quals=(("far",),) + ) # Test using custom type. 
- assert ('ttip5' in variables and - variables['ttip5'] == (None, Type('typeTypeInt', '*', [5]))) + assert "ttip5" in variables and variables["ttip5"] == ( + None, + Type("typeTypeInt", "*", [5]), + ) # Handling undefined types - assert ('SomeOtherType' in types and - types['SomeOtherType'] == Type('someType')) - assert ('x' in variables and variables['x'] == - (None, Type('undefined'))) - assert not self.parser.is_fund_type('SomeOtherType') + assert "SomeOtherType" in types and types["SomeOtherType"] == Type("someType") + assert "x" in variables and variables["x"] == (None, Type("undefined")) + assert not self.parser.is_fund_type("SomeOtherType") with pytest.raises(Exception): - self.parser.eval_type(Type('undefined')) + self.parser.eval_type(Type("undefined")) # Testing recursive defs - assert 'recType1' in types - assert 'recType2' in types - assert 'recType3' in types + assert "recType1" in types + assert "recType2" in types + assert "recType3" in types with pytest.raises(Exception): - self.parser.eval_type(Type('recType3')) + self.parser.eval_type(Type("recType3")) def test_enums(self): - - path = os.path.join(self.h_dir, 'enums.h') + path = os.path.join(self.h_dir, "enums.h") self.parser.load_file(path) self.parser.process_all() - enums = self.parser.defs['enums'] - types = self.parser.defs['types'] - variables = self.parser.defs['variables'] - print(self.parser.defs['values']) - assert ('enum_name' in enums and 'enum enum_name' in types) - assert enums['enum_name'] == {'enum1': 129, 'enum2': 6, 'enum3': 7, - 'enum4': 8} - assert types['enum enum_name'] == Type('enum', 'enum_name',) - assert ('enum_inst' in variables and - variables['enum_inst'] == (None, Type('enum enum_name',))) - - assert 'anon_enum0' in enums - assert 'anon_enum1' in enums - assert 'no_name_enum_typeddef' in types + enums = self.parser.defs["enums"] + types = self.parser.defs["types"] + variables = self.parser.defs["variables"] + print(self.parser.defs["values"]) + assert "enum_name" in 
enums and "enum enum_name" in types + assert enums["enum_name"] == {"enum1": 129, "enum2": 6, "enum3": 7, "enum4": 8} + assert types["enum enum_name"] == Type( + "enum", + "enum_name", + ) + assert "enum_inst" in variables and variables["enum_inst"] == ( + None, + Type( + "enum enum_name", + ), + ) + + assert "anon_enum0" in enums + assert "anon_enum1" in enums + assert "no_name_enum_typeddef" in types def test_struct(self): - - path = os.path.join(self.h_dir, 'structs.h') + path = os.path.join(self.h_dir, "structs.h") self.parser.load_file(path) self.parser.process_all() - structs = self.parser.defs['structs'] - types = self.parser.defs['types'] - variables = self.parser.defs['variables'] + structs = self.parser.defs["structs"] + types = self.parser.defs["types"] + variables = self.parser.defs["variables"] # Test creating a structure using only base types. - assert ('struct_name' in structs and 'struct struct_name' in types) - assert structs['struct_name'] == \ - Struct(('x', Type('int'), 1), - ('y', Type('type_type_int'), None, 2), - ('str', Type('char', [10]), None)) - assert ('struct_inst' in variables and - variables['struct_inst'] == (None, Type('struct struct_name'))) + assert "struct_name" in structs and "struct struct_name" in types + assert structs["struct_name"] == Struct( + ("x", Type("int"), 1), + ("y", Type("type_type_int"), None, 2), + ("str", Type("char", [10]), None), + ) + assert "struct_inst" in variables and variables["struct_inst"] == ( + None, + Type("struct struct_name"), + ) # Test creating a structure using only base types. 
- assert ('struct_arr' in structs and 'struct struct_arr' in types) - assert structs['struct_arr'] == \ - Struct(('str', Type('char', [10], [20]), None)) - assert ('struct_inst' in variables and - variables['struct_inst'] == (None, Type('struct struct_name'))) + assert "struct_arr" in structs and "struct struct_arr" in types + assert structs["struct_arr"] == Struct(("str", Type("char", [10], [20]), None)) + assert "struct_inst" in variables and variables["struct_inst"] == ( + None, + Type("struct struct_name"), + ) # Test creating a pointer type from a structure. - assert ('struct_name_ptr' in types and - types['struct_name_ptr'] == Type('struct struct_name', '*')) + assert "struct_name_ptr" in types and types["struct_name_ptr"] == Type( + "struct struct_name", "*" + ) - assert ('struct_name2_ptr' in types and - types['struct_name2_ptr'] == Type('struct anon_struct0', '*')) + assert "struct_name2_ptr" in types and types["struct_name2_ptr"] == Type( + "struct anon_struct0", "*" + ) # Test declaring a recursive structure. - assert ('recursive_struct' in structs and - 'struct recursive_struct' in types) - assert structs['recursive_struct'] == \ - Struct(('next', Type('struct recursive_struct', '*'), None)) + assert "recursive_struct" in structs and "struct recursive_struct" in types + assert structs["recursive_struct"] == Struct( + ("next", Type("struct recursive_struct", "*"), None) + ) # Test declaring near and far pointers. - assert 'tagWNDCLASSEXA' in structs - assert ('NPWNDCLASSEXA' in types and - ( types['NPWNDCLASSEXA'] == - Type('struct tagWNDCLASSEXA', '*', type_quals=(('near',), ())))) + assert "tagWNDCLASSEXA" in structs + assert "NPWNDCLASSEXA" in types and ( + types["NPWNDCLASSEXA"] + == Type("struct tagWNDCLASSEXA", "*", type_quals=(("near",), ())) + ) # Test altering the packing of a structure. 
- assert ('struct_name_p' in structs and 'struct struct_name_p' in types) - assert structs['struct_name_p'] == \ - Struct(('x', Type('int'), None), - ('y', Type('type_type_int'), None), - ('str', Type('char', [10]), "brace } \0"), - pack=16) + assert "struct_name_p" in structs and "struct struct_name_p" in types + assert structs["struct_name_p"] == Struct( + ("x", Type("int"), None), + ("y", Type("type_type_int"), None), + ("str", Type("char", [10]), "brace } \0"), + pack=16, + ) # Test nested structures - NESTED_STRUCT_ENUM_0 = self.parser.defs['enums']['root_nested_enum']['NESTED_STRUCT_ENUM_0'] - NESTED_STRUCT_ENUM_1 = self.parser.defs['enums']['root_nested_enum']['NESTED_STRUCT_ENUM_1'] - NESTED_STRUCT_ENUM_2 = self.parser.defs['enums']['root_nested_enum']['NESTED_STRUCT_ENUM_2'] + NESTED_STRUCT_ENUM_0 = self.parser.defs["enums"]["root_nested_enum"][ + "NESTED_STRUCT_ENUM_0" + ] + NESTED_STRUCT_ENUM_1 = self.parser.defs["enums"]["root_nested_enum"][ + "NESTED_STRUCT_ENUM_1" + ] + NESTED_STRUCT_ENUM_2 = self.parser.defs["enums"]["root_nested_enum"][ + "NESTED_STRUCT_ENUM_2" + ] assert NESTED_STRUCT_ENUM_0 == 0 assert NESTED_STRUCT_ENUM_1 == 1 assert NESTED_STRUCT_ENUM_2 == 2 - assert ('root_nested_structure' in structs and 'struct root_nested_structure' in types) - assert structs['root_nested_structure'] == \ - Struct(('x', Type('struct leaf1_nested_structure', [NESTED_STRUCT_ENUM_2]), None), - ('y', Type('root_nested_enum_type'), None), - ('z', Type('struct leaf2_nested_structure'), None), - pack=16) + assert ( + "root_nested_structure" in structs + and "struct root_nested_structure" in types + ) + assert structs["root_nested_structure"] == Struct( + ("x", Type("struct leaf1_nested_structure", [NESTED_STRUCT_ENUM_2]), None), + ("y", Type("root_nested_enum_type"), None), + ("z", Type("struct leaf2_nested_structure"), None), + pack=16, + ) def test_unions(self): - - path = os.path.join(self.h_dir, 'unions.h') + path = os.path.join(self.h_dir, "unions.h") 
self.parser.load_file(path) self.parser.process_all() - unions = self.parser.defs['unions'] - structs = self.parser.defs['structs'] - types = self.parser.defs['types'] - variables = self.parser.defs['variables'] + unions = self.parser.defs["unions"] + structs = self.parser.defs["structs"] + types = self.parser.defs["types"] + variables = self.parser.defs["variables"] # Test declaring an union. - assert 'union_name' in unions and 'union union_name' in types - assert unions['union_name'] == \ - Union(('x', Type('int'), 1), - ('y', Type('int'), None), - pack=None) - assert ('union_name_ptr' in types and - types['union_name_ptr'] == Type('union union_name', '*')) + assert "union_name" in unions and "union union_name" in types + assert unions["union_name"] == Union( + ("x", Type("int"), 1), ("y", Type("int"), None), pack=None + ) + assert "union_name_ptr" in types and types["union_name_ptr"] == Type( + "union union_name", "*" + ) # Test defining an unnamed union - assert ('no_name_union_inst' in variables and - variables['no_name_union_inst'] == (None, - Type('union anon_union0'))) + assert "no_name_union_inst" in variables and variables[ + "no_name_union_inst" + ] == (None, Type("union anon_union0")) # Test defining a structure using an unnamed union internally. 
- assert ('tagRID_DEVICE_INFO' in structs and - structs['tagRID_DEVICE_INFO'] == \ - Struct(('cbSize', Type('DWORD'), None), - ('dwType', Type('DWORD'), None), - (None, Type('union anon_union1'), None))) - - assert ('RID_DEVICE_INFO' in types and - types['RID_DEVICE_INFO'] == Type('struct tagRID_DEVICE_INFO')) - assert ('PRID_DEVICE_INFO' in types and - types['PRID_DEVICE_INFO'] == - Type('struct tagRID_DEVICE_INFO', '*') - ) - assert ('LPRID_DEVICE_INFO' in types and - ( types['LPRID_DEVICE_INFO'] == - Type('struct tagRID_DEVICE_INFO', '*') - ) - ) + assert "tagRID_DEVICE_INFO" in structs and structs[ + "tagRID_DEVICE_INFO" + ] == Struct( + ("cbSize", Type("DWORD"), None), + ("dwType", Type("DWORD"), None), + (None, Type("union anon_union1"), None), + ) + + assert "RID_DEVICE_INFO" in types and types["RID_DEVICE_INFO"] == Type( + "struct tagRID_DEVICE_INFO" + ) + assert "PRID_DEVICE_INFO" in types and types["PRID_DEVICE_INFO"] == Type( + "struct tagRID_DEVICE_INFO", "*" + ) + assert "LPRID_DEVICE_INFO" in types and ( + types["LPRID_DEVICE_INFO"] == Type("struct tagRID_DEVICE_INFO", "*") + ) def test_functions(self): - - path = os.path.join(self.h_dir, 'functions.h') + path = os.path.join(self.h_dir, "functions.h") self.parser.load_file(path) self.parser.process_all() - functions = self.parser.defs['functions'] - variables = self.parser.defs['variables'] - - assert functions.get('f') == \ - Type(Type('void'), ( (None, Type('int'), None), - (None, Type('int'), None) )) - assert functions['g'] == \ - Type(Type('int'), ( ('ch', Type('char', '*'), None), - ('str', Type('char', '*', '*'), None) )) - assert variables.get('fnPtr') == \ - (None, Type('int', - ( (None, Type('char'), None), - (None, Type('float'), None) ), - '*')) - assert functions.get('function1') == \ - Type(Type('int', '__stdcall', type_quals=((), None)), ()) - - assert functions.get('function2') == Type(Type('int'), ()) - - assert 'externFunc' in functions - - ptyp = Type('int', '*', '*', 
type_quals=(('volatile',), ('const',), ())) - assert functions.get('typeQualedFunc') == \ - Type(Type('int'), ((None, ptyp, None),)) + functions = self.parser.defs["functions"] + variables = self.parser.defs["variables"] + + assert functions.get("f") == Type( + Type("void"), ((None, Type("int"), None), (None, Type("int"), None)) + ) + assert functions["g"] == Type( + Type("int"), + (("ch", Type("char", "*"), None), ("str", Type("char", "*", "*"), None)), + ) + assert variables.get("fnPtr") == ( + None, + Type("int", ((None, Type("char"), None), (None, Type("float"), None)), "*"), + ) + assert functions.get("function1") == Type( + Type("int", "__stdcall", type_quals=((), None)), () + ) + + assert functions.get("function2") == Type(Type("int"), ()) + + assert "externFunc" in functions + + ptyp = Type("int", "*", "*", type_quals=(("volatile",), ("const",), ())) + assert functions.get("typeQualedFunc") == Type( + Type("int"), ((None, ptyp, None),) + ) diff --git a/tests/test_version.py b/tests/test_version.py index 429f751..9e3db4f 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -1,13 +1,12 @@ # ----------------------------------------------------------------------------- -# Copyright 2015-2022 by PyCLibrary Authors, see AUTHORS for more details. +# Copyright 2015-2025 by PyCLibrary Authors, see AUTHORS for more details. # # Distributed under the terms of the MIT/X11 license. # # The full license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- -"""Test version script (avoid stupid mistakes). +"""Test version script (avoid stupid mistakes).""" -""" from pyclibrary.version import __version__