trezor: replace Python tools with CMake

tobtoht 2024-09-13 01:35:32 +02:00
parent a1dc85c537
commit 2ad6f08122
No known key found for this signature in database
GPG Key ID: E45B10DD027D2472
7 changed files with 20 additions and 568 deletions


@@ -121,30 +121,8 @@ else()
 message(STATUS "Trezor: support disabled by USE_DEVICE_TREZOR")
 endif()
-if(Protobuf_FOUND AND USE_DEVICE_TREZOR)
-  if (NOT "$ENV{TREZOR_PYTHON}" STREQUAL "")
-    set(TREZOR_PYTHON "$ENV{TREZOR_PYTHON}" CACHE INTERNAL "Copied from environment variable TREZOR_PYTHON")
-  else()
-    find_package(Python QUIET COMPONENTS Interpreter) # cmake 3.12+
-    if(Python_Interpreter_FOUND)
-      set(TREZOR_PYTHON "${Python_EXECUTABLE}")
-    endif()
-  endif()
-  if(NOT TREZOR_PYTHON)
-    find_package(PythonInterp)
-    if(PYTHONINTERP_FOUND AND PYTHON_EXECUTABLE)
-      set(TREZOR_PYTHON "${PYTHON_EXECUTABLE}")
-    endif()
-  endif()
-  if(NOT TREZOR_PYTHON)
-    trezor_fatal_msg("Trezor: Python not found")
-  endif()
-endif()
 # Protobuf compilation test
-if(Protobuf_FOUND AND USE_DEVICE_TREZOR AND TREZOR_PYTHON)
+if(Protobuf_FOUND AND USE_DEVICE_TREZOR)
   execute_process(COMMAND ${Protobuf_PROTOC_EXECUTABLE} -I "${CMAKE_CURRENT_LIST_DIR}" -I "${Protobuf_INCLUDE_DIR}" "${CMAKE_CURRENT_LIST_DIR}/test-protobuf.proto" --cpp_out ${CMAKE_BINARY_DIR} RESULT_VARIABLE RET OUTPUT_VARIABLE OUT ERROR_VARIABLE ERR)
   if(RET)
     trezor_fatal_msg("Trezor: Protobuf test generation failed: ${OUT} ${ERR}")
@@ -183,22 +161,28 @@ if(Protobuf_FOUND AND USE_DEVICE_TREZOR AND TREZOR_PYTHON)
 endif()
 # Try to build protobuf messages
-if(Protobuf_FOUND AND USE_DEVICE_TREZOR AND TREZOR_PYTHON)
-  set(ENV{PROTOBUF_INCLUDE_DIRS} "${Protobuf_INCLUDE_DIR}")
-  set(ENV{PROTOBUF_PROTOC_EXECUTABLE} "${Protobuf_PROTOC_EXECUTABLE}")
-  set(TREZOR_PROTOBUF_PARAMS "")
-  if (USE_DEVICE_TREZOR_DEBUG)
-    set(TREZOR_PROTOBUF_PARAMS "--debug")
-  endif()
+if(Protobuf_FOUND AND USE_DEVICE_TREZOR)
+  # .proto files to compile
+  set(_proto_files "messages.proto"
+                   "messages-common.proto"
+                   "messages-management.proto"
+                   "messages-monero.proto")
+  if (TREZOR_DEBUG)
+    list(APPEND _proto_files "messages-debug.proto")
+  endif ()
-  execute_process(COMMAND ${TREZOR_PYTHON} tools/build_protob.py ${TREZOR_PROTOBUF_PARAMS} WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/../src/device_trezor/trezor RESULT_VARIABLE RET OUTPUT_VARIABLE OUT ERROR_VARIABLE ERR)
+  set(_proto_include_dir "${CMAKE_SOURCE_DIR}/external/trezor-common/protob")
+  set(_proto_files_absolute)
+  foreach(file IN LISTS _proto_files)
+    list(APPEND _proto_files_absolute "${_proto_include_dir}/${file}")
+  endforeach ()
+  execute_process(COMMAND ${Protobuf_PROTOC_EXECUTABLE} --cpp_out "${CMAKE_SOURCE_DIR}/src/device_trezor/trezor/messages" "-I${_proto_include_dir}" ${_proto_files_absolute} RESULT_VARIABLE RET OUTPUT_VARIABLE OUT ERROR_VARIABLE ERR)
   if(RET)
-    trezor_fatal_msg("Trezor: protobuf messages could not be regenerated (err=${RET}, python ${PYTHON})."
-            "OUT: ${OUT}, ERR: ${ERR}."
-            "Please read src/device_trezor/trezor/tools/README.md")
+    trezor_fatal_msg("Trezor: protobuf messages could not be (re)generated (err=${RET}). OUT: ${OUT}, ERR: ${ERR}.")
   endif()
-  message(STATUS "Trezor: protobuf messages regenerated out: \"${OUT}.\"")
+  message(STATUS "Trezor: protobuf messages regenerated out.")
   set(DEVICE_TREZOR_READY 1)
   add_definitions(-DDEVICE_TREZOR_READY=1)
   add_definitions(-DPROTOBUF_INLINE_NOT_IN_HEADERS=0)
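For reference, the added `execute_process()` call boils down to a single `protoc` run over the trezor-common definitions. A minimal Python sketch of an equivalent manual invocation (illustrative only; it assumes `protoc` is on `PATH` and the working directory is the repository root):

```python
# Illustrative sketch of the protoc run performed by the new CMake rule above.
# Assumes protoc is on PATH and the current directory is the repository root.
import subprocess

PROTO_DIR = "external/trezor-common/protob"
OUT_DIR = "src/device_trezor/trezor/messages"
PROTO_FILES = [
    "messages.proto",
    "messages-common.proto",
    "messages-management.proto",
    "messages-monero.proto",
    # "messages-debug.proto",  # included only when TREZOR_DEBUG is enabled
]

subprocess.check_call(
    ["protoc", "--cpp_out", OUT_DIR, "-I" + PROTO_DIR]
    + ["%s/%s" % (PROTO_DIR, name) for name in PROTO_FILES]
)
```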


@@ -1,45 +0,0 @@
# Trezor
## Messages rebuild
Install `protoc` for your distribution. Requirements:
- `protobuf-compiler`
- `libprotobuf-dev`
- `python`
Soft requirement: Python 3, which can easily be installed with [pyenv].
If Python 3 is used, there are no additional Python dependencies.
Since CMake 3.12 the `FindPython` module is used to locate the Python
interpreter on your system. It searches for Python 3 first; if none
is found, it falls back to Python 2.
Older CMake versions use the `FindPythonInterp` module, which does not
guarantee this ordering. If you want to override the selected Python,
you can do so in the following way:
```bash
export TREZOR_PYTHON=`which python3`
```
### Python 2.7+
Python 3 has `tempfile.TemporaryDirectory` available, but Python 2 lacks
this class, so the message generation code uses the `backports.tempfile`
package bundled in the repository.
The minimum supported Python versions are 2.7 and 3.4.
### Regenerate messages
```bash
cd src/device_trezor/trezor
python tools/build_protob.py
```
Message regeneration is also performed automatically via CMake.
[pyenv]: https://github.com/pyenv/pyenv


@@ -1,48 +0,0 @@
#!/usr/bin/env python
import os
import subprocess
import sys
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug-msg", default=False, action="store_const", const=True, help="Build debug messages")
args = parser.parse_args()
CWD = os.path.dirname(os.path.realpath(__file__))
ROOT_DIR = os.path.abspath(os.path.join(CWD, "..", "..", "..", ".."))
TREZOR_COMMON = os.path.join(ROOT_DIR, "external", "trezor-common")
TREZOR_MESSAGES = os.path.join(CWD, "..", "messages")
# check for existence of the submodule directory
common_defs = os.path.join(TREZOR_COMMON, "defs")
if not os.path.exists(common_defs):
raise ValueError(
"trezor-common submodule seems to be missing.\n"
+ 'Use "git submodule update --init --recursive" to retrieve it.'
)
# regenerate messages
try:
selected = [
"messages.proto",
"messages-common.proto",
"messages-management.proto",
"messages-monero.proto",
]
if args.debug_msg:
selected += ["messages-debug.proto"]
proto_srcs = [os.path.join(TREZOR_COMMON, "protob", x) for x in selected]
exec_args = [
sys.executable,
os.path.join(CWD, "pb2cpp.py"),
"-o",
TREZOR_MESSAGES,
] + proto_srcs
subprocess.check_call(exec_args)
except Exception as e:
raise


@@ -1,219 +0,0 @@
#!/usr/bin/env python
# Compiles Google's protobuf definitions of TREZOR wire messages
# into C++ sources (via protoc --cpp_out) for the device_trezor implementation
import argparse
import logging
import os
import re
import shutil
import subprocess
import glob
import hashlib
try:
from tempfile import TemporaryDirectory
except:
# Py2 backward compatibility, using bundled sources.
# Original source: pip install backports.tempfile
try:
# Try bundled python version
import sys
sys.path.append(os.path.dirname(__file__))
from py2backports.tempfile import TemporaryDirectory
except:
raise EnvironmentError('Python 2.7+ or 3.4+ is required. '
'TemporaryDirectory is not available in Python 2.'
'Try to specify python to use, e.g.: "export TREZOR_PYTHON=`which python3`"')
AUTO_HEADER = "# Automatically generated by pb2cpp\n"
# Fixing GCC7 compilation error
UNDEF_STATEMENT = """
#ifdef minor
#undef minor
#endif
"""
PROTOC = None
PROTOC_INCLUDE = None
def which(pgm):
path = os.getenv('PATH')
for p in path.split(os.path.pathsep):
p = os.path.join(p, pgm)
if os.path.exists(p) and os.access(p, os.X_OK):
return p
def namespace_file(fpath, package):
"""Adds / replaces package name. Simple regex parsing, may use https://github.com/ph4r05/plyprotobuf later"""
with open(fpath) as fh:
fdata = fh.read()
re_syntax = re.compile(r"^syntax\s*=")
re_package = re.compile(r"^package\s+([^;]+?)\s*;\s*$")
lines = fdata.split("\n")
line_syntax = None
line_package = None
for idx, line in enumerate(lines):
if line_syntax is None and re_syntax.match(line):
line_syntax = idx
if line_package is None and re_package.match(line):
line_package = idx
if package is None:
if line_package is None:
return
else:
lines.pop(line_package)
else:
new_package = "package %s;" % package
if line_package is None:
lines.insert(line_syntax + 1 if line_syntax is not None else 0, new_package)
else:
lines[line_package] = new_package
new_fdat = "\n".join(lines)
with open(fpath, "w+") as fh:
fh.write(new_fdat)
return new_fdat
def protoc(files, out_dir, additional_includes=(), package=None, force=False):
"""Compile code with protoc and return the data."""
include_dirs = set()
include_dirs.add(PROTOC_INCLUDE)
if additional_includes:
include_dirs.update(additional_includes)
with TemporaryDirectory() as tmpdir_protob, TemporaryDirectory() as tmpdir_out:
include_dirs.add(tmpdir_protob)
new_files = []
for file in files:
bname = os.path.basename(file)
tmp_file = os.path.join(tmpdir_protob, bname)
shutil.copy(file, tmp_file)
if package is not None:
namespace_file(tmp_file, package)
new_files.append(tmp_file)
protoc_includes = ["-I" + dir for dir in include_dirs if dir]
exec_args = (
[
PROTOC,
"--cpp_out",
tmpdir_out,
]
+ protoc_includes
+ new_files
)
subprocess.check_call(exec_args)
# Fixing gcc compilation and clashes with "minor" field name
add_undef(tmpdir_out)
# Scan output dir, check file differences
update_message_files(tmpdir_out, out_dir, force)
def update_message_files(tmpdir_out, out_dir, force=False):
files = glob.glob(os.path.join(tmpdir_out, '*.pb.*'))
for fname in files:
bname = os.path.basename(fname)
dest_file = os.path.join(out_dir, bname)
if not force and os.path.exists(dest_file):
data = open(fname, 'rb').read()
data_hash = hashlib.sha256(data).digest()
data_dest = open(dest_file, 'rb').read()
data_dest_hash = hashlib.sha256(data_dest).digest()
if data_hash == data_dest_hash:
continue
shutil.copy(fname, dest_file)
def add_undef(out_dir):
files = glob.glob(os.path.join(out_dir, '*.pb.*'))
for fname in files:
with open(fname) as fh:
lines = fh.readlines()
idx_insertion = None
for idx in range(len(lines)):
if '@@protoc_insertion_point(includes)' in lines[idx]:
idx_insertion = idx
break
if idx_insertion is None:
continue
lines.insert(idx_insertion + 1, UNDEF_STATEMENT)
with open(fname, 'w') as fh:
fh.write("".join(lines))
def strip_leader(s, prefix):
"""Remove given prefix from underscored name."""
leader = prefix + "_"
if s.startswith(leader):
return s[len(leader) :]
else:
return s
def main():
global PROTOC, PROTOC_INCLUDE
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
# fmt: off
parser.add_argument("proto", nargs="+", help="Protobuf definition files")
parser.add_argument("-o", "--out-dir", help="Directory for generated source code")
parser.add_argument("-n", "--namespace", default=None, help="Message namespace")
parser.add_argument("-I", "--protoc-include", action="append", help="protoc include path")
parser.add_argument("-P", "--protobuf-module", default="protobuf", help="Name of protobuf module")
parser.add_argument("-f", "--force", default=False, help="Overwrite existing files")
# fmt: on
args = parser.parse_args()
protoc_includes = args.protoc_include or (os.environ.get("PROTOC_INCLUDE"),)
PROTOBUF_INCLUDE_DIRS = os.getenv("PROTOBUF_INCLUDE_DIRS", None)
PROTOBUF_PROTOC_EXECUTABLE = os.getenv("PROTOBUF_PROTOC_EXECUTABLE", None)
if PROTOBUF_PROTOC_EXECUTABLE and not os.path.exists(PROTOBUF_PROTOC_EXECUTABLE):
raise ValueError("PROTOBUF_PROTOC_EXECUTABLE set but not found: %s" % PROTOBUF_PROTOC_EXECUTABLE)
elif PROTOBUF_PROTOC_EXECUTABLE:
PROTOC = PROTOBUF_PROTOC_EXECUTABLE
else:
if os.name == "nt":
PROTOC = which("protoc.exe")
else:
PROTOC = which("protoc")
if not PROTOC:
raise ValueError("protoc command not found. Set PROTOBUF_PROTOC_EXECUTABLE env var to the protoc binary and optionally PROTOBUF_INCLUDE_DIRS")
PROTOC_PREFIX = os.path.dirname(os.path.dirname(PROTOC))
PROTOC_INCLUDE = PROTOBUF_INCLUDE_DIRS if PROTOBUF_INCLUDE_DIRS else os.path.join(PROTOC_PREFIX, "include")
protoc(
args.proto, args.out_dir, protoc_includes, package=args.namespace, force=args.force
)
if __name__ == "__main__":
main()
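The `namespace_file()` helper above rewrites the `package` declaration of a `.proto` file in place. A minimal usage sketch, assuming `pb2cpp.py` is importable as a module (hypothetical; the tool was only ever run as a script):

```python
# Hypothetical usage sketch for namespace_file(); requires pb2cpp.py on sys.path.
import os
import tempfile
from pb2cpp import namespace_file

src = 'syntax = "proto2";\npackage hw.trezor.messages;\n\nmessage Ping {\n}\n'
with tempfile.NamedTemporaryFile("w", suffix=".proto", delete=False) as fh:
    fh.write(src)
    path = fh.name

namespace_file(path, "monero_messages")  # replace the existing package declaration
print(open(path).read())                 # now contains "package monero_messages;"

namespace_file(path, None)               # or drop the declaration entirely
os.unlink(path)
```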


@@ -1,72 +0,0 @@
"""
https://github.com/pjdelport/backports.tempfile/blob/master/src/backports/tempfile.py
Partial backport of Python 3.5's tempfile module:
TemporaryDirectory
Backport modifications are marked with "XXX backport".
"""
from __future__ import absolute_import
import sys
import warnings as _warnings
from shutil import rmtree as _rmtree
from py2backports.weakref import finalize
# XXX backport: Rather than backporting all of mkdtemp(), we just create a
# thin wrapper implementing its Python 3.5 signature.
if sys.version_info < (3, 5):
from tempfile import mkdtemp as old_mkdtemp
def mkdtemp(suffix=None, prefix=None, dir=None):
"""
Wrap `tempfile.mkdtemp()` to make the suffix and prefix optional (like Python 3.5).
"""
kwargs = {k: v for (k, v) in
dict(suffix=suffix, prefix=prefix, dir=dir).items()
if v is not None}
return old_mkdtemp(**kwargs)
else:
from tempfile import mkdtemp
# XXX backport: ResourceWarning was added in Python 3.2.
# For earlier versions, fall back to RuntimeWarning instead.
_ResourceWarning = RuntimeWarning if sys.version_info < (3, 2) else ResourceWarning
class TemporaryDirectory(object):
"""Create and return a temporary directory. This has the same
behavior as mkdtemp but can be used as a context manager. For
example:
with TemporaryDirectory() as tmpdir:
...
Upon exiting the context, the directory and everything contained
in it are removed.
"""
def __init__(self, suffix=None, prefix=None, dir=None):
self.name = mkdtemp(suffix, prefix, dir)
self._finalizer = finalize(
self, self._cleanup, self.name,
warn_message="Implicitly cleaning up {!r}".format(self))
@classmethod
def _cleanup(cls, name, warn_message):
_rmtree(name)
_warnings.warn(warn_message, _ResourceWarning)
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self._finalizer.detach():
_rmtree(self.name)


@@ -1,148 +0,0 @@
"""
https://github.com/pjdelport/backports.weakref/blob/master/src/backports/weakref.py
Partial backport of Python 3.6's weakref module:
finalize (new in Python 3.4)
Backport modifications are marked with "XXX backport".
"""
from __future__ import absolute_import
import itertools
import sys
from weakref import ref
__all__ = ['finalize']
class finalize(object):
"""Class for finalization of weakrefable objects
finalize(obj, func, *args, **kwargs) returns a callable finalizer
object which will be called when obj is garbage collected. The
first time the finalizer is called it evaluates func(*args, **kwargs)
and returns the result. After this the finalizer is dead, and
calling it just returns None.
When the program exits any remaining finalizers for which the
atexit attribute is true will be run in reverse order of creation.
By default atexit is true.
"""
# Finalizer objects don't have any state of their own. They are
# just used as keys to lookup _Info objects in the registry. This
# ensures that they cannot be part of a ref-cycle.
__slots__ = ()
_registry = {}
_shutdown = False
_index_iter = itertools.count()
_dirty = False
_registered_with_atexit = False
class _Info(object):
__slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
def __init__(self, obj, func, *args, **kwargs):
if not self._registered_with_atexit:
# We may register the exit function more than once because
# of a thread race, but that is harmless
import atexit
atexit.register(self._exitfunc)
finalize._registered_with_atexit = True
info = self._Info()
info.weakref = ref(obj, self)
info.func = func
info.args = args
info.kwargs = kwargs or None
info.atexit = True
info.index = next(self._index_iter)
self._registry[self] = info
finalize._dirty = True
def __call__(self, _=None):
"""If alive then mark as dead and return func(*args, **kwargs);
otherwise return None"""
info = self._registry.pop(self, None)
if info and not self._shutdown:
return info.func(*info.args, **(info.kwargs or {}))
def detach(self):
"""If alive then mark as dead and return (obj, func, args, kwargs);
otherwise return None"""
info = self._registry.get(self)
obj = info and info.weakref()
if obj is not None and self._registry.pop(self, None):
return (obj, info.func, info.args, info.kwargs or {})
def peek(self):
"""If alive then return (obj, func, args, kwargs);
otherwise return None"""
info = self._registry.get(self)
obj = info and info.weakref()
if obj is not None:
return (obj, info.func, info.args, info.kwargs or {})
@property
def alive(self):
"""Whether finalizer is alive"""
return self in self._registry
@property
def atexit(self):
"""Whether finalizer should be called at exit"""
info = self._registry.get(self)
return bool(info) and info.atexit
@atexit.setter
def atexit(self, value):
info = self._registry.get(self)
if info:
info.atexit = bool(value)
def __repr__(self):
info = self._registry.get(self)
obj = info and info.weakref()
if obj is None:
return '<%s object at %#x; dead>' % (type(self).__name__, id(self))
else:
return '<%s object at %#x; for %r at %#x>' % \
(type(self).__name__, id(self), type(obj).__name__, id(obj))
@classmethod
def _select_for_exit(cls):
# Return live finalizers marked for exit, oldest first
L = [(f,i) for (f,i) in cls._registry.items() if i.atexit]
L.sort(key=lambda item:item[1].index)
return [f for (f,i) in L]
@classmethod
def _exitfunc(cls):
# At shutdown invoke finalizers for which atexit is true.
# This is called once all other non-daemonic threads have been
# joined.
reenable_gc = False
try:
if cls._registry:
import gc
if gc.isenabled():
reenable_gc = True
gc.disable()
pending = None
while True:
if pending is None or finalize._dirty:
pending = cls._select_for_exit()
finalize._dirty = False
if not pending:
break
f = pending.pop()
try:
# gc is disabled, so (assuming no daemonic
# threads) the following is the only line in
# this function which might trigger creation
# of a new finalizer
f()
except Exception:
sys.excepthook(*sys.exc_info())
assert f not in cls._registry
finally:
# prevent any more finalizers from executing during shutdown
finalize._shutdown = True
if reenable_gc:
gc.enable()
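The `finalize` docstring above describes the API abstractly; a short usage sketch (assuming the bundled `py2backports` package is importable; on Python 3.4+ the standard library's `weakref.finalize` behaves the same):

```python
# Usage sketch for the finalize backport; weakref.finalize is equivalent on 3.4+.
from py2backports.weakref import finalize

class TempResource(object):
    """Placeholder object; finalize requires a weak-referenceable target."""
    pass

def cleanup(name):
    print("cleaning up", name)
    return name

res = TempResource()
fin = finalize(res, cleanup, "scratch-dir")  # register cleanup for res
print(fin.alive)          # True while res is alive and fin has not run
print(fin.peek())         # (res, cleanup, ('scratch-dir',), {})
print(fin())              # runs cleanup("scratch-dir") once, returns "scratch-dir"
print(fin.alive, fin())   # False None (a dead finalizer is a no-op)
```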