frida 16.2.1 → 16.2.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/BSDmakefile +6 -0
- package/Makefile +16 -0
- package/README.md +14 -11
- package/configure +18 -0
- package/configure.bat +22 -0
- package/dist/native.js +0 -8
- package/lib/application.ts +98 -0
- package/lib/authentication.ts +3 -0
- package/lib/build.py +50 -0
- package/lib/bus.ts +30 -0
- package/lib/cancellable.ts +33 -0
- package/lib/child.ts +15 -0
- package/lib/crash.ts +11 -0
- package/lib/device.ts +329 -0
- package/lib/device_manager.ts +69 -0
- package/lib/endpoint_parameters.ts +56 -0
- package/lib/icon.ts +15 -0
- package/lib/index.ts +311 -0
- package/lib/iostream.ts +78 -0
- package/lib/meson.build +53 -0
- package/lib/native.ts +9 -0
- package/lib/portal_membership.ts +10 -0
- package/lib/portal_service.ts +105 -0
- package/lib/process.ts +57 -0
- package/lib/relay.ts +44 -0
- package/lib/script.ts +352 -0
- package/lib/session.ts +113 -0
- package/lib/signals.ts +45 -0
- package/lib/socket_address.ts +35 -0
- package/lib/spawn.ts +4 -0
- package/lib/system_parameters.ts +78 -0
- package/make.bat +23 -0
- package/meson.build +160 -0
- package/meson.options +11 -0
- package/package.json +27 -6
- package/releng/deps.py +1133 -0
- package/releng/deps.toml +391 -0
- package/releng/devkit-assets/frida-core-example-unix.c +188 -0
- package/releng/devkit-assets/frida-core-example-windows.c +197 -0
- package/releng/devkit-assets/frida-core-example.sln +28 -0
- package/releng/devkit-assets/frida-core-example.vcxproj +157 -0
- package/releng/devkit-assets/frida-core-example.vcxproj.filters +27 -0
- package/releng/devkit-assets/frida-gum-example-unix.c +122 -0
- package/releng/devkit-assets/frida-gum-example-windows.c +132 -0
- package/releng/devkit-assets/frida-gum-example.sln +28 -0
- package/releng/devkit-assets/frida-gum-example.vcxproj +157 -0
- package/releng/devkit-assets/frida-gum-example.vcxproj.filters +27 -0
- package/releng/devkit-assets/frida-gumjs-example-unix.c +84 -0
- package/releng/devkit-assets/frida-gumjs-example-windows.c +91 -0
- package/releng/devkit-assets/frida-gumjs-example.sln +28 -0
- package/releng/devkit-assets/frida-gumjs-example.vcxproj +157 -0
- package/releng/devkit-assets/frida-gumjs-example.vcxproj.filters +27 -0
- package/releng/devkit.py +535 -0
- package/releng/env.py +420 -0
- package/releng/env_android.py +150 -0
- package/releng/env_apple.py +176 -0
- package/releng/env_generic.py +373 -0
- package/releng/frida_version.py +69 -0
- package/releng/machine_file.py +44 -0
- package/releng/machine_spec.py +290 -0
- package/releng/meson/meson.py +27 -0
- package/releng/meson/mesonbuild/__init__.py +0 -0
- package/releng/meson/mesonbuild/_pathlib.py +63 -0
- package/releng/meson/mesonbuild/_typing.py +69 -0
- package/releng/meson/mesonbuild/arglist.py +321 -0
- package/releng/meson/mesonbuild/ast/__init__.py +23 -0
- package/releng/meson/mesonbuild/ast/interpreter.py +441 -0
- package/releng/meson/mesonbuild/ast/introspection.py +374 -0
- package/releng/meson/mesonbuild/ast/postprocess.py +109 -0
- package/releng/meson/mesonbuild/ast/printer.py +620 -0
- package/releng/meson/mesonbuild/ast/visitor.py +161 -0
- package/releng/meson/mesonbuild/backend/__init__.py +0 -0
- package/releng/meson/mesonbuild/backend/backends.py +2047 -0
- package/releng/meson/mesonbuild/backend/ninjabackend.py +3808 -0
- package/releng/meson/mesonbuild/backend/nonebackend.py +26 -0
- package/releng/meson/mesonbuild/backend/vs2010backend.py +2078 -0
- package/releng/meson/mesonbuild/backend/vs2012backend.py +35 -0
- package/releng/meson/mesonbuild/backend/vs2013backend.py +34 -0
- package/releng/meson/mesonbuild/backend/vs2015backend.py +35 -0
- package/releng/meson/mesonbuild/backend/vs2017backend.py +59 -0
- package/releng/meson/mesonbuild/backend/vs2019backend.py +54 -0
- package/releng/meson/mesonbuild/backend/vs2022backend.py +54 -0
- package/releng/meson/mesonbuild/backend/xcodebackend.py +1781 -0
- package/releng/meson/mesonbuild/build.py +3249 -0
- package/releng/meson/mesonbuild/cargo/__init__.py +5 -0
- package/releng/meson/mesonbuild/cargo/builder.py +238 -0
- package/releng/meson/mesonbuild/cargo/cfg.py +274 -0
- package/releng/meson/mesonbuild/cargo/interpreter.py +733 -0
- package/releng/meson/mesonbuild/cargo/manifest.py +227 -0
- package/releng/meson/mesonbuild/cargo/version.py +95 -0
- package/releng/meson/mesonbuild/cmake/__init__.py +28 -0
- package/releng/meson/mesonbuild/cmake/common.py +331 -0
- package/releng/meson/mesonbuild/cmake/data/__init__.py +0 -0
- package/releng/meson/mesonbuild/cmake/data/preload.cmake +82 -0
- package/releng/meson/mesonbuild/cmake/executor.py +241 -0
- package/releng/meson/mesonbuild/cmake/fileapi.py +324 -0
- package/releng/meson/mesonbuild/cmake/generator.py +186 -0
- package/releng/meson/mesonbuild/cmake/interpreter.py +1267 -0
- package/releng/meson/mesonbuild/cmake/toolchain.py +248 -0
- package/releng/meson/mesonbuild/cmake/traceparser.py +814 -0
- package/releng/meson/mesonbuild/cmake/tracetargets.py +161 -0
- package/releng/meson/mesonbuild/compilers/__init__.py +86 -0
- package/releng/meson/mesonbuild/compilers/asm.py +307 -0
- package/releng/meson/mesonbuild/compilers/c.py +788 -0
- package/releng/meson/mesonbuild/compilers/c_function_attributes.py +143 -0
- package/releng/meson/mesonbuild/compilers/compilers.py +1388 -0
- package/releng/meson/mesonbuild/compilers/cpp.py +1035 -0
- package/releng/meson/mesonbuild/compilers/cs.py +136 -0
- package/releng/meson/mesonbuild/compilers/cuda.py +806 -0
- package/releng/meson/mesonbuild/compilers/cython.py +91 -0
- package/releng/meson/mesonbuild/compilers/d.py +861 -0
- package/releng/meson/mesonbuild/compilers/detect.py +1396 -0
- package/releng/meson/mesonbuild/compilers/fortran.py +523 -0
- package/releng/meson/mesonbuild/compilers/java.py +113 -0
- package/releng/meson/mesonbuild/compilers/mixins/__init__.py +0 -0
- package/releng/meson/mesonbuild/compilers/mixins/arm.py +167 -0
- package/releng/meson/mesonbuild/compilers/mixins/ccrx.py +113 -0
- package/releng/meson/mesonbuild/compilers/mixins/clang.py +170 -0
- package/releng/meson/mesonbuild/compilers/mixins/clike.py +1330 -0
- package/releng/meson/mesonbuild/compilers/mixins/compcert.py +117 -0
- package/releng/meson/mesonbuild/compilers/mixins/elbrus.py +93 -0
- package/releng/meson/mesonbuild/compilers/mixins/emscripten.py +89 -0
- package/releng/meson/mesonbuild/compilers/mixins/gnu.py +629 -0
- package/releng/meson/mesonbuild/compilers/mixins/intel.py +167 -0
- package/releng/meson/mesonbuild/compilers/mixins/islinker.py +120 -0
- package/releng/meson/mesonbuild/compilers/mixins/metrowerks.py +279 -0
- package/releng/meson/mesonbuild/compilers/mixins/pgi.py +88 -0
- package/releng/meson/mesonbuild/compilers/mixins/ti.py +130 -0
- package/releng/meson/mesonbuild/compilers/mixins/visualstudio.py +458 -0
- package/releng/meson/mesonbuild/compilers/mixins/xc16.py +111 -0
- package/releng/meson/mesonbuild/compilers/objc.py +120 -0
- package/releng/meson/mesonbuild/compilers/objcpp.py +102 -0
- package/releng/meson/mesonbuild/compilers/rust.py +230 -0
- package/releng/meson/mesonbuild/compilers/swift.py +131 -0
- package/releng/meson/mesonbuild/compilers/vala.py +121 -0
- package/releng/meson/mesonbuild/coredata.py +1532 -0
- package/releng/meson/mesonbuild/dependencies/__init__.py +252 -0
- package/releng/meson/mesonbuild/dependencies/base.py +663 -0
- package/releng/meson/mesonbuild/dependencies/boost.py +1083 -0
- package/releng/meson/mesonbuild/dependencies/cmake.py +656 -0
- package/releng/meson/mesonbuild/dependencies/coarrays.py +80 -0
- package/releng/meson/mesonbuild/dependencies/configtool.py +163 -0
- package/releng/meson/mesonbuild/dependencies/cuda.py +295 -0
- package/releng/meson/mesonbuild/dependencies/data/CMakeLists.txt +102 -0
- package/releng/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt +204 -0
- package/releng/meson/mesonbuild/dependencies/data/CMakePathInfo.txt +31 -0
- package/releng/meson/mesonbuild/dependencies/data/__init__.py +0 -0
- package/releng/meson/mesonbuild/dependencies/detect.py +225 -0
- package/releng/meson/mesonbuild/dependencies/dev.py +707 -0
- package/releng/meson/mesonbuild/dependencies/dub.py +424 -0
- package/releng/meson/mesonbuild/dependencies/factory.py +146 -0
- package/releng/meson/mesonbuild/dependencies/framework.py +111 -0
- package/releng/meson/mesonbuild/dependencies/hdf5.py +168 -0
- package/releng/meson/mesonbuild/dependencies/misc.py +618 -0
- package/releng/meson/mesonbuild/dependencies/mpi.py +231 -0
- package/releng/meson/mesonbuild/dependencies/pkgconfig.py +570 -0
- package/releng/meson/mesonbuild/dependencies/platform.py +52 -0
- package/releng/meson/mesonbuild/dependencies/python.py +431 -0
- package/releng/meson/mesonbuild/dependencies/qt.py +484 -0
- package/releng/meson/mesonbuild/dependencies/scalapack.py +142 -0
- package/releng/meson/mesonbuild/dependencies/ui.py +281 -0
- package/releng/meson/mesonbuild/depfile.py +82 -0
- package/releng/meson/mesonbuild/envconfig.py +480 -0
- package/releng/meson/mesonbuild/environment.py +987 -0
- package/releng/meson/mesonbuild/interpreter/__init__.py +47 -0
- package/releng/meson/mesonbuild/interpreter/compiler.py +900 -0
- package/releng/meson/mesonbuild/interpreter/dependencyfallbacks.py +386 -0
- package/releng/meson/mesonbuild/interpreter/interpreter.py +3595 -0
- package/releng/meson/mesonbuild/interpreter/interpreterobjects.py +1096 -0
- package/releng/meson/mesonbuild/interpreter/kwargs.py +479 -0
- package/releng/meson/mesonbuild/interpreter/mesonmain.py +487 -0
- package/releng/meson/mesonbuild/interpreter/primitives/__init__.py +29 -0
- package/releng/meson/mesonbuild/interpreter/primitives/array.py +108 -0
- package/releng/meson/mesonbuild/interpreter/primitives/boolean.py +52 -0
- package/releng/meson/mesonbuild/interpreter/primitives/dict.py +88 -0
- package/releng/meson/mesonbuild/interpreter/primitives/integer.py +86 -0
- package/releng/meson/mesonbuild/interpreter/primitives/range.py +38 -0
- package/releng/meson/mesonbuild/interpreter/primitives/string.py +247 -0
- package/releng/meson/mesonbuild/interpreter/type_checking.py +853 -0
- package/releng/meson/mesonbuild/interpreterbase/__init__.py +126 -0
- package/releng/meson/mesonbuild/interpreterbase/_unholder.py +25 -0
- package/releng/meson/mesonbuild/interpreterbase/baseobjects.py +174 -0
- package/releng/meson/mesonbuild/interpreterbase/decorators.py +806 -0
- package/releng/meson/mesonbuild/interpreterbase/disabler.py +35 -0
- package/releng/meson/mesonbuild/interpreterbase/exceptions.py +22 -0
- package/releng/meson/mesonbuild/interpreterbase/helpers.py +67 -0
- package/releng/meson/mesonbuild/interpreterbase/interpreterbase.py +665 -0
- package/releng/meson/mesonbuild/interpreterbase/operator.py +32 -0
- package/releng/meson/mesonbuild/linkers/__init__.py +20 -0
- package/releng/meson/mesonbuild/linkers/base.py +39 -0
- package/releng/meson/mesonbuild/linkers/detect.py +229 -0
- package/releng/meson/mesonbuild/linkers/linkers.py +1614 -0
- package/releng/meson/mesonbuild/mcompile.py +380 -0
- package/releng/meson/mesonbuild/mconf.py +368 -0
- package/releng/meson/mesonbuild/mdevenv.py +234 -0
- package/releng/meson/mesonbuild/mdist.py +376 -0
- package/releng/meson/mesonbuild/mesondata.py +38 -0
- package/releng/meson/mesonbuild/mesonlib.py +23 -0
- package/releng/meson/mesonbuild/mesonmain.py +289 -0
- package/releng/meson/mesonbuild/minit.py +204 -0
- package/releng/meson/mesonbuild/minstall.py +864 -0
- package/releng/meson/mesonbuild/mintro.py +667 -0
- package/releng/meson/mesonbuild/mlog.py +542 -0
- package/releng/meson/mesonbuild/modules/__init__.py +270 -0
- package/releng/meson/mesonbuild/modules/cmake.py +442 -0
- package/releng/meson/mesonbuild/modules/cuda.py +377 -0
- package/releng/meson/mesonbuild/modules/dlang.py +117 -0
- package/releng/meson/mesonbuild/modules/external_project.py +306 -0
- package/releng/meson/mesonbuild/modules/fs.py +323 -0
- package/releng/meson/mesonbuild/modules/gnome.py +2215 -0
- package/releng/meson/mesonbuild/modules/hotdoc.py +487 -0
- package/releng/meson/mesonbuild/modules/i18n.py +405 -0
- package/releng/meson/mesonbuild/modules/icestorm.py +123 -0
- package/releng/meson/mesonbuild/modules/java.py +112 -0
- package/releng/meson/mesonbuild/modules/keyval.py +65 -0
- package/releng/meson/mesonbuild/modules/modtest.py +33 -0
- package/releng/meson/mesonbuild/modules/pkgconfig.py +744 -0
- package/releng/meson/mesonbuild/modules/python.py +556 -0
- package/releng/meson/mesonbuild/modules/python3.py +85 -0
- package/releng/meson/mesonbuild/modules/qt.py +621 -0
- package/releng/meson/mesonbuild/modules/qt4.py +23 -0
- package/releng/meson/mesonbuild/modules/qt5.py +23 -0
- package/releng/meson/mesonbuild/modules/qt6.py +22 -0
- package/releng/meson/mesonbuild/modules/rust.py +355 -0
- package/releng/meson/mesonbuild/modules/simd.py +114 -0
- package/releng/meson/mesonbuild/modules/sourceset.py +291 -0
- package/releng/meson/mesonbuild/modules/wayland.py +151 -0
- package/releng/meson/mesonbuild/modules/windows.py +207 -0
- package/releng/meson/mesonbuild/mparser.py +1114 -0
- package/releng/meson/mesonbuild/msetup.py +365 -0
- package/releng/meson/mesonbuild/msubprojects.py +764 -0
- package/releng/meson/mesonbuild/mtest.py +2201 -0
- package/releng/meson/mesonbuild/munstable_coredata.py +107 -0
- package/releng/meson/mesonbuild/optinterpreter.py +276 -0
- package/releng/meson/mesonbuild/programs.py +367 -0
- package/releng/meson/mesonbuild/rewriter.py +1075 -0
- package/releng/meson/mesonbuild/scripts/__init__.py +10 -0
- package/releng/meson/mesonbuild/scripts/clangformat.py +55 -0
- package/releng/meson/mesonbuild/scripts/clangtidy.py +30 -0
- package/releng/meson/mesonbuild/scripts/cleantrees.py +35 -0
- package/releng/meson/mesonbuild/scripts/cmake_run_ctgt.py +103 -0
- package/releng/meson/mesonbuild/scripts/cmd_or_ps.ps1 +17 -0
- package/releng/meson/mesonbuild/scripts/copy.py +19 -0
- package/releng/meson/mesonbuild/scripts/coverage.py +214 -0
- package/releng/meson/mesonbuild/scripts/delwithsuffix.py +27 -0
- package/releng/meson/mesonbuild/scripts/depfixer.py +495 -0
- package/releng/meson/mesonbuild/scripts/depscan.py +198 -0
- package/releng/meson/mesonbuild/scripts/dirchanger.py +20 -0
- package/releng/meson/mesonbuild/scripts/env2mfile.py +402 -0
- package/releng/meson/mesonbuild/scripts/externalproject.py +106 -0
- package/releng/meson/mesonbuild/scripts/gettext.py +86 -0
- package/releng/meson/mesonbuild/scripts/gtkdochelper.py +286 -0
- package/releng/meson/mesonbuild/scripts/hotdochelper.py +40 -0
- package/releng/meson/mesonbuild/scripts/itstool.py +77 -0
- package/releng/meson/mesonbuild/scripts/meson_exe.py +115 -0
- package/releng/meson/mesonbuild/scripts/msgfmthelper.py +29 -0
- package/releng/meson/mesonbuild/scripts/pycompile.py +54 -0
- package/releng/meson/mesonbuild/scripts/python_info.py +121 -0
- package/releng/meson/mesonbuild/scripts/regen_checker.py +55 -0
- package/releng/meson/mesonbuild/scripts/run_tool.py +58 -0
- package/releng/meson/mesonbuild/scripts/scanbuild.py +57 -0
- package/releng/meson/mesonbuild/scripts/symbolextractor.py +322 -0
- package/releng/meson/mesonbuild/scripts/tags.py +44 -0
- package/releng/meson/mesonbuild/scripts/test_loaded_modules.py +14 -0
- package/releng/meson/mesonbuild/scripts/uninstall.py +41 -0
- package/releng/meson/mesonbuild/scripts/vcstagger.py +35 -0
- package/releng/meson/mesonbuild/scripts/yasm.py +24 -0
- package/releng/meson/mesonbuild/templates/__init__.py +0 -0
- package/releng/meson/mesonbuild/templates/cpptemplates.py +143 -0
- package/releng/meson/mesonbuild/templates/cstemplates.py +90 -0
- package/releng/meson/mesonbuild/templates/ctemplates.py +126 -0
- package/releng/meson/mesonbuild/templates/cudatemplates.py +143 -0
- package/releng/meson/mesonbuild/templates/dlangtemplates.py +109 -0
- package/releng/meson/mesonbuild/templates/fortrantemplates.py +101 -0
- package/releng/meson/mesonbuild/templates/javatemplates.py +94 -0
- package/releng/meson/mesonbuild/templates/mesontemplates.py +70 -0
- package/releng/meson/mesonbuild/templates/objcpptemplates.py +126 -0
- package/releng/meson/mesonbuild/templates/objctemplates.py +126 -0
- package/releng/meson/mesonbuild/templates/rusttemplates.py +79 -0
- package/releng/meson/mesonbuild/templates/samplefactory.py +41 -0
- package/releng/meson/mesonbuild/templates/sampleimpl.py +160 -0
- package/releng/meson/mesonbuild/templates/valatemplates.py +82 -0
- package/releng/meson/mesonbuild/utils/__init__.py +0 -0
- package/releng/meson/mesonbuild/utils/core.py +166 -0
- package/releng/meson/mesonbuild/utils/platform.py +27 -0
- package/releng/meson/mesonbuild/utils/posix.py +32 -0
- package/releng/meson/mesonbuild/utils/universal.py +2445 -0
- package/releng/meson/mesonbuild/utils/vsenv.py +126 -0
- package/releng/meson/mesonbuild/utils/win32.py +29 -0
- package/releng/meson/mesonbuild/wrap/__init__.py +59 -0
- package/releng/meson/mesonbuild/wrap/wrap.py +846 -0
- package/releng/meson/mesonbuild/wrap/wraptool.py +198 -0
- package/releng/meson-scripts/BSDmakefile +6 -0
- package/releng/meson-scripts/Makefile +16 -0
- package/releng/meson-scripts/configure +18 -0
- package/releng/meson-scripts/configure.bat +22 -0
- package/releng/meson-scripts/make.bat +23 -0
- package/releng/meson_configure.py +506 -0
- package/releng/meson_make.py +131 -0
- package/releng/mkdevkit.py +107 -0
- package/releng/mkfatmacho.py +54 -0
- package/releng/post-process-oabi.py +97 -0
- package/releng/progress.py +14 -0
- package/releng/sync-from-upstream.py +185 -0
- package/releng/tomlkit/tomlkit/__init__.py +59 -0
- package/releng/tomlkit/tomlkit/_compat.py +22 -0
- package/releng/tomlkit/tomlkit/_types.py +83 -0
- package/releng/tomlkit/tomlkit/_utils.py +158 -0
- package/releng/tomlkit/tomlkit/api.py +308 -0
- package/releng/tomlkit/tomlkit/container.py +875 -0
- package/releng/tomlkit/tomlkit/exceptions.py +227 -0
- package/releng/tomlkit/tomlkit/items.py +1967 -0
- package/releng/tomlkit/tomlkit/parser.py +1141 -0
- package/releng/tomlkit/tomlkit/py.typed +0 -0
- package/releng/tomlkit/tomlkit/source.py +180 -0
- package/releng/tomlkit/tomlkit/toml_char.py +52 -0
- package/releng/tomlkit/tomlkit/toml_document.py +7 -0
- package/releng/tomlkit/tomlkit/toml_file.py +58 -0
- package/releng/winenv.py +140 -0
- package/scripts/adjust-version.py +19 -0
- package/scripts/detect-version.py +40 -0
- package/scripts/fetch-abi-bits.py +343 -0
- package/scripts/install.js +23 -0
- package/scripts/package.py +15 -0
- package/src/addon.cc +76 -0
- package/src/application.cc +148 -0
- package/src/application.h +31 -0
- package/src/authentication.cc +174 -0
- package/src/authentication.h +24 -0
- package/src/bus.cc +167 -0
- package/src/bus.h +33 -0
- package/src/cancellable.cc +117 -0
- package/src/cancellable.h +31 -0
- package/src/child.cc +150 -0
- package/src/child.h +32 -0
- package/src/crash.cc +122 -0
- package/src/crash.h +30 -0
- package/src/device.cc +1302 -0
- package/src/device.h +55 -0
- package/src/device_manager.cc +362 -0
- package/src/device_manager.h +35 -0
- package/src/endpoint_parameters.cc +171 -0
- package/src/endpoint_parameters.h +28 -0
- package/src/glib_context.cc +62 -0
- package/src/glib_context.h +29 -0
- package/src/glib_object.cc +25 -0
- package/src/glib_object.h +37 -0
- package/src/iostream.cc +247 -0
- package/src/iostream.h +30 -0
- package/src/meson.build +26 -0
- package/src/operation.h +94 -0
- package/src/portal_membership.cc +100 -0
- package/src/portal_membership.h +26 -0
- package/src/portal_service.cc +401 -0
- package/src/portal_service.h +40 -0
- package/src/process.cc +135 -0
- package/src/process.h +30 -0
- package/src/relay.cc +139 -0
- package/src/relay.h +31 -0
- package/src/runtime.cc +443 -0
- package/src/runtime.h +64 -0
- package/src/script.cc +301 -0
- package/src/script.h +36 -0
- package/src/session.cc +860 -0
- package/src/session.h +42 -0
- package/src/signals.cc +334 -0
- package/src/signals.h +47 -0
- package/src/spawn.cc +95 -0
- package/src/spawn.h +27 -0
- package/src/usage_monitor.h +117 -0
- package/src/uv_context.cc +118 -0
- package/src/uv_context.h +40 -0
- package/src/win_delay_load_hook.cc +63 -0
- package/subprojects/frida-core.wrap +8 -0
- package/subprojects/nan.wrap +9 -0
- package/subprojects/packagefiles/nan.patch +13 -0
- package/test/data/index.ts +13 -0
- package/test/data/unixvictim-linux-x86 +0 -0
- package/test/data/unixvictim-linux-x86_64 +0 -0
- package/test/data/unixvictim-macos +0 -0
- package/test/device.ts +27 -0
- package/test/device_manager.ts +16 -0
- package/test/labrat.ts +32 -0
- package/test/script.ts +176 -0
- package/test/session.ts +73 -0
- package/tsconfig.json +18 -0
package/releng/deps.py
ADDED
|
@@ -0,0 +1,1133 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
import argparse
|
|
4
|
+
import base64
|
|
5
|
+
from configparser import ConfigParser
|
|
6
|
+
import dataclasses
|
|
7
|
+
from dataclasses import dataclass, field
|
|
8
|
+
from enum import Enum
|
|
9
|
+
import graphlib
|
|
10
|
+
import itertools
|
|
11
|
+
import json
|
|
12
|
+
import os
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
import re
|
|
15
|
+
import shlex
|
|
16
|
+
import shutil
|
|
17
|
+
import subprocess
|
|
18
|
+
import sys
|
|
19
|
+
import tarfile
|
|
20
|
+
import tempfile
|
|
21
|
+
import time
|
|
22
|
+
from typing import Callable, Iterator, Optional, Mapping, Sequence, Union
|
|
23
|
+
import urllib.request
|
|
24
|
+
|
|
25
|
+
RELENG_DIR = Path(__file__).resolve().parent
|
|
26
|
+
ROOT_DIR = RELENG_DIR.parent
|
|
27
|
+
|
|
28
|
+
if __name__ == "__main__":
|
|
29
|
+
# TODO: Refactor
|
|
30
|
+
sys.path.insert(0, str(ROOT_DIR))
|
|
31
|
+
sys.path.insert(0, str(RELENG_DIR / "tomlkit"))
|
|
32
|
+
|
|
33
|
+
from tomlkit.toml_file import TOMLFile
|
|
34
|
+
|
|
35
|
+
from releng import env
|
|
36
|
+
from releng.progress import Progress, ProgressCallback, print_progress
|
|
37
|
+
from releng.machine_spec import MachineSpec
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def main():
|
|
41
|
+
parser = argparse.ArgumentParser()
|
|
42
|
+
subparsers = parser.add_subparsers()
|
|
43
|
+
|
|
44
|
+
default_machine = MachineSpec.make_from_local_system().identifier
|
|
45
|
+
|
|
46
|
+
bundle_opt_kwargs = {
|
|
47
|
+
"help": "bundle (default: sdk)",
|
|
48
|
+
"type": parse_bundle_option_value,
|
|
49
|
+
}
|
|
50
|
+
machine_opt_kwargs = {
|
|
51
|
+
"help": f"os/arch (default: {default_machine})",
|
|
52
|
+
"type": MachineSpec.parse,
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
command = subparsers.add_parser("sync", help="ensure prebuilt dependencies are up-to-date")
|
|
56
|
+
command.add_argument("bundle", **bundle_opt_kwargs)
|
|
57
|
+
command.add_argument("host", **machine_opt_kwargs)
|
|
58
|
+
command.add_argument("location", help="filesystem location", type=Path)
|
|
59
|
+
command.set_defaults(func=lambda args: sync(args.bundle, args.host, args.location.resolve()))
|
|
60
|
+
|
|
61
|
+
command = subparsers.add_parser("roll", help="build and upload prebuilt dependencies if needed")
|
|
62
|
+
command.add_argument("bundle", **bundle_opt_kwargs)
|
|
63
|
+
command.add_argument("host", **machine_opt_kwargs)
|
|
64
|
+
command.add_argument("--build", default=default_machine, **machine_opt_kwargs)
|
|
65
|
+
command.add_argument("--activate", default=False, action='store_true')
|
|
66
|
+
command.add_argument("--post", help="post-processing script")
|
|
67
|
+
command.set_defaults(func=lambda args: roll(args.bundle, args.build, args.host, args.activate,
|
|
68
|
+
Path(args.post) if args.post is not None else None))
|
|
69
|
+
|
|
70
|
+
command = subparsers.add_parser("build", help="build prebuilt dependencies")
|
|
71
|
+
command.add_argument("--bundle", default=Bundle.SDK, **bundle_opt_kwargs)
|
|
72
|
+
command.add_argument("--build", default=default_machine, **machine_opt_kwargs)
|
|
73
|
+
command.add_argument("--host", default=default_machine, **machine_opt_kwargs)
|
|
74
|
+
command.add_argument("--only", help="only build packages A, B, and C", metavar="A,B,C",
|
|
75
|
+
type=parse_set_option_value)
|
|
76
|
+
command.add_argument("--exclude", help="exclude packages A, B, and C", metavar="A,B,C",
|
|
77
|
+
type=parse_set_option_value, default=set())
|
|
78
|
+
command.add_argument("-v", "--verbose", help="be verbose", action="store_true")
|
|
79
|
+
command.set_defaults(func=lambda args: build(args.bundle, args.build, args.host,
|
|
80
|
+
args.only, args.exclude, args.verbose))
|
|
81
|
+
|
|
82
|
+
command = subparsers.add_parser("wait", help="wait for prebuilt dependencies if needed")
|
|
83
|
+
command.add_argument("bundle", **bundle_opt_kwargs)
|
|
84
|
+
command.add_argument("host", **machine_opt_kwargs)
|
|
85
|
+
command.set_defaults(func=lambda args: wait(args.bundle, args.host))
|
|
86
|
+
|
|
87
|
+
command = subparsers.add_parser("bump", help="bump dependency versions")
|
|
88
|
+
command.set_defaults(func=lambda args: bump())
|
|
89
|
+
|
|
90
|
+
args = parser.parse_args()
|
|
91
|
+
if 'func' in args:
|
|
92
|
+
try:
|
|
93
|
+
args.func(args)
|
|
94
|
+
except CommandError as e:
|
|
95
|
+
print(e, file=sys.stderr)
|
|
96
|
+
sys.exit(1)
|
|
97
|
+
else:
|
|
98
|
+
parser.print_usage(file=sys.stderr)
|
|
99
|
+
sys.exit(1)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def parse_bundle_option_value(raw_bundle: str) -> Bundle:
|
|
103
|
+
try:
|
|
104
|
+
return Bundle[raw_bundle.upper()]
|
|
105
|
+
except KeyError:
|
|
106
|
+
choices = "', '".join([e.name.lower() for e in Bundle])
|
|
107
|
+
raise argparse.ArgumentTypeError(f"invalid choice: {raw_bundle} (choose from '{choices}')")
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def parse_set_option_value(v: str) -> set[str]:
|
|
111
|
+
return set([v.strip() for v in v.split(",")])
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def query_toolchain_prefix(machine: MachineSpec,
|
|
115
|
+
cache_dir: Path) -> Path:
|
|
116
|
+
identifier = "windows-x86" if machine.os == "windows" and machine.arch in {"x86", "x86_64"} \
|
|
117
|
+
else machine.identifier
|
|
118
|
+
return cache_dir / f"toolchain-{identifier}"
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def ensure_toolchain(machine: MachineSpec,
|
|
122
|
+
cache_dir: Path,
|
|
123
|
+
version: Optional[str] = None,
|
|
124
|
+
on_progress: ProgressCallback = print_progress) -> tuple[Path, SourceState]:
|
|
125
|
+
toolchain_prefix = query_toolchain_prefix(machine, cache_dir)
|
|
126
|
+
state = sync(Bundle.TOOLCHAIN, machine, toolchain_prefix, version, on_progress)
|
|
127
|
+
return (toolchain_prefix, state)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def query_sdk_prefix(machine: MachineSpec,
|
|
131
|
+
cache_dir: Path) -> Path:
|
|
132
|
+
return cache_dir / f"sdk-{machine.identifier}"
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def ensure_sdk(machine: MachineSpec,
|
|
136
|
+
cache_dir: Path,
|
|
137
|
+
version: Optional[str] = None,
|
|
138
|
+
on_progress: ProgressCallback = print_progress) -> tuple[Path, SourceState]:
|
|
139
|
+
sdk_prefix = query_sdk_prefix(machine, cache_dir)
|
|
140
|
+
state = sync(Bundle.SDK, machine, sdk_prefix, version, on_progress)
|
|
141
|
+
return (sdk_prefix, state)
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def detect_cache_dir(sourcedir: Path) -> Path:
|
|
145
|
+
raw_location = os.environ.get("FRIDA_DEPS", None)
|
|
146
|
+
if raw_location is not None:
|
|
147
|
+
location = Path(raw_location)
|
|
148
|
+
else:
|
|
149
|
+
location = sourcedir / "deps"
|
|
150
|
+
return location
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def sync(bundle: Bundle,
|
|
154
|
+
machine: MachineSpec,
|
|
155
|
+
location: Path,
|
|
156
|
+
version: Optional[str] = None,
|
|
157
|
+
on_progress: ProgressCallback = print_progress) -> SourceState:
|
|
158
|
+
state = SourceState.PRISTINE
|
|
159
|
+
|
|
160
|
+
if version is None:
|
|
161
|
+
version = load_dependency_parameters().deps_version
|
|
162
|
+
|
|
163
|
+
bundle_nick = bundle.name.lower() if bundle != Bundle.SDK else bundle.name
|
|
164
|
+
|
|
165
|
+
if location.exists():
|
|
166
|
+
try:
|
|
167
|
+
cached_version = (location / "VERSION.txt").read_text(encoding="utf-8").strip()
|
|
168
|
+
if cached_version == version:
|
|
169
|
+
return state
|
|
170
|
+
except:
|
|
171
|
+
pass
|
|
172
|
+
shutil.rmtree(location)
|
|
173
|
+
state = SourceState.MODIFIED
|
|
174
|
+
|
|
175
|
+
(url, filename) = compute_bundle_parameters(bundle, machine, version)
|
|
176
|
+
|
|
177
|
+
local_bundle = location.parent / filename
|
|
178
|
+
if local_bundle.exists():
|
|
179
|
+
on_progress(Progress("Deploying local {}".format(bundle_nick)))
|
|
180
|
+
archive_path = local_bundle
|
|
181
|
+
archive_is_temporary = False
|
|
182
|
+
else:
|
|
183
|
+
if bundle == Bundle.SDK:
|
|
184
|
+
on_progress(Progress(f"Downloading SDK {version} for {machine.identifier}"))
|
|
185
|
+
else:
|
|
186
|
+
on_progress(Progress(f"Downloading {bundle_nick} {version}"))
|
|
187
|
+
try:
|
|
188
|
+
with urllib.request.urlopen(url) as response, \
|
|
189
|
+
tempfile.NamedTemporaryFile(delete=False) as archive:
|
|
190
|
+
shutil.copyfileobj(response, archive)
|
|
191
|
+
archive_path = Path(archive.name)
|
|
192
|
+
archive_is_temporary = True
|
|
193
|
+
on_progress(Progress(f"Extracting {bundle_nick}"))
|
|
194
|
+
except urllib.error.HTTPError as e:
|
|
195
|
+
if e.code == 404:
|
|
196
|
+
raise BundleNotFoundError(f"missing bundle at {url}") from e
|
|
197
|
+
raise e
|
|
198
|
+
|
|
199
|
+
try:
|
|
200
|
+
staging_dir = location.parent / f"_{location.name}"
|
|
201
|
+
if staging_dir.exists():
|
|
202
|
+
shutil.rmtree(staging_dir)
|
|
203
|
+
staging_dir.mkdir(parents=True)
|
|
204
|
+
|
|
205
|
+
with tarfile.open(archive_path, "r:xz") as tar:
|
|
206
|
+
tar.extractall(staging_dir)
|
|
207
|
+
|
|
208
|
+
suffix_len = len(".frida.in")
|
|
209
|
+
raw_location = location.as_posix()
|
|
210
|
+
for f in staging_dir.rglob("*.frida.in"):
|
|
211
|
+
target = f.parent / f.name[:-suffix_len]
|
|
212
|
+
f.write_text(f.read_text(encoding="utf-8").replace("@FRIDA_TOOLROOT@", raw_location),
|
|
213
|
+
encoding="utf-8")
|
|
214
|
+
f.rename(target)
|
|
215
|
+
|
|
216
|
+
staging_dir.rename(location)
|
|
217
|
+
finally:
|
|
218
|
+
if archive_is_temporary:
|
|
219
|
+
archive_path.unlink()
|
|
220
|
+
|
|
221
|
+
return state
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def roll(bundle: Bundle,
|
|
225
|
+
build_machine: MachineSpec,
|
|
226
|
+
host_machine: MachineSpec,
|
|
227
|
+
activate: bool,
|
|
228
|
+
post: Optional[Path]):
|
|
229
|
+
params = load_dependency_parameters()
|
|
230
|
+
version = params.deps_version
|
|
231
|
+
|
|
232
|
+
if activate and bundle == Bundle.SDK:
|
|
233
|
+
configure_bootstrap_version(version)
|
|
234
|
+
|
|
235
|
+
(public_url, filename) = compute_bundle_parameters(bundle, host_machine, version)
|
|
236
|
+
|
|
237
|
+
# First do a quick check to avoid hitting S3 in most cases.
|
|
238
|
+
request = urllib.request.Request(public_url)
|
|
239
|
+
request.get_method = lambda: "HEAD"
|
|
240
|
+
try:
|
|
241
|
+
with urllib.request.urlopen(request) as r:
|
|
242
|
+
return
|
|
243
|
+
except urllib.request.HTTPError as e:
|
|
244
|
+
if e.code != 404:
|
|
245
|
+
raise CommandError("network error") from e
|
|
246
|
+
|
|
247
|
+
s3_url = "s3://build.frida.re/deps/{version}/{filename}".format(version=version, filename=filename)
|
|
248
|
+
|
|
249
|
+
# We will most likely need to build, but let's check S3 to be certain.
|
|
250
|
+
r = subprocess.run(["aws", "s3", "ls", s3_url], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding="utf-8")
|
|
251
|
+
if r.returncode == 0:
|
|
252
|
+
return
|
|
253
|
+
if r.returncode != 1:
|
|
254
|
+
raise CommandError(f"unable to access S3: {r.stdout.strip()}")
|
|
255
|
+
|
|
256
|
+
artifact = build(bundle, build_machine, host_machine)
|
|
257
|
+
|
|
258
|
+
if post is not None:
|
|
259
|
+
post_script = RELENG_DIR / post
|
|
260
|
+
if not post_script.exists():
|
|
261
|
+
raise CommandError("post-processing script not found")
|
|
262
|
+
|
|
263
|
+
subprocess.run([
|
|
264
|
+
sys.executable, post_script,
|
|
265
|
+
"--bundle=" + bundle.name.lower(),
|
|
266
|
+
"--host=" + host_machine.identifier,
|
|
267
|
+
"--artifact=" + str(artifact),
|
|
268
|
+
"--version=" + version,
|
|
269
|
+
],
|
|
270
|
+
check=True)
|
|
271
|
+
|
|
272
|
+
subprocess.run(["aws", "s3", "cp", artifact, s3_url], check=True)
|
|
273
|
+
|
|
274
|
+
# Use the shell for Windows compatibility, where npm generates a .bat script.
|
|
275
|
+
subprocess.run("cfcli purge " + public_url, shell=True, check=True)
|
|
276
|
+
|
|
277
|
+
if activate and bundle == Bundle.TOOLCHAIN:
|
|
278
|
+
configure_bootstrap_version(version)
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def build(bundle: Bundle,
|
|
282
|
+
build_machine: MachineSpec,
|
|
283
|
+
host_machine: MachineSpec,
|
|
284
|
+
only_packages: Optional[set[str]] = None,
|
|
285
|
+
excluded_packages: set[str] = set(),
|
|
286
|
+
verbose: bool = False) -> Path:
|
|
287
|
+
builder = Builder(bundle, build_machine, host_machine, verbose)
|
|
288
|
+
try:
|
|
289
|
+
return builder.build(only_packages, excluded_packages)
|
|
290
|
+
except subprocess.CalledProcessError as e:
|
|
291
|
+
print(e, file=sys.stderr)
|
|
292
|
+
if e.stdout is not None:
|
|
293
|
+
print("\n=== stdout ===\n" + e.stdout, file=sys.stderr)
|
|
294
|
+
if e.stderr is not None:
|
|
295
|
+
print("\n=== stderr ===\n" + e.stderr, file=sys.stderr)
|
|
296
|
+
sys.exit(1)
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
class Builder:
|
|
300
|
+
def __init__(self,
|
|
301
|
+
bundle: Bundle,
|
|
302
|
+
build_machine: MachineSpec,
|
|
303
|
+
host_machine: MachineSpec,
|
|
304
|
+
verbose: bool):
|
|
305
|
+
self._bundle = bundle
|
|
306
|
+
self._host_machine = host_machine.default_missing()
|
|
307
|
+
self._build_machine = build_machine.default_missing().maybe_adapt_to_host(self._host_machine)
|
|
308
|
+
self._verbose = verbose
|
|
309
|
+
self._default_library = "static"
|
|
310
|
+
|
|
311
|
+
self._params = load_dependency_parameters()
|
|
312
|
+
self._cachedir = detect_cache_dir(ROOT_DIR)
|
|
313
|
+
self._workdir = self._cachedir / "src"
|
|
314
|
+
|
|
315
|
+
self._toolchain_prefix: Optional[Path] = None
|
|
316
|
+
self._build_config: Optional[env.MachineConfig] = None
|
|
317
|
+
self._host_config: Optional[env.MachineConfig] = None
|
|
318
|
+
self._build_env: dict[str, str] = {}
|
|
319
|
+
self._host_env: dict[str, str] = {}
|
|
320
|
+
|
|
321
|
+
self._ansi_supported = os.environ.get("TERM") != "dumb" \
|
|
322
|
+
and (self._build_machine.os != "windows" or "WT_SESSION" in os.environ)
|
|
323
|
+
|
|
324
|
+
def build(self,
|
|
325
|
+
only_packages: Optional[list[str]],
|
|
326
|
+
excluded_packages: set[str]) -> Path:
|
|
327
|
+
started_at = time.time()
|
|
328
|
+
prepare_ended_at = None
|
|
329
|
+
clone_time_elapsed = None
|
|
330
|
+
build_time_elapsed = None
|
|
331
|
+
build_ended_at = None
|
|
332
|
+
packaging_ended_at = None
|
|
333
|
+
try:
|
|
334
|
+
all_packages = {i: self._resolve_package(p) for i, p in self._params.packages.items() \
|
|
335
|
+
if self._can_build(p)}
|
|
336
|
+
if only_packages is not None:
|
|
337
|
+
toplevel_packages = [all_packages[identifier] for identifier in only_packages]
|
|
338
|
+
selected_packages = self._resolve_dependencies(toplevel_packages, all_packages)
|
|
339
|
+
elif self._bundle is Bundle.TOOLCHAIN:
|
|
340
|
+
toplevel_packages = [p for p in all_packages.values() if p.scope == "toolchain"]
|
|
341
|
+
selected_packages = self._resolve_dependencies(toplevel_packages, all_packages)
|
|
342
|
+
else:
|
|
343
|
+
selected_packages = {i: p for i, p, in all_packages.items() if p.scope is None}
|
|
344
|
+
selected_packages = {i: p for i, p in selected_packages.items() if i not in excluded_packages}
|
|
345
|
+
|
|
346
|
+
packages = [selected_packages[i] for i in iterate_package_ids_in_dependency_order(selected_packages.values())]
|
|
347
|
+
all_deps = itertools.chain.from_iterable([pkg.dependencies for pkg in packages])
|
|
348
|
+
deps_for_build_machine = {dep.identifier for dep in all_deps if dep.for_machine == "build"}
|
|
349
|
+
|
|
350
|
+
self._prepare()
|
|
351
|
+
prepare_ended_at = time.time()
|
|
352
|
+
|
|
353
|
+
clone_time_elapsed = 0
|
|
354
|
+
build_time_elapsed = 0
|
|
355
|
+
for pkg in packages:
|
|
356
|
+
self._print_package_banner(pkg)
|
|
357
|
+
|
|
358
|
+
t1 = time.time()
|
|
359
|
+
self._clone_repo_if_needed(pkg)
|
|
360
|
+
t2 = time.time()
|
|
361
|
+
clone_time_elapsed += t2 - t1
|
|
362
|
+
|
|
363
|
+
machines = [self._host_machine]
|
|
364
|
+
if pkg.identifier in deps_for_build_machine:
|
|
365
|
+
machines += [self._build_machine]
|
|
366
|
+
self._build_package(pkg, machines)
|
|
367
|
+
t3 = time.time()
|
|
368
|
+
build_time_elapsed += t3 - t2
|
|
369
|
+
build_ended_at = time.time()
|
|
370
|
+
|
|
371
|
+
artifact_file = self._package()
|
|
372
|
+
packaging_ended_at = time.time()
|
|
373
|
+
finally:
|
|
374
|
+
ended_at = time.time()
|
|
375
|
+
|
|
376
|
+
if prepare_ended_at is not None:
|
|
377
|
+
self._print_summary_banner()
|
|
378
|
+
print(" Total: {}".format(format_duration(ended_at - started_at)))
|
|
379
|
+
|
|
380
|
+
if prepare_ended_at is not None:
|
|
381
|
+
print(" Prepare: {}".format(format_duration(prepare_ended_at - started_at)))
|
|
382
|
+
|
|
383
|
+
if clone_time_elapsed is not None:
|
|
384
|
+
print(" Clone: {}".format(format_duration(clone_time_elapsed)))
|
|
385
|
+
|
|
386
|
+
if build_time_elapsed is not None:
|
|
387
|
+
print(" Build: {}".format(format_duration(build_time_elapsed)))
|
|
388
|
+
|
|
389
|
+
if packaging_ended_at is not None:
|
|
390
|
+
print(" Packaging: {}".format(format_duration(packaging_ended_at - build_ended_at)))
|
|
391
|
+
|
|
392
|
+
print("", flush=True)
|
|
393
|
+
|
|
394
|
+
return artifact_file
|
|
395
|
+
|
|
396
|
+
def _can_build(self, pkg: PackageSpec) -> bool:
|
|
397
|
+
return self._evaluate_condition(pkg.when)
|
|
398
|
+
|
|
399
|
+
def _resolve_package(self, pkg: PackageSpec) -> bool:
|
|
400
|
+
resolved_opts = [opt for opt in pkg.options if self._evaluate_condition(opt.when)]
|
|
401
|
+
resolved_deps = [dep for dep in pkg.dependencies if self._evaluate_condition(dep.when)]
|
|
402
|
+
return dataclasses.replace(pkg,
|
|
403
|
+
options=resolved_opts,
|
|
404
|
+
dependencies=resolved_deps)
|
|
405
|
+
|
|
406
|
+
def _resolve_dependencies(self,
|
|
407
|
+
packages: Sequence[PackageSpec],
|
|
408
|
+
all_packages: Mapping[str, PackageSpec]) -> dict[str, PackageSpec]:
|
|
409
|
+
result = {p.identifier: p for p in packages}
|
|
410
|
+
for p in packages:
|
|
411
|
+
self._resolve_package_dependencies(p, all_packages, result)
|
|
412
|
+
return result
|
|
413
|
+
|
|
414
|
+
def _resolve_package_dependencies(self,
|
|
415
|
+
package: PackageSpec,
|
|
416
|
+
all_packages: Mapping[str, PackageSpec],
|
|
417
|
+
resolved_packages: Mapping[str, PackageSpec]):
|
|
418
|
+
for dep in package.dependencies:
|
|
419
|
+
identifier = dep.identifier
|
|
420
|
+
if identifier in resolved_packages:
|
|
421
|
+
continue
|
|
422
|
+
p = all_packages[identifier]
|
|
423
|
+
resolved_packages[identifier] = p
|
|
424
|
+
self._resolve_package_dependencies(p, all_packages, resolved_packages)
|
|
425
|
+
|
|
426
|
+
def _evaluate_condition(self, cond: Optional[str]) -> bool:
|
|
427
|
+
if cond is None:
|
|
428
|
+
return True
|
|
429
|
+
global_vars = {
|
|
430
|
+
"Bundle": Bundle,
|
|
431
|
+
"bundle": self._bundle,
|
|
432
|
+
"machine": self._host_machine,
|
|
433
|
+
}
|
|
434
|
+
return eval(cond, global_vars)
|
|
435
|
+
|
|
436
|
+
def _prepare(self):
|
|
437
|
+
self._toolchain_prefix, toolchain_state = \
|
|
438
|
+
ensure_toolchain(self._build_machine,
|
|
439
|
+
self._cachedir,
|
|
440
|
+
version=self._params.bootstrap_version)
|
|
441
|
+
if toolchain_state == SourceState.MODIFIED:
|
|
442
|
+
self._wipe_build_state()
|
|
443
|
+
|
|
444
|
+
envdir = self._get_builddir_container()
|
|
445
|
+
envdir.mkdir(parents=True, exist_ok=True)
|
|
446
|
+
|
|
447
|
+
menv = {**os.environ}
|
|
448
|
+
|
|
449
|
+
if self._bundle is Bundle.TOOLCHAIN:
|
|
450
|
+
extra_ldflags = []
|
|
451
|
+
if self._host_machine.is_apple:
|
|
452
|
+
symfile = envdir / "toolchain-executable.symbols"
|
|
453
|
+
symfile.write_text("# No exported symbols.\n", encoding="utf-8")
|
|
454
|
+
extra_ldflags += [f"-Wl,-exported_symbols_list,{symfile}"]
|
|
455
|
+
elif self._host_machine.os != "windows":
|
|
456
|
+
verfile = envdir / "toolchain-executable.version"
|
|
457
|
+
verfile.write_text("\n".join([
|
|
458
|
+
"{",
|
|
459
|
+
" global:",
|
|
460
|
+
" # FreeBSD needs these two:",
|
|
461
|
+
" __progname;",
|
|
462
|
+
" environ;",
|
|
463
|
+
"",
|
|
464
|
+
" local:",
|
|
465
|
+
" *;",
|
|
466
|
+
"};",
|
|
467
|
+
""
|
|
468
|
+
]),
|
|
469
|
+
encoding="utf-8")
|
|
470
|
+
extra_ldflags += [f"-Wl,--version-script,{verfile}"]
|
|
471
|
+
if extra_ldflags:
|
|
472
|
+
menv["LDFLAGS"] = shlex.join(extra_ldflags + shlex.split(menv.get("LDFLAGS", "")))
|
|
473
|
+
|
|
474
|
+
build_sdk_prefix = None
|
|
475
|
+
host_sdk_prefix = None
|
|
476
|
+
|
|
477
|
+
self._build_config, self._host_config = \
|
|
478
|
+
env.generate_machine_configs(self._build_machine,
|
|
479
|
+
self._host_machine,
|
|
480
|
+
menv,
|
|
481
|
+
self._toolchain_prefix,
|
|
482
|
+
build_sdk_prefix,
|
|
483
|
+
host_sdk_prefix,
|
|
484
|
+
self._call_meson,
|
|
485
|
+
self._default_library,
|
|
486
|
+
envdir)
|
|
487
|
+
self._build_env = self._build_config.make_merged_environment(os.environ)
|
|
488
|
+
self._host_env = self._host_config.make_merged_environment(os.environ)
|
|
489
|
+
|
|
490
|
+
def _clone_repo_if_needed(self, pkg: PackageSpec):
|
|
491
|
+
sourcedir = self._get_sourcedir(pkg)
|
|
492
|
+
|
|
493
|
+
git = lambda *args, **kwargs: subprocess.run(["git", *args],
|
|
494
|
+
**kwargs,
|
|
495
|
+
capture_output=True,
|
|
496
|
+
encoding="utf-8")
|
|
497
|
+
|
|
498
|
+
if sourcedir.exists():
|
|
499
|
+
self._print_status(pkg.name, "Reusing existing checkout")
|
|
500
|
+
current_rev = git("rev-parse", "FETCH_HEAD", check=True).stdout.strip()
|
|
501
|
+
if current_rev != pkg.version:
|
|
502
|
+
self._print_status(pkg.name, "WARNING: Checkout does not match version in deps.toml")
|
|
503
|
+
else:
|
|
504
|
+
self._print_status(pkg.name, "Cloning")
|
|
505
|
+
clone_shallow(pkg, sourcedir, git)
|
|
506
|
+
|
|
507
|
+
def _wipe_build_state(self):
|
|
508
|
+
for path in (self._get_outdir(), self._get_builddir_container()):
|
|
509
|
+
if path.exists():
|
|
510
|
+
self._print_status(path.relative_to(self._workdir).as_posix(), "Wiping")
|
|
511
|
+
shutil.rmtree(path)
|
|
512
|
+
|
|
513
|
+
def _build_package(self, pkg: PackageSpec, machines: Sequence[MachineSpec]):
|
|
514
|
+
for machine in machines:
|
|
515
|
+
manifest_path = self._get_manifest_path(pkg, machine)
|
|
516
|
+
action = "skip" if manifest_path.exists() else "build"
|
|
517
|
+
|
|
518
|
+
message = "Building" if action == "build" else "Already built"
|
|
519
|
+
message += f" for {machine.identifier}"
|
|
520
|
+
self._print_status(pkg.name, message)
|
|
521
|
+
|
|
522
|
+
if action == "build":
|
|
523
|
+
self._build_package_for_machine(pkg, machine)
|
|
524
|
+
assert manifest_path.exists()
|
|
525
|
+
|
|
526
|
+
def _build_package_for_machine(self, pkg: PackageSpec, machine: MachineSpec):
|
|
527
|
+
sourcedir = self._get_sourcedir(pkg)
|
|
528
|
+
builddir = self._get_builddir(pkg, machine)
|
|
529
|
+
|
|
530
|
+
prefix = self._get_prefix(machine)
|
|
531
|
+
libdir = prefix / "lib"
|
|
532
|
+
|
|
533
|
+
strip = "true" if machine.toolchain_can_strip else "false"
|
|
534
|
+
|
|
535
|
+
if builddir.exists():
|
|
536
|
+
shutil.rmtree(builddir)
|
|
537
|
+
|
|
538
|
+
machine_file_opts = [f"--native-file={self._build_config.machine_file}"]
|
|
539
|
+
pc_opts = [f"-Dpkg_config_path={prefix / machine.libdatadir / 'pkgconfig'}"]
|
|
540
|
+
if self._host_config is not self._build_config and machine is self._host_machine:
|
|
541
|
+
machine_file_opts += [f"--cross-file={self._host_config.machine_file}"]
|
|
542
|
+
pc_path_for_build = self._get_prefix(self._build_machine) / self._build_machine.libdatadir / "pkgconfig"
|
|
543
|
+
pc_opts += [f"-Dbuild.pkg_config_path={pc_path_for_build}"]
|
|
544
|
+
|
|
545
|
+
menv = self._host_env if machine is self._host_machine else self._build_env
|
|
546
|
+
|
|
547
|
+
meson_kwargs = {
|
|
548
|
+
"env": menv,
|
|
549
|
+
"check": True,
|
|
550
|
+
}
|
|
551
|
+
if not self._verbose:
|
|
552
|
+
meson_kwargs["capture_output"] = True
|
|
553
|
+
meson_kwargs["encoding"] = "utf-8"
|
|
554
|
+
|
|
555
|
+
self._call_meson([
|
|
556
|
+
"setup",
|
|
557
|
+
builddir,
|
|
558
|
+
*machine_file_opts,
|
|
559
|
+
f"-Dprefix={prefix}",
|
|
560
|
+
f"-Dlibdir={libdir}",
|
|
561
|
+
*pc_opts,
|
|
562
|
+
f"-Ddefault_library={self._default_library}",
|
|
563
|
+
f"-Dbackend=ninja",
|
|
564
|
+
*machine.meson_optimization_options,
|
|
565
|
+
f"-Dstrip={strip}",
|
|
566
|
+
*[opt.value for opt in pkg.options],
|
|
567
|
+
],
|
|
568
|
+
cwd=sourcedir,
|
|
569
|
+
**meson_kwargs)
|
|
570
|
+
|
|
571
|
+
self._call_meson(["install"],
|
|
572
|
+
cwd=builddir,
|
|
573
|
+
**meson_kwargs)
|
|
574
|
+
|
|
575
|
+
manifest_lines = []
|
|
576
|
+
install_locations = json.loads(self._call_meson(["introspect", "--installed"],
|
|
577
|
+
cwd=builddir,
|
|
578
|
+
capture_output=True,
|
|
579
|
+
encoding="utf-8",
|
|
580
|
+
env=menv).stdout)
|
|
581
|
+
for installed_path in install_locations.values():
|
|
582
|
+
manifest_lines.append(Path(installed_path).relative_to(prefix).as_posix())
|
|
583
|
+
manifest_lines.sort()
|
|
584
|
+
manifest_path = self._get_manifest_path(pkg, machine)
|
|
585
|
+
manifest_path.parent.mkdir(parents=True, exist_ok=True)
|
|
586
|
+
manifest_path.write_text("\n".join(manifest_lines) + "\n", encoding="utf-8")
|
|
587
|
+
|
|
588
|
+
def _call_meson(self, argv, *args, **kwargs):
|
|
589
|
+
if self._verbose and argv[0] in {"setup", "install"}:
|
|
590
|
+
vanilla_env = os.environ
|
|
591
|
+
meson_env = kwargs["env"]
|
|
592
|
+
changed_env = {k: v for k, v in meson_env.items() if k not in vanilla_env or v != vanilla_env[k]}
|
|
593
|
+
|
|
594
|
+
indent = " "
|
|
595
|
+
env_summary = f" \\\n{indent}".join([f"{k}={shlex.quote(v)}" for k, v in changed_env.items()])
|
|
596
|
+
argv_summary = f" \\\n{3 * indent}".join([str(arg) for arg in argv])
|
|
597
|
+
|
|
598
|
+
print(f"> {env_summary} \\\n{indent}meson {argv_summary}", flush=True)
|
|
599
|
+
|
|
600
|
+
return env.call_meson(argv, use_submodule=True, *args, **kwargs)
|
|
601
|
+
|
|
602
|
+
def _package(self):
|
|
603
|
+
outfile = self._cachedir / f"{self._bundle.name.lower()}-{self._host_machine.identifier}.tar.xz"
|
|
604
|
+
|
|
605
|
+
self._print_packaging_banner()
|
|
606
|
+
with tempfile.TemporaryDirectory(prefix="frida-deps") as raw_tempdir:
|
|
607
|
+
tempdir = Path(raw_tempdir)
|
|
608
|
+
|
|
609
|
+
self._print_status(outfile.name, "Staging files")
|
|
610
|
+
if self._bundle is Bundle.TOOLCHAIN:
|
|
611
|
+
self._stage_toolchain_files(tempdir)
|
|
612
|
+
else:
|
|
613
|
+
self._stage_sdk_files(tempdir)
|
|
614
|
+
|
|
615
|
+
self._adjust_manifests(tempdir)
|
|
616
|
+
self._adjust_files_containing_hardcoded_paths(tempdir)
|
|
617
|
+
|
|
618
|
+
(tempdir / "VERSION.txt").write_text(self._params.deps_version + "\n", encoding="utf-8")
|
|
619
|
+
|
|
620
|
+
self._print_status(outfile.name, "Assembling")
|
|
621
|
+
with tarfile.open(outfile, "w:xz") as tar:
|
|
622
|
+
tar.add(tempdir, ".")
|
|
623
|
+
|
|
624
|
+
self._print_status(outfile.name, "All done")
|
|
625
|
+
|
|
626
|
+
return outfile
|
|
627
|
+
|
|
628
|
+
def _stage_toolchain_files(self, location: Path) -> list[Path]:
|
|
629
|
+
if self._host_machine.os == "windows":
|
|
630
|
+
toolchain_prefix = self._toolchain_prefix
|
|
631
|
+
mixin_files = [f for f in self._walk_plain_files(toolchain_prefix)
|
|
632
|
+
if self._file_should_be_mixed_into_toolchain(f)]
|
|
633
|
+
copy_files(toolchain_prefix, mixin_files, location)
|
|
634
|
+
|
|
635
|
+
prefix = self._get_prefix(self._host_machine)
|
|
636
|
+
files = [f for f in self._walk_plain_files(prefix)
|
|
637
|
+
if self._file_is_toolchain_related(f)]
|
|
638
|
+
copy_files(prefix, files, location)
|
|
639
|
+
|
|
640
|
+
def _stage_sdk_files(self, location: Path) -> list[Path]:
|
|
641
|
+
prefix = self._get_prefix(self._host_machine)
|
|
642
|
+
files = [f for f in self._walk_plain_files(prefix)
|
|
643
|
+
if self._file_is_sdk_related(f)]
|
|
644
|
+
copy_files(prefix, files, location)
|
|
645
|
+
|
|
646
|
+
def _adjust_files_containing_hardcoded_paths(self, bundledir: Path):
|
|
647
|
+
prefix = self._get_prefix(self._host_machine)
|
|
648
|
+
|
|
649
|
+
raw_prefixes = [str(prefix)]
|
|
650
|
+
if self._host_machine.os == "windows":
|
|
651
|
+
raw_prefixes.append(prefix.as_posix())
|
|
652
|
+
|
|
653
|
+
for f in self._walk_plain_files(bundledir):
|
|
654
|
+
filepath = bundledir / f
|
|
655
|
+
try:
|
|
656
|
+
text = filepath.read_text(encoding="utf-8")
|
|
657
|
+
|
|
658
|
+
new_text = text
|
|
659
|
+
is_pcfile = filepath.suffix == ".pc"
|
|
660
|
+
replacement = "${frida_sdk_prefix}" if is_pcfile else "@FRIDA_TOOLROOT@"
|
|
661
|
+
for p in raw_prefixes:
|
|
662
|
+
new_text = new_text.replace(p, replacement)
|
|
663
|
+
|
|
664
|
+
if new_text != text:
|
|
665
|
+
filepath.write_text(new_text, encoding="utf-8")
|
|
666
|
+
if not is_pcfile:
|
|
667
|
+
filepath.rename(filepath.parent / f"{f.name}.frida.in")
|
|
668
|
+
except UnicodeDecodeError:
|
|
669
|
+
pass
|
|
670
|
+
|
|
671
|
+
@staticmethod
|
|
672
|
+
def _walk_plain_files(rootdir: Path) -> Iterator[Path]:
|
|
673
|
+
for dirpath, dirnames, filenames in os.walk(rootdir):
|
|
674
|
+
for filename in filenames:
|
|
675
|
+
f = Path(dirpath) / filename
|
|
676
|
+
if f.is_symlink():
|
|
677
|
+
continue
|
|
678
|
+
yield f.relative_to(rootdir)
|
|
679
|
+
|
|
680
|
+
@staticmethod
|
|
681
|
+
def _adjust_manifests(bundledir: Path):
|
|
682
|
+
for manifest_path in (bundledir / "manifest").glob("*.pkg"):
|
|
683
|
+
lines = []
|
|
684
|
+
|
|
685
|
+
prefix = manifest_path.parent.parent
|
|
686
|
+
for entry in manifest_path.read_text(encoding="utf-8").strip().split("\n"):
|
|
687
|
+
if prefix.joinpath(entry).exists():
|
|
688
|
+
lines.append(entry)
|
|
689
|
+
|
|
690
|
+
if lines:
|
|
691
|
+
lines.sort()
|
|
692
|
+
manifest_path.write_text("\n".join(lines) + "\n", encoding="utf-8")
|
|
693
|
+
else:
|
|
694
|
+
manifest_path.unlink()
|
|
695
|
+
|
|
696
|
+
def _file_should_be_mixed_into_toolchain(self, f: Path) -> bool:
|
|
697
|
+
parts = f.parts
|
|
698
|
+
if parts[0] == "VERSION.txt":
|
|
699
|
+
return False
|
|
700
|
+
if parts[0] == "bin":
|
|
701
|
+
stem = f.stem
|
|
702
|
+
return stem in {"bison", "flex", "m4", "nasm", "vswhere"} or stem.startswith("msys-")
|
|
703
|
+
if parts[0] == "manifest":
|
|
704
|
+
return False
|
|
705
|
+
|
|
706
|
+
if self._file_is_vala_toolchain_related(f):
|
|
707
|
+
return False
|
|
708
|
+
|
|
709
|
+
return True
|
|
710
|
+
|
|
711
|
+
def _file_is_toolchain_related(self, f: Path) -> bool:
|
|
712
|
+
if self._file_is_vala_toolchain_related(f):
|
|
713
|
+
return True
|
|
714
|
+
|
|
715
|
+
parts = f.parts
|
|
716
|
+
if parts[0] == "bin":
|
|
717
|
+
if f.suffix == ".pdb":
|
|
718
|
+
return False
|
|
719
|
+
stem = f.stem
|
|
720
|
+
if stem in {"gdbus", "gio", "gobject-query", "gsettings"}:
|
|
721
|
+
return False
|
|
722
|
+
if stem.startswith("gspawn-"):
|
|
723
|
+
return False
|
|
724
|
+
return True
|
|
725
|
+
if parts[0] == "manifest":
|
|
726
|
+
return True
|
|
727
|
+
|
|
728
|
+
return False
|
|
729
|
+
|
|
730
|
+
def _file_is_vala_toolchain_related(self, f: Path) -> bool:
|
|
731
|
+
if f.suffix in {".vapi", ".deps"}:
|
|
732
|
+
return True
|
|
733
|
+
|
|
734
|
+
name = f.name
|
|
735
|
+
if f.suffix == self._host_machine.executable_suffix:
|
|
736
|
+
return name.startswith("vala") or name.startswith("vapi") or name.startswith("gen-introspect")
|
|
737
|
+
if f.parts[0] == "bin" and name.startswith("vala-gen-introspect"):
|
|
738
|
+
return True
|
|
739
|
+
|
|
740
|
+
return False
|
|
741
|
+
|
|
742
|
+
def _file_is_sdk_related(self, f: Path) -> bool:
|
|
743
|
+
suffix = f.suffix
|
|
744
|
+
if suffix == ".pdb":
|
|
745
|
+
return False
|
|
746
|
+
if suffix in [".vapi", ".deps"]:
|
|
747
|
+
return True
|
|
748
|
+
|
|
749
|
+
parts = f.parts
|
|
750
|
+
if parts[0] == "bin":
|
|
751
|
+
return f.name.startswith("v8-mksnapshot-")
|
|
752
|
+
|
|
753
|
+
return "share" not in parts
|
|
754
|
+
|
|
755
|
+
def _get_outdir(self) -> Path:
|
|
756
|
+
return self._workdir / f"_{self._bundle.name.lower()}.out"
|
|
757
|
+
|
|
758
|
+
def _get_sourcedir(self, pkg: PackageSpec) -> Path:
|
|
759
|
+
return self._workdir / pkg.identifier
|
|
760
|
+
|
|
761
|
+
def _get_builddir(self, pkg: PackageSpec, machine: MachineSpec) -> Path:
|
|
762
|
+
return self._get_builddir_container() / machine.identifier / pkg.identifier
|
|
763
|
+
|
|
764
|
+
def _get_builddir_container(self) -> Path:
|
|
765
|
+
return self._workdir / f"_{self._bundle.name.lower()}.tmp"
|
|
766
|
+
|
|
767
|
+
def _get_prefix(self, machine: MachineSpec) -> Path:
|
|
768
|
+
return self._get_outdir() / machine.identifier
|
|
769
|
+
|
|
770
|
+
def _get_manifest_path(self, pkg: PackageSpec, machine: MachineSpec) -> Path:
|
|
771
|
+
return self._get_prefix(machine) / "manifest" / f"{pkg.identifier}.pkg"
|
|
772
|
+
|
|
773
|
+
def _print_package_banner(self, pkg: PackageSpec):
|
|
774
|
+
if self._ansi_supported:
|
|
775
|
+
print("\n".join([
|
|
776
|
+
"",
|
|
777
|
+
"╭────",
|
|
778
|
+
f"│ 📦 \033[1m{pkg.name}\033[0m",
|
|
779
|
+
"├───────────────────────────────────────────────╮",
|
|
780
|
+
f"│ URL: {pkg.url}",
|
|
781
|
+
f"│ CID: {pkg.version}",
|
|
782
|
+
"├───────────────────────────────────────────────╯",
|
|
783
|
+
]), flush=True)
|
|
784
|
+
else:
|
|
785
|
+
print("\n".join([
|
|
786
|
+
"",
|
|
787
|
+
f"# {pkg.name}",
|
|
788
|
+
f"- URL: {pkg.url}",
|
|
789
|
+
f"- CID: {pkg.version}",
|
|
790
|
+
]), flush=True)
|
|
791
|
+
|
|
792
|
+
def _print_packaging_banner(self):
|
|
793
|
+
if self._ansi_supported:
|
|
794
|
+
print("\n".join([
|
|
795
|
+
"",
|
|
796
|
+
"╭────",
|
|
797
|
+
f"│ 🏗️ \033[1mPackaging\033[0m",
|
|
798
|
+
"├───────────────────────────────────────────────╮",
|
|
799
|
+
]), flush=True)
|
|
800
|
+
else:
|
|
801
|
+
print("\n".join([
|
|
802
|
+
"",
|
|
803
|
+
f"# Packaging",
|
|
804
|
+
]), flush=True)
|
|
805
|
+
|
|
806
|
+
def _print_summary_banner(self):
|
|
807
|
+
if self._ansi_supported:
|
|
808
|
+
print("\n".join([
|
|
809
|
+
"",
|
|
810
|
+
"╭────",
|
|
811
|
+
f"│ 🎉 \033[1mDone\033[0m",
|
|
812
|
+
"├───────────────────────────────────────────────╮",
|
|
813
|
+
]), flush=True)
|
|
814
|
+
else:
|
|
815
|
+
print("\n".join([
|
|
816
|
+
"",
|
|
817
|
+
f"# Done",
|
|
818
|
+
]), flush=True)
|
|
819
|
+
|
|
820
|
+
def _print_status(self, scope: str, *args):
|
|
821
|
+
status = " ".join([str(arg) for arg in args])
|
|
822
|
+
if self._ansi_supported:
|
|
823
|
+
print(f"│ \033[1m{scope}\033[0m :: {status}", flush=True)
|
|
824
|
+
else:
|
|
825
|
+
print(f"# {scope} :: {status}", flush=True)
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
def wait(bundle: Bundle, machine: MachineSpec):
|
|
829
|
+
params = load_dependency_parameters()
|
|
830
|
+
(url, filename) = compute_bundle_parameters(bundle, machine, params.deps_version)
|
|
831
|
+
|
|
832
|
+
request = urllib.request.Request(url)
|
|
833
|
+
request.get_method = lambda: "HEAD"
|
|
834
|
+
started_at = time.time()
|
|
835
|
+
while True:
|
|
836
|
+
try:
|
|
837
|
+
with urllib.request.urlopen(request) as r:
|
|
838
|
+
return
|
|
839
|
+
except urllib.request.HTTPError as e:
|
|
840
|
+
if e.code != 404:
|
|
841
|
+
return
|
|
842
|
+
print("Waiting for: {} Elapsed: {} Retrying in 5 minutes...".format(url, int(time.time() - started_at)), flush=True)
|
|
843
|
+
time.sleep(5 * 60)
|
|
844
|
+
|
|
845
|
+
|
|
846
|
+
def bump():
    def run(argv: list[str], **kwargs) -> subprocess.CompletedProcess:
        return subprocess.run(argv,
                              capture_output=True,
                              encoding="utf-8",
                              check=True,
                              **kwargs)

    packages = load_dependency_parameters().packages
    for identifier in iterate_package_ids_in_dependency_order(packages.values()):
        pkg = packages[identifier]
        print(f"# Checking {pkg.name}")
        assert pkg.url.startswith("https://github.com/frida/"), f"{pkg.url}: unhandled URL"

        bump_wraps(identifier, packages, run)

        latest = query_repo_commits(identifier)["sha"]
        if pkg.version == latest:
            print(f"\tdeps.toml is up-to-date")
        else:
            print(f"\tdeps.toml is outdated")
            print(f"\t\tcurrent: {pkg.version}")
            print(f"\t\t latest: {latest}")

            f = TOMLFile(DEPS_TOML_PATH)
            config = f.read()
            config[identifier]["version"] = latest
            f.write(config)

            run(["git", "add", "deps.toml"], cwd=RELENG_DIR)
            run(["git", "commit", "-m" f"deps: Bump {pkg.name} to {latest[:7]}"], cwd=RELENG_DIR)

            packages = load_dependency_parameters().packages

        print("")


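# Scan the repo's subprojects/ tree on GitHub for .wrap files that reference other
# frida packages; for each wrap whose pinned revision lags behind deps.toml, clone
# the repo, rewrite the revision line in place, then commit and push the change.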
def bump_wraps(identifier: str,
               packages: Mapping[str, PackageSpec],
               run: Callable):
    root = query_repo_trees(identifier)
    subp_dir = next((t for t in root["tree"] if t["path"] == "subprojects"), None)
    if subp_dir is None or subp_dir["type"] != "tree":
        print("\tno wraps to bump")
        return

    all_wraps = [(entry, identifier_from_wrap_filename(entry["path"]))
                 for entry in query_github_api(subp_dir["url"])["tree"]
                 if entry["type"] == "blob" and entry["path"].endswith(".wrap")]
    relevant_wraps = [(blob, packages[identifier])
                      for blob, identifier in all_wraps
                      if identifier in packages]
    if not relevant_wraps:
        print(f"\tno relevant wraps, only: {', '.join([blob['path'] for blob, _ in all_wraps])}")
        return

    pending_wraps: list[tuple[str, str, PackageSpec]] = []
    for blob, spec in relevant_wraps:
        filename = blob["path"]

        response = query_github_api(blob["url"])
        assert response["encoding"] == "base64"
        data = base64.b64decode(response["content"])

        config = ConfigParser()
        config.read_file(data.decode("utf-8").split("\n"))

        if "wrap-git" not in config:
            print(f"\tskipping {filename} as it's not wrap-git")
            continue
        source = config["wrap-git"]

        url = source["url"]
        if not url.startswith("https://github.com/frida/"):
            print(f"\tskipping {filename} as URL is external: {url}")
            continue

        revision = source["revision"]
        if revision == spec.version:
            continue

        pending_wraps.append((filename, revision, spec))
    if not pending_wraps:
        print(f"\tall wraps up-to-date")
        return

    workdir = detect_cache_dir(ROOT_DIR) / "src"
    workdir.mkdir(parents=True, exist_ok=True)

    sourcedir = workdir / identifier
    if sourcedir.exists():
        shutil.rmtree(sourcedir)
    run(["git", "clone", "--depth", "1", f"git@github.com:frida/{identifier}.git"], cwd=workdir)

    subpdir = sourcedir / "subprojects"
    revision_pattern = re.compile(r"^(?P<key_equals>\s*revision\s*=\s*)\S+$", re.MULTILINE)
    for filename, revision, dep in pending_wraps:
        wrapfile = subpdir / filename
        old_config = wrapfile.read_text(encoding="utf-8")
        # Would be simpler to use ConfigParser to write it back out, but we
        # want to preserve the particular style to keep our patches minimal.
        new_config = revision_pattern.sub(fr"\g<key_equals>{dep.version}", old_config)
        wrapfile.write_text(new_config, encoding="utf-8")

        run(["git", "add", filename], cwd=subpdir)

        action = "Pin" if revision == "main" else "Bump"
        run(["git", "commit", "-m" f"subprojects: {action} {dep.name} to {dep.version[:7]}"], cwd=sourcedir)

        print(f"\tdid {action.lower()} {filename} to {dep.version} (from {revision})")

    run(["git", "push"], cwd=sourcedir)


def identifier_from_wrap_filename(filename: str) -> str:
    return filename.split(".", maxsplit=1)[0]


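# Derive the download URL and filename for a bundle. The toolchain bundle on Windows
# always uses the windows-x86 build; everything else uses the machine identifier
# as-is. For example (identifier shown here is illustrative), Bundle.SDK on a
# "linux-x86_64" machine maps to sdk-linux-x86_64.tar.xz under BUNDLE_URL.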
def compute_bundle_parameters(bundle: Bundle,
                              machine: MachineSpec,
                              version: str) -> tuple[str, str]:
    if bundle == Bundle.TOOLCHAIN and machine.os == "windows":
        os_arch_config = "windows-x86"
    else:
        os_arch_config = machine.identifier
    filename = f"{bundle.name.lower()}-{os_arch_config}.tar.xz"
    url = BUNDLE_URL.format(version=version, filename=filename)
    return (url, filename)


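# Parse deps.toml into a DependencyParameters value: every table except
# [dependencies] becomes a PackageSpec keyed by its identifier.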
def load_dependency_parameters() -> DependencyParameters:
    config = TOMLFile(DEPS_TOML_PATH).read()

    packages = {}
    for identifier, pkg in config.items():
        if identifier == "dependencies":
            continue
        packages[identifier] = PackageSpec(identifier,
                                           pkg["name"],
                                           pkg["version"],
                                           pkg["url"],
                                           list(map(parse_option, pkg.get("options", []))),
                                           list(map(parse_dependency, pkg.get("dependencies", []))),
                                           pkg.get("scope"),
                                           pkg.get("when"))

    p = config["dependencies"]
    return DependencyParameters(p["version"], p["bootstrap_version"], packages)


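# Yield package identifiers so that every package comes after the packages it
# depends on, using graphlib's topological sort.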
def iterate_package_ids_in_dependency_order(packages: Sequence[PackageSpec]) -> Iterator[str]:
    ts = graphlib.TopologicalSorter({pkg.identifier: {dep.identifier for dep in pkg.dependencies}
                                     for pkg in packages})
    return ts.static_order()


def configure_bootstrap_version(version: str):
    f = TOMLFile(DEPS_TOML_PATH)
    config = f.read()
    config["dependencies"]["bootstrap_version"] = version
    f.write(config)


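# Thin wrappers around the GitHub REST API. Authentication is HTTP Basic using the
# GH_USERNAME and GH_TOKEN environment variables, so both must be set for bump().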
def query_repo_commits(repo: str,
                       organization: str = "frida",
                       branch: str = "main") -> dict:
    return query_github_api(make_github_url(f"/repos/{organization}/{repo}/commits/{branch}"))


def query_repo_trees(repo: str,
                     organization: str = "frida",
                     branch: str = "main") -> dict:
    return query_github_api(make_github_url(f"/repos/{organization}/{repo}/git/trees/{branch}"))


def query_github_api(url: str) -> dict:
    request = urllib.request.Request(url)
    request.add_header("Authorization", make_github_auth_header())
    with urllib.request.urlopen(request) as r:
        return json.load(r)


def make_github_url(path: str) -> str:
    return "https://api.github.com" + path


def make_github_auth_header() -> str:
    return "Basic " + base64.b64encode(":".join([
                          os.environ["GH_USERNAME"],
                          os.environ["GH_TOKEN"]
                      ]).encode("utf-8")).decode("utf-8")


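# Shallow-clone a package: fetch only the pinned revision (and its submodules) at
# depth 1 into outdir, using the provided git runner.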
def clone_shallow(pkg: PackageSpec, outdir: Path, call_git: Callable):
    outdir.mkdir(parents=True, exist_ok=True)
    git = lambda *args: call_git(*args, cwd=outdir, check=True)
    git("init")
    git("remote", "add", "origin", pkg.url)
    git("fetch", "--depth", "1", "origin", pkg.version)
    git("checkout", "FETCH_HEAD")
    git("submodule", "update", "--init", "--recursive", "--depth", "1")


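# parse_option() and parse_dependency() accept either the short string form or the
# table form used in deps.toml entries.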
def parse_option(v: Union[str, dict]) -> OptionSpec:
    if isinstance(v, str):
        return OptionSpec(v)
    return OptionSpec(v["value"], v.get("when"))


def parse_dependency(v: Union[str, dict]) -> DependencySpec:
    if isinstance(v, str):
        return DependencySpec(v)
    return DependencySpec(v["id"], v.get("for_machine"), v.get("when"))


def copy_files(fromdir: Path,
               files: list[Path],
               todir: Path):
    for filename in files:
        src = fromdir / filename
        dst = todir / filename
        dst.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy(src, dst, follow_symlinks=False)


def format_duration(duration_in_seconds: float) -> str:
    hours, remainder = divmod(duration_in_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "{:02d}:{:02d}:{:02d}".format(int(hours), int(minutes), int(seconds))


class CommandError(Exception):
    pass


DEPS_TOML_PATH = RELENG_DIR / "deps.toml"

BUNDLE_URL = "https://build.frida.re/deps/{version}/{filename}"


class Bundle(Enum):
    TOOLCHAIN = 1,
    SDK = 2,


class BundleNotFoundError(Exception):
    pass


class SourceState(Enum):
    PRISTINE = 1,
    MODIFIED = 2,


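# Data model mirroring deps.toml: the top-level [dependencies] table maps to
# DependencyParameters, and each package table maps to a PackageSpec.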
@dataclass
class DependencyParameters:
    deps_version: str
    bootstrap_version: str
    packages: dict[str, PackageSpec]


@dataclass
class PackageSpec:
    identifier: str
    name: str
    version: str
    url: str
    options: list[OptionSpec] = field(default_factory=list)
    dependencies: list[DependencySpec] = field(default_factory=list)
    scope: Optional[str] = None
    when: Optional[str] = None


@dataclass
class OptionSpec:
    value: str
    when: Optional[str] = None


@dataclass
class DependencySpec:
    identifier: str
    for_machine: str = "host"
    when: Optional[str] = None


if __name__ == "__main__":
    main()