Index: ps/trunk/build/premake/premake5.lua =================================================================== --- ps/trunk/build/premake/premake5.lua (revision 26880) +++ ps/trunk/build/premake/premake5.lua (revision 26881) @@ -1,1536 +1,1536 @@ newoption { trigger = "android", description = "Use non-working Android cross-compiling mode" } newoption { trigger = "atlas", description = "Include Atlas scenario editor projects" } newoption { trigger = "coverage", description = "Enable code coverage data collection (GCC only)" } newoption { trigger = "gles", description = "Use non-working OpenGL ES 2.0 mode" } newoption { trigger = "icc", description = "Use Intel C++ Compiler (Linux only; should use either \"--cc icc\" or --without-pch too, and then set CXX=icpc before calling make)" } newoption { trigger = "jenkins-tests", description = "Configure CxxTest to use the XmlPrinter runner which produces Jenkins-compatible output" } newoption { trigger = "minimal-flags", description = "Only set compiler/linker flags that are really needed. 
Has no effect on Windows builds" } newoption { trigger = "outpath", description = "Location for generated project files" } newoption { trigger = "with-system-mozjs", description = "Search standard paths for libmozjs60, instead of using bundled copy" } newoption { trigger = "with-system-nvtt", description = "Search standard paths for nvidia-texture-tools library, instead of using bundled copy" } newoption { trigger = "with-valgrind", description = "Enable Valgrind support (non-Windows only)" } newoption { trigger = "without-audio", description = "Disable use of OpenAL/Ogg/Vorbis APIs" } newoption { trigger = "without-lobby", description = "Disable the use of gloox and the multiplayer lobby" } newoption { trigger = "without-miniupnpc", description = "Disable use of miniupnpc for port forwarding" } newoption { trigger = "without-nvtt", description = "Disable use of NVTT" } newoption { trigger = "without-pch", description = "Disable generation and usage of precompiled headers" } newoption { trigger = "without-tests", description = "Disable generation of test projects" } -- Linux/BSD specific options newoption { trigger = "prefer-local-libs", description = "Prefer locally built libs. Any local libraries used must also be listed within a file within /etc/ld.so.conf.d so the dynamic linker can find them at runtime." } -- OS X specific options newoption { trigger = "macosx-version-min", description = "Set minimum required version of the OS X API, the build will possibly fail if an older SDK is used, while newer API functions will be weakly linked (i.e. resolved at runtime)" } newoption { trigger = "sysroot", description = "Set compiler system root path, used for building against a non-system SDK. For example /usr/local becomes SYSROOT/user/local" } -- Windows specific options newoption { trigger = "build-shared-glooxwrapper", description = "Rebuild glooxwrapper DLL for Windows. 
Requires the same compiler version that gloox was built with" } newoption { trigger = "use-shared-glooxwrapper", description = "Use prebuilt glooxwrapper DLL for Windows" } newoption { trigger = "large-address-aware", description = "Make the executable large address aware. Do not use for development, in order to spot memory issues easily" } -- Install options newoption { trigger = "bindir", description = "Directory for executables (typically '/usr/games'); default is to be relocatable" } newoption { trigger = "datadir", description = "Directory for data files (typically '/usr/share/games/0ad'); default is ../data/ relative to executable" } newoption { trigger = "libdir", description = "Directory for libraries (typically '/usr/lib/games/0ad'); default is ./ relative to executable" } -- Root directory of project checkout relative to this .lua file rootdir = "../.." dofile("extern_libs5.lua") -- detect compiler for non-Windows if os.istarget("macosx") then cc = "clang" elseif os.istarget("linux") and _OPTIONS["icc"] then cc = "icc" elseif os.istarget("bsd") and os.getversion().description == "FreeBSD" then cc = "clang" elseif not os.istarget("windows") then cc = os.getenv("CC") if cc == nil or cc == "" then local hasgcc = os.execute("which gcc > .gccpath") local f = io.open(".gccpath", "r") local gccpath = f:read("*line") f:close() os.execute("rm .gccpath") if gccpath == nil then cc = "clang" else cc = "gcc" end end end --- detect CPU architecture (simplistic, currently only supports x86, amd64 and ARM) +-- detect CPU architecture (simplistic) arch = "x86" macos_arch = "x86_64" if _OPTIONS["android"] then arch = "arm" elseif os.istarget("windows") then if os.getenv("PROCESSOR_ARCHITECTURE") == "amd64" or os.getenv("PROCESSOR_ARCHITEW6432") == "amd64" then arch = "amd64" end else - arch = os.getenv("HOSTTYPE") - -- Force x86_64 on Mac OS for now, as Spidermonkey 78 isn't Apple Silicon compatible. + os.execute(cc .. 
" -dumpmachine > .gccmachine.tmp") + local f = io.open(".gccmachine.tmp", "r") + local machine = f:read("*line") + f:close() + -- Special handling on mac os where xcode needs special flags. if os.istarget("macosx") then - arch = "amd64" - macos_arch = "x86_64" - elseif arch == "x86_64" or arch == "amd64" then - arch = "amd64" - else - os.execute(cc .. " -dumpmachine > .gccmachine.tmp") - local f = io.open(".gccmachine.tmp", "r") - local machine = f:read("*line") - f:close() - if string.find(machine, "x86_64") == 1 or string.find(machine, "amd64") == 1 then - arch = "amd64" - elseif string.find(machine, "i.86") == 1 then - arch = "x86" - elseif string.find(machine, "arm") == 1 then - arch = "arm" - elseif string.find(machine, "aarch64") == 1 then + if string.find(machine, "arm64") then arch = "aarch64" - elseif string.find(machine, "e2k") == 1 then - arch = "e2k" - elseif string.find(machine, "ppc64") == 1 or string.find(machine, "powerpc64") == 1 then - arch = "ppc64" + macos_arch = "arm64" else - print("WARNING: Cannot determine architecture from GCC, assuming x86") + arch = "amd64" + macos_arch = "x86_64" end + elseif string.find(machine, "x86_64") == 1 or string.find(machine, "amd64") == 1 then + arch = "amd64" + elseif string.find(machine, "i.86") == 1 then + arch = "x86" + elseif string.find(machine, "arm") == 1 then + arch = "arm" + elseif string.find(machine, "aarch64") == 1 then + arch = "aarch64" + elseif string.find(machine, "e2k") == 1 then + arch = "e2k" + elseif string.find(machine, "ppc64") == 1 or string.find(machine, "powerpc64") == 1 then + arch = "ppc64" + else + print("WARNING: Cannot determine architecture from GCC, assuming x86") end end -- Test whether we need to link libexecinfo. -- This is mostly the case on musl systems, as well as on BSD systems : only glibc provides the -- backtrace symbols we require in the libc, for other libcs we use the libexecinfo library. 
local link_execinfo = false if os.istarget("bsd") then link_execinfo = true elseif os.istarget("linux") then local _, link_errorCode = os.outputof(cc .. " ./tests/execinfo.c -o /dev/null") if link_errorCode ~= 0 then link_execinfo = true end end -- Set up the Workspace workspace "pyrogenesis" targetdir(rootdir.."/binaries/system") libdirs(rootdir.."/binaries/system") if not _OPTIONS["outpath"] then error("You must specify the 'outpath' parameter") end location(_OPTIONS["outpath"]) configurations { "Release", "Debug" } source_root = rootdir.."/source/" -- default for most projects - overridden by local in others -- Rationale: projects should not have any additional include paths except for -- those required by external libraries. Instead, we should always write the -- full relative path, e.g. #include "maths/Vector3d.h". This avoids confusion -- ("which file is meant?") and avoids enormous include path lists. -- projects: engine static libs, main exe, atlas, atlas frontends, test. -------------------------------------------------------------------------------- -- project helper functions -------------------------------------------------------------------------------- function project_set_target(project_name) -- Note: On Windows, ".exe" is added on the end, on unices the name is used directly local obj_dir_prefix = _OPTIONS["outpath"].."/obj/"..project_name.."_" filter "Debug" objdir(obj_dir_prefix.."Debug") targetsuffix("_dbg") filter "Release" objdir(obj_dir_prefix.."Release") filter { } end function project_set_build_flags() editandcontinue "Off" if not _OPTIONS["minimal-flags"] then symbols "On" end if cc ~= "icc" and (os.istarget("windows") or not _OPTIONS["minimal-flags"]) then -- adds the -Wall compiler flag warnings "Extra" -- this causes far too many warnings/remarks on ICC end -- disable Windows debug heap, since it makes malloc/free hugely slower when -- running inside a debugger if os.istarget("windows") then debugenvs { "_NO_DEBUG_HEAP=1" } end filter 
"Debug" defines { "DEBUG" } filter "Release" if os.istarget("windows") or not _OPTIONS["minimal-flags"] then optimize "Speed" end defines { "NDEBUG", "CONFIG_FINAL=1" } filter { } if _OPTIONS["gles"] then defines { "CONFIG2_GLES=1" } end if _OPTIONS["without-audio"] then defines { "CONFIG2_AUDIO=0" } end if _OPTIONS["without-nvtt"] then defines { "CONFIG2_NVTT=0" } end if _OPTIONS["without-lobby"] then defines { "CONFIG2_LOBBY=0" } end if _OPTIONS["without-miniupnpc"] then defines { "CONFIG2_MINIUPNPC=0" } end -- Enable C++17 standard. filter "action:vs*" buildoptions { "/std:c++17" } filter "action:not vs*" buildoptions { "-std=c++17" } filter {} -- various platform-specific build flags if os.istarget("windows") then flags { "MultiProcessorCompile" } -- Since KB4088875 Windows 7 has a soft requirement for SSE2. -- Windows 8+ and Firefox ESR52 make it hard requirement. -- Finally since VS2012 it's enabled implicitely when not set. vectorextensions "SSE2" -- use native wchar_t type (not typedef to unsigned short) nativewchar "on" else -- *nix -- TODO, FIXME: This check is incorrect because it means that some additional flags will be added inside the "else" branch if the -- compiler is ICC and minimal-flags is specified (ticket: #2994) if cc == "icc" and not _OPTIONS["minimal-flags"] then buildoptions { "-w1", -- "-Wabi", -- "-Wp64", -- complains about OBJECT_TO_JSVAL which is annoying "-Wpointer-arith", "-Wreturn-type", -- "-Wshadow", "-Wuninitialized", "-Wunknown-pragmas", "-Wunused-function", "-wd1292" -- avoid lots of 'attribute "__nonnull__" ignored' } filter "Debug" buildoptions { "-O0" } -- ICC defaults to -O2 filter { } if os.istarget("macosx") then linkoptions { "-multiply_defined","suppress" } end else -- exclude most non-essential build options for minimal-flags if not _OPTIONS["minimal-flags"] then buildoptions { -- enable most of the standard warnings "-Wno-switch", -- enumeration value not handled in switch (this is sometimes useful, but results in lots 
of noise) "-Wno-reorder", -- order of initialization list in constructors (lots of noise) "-Wno-invalid-offsetof", -- offsetof on non-POD types (see comment in renderer/PatchRData.cpp) "-Wextra", "-Wno-missing-field-initializers", -- (this is common in external headers we can't fix) -- add some other useful warnings that need to be enabled explicitly "-Wunused-parameter", "-Wredundant-decls", -- (useful for finding some multiply-included header files) -- "-Wformat=2", -- (useful sometimes, but a bit noisy, so skip it by default) -- "-Wcast-qual", -- (useful for checking const-correctness, but a bit noisy, so skip it by default) "-Wnon-virtual-dtor", -- (sometimes noisy but finds real bugs) "-Wundef", -- (useful for finding macro name typos) -- enable security features (stack checking etc) that shouldn't have -- a significant effect on performance and can catch bugs "-fstack-protector-all", "-U_FORTIFY_SOURCE", -- (avoid redefinition warning if already defined) "-D_FORTIFY_SOURCE=2", -- always enable strict aliasing (useful in debug builds because of the warnings) "-fstrict-aliasing", -- don't omit frame pointers (for now), because performance will be impacted -- negatively by the way this breaks profilers more than it will be impacted -- positively by the optimisation "-fno-omit-frame-pointer" } if not _OPTIONS["without-pch"] then buildoptions { -- do something (?) so that ccache can handle compilation with PCH enabled -- (ccache 3.1+ also requires CCACHE_SLOPPINESS=time_macros for this to work) "-fpch-preprocess" } end if os.istarget("linux") or os.istarget("bsd") then buildoptions { "-fPIC" } linkoptions { "-Wl,--no-undefined", "-Wl,--as-needed", "-Wl,-z,relro" } end if arch == "x86" then buildoptions { -- To support intrinsics like __sync_bool_compare_and_swap on x86 -- we need to set -march to something that supports them (i686). 
-- We use pentium3 to also enable other features like mmx and sse, -- while tuning for generic to have good performance on every -- supported CPU. -- Note that all these features are already supported on amd64. "-march=pentium3 -mtune=generic", -- This allows x86 operating systems to handle the 2GB+ public mod. "-D_FILE_OFFSET_BITS=64" } end end if arch == "arm" then -- disable warnings about va_list ABI change and use -- compile-time flags for futher configuration. buildoptions { "-Wno-psabi" } if _OPTIONS["android"] then -- Android uses softfp, so we should too. buildoptions { "-mfloat-abi=softfp" } end end if _OPTIONS["coverage"] then buildoptions { "-fprofile-arcs", "-ftest-coverage" } links { "gcov" } end - -- MacOS 10.12 only supports processors with SSE 4.1, so enable that. - if os.istarget("macosx") then + -- MacOS 10.12 only supports intel processors with SSE 4.1, so enable that. + if os.istarget("macosx") and arch == "amd64" then buildoptions { "-msse4.1" } end -- Check if SDK path should be used if _OPTIONS["sysroot"] then buildoptions { "-isysroot " .. _OPTIONS["sysroot"] } linkoptions { "-Wl,-syslibroot," .. _OPTIONS["sysroot"] } end -- On OS X, sometimes we need to specify the minimum API version to use if _OPTIONS["macosx-version-min"] then buildoptions { "-mmacosx-version-min=" .. _OPTIONS["macosx-version-min"] } -- clang and llvm-gcc look at mmacosx-version-min to determine link target -- and CRT version, and use it to set the macosx_version_min linker flag linkoptions { "-mmacosx-version-min=" .. 
_OPTIONS["macosx-version-min"] } end -- Only libc++ is supported on MacOS if os.istarget("macosx") then buildoptions { "-stdlib=libc++" } linkoptions { "-stdlib=libc++" } end end buildoptions { -- Hide symbols in dynamic shared objects by default, for efficiency and for equivalence with -- Windows - they should be exported explicitly with __attribute__ ((visibility ("default"))) "-fvisibility=hidden" } if _OPTIONS["bindir"] then defines { "INSTALLED_BINDIR=" .. _OPTIONS["bindir"] } end if _OPTIONS["datadir"] then defines { "INSTALLED_DATADIR=" .. _OPTIONS["datadir"] } end if _OPTIONS["libdir"] then defines { "INSTALLED_LIBDIR=" .. _OPTIONS["libdir"] } end if os.istarget("linux") or os.istarget("bsd") then if _OPTIONS["prefer-local-libs"] then libdirs { "/usr/local/lib" } end -- To use our local shared libraries, they need to be found in the -- runtime dynamic linker path. Add their path to -rpath. if _OPTIONS["libdir"] then linkoptions {"-Wl,-rpath," .. _OPTIONS["libdir"] } else -- On FreeBSD we need to allow use of $ORIGIN if os.istarget("bsd") then linkoptions { "-Wl,-z,origin" } end -- Adding the executable path and taking care of correct escaping if _ACTION == "gmake" then linkoptions { "-Wl,-rpath,'$$ORIGIN'" } elseif _ACTION == "codeblocks" then linkoptions { "-Wl,-R\\\\$$$ORIGIN" } end end end end end -- create a project and set the attributes that are common to all projects. function project_create(project_name, target_type) project(project_name) language "C++" kind(target_type) filter "action:vs2017" toolset "v141_xp" filter {} filter "action:vs*" buildoptions "/utf-8" filter {} project_set_target(project_name) project_set_build_flags() end -- OSX creates a .app bundle if the project type of the main application is set to "WindowedApp". -- We don't want this because this bundle would be broken (it lacks all the resources and external dependencies, Info.plist etc...) 
-- Windows opens a console in the background if it's set to ConsoleApp, which is not what we want. -- I didn't check if this setting matters for linux, but WindowedApp works there. function get_main_project_target_type() if _OPTIONS["android"] then return "SharedLib" elseif os.istarget("macosx") then return "ConsoleApp" else return "WindowedApp" end end -- source_root: rel_source_dirs and rel_include_dirs are relative to this directory -- rel_source_dirs: A table of subdirectories. All source files in these directories are added. -- rel_include_dirs: A table of subdirectories to be included. -- extra_params: table including zero or more of the following: -- * no_pch: If specified, no precompiled headers are used for this project. -- * pch_dir: If specified, this directory will be used for precompiled headers instead of the default -- /pch//. -- * extra_files: table of filenames (relative to source_root) to add to project -- * extra_links: table of library names to add to link step function project_add_contents(source_root, rel_source_dirs, rel_include_dirs, extra_params) for i,v in pairs(rel_source_dirs) do local prefix = source_root..v.."/" files { prefix.."*.cpp", prefix.."*.h", prefix.."*.inl", prefix.."*.js", prefix.."*.asm", prefix.."*.mm" } end -- Put the project-specific PCH directory at the start of the -- include path, so '#include "precompiled.h"' will look in -- there first local pch_dir if not extra_params["pch_dir"] then pch_dir = source_root .. "pch/" .. project().name .. "/" else pch_dir = extra_params["pch_dir"] end includedirs { pch_dir } -- Precompiled Headers -- rationale: we need one PCH per static lib, since one global header would -- increase dependencies. To that end, we can either include them as -- "projectdir/precompiled.h", or add "source/PCH/projectdir" to the -- include path and put the PCH there. The latter is better because -- many projects contain several dirs and it's unclear where there the -- PCH should be stored. 
This way is also a bit easier to use in that -- source files always include "precompiled.h". -- Notes: -- * Visual Assist manages to use the project include path and can -- correctly open these files from the IDE. -- * precompiled.cpp (needed to "Create" the PCH) also goes in -- the abovementioned dir. if (not _OPTIONS["without-pch"] and not extra_params["no_pch"]) then filter "action:vs*" pchheader("precompiled.h") filter "action:xcode*" pchheader("../"..pch_dir.."precompiled.h") filter { "action:not vs*", "action:not xcode*" } pchheader(pch_dir.."precompiled.h") filter {} pchsource(pch_dir.."precompiled.cpp") defines { "CONFIG_ENABLE_PCH=1" } files { pch_dir.."precompiled.h", pch_dir.."precompiled.cpp" } else defines { "CONFIG_ENABLE_PCH=0" } flags { "NoPCH" } end -- next is source root dir, for absolute (nonrelative) includes -- (e.g. "lib/precompiled.h") includedirs { source_root } for i,v in pairs(rel_include_dirs) do includedirs { source_root .. v } end if extra_params["extra_files"] then for i,v in pairs(extra_params["extra_files"]) do -- .rc files are only needed on Windows if path.getextension(v) ~= ".rc" or os.istarget("windows") then files { source_root .. v } end end end if extra_params["extra_links"] then links { extra_params["extra_links"] } end end -- Add command-line options to set up the manifest dependencies for Windows -- (See lib/sysdep/os/win/manifest.cpp) function project_add_manifest() linkoptions { "\"/manifestdependency:type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='X86' publicKeyToken='6595b64144ccf1df'\"" } end -------------------------------------------------------------------------------- -- engine static libraries -------------------------------------------------------------------------------- -- the engine is split up into several static libraries. this eases separate -- distribution of those components, reduces dependencies a bit, and can -- also speed up builds. 
-- more to the point, it is necessary to efficiently support a separate -- test executable that also includes much of the game code. -- names of all static libs created. automatically added to the -- main app project later (see explanation at end of this file) static_lib_names = {} static_lib_names_debug = {} static_lib_names_release = {} -- set up one of the static libraries into which the main engine code is split. -- extra_params: -- no_default_link: If specified, linking won't be done by default. -- For the rest of extra_params, see project_add_contents(). -- note: rel_source_dirs and rel_include_dirs are relative to global source_root. function setup_static_lib_project (project_name, rel_source_dirs, extern_libs, extra_params) local target_type = "StaticLib" project_create(project_name, target_type) project_add_contents(source_root, rel_source_dirs, {}, extra_params) project_add_extern_libs(extern_libs, target_type) if not extra_params["no_default_link"] then table.insert(static_lib_names, project_name) end -- Deactivate Run Time Type Information. Performance of dynamic_cast is very poor. -- The exception to this principle is Atlas UI, which is not a static library. rtti "off" if os.istarget("macosx") then architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } if _OPTIONS["macosx-version-min"] then xcodebuildsettings { MACOSX_DEPLOYMENT_TARGET = _OPTIONS["macosx-version-min"] } end end end function setup_third_party_static_lib_project (project_name, rel_source_dirs, extern_libs, extra_params) setup_static_lib_project(project_name, rel_source_dirs, extern_libs, extra_params) includedirs { source_root .. "third_party/" .. project_name .. 
"/include/" } end function setup_shared_lib_project (project_name, rel_source_dirs, extern_libs, extra_params) local target_type = "SharedLib" project_create(project_name, target_type) project_add_contents(source_root, rel_source_dirs, {}, extra_params) project_add_extern_libs(extern_libs, target_type) if not extra_params["no_default_link"] then table.insert(static_lib_names, project_name) end if os.istarget("windows") then links { "delayimp" } elseif os.istarget("macosx") then architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } if _OPTIONS["macosx-version-min"] then xcodebuildsettings { MACOSX_DEPLOYMENT_TARGET = _OPTIONS["macosx-version-min"] } end end end -- this is where the source tree is chopped up into static libs. -- can be changed very easily; just copy+paste a new setup_static_lib_project, -- or remove existing ones. static libs are automagically added to -- main_exe link step. function setup_all_libs () -- relative to global source_root. 
local source_dirs = {} -- names of external libraries used (see libraries_dir comment) local extern_libs = {} source_dirs = { "network", } extern_libs = { "spidermonkey", "enet", "sdl", "boost", -- dragged in via server->simulation.h->random and NetSession.h->lockfree "fmt", } if not _OPTIONS["without-miniupnpc"] then table.insert(extern_libs, "miniupnpc") end setup_static_lib_project("network", source_dirs, extern_libs, {}) source_dirs = { "rlinterface", } extern_libs = { "boost", -- dragged in via simulation.h and scriptinterface.h "fmt", "spidermonkey", } setup_static_lib_project("rlinterface", source_dirs, extern_libs, { no_pch = 1 }) source_dirs = { "third_party/tinygettext/src", } extern_libs = { "iconv", "boost", "fmt", } setup_third_party_static_lib_project("tinygettext", source_dirs, extern_libs, { } ) -- it's an external library and we don't want to modify its source to fix warnings, so we just disable them to avoid noise in the compile output filter "action:vs*" buildoptions { "/wd4127", "/wd4309", "/wd4800", "/wd4100", "/wd4996", "/wd4099", "/wd4503" } filter {} if not _OPTIONS["without-lobby"] then source_dirs = { "lobby", "lobby/scripting", "i18n", "third_party/encryption" } extern_libs = { "spidermonkey", "boost", "enet", "gloox", "icu", "iconv", "libsodium", "tinygettext", "fmt", } setup_static_lib_project("lobby", source_dirs, extern_libs, {}) if _OPTIONS["use-shared-glooxwrapper"] and not _OPTIONS["build-shared-glooxwrapper"] then table.insert(static_lib_names_debug, "glooxwrapper_dbg") table.insert(static_lib_names_release, "glooxwrapper") else source_dirs = { "lobby/glooxwrapper", } extern_libs = { "boost", "gloox", "fmt", } if _OPTIONS["build-shared-glooxwrapper"] then setup_shared_lib_project("glooxwrapper", source_dirs, extern_libs, {}) else setup_static_lib_project("glooxwrapper", source_dirs, extern_libs, {}) end end else source_dirs = { "lobby/scripting", "third_party/encryption" } extern_libs = { "spidermonkey", "boost", "libsodium", 
"fmt", } setup_static_lib_project("lobby", source_dirs, extern_libs, {}) files { source_root.."lobby/Globals.cpp" } end source_dirs = { "simulation2", "simulation2/components", "simulation2/helpers", "simulation2/scripting", "simulation2/serialization", "simulation2/system", "simulation2/testcomponents", } extern_libs = { "boost", "spidermonkey", "fmt", } setup_static_lib_project("simulation2", source_dirs, extern_libs, {}) source_dirs = { "scriptinterface", "scriptinterface/third_party" } extern_libs = { "boost", "spidermonkey", "valgrind", "sdl", "fmt", } setup_static_lib_project("scriptinterface", source_dirs, extern_libs, {}) source_dirs = { "ps", "ps/scripting", "network/scripting", "ps/GameSetup", "ps/XMB", "ps/XML", "soundmanager", "soundmanager/data", "soundmanager/items", "soundmanager/scripting", "maths", "maths/scripting", "i18n", "i18n/scripting", } extern_libs = { "spidermonkey", "sdl", -- key definitions "libxml2", "zlib", "boost", "enet", "libcurl", "tinygettext", "icu", "iconv", "libsodium", "fmt", "freetype", } if not _OPTIONS["without-audio"] then table.insert(extern_libs, "openal") table.insert(extern_libs, "vorbis") end setup_static_lib_project("engine", source_dirs, extern_libs, {}) source_dirs = { "graphics", "graphics/scripting", "renderer", "renderer/backend", "renderer/backend/dummy", "renderer/backend/gl", "renderer/backend/vulkan", "renderer/scripting", "third_party/mikktspace", "third_party/ogre3d_preprocessor" } extern_libs = { "sdl", -- key definitions "spidermonkey", -- for graphics/scripting "boost", "fmt", "freetype", "icu", } if not _OPTIONS["without-nvtt"] then table.insert(extern_libs, "nvtt") end setup_static_lib_project("graphics", source_dirs, extern_libs, {}) source_dirs = { "tools/atlas/GameInterface", "tools/atlas/GameInterface/Handlers" } extern_libs = { "boost", "sdl", -- key definitions "spidermonkey", "fmt", } setup_static_lib_project("atlas", source_dirs, extern_libs, {}) source_dirs = { "gui", "gui/ObjectTypes", 
"gui/ObjectBases", "gui/Scripting", "gui/SettingTypes", "i18n" } extern_libs = { "spidermonkey", "sdl", -- key definitions "boost", "enet", "tinygettext", "icu", "iconv", "fmt", } if not _OPTIONS["without-audio"] then table.insert(extern_libs, "openal") end setup_static_lib_project("gui", source_dirs, extern_libs, {}) source_dirs = { "lib", "lib/adts", "lib/allocators", "lib/external_libraries", "lib/file", "lib/file/archive", "lib/file/common", "lib/file/io", "lib/file/vfs", "lib/pch", "lib/posix", "lib/res", "lib/res/graphics", "lib/sysdep", "lib/tex" } extern_libs = { "boost", "sdl", "openal", "libpng", "zlib", "valgrind", "cxxtest", "fmt", } -- CPU architecture-specific if arch == "amd64" then table.insert(source_dirs, "lib/sysdep/arch/amd64"); table.insert(source_dirs, "lib/sysdep/arch/x86_x64"); elseif arch == "x86" then table.insert(source_dirs, "lib/sysdep/arch/ia32"); table.insert(source_dirs, "lib/sysdep/arch/x86_x64"); elseif arch == "arm" then table.insert(source_dirs, "lib/sysdep/arch/arm"); elseif arch == "aarch64" then table.insert(source_dirs, "lib/sysdep/arch/aarch64"); elseif arch == "e2k" then table.insert(source_dirs, "lib/sysdep/arch/e2k"); elseif arch == "ppc64" then table.insert(source_dirs, "lib/sysdep/arch/ppc64"); end -- OS-specific sysdep_dirs = { linux = { "lib/sysdep/os/linux", "lib/sysdep/os/unix" }, -- note: RC file must be added to main_exe project. -- note: don't add "lib/sysdep/os/win/aken.cpp" because that must be compiled with the DDK. 
windows = { "lib/sysdep/os/win", "lib/sysdep/os/win/wposix", "lib/sysdep/os/win/whrt" }, macosx = { "lib/sysdep/os/osx", "lib/sysdep/os/unix" }, bsd = { "lib/sysdep/os/bsd", "lib/sysdep/os/unix", "lib/sysdep/os/unix/x" }, } for i,v in pairs(sysdep_dirs[os.target()]) do table.insert(source_dirs, v); end if os.istarget("linux") then if _OPTIONS["android"] then table.insert(source_dirs, "lib/sysdep/os/android") else table.insert(source_dirs, "lib/sysdep/os/unix/x") end end -- On OSX, disable precompiled headers because C++ files and Objective-C++ files are -- mixed in this project. To fix that, we would need per-file basis configuration which -- is not yet supported by the gmake action in premake. We should look into using gmake2. extra_params = {} if os.istarget("macosx") then extra_params = { no_pch = 1 } end -- runtime-library-specific if _ACTION == "vs2017" then table.insert(source_dirs, "lib/sysdep/rtl/msc"); else table.insert(source_dirs, "lib/sysdep/rtl/gcc"); end setup_static_lib_project("lowlevel", source_dirs, extern_libs, extra_params) setup_static_lib_project("gladwrapper", {}, { "glad" }, { no_pch = 1 }) glad_path = libraries_source_dir.."glad/" sysincludedirs { glad_path.."include" } if _OPTIONS["gles"] then files { glad_path.."src/gles2.cpp" } else files { glad_path.."src/gl.cpp" } if os.istarget("windows") then files { glad_path.."src/wgl.cpp" } elseif os.istarget("linux") or os.istarget("bsd") then files { glad_path.."src/glx.cpp" } end end -- Third-party libraries that are built as part of the main project, -- not built externally and then linked source_dirs = { "third_party/mongoose", } extern_libs = { } setup_static_lib_project("mongoose", source_dirs, extern_libs, { no_pch = 1 }) -- CxxTest mock function support extern_libs = { "boost", "cxxtest", } -- 'real' implementations, to be linked against the main executable -- (files are added manually and not with setup_static_lib_project -- because not all files in the directory are included) 
setup_static_lib_project("mocks_real", {}, extern_libs, { no_default_link = 1, no_pch = 1 }) files { "mocks/*.h", source_root.."mocks/*_real.cpp" } -- 'test' implementations, to be linked against the test executable setup_static_lib_project("mocks_test", {}, extern_libs, { no_default_link = 1, no_pch = 1 }) files { source_root.."mocks/*.h", source_root.."mocks/*_test.cpp" } end -------------------------------------------------------------------------------- -- main EXE -------------------------------------------------------------------------------- -- used for main EXE as well as test used_extern_libs = { "sdl", "libpng", "zlib", "spidermonkey", "libxml2", "boost", "cxxtest", "comsuppw", "enet", "libcurl", "tinygettext", "icu", "iconv", "libsodium", "fmt", "freetype", "valgrind", } if not os.istarget("windows") and not _OPTIONS["android"] and not os.istarget("macosx") then -- X11 should only be linked on *nix table.insert(used_extern_libs, "x11") end if not _OPTIONS["without-audio"] then table.insert(used_extern_libs, "openal") table.insert(used_extern_libs, "vorbis") end if not _OPTIONS["without-nvtt"] then table.insert(used_extern_libs, "nvtt") end if not _OPTIONS["without-lobby"] then table.insert(used_extern_libs, "gloox") end if not _OPTIONS["without-miniupnpc"] then table.insert(used_extern_libs, "miniupnpc") end -- Bundles static libs together with main.cpp and builds game executable. 
function setup_main_exe () local target_type = get_main_project_target_type() project_create("pyrogenesis", target_type) filter "system:not macosx" linkgroups 'On' filter {} links { "mocks_real" } local extra_params = { extra_files = { "main.cpp" }, no_pch = 1 } project_add_contents(source_root, {}, {}, extra_params) project_add_extern_libs(used_extern_libs, target_type) dependson { "Collada" } rtti "off" -- Platform Specifics if os.istarget("windows") then files { source_root.."lib/sysdep/os/win/icon.rc" } -- from "lowlevel" static lib; must be added here to be linked in files { source_root.."lib/sysdep/os/win/error_dialog.rc" } linkoptions { -- wraps main thread in a __try block(see wseh.cpp). replace with mainCRTStartup if that's undesired. "/ENTRY:wseh_EntryPoint", -- see wstartup.h "/INCLUDE:_wstartup_InitAndRegisterShutdown", -- allow manual unload of delay-loaded DLLs "/DELAY:UNLOAD", } -- allow the executable to use more than 2GB of RAM. -- this should not be enabled during development, so that memory issues are easily spotted. 
if _OPTIONS["large-address-aware"] then linkoptions { "/LARGEADDRESSAWARE" } end -- see manifest.cpp project_add_manifest() elseif os.istarget("linux") or os.istarget("bsd") then if not _OPTIONS["android"] and not (os.getversion().description == "OpenBSD") then links { "rt" } end if _OPTIONS["android"] then -- NDK's STANDALONE-TOOLCHAIN.html says this is required linkoptions { "-Wl,--fix-cortex-a8" } links { "log" } end if link_execinfo then links { "execinfo" } end if os.istarget("linux") or os.getversion().description == "GNU/kFreeBSD" then links { -- Dynamic libraries (needed for linking for gold) "dl", } end -- Threading support buildoptions { "-pthread" } if not _OPTIONS["android"] then linkoptions { "-pthread" } end -- For debug_resolve_symbol filter "Debug" linkoptions { "-rdynamic" } filter { } elseif os.istarget("macosx") then links { "pthread" } links { "ApplicationServices.framework", "Cocoa.framework", "CoreFoundation.framework" } architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } if _OPTIONS["macosx-version-min"] then xcodebuildsettings { MACOSX_DEPLOYMENT_TARGET = _OPTIONS["macosx-version-min"] } end end end -------------------------------------------------------------------------------- -- atlas -------------------------------------------------------------------------------- -- setup a typical Atlas component project -- extra_params, rel_source_dirs and rel_include_dirs: as in project_add_contents; function setup_atlas_project(project_name, target_type, rel_source_dirs, rel_include_dirs, extern_libs, extra_params) local source_root = rootdir.."/source/tools/atlas/" .. project_name .. "/" project_create(project_name, target_type) -- if not specified, the default for atlas pch files is in the project root. 
if not extra_params["pch_dir"] then extra_params["pch_dir"] = source_root end project_add_contents(source_root, rel_source_dirs, rel_include_dirs, extra_params) project_add_extern_libs(extern_libs, target_type) -- Platform Specifics if os.istarget("windows") then -- Link to required libraries links { "winmm", "delayimp" } elseif os.istarget("macosx") then architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } if _OPTIONS["macosx-version-min"] then xcodebuildsettings { MACOSX_DEPLOYMENT_TARGET = _OPTIONS["macosx-version-min"] } end elseif os.istarget("linux") or os.istarget("bsd") then if os.getversion().description == "FreeBSD" then buildoptions { "-fPIC" } linkoptions { "-fPIC" } else buildoptions { "-rdynamic", "-fPIC" } linkoptions { "-fPIC", "-rdynamic" } end -- warnings triggered by wxWidgets buildoptions { "-Wno-unused-local-typedefs" } end end -- build all Atlas component projects function setup_atlas_projects() setup_atlas_project("AtlasObject", "StaticLib", { -- src ".", "../../../third_party/jsonspirit" },{ -- include "../../../third_party/jsonspirit" },{ -- extern_libs "boost", "iconv", "libxml2" },{ -- extra_params no_pch = 1 }) atlas_src = { "ActorEditor", "CustomControls/Buttons", "CustomControls/Canvas", "CustomControls/ColorDialog", "CustomControls/DraggableListCtrl", "CustomControls/EditableListCtrl", "CustomControls/FileHistory", "CustomControls/HighResTimer", "CustomControls/MapDialog", "CustomControls/MapResizeDialog", "CustomControls/SnapSplitterWindow", "CustomControls/VirtualDirTreeCtrl", "CustomControls/Windows", "General", "General/VideoRecorder", "Misc", "ScenarioEditor", "ScenarioEditor/Sections/Common", "ScenarioEditor/Sections/Cinema", "ScenarioEditor/Sections/Environment", "ScenarioEditor/Sections/Map", "ScenarioEditor/Sections/Object", "ScenarioEditor/Sections/Player", "ScenarioEditor/Sections/Terrain", "ScenarioEditor/Tools", 
"ScenarioEditor/Tools/Common", } atlas_extra_links = { "AtlasObject" } atlas_extern_libs = { "boost", "comsuppw", "iconv", "libxml2", "sdl", -- key definitions "wxwidgets", "zlib", } if not os.istarget("windows") and not os.istarget("macosx") then -- X11 should only be linked on *nix table.insert(atlas_extern_libs, "x11") end setup_atlas_project("AtlasUI", "SharedLib", atlas_src, { -- include "..", "CustomControls", "Misc", "../../../third_party/jsonspirit" }, atlas_extern_libs, { -- extra_params pch_dir = rootdir.."/source/tools/atlas/AtlasUI/Misc/", no_pch = false, extra_links = atlas_extra_links, extra_files = { "Misc/atlas.rc" } }) end -- Atlas 'frontend' tool-launching projects function setup_atlas_frontend_project (project_name) local target_type = get_main_project_target_type() project_create(project_name, target_type) local source_root = rootdir.."/source/tools/atlas/AtlasFrontends/" files { source_root..project_name..".cpp" } if os.istarget("windows") then files { source_root..project_name..".rc" } end includedirs { source_root .. ".." } -- Platform Specifics if os.istarget("windows") then -- see manifest.cpp project_add_manifest() else -- Non-Windows, = Unix links { "AtlasObject" } if os.istarget("macosx") then architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. 
macos_arch } xcodebuildsettings { ARCHS = macos_arch } end end links { "AtlasUI" } end function setup_atlas_frontends() setup_atlas_frontend_project("ActorEditor") end -------------------------------------------------------------------------------- -- collada -------------------------------------------------------------------------------- function setup_collada_project(project_name, target_type, rel_source_dirs, rel_include_dirs, extern_libs, extra_params) project_create(project_name, target_type) local source_root = source_root.."collada/" extra_params["pch_dir"] = source_root project_add_contents(source_root, rel_source_dirs, rel_include_dirs, extra_params) project_add_extern_libs(extern_libs, target_type) -- Platform Specifics if os.istarget("windows") then characterset "MBCS" elseif os.istarget("linux") then defines { "LINUX" } links { "dl", } -- FCollada is not aliasing-safe, so disallow dangerous optimisations -- (TODO: It'd be nice to fix FCollada, but that looks hard) buildoptions { "-fno-strict-aliasing" } if os.getversion().description ~= "FreeBSD" then buildoptions { "-rdynamic" } linkoptions { "-rdynamic" } end elseif os.istarget("bsd") then if os.getversion().description == "OpenBSD" then links { "c", } end if os.getversion().description == "GNU/kFreeBSD" then links { "dl", } end buildoptions { "-fno-strict-aliasing" } buildoptions { "-rdynamic" } linkoptions { "-rdynamic" } elseif os.istarget("macosx") then -- define MACOS-something? buildoptions { "-fno-strict-aliasing" } -- On OSX, fcollada uses a few utility functions from coreservices links { "CoreServices.framework" } architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } end end -- build all Collada component projects function setup_collada_projects() setup_collada_project("Collada", "SharedLib", { -- src "." 
},{ -- include },{ -- extern_libs "fcollada", "iconv", "libxml2" },{ -- extra_params }) end -------------------------------------------------------------------------------- -- tests -------------------------------------------------------------------------------- function setup_tests() local cxxtest = require "cxxtest" if os.istarget("windows") then cxxtest.setpath(rootdir.."/build/bin/cxxtestgen.exe") else cxxtest.setpath(rootdir.."/libraries/source/cxxtest-4.4/bin/cxxtestgen") end local runner = "ErrorPrinter" if _OPTIONS["jenkins-tests"] then runner = "XmlPrinter" end local includefiles = { -- Precompiled headers - the header is added to all generated .cpp files -- note that the header isn't actually precompiled here, only #included -- so that the build stage can use it as a precompiled header. "precompiled.h", -- This is required to build against SDL 2.0.4 on Windows. "lib/external_libraries/libsdl.h", } cxxtest.init(source_root, true, runner, includefiles) local target_type = get_main_project_target_type() project_create("test", target_type) -- Find header files in 'test' subdirectories local all_files = os.matchfiles(source_root .. 
"**/tests/*.h") local test_files = {} for i,v in pairs(all_files) do -- Don't include sysdep tests on the wrong sys -- Don't include Atlas tests unless Atlas is being built if not (string.find(v, "/sysdep/os/win/") and not os.istarget("windows")) and not (string.find(v, "/tools/atlas/") and not _OPTIONS["atlas"]) and not (string.find(v, "/sysdep/arch/x86_x64/") and ((arch ~= "amd64") or (arch ~= "x86"))) then table.insert(test_files, v) end end cxxtest.configure_project(test_files) filter "system:not macosx" linkgroups 'On' filter {} links { static_lib_names } filter "Debug" links { static_lib_names_debug } filter "Release" links { static_lib_names_release } filter { } links { "mocks_test" } if _OPTIONS["atlas"] then links { "AtlasObject" } end extra_params = { extra_files = { "test_setup.cpp" }, } project_add_contents(source_root, {}, {}, extra_params) project_add_extern_libs(used_extern_libs, target_type) dependson { "Collada" } rtti "off" -- TODO: should fix the duplication between this OS-specific linking -- code, and the similar version in setup_main_exe if os.istarget("windows") then -- from "lowlevel" static lib; must be added here to be linked in files { source_root.."lib/sysdep/os/win/error_dialog.rc" } -- see wstartup.h linkoptions { "/INCLUDE:_wstartup_InitAndRegisterShutdown" } -- Enables console for the TEST project on Windows linkoptions { "/SUBSYSTEM:CONSOLE" } project_add_manifest() elseif os.istarget("linux") or os.istarget("bsd") then if link_execinfo then links { "execinfo" } end if not _OPTIONS["android"] and not (os.getversion().description == "OpenBSD") then links { "rt" } end if _OPTIONS["android"] then -- NDK's STANDALONE-TOOLCHAIN.html says this is required linkoptions { "-Wl,--fix-cortex-a8" } end if os.istarget("linux") or os.getversion().description == "GNU/kFreeBSD" then links { -- Dynamic libraries (needed for linking for gold) "dl", } end -- Threading support buildoptions { "-pthread" } if not _OPTIONS["android"] then linkoptions { 
"-pthread" } end -- For debug_resolve_symbol filter "Debug" linkoptions { "-rdynamic" } filter { } includedirs { source_root .. "pch/test/" } elseif os.istarget("macosx") then architecture(macos_arch) buildoptions { "-arch " .. macos_arch } linkoptions { "-arch " .. macos_arch } xcodebuildsettings { ARCHS = macos_arch } if _OPTIONS["macosx-version-min"] then xcodebuildsettings { MACOSX_DEPLOYMENT_TARGET = _OPTIONS["macosx-version-min"] } end end end -- must come first, so that VC sets it as the default project and therefore -- allows running via F5 without the "where is the EXE" dialog. setup_main_exe() setup_all_libs() -- add the static libs to the main EXE project. only now (after -- setup_all_libs has run) are the lib names known. cannot move -- setup_main_exe to run after setup_all_libs (see comment above). -- we also don't want to hardcode the names - that would require more -- work when changing the static lib breakdown. project("pyrogenesis") -- Set the main project active links { static_lib_names } filter "Debug" links { static_lib_names_debug } filter "Release" links { static_lib_names_release } filter { } if _OPTIONS["atlas"] then setup_atlas_projects() setup_atlas_frontends() end setup_collada_projects() if not _OPTIONS["without-tests"] then setup_tests() end Index: ps/trunk/libraries/osx/build-osx-libs.sh =================================================================== --- ps/trunk/libraries/osx/build-osx-libs.sh (revision 26880) +++ ps/trunk/libraries/osx/build-osx-libs.sh (revision 26881) @@ -1,1070 +1,1086 @@ #!/bin/bash # # Script for acquiring and building OS X dependencies for 0 A.D. # # The script checks whether a source tarball exists for each # dependency, if not it will download the correct version from # the project's website, then it removes previous build files, # extracts the tarball, configures and builds the lib. The script # should die on any errors to ease troubleshooting. 
# # make install is used to copy the compiled libs to each specific # directory and also the config tools (e.g. sdl-config). Because # of this, OS X developers must run this script at least once, # to configure the correct lib directories. It must be run again # if the libraries are moved. # # Building against an SDK is an option, though not required, # as not all build environments contain the Developer SDKs # (Xcode does, but the Command Line Tools package does not) # # -------------------------------------------------------------- # Library versions for ease of updating: ZLIB_VERSION="zlib-1.2.11" CURL_VERSION="curl-7.71.0" ICONV_VERSION="libiconv-1.16" XML2_VERSION="libxml2-2.9.10" SDL2_VERSION="SDL2-2.0.18" # NOTE: remember to also update LIB_URL below when changing version BOOST_VERSION="boost_1_76_0" # NOTE: remember to also update LIB_URL below when changing version WXWIDGETS_VERSION="wxWidgets-3.1.4" # libpng was included as part of X11 but that's removed from Mountain Lion # (also the Snow Leopard version was ancient 1.2) PNG_VERSION="libpng-1.6.36" FREETYPE_VERSION="freetype-2.10.4" OGG_VERSION="libogg-1.3.3" VORBIS_VERSION="libvorbis-1.3.7" # gloox requires GnuTLS, GnuTLS requires Nettle and GMP -GMP_VERSION="gmp-6.2.0" +GMP_VERSION="gmp-6.2.1" NETTLE_VERSION="nettle-3.6" # NOTE: remember to also update LIB_URL below when changing version GLOOX_VERSION="gloox-1.0.24" GNUTLS_VERSION="gnutls-3.6.15" # OS X only includes part of ICU, and only the dylib # NOTE: remember to also update LIB_URL below when changing version -ICU_VERSION="icu4c-67_1" +ICU_VERSION="icu4c-69_1" ENET_VERSION="enet-1.3.17" MINIUPNPC_VERSION="miniupnpc-2.2.2" SODIUM_VERSION="libsodium-1.0.18" FMT_VERSION="7.1.3" # -------------------------------------------------------------- # Bundled with the game: # * SpiderMonkey # * NVTT # * FCollada # Provided by OS X: # * OpenAL # * OpenGL # -------------------------------------------------------------- export CC=${CC:="clang"} 
CXX=${CXX:="clang++"} export MIN_OSX_VERSION=${MIN_OSX_VERSION:="10.12"} +export ARCH=${ARCH:=""} # The various libs offer inconsistent configure options, some allow # setting sysroot and OS X-specific options, others don't. Adding to # the confusion, Apple moved /Developer/SDKs into the Xcode app bundle # so the path can't be guessed by clever build tools (like Boost.Build). # Sometimes configure gets it wrong anyway, especially on cross compiles. # This is why we prefer using (OBJ)CFLAGS, (OBJ)CXXFLAGS, and LDFLAGS. # Check if SYSROOT is set and not empty if [[ $SYSROOT && ${SYSROOT-_} ]]; then C_FLAGS="-isysroot $SYSROOT" LDFLAGS="$LDFLAGS -Wl,-syslibroot,$SYSROOT" fi # Check if MIN_OSX_VERSION is set and not empty if [[ $MIN_OSX_VERSION && ${MIN_OSX_VERSION-_} ]]; then C_FLAGS="$C_FLAGS -mmacosx-version-min=$MIN_OSX_VERSION" # clang and llvm-gcc look at mmacosx-version-min to determine link target # and CRT version, and use it to set the macosx_version_min linker flag LDFLAGS="$LDFLAGS -mmacosx-version-min=$MIN_OSX_VERSION" fi CFLAGS="$CFLAGS $C_FLAGS -fvisibility=hidden" -CXXFLAGS="$CXXFLAGS $C_FLAGS -stdlib=libc++ -std=c++17 -msse4.1" +CXXFLAGS="$CXXFLAGS $C_FLAGS -stdlib=libc++ -std=c++17" OBJCFLAGS="$OBJCFLAGS $C_FLAGS" OBJCXXFLAGS="$OBJCXXFLAGS $C_FLAGS" # Force x86_64 architecture on MacOS for now. # NB: annoyingly, this is rather unstandardised. Some libs expect -arch, others different things. # Further: wxWidgets uses its own system and actually fails to compile with arch arguments. ARCHLESS_CFLAGS=$CFLAGS ARCHLESS_CXXFLAGS=$CXXFLAGS ARCHLESS_LDFLAGS="$LDFLAGS -stdlib=libc++" -CFLAGS="$CFLAGS -arch x86_64" -CXXFLAGS="$CXXFLAGS -arch x86_64" +# If ARCH isn't set, select either x86_64 or arm64 +if [ -z "${ARCH}" ]; then + if [ "`uname -m`" == "arm64" ]; then + ARCH="arm64" + # Some libs want this passed to configure for cross compilation. 
+ HOST_PLATFORM="--host=aarch64-apple-darwin" + else + CXXFLAGS="$CXXFLAGS -msse4.1" + ARCH="x86_64" + # Some libs want this passed to configure for cross compilation. + HOST_PLATFORM="--host=x86_64-apple-darwin" + fi +fi + +echo "ARCHITECTURE BREAKDOWN" +echo $ARCH +echo $HOST_PLATFORM -LDFLAGS="$LDFLAGS -arch x86_64" +CFLAGS="$CFLAGS -arch $ARCH" +CXXFLAGS="$CXXFLAGS -arch $ARCH" -# Some libs want this passed to configure for cross compilation. -HOST_PLATFORM="--host=x86_64-apple-darwin" +LDFLAGS="$LDFLAGS -arch $ARCH" # CMake doesn't seem to pick up on architecture with CFLAGS only -CMAKE_FLAGS="-DCMAKE_OSX_ARCHITECTURES=x86_64" +CMAKE_FLAGS="-DCMAKE_OSX_ARCHITECTURES=$ARCH" JOBS=${JOBS:="-j2"} set -e die() { echo ERROR: $* exit 1 } download_lib() { local url=$1 local filename=$2 if [ ! -e $filename ]; then echo "Downloading $filename" curl -fLo ${filename} ${url}${filename} || die "Download of $url$filename failed" fi } already_built() { echo -e "Skipping - already built (use --force-rebuild to override)" } # Check that we're actually on OS X if [ "`uname -s`" != "Darwin" ]; then die "This script is intended for OS X only" fi # Parse command-line options: force_rebuild=false for i in "$@" do case $i in --force-rebuild ) force_rebuild=true;; -j* ) JOBS=$i ;; esac done cd "$(dirname $0)" # Now in libraries/osx/ (where we assume this script resides) # Create a location to create copies of dependencies' *.pc files, so they can be found by pkg-config PC_PATH="$(pwd)/pkgconfig/" if [[ "$force_rebuild" = "true" ]]; then rm -rf $PC_PATH fi mkdir -p $PC_PATH # -------------------------------------------------------------- echo -e "Building zlib..." LIB_VERSION="${ZLIB_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY=$LIB_VERSION LIB_URL="https://zlib.net/fossils/" mkdir -p zlib pushd zlib > /dev/null ZLIB_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ !
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # patch zlib's configure script to use our CFLAGS and LDFLAGS (patch -Np0 -i ../../patches/zlib_flags.diff \ && CFLAGS="$CFLAGS" LDFLAGS="$LDFLAGS" \ ./configure --prefix="$ZLIB_DIR" \ --static \ && make ${JOBS} && make install) || die "zlib build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libcurl..." LIB_VERSION="${CURL_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.bz2" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://curl.haxx.se/download/" mkdir -p libcurl pushd libcurl > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix="$INSTALL_DIR" \ --enable-ipv6 \ --with-darwinssl \ --without-gssapi \ --without-libmetalink \ --without-libpsl \ --without-librtmp \ --without-libssh2 \ --without-nghttp2 \ --without-nss \ --without-polarssl \ --without-ssl \ --without-gnutls \ --without-brotli \ --without-cyassl \ --without-winssl \ --without-mbedtls \ --without-wolfssl \ --without-spnego \ --disable-ares \ --disable-ldap \ --disable-ldaps \ --without-libidn2 \ --with-zlib="${ZLIB_DIR}" \ --enable-shared=no \ && make ${JOBS} && make install) || die "libcurl build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libiconv..." 
LIB_VERSION="${ICONV_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://ftp.gnu.org/pub/gnu/libiconv/" mkdir -p iconv pushd iconv > /dev/null ICONV_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix="$ICONV_DIR" \ --without-libiconv-prefix \ --without-libintl-prefix \ --disable-nls \ --enable-shared=no \ && make ${JOBS} && make install) || die "libiconv build failed" popd echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libxml2..." LIB_VERSION="${XML2_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="ftp://xmlsoft.org/libxml2/" mkdir -p libxml2 pushd libxml2 > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix="$INSTALL_DIR" \ --without-lzma \ --without-python \ --with-iconv="${ICONV_DIR}" \ --with-zlib="${ZLIB_DIR}" \ --enable-shared=no \ && make ${JOBS} && make install) || die "libxml2 build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building SDL2..." LIB_VERSION="${SDL2_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY=$LIB_VERSION LIB_URL="https://libsdl.org/release/" mkdir -p sdl2 pushd sdl2 > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # We don't want SDL2 to pull in system iconv, force it to detect ours with flags. # Don't use X11 - we don't need it and Mountain Lion removed it (./configure CPPFLAGS="-I${ICONV_DIR}/include" \ CFLAGS="$CFLAGS" \ CXXFLAGS="$CXXFLAGS" \ LDFLAGS="$LDFLAGS -L${ICONV_DIR}/lib" \ --prefix="$INSTALL_DIR" \ --disable-video-x11 \ --without-x \ --enable-video-cocoa \ --enable-shared=no \ && make $JOBS && make install) || die "SDL2 build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building Boost..." LIB_VERSION="${BOOST_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.bz2" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://boostorg.jfrog.io/artifactory/main/release/1.76.0/source/" mkdir -p boost pushd boost > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # Can't use macosx-version, see above comment. (./bootstrap.sh --with-libraries=filesystem,system \ --prefix=$INSTALL_DIR \ && ./b2 cflags="$CFLAGS" \ toolset=clang \ cxxflags="$CXXFLAGS" \ linkflags="$LDFLAGS" ${JOBS} \ -d2 \ --layout=system \ --debug-configuration \ link=static \ threading=multi \ variant=release install \ ) || die "Boost build failed" popd echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- # TODO: This build takes ages, anything we can exclude? echo -e "Building wxWidgets..." 
LIB_VERSION="${WXWIDGETS_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.bz2" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://github.com/wxWidgets/wxWidgets/releases/download/v3.1.4/" mkdir -p wxwidgets pushd wxwidgets > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY mkdir -p build-release pushd build-release CONF_OPTS="--prefix=$INSTALL_DIR --disable-shared --enable-unicode --enable-universal_binary=x86_64 --with-cocoa --with-opengl --with-libiconv-prefix=${ICONV_DIR} --with-expat=builtin --with-libpng=builtin --without-libtiff --without-sdl --without-x --disable-stc --disable-webview --disable-webkit --disable-webviewwebkit --disable-webviewie --without-libjpeg" # wxWidgets configure now defaults to targeting 10.5, if not specified, # but that conflicts with our flags if [[ $MIN_OSX_VERSION && ${MIN_OSX_VERSION-_} ]]; then CONF_OPTS="$CONF_OPTS --with-macosx-version-min=$MIN_OSX_VERSION" fi (../configure CFLAGS="$ARCHLESS_CFLAGS" \ CXXFLAGS="$ARCHLESS_CXXFLAGS" \ CPPFLAGS="-D__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORES=1" \ LDFLAGS="$ARCHLESS_LDFLAGS" $CONF_OPTS \ && make ${JOBS} && make install) || die "wxWidgets build failed" popd popd echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libpng..." LIB_VERSION="${PNG_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://download.sourceforge.net/libpng/" mkdir -p libpng pushd libpng > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # libpng has no flags for zlib but the 10.12 version is too old, so link our own. (./configure CFLAGS="$CFLAGS" CPPFLAGS=" -I $ZLIB_DIR/include "\ LDFLAGS="$LDFLAGS -L$ZLIB_DIR/lib" \ --prefix=$INSTALL_DIR \ --enable-shared=no \ && make ${JOBS} && make install) || die "libpng build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building freetype..." LIB_VERSION="${FREETYPE_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://download.savannah.gnu.org/releases/freetype/" mkdir -p freetype pushd freetype > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" LDFLAGS="$LDFLAGS" \ --prefix=$INSTALL_DIR \ --enable-shared=no \ --with-bzip2=no \ --with-brotli=no \ && make ${JOBS} && make install) || die "freetype build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- # Dependency of vorbis echo -e "Building libogg..." LIB_VERSION="${OGG_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://downloads.xiph.org/releases/ogg/" mkdir -p libogg pushd libogg > /dev/null OGG_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix=$OGG_DIR \ --enable-shared=no \ && make ${JOBS} && make install) || die "libogg build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libvorbis..." LIB_VERSION="${VORBIS_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://downloads.xiph.org/releases/vorbis/" mkdir -p vorbis pushd vorbis > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix="$INSTALL_DIR" \ --enable-shared=no \ --with-ogg="$OGG_DIR" \ && make ${JOBS} && make install) || die "libvorbis build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building GMP..." LIB_VERSION="${GMP_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.bz2" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://gmplib.org/download/gmp/" mkdir -p gmp pushd gmp > /dev/null GMP_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # NOTE: enable-fat in this case allows building and running on different CPUS. 
# Otherwise CPU-specific instructions will be used with no fallback for older CPUs. (./configure CFLAGS="$CFLAGS" \ CXXFLAGS="$CXXFLAGS" \ LDFLAGS="$LDFLAGS" \ "$HOST_PLATFORM" \ --prefix="$INSTALL_DIR" \ --enable-fat \ --disable-shared \ --with-pic \ && make ${JOBS} && make install) || die "GMP build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building Nettle..." # Also builds hogweed LIB_VERSION="${NETTLE_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://ftp.gnu.org/gnu/nettle/" mkdir -p nettle pushd nettle > /dev/null NETTLE_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # NOTE: enable-fat in this case allows building and running on different CPUS. # Otherwise CPU-specific instructions will be used with no fallback for older CPUs. (./configure CFLAGS="$CFLAGS" \ CXXFLAGS="$CXXFLAGS" \ LDFLAGS="$LDFLAGS" \ --with-include-path="${GMP_DIR}/include" \ --with-lib-path="${GMP_DIR}/lib" \ --prefix="$INSTALL_DIR" \ --enable-fat \ --disable-shared \ --disable-documentation \ --disable-openssl \ --disable-assembler \ && make ${JOBS} && make install) || die "Nettle build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building GnuTLS..." LIB_VERSION="${GNUTLS_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.xz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://www.gnupg.org/ftp/gcrypt/gnutls/v3.6/" mkdir -p gnutls pushd gnutls > /dev/null GNUTLS_DIR="$(pwd)" if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # Patch GNUTLS for a linking issue with isdigit # Patch by Ross Nicholson: https://gitlab.com/gnutls/gnutls/-/issues/1033#note_379529145 (patch -Np1 -i ../../patches/03-undo-libtasn1-cisdigit.patch \ && ./configure CFLAGS="$CFLAGS" \ CXXFLAGS="$CXXFLAGS" \ LDFLAGS="$LDFLAGS" \ LIBS="-L${GMP_DIR}/lib -lgmp" \ NETTLE_CFLAGS="-I${NETTLE_DIR}/include" \ NETTLE_LIBS="-L${NETTLE_DIR}/lib -lnettle" \ HOGWEED_CFLAGS="-I${NETTLE_DIR}/include" \ HOGWEED_LIBS="-L${NETTLE_DIR}/lib -lhogweed" \ GMP_CFLAGS="-I${GMP_DIR}/include" \ GMP_LIBS="-L${GMP_DIR}/lib -lgmp" \ --prefix="$INSTALL_DIR" \ --enable-shared=no \ --without-idn \ --with-included-unistring \ --with-included-libtasn1 \ --without-p11-kit \ --disable-tests \ --disable-guile \ --disable-doc \ --disable-tools \ --disable-nls \ && make ${JOBS} LDFLAGS= install) || die "GnuTLS build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building gloox..." LIB_VERSION="${GLOOX_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.bz2" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://camaya.net/download/" mkdir -p gloox pushd gloox > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # TODO: pulls in libresolv dependency from /usr/lib (./configure CFLAGS="$CFLAGS" \ CXXFLAGS="$CXXFLAGS" \ LDFLAGS="$LDFLAGS" \ "$HOST_PLATFORM" \ --prefix="$INSTALL_DIR" \ GNUTLS_CFLAGS="-I${GNUTLS_DIR}/include" \ GNUTLS_LIBS="-L${GNUTLS_DIR}/lib -lgnutls" \ --enable-shared=no \ --with-zlib="${ZLIB_DIR}" \ --without-libidn \ --with-gnutls="yes" \ --without-openssl \ --without-tests \ --without-examples \ --disable-getaddrinfo \ && make ${JOBS} && make install) || die "gloox build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building ICU..." LIB_VERSION="${ICU_VERSION}" LIB_ARCHIVE="$LIB_VERSION-src.tgz" LIB_DIRECTORY="icu" -LIB_URL="https://github.com/unicode-org/icu/releases/download/release-67-1/" +LIB_URL="https://github.com/unicode-org/icu/releases/download/release-69-1/" mkdir -p icu pushd icu > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib sbin share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY mkdir -p source/build pushd source/build (CFLAGS="$CFLAGS" CXXFLAGS="$CXXFLAGS" LDFLAGS="$LDFLAGS" \ ../runConfigureICU MacOSX \ "$HOST_PLATFORM" \ --prefix=$INSTALL_DIR \ --disable-shared \ --enable-static \ --disable-samples \ --enable-extras \ --enable-icuio \ --enable-tools \ && make ${JOBS} && make install) || die "ICU build failed" popd popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building ENet..." LIB_VERSION="${ENET_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://enet.bespin.org/download/" mkdir -p enet pushd enet > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib sbin share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix=${INSTALL_DIR} \ --enable-shared=no \ && make clean && make ${JOBS} && make install) || die "ENet build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building MiniUPnPc..." LIB_VERSION="${MINIUPNPC_VERSION}" LIB_ARCHIVE="$LIB_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="http://miniupnp.tuxfamily.org/files/download.php?file=" mkdir -p miniupnpc pushd miniupnpc > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY bin include lib share tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (make clean \ && CFLAGS=$CFLAGS LDFLAGS=$LDFLAGS make ${JOBS} \ && INSTALLPREFIX="$INSTALL_DIR" make install \ ) || die "MiniUPnPc build failed" popd # TODO: how can we not build the dylibs? rm -f lib/*.dylib cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building libsodium..." LIB_VERSION="${SODIUM_VERSION}" LIB_ARCHIVE="$SODIUM_VERSION.tar.gz" LIB_DIRECTORY="$LIB_VERSION" LIB_URL="https://download.libsodium.org/libsodium/releases/" mkdir -p libsodium pushd libsodium > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! -e .already-built ]] || [[ "$(<.already-built)" != "$LIB_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY (./configure CFLAGS="$CFLAGS" \ LDFLAGS="$LDFLAGS" \ --prefix=${INSTALL_DIR} \ --enable-shared=no \ && make clean \ && CFLAGS=$CFLAGS LDFLAGS=$LDFLAGS make ${JOBS} \ && make check \ && INSTALLPREFIX="$INSTALL_DIR" make install \ ) || die "libsodium build failed" popd cp -f lib/pkgconfig/* $PC_PATH echo "$LIB_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------- echo -e "Building fmt..." LIB_DIRECTORY="fmt-$FMT_VERSION" LIB_ARCHIVE="$FMT_VERSION.tar.gz" LIB_URL="https://github.com/fmtlib/fmt/archive/" mkdir -p fmt pushd fmt > /dev/null if [[ "$force_rebuild" = "true" ]] || [[ ! 
-e .already-built ]] || [[ "$(<.already-built)" != "$FMT_VERSION" ]] then INSTALL_DIR="$(pwd)" rm -f .already-built download_lib $LIB_URL $LIB_ARCHIVE rm -rf $LIB_DIRECTORY include lib tar -xf $LIB_ARCHIVE pushd $LIB_DIRECTORY # It appears that older versions of Clang require constexpr statements to have a user-set constructor. patch -Np1 -i ../../patches/fmt_constexpr.diff mkdir -p build pushd build (cmake .. \ -DFMT_TEST=False \ -DFMT_DOC=False \ -DCMAKE_INSTALL_PREFIX="$INSTALL_DIR" \ "$CMAKE_FLAGS" \ && make fmt ${JOBS} && make install) || die "fmt build failed" popd popd cp -f lib/pkgconfig/* $PC_PATH echo "$FMT_VERSION" > .already-built else already_built fi popd > /dev/null # -------------------------------------------------------------------- # The following libraries are shared on different OSes and may # be customized, so we build and install them from bundled sources # -------------------------------------------------------------------- # SpiderMonkey - bundled, no download pushd ../source/spidermonkey/ > /dev/null if [[ "$force_rebuild" = "true" ]] then rm -f .already-built fi # Use the regular build script for SM. 
-JOBS="$JOBS" ZLIB_DIR="$ZLIB_DIR" ./build.sh || die "Error building spidermonkey" +JOBS="$JOBS" ZLIB_DIR="$ZLIB_DIR" ARCH="$ARCH" ./build.sh || die "Error building spidermonkey" popd > /dev/null # -------------------------------------------------------------- # NVTT - bundled, no download pushd ../source/nvtt > /dev/null if [[ "$force_rebuild" = "true" ]] then rm -f .already-built fi CXXFLAGS="$CXXFLAGS" CFLAGS="$CFLAGS" LDFLAGS="$LDFLAGS" CMAKE_FLAGS="$CMAKE_FLAGS" JOBS="$JOBS" ./build.sh || die "Error building NVTT" popd > /dev/null # -------------------------------------------------------------- # FCollada - bundled, no download pushd ../source/fcollada/ > /dev/null if [[ "$force_rebuild" = "true" ]] then rm -f .already-built fi CXXFLAGS="$CXXFLAGS" CFLAGS="$CFLAGS" LDFLAGS="$LDFLAGS" JOBS="$JOBS" ./build.sh || die "Error building FCollada" popd > /dev/null Index: ps/trunk/libraries/source/nvtt/src/src/nvcore/Debug.cpp =================================================================== --- ps/trunk/libraries/source/nvtt/src/src/nvcore/Debug.cpp (revision 26880) +++ ps/trunk/libraries/source/nvtt/src/src/nvcore/Debug.cpp (revision 26881) @@ -1,1278 +1,1281 @@ // This code is in the public domain -- Ignacio CastaƱo #include "Debug.h" #include "Array.inl" #include "StrLib.h" // StringBuilder #include "StdStream.h" // fileOpen #include // Extern #if NV_OS_WIN32 //&& NV_CC_MSVC # define WIN32_LEAN_AND_MEAN # define VC_EXTRALEAN # include # include # if NV_CC_MSVC # include # if _MSC_VER < 1300 # define DECLSPEC_DEPRECATED // VC6: change this path to your Platform SDK headers # include // must be XP version of file // include "M:\\dev7\\vs\\devtools\\common\\win32sdk\\include\\dbghelp.h" # else // VC7: ships with updated headers # include # endif # endif # pragma comment(lib,"dbghelp.lib") #endif #if NV_OS_XBOX # include # ifdef _DEBUG # include # endif //_DEBUG #endif //NV_OS_XBOX #if !NV_OS_WIN32 && defined(HAVE_SIGNAL_H) # include #endif #if NV_OS_UNIX # include 
// getpid #endif #if NV_OS_LINUX && defined(HAVE_EXECINFO_H) # include // backtrace # if NV_CC_GNUC // defined(HAVE_CXXABI_H) # include # endif #endif #if NV_OS_DARWIN || NV_OS_FREEBSD || NV_OS_NETBSD || NV_OS_OPENBSD # include # include # include // sysctl # if !defined(NV_OS_OPENBSD) # include # endif # if defined(HAVE_EXECINFO_H) // only after OSX 10.5 # include // backtrace # if NV_CC_GNUC // defined(HAVE_CXXABI_H) # include # endif # endif #endif #if NV_OS_ORBIS #include #endif #define NV_USE_SEPARATE_THREAD 1 using namespace nv; namespace { static MessageHandler * s_message_handler = NULL; static AssertHandler * s_assert_handler = NULL; static bool s_sig_handler_enabled = false; static bool s_interactive = true; #if NV_OS_WIN32 && NV_CC_MSVC // Old exception filter. static LPTOP_LEVEL_EXCEPTION_FILTER s_old_exception_filter = NULL; #elif !NV_OS_WIN32 && defined(HAVE_SIGNAL_H) // Old signal handlers. struct sigaction s_old_sigsegv; struct sigaction s_old_sigtrap; struct sigaction s_old_sigfpe; struct sigaction s_old_sigbus; #endif #if NV_OS_WIN32 && NV_CC_MSVC // We should try to simplify the top level filter as much as possible. // http://www.nynaeve.net/?p=128 #if NV_USE_SEPARATE_THREAD // The critical section enforcing the requirement that only one exception be // handled by a handler at a time. static CRITICAL_SECTION s_handler_critical_section; // Semaphores used to move exception handling between the exception thread // and the handler thread. handler_start_semaphore_ is signalled by the // exception thread to wake up the handler thread when an exception occurs. // handler_finish_semaphore_ is signalled by the handler thread to wake up // the exception thread when handling is complete. static HANDLE s_handler_start_semaphore = NULL; static HANDLE s_handler_finish_semaphore = NULL; // The exception handler thread. 
static HANDLE s_handler_thread = NULL; static DWORD s_requesting_thread_id = 0; static EXCEPTION_POINTERS * s_exception_info = NULL; #endif // NV_USE_SEPARATE_THREAD struct MinidumpCallbackContext { ULONG64 memory_base; ULONG memory_size; bool finished; }; // static static BOOL CALLBACK miniDumpWriteDumpCallback(PVOID context, const PMINIDUMP_CALLBACK_INPUT callback_input, PMINIDUMP_CALLBACK_OUTPUT callback_output) { switch (callback_input->CallbackType) { case MemoryCallback: { MinidumpCallbackContext* callback_context = reinterpret_cast(context); if (callback_context->finished) return FALSE; // Include the specified memory region. callback_output->MemoryBase = callback_context->memory_base; callback_output->MemorySize = callback_context->memory_size; callback_context->finished = true; return TRUE; } // Include all modules. case IncludeModuleCallback: case ModuleCallback: return TRUE; // Include all threads. case IncludeThreadCallback: case ThreadCallback: return TRUE; // Stop receiving cancel callbacks. case CancelCallback: callback_output->CheckCancel = FALSE; callback_output->Cancel = FALSE; return TRUE; } // Ignore other callback types. return FALSE; } static bool writeMiniDump(EXCEPTION_POINTERS * pExceptionInfo) { // create the file HANDLE hFile = CreateFileA("crash.dmp", GENERIC_WRITE, FILE_SHARE_WRITE, NULL, CREATE_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); if (hFile == INVALID_HANDLE_VALUE) { //nvDebug("*** Failed to create dump file.\n"); return false; } MINIDUMP_EXCEPTION_INFORMATION * pExInfo = NULL; MINIDUMP_CALLBACK_INFORMATION * pCallback = NULL; if (pExceptionInfo != NULL) { MINIDUMP_EXCEPTION_INFORMATION ExInfo; ExInfo.ThreadId = ::GetCurrentThreadId(); ExInfo.ExceptionPointers = pExceptionInfo; ExInfo.ClientPointers = NULL; pExInfo = &ExInfo; MINIDUMP_CALLBACK_INFORMATION callback; MinidumpCallbackContext context; // Find a memory region of 256 bytes centered on the // faulting instruction pointer. 
const ULONG64 instruction_pointer = #if defined(_M_IX86) pExceptionInfo->ContextRecord->Eip; #elif defined(_M_AMD64) pExceptionInfo->ContextRecord->Rip; #else #error Unsupported platform #endif MEMORY_BASIC_INFORMATION info; if (VirtualQuery(reinterpret_cast(instruction_pointer), &info, sizeof(MEMORY_BASIC_INFORMATION)) != 0 && info.State == MEM_COMMIT) { // Attempt to get 128 bytes before and after the instruction // pointer, but settle for whatever's available up to the // boundaries of the memory region. const ULONG64 kIPMemorySize = 256; context.memory_base = max(reinterpret_cast(info.BaseAddress), instruction_pointer - (kIPMemorySize / 2)); ULONG64 end_of_range = min(instruction_pointer + (kIPMemorySize / 2), reinterpret_cast(info.BaseAddress) + info.RegionSize); context.memory_size = static_cast(end_of_range - context.memory_base); context.finished = false; callback.CallbackRoutine = miniDumpWriteDumpCallback; callback.CallbackParam = reinterpret_cast(&context); pCallback = &callback; } } MINIDUMP_TYPE miniDumpType = (MINIDUMP_TYPE)(MiniDumpNormal|MiniDumpWithHandleData|MiniDumpWithThreadInfo); // write the dump BOOL ok = MiniDumpWriteDump(GetCurrentProcess(), GetCurrentProcessId(), hFile, miniDumpType, pExInfo, NULL, pCallback) != 0; CloseHandle(hFile); if (ok == FALSE) { //nvDebug("*** Failed to save dump file.\n"); return false; } //nvDebug("\nDump file saved.\n"); return true; } #if NV_USE_SEPARATE_THREAD static DWORD WINAPI ExceptionHandlerThreadMain(void* lpParameter) { nvDebugCheck(s_handler_start_semaphore != NULL); nvDebugCheck(s_handler_finish_semaphore != NULL); while (true) { if (WaitForSingleObject(s_handler_start_semaphore, INFINITE) == WAIT_OBJECT_0) { writeMiniDump(s_exception_info); // Allow the requesting thread to proceed. ReleaseSemaphore(s_handler_finish_semaphore, 1, NULL); } } // This statement is not reached when the thread is unconditionally // terminated by the ExceptionHandler destructor. 
return 0; } #endif // NV_USE_SEPARATE_THREAD static bool hasStackTrace() { return true; } /*static NV_NOINLINE int backtrace(void * trace[], int maxcount) { // In Windows XP and Windows Server 2003, the sum of the FramesToSkip and FramesToCapture parameters must be less than 63. int xp_maxcount = min(63-1, maxcount); int count = RtlCaptureStackBackTrace(1, xp_maxcount, trace, NULL); nvDebugCheck(count <= maxcount); return count; }*/ static NV_NOINLINE int backtraceWithSymbols(CONTEXT * ctx, void * trace[], int maxcount, int skip = 0) { // Init the stack frame for this function STACKFRAME64 stackFrame = { 0 }; #if NV_CPU_X86_64 DWORD dwMachineType = IMAGE_FILE_MACHINE_AMD64; stackFrame.AddrPC.Offset = ctx->Rip; stackFrame.AddrFrame.Offset = ctx->Rbp; stackFrame.AddrStack.Offset = ctx->Rsp; #elif NV_CPU_X86 DWORD dwMachineType = IMAGE_FILE_MACHINE_I386; stackFrame.AddrPC.Offset = ctx->Eip; stackFrame.AddrFrame.Offset = ctx->Ebp; stackFrame.AddrStack.Offset = ctx->Esp; #else #error "Platform not supported!" 
#endif stackFrame.AddrPC.Mode = AddrModeFlat; stackFrame.AddrFrame.Mode = AddrModeFlat; stackFrame.AddrStack.Mode = AddrModeFlat; // Walk up the stack const HANDLE hThread = GetCurrentThread(); const HANDLE hProcess = GetCurrentProcess(); int i; for (i = 0; i < maxcount; i++) { // walking once first makes us skip self if (!StackWalk64(dwMachineType, hProcess, hThread, &stackFrame, ctx, NULL, &SymFunctionTableAccess64, &SymGetModuleBase64, NULL)) { break; } /*if (stackFrame.AddrPC.Offset == stackFrame.AddrReturn.Offset || stackFrame.AddrPC.Offset == 0) { break; }*/ if (i >= skip) { trace[i - skip] = (PVOID)stackFrame.AddrPC.Offset; } } return i - skip; } #pragma warning(push) #pragma warning(disable:4748) static NV_NOINLINE int backtrace(void * trace[], int maxcount) { CONTEXT ctx = { 0 }; #if NV_CPU_X86 && !NV_CPU_X86_64 ctx.ContextFlags = CONTEXT_CONTROL; _asm { call x x: pop eax mov ctx.Eip, eax mov ctx.Ebp, ebp mov ctx.Esp, esp } #else RtlCaptureContext(&ctx); // Not implemented correctly in x86. 
#endif return backtraceWithSymbols(&ctx, trace, maxcount, 1); } #pragma warning(pop) static NV_NOINLINE void writeStackTrace(void * trace[], int size, int start, Array & lines) { StringBuilder builder(512); HANDLE hProcess = GetCurrentProcess(); // Resolve PC to function names for (int i = start; i < size; i++) { // Check for end of stack walk DWORD64 ip = (DWORD64)trace[i]; if (ip == NULL) break; // Get function name #define MAX_STRING_LEN (512) unsigned char byBuffer[sizeof(IMAGEHLP_SYMBOL64) + MAX_STRING_LEN] = { 0 }; IMAGEHLP_SYMBOL64 * pSymbol = (IMAGEHLP_SYMBOL64*)byBuffer; pSymbol->SizeOfStruct = sizeof(IMAGEHLP_SYMBOL64); pSymbol->MaxNameLength = MAX_STRING_LEN; DWORD64 dwDisplacement; if (SymGetSymFromAddr64(hProcess, ip, &dwDisplacement, pSymbol)) { pSymbol->Name[MAX_STRING_LEN-1] = 0; /* // Make the symbol readable for humans UnDecorateSymbolName( pSym->Name, lpszNonUnicodeUnDSymbol, BUFFERSIZE, UNDNAME_COMPLETE | UNDNAME_NO_THISTYPE | UNDNAME_NO_SPECIAL_SYMS | UNDNAME_NO_MEMBER_TYPE | UNDNAME_NO_MS_KEYWORDS | UNDNAME_NO_ACCESS_SPECIFIERS ); */ // pSymbol->Name const char * pFunc = pSymbol->Name; // Get file/line number IMAGEHLP_LINE64 theLine = { 0 }; theLine.SizeOfStruct = sizeof(theLine); DWORD dwDisplacement; if (!SymGetLineFromAddr64(hProcess, ip, &dwDisplacement, &theLine)) { // Do not print unknown symbols anymore. break; //builder.format("unknown(%08X) : %s\n", (uint32)ip, pFunc); } else { /* const char* pFile = strrchr(theLine.FileName, '\\'); if ( pFile == NULL ) pFile = theLine.FileName; else pFile++; */ const char * pFile = theLine.FileName; int line = theLine.LineNumber; builder.format("%s(%d) : %s\n", pFile, line, pFunc); } lines.append(builder.release()); if (pFunc != NULL && strcmp(pFunc, "WinMain") == 0) { break; } } } } // Write mini dump and print stack trace. 
static LONG WINAPI handleException(EXCEPTION_POINTERS * pExceptionInfo) { EnterCriticalSection(&s_handler_critical_section); #if NV_USE_SEPARATE_THREAD s_requesting_thread_id = GetCurrentThreadId(); s_exception_info = pExceptionInfo; // This causes the handler thread to call writeMiniDump. ReleaseSemaphore(s_handler_start_semaphore, 1, NULL); // Wait until WriteMinidumpWithException is done and collect its return value. WaitForSingleObject(s_handler_finish_semaphore, INFINITE); //bool status = s_handler_return_value; // Clean up. s_requesting_thread_id = 0; s_exception_info = NULL; #else // First of all, write mini dump. writeMiniDump(pExceptionInfo); #endif LeaveCriticalSection(&s_handler_critical_section); nvDebug("\nDump file saved.\n"); // Try to attach to debugger. if (s_interactive && debug::attachToDebugger()) { nvDebugBreak(); return EXCEPTION_CONTINUE_EXECUTION; } // If that fails, then try to pretty print a stack trace and terminate. void * trace[64]; int size = backtraceWithSymbols(pExceptionInfo->ContextRecord, trace, 64); // @@ Use win32's CreateFile? FILE * fp = fileOpen("crash.txt", "wb"); if (fp != NULL) { Array lines; writeStackTrace(trace, size, 0, lines); for (uint i = 0; i < lines.count(); i++) { fputs(lines[i], fp); delete lines[i]; } // @@ Add more info to crash.txt? fclose(fp); } // This should terminate the process and set the error exit code. TerminateProcess(GetCurrentProcess(), EXIT_FAILURE + 2); return EXCEPTION_EXECUTE_HANDLER; // Terminate app. In case terminate process did not succeed. 
} static void handlePureVirtualCall() { nvDebugBreak(); TerminateProcess(GetCurrentProcess(), EXIT_FAILURE + 8); } static void handleInvalidParameter(const wchar_t * wexpresion, const wchar_t * wfunction, const wchar_t * wfile, unsigned int line, uintptr_t reserved) { size_t convertedCharCount = 0; StringBuilder expresion; if (wexpresion != NULL) { uint size = U32(wcslen(wexpresion) + 1); expresion.reserve(size); wcstombs_s(&convertedCharCount, expresion.str(), size, wexpresion, _TRUNCATE); } StringBuilder file; if (wfile != NULL) { uint size = U32(wcslen(wfile) + 1); file.reserve(size); wcstombs_s(&convertedCharCount, file.str(), size, wfile, _TRUNCATE); } StringBuilder function; if (wfunction != NULL) { uint size = U32(wcslen(wfunction) + 1); function.reserve(size); wcstombs_s(&convertedCharCount, function.str(), size, wfunction, _TRUNCATE); } int result = nvAbort(expresion.str(), file.str(), line, function.str()); if (result == NV_ABORT_DEBUG) { nvDebugBreak(); } } #elif !NV_OS_WIN32 && defined(HAVE_SIGNAL_H) // NV_OS_LINUX || NV_OS_DARWIN #if defined(HAVE_EXECINFO_H) static bool hasStackTrace() { return true; } static void writeStackTrace(void * trace[], int size, int start, Array & lines) { StringBuilder builder(512); char ** string_array = backtrace_symbols(trace, size); for(int i = start; i < size-1; i++ ) { # if NV_CC_GNUC // defined(HAVE_CXXABI_H) // @@ Write a better parser for the possible formats. char * begin = strchr(string_array[i], '('); char * end = strrchr(string_array[i], '+'); char * module = string_array[i]; if (begin == 0 && end != 0) { *(end - 1) = '\0'; begin = strrchr(string_array[i], ' '); module = NULL; // Ignore module. 
} if (begin != 0 && begin < end) { int stat; *end = '\0'; *begin = '\0'; char * name = abi::__cxa_demangle(begin+1, 0, 0, &stat); if (module == NULL) { if (name == NULL || stat != 0) { builder.format(" In: '%s'\n", begin+1); } else { builder.format(" In: '%s'\n", name); } } else { if (name == NULL || stat != 0) { builder.format(" In: [%s] '%s'\n", module, begin+1); } else { builder.format(" In: [%s] '%s'\n", module, name); } } free(name); } else { builder.format(" In: '%s'\n", string_array[i]); } # else builder.format(" In: '%s'\n", string_array[i]); # endif lines.append(builder.release()); } free(string_array); } static void printStackTrace(void * trace[], int size, int start=0) { nvDebug( "\nDumping stacktrace:\n" ); Array lines; writeStackTrace(trace, size, 1, lines); for (uint i = 0; i < lines.count(); i++) { nvDebug("%s", lines[i]); delete lines[i]; } nvDebug("\n"); } #endif // defined(HAVE_EXECINFO_H) static void * callerAddress(void * secret) { #if NV_OS_DARWIN # if defined(_STRUCT_MCONTEXT) # if NV_CPU_PPC ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->__ss.__srr0; # elif NV_CPU_X86_64 ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->__ss.__rip; # elif NV_CPU_X86 ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->__ss.__eip; # elif NV_CPU_ARM ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->__ss.__pc; +# elif NV_CPU_AARCH64 + ucontext_t * ucp = (ucontext_t *)secret; + return (void *) ucp->uc_mcontext->__ss.__pc; # else # error "Unknown CPU" # endif # else # if NV_CPU_PPC ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->ss.srr0; # elif NV_CPU_X86 ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext->ss.eip; # else # error "Unknown CPU" # endif # endif #elif NV_OS_FREEBSD # if NV_CPU_X86_64 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->uc_mcontext.mc_rip; # elif NV_CPU_X86 ucontext_t * ucp = 
(ucontext_t *)secret; return (void *)ucp->uc_mcontext.mc_eip; # else # error "Unknown CPU" # endif #elif NV_OS_NETBSD # if NV_CPU_X86_64 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->uc_mcontext.__gregs[_REG_RIP]; # elif NV_CPU_X86 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->uc_mcontext.__gregs[_REG_EIP]; # elif NV_CPU_PPC ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext.__gregs[_REG_PC]; # else # error "Unknown CPU" # endif #elif NV_OS_OPENBSD # if NV_CPU_X86_64 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->sc_rip; # elif NV_CPU_X86 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->sc_eip; # else # error "Unknown CPU" # endif #else # if NV_CPU_X86_64 // #define REG_RIP REG_INDEX(rip) // seems to be 16 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->uc_mcontext.gregs[REG_RIP]; # elif NV_CPU_X86 ucontext_t * ucp = (ucontext_t *)secret; return (void *)ucp->uc_mcontext.gregs[14/*REG_EIP*/]; # elif NV_CPU_PPC ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext.regs->nip; # elif NV_CPU_ARM ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext.arm_pc; # elif NV_CPU_AARCH64 ucontext_t * ucp = (ucontext_t *)secret; return (void *) ucp->uc_mcontext.pc; # else # error "Unknown CPU" # endif #endif // How to obtain the instruction pointers in different platforms, from mlton's source code. 
// http://mlton.org/ // OpenBSD // ucp->sc_eip // FreeBSD: // ucp->uc_mcontext.mc_eip // HPUX: // ucp->uc_link // Solaris: // ucp->uc_mcontext.gregs[REG_PC] // Linux hppa: // uc->uc_mcontext.sc_iaoq[0] & ~0x3UL // Linux sparc: // ((struct sigcontext*) secret)->sigc_regs.tpc // Linux sparc64: // ((struct sigcontext*) secret)->si_regs.pc // potentially correct for other archs: // Linux alpha: ucp->m_context.sc_pc // Linux arm: ucp->m_context.ctx.arm_pc // Linux ia64: ucp->m_context.sc_ip & ~0x3UL // Linux mips: ucp->m_context.sc_pc // Linux s390: ucp->m_context.sregs->regs.psw.addr } static void nvSigHandler(int sig, siginfo_t *info, void *secret) { void * pnt = callerAddress(secret); // Do something useful with siginfo_t if (sig == SIGSEGV) { if (pnt != NULL) nvDebug("Got signal %d, faulty address is %p, from %p\n", sig, info->si_addr, pnt); else nvDebug("Got signal %d, faulty address is %p\n", sig, info->si_addr); } else if(sig == SIGTRAP) { nvDebug("Breakpoint hit.\n"); } else { nvDebug("Got signal %d\n", sig); } #if defined(HAVE_EXECINFO_H) if (hasStackTrace()) // in case of weak linking { void * trace[64]; int size = backtrace(trace, 64); if (pnt != NULL) { // Overwrite sigaction with caller's address. trace[1] = pnt; } printStackTrace(trace, size, 1); } #endif // defined(HAVE_EXECINFO_H) exit(0); } #endif // defined(HAVE_SIGNAL_H) #if NV_OS_WIN32 //&& NV_CC_MSVC /** Win32 assert handler. */ struct Win32AssertHandler : public AssertHandler { // Flush the message queue. This is necessary for the message box to show up. static void flushMessageQueue() { MSG msg; while( PeekMessage( &msg, NULL, 0, 0, PM_REMOVE ) ) { //if( msg.message == WM_QUIT ) break; TranslateMessage( &msg ); DispatchMessage( &msg ); } } // Assert handler method. 
virtual int assertion(const char * exp, const char * file, int line, const char * func, const char * msg, va_list arg) { int ret = NV_ABORT_EXIT; StringBuilder error_string; error_string.format("*** Assertion failed: %s\n On file: %s\n On line: %d\n", exp, file, line ); if (func != NULL) { error_string.appendFormat(" On function: %s\n", func); } if (msg != NULL) { error_string.append(" Message: "); va_list tmp; va_copy(tmp, arg); error_string.appendFormatList(msg, tmp); va_end(tmp); error_string.append("\n"); } nvDebug( error_string.str() ); // Print stack trace: debug::dumpInfo(); if (debug::isDebuggerPresent()) { return NV_ABORT_DEBUG; } if (s_interactive) { flushMessageQueue(); int action = MessageBoxA(NULL, error_string.str(), "Assertion failed", MB_ABORTRETRYIGNORE | MB_ICONERROR | MB_TOPMOST); switch( action ) { case IDRETRY: ret = NV_ABORT_DEBUG; break; case IDIGNORE: ret = NV_ABORT_IGNORE; break; case IDABORT: default: ret = NV_ABORT_EXIT; break; } /*if( _CrtDbgReport( _CRT_ASSERT, file, line, module, exp ) == 1 ) { return NV_ABORT_DEBUG; }*/ } if (ret == NV_ABORT_EXIT) { // Exit cleanly. exit(EXIT_FAILURE + 1); } return ret; } }; #elif NV_OS_XBOX /** Xbox360 assert handler. */ struct Xbox360AssertHandler : public AssertHandler { // Assert handler method. virtual int assertion(const char * exp, const char * file, int line, const char * func, const char * msg, va_list arg) { int ret = NV_ABORT_EXIT; StringBuilder error_string; if( func != NULL ) { error_string.format( "*** Assertion failed: %s\n On file: %s\n On function: %s\n On line: %d\n ", exp, file, func, line ); nvDebug( error_string.str() ); } else { error_string.format( "*** Assertion failed: %s\n On file: %s\n On line: %d\n ", exp, file, line ); nvDebug( error_string.str() ); } if (debug::isDebuggerPresent()) { return NV_ABORT_DEBUG; } if( ret == NV_ABORT_EXIT ) { // Exit cleanly. exit(EXIT_FAILURE + 1); } return ret; } }; #elif NV_OS_ORBIS /** Orbis assert handler. 
*/ struct OrbisAssertHandler : public AssertHandler { // Assert handler method. virtual int assertion(const char * exp, const char * file, int line, const char * func, const char * msg, va_list arg) { if( func != NULL ) { nvDebug( "*** Assertion failed: %s\n On file: %s\n On function: %s\n On line: %d\n ", exp, file, func, line ); } else { nvDebug( "*** Assertion failed: %s\n On file: %s\n On line: %d\n ", exp, file, line ); } //SBtodoORBIS print stack trace /*if (hasStackTrace()) { void * trace[64]; int size = backtrace(trace, 64); printStackTrace(trace, size, 2); }*/ if (debug::isDebuggerPresent()) return NV_ABORT_DEBUG; return NV_ABORT_IGNORE; } }; #else /** Unix assert handler. */ struct UnixAssertHandler : public AssertHandler { // Assert handler method. virtual int assertion(const char * exp, const char * file, int line, const char * func, const char * msg, va_list arg) { int ret = NV_ABORT_EXIT; if( func != NULL ) { nvDebug( "*** Assertion failed: %s\n On file: %s\n On function: %s\n On line: %d\n ", exp, file, func, line ); } else { nvDebug( "*** Assertion failed: %s\n On file: %s\n On line: %d\n ", exp, file, line ); } #if _DEBUG if (debug::isDebuggerPresent()) { return NV_ABORT_DEBUG; } #endif #if defined(HAVE_EXECINFO_H) if (hasStackTrace()) { void * trace[64]; int size = backtrace(trace, 64); printStackTrace(trace, size, 2); } #endif if( ret == NV_ABORT_EXIT ) { // Exit cleanly. exit(EXIT_FAILURE + 1); } return ret; } }; #endif } // namespace /// Handle assertion through the assert handler. int nvAbort(const char * exp, const char * file, int line, const char * func/*=NULL*/, const char * msg/*= NULL*/, ...) 
{ #if NV_OS_WIN32 //&& NV_CC_MSVC static Win32AssertHandler s_default_assert_handler; #elif NV_OS_XBOX static Xbox360AssertHandler s_default_assert_handler; #elif NV_OS_ORBIS static OrbisAssertHandler s_default_assert_handler; #else static UnixAssertHandler s_default_assert_handler; #endif va_list arg; va_start(arg,msg); AssertHandler * handler = s_assert_handler != NULL ? s_assert_handler : &s_default_assert_handler; int result = handler->assertion(exp, file, line, func, msg, arg); va_end(arg); return result; } // Abnormal termination. Create mini dump and output call stack. void debug::terminate(int code) { #if NV_OS_WIN32 EnterCriticalSection(&s_handler_critical_section); writeMiniDump(NULL); const int max_stack_size = 64; void * trace[max_stack_size]; int size = backtrace(trace, max_stack_size); // @@ Use win32's CreateFile? FILE * fp = fileOpen("crash.txt", "wb"); if (fp != NULL) { Array lines; writeStackTrace(trace, size, 0, lines); for (uint i = 0; i < lines.count(); i++) { fputs(lines[i], fp); delete lines[i]; } // @@ Add more info to crash.txt? fclose(fp); } LeaveCriticalSection(&s_handler_critical_section); #endif exit(code); } /// Shows a message through the message handler. void NV_CDECL nvDebugPrint(const char *msg, ...) { va_list arg; va_start(arg,msg); if (s_message_handler != NULL) { s_message_handler->log( msg, arg ); } va_end(arg); } /// Dump debug info. void debug::dumpInfo() { #if (NV_OS_WIN32 && NV_CC_MSVC) || (defined(HAVE_SIGNAL_H) && defined(HAVE_EXECINFO_H)) if (hasStackTrace()) { void * trace[64]; int size = backtrace(trace, 64); nvDebug( "\nDumping stacktrace:\n" ); Array lines; writeStackTrace(trace, size, 1, lines); for (uint i = 0; i < lines.count(); i++) { nvDebug("%s", lines[i]); delete lines[i]; } } #endif } /// Dump callstack using the specified handler. 
void debug::dumpCallstack(MessageHandler *messageHandler, int callstackLevelsToSkip /*= 0*/) { #if (NV_OS_WIN32 && NV_CC_MSVC) || (defined(HAVE_SIGNAL_H) && defined(HAVE_EXECINFO_H)) if (hasStackTrace()) { void * trace[64]; int size = backtrace(trace, 64); Array lines; writeStackTrace(trace, size, callstackLevelsToSkip + 1, lines); // + 1 to skip the call to dumpCallstack for (uint i = 0; i < lines.count(); i++) { #if NV_CPU_ARM || NV_CPU_AARCH64 || NV_OS_DARWIN || NV_OS_IOS va_list empty_va_list = {}; messageHandler->log(lines[i], empty_va_list); #else messageHandler->log(lines[i], NULL); #endif delete lines[i]; } } #endif } /// Set the debug message handler. void debug::setMessageHandler(MessageHandler * message_handler) { s_message_handler = message_handler; } /// Reset the debug message handler. void debug::resetMessageHandler() { s_message_handler = NULL; } /// Set the assert handler. void debug::setAssertHandler(AssertHandler * assert_handler) { s_assert_handler = assert_handler; } /// Reset the assert handler. void debug::resetAssertHandler() { s_assert_handler = NULL; } #if NV_OS_WIN32 #if NV_USE_SEPARATE_THREAD static void initHandlerThread() { static const int kExceptionHandlerThreadInitialStackSize = 64 * 1024; // Set synchronization primitives and the handler thread. Each // ExceptionHandler object gets its own handler thread because that's the // only way to reliably guarantee sufficient stack space in an exception, // and it allows an easy way to get a snapshot of the requesting thread's // context outside of an exception. InitializeCriticalSection(&s_handler_critical_section); s_handler_start_semaphore = CreateSemaphore(NULL, 0, 1, NULL); nvDebugCheck(s_handler_start_semaphore != NULL); s_handler_finish_semaphore = CreateSemaphore(NULL, 0, 1, NULL); nvDebugCheck(s_handler_finish_semaphore != NULL); // Don't attempt to create the thread if we could not create the semaphores. 
if (s_handler_finish_semaphore != NULL && s_handler_start_semaphore != NULL) { DWORD thread_id; s_handler_thread = CreateThread(NULL, // lpThreadAttributes kExceptionHandlerThreadInitialStackSize, ExceptionHandlerThreadMain, NULL, // lpParameter 0, // dwCreationFlags &thread_id); nvDebugCheck(s_handler_thread != NULL); } /* @@ We should avoid loading modules in the exception handler! dbghelp_module_ = LoadLibrary(L"dbghelp.dll"); if (dbghelp_module_) { minidump_write_dump_ = reinterpret_cast(GetProcAddress(dbghelp_module_, "MiniDumpWriteDump")); } */ } static void shutHandlerThread() { // @@ Free stuff. Terminate thread. } #endif // NV_USE_SEPARATE_THREAD #endif // NV_OS_WIN32 // Enable signal handler. void debug::enableSigHandler(bool interactive) { nvCheck(s_sig_handler_enabled != true); s_sig_handler_enabled = true; s_interactive = interactive; #if NV_OS_WIN32 && NV_CC_MSVC if (interactive) { // Do not display message boxes on error. // http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621(v=vs.85).aspx SetErrorMode(SEM_FAILCRITICALERRORS|SEM_NOGPFAULTERRORBOX|SEM_NOOPENFILEERRORBOX); // CRT reports errors to debug output only. 
// http://msdn.microsoft.com/en-us/library/1y71x448(v=vs.80).aspx _CrtSetReportMode(_CRT_WARN, _CRTDBG_MODE_DEBUG); _CrtSetReportMode(_CRT_ERROR, _CRTDBG_MODE_DEBUG); _CrtSetReportMode(_CRT_ASSERT, _CRTDBG_MODE_DEBUG); } #if NV_USE_SEPARATE_THREAD initHandlerThread(); #endif s_old_exception_filter = ::SetUnhandledExceptionFilter( handleException ); #if _MSC_VER >= 1400 // MSVC 2005/8 _set_invalid_parameter_handler(handleInvalidParameter); #endif // _MSC_VER >= 1400 _set_purecall_handler(handlePureVirtualCall); // SYMOPT_DEFERRED_LOADS make us not take a ton of time unless we actual log traces SymSetOptions(SYMOPT_DEFERRED_LOADS|SYMOPT_FAIL_CRITICAL_ERRORS|SYMOPT_LOAD_LINES|SYMOPT_UNDNAME); if (!SymInitialize(GetCurrentProcess(), NULL, TRUE)) { DWORD error = GetLastError(); nvDebug("SymInitialize returned error : %d\n", error); } #elif !NV_OS_WIN32 && defined(HAVE_SIGNAL_H) // Install our signal handler struct sigaction sa; sa.sa_sigaction = nvSigHandler; sigemptyset (&sa.sa_mask); sa.sa_flags = SA_ONSTACK | SA_RESTART | SA_SIGINFO; sigaction(SIGSEGV, &sa, &s_old_sigsegv); sigaction(SIGTRAP, &sa, &s_old_sigtrap); sigaction(SIGFPE, &sa, &s_old_sigfpe); sigaction(SIGBUS, &sa, &s_old_sigbus); #endif } /// Disable signal handler. 
void debug::disableSigHandler() { nvCheck(s_sig_handler_enabled == true); s_sig_handler_enabled = false; #if NV_OS_WIN32 && NV_CC_MSVC ::SetUnhandledExceptionFilter( s_old_exception_filter ); s_old_exception_filter = NULL; SymCleanup(GetCurrentProcess()); #elif !NV_OS_WIN32 && defined(HAVE_SIGNAL_H) sigaction(SIGSEGV, &s_old_sigsegv, NULL); sigaction(SIGTRAP, &s_old_sigtrap, NULL); sigaction(SIGFPE, &s_old_sigfpe, NULL); sigaction(SIGBUS, &s_old_sigbus, NULL); #endif } bool debug::isDebuggerPresent() { #if NV_OS_WIN32 HINSTANCE kernel32 = GetModuleHandleA("kernel32.dll"); if (kernel32) { FARPROC IsDebuggerPresent = GetProcAddress(kernel32, "IsDebuggerPresent"); if (IsDebuggerPresent != NULL && IsDebuggerPresent()) { return true; } } return false; #elif NV_OS_XBOX #ifdef _DEBUG return DmIsDebuggerPresent() == TRUE; #else return false; #endif #elif NV_OS_ORBIS #if PS4_FINAL_REQUIREMENTS return false; #else return sceDbgIsDebuggerAttached() == 1; #endif #elif NV_OS_DARWIN int mib[4]; struct kinfo_proc info; size_t size; mib[0] = CTL_KERN; mib[1] = KERN_PROC; mib[2] = KERN_PROC_PID; mib[3] = getpid(); size = sizeof(info); info.kp_proc.p_flag = 0; sysctl(mib,4,&info,&size,NULL,0); return ((info.kp_proc.p_flag & P_TRACED) == P_TRACED); #else // if ppid != sid, some process spawned our app, probably a debugger. 
return getsid(getpid()) != getppid(); #endif } bool debug::attachToDebugger() { #if NV_OS_WIN32 if (isDebuggerPresent() == FALSE) { Path process(1024); process.copy("\""); GetSystemDirectoryA(process.str() + 1, 1024 - 1); process.appendSeparator(); process.appendFormat("VSJitDebugger.exe\" -p %lu", ::GetCurrentProcessId()); STARTUPINFOA sSi; memset(&sSi, 0, sizeof(sSi)); PROCESS_INFORMATION sPi; memset(&sPi, 0, sizeof(sPi)); BOOL b = CreateProcessA(NULL, process.str(), NULL, NULL, FALSE, 0, NULL, NULL, &sSi, &sPi); if (b != FALSE) { ::WaitForSingleObject(sPi.hProcess, INFINITE); DWORD dwExitCode; ::GetExitCodeProcess(sPi.hProcess, &dwExitCode); if (dwExitCode != 0) //if exit code is zero, a debugger was selected b = FALSE; } if (sPi.hThread != NULL) ::CloseHandle(sPi.hThread); if (sPi.hProcess != NULL) ::CloseHandle(sPi.hProcess); if (b == FALSE) return false; for (int i = 0; i < 5*60; i++) { if (isDebuggerPresent()) break; ::Sleep(200); } } #endif // NV_OS_WIN32 return true; } Index: ps/trunk/libraries/source/spidermonkey/FixMacBuild.diff =================================================================== --- ps/trunk/libraries/source/spidermonkey/FixMacBuild.diff (revision 26880) +++ ps/trunk/libraries/source/spidermonkey/FixMacBuild.diff (revision 26881) @@ -1,35 +1,96 @@ --- a/build/moz.configure/toolchain.configure +++ b/build/moz.configure/toolchain.configure @@ -1619,8 +1619,9 @@ # Initialize local variables with a 0xAA pattern in clang debug builds. 
# Linux32 fails some xpcshell tests with -ftrivial-auto-var-init linux32 = target.kernel == 'Linux' and target.cpu == 'x86' + mac = target.kernel == 'Darwin' if (c_compiler.type == 'clang' or c_compiler.type == 'clang-cl') and \ - c_compiler.version >= '8' and debug and not linux32: + c_compiler.version >= '8' and debug and not linux32 and not mac: if c_compiler.type == 'clang-cl': flags.append('-Xclang') js_flags.append('-Xclang') --- a/build/moz.configure/toolchain.configure +++ b/build/moz.configure/toolchain.configure @@ -141,7 +141,6 @@ @imports(_from='biplist', _import='readPlist') def macos_sdk(sdk, host): sdk_min_version = Version('10.11') - sdk_max_version = Version('10.15.4') if sdk: sdk = sdk[0] @@ -170,11 +169,6 @@ 'You may need to point to it using --with-macos-sdk= in your ' 'mozconfig. Various SDK versions are available from ' 'https://github.com/phracker/MacOSX-SDKs' % (version, sdk_min_version)) - if version > sdk_max_version: - die('SDK version "%s" is unsupported. Please downgrade to version ' - '%s. You may need to point to it using --with-macos-sdk= in ' - 'your mozconfig. 
Various SDK versions are available from ' - 'https://github.com/phracker/MacOSX-SDKs' % (version, sdk_max_version)) return sdk - set_config('MACOS_SDK_DIR', macos_sdk) \ No newline at end of file + set_config('MACOS_SDK_DIR', macos_sdk) +--- a/build/moz.configure/init.configure ++++ b/build/moz.configure/init.configure +@@ -375,9 +375,6 @@ + sys.exit(subprocess.call([python] + sys.argv)) + + # We are now in the virtualenv +- if not distutils.sysconfig.get_python_lib(): +- die('Could not determine python site packages directory') +- + str_version = '.'.join(str(v) for v in version) + + return namespace( +--- a/js/src/jit/arm64/vixl/MozCpu-vixl.cpp ++++ b/js/src/jit/arm64/vixl/MozCpu-vixl.cpp +@@ -69,7 +69,7 @@ void CPU::SetUp() { + + + uint32_t CPU::GetCacheType() { +-#if defined(__aarch64__) && !defined(_MSC_VER) ++#if defined(__aarch64__) && (defined(__linux__) || defined(__android__)) + uint64_t cache_type_register; + // Copy the content of the cache type register to a core register. + __asm__ __volatile__ ("mrs %[ctr], ctr_el0" // NOLINT +--- a/js/src/wasm/WasmSignalHandlers.cpp ++++ b/js/src/wasm/WasmSignalHandlers.cpp +@@ -226,6 +226,10 @@ using mozilla::DebugOnly; + # define R13_sig(p) ((p)->thread.__sp) + # define R14_sig(p) ((p)->thread.__lr) + # define R15_sig(p) ((p)->thread.__pc) ++# define EPC_sig(p) ((p)->thread.__pc) ++# define RFP_sig(p) ((p)->thread.__fp) ++# define R31_sig(p) ((p)->thread.__sp) ++# define RLR_sig(p) ((p)->thread.__lr) + #else + # error "Don't know how to read/write to the thread state via the mcontext_t." 
+ #endif +@@ -351,6 +355,12 @@ struct macos_arm_context { + arm_neon_state_t float_; + }; + # define CONTEXT macos_arm_context ++# elif defined(__aarch64__) ++struct macos_aarch64_context { ++ arm_thread_state64_t thread; ++ arm_neon_state64_t float_; ++}; ++# define CONTEXT macos_aarch64_context + # else + # error Unsupported architecture + # endif +@@ -816,6 +826,11 @@ static bool HandleMachException(const ExceptionRequest& request) { + unsigned int float_state_count = ARM_NEON_STATE_COUNT; + int thread_state = ARM_THREAD_STATE; + int float_state = ARM_NEON_STATE; ++# elif defined(__aarch64__) ++ unsigned int thread_state_count = ARM_THREAD_STATE64_COUNT; ++ unsigned int float_state_count = ARM_NEON_STATE64_COUNT; ++ int thread_state = ARM_THREAD_STATE64; ++ int float_state = ARM_NEON_STATE64; + # else + # error Unsupported architecture + # endif Index: ps/trunk/libraries/source/spidermonkey/build.sh =================================================================== --- ps/trunk/libraries/source/spidermonkey/build.sh (revision 26880) +++ ps/trunk/libraries/source/spidermonkey/build.sh (revision 26881) @@ -1,238 +1,245 @@ #!/bin/sh # This script is called by update-workspaces.sh / build-osx-libraries.sh set -e # This should match the version in config/milestone.txt FOLDER="mozjs-78.6.0" # If same-version changes are needed, increment this. LIB_VERSION="78.6.0+3" LIB_NAME="mozjs78-ps" # Since this script is called by update-workspaces.sh, we want to quickly # avoid doing any work if SpiderMonkey is already built and up-to-date. # Running SM's Makefile is a bit slow and noisy, so instead we'll make a # special file and only rebuild if the build.sh version differs. if [ -e .already-built ] && [ "$(cat .already-built)" = "${LIB_VERSION}" ] then echo "SpiderMonkey is already up to date." exit fi echo "Building SpiderMonkey..." 
echo # Use Mozilla make on Windows if [ "${OS}" = "Windows_NT" ] then MAKE="mozmake" else MAKE=${MAKE:="make"} fi MAKE_OPTS="${JOBS}" # Standalone SpiderMonkey can not use jemalloc (see https://bugzilla.mozilla.org/show_bug.cgi?id=1465038) # Jitspew doesn't compile on VS17 in the zydis disassembler - since we don't use it, deactivate it. CONF_OPTS="--disable-tests --disable-jemalloc --disable-js-shell --without-intl-api --enable-shared-js --disable-jitspew" if [ "${OS}" = "Windows_NT" ] then CONF_OPTS="${CONF_OPTS} --with-visual-studio-version=2017 --target=i686" else CONF_OPTS="${CONF_OPTS}" fi if [ "`uname -s`" = "Darwin" ] then - # Explicitly target x86_64. - CONF_OPTS="${CONF_OPTS} --target=x86_64-apple-darwin" + ARCH=${ARCH:=""} + if [ -z "${ARCH}" ]; then + if [ "`uname -m`" == "arm64" ]; then + ARCH="aarch64" + else + ARCH="x86_64" + fi + fi + CONF_OPTS="${CONF_OPTS} --target=$ARCH-apple-darwin" # Link to custom-built zlib export PKG_CONFIG_PATH="=${ZLIB_DIR}:${PKG_CONFIG_PATH}" CONF_OPTS="${CONF_OPTS} --with-system-zlib" # Specify target versions and SDK if [ "${MIN_OSX_VERSION}" ] && [ "${MIN_OSX_VERSION-_}" ]; then CONF_OPTS="${CONF_OPTS} --enable-macos-target=$MIN_OSX_VERSION" fi if [ "${SYSROOT}" ] && [ "${SYSROOT-_}" ]; then CONF_OPTS="${CONF_OPTS} --with-macos-sdk=${SYSROOT}" fi fi LLVM_OBJDUMP=${LLVM_OBJDUMP:=$(command -v llvm-objdump || command -v objdump)} # Quick sanity check to print explicit error messages # (Don't run this on windows as it would likely fail spuriously) if [ "${OS}" != "Windows_NT" ] then [ ! -z "$(command -v rustc)" ] || (echo "Error: rustc is not available. Install the rust toolchain (rust + cargo) before proceeding." && exit 1) [ ! -z "${LLVM_OBJDUMP}" ] || (echo "Error: LLVM objdump is not available. Install it (likely via LLVM-clang) before proceeding." 
&& exit 1) fi # If Valgrind looks like it's installed, then set up SM to support it # (else the JITs will interact poorly with it) if [ -e /usr/include/valgrind/valgrind.h ] then CONF_OPTS="${CONF_OPTS} --enable-valgrind" fi # We need to be able to override CHOST in case it is 32bit userland on 64bit kernel CONF_OPTS="${CONF_OPTS} \ ${CBUILD:+--build=${CBUILD}} \ ${CHOST:+--host=${CHOST}} \ ${CTARGET:+--target=${CTARGET}}" echo "SpiderMonkey build options: ${CONF_OPTS}" # It can occasionally be useful to not rebuild everything, but don't do this by default. REBUILD=${REBUILD:=true} if $REBUILD = true; then # Delete the existing directory to avoid conflicts and extract the tarball rm -rf "$FOLDER" if [ ! -e "${FOLDER}.tar.bz2" ]; then # The tarball is committed to svn, but it's useful to let jenkins download it (when testing upgrade scripts). download="$(command -v wget || echo "curl -L -o "${FOLDER}.tar.bz2"")" $download "https://github.com/wraitii/spidermonkey-tarballs/releases/download/v78.6.0/${FOLDER}.tar.bz2" fi tar xjf "${FOLDER}.tar.bz2" # Clean up header files that may be left over by earlier versions of SpiderMonkey rm -rf include-unix-debug rm -rf include-unix-release # Apply patches cd "$FOLDER" . ../patch.sh # Copy a more recent autoconf config.guess to handle ARM macs properly. cp -f ../config.guess build/autoconf/ # Prevent complaining that configure is outdated. touch ./js/src/configure else cd "$FOLDER" fi # Debug version of SM is broken on FreeBSD. if [ "$(uname -s)" != "FreeBSD" ]; then mkdir -p build-debug cd build-debug # SM configure checks for autoconf, but we don't actually need it. # To avoid a dependency, pass something arbitrary (it does need to be an actual program). # llvm-objdump is searched for with the complete name, not simply 'objdump', account for that. 
CXXFLAGS="${CXXFLAGS}" ../js/src/configure AUTOCONF="ls" \ LLVM_OBJDUMP="${LLVM_OBJDUMP}" \ ${CONF_OPTS} \ --enable-debug \ --disable-optimize \ --enable-gczeal ${MAKE} ${MAKE_OPTS} cd .. fi mkdir -p build-release cd build-release CXXFLAGS="${CXXFLAGS}" ../js/src/configure AUTOCONF="ls" \ LLVM_OBJDUMP="${LLVM_OBJDUMP}" \ ${CONF_OPTS} \ --enable-optimize ${MAKE} ${MAKE_OPTS} cd .. cd .. if [ "${OS}" = "Windows_NT" ] then INCLUDE_DIR_DEBUG=include-win32-debug INCLUDE_DIR_RELEASE=include-win32-release LIB_PREFIX= LIB_SUFFIX=.dll STATIC_LIB_SUFFIX=.lib else INCLUDE_DIR_DEBUG=include-unix-debug INCLUDE_DIR_RELEASE=include-unix-release LIB_PREFIX=lib LIB_SUFFIX=.so STATIC_LIB_SUFFIX=.a if [ "`uname -s`" = "OpenBSD" ]; then LIB_SUFFIX=.so.1.0 elif [ "`uname -s`" = "Darwin" ]; then LIB_SUFFIX=.a fi fi if [ "${OS}" = "Windows_NT" ] then # Bug #776126 # SpiderMonkey uses a tweaked zlib when building, and it wrongly copies its own files to include dirs # afterwards, so we have to remove them to not have them conflicting with the regular zlib pushd "${FOLDER}/build-release/dist/include" rm -f mozzconf.h zconf.h zlib.h popd pushd "${FOLDER}/build-debug/dist/include" rm -f mozzconf.h zconf.h zlib.h popd fi # Copy files into the necessary locations for building and running the game # js-config.h is different for debug and release builds, so we need different include directories for both mkdir -p "${INCLUDE_DIR_RELEASE}" cp -R -L "${FOLDER}"/build-release/dist/include/* "${INCLUDE_DIR_RELEASE}/" if [ "$(uname -s)" != "FreeBSD" ]; then mkdir -p "${INCLUDE_DIR_DEBUG}" cp -R -L "${FOLDER}"/build-debug/dist/include/* "${INCLUDE_DIR_DEBUG}/" fi # These align the lines below, making it easier to check for mistakes. DEB="debug" REL="release" mkdir -p lib/ # Fetch the jsrust static library. Path is grepped from the build file as it varies by rust toolset. 
rust_path=$(grep jsrust < "${FOLDER}/build-release/js/src/build/backend.mk" | cut -d = -f 2 | cut -c2-) cp -L "${rust_path}" "lib/${LIB_PREFIX}${LIB_NAME}-rust${STATIC_LIB_SUFFIX}" if [ "`uname -s`" = "Darwin" ] then # On MacOS, copy the static libraries only. cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}js_static${LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}js_static${LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" elif [ "${OS}" = "Windows_NT" ] then # Windows needs DLLs to binaries/, static stubs to lib/ and debug symbols cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${DEB}${STATIC_LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${DEB}${STATIC_LIB_SUFFIX}" cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${REL}${STATIC_LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${REL}${STATIC_LIB_SUFFIX}" # Copy debug symbols as well. cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${DEB}.pdb" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${DEB}.pdb" cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${REL}.pdb" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${REL}.pdb" # Copy the debug jsrust library. rust_path=$(grep jsrust < "${FOLDER}/build-debug/js/src/build/backend.mk" | cut -d = -f 2 | cut -c2-) cp -L "${rust_path}" "lib/${LIB_PREFIX}${LIB_NAME}-rust-debug${STATIC_LIB_SUFFIX}" # Windows need some additional libraries for posix emulation. 
cp -L "${FOLDER}/build-release/dist/bin/${LIB_PREFIX}nspr4.dll" "../../../binaries/system/${LIB_PREFIX}nspr4.dll" cp -L "${FOLDER}/build-release/dist/bin/${LIB_PREFIX}plc4.dll" "../../../binaries/system/${LIB_PREFIX}plc4.dll" cp -L "${FOLDER}/build-release/dist/bin/${LIB_PREFIX}plds4.dll" "../../../binaries/system/${LIB_PREFIX}plds4.dll" else # Copy shared libs to both lib/ and binaries/ so the compiler and executable (resp.) can find them. cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" cp -L "${FOLDER}/build-${REL}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${REL}${LIB_SUFFIX}" if [ "$(uname -s)" != "FreeBSD" ]; then cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" "../../../binaries/system/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" cp -L "${FOLDER}/build-${DEB}/js/src/build/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" "lib/${LIB_PREFIX}${LIB_NAME}-${DEB}${LIB_SUFFIX}" fi fi # Flag that it's already been built successfully so we can skip it next time echo "${LIB_VERSION}" > .already-built Index: ps/trunk/source/graphics/Color.cpp =================================================================== --- ps/trunk/source/graphics/Color.cpp (revision 26880) +++ ps/trunk/source/graphics/Color.cpp (revision 26881) @@ -1,145 +1,146 @@ -/* Copyright (C) 2021 Wildfire Games. +/* Copyright (C) 2022 Wildfire Games. * This file is part of 0 A.D. * * 0 A.D. is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * 0 A.D. is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with 0 A.D. If not, see . */ #include "precompiled.h" #include "graphics/Color.h" #include "graphics/SColor.h" #include "lib/sysdep/arch/x86_x64/simd.h" #include "maths/MathUtil.h" #include "ps/CLogger.h" #include "ps/CStr.h" #if COMPILER_HAS_SSE #include #endif static SColor4ub ConvertRGBColorTo4ubFallback(const RGBColor& src) { SColor4ub result; result.R = Clamp(static_cast(src.X * 255), 0, 255); result.G = Clamp(static_cast(src.Y * 255), 0, 255); result.B = Clamp(static_cast(src.Z * 255), 0, 255); result.A = 255; return result; } // on IA32, this is replaced by an SSE assembly version in ia32.cpp SColor4ub (*ConvertRGBColorTo4ub)(const RGBColor& src) = ConvertRGBColorTo4ubFallback; // Assembler-optimized function for color conversion #if COMPILER_HAS_SSE static SColor4ub ConvertRGBColorTo4ubSSE(const RGBColor& src) { const __m128 zero = _mm_setzero_ps(); const __m128 _255 = _mm_set_ss(255.0f); __m128 r = _mm_load_ss(&src.X); __m128 g = _mm_load_ss(&src.Y); __m128 b = _mm_load_ss(&src.Z); // C = min(255, 255*max(C, 0)) ( == Clamp(255*C, 0, 255) ) r = _mm_max_ss(r, zero); g = _mm_max_ss(g, zero); b = _mm_max_ss(b, zero); r = _mm_mul_ss(r, _255); g = _mm_mul_ss(g, _255); b = _mm_mul_ss(b, _255); r = _mm_min_ss(r, _255); g = _mm_min_ss(g, _255); b = _mm_min_ss(b, _255); // convert to integer and combine channels using bit logic int ri = _mm_cvtss_si32(r); int gi = _mm_cvtss_si32(g); int bi = _mm_cvtss_si32(b); return SColor4ub(ri, gi, bi, 0xFF); } #endif void ColorActivateFastImpl() { #if COMPILER_HAS_SSE if (HostHasSSE()) { ConvertRGBColorTo4ub = ConvertRGBColorTo4ubSSE; return; } -#endif +#elif defined(ARCH_X86_64) debug_printf("No SSE available. Slow fallback routines will be used.\n"); +#endif } /** * Important: This function does not modify the value if parsing fails. 
*/ bool CColor::ParseString(const CStr8& value, int defaultAlpha) { const size_t NUM_VALS = 4; int values[NUM_VALS] = { 0, 0, 0, defaultAlpha }; std::stringstream stream; stream.str(value); // Parse each value size_t i; for (i = 0; i < NUM_VALS; ++i) { if (stream.eof()) break; stream >> values[i]; if ((stream.rdstate() & std::stringstream::failbit) != 0) { LOGWARNING("Unable to parse CColor parameters. Your input: '%s'", value.c_str()); return false; } if (values[i] < 0 || values[i] > 255) { LOGWARNING("Invalid value (<0 or >255) when parsing CColor parameters. Your input: '%s'", value.c_str()); return false; } } if (i < 3) { LOGWARNING("Not enough parameters when parsing as CColor. Your input: '%s'", value.c_str()); return false; } if (!stream.eof()) { LOGWARNING("Too many parameters when parsing as CColor. Your input: '%s'", value.c_str()); return false; } r = values[0] / 255.f; g = values[1] / 255.f; b = values[2] / 255.f; a = values[3] / 255.f; return true; } bool CColor::operator==(const CColor& color) const { return r == color.r && g == color.g && b == color.b && a == color.a; } Index: ps/trunk/source/tools/dist/build-osx-executable.sh =================================================================== --- ps/trunk/source/tools/dist/build-osx-executable.sh (revision 26880) +++ ps/trunk/source/tools/dist/build-osx-executable.sh (revision 26881) @@ -1,75 +1,74 @@ #!/bin/sh # Build the Pyrogenesis executable, used to create the bundle and run the archiver. -# TODO: is there anything to do for ARM support? -export ARCH=${ARCH:="x86_64"} +export ARCH=${ARCH:=$(uname -m)} # Set minimum required OS X version, SDK location and tools # Old SDKs can be found at https://github.com/phracker/MacOSX-SDKs export MIN_OSX_VERSION=${MIN_OSX_VERSION:="10.12"} # Note that the 10.12 SDK is known to be too old for FMT 7. 
export SYSROOT=${SYSROOT:="/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk"} export CC=${CC:="clang"} CXX=${CXX:="clang++"} die() { echo ERROR: $* exit 1 } # Check that we're actually on OS X if [ "`uname -s`" != "Darwin" ]; then die "This script is intended for OS X only" fi # Check SDK exists if [ ! -d "${SYSROOT}" ]; then die "${SYSROOT} does not exist! You probably need to install Xcode" fi # Assume this is called from trunk/ SVN_REV=$(svnversion -n .) echo "L\"${SVN_REV}-release\"" > build/svn_revision/svn_revision.txt cd "build/workspaces/" JOBS=${JOBS:="-j5"} # Toggle whether this is a full rebuild, including libraries (takes much longer). FULL_REBUILD=${FULL_REBUILD:=false} if $FULL_REBUILD = true; then CLEAN_WORKSPACE_ARGS="" BUILD_LIBS_ARGS="--force-rebuild" else CLEAN_WORKSPACE_ARGS="--preserve-libs" BUILD_LIBS_ARGS="" fi ./clean-workspaces.sh "${CLEAN_WORKSPACE_ARGS}" # Build libraries against SDK echo "\nBuilding libraries\n" pushd ../../libraries/osx > /dev/null SYSROOT="${SYSROOT}" MIN_OSX_VERSION="${MIN_OSX_VERSION}" \ ./build-osx-libs.sh $JOBS "${BUILD_LIBS_ARGS}" || die "Libraries build script failed" popd > /dev/null # Update workspaces echo "\nGenerating workspaces\n" # Pass OS X options through to Premake (SYSROOT="${SYSROOT}" MIN_OSX_VERSION="${MIN_OSX_VERSION}" \ ./update-workspaces.sh --sysroot="${SYSROOT}" --macosx-version-min="${MIN_OSX_VERSION}") || die "update-workspaces.sh failed!" pushd gcc > /dev/null echo "\nBuilding game\n" (make clean && CC="$CC -arch $ARCH" CXX="$CXX -arch $ARCH" make ${JOBS}) || die "Game build failed!" popd > /dev/null # Run test to confirm all is OK pushd ../../binaries/system > /dev/null echo "\nRunning tests\n" ./test || die "Post-build testing failed!" popd > /dev/null