libv8 3.3.10.2

Sign up to get free protection for your applications and to get access to all the features.
Files changed (700)
  1. data/.gitignore +8 -0
  2. data/.gitmodules +3 -0
  3. data/Gemfile +4 -0
  4. data/README.md +44 -0
  5. data/Rakefile +73 -0
  6. data/ext/libv8/extconf.rb +9 -0
  7. data/lib/libv8.rb +15 -0
  8. data/lib/libv8/Makefile +38 -0
  9. data/lib/libv8/detect_cpu.rb +27 -0
  10. data/lib/libv8/fpic-on-linux-amd64.patch +13 -0
  11. data/lib/libv8/scons/CHANGES.txt +5334 -0
  12. data/lib/libv8/scons/LICENSE.txt +20 -0
  13. data/lib/libv8/scons/MANIFEST +199 -0
  14. data/lib/libv8/scons/PKG-INFO +13 -0
  15. data/lib/libv8/scons/README.txt +243 -0
  16. data/lib/libv8/scons/RELEASE.txt +98 -0
  17. data/lib/libv8/scons/engine/SCons/Action.py +1241 -0
  18. data/lib/libv8/scons/engine/SCons/Builder.py +877 -0
  19. data/lib/libv8/scons/engine/SCons/CacheDir.py +216 -0
  20. data/lib/libv8/scons/engine/SCons/Conftest.py +793 -0
  21. data/lib/libv8/scons/engine/SCons/Debug.py +220 -0
  22. data/lib/libv8/scons/engine/SCons/Defaults.py +480 -0
  23. data/lib/libv8/scons/engine/SCons/Environment.py +2318 -0
  24. data/lib/libv8/scons/engine/SCons/Errors.py +205 -0
  25. data/lib/libv8/scons/engine/SCons/Executor.py +633 -0
  26. data/lib/libv8/scons/engine/SCons/Job.py +435 -0
  27. data/lib/libv8/scons/engine/SCons/Memoize.py +244 -0
  28. data/lib/libv8/scons/engine/SCons/Node/Alias.py +152 -0
  29. data/lib/libv8/scons/engine/SCons/Node/FS.py +3142 -0
  30. data/lib/libv8/scons/engine/SCons/Node/Python.py +128 -0
  31. data/lib/libv8/scons/engine/SCons/Node/__init__.py +1328 -0
  32. data/lib/libv8/scons/engine/SCons/Options/BoolOption.py +50 -0
  33. data/lib/libv8/scons/engine/SCons/Options/EnumOption.py +50 -0
  34. data/lib/libv8/scons/engine/SCons/Options/ListOption.py +50 -0
  35. data/lib/libv8/scons/engine/SCons/Options/PackageOption.py +50 -0
  36. data/lib/libv8/scons/engine/SCons/Options/PathOption.py +76 -0
  37. data/lib/libv8/scons/engine/SCons/Options/__init__.py +67 -0
  38. data/lib/libv8/scons/engine/SCons/PathList.py +231 -0
  39. data/lib/libv8/scons/engine/SCons/Platform/__init__.py +241 -0
  40. data/lib/libv8/scons/engine/SCons/Platform/aix.py +69 -0
  41. data/lib/libv8/scons/engine/SCons/Platform/cygwin.py +55 -0
  42. data/lib/libv8/scons/engine/SCons/Platform/darwin.py +46 -0
  43. data/lib/libv8/scons/engine/SCons/Platform/hpux.py +46 -0
  44. data/lib/libv8/scons/engine/SCons/Platform/irix.py +44 -0
  45. data/lib/libv8/scons/engine/SCons/Platform/os2.py +58 -0
  46. data/lib/libv8/scons/engine/SCons/Platform/posix.py +263 -0
  47. data/lib/libv8/scons/engine/SCons/Platform/sunos.py +50 -0
  48. data/lib/libv8/scons/engine/SCons/Platform/win32.py +385 -0
  49. data/lib/libv8/scons/engine/SCons/SConf.py +1030 -0
  50. data/lib/libv8/scons/engine/SCons/SConsign.py +383 -0
  51. data/lib/libv8/scons/engine/SCons/Scanner/C.py +132 -0
  52. data/lib/libv8/scons/engine/SCons/Scanner/D.py +73 -0
  53. data/lib/libv8/scons/engine/SCons/Scanner/Dir.py +109 -0
  54. data/lib/libv8/scons/engine/SCons/Scanner/Fortran.py +316 -0
  55. data/lib/libv8/scons/engine/SCons/Scanner/IDL.py +48 -0
  56. data/lib/libv8/scons/engine/SCons/Scanner/LaTeX.py +384 -0
  57. data/lib/libv8/scons/engine/SCons/Scanner/Prog.py +101 -0
  58. data/lib/libv8/scons/engine/SCons/Scanner/RC.py +55 -0
  59. data/lib/libv8/scons/engine/SCons/Scanner/__init__.py +413 -0
  60. data/lib/libv8/scons/engine/SCons/Script/Interactive.py +384 -0
  61. data/lib/libv8/scons/engine/SCons/Script/Main.py +1334 -0
  62. data/lib/libv8/scons/engine/SCons/Script/SConsOptions.py +939 -0
  63. data/lib/libv8/scons/engine/SCons/Script/SConscript.py +640 -0
  64. data/lib/libv8/scons/engine/SCons/Script/__init__.py +412 -0
  65. data/lib/libv8/scons/engine/SCons/Sig.py +63 -0
  66. data/lib/libv8/scons/engine/SCons/Subst.py +904 -0
  67. data/lib/libv8/scons/engine/SCons/Taskmaster.py +1017 -0
  68. data/lib/libv8/scons/engine/SCons/Tool/386asm.py +61 -0
  69. data/lib/libv8/scons/engine/SCons/Tool/BitKeeper.py +67 -0
  70. data/lib/libv8/scons/engine/SCons/Tool/CVS.py +73 -0
  71. data/lib/libv8/scons/engine/SCons/Tool/FortranCommon.py +246 -0
  72. data/lib/libv8/scons/engine/SCons/Tool/JavaCommon.py +323 -0
  73. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/__init__.py +56 -0
  74. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/arch.py +61 -0
  75. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/common.py +240 -0
  76. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/netframework.py +82 -0
  77. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/sdk.py +391 -0
  78. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vc.py +456 -0
  79. data/lib/libv8/scons/engine/SCons/Tool/MSCommon/vs.py +499 -0
  80. data/lib/libv8/scons/engine/SCons/Tool/Perforce.py +103 -0
  81. data/lib/libv8/scons/engine/SCons/Tool/PharLapCommon.py +137 -0
  82. data/lib/libv8/scons/engine/SCons/Tool/RCS.py +64 -0
  83. data/lib/libv8/scons/engine/SCons/Tool/SCCS.py +64 -0
  84. data/lib/libv8/scons/engine/SCons/Tool/Subversion.py +71 -0
  85. data/lib/libv8/scons/engine/SCons/Tool/__init__.py +681 -0
  86. data/lib/libv8/scons/engine/SCons/Tool/aixc++.py +82 -0
  87. data/lib/libv8/scons/engine/SCons/Tool/aixcc.py +74 -0
  88. data/lib/libv8/scons/engine/SCons/Tool/aixf77.py +80 -0
  89. data/lib/libv8/scons/engine/SCons/Tool/aixlink.py +76 -0
  90. data/lib/libv8/scons/engine/SCons/Tool/applelink.py +71 -0
  91. data/lib/libv8/scons/engine/SCons/Tool/ar.py +63 -0
  92. data/lib/libv8/scons/engine/SCons/Tool/as.py +78 -0
  93. data/lib/libv8/scons/engine/SCons/Tool/bcc32.py +81 -0
  94. data/lib/libv8/scons/engine/SCons/Tool/c++.py +99 -0
  95. data/lib/libv8/scons/engine/SCons/Tool/cc.py +102 -0
  96. data/lib/libv8/scons/engine/SCons/Tool/cvf.py +58 -0
  97. data/lib/libv8/scons/engine/SCons/Tool/default.py +50 -0
  98. data/lib/libv8/scons/engine/SCons/Tool/dmd.py +223 -0
  99. data/lib/libv8/scons/engine/SCons/Tool/dvi.py +64 -0
  100. data/lib/libv8/scons/engine/SCons/Tool/dvipdf.py +124 -0
  101. data/lib/libv8/scons/engine/SCons/Tool/dvips.py +94 -0
  102. data/lib/libv8/scons/engine/SCons/Tool/f77.py +62 -0
  103. data/lib/libv8/scons/engine/SCons/Tool/f90.py +62 -0
  104. data/lib/libv8/scons/engine/SCons/Tool/f95.py +63 -0
  105. data/lib/libv8/scons/engine/SCons/Tool/filesystem.py +98 -0
  106. data/lib/libv8/scons/engine/SCons/Tool/fortran.py +62 -0
  107. data/lib/libv8/scons/engine/SCons/Tool/g++.py +90 -0
  108. data/lib/libv8/scons/engine/SCons/Tool/g77.py +73 -0
  109. data/lib/libv8/scons/engine/SCons/Tool/gas.py +53 -0
  110. data/lib/libv8/scons/engine/SCons/Tool/gcc.py +80 -0
  111. data/lib/libv8/scons/engine/SCons/Tool/gfortran.py +64 -0
  112. data/lib/libv8/scons/engine/SCons/Tool/gnulink.py +63 -0
  113. data/lib/libv8/scons/engine/SCons/Tool/gs.py +81 -0
  114. data/lib/libv8/scons/engine/SCons/Tool/hpc++.py +84 -0
  115. data/lib/libv8/scons/engine/SCons/Tool/hpcc.py +53 -0
  116. data/lib/libv8/scons/engine/SCons/Tool/hplink.py +77 -0
  117. data/lib/libv8/scons/engine/SCons/Tool/icc.py +59 -0
  118. data/lib/libv8/scons/engine/SCons/Tool/icl.py +52 -0
  119. data/lib/libv8/scons/engine/SCons/Tool/ifl.py +72 -0
  120. data/lib/libv8/scons/engine/SCons/Tool/ifort.py +88 -0
  121. data/lib/libv8/scons/engine/SCons/Tool/ilink.py +59 -0
  122. data/lib/libv8/scons/engine/SCons/Tool/ilink32.py +60 -0
  123. data/lib/libv8/scons/engine/SCons/Tool/install.py +229 -0
  124. data/lib/libv8/scons/engine/SCons/Tool/intelc.py +482 -0
  125. data/lib/libv8/scons/engine/SCons/Tool/ipkg.py +67 -0
  126. data/lib/libv8/scons/engine/SCons/Tool/jar.py +110 -0
  127. data/lib/libv8/scons/engine/SCons/Tool/javac.py +230 -0
  128. data/lib/libv8/scons/engine/SCons/Tool/javah.py +137 -0
  129. data/lib/libv8/scons/engine/SCons/Tool/latex.py +79 -0
  130. data/lib/libv8/scons/engine/SCons/Tool/lex.py +97 -0
  131. data/lib/libv8/scons/engine/SCons/Tool/link.py +121 -0
  132. data/lib/libv8/scons/engine/SCons/Tool/linkloc.py +112 -0
  133. data/lib/libv8/scons/engine/SCons/Tool/m4.py +63 -0
  134. data/lib/libv8/scons/engine/SCons/Tool/masm.py +77 -0
  135. data/lib/libv8/scons/engine/SCons/Tool/midl.py +88 -0
  136. data/lib/libv8/scons/engine/SCons/Tool/mingw.py +158 -0
  137. data/lib/libv8/scons/engine/SCons/Tool/mslib.py +64 -0
  138. data/lib/libv8/scons/engine/SCons/Tool/mslink.py +266 -0
  139. data/lib/libv8/scons/engine/SCons/Tool/mssdk.py +50 -0
  140. data/lib/libv8/scons/engine/SCons/Tool/msvc.py +268 -0
  141. data/lib/libv8/scons/engine/SCons/Tool/msvs.py +1388 -0
  142. data/lib/libv8/scons/engine/SCons/Tool/mwcc.py +207 -0
  143. data/lib/libv8/scons/engine/SCons/Tool/mwld.py +107 -0
  144. data/lib/libv8/scons/engine/SCons/Tool/nasm.py +72 -0
  145. data/lib/libv8/scons/engine/SCons/Tool/packaging/__init__.py +312 -0
  146. data/lib/libv8/scons/engine/SCons/Tool/packaging/ipk.py +185 -0
  147. data/lib/libv8/scons/engine/SCons/Tool/packaging/msi.py +527 -0
  148. data/lib/libv8/scons/engine/SCons/Tool/packaging/rpm.py +365 -0
  149. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_tarbz2.py +43 -0
  150. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_targz.py +43 -0
  151. data/lib/libv8/scons/engine/SCons/Tool/packaging/src_zip.py +43 -0
  152. data/lib/libv8/scons/engine/SCons/Tool/packaging/tarbz2.py +44 -0
  153. data/lib/libv8/scons/engine/SCons/Tool/packaging/targz.py +44 -0
  154. data/lib/libv8/scons/engine/SCons/Tool/packaging/zip.py +44 -0
  155. data/lib/libv8/scons/engine/SCons/Tool/pdf.py +78 -0
  156. data/lib/libv8/scons/engine/SCons/Tool/pdflatex.py +83 -0
  157. data/lib/libv8/scons/engine/SCons/Tool/pdftex.py +108 -0
  158. data/lib/libv8/scons/engine/SCons/Tool/qt.py +336 -0
  159. data/lib/libv8/scons/engine/SCons/Tool/rmic.py +120 -0
  160. data/lib/libv8/scons/engine/SCons/Tool/rpcgen.py +70 -0
  161. data/lib/libv8/scons/engine/SCons/Tool/rpm.py +132 -0
  162. data/lib/libv8/scons/engine/SCons/Tool/sgiar.py +68 -0
  163. data/lib/libv8/scons/engine/SCons/Tool/sgic++.py +58 -0
  164. data/lib/libv8/scons/engine/SCons/Tool/sgicc.py +53 -0
  165. data/lib/libv8/scons/engine/SCons/Tool/sgilink.py +63 -0
  166. data/lib/libv8/scons/engine/SCons/Tool/sunar.py +67 -0
  167. data/lib/libv8/scons/engine/SCons/Tool/sunc++.py +142 -0
  168. data/lib/libv8/scons/engine/SCons/Tool/suncc.py +58 -0
  169. data/lib/libv8/scons/engine/SCons/Tool/sunf77.py +63 -0
  170. data/lib/libv8/scons/engine/SCons/Tool/sunf90.py +64 -0
  171. data/lib/libv8/scons/engine/SCons/Tool/sunf95.py +64 -0
  172. data/lib/libv8/scons/engine/SCons/Tool/sunlink.py +77 -0
  173. data/lib/libv8/scons/engine/SCons/Tool/swig.py +182 -0
  174. data/lib/libv8/scons/engine/SCons/Tool/tar.py +73 -0
  175. data/lib/libv8/scons/engine/SCons/Tool/tex.py +813 -0
  176. data/lib/libv8/scons/engine/SCons/Tool/textfile.py +175 -0
  177. data/lib/libv8/scons/engine/SCons/Tool/tlib.py +53 -0
  178. data/lib/libv8/scons/engine/SCons/Tool/wix.py +99 -0
  179. data/lib/libv8/scons/engine/SCons/Tool/yacc.py +130 -0
  180. data/lib/libv8/scons/engine/SCons/Tool/zip.py +99 -0
  181. data/lib/libv8/scons/engine/SCons/Util.py +1492 -0
  182. data/lib/libv8/scons/engine/SCons/Variables/BoolVariable.py +89 -0
  183. data/lib/libv8/scons/engine/SCons/Variables/EnumVariable.py +103 -0
  184. data/lib/libv8/scons/engine/SCons/Variables/ListVariable.py +135 -0
  185. data/lib/libv8/scons/engine/SCons/Variables/PackageVariable.py +106 -0
  186. data/lib/libv8/scons/engine/SCons/Variables/PathVariable.py +147 -0
  187. data/lib/libv8/scons/engine/SCons/Variables/__init__.py +312 -0
  188. data/lib/libv8/scons/engine/SCons/Warnings.py +246 -0
  189. data/lib/libv8/scons/engine/SCons/__init__.py +49 -0
  190. data/lib/libv8/scons/engine/SCons/compat/__init__.py +237 -0
  191. data/lib/libv8/scons/engine/SCons/compat/_scons_builtins.py +150 -0
  192. data/lib/libv8/scons/engine/SCons/compat/_scons_collections.py +45 -0
  193. data/lib/libv8/scons/engine/SCons/compat/_scons_dbm.py +45 -0
  194. data/lib/libv8/scons/engine/SCons/compat/_scons_hashlib.py +76 -0
  195. data/lib/libv8/scons/engine/SCons/compat/_scons_io.py +45 -0
  196. data/lib/libv8/scons/engine/SCons/compat/_scons_sets.py +563 -0
  197. data/lib/libv8/scons/engine/SCons/compat/_scons_subprocess.py +1281 -0
  198. data/lib/libv8/scons/engine/SCons/cpp.py +589 -0
  199. data/lib/libv8/scons/engine/SCons/dblite.py +251 -0
  200. data/lib/libv8/scons/engine/SCons/exitfuncs.py +77 -0
  201. data/lib/libv8/scons/os_spawnv_fix.diff +83 -0
  202. data/lib/libv8/scons/scons-time.1 +1017 -0
  203. data/lib/libv8/scons/scons.1 +15219 -0
  204. data/lib/libv8/scons/sconsign.1 +208 -0
  205. data/lib/libv8/scons/script/scons +196 -0
  206. data/lib/libv8/scons/script/scons-time +1544 -0
  207. data/lib/libv8/scons/script/scons.bat +31 -0
  208. data/lib/libv8/scons/script/sconsign +513 -0
  209. data/lib/libv8/scons/setup.cfg +6 -0
  210. data/lib/libv8/scons/setup.py +425 -0
  211. data/lib/libv8/v8/.gitignore +35 -0
  212. data/lib/libv8/v8/AUTHORS +44 -0
  213. data/lib/libv8/v8/ChangeLog +2839 -0
  214. data/lib/libv8/v8/LICENSE +52 -0
  215. data/lib/libv8/v8/LICENSE.strongtalk +29 -0
  216. data/lib/libv8/v8/LICENSE.v8 +26 -0
  217. data/lib/libv8/v8/LICENSE.valgrind +45 -0
  218. data/lib/libv8/v8/SConstruct +1478 -0
  219. data/lib/libv8/v8/build/README.txt +49 -0
  220. data/lib/libv8/v8/build/all.gyp +18 -0
  221. data/lib/libv8/v8/build/armu.gypi +32 -0
  222. data/lib/libv8/v8/build/common.gypi +144 -0
  223. data/lib/libv8/v8/build/gyp_v8 +145 -0
  224. data/lib/libv8/v8/include/v8-debug.h +395 -0
  225. data/lib/libv8/v8/include/v8-preparser.h +117 -0
  226. data/lib/libv8/v8/include/v8-profiler.h +505 -0
  227. data/lib/libv8/v8/include/v8-testing.h +104 -0
  228. data/lib/libv8/v8/include/v8.h +4124 -0
  229. data/lib/libv8/v8/include/v8stdint.h +53 -0
  230. data/lib/libv8/v8/preparser/SConscript +38 -0
  231. data/lib/libv8/v8/preparser/preparser-process.cc +379 -0
  232. data/lib/libv8/v8/src/SConscript +368 -0
  233. data/lib/libv8/v8/src/accessors.cc +767 -0
  234. data/lib/libv8/v8/src/accessors.h +123 -0
  235. data/lib/libv8/v8/src/allocation-inl.h +49 -0
  236. data/lib/libv8/v8/src/allocation.cc +122 -0
  237. data/lib/libv8/v8/src/allocation.h +143 -0
  238. data/lib/libv8/v8/src/api.cc +5845 -0
  239. data/lib/libv8/v8/src/api.h +574 -0
  240. data/lib/libv8/v8/src/apinatives.js +110 -0
  241. data/lib/libv8/v8/src/apiutils.h +73 -0
  242. data/lib/libv8/v8/src/arguments.h +118 -0
  243. data/lib/libv8/v8/src/arm/assembler-arm-inl.h +353 -0
  244. data/lib/libv8/v8/src/arm/assembler-arm.cc +2661 -0
  245. data/lib/libv8/v8/src/arm/assembler-arm.h +1375 -0
  246. data/lib/libv8/v8/src/arm/builtins-arm.cc +1658 -0
  247. data/lib/libv8/v8/src/arm/code-stubs-arm.cc +6398 -0
  248. data/lib/libv8/v8/src/arm/code-stubs-arm.h +673 -0
  249. data/lib/libv8/v8/src/arm/codegen-arm.cc +52 -0
  250. data/lib/libv8/v8/src/arm/codegen-arm.h +91 -0
  251. data/lib/libv8/v8/src/arm/constants-arm.cc +152 -0
  252. data/lib/libv8/v8/src/arm/constants-arm.h +775 -0
  253. data/lib/libv8/v8/src/arm/cpu-arm.cc +120 -0
  254. data/lib/libv8/v8/src/arm/debug-arm.cc +317 -0
  255. data/lib/libv8/v8/src/arm/deoptimizer-arm.cc +754 -0
  256. data/lib/libv8/v8/src/arm/disasm-arm.cc +1506 -0
  257. data/lib/libv8/v8/src/arm/frames-arm.cc +45 -0
  258. data/lib/libv8/v8/src/arm/frames-arm.h +168 -0
  259. data/lib/libv8/v8/src/arm/full-codegen-arm.cc +4375 -0
  260. data/lib/libv8/v8/src/arm/ic-arm.cc +1562 -0
  261. data/lib/libv8/v8/src/arm/lithium-arm.cc +2206 -0
  262. data/lib/libv8/v8/src/arm/lithium-arm.h +2348 -0
  263. data/lib/libv8/v8/src/arm/lithium-codegen-arm.cc +4526 -0
  264. data/lib/libv8/v8/src/arm/lithium-codegen-arm.h +403 -0
  265. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.cc +305 -0
  266. data/lib/libv8/v8/src/arm/lithium-gap-resolver-arm.h +84 -0
  267. data/lib/libv8/v8/src/arm/macro-assembler-arm.cc +3163 -0
  268. data/lib/libv8/v8/src/arm/macro-assembler-arm.h +1126 -0
  269. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.cc +1287 -0
  270. data/lib/libv8/v8/src/arm/regexp-macro-assembler-arm.h +253 -0
  271. data/lib/libv8/v8/src/arm/simulator-arm.cc +3424 -0
  272. data/lib/libv8/v8/src/arm/simulator-arm.h +431 -0
  273. data/lib/libv8/v8/src/arm/stub-cache-arm.cc +4243 -0
  274. data/lib/libv8/v8/src/array.js +1366 -0
  275. data/lib/libv8/v8/src/assembler.cc +1207 -0
  276. data/lib/libv8/v8/src/assembler.h +858 -0
  277. data/lib/libv8/v8/src/ast-inl.h +112 -0
  278. data/lib/libv8/v8/src/ast.cc +1146 -0
  279. data/lib/libv8/v8/src/ast.h +2188 -0
  280. data/lib/libv8/v8/src/atomicops.h +167 -0
  281. data/lib/libv8/v8/src/atomicops_internals_arm_gcc.h +145 -0
  282. data/lib/libv8/v8/src/atomicops_internals_mips_gcc.h +169 -0
  283. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.cc +133 -0
  284. data/lib/libv8/v8/src/atomicops_internals_x86_gcc.h +287 -0
  285. data/lib/libv8/v8/src/atomicops_internals_x86_macosx.h +301 -0
  286. data/lib/libv8/v8/src/atomicops_internals_x86_msvc.h +203 -0
  287. data/lib/libv8/v8/src/bignum-dtoa.cc +655 -0
  288. data/lib/libv8/v8/src/bignum-dtoa.h +81 -0
  289. data/lib/libv8/v8/src/bignum.cc +768 -0
  290. data/lib/libv8/v8/src/bignum.h +140 -0
  291. data/lib/libv8/v8/src/bootstrapper.cc +2184 -0
  292. data/lib/libv8/v8/src/bootstrapper.h +188 -0
  293. data/lib/libv8/v8/src/builtins.cc +1707 -0
  294. data/lib/libv8/v8/src/builtins.h +371 -0
  295. data/lib/libv8/v8/src/bytecodes-irregexp.h +105 -0
  296. data/lib/libv8/v8/src/cached-powers.cc +177 -0
  297. data/lib/libv8/v8/src/cached-powers.h +65 -0
  298. data/lib/libv8/v8/src/char-predicates-inl.h +94 -0
  299. data/lib/libv8/v8/src/char-predicates.h +67 -0
  300. data/lib/libv8/v8/src/checks.cc +110 -0
  301. data/lib/libv8/v8/src/checks.h +296 -0
  302. data/lib/libv8/v8/src/circular-queue-inl.h +53 -0
  303. data/lib/libv8/v8/src/circular-queue.cc +122 -0
  304. data/lib/libv8/v8/src/circular-queue.h +103 -0
  305. data/lib/libv8/v8/src/code-stubs.cc +267 -0
  306. data/lib/libv8/v8/src/code-stubs.h +1011 -0
  307. data/lib/libv8/v8/src/code.h +70 -0
  308. data/lib/libv8/v8/src/codegen.cc +231 -0
  309. data/lib/libv8/v8/src/codegen.h +84 -0
  310. data/lib/libv8/v8/src/compilation-cache.cc +540 -0
  311. data/lib/libv8/v8/src/compilation-cache.h +287 -0
  312. data/lib/libv8/v8/src/compiler.cc +786 -0
  313. data/lib/libv8/v8/src/compiler.h +312 -0
  314. data/lib/libv8/v8/src/contexts.cc +347 -0
  315. data/lib/libv8/v8/src/contexts.h +391 -0
  316. data/lib/libv8/v8/src/conversions-inl.h +106 -0
  317. data/lib/libv8/v8/src/conversions.cc +1131 -0
  318. data/lib/libv8/v8/src/conversions.h +135 -0
  319. data/lib/libv8/v8/src/counters.cc +93 -0
  320. data/lib/libv8/v8/src/counters.h +254 -0
  321. data/lib/libv8/v8/src/cpu-profiler-inl.h +101 -0
  322. data/lib/libv8/v8/src/cpu-profiler.cc +609 -0
  323. data/lib/libv8/v8/src/cpu-profiler.h +302 -0
  324. data/lib/libv8/v8/src/cpu.h +69 -0
  325. data/lib/libv8/v8/src/d8-debug.cc +367 -0
  326. data/lib/libv8/v8/src/d8-debug.h +158 -0
  327. data/lib/libv8/v8/src/d8-posix.cc +695 -0
  328. data/lib/libv8/v8/src/d8-readline.cc +130 -0
  329. data/lib/libv8/v8/src/d8-windows.cc +42 -0
  330. data/lib/libv8/v8/src/d8.cc +803 -0
  331. data/lib/libv8/v8/src/d8.gyp +91 -0
  332. data/lib/libv8/v8/src/d8.h +235 -0
  333. data/lib/libv8/v8/src/d8.js +2798 -0
  334. data/lib/libv8/v8/src/data-flow.cc +66 -0
  335. data/lib/libv8/v8/src/data-flow.h +205 -0
  336. data/lib/libv8/v8/src/date.js +1103 -0
  337. data/lib/libv8/v8/src/dateparser-inl.h +127 -0
  338. data/lib/libv8/v8/src/dateparser.cc +178 -0
  339. data/lib/libv8/v8/src/dateparser.h +266 -0
  340. data/lib/libv8/v8/src/debug-agent.cc +447 -0
  341. data/lib/libv8/v8/src/debug-agent.h +129 -0
  342. data/lib/libv8/v8/src/debug-debugger.js +2569 -0
  343. data/lib/libv8/v8/src/debug.cc +3165 -0
  344. data/lib/libv8/v8/src/debug.h +1057 -0
  345. data/lib/libv8/v8/src/deoptimizer.cc +1256 -0
  346. data/lib/libv8/v8/src/deoptimizer.h +602 -0
  347. data/lib/libv8/v8/src/disasm.h +80 -0
  348. data/lib/libv8/v8/src/disassembler.cc +343 -0
  349. data/lib/libv8/v8/src/disassembler.h +58 -0
  350. data/lib/libv8/v8/src/diy-fp.cc +58 -0
  351. data/lib/libv8/v8/src/diy-fp.h +117 -0
  352. data/lib/libv8/v8/src/double.h +238 -0
  353. data/lib/libv8/v8/src/dtoa.cc +103 -0
  354. data/lib/libv8/v8/src/dtoa.h +85 -0
  355. data/lib/libv8/v8/src/execution.cc +849 -0
  356. data/lib/libv8/v8/src/execution.h +297 -0
  357. data/lib/libv8/v8/src/extensions/experimental/break-iterator.cc +250 -0
  358. data/lib/libv8/v8/src/extensions/experimental/break-iterator.h +89 -0
  359. data/lib/libv8/v8/src/extensions/experimental/collator.cc +218 -0
  360. data/lib/libv8/v8/src/extensions/experimental/collator.h +69 -0
  361. data/lib/libv8/v8/src/extensions/experimental/experimental.gyp +94 -0
  362. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.cc +78 -0
  363. data/lib/libv8/v8/src/extensions/experimental/i18n-extension.h +54 -0
  364. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.cc +112 -0
  365. data/lib/libv8/v8/src/extensions/experimental/i18n-locale.h +60 -0
  366. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.cc +43 -0
  367. data/lib/libv8/v8/src/extensions/experimental/i18n-utils.h +49 -0
  368. data/lib/libv8/v8/src/extensions/experimental/i18n.js +180 -0
  369. data/lib/libv8/v8/src/extensions/experimental/language-matcher.cc +251 -0
  370. data/lib/libv8/v8/src/extensions/experimental/language-matcher.h +95 -0
  371. data/lib/libv8/v8/src/extensions/externalize-string-extension.cc +141 -0
  372. data/lib/libv8/v8/src/extensions/externalize-string-extension.h +50 -0
  373. data/lib/libv8/v8/src/extensions/gc-extension.cc +58 -0
  374. data/lib/libv8/v8/src/extensions/gc-extension.h +49 -0
  375. data/lib/libv8/v8/src/factory.cc +1222 -0
  376. data/lib/libv8/v8/src/factory.h +442 -0
  377. data/lib/libv8/v8/src/fast-dtoa.cc +736 -0
  378. data/lib/libv8/v8/src/fast-dtoa.h +83 -0
  379. data/lib/libv8/v8/src/fixed-dtoa.cc +405 -0
  380. data/lib/libv8/v8/src/fixed-dtoa.h +55 -0
  381. data/lib/libv8/v8/src/flag-definitions.h +560 -0
  382. data/lib/libv8/v8/src/flags.cc +551 -0
  383. data/lib/libv8/v8/src/flags.h +79 -0
  384. data/lib/libv8/v8/src/frames-inl.h +247 -0
  385. data/lib/libv8/v8/src/frames.cc +1243 -0
  386. data/lib/libv8/v8/src/frames.h +870 -0
  387. data/lib/libv8/v8/src/full-codegen.cc +1374 -0
  388. data/lib/libv8/v8/src/full-codegen.h +771 -0
  389. data/lib/libv8/v8/src/func-name-inferrer.cc +92 -0
  390. data/lib/libv8/v8/src/func-name-inferrer.h +111 -0
  391. data/lib/libv8/v8/src/gdb-jit.cc +1555 -0
  392. data/lib/libv8/v8/src/gdb-jit.h +143 -0
  393. data/lib/libv8/v8/src/global-handles.cc +665 -0
  394. data/lib/libv8/v8/src/global-handles.h +284 -0
  395. data/lib/libv8/v8/src/globals.h +325 -0
  396. data/lib/libv8/v8/src/handles-inl.h +177 -0
  397. data/lib/libv8/v8/src/handles.cc +987 -0
  398. data/lib/libv8/v8/src/handles.h +382 -0
  399. data/lib/libv8/v8/src/hashmap.cc +230 -0
  400. data/lib/libv8/v8/src/hashmap.h +123 -0
  401. data/lib/libv8/v8/src/heap-inl.h +704 -0
  402. data/lib/libv8/v8/src/heap-profiler.cc +1173 -0
  403. data/lib/libv8/v8/src/heap-profiler.h +397 -0
  404. data/lib/libv8/v8/src/heap.cc +5930 -0
  405. data/lib/libv8/v8/src/heap.h +2268 -0
  406. data/lib/libv8/v8/src/hydrogen-instructions.cc +1769 -0
  407. data/lib/libv8/v8/src/hydrogen-instructions.h +3971 -0
  408. data/lib/libv8/v8/src/hydrogen.cc +6239 -0
  409. data/lib/libv8/v8/src/hydrogen.h +1202 -0
  410. data/lib/libv8/v8/src/ia32/assembler-ia32-inl.h +446 -0
  411. data/lib/libv8/v8/src/ia32/assembler-ia32.cc +2487 -0
  412. data/lib/libv8/v8/src/ia32/assembler-ia32.h +1144 -0
  413. data/lib/libv8/v8/src/ia32/builtins-ia32.cc +1621 -0
  414. data/lib/libv8/v8/src/ia32/code-stubs-ia32.cc +6198 -0
  415. data/lib/libv8/v8/src/ia32/code-stubs-ia32.h +517 -0
  416. data/lib/libv8/v8/src/ia32/codegen-ia32.cc +265 -0
  417. data/lib/libv8/v8/src/ia32/codegen-ia32.h +79 -0
  418. data/lib/libv8/v8/src/ia32/cpu-ia32.cc +88 -0
  419. data/lib/libv8/v8/src/ia32/debug-ia32.cc +312 -0
  420. data/lib/libv8/v8/src/ia32/deoptimizer-ia32.cc +774 -0
  421. data/lib/libv8/v8/src/ia32/disasm-ia32.cc +1628 -0
  422. data/lib/libv8/v8/src/ia32/frames-ia32.cc +45 -0
  423. data/lib/libv8/v8/src/ia32/frames-ia32.h +142 -0
  424. data/lib/libv8/v8/src/ia32/full-codegen-ia32.cc +4338 -0
  425. data/lib/libv8/v8/src/ia32/ic-ia32.cc +1597 -0
  426. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.cc +4461 -0
  427. data/lib/libv8/v8/src/ia32/lithium-codegen-ia32.h +375 -0
  428. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.cc +475 -0
  429. data/lib/libv8/v8/src/ia32/lithium-gap-resolver-ia32.h +110 -0
  430. data/lib/libv8/v8/src/ia32/lithium-ia32.cc +2261 -0
  431. data/lib/libv8/v8/src/ia32/lithium-ia32.h +2396 -0
  432. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.cc +2136 -0
  433. data/lib/libv8/v8/src/ia32/macro-assembler-ia32.h +775 -0
  434. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.cc +1263 -0
  435. data/lib/libv8/v8/src/ia32/regexp-macro-assembler-ia32.h +216 -0
  436. data/lib/libv8/v8/src/ia32/simulator-ia32.cc +30 -0
  437. data/lib/libv8/v8/src/ia32/simulator-ia32.h +74 -0
  438. data/lib/libv8/v8/src/ia32/stub-cache-ia32.cc +3847 -0
  439. data/lib/libv8/v8/src/ic-inl.h +130 -0
  440. data/lib/libv8/v8/src/ic.cc +2577 -0
  441. data/lib/libv8/v8/src/ic.h +736 -0
  442. data/lib/libv8/v8/src/inspector.cc +63 -0
  443. data/lib/libv8/v8/src/inspector.h +62 -0
  444. data/lib/libv8/v8/src/interpreter-irregexp.cc +659 -0
  445. data/lib/libv8/v8/src/interpreter-irregexp.h +49 -0
  446. data/lib/libv8/v8/src/isolate-inl.h +50 -0
  447. data/lib/libv8/v8/src/isolate.cc +1869 -0
  448. data/lib/libv8/v8/src/isolate.h +1382 -0
  449. data/lib/libv8/v8/src/json-parser.cc +504 -0
  450. data/lib/libv8/v8/src/json-parser.h +161 -0
  451. data/lib/libv8/v8/src/json.js +342 -0
  452. data/lib/libv8/v8/src/jsregexp.cc +5385 -0
  453. data/lib/libv8/v8/src/jsregexp.h +1492 -0
  454. data/lib/libv8/v8/src/list-inl.h +212 -0
  455. data/lib/libv8/v8/src/list.h +174 -0
  456. data/lib/libv8/v8/src/lithium-allocator-inl.h +142 -0
  457. data/lib/libv8/v8/src/lithium-allocator.cc +2123 -0
  458. data/lib/libv8/v8/src/lithium-allocator.h +630 -0
  459. data/lib/libv8/v8/src/lithium.cc +190 -0
  460. data/lib/libv8/v8/src/lithium.h +597 -0
  461. data/lib/libv8/v8/src/liveedit-debugger.js +1082 -0
  462. data/lib/libv8/v8/src/liveedit.cc +1691 -0
  463. data/lib/libv8/v8/src/liveedit.h +180 -0
  464. data/lib/libv8/v8/src/liveobjectlist-inl.h +126 -0
  465. data/lib/libv8/v8/src/liveobjectlist.cc +2589 -0
  466. data/lib/libv8/v8/src/liveobjectlist.h +322 -0
  467. data/lib/libv8/v8/src/log-inl.h +59 -0
  468. data/lib/libv8/v8/src/log-utils.cc +428 -0
  469. data/lib/libv8/v8/src/log-utils.h +231 -0
  470. data/lib/libv8/v8/src/log.cc +1993 -0
  471. data/lib/libv8/v8/src/log.h +476 -0
  472. data/lib/libv8/v8/src/macro-assembler.h +120 -0
  473. data/lib/libv8/v8/src/macros.py +178 -0
  474. data/lib/libv8/v8/src/mark-compact.cc +3143 -0
  475. data/lib/libv8/v8/src/mark-compact.h +506 -0
  476. data/lib/libv8/v8/src/math.js +264 -0
  477. data/lib/libv8/v8/src/messages.cc +179 -0
  478. data/lib/libv8/v8/src/messages.h +113 -0
  479. data/lib/libv8/v8/src/messages.js +1096 -0
  480. data/lib/libv8/v8/src/mips/assembler-mips-inl.h +312 -0
  481. data/lib/libv8/v8/src/mips/assembler-mips.cc +1960 -0
  482. data/lib/libv8/v8/src/mips/assembler-mips.h +1138 -0
  483. data/lib/libv8/v8/src/mips/builtins-mips.cc +1628 -0
  484. data/lib/libv8/v8/src/mips/code-stubs-mips.cc +6656 -0
  485. data/lib/libv8/v8/src/mips/code-stubs-mips.h +682 -0
  486. data/lib/libv8/v8/src/mips/codegen-mips.cc +52 -0
  487. data/lib/libv8/v8/src/mips/codegen-mips.h +98 -0
  488. data/lib/libv8/v8/src/mips/constants-mips.cc +352 -0
  489. data/lib/libv8/v8/src/mips/constants-mips.h +739 -0
  490. data/lib/libv8/v8/src/mips/cpu-mips.cc +96 -0
  491. data/lib/libv8/v8/src/mips/debug-mips.cc +308 -0
  492. data/lib/libv8/v8/src/mips/deoptimizer-mips.cc +91 -0
  493. data/lib/libv8/v8/src/mips/disasm-mips.cc +1050 -0
  494. data/lib/libv8/v8/src/mips/frames-mips.cc +47 -0
  495. data/lib/libv8/v8/src/mips/frames-mips.h +219 -0
  496. data/lib/libv8/v8/src/mips/full-codegen-mips.cc +4388 -0
  497. data/lib/libv8/v8/src/mips/ic-mips.cc +1580 -0
  498. data/lib/libv8/v8/src/mips/lithium-codegen-mips.h +65 -0
  499. data/lib/libv8/v8/src/mips/lithium-mips.h +307 -0
  500. data/lib/libv8/v8/src/mips/macro-assembler-mips.cc +4056 -0
  501. data/lib/libv8/v8/src/mips/macro-assembler-mips.h +1214 -0
  502. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.cc +1251 -0
  503. data/lib/libv8/v8/src/mips/regexp-macro-assembler-mips.h +252 -0
  504. data/lib/libv8/v8/src/mips/simulator-mips.cc +2621 -0
  505. data/lib/libv8/v8/src/mips/simulator-mips.h +401 -0
  506. data/lib/libv8/v8/src/mips/stub-cache-mips.cc +4285 -0
  507. data/lib/libv8/v8/src/mirror-debugger.js +2382 -0
  508. data/lib/libv8/v8/src/mksnapshot.cc +328 -0
  509. data/lib/libv8/v8/src/natives.h +64 -0
  510. data/lib/libv8/v8/src/objects-debug.cc +738 -0
  511. data/lib/libv8/v8/src/objects-inl.h +4323 -0
  512. data/lib/libv8/v8/src/objects-printer.cc +829 -0
  513. data/lib/libv8/v8/src/objects-visiting.cc +148 -0
  514. data/lib/libv8/v8/src/objects-visiting.h +424 -0
  515. data/lib/libv8/v8/src/objects.cc +10585 -0
  516. data/lib/libv8/v8/src/objects.h +6838 -0
  517. data/lib/libv8/v8/src/parser.cc +4997 -0
  518. data/lib/libv8/v8/src/parser.h +765 -0
  519. data/lib/libv8/v8/src/platform-cygwin.cc +779 -0
  520. data/lib/libv8/v8/src/platform-freebsd.cc +826 -0
  521. data/lib/libv8/v8/src/platform-linux.cc +1149 -0
  522. data/lib/libv8/v8/src/platform-macos.cc +830 -0
  523. data/lib/libv8/v8/src/platform-nullos.cc +479 -0
  524. data/lib/libv8/v8/src/platform-openbsd.cc +640 -0
  525. data/lib/libv8/v8/src/platform-posix.cc +424 -0
  526. data/lib/libv8/v8/src/platform-solaris.cc +762 -0
  527. data/lib/libv8/v8/src/platform-tls-mac.h +62 -0
  528. data/lib/libv8/v8/src/platform-tls-win32.h +62 -0
  529. data/lib/libv8/v8/src/platform-tls.h +50 -0
  530. data/lib/libv8/v8/src/platform-win32.cc +2021 -0
  531. data/lib/libv8/v8/src/platform.h +667 -0
  532. data/lib/libv8/v8/src/preparse-data-format.h +62 -0
  533. data/lib/libv8/v8/src/preparse-data.cc +183 -0
  534. data/lib/libv8/v8/src/preparse-data.h +225 -0
  535. data/lib/libv8/v8/src/preparser-api.cc +220 -0
  536. data/lib/libv8/v8/src/preparser.cc +1450 -0
  537. data/lib/libv8/v8/src/preparser.h +493 -0
  538. data/lib/libv8/v8/src/prettyprinter.cc +1493 -0
  539. data/lib/libv8/v8/src/prettyprinter.h +223 -0
  540. data/lib/libv8/v8/src/profile-generator-inl.h +128 -0
  541. data/lib/libv8/v8/src/profile-generator.cc +3098 -0
  542. data/lib/libv8/v8/src/profile-generator.h +1126 -0
  543. data/lib/libv8/v8/src/property.cc +105 -0
  544. data/lib/libv8/v8/src/property.h +365 -0
  545. data/lib/libv8/v8/src/proxy.js +83 -0
  546. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp-inl.h +78 -0
  547. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.cc +471 -0
  548. data/lib/libv8/v8/src/regexp-macro-assembler-irregexp.h +142 -0
  549. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.cc +373 -0
  550. data/lib/libv8/v8/src/regexp-macro-assembler-tracer.h +104 -0
  551. data/lib/libv8/v8/src/regexp-macro-assembler.cc +267 -0
  552. data/lib/libv8/v8/src/regexp-macro-assembler.h +243 -0
  553. data/lib/libv8/v8/src/regexp-stack.cc +111 -0
  554. data/lib/libv8/v8/src/regexp-stack.h +147 -0
  555. data/lib/libv8/v8/src/regexp.js +483 -0
  556. data/lib/libv8/v8/src/rewriter.cc +360 -0
  557. data/lib/libv8/v8/src/rewriter.h +50 -0
  558. data/lib/libv8/v8/src/runtime-profiler.cc +489 -0
  559. data/lib/libv8/v8/src/runtime-profiler.h +201 -0
  560. data/lib/libv8/v8/src/runtime.cc +12227 -0
  561. data/lib/libv8/v8/src/runtime.h +652 -0
  562. data/lib/libv8/v8/src/runtime.js +649 -0
  563. data/lib/libv8/v8/src/safepoint-table.cc +256 -0
  564. data/lib/libv8/v8/src/safepoint-table.h +270 -0
  565. data/lib/libv8/v8/src/scanner-base.cc +952 -0
  566. data/lib/libv8/v8/src/scanner-base.h +670 -0
  567. data/lib/libv8/v8/src/scanner.cc +345 -0
  568. data/lib/libv8/v8/src/scanner.h +146 -0
  569. data/lib/libv8/v8/src/scopeinfo.cc +646 -0
  570. data/lib/libv8/v8/src/scopeinfo.h +254 -0
  571. data/lib/libv8/v8/src/scopes.cc +1150 -0
  572. data/lib/libv8/v8/src/scopes.h +507 -0
  573. data/lib/libv8/v8/src/serialize.cc +1574 -0
  574. data/lib/libv8/v8/src/serialize.h +589 -0
  575. data/lib/libv8/v8/src/shell.h +55 -0
  576. data/lib/libv8/v8/src/simulator.h +43 -0
  577. data/lib/libv8/v8/src/small-pointer-list.h +163 -0
  578. data/lib/libv8/v8/src/smart-pointer.h +109 -0
  579. data/lib/libv8/v8/src/snapshot-common.cc +83 -0
  580. data/lib/libv8/v8/src/snapshot-empty.cc +54 -0
  581. data/lib/libv8/v8/src/snapshot.h +91 -0
  582. data/lib/libv8/v8/src/spaces-inl.h +529 -0
  583. data/lib/libv8/v8/src/spaces.cc +3145 -0
  584. data/lib/libv8/v8/src/spaces.h +2369 -0
  585. data/lib/libv8/v8/src/splay-tree-inl.h +310 -0
  586. data/lib/libv8/v8/src/splay-tree.h +205 -0
  587. data/lib/libv8/v8/src/string-search.cc +41 -0
  588. data/lib/libv8/v8/src/string-search.h +568 -0
  589. data/lib/libv8/v8/src/string-stream.cc +592 -0
  590. data/lib/libv8/v8/src/string-stream.h +191 -0
  591. data/lib/libv8/v8/src/string.js +994 -0
  592. data/lib/libv8/v8/src/strtod.cc +440 -0
  593. data/lib/libv8/v8/src/strtod.h +40 -0
  594. data/lib/libv8/v8/src/stub-cache.cc +1965 -0
  595. data/lib/libv8/v8/src/stub-cache.h +924 -0
  596. data/lib/libv8/v8/src/third_party/valgrind/valgrind.h +3925 -0
  597. data/lib/libv8/v8/src/token.cc +63 -0
  598. data/lib/libv8/v8/src/token.h +288 -0
  599. data/lib/libv8/v8/src/type-info.cc +507 -0
  600. data/lib/libv8/v8/src/type-info.h +272 -0
  601. data/lib/libv8/v8/src/unbound-queue-inl.h +95 -0
  602. data/lib/libv8/v8/src/unbound-queue.h +69 -0
  603. data/lib/libv8/v8/src/unicode-inl.h +238 -0
  604. data/lib/libv8/v8/src/unicode.cc +1624 -0
  605. data/lib/libv8/v8/src/unicode.h +280 -0
  606. data/lib/libv8/v8/src/uri.js +408 -0
  607. data/lib/libv8/v8/src/utils-inl.h +48 -0
  608. data/lib/libv8/v8/src/utils.cc +371 -0
  609. data/lib/libv8/v8/src/utils.h +800 -0
  610. data/lib/libv8/v8/src/v8-counters.cc +62 -0
  611. data/lib/libv8/v8/src/v8-counters.h +314 -0
  612. data/lib/libv8/v8/src/v8.cc +213 -0
  613. data/lib/libv8/v8/src/v8.h +131 -0
  614. data/lib/libv8/v8/src/v8checks.h +64 -0
  615. data/lib/libv8/v8/src/v8dll-main.cc +44 -0
  616. data/lib/libv8/v8/src/v8globals.h +512 -0
  617. data/lib/libv8/v8/src/v8memory.h +82 -0
  618. data/lib/libv8/v8/src/v8natives.js +1310 -0
  619. data/lib/libv8/v8/src/v8preparserdll-main.cc +39 -0
  620. data/lib/libv8/v8/src/v8threads.cc +464 -0
  621. data/lib/libv8/v8/src/v8threads.h +165 -0
  622. data/lib/libv8/v8/src/v8utils.h +319 -0
  623. data/lib/libv8/v8/src/variables.cc +114 -0
  624. data/lib/libv8/v8/src/variables.h +167 -0
  625. data/lib/libv8/v8/src/version.cc +116 -0
  626. data/lib/libv8/v8/src/version.h +68 -0
  627. data/lib/libv8/v8/src/vm-state-inl.h +138 -0
  628. data/lib/libv8/v8/src/vm-state.h +71 -0
  629. data/lib/libv8/v8/src/win32-headers.h +96 -0
  630. data/lib/libv8/v8/src/x64/assembler-x64-inl.h +462 -0
  631. data/lib/libv8/v8/src/x64/assembler-x64.cc +3027 -0
  632. data/lib/libv8/v8/src/x64/assembler-x64.h +1633 -0
  633. data/lib/libv8/v8/src/x64/builtins-x64.cc +1520 -0
  634. data/lib/libv8/v8/src/x64/code-stubs-x64.cc +5132 -0
  635. data/lib/libv8/v8/src/x64/code-stubs-x64.h +514 -0
  636. data/lib/libv8/v8/src/x64/codegen-x64.cc +146 -0
  637. data/lib/libv8/v8/src/x64/codegen-x64.h +76 -0
  638. data/lib/libv8/v8/src/x64/cpu-x64.cc +88 -0
  639. data/lib/libv8/v8/src/x64/debug-x64.cc +319 -0
  640. data/lib/libv8/v8/src/x64/deoptimizer-x64.cc +815 -0
  641. data/lib/libv8/v8/src/x64/disasm-x64.cc +1832 -0
  642. data/lib/libv8/v8/src/x64/frames-x64.cc +45 -0
  643. data/lib/libv8/v8/src/x64/frames-x64.h +130 -0
  644. data/lib/libv8/v8/src/x64/full-codegen-x64.cc +4318 -0
  645. data/lib/libv8/v8/src/x64/ic-x64.cc +1608 -0
  646. data/lib/libv8/v8/src/x64/lithium-codegen-x64.cc +4267 -0
  647. data/lib/libv8/v8/src/x64/lithium-codegen-x64.h +367 -0
  648. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.cc +320 -0
  649. data/lib/libv8/v8/src/x64/lithium-gap-resolver-x64.h +74 -0
  650. data/lib/libv8/v8/src/x64/lithium-x64.cc +2202 -0
  651. data/lib/libv8/v8/src/x64/lithium-x64.h +2333 -0
  652. data/lib/libv8/v8/src/x64/macro-assembler-x64.cc +3745 -0
  653. data/lib/libv8/v8/src/x64/macro-assembler-x64.h +1290 -0
  654. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.cc +1398 -0
  655. data/lib/libv8/v8/src/x64/regexp-macro-assembler-x64.h +282 -0
  656. data/lib/libv8/v8/src/x64/simulator-x64.cc +27 -0
  657. data/lib/libv8/v8/src/x64/simulator-x64.h +72 -0
  658. data/lib/libv8/v8/src/x64/stub-cache-x64.cc +3610 -0
  659. data/lib/libv8/v8/src/zone-inl.h +140 -0
  660. data/lib/libv8/v8/src/zone.cc +196 -0
  661. data/lib/libv8/v8/src/zone.h +240 -0
  662. data/lib/libv8/v8/tools/codemap.js +265 -0
  663. data/lib/libv8/v8/tools/consarray.js +93 -0
  664. data/lib/libv8/v8/tools/csvparser.js +78 -0
  665. data/lib/libv8/v8/tools/disasm.py +92 -0
  666. data/lib/libv8/v8/tools/freebsd-tick-processor +10 -0
  667. data/lib/libv8/v8/tools/gc-nvp-trace-processor.py +342 -0
  668. data/lib/libv8/v8/tools/gcmole/README +62 -0
  669. data/lib/libv8/v8/tools/gcmole/gccause.lua +60 -0
  670. data/lib/libv8/v8/tools/gcmole/gcmole.cc +1261 -0
  671. data/lib/libv8/v8/tools/gcmole/gcmole.lua +378 -0
  672. data/lib/libv8/v8/tools/generate-ten-powers.scm +286 -0
  673. data/lib/libv8/v8/tools/grokdump.py +841 -0
  674. data/lib/libv8/v8/tools/gyp/v8.gyp +995 -0
  675. data/lib/libv8/v8/tools/js2c.py +364 -0
  676. data/lib/libv8/v8/tools/jsmin.py +280 -0
  677. data/lib/libv8/v8/tools/linux-tick-processor +35 -0
  678. data/lib/libv8/v8/tools/ll_prof.py +942 -0
  679. data/lib/libv8/v8/tools/logreader.js +185 -0
  680. data/lib/libv8/v8/tools/mac-nm +18 -0
  681. data/lib/libv8/v8/tools/mac-tick-processor +6 -0
  682. data/lib/libv8/v8/tools/oom_dump/README +31 -0
  683. data/lib/libv8/v8/tools/oom_dump/SConstruct +42 -0
  684. data/lib/libv8/v8/tools/oom_dump/oom_dump.cc +288 -0
  685. data/lib/libv8/v8/tools/presubmit.py +305 -0
  686. data/lib/libv8/v8/tools/process-heap-prof.py +120 -0
  687. data/lib/libv8/v8/tools/profile.js +751 -0
  688. data/lib/libv8/v8/tools/profile_view.js +219 -0
  689. data/lib/libv8/v8/tools/run-valgrind.py +77 -0
  690. data/lib/libv8/v8/tools/splaytree.js +316 -0
  691. data/lib/libv8/v8/tools/stats-viewer.py +468 -0
  692. data/lib/libv8/v8/tools/test.py +1510 -0
  693. data/lib/libv8/v8/tools/tickprocessor-driver.js +59 -0
  694. data/lib/libv8/v8/tools/tickprocessor.js +877 -0
  695. data/lib/libv8/v8/tools/utils.py +96 -0
  696. data/lib/libv8/v8/tools/visual_studio/README.txt +12 -0
  697. data/lib/libv8/v8/tools/windows-tick-processor.bat +30 -0
  698. data/lib/libv8/version.rb +4 -0
  699. data/libv8.gemspec +31 -0
  700. metadata +800 -0
@@ -0,0 +1,2123 @@
1
+ // Copyright 2010 the V8 project authors. All rights reserved.
2
+ // Redistribution and use in source and binary forms, with or without
3
+ // modification, are permitted provided that the following conditions are
4
+ // met:
5
+ //
6
+ // * Redistributions of source code must retain the above copyright
7
+ // notice, this list of conditions and the following disclaimer.
8
+ // * Redistributions in binary form must reproduce the above
9
+ // copyright notice, this list of conditions and the following
10
+ // disclaimer in the documentation and/or other materials provided
11
+ // with the distribution.
12
+ // * Neither the name of Google Inc. nor the names of its
13
+ // contributors may be used to endorse or promote products derived
14
+ // from this software without specific prior written permission.
15
+ //
16
+ // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
+ // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
+ // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
+ // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
+ // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
+ // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
+ // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
+ // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
+ // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
+ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
+ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
+
28
+ #include "v8.h"
29
+ #include "lithium-allocator-inl.h"
30
+
31
+ #include "hydrogen.h"
32
+ #include "string-stream.h"
33
+
34
+ #if V8_TARGET_ARCH_IA32
35
+ #include "ia32/lithium-ia32.h"
36
+ #elif V8_TARGET_ARCH_X64
37
+ #include "x64/lithium-x64.h"
38
+ #elif V8_TARGET_ARCH_ARM
39
+ #include "arm/lithium-arm.h"
40
+ #elif V8_TARGET_ARCH_MIPS
41
+ #include "mips/lithium-mips.h"
42
+ #else
43
+ #error "Unknown architecture."
44
+ #endif
45
+
46
+ namespace v8 {
47
+ namespace internal {
48
+
49
+
50
// Defines a static cache of pre-built operands for a concrete LOperand
// subclass plus its SetupCache() initializer, which converts every cached
// slot to the given operand |type| with its own index.  The file-scope
// boolean forces SetupCache() to run during static initialization
// (i.e. before main) -- ordering relies on this translation unit only.
#define DEFINE_OPERAND_CACHE(name, type)    \
  name name::cache[name::kNumCachedOperands]; \
  void name::SetupCache() { \
    for (int i = 0; i < kNumCachedOperands; i++) { \
      cache[i].ConvertTo(type, i); \
    } \
  } \
  static bool name##_initialize() { \
    name::SetupCache(); \
    return true; \
  } \
  static bool name##_cache_initialized = name##_initialize();

DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
DEFINE_OPERAND_CACHE(LStackSlot,       STACK_SLOT)
DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
DEFINE_OPERAND_CACHE(LRegister,        REGISTER)
DEFINE_OPERAND_CACHE(LDoubleRegister,  DOUBLE_REGISTER)

#undef DEFINE_OPERAND_CACHE
70
+
71
+
72
+ static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
73
+ return a.Value() < b.Value() ? a : b;
74
+ }
75
+
76
+
77
+ static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
78
+ return a.Value() > b.Value() ? a : b;
79
+ }
80
+
81
+
82
// A UsePosition records one use of a value at a given lifetime position.
// For unallocated operands, register requirements are derived from the
// operand's allocation policy; otherwise the defaults (no register
// required, register beneficial) stand.
UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
    : operand_(operand),
      hint_(NULL),
      pos_(pos),
      next_(NULL),
      requires_reg_(false),
      register_beneficial_(true) {
  if (operand_ != NULL && operand_->IsUnallocated()) {
    LUnallocated* unalloc = LUnallocated::cast(operand_);
    // A register is required only when the policy demands one; it is
    // beneficial unless the policy accepts any location.
    requires_reg_ = unalloc->HasRegisterPolicy();
    register_beneficial_ = !unalloc->HasAnyPolicy();
  }
  ASSERT(pos_.IsValid());
}
96
+
97
+
98
+ bool UsePosition::HasHint() const {
99
+ return hint_ != NULL && !hint_->IsUnallocated();
100
+ }
101
+
102
+
103
// True if this use can only be satisfied by a register.
bool UsePosition::RequiresRegister() const {
  return requires_reg_;
}
106
+
107
+
108
// True if allocating a register for this use would help, even when one is
// not strictly required.
bool UsePosition::RegisterIsBeneficial() const {
  return register_beneficial_;
}
111
+
112
+
113
// Splits this interval at |pos|: this interval becomes [start, pos[ and a
// new interval [pos, end[ is linked right after it.  |pos| must lie
// strictly inside the interval.
void UseInterval::SplitAt(LifetimePosition pos) {
  ASSERT(Contains(pos) && pos.Value() != start().Value());
  // NOTE(review): allocated with plain `new` -- presumably zone-allocated
  // via an overloaded operator new, as is usual in this codebase; confirm.
  UseInterval* after = new UseInterval(pos, end_);
  after->next_ = next_;
  next_ = after;
  end_ = pos;
}
120
+
121
+
122
+ #ifdef DEBUG
123
+
124
+
125
// Debug check: every use position must lie within [Start, End] of the range.
void LiveRange::Verify() const {
  UsePosition* cur = first_pos_;
  while (cur != NULL) {
    ASSERT(Start().Value() <= cur->pos().Value() &&
           cur->pos().Value() <= End().Value());
    cur = cur->next();
  }
}
133
+
134
+
135
// Debug check: returns true if |target| overlaps any use interval of this
// range.
bool LiveRange::HasOverlap(UseInterval* target) const {
  UseInterval* current_interval = first_interval_;
  while (current_interval != NULL) {
    // Intervals overlap if the start of one is contained in the other.
    if (current_interval->Contains(target->start()) ||
        target->Contains(current_interval->start())) {
      return true;
    }
    current_interval = current_interval->next();
  }
  return false;
}
147
+
148
+
149
+ #endif
150
+
151
+
152
// Creates an empty live range for virtual register |id|.  The spill operand
// starts out as an IGNORE-policy unallocated placeholder and is converted
// in place once a spill slot is chosen (see SetSpillOperand).
LiveRange::LiveRange(int id)
    : id_(id),
      spilled_(false),
      assigned_register_(kInvalidAssignment),
      assigned_register_kind_(NONE),
      last_interval_(NULL),
      first_interval_(NULL),
      first_pos_(NULL),
      parent_(NULL),
      next_(NULL),
      current_interval_(NULL),
      last_processed_use_(NULL),
      spill_start_index_(kMaxInt) {
  spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
}
167
+
168
+
169
// Assigns register |reg| of kind |register_kind| to this range and rewrites
// all of the range's use operands to refer to it.
void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
  ASSERT(!HasRegisterAssigned() && !IsSpilled());
  assigned_register_ = reg;
  assigned_register_kind_ = register_kind;
  ConvertOperands();
}
175
+
176
+
177
// Marks this range as spilled; its use operands are rewritten to the
// top-level range's spill slot, which must already be allocated.
void LiveRange::MakeSpilled() {
  ASSERT(!IsSpilled());
  ASSERT(TopLevel()->HasAllocatedSpillOperand());
  spilled_ = true;
  assigned_register_ = kInvalidAssignment;
  ConvertOperands();
}
184
+
185
+
186
+ bool LiveRange::HasAllocatedSpillOperand() const {
187
+ return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
188
+ }
189
+
190
+
191
// Fixes the spill location by converting the placeholder spill operand to
// |operand|'s concrete kind and index.  May only happen once per range.
void LiveRange::SetSpillOperand(LOperand* operand) {
  ASSERT(!operand->IsUnallocated());
  ASSERT(spill_operand_ != NULL);
  ASSERT(spill_operand_->IsUnallocated());
  spill_operand_->ConvertTo(operand->kind(), operand->index());
}
197
+
198
+
199
// Returns the first use position at or after |start|, or NULL.  The result
// is cached in last_processed_use_ so successive forward queries resume the
// scan instead of restarting from the head of the list.
UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
  UsePosition* use_pos = last_processed_use_;
  if (use_pos == NULL) use_pos = first_pos();
  while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
    use_pos = use_pos->next();
  }
  last_processed_use_ = use_pos;
  return use_pos;
}
208
+
209
+
210
+ UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
211
+ LifetimePosition start) {
212
+ UsePosition* pos = NextUsePosition(start);
213
+ while (pos != NULL && !pos->RegisterIsBeneficial()) {
214
+ pos = pos->next();
215
+ }
216
+ return pos;
217
+ }
218
+
219
+
220
+ UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
221
+ UsePosition* pos = NextUsePosition(start);
222
+ while (pos != NULL && !pos->RequiresRegister()) {
223
+ pos = pos->next();
224
+ }
225
+ return pos;
226
+ }
227
+
228
+
229
// Returns true if this range may be spilled at |pos| without violating a
// register requirement.
bool LiveRange::CanBeSpilled(LifetimePosition pos) {
  // TODO(kmillikin): Comment. Now.
  // A range that already holds a register cannot be spilled at (or before)
  // its own start.
  if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;

  // We cannot spill a live range that has a use requiring a register
  // at the current or the immediate next position.
  UsePosition* use_pos = NextRegisterPosition(pos);
  if (use_pos == NULL) return true;
  return use_pos->pos().Value() > pos.NextInstruction().Value();
}
239
+
240
+
241
+ UsePosition* LiveRange::FirstPosWithHint() const {
242
+ UsePosition* pos = first_pos_;
243
+ while (pos != NULL && !pos->HasHint()) pos = pos->next();
244
+ return pos;
245
+ }
246
+
247
+
248
// Materializes an LOperand describing where this range currently lives:
// a (double) register, the top-level range's spill slot, or -- if neither
// has been decided yet -- a fresh unallocated operand tagged with the
// virtual register id.
LOperand* LiveRange::CreateAssignedOperand() {
  LOperand* op = NULL;
  if (HasRegisterAssigned()) {
    ASSERT(!IsSpilled());
    if (IsDouble()) {
      op = LDoubleRegister::Create(assigned_register());
    } else {
      op = LRegister::Create(assigned_register());
    }
  } else if (IsSpilled()) {
    ASSERT(!HasRegisterAssigned());
    op = TopLevel()->GetSpillOperand();
    ASSERT(!op->IsUnallocated());
  } else {
    LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
    unalloc->set_virtual_register(id_);
    op = unalloc;
  }
  return op;
}
268
+
269
+
270
// Returns a good interval to start searching from for |position|: the
// cached current_interval_ when it does not start past |position|,
// otherwise the head of the interval list (and the cache is reset).
UseInterval* LiveRange::FirstSearchIntervalForPosition(
    LifetimePosition position) const {
  if (current_interval_ == NULL) return first_interval_;
  if (current_interval_->start().Value() > position.Value()) {
    current_interval_ = NULL;
    return first_interval_;
  }
  return current_interval_;
}
279
+
280
+
281
// Advances the cached search marker to |to_start_of|, provided that
// interval starts no later than |but_not_past| and no earlier than the
// current marker (the marker only ever moves forward).
void LiveRange::AdvanceLastProcessedMarker(
    UseInterval* to_start_of, LifetimePosition but_not_past) const {
  if (to_start_of == NULL) return;
  if (to_start_of->start().Value() > but_not_past.Value()) return;
  LifetimePosition start =
      current_interval_ == NULL ? LifetimePosition::Invalid()
                                : current_interval_->start();
  if (to_start_of->start().Value() > start.Value()) {
    current_interval_ = to_start_of;
  }
}
292
+
293
+
294
// Splits this live range at |position|, moving all intervals and use
// positions at or after it into |result| (which must be empty).  |result|
// becomes a child of this range's top level and is linked into the sibling
// chain right after this range.
void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
  ASSERT(Start().Value() < position.Value());
  ASSERT(result->IsEmpty());
  // Find the last interval that ends before the position. If the
  // position is contained in one of the intervals in the chain, we
  // split that interval and use the first part.
  UseInterval* current = FirstSearchIntervalForPosition(position);

  // If the split position coincides with the beginning of a use interval
  // we need to split use positions in a special way.
  bool split_at_start = false;

  while (current != NULL) {
    if (current->Contains(position)) {
      current->SplitAt(position);
      break;
    }
    UseInterval* next = current->next();
    if (next->start().Value() >= position.Value()) {
      split_at_start = (next->start().Value() == position.Value());
      break;
    }
    current = next;
  }

  // Partition original use intervals to the two live ranges.
  UseInterval* before = current;
  UseInterval* after = before->next();
  result->last_interval_ = (last_interval_ == before)
      ? after            // Only interval in the range after split.
      : last_interval_;  // Last interval of the original range.
  result->first_interval_ = after;
  last_interval_ = before;

  // Find the last use position before the split and the first use
  // position after it.
  UsePosition* use_after = first_pos_;
  UsePosition* use_before = NULL;
  if (split_at_start) {
    // The split position coincides with the beginning of a use interval (the
    // end of a lifetime hole). Use at this position should be attributed to
    // the split child because split child owns use interval covering it.
    while (use_after != NULL && use_after->pos().Value() < position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  } else {
    while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
      use_before = use_after;
      use_after = use_after->next();
    }
  }

  // Partition original use positions to the two live ranges.
  if (use_before != NULL) {
    use_before->next_ = NULL;
  } else {
    first_pos_ = NULL;
  }
  result->first_pos_ = use_after;

  // Link the new live range in the chain before any of the other
  // ranges linked from the range before the split.
  result->parent_ = (parent_ == NULL) ? this : parent_;
  result->next_ = next_;
  next_ = result;

#ifdef DEBUG
  Verify();
  result->Verify();
#endif
}
366
+
367
+
368
// This implements an ordering on live ranges so that they are ordered by
// their start positions.  This is needed for the correctness of the
// register allocation algorithm.  If two live ranges start at the same
// offset then there is a tie breaker based on where the value is first
// used.  This part of the ordering is merely a heuristic.
bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
  LifetimePosition start = Start();
  LifetimePosition other_start = other->Start();
  if (start.Value() == other_start.Value()) {
    // Tie break: a range with an earlier hinted use goes first.
    UsePosition* pos = FirstPosWithHint();
    if (pos == NULL) return false;
    UsePosition* other_pos = other->first_pos();
    if (other_pos == NULL) return true;
    return pos->pos().Value() < other_pos->pos().Value();
  }
  return start.Value() < other_start.Value();
}
385
+
386
+
387
// Trims the front of the range so it begins at |start|.  Only the first
// interval is affected; |start| must fall inside it.
void LiveRange::ShortenTo(LifetimePosition start) {
  LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
  ASSERT(first_interval_ != NULL);
  ASSERT(first_interval_->start().Value() <= start.Value());
  ASSERT(start.Value() < first_interval_->end().Value());
  first_interval_->set_start(start);
}
394
+
395
+
396
// Guarantees the range covers [start, end[ with a single leading interval,
// absorbing any existing intervals that begin inside the new one.
void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
  LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  LifetimePosition new_end = end;
  // Swallow leading intervals that start no later than |end|, extending the
  // new interval to the furthest end point seen.
  while (first_interval_ != NULL &&
         first_interval_->start().Value() <= end.Value()) {
    if (first_interval_->end().Value() > end.Value()) {
      new_end = first_interval_->end();
    }
    first_interval_ = first_interval_->next();
  }

  UseInterval* new_interval = new UseInterval(start, new_end);
  new_interval->next_ = first_interval_;
  first_interval_ = new_interval;
  if (new_interval->next() == NULL) {
    last_interval_ = new_interval;
  }
}
417
+
418
+
419
// Prepends the interval [start, end[ to this range, merging it with the
// current first interval when the two touch or overlap.
void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
  LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
                         id_,
                         start.Value(),
                         end.Value());
  if (first_interval_ == NULL) {
    UseInterval* interval = new UseInterval(start, end);
    first_interval_ = interval;
    last_interval_ = interval;
  } else {
    if (end.Value() == first_interval_->start().Value()) {
      // New interval ends exactly where the first begins: extend in place.
      first_interval_->set_start(start);
    } else if (end.Value() < first_interval_->start().Value()) {
      // Disjoint and earlier: prepend a fresh interval.
      UseInterval* interval = new UseInterval(start, end);
      interval->set_next(first_interval_);
      first_interval_ = interval;
    } else {
      // Order of instruction's processing (see ProcessInstructions)
      // guarantees that each new use interval either precedes or intersects
      // with last added interval.
      ASSERT(start.Value() < first_interval_->end().Value());
      first_interval_->start_ = Min(start, first_interval_->start_);
      first_interval_->end_ = Max(end, first_interval_->end_);
    }
  }
}
445
+
446
+
447
// Inserts a new use of |operand| at |pos| into the use-position list, which
// is kept sorted by lifetime position.  Returns the new node so the caller
// can attach a hint.
UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
                                       LOperand* operand) {
  LAllocator::TraceAlloc("Add to live range %d use position %d\n",
                         id_,
                         pos.Value());
  UsePosition* use_pos = new UsePosition(pos, operand);
  UsePosition* prev = NULL;
  UsePosition* current = first_pos_;
  // Find the insertion point: the first existing position not before |pos|.
  while (current != NULL && current->pos().Value() < pos.Value()) {
    prev = current;
    current = current->next();
  }

  if (prev == NULL) {
    use_pos->set_next(first_pos_);
    first_pos_ = use_pos;
  } else {
    use_pos->next_ = prev->next_;
    prev->next_ = use_pos;
  }

  return use_pos;
}
470
+
471
+
472
// Rewrites every use position's operand to this range's current location
// (register or spill slot), as produced by CreateAssignedOperand().
void LiveRange::ConvertOperands() {
  LOperand* op = CreateAssignedOperand();
  UsePosition* use_pos = first_pos();
  while (use_pos != NULL) {
    ASSERT(Start().Value() <= use_pos->pos().Value() &&
           use_pos->pos().Value() <= End().Value());

    if (use_pos->HasOperand()) {
      // A use that requires a register must never be rewritten to a slot.
      ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
             !use_pos->RequiresRegister());
      use_pos->operand()->ConvertTo(op->kind(), op->index());
    }
    use_pos = use_pos->next();
  }
}
487
+
488
+
489
+ bool LiveRange::CanCover(LifetimePosition position) const {
490
+ if (IsEmpty()) return false;
491
+ return Start().Value() <= position.Value() &&
492
+ position.Value() < End().Value();
493
+ }
494
+
495
+
496
// Exact liveness test: true only if |position| falls inside one of the
// range's use intervals (CanCover only checks the overall span).  Also
// advances the cached search marker as a side effect.
bool LiveRange::Covers(LifetimePosition position) {
  if (!CanCover(position)) return false;
  UseInterval* start_search = FirstSearchIntervalForPosition(position);
  for (UseInterval* interval = start_search;
       interval != NULL;
       interval = interval->next()) {
    ASSERT(interval->next() == NULL ||
           interval->next()->start().Value() >= interval->start().Value());
    AdvanceLastProcessedMarker(interval, position);
    if (interval->Contains(position)) return true;
    // Intervals are sorted; once we pass |position| it cannot be covered.
    if (interval->start().Value() > position.Value()) return false;
  }
  return false;
}
510
+
511
+
512
// Returns the first lifetime position at which this range and |other| are
// both live, or an invalid position if they never intersect.  Walks the two
// sorted interval lists in lockstep.
LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
  UseInterval* b = other->first_interval();
  if (b == NULL) return LifetimePosition::Invalid();
  LifetimePosition advance_last_processed_up_to = b->start();
  UseInterval* a = FirstSearchIntervalForPosition(b->start());
  while (a != NULL && b != NULL) {
    // Stop early once either list has moved past the other range's end.
    if (a->start().Value() > other->End().Value()) break;
    if (b->start().Value() > End().Value()) break;
    LifetimePosition cur_intersection = a->Intersect(b);
    if (cur_intersection.IsValid()) {
      return cur_intersection;
    }
    // Advance whichever interval starts earlier.
    if (a->start().Value() < b->start().Value()) {
      a = a->next();
      if (a == NULL || a->start().Value() > other->End().Value()) break;
      AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
    } else {
      b = b->next();
    }
  }
  return LifetimePosition::Invalid();
}
534
+
535
+
536
// Sets up an allocator for |graph|.  |num_values| is the number of virtual
// registers known up front; it seeds list capacities and the next virtual
// register counter.
LAllocator::LAllocator(int num_values, HGraph* graph)
    : chunk_(NULL),
      live_in_sets_(graph->blocks()->length()),
      live_ranges_(num_values * 2),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2),
      active_live_ranges_(8),
      inactive_live_ranges_(8),
      reusable_slots_(8),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(NONE),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false) {}
552
+
553
+
554
// Resets the per-block live-in sets to NULL before liveness analysis runs.
void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count);
  live_in_sets_.AddBlock(NULL, block_count);
}
560
+
561
+
562
// Computes the set of values live on exit from |block|: the union of the
// successors' live-in sets (backward edges contribute nothing, as their
// live-in is still NULL) plus the phi inputs flowing along each edge.
BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
  // Compute live out for the given block, except not including backward
  // successor edges.
  BitVector* live_out = new BitVector(next_virtual_register_);

  // Process all successor blocks.
  HBasicBlock* successor = block->end()->FirstSuccessor();
  while (successor != NULL) {
    // Add values live on entry to the successor. Note the successor's
    // live_in will not be computed yet for backwards edges.
    BitVector* live_in = live_in_sets_[successor->block_id()];
    if (live_in != NULL) live_out->Union(*live_in);

    // All phi input operands corresponding to this successor edge are live
    // out from this block.
    int index = successor->PredecessorIndexOf(block);
    const ZoneList<HPhi*>* phis = successor->phis();
    for (int i = 0; i < phis->length(); ++i) {
      HPhi* phi = phis->at(i);
      // Constant phi inputs do not occupy a live range.
      if (!phi->OperandAt(index)->IsConstant()) {
        live_out->Add(phi->OperandAt(index)->id());
      }
    }

    // Check if we are done with second successor.
    if (successor == block->end()->SecondSuccessor()) break;

    successor = block->end()->SecondSuccessor();
  }

  return live_out;
}
594
+
595
+
596
// Seeds every value live on exit from |block| with an interval spanning the
// whole block; later processing shortens these as definitions are found.
void LAllocator::AddInitialIntervals(HBasicBlock* block,
                                     BitVector* live_out) {
  // Add an interval that includes the entire block to the live range for
  // each live_out value.
  LifetimePosition start = LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
  LifetimePosition end = LifetimePosition::FromInstructionIndex(
      block->last_instruction_index()).NextInstruction();
  BitVector::Iterator iterator(live_out);
  while (!iterator.Done()) {
    int operand_index = iterator.Current();
    LiveRange* range = LiveRangeFor(operand_index);
    range->AddUseInterval(start, end);
    iterator.Advance();
  }
}
612
+
613
+
614
// Ids for fixed double-register ranges are negative, placed after the block
// of negative ids reserved for fixed general-purpose registers.
int LAllocator::FixedDoubleLiveRangeID(int index) {
  return -index - 1 - Register::kNumAllocatableRegisters;
}
617
+
618
+
619
// Converts a fixed-policy unallocated operand in place to its concrete
// location (stack slot, register, or double register).  If the value is
// tagged, the location is also recorded in the pointer map of the
// instruction at |pos| so the GC can find it.
LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
                                    int pos,
                                    bool is_tagged) {
  TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
  ASSERT(operand->HasFixedPolicy());
  if (operand->policy() == LUnallocated::FIXED_SLOT) {
    operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
  } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::REGISTER, reg_index);
  } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
    int reg_index = operand->fixed_index();
    operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
  } else {
    UNREACHABLE();
  }
  if (is_tagged) {
    TraceAlloc("Fixed reg is tagged at %d\n", pos);
    LInstruction* instr = InstructionAt(pos);
    if (instr->HasPointerMap()) {
      instr->pointer_map()->RecordPointer(operand);
    }
  }
  return operand;
}
644
+
645
+
646
// Returns (creating on first request) the fixed live range that represents
// general-purpose register |index|.
LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(FixedLiveRangeID(index));
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, GENERAL_REGISTERS);
    fixed_live_ranges_[index] = result;
  }
  return result;
}
657
+
658
+
659
// Returns (creating on first request) the fixed live range that represents
// double register |index|.
LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(FixedDoubleLiveRangeID(index));
    ASSERT(result->IsFixed());
    result->set_assigned_register(index, DOUBLE_REGISTERS);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}
670
+
671
+
672
// Returns (creating on demand) the live range for virtual register |index|,
// growing the backing list with NULL entries as needed.
LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new LiveRange(index);
    live_ranges_[index] = result;
  }
  return result;
}
683
+
684
+
685
+ LGap* LAllocator::GetLastGap(HBasicBlock* block) {
686
+ int last_instruction = block->last_instruction_index();
687
+ int index = chunk_->NearestGapPos(last_instruction);
688
+ return GapAt(index);
689
+ }
690
+
691
+
692
// If |operand| refers (via its virtual register) to a phi in the hydrogen
// graph, returns that phi; otherwise returns NULL.
HPhi* LAllocator::LookupPhi(LOperand* operand) const {
  if (!operand->IsUnallocated()) return NULL;
  int index = operand->VirtualRegister();
  HValue* instr = graph_->LookupValue(index);
  if (instr != NULL && instr->IsPhi()) {
    return HPhi::cast(instr);
  }
  return NULL;
}
701
+
702
+
703
// Maps an operand to its live range: virtual registers get their own
// range, fixed (double) registers get the corresponding fixed range, and
// any other operand kind has none.
LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
  if (operand->IsUnallocated()) {
    return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
  } else if (operand->IsRegister()) {
    return FixedLiveRangeFor(operand->index());
  } else if (operand->IsDoubleRegister()) {
    return FixedDoubleLiveRangeFor(operand->index());
  } else {
    return NULL;
  }
}
714
+
715
+
716
+ void LAllocator::Define(LifetimePosition position,
717
+ LOperand* operand,
718
+ LOperand* hint) {
719
+ LiveRange* range = LiveRangeFor(operand);
720
+ if (range == NULL) return;
721
+
722
+ if (range->IsEmpty() || range->Start().Value() > position.Value()) {
723
+ // Can happen if there is a definition without use.
724
+ range->AddUseInterval(position, position.NextInstruction());
725
+ range->AddUsePosition(position.NextInstruction(), NULL);
726
+ } else {
727
+ range->ShortenTo(position);
728
+ }
729
+
730
+ if (operand->IsUnallocated()) {
731
+ LUnallocated* unalloc_operand = LUnallocated::cast(operand);
732
+ range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
733
+ }
734
+ }
735
+
736
+
737
+ void LAllocator::Use(LifetimePosition block_start,
738
+ LifetimePosition position,
739
+ LOperand* operand,
740
+ LOperand* hint) {
741
+ LiveRange* range = LiveRangeFor(operand);
742
+ if (range == NULL) return;
743
+ if (operand->IsUnallocated()) {
744
+ LUnallocated* unalloc_operand = LUnallocated::cast(operand);
745
+ range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
746
+ }
747
+ range->AddUseInterval(block_start, position);
748
+ }
749
+
750
+
751
+ void LAllocator::AddConstraintsGapMove(int index,
752
+ LOperand* from,
753
+ LOperand* to) {
754
+ LGap* gap = GapAt(index);
755
+ LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
756
+ if (from->IsUnallocated()) {
757
+ const ZoneList<LMoveOperands>* move_operands = move->move_operands();
758
+ for (int i = 0; i < move_operands->length(); ++i) {
759
+ LMoveOperands cur = move_operands->at(i);
760
+ LOperand* cur_to = cur.destination();
761
+ if (cur_to->IsUnallocated()) {
762
+ if (cur_to->VirtualRegister() == from->VirtualRegister()) {
763
+ move->AddMove(cur.source(), to);
764
+ return;
765
+ }
766
+ }
767
+ }
768
+ }
769
+ move->AddMove(from, to);
770
+ }
771
+
772
+
773
+ void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
774
+ int start = block->first_instruction_index();
775
+ int end = block->last_instruction_index();
776
+ for (int i = start; i <= end; ++i) {
777
+ if (IsGapAt(i)) {
778
+ LInstruction* instr = NULL;
779
+ LInstruction* prev_instr = NULL;
780
+ if (i < end) instr = InstructionAt(i + 1);
781
+ if (i > start) prev_instr = InstructionAt(i - 1);
782
+ MeetConstraintsBetween(prev_instr, instr, i);
783
+ }
784
+ }
785
+ }
786
+
787
+
788
// Satisfies the operand constraints imposed by the instruction before
// (|first|) and after (|second|) the gap at |gap_index|. Either instruction
// may be NULL at block boundaries. Fixed policies are resolved by inserting
// moves into the gap; "writable register" and "same as input" policies get a
// fresh copy of the input so the original value is preserved.
void LAllocator::MeetConstraintsBetween(LInstruction* first,
                                        LInstruction* second,
                                        int gap_index) {
  // Handle fixed temporaries.
  if (first != NULL) {
    for (TempIterator it(first); it.HasNext(); it.Advance()) {
      LUnallocated* temp = LUnallocated::cast(it.Next());
      if (temp->HasFixedPolicy()) {
        // Fixed temps of the preceding instruction are pinned at the
        // position just before this gap.
        AllocateFixed(temp, gap_index - 1, false);
      }
    }
  }

  // Handle fixed output operand.
  if (first != NULL && first->Output() != NULL) {
    LUnallocated* first_output = LUnallocated::cast(first->Output());
    LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
    bool assigned = false;
    if (first_output->HasFixedPolicy()) {
      LUnallocated* output_copy = first_output->CopyUnconstrained();
      bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
      AllocateFixed(first_output, gap_index, is_tagged);

      // This value is produced on the stack, we never need to spill it.
      if (first_output->IsStackSlot()) {
        range->SetSpillOperand(first_output);
        range->SetSpillStartIndex(gap_index - 1);
        assigned = true;
      }
      // Copy the fixed output into an unconstrained operand for later uses.
      chunk_->AddGapMove(gap_index, first_output, output_copy);
    }

    if (!assigned) {
      range->SetSpillStartIndex(gap_index);

      // This move to spill operand is not a real use. Liveness analysis
      // and splitting of live ranges do not account for it.
      // Thus it should be inserted to a lifetime position corresponding to
      // the instruction end.
      LGap* gap = GapAt(gap_index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
      move->AddMove(first_output, range->GetSpillOperand());
    }
  }

  // Handle fixed input operands of second instruction.
  if (second != NULL) {
    for (UseIterator it(second); it.HasNext(); it.Advance()) {
      LUnallocated* cur_input = LUnallocated::cast(it.Next());
      if (cur_input->HasFixedPolicy()) {
        LUnallocated* input_copy = cur_input->CopyUnconstrained();
        bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
        AllocateFixed(cur_input, gap_index + 1, is_tagged);
        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
        // The live range of writable input registers always goes until the end
        // of the instruction.
        ASSERT(!cur_input->IsUsedAtStart());

        // Give the writable input a fresh artificial virtual register so the
        // original value keeps its own live range.
        LUnallocated* input_copy = cur_input->CopyUnconstrained();
        cur_input->set_virtual_register(next_virtual_register_++);

        // Record the register kind of the new artificial register so that
        // RequiredRegisterKind() can classify it later.
        if (RequiredRegisterKind(input_copy->virtual_register()) ==
            DOUBLE_REGISTERS) {
          double_artificial_registers_.Add(
              cur_input->virtual_register() - first_artificial_register_);
        }

        AddConstraintsGapMove(gap_index, input_copy, cur_input);
      }
    }
  }

  // Handle "output same as input" for second instruction.
  if (second != NULL && second->Output() != NULL) {
    LUnallocated* second_output = LUnallocated::cast(second->Output());
    if (second_output->HasSameAsInputPolicy()) {
      LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
      int output_vreg = second_output->VirtualRegister();
      int input_vreg = cur_input->VirtualRegister();

      // Alias the input onto the output's virtual register and move the
      // original input value into a fresh copy beforehand.
      LUnallocated* input_copy = cur_input->CopyUnconstrained();
      cur_input->set_virtual_register(second_output->virtual_register());
      AddConstraintsGapMove(gap_index, input_copy, cur_input);

      if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
        // An untagged output overwrites a tagged input: keep the preserved
        // tagged copy visible to the GC via the instruction's pointer map.
        int index = gap_index + 1;
        LInstruction* instr = InstructionAt(index);
        if (instr->HasPointerMap()) {
          instr->pointer_map()->RecordPointer(input_copy);
        }
      } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
        // The input is assumed to immediately have a tagged representation,
        // before the pointer map can be used. I.e. the pointer map at the
        // instruction will include the output operand (whose value at the
        // beginning of the instruction is equal to the input operand). If
        // this is not desired, then the pointer map at this instruction needs
        // to be adjusted manually.
      }
    }
  }
}
890
+
891
+
892
// Walks the instructions of |block| backwards, updating |live| (the set of
// virtual registers currently live) and recording use intervals, use
// positions and definitions on the affected live ranges. Gap moves are
// processed as definition-of-destination / use-of-source pairs; dead moves
// are eliminated on the fly.
void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
  int block_start = block->first_instruction_index();
  int index = block->last_instruction_index();

  LifetimePosition block_start_position =
      LifetimePosition::FromInstructionIndex(block_start);

  while (index >= block_start) {
    LifetimePosition curr_position =
        LifetimePosition::FromInstructionIndex(index);

    if (IsGapAt(index)) {
      // We have a gap at this position.
      LGap* gap = GapAt(index);
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
      const ZoneList<LMoveOperands>* move_operands = move->move_operands();
      for (int i = 0; i < move_operands->length(); ++i) {
        LMoveOperands* cur = &move_operands->at(i);
        if (cur->IsIgnored()) continue;
        LOperand* from = cur->source();
        LOperand* to = cur->destination();
        HPhi* phi = LookupPhi(to);
        LOperand* hint = to;
        if (phi != NULL) {
          // This is a phi resolving move.
          if (!phi->block()->IsLoopHeader()) {
            hint = LiveRangeFor(phi->id())->FirstHint();
          }
        } else {
          if (to->IsUnallocated()) {
            if (live->Contains(to->VirtualRegister())) {
              Define(curr_position, to, from);
              live->Remove(to->VirtualRegister());
            } else {
              // Destination is dead below this point: drop the move.
              cur->Eliminate();
              continue;
            }
          } else {
            Define(curr_position, to, from);
          }
        }
        Use(block_start_position, curr_position, from, hint);
        if (from->IsUnallocated()) {
          live->Add(from->VirtualRegister());
        }
      }
    } else {
      ASSERT(!IsGapAt(index));
      LInstruction* instr = InstructionAt(index);

      if (instr != NULL) {
        LOperand* output = instr->Output();
        if (output != NULL) {
          // The output is defined here, so it is no longer live above.
          if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
          Define(curr_position, output, NULL);
        }

        if (instr->IsMarkedAsCall()) {
          // A call clobbers every allocatable general register except the
          // one holding the output (if any): pin them all across the call.
          for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsRegister() ||
                output->index() != i) {
              LiveRange* range = FixedLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd());
            }
          }
        }

        if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
          // Likewise for all allocatable double registers.
          for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
            if (output == NULL || !output->IsDoubleRegister() ||
                output->index() != i) {
              LiveRange* range = FixedDoubleLiveRangeFor(i);
              range->AddUseInterval(curr_position,
                                    curr_position.InstructionEnd());
            }
          }
        }

        for (UseIterator it(instr); it.HasNext(); it.Advance()) {
          LOperand* input = it.Next();

          // Inputs used only at the start of the instruction may share a
          // register with the output; others live to the instruction end.
          LifetimePosition use_pos;
          if (input->IsUnallocated() &&
              LUnallocated::cast(input)->IsUsedAtStart()) {
            use_pos = curr_position;
          } else {
            use_pos = curr_position.InstructionEnd();
          }

          Use(block_start_position, use_pos, input, NULL);
          if (input->IsUnallocated()) live->Add(input->VirtualRegister());
        }

        for (TempIterator it(instr); it.HasNext(); it.Advance()) {
          LOperand* temp = it.Next();
          if (instr->IsMarkedAsCall()) {
            // NOTE(review): at calls, register temps and fixed-policy temps
            // are skipped here — presumably already covered by the clobber
            // intervals added above; confirm against AllocateFixed().
            if (temp->IsRegister()) continue;
            if (temp->IsUnallocated()) {
              LUnallocated* temp_unalloc = LUnallocated::cast(temp);
              if (temp_unalloc->HasFixedPolicy()) {
                continue;
              }
            }
          }
          Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
          Define(curr_position, temp, NULL);
        }
      }
    }

    index = index - 1;
  }
}
1006
+
1007
+
1008
// Lowers the phis of |block| into gap moves: every predecessor gets a move
// from its corresponding phi operand into one shared unallocated operand,
// and the phi result is additionally connected to its spill slot at the top
// of the block.
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    // Shared destination operand for all incoming phi moves.
    LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // the AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      if (phi->representation().IsTagged()) {
        LInstruction* branch =
            InstructionAt(cur_block->last_instruction_index());
        if (branch->HasPointerMap()) {
          branch->pointer_map()->RecordPointer(phi_operand);
        }
      }
    }

    // Connect the merged phi value with its spill slot at block entry.
    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START)->
        AddMove(phi_operand, live_range->GetSpillOperand());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}
1057
+
1058
+
1059
// Runs the complete register-allocation pipeline on |chunk|:
// constraints -> phi lowering -> liveness -> linear-scan (GP, then double)
// -> GC pointer maps -> connecting/resolving moves. The phase order is
// load-bearing; each phase consumes the previous one's results.
void LAllocator::Allocate(LChunk* chunk) {
  ASSERT(chunk_ == NULL);  // Each allocator instance handles a single chunk.
  chunk_ = chunk;
  MeetRegisterConstraints();
  ResolvePhis();
  BuildLiveRanges();
  AllocateGeneralRegisters();
  AllocateDoubleRegisters();
  PopulatePointerMaps();
  if (has_osr_entry_) ProcessOsrEntry();
  ConnectRanges();
  ResolveControlFlow();
}
1072
+
1073
+
1074
+ void LAllocator::MeetRegisterConstraints() {
1075
+ HPhase phase("Register constraints", chunk_);
1076
+ first_artificial_register_ = next_virtual_register_;
1077
+ const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1078
+ for (int i = 0; i < blocks->length(); ++i) {
1079
+ HBasicBlock* block = blocks->at(i);
1080
+ MeetRegisterConstraints(block);
1081
+ }
1082
+ }
1083
+
1084
+
1085
+ void LAllocator::ResolvePhis() {
1086
+ HPhase phase("Resolve phis", chunk_);
1087
+
1088
+ // Process the blocks in reverse order.
1089
+ const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1090
+ for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1091
+ HBasicBlock* block = blocks->at(block_id);
1092
+ ResolvePhis(block);
1093
+ }
1094
+ }
1095
+
1096
+
1097
+ void LAllocator::ResolveControlFlow(LiveRange* range,
1098
+ HBasicBlock* block,
1099
+ HBasicBlock* pred) {
1100
+ LifetimePosition pred_end =
1101
+ LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1102
+ LifetimePosition cur_start =
1103
+ LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1104
+ LiveRange* pred_cover = NULL;
1105
+ LiveRange* cur_cover = NULL;
1106
+ LiveRange* cur_range = range;
1107
+ while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1108
+ if (cur_range->CanCover(cur_start)) {
1109
+ ASSERT(cur_cover == NULL);
1110
+ cur_cover = cur_range;
1111
+ }
1112
+ if (cur_range->CanCover(pred_end)) {
1113
+ ASSERT(pred_cover == NULL);
1114
+ pred_cover = cur_range;
1115
+ }
1116
+ cur_range = cur_range->next();
1117
+ }
1118
+
1119
+ if (cur_cover->IsSpilled()) return;
1120
+ ASSERT(pred_cover != NULL && cur_cover != NULL);
1121
+ if (pred_cover != cur_cover) {
1122
+ LOperand* pred_op = pred_cover->CreateAssignedOperand();
1123
+ LOperand* cur_op = cur_cover->CreateAssignedOperand();
1124
+ if (!pred_op->Equals(cur_op)) {
1125
+ LGap* gap = NULL;
1126
+ if (block->predecessors()->length() == 1) {
1127
+ gap = GapAt(block->first_instruction_index());
1128
+ } else {
1129
+ ASSERT(pred->end()->SecondSuccessor() == NULL);
1130
+ gap = GetLastGap(pred);
1131
+
1132
+ // We are going to insert a move before the branch instruction.
1133
+ // Some branch instructions (e.g. loops' back edges)
1134
+ // can potentially cause a GC so they have a pointer map.
1135
+ // By inserting a move we essentially create a copy of a
1136
+ // value which is invisible to PopulatePointerMaps(), because we store
1137
+ // it into a location different from the operand of a live range
1138
+ // covering a branch instruction.
1139
+ // Thus we need to manually record a pointer.
1140
+ if (HasTaggedValue(range->id())) {
1141
+ LInstruction* branch = InstructionAt(pred->last_instruction_index());
1142
+ if (branch->HasPointerMap()) {
1143
+ branch->pointer_map()->RecordPointer(cur_op);
1144
+ }
1145
+ }
1146
+ }
1147
+ gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
1148
+ }
1149
+ }
1150
+ }
1151
+
1152
+
1153
+ LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1154
+ int index = pos.InstructionIndex();
1155
+ if (IsGapAt(index)) {
1156
+ LGap* gap = GapAt(index);
1157
+ return gap->GetOrCreateParallelMove(
1158
+ pos.IsInstructionStart() ? LGap::START : LGap::END);
1159
+ }
1160
+ int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1161
+ return GapAt(gap_pos)->GetOrCreateParallelMove(
1162
+ (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
1163
+ }
1164
+
1165
+
1166
+ HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1167
+ LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1168
+ return gap->block();
1169
+ }
1170
+
1171
+
1172
+ void LAllocator::ConnectRanges() {
1173
+ HPhase phase("Connect ranges", this);
1174
+ for (int i = 0; i < live_ranges()->length(); ++i) {
1175
+ LiveRange* first_range = live_ranges()->at(i);
1176
+ if (first_range == NULL || first_range->parent() != NULL) continue;
1177
+
1178
+ LiveRange* second_range = first_range->next();
1179
+ while (second_range != NULL) {
1180
+ LifetimePosition pos = second_range->Start();
1181
+
1182
+ if (!second_range->IsSpilled()) {
1183
+ // Add gap move if the two live ranges touch and there is no block
1184
+ // boundary.
1185
+ if (first_range->End().Value() == pos.Value()) {
1186
+ bool should_insert = true;
1187
+ if (IsBlockBoundary(pos)) {
1188
+ should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1189
+ }
1190
+ if (should_insert) {
1191
+ LParallelMove* move = GetConnectingParallelMove(pos);
1192
+ LOperand* prev_operand = first_range->CreateAssignedOperand();
1193
+ LOperand* cur_operand = second_range->CreateAssignedOperand();
1194
+ move->AddMove(prev_operand, cur_operand);
1195
+ }
1196
+ }
1197
+ }
1198
+
1199
+ first_range = second_range;
1200
+ second_range = second_range->next();
1201
+ }
1202
+ }
1203
+ }
1204
+
1205
+
1206
+ bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1207
+ if (block->predecessors()->length() != 1) return false;
1208
+ return block->predecessors()->first()->block_id() == block->block_id() - 1;
1209
+ }
1210
+
1211
+
1212
+ void LAllocator::ResolveControlFlow() {
1213
+ HPhase phase("Resolve control flow", this);
1214
+ const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1215
+ for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1216
+ HBasicBlock* block = blocks->at(block_id);
1217
+ if (CanEagerlyResolveControlFlow(block)) continue;
1218
+ BitVector* live = live_in_sets_[block->block_id()];
1219
+ BitVector::Iterator iterator(live);
1220
+ while (!iterator.Done()) {
1221
+ int operand_index = iterator.Current();
1222
+ for (int i = 0; i < block->predecessors()->length(); ++i) {
1223
+ HBasicBlock* cur = block->predecessors()->at(i);
1224
+ LiveRange* cur_range = LiveRangeFor(operand_index);
1225
+ ResolveControlFlow(cur_range, block, cur);
1226
+ }
1227
+ iterator.Advance();
1228
+ }
1229
+ }
1230
+ }
1231
+
1232
+
1233
// Computes live ranges for all values by a backward dataflow walk over the
// blocks: live-out sets are over-approximated as whole-block intervals and
// then shortened as definitions are encountered; loop headers patch their
// body's blocks afterwards.
void LAllocator::BuildLiveRanges() {
  HPhase phase("Build live ranges", this);
  InitializeLivenessAnalysis();
  // Process the blocks in reverse order.
  const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
  for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
    HBasicBlock* block = blocks->at(block_id);
    BitVector* live = ComputeLiveOut(block);
    // Initially consider all live_out values live for the entire block. We
    // will shorten these intervals if necessary.
    AddInitialIntervals(block, live);

    // Process the instructions in reverse order, generating and killing
    // live values.
    ProcessInstructions(block, live);
    // All phi output operands are killed by this block.
    const ZoneList<HPhi*>* phis = block->phis();
    for (int i = 0; i < phis->length(); ++i) {
      // The live range interval already ends at the first instruction of the
      // block.
      HPhi* phi = phis->at(i);
      live->Remove(phi->id());

      // Recover the phi's operand and hint from the resolving move that
      // ResolvePhis() inserted into the first predecessor's last gap.
      LOperand* hint = NULL;
      LOperand* phi_operand = NULL;
      LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
      LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
      for (int j = 0; j < move->move_operands()->length(); ++j) {
        LOperand* to = move->move_operands()->at(j).destination();
        if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
          hint = move->move_operands()->at(j).source();
          phi_operand = to;
          break;
        }
      }
      ASSERT(hint != NULL);

      LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      Define(block_start, phi_operand, hint);
    }

    // Now live is live_in for this block except not including values live
    // out on backward successor edges.
    live_in_sets_[block_id] = live;

    // If this block is a loop header go back and patch up the necessary
    // predecessor blocks.
    if (block->IsLoopHeader()) {
      // TODO(kmillikin): Need to be able to get the last block of the loop
      // in the loop information. Add a live range stretching from the first
      // loop instruction to the last for each value live on entry to the
      // header.
      HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
      BitVector::Iterator iterator(live);
      LifetimePosition start = LifetimePosition::FromInstructionIndex(
          block->first_instruction_index());
      LifetimePosition end = LifetimePosition::FromInstructionIndex(
          back_edge->last_instruction_index()).NextInstruction();
      while (!iterator.Done()) {
        int operand_index = iterator.Current();
        LiveRange* range = LiveRangeFor(operand_index);
        range->EnsureInterval(start, end);
        iterator.Advance();
      }

      // Everything live on loop entry is live throughout the loop body.
      for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
        live_in_sets_[i]->Union(*live);
      }
    }

#ifdef DEBUG
    // In the entry block nothing may be live-in: a non-empty set means a
    // value is used before its first definition.
    if (block_id == 0) {
      BitVector::Iterator iterator(live);
      bool found = false;
      while (!iterator.Done()) {
        found = true;
        int operand_index = iterator.Current();
        PrintF("Function: %s\n",
               *chunk_->info()->function()->debug_name()->ToCString());
        PrintF("Value %d used before first definition!\n", operand_index);
        LiveRange* range = LiveRangeFor(operand_index);
        PrintF("First use is at %d\n", range->first_pos()->pos().Value());
        iterator.Advance();
      }
      ASSERT(!found);
    }
#endif
  }
}
1323
+
1324
+
1325
+ bool LAllocator::SafePointsAreInOrder() const {
1326
+ const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1327
+ int safe_point = 0;
1328
+ for (int i = 0; i < pointer_maps->length(); ++i) {
1329
+ LPointerMap* map = pointer_maps->at(i);
1330
+ if (safe_point > map->lithium_position()) return false;
1331
+ safe_point = map->lithium_position();
1332
+ }
1333
+ return true;
1334
+ }
1335
+
1336
+
1337
// Records, in every safe point's pointer map, each tagged value that is live
// at that point — both its spill slot (once spilled) and, when it sits in a
// register, the register operand — so the GC can find and update it.
void LAllocator::PopulatePointerMaps() {
  HPhase phase("Populate pointer maps", this);
  const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();

  ASSERT(SafePointsAreInOrder());

  // Iterate over all safe point positions and record a pointer
  // for all spilled live ranges at this point.
  int first_safe_point_index = 0;
  int last_range_start = 0;
  for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
    LiveRange* range = live_ranges()->at(range_idx);
    if (range == NULL) continue;
    // Iterate over the first parts of multi-part live ranges.
    if (range->parent() != NULL) continue;
    // Skip non-pointer values.
    if (!HasTaggedValue(range->id())) continue;
    // Skip empty live ranges.
    if (range->IsEmpty()) continue;

    // Find the extent of the range and its children.
    int start = range->Start().InstructionIndex();
    int end = 0;
    for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
      LifetimePosition this_end = cur->End();
      if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
      ASSERT(cur->Start().InstructionIndex() >= start);
    }

    // Most of the ranges are in order, but not all.  Keep an eye on when
    // they step backwards and reset the first_safe_point_index so we don't
    // miss any safe points.
    if (start < last_range_start) {
      first_safe_point_index = 0;
    }
    last_range_start = start;

    // Step across all the safe points that are before the start of this range,
    // recording how far we step in order to save doing this for the next range.
    while (first_safe_point_index < pointer_maps->length()) {
      LPointerMap* map = pointer_maps->at(first_safe_point_index);
      int safe_point = map->lithium_position();
      if (safe_point >= start) break;
      first_safe_point_index++;
    }

    // Step through the safe points to see whether they are in the range.
    for (int safe_point_index = first_safe_point_index;
         safe_point_index < pointer_maps->length();
         ++safe_point_index) {
      LPointerMap* map = pointer_maps->at(safe_point_index);
      int safe_point = map->lithium_position();

      // The safe points are sorted so we can stop searching here.
      if (safe_point - 1 > end) break;

      // Advance to the next active range that covers the current
      // safe point position.
      LifetimePosition safe_point_pos =
          LifetimePosition::FromInstructionIndex(safe_point);
      LiveRange* cur = range;
      while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
        cur = cur->next();
      }
      if (cur == NULL) continue;

      // Check if the live range is spilled and the safe point is after
      // the spill position.
      if (range->HasAllocatedSpillOperand() &&
          safe_point >= range->spill_start_index()) {
        TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
                   range->id(), range->spill_start_index(), safe_point);
        map->RecordPointer(range->GetSpillOperand());
      }

      if (!cur->IsSpilled()) {
        TraceAlloc("Pointer in register for range %d (start at %d) "
                   "at safe point %d\n",
                   cur->id(), cur->Start().Value(), safe_point);
        LOperand* operand = cur->CreateAssignedOperand();
        ASSERT(!operand->IsStackSlot());
        map->RecordPointer(operand);
      }
    }
  }
}
1423
+
1424
+
1425
+ void LAllocator::ProcessOsrEntry() {
1426
+ const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1427
+
1428
+ // Linear search for the OSR entry instruction in the chunk.
1429
+ int index = -1;
1430
+ while (++index < instrs->length() &&
1431
+ !instrs->at(index)->IsOsrEntry()) {
1432
+ }
1433
+ ASSERT(index < instrs->length());
1434
+ LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1435
+
1436
+ LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1437
+ for (int i = 0; i < live_ranges()->length(); ++i) {
1438
+ LiveRange* range = live_ranges()->at(i);
1439
+ if (range != NULL) {
1440
+ if (range->Covers(position) &&
1441
+ range->HasRegisterAssigned() &&
1442
+ range->TopLevel()->HasAllocatedSpillOperand()) {
1443
+ int reg_index = range->assigned_register();
1444
+ LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1445
+ if (range->IsDouble()) {
1446
+ instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1447
+ } else {
1448
+ instruction->MarkSpilledRegister(reg_index, spill_operand);
1449
+ }
1450
+ }
1451
+ }
1452
+ }
1453
+ }
1454
+
1455
+
1456
+ void LAllocator::AllocateGeneralRegisters() {
1457
+ HPhase phase("Allocate general registers", this);
1458
+ num_registers_ = Register::kNumAllocatableRegisters;
1459
+ mode_ = GENERAL_REGISTERS;
1460
+ AllocateRegisters();
1461
+ }
1462
+
1463
+
1464
+ void LAllocator::AllocateDoubleRegisters() {
1465
+ HPhase phase("Allocate double registers", this);
1466
+ num_registers_ = DoubleRegister::kNumAllocatableRegisters;
1467
+ mode_ = DOUBLE_REGISTERS;
1468
+ AllocateRegisters();
1469
+ }
1470
+
1471
+
1472
// Core linear-scan loop for the register kind selected in mode_. Unhandled
// ranges are processed in start order; at each step active/inactive sets are
// updated relative to the current position before a register is assigned.
void LAllocator::AllocateRegisters() {
  ASSERT(mode_ != NONE);
  ASSERT(unhandled_live_ranges_.is_empty());

  // Collect all live ranges of the current register kind.
  for (int i = 0; i < live_ranges_.length(); ++i) {
    if (live_ranges_[i] != NULL) {
      if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
        AddToUnhandledUnsorted(live_ranges_[i]);
      }
    }
  }
  SortUnhandled();
  ASSERT(UnhandledIsSorted());

  ASSERT(reusable_slots_.is_empty());
  ASSERT(active_live_ranges_.is_empty());
  ASSERT(inactive_live_ranges_.is_empty());

  // Fixed (machine-register) ranges start out inactive.
  if (mode_ == DOUBLE_REGISTERS) {
    for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_double_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  } else {
    for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
      LiveRange* current = fixed_live_ranges_.at(i);
      if (current != NULL) {
        AddToInactive(current);
      }
    }
  }

  while (!unhandled_live_ranges_.is_empty()) {
    ASSERT(UnhandledIsSorted());
    LiveRange* current = unhandled_live_ranges_.RemoveLast();
    ASSERT(UnhandledIsSorted());
    LifetimePosition position = current->Start();
    TraceAlloc("Processing interval %d start=%d\n",
               current->id(),
               position.Value());

    if (current->HasAllocatedSpillOperand()) {
      TraceAlloc("Live range %d already has a spill operand\n", current->id());
      LifetimePosition next_pos = position;
      if (IsGapAt(next_pos.InstructionIndex())) {
        next_pos = next_pos.NextInstruction();
      }
      UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
      // If the range already has a spill operand and it doesn't need a
      // register immediately, split it and spill the first part of the range.
      if (pos == NULL) {
        Spill(current);
        continue;
      } else if (pos->pos().Value() >
                 current->Start().NextInstruction().Value()) {
        // Do not spill live range eagerly if use position that can benefit from
        // the register is too close to the start of live range.
        SpillBetween(current, current->Start(), pos->pos());
        ASSERT(UnhandledIsSorted());
        continue;
      }
    }

    // Retire or deactivate active ranges relative to |position|.
    for (int i = 0; i < active_live_ranges_.length(); ++i) {
      LiveRange* cur_active = active_live_ranges_.at(i);
      if (cur_active->End().Value() <= position.Value()) {
        ActiveToHandled(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      } else if (!cur_active->Covers(position)) {
        ActiveToInactive(cur_active);
        --i;  // The live range was removed from the list of active live ranges.
      }
    }

    // Retire or reactivate inactive ranges relative to |position|.
    for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
      LiveRange* cur_inactive = inactive_live_ranges_.at(i);
      if (cur_inactive->End().Value() <= position.Value()) {
        InactiveToHandled(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      } else if (cur_inactive->Covers(position)) {
        InactiveToActive(cur_inactive);
        --i;  // Live range was removed from the list of inactive live ranges.
      }
    }

    ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());

    // Prefer a free register; otherwise split/evict to make one available.
    bool result = TryAllocateFreeReg(current);
    if (!result) {
      AllocateBlockedReg(current);
    }

    if (current->HasRegisterAssigned()) {
      AddToActive(current);
    }
  }

  // Reset working sets for the next register kind.
  reusable_slots_.Rewind(0);
  active_live_ranges_.Rewind(0);
  inactive_live_ranges_.Rewind(0);
}
1575
+
1576
+
1577
+ const char* LAllocator::RegisterName(int allocation_index) {
1578
+ ASSERT(mode_ != NONE);
1579
+ if (mode_ == GENERAL_REGISTERS) {
1580
+ return Register::AllocationIndexToString(allocation_index);
1581
+ } else {
1582
+ return DoubleRegister::AllocationIndexToString(allocation_index);
1583
+ }
1584
+ }
1585
+
1586
+
1587
// Emits a printf-style trace message, but only when allocator tracing is
// enabled via the --trace_alloc flag; otherwise a cheap no-op.
void LAllocator::TraceAlloc(const char* msg, ...) {
  if (FLAG_trace_alloc) {
    va_list arguments;
    va_start(arguments, msg);
    OS::VPrint(msg, arguments);
    va_end(arguments);
  }
}
1595
+
1596
+
1597
+ bool LAllocator::HasTaggedValue(int virtual_register) const {
1598
+ HValue* value = graph_->LookupValue(virtual_register);
1599
+ if (value == NULL) return false;
1600
+ return value->representation().IsTagged();
1601
+ }
1602
+
1603
+
1604
+ RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
1605
+ if (virtual_register < first_artificial_register_) {
1606
+ HValue* value = graph_->LookupValue(virtual_register);
1607
+ if (value != NULL && value->representation().IsDouble()) {
1608
+ return DOUBLE_REGISTERS;
1609
+ }
1610
+ } else if (double_artificial_registers_.Contains(
1611
+ virtual_register - first_artificial_register_)) {
1612
+ return DOUBLE_REGISTERS;
1613
+ }
1614
+
1615
+ return GENERAL_REGISTERS;
1616
+ }
1617
+
1618
+
1619
// Ties the defining operand of |instr| to the instruction's id, so the
// operand and the HInstruction share one virtual register.
void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
  operand->set_virtual_register(instr->id());
}
1622
+
1623
+
1624
// Assigns a fresh virtual register to a temporary operand. Operands with a
// fixed policy are already pinned to a specific register and get none.
void LAllocator::RecordTemporary(LUnallocated* operand) {
  ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
  if (!operand->HasFixedPolicy()) {
    operand->set_virtual_register(next_virtual_register_++);
  }
}
1630
+
1631
+
1632
// Ties a use operand to the virtual register of the HValue it consumes.
void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
  operand->set_virtual_register(value->id());
}
1635
+
1636
+
1637
// Upper bound on initial HValue ids, expressed as a fraction of the total
// virtual-register space. NOTE(review): the divisor 32 presumably reserves
// headroom for registers created during allocation -- confirm its origin.
int LAllocator::max_initial_value_ids() {
  return LUnallocated::kMaxVirtualRegisters / 32;
}
1640
+
1641
+
1642
// Appends |range| to the set of ranges currently holding a register.
void LAllocator::AddToActive(LiveRange* range) {
  TraceAlloc("Add live range %d to active\n", range->id());
  active_live_ranges_.Add(range);
}
1646
+
1647
+
1648
// Appends |range| to the inactive set (register assigned, but the range
// does not cover the current allocation position).
void LAllocator::AddToInactive(LiveRange* range) {
  TraceAlloc("Add live range %d to inactive\n", range->id());
  inactive_live_ranges_.Add(range);
}
1652
+
1653
+
1654
+ void LAllocator::AddToUnhandledSorted(LiveRange* range) {
1655
+ if (range == NULL || range->IsEmpty()) return;
1656
+ ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
1657
+ for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
1658
+ LiveRange* cur_range = unhandled_live_ranges_.at(i);
1659
+ if (range->ShouldBeAllocatedBefore(cur_range)) {
1660
+ TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
1661
+ unhandled_live_ranges_.InsertAt(i + 1, range);
1662
+ ASSERT(UnhandledIsSorted());
1663
+ return;
1664
+ }
1665
+ }
1666
+ TraceAlloc("Add live range %d to unhandled at start\n", range->id());
1667
+ unhandled_live_ranges_.InsertAt(0, range);
1668
+ ASSERT(UnhandledIsSorted());
1669
+ }
1670
+
1671
+
1672
// Appends |range| to the unhandled list without maintaining sort order;
// presumably paired with a later SortUnhandled() call -- see below.
void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
  if (range == NULL || range->IsEmpty()) return;
  ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
  TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
  unhandled_live_ranges_.Add(range);
}
1678
+
1679
+
1680
+ static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
1681
+ ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
1682
+ !(*b)->ShouldBeAllocatedBefore(*a));
1683
+ if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
1684
+ if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
1685
+ return (*a)->id() - (*b)->id();
1686
+ }
1687
+
1688
+
1689
// Sort the unhandled live ranges so that the ranges to be processed first are
// at the end of the array list. This is convenient for the register allocation
// algorithm because it is efficient to remove elements from the end.
// The ordering itself is defined by UnhandledSortHelper above.
void LAllocator::SortUnhandled() {
  TraceAlloc("Sort unhandled\n");
  unhandled_live_ranges_.Sort(&UnhandledSortHelper);
}
1696
+
1697
+
1698
+ bool LAllocator::UnhandledIsSorted() {
1699
+ int len = unhandled_live_ranges_.length();
1700
+ for (int i = 1; i < len; i++) {
1701
+ LiveRange* a = unhandled_live_ranges_.at(i - 1);
1702
+ LiveRange* b = unhandled_live_ranges_.at(i);
1703
+ if (a->Start().Value() < b->Start().Value()) return false;
1704
+ }
1705
+ return true;
1706
+ }
1707
+
1708
+
1709
+ void LAllocator::FreeSpillSlot(LiveRange* range) {
1710
+ // Check that we are the last range.
1711
+ if (range->next() != NULL) return;
1712
+
1713
+ if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
1714
+
1715
+ int index = range->TopLevel()->GetSpillOperand()->index();
1716
+ if (index >= 0) {
1717
+ reusable_slots_.Add(range);
1718
+ }
1719
+ }
1720
+
1721
+
1722
+ LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
1723
+ if (reusable_slots_.is_empty()) return NULL;
1724
+ if (reusable_slots_.first()->End().Value() >
1725
+ range->TopLevel()->Start().Value()) {
1726
+ return NULL;
1727
+ }
1728
+ LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
1729
+ reusable_slots_.Remove(0);
1730
+ return result;
1731
+ }
1732
+
1733
+
1734
// Retires |range| from the active set; its spill slot (if any) may become
// reusable via FreeSpillSlot.
void LAllocator::ActiveToHandled(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from active to handled\n", range->id());
  FreeSpillSlot(range);
}
1740
+
1741
+
1742
// Demotes |range| from active to inactive: it keeps its register but no
// longer covers the current allocation position (see the caller's
// Covers() check).
void LAllocator::ActiveToInactive(LiveRange* range) {
  ASSERT(active_live_ranges_.Contains(range));
  active_live_ranges_.RemoveElement(range);
  inactive_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from active to inactive\n", range->id());
}
1748
+
1749
+
1750
// Retires |range| from the inactive set; its spill slot (if any) may
// become reusable via FreeSpillSlot.
void LAllocator::InactiveToHandled(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
  FreeSpillSlot(range);
}
1756
+
1757
+
1758
// Promotes |range| from inactive back to active (the caller has observed
// that it covers the current allocation position again).
void LAllocator::InactiveToActive(LiveRange* range) {
  ASSERT(inactive_live_ranges_.Contains(range));
  inactive_live_ranges_.RemoveElement(range);
  active_live_ranges_.Add(range);
  TraceAlloc("Moving live range %d from inactive to active\n", range->id());
}
1764
+
1765
+
1766
// TryAllocateFreeReg and AllocateBlockedReg assume this
// when allocating local arrays: their per-register position arrays are
// sized by DoubleRegister::kNumAllocatableRegisters, which must therefore
// be large enough for the general-register set too.
STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
              Register::kNumAllocatableRegisters);
1770
+
1771
+
1772
// Tries to give |current| a register without evicting any other range.
// For every allocatable register we compute how long it stays free; if
// some register is free past the start of |current| we take it, splitting
// |current| where the register becomes blocked if necessary. Returns
// false when every register is blocked at the range start.
bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
  // Sized for the larger (double) register set; see the STATIC_ASSERT
  // above this function.
  LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    free_until_pos[i] = LifetimePosition::MaxPosition();
  }

  // Registers held by active ranges are not free at all.
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* cur_active = active_live_ranges_.at(i);
    free_until_pos[cur_active->assigned_register()] =
        LifetimePosition::FromInstructionIndex(0);
  }

  // Registers held by inactive ranges are free only until the first point
  // where the inactive range intersects |current|.
  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* cur_inactive = inactive_live_ranges_.at(i);
    ASSERT(cur_inactive->End().Value() > current->Start().Value());
    LifetimePosition next_intersection =
        cur_inactive->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = cur_inactive->assigned_register();
    free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
  }

  // Prefer a hinted register if it stays free for the whole of |current|.
  UsePosition* hinted_use = current->FirstPosWithHint();
  if (hinted_use != NULL) {
    LOperand* hint = hinted_use->hint();
    if (hint->IsRegister() || hint->IsDoubleRegister()) {
      int register_index = hint->index();
      TraceAlloc(
          "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
          RegisterName(register_index),
          free_until_pos[register_index].Value(),
          current->id(),
          current->End().Value());

      // The desired register is free until the end of the current live range.
      if (free_until_pos[register_index].Value() >= current->End().Value()) {
        TraceAlloc("Assigning preferred reg %s to live range %d\n",
                   RegisterName(register_index),
                   current->id());
        current->set_assigned_register(register_index, mode_);
        return true;
      }
    }
  }

  // Find the register which stays free for the longest time.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = free_until_pos[reg];

  if (pos.Value() <= current->Start().Value()) {
    // All registers are blocked.
    return false;
  }

  if (pos.Value() < current->End().Value()) {
    // Register reg is available at the range start but becomes blocked before
    // the range end. Split current at position where it becomes blocked.
    LiveRange* tail = SplitAt(current, pos);
    AddToUnhandledSorted(tail);
  }

  // Register reg is available at the range start and is free until
  // the range end (after the split above, |current| ends no later than
  // |pos|).
  ASSERT(pos.Value() >= current->End().Value());
  TraceAlloc("Assigning free reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  return true;
}
1851
+
1852
+
1853
// Assigns a register to |current| when no register is free, by picking the
// register whose competing uses are furthest away and spilling/splitting
// the ranges that currently hold it. use_pos[r] tracks the next position
// at which register r is wanted by its holder; block_pos[r] tracks where
// r becomes unconditionally unavailable (fixed or unspillable ranges).
void LAllocator::AllocateBlockedReg(LiveRange* current) {
  UsePosition* register_use = current->NextRegisterPosition(current->Start());
  if (register_use == NULL) {
    // There is no use in the current live range that requires a register.
    // We can just spill it.
    Spill(current);
    return;
  }

  // Sized for the larger (double) register set; see the STATIC_ASSERT
  // above TryAllocateFreeReg.
  LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
  LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];

  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
  }

  // Active ranges: fixed or unspillable holders block their register
  // outright; others "want" it back at their next beneficial use.
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    int cur_reg = range->assigned_register();
    if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
      block_pos[cur_reg] = use_pos[cur_reg] =
          LifetimePosition::FromInstructionIndex(0);
    } else {
      UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
          current->Start());
      if (next_use == NULL) {
        use_pos[cur_reg] = range->End();
      } else {
        use_pos[cur_reg] = next_use->pos();
      }
    }
  }

  // Inactive ranges constrain a register only from the first point where
  // they intersect |current|.
  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_.at(i);
    ASSERT(range->End().Value() > current->Start().Value());
    LifetimePosition next_intersection = range->FirstIntersection(current);
    if (!next_intersection.IsValid()) continue;
    int cur_reg = range->assigned_register();
    if (range->IsFixed()) {
      block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
      use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
    } else {
      use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
    }
  }

  // Pick the register wanted back latest by its current holders.
  int reg = 0;
  for (int i = 1; i < RegisterCount(); ++i) {
    if (use_pos[i].Value() > use_pos[reg].Value()) {
      reg = i;
    }
  }

  LifetimePosition pos = use_pos[reg];

  if (pos.Value() < register_use->pos().Value()) {
    // All registers are blocked before the first use that requires a register.
    // Spill starting part of live range up to that use.
    //
    // Corner case: the first use position is equal to the start of the range.
    // In this case we have nothing to spill and SpillBetween will just return
    // this range to the list of unhandled ones. This will lead to the infinite
    // loop.
    ASSERT(current->Start().Value() < register_use->pos().Value());
    SpillBetween(current, current->Start(), register_use->pos());
    return;
  }

  if (block_pos[reg].Value() < current->End().Value()) {
    // Register becomes blocked before the current range end. Split before that
    // position.
    LiveRange* tail = SplitBetween(current,
                                   current->Start(),
                                   block_pos[reg].InstructionStart());
    AddToUnhandledSorted(tail);
  }

  // Register reg is not blocked for the whole range.
  ASSERT(block_pos[reg].Value() >= current->End().Value());
  TraceAlloc("Assigning blocked reg %s to live range %d\n",
             RegisterName(reg),
             current->id());
  current->set_assigned_register(reg, mode_);

  // This register was not free. Thus we need to find and spill
  // parts of active and inactive live regions that use the same register
  // at the same lifetime positions as current.
  SplitAndSpillIntersecting(current);
}
1944
+
1945
+
1946
// Evicts the other holders of |current|'s register: every active or
// (non-fixed, intersecting) inactive range assigned the same register is
// split/spilled around |current|'s start and retired to the handled set.
void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
  ASSERT(current->HasRegisterAssigned());
  int reg = current->assigned_register();
  LifetimePosition split_pos = current->Start();
  for (int i = 0; i < active_live_ranges_.length(); ++i) {
    LiveRange* range = active_live_ranges_[i];
    if (range->assigned_register() == reg) {
      UsePosition* next_pos = range->NextRegisterPosition(current->Start());
      if (next_pos == NULL) {
        // No further use needs a register: spill everything after the split.
        SpillAfter(range, split_pos);
      } else {
        // Spill only up to the next use that requires a register.
        SpillBetween(range, split_pos, next_pos->pos());
      }
      ActiveToHandled(range);
      --i;  // Compensate for the removal from active_live_ranges_.
    }
  }

  for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
    LiveRange* range = inactive_live_ranges_[i];
    ASSERT(range->End().Value() > current->Start().Value());
    if (range->assigned_register() == reg && !range->IsFixed()) {
      LifetimePosition next_intersection = range->FirstIntersection(current);
      if (next_intersection.IsValid()) {
        UsePosition* next_pos = range->NextRegisterPosition(current->Start());
        if (next_pos == NULL) {
          SpillAfter(range, split_pos);
        } else {
          next_intersection = Min(next_intersection, next_pos->pos());
          SpillBetween(range, split_pos, next_intersection);
        }
        InactiveToHandled(range);
        --i;  // Compensate for the removal from inactive_live_ranges_.
      }
    }
  }
}
1983
+
1984
+
1985
+ bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
1986
+ return pos.IsInstructionStart() &&
1987
+ InstructionAt(pos.InstructionIndex())->IsLabel();
1988
+ }
1989
+
1990
+
1991
// Splits |range| at |pos| and returns the tail as a new live range under a
// freshly allocated virtual register. If |pos| is not strictly after the
// range's start, nothing is split and |range| itself is returned.
LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
  ASSERT(!range->IsFixed());
  TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());

  if (pos.Value() <= range->Start().Value()) return range;

  // We can't properly connect liveranges if split occured at the end
  // of control instruction.
  ASSERT(pos.IsInstructionStart() ||
         !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());

  LiveRange* result = LiveRangeFor(next_virtual_register_++);
  range->SplitAt(pos, result);
  return result;
}
2006
+
2007
+
2008
+ LiveRange* LAllocator::SplitBetween(LiveRange* range,
2009
+ LifetimePosition start,
2010
+ LifetimePosition end) {
2011
+ ASSERT(!range->IsFixed());
2012
+ TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2013
+ range->id(),
2014
+ start.Value(),
2015
+ end.Value());
2016
+
2017
+ LifetimePosition split_pos = FindOptimalSplitPos(start, end);
2018
+ ASSERT(split_pos.Value() >= start.Value());
2019
+ return SplitAt(range, split_pos);
2020
+ }
2021
+
2022
+
2023
// Chooses a split position within [start, end]. The latest position is
// preferred, but when the endpoints lie in different basic blocks the
// split is hoisted to the first instruction of the outermost loop header
// between them, so the spill code stays out of loop bodies.
LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
                                                 LifetimePosition end) {
  int start_instr = start.InstructionIndex();
  int end_instr = end.InstructionIndex();
  ASSERT(start_instr <= end_instr);

  // We have no choice
  if (start_instr == end_instr) return end;

  HBasicBlock* start_block = GetBlock(start);
  HBasicBlock* end_block = GetBlock(end);

  if (end_block == start_block) {
    // The interval is split in the same basic block. Split at the latest
    // possible position.
    return end;
  }

  HBasicBlock* block = end_block;
  // Find header of outermost loop.
  while (block->parent_loop_header() != NULL &&
         block->parent_loop_header()->block_id() > start_block->block_id()) {
    block = block->parent_loop_header();
  }

  // We did not find any suitable outer loop. Split at the latest possible
  // position unless end_block is a loop header itself.
  if (block == end_block && !end_block->IsLoopHeader()) return end;

  return LifetimePosition::FromInstructionIndex(
      block->first_instruction_index());
}
2055
+
2056
+
2057
// Splits |range| at |pos| and spills the tail part.
void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
  LiveRange* second_part = SplitAt(range, pos);
  Spill(second_part);
}
2061
+
2062
+
2063
// Spills the portion of |range| that overlaps [start, end[. The range is
// split at |start|; if the tail still begins before |end| it is split
// once more so only the middle piece is spilled, and whatever remains is
// returned to the unhandled list.
void LAllocator::SpillBetween(LiveRange* range,
                              LifetimePosition start,
                              LifetimePosition end) {
  ASSERT(start.Value() < end.Value());
  LiveRange* second_part = SplitAt(range, start);

  if (second_part->Start().Value() < end.Value()) {
    // The split result intersects with [start, end[.
    // Split it at position between ]start+1, end[, spill the middle part
    // and put the rest to unhandled.
    LiveRange* third_part = SplitBetween(
        second_part,
        second_part->Start().InstructionEnd(),
        end.PrevInstruction().InstructionEnd());

    ASSERT(third_part != second_part);

    Spill(second_part);
    AddToUnhandledSorted(third_part);
  } else {
    // The split result does not intersect with [start, end[.
    // Nothing to spill. Just put it to unhandled as whole.
    AddToUnhandledSorted(second_part);
  }
}
2088
+
2089
+
2090
// Marks |range| as spilled, allocating a stack slot on its top-level range
// first if it does not have one yet (reusing a freed slot when possible).
void LAllocator::Spill(LiveRange* range) {
  ASSERT(!range->IsSpilled());
  TraceAlloc("Spilling live range %d\n", range->id());
  LiveRange* first = range->TopLevel();

  if (!first->HasAllocatedSpillOperand()) {
    LOperand* op = TryReuseSpillSlot(range);
    // Double ranges get a double-width slot.
    if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
    first->SetSpillOperand(op);
  }
  range->MakeSpilled();
}
2102
+
2103
+
2104
// Number of registers available to the allocator in its current mode
// (used as the loop bound when scanning per-register position arrays).
int LAllocator::RegisterCount() const {
  return num_registers_;
}
2107
+
2108
+
2109
#ifdef DEBUG


// Debug-only consistency check: verifies every live range the allocator
// has built (NULL entries are skipped).
void LAllocator::Verify() const {
  for (int i = 0; i < live_ranges()->length(); ++i) {
    LiveRange* current = live_ranges()->at(i);
    if (current != NULL) current->Verify();
  }
}


#endif
2121
+
2122
+
2123
+ } } // namespace v8::internal