therubyracer 0.9.0beta4 → 0.9.0beta5

This version of therubyracer is flagged as a potentially problematic release.

Files changed (771)
  1. data/.gitignore +1 -0
  2. data/Rakefile +0 -11
  3. data/ext/v8/extconf.rb +9 -9
  4. data/ext/v8/v8_external.cpp +40 -10
  5. data/lib/v8/cli.rb +2 -1
  6. data/lib/v8/version.rb +1 -1
  7. data/spec/redjs/.gitignore +1 -0
  8. data/therubyracer.gemspec +2 -3
  9. metadata +33 -779
  10. data/ext/v8/upstream/Makefile +0 -38
  11. data/ext/v8/upstream/detect_cpu.rb +0 -27
  12. data/ext/v8/upstream/fpic-on-linux-amd64.patch +0 -13
  13. data/ext/v8/upstream/scons/CHANGES.txt +0 -5183
  14. data/ext/v8/upstream/scons/LICENSE.txt +0 -20
  15. data/ext/v8/upstream/scons/MANIFEST +0 -202
  16. data/ext/v8/upstream/scons/PKG-INFO +0 -13
  17. data/ext/v8/upstream/scons/README.txt +0 -273
  18. data/ext/v8/upstream/scons/RELEASE.txt +0 -1040
  19. data/ext/v8/upstream/scons/engine/SCons/Action.py +0 -1256
  20. data/ext/v8/upstream/scons/engine/SCons/Builder.py +0 -868
  21. data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +0 -217
  22. data/ext/v8/upstream/scons/engine/SCons/Conftest.py +0 -794
  23. data/ext/v8/upstream/scons/engine/SCons/Debug.py +0 -237
  24. data/ext/v8/upstream/scons/engine/SCons/Defaults.py +0 -485
  25. data/ext/v8/upstream/scons/engine/SCons/Environment.py +0 -2327
  26. data/ext/v8/upstream/scons/engine/SCons/Errors.py +0 -207
  27. data/ext/v8/upstream/scons/engine/SCons/Executor.py +0 -636
  28. data/ext/v8/upstream/scons/engine/SCons/Job.py +0 -435
  29. data/ext/v8/upstream/scons/engine/SCons/Memoize.py +0 -292
  30. data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +0 -153
  31. data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +0 -3220
  32. data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +0 -128
  33. data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +0 -1341
  34. data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +0 -50
  35. data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +0 -50
  36. data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +0 -50
  37. data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +0 -50
  38. data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +0 -76
  39. data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +0 -74
  40. data/ext/v8/upstream/scons/engine/SCons/PathList.py +0 -232
  41. data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +0 -236
  42. data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +0 -70
  43. data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +0 -55
  44. data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +0 -46
  45. data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +0 -46
  46. data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +0 -44
  47. data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +0 -58
  48. data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +0 -264
  49. data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +0 -50
  50. data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +0 -386
  51. data/ext/v8/upstream/scons/engine/SCons/SConf.py +0 -1038
  52. data/ext/v8/upstream/scons/engine/SCons/SConsign.py +0 -381
  53. data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +0 -132
  54. data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +0 -74
  55. data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +0 -111
  56. data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +0 -320
  57. data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +0 -48
  58. data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +0 -378
  59. data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +0 -103
  60. data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +0 -55
  61. data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +0 -415
  62. data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +0 -386
  63. data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +0 -1360
  64. data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +0 -944
  65. data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +0 -642
  66. data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +0 -414
  67. data/ext/v8/upstream/scons/engine/SCons/Sig.py +0 -63
  68. data/ext/v8/upstream/scons/engine/SCons/Subst.py +0 -911
  69. data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +0 -1030
  70. data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +0 -61
  71. data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +0 -65
  72. data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +0 -73
  73. data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +0 -247
  74. data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +0 -324
  75. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +0 -56
  76. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +0 -61
  77. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +0 -210
  78. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +0 -84
  79. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +0 -321
  80. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +0 -367
  81. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +0 -497
  82. data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +0 -104
  83. data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +0 -138
  84. data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +0 -64
  85. data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +0 -64
  86. data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +0 -71
  87. data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +0 -675
  88. data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +0 -82
  89. data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +0 -74
  90. data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +0 -80
  91. data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +0 -76
  92. data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +0 -71
  93. data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +0 -63
  94. data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +0 -78
  95. data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +0 -82
  96. data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +0 -99
  97. data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +0 -114
  98. data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +0 -58
  99. data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +0 -50
  100. data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +0 -224
  101. data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +0 -64
  102. data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +0 -125
  103. data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +0 -94
  104. data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +0 -62
  105. data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +0 -62
  106. data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +0 -63
  107. data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +0 -98
  108. data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +0 -63
  109. data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +0 -90
  110. data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +0 -73
  111. data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +0 -53
  112. data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +0 -80
  113. data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +0 -64
  114. data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +0 -63
  115. data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +0 -81
  116. data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +0 -85
  117. data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +0 -53
  118. data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +0 -77
  119. data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +0 -59
  120. data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +0 -52
  121. data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +0 -72
  122. data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +0 -90
  123. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +0 -59
  124. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +0 -60
  125. data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +0 -229
  126. data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +0 -490
  127. data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +0 -71
  128. data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +0 -110
  129. data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +0 -234
  130. data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +0 -138
  131. data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +0 -79
  132. data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +0 -99
  133. data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +0 -121
  134. data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +0 -112
  135. data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +0 -63
  136. data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +0 -77
  137. data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +0 -90
  138. data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +0 -159
  139. data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +0 -64
  140. data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +0 -266
  141. data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +0 -50
  142. data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +0 -269
  143. data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +0 -1439
  144. data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +0 -208
  145. data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +0 -107
  146. data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +0 -72
  147. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +0 -314
  148. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +0 -185
  149. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +0 -526
  150. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +0 -367
  151. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +0 -43
  152. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +0 -43
  153. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +0 -43
  154. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +0 -44
  155. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +0 -44
  156. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +0 -44
  157. data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +0 -78
  158. data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +0 -83
  159. data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +0 -108
  160. data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +0 -336
  161. data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +0 -121
  162. data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +0 -70
  163. data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +0 -132
  164. data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +0 -68
  165. data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +0 -58
  166. data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +0 -53
  167. data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +0 -63
  168. data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +0 -67
  169. data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +0 -142
  170. data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +0 -58
  171. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +0 -63
  172. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +0 -64
  173. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +0 -64
  174. data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +0 -77
  175. data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +0 -186
  176. data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +0 -73
  177. data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +0 -805
  178. data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +0 -175
  179. data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +0 -53
  180. data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +0 -100
  181. data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +0 -131
  182. data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +0 -100
  183. data/ext/v8/upstream/scons/engine/SCons/Util.py +0 -1645
  184. data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +0 -91
  185. data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +0 -107
  186. data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +0 -139
  187. data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +0 -109
  188. data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +0 -147
  189. data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +0 -317
  190. data/ext/v8/upstream/scons/engine/SCons/Warnings.py +0 -228
  191. data/ext/v8/upstream/scons/engine/SCons/__init__.py +0 -49
  192. data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +0 -302
  193. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +0 -98
  194. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +0 -91
  195. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +0 -124
  196. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +0 -1725
  197. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +0 -583
  198. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +0 -176
  199. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +0 -325
  200. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +0 -1296
  201. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +0 -382
  202. data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +0 -187
  203. data/ext/v8/upstream/scons/engine/SCons/cpp.py +0 -598
  204. data/ext/v8/upstream/scons/engine/SCons/dblite.py +0 -248
  205. data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +0 -77
  206. data/ext/v8/upstream/scons/os_spawnv_fix.diff +0 -83
  207. data/ext/v8/upstream/scons/scons-time.1 +0 -1017
  208. data/ext/v8/upstream/scons/scons.1 +0 -15179
  209. data/ext/v8/upstream/scons/sconsign.1 +0 -208
  210. data/ext/v8/upstream/scons/script/scons +0 -184
  211. data/ext/v8/upstream/scons/script/scons-time +0 -1529
  212. data/ext/v8/upstream/scons/script/scons.bat +0 -31
  213. data/ext/v8/upstream/scons/script/sconsign +0 -508
  214. data/ext/v8/upstream/scons/setup.cfg +0 -6
  215. data/ext/v8/upstream/scons/setup.py +0 -427
  216. data/ext/v8/upstream/v8/.gitignore +0 -33
  217. data/ext/v8/upstream/v8/AUTHORS +0 -42
  218. data/ext/v8/upstream/v8/ChangeLog +0 -2663
  219. data/ext/v8/upstream/v8/LICENSE +0 -52
  220. data/ext/v8/upstream/v8/LICENSE.strongtalk +0 -29
  221. data/ext/v8/upstream/v8/LICENSE.v8 +0 -26
  222. data/ext/v8/upstream/v8/LICENSE.valgrind +0 -45
  223. data/ext/v8/upstream/v8/SConstruct +0 -1473
  224. data/ext/v8/upstream/v8/build/README.txt +0 -25
  225. data/ext/v8/upstream/v8/build/all.gyp +0 -18
  226. data/ext/v8/upstream/v8/build/armu.gypi +0 -32
  227. data/ext/v8/upstream/v8/build/common.gypi +0 -82
  228. data/ext/v8/upstream/v8/build/gyp_v8 +0 -145
  229. data/ext/v8/upstream/v8/include/v8-debug.h +0 -394
  230. data/ext/v8/upstream/v8/include/v8-preparser.h +0 -116
  231. data/ext/v8/upstream/v8/include/v8-profiler.h +0 -505
  232. data/ext/v8/upstream/v8/include/v8-testing.h +0 -104
  233. data/ext/v8/upstream/v8/include/v8.h +0 -4000
  234. data/ext/v8/upstream/v8/include/v8stdint.h +0 -53
  235. data/ext/v8/upstream/v8/preparser/SConscript +0 -38
  236. data/ext/v8/upstream/v8/preparser/preparser-process.cc +0 -169
  237. data/ext/v8/upstream/v8/src/SConscript +0 -380
  238. data/ext/v8/upstream/v8/src/accessors.cc +0 -766
  239. data/ext/v8/upstream/v8/src/accessors.h +0 -121
  240. data/ext/v8/upstream/v8/src/allocation-inl.h +0 -49
  241. data/ext/v8/upstream/v8/src/allocation.cc +0 -122
  242. data/ext/v8/upstream/v8/src/allocation.h +0 -143
  243. data/ext/v8/upstream/v8/src/api.cc +0 -5678
  244. data/ext/v8/upstream/v8/src/api.h +0 -572
  245. data/ext/v8/upstream/v8/src/apinatives.js +0 -110
  246. data/ext/v8/upstream/v8/src/apiutils.h +0 -73
  247. data/ext/v8/upstream/v8/src/arguments.h +0 -116
  248. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +0 -353
  249. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +0 -2877
  250. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +0 -1382
  251. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +0 -1634
  252. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +0 -6917
  253. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +0 -623
  254. data/ext/v8/upstream/v8/src/arm/codegen-arm-inl.h +0 -48
  255. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +0 -7437
  256. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +0 -595
  257. data/ext/v8/upstream/v8/src/arm/constants-arm.cc +0 -152
  258. data/ext/v8/upstream/v8/src/arm/constants-arm.h +0 -778
  259. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +0 -149
  260. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +0 -317
  261. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +0 -737
  262. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +0 -1503
  263. data/ext/v8/upstream/v8/src/arm/frames-arm.cc +0 -45
  264. data/ext/v8/upstream/v8/src/arm/frames-arm.h +0 -168
  265. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +0 -4374
  266. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +0 -1793
  267. data/ext/v8/upstream/v8/src/arm/jump-target-arm.cc +0 -174
  268. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +0 -2120
  269. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +0 -2179
  270. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +0 -4132
  271. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +0 -329
  272. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +0 -305
  273. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.h +0 -84
  274. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +0 -2939
  275. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +0 -1071
  276. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +0 -1287
  277. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +0 -253
  278. data/ext/v8/upstream/v8/src/arm/register-allocator-arm-inl.h +0 -100
  279. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.cc +0 -63
  280. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.h +0 -44
  281. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +0 -3288
  282. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +0 -413
  283. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +0 -4034
  284. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm-inl.h +0 -59
  285. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +0 -843
  286. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +0 -523
  287. data/ext/v8/upstream/v8/src/array.js +0 -1249
  288. data/ext/v8/upstream/v8/src/assembler.cc +0 -1067
  289. data/ext/v8/upstream/v8/src/assembler.h +0 -823
  290. data/ext/v8/upstream/v8/src/ast-inl.h +0 -112
  291. data/ext/v8/upstream/v8/src/ast.cc +0 -1078
  292. data/ext/v8/upstream/v8/src/ast.h +0 -2234
  293. data/ext/v8/upstream/v8/src/atomicops.h +0 -167
  294. data/ext/v8/upstream/v8/src/atomicops_internals_arm_gcc.h +0 -145
  295. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +0 -169
  296. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.cc +0 -126
  297. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.h +0 -287
  298. data/ext/v8/upstream/v8/src/atomicops_internals_x86_macosx.h +0 -301
  299. data/ext/v8/upstream/v8/src/atomicops_internals_x86_msvc.h +0 -203
  300. data/ext/v8/upstream/v8/src/bignum-dtoa.cc +0 -655
  301. data/ext/v8/upstream/v8/src/bignum-dtoa.h +0 -81
  302. data/ext/v8/upstream/v8/src/bignum.cc +0 -768
  303. data/ext/v8/upstream/v8/src/bignum.h +0 -140
  304. data/ext/v8/upstream/v8/src/bootstrapper.cc +0 -2138
  305. data/ext/v8/upstream/v8/src/bootstrapper.h +0 -185
  306. data/ext/v8/upstream/v8/src/builtins.cc +0 -1708
  307. data/ext/v8/upstream/v8/src/builtins.h +0 -368
  308. data/ext/v8/upstream/v8/src/bytecodes-irregexp.h +0 -105
  309. data/ext/v8/upstream/v8/src/cached-powers.cc +0 -177
  310. data/ext/v8/upstream/v8/src/cached-powers.h +0 -65
  311. data/ext/v8/upstream/v8/src/char-predicates-inl.h +0 -94
  312. data/ext/v8/upstream/v8/src/char-predicates.h +0 -65
  313. data/ext/v8/upstream/v8/src/checks.cc +0 -110
  314. data/ext/v8/upstream/v8/src/checks.h +0 -296
  315. data/ext/v8/upstream/v8/src/circular-queue-inl.h +0 -53
  316. data/ext/v8/upstream/v8/src/circular-queue.cc +0 -122
  317. data/ext/v8/upstream/v8/src/circular-queue.h +0 -103
  318. data/ext/v8/upstream/v8/src/code-stubs.cc +0 -240
  319. data/ext/v8/upstream/v8/src/code-stubs.h +0 -971
  320. data/ext/v8/upstream/v8/src/code.h +0 -68
  321. data/ext/v8/upstream/v8/src/codegen-inl.h +0 -68
  322. data/ext/v8/upstream/v8/src/codegen.cc +0 -505
  323. data/ext/v8/upstream/v8/src/codegen.h +0 -245
  324. data/ext/v8/upstream/v8/src/compilation-cache.cc +0 -540
  325. data/ext/v8/upstream/v8/src/compilation-cache.h +0 -287
  326. data/ext/v8/upstream/v8/src/compiler.cc +0 -792
  327. data/ext/v8/upstream/v8/src/compiler.h +0 -307
  328. data/ext/v8/upstream/v8/src/contexts.cc +0 -327
  329. data/ext/v8/upstream/v8/src/contexts.h +0 -382
  330. data/ext/v8/upstream/v8/src/conversions-inl.h +0 -110
  331. data/ext/v8/upstream/v8/src/conversions.cc +0 -1125
  332. data/ext/v8/upstream/v8/src/conversions.h +0 -122
  333. data/ext/v8/upstream/v8/src/counters.cc +0 -93
  334. data/ext/v8/upstream/v8/src/counters.h +0 -254
  335. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +0 -101
  336. data/ext/v8/upstream/v8/src/cpu-profiler.cc +0 -606
  337. data/ext/v8/upstream/v8/src/cpu-profiler.h +0 -305
  338. data/ext/v8/upstream/v8/src/cpu.h +0 -67
  339. data/ext/v8/upstream/v8/src/d8-debug.cc +0 -367
  340. data/ext/v8/upstream/v8/src/d8-debug.h +0 -158
  341. data/ext/v8/upstream/v8/src/d8-posix.cc +0 -695
  342. data/ext/v8/upstream/v8/src/d8-readline.cc +0 -128
  343. data/ext/v8/upstream/v8/src/d8-windows.cc +0 -42
  344. data/ext/v8/upstream/v8/src/d8.cc +0 -796
  345. data/ext/v8/upstream/v8/src/d8.gyp +0 -88
  346. data/ext/v8/upstream/v8/src/d8.h +0 -231
  347. data/ext/v8/upstream/v8/src/d8.js +0 -2798
  348. data/ext/v8/upstream/v8/src/data-flow.cc +0 -545
  349. data/ext/v8/upstream/v8/src/data-flow.h +0 -379
  350. data/ext/v8/upstream/v8/src/date.js +0 -1103
  351. data/ext/v8/upstream/v8/src/dateparser-inl.h +0 -125
  352. data/ext/v8/upstream/v8/src/dateparser.cc +0 -178
  353. data/ext/v8/upstream/v8/src/dateparser.h +0 -265
  354. data/ext/v8/upstream/v8/src/debug-agent.cc +0 -447
  355. data/ext/v8/upstream/v8/src/debug-agent.h +0 -129
  356. data/ext/v8/upstream/v8/src/debug-debugger.js +0 -2569
  357. data/ext/v8/upstream/v8/src/debug.cc +0 -3188
  358. data/ext/v8/upstream/v8/src/debug.h +0 -1055
  359. data/ext/v8/upstream/v8/src/deoptimizer.cc +0 -1296
  360. data/ext/v8/upstream/v8/src/deoptimizer.h +0 -629
  361. data/ext/v8/upstream/v8/src/disasm.h +0 -80
  362. data/ext/v8/upstream/v8/src/disassembler.cc +0 -339
  363. data/ext/v8/upstream/v8/src/disassembler.h +0 -56
  364. data/ext/v8/upstream/v8/src/diy-fp.cc +0 -58
  365. data/ext/v8/upstream/v8/src/diy-fp.h +0 -117
  366. data/ext/v8/upstream/v8/src/double.h +0 -238
  367. data/ext/v8/upstream/v8/src/dtoa.cc +0 -103
  368. data/ext/v8/upstream/v8/src/dtoa.h +0 -85
  369. data/ext/v8/upstream/v8/src/execution.cc +0 -791
  370. data/ext/v8/upstream/v8/src/execution.h +0 -291
  371. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +0 -250
  372. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +0 -89
  373. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +0 -55
  374. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +0 -284
  375. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.h +0 -64
  376. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +0 -141
  377. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.h +0 -50
  378. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +0 -58
  379. data/ext/v8/upstream/v8/src/extensions/gc-extension.h +0 -49
  380. data/ext/v8/upstream/v8/src/factory.cc +0 -1194
  381. data/ext/v8/upstream/v8/src/factory.h +0 -436
  382. data/ext/v8/upstream/v8/src/fast-dtoa.cc +0 -736
  383. data/ext/v8/upstream/v8/src/fast-dtoa.h +0 -83
  384. data/ext/v8/upstream/v8/src/fixed-dtoa.cc +0 -405
  385. data/ext/v8/upstream/v8/src/fixed-dtoa.h +0 -55
  386. data/ext/v8/upstream/v8/src/flag-definitions.h +0 -556
  387. data/ext/v8/upstream/v8/src/flags.cc +0 -551
  388. data/ext/v8/upstream/v8/src/flags.h +0 -79
  389. data/ext/v8/upstream/v8/src/frame-element.cc +0 -37
  390. data/ext/v8/upstream/v8/src/frame-element.h +0 -269
  391. data/ext/v8/upstream/v8/src/frames-inl.h +0 -236
  392. data/ext/v8/upstream/v8/src/frames.cc +0 -1273
  393. data/ext/v8/upstream/v8/src/frames.h +0 -854
  394. data/ext/v8/upstream/v8/src/full-codegen.cc +0 -1385
  395. data/ext/v8/upstream/v8/src/full-codegen.h +0 -753
  396. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +0 -91
  397. data/ext/v8/upstream/v8/src/func-name-inferrer.h +0 -111
  398. data/ext/v8/upstream/v8/src/gdb-jit.cc +0 -1548
  399. data/ext/v8/upstream/v8/src/gdb-jit.h +0 -138
  400. data/ext/v8/upstream/v8/src/global-handles.cc +0 -596
  401. data/ext/v8/upstream/v8/src/global-handles.h +0 -239
  402. data/ext/v8/upstream/v8/src/globals.h +0 -325
  403. data/ext/v8/upstream/v8/src/handles-inl.h +0 -177
  404. data/ext/v8/upstream/v8/src/handles.cc +0 -965
  405. data/ext/v8/upstream/v8/src/handles.h +0 -372
  406. data/ext/v8/upstream/v8/src/hashmap.cc +0 -230
  407. data/ext/v8/upstream/v8/src/hashmap.h +0 -121
  408. data/ext/v8/upstream/v8/src/heap-inl.h +0 -703
  409. data/ext/v8/upstream/v8/src/heap-profiler.cc +0 -1173
  410. data/ext/v8/upstream/v8/src/heap-profiler.h +0 -396
  411. data/ext/v8/upstream/v8/src/heap.cc +0 -5856
  412. data/ext/v8/upstream/v8/src/heap.h +0 -2264
  413. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +0 -1639
  414. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +0 -3657
  415. data/ext/v8/upstream/v8/src/hydrogen.cc +0 -6011
  416. data/ext/v8/upstream/v8/src/hydrogen.h +0 -1137
  417. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +0 -430
  418. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +0 -2846
  419. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +0 -1159
  420. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +0 -1596
  421. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +0 -6549
  422. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +0 -495
  423. data/ext/v8/upstream/v8/src/ia32/codegen-ia32-inl.h +0 -46
  424. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +0 -10385
  425. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +0 -801
  426. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +0 -88
  427. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +0 -312
  428. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +0 -774
  429. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +0 -1620
  430. data/ext/v8/upstream/v8/src/ia32/frames-ia32.cc +0 -45
  431. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +0 -140
  432. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +0 -4357
  433. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +0 -1779
  434. data/ext/v8/upstream/v8/src/ia32/jump-target-ia32.cc +0 -437
  435. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +0 -4158
  436. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +0 -318
  437. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +0 -466
  438. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.h +0 -110
  439. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +0 -2181
  440. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +0 -2235
  441. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +0 -2056
  442. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +0 -807
  443. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1264
  444. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +0 -216
  445. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32-inl.h +0 -82
  446. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +0 -157
  447. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.h +0 -43
  448. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.cc +0 -30
  449. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +0 -72
  450. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +0 -3711
  451. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +0 -1366
  452. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +0 -650
  453. data/ext/v8/upstream/v8/src/ic-inl.h +0 -130
  454. data/ext/v8/upstream/v8/src/ic.cc +0 -2389
  455. data/ext/v8/upstream/v8/src/ic.h +0 -675
  456. data/ext/v8/upstream/v8/src/inspector.cc +0 -63
  457. data/ext/v8/upstream/v8/src/inspector.h +0 -62
  458. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +0 -659
  459. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +0 -49
  460. data/ext/v8/upstream/v8/src/isolate.cc +0 -883
  461. data/ext/v8/upstream/v8/src/isolate.h +0 -1306
  462. data/ext/v8/upstream/v8/src/json.js +0 -342
  463. data/ext/v8/upstream/v8/src/jsregexp.cc +0 -5371
  464. data/ext/v8/upstream/v8/src/jsregexp.h +0 -1483
  465. data/ext/v8/upstream/v8/src/jump-target-heavy-inl.h +0 -51
  466. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +0 -427
  467. data/ext/v8/upstream/v8/src/jump-target-heavy.h +0 -238
  468. data/ext/v8/upstream/v8/src/jump-target-inl.h +0 -48
  469. data/ext/v8/upstream/v8/src/jump-target-light-inl.h +0 -56
  470. data/ext/v8/upstream/v8/src/jump-target-light.cc +0 -111
  471. data/ext/v8/upstream/v8/src/jump-target-light.h +0 -193
  472. data/ext/v8/upstream/v8/src/jump-target.cc +0 -91
  473. data/ext/v8/upstream/v8/src/jump-target.h +0 -90
  474. data/ext/v8/upstream/v8/src/list-inl.h +0 -206
  475. data/ext/v8/upstream/v8/src/list.h +0 -164
  476. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +0 -142
  477. data/ext/v8/upstream/v8/src/lithium-allocator.cc +0 -2105
  478. data/ext/v8/upstream/v8/src/lithium-allocator.h +0 -630
  479. data/ext/v8/upstream/v8/src/lithium.cc +0 -169
  480. data/ext/v8/upstream/v8/src/lithium.h +0 -592
  481. data/ext/v8/upstream/v8/src/liveedit-debugger.js +0 -1082
  482. data/ext/v8/upstream/v8/src/liveedit.cc +0 -1693
  483. data/ext/v8/upstream/v8/src/liveedit.h +0 -179
  484. data/ext/v8/upstream/v8/src/liveobjectlist-inl.h +0 -126
  485. data/ext/v8/upstream/v8/src/liveobjectlist.cc +0 -2589
  486. data/ext/v8/upstream/v8/src/liveobjectlist.h +0 -322
  487. data/ext/v8/upstream/v8/src/log-inl.h +0 -59
  488. data/ext/v8/upstream/v8/src/log-utils.cc +0 -423
  489. data/ext/v8/upstream/v8/src/log-utils.h +0 -229
  490. data/ext/v8/upstream/v8/src/log.cc +0 -1666
  491. data/ext/v8/upstream/v8/src/log.h +0 -446
  492. data/ext/v8/upstream/v8/src/macro-assembler.h +0 -120
  493. data/ext/v8/upstream/v8/src/macros.py +0 -178
  494. data/ext/v8/upstream/v8/src/mark-compact.cc +0 -3092
  495. data/ext/v8/upstream/v8/src/mark-compact.h +0 -506
  496. data/ext/v8/upstream/v8/src/math.js +0 -264
  497. data/ext/v8/upstream/v8/src/messages.cc +0 -166
  498. data/ext/v8/upstream/v8/src/messages.h +0 -114
  499. data/ext/v8/upstream/v8/src/messages.js +0 -1090
  500. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +0 -335
  501. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +0 -2093
  502. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +0 -1066
  503. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +0 -148
  504. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +0 -752
  505. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +0 -511
  506. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +0 -64
  507. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +0 -1213
  508. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +0 -633
  509. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +0 -352
  510. data/ext/v8/upstream/v8/src/mips/constants-mips.h +0 -723
  511. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +0 -90
  512. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +0 -155
  513. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +0 -91
  514. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +0 -1023
  515. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +0 -48
  516. data/ext/v8/upstream/v8/src/mips/frames-mips.h +0 -179
  517. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +0 -727
  518. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +0 -244
  519. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +0 -80
  520. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +0 -65
  521. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +0 -304
  522. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +0 -3327
  523. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +0 -1058
  524. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +0 -478
  525. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +0 -250
  526. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +0 -134
  527. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.cc +0 -63
  528. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +0 -47
  529. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +0 -2438
  530. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +0 -394
  531. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +0 -601
  532. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +0 -58
  533. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +0 -307
  534. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +0 -530
  535. data/ext/v8/upstream/v8/src/mirror-debugger.js +0 -2381
  536. data/ext/v8/upstream/v8/src/mksnapshot.cc +0 -256
  537. data/ext/v8/upstream/v8/src/natives.h +0 -63
  538. data/ext/v8/upstream/v8/src/objects-debug.cc +0 -722
  539. data/ext/v8/upstream/v8/src/objects-inl.h +0 -4166
  540. data/ext/v8/upstream/v8/src/objects-printer.cc +0 -801
  541. data/ext/v8/upstream/v8/src/objects-visiting.cc +0 -142
  542. data/ext/v8/upstream/v8/src/objects-visiting.h +0 -422
  543. data/ext/v8/upstream/v8/src/objects.cc +0 -10296
  544. data/ext/v8/upstream/v8/src/objects.h +0 -6662
  545. data/ext/v8/upstream/v8/src/parser.cc +0 -5168
  546. data/ext/v8/upstream/v8/src/parser.h +0 -823
  547. data/ext/v8/upstream/v8/src/platform-cygwin.cc +0 -811
  548. data/ext/v8/upstream/v8/src/platform-freebsd.cc +0 -854
  549. data/ext/v8/upstream/v8/src/platform-linux.cc +0 -1120
  550. data/ext/v8/upstream/v8/src/platform-macos.cc +0 -865
  551. data/ext/v8/upstream/v8/src/platform-nullos.cc +0 -504
  552. data/ext/v8/upstream/v8/src/platform-openbsd.cc +0 -672
  553. data/ext/v8/upstream/v8/src/platform-posix.cc +0 -424
  554. data/ext/v8/upstream/v8/src/platform-solaris.cc +0 -796
  555. data/ext/v8/upstream/v8/src/platform-tls-mac.h +0 -62
  556. data/ext/v8/upstream/v8/src/platform-tls-win32.h +0 -62
  557. data/ext/v8/upstream/v8/src/platform-tls.h +0 -50
  558. data/ext/v8/upstream/v8/src/platform-win32.cc +0 -2072
  559. data/ext/v8/upstream/v8/src/platform.h +0 -693
  560. data/ext/v8/upstream/v8/src/preparse-data.cc +0 -185
  561. data/ext/v8/upstream/v8/src/preparse-data.h +0 -249
  562. data/ext/v8/upstream/v8/src/preparser-api.cc +0 -219
  563. data/ext/v8/upstream/v8/src/preparser.cc +0 -1205
  564. data/ext/v8/upstream/v8/src/preparser.h +0 -278
  565. data/ext/v8/upstream/v8/src/prettyprinter.cc +0 -1530
  566. data/ext/v8/upstream/v8/src/prettyprinter.h +0 -223
  567. data/ext/v8/upstream/v8/src/profile-generator-inl.h +0 -128
  568. data/ext/v8/upstream/v8/src/profile-generator.cc +0 -3095
  569. data/ext/v8/upstream/v8/src/profile-generator.h +0 -1125
  570. data/ext/v8/upstream/v8/src/property.cc +0 -102
  571. data/ext/v8/upstream/v8/src/property.h +0 -348
  572. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp-inl.h +0 -78
  573. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +0 -470
  574. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.h +0 -142
  575. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.cc +0 -373
  576. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.h +0 -104
  577. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +0 -266
  578. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +0 -236
  579. data/ext/v8/upstream/v8/src/regexp-stack.cc +0 -111
  580. data/ext/v8/upstream/v8/src/regexp-stack.h +0 -147
  581. data/ext/v8/upstream/v8/src/regexp.js +0 -483
  582. data/ext/v8/upstream/v8/src/register-allocator-inl.h +0 -141
  583. data/ext/v8/upstream/v8/src/register-allocator.cc +0 -98
  584. data/ext/v8/upstream/v8/src/register-allocator.h +0 -310
  585. data/ext/v8/upstream/v8/src/rewriter.cc +0 -1024
  586. data/ext/v8/upstream/v8/src/rewriter.h +0 -59
  587. data/ext/v8/upstream/v8/src/runtime-profiler.cc +0 -478
  588. data/ext/v8/upstream/v8/src/runtime-profiler.h +0 -192
  589. data/ext/v8/upstream/v8/src/runtime.cc +0 -11949
  590. data/ext/v8/upstream/v8/src/runtime.h +0 -643
  591. data/ext/v8/upstream/v8/src/runtime.js +0 -643
  592. data/ext/v8/upstream/v8/src/safepoint-table.cc +0 -256
  593. data/ext/v8/upstream/v8/src/safepoint-table.h +0 -269
  594. data/ext/v8/upstream/v8/src/scanner-base.cc +0 -964
  595. data/ext/v8/upstream/v8/src/scanner-base.h +0 -664
  596. data/ext/v8/upstream/v8/src/scanner.cc +0 -584
  597. data/ext/v8/upstream/v8/src/scanner.h +0 -196
  598. data/ext/v8/upstream/v8/src/scopeinfo.cc +0 -631
  599. data/ext/v8/upstream/v8/src/scopeinfo.h +0 -249
  600. data/ext/v8/upstream/v8/src/scopes.cc +0 -1093
  601. data/ext/v8/upstream/v8/src/scopes.h +0 -508
  602. data/ext/v8/upstream/v8/src/serialize.cc +0 -1574
  603. data/ext/v8/upstream/v8/src/serialize.h +0 -589
  604. data/ext/v8/upstream/v8/src/shell.h +0 -55
  605. data/ext/v8/upstream/v8/src/simulator.h +0 -43
  606. data/ext/v8/upstream/v8/src/small-pointer-list.h +0 -163
  607. data/ext/v8/upstream/v8/src/smart-pointer.h +0 -109
  608. data/ext/v8/upstream/v8/src/snapshot-common.cc +0 -82
  609. data/ext/v8/upstream/v8/src/snapshot-empty.cc +0 -50
  610. data/ext/v8/upstream/v8/src/snapshot.h +0 -73
  611. data/ext/v8/upstream/v8/src/spaces-inl.h +0 -529
  612. data/ext/v8/upstream/v8/src/spaces.cc +0 -3147
  613. data/ext/v8/upstream/v8/src/spaces.h +0 -2368
  614. data/ext/v8/upstream/v8/src/splay-tree-inl.h +0 -310
  615. data/ext/v8/upstream/v8/src/splay-tree.h +0 -203
  616. data/ext/v8/upstream/v8/src/string-search.cc +0 -41
  617. data/ext/v8/upstream/v8/src/string-search.h +0 -568
  618. data/ext/v8/upstream/v8/src/string-stream.cc +0 -592
  619. data/ext/v8/upstream/v8/src/string-stream.h +0 -191
  620. data/ext/v8/upstream/v8/src/string.js +0 -915
  621. data/ext/v8/upstream/v8/src/strtod.cc +0 -440
  622. data/ext/v8/upstream/v8/src/strtod.h +0 -40
  623. data/ext/v8/upstream/v8/src/stub-cache.cc +0 -1940
  624. data/ext/v8/upstream/v8/src/stub-cache.h +0 -866
  625. data/ext/v8/upstream/v8/src/third_party/valgrind/valgrind.h +0 -3925
  626. data/ext/v8/upstream/v8/src/token.cc +0 -63
  627. data/ext/v8/upstream/v8/src/token.h +0 -288
  628. data/ext/v8/upstream/v8/src/top.cc +0 -983
  629. data/ext/v8/upstream/v8/src/type-info.cc +0 -472
  630. data/ext/v8/upstream/v8/src/type-info.h +0 -290
  631. data/ext/v8/upstream/v8/src/unbound-queue-inl.h +0 -95
  632. data/ext/v8/upstream/v8/src/unbound-queue.h +0 -67
  633. data/ext/v8/upstream/v8/src/unicode-inl.h +0 -238
  634. data/ext/v8/upstream/v8/src/unicode.cc +0 -1624
  635. data/ext/v8/upstream/v8/src/unicode.h +0 -280
  636. data/ext/v8/upstream/v8/src/uri.js +0 -402
  637. data/ext/v8/upstream/v8/src/utils.cc +0 -371
  638. data/ext/v8/upstream/v8/src/utils.h +0 -796
  639. data/ext/v8/upstream/v8/src/v8-counters.cc +0 -62
  640. data/ext/v8/upstream/v8/src/v8-counters.h +0 -311
  641. data/ext/v8/upstream/v8/src/v8.cc +0 -215
  642. data/ext/v8/upstream/v8/src/v8.h +0 -130
  643. data/ext/v8/upstream/v8/src/v8checks.h +0 -64
  644. data/ext/v8/upstream/v8/src/v8dll-main.cc +0 -39
  645. data/ext/v8/upstream/v8/src/v8globals.h +0 -486
  646. data/ext/v8/upstream/v8/src/v8memory.h +0 -82
  647. data/ext/v8/upstream/v8/src/v8natives.js +0 -1293
  648. data/ext/v8/upstream/v8/src/v8preparserdll-main.cc +0 -39
  649. data/ext/v8/upstream/v8/src/v8threads.cc +0 -453
  650. data/ext/v8/upstream/v8/src/v8threads.h +0 -164
  651. data/ext/v8/upstream/v8/src/v8utils.h +0 -317
  652. data/ext/v8/upstream/v8/src/variables.cc +0 -132
  653. data/ext/v8/upstream/v8/src/variables.h +0 -212
  654. data/ext/v8/upstream/v8/src/version.cc +0 -116
  655. data/ext/v8/upstream/v8/src/version.h +0 -68
  656. data/ext/v8/upstream/v8/src/virtual-frame-heavy-inl.h +0 -190
  657. data/ext/v8/upstream/v8/src/virtual-frame-heavy.cc +0 -312
  658. data/ext/v8/upstream/v8/src/virtual-frame-inl.h +0 -39
  659. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +0 -171
  660. data/ext/v8/upstream/v8/src/virtual-frame-light.cc +0 -52
  661. data/ext/v8/upstream/v8/src/virtual-frame.cc +0 -49
  662. data/ext/v8/upstream/v8/src/virtual-frame.h +0 -59
  663. data/ext/v8/upstream/v8/src/vm-state-inl.h +0 -138
  664. data/ext/v8/upstream/v8/src/vm-state.h +0 -70
  665. data/ext/v8/upstream/v8/src/win32-headers.h +0 -96
  666. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +0 -456
  667. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +0 -2954
  668. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +0 -1630
  669. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +0 -1493
  670. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +0 -5132
  671. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +0 -477
  672. data/ext/v8/upstream/v8/src/x64/codegen-x64-inl.h +0 -46
  673. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +0 -8843
  674. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +0 -753
  675. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +0 -88
  676. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +0 -318
  677. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +0 -815
  678. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +0 -1752
  679. data/ext/v8/upstream/v8/src/x64/frames-x64.cc +0 -45
  680. data/ext/v8/upstream/v8/src/x64/frames-x64.h +0 -130
  681. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +0 -4339
  682. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +0 -1752
  683. data/ext/v8/upstream/v8/src/x64/jump-target-x64.cc +0 -437
  684. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +0 -3970
  685. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +0 -318
  686. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.cc +0 -320
  687. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.h +0 -74
  688. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +0 -2115
  689. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +0 -2161
  690. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +0 -2911
  691. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +0 -1984
  692. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +0 -1398
  693. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +0 -282
  694. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +0 -87
  695. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +0 -95
  696. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.h +0 -43
  697. data/ext/v8/upstream/v8/src/x64/simulator-x64.cc +0 -27
  698. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +0 -71
  699. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +0 -3460
  700. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +0 -1296
  701. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +0 -597
  702. data/ext/v8/upstream/v8/src/zone-inl.h +0 -129
  703. data/ext/v8/upstream/v8/src/zone.cc +0 -196
  704. data/ext/v8/upstream/v8/src/zone.h +0 -236
  705. data/ext/v8/upstream/v8/tools/codemap.js +0 -265
  706. data/ext/v8/upstream/v8/tools/consarray.js +0 -93
  707. data/ext/v8/upstream/v8/tools/csvparser.js +0 -78
  708. data/ext/v8/upstream/v8/tools/disasm.py +0 -92
  709. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +0 -10
  710. data/ext/v8/upstream/v8/tools/gc-nvp-trace-processor.py +0 -328
  711. data/ext/v8/upstream/v8/tools/generate-ten-powers.scm +0 -286
  712. data/ext/v8/upstream/v8/tools/grokdump.py +0 -840
  713. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +0 -844
  714. data/ext/v8/upstream/v8/tools/js2c.py +0 -380
  715. data/ext/v8/upstream/v8/tools/jsmin.py +0 -280
  716. data/ext/v8/upstream/v8/tools/linux-tick-processor +0 -35
  717. data/ext/v8/upstream/v8/tools/ll_prof.py +0 -919
  718. data/ext/v8/upstream/v8/tools/logreader.js +0 -185
  719. data/ext/v8/upstream/v8/tools/mac-nm +0 -18
  720. data/ext/v8/upstream/v8/tools/mac-tick-processor +0 -6
  721. data/ext/v8/upstream/v8/tools/oom_dump/README +0 -31
  722. data/ext/v8/upstream/v8/tools/oom_dump/SConstruct +0 -42
  723. data/ext/v8/upstream/v8/tools/oom_dump/oom_dump.cc +0 -288
  724. data/ext/v8/upstream/v8/tools/presubmit.py +0 -305
  725. data/ext/v8/upstream/v8/tools/process-heap-prof.py +0 -120
  726. data/ext/v8/upstream/v8/tools/profile.js +0 -751
  727. data/ext/v8/upstream/v8/tools/profile_view.js +0 -219
  728. data/ext/v8/upstream/v8/tools/run-valgrind.py +0 -77
  729. data/ext/v8/upstream/v8/tools/splaytree.js +0 -316
  730. data/ext/v8/upstream/v8/tools/stats-viewer.py +0 -468
  731. data/ext/v8/upstream/v8/tools/test.py +0 -1490
  732. data/ext/v8/upstream/v8/tools/tickprocessor-driver.js +0 -59
  733. data/ext/v8/upstream/v8/tools/tickprocessor.js +0 -877
  734. data/ext/v8/upstream/v8/tools/utils.py +0 -96
  735. data/ext/v8/upstream/v8/tools/visual_studio/README.txt +0 -70
  736. data/ext/v8/upstream/v8/tools/visual_studio/arm.vsprops +0 -14
  737. data/ext/v8/upstream/v8/tools/visual_studio/common.vsprops +0 -34
  738. data/ext/v8/upstream/v8/tools/visual_studio/d8.vcproj +0 -193
  739. data/ext/v8/upstream/v8/tools/visual_studio/d8_arm.vcproj +0 -193
  740. data/ext/v8/upstream/v8/tools/visual_studio/d8_x64.vcproj +0 -209
  741. data/ext/v8/upstream/v8/tools/visual_studio/d8js2c.cmd +0 -6
  742. data/ext/v8/upstream/v8/tools/visual_studio/debug.vsprops +0 -17
  743. data/ext/v8/upstream/v8/tools/visual_studio/ia32.vsprops +0 -17
  744. data/ext/v8/upstream/v8/tools/visual_studio/js2c.cmd +0 -6
  745. data/ext/v8/upstream/v8/tools/visual_studio/release.vsprops +0 -24
  746. data/ext/v8/upstream/v8/tools/visual_studio/v8.sln +0 -101
  747. data/ext/v8/upstream/v8/tools/visual_studio/v8.vcproj +0 -227
  748. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.sln +0 -74
  749. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.vcproj +0 -227
  750. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +0 -1308
  751. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +0 -1238
  752. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +0 -1300
  753. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest.vcproj +0 -265
  754. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_arm.vcproj +0 -249
  755. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_x64.vcproj +0 -257
  756. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot.vcproj +0 -145
  757. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -145
  758. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample.vcproj +0 -145
  759. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_arm.vcproj +0 -145
  760. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_x64.vcproj +0 -161
  761. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample.vcproj +0 -147
  762. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -147
  763. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -163
  764. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot.vcproj +0 -142
  765. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc.vcproj +0 -92
  766. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -92
  767. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_x64.vcproj +0 -142
  768. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.sln +0 -101
  769. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.vcproj +0 -227
  770. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +0 -18
  771. data/ext/v8/upstream/v8/tools/windows-tick-processor.bat +0 -30
data/ext/v8/upstream/v8/src/list.h
@@ -1,164 +0,0 @@
- // Copyright 2006-2009 the V8 project authors. All rights reserved.
- // Redistribution and use in source and binary forms, with or without
- // modification, are permitted provided that the following conditions are
- // met:
- //
- // * Redistributions of source code must retain the above copyright
- // notice, this list of conditions and the following disclaimer.
- // * Redistributions in binary form must reproduce the above
- // copyright notice, this list of conditions and the following
- // disclaimer in the documentation and/or other materials provided
- // with the distribution.
- // * Neither the name of Google Inc. nor the names of its
- // contributors may be used to endorse or promote products derived
- // from this software without specific prior written permission.
- //
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
- #ifndef V8_LIST_H_
- #define V8_LIST_H_
-
- namespace v8 {
- namespace internal {
-
-
- // ----------------------------------------------------------------------------
- // The list is a template for very light-weight lists. We are not
- // using the STL because we want full control over space and speed of
- // the code. This implementation is based on code by Robert Griesemer
- // and Rob Pike.
- //
- // The list is parameterized by the type of its elements (T) and by an
- // allocation policy (P). The policy is used for allocating lists in
- // the C free store or the zone; see zone.h.
-
- // Forward defined as
- // template <typename T, class P = FreeStoreAllocationPolicy> class List;
- template <typename T, class P>
- class List {
- public:
-
- List() { Initialize(0); }
- INLINE(explicit List(int capacity)) { Initialize(capacity); }
- INLINE(~List()) { DeleteData(data_); }
-
- // Deallocates memory used by the list and leaves the list in a consistent
- // empty state.
- void Free() {
- DeleteData(data_);
- Initialize(0);
- }
-
- INLINE(void* operator new(size_t size)) {
- return P::New(static_cast<int>(size));
- }
- INLINE(void operator delete(void* p, size_t)) { return P::Delete(p); }
-
- // Returns a reference to the element at index i. This reference is
- // not safe to use after operations that can change the list's
- // backing store (eg, Add).
- inline T& operator[](int i) const {
- ASSERT(0 <= i);
- ASSERT(i < length_);
- return data_[i];
- }
- inline T& at(int i) const { return operator[](i); }
- inline T& last() const { return at(length_ - 1); }
- inline T& first() const { return at(0); }
-
- INLINE(bool is_empty() const) { return length_ == 0; }
- INLINE(int length() const) { return length_; }
- INLINE(int capacity() const) { return capacity_; }
-
- Vector<T> ToVector() { return Vector<T>(data_, length_); }
-
- Vector<const T> ToConstVector() { return Vector<const T>(data_, length_); }
-
- // Adds a copy of the given 'element' to the end of the list,
- // expanding the list if necessary.
- void Add(const T& element);
-
- // Add all the elements from the argument list to this list.
- void AddAll(const List<T, P>& other);
-
- // Inserts the element at the specific index.
- void InsertAt(int index, const T& element);
-
- // Added 'count' elements with the value 'value' and returns a
- // vector that allows access to the elements. The vector is valid
- // until the next change is made to this list.
- Vector<T> AddBlock(T value, int count);
-
- // Removes the i'th element without deleting it even if T is a
- // pointer type; moves all elements above i "down". Returns the
- // removed element. This function's complexity is linear in the
- // size of the list.
- T Remove(int i);
-
- // Remove the given element from the list. Returns whether or not
- // the input is included in the list in the first place.
- bool RemoveElement(const T& elm);
-
- // Removes the last element without deleting it even if T is a
- // pointer type. Returns the removed element.
- INLINE(T RemoveLast()) { return Remove(length_ - 1); }
-
- // Clears the list by setting the length to zero. Even if T is a
- // pointer type, clearing the list doesn't delete the entries.
- INLINE(void Clear());
-
- // Drops all but the first 'pos' elements from the list.
- INLINE(void Rewind(int pos));
-
- // Drop the last 'count' elements from the list.
- INLINE(void RewindBy(int count)) { Rewind(length_ - count); }
-
- bool Contains(const T& elm) const;
- int CountOccurrences(const T& elm, int start, int end) const;
-
- // Iterate through all list entries, starting at index 0.
- void Iterate(void (*callback)(T* x));
- template<class Visitor>
- void Iterate(Visitor* visitor);
-
- // Sort all list entries (using QuickSort)
- void Sort(int (*cmp)(const T* x, const T* y));
- void Sort();
-
- INLINE(void Initialize(int capacity));
-
- private:
- T* data_;
- int capacity_;
- int length_;
-
- INLINE(T* NewData(int n)) { return static_cast<T*>(P::New(n * sizeof(T))); }
- INLINE(void DeleteData(T* data)) { P::Delete(data); }
-
- // Increase the capacity of a full list, and add an element.
- // List must be full already.
- void ResizeAdd(const T& element);
-
- // Inlined implementation of ResizeAdd, shared by inlined and
- // non-inlined versions of ResizeAdd.
- void ResizeAddInternal(const T& element);
-
- // Resize the list.
- void Resize(int new_capacity);
-
- DISALLOW_COPY_AND_ASSIGN(List);
- };
-
- } } // namespace v8::internal
-
- #endif // V8_LIST_H_
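The header removed above declared V8's light-weight List<T, P> template: a growable array parameterized by an allocation policy, where Add grows the backing store through ResizeAdd/Resize and Remove shifts later elements down in linear time. The following is a minimal, self-contained sketch of that kind of container added here for illustration only. It mirrors a few of the declarations above (Add, Remove, operator[], length, capacity); the doubling growth factor and the malloc-based storage are assumptions, and this is not the V8 implementation.

#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Simplified stand-in for the List<T, P> interface declared above.
// Only valid for trivially copyable T because it uses raw malloc/memcpy,
// which loosely mirrors the "raw allocation policy" idea of P::New/P::Delete.
template <typename T>
class SimpleList {
 public:
  SimpleList() : data_(nullptr), capacity_(0), length_(0) {}
  ~SimpleList() { std::free(data_); }

  // Adds a copy of 'element', growing the backing store when full
  // (this mirrors the Add/ResizeAdd split in the removed header).
  void Add(const T& element) {
    if (length_ == capacity_) Resize(capacity_ == 0 ? 4 : capacity_ * 2);
    data_[length_++] = element;
  }

  // Removes the i'th element and shifts later elements down, returning the
  // removed value (linear time, as documented for List<T, P>::Remove).
  T Remove(int i) {
    assert(0 <= i && i < length_);
    T removed = data_[i];
    for (int j = i; j < length_ - 1; j++) data_[j] = data_[j + 1];
    length_--;
    return removed;
  }

  T& operator[](int i) {
    assert(0 <= i && i < length_);
    return data_[i];
  }

  int length() const { return length_; }
  int capacity() const { return capacity_; }

 private:
  // Reallocate to 'new_capacity' and copy the existing elements over.
  void Resize(int new_capacity) {
    T* new_data = static_cast<T*>(std::malloc(new_capacity * sizeof(T)));
    if (length_ > 0) std::memcpy(new_data, data_, length_ * sizeof(T));
    std::free(data_);
    data_ = new_data;
    capacity_ = new_capacity;
  }

  T* data_;
  int capacity_;
  int length_;
};

int main() {
  SimpleList<int> list;
  for (int i = 0; i < 10; i++) list.Add(i * i);
  int removed = list.Remove(3);  // removes 9 and shifts the rest down
  std::printf("removed=%d length=%d capacity=%d first=%d\n",
              removed, list.length(), list.capacity(), list[0]);
  return 0;
}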
data/ext/v8/upstream/v8/src/lithium-allocator-inl.h
@@ -1,142 +0,0 @@
- // Copyright 2010 the V8 project authors. All rights reserved.
- // Redistribution and use in source and binary forms, with or without
- // modification, are permitted provided that the following conditions are
- // met:
- //
- // * Redistributions of source code must retain the above copyright
- // notice, this list of conditions and the following disclaimer.
- // * Redistributions in binary form must reproduce the above
- // copyright notice, this list of conditions and the following
- // disclaimer in the documentation and/or other materials provided
- // with the distribution.
- // * Neither the name of Google Inc. nor the names of its
- // contributors may be used to endorse or promote products derived
- // from this software without specific prior written permission.
- //
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
- #ifndef V8_LITHIUM_ALLOCATOR_INL_H_
- #define V8_LITHIUM_ALLOCATOR_INL_H_
-
- #include "lithium-allocator.h"
-
- #if V8_TARGET_ARCH_IA32
- #include "ia32/lithium-ia32.h"
- #elif V8_TARGET_ARCH_X64
- #include "x64/lithium-x64.h"
- #elif V8_TARGET_ARCH_ARM
- #include "arm/lithium-arm.h"
- #elif V8_TARGET_ARCH_MIPS
- #include "mips/lithium-mips.h"
- #else
- #error "Unknown architecture."
- #endif
-
- namespace v8 {
- namespace internal {
-
- bool LAllocator::IsGapAt(int index) { return chunk_->IsGapAt(index); }
-
-
- LInstruction* LAllocator::InstructionAt(int index) {
- return chunk_->instructions()->at(index);
- }
-
-
- LGap* LAllocator::GapAt(int index) {
- return chunk_->GetGapAt(index);
- }
-
-
- TempIterator::TempIterator(LInstruction* instr)
- : instr_(instr),
- limit_(instr->TempCount()),
- current_(0) {
- current_ = AdvanceToNext(0);
- }
-
-
- bool TempIterator::HasNext() { return current_ < limit_; }
-
-
- LOperand* TempIterator::Next() {
- ASSERT(HasNext());
- return instr_->TempAt(current_);
- }
-
-
- int TempIterator::AdvanceToNext(int start) {
- while (start < limit_ && instr_->TempAt(start) == NULL) start++;
- return start;
- }
-
-
- void TempIterator::Advance() {
- current_ = AdvanceToNext(current_ + 1);
- }
-
-
- InputIterator::InputIterator(LInstruction* instr)
- : instr_(instr),
- limit_(instr->InputCount()),
- current_(0) {
- current_ = AdvanceToNext(0);
- }
-
-
- bool InputIterator::HasNext() { return current_ < limit_; }
-
-
- LOperand* InputIterator::Next() {
- ASSERT(HasNext());
- return instr_->InputAt(current_);
- }
-
-
- void InputIterator::Advance() {
- current_ = AdvanceToNext(current_ + 1);
- }
-
-
- int InputIterator::AdvanceToNext(int start) {
- while (start < limit_ && instr_->InputAt(start)->IsConstantOperand()) start++;
- return start;
- }
-
-
- UseIterator::UseIterator(LInstruction* instr)
- : input_iterator_(instr), env_iterator_(instr->environment()) { }
-
-
- bool UseIterator::HasNext() {
- return input_iterator_.HasNext() || env_iterator_.HasNext();
- }
-
-
- LOperand* UseIterator::Next() {
- ASSERT(HasNext());
- return input_iterator_.HasNext()
- ? input_iterator_.Next()
- : env_iterator_.Next();
- }
-
-
- void UseIterator::Advance() {
- input_iterator_.HasNext()
- ? input_iterator_.Advance()
- : env_iterator_.Advance();
- }
-
- } } // namespace v8::internal
-
- #endif // V8_LITHIUM_ALLOCATOR_INL_H_
@@ -1,2105 +0,0 @@
1
- // Copyright 2010 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
- #include "lithium-allocator-inl.h"
30
-
31
- #include "hydrogen.h"
32
- #include "string-stream.h"
33
-
34
- #if V8_TARGET_ARCH_IA32
35
- #include "ia32/lithium-ia32.h"
36
- #elif V8_TARGET_ARCH_X64
37
- #include "x64/lithium-x64.h"
38
- #elif V8_TARGET_ARCH_ARM
39
- #include "arm/lithium-arm.h"
40
- #elif V8_TARGET_ARCH_MIPS
41
- #include "mips/lithium-mips.h"
42
- #else
43
- #error "Unknown architecture."
44
- #endif
45
-
46
- namespace v8 {
47
- namespace internal {
48
-
49
-
50
- #define DEFINE_OPERAND_CACHE(name, type) \
51
- name name::cache[name::kNumCachedOperands]; \
52
- void name::SetupCache() { \
53
- for (int i = 0; i < kNumCachedOperands; i++) { \
54
- cache[i].ConvertTo(type, i); \
55
- } \
56
- } \
57
- static bool name##_initialize() { \
58
- name::SetupCache(); \
59
- return true; \
60
- } \
61
- static bool name##_cache_initialized = name##_initialize();
62
-
63
- DEFINE_OPERAND_CACHE(LConstantOperand, CONSTANT_OPERAND)
64
- DEFINE_OPERAND_CACHE(LStackSlot, STACK_SLOT)
65
- DEFINE_OPERAND_CACHE(LDoubleStackSlot, DOUBLE_STACK_SLOT)
66
- DEFINE_OPERAND_CACHE(LRegister, REGISTER)
67
- DEFINE_OPERAND_CACHE(LDoubleRegister, DOUBLE_REGISTER)
68
-
69
- #undef DEFINE_OPERAND_CACHE
70
-
71
-
72
- static inline LifetimePosition Min(LifetimePosition a, LifetimePosition b) {
73
- return a.Value() < b.Value() ? a : b;
74
- }
75
-
76
-
77
- static inline LifetimePosition Max(LifetimePosition a, LifetimePosition b) {
78
- return a.Value() > b.Value() ? a : b;
79
- }
80
-
81
-
82
- UsePosition::UsePosition(LifetimePosition pos, LOperand* operand)
83
- : operand_(operand),
84
- hint_(NULL),
85
- pos_(pos),
86
- next_(NULL),
87
- requires_reg_(false),
88
- register_beneficial_(true) {
89
- if (operand_ != NULL && operand_->IsUnallocated()) {
90
- LUnallocated* unalloc = LUnallocated::cast(operand_);
91
- requires_reg_ = unalloc->HasRegisterPolicy();
92
- register_beneficial_ = !unalloc->HasAnyPolicy();
93
- }
94
- ASSERT(pos_.IsValid());
95
- }
96
-
97
-
98
- bool UsePosition::HasHint() const {
99
- return hint_ != NULL && !hint_->IsUnallocated();
100
- }
101
-
102
-
103
- bool UsePosition::RequiresRegister() const {
104
- return requires_reg_;
105
- }
106
-
107
-
108
- bool UsePosition::RegisterIsBeneficial() const {
109
- return register_beneficial_;
110
- }
111
-
112
-
113
- void UseInterval::SplitAt(LifetimePosition pos) {
114
- ASSERT(Contains(pos) && pos.Value() != start().Value());
115
- UseInterval* after = new UseInterval(pos, end_);
116
- after->next_ = next_;
117
- next_ = after;
118
- end_ = pos;
119
- }
120
-
121
-
122
- #ifdef DEBUG
123
-
124
-
125
- void LiveRange::Verify() const {
126
- UsePosition* cur = first_pos_;
127
- while (cur != NULL) {
128
- ASSERT(Start().Value() <= cur->pos().Value() &&
129
- cur->pos().Value() <= End().Value());
130
- cur = cur->next();
131
- }
132
- }
133
-
134
-
135
- bool LiveRange::HasOverlap(UseInterval* target) const {
136
- UseInterval* current_interval = first_interval_;
137
- while (current_interval != NULL) {
138
- // Intervals overlap if the start of one is contained in the other.
139
- if (current_interval->Contains(target->start()) ||
140
- target->Contains(current_interval->start())) {
141
- return true;
142
- }
143
- current_interval = current_interval->next();
144
- }
145
- return false;
146
- }
147
-
148
-
149
- #endif
150
-
151
-
152
- LiveRange::LiveRange(int id)
153
- : id_(id),
154
- spilled_(false),
155
- assigned_register_(kInvalidAssignment),
156
- assigned_register_kind_(NONE),
157
- last_interval_(NULL),
158
- first_interval_(NULL),
159
- first_pos_(NULL),
160
- parent_(NULL),
161
- next_(NULL),
162
- current_interval_(NULL),
163
- last_processed_use_(NULL),
164
- spill_start_index_(kMaxInt) {
165
- spill_operand_ = new LUnallocated(LUnallocated::IGNORE);
166
- }
167
-
168
-
169
- void LiveRange::set_assigned_register(int reg, RegisterKind register_kind) {
170
- ASSERT(!HasRegisterAssigned() && !IsSpilled());
171
- assigned_register_ = reg;
172
- assigned_register_kind_ = register_kind;
173
- ConvertOperands();
174
- }
175
-
176
-
177
- void LiveRange::MakeSpilled() {
178
- ASSERT(!IsSpilled());
179
- ASSERT(TopLevel()->HasAllocatedSpillOperand());
180
- spilled_ = true;
181
- assigned_register_ = kInvalidAssignment;
182
- ConvertOperands();
183
- }
184
-
185
-
186
- bool LiveRange::HasAllocatedSpillOperand() const {
187
- return spill_operand_ != NULL && !spill_operand_->IsUnallocated();
188
- }
189
-
190
-
191
- void LiveRange::SetSpillOperand(LOperand* operand) {
192
- ASSERT(!operand->IsUnallocated());
193
- ASSERT(spill_operand_ != NULL);
194
- ASSERT(spill_operand_->IsUnallocated());
195
- spill_operand_->ConvertTo(operand->kind(), operand->index());
196
- }
197
-
198
-
199
- UsePosition* LiveRange::NextUsePosition(LifetimePosition start) {
200
- UsePosition* use_pos = last_processed_use_;
201
- if (use_pos == NULL) use_pos = first_pos();
202
- while (use_pos != NULL && use_pos->pos().Value() < start.Value()) {
203
- use_pos = use_pos->next();
204
- }
205
- last_processed_use_ = use_pos;
206
- return use_pos;
207
- }
208
-
209
-
210
- UsePosition* LiveRange::NextUsePositionRegisterIsBeneficial(
211
- LifetimePosition start) {
212
- UsePosition* pos = NextUsePosition(start);
213
- while (pos != NULL && !pos->RegisterIsBeneficial()) {
214
- pos = pos->next();
215
- }
216
- return pos;
217
- }
218
-
219
-
220
- UsePosition* LiveRange::NextRegisterPosition(LifetimePosition start) {
221
- UsePosition* pos = NextUsePosition(start);
222
- while (pos != NULL && !pos->RequiresRegister()) {
223
- pos = pos->next();
224
- }
225
- return pos;
226
- }
227
-
228
-
229
- bool LiveRange::CanBeSpilled(LifetimePosition pos) {
230
- // TODO(kmillikin): Comment. Now.
231
- if (pos.Value() <= Start().Value() && HasRegisterAssigned()) return false;
232
-
233
- // We cannot spill a live range that has a use requiring a register
234
- // at the current or the immediate next position.
235
- UsePosition* use_pos = NextRegisterPosition(pos);
236
- if (use_pos == NULL) return true;
237
- return use_pos->pos().Value() > pos.NextInstruction().Value();
238
- }
239
-
240
-
241
- UsePosition* LiveRange::FirstPosWithHint() const {
242
- UsePosition* pos = first_pos_;
243
- while (pos != NULL && !pos->HasHint()) pos = pos->next();
244
- return pos;
245
- }
246
-
247
-
248
- LOperand* LiveRange::CreateAssignedOperand() {
249
- LOperand* op = NULL;
250
- if (HasRegisterAssigned()) {
251
- ASSERT(!IsSpilled());
252
- if (IsDouble()) {
253
- op = LDoubleRegister::Create(assigned_register());
254
- } else {
255
- op = LRegister::Create(assigned_register());
256
- }
257
- } else if (IsSpilled()) {
258
- ASSERT(!HasRegisterAssigned());
259
- op = TopLevel()->GetSpillOperand();
260
- ASSERT(!op->IsUnallocated());
261
- } else {
262
- LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
263
- unalloc->set_virtual_register(id_);
264
- op = unalloc;
265
- }
266
- return op;
267
- }
268
-
269
-
270
- UseInterval* LiveRange::FirstSearchIntervalForPosition(
271
- LifetimePosition position) const {
272
- if (current_interval_ == NULL) return first_interval_;
273
- if (current_interval_->start().Value() > position.Value()) {
274
- current_interval_ = NULL;
275
- return first_interval_;
276
- }
277
- return current_interval_;
278
- }
279
-
280
-
281
- void LiveRange::AdvanceLastProcessedMarker(
282
- UseInterval* to_start_of, LifetimePosition but_not_past) const {
283
- if (to_start_of == NULL) return;
284
- if (to_start_of->start().Value() > but_not_past.Value()) return;
285
- LifetimePosition start =
286
- current_interval_ == NULL ? LifetimePosition::Invalid()
287
- : current_interval_->start();
288
- if (to_start_of->start().Value() > start.Value()) {
289
- current_interval_ = to_start_of;
290
- }
291
- }
292
-
293
-
294
- void LiveRange::SplitAt(LifetimePosition position, LiveRange* result) {
295
- ASSERT(Start().Value() < position.Value());
296
- ASSERT(result->IsEmpty());
297
- // Find the last interval that ends before the position. If the
298
- // position is contained in one of the intervals in the chain, we
299
- // split that interval and use the first part.
300
- UseInterval* current = FirstSearchIntervalForPosition(position);
301
-
302
- // If the split position coincides with the beginning of a use interval
303
- // we need to split use positions in a special way.
304
- bool split_at_start = false;
305
-
306
- while (current != NULL) {
307
- if (current->Contains(position)) {
308
- current->SplitAt(position);
309
- break;
310
- }
311
- UseInterval* next = current->next();
312
- if (next->start().Value() >= position.Value()) {
313
- split_at_start = (next->start().Value() == position.Value());
314
- break;
315
- }
316
- current = next;
317
- }
318
-
319
- // Partition original use intervals to the two live ranges.
320
- UseInterval* before = current;
321
- UseInterval* after = before->next();
322
- result->last_interval_ = (last_interval_ == before)
323
- ? after // Only interval in the range after split.
324
- : last_interval_; // Last interval of the original range.
325
- result->first_interval_ = after;
326
- last_interval_ = before;
327
-
328
- // Find the last use position before the split and the first use
329
- // position after it.
330
- UsePosition* use_after = first_pos_;
331
- UsePosition* use_before = NULL;
332
- if (split_at_start) {
333
- // The split position coincides with the beginning of a use interval (the
334
- // end of a lifetime hole). Use at this position should be attributed to
335
- // the split child because the split child owns the use interval covering it.
336
- while (use_after != NULL && use_after->pos().Value() < position.Value()) {
337
- use_before = use_after;
338
- use_after = use_after->next();
339
- }
340
- } else {
341
- while (use_after != NULL && use_after->pos().Value() <= position.Value()) {
342
- use_before = use_after;
343
- use_after = use_after->next();
344
- }
345
- }
346
-
347
- // Partition original use positions to the two live ranges.
348
- if (use_before != NULL) {
349
- use_before->next_ = NULL;
350
- } else {
351
- first_pos_ = NULL;
352
- }
353
- result->first_pos_ = use_after;
354
-
355
- // Link the new live range in the chain before any of the other
356
- // ranges linked from the range before the split.
357
- result->parent_ = (parent_ == NULL) ? this : parent_;
358
- result->next_ = next_;
359
- next_ = result;
360
-
361
- #ifdef DEBUG
362
- Verify();
363
- result->Verify();
364
- #endif
365
- }
366
-
367
-
368
- // This implements an ordering on live ranges so that they are ordered by their
369
- // start positions. This is needed for the correctness of the register
370
- // allocation algorithm. If two live ranges start at the same offset then there
371
- // is a tie breaker based on where the value is first used. This part of the
372
- // ordering is merely a heuristic.
373
- bool LiveRange::ShouldBeAllocatedBefore(const LiveRange* other) const {
374
- LifetimePosition start = Start();
375
- LifetimePosition other_start = other->Start();
376
- if (start.Value() == other_start.Value()) {
377
- UsePosition* pos = FirstPosWithHint();
378
- if (pos == NULL) return false;
379
- UsePosition* other_pos = other->first_pos();
380
- if (other_pos == NULL) return true;
381
- return pos->pos().Value() < other_pos->pos().Value();
382
- }
383
- return start.Value() < other_start.Value();
384
- }
385
-
386
-
387
- void LiveRange::ShortenTo(LifetimePosition start) {
388
- LAllocator::TraceAlloc("Shorten live range %d to [%d\n", id_, start.Value());
389
- ASSERT(first_interval_ != NULL);
390
- ASSERT(first_interval_->start().Value() <= start.Value());
391
- ASSERT(start.Value() < first_interval_->end().Value());
392
- first_interval_->set_start(start);
393
- }
394
-
395
-
396
- void LiveRange::EnsureInterval(LifetimePosition start, LifetimePosition end) {
397
- LAllocator::TraceAlloc("Ensure live range %d in interval [%d %d[\n",
398
- id_,
399
- start.Value(),
400
- end.Value());
401
- LifetimePosition new_end = end;
402
- while (first_interval_ != NULL &&
403
- first_interval_->start().Value() <= end.Value()) {
404
- if (first_interval_->end().Value() > end.Value()) {
405
- new_end = first_interval_->end();
406
- }
407
- first_interval_ = first_interval_->next();
408
- }
409
-
410
- UseInterval* new_interval = new UseInterval(start, new_end);
411
- new_interval->next_ = first_interval_;
412
- first_interval_ = new_interval;
413
- if (new_interval->next() == NULL) {
414
- last_interval_ = new_interval;
415
- }
416
- }
417
-
418
-
419
- void LiveRange::AddUseInterval(LifetimePosition start, LifetimePosition end) {
420
- LAllocator::TraceAlloc("Add to live range %d interval [%d %d[\n",
421
- id_,
422
- start.Value(),
423
- end.Value());
424
- if (first_interval_ == NULL) {
425
- UseInterval* interval = new UseInterval(start, end);
426
- first_interval_ = interval;
427
- last_interval_ = interval;
428
- } else {
429
- if (end.Value() == first_interval_->start().Value()) {
430
- first_interval_->set_start(start);
431
- } else if (end.Value() < first_interval_->start().Value()) {
432
- UseInterval* interval = new UseInterval(start, end);
433
- interval->set_next(first_interval_);
434
- first_interval_ = interval;
435
- } else {
436
- // The order of instruction processing (see ProcessInstructions) guarantees
437
- // that each new use interval either precedes or intersects with
438
- // the last added interval.
439
- ASSERT(start.Value() < first_interval_->end().Value());
440
- first_interval_->start_ = Min(start, first_interval_->start_);
441
- first_interval_->end_ = Max(end, first_interval_->end_);
442
- }
443
- }
444
- }
445
-
446
-
447
- UsePosition* LiveRange::AddUsePosition(LifetimePosition pos,
448
- LOperand* operand) {
449
- LAllocator::TraceAlloc("Add to live range %d use position %d\n",
450
- id_,
451
- pos.Value());
452
- UsePosition* use_pos = new UsePosition(pos, operand);
453
- UsePosition* prev = NULL;
454
- UsePosition* current = first_pos_;
455
- while (current != NULL && current->pos().Value() < pos.Value()) {
456
- prev = current;
457
- current = current->next();
458
- }
459
-
460
- if (prev == NULL) {
461
- use_pos->set_next(first_pos_);
462
- first_pos_ = use_pos;
463
- } else {
464
- use_pos->next_ = prev->next_;
465
- prev->next_ = use_pos;
466
- }
467
-
468
- return use_pos;
469
- }
470
-
471
-
472
- void LiveRange::ConvertOperands() {
473
- LOperand* op = CreateAssignedOperand();
474
- UsePosition* use_pos = first_pos();
475
- while (use_pos != NULL) {
476
- ASSERT(Start().Value() <= use_pos->pos().Value() &&
477
- use_pos->pos().Value() <= End().Value());
478
-
479
- if (use_pos->HasOperand()) {
480
- ASSERT(op->IsRegister() || op->IsDoubleRegister() ||
481
- !use_pos->RequiresRegister());
482
- use_pos->operand()->ConvertTo(op->kind(), op->index());
483
- }
484
- use_pos = use_pos->next();
485
- }
486
- }
487
-
488
-
489
- bool LiveRange::CanCover(LifetimePosition position) const {
490
- if (IsEmpty()) return false;
491
- return Start().Value() <= position.Value() &&
492
- position.Value() < End().Value();
493
- }
494
-
495
-
496
- bool LiveRange::Covers(LifetimePosition position) {
497
- if (!CanCover(position)) return false;
498
- UseInterval* start_search = FirstSearchIntervalForPosition(position);
499
- for (UseInterval* interval = start_search;
500
- interval != NULL;
501
- interval = interval->next()) {
502
- ASSERT(interval->next() == NULL ||
503
- interval->next()->start().Value() >= interval->start().Value());
504
- AdvanceLastProcessedMarker(interval, position);
505
- if (interval->Contains(position)) return true;
506
- if (interval->start().Value() > position.Value()) return false;
507
- }
508
- return false;
509
- }
510
-
511
-
512
- LifetimePosition LiveRange::FirstIntersection(LiveRange* other) {
513
- UseInterval* b = other->first_interval();
514
- if (b == NULL) return LifetimePosition::Invalid();
515
- LifetimePosition advance_last_processed_up_to = b->start();
516
- UseInterval* a = FirstSearchIntervalForPosition(b->start());
517
- while (a != NULL && b != NULL) {
518
- if (a->start().Value() > other->End().Value()) break;
519
- if (b->start().Value() > End().Value()) break;
520
- LifetimePosition cur_intersection = a->Intersect(b);
521
- if (cur_intersection.IsValid()) {
522
- return cur_intersection;
523
- }
524
- if (a->start().Value() < b->start().Value()) {
525
- a = a->next();
526
- if (a == NULL || a->start().Value() > other->End().Value()) break;
527
- AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
528
- } else {
529
- b = b->next();
530
- }
531
- }
532
- return LifetimePosition::Invalid();
533
- }
534
-
535
-
536
- LAllocator::LAllocator(int num_values, HGraph* graph)
537
- : chunk_(NULL),
538
- live_in_sets_(graph->blocks()->length()),
539
- live_ranges_(num_values * 2),
540
- fixed_live_ranges_(NULL),
541
- fixed_double_live_ranges_(NULL),
542
- unhandled_live_ranges_(num_values * 2),
543
- active_live_ranges_(8),
544
- inactive_live_ranges_(8),
545
- reusable_slots_(8),
546
- next_virtual_register_(num_values),
547
- first_artificial_register_(num_values),
548
- mode_(NONE),
549
- num_registers_(-1),
550
- graph_(graph),
551
- has_osr_entry_(false) {}
552
-
553
-
554
- void LAllocator::InitializeLivenessAnalysis() {
555
- // Initialize the live_in sets for each block to NULL.
556
- int block_count = graph_->blocks()->length();
557
- live_in_sets_.Initialize(block_count);
558
- live_in_sets_.AddBlock(NULL, block_count);
559
- }
560
-
561
-
562
- BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
563
- // Compute live out for the given block, except not including backward
564
- // successor edges.
565
- BitVector* live_out = new BitVector(next_virtual_register_);
566
-
567
- // Process all successor blocks.
568
- HBasicBlock* successor = block->end()->FirstSuccessor();
569
- while (successor != NULL) {
570
- // Add values live on entry to the successor. Note the successor's
571
- // live_in will not be computed yet for backwards edges.
572
- BitVector* live_in = live_in_sets_[successor->block_id()];
573
- if (live_in != NULL) live_out->Union(*live_in);
574
-
575
- // All phi input operands corresponding to this successor edge are live
576
- // out from this block.
577
- int index = successor->PredecessorIndexOf(block);
578
- const ZoneList<HPhi*>* phis = successor->phis();
579
- for (int i = 0; i < phis->length(); ++i) {
580
- HPhi* phi = phis->at(i);
581
- if (!phi->OperandAt(index)->IsConstant()) {
582
- live_out->Add(phi->OperandAt(index)->id());
583
- }
584
- }
585
-
586
- // Check if we are done with second successor.
587
- if (successor == block->end()->SecondSuccessor()) break;
588
-
589
- successor = block->end()->SecondSuccessor();
590
- }
591
-
592
- return live_out;
593
- }
594
-
595
-
596
- void LAllocator::AddInitialIntervals(HBasicBlock* block,
597
- BitVector* live_out) {
598
- // Add an interval that includes the entire block to the live range for
599
- // each live_out value.
600
- LifetimePosition start = LifetimePosition::FromInstructionIndex(
601
- block->first_instruction_index());
602
- LifetimePosition end = LifetimePosition::FromInstructionIndex(
603
- block->last_instruction_index()).NextInstruction();
604
- BitVector::Iterator iterator(live_out);
605
- while (!iterator.Done()) {
606
- int operand_index = iterator.Current();
607
- LiveRange* range = LiveRangeFor(operand_index);
608
- range->AddUseInterval(start, end);
609
- iterator.Advance();
610
- }
611
- }
612
-
613
-
614
- int LAllocator::FixedDoubleLiveRangeID(int index) {
615
- return -index - 1 - Register::kNumAllocatableRegisters;
616
- }
617
-
618
-
619
- LOperand* LAllocator::AllocateFixed(LUnallocated* operand,
620
- int pos,
621
- bool is_tagged) {
622
- TraceAlloc("Allocating fixed reg for op %d\n", operand->virtual_register());
623
- ASSERT(operand->HasFixedPolicy());
624
- if (operand->policy() == LUnallocated::FIXED_SLOT) {
625
- operand->ConvertTo(LOperand::STACK_SLOT, operand->fixed_index());
626
- } else if (operand->policy() == LUnallocated::FIXED_REGISTER) {
627
- int reg_index = operand->fixed_index();
628
- operand->ConvertTo(LOperand::REGISTER, reg_index);
629
- } else if (operand->policy() == LUnallocated::FIXED_DOUBLE_REGISTER) {
630
- int reg_index = operand->fixed_index();
631
- operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
632
- } else {
633
- UNREACHABLE();
634
- }
635
- if (is_tagged) {
636
- TraceAlloc("Fixed reg is tagged at %d\n", pos);
637
- LInstruction* instr = InstructionAt(pos);
638
- if (instr->HasPointerMap()) {
639
- instr->pointer_map()->RecordPointer(operand);
640
- }
641
- }
642
- return operand;
643
- }
644
-
645
-
646
- LiveRange* LAllocator::FixedLiveRangeFor(int index) {
647
- ASSERT(index < Register::kNumAllocatableRegisters);
648
- LiveRange* result = fixed_live_ranges_[index];
649
- if (result == NULL) {
650
- result = new LiveRange(FixedLiveRangeID(index));
651
- ASSERT(result->IsFixed());
652
- result->set_assigned_register(index, GENERAL_REGISTERS);
653
- fixed_live_ranges_[index] = result;
654
- }
655
- return result;
656
- }
657
-
658
-
659
- LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
660
- ASSERT(index < DoubleRegister::kNumAllocatableRegisters);
661
- LiveRange* result = fixed_double_live_ranges_[index];
662
- if (result == NULL) {
663
- result = new LiveRange(FixedDoubleLiveRangeID(index));
664
- ASSERT(result->IsFixed());
665
- result->set_assigned_register(index, DOUBLE_REGISTERS);
666
- fixed_double_live_ranges_[index] = result;
667
- }
668
- return result;
669
- }
670
-
671
-
672
- LiveRange* LAllocator::LiveRangeFor(int index) {
673
- if (index >= live_ranges_.length()) {
674
- live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1);
675
- }
676
- LiveRange* result = live_ranges_[index];
677
- if (result == NULL) {
678
- result = new LiveRange(index);
679
- live_ranges_[index] = result;
680
- }
681
- return result;
682
- }
683
-
684
-
685
- LGap* LAllocator::GetLastGap(HBasicBlock* block) {
686
- int last_instruction = block->last_instruction_index();
687
- int index = chunk_->NearestGapPos(last_instruction);
688
- return GapAt(index);
689
- }
690
-
691
-
692
- HPhi* LAllocator::LookupPhi(LOperand* operand) const {
693
- if (!operand->IsUnallocated()) return NULL;
694
- int index = operand->VirtualRegister();
695
- HValue* instr = graph_->LookupValue(index);
696
- if (instr != NULL && instr->IsPhi()) {
697
- return HPhi::cast(instr);
698
- }
699
- return NULL;
700
- }
701
-
702
-
703
- LiveRange* LAllocator::LiveRangeFor(LOperand* operand) {
704
- if (operand->IsUnallocated()) {
705
- return LiveRangeFor(LUnallocated::cast(operand)->virtual_register());
706
- } else if (operand->IsRegister()) {
707
- return FixedLiveRangeFor(operand->index());
708
- } else if (operand->IsDoubleRegister()) {
709
- return FixedDoubleLiveRangeFor(operand->index());
710
- } else {
711
- return NULL;
712
- }
713
- }
714
-
715
-
716
- void LAllocator::Define(LifetimePosition position,
717
- LOperand* operand,
718
- LOperand* hint) {
719
- LiveRange* range = LiveRangeFor(operand);
720
- if (range == NULL) return;
721
-
722
- if (range->IsEmpty() || range->Start().Value() > position.Value()) {
723
- // Can happen if there is a definition without use.
724
- range->AddUseInterval(position, position.NextInstruction());
725
- range->AddUsePosition(position.NextInstruction(), NULL);
726
- } else {
727
- range->ShortenTo(position);
728
- }
729
-
730
- if (operand->IsUnallocated()) {
731
- LUnallocated* unalloc_operand = LUnallocated::cast(operand);
732
- range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
733
- }
734
- }
735
-
736
-
737
- void LAllocator::Use(LifetimePosition block_start,
738
- LifetimePosition position,
739
- LOperand* operand,
740
- LOperand* hint) {
741
- LiveRange* range = LiveRangeFor(operand);
742
- if (range == NULL) return;
743
- if (operand->IsUnallocated()) {
744
- LUnallocated* unalloc_operand = LUnallocated::cast(operand);
745
- range->AddUsePosition(position, unalloc_operand)->set_hint(hint);
746
- }
747
- range->AddUseInterval(block_start, position);
748
- }
749
-
750
-
751
- void LAllocator::AddConstraintsGapMove(int index,
752
- LOperand* from,
753
- LOperand* to) {
754
- LGap* gap = GapAt(index);
755
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
756
- if (from->IsUnallocated()) {
757
- const ZoneList<LMoveOperands>* move_operands = move->move_operands();
758
- for (int i = 0; i < move_operands->length(); ++i) {
759
- LMoveOperands cur = move_operands->at(i);
760
- LOperand* cur_to = cur.destination();
761
- if (cur_to->IsUnallocated()) {
762
- if (cur_to->VirtualRegister() == from->VirtualRegister()) {
763
- move->AddMove(cur.source(), to);
764
- return;
765
- }
766
- }
767
- }
768
- }
769
- move->AddMove(from, to);
770
- }
771
-
772
-
773
- void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
774
- int start = block->first_instruction_index();
775
- int end = block->last_instruction_index();
776
- for (int i = start; i <= end; ++i) {
777
- if (IsGapAt(i)) {
778
- LInstruction* instr = NULL;
779
- LInstruction* prev_instr = NULL;
780
- if (i < end) instr = InstructionAt(i + 1);
781
- if (i > start) prev_instr = InstructionAt(i - 1);
782
- MeetConstraintsBetween(prev_instr, instr, i);
783
- }
784
- }
785
- }
786
-
787
-
788
- void LAllocator::MeetConstraintsBetween(LInstruction* first,
789
- LInstruction* second,
790
- int gap_index) {
791
- // Handle fixed temporaries.
792
- if (first != NULL) {
793
- for (TempIterator it(first); it.HasNext(); it.Advance()) {
794
- LUnallocated* temp = LUnallocated::cast(it.Next());
795
- if (temp->HasFixedPolicy()) {
796
- AllocateFixed(temp, gap_index - 1, false);
797
- }
798
- }
799
- }
800
-
801
- // Handle fixed output operand.
802
- if (first != NULL && first->Output() != NULL) {
803
- LUnallocated* first_output = LUnallocated::cast(first->Output());
804
- LiveRange* range = LiveRangeFor(first_output->VirtualRegister());
805
- bool assigned = false;
806
- if (first_output->HasFixedPolicy()) {
807
- LUnallocated* output_copy = first_output->CopyUnconstrained();
808
- bool is_tagged = HasTaggedValue(first_output->VirtualRegister());
809
- AllocateFixed(first_output, gap_index, is_tagged);
810
-
811
- // This value is produced on the stack, we never need to spill it.
812
- if (first_output->IsStackSlot()) {
813
- range->SetSpillOperand(first_output);
814
- range->SetSpillStartIndex(gap_index - 1);
815
- assigned = true;
816
- }
817
- chunk_->AddGapMove(gap_index, first_output, output_copy);
818
- }
819
-
820
- if (!assigned) {
821
- range->SetSpillStartIndex(gap_index);
822
-
823
- // This move to spill operand is not a real use. Liveness analysis
824
- // and splitting of live ranges do not account for it.
825
- // Thus it should be inserted at a lifetime position corresponding to
826
- // the instruction end.
827
- LGap* gap = GapAt(gap_index);
828
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE);
829
- move->AddMove(first_output, range->GetSpillOperand());
830
- }
831
- }
832
-
833
- // Handle fixed input operands of second instruction.
834
- if (second != NULL) {
835
- for (UseIterator it(second); it.HasNext(); it.Advance()) {
836
- LUnallocated* cur_input = LUnallocated::cast(it.Next());
837
- if (cur_input->HasFixedPolicy()) {
838
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
839
- bool is_tagged = HasTaggedValue(cur_input->VirtualRegister());
840
- AllocateFixed(cur_input, gap_index + 1, is_tagged);
841
- AddConstraintsGapMove(gap_index, input_copy, cur_input);
842
- } else if (cur_input->policy() == LUnallocated::WRITABLE_REGISTER) {
843
- // The live range of writable input registers always goes until the end
844
- // of the instruction.
845
- ASSERT(!cur_input->IsUsedAtStart());
846
-
847
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
848
- cur_input->set_virtual_register(next_virtual_register_++);
849
-
850
- if (RequiredRegisterKind(input_copy->virtual_register()) ==
851
- DOUBLE_REGISTERS) {
852
- double_artificial_registers_.Add(
853
- cur_input->virtual_register() - first_artificial_register_);
854
- }
855
-
856
- AddConstraintsGapMove(gap_index, input_copy, cur_input);
857
- }
858
- }
859
- }
860
-
861
- // Handle "output same as input" for second instruction.
862
- if (second != NULL && second->Output() != NULL) {
863
- LUnallocated* second_output = LUnallocated::cast(second->Output());
864
- if (second_output->HasSameAsInputPolicy()) {
865
- LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
866
- int output_vreg = second_output->VirtualRegister();
867
- int input_vreg = cur_input->VirtualRegister();
868
-
869
- LUnallocated* input_copy = cur_input->CopyUnconstrained();
870
- cur_input->set_virtual_register(second_output->virtual_register());
871
- AddConstraintsGapMove(gap_index, input_copy, cur_input);
872
-
873
- if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
874
- int index = gap_index + 1;
875
- LInstruction* instr = InstructionAt(index);
876
- if (instr->HasPointerMap()) {
877
- instr->pointer_map()->RecordPointer(input_copy);
878
- }
879
- } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
880
- // The input is assumed to immediately have a tagged representation,
881
- // before the pointer map can be used. I.e. the pointer map at the
882
- // instruction will include the output operand (whose value at the
883
- // beginning of the instruction is equal to the input operand). If
884
- // this is not desired, then the pointer map at this instruction needs
885
- // to be adjusted manually.
886
- }
887
- }
888
- }
889
- }
890
-
891
-
892
- void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
893
- int block_start = block->first_instruction_index();
894
- int index = block->last_instruction_index();
895
-
896
- LifetimePosition block_start_position =
897
- LifetimePosition::FromInstructionIndex(block_start);
898
-
899
- while (index >= block_start) {
900
- LifetimePosition curr_position =
901
- LifetimePosition::FromInstructionIndex(index);
902
-
903
- if (IsGapAt(index)) {
904
- // We have a gap at this position.
905
- LGap* gap = GapAt(index);
906
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
907
- const ZoneList<LMoveOperands>* move_operands = move->move_operands();
908
- for (int i = 0; i < move_operands->length(); ++i) {
909
- LMoveOperands* cur = &move_operands->at(i);
910
- if (cur->IsIgnored()) continue;
911
- LOperand* from = cur->source();
912
- LOperand* to = cur->destination();
913
- HPhi* phi = LookupPhi(to);
914
- LOperand* hint = to;
915
- if (phi != NULL) {
916
- // This is a phi resolving move.
917
- if (!phi->block()->IsLoopHeader()) {
918
- hint = LiveRangeFor(phi->id())->FirstHint();
919
- }
920
- } else {
921
- if (to->IsUnallocated()) {
922
- if (live->Contains(to->VirtualRegister())) {
923
- Define(curr_position, to, from);
924
- live->Remove(to->VirtualRegister());
925
- } else {
926
- cur->Eliminate();
927
- continue;
928
- }
929
- } else {
930
- Define(curr_position, to, from);
931
- }
932
- }
933
- Use(block_start_position, curr_position, from, hint);
934
- if (from->IsUnallocated()) {
935
- live->Add(from->VirtualRegister());
936
- }
937
- }
938
- } else {
939
- ASSERT(!IsGapAt(index));
940
- LInstruction* instr = InstructionAt(index);
941
-
942
- if (instr != NULL) {
943
- LOperand* output = instr->Output();
944
- if (output != NULL) {
945
- if (output->IsUnallocated()) live->Remove(output->VirtualRegister());
946
- Define(curr_position, output, NULL);
947
- }
948
-
949
- if (instr->IsMarkedAsCall()) {
950
- for (int i = 0; i < Register::kNumAllocatableRegisters; ++i) {
951
- if (output == NULL || !output->IsRegister() ||
952
- output->index() != i) {
953
- LiveRange* range = FixedLiveRangeFor(i);
954
- range->AddUseInterval(curr_position,
955
- curr_position.InstructionEnd());
956
- }
957
- }
958
- }
959
-
960
- if (instr->IsMarkedAsCall() || instr->IsMarkedAsSaveDoubles()) {
961
- for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; ++i) {
962
- if (output == NULL || !output->IsDoubleRegister() ||
963
- output->index() != i) {
964
- LiveRange* range = FixedDoubleLiveRangeFor(i);
965
- range->AddUseInterval(curr_position,
966
- curr_position.InstructionEnd());
967
- }
968
- }
969
- }
970
-
971
- for (UseIterator it(instr); it.HasNext(); it.Advance()) {
972
- LOperand* input = it.Next();
973
-
974
- LifetimePosition use_pos;
975
- if (input->IsUnallocated() &&
976
- LUnallocated::cast(input)->IsUsedAtStart()) {
977
- use_pos = curr_position;
978
- } else {
979
- use_pos = curr_position.InstructionEnd();
980
- }
981
-
982
- Use(block_start_position, use_pos, input, NULL);
983
- if (input->IsUnallocated()) live->Add(input->VirtualRegister());
984
- }
985
-
986
- for (TempIterator it(instr); it.HasNext(); it.Advance()) {
987
- LOperand* temp = it.Next();
988
- if (instr->IsMarkedAsCall()) {
989
- if (temp->IsRegister()) continue;
990
- if (temp->IsUnallocated()) {
991
- LUnallocated* temp_unalloc = LUnallocated::cast(temp);
992
- if (temp_unalloc->HasFixedPolicy()) {
993
- continue;
994
- }
995
- }
996
- }
997
- Use(block_start_position, curr_position.InstructionEnd(), temp, NULL);
998
- Define(curr_position, temp, NULL);
999
- }
1000
- }
1001
- }
1002
-
1003
- index = index - 1;
1004
- }
1005
- }
1006
-
1007
-
1008
- void LAllocator::ResolvePhis(HBasicBlock* block) {
1009
- const ZoneList<HPhi*>* phis = block->phis();
1010
- for (int i = 0; i < phis->length(); ++i) {
1011
- HPhi* phi = phis->at(i);
1012
- LUnallocated* phi_operand = new LUnallocated(LUnallocated::NONE);
1013
- phi_operand->set_virtual_register(phi->id());
1014
- for (int j = 0; j < phi->OperandCount(); ++j) {
1015
- HValue* op = phi->OperandAt(j);
1016
- LOperand* operand = NULL;
1017
- if (op->IsConstant() && op->EmitAtUses()) {
1018
- HConstant* constant = HConstant::cast(op);
1019
- operand = chunk_->DefineConstantOperand(constant);
1020
- } else {
1021
- ASSERT(!op->EmitAtUses());
1022
- LUnallocated* unalloc = new LUnallocated(LUnallocated::NONE);
1023
- unalloc->set_virtual_register(op->id());
1024
- operand = unalloc;
1025
- }
1026
- HBasicBlock* cur_block = block->predecessors()->at(j);
1027
- // The gap move must be added without any special processing as in
1028
- // the AddConstraintsGapMove.
1029
- chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
1030
- operand,
1031
- phi_operand);
1032
- }
1033
-
1034
- LiveRange* live_range = LiveRangeFor(phi->id());
1035
- LLabel* label = chunk_->GetLabel(phi->block()->block_id());
1036
- label->GetOrCreateParallelMove(LGap::START)->
1037
- AddMove(phi_operand, live_range->GetSpillOperand());
1038
- live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
1039
- }
1040
- }
1041
-
1042
-
1043
- void LAllocator::Allocate(LChunk* chunk) {
1044
- ASSERT(chunk_ == NULL);
1045
- chunk_ = chunk;
1046
- MeetRegisterConstraints();
1047
- ResolvePhis();
1048
- BuildLiveRanges();
1049
- AllocateGeneralRegisters();
1050
- AllocateDoubleRegisters();
1051
- PopulatePointerMaps();
1052
- if (has_osr_entry_) ProcessOsrEntry();
1053
- ConnectRanges();
1054
- ResolveControlFlow();
1055
- }
1056
-
1057
-
1058
- void LAllocator::MeetRegisterConstraints() {
1059
- HPhase phase("Register constraints", chunk_);
1060
- first_artificial_register_ = next_virtual_register_;
1061
- const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1062
- for (int i = 0; i < blocks->length(); ++i) {
1063
- HBasicBlock* block = blocks->at(i);
1064
- MeetRegisterConstraints(block);
1065
- }
1066
- }
1067
-
1068
-
1069
- void LAllocator::ResolvePhis() {
1070
- HPhase phase("Resolve phis", chunk_);
1071
-
1072
- // Process the blocks in reverse order.
1073
- const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1074
- for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1075
- HBasicBlock* block = blocks->at(block_id);
1076
- ResolvePhis(block);
1077
- }
1078
- }
1079
-
1080
-
1081
- void LAllocator::ResolveControlFlow(LiveRange* range,
1082
- HBasicBlock* block,
1083
- HBasicBlock* pred) {
1084
- LifetimePosition pred_end =
1085
- LifetimePosition::FromInstructionIndex(pred->last_instruction_index());
1086
- LifetimePosition cur_start =
1087
- LifetimePosition::FromInstructionIndex(block->first_instruction_index());
1088
- LiveRange* pred_cover = NULL;
1089
- LiveRange* cur_cover = NULL;
1090
- LiveRange* cur_range = range;
1091
- while (cur_range != NULL && (cur_cover == NULL || pred_cover == NULL)) {
1092
- if (cur_range->CanCover(cur_start)) {
1093
- ASSERT(cur_cover == NULL);
1094
- cur_cover = cur_range;
1095
- }
1096
- if (cur_range->CanCover(pred_end)) {
1097
- ASSERT(pred_cover == NULL);
1098
- pred_cover = cur_range;
1099
- }
1100
- cur_range = cur_range->next();
1101
- }
1102
-
1103
- if (cur_cover->IsSpilled()) return;
1104
- ASSERT(pred_cover != NULL && cur_cover != NULL);
1105
- if (pred_cover != cur_cover) {
1106
- LOperand* pred_op = pred_cover->CreateAssignedOperand();
1107
- LOperand* cur_op = cur_cover->CreateAssignedOperand();
1108
- if (!pred_op->Equals(cur_op)) {
1109
- LGap* gap = NULL;
1110
- if (block->predecessors()->length() == 1) {
1111
- gap = GapAt(block->first_instruction_index());
1112
- } else {
1113
- ASSERT(pred->end()->SecondSuccessor() == NULL);
1114
- gap = GetLastGap(pred);
1115
-
1116
- // We are going to insert a move before the branch instruction.
1117
- // Some branch instructions (e.g. loops' back edges)
1118
- // can potentially cause a GC so they have a pointer map.
1119
- // By inserting a move we essentially create a copy of a
1120
- // value which is invisible to PopulatePointerMaps(), because we store
1121
- // it into a location different from the operand of a live range
1122
- // covering a branch instruction.
1123
- // Thus we need to manually record a pointer.
1124
- if (HasTaggedValue(range->id())) {
1125
- LInstruction* branch = InstructionAt(pred->last_instruction_index());
1126
- if (branch->HasPointerMap()) {
1127
- branch->pointer_map()->RecordPointer(cur_op);
1128
- }
1129
- }
1130
- }
1131
- gap->GetOrCreateParallelMove(LGap::START)->AddMove(pred_op, cur_op);
1132
- }
1133
- }
1134
- }
1135
-
1136
-
1137
- LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1138
- int index = pos.InstructionIndex();
1139
- if (IsGapAt(index)) {
1140
- LGap* gap = GapAt(index);
1141
- return gap->GetOrCreateParallelMove(
1142
- pos.IsInstructionStart() ? LGap::START : LGap::END);
1143
- }
1144
- int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1145
- return GapAt(gap_pos)->GetOrCreateParallelMove(
1146
- (gap_pos < index) ? LGap::AFTER : LGap::BEFORE);
1147
- }
1148
-
1149
-
1150
- HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1151
- LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1152
- return gap->block();
1153
- }
1154
-
1155
-
1156
- void LAllocator::ConnectRanges() {
1157
- HPhase phase("Connect ranges", this);
1158
- for (int i = 0; i < live_ranges()->length(); ++i) {
1159
- LiveRange* first_range = live_ranges()->at(i);
1160
- if (first_range == NULL || first_range->parent() != NULL) continue;
1161
-
1162
- LiveRange* second_range = first_range->next();
1163
- while (second_range != NULL) {
1164
- LifetimePosition pos = second_range->Start();
1165
-
1166
- if (!second_range->IsSpilled()) {
1167
- // Add gap move if the two live ranges touch and there is no block
1168
- // boundary.
1169
- if (first_range->End().Value() == pos.Value()) {
1170
- bool should_insert = true;
1171
- if (IsBlockBoundary(pos)) {
1172
- should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1173
- }
1174
- if (should_insert) {
1175
- LParallelMove* move = GetConnectingParallelMove(pos);
1176
- LOperand* prev_operand = first_range->CreateAssignedOperand();
1177
- LOperand* cur_operand = second_range->CreateAssignedOperand();
1178
- move->AddMove(prev_operand, cur_operand);
1179
- }
1180
- }
1181
- }
1182
-
1183
- first_range = second_range;
1184
- second_range = second_range->next();
1185
- }
1186
- }
1187
- }
1188
-
1189
-
1190
- bool LAllocator::CanEagerlyResolveControlFlow(HBasicBlock* block) const {
1191
- if (block->predecessors()->length() != 1) return false;
1192
- return block->predecessors()->first()->block_id() == block->block_id() - 1;
1193
- }
1194
-
1195
-
1196
- void LAllocator::ResolveControlFlow() {
1197
- HPhase phase("Resolve control flow", this);
1198
- const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1199
- for (int block_id = 1; block_id < blocks->length(); ++block_id) {
1200
- HBasicBlock* block = blocks->at(block_id);
1201
- if (CanEagerlyResolveControlFlow(block)) continue;
1202
- BitVector* live = live_in_sets_[block->block_id()];
1203
- BitVector::Iterator iterator(live);
1204
- while (!iterator.Done()) {
1205
- int operand_index = iterator.Current();
1206
- for (int i = 0; i < block->predecessors()->length(); ++i) {
1207
- HBasicBlock* cur = block->predecessors()->at(i);
1208
- LiveRange* cur_range = LiveRangeFor(operand_index);
1209
- ResolveControlFlow(cur_range, block, cur);
1210
- }
1211
- iterator.Advance();
1212
- }
1213
- }
1214
- }
1215
-
1216
-
1217
- void LAllocator::BuildLiveRanges() {
1218
- HPhase phase("Build live ranges", this);
1219
- InitializeLivenessAnalysis();
1220
- // Process the blocks in reverse order.
1221
- const ZoneList<HBasicBlock*>* blocks = graph_->blocks();
1222
- for (int block_id = blocks->length() - 1; block_id >= 0; --block_id) {
1223
- HBasicBlock* block = blocks->at(block_id);
1224
- BitVector* live = ComputeLiveOut(block);
1225
- // Initially consider all live_out values live for the entire block. We
1226
- // will shorten these intervals if necessary.
1227
- AddInitialIntervals(block, live);
1228
-
1229
- // Process the instructions in reverse order, generating and killing
1230
- // live values.
1231
- ProcessInstructions(block, live);
1232
- // All phi output operands are killed by this block.
1233
- const ZoneList<HPhi*>* phis = block->phis();
1234
- for (int i = 0; i < phis->length(); ++i) {
1235
- // The live range interval already ends at the first instruction of the
1236
- // block.
1237
- HPhi* phi = phis->at(i);
1238
- live->Remove(phi->id());
1239
-
1240
- LOperand* hint = NULL;
1241
- LOperand* phi_operand = NULL;
1242
- LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1243
- LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START);
1244
- for (int j = 0; j < move->move_operands()->length(); ++j) {
1245
- LOperand* to = move->move_operands()->at(j).destination();
1246
- if (to->IsUnallocated() && to->VirtualRegister() == phi->id()) {
1247
- hint = move->move_operands()->at(j).source();
1248
- phi_operand = to;
1249
- break;
1250
- }
1251
- }
1252
- ASSERT(hint != NULL);
1253
-
1254
- LifetimePosition block_start = LifetimePosition::FromInstructionIndex(
1255
- block->first_instruction_index());
1256
- Define(block_start, phi_operand, hint);
1257
- }
1258
-
1259
- // Now live is live_in for this block except not including values live
1260
- // out on backward successor edges.
1261
- live_in_sets_[block_id] = live;
1262
-
1263
- // If this block is a loop header go back and patch up the necessary
1264
- // predecessor blocks.
1265
- if (block->IsLoopHeader()) {
1266
- // TODO(kmillikin): Need to be able to get the last block of the loop
1267
- // in the loop information. Add a live range stretching from the first
1268
- // loop instruction to the last for each value live on entry to the
1269
- // header.
1270
- HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1271
- BitVector::Iterator iterator(live);
1272
- LifetimePosition start = LifetimePosition::FromInstructionIndex(
1273
- block->first_instruction_index());
1274
- LifetimePosition end = LifetimePosition::FromInstructionIndex(
1275
- back_edge->last_instruction_index()).NextInstruction();
1276
- while (!iterator.Done()) {
1277
- int operand_index = iterator.Current();
1278
- LiveRange* range = LiveRangeFor(operand_index);
1279
- range->EnsureInterval(start, end);
1280
- iterator.Advance();
1281
- }
1282
-
1283
- for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1284
- live_in_sets_[i]->Union(*live);
1285
- }
1286
- }
1287
-
1288
- #ifdef DEBUG
1289
- if (block_id == 0) {
1290
- BitVector::Iterator iterator(live);
1291
- bool found = false;
1292
- while (!iterator.Done()) {
1293
- found = true;
1294
- int operand_index = iterator.Current();
1295
- PrintF("Function: %s\n",
1296
- *chunk_->info()->function()->debug_name()->ToCString());
1297
- PrintF("Value %d used before first definition!\n", operand_index);
1298
- LiveRange* range = LiveRangeFor(operand_index);
1299
- PrintF("First use is at %d\n", range->first_pos()->pos().Value());
1300
- iterator.Advance();
1301
- }
1302
- ASSERT(!found);
1303
- }
1304
- #endif
1305
- }
1306
- }
1307
-
1308
-
1309
- bool LAllocator::SafePointsAreInOrder() const {
1310
- const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1311
- int safe_point = 0;
1312
- for (int i = 0; i < pointer_maps->length(); ++i) {
1313
- LPointerMap* map = pointer_maps->at(i);
1314
- if (safe_point > map->lithium_position()) return false;
1315
- safe_point = map->lithium_position();
1316
- }
1317
- return true;
1318
- }
1319
-
1320
-
1321
- void LAllocator::PopulatePointerMaps() {
1322
- HPhase phase("Populate pointer maps", this);
1323
- const ZoneList<LPointerMap*>* pointer_maps = chunk_->pointer_maps();
1324
-
1325
- ASSERT(SafePointsAreInOrder());
1326
-
1327
- // Iterate over all safe point positions and record a pointer
1328
- // for all spilled live ranges at this point.
1329
- int first_safe_point_index = 0;
1330
- int last_range_start = 0;
1331
- for (int range_idx = 0; range_idx < live_ranges()->length(); ++range_idx) {
1332
- LiveRange* range = live_ranges()->at(range_idx);
1333
- if (range == NULL) continue;
1334
- // Iterate over the first parts of multi-part live ranges.
1335
- if (range->parent() != NULL) continue;
1336
- // Skip non-pointer values.
1337
- if (!HasTaggedValue(range->id())) continue;
1338
- // Skip empty live ranges.
1339
- if (range->IsEmpty()) continue;
1340
-
1341
- // Find the extent of the range and its children.
1342
- int start = range->Start().InstructionIndex();
1343
- int end = 0;
1344
- for (LiveRange* cur = range; cur != NULL; cur = cur->next()) {
1345
- LifetimePosition this_end = cur->End();
1346
- if (this_end.InstructionIndex() > end) end = this_end.InstructionIndex();
1347
- ASSERT(cur->Start().InstructionIndex() >= start);
1348
- }
1349
-
1350
- // Most of the ranges are in order, but not all. Keep an eye on when
1351
- // they step backwards and reset the first_safe_point_index so we don't
1352
- // miss any safe points.
1353
- if (start < last_range_start) {
1354
- first_safe_point_index = 0;
1355
- }
1356
- last_range_start = start;
1357
-
1358
- // Step across all the safe points that are before the start of this range,
1359
- // recording how far we step in order to save doing this for the next range.
1360
- while (first_safe_point_index < pointer_maps->length()) {
1361
- LPointerMap* map = pointer_maps->at(first_safe_point_index);
1362
- int safe_point = map->lithium_position();
1363
- if (safe_point >= start) break;
1364
- first_safe_point_index++;
1365
- }
1366
-
1367
- // Step through the safe points to see whether they are in the range.
1368
- for (int safe_point_index = first_safe_point_index;
1369
- safe_point_index < pointer_maps->length();
1370
- ++safe_point_index) {
1371
- LPointerMap* map = pointer_maps->at(safe_point_index);
1372
- int safe_point = map->lithium_position();
1373
-
1374
- // The safe points are sorted so we can stop searching here.
1375
- if (safe_point - 1 > end) break;
1376
-
1377
- // Advance to the next active range that covers the current
1378
- // safe point position.
1379
- LifetimePosition safe_point_pos =
1380
- LifetimePosition::FromInstructionIndex(safe_point);
1381
- LiveRange* cur = range;
1382
- while (cur != NULL && !cur->Covers(safe_point_pos.PrevInstruction())) {
1383
- cur = cur->next();
1384
- }
1385
- if (cur == NULL) continue;
1386
-
1387
- // Check if the live range is spilled and the safe point is after
1388
- // the spill position.
1389
- if (range->HasAllocatedSpillOperand() &&
1390
- safe_point >= range->spill_start_index()) {
1391
- TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1392
- range->id(), range->spill_start_index(), safe_point);
1393
- map->RecordPointer(range->GetSpillOperand());
1394
- }
1395
-
1396
- if (!cur->IsSpilled()) {
1397
- TraceAlloc("Pointer in register for range %d (start at %d) "
1398
- "at safe point %d\n",
1399
- cur->id(), cur->Start().Value(), safe_point);
1400
- LOperand* operand = cur->CreateAssignedOperand();
1401
- ASSERT(!operand->IsStackSlot());
1402
- map->RecordPointer(operand);
1403
- }
1404
- }
1405
- }
1406
- }
1407
-
1408
-
1409
- void LAllocator::ProcessOsrEntry() {
1410
- const ZoneList<LInstruction*>* instrs = chunk_->instructions();
1411
-
1412
- // Linear search for the OSR entry instruction in the chunk.
1413
- int index = -1;
1414
- while (++index < instrs->length() &&
1415
- !instrs->at(index)->IsOsrEntry()) {
1416
- }
1417
- ASSERT(index < instrs->length());
1418
- LOsrEntry* instruction = LOsrEntry::cast(instrs->at(index));
1419
-
1420
- LifetimePosition position = LifetimePosition::FromInstructionIndex(index);
1421
- for (int i = 0; i < live_ranges()->length(); ++i) {
1422
- LiveRange* range = live_ranges()->at(i);
1423
- if (range != NULL) {
1424
- if (range->Covers(position) &&
1425
- range->HasRegisterAssigned() &&
1426
- range->TopLevel()->HasAllocatedSpillOperand()) {
1427
- int reg_index = range->assigned_register();
1428
- LOperand* spill_operand = range->TopLevel()->GetSpillOperand();
1429
- if (range->IsDouble()) {
1430
- instruction->MarkSpilledDoubleRegister(reg_index, spill_operand);
1431
- } else {
1432
- instruction->MarkSpilledRegister(reg_index, spill_operand);
1433
- }
1434
- }
1435
- }
1436
- }
1437
- }
-
-
- void LAllocator::AllocateGeneralRegisters() {
-   HPhase phase("Allocate general registers", this);
-   num_registers_ = Register::kNumAllocatableRegisters;
-   mode_ = GENERAL_REGISTERS;
-   AllocateRegisters();
- }
-
-
- void LAllocator::AllocateDoubleRegisters() {
-   HPhase phase("Allocate double registers", this);
-   num_registers_ = DoubleRegister::kNumAllocatableRegisters;
-   mode_ = DOUBLE_REGISTERS;
-   AllocateRegisters();
- }
-
-
- void LAllocator::AllocateRegisters() {
-   ASSERT(mode_ != NONE);
-   ASSERT(unhandled_live_ranges_.is_empty());
-
-   for (int i = 0; i < live_ranges_.length(); ++i) {
-     if (live_ranges_[i] != NULL) {
-       if (RequiredRegisterKind(live_ranges_[i]->id()) == mode_) {
-         AddToUnhandledUnsorted(live_ranges_[i]);
-       }
-     }
-   }
-   SortUnhandled();
-   ASSERT(UnhandledIsSorted());
-
-   ASSERT(reusable_slots_.is_empty());
-   ASSERT(active_live_ranges_.is_empty());
-   ASSERT(inactive_live_ranges_.is_empty());
-
-   if (mode_ == DOUBLE_REGISTERS) {
-     for (int i = 0; i < fixed_double_live_ranges_.length(); ++i) {
-       LiveRange* current = fixed_double_live_ranges_.at(i);
-       if (current != NULL) {
-         AddToInactive(current);
-       }
-     }
-   } else {
-     for (int i = 0; i < fixed_live_ranges_.length(); ++i) {
-       LiveRange* current = fixed_live_ranges_.at(i);
-       if (current != NULL) {
-         AddToInactive(current);
-       }
-     }
-   }
-
-   while (!unhandled_live_ranges_.is_empty()) {
-     ASSERT(UnhandledIsSorted());
-     LiveRange* current = unhandled_live_ranges_.RemoveLast();
-     ASSERT(UnhandledIsSorted());
-     LifetimePosition position = current->Start();
-     TraceAlloc("Processing interval %d start=%d\n",
-                current->id(),
-                position.Value());
-
-     if (current->HasAllocatedSpillOperand()) {
-       TraceAlloc("Live range %d already has a spill operand\n", current->id());
-       LifetimePosition next_pos = position;
-       if (IsGapAt(next_pos.InstructionIndex())) {
-         next_pos = next_pos.NextInstruction();
-       }
-       UsePosition* pos = current->NextUsePositionRegisterIsBeneficial(next_pos);
-       // If the range already has a spill operand and it doesn't need a
-       // register immediately, split it and spill the first part of the range.
-       if (pos == NULL) {
-         Spill(current);
-         continue;
-       } else if (pos->pos().Value() >
-                  current->Start().NextInstruction().Value()) {
-         // Do not spill live range eagerly if use position that can benefit from
-         // the register is too close to the start of live range.
-         SpillBetween(current, current->Start(), pos->pos());
-         ASSERT(UnhandledIsSorted());
-         continue;
-       }
-     }
-
-     for (int i = 0; i < active_live_ranges_.length(); ++i) {
-       LiveRange* cur_active = active_live_ranges_.at(i);
-       if (cur_active->End().Value() <= position.Value()) {
-         ActiveToHandled(cur_active);
-         --i;  // The live range was removed from the list of active live ranges.
-       } else if (!cur_active->Covers(position)) {
-         ActiveToInactive(cur_active);
-         --i;  // The live range was removed from the list of active live ranges.
-       }
-     }
-
-     for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
-       LiveRange* cur_inactive = inactive_live_ranges_.at(i);
-       if (cur_inactive->End().Value() <= position.Value()) {
-         InactiveToHandled(cur_inactive);
-         --i;  // Live range was removed from the list of inactive live ranges.
-       } else if (cur_inactive->Covers(position)) {
-         InactiveToActive(cur_inactive);
-         --i;  // Live range was removed from the list of inactive live ranges.
-       }
-     }
-
-     ASSERT(!current->HasRegisterAssigned() && !current->IsSpilled());
-
-     bool result = TryAllocateFreeReg(current);
-     if (!result) {
-       AllocateBlockedReg(current);
-     }
-
-     if (current->HasRegisterAssigned()) {
-       AddToActive(current);
-     }
-   }
-
-   reusable_slots_.Rewind(0);
-   active_live_ranges_.Rewind(0);
-   inactive_live_ranges_.Rewind(0);
- }
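AllocateRegisters above is the classic linear-scan loop: pop the unhandled range with the smallest start, retire active ranges that have ended, park ranges with a lifetime hole as inactive, then try a free register and fall back to the blocked-register path. A compact, self-contained sketch of that control flow over plain integer intervals (hypothetical types, deliberately without splitting or an inactive set; not the LAllocator API):

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    // Hypothetical simplified interval; the real allocator works on LiveRange objects.
    struct Interval { int start, end, reg; };

    int main() {
      const int kNumRegs = 2;
      std::vector<Interval> unhandled = {{0, 4, -1}, {1, 6, -1}, {2, 3, -1}, {5, 8, -1}};
      // Keep the worklist sorted so the next range to process sits at the end.
      std::sort(unhandled.begin(), unhandled.end(),
                [](const Interval& a, const Interval& b) { return a.start > b.start; });

      std::vector<Interval> active;
      std::vector<bool> reg_free(kNumRegs, true);
      while (!unhandled.empty()) {
        Interval cur = unhandled.back();
        unhandled.pop_back();
        // Expire active intervals that end at or before the current start.
        for (size_t i = 0; i < active.size();) {
          if (active[i].end <= cur.start) {
            reg_free[active[i].reg] = true;
            active.erase(active.begin() + i);
          } else {
            ++i;
          }
        }
        // Try a free register; a real allocator would otherwise split or spill.
        for (int r = 0; r < kNumRegs && cur.reg < 0; ++r) {
          if (reg_free[r]) { cur.reg = r; reg_free[r] = false; active.push_back(cur); }
        }
        if (cur.reg >= 0) std::printf("[%d,%d) -> r%d\n", cur.start, cur.end, cur.reg);
        else              std::printf("[%d,%d) -> spilled\n", cur.start, cur.end);
      }
      return 0;
    }

The removed code additionally splits ranges rather than spilling them outright; that is what TryAllocateFreeReg and AllocateBlockedReg below take care of.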
-
-
- const char* LAllocator::RegisterName(int allocation_index) {
-   ASSERT(mode_ != NONE);
-   if (mode_ == GENERAL_REGISTERS) {
-     return Register::AllocationIndexToString(allocation_index);
-   } else {
-     return DoubleRegister::AllocationIndexToString(allocation_index);
-   }
- }
-
-
- void LAllocator::TraceAlloc(const char* msg, ...) {
-   if (FLAG_trace_alloc) {
-     va_list arguments;
-     va_start(arguments, msg);
-     OS::VPrint(msg, arguments);
-     va_end(arguments);
-   }
- }
-
-
- bool LAllocator::HasTaggedValue(int virtual_register) const {
-   HValue* value = graph_->LookupValue(virtual_register);
-   if (value == NULL) return false;
-   return value->representation().IsTagged();
- }
-
-
- RegisterKind LAllocator::RequiredRegisterKind(int virtual_register) const {
-   if (virtual_register < first_artificial_register_) {
-     HValue* value = graph_->LookupValue(virtual_register);
-     if (value != NULL && value->representation().IsDouble()) {
-       return DOUBLE_REGISTERS;
-     }
-   } else if (double_artificial_registers_.Contains(
-       virtual_register - first_artificial_register_)) {
-     return DOUBLE_REGISTERS;
-   }
-
-   return GENERAL_REGISTERS;
- }
-
-
- void LAllocator::RecordDefinition(HInstruction* instr, LUnallocated* operand) {
-   operand->set_virtual_register(instr->id());
- }
-
-
- void LAllocator::RecordTemporary(LUnallocated* operand) {
-   ASSERT(next_virtual_register_ < LUnallocated::kMaxVirtualRegisters);
-   if (!operand->HasFixedPolicy()) {
-     operand->set_virtual_register(next_virtual_register_++);
-   }
- }
-
-
- void LAllocator::RecordUse(HValue* value, LUnallocated* operand) {
-   operand->set_virtual_register(value->id());
- }
-
-
- int LAllocator::max_initial_value_ids() {
-   return LUnallocated::kMaxVirtualRegisters / 32;
- }
-
-
- void LAllocator::AddToActive(LiveRange* range) {
-   TraceAlloc("Add live range %d to active\n", range->id());
-   active_live_ranges_.Add(range);
- }
-
-
- void LAllocator::AddToInactive(LiveRange* range) {
-   TraceAlloc("Add live range %d to inactive\n", range->id());
-   inactive_live_ranges_.Add(range);
- }
-
-
- void LAllocator::AddToUnhandledSorted(LiveRange* range) {
-   if (range == NULL || range->IsEmpty()) return;
-   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
-   for (int i = unhandled_live_ranges_.length() - 1; i >= 0; --i) {
-     LiveRange* cur_range = unhandled_live_ranges_.at(i);
-     if (range->ShouldBeAllocatedBefore(cur_range)) {
-       TraceAlloc("Add live range %d to unhandled at %d\n", range->id(), i + 1);
-       unhandled_live_ranges_.InsertAt(i + 1, range);
-       ASSERT(UnhandledIsSorted());
-       return;
-     }
-   }
-   TraceAlloc("Add live range %d to unhandled at start\n", range->id());
-   unhandled_live_ranges_.InsertAt(0, range);
-   ASSERT(UnhandledIsSorted());
- }
-
-
- void LAllocator::AddToUnhandledUnsorted(LiveRange* range) {
-   if (range == NULL || range->IsEmpty()) return;
-   ASSERT(!range->HasRegisterAssigned() && !range->IsSpilled());
-   TraceAlloc("Add live range %d to unhandled unsorted at end\n", range->id());
-   unhandled_live_ranges_.Add(range);
- }
-
-
- static int UnhandledSortHelper(LiveRange* const* a, LiveRange* const* b) {
-   ASSERT(!(*a)->ShouldBeAllocatedBefore(*b) ||
-          !(*b)->ShouldBeAllocatedBefore(*a));
-   if ((*a)->ShouldBeAllocatedBefore(*b)) return 1;
-   if ((*b)->ShouldBeAllocatedBefore(*a)) return -1;
-   return (*a)->id() - (*b)->id();
- }
-
-
- // Sort the unhandled live ranges so that the ranges to be processed first are
- // at the end of the array list. This is convenient for the register allocation
- // algorithm because it is efficient to remove elements from the end.
- void LAllocator::SortUnhandled() {
-   TraceAlloc("Sort unhandled\n");
-   unhandled_live_ranges_.Sort(&UnhandledSortHelper);
- }
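The comparator and SortUnhandled above deliberately keep the worklist in reverse processing order, so RemoveLast() pops the next range in O(1). The same convention in a self-contained form, with a hypothetical Range stand-in and std::sort in place of the internal list sort:

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    struct Range { int id; int start; };  // hypothetical stand-in for LiveRange

    // "Should be allocated before" == smaller start; sort so those end up LAST.
    bool Later(const Range& a, const Range& b) { return a.start > b.start; }

    int main() {
      std::vector<Range> unhandled = {{0, 7}, {1, 2}, {2, 5}};
      std::sort(unhandled.begin(), unhandled.end(), Later);
      while (!unhandled.empty()) {
        Range next = unhandled.back();   // cheap pop from the end, like RemoveLast()
        unhandled.pop_back();
        std::printf("process range %d (start %d)\n", next.id, next.start);
      }
    }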
-
-
- bool LAllocator::UnhandledIsSorted() {
-   int len = unhandled_live_ranges_.length();
-   for (int i = 1; i < len; i++) {
-     LiveRange* a = unhandled_live_ranges_.at(i - 1);
-     LiveRange* b = unhandled_live_ranges_.at(i);
-     if (a->Start().Value() < b->Start().Value()) return false;
-   }
-   return true;
- }
-
-
- void LAllocator::FreeSpillSlot(LiveRange* range) {
-   // Check that we are the last range.
-   if (range->next() != NULL) return;
-
-   if (!range->TopLevel()->HasAllocatedSpillOperand()) return;
-
-   int index = range->TopLevel()->GetSpillOperand()->index();
-   if (index >= 0) {
-     reusable_slots_.Add(range);
-   }
- }
-
-
- LOperand* LAllocator::TryReuseSpillSlot(LiveRange* range) {
-   if (reusable_slots_.is_empty()) return NULL;
-   if (reusable_slots_.first()->End().Value() >
-       range->TopLevel()->Start().Value()) {
-     return NULL;
-   }
-   LOperand* result = reusable_slots_.first()->TopLevel()->GetSpillOperand();
-   reusable_slots_.Remove(0);
-   return result;
- }
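FreeSpillSlot and TryReuseSpillSlot form a small free list of stack slots: a slot is retired only when the last piece of a spilled range ends, and it is handed out again only to a range that starts at or after that end. A rough stand-alone sketch of the reuse test (hypothetical types; the real code stores LiveRange pointers and compares LifetimePositions):

    #include <cstdio>
    #include <deque>

    // Hypothetical record of a spill slot together with the end of its last use.
    struct ReusableSlot { int slot_index; int free_after; };

    static std::deque<ReusableSlot> reusable;

    int TryReuseSlot(int range_start, int next_fresh_slot) {
      if (!reusable.empty() && reusable.front().free_after <= range_start) {
        int slot = reusable.front().slot_index;
        reusable.pop_front();
        return slot;                      // recycle an old slot
      }
      return next_fresh_slot;             // otherwise take a new one
    }

    int main() {
      reusable.push_back({3, 10});        // slot 3 is dead after position 10
      std::printf("range starting at 8 gets slot %d\n", TryReuseSlot(8, 4));   // too early: fresh slot 4
      std::printf("range starting at 12 gets slot %d\n", TryReuseSlot(12, 4)); // reuses slot 3
    }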
-
-
- void LAllocator::ActiveToHandled(LiveRange* range) {
-   ASSERT(active_live_ranges_.Contains(range));
-   active_live_ranges_.RemoveElement(range);
-   TraceAlloc("Moving live range %d from active to handled\n", range->id());
-   FreeSpillSlot(range);
- }
-
-
- void LAllocator::ActiveToInactive(LiveRange* range) {
-   ASSERT(active_live_ranges_.Contains(range));
-   active_live_ranges_.RemoveElement(range);
-   inactive_live_ranges_.Add(range);
-   TraceAlloc("Moving live range %d from active to inactive\n", range->id());
- }
-
-
- void LAllocator::InactiveToHandled(LiveRange* range) {
-   ASSERT(inactive_live_ranges_.Contains(range));
-   inactive_live_ranges_.RemoveElement(range);
-   TraceAlloc("Moving live range %d from inactive to handled\n", range->id());
-   FreeSpillSlot(range);
- }
-
-
- void LAllocator::InactiveToActive(LiveRange* range) {
-   ASSERT(inactive_live_ranges_.Contains(range));
-   inactive_live_ranges_.RemoveElement(range);
-   active_live_ranges_.Add(range);
-   TraceAlloc("Moving live range %d from inactive to active\n", range->id());
- }
-
-
- // TryAllocateFreeReg and AllocateBlockedReg assume this
- // when allocating local arrays.
- STATIC_ASSERT(DoubleRegister::kNumAllocatableRegisters >=
-               Register::kNumAllocatableRegisters);
-
-
- bool LAllocator::TryAllocateFreeReg(LiveRange* current) {
-   LifetimePosition free_until_pos[DoubleRegister::kNumAllocatableRegisters];
-
-   for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
-     free_until_pos[i] = LifetimePosition::MaxPosition();
-   }
-
-   for (int i = 0; i < active_live_ranges_.length(); ++i) {
-     LiveRange* cur_active = active_live_ranges_.at(i);
-     free_until_pos[cur_active->assigned_register()] =
-         LifetimePosition::FromInstructionIndex(0);
-   }
-
-   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
-     LiveRange* cur_inactive = inactive_live_ranges_.at(i);
-     ASSERT(cur_inactive->End().Value() > current->Start().Value());
-     LifetimePosition next_intersection =
-         cur_inactive->FirstIntersection(current);
-     if (!next_intersection.IsValid()) continue;
-     int cur_reg = cur_inactive->assigned_register();
-     free_until_pos[cur_reg] = Min(free_until_pos[cur_reg], next_intersection);
-   }
-
-   UsePosition* hinted_use = current->FirstPosWithHint();
-   if (hinted_use != NULL) {
-     LOperand* hint = hinted_use->hint();
-     if (hint->IsRegister() || hint->IsDoubleRegister()) {
-       int register_index = hint->index();
-       TraceAlloc(
-           "Found reg hint %s (free until [%d) for live range %d (end %d[).\n",
-           RegisterName(register_index),
-           free_until_pos[register_index].Value(),
-           current->id(),
-           current->End().Value());
-
-       // The desired register is free until the end of the current live range.
-       if (free_until_pos[register_index].Value() >= current->End().Value()) {
-         TraceAlloc("Assigning preferred reg %s to live range %d\n",
-                    RegisterName(register_index),
-                    current->id());
-         current->set_assigned_register(register_index, mode_);
-         return true;
-       }
-     }
-   }
-
-   // Find the register which stays free for the longest time.
-   int reg = 0;
-   for (int i = 1; i < RegisterCount(); ++i) {
-     if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
-       reg = i;
-     }
-   }
-
-   LifetimePosition pos = free_until_pos[reg];
-
-   if (pos.Value() <= current->Start().Value()) {
-     // All registers are blocked.
-     return false;
-   }
-
-   if (pos.Value() < current->End().Value()) {
-     // Register reg is available at the range start but becomes blocked before
-     // the range end. Split current at position where it becomes blocked.
-     LiveRange* tail = SplitAt(current, pos);
-     AddToUnhandledSorted(tail);
-   }
-
-
-   // Register reg is available at the range start and is free until
-   // the range end.
-   ASSERT(pos.Value() >= current->End().Value());
-   TraceAlloc("Assigning free reg %s to live range %d\n",
-              RegisterName(reg),
-              current->id());
-   current->set_assigned_register(reg, mode_);
-
-   return true;
- }
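TryAllocateFreeReg above computes, per register, how long it stays free: 0 for registers held by active ranges, and the first intersection point for inactive ones. It then picks the register that stays free the longest, splitting the current range if the register is free only for a prefix. The core of that decision in miniature, with hypothetical plain-int positions instead of LifetimePosition:

    #include <climits>
    #include <cstdio>
    #include <vector>

    int main() {
      const int kNumRegs = 3;
      const int current_start = 4, current_end = 12;

      std::vector<int> free_until(kNumRegs, INT_MAX);
      free_until[0] = 0;     // r0 is held by an active range: never free here
      free_until[2] = 9;     // r2 is held by an inactive range intersecting at 9

      // Pick the register that stays free the longest.
      int reg = 0;
      for (int r = 1; r < kNumRegs; ++r)
        if (free_until[r] > free_until[reg]) reg = r;

      if (free_until[reg] <= current_start) {
        std::printf("all registers blocked\n");
      } else if (free_until[reg] < current_end) {
        std::printf("use r%d, but split the range at %d\n", reg, free_until[reg]);
      } else {
        std::printf("r%d is free for the whole range\n", reg);
      }
    }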
-
-
- void LAllocator::AllocateBlockedReg(LiveRange* current) {
-   UsePosition* register_use = current->NextRegisterPosition(current->Start());
-   if (register_use == NULL) {
-     // There is no use in the current live range that requires a register.
-     // We can just spill it.
-     Spill(current);
-     return;
-   }
-
-
-   LifetimePosition use_pos[DoubleRegister::kNumAllocatableRegisters];
-   LifetimePosition block_pos[DoubleRegister::kNumAllocatableRegisters];
-
-   for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
-     use_pos[i] = block_pos[i] = LifetimePosition::MaxPosition();
-   }
-
-   for (int i = 0; i < active_live_ranges_.length(); ++i) {
-     LiveRange* range = active_live_ranges_[i];
-     int cur_reg = range->assigned_register();
-     if (range->IsFixed() || !range->CanBeSpilled(current->Start())) {
-       block_pos[cur_reg] = use_pos[cur_reg] =
-           LifetimePosition::FromInstructionIndex(0);
-     } else {
-       UsePosition* next_use = range->NextUsePositionRegisterIsBeneficial(
-           current->Start());
-       if (next_use == NULL) {
-         use_pos[cur_reg] = range->End();
-       } else {
-         use_pos[cur_reg] = next_use->pos();
-       }
-     }
-   }
-
-   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
-     LiveRange* range = inactive_live_ranges_.at(i);
-     ASSERT(range->End().Value() > current->Start().Value());
-     LifetimePosition next_intersection = range->FirstIntersection(current);
-     if (!next_intersection.IsValid()) continue;
-     int cur_reg = range->assigned_register();
-     if (range->IsFixed()) {
-       block_pos[cur_reg] = Min(block_pos[cur_reg], next_intersection);
-       use_pos[cur_reg] = Min(block_pos[cur_reg], use_pos[cur_reg]);
-     } else {
-       use_pos[cur_reg] = Min(use_pos[cur_reg], next_intersection);
-     }
-   }
-
-   int reg = 0;
-   for (int i = 1; i < RegisterCount(); ++i) {
-     if (use_pos[i].Value() > use_pos[reg].Value()) {
-       reg = i;
-     }
-   }
-
-   LifetimePosition pos = use_pos[reg];
-
-   if (pos.Value() < register_use->pos().Value()) {
-     // All registers are blocked before the first use that requires a register.
-     // Spill starting part of live range up to that use.
-     //
-     // Corner case: the first use position is equal to the start of the range.
-     // In this case we have nothing to spill and SpillBetween will just return
-     // this range to the list of unhandled ones. This will lead to the infinite
-     // loop.
-     ASSERT(current->Start().Value() < register_use->pos().Value());
-     SpillBetween(current, current->Start(), register_use->pos());
-     return;
-   }
-
-   if (block_pos[reg].Value() < current->End().Value()) {
-     // Register becomes blocked before the current range end. Split before that
-     // position.
-     LiveRange* tail = SplitBetween(current,
-                                    current->Start(),
-                                    block_pos[reg].InstructionStart());
-     AddToUnhandledSorted(tail);
-   }
-
-   // Register reg is not blocked for the whole range.
-   ASSERT(block_pos[reg].Value() >= current->End().Value());
-   TraceAlloc("Assigning blocked reg %s to live range %d\n",
-              RegisterName(reg),
-              current->id());
-   current->set_assigned_register(reg, mode_);
-
-   // This register was not free. Thus we need to find and spill
-   // parts of active and inactive live regions that use the same register
-   // at the same lifetime positions as current.
-   SplitAndSpillIntersecting(current);
- }
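When nothing is free, AllocateBlockedReg scores each register by the next position at which its current holder actually needs it (use_pos), with fixed or unspillable holders pinned to 0, and evicts the holder whose next use is farthest away; if even that position comes before the current range's first register use, the current range is spilled instead. A toy version of that choice (hypothetical ints, no splitting):

    #include <climits>
    #include <cstdio>
    #include <vector>

    int main() {
      const int kNumRegs = 3;
      const int first_register_use = 6;  // first position where the current range needs a register

      // Next position at which each register's holder needs it again (INT_MAX = never).
      std::vector<int> use_pos = {5, INT_MAX, 0};  // r2 is fixed/unspillable, so 0

      int reg = 0;
      for (int r = 1; r < kNumRegs; ++r)
        if (use_pos[r] > use_pos[reg]) reg = r;

      if (use_pos[reg] < first_register_use) {
        std::printf("spill the current range up to position %d\n", first_register_use);
      } else {
        std::printf("evict the holder of r%d and take it\n", reg);
      }
    }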
-
-
- void LAllocator::SplitAndSpillIntersecting(LiveRange* current) {
-   ASSERT(current->HasRegisterAssigned());
-   int reg = current->assigned_register();
-   LifetimePosition split_pos = current->Start();
-   for (int i = 0; i < active_live_ranges_.length(); ++i) {
-     LiveRange* range = active_live_ranges_[i];
-     if (range->assigned_register() == reg) {
-       UsePosition* next_pos = range->NextRegisterPosition(current->Start());
-       if (next_pos == NULL) {
-         SpillAfter(range, split_pos);
-       } else {
-         SpillBetween(range, split_pos, next_pos->pos());
-       }
-       ActiveToHandled(range);
-       --i;
-     }
-   }
-
-   for (int i = 0; i < inactive_live_ranges_.length(); ++i) {
-     LiveRange* range = inactive_live_ranges_[i];
-     ASSERT(range->End().Value() > current->Start().Value());
-     if (range->assigned_register() == reg && !range->IsFixed()) {
-       LifetimePosition next_intersection = range->FirstIntersection(current);
-       if (next_intersection.IsValid()) {
-         UsePosition* next_pos = range->NextRegisterPosition(current->Start());
-         if (next_pos == NULL) {
-           SpillAfter(range, split_pos);
-         } else {
-           next_intersection = Min(next_intersection, next_pos->pos());
-           SpillBetween(range, split_pos, next_intersection);
-         }
-         InactiveToHandled(range);
-         --i;
-       }
-     }
-   }
- }
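SplitAndSpillIntersecting then evicts the previous holders of the chosen register: each intersecting range is spilled from the current start either to its end (if it never needs a register again) or only up to its next register use, so the tail can compete for a register later. Sketched with hypothetical ints:

    #include <cstdio>

    // Hypothetical helper: decide how much of an evicted range to spill.
    // next_use < 0 means the evicted range never needs a register again.
    void PlanEviction(int split_pos, int next_use) {
      if (next_use < 0)
        std::printf("spill everything after %d\n", split_pos);
      else
        std::printf("spill only [%d, %d), keep the rest for later\n", split_pos, next_use);
    }

    int main() {
      PlanEviction(4, -1);
      PlanEviction(4, 9);
    }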
-
-
- bool LAllocator::IsBlockBoundary(LifetimePosition pos) {
-   return pos.IsInstructionStart() &&
-          InstructionAt(pos.InstructionIndex())->IsLabel();
- }
-
-
- LiveRange* LAllocator::SplitAt(LiveRange* range, LifetimePosition pos) {
-   ASSERT(!range->IsFixed());
-   TraceAlloc("Splitting live range %d at %d\n", range->id(), pos.Value());
-
-   if (pos.Value() <= range->Start().Value()) return range;
-
-   // We can't properly connect live ranges if the split occurred at the end
-   // of a control instruction.
-   ASSERT(pos.IsInstructionStart() ||
-          !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
-
-   LiveRange* result = LiveRangeFor(next_virtual_register_++);
-   range->SplitAt(pos, result);
-   return result;
- }
-
-
- LiveRange* LAllocator::SplitBetween(LiveRange* range,
-                                     LifetimePosition start,
-                                     LifetimePosition end) {
-   ASSERT(!range->IsFixed());
-   TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
-              range->id(),
-              start.Value(),
-              end.Value());
-
-   LifetimePosition split_pos = FindOptimalSplitPos(start, end);
-   ASSERT(split_pos.Value() >= start.Value());
-   return SplitAt(range, split_pos);
- }
-
-
- LifetimePosition LAllocator::FindOptimalSplitPos(LifetimePosition start,
-                                                  LifetimePosition end) {
-   int start_instr = start.InstructionIndex();
-   int end_instr = end.InstructionIndex();
-   ASSERT(start_instr <= end_instr);
-
-   // We have no choice.
-   if (start_instr == end_instr) return end;
-
-   HBasicBlock* end_block = GetBlock(start);
-   HBasicBlock* start_block = GetBlock(end);
-
-   if (end_block == start_block) {
-     // The interval is split in the same basic block. Split at the latest
-     // possible position.
-     return end;
-   }
-
-   HBasicBlock* block = end_block;
-   // Find header of outermost loop.
-   while (block->parent_loop_header() != NULL &&
-          block->parent_loop_header()->block_id() > start_block->block_id()) {
-     block = block->parent_loop_header();
-   }
-
-   if (block == end_block) return end;
-
-   return LifetimePosition::FromInstructionIndex(
-       block->first_instruction_index());
- }
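FindOptimalSplitPos prefers the latest position in the window, but when the window reaches into a loop it hoists the split up to the first instruction of the outermost enclosing loop header inside the window, so spill code stays out of the loop body. A condensed version of that walk over hypothetical block records with parent-loop links:

    #include <cstdio>
    #include <vector>

    // Hypothetical basic-block record: id, first instruction, enclosing loop header (-1 = none).
    struct Block { int id; int first_instruction; int parent_loop_header; };

    int main() {
      // block 0 (before the loop) -> block 1 (loop header) -> block 2 (loop body)
      std::vector<Block> blocks = {{0, 0, -1}, {1, 10, -1}, {2, 20, 1}};

      int start_block = 0;   // block containing the start of the split window
      int end_block = 2;     // block containing the end of the split window
      int end_pos = 25;

      // Walk up to the outermost loop header that still lies after start_block.
      int b = end_block;
      while (blocks[b].parent_loop_header != -1 &&
             blocks[blocks[b].parent_loop_header].id > blocks[start_block].id) {
        b = blocks[b].parent_loop_header;
      }
      int split = (b == end_block) ? end_pos : blocks[b].first_instruction;
      std::printf("split at instruction %d\n", split);  // 10: hoisted to the loop header
    }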
-
-
- void LAllocator::SpillAfter(LiveRange* range, LifetimePosition pos) {
-   LiveRange* second_part = SplitAt(range, pos);
-   Spill(second_part);
- }
-
-
- void LAllocator::SpillBetween(LiveRange* range,
-                               LifetimePosition start,
-                               LifetimePosition end) {
-   ASSERT(start.Value() < end.Value());
-   LiveRange* second_part = SplitAt(range, start);
-
-   if (second_part->Start().Value() < end.Value()) {
-     // The split result intersects with [start, end[.
-     // Split it at position between ]start+1, end[, spill the middle part
-     // and put the rest to unhandled.
-     LiveRange* third_part = SplitBetween(
-         second_part,
-         second_part->Start().InstructionEnd(),
-         end.PrevInstruction().InstructionEnd());
-
-     ASSERT(third_part != second_part);
-
-     Spill(second_part);
-     AddToUnhandledSorted(third_part);
-   } else {
-     // The split result does not intersect with [start, end[.
-     // Nothing to spill. Just put it to unhandled as whole.
-     AddToUnhandledSorted(second_part);
-   }
- }
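SpillBetween cuts a range into up to three pieces: the head before `start` keeps its current assignment, the middle covering [start, end[ is spilled, and the tail from `end` on is re-queued as unhandled so it can still get a register. On plain intervals this is roughly (hypothetical types; the real code splits LiveRange objects at instruction boundaries):

    #include <cstdio>

    struct Piece { int from, to; };  // hypothetical half-open interval

    int main() {
      Piece range = {2, 20};
      int start = 5, end = 11;       // window in which no register is available

      Piece head = {range.from, start};  // keeps its current assignment
      Piece mid  = {start, end};         // spilled to the stack
      Piece tail = {end, range.to};      // re-queued as unhandled

      std::printf("keep [%d,%d), spill [%d,%d), retry [%d,%d)\n",
                  head.from, head.to, mid.from, mid.to, tail.from, tail.to);
    }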
-
-
- void LAllocator::Spill(LiveRange* range) {
-   ASSERT(!range->IsSpilled());
-   TraceAlloc("Spilling live range %d\n", range->id());
-   LiveRange* first = range->TopLevel();
-
-   if (!first->HasAllocatedSpillOperand()) {
-     LOperand* op = TryReuseSpillSlot(range);
-     if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
-     first->SetSpillOperand(op);
-   }
-   range->MakeSpilled();
- }
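Spill stores the stack slot on the top-level range, so every child produced by later splits shares one canonical slot, and a fresh slot is taken only when no retired slot could be reused. A sketch of that invariant with hypothetical types:

    #include <cstdio>

    // Hypothetical: child ranges share the spill slot stored on their top-level parent.
    struct TopLevel { int spill_slot = -1; };
    struct Child    { TopLevel* top; };

    void Spill(Child& c, int* next_slot) {
      if (c.top->spill_slot < 0) c.top->spill_slot = (*next_slot)++;  // assign once
      std::printf("spilled to slot %d\n", c.top->spill_slot);
    }

    int main() {
      TopLevel top;
      Child a{&top}, b{&top};     // two pieces of the same original range
      int next_slot = 0;
      Spill(a, &next_slot);       // slot 0
      Spill(b, &next_slot);       // still slot 0
    }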
-
-
- int LAllocator::RegisterCount() const {
-   return num_registers_;
- }
-
-
- #ifdef DEBUG
-
-
- void LAllocator::Verify() const {
-   for (int i = 0; i < live_ranges()->length(); ++i) {
-     LiveRange* current = live_ranges()->at(i);
-     if (current != NULL) current->Verify();
-   }
- }
-
-
- #endif
-
-
- } } // namespace v8::internal