therubyracer 0.9.0beta4 → 0.9.0beta5

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic; see the advisory details on RubyGems.org for more information.

Files changed (771)
  1. data/.gitignore +1 -0
  2. data/Rakefile +0 -11
  3. data/ext/v8/extconf.rb +9 -9
  4. data/ext/v8/v8_external.cpp +40 -10
  5. data/lib/v8/cli.rb +2 -1
  6. data/lib/v8/version.rb +1 -1
  7. data/spec/redjs/.gitignore +1 -0
  8. data/therubyracer.gemspec +2 -3
  9. metadata +33 -779
  10. data/ext/v8/upstream/Makefile +0 -38
  11. data/ext/v8/upstream/detect_cpu.rb +0 -27
  12. data/ext/v8/upstream/fpic-on-linux-amd64.patch +0 -13
  13. data/ext/v8/upstream/scons/CHANGES.txt +0 -5183
  14. data/ext/v8/upstream/scons/LICENSE.txt +0 -20
  15. data/ext/v8/upstream/scons/MANIFEST +0 -202
  16. data/ext/v8/upstream/scons/PKG-INFO +0 -13
  17. data/ext/v8/upstream/scons/README.txt +0 -273
  18. data/ext/v8/upstream/scons/RELEASE.txt +0 -1040
  19. data/ext/v8/upstream/scons/engine/SCons/Action.py +0 -1256
  20. data/ext/v8/upstream/scons/engine/SCons/Builder.py +0 -868
  21. data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +0 -217
  22. data/ext/v8/upstream/scons/engine/SCons/Conftest.py +0 -794
  23. data/ext/v8/upstream/scons/engine/SCons/Debug.py +0 -237
  24. data/ext/v8/upstream/scons/engine/SCons/Defaults.py +0 -485
  25. data/ext/v8/upstream/scons/engine/SCons/Environment.py +0 -2327
  26. data/ext/v8/upstream/scons/engine/SCons/Errors.py +0 -207
  27. data/ext/v8/upstream/scons/engine/SCons/Executor.py +0 -636
  28. data/ext/v8/upstream/scons/engine/SCons/Job.py +0 -435
  29. data/ext/v8/upstream/scons/engine/SCons/Memoize.py +0 -292
  30. data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +0 -153
  31. data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +0 -3220
  32. data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +0 -128
  33. data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +0 -1341
  34. data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +0 -50
  35. data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +0 -50
  36. data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +0 -50
  37. data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +0 -50
  38. data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +0 -76
  39. data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +0 -74
  40. data/ext/v8/upstream/scons/engine/SCons/PathList.py +0 -232
  41. data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +0 -236
  42. data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +0 -70
  43. data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +0 -55
  44. data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +0 -46
  45. data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +0 -46
  46. data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +0 -44
  47. data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +0 -58
  48. data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +0 -264
  49. data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +0 -50
  50. data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +0 -386
  51. data/ext/v8/upstream/scons/engine/SCons/SConf.py +0 -1038
  52. data/ext/v8/upstream/scons/engine/SCons/SConsign.py +0 -381
  53. data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +0 -132
  54. data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +0 -74
  55. data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +0 -111
  56. data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +0 -320
  57. data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +0 -48
  58. data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +0 -378
  59. data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +0 -103
  60. data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +0 -55
  61. data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +0 -415
  62. data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +0 -386
  63. data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +0 -1360
  64. data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +0 -944
  65. data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +0 -642
  66. data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +0 -414
  67. data/ext/v8/upstream/scons/engine/SCons/Sig.py +0 -63
  68. data/ext/v8/upstream/scons/engine/SCons/Subst.py +0 -911
  69. data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +0 -1030
  70. data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +0 -61
  71. data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +0 -65
  72. data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +0 -73
  73. data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +0 -247
  74. data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +0 -324
  75. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +0 -56
  76. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +0 -61
  77. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +0 -210
  78. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +0 -84
  79. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +0 -321
  80. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +0 -367
  81. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +0 -497
  82. data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +0 -104
  83. data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +0 -138
  84. data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +0 -64
  85. data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +0 -64
  86. data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +0 -71
  87. data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +0 -675
  88. data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +0 -82
  89. data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +0 -74
  90. data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +0 -80
  91. data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +0 -76
  92. data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +0 -71
  93. data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +0 -63
  94. data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +0 -78
  95. data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +0 -82
  96. data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +0 -99
  97. data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +0 -114
  98. data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +0 -58
  99. data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +0 -50
  100. data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +0 -224
  101. data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +0 -64
  102. data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +0 -125
  103. data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +0 -94
  104. data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +0 -62
  105. data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +0 -62
  106. data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +0 -63
  107. data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +0 -98
  108. data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +0 -63
  109. data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +0 -90
  110. data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +0 -73
  111. data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +0 -53
  112. data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +0 -80
  113. data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +0 -64
  114. data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +0 -63
  115. data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +0 -81
  116. data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +0 -85
  117. data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +0 -53
  118. data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +0 -77
  119. data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +0 -59
  120. data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +0 -52
  121. data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +0 -72
  122. data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +0 -90
  123. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +0 -59
  124. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +0 -60
  125. data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +0 -229
  126. data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +0 -490
  127. data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +0 -71
  128. data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +0 -110
  129. data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +0 -234
  130. data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +0 -138
  131. data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +0 -79
  132. data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +0 -99
  133. data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +0 -121
  134. data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +0 -112
  135. data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +0 -63
  136. data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +0 -77
  137. data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +0 -90
  138. data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +0 -159
  139. data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +0 -64
  140. data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +0 -266
  141. data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +0 -50
  142. data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +0 -269
  143. data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +0 -1439
  144. data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +0 -208
  145. data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +0 -107
  146. data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +0 -72
  147. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +0 -314
  148. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +0 -185
  149. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +0 -526
  150. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +0 -367
  151. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +0 -43
  152. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +0 -43
  153. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +0 -43
  154. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +0 -44
  155. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +0 -44
  156. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +0 -44
  157. data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +0 -78
  158. data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +0 -83
  159. data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +0 -108
  160. data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +0 -336
  161. data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +0 -121
  162. data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +0 -70
  163. data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +0 -132
  164. data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +0 -68
  165. data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +0 -58
  166. data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +0 -53
  167. data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +0 -63
  168. data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +0 -67
  169. data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +0 -142
  170. data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +0 -58
  171. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +0 -63
  172. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +0 -64
  173. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +0 -64
  174. data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +0 -77
  175. data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +0 -186
  176. data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +0 -73
  177. data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +0 -805
  178. data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +0 -175
  179. data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +0 -53
  180. data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +0 -100
  181. data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +0 -131
  182. data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +0 -100
  183. data/ext/v8/upstream/scons/engine/SCons/Util.py +0 -1645
  184. data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +0 -91
  185. data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +0 -107
  186. data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +0 -139
  187. data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +0 -109
  188. data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +0 -147
  189. data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +0 -317
  190. data/ext/v8/upstream/scons/engine/SCons/Warnings.py +0 -228
  191. data/ext/v8/upstream/scons/engine/SCons/__init__.py +0 -49
  192. data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +0 -302
  193. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +0 -98
  194. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +0 -91
  195. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +0 -124
  196. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +0 -1725
  197. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +0 -583
  198. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +0 -176
  199. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +0 -325
  200. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +0 -1296
  201. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +0 -382
  202. data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +0 -187
  203. data/ext/v8/upstream/scons/engine/SCons/cpp.py +0 -598
  204. data/ext/v8/upstream/scons/engine/SCons/dblite.py +0 -248
  205. data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +0 -77
  206. data/ext/v8/upstream/scons/os_spawnv_fix.diff +0 -83
  207. data/ext/v8/upstream/scons/scons-time.1 +0 -1017
  208. data/ext/v8/upstream/scons/scons.1 +0 -15179
  209. data/ext/v8/upstream/scons/sconsign.1 +0 -208
  210. data/ext/v8/upstream/scons/script/scons +0 -184
  211. data/ext/v8/upstream/scons/script/scons-time +0 -1529
  212. data/ext/v8/upstream/scons/script/scons.bat +0 -31
  213. data/ext/v8/upstream/scons/script/sconsign +0 -508
  214. data/ext/v8/upstream/scons/setup.cfg +0 -6
  215. data/ext/v8/upstream/scons/setup.py +0 -427
  216. data/ext/v8/upstream/v8/.gitignore +0 -33
  217. data/ext/v8/upstream/v8/AUTHORS +0 -42
  218. data/ext/v8/upstream/v8/ChangeLog +0 -2663
  219. data/ext/v8/upstream/v8/LICENSE +0 -52
  220. data/ext/v8/upstream/v8/LICENSE.strongtalk +0 -29
  221. data/ext/v8/upstream/v8/LICENSE.v8 +0 -26
  222. data/ext/v8/upstream/v8/LICENSE.valgrind +0 -45
  223. data/ext/v8/upstream/v8/SConstruct +0 -1473
  224. data/ext/v8/upstream/v8/build/README.txt +0 -25
  225. data/ext/v8/upstream/v8/build/all.gyp +0 -18
  226. data/ext/v8/upstream/v8/build/armu.gypi +0 -32
  227. data/ext/v8/upstream/v8/build/common.gypi +0 -82
  228. data/ext/v8/upstream/v8/build/gyp_v8 +0 -145
  229. data/ext/v8/upstream/v8/include/v8-debug.h +0 -394
  230. data/ext/v8/upstream/v8/include/v8-preparser.h +0 -116
  231. data/ext/v8/upstream/v8/include/v8-profiler.h +0 -505
  232. data/ext/v8/upstream/v8/include/v8-testing.h +0 -104
  233. data/ext/v8/upstream/v8/include/v8.h +0 -4000
  234. data/ext/v8/upstream/v8/include/v8stdint.h +0 -53
  235. data/ext/v8/upstream/v8/preparser/SConscript +0 -38
  236. data/ext/v8/upstream/v8/preparser/preparser-process.cc +0 -169
  237. data/ext/v8/upstream/v8/src/SConscript +0 -380
  238. data/ext/v8/upstream/v8/src/accessors.cc +0 -766
  239. data/ext/v8/upstream/v8/src/accessors.h +0 -121
  240. data/ext/v8/upstream/v8/src/allocation-inl.h +0 -49
  241. data/ext/v8/upstream/v8/src/allocation.cc +0 -122
  242. data/ext/v8/upstream/v8/src/allocation.h +0 -143
  243. data/ext/v8/upstream/v8/src/api.cc +0 -5678
  244. data/ext/v8/upstream/v8/src/api.h +0 -572
  245. data/ext/v8/upstream/v8/src/apinatives.js +0 -110
  246. data/ext/v8/upstream/v8/src/apiutils.h +0 -73
  247. data/ext/v8/upstream/v8/src/arguments.h +0 -116
  248. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +0 -353
  249. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +0 -2877
  250. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +0 -1382
  251. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +0 -1634
  252. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +0 -6917
  253. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +0 -623
  254. data/ext/v8/upstream/v8/src/arm/codegen-arm-inl.h +0 -48
  255. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +0 -7437
  256. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +0 -595
  257. data/ext/v8/upstream/v8/src/arm/constants-arm.cc +0 -152
  258. data/ext/v8/upstream/v8/src/arm/constants-arm.h +0 -778
  259. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +0 -149
  260. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +0 -317
  261. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +0 -737
  262. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +0 -1503
  263. data/ext/v8/upstream/v8/src/arm/frames-arm.cc +0 -45
  264. data/ext/v8/upstream/v8/src/arm/frames-arm.h +0 -168
  265. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +0 -4374
  266. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +0 -1793
  267. data/ext/v8/upstream/v8/src/arm/jump-target-arm.cc +0 -174
  268. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +0 -2120
  269. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +0 -2179
  270. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +0 -4132
  271. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +0 -329
  272. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +0 -305
  273. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.h +0 -84
  274. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +0 -2939
  275. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +0 -1071
  276. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +0 -1287
  277. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +0 -253
  278. data/ext/v8/upstream/v8/src/arm/register-allocator-arm-inl.h +0 -100
  279. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.cc +0 -63
  280. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.h +0 -44
  281. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +0 -3288
  282. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +0 -413
  283. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +0 -4034
  284. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm-inl.h +0 -59
  285. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +0 -843
  286. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +0 -523
  287. data/ext/v8/upstream/v8/src/array.js +0 -1249
  288. data/ext/v8/upstream/v8/src/assembler.cc +0 -1067
  289. data/ext/v8/upstream/v8/src/assembler.h +0 -823
  290. data/ext/v8/upstream/v8/src/ast-inl.h +0 -112
  291. data/ext/v8/upstream/v8/src/ast.cc +0 -1078
  292. data/ext/v8/upstream/v8/src/ast.h +0 -2234
  293. data/ext/v8/upstream/v8/src/atomicops.h +0 -167
  294. data/ext/v8/upstream/v8/src/atomicops_internals_arm_gcc.h +0 -145
  295. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +0 -169
  296. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.cc +0 -126
  297. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.h +0 -287
  298. data/ext/v8/upstream/v8/src/atomicops_internals_x86_macosx.h +0 -301
  299. data/ext/v8/upstream/v8/src/atomicops_internals_x86_msvc.h +0 -203
  300. data/ext/v8/upstream/v8/src/bignum-dtoa.cc +0 -655
  301. data/ext/v8/upstream/v8/src/bignum-dtoa.h +0 -81
  302. data/ext/v8/upstream/v8/src/bignum.cc +0 -768
  303. data/ext/v8/upstream/v8/src/bignum.h +0 -140
  304. data/ext/v8/upstream/v8/src/bootstrapper.cc +0 -2138
  305. data/ext/v8/upstream/v8/src/bootstrapper.h +0 -185
  306. data/ext/v8/upstream/v8/src/builtins.cc +0 -1708
  307. data/ext/v8/upstream/v8/src/builtins.h +0 -368
  308. data/ext/v8/upstream/v8/src/bytecodes-irregexp.h +0 -105
  309. data/ext/v8/upstream/v8/src/cached-powers.cc +0 -177
  310. data/ext/v8/upstream/v8/src/cached-powers.h +0 -65
  311. data/ext/v8/upstream/v8/src/char-predicates-inl.h +0 -94
  312. data/ext/v8/upstream/v8/src/char-predicates.h +0 -65
  313. data/ext/v8/upstream/v8/src/checks.cc +0 -110
  314. data/ext/v8/upstream/v8/src/checks.h +0 -296
  315. data/ext/v8/upstream/v8/src/circular-queue-inl.h +0 -53
  316. data/ext/v8/upstream/v8/src/circular-queue.cc +0 -122
  317. data/ext/v8/upstream/v8/src/circular-queue.h +0 -103
  318. data/ext/v8/upstream/v8/src/code-stubs.cc +0 -240
  319. data/ext/v8/upstream/v8/src/code-stubs.h +0 -971
  320. data/ext/v8/upstream/v8/src/code.h +0 -68
  321. data/ext/v8/upstream/v8/src/codegen-inl.h +0 -68
  322. data/ext/v8/upstream/v8/src/codegen.cc +0 -505
  323. data/ext/v8/upstream/v8/src/codegen.h +0 -245
  324. data/ext/v8/upstream/v8/src/compilation-cache.cc +0 -540
  325. data/ext/v8/upstream/v8/src/compilation-cache.h +0 -287
  326. data/ext/v8/upstream/v8/src/compiler.cc +0 -792
  327. data/ext/v8/upstream/v8/src/compiler.h +0 -307
  328. data/ext/v8/upstream/v8/src/contexts.cc +0 -327
  329. data/ext/v8/upstream/v8/src/contexts.h +0 -382
  330. data/ext/v8/upstream/v8/src/conversions-inl.h +0 -110
  331. data/ext/v8/upstream/v8/src/conversions.cc +0 -1125
  332. data/ext/v8/upstream/v8/src/conversions.h +0 -122
  333. data/ext/v8/upstream/v8/src/counters.cc +0 -93
  334. data/ext/v8/upstream/v8/src/counters.h +0 -254
  335. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +0 -101
  336. data/ext/v8/upstream/v8/src/cpu-profiler.cc +0 -606
  337. data/ext/v8/upstream/v8/src/cpu-profiler.h +0 -305
  338. data/ext/v8/upstream/v8/src/cpu.h +0 -67
  339. data/ext/v8/upstream/v8/src/d8-debug.cc +0 -367
  340. data/ext/v8/upstream/v8/src/d8-debug.h +0 -158
  341. data/ext/v8/upstream/v8/src/d8-posix.cc +0 -695
  342. data/ext/v8/upstream/v8/src/d8-readline.cc +0 -128
  343. data/ext/v8/upstream/v8/src/d8-windows.cc +0 -42
  344. data/ext/v8/upstream/v8/src/d8.cc +0 -796
  345. data/ext/v8/upstream/v8/src/d8.gyp +0 -88
  346. data/ext/v8/upstream/v8/src/d8.h +0 -231
  347. data/ext/v8/upstream/v8/src/d8.js +0 -2798
  348. data/ext/v8/upstream/v8/src/data-flow.cc +0 -545
  349. data/ext/v8/upstream/v8/src/data-flow.h +0 -379
  350. data/ext/v8/upstream/v8/src/date.js +0 -1103
  351. data/ext/v8/upstream/v8/src/dateparser-inl.h +0 -125
  352. data/ext/v8/upstream/v8/src/dateparser.cc +0 -178
  353. data/ext/v8/upstream/v8/src/dateparser.h +0 -265
  354. data/ext/v8/upstream/v8/src/debug-agent.cc +0 -447
  355. data/ext/v8/upstream/v8/src/debug-agent.h +0 -129
  356. data/ext/v8/upstream/v8/src/debug-debugger.js +0 -2569
  357. data/ext/v8/upstream/v8/src/debug.cc +0 -3188
  358. data/ext/v8/upstream/v8/src/debug.h +0 -1055
  359. data/ext/v8/upstream/v8/src/deoptimizer.cc +0 -1296
  360. data/ext/v8/upstream/v8/src/deoptimizer.h +0 -629
  361. data/ext/v8/upstream/v8/src/disasm.h +0 -80
  362. data/ext/v8/upstream/v8/src/disassembler.cc +0 -339
  363. data/ext/v8/upstream/v8/src/disassembler.h +0 -56
  364. data/ext/v8/upstream/v8/src/diy-fp.cc +0 -58
  365. data/ext/v8/upstream/v8/src/diy-fp.h +0 -117
  366. data/ext/v8/upstream/v8/src/double.h +0 -238
  367. data/ext/v8/upstream/v8/src/dtoa.cc +0 -103
  368. data/ext/v8/upstream/v8/src/dtoa.h +0 -85
  369. data/ext/v8/upstream/v8/src/execution.cc +0 -791
  370. data/ext/v8/upstream/v8/src/execution.h +0 -291
  371. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +0 -250
  372. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +0 -89
  373. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +0 -55
  374. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +0 -284
  375. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.h +0 -64
  376. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +0 -141
  377. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.h +0 -50
  378. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +0 -58
  379. data/ext/v8/upstream/v8/src/extensions/gc-extension.h +0 -49
  380. data/ext/v8/upstream/v8/src/factory.cc +0 -1194
  381. data/ext/v8/upstream/v8/src/factory.h +0 -436
  382. data/ext/v8/upstream/v8/src/fast-dtoa.cc +0 -736
  383. data/ext/v8/upstream/v8/src/fast-dtoa.h +0 -83
  384. data/ext/v8/upstream/v8/src/fixed-dtoa.cc +0 -405
  385. data/ext/v8/upstream/v8/src/fixed-dtoa.h +0 -55
  386. data/ext/v8/upstream/v8/src/flag-definitions.h +0 -556
  387. data/ext/v8/upstream/v8/src/flags.cc +0 -551
  388. data/ext/v8/upstream/v8/src/flags.h +0 -79
  389. data/ext/v8/upstream/v8/src/frame-element.cc +0 -37
  390. data/ext/v8/upstream/v8/src/frame-element.h +0 -269
  391. data/ext/v8/upstream/v8/src/frames-inl.h +0 -236
  392. data/ext/v8/upstream/v8/src/frames.cc +0 -1273
  393. data/ext/v8/upstream/v8/src/frames.h +0 -854
  394. data/ext/v8/upstream/v8/src/full-codegen.cc +0 -1385
  395. data/ext/v8/upstream/v8/src/full-codegen.h +0 -753
  396. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +0 -91
  397. data/ext/v8/upstream/v8/src/func-name-inferrer.h +0 -111
  398. data/ext/v8/upstream/v8/src/gdb-jit.cc +0 -1548
  399. data/ext/v8/upstream/v8/src/gdb-jit.h +0 -138
  400. data/ext/v8/upstream/v8/src/global-handles.cc +0 -596
  401. data/ext/v8/upstream/v8/src/global-handles.h +0 -239
  402. data/ext/v8/upstream/v8/src/globals.h +0 -325
  403. data/ext/v8/upstream/v8/src/handles-inl.h +0 -177
  404. data/ext/v8/upstream/v8/src/handles.cc +0 -965
  405. data/ext/v8/upstream/v8/src/handles.h +0 -372
  406. data/ext/v8/upstream/v8/src/hashmap.cc +0 -230
  407. data/ext/v8/upstream/v8/src/hashmap.h +0 -121
  408. data/ext/v8/upstream/v8/src/heap-inl.h +0 -703
  409. data/ext/v8/upstream/v8/src/heap-profiler.cc +0 -1173
  410. data/ext/v8/upstream/v8/src/heap-profiler.h +0 -396
  411. data/ext/v8/upstream/v8/src/heap.cc +0 -5856
  412. data/ext/v8/upstream/v8/src/heap.h +0 -2264
  413. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +0 -1639
  414. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +0 -3657
  415. data/ext/v8/upstream/v8/src/hydrogen.cc +0 -6011
  416. data/ext/v8/upstream/v8/src/hydrogen.h +0 -1137
  417. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +0 -430
  418. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +0 -2846
  419. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +0 -1159
  420. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +0 -1596
  421. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +0 -6549
  422. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +0 -495
  423. data/ext/v8/upstream/v8/src/ia32/codegen-ia32-inl.h +0 -46
  424. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +0 -10385
  425. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +0 -801
  426. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +0 -88
  427. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +0 -312
  428. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +0 -774
  429. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +0 -1620
  430. data/ext/v8/upstream/v8/src/ia32/frames-ia32.cc +0 -45
  431. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +0 -140
  432. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +0 -4357
  433. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +0 -1779
  434. data/ext/v8/upstream/v8/src/ia32/jump-target-ia32.cc +0 -437
  435. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +0 -4158
  436. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +0 -318
  437. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +0 -466
  438. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.h +0 -110
  439. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +0 -2181
  440. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +0 -2235
  441. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +0 -2056
  442. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +0 -807
  443. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1264
  444. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +0 -216
  445. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32-inl.h +0 -82
  446. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +0 -157
  447. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.h +0 -43
  448. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.cc +0 -30
  449. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +0 -72
  450. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +0 -3711
  451. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +0 -1366
  452. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +0 -650
  453. data/ext/v8/upstream/v8/src/ic-inl.h +0 -130
  454. data/ext/v8/upstream/v8/src/ic.cc +0 -2389
  455. data/ext/v8/upstream/v8/src/ic.h +0 -675
  456. data/ext/v8/upstream/v8/src/inspector.cc +0 -63
  457. data/ext/v8/upstream/v8/src/inspector.h +0 -62
  458. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +0 -659
  459. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +0 -49
  460. data/ext/v8/upstream/v8/src/isolate.cc +0 -883
  461. data/ext/v8/upstream/v8/src/isolate.h +0 -1306
  462. data/ext/v8/upstream/v8/src/json.js +0 -342
  463. data/ext/v8/upstream/v8/src/jsregexp.cc +0 -5371
  464. data/ext/v8/upstream/v8/src/jsregexp.h +0 -1483
  465. data/ext/v8/upstream/v8/src/jump-target-heavy-inl.h +0 -51
  466. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +0 -427
  467. data/ext/v8/upstream/v8/src/jump-target-heavy.h +0 -238
  468. data/ext/v8/upstream/v8/src/jump-target-inl.h +0 -48
  469. data/ext/v8/upstream/v8/src/jump-target-light-inl.h +0 -56
  470. data/ext/v8/upstream/v8/src/jump-target-light.cc +0 -111
  471. data/ext/v8/upstream/v8/src/jump-target-light.h +0 -193
  472. data/ext/v8/upstream/v8/src/jump-target.cc +0 -91
  473. data/ext/v8/upstream/v8/src/jump-target.h +0 -90
  474. data/ext/v8/upstream/v8/src/list-inl.h +0 -206
  475. data/ext/v8/upstream/v8/src/list.h +0 -164
  476. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +0 -142
  477. data/ext/v8/upstream/v8/src/lithium-allocator.cc +0 -2105
  478. data/ext/v8/upstream/v8/src/lithium-allocator.h +0 -630
  479. data/ext/v8/upstream/v8/src/lithium.cc +0 -169
  480. data/ext/v8/upstream/v8/src/lithium.h +0 -592
  481. data/ext/v8/upstream/v8/src/liveedit-debugger.js +0 -1082
  482. data/ext/v8/upstream/v8/src/liveedit.cc +0 -1693
  483. data/ext/v8/upstream/v8/src/liveedit.h +0 -179
  484. data/ext/v8/upstream/v8/src/liveobjectlist-inl.h +0 -126
  485. data/ext/v8/upstream/v8/src/liveobjectlist.cc +0 -2589
  486. data/ext/v8/upstream/v8/src/liveobjectlist.h +0 -322
  487. data/ext/v8/upstream/v8/src/log-inl.h +0 -59
  488. data/ext/v8/upstream/v8/src/log-utils.cc +0 -423
  489. data/ext/v8/upstream/v8/src/log-utils.h +0 -229
  490. data/ext/v8/upstream/v8/src/log.cc +0 -1666
  491. data/ext/v8/upstream/v8/src/log.h +0 -446
  492. data/ext/v8/upstream/v8/src/macro-assembler.h +0 -120
  493. data/ext/v8/upstream/v8/src/macros.py +0 -178
  494. data/ext/v8/upstream/v8/src/mark-compact.cc +0 -3092
  495. data/ext/v8/upstream/v8/src/mark-compact.h +0 -506
  496. data/ext/v8/upstream/v8/src/math.js +0 -264
  497. data/ext/v8/upstream/v8/src/messages.cc +0 -166
  498. data/ext/v8/upstream/v8/src/messages.h +0 -114
  499. data/ext/v8/upstream/v8/src/messages.js +0 -1090
  500. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +0 -335
  501. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +0 -2093
  502. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +0 -1066
  503. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +0 -148
  504. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +0 -752
  505. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +0 -511
  506. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +0 -64
  507. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +0 -1213
  508. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +0 -633
  509. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +0 -352
  510. data/ext/v8/upstream/v8/src/mips/constants-mips.h +0 -723
  511. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +0 -90
  512. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +0 -155
  513. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +0 -91
  514. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +0 -1023
  515. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +0 -48
  516. data/ext/v8/upstream/v8/src/mips/frames-mips.h +0 -179
  517. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +0 -727
  518. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +0 -244
  519. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +0 -80
  520. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +0 -65
  521. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +0 -304
  522. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +0 -3327
  523. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +0 -1058
  524. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +0 -478
  525. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +0 -250
  526. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +0 -134
  527. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.cc +0 -63
  528. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +0 -47
  529. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +0 -2438
  530. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +0 -394
  531. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +0 -601
  532. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +0 -58
  533. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +0 -307
  534. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +0 -530
  535. data/ext/v8/upstream/v8/src/mirror-debugger.js +0 -2381
  536. data/ext/v8/upstream/v8/src/mksnapshot.cc +0 -256
  537. data/ext/v8/upstream/v8/src/natives.h +0 -63
  538. data/ext/v8/upstream/v8/src/objects-debug.cc +0 -722
  539. data/ext/v8/upstream/v8/src/objects-inl.h +0 -4166
  540. data/ext/v8/upstream/v8/src/objects-printer.cc +0 -801
  541. data/ext/v8/upstream/v8/src/objects-visiting.cc +0 -142
  542. data/ext/v8/upstream/v8/src/objects-visiting.h +0 -422
  543. data/ext/v8/upstream/v8/src/objects.cc +0 -10296
  544. data/ext/v8/upstream/v8/src/objects.h +0 -6662
  545. data/ext/v8/upstream/v8/src/parser.cc +0 -5168
  546. data/ext/v8/upstream/v8/src/parser.h +0 -823
  547. data/ext/v8/upstream/v8/src/platform-cygwin.cc +0 -811
  548. data/ext/v8/upstream/v8/src/platform-freebsd.cc +0 -854
  549. data/ext/v8/upstream/v8/src/platform-linux.cc +0 -1120
  550. data/ext/v8/upstream/v8/src/platform-macos.cc +0 -865
  551. data/ext/v8/upstream/v8/src/platform-nullos.cc +0 -504
  552. data/ext/v8/upstream/v8/src/platform-openbsd.cc +0 -672
  553. data/ext/v8/upstream/v8/src/platform-posix.cc +0 -424
  554. data/ext/v8/upstream/v8/src/platform-solaris.cc +0 -796
  555. data/ext/v8/upstream/v8/src/platform-tls-mac.h +0 -62
  556. data/ext/v8/upstream/v8/src/platform-tls-win32.h +0 -62
  557. data/ext/v8/upstream/v8/src/platform-tls.h +0 -50
  558. data/ext/v8/upstream/v8/src/platform-win32.cc +0 -2072
  559. data/ext/v8/upstream/v8/src/platform.h +0 -693
  560. data/ext/v8/upstream/v8/src/preparse-data.cc +0 -185
  561. data/ext/v8/upstream/v8/src/preparse-data.h +0 -249
  562. data/ext/v8/upstream/v8/src/preparser-api.cc +0 -219
  563. data/ext/v8/upstream/v8/src/preparser.cc +0 -1205
  564. data/ext/v8/upstream/v8/src/preparser.h +0 -278
  565. data/ext/v8/upstream/v8/src/prettyprinter.cc +0 -1530
  566. data/ext/v8/upstream/v8/src/prettyprinter.h +0 -223
  567. data/ext/v8/upstream/v8/src/profile-generator-inl.h +0 -128
  568. data/ext/v8/upstream/v8/src/profile-generator.cc +0 -3095
  569. data/ext/v8/upstream/v8/src/profile-generator.h +0 -1125
  570. data/ext/v8/upstream/v8/src/property.cc +0 -102
  571. data/ext/v8/upstream/v8/src/property.h +0 -348
  572. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp-inl.h +0 -78
  573. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +0 -470
  574. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.h +0 -142
  575. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.cc +0 -373
  576. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.h +0 -104
  577. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +0 -266
  578. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +0 -236
  579. data/ext/v8/upstream/v8/src/regexp-stack.cc +0 -111
  580. data/ext/v8/upstream/v8/src/regexp-stack.h +0 -147
  581. data/ext/v8/upstream/v8/src/regexp.js +0 -483
  582. data/ext/v8/upstream/v8/src/register-allocator-inl.h +0 -141
  583. data/ext/v8/upstream/v8/src/register-allocator.cc +0 -98
  584. data/ext/v8/upstream/v8/src/register-allocator.h +0 -310
  585. data/ext/v8/upstream/v8/src/rewriter.cc +0 -1024
  586. data/ext/v8/upstream/v8/src/rewriter.h +0 -59
  587. data/ext/v8/upstream/v8/src/runtime-profiler.cc +0 -478
  588. data/ext/v8/upstream/v8/src/runtime-profiler.h +0 -192
  589. data/ext/v8/upstream/v8/src/runtime.cc +0 -11949
  590. data/ext/v8/upstream/v8/src/runtime.h +0 -643
  591. data/ext/v8/upstream/v8/src/runtime.js +0 -643
  592. data/ext/v8/upstream/v8/src/safepoint-table.cc +0 -256
  593. data/ext/v8/upstream/v8/src/safepoint-table.h +0 -269
  594. data/ext/v8/upstream/v8/src/scanner-base.cc +0 -964
  595. data/ext/v8/upstream/v8/src/scanner-base.h +0 -664
  596. data/ext/v8/upstream/v8/src/scanner.cc +0 -584
  597. data/ext/v8/upstream/v8/src/scanner.h +0 -196
  598. data/ext/v8/upstream/v8/src/scopeinfo.cc +0 -631
  599. data/ext/v8/upstream/v8/src/scopeinfo.h +0 -249
  600. data/ext/v8/upstream/v8/src/scopes.cc +0 -1093
  601. data/ext/v8/upstream/v8/src/scopes.h +0 -508
  602. data/ext/v8/upstream/v8/src/serialize.cc +0 -1574
  603. data/ext/v8/upstream/v8/src/serialize.h +0 -589
  604. data/ext/v8/upstream/v8/src/shell.h +0 -55
  605. data/ext/v8/upstream/v8/src/simulator.h +0 -43
  606. data/ext/v8/upstream/v8/src/small-pointer-list.h +0 -163
  607. data/ext/v8/upstream/v8/src/smart-pointer.h +0 -109
  608. data/ext/v8/upstream/v8/src/snapshot-common.cc +0 -82
  609. data/ext/v8/upstream/v8/src/snapshot-empty.cc +0 -50
  610. data/ext/v8/upstream/v8/src/snapshot.h +0 -73
  611. data/ext/v8/upstream/v8/src/spaces-inl.h +0 -529
  612. data/ext/v8/upstream/v8/src/spaces.cc +0 -3147
  613. data/ext/v8/upstream/v8/src/spaces.h +0 -2368
  614. data/ext/v8/upstream/v8/src/splay-tree-inl.h +0 -310
  615. data/ext/v8/upstream/v8/src/splay-tree.h +0 -203
  616. data/ext/v8/upstream/v8/src/string-search.cc +0 -41
  617. data/ext/v8/upstream/v8/src/string-search.h +0 -568
  618. data/ext/v8/upstream/v8/src/string-stream.cc +0 -592
  619. data/ext/v8/upstream/v8/src/string-stream.h +0 -191
  620. data/ext/v8/upstream/v8/src/string.js +0 -915
  621. data/ext/v8/upstream/v8/src/strtod.cc +0 -440
  622. data/ext/v8/upstream/v8/src/strtod.h +0 -40
  623. data/ext/v8/upstream/v8/src/stub-cache.cc +0 -1940
  624. data/ext/v8/upstream/v8/src/stub-cache.h +0 -866
  625. data/ext/v8/upstream/v8/src/third_party/valgrind/valgrind.h +0 -3925
  626. data/ext/v8/upstream/v8/src/token.cc +0 -63
  627. data/ext/v8/upstream/v8/src/token.h +0 -288
  628. data/ext/v8/upstream/v8/src/top.cc +0 -983
  629. data/ext/v8/upstream/v8/src/type-info.cc +0 -472
  630. data/ext/v8/upstream/v8/src/type-info.h +0 -290
  631. data/ext/v8/upstream/v8/src/unbound-queue-inl.h +0 -95
  632. data/ext/v8/upstream/v8/src/unbound-queue.h +0 -67
  633. data/ext/v8/upstream/v8/src/unicode-inl.h +0 -238
  634. data/ext/v8/upstream/v8/src/unicode.cc +0 -1624
  635. data/ext/v8/upstream/v8/src/unicode.h +0 -280
  636. data/ext/v8/upstream/v8/src/uri.js +0 -402
  637. data/ext/v8/upstream/v8/src/utils.cc +0 -371
  638. data/ext/v8/upstream/v8/src/utils.h +0 -796
  639. data/ext/v8/upstream/v8/src/v8-counters.cc +0 -62
  640. data/ext/v8/upstream/v8/src/v8-counters.h +0 -311
  641. data/ext/v8/upstream/v8/src/v8.cc +0 -215
  642. data/ext/v8/upstream/v8/src/v8.h +0 -130
  643. data/ext/v8/upstream/v8/src/v8checks.h +0 -64
  644. data/ext/v8/upstream/v8/src/v8dll-main.cc +0 -39
  645. data/ext/v8/upstream/v8/src/v8globals.h +0 -486
  646. data/ext/v8/upstream/v8/src/v8memory.h +0 -82
  647. data/ext/v8/upstream/v8/src/v8natives.js +0 -1293
  648. data/ext/v8/upstream/v8/src/v8preparserdll-main.cc +0 -39
  649. data/ext/v8/upstream/v8/src/v8threads.cc +0 -453
  650. data/ext/v8/upstream/v8/src/v8threads.h +0 -164
  651. data/ext/v8/upstream/v8/src/v8utils.h +0 -317
  652. data/ext/v8/upstream/v8/src/variables.cc +0 -132
  653. data/ext/v8/upstream/v8/src/variables.h +0 -212
  654. data/ext/v8/upstream/v8/src/version.cc +0 -116
  655. data/ext/v8/upstream/v8/src/version.h +0 -68
  656. data/ext/v8/upstream/v8/src/virtual-frame-heavy-inl.h +0 -190
  657. data/ext/v8/upstream/v8/src/virtual-frame-heavy.cc +0 -312
  658. data/ext/v8/upstream/v8/src/virtual-frame-inl.h +0 -39
  659. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +0 -171
  660. data/ext/v8/upstream/v8/src/virtual-frame-light.cc +0 -52
  661. data/ext/v8/upstream/v8/src/virtual-frame.cc +0 -49
  662. data/ext/v8/upstream/v8/src/virtual-frame.h +0 -59
  663. data/ext/v8/upstream/v8/src/vm-state-inl.h +0 -138
  664. data/ext/v8/upstream/v8/src/vm-state.h +0 -70
  665. data/ext/v8/upstream/v8/src/win32-headers.h +0 -96
  666. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +0 -456
  667. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +0 -2954
  668. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +0 -1630
  669. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +0 -1493
  670. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +0 -5132
  671. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +0 -477
  672. data/ext/v8/upstream/v8/src/x64/codegen-x64-inl.h +0 -46
  673. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +0 -8843
  674. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +0 -753
  675. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +0 -88
  676. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +0 -318
  677. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +0 -815
  678. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +0 -1752
  679. data/ext/v8/upstream/v8/src/x64/frames-x64.cc +0 -45
  680. data/ext/v8/upstream/v8/src/x64/frames-x64.h +0 -130
  681. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +0 -4339
  682. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +0 -1752
  683. data/ext/v8/upstream/v8/src/x64/jump-target-x64.cc +0 -437
  684. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +0 -3970
  685. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +0 -318
  686. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.cc +0 -320
  687. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.h +0 -74
  688. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +0 -2115
  689. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +0 -2161
  690. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +0 -2911
  691. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +0 -1984
  692. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +0 -1398
  693. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +0 -282
  694. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +0 -87
  695. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +0 -95
  696. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.h +0 -43
  697. data/ext/v8/upstream/v8/src/x64/simulator-x64.cc +0 -27
  698. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +0 -71
  699. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +0 -3460
  700. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +0 -1296
  701. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +0 -597
  702. data/ext/v8/upstream/v8/src/zone-inl.h +0 -129
  703. data/ext/v8/upstream/v8/src/zone.cc +0 -196
  704. data/ext/v8/upstream/v8/src/zone.h +0 -236
  705. data/ext/v8/upstream/v8/tools/codemap.js +0 -265
  706. data/ext/v8/upstream/v8/tools/consarray.js +0 -93
  707. data/ext/v8/upstream/v8/tools/csvparser.js +0 -78
  708. data/ext/v8/upstream/v8/tools/disasm.py +0 -92
  709. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +0 -10
  710. data/ext/v8/upstream/v8/tools/gc-nvp-trace-processor.py +0 -328
  711. data/ext/v8/upstream/v8/tools/generate-ten-powers.scm +0 -286
  712. data/ext/v8/upstream/v8/tools/grokdump.py +0 -840
  713. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +0 -844
  714. data/ext/v8/upstream/v8/tools/js2c.py +0 -380
  715. data/ext/v8/upstream/v8/tools/jsmin.py +0 -280
  716. data/ext/v8/upstream/v8/tools/linux-tick-processor +0 -35
  717. data/ext/v8/upstream/v8/tools/ll_prof.py +0 -919
  718. data/ext/v8/upstream/v8/tools/logreader.js +0 -185
  719. data/ext/v8/upstream/v8/tools/mac-nm +0 -18
  720. data/ext/v8/upstream/v8/tools/mac-tick-processor +0 -6
  721. data/ext/v8/upstream/v8/tools/oom_dump/README +0 -31
  722. data/ext/v8/upstream/v8/tools/oom_dump/SConstruct +0 -42
  723. data/ext/v8/upstream/v8/tools/oom_dump/oom_dump.cc +0 -288
  724. data/ext/v8/upstream/v8/tools/presubmit.py +0 -305
  725. data/ext/v8/upstream/v8/tools/process-heap-prof.py +0 -120
  726. data/ext/v8/upstream/v8/tools/profile.js +0 -751
  727. data/ext/v8/upstream/v8/tools/profile_view.js +0 -219
  728. data/ext/v8/upstream/v8/tools/run-valgrind.py +0 -77
  729. data/ext/v8/upstream/v8/tools/splaytree.js +0 -316
  730. data/ext/v8/upstream/v8/tools/stats-viewer.py +0 -468
  731. data/ext/v8/upstream/v8/tools/test.py +0 -1490
  732. data/ext/v8/upstream/v8/tools/tickprocessor-driver.js +0 -59
  733. data/ext/v8/upstream/v8/tools/tickprocessor.js +0 -877
  734. data/ext/v8/upstream/v8/tools/utils.py +0 -96
  735. data/ext/v8/upstream/v8/tools/visual_studio/README.txt +0 -70
  736. data/ext/v8/upstream/v8/tools/visual_studio/arm.vsprops +0 -14
  737. data/ext/v8/upstream/v8/tools/visual_studio/common.vsprops +0 -34
  738. data/ext/v8/upstream/v8/tools/visual_studio/d8.vcproj +0 -193
  739. data/ext/v8/upstream/v8/tools/visual_studio/d8_arm.vcproj +0 -193
  740. data/ext/v8/upstream/v8/tools/visual_studio/d8_x64.vcproj +0 -209
  741. data/ext/v8/upstream/v8/tools/visual_studio/d8js2c.cmd +0 -6
  742. data/ext/v8/upstream/v8/tools/visual_studio/debug.vsprops +0 -17
  743. data/ext/v8/upstream/v8/tools/visual_studio/ia32.vsprops +0 -17
  744. data/ext/v8/upstream/v8/tools/visual_studio/js2c.cmd +0 -6
  745. data/ext/v8/upstream/v8/tools/visual_studio/release.vsprops +0 -24
  746. data/ext/v8/upstream/v8/tools/visual_studio/v8.sln +0 -101
  747. data/ext/v8/upstream/v8/tools/visual_studio/v8.vcproj +0 -227
  748. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.sln +0 -74
  749. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.vcproj +0 -227
  750. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +0 -1308
  751. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +0 -1238
  752. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +0 -1300
  753. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest.vcproj +0 -265
  754. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_arm.vcproj +0 -249
  755. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_x64.vcproj +0 -257
  756. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot.vcproj +0 -145
  757. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -145
  758. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample.vcproj +0 -145
  759. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_arm.vcproj +0 -145
  760. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_x64.vcproj +0 -161
  761. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample.vcproj +0 -147
  762. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -147
  763. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -163
  764. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot.vcproj +0 -142
  765. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc.vcproj +0 -92
  766. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -92
  767. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_x64.vcproj +0 -142
  768. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.sln +0 -101
  769. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.vcproj +0 -227
  770. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +0 -18
  771. data/ext/v8/upstream/v8/tools/windows-tick-processor.bat +0 -30
@@ -1,2911 +0,0 @@
1
- // Copyright 2011 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_X64)
31
-
32
- #include "bootstrapper.h"
33
- #include "codegen-inl.h"
34
- #include "assembler-x64.h"
35
- #include "macro-assembler-x64.h"
36
- #include "serialize.h"
37
- #include "debug.h"
38
- #include "heap.h"
39
-
40
- namespace v8 {
41
- namespace internal {
42
-
43
- MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
44
- : Assembler(arg_isolate, buffer, size),
45
- generating_stub_(false),
46
- allow_stub_calls_(true),
47
- root_array_available_(true) {
48
- if (isolate() != NULL) {
49
- code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
50
- isolate());
51
- }
52
- }
53
-
54
-
55
- static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
56
- Address roots_register_value = kRootRegisterBias +
57
- reinterpret_cast<Address>(isolate->heap()->roots_address());
58
- intptr_t delta = other.address() - roots_register_value;
59
- return delta;
60
- }
61
-
62
-
63
- Operand MacroAssembler::ExternalOperand(ExternalReference target,
64
- Register scratch) {
65
- if (root_array_available_ && !Serializer::enabled()) {
66
- intptr_t delta = RootRegisterDelta(target, isolate());
67
- if (is_int32(delta)) {
68
- Serializer::TooLateToEnableNow();
69
- return Operand(kRootRegister, static_cast<int32_t>(delta));
70
- }
71
- }
72
- movq(scratch, target);
73
- return Operand(scratch, 0);
74
- }
75
-
76
-
77
- void MacroAssembler::Load(Register destination, ExternalReference source) {
78
- if (root_array_available_ && !Serializer::enabled()) {
79
- intptr_t delta = RootRegisterDelta(source, isolate());
80
- if (is_int32(delta)) {
81
- Serializer::TooLateToEnableNow();
82
- movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
83
- return;
84
- }
85
- }
86
- // Safe code.
87
- if (destination.is(rax)) {
88
- load_rax(source);
89
- } else {
90
- movq(kScratchRegister, source);
91
- movq(destination, Operand(kScratchRegister, 0));
92
- }
93
- }
94
-
95
-
96
- void MacroAssembler::Store(ExternalReference destination, Register source) {
97
- if (root_array_available_ && !Serializer::enabled()) {
98
- intptr_t delta = RootRegisterDelta(destination, isolate());
99
- if (is_int32(delta)) {
100
- Serializer::TooLateToEnableNow();
101
- movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
102
- return;
103
- }
104
- }
105
- // Safe code.
106
- if (source.is(rax)) {
107
- store_rax(destination);
108
- } else {
109
- movq(kScratchRegister, destination);
110
- movq(Operand(kScratchRegister, 0), source);
111
- }
112
- }
113
-
114
-
115
- void MacroAssembler::LoadAddress(Register destination,
116
- ExternalReference source) {
117
- if (root_array_available_ && !Serializer::enabled()) {
118
- intptr_t delta = RootRegisterDelta(source, isolate());
119
- if (is_int32(delta)) {
120
- Serializer::TooLateToEnableNow();
121
- lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
122
- return;
123
- }
124
- }
125
- // Safe code.
126
- movq(destination, source);
127
- }
128
-
129
-
130
- int MacroAssembler::LoadAddressSize(ExternalReference source) {
131
- if (root_array_available_ && !Serializer::enabled()) {
132
- // This calculation depends on the internals of LoadAddress.
133
- // It's correctness is ensured by the asserts in the Call
134
- // instruction below.
135
- intptr_t delta = RootRegisterDelta(source, isolate());
136
- if (is_int32(delta)) {
137
- Serializer::TooLateToEnableNow();
138
- // Operand is lea(scratch, Operand(kRootRegister, delta));
139
- // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
140
- int size = 4;
141
- if (!is_int8(static_cast<int32_t>(delta))) {
142
- size += 3; // Need full four-byte displacement in lea.
143
- }
144
- return size;
145
- }
146
- }
147
- // Size of movq(destination, src);
148
- return 10;
149
- }
150
-
151
-
152
- void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
153
- ASSERT(root_array_available_);
154
- movq(destination, Operand(kRootRegister,
155
- (index << kPointerSizeLog2) - kRootRegisterBias));
156
- }
157
-
158
-
159
- void MacroAssembler::LoadRootIndexed(Register destination,
160
- Register variable_offset,
161
- int fixed_offset) {
162
- ASSERT(root_array_available_);
163
- movq(destination,
164
- Operand(kRootRegister,
165
- variable_offset, times_pointer_size,
166
- (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
167
- }
168
-
169
-
170
- void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
171
- ASSERT(root_array_available_);
172
- movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
173
- source);
174
- }
175
-
176
-
177
- void MacroAssembler::PushRoot(Heap::RootListIndex index) {
178
- ASSERT(root_array_available_);
179
- push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
180
- }
181
-
182
-
183
- void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
184
- ASSERT(root_array_available_);
185
- cmpq(with, Operand(kRootRegister,
186
- (index << kPointerSizeLog2) - kRootRegisterBias));
187
- }
188
-
189
-
190
- void MacroAssembler::CompareRoot(const Operand& with,
191
- Heap::RootListIndex index) {
192
- ASSERT(root_array_available_);
193
- ASSERT(!with.AddressUsesRegister(kScratchRegister));
194
- LoadRoot(kScratchRegister, index);
195
- cmpq(with, kScratchRegister);
196
- }
197
-
198
-
199
- void MacroAssembler::RecordWriteHelper(Register object,
200
- Register addr,
201
- Register scratch) {
202
- if (emit_debug_code()) {
203
- // Check that the object is not in new space.
204
- NearLabel not_in_new_space;
205
- InNewSpace(object, scratch, not_equal, &not_in_new_space);
206
- Abort("new-space object passed to RecordWriteHelper");
207
- bind(&not_in_new_space);
208
- }
209
-
210
- // Compute the page start address from the heap object pointer, and reuse
211
- // the 'object' register for it.
212
- and_(object, Immediate(~Page::kPageAlignmentMask));
213
-
214
- // Compute number of region covering addr. See Page::GetRegionNumberForAddress
215
- // method for more details.
216
- shrl(addr, Immediate(Page::kRegionSizeLog2));
217
- andl(addr, Immediate(Page::kPageAlignmentMask >> Page::kRegionSizeLog2));
218
-
219
- // Set dirty mark for region.
220
- bts(Operand(object, Page::kDirtyFlagOffset), addr);
221
- }
222
-
223
-
224
- void MacroAssembler::RecordWrite(Register object,
225
- int offset,
226
- Register value,
227
- Register index) {
228
- // The compiled code assumes that record write doesn't change the
229
- // context register, so we check that none of the clobbered
230
- // registers are rsi.
231
- ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));
232
-
233
- // First, check if a write barrier is even needed. The tests below
234
- // catch stores of smis and stores into the young generation.
235
- Label done;
236
- JumpIfSmi(value, &done);
237
-
238
- RecordWriteNonSmi(object, offset, value, index);
239
- bind(&done);
240
-
241
- // Clobber all input registers when running with the debug-code flag
242
- // turned on to provoke errors. This clobbering repeats the
243
- // clobbering done inside RecordWriteNonSmi but it's necessary to
244
- // avoid having the fast case for smis leave the registers
245
- // unchanged.
246
- if (emit_debug_code()) {
247
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
248
- movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
249
- movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
250
- }
251
- }
252
-
253
-
254
- void MacroAssembler::RecordWrite(Register object,
255
- Register address,
256
- Register value) {
257
- // The compiled code assumes that record write doesn't change the
258
- // context register, so we check that none of the clobbered
259
- // registers are rsi.
260
- ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));
261
-
262
- // First, check if a write barrier is even needed. The tests below
263
- // catch stores of smis and stores into the young generation.
264
- Label done;
265
- JumpIfSmi(value, &done);
266
-
267
- InNewSpace(object, value, equal, &done);
268
-
269
- RecordWriteHelper(object, address, value);
270
-
271
- bind(&done);
272
-
273
- // Clobber all input registers when running with the debug-code flag
274
- // turned on to provoke errors.
275
- if (emit_debug_code()) {
276
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
277
- movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
278
- movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
279
- }
280
- }
281
-
282
-
283
- void MacroAssembler::RecordWriteNonSmi(Register object,
284
- int offset,
285
- Register scratch,
286
- Register index) {
287
- Label done;
288
-
289
- if (emit_debug_code()) {
290
- NearLabel okay;
291
- JumpIfNotSmi(object, &okay);
292
- Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
293
- bind(&okay);
294
-
295
- if (offset == 0) {
296
- // index must be int32.
297
- Register tmp = index.is(rax) ? rbx : rax;
298
- push(tmp);
299
- movl(tmp, index);
300
- cmpq(tmp, index);
301
- Check(equal, "Index register for RecordWrite must be untagged int32.");
302
- pop(tmp);
303
- }
304
- }
305
-
306
- // Test that the object address is not in the new space. We cannot
307
- // update page dirty marks for new space pages.
308
- InNewSpace(object, scratch, equal, &done);
309
-
310
- // The offset is relative to a tagged or untagged HeapObject pointer,
311
- // so either offset or offset + kHeapObjectTag must be a
312
- // multiple of kPointerSize.
313
- ASSERT(IsAligned(offset, kPointerSize) ||
314
- IsAligned(offset + kHeapObjectTag, kPointerSize));
315
-
316
- Register dst = index;
317
- if (offset != 0) {
318
- lea(dst, Operand(object, offset));
319
- } else {
320
- // array access: calculate the destination address in the same manner as
321
- // KeyedStoreIC::GenerateGeneric.
322
- lea(dst, FieldOperand(object,
323
- index,
324
- times_pointer_size,
325
- FixedArray::kHeaderSize));
326
- }
327
- RecordWriteHelper(object, dst, scratch);
328
-
329
- bind(&done);
330
-
331
- // Clobber all input registers when running with the debug-code flag
332
- // turned on to provoke errors.
333
- if (emit_debug_code()) {
334
- movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
335
- movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
336
- movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
337
- }
338
- }
339
-
340
- void MacroAssembler::Assert(Condition cc, const char* msg) {
341
- if (emit_debug_code()) Check(cc, msg);
342
- }
343
-
344
-
345
- void MacroAssembler::AssertFastElements(Register elements) {
346
- if (emit_debug_code()) {
347
- NearLabel ok;
348
- CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
349
- Heap::kFixedArrayMapRootIndex);
350
- j(equal, &ok);
351
- CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
352
- Heap::kFixedCOWArrayMapRootIndex);
353
- j(equal, &ok);
354
- Abort("JSObject with fast elements map has slow elements");
355
- bind(&ok);
356
- }
357
- }
358
-
359
-
360
- void MacroAssembler::Check(Condition cc, const char* msg) {
361
- NearLabel L;
362
- j(cc, &L);
363
- Abort(msg);
364
- // will not return here
365
- bind(&L);
366
- }
367
-
368
-
369
- void MacroAssembler::CheckStackAlignment() {
370
- int frame_alignment = OS::ActivationFrameAlignment();
371
- int frame_alignment_mask = frame_alignment - 1;
372
- if (frame_alignment > kPointerSize) {
373
- ASSERT(IsPowerOf2(frame_alignment));
374
- NearLabel alignment_as_expected;
375
- testq(rsp, Immediate(frame_alignment_mask));
376
- j(zero, &alignment_as_expected);
377
- // Abort if stack is not aligned.
378
- int3();
379
- bind(&alignment_as_expected);
380
- }
381
- }
382
-
383
-
384
- void MacroAssembler::NegativeZeroTest(Register result,
385
- Register op,
386
- Label* then_label) {
387
- NearLabel ok;
388
- testl(result, result);
389
- j(not_zero, &ok);
390
- testl(op, op);
391
- j(sign, then_label);
392
- bind(&ok);
393
- }
394
-
395
-
396
- void MacroAssembler::Abort(const char* msg) {
397
- // We want to pass the msg string like a smi to avoid GC
398
- // problems, however msg is not guaranteed to be aligned
399
- // properly. Instead, we pass an aligned pointer that is
400
- // a proper v8 smi, but also pass the alignment difference
401
- // from the real pointer as a smi.
402
- intptr_t p1 = reinterpret_cast<intptr_t>(msg);
403
- intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
404
- // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
405
- ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
406
- #ifdef DEBUG
407
- if (msg != NULL) {
408
- RecordComment("Abort message: ");
409
- RecordComment(msg);
410
- }
411
- #endif
412
- // Disable stub call restrictions to always allow calls to abort.
413
- AllowStubCallsScope allow_scope(this, true);
414
-
415
- push(rax);
416
- movq(kScratchRegister, p0, RelocInfo::NONE);
417
- push(kScratchRegister);
418
- movq(kScratchRegister,
419
- reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
420
- RelocInfo::NONE);
421
- push(kScratchRegister);
422
- CallRuntime(Runtime::kAbort, 2);
423
- // will not return here
424
- int3();
425
- }
426
-
427
-
428
- void MacroAssembler::CallStub(CodeStub* stub) {
429
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
430
- Call(stub->GetCode(), RelocInfo::CODE_TARGET);
431
- }
432
-
433
-
434
- MaybeObject* MacroAssembler::TryCallStub(CodeStub* stub) {
435
- ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
436
- MaybeObject* result = stub->TryGetCode();
437
- if (!result->IsFailure()) {
438
- call(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
439
- RelocInfo::CODE_TARGET);
440
- }
441
- return result;
442
- }
443
-
444
-
445
- void MacroAssembler::TailCallStub(CodeStub* stub) {
446
- ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
447
- Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
448
- }
449
-
450
-
451
- MaybeObject* MacroAssembler::TryTailCallStub(CodeStub* stub) {
452
- ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
453
- MaybeObject* result = stub->TryGetCode();
454
- if (!result->IsFailure()) {
455
- jmp(Handle<Code>(Code::cast(result->ToObjectUnchecked())),
456
- RelocInfo::CODE_TARGET);
457
- }
458
- return result;
459
- }
460
-
461
-
462
- void MacroAssembler::StubReturn(int argc) {
463
- ASSERT(argc >= 1 && generating_stub());
464
- ret((argc - 1) * kPointerSize);
465
- }
466
-
467
-
468
- void MacroAssembler::IllegalOperation(int num_arguments) {
469
- if (num_arguments > 0) {
470
- addq(rsp, Immediate(num_arguments * kPointerSize));
471
- }
472
- LoadRoot(rax, Heap::kUndefinedValueRootIndex);
473
- }
474
-
475
-
476
- void MacroAssembler::IndexFromHash(Register hash, Register index) {
477
- // The assert checks that the constants for the maximum number of digits
478
- // for an array index cached in the hash field and the number of bits
479
- // reserved for it does not conflict.
480
- ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
481
- (1 << String::kArrayIndexValueBits));
482
- // We want the smi-tagged index in key. Even if we subsequently go to
483
- // the slow case, converting the key to a smi is always valid.
484
- // key: string key
485
- // hash: key's hash field, including its array index value.
486
- and_(hash, Immediate(String::kArrayIndexValueMask));
487
- shr(hash, Immediate(String::kHashShift));
488
- // Here we actually clobber the key which will be used if calling into
489
- // runtime later. However as the new key is the numeric value of a string key
490
- // there is no difference in using either key.
491
- Integer32ToSmi(index, hash);
492
- }
493
-
494
-
495
- void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
496
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
497
- }
498
-
499
-
500
- void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
501
- const Runtime::Function* function = Runtime::FunctionForId(id);
502
- Set(rax, function->nargs);
503
- LoadAddress(rbx, ExternalReference(function, isolate()));
504
- CEntryStub ces(1);
505
- ces.SaveDoubles();
506
- CallStub(&ces);
507
- }
508
-
509
-
510
- MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
511
- int num_arguments) {
512
- return TryCallRuntime(Runtime::FunctionForId(id), num_arguments);
513
- }
514
-
515
-
516
- void MacroAssembler::CallRuntime(const Runtime::Function* f,
517
- int num_arguments) {
518
- // If the expected number of arguments of the runtime function is
519
- // constant, we check that the actual number of arguments match the
520
- // expectation.
521
- if (f->nargs >= 0 && f->nargs != num_arguments) {
522
- IllegalOperation(num_arguments);
523
- return;
524
- }
525
-
526
- // TODO(1236192): Most runtime routines don't need the number of
527
- // arguments passed in because it is constant. At some point we
528
- // should remove this need and make the runtime routine entry code
529
- // smarter.
530
- Set(rax, num_arguments);
531
- LoadAddress(rbx, ExternalReference(f, isolate()));
532
- CEntryStub ces(f->result_size);
533
- CallStub(&ces);
534
- }
535
-
536
-
537
- MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
538
- int num_arguments) {
539
- if (f->nargs >= 0 && f->nargs != num_arguments) {
540
- IllegalOperation(num_arguments);
541
- // Since we did not call the stub, there was no allocation failure.
542
- // Return some non-failure object.
543
- return HEAP->undefined_value();
544
- }
545
-
546
- // TODO(1236192): Most runtime routines don't need the number of
547
- // arguments passed in because it is constant. At some point we
548
- // should remove this need and make the runtime routine entry code
549
- // smarter.
550
- Set(rax, num_arguments);
551
- LoadAddress(rbx, ExternalReference(f, isolate()));
552
- CEntryStub ces(f->result_size);
553
- return TryCallStub(&ces);
554
- }
555
-
556
-
557
- void MacroAssembler::CallExternalReference(const ExternalReference& ext,
558
- int num_arguments) {
559
- Set(rax, num_arguments);
560
- LoadAddress(rbx, ext);
561
-
562
- CEntryStub stub(1);
563
- CallStub(&stub);
564
- }
565
-
566
-
567
- void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
568
- int num_arguments,
569
- int result_size) {
570
- // ----------- S t a t e -------------
571
- // -- rsp[0] : return address
572
- // -- rsp[8] : argument num_arguments - 1
573
- // ...
574
- // -- rsp[8 * num_arguments] : argument 0 (receiver)
575
- // -----------------------------------
576
-
577
- // TODO(1236192): Most runtime routines don't need the number of
578
- // arguments passed in because it is constant. At some point we
579
- // should remove this need and make the runtime routine entry code
580
- // smarter.
581
- Set(rax, num_arguments);
582
- JumpToExternalReference(ext, result_size);
583
- }
584
-
585
-
586
- MaybeObject* MacroAssembler::TryTailCallExternalReference(
587
- const ExternalReference& ext, int num_arguments, int result_size) {
588
- // ----------- S t a t e -------------
589
- // -- rsp[0] : return address
590
- // -- rsp[8] : argument num_arguments - 1
591
- // ...
592
- // -- rsp[8 * num_arguments] : argument 0 (receiver)
593
- // -----------------------------------
594
-
595
- // TODO(1236192): Most runtime routines don't need the number of
596
- // arguments passed in because it is constant. At some point we
597
- // should remove this need and make the runtime routine entry code
598
- // smarter.
599
- Set(rax, num_arguments);
600
- return TryJumpToExternalReference(ext, result_size);
601
- }
602
-
603
-
604
- void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
605
- int num_arguments,
606
- int result_size) {
607
- TailCallExternalReference(ExternalReference(fid, isolate()),
608
- num_arguments,
609
- result_size);
610
- }
611
-
612
-
613
- MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
614
- int num_arguments,
615
- int result_size) {
616
- return TryTailCallExternalReference(ExternalReference(fid, isolate()),
617
- num_arguments,
618
- result_size);
619
- }
620
-
621
-
622
- static int Offset(ExternalReference ref0, ExternalReference ref1) {
623
- int64_t offset = (ref0.address() - ref1.address());
624
- // Check that fits into int.
625
- ASSERT(static_cast<int>(offset) == offset);
626
- return static_cast<int>(offset);
627
- }
628
-
629
-
630
- void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
631
- #ifdef _WIN64
632
- // We need to prepare a slot for result handle on stack and put
633
- // a pointer to it into 1st arg register.
634
- EnterApiExitFrame(arg_stack_space + 1);
635
-
636
- // rcx must be used to pass the pointer to the return value slot.
637
- lea(rcx, StackSpaceOperand(arg_stack_space));
638
- #else
639
- EnterApiExitFrame(arg_stack_space);
640
- #endif
641
- }
642
-
643
-
644
- MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
645
- ApiFunction* function, int stack_space) {
646
- Label empty_result;
647
- Label prologue;
648
- Label promote_scheduled_exception;
649
- Label delete_allocated_handles;
650
- Label leave_exit_frame;
651
- Label write_back;
652
-
653
- ExternalReference next_address =
654
- ExternalReference::handle_scope_next_address();
655
- const int kNextOffset = 0;
656
- const int kLimitOffset = Offset(
657
- ExternalReference::handle_scope_limit_address(),
658
- next_address);
659
- const int kLevelOffset = Offset(
660
- ExternalReference::handle_scope_level_address(),
661
- next_address);
662
- ExternalReference scheduled_exception_address =
663
- ExternalReference::scheduled_exception_address(isolate());
664
-
665
- // Allocate HandleScope in callee-save registers.
666
- Register prev_next_address_reg = r14;
667
- Register prev_limit_reg = rbx;
668
- Register base_reg = r15;
669
- movq(base_reg, next_address);
670
- movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
671
- movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
672
- addl(Operand(base_reg, kLevelOffset), Immediate(1));
673
- // Call the api function!
674
- movq(rax,
675
- reinterpret_cast<int64_t>(function->address()),
676
- RelocInfo::RUNTIME_ENTRY);
677
- call(rax);
678
-
679
- #ifdef _WIN64
680
- // rax keeps a pointer to v8::Handle, unpack it.
681
- movq(rax, Operand(rax, 0));
682
- #endif
683
- // Check if the result handle holds 0.
684
- testq(rax, rax);
685
- j(zero, &empty_result);
686
- // It was non-zero. Dereference to get the result value.
687
- movq(rax, Operand(rax, 0));
688
- bind(&prologue);
689
-
690
- // No more valid handles (the result handle was the last one). Restore
691
- // previous handle scope.
692
- subl(Operand(base_reg, kLevelOffset), Immediate(1));
693
- movq(Operand(base_reg, kNextOffset), prev_next_address_reg);
694
- cmpq(prev_limit_reg, Operand(base_reg, kLimitOffset));
695
- j(not_equal, &delete_allocated_handles);
696
- bind(&leave_exit_frame);
697
-
698
- // Check if the function scheduled an exception.
699
- movq(rsi, scheduled_exception_address);
700
- Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
701
- j(not_equal, &promote_scheduled_exception);
702
-
703
- LeaveApiExitFrame();
704
- ret(stack_space * kPointerSize);
705
-
706
- bind(&promote_scheduled_exception);
707
- MaybeObject* result = TryTailCallRuntime(Runtime::kPromoteScheduledException,
708
- 0, 1);
709
- if (result->IsFailure()) {
710
- return result;
711
- }
712
-
713
- bind(&empty_result);
714
- // It was zero; the result is undefined.
715
- Move(rax, FACTORY->undefined_value());
716
- jmp(&prologue);
717
-
718
- // HandleScope limit has changed. Delete allocated extensions.
719
- bind(&delete_allocated_handles);
720
- movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
721
- movq(prev_limit_reg, rax);
722
- #ifdef _WIN64
723
- LoadAddress(rcx, ExternalReference::isolate_address());
724
- #else
725
- LoadAddress(rdi, ExternalReference::isolate_address());
726
- #endif
727
- LoadAddress(rax,
728
- ExternalReference::delete_handle_scope_extensions(isolate()));
729
- call(rax);
730
- movq(rax, prev_limit_reg);
731
- jmp(&leave_exit_frame);
732
-
733
- return result;
734
- }
735
-
736
-
737
- void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
738
- int result_size) {
739
- // Set the entry point and jump to the C entry runtime stub.
740
- LoadAddress(rbx, ext);
741
- CEntryStub ces(result_size);
742
- jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
743
- }
744
-
745
-
746
- MaybeObject* MacroAssembler::TryJumpToExternalReference(
747
- const ExternalReference& ext, int result_size) {
748
- // Set the entry point and jump to the C entry runtime stub.
749
- LoadAddress(rbx, ext);
750
- CEntryStub ces(result_size);
751
- return TryTailCallStub(&ces);
752
- }
753
-
754
-
755
- void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
756
- InvokeFlag flag,
757
- CallWrapper* call_wrapper) {
758
- // Calls are not allowed in some stubs.
759
- ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
760
-
761
- // Rely on the assertion to check that the number of provided
762
- // arguments match the expected number of arguments. Fake a
763
- // parameter count to avoid emitting code to do the check.
764
- ParameterCount expected(0);
765
- GetBuiltinEntry(rdx, id);
766
- InvokeCode(rdx, expected, expected, flag, call_wrapper);
767
- }
768
-
769
-
770
- void MacroAssembler::GetBuiltinFunction(Register target,
771
- Builtins::JavaScript id) {
772
- // Load the builtins object into target register.
773
- movq(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
774
- movq(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
775
- movq(target, FieldOperand(target,
776
- JSBuiltinsObject::OffsetOfFunctionWithId(id)));
777
- }
778
-
779
-
780
- void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
781
- ASSERT(!target.is(rdi));
782
- // Load the JavaScript builtin function from the builtins object.
783
- GetBuiltinFunction(rdi, id);
784
- movq(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
785
- }
786
-
787
-
788
- void MacroAssembler::Set(Register dst, int64_t x) {
789
- if (x == 0) {
790
- xorl(dst, dst);
791
- } else if (is_int32(x)) {
792
- movq(dst, Immediate(static_cast<int32_t>(x)));
793
- } else if (is_uint32(x)) {
794
- movl(dst, Immediate(static_cast<uint32_t>(x)));
795
- } else {
796
- movq(dst, x, RelocInfo::NONE);
797
- }
798
- }
799
-
800
- void MacroAssembler::Set(const Operand& dst, int64_t x) {
801
- if (is_int32(x)) {
802
- movq(dst, Immediate(static_cast<int32_t>(x)));
803
- } else {
804
- movq(kScratchRegister, x, RelocInfo::NONE);
805
- movq(dst, kScratchRegister);
806
- }
807
- }
808
-
809
- // ----------------------------------------------------------------------------
810
- // Smi tagging, untagging and tag detection.
811
-
812
- Register MacroAssembler::GetSmiConstant(Smi* source) {
813
- int value = source->value();
814
- if (value == 0) {
815
- xorl(kScratchRegister, kScratchRegister);
816
- return kScratchRegister;
817
- }
818
- if (value == 1) {
819
- return kSmiConstantRegister;
820
- }
821
- LoadSmiConstant(kScratchRegister, source);
822
- return kScratchRegister;
823
- }
824
-
825
- void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
826
- if (emit_debug_code()) {
827
- movq(dst,
828
- reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
829
- RelocInfo::NONE);
830
- cmpq(dst, kSmiConstantRegister);
831
- if (allow_stub_calls()) {
832
- Assert(equal, "Uninitialized kSmiConstantRegister");
833
- } else {
834
- NearLabel ok;
835
- j(equal, &ok);
836
- int3();
837
- bind(&ok);
838
- }
839
- }
840
- int value = source->value();
841
- if (value == 0) {
842
- xorl(dst, dst);
843
- return;
844
- }
845
- bool negative = value < 0;
846
- unsigned int uvalue = negative ? -value : value;
847
-
848
- switch (uvalue) {
849
- case 9:
850
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_8, 0));
851
- break;
852
- case 8:
853
- xorl(dst, dst);
854
- lea(dst, Operand(dst, kSmiConstantRegister, times_8, 0));
855
- break;
856
- case 4:
857
- xorl(dst, dst);
858
- lea(dst, Operand(dst, kSmiConstantRegister, times_4, 0));
859
- break;
860
- case 5:
861
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_4, 0));
862
- break;
863
- case 3:
864
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_2, 0));
865
- break;
866
- case 2:
867
- lea(dst, Operand(kSmiConstantRegister, kSmiConstantRegister, times_1, 0));
868
- break;
869
- case 1:
870
- movq(dst, kSmiConstantRegister);
871
- break;
872
- case 0:
873
- UNREACHABLE();
874
- return;
875
- default:
876
- movq(dst, reinterpret_cast<uint64_t>(source), RelocInfo::NONE);
877
- return;
878
- }
879
- if (negative) {
880
- neg(dst);
881
- }
882
- }
883
-
884
-
885
- void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
886
- ASSERT_EQ(0, kSmiTag);
887
- if (!dst.is(src)) {
888
- movl(dst, src);
889
- }
890
- shl(dst, Immediate(kSmiShift));
891
- }
892
-
893
-
894
- void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
895
- if (emit_debug_code()) {
896
- testb(dst, Immediate(0x01));
897
- NearLabel ok;
898
- j(zero, &ok);
899
- if (allow_stub_calls()) {
900
- Abort("Integer32ToSmiField writing to non-smi location");
901
- } else {
902
- int3();
903
- }
904
- bind(&ok);
905
- }
906
- ASSERT(kSmiShift % kBitsPerByte == 0);
907
- movl(Operand(dst, kSmiShift / kBitsPerByte), src);
908
- }
909
-
910
-
911
- void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
912
- Register src,
913
- int constant) {
914
- if (dst.is(src)) {
915
- addl(dst, Immediate(constant));
916
- } else {
917
- leal(dst, Operand(src, constant));
918
- }
919
- shl(dst, Immediate(kSmiShift));
920
- }
921
-
922
-
923
- void MacroAssembler::SmiToInteger32(Register dst, Register src) {
924
- ASSERT_EQ(0, kSmiTag);
925
- if (!dst.is(src)) {
926
- movq(dst, src);
927
- }
928
- shr(dst, Immediate(kSmiShift));
929
- }
930
-
931
-
932
- void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
933
- movl(dst, Operand(src, kSmiShift / kBitsPerByte));
934
- }
935
-
936
-
937
- void MacroAssembler::SmiToInteger64(Register dst, Register src) {
938
- ASSERT_EQ(0, kSmiTag);
939
- if (!dst.is(src)) {
940
- movq(dst, src);
941
- }
942
- sar(dst, Immediate(kSmiShift));
943
- }
944
-
945
-
946
- void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
947
- movsxlq(dst, Operand(src, kSmiShift / kBitsPerByte));
948
- }
949
-
950
-
951
- void MacroAssembler::SmiTest(Register src) {
952
- testq(src, src);
953
- }
954
-
955
-
956
- void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
957
- if (emit_debug_code()) {
958
- AbortIfNotSmi(smi1);
959
- AbortIfNotSmi(smi2);
960
- }
961
- cmpq(smi1, smi2);
962
- }
963
-
964
-
965
- void MacroAssembler::SmiCompare(Register dst, Smi* src) {
966
- if (emit_debug_code()) {
967
- AbortIfNotSmi(dst);
968
- }
969
- Cmp(dst, src);
970
- }
971
-
972
-
973
- void MacroAssembler::Cmp(Register dst, Smi* src) {
974
- ASSERT(!dst.is(kScratchRegister));
975
- if (src->value() == 0) {
976
- testq(dst, dst);
977
- } else {
978
- Register constant_reg = GetSmiConstant(src);
979
- cmpq(dst, constant_reg);
980
- }
981
- }
982
-
983
-
984
- void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
985
- if (emit_debug_code()) {
986
- AbortIfNotSmi(dst);
987
- AbortIfNotSmi(src);
988
- }
989
- cmpq(dst, src);
990
- }
991
-
992
-
993
- void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
994
- if (emit_debug_code()) {
995
- AbortIfNotSmi(dst);
996
- AbortIfNotSmi(src);
997
- }
998
- cmpq(dst, src);
999
- }
1000
-
1001
-
1002
- void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
1003
- if (emit_debug_code()) {
1004
- AbortIfNotSmi(dst);
1005
- }
1006
- cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
1007
- }
1008
-
1009
-
1010
- void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
1011
- // The Operand cannot use the smi register.
1012
- Register smi_reg = GetSmiConstant(src);
1013
- ASSERT(!dst.AddressUsesRegister(smi_reg));
1014
- cmpq(dst, smi_reg);
1015
- }
1016
-
1017
-
1018
- void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
1019
- cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
1020
- }
1021
-
1022
-
1023
- void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
1024
- Register src,
1025
- int power) {
1026
- ASSERT(power >= 0);
1027
- ASSERT(power < 64);
1028
- if (power == 0) {
1029
- SmiToInteger64(dst, src);
1030
- return;
1031
- }
1032
- if (!dst.is(src)) {
1033
- movq(dst, src);
1034
- }
1035
- if (power < kSmiShift) {
1036
- sar(dst, Immediate(kSmiShift - power));
1037
- } else if (power > kSmiShift) {
1038
- shl(dst, Immediate(power - kSmiShift));
1039
- }
1040
- }
1041
-
1042
-
1043
- void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
1044
- Register src,
1045
- int power) {
1046
- ASSERT((0 <= power) && (power < 32));
1047
- if (dst.is(src)) {
1048
- shr(dst, Immediate(power + kSmiShift));
1049
- } else {
1050
- UNIMPLEMENTED(); // Not used.
1051
- }
1052
- }
1053
-
1054
-
1055
- Condition MacroAssembler::CheckSmi(Register src) {
1056
- ASSERT_EQ(0, kSmiTag);
1057
- testb(src, Immediate(kSmiTagMask));
1058
- return zero;
1059
- }
1060
-
1061
-
1062
- Condition MacroAssembler::CheckSmi(const Operand& src) {
1063
- ASSERT_EQ(0, kSmiTag);
1064
- testb(src, Immediate(kSmiTagMask));
1065
- return zero;
1066
- }
1067
-
1068
-
1069
- Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
1070
- ASSERT_EQ(0, kSmiTag);
1071
- // Test that both bits of the mask 0x8000000000000001 are zero.
1072
- movq(kScratchRegister, src);
1073
- rol(kScratchRegister, Immediate(1));
1074
- testb(kScratchRegister, Immediate(3));
1075
- return zero;
1076
- }
1077
-
1078
-
1079
- Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
1080
- if (first.is(second)) {
1081
- return CheckSmi(first);
1082
- }
1083
- ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
1084
- leal(kScratchRegister, Operand(first, second, times_1, 0));
1085
- testb(kScratchRegister, Immediate(0x03));
1086
- return zero;
1087
- }
1088
-
1089
-
1090
- Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
1091
- Register second) {
1092
- if (first.is(second)) {
1093
- return CheckNonNegativeSmi(first);
1094
- }
1095
- movq(kScratchRegister, first);
1096
- or_(kScratchRegister, second);
1097
- rol(kScratchRegister, Immediate(1));
1098
- testl(kScratchRegister, Immediate(3));
1099
- return zero;
1100
- }
1101
-
1102
-
1103
- Condition MacroAssembler::CheckEitherSmi(Register first,
1104
- Register second,
1105
- Register scratch) {
1106
- if (first.is(second)) {
1107
- return CheckSmi(first);
1108
- }
1109
- if (scratch.is(second)) {
1110
- andl(scratch, first);
1111
- } else {
1112
- if (!scratch.is(first)) {
1113
- movl(scratch, first);
1114
- }
1115
- andl(scratch, second);
1116
- }
1117
- testb(scratch, Immediate(kSmiTagMask));
1118
- return zero;
1119
- }
1120
-
1121
-
1122
- Condition MacroAssembler::CheckIsMinSmi(Register src) {
1123
- ASSERT(!src.is(kScratchRegister));
1124
- // If we overflow by subtracting one, it's the minimal smi value.
1125
- cmpq(src, kSmiConstantRegister);
1126
- return overflow;
1127
- }
1128
-
1129
-
1130
- Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
1131
- // A 32-bit integer value can always be converted to a smi.
1132
- return always;
1133
- }
1134
-
1135
-
1136
- Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
1137
- // An unsigned 32-bit integer value is valid as long as the high bit
1138
- // is not set.
1139
- testl(src, src);
1140
- return positive;
1141
- }
1142
-
1143
-
1144
- void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
1145
- if (dst.is(src)) {
1146
- andl(dst, Immediate(kSmiTagMask));
1147
- } else {
1148
- movl(dst, Immediate(kSmiTagMask));
1149
- andl(dst, src);
1150
- }
1151
- }
1152
-
1153
-
1154
- void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
1155
- if (!(src.AddressUsesRegister(dst))) {
1156
- movl(dst, Immediate(kSmiTagMask));
1157
- andl(dst, src);
1158
- } else {
1159
- movl(dst, src);
1160
- andl(dst, Immediate(kSmiTagMask));
1161
- }
1162
- }
1163
-
1164
-
1165
- void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
1166
- if (constant->value() == 0) {
1167
- if (!dst.is(src)) {
1168
- movq(dst, src);
1169
- }
1170
- return;
1171
- } else if (dst.is(src)) {
1172
- ASSERT(!dst.is(kScratchRegister));
1173
- switch (constant->value()) {
1174
- case 1:
1175
- addq(dst, kSmiConstantRegister);
1176
- return;
1177
- case 2:
1178
- lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1179
- return;
1180
- case 4:
1181
- lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1182
- return;
1183
- case 8:
1184
- lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1185
- return;
1186
- default:
1187
- Register constant_reg = GetSmiConstant(constant);
1188
- addq(dst, constant_reg);
1189
- return;
1190
- }
1191
- } else {
1192
- switch (constant->value()) {
1193
- case 1:
1194
- lea(dst, Operand(src, kSmiConstantRegister, times_1, 0));
1195
- return;
1196
- case 2:
1197
- lea(dst, Operand(src, kSmiConstantRegister, times_2, 0));
1198
- return;
1199
- case 4:
1200
- lea(dst, Operand(src, kSmiConstantRegister, times_4, 0));
1201
- return;
1202
- case 8:
1203
- lea(dst, Operand(src, kSmiConstantRegister, times_8, 0));
1204
- return;
1205
- default:
1206
- LoadSmiConstant(dst, constant);
1207
- addq(dst, src);
1208
- return;
1209
- }
1210
- }
1211
- }
1212
-
1213
-
1214
- void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
1215
- if (constant->value() != 0) {
1216
- addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value()));
1217
- }
1218
- }
1219
-
1220
-
1221
- void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
1222
- if (constant->value() == 0) {
1223
- if (!dst.is(src)) {
1224
- movq(dst, src);
1225
- }
1226
- } else if (dst.is(src)) {
1227
- ASSERT(!dst.is(kScratchRegister));
1228
- Register constant_reg = GetSmiConstant(constant);
1229
- subq(dst, constant_reg);
1230
- } else {
1231
- if (constant->value() == Smi::kMinValue) {
1232
- LoadSmiConstant(dst, constant);
1233
- // Adding and subtracting the min-value gives the same result, it only
1234
- // differs on the overflow bit, which we don't check here.
1235
- addq(dst, src);
1236
- } else {
1237
- // Subtract by adding the negation.
1238
- LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
1239
- addq(dst, src);
1240
- }
1241
- }
1242
- }
1243
-
1244
-
1245
- void MacroAssembler::SmiAdd(Register dst,
1246
- Register src1,
1247
- Register src2) {
1248
- // No overflow checking. Use only when it's known that
1249
- // overflowing is impossible.
1250
- ASSERT(!dst.is(src2));
1251
- if (!dst.is(src1)) {
1252
- movq(dst, src1);
1253
- }
1254
- addq(dst, src2);
1255
- Assert(no_overflow, "Smi addition overflow");
1256
- }
1257
-
1258
-
1259
- void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
1260
- // No overflow checking. Use only when it's known that
1261
- // overflowing is impossible (e.g., subtracting two positive smis).
1262
- ASSERT(!dst.is(src2));
1263
- if (!dst.is(src1)) {
1264
- movq(dst, src1);
1265
- }
1266
- subq(dst, src2);
1267
- Assert(no_overflow, "Smi subtraction overflow");
1268
- }
1269
-
1270
-
1271
- void MacroAssembler::SmiSub(Register dst,
1272
- Register src1,
1273
- const Operand& src2) {
1274
- // No overflow checking. Use only when it's known that
1275
- // overflowing is impossible (e.g., subtracting two positive smis).
1276
- if (!dst.is(src1)) {
1277
- movq(dst, src1);
1278
- }
1279
- subq(dst, src2);
1280
- Assert(no_overflow, "Smi subtraction overflow");
1281
- }
1282
-
1283
-
1284
- void MacroAssembler::SmiNot(Register dst, Register src) {
1285
- ASSERT(!dst.is(kScratchRegister));
1286
- ASSERT(!src.is(kScratchRegister));
1287
- // Set tag and padding bits before negating, so that they are zero afterwards.
1288
- movl(kScratchRegister, Immediate(~0));
1289
- if (dst.is(src)) {
1290
- xor_(dst, kScratchRegister);
1291
- } else {
1292
- lea(dst, Operand(src, kScratchRegister, times_1, 0));
1293
- }
1294
- not_(dst);
1295
- }
1296
-
1297
-
1298
- void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
1299
- ASSERT(!dst.is(src2));
1300
- if (!dst.is(src1)) {
1301
- movq(dst, src1);
1302
- }
1303
- and_(dst, src2);
1304
- }
1305
-
1306
-
1307
- void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
1308
- if (constant->value() == 0) {
1309
- Set(dst, 0);
1310
- } else if (dst.is(src)) {
1311
- ASSERT(!dst.is(kScratchRegister));
1312
- Register constant_reg = GetSmiConstant(constant);
1313
- and_(dst, constant_reg);
1314
- } else {
1315
- LoadSmiConstant(dst, constant);
1316
- and_(dst, src);
1317
- }
1318
- }
1319
-
1320
-
1321
- void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
1322
- if (!dst.is(src1)) {
1323
- movq(dst, src1);
1324
- }
1325
- or_(dst, src2);
1326
- }
1327
-
1328
-
1329
- void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1330
- if (dst.is(src)) {
1331
- ASSERT(!dst.is(kScratchRegister));
1332
- Register constant_reg = GetSmiConstant(constant);
1333
- or_(dst, constant_reg);
1334
- } else {
1335
- LoadSmiConstant(dst, constant);
1336
- or_(dst, src);
1337
- }
1338
- }
1339
-
1340
-
1341
- void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1342
- if (!dst.is(src1)) {
1343
- movq(dst, src1);
1344
- }
1345
- xor_(dst, src2);
1346
- }
1347
-
1348
-
1349
- void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1350
- if (dst.is(src)) {
1351
- ASSERT(!dst.is(kScratchRegister));
1352
- Register constant_reg = GetSmiConstant(constant);
1353
- xor_(dst, constant_reg);
1354
- } else {
1355
- LoadSmiConstant(dst, constant);
1356
- xor_(dst, src);
1357
- }
1358
- }
1359
-
1360
-
1361
- void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1362
- Register src,
1363
- int shift_value) {
1364
- ASSERT(is_uint5(shift_value));
1365
- if (shift_value > 0) {
1366
- if (dst.is(src)) {
1367
- sar(dst, Immediate(shift_value + kSmiShift));
1368
- shl(dst, Immediate(kSmiShift));
1369
- } else {
1370
- UNIMPLEMENTED(); // Not used.
1371
- }
1372
- }
1373
- }
1374
-
1375
-
1376
- void MacroAssembler::SmiShiftLeftConstant(Register dst,
1377
- Register src,
1378
- int shift_value) {
1379
- if (!dst.is(src)) {
1380
- movq(dst, src);
1381
- }
1382
- if (shift_value > 0) {
1383
- shl(dst, Immediate(shift_value));
1384
- }
1385
- }
1386
-
1387
-
1388
- void MacroAssembler::SmiShiftLeft(Register dst,
1389
- Register src1,
1390
- Register src2) {
1391
- ASSERT(!dst.is(rcx));
1392
- NearLabel result_ok;
1393
- // Untag shift amount.
1394
- if (!dst.is(src1)) {
1395
- movq(dst, src1);
1396
- }
1397
- SmiToInteger32(rcx, src2);
1398
- // Shift amount specified by lower 5 bits, not six as the shl opcode.
1399
- and_(rcx, Immediate(0x1f));
1400
- shl_cl(dst);
1401
- }
1402
-
1403
-
1404
- void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1405
- Register src1,
1406
- Register src2) {
1407
- ASSERT(!dst.is(kScratchRegister));
1408
- ASSERT(!src1.is(kScratchRegister));
1409
- ASSERT(!src2.is(kScratchRegister));
1410
- ASSERT(!dst.is(rcx));
1411
- if (src1.is(rcx)) {
1412
- movq(kScratchRegister, src1);
1413
- } else if (src2.is(rcx)) {
1414
- movq(kScratchRegister, src2);
1415
- }
1416
- if (!dst.is(src1)) {
1417
- movq(dst, src1);
1418
- }
1419
- SmiToInteger32(rcx, src2);
1420
- orl(rcx, Immediate(kSmiShift));
1421
- sar_cl(dst); // Shift 32 + original rcx & 0x1f.
1422
- shl(dst, Immediate(kSmiShift));
1423
- if (src1.is(rcx)) {
1424
- movq(src1, kScratchRegister);
1425
- } else if (src2.is(rcx)) {
1426
- movq(src2, kScratchRegister);
1427
- }
1428
- }
1429
-
1430
-
1431
- SmiIndex MacroAssembler::SmiToIndex(Register dst,
1432
- Register src,
1433
- int shift) {
1434
- ASSERT(is_uint6(shift));
1435
- // There is a possible optimization if shift is in the range 60-63, but that
1436
- // will (and must) never happen.
1437
- if (!dst.is(src)) {
1438
- movq(dst, src);
1439
- }
1440
- if (shift < kSmiShift) {
1441
- sar(dst, Immediate(kSmiShift - shift));
1442
- } else {
1443
- shl(dst, Immediate(shift - kSmiShift));
1444
- }
1445
- return SmiIndex(dst, times_1);
1446
- }
1447
-
1448
- SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1449
- Register src,
1450
- int shift) {
1451
- // Register src holds a positive smi.
1452
- ASSERT(is_uint6(shift));
1453
- if (!dst.is(src)) {
1454
- movq(dst, src);
1455
- }
1456
- neg(dst);
1457
- if (shift < kSmiShift) {
1458
- sar(dst, Immediate(kSmiShift - shift));
1459
- } else {
1460
- shl(dst, Immediate(shift - kSmiShift));
1461
- }
1462
- return SmiIndex(dst, times_1);
1463
- }
1464
-
1465
-
1466
- void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
1467
- ASSERT_EQ(0, kSmiShift % kBitsPerByte);
1468
- addl(dst, Operand(src, kSmiShift / kBitsPerByte));
1469
- }
1470
-
1471
-
1472
-
1473
- void MacroAssembler::Move(Register dst, Register src) {
1474
- if (!dst.is(src)) {
1475
- movq(dst, src);
1476
- }
1477
- }
1478
-
1479
-
1480
- void MacroAssembler::Move(Register dst, Handle<Object> source) {
1481
- ASSERT(!source->IsFailure());
1482
- if (source->IsSmi()) {
1483
- Move(dst, Smi::cast(*source));
1484
- } else {
1485
- movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1486
- }
1487
- }
1488
-
1489
-
1490
- void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
1491
- ASSERT(!source->IsFailure());
1492
- if (source->IsSmi()) {
1493
- Move(dst, Smi::cast(*source));
1494
- } else {
1495
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1496
- movq(dst, kScratchRegister);
1497
- }
1498
- }
1499
-
1500
-
1501
- void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1502
- if (source->IsSmi()) {
1503
- Cmp(dst, Smi::cast(*source));
1504
- } else {
1505
- Move(kScratchRegister, source);
1506
- cmpq(dst, kScratchRegister);
1507
- }
1508
- }
1509
-
1510
-
1511
- void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1512
- if (source->IsSmi()) {
1513
- Cmp(dst, Smi::cast(*source));
1514
- } else {
1515
- ASSERT(source->IsHeapObject());
1516
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1517
- cmpq(dst, kScratchRegister);
1518
- }
1519
- }
1520
-
1521
-
1522
- void MacroAssembler::Push(Handle<Object> source) {
1523
- if (source->IsSmi()) {
1524
- Push(Smi::cast(*source));
1525
- } else {
1526
- ASSERT(source->IsHeapObject());
1527
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1528
- push(kScratchRegister);
1529
- }
1530
- }
1531
-
1532
-
1533
- void MacroAssembler::Push(Smi* source) {
1534
- intptr_t smi = reinterpret_cast<intptr_t>(source);
1535
- if (is_int32(smi)) {
1536
- push(Immediate(static_cast<int32_t>(smi)));
1537
- } else {
1538
- Register constant = GetSmiConstant(source);
1539
- push(constant);
1540
- }
1541
- }
1542
-
1543
-
1544
- void MacroAssembler::Drop(int stack_elements) {
1545
- if (stack_elements > 0) {
1546
- addq(rsp, Immediate(stack_elements * kPointerSize));
1547
- }
1548
- }
1549
-
1550
-
1551
- void MacroAssembler::Test(const Operand& src, Smi* source) {
1552
- testl(Operand(src, kIntSize), Immediate(source->value()));
1553
- }
1554
-
1555
-
1556
- void MacroAssembler::Jump(ExternalReference ext) {
1557
- LoadAddress(kScratchRegister, ext);
1558
- jmp(kScratchRegister);
1559
- }
1560
-
1561
-
1562
- void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1563
- movq(kScratchRegister, destination, rmode);
1564
- jmp(kScratchRegister);
1565
- }
1566
-
1567
-
1568
- void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1569
- // TODO(X64): Inline this
1570
- jmp(code_object, rmode);
1571
- }
1572
-
1573
-
1574
- int MacroAssembler::CallSize(ExternalReference ext) {
1575
- // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
1576
- const int kCallInstructionSize = 3;
1577
- return LoadAddressSize(ext) + kCallInstructionSize;
1578
- }
1579
-
1580
-
1581
- void MacroAssembler::Call(ExternalReference ext) {
1582
- #ifdef DEBUG
1583
- int end_position = pc_offset() + CallSize(ext);
1584
- #endif
1585
- LoadAddress(kScratchRegister, ext);
1586
- call(kScratchRegister);
1587
- #ifdef DEBUG
1588
- CHECK_EQ(end_position, pc_offset());
1589
- #endif
1590
- }
1591
-
1592
-
1593
- void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1594
- #ifdef DEBUG
1595
- int end_position = pc_offset() + CallSize(destination, rmode);
1596
- #endif
1597
- movq(kScratchRegister, destination, rmode);
1598
- call(kScratchRegister);
1599
- #ifdef DEBUG
1600
- CHECK_EQ(pc_offset(), end_position);
1601
- #endif
1602
- }
1603
-
1604
-
1605
- void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1606
- #ifdef DEBUG
1607
- int end_position = pc_offset() + CallSize(code_object);
1608
- #endif
1609
- ASSERT(RelocInfo::IsCodeTarget(rmode));
1610
- call(code_object, rmode);
1611
- #ifdef DEBUG
1612
- CHECK_EQ(end_position, pc_offset());
1613
- #endif
1614
- }
1615
-
1616
-
1617
- void MacroAssembler::Pushad() {
1618
- push(rax);
1619
- push(rcx);
1620
- push(rdx);
1621
- push(rbx);
1622
- // Not pushing rsp or rbp.
1623
- push(rsi);
1624
- push(rdi);
1625
- push(r8);
1626
- push(r9);
1627
- // r10 is kScratchRegister.
1628
- push(r11);
1629
- // r12 is kSmiConstantRegister.
1630
- // r13 is kRootRegister.
1631
- push(r14);
1632
- push(r15);
1633
- STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
1634
- // Use lea for symmetry with Popad.
1635
- int sp_delta =
1636
- (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1637
- lea(rsp, Operand(rsp, -sp_delta));
1638
- }
1639
-
1640
-
1641
- void MacroAssembler::Popad() {
1642
- // Popad must not change the flags, so use lea instead of addq.
1643
- int sp_delta =
1644
- (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
1645
- lea(rsp, Operand(rsp, sp_delta));
1646
- pop(r15);
1647
- pop(r14);
1648
- pop(r11);
1649
- pop(r9);
1650
- pop(r8);
1651
- pop(rdi);
1652
- pop(rsi);
1653
- pop(rbx);
1654
- pop(rdx);
1655
- pop(rcx);
1656
- pop(rax);
1657
- }
1658
-
1659
-
1660
- void MacroAssembler::Dropad() {
1661
- addq(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
1662
- }
1663
-
1664
-
1665
- // Order general registers are pushed by Pushad:
1666
- // rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
1667
- int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
1668
- 0,
1669
- 1,
1670
- 2,
1671
- 3,
1672
- -1,
1673
- -1,
1674
- 4,
1675
- 5,
1676
- 6,
1677
- 7,
1678
- -1,
1679
- 8,
1680
- -1,
1681
- -1,
1682
- 9,
1683
- 10
1684
- };
1685
-
1686
-
1687
- void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
1688
- movq(SafepointRegisterSlot(dst), src);
1689
- }
1690
-
1691
-
1692
- void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
1693
- movq(dst, SafepointRegisterSlot(src));
1694
- }
1695
-
1696
-
1697
- Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
1698
- return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
1699
- }
1700
-
1701
-
1702
- void MacroAssembler::PushTryHandler(CodeLocation try_location,
1703
- HandlerType type) {
1704
- // Adjust this code if not the case.
1705
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1706
-
1707
- // The pc (return address) is already on TOS. This code pushes state,
1708
- // frame pointer and current handler. Check that they are expected
1709
- // next on the stack, in that order.
1710
- ASSERT_EQ(StackHandlerConstants::kStateOffset,
1711
- StackHandlerConstants::kPCOffset - kPointerSize);
1712
- ASSERT_EQ(StackHandlerConstants::kFPOffset,
1713
- StackHandlerConstants::kStateOffset - kPointerSize);
1714
- ASSERT_EQ(StackHandlerConstants::kNextOffset,
1715
- StackHandlerConstants::kFPOffset - kPointerSize);
1716
-
1717
- if (try_location == IN_JAVASCRIPT) {
1718
- if (type == TRY_CATCH_HANDLER) {
1719
- push(Immediate(StackHandler::TRY_CATCH));
1720
- } else {
1721
- push(Immediate(StackHandler::TRY_FINALLY));
1722
- }
1723
- push(rbp);
1724
- } else {
1725
- ASSERT(try_location == IN_JS_ENTRY);
1726
- // The frame pointer does not point to a JS frame so we save NULL
1727
- // for rbp. We expect the code throwing an exception to check rbp
1728
- // before dereferencing it to restore the context.
1729
- push(Immediate(StackHandler::ENTRY));
1730
- push(Immediate(0)); // NULL frame pointer.
1731
- }
1732
- // Save the current handler.
1733
- Operand handler_operand =
1734
- ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1735
- push(handler_operand);
1736
- // Link this handler.
1737
- movq(handler_operand, rsp);
1738
- }
1739
-
1740
-
1741
- void MacroAssembler::PopTryHandler() {
1742
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1743
- // Unlink this handler.
1744
- Operand handler_operand =
1745
- ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
1746
- pop(handler_operand);
1747
- // Remove the remaining fields.
1748
- addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1749
- }
1750
-
1751
-
1752
- void MacroAssembler::Throw(Register value) {
1753
- // Check that stack should contain next handler, frame pointer, state and
1754
- // return address in that order.
1755
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1756
- StackHandlerConstants::kStateOffset);
1757
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1758
- StackHandlerConstants::kPCOffset);
1759
- // Keep thrown value in rax.
1760
- if (!value.is(rax)) {
1761
- movq(rax, value);
1762
- }
1763
-
1764
- ExternalReference handler_address(Isolate::k_handler_address, isolate());
1765
- Operand handler_operand = ExternalOperand(handler_address);
1766
- movq(rsp, handler_operand);
1767
- // get next in chain
1768
- pop(handler_operand);
1769
- pop(rbp); // pop frame pointer
1770
- pop(rdx); // remove state
1771
-
1772
- // Before returning we restore the context from the frame pointer if not NULL.
1773
- // The frame pointer is NULL in the exception handler of a JS entry frame.
1774
- Set(rsi, 0); // Tentatively set context pointer to NULL
1775
- NearLabel skip;
1776
- cmpq(rbp, Immediate(0));
1777
- j(equal, &skip);
1778
- movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
1779
- bind(&skip);
1780
- ret(0);
1781
- }
1782
-
1783
-
1784
- void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
1785
- Register value) {
1786
- // Keep thrown value in rax.
1787
- if (!value.is(rax)) {
1788
- movq(rax, value);
1789
- }
1790
- // Fetch top stack handler.
1791
- ExternalReference handler_address(Isolate::k_handler_address, isolate());
1792
- Load(rsp, handler_address);
1793
-
1794
- // Unwind the handlers until the ENTRY handler is found.
1795
- NearLabel loop, done;
1796
- bind(&loop);
1797
- // Load the type of the current stack handler.
1798
- const int kStateOffset = StackHandlerConstants::kStateOffset;
1799
- cmpq(Operand(rsp, kStateOffset), Immediate(StackHandler::ENTRY));
1800
- j(equal, &done);
1801
- // Fetch the next handler in the list.
1802
- const int kNextOffset = StackHandlerConstants::kNextOffset;
1803
- movq(rsp, Operand(rsp, kNextOffset));
1804
- jmp(&loop);
1805
- bind(&done);
1806
-
1807
- // Set the top handler address to next handler past the current ENTRY handler.
1808
- Operand handler_operand = ExternalOperand(handler_address);
1809
- pop(handler_operand);
1810
-
1811
- if (type == OUT_OF_MEMORY) {
1812
- // Set external caught exception to false.
1813
- ExternalReference external_caught(
1814
- Isolate::k_external_caught_exception_address, isolate());
1815
- movq(rax, Immediate(false));
1816
- Store(external_caught, rax);
1817
-
1818
- // Set pending exception and rax to out of memory exception.
1819
- ExternalReference pending_exception(Isolate::k_pending_exception_address,
1820
- isolate());
1821
- movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
1822
- Store(pending_exception, rax);
1823
- }
1824
-
1825
- // Clear the context pointer.
1826
- Set(rsi, 0);
1827
-
1828
- // Restore registers from handler.
1829
- STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
1830
- StackHandlerConstants::kFPOffset);
1831
- pop(rbp); // FP
1832
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
1833
- StackHandlerConstants::kStateOffset);
1834
- pop(rdx); // State
1835
-
1836
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
1837
- StackHandlerConstants::kPCOffset);
1838
- ret(0);
1839
- }
1840
-
1841
-
1842
- void MacroAssembler::Ret() {
1843
- ret(0);
1844
- }
1845
-
1846
-
1847
- void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
1848
- if (is_uint16(bytes_dropped)) {
1849
- ret(bytes_dropped);
1850
- } else {
1851
- pop(scratch);
1852
- addq(rsp, Immediate(bytes_dropped));
1853
- push(scratch);
1854
- ret(0);
1855
- }
1856
- }
1857
-
1858
-
1859
- void MacroAssembler::FCmp() {
1860
- fucomip();
1861
- fstp(0);
1862
- }
1863
-
1864
-
1865
- void MacroAssembler::CmpObjectType(Register heap_object,
1866
- InstanceType type,
1867
- Register map) {
1868
- movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1869
- CmpInstanceType(map, type);
1870
- }
1871
-
1872
-
1873
- void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1874
- cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1875
- Immediate(static_cast<int8_t>(type)));
1876
- }
1877
-
1878
-
1879
- void MacroAssembler::CheckMap(Register obj,
1880
- Handle<Map> map,
1881
- Label* fail,
1882
- bool is_heap_object) {
1883
- if (!is_heap_object) {
1884
- JumpIfSmi(obj, fail);
1885
- }
1886
- Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
1887
- j(not_equal, fail);
1888
- }
1889
-
1890
-
1891
- void MacroAssembler::AbortIfNotNumber(Register object) {
1892
- NearLabel ok;
1893
- Condition is_smi = CheckSmi(object);
1894
- j(is_smi, &ok);
1895
- Cmp(FieldOperand(object, HeapObject::kMapOffset),
1896
- FACTORY->heap_number_map());
1897
- Assert(equal, "Operand not a number");
1898
- bind(&ok);
1899
- }
1900
-
1901
-
1902
- void MacroAssembler::AbortIfSmi(Register object) {
1903
- NearLabel ok;
1904
- Condition is_smi = CheckSmi(object);
1905
- Assert(NegateCondition(is_smi), "Operand is a smi");
1906
- }
1907
-
1908
-
1909
- void MacroAssembler::AbortIfNotSmi(Register object) {
1910
- Condition is_smi = CheckSmi(object);
1911
- Assert(is_smi, "Operand is not a smi");
1912
- }
1913
-
1914
-
1915
- void MacroAssembler::AbortIfNotSmi(const Operand& object) {
1916
- Condition is_smi = CheckSmi(object);
1917
- Assert(is_smi, "Operand is not a smi");
1918
- }
1919
-
1920
-
1921
- void MacroAssembler::AbortIfNotString(Register object) {
1922
- testb(object, Immediate(kSmiTagMask));
1923
- Assert(not_equal, "Operand is not a string");
1924
- push(object);
1925
- movq(object, FieldOperand(object, HeapObject::kMapOffset));
1926
- CmpInstanceType(object, FIRST_NONSTRING_TYPE);
1927
- pop(object);
1928
- Assert(below, "Operand is not a string");
1929
- }
1930
-
1931
-
1932
- void MacroAssembler::AbortIfNotRootValue(Register src,
1933
- Heap::RootListIndex root_value_index,
1934
- const char* message) {
1935
- ASSERT(!src.is(kScratchRegister));
1936
- LoadRoot(kScratchRegister, root_value_index);
1937
- cmpq(src, kScratchRegister);
1938
- Check(equal, message);
1939
- }
1940
-
1941
-
1942
-
1943
- Condition MacroAssembler::IsObjectStringType(Register heap_object,
1944
- Register map,
1945
- Register instance_type) {
1946
- movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1947
- movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
1948
- ASSERT(kNotStringTag != 0);
1949
- testb(instance_type, Immediate(kIsNotStringMask));
1950
- return zero;
1951
- }
1952
-
1953
-
1954
- void MacroAssembler::TryGetFunctionPrototype(Register function,
1955
- Register result,
1956
- Label* miss) {
1957
- // Check that the receiver isn't a smi.
1958
- testl(function, Immediate(kSmiTagMask));
1959
- j(zero, miss);
1960
-
1961
- // Check that the function really is a function.
1962
- CmpObjectType(function, JS_FUNCTION_TYPE, result);
1963
- j(not_equal, miss);
1964
-
1965
- // Make sure that the function has an instance prototype.
1966
- NearLabel non_instance;
1967
- testb(FieldOperand(result, Map::kBitFieldOffset),
1968
- Immediate(1 << Map::kHasNonInstancePrototype));
1969
- j(not_zero, &non_instance);
1970
-
1971
- // Get the prototype or initial map from the function.
1972
- movq(result,
1973
- FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1974
-
1975
- // If the prototype or initial map is the hole, don't return it and
1976
- // simply miss the cache instead. This will allow us to allocate a
1977
- // prototype object on-demand in the runtime system.
1978
- CompareRoot(result, Heap::kTheHoleValueRootIndex);
1979
- j(equal, miss);
1980
-
1981
- // If the function does not have an initial map, we're done.
1982
- NearLabel done;
1983
- CmpObjectType(result, MAP_TYPE, kScratchRegister);
1984
- j(not_equal, &done);
1985
-
1986
- // Get the prototype from the initial map.
1987
- movq(result, FieldOperand(result, Map::kPrototypeOffset));
1988
- jmp(&done);
1989
-
1990
- // Non-instance prototype: Fetch prototype from constructor field
1991
- // in initial map.
1992
- bind(&non_instance);
1993
- movq(result, FieldOperand(result, Map::kConstructorOffset));
1994
-
1995
- // All done.
1996
- bind(&done);
1997
- }
1998
-
1999
-
2000
- void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2001
- if (FLAG_native_code_counters && counter->Enabled()) {
2002
- Operand counter_operand = ExternalOperand(ExternalReference(counter));
2003
- movq(counter_operand, Immediate(value));
2004
- }
2005
- }
2006
-
2007
-
2008
- void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2009
- ASSERT(value > 0);
2010
- if (FLAG_native_code_counters && counter->Enabled()) {
2011
- Operand counter_operand = ExternalOperand(ExternalReference(counter));
2012
- if (value == 1) {
2013
- incl(counter_operand);
2014
- } else {
2015
- addl(counter_operand, Immediate(value));
2016
- }
2017
- }
2018
- }
2019
-
2020
-
2021
- void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2022
- ASSERT(value > 0);
2023
- if (FLAG_native_code_counters && counter->Enabled()) {
2024
- Operand counter_operand = ExternalOperand(ExternalReference(counter));
2025
- if (value == 1) {
2026
- decl(counter_operand);
2027
- } else {
2028
- subl(counter_operand, Immediate(value));
2029
- }
2030
- }
2031
- }
2032
-
2033
-
2034
- #ifdef ENABLE_DEBUGGER_SUPPORT
2035
- void MacroAssembler::DebugBreak() {
2036
- ASSERT(allow_stub_calls());
2037
- Set(rax, 0); // No arguments.
2038
- LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
2039
- CEntryStub ces(1);
2040
- Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
2041
- }
2042
- #endif // ENABLE_DEBUGGER_SUPPORT
2043
-
2044
-
2045
- void MacroAssembler::InvokeCode(Register code,
2046
- const ParameterCount& expected,
2047
- const ParameterCount& actual,
2048
- InvokeFlag flag,
2049
- CallWrapper* call_wrapper) {
2050
- NearLabel done;
2051
- InvokePrologue(expected,
2052
- actual,
2053
- Handle<Code>::null(),
2054
- code,
2055
- &done,
2056
- flag,
2057
- call_wrapper);
2058
- if (flag == CALL_FUNCTION) {
2059
- if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
2060
- call(code);
2061
- if (call_wrapper != NULL) call_wrapper->AfterCall();
2062
- } else {
2063
- ASSERT(flag == JUMP_FUNCTION);
2064
- jmp(code);
2065
- }
2066
- bind(&done);
2067
- }
2068
-
2069
-
2070
- void MacroAssembler::InvokeCode(Handle<Code> code,
2071
- const ParameterCount& expected,
2072
- const ParameterCount& actual,
2073
- RelocInfo::Mode rmode,
2074
- InvokeFlag flag,
2075
- CallWrapper* call_wrapper) {
2076
- NearLabel done;
2077
- Register dummy = rax;
2078
- InvokePrologue(expected,
2079
- actual,
2080
- code,
2081
- dummy,
2082
- &done,
2083
- flag,
2084
- call_wrapper);
2085
- if (flag == CALL_FUNCTION) {
2086
- if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
2087
- Call(code, rmode);
2088
- if (call_wrapper != NULL) call_wrapper->AfterCall();
2089
- } else {
2090
- ASSERT(flag == JUMP_FUNCTION);
2091
- Jump(code, rmode);
2092
- }
2093
- bind(&done);
2094
- }
2095
-
2096
-
2097
- void MacroAssembler::InvokeFunction(Register function,
2098
- const ParameterCount& actual,
2099
- InvokeFlag flag,
2100
- CallWrapper* call_wrapper) {
2101
- ASSERT(function.is(rdi));
2102
- movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2103
- movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
2104
- movsxlq(rbx,
2105
- FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
2106
- // Advances rdx to the end of the Code object header, to the start of
2107
- // the executable code.
2108
- movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2109
-
2110
- ParameterCount expected(rbx);
2111
- InvokeCode(rdx, expected, actual, flag, call_wrapper);
2112
- }
2113
-
2114
-
2115
- void MacroAssembler::InvokeFunction(JSFunction* function,
2116
- const ParameterCount& actual,
2117
- InvokeFlag flag,
2118
- CallWrapper* call_wrapper) {
2119
- ASSERT(function->is_compiled());
2120
- // Get the function and setup the context.
2121
- Move(rdi, Handle<JSFunction>(function));
2122
- movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
2123
-
2124
- if (V8::UseCrankshaft()) {
2125
- // Since Crankshaft can recompile a function, we need to load
2126
- // the Code object every time we call the function.
2127
- movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
2128
- ParameterCount expected(function->shared()->formal_parameter_count());
2129
- InvokeCode(rdx, expected, actual, flag, call_wrapper);
2130
- } else {
2131
- // Invoke the cached code.
2132
- Handle<Code> code(function->code());
2133
- ParameterCount expected(function->shared()->formal_parameter_count());
2134
- InvokeCode(code,
2135
- expected,
2136
- actual,
2137
- RelocInfo::CODE_TARGET,
2138
- flag,
2139
- call_wrapper);
2140
- }
2141
- }
2142
-
2143
-
2144
- void MacroAssembler::EnterFrame(StackFrame::Type type) {
2145
- push(rbp);
2146
- movq(rbp, rsp);
2147
- push(rsi); // Context.
2148
- Push(Smi::FromInt(type));
2149
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2150
- push(kScratchRegister);
2151
- if (emit_debug_code()) {
2152
- movq(kScratchRegister,
2153
- FACTORY->undefined_value(),
2154
- RelocInfo::EMBEDDED_OBJECT);
2155
- cmpq(Operand(rsp, 0), kScratchRegister);
2156
- Check(not_equal, "code object not properly patched");
2157
- }
2158
- }
2159
-
2160
-
2161
- void MacroAssembler::LeaveFrame(StackFrame::Type type) {
2162
- if (emit_debug_code()) {
2163
- Move(kScratchRegister, Smi::FromInt(type));
2164
- cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
2165
- Check(equal, "stack frame types must match");
2166
- }
2167
- movq(rsp, rbp);
2168
- pop(rbp);
2169
- }
2170
-
2171
-
2172
- void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
2173
- // Setup the frame structure on the stack.
2174
- // All constants are relative to the frame pointer of the exit frame.
2175
- ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
2176
- ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
2177
- ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
2178
- push(rbp);
2179
- movq(rbp, rsp);
2180
-
2181
- // Reserve room for entry stack pointer and push the code object.
2182
- ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
2183
- push(Immediate(0)); // Saved entry sp, patched before call.
2184
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
2185
- push(kScratchRegister); // Accessed from EditFrame::code_slot.
2186
-
2187
- // Save the frame pointer and the context in top.
2188
- if (save_rax) {
2189
- movq(r14, rax); // Backup rax in callee-save register.
2190
- }
2191
-
2192
- Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
2193
- Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
2194
- }
2195
-
2196
-
2197
- void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
2198
- bool save_doubles) {
2199
- #ifdef _WIN64
2200
- const int kShadowSpace = 4;
2201
- arg_stack_space += kShadowSpace;
2202
- #endif
2203
- // Optionally save all XMM registers.
2204
- if (save_doubles) {
2205
- int space = XMMRegister::kNumRegisters * kDoubleSize +
2206
- arg_stack_space * kPointerSize;
2207
- subq(rsp, Immediate(space));
2208
- int offset = -2 * kPointerSize;
2209
- for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
2210
- XMMRegister reg = XMMRegister::FromAllocationIndex(i);
2211
- movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
2212
- }
2213
- } else if (arg_stack_space > 0) {
2214
- subq(rsp, Immediate(arg_stack_space * kPointerSize));
2215
- }
2216
-
2217
- // Get the required frame alignment for the OS.
2218
- const int kFrameAlignment = OS::ActivationFrameAlignment();
2219
- if (kFrameAlignment > 0) {
2220
- ASSERT(IsPowerOf2(kFrameAlignment));
2221
- movq(kScratchRegister, Immediate(-kFrameAlignment));
2222
- and_(rsp, kScratchRegister);
2223
- }
2224
-
2225
- // Patch the saved entry sp.
2226
- movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
2227
- }
2228
-
2229
-
2230
- void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
2231
- EnterExitFramePrologue(true);
2232
-
2233
- // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
2234
- // so it must be retained across the C-call.
2235
- int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
2236
- lea(r15, Operand(rbp, r14, times_pointer_size, offset));
2237
-
2238
- EnterExitFrameEpilogue(arg_stack_space, save_doubles);
2239
- }
2240
-
2241
-
2242
- void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
2243
- EnterExitFramePrologue(false);
2244
- EnterExitFrameEpilogue(arg_stack_space, false);
2245
- }
2246
-
2247
-
2248
- void MacroAssembler::LeaveExitFrame(bool save_doubles) {
2249
- // Registers:
2250
- // r15 : argv
2251
- if (save_doubles) {
2252
- int offset = -2 * kPointerSize;
2253
- for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
2254
- XMMRegister reg = XMMRegister::FromAllocationIndex(i);
2255
- movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
2256
- }
2257
- }
2258
- // Get the return address from the stack and restore the frame pointer.
2259
- movq(rcx, Operand(rbp, 1 * kPointerSize));
2260
- movq(rbp, Operand(rbp, 0 * kPointerSize));
2261
-
2262
- // Drop everything up to and including the arguments and the receiver
2263
- // from the caller stack.
2264
- lea(rsp, Operand(r15, 1 * kPointerSize));
2265
-
2266
- // Push the return address to get ready to return.
2267
- push(rcx);
2268
-
2269
- LeaveExitFrameEpilogue();
2270
- }
2271
-
2272
-
2273
- void MacroAssembler::LeaveApiExitFrame() {
2274
- movq(rsp, rbp);
2275
- pop(rbp);
2276
-
2277
- LeaveExitFrameEpilogue();
2278
- }
2279
-
2280
-
2281
- void MacroAssembler::LeaveExitFrameEpilogue() {
2282
- // Restore current context from top and clear it in debug mode.
2283
- ExternalReference context_address(Isolate::k_context_address, isolate());
2284
- Operand context_operand = ExternalOperand(context_address);
2285
- movq(rsi, context_operand);
2286
- #ifdef DEBUG
2287
- movq(context_operand, Immediate(0));
2288
- #endif
2289
-
2290
- // Clear the top frame.
2291
- ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
2292
- isolate());
2293
- Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
2294
- movq(c_entry_fp_operand, Immediate(0));
2295
- }
2296
-
2297
-
2298
- void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2299
- Register scratch,
2300
- Label* miss) {
2301
- Label same_contexts;
2302
-
2303
- ASSERT(!holder_reg.is(scratch));
2304
- ASSERT(!scratch.is(kScratchRegister));
2305
- // Load current lexical context from the stack frame.
2306
- movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2307
-
2308
- // When generating debug code, make sure the lexical context is set.
2309
- if (emit_debug_code()) {
2310
- cmpq(scratch, Immediate(0));
2311
- Check(not_equal, "we should not have an empty lexical context");
2312
- }
2313
- // Load the global context of the current context.
2314
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2315
- movq(scratch, FieldOperand(scratch, offset));
2316
- movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2317
-
2318
- // Check the context is a global context.
2319
- if (emit_debug_code()) {
2320
- Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2321
- FACTORY->global_context_map());
2322
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2323
- }
2324
-
2325
- // Check if both contexts are the same.
2326
- cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2327
- j(equal, &same_contexts);
2328
-
2329
- // Compare security tokens.
2330
- // Check that the security token in the calling global object is
2331
- // compatible with the security token in the receiving global
2332
- // object.
2333
-
2334
- // Check the context is a global context.
2335
- if (emit_debug_code()) {
2336
- // Preserve original value of holder_reg.
2337
- push(holder_reg);
2338
- movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2339
- CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2340
- Check(not_equal, "JSGlobalProxy::context() should not be null.");
2341
-
2342
- // Read the first word and compare to global_context_map(),
2343
- movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2344
- CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2345
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2346
- pop(holder_reg);
2347
- }
2348
-
2349
- movq(kScratchRegister,
2350
- FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2351
- int token_offset =
2352
- Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
2353
- movq(scratch, FieldOperand(scratch, token_offset));
2354
- cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2355
- j(not_equal, miss);
2356
-
2357
- bind(&same_contexts);
2358
- }
2359
-
2360
-
2361
- void MacroAssembler::LoadAllocationTopHelper(Register result,
2362
- Register scratch,
2363
- AllocationFlags flags) {
2364
- ExternalReference new_space_allocation_top =
2365
- ExternalReference::new_space_allocation_top_address(isolate());
2366
-
2367
- // Just return if allocation top is already known.
2368
- if ((flags & RESULT_CONTAINS_TOP) != 0) {
2369
- // No use of scratch if allocation top is provided.
2370
- ASSERT(!scratch.is_valid());
2371
- #ifdef DEBUG
2372
- // Assert that result actually contains top on entry.
2373
- Operand top_operand = ExternalOperand(new_space_allocation_top);
2374
- cmpq(result, top_operand);
2375
- Check(equal, "Unexpected allocation top");
2376
- #endif
2377
- return;
2378
- }
2379
-
2380
- // Move address of new object to result. Use scratch register if available,
2381
- // and keep address in scratch until call to UpdateAllocationTopHelper.
2382
- if (scratch.is_valid()) {
2383
- LoadAddress(scratch, new_space_allocation_top);
2384
- movq(result, Operand(scratch, 0));
2385
- } else {
2386
- Load(result, new_space_allocation_top);
2387
- }
2388
- }
2389
-
2390
-
2391
- void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2392
- Register scratch) {
2393
- if (emit_debug_code()) {
2394
- testq(result_end, Immediate(kObjectAlignmentMask));
2395
- Check(zero, "Unaligned allocation in new space");
2396
- }
2397
-
2398
- ExternalReference new_space_allocation_top =
2399
- ExternalReference::new_space_allocation_top_address(isolate());
2400
-
2401
- // Update new top.
2402
- if (scratch.is_valid()) {
2403
- // Scratch already contains address of allocation top.
2404
- movq(Operand(scratch, 0), result_end);
2405
- } else {
2406
- Store(new_space_allocation_top, result_end);
2407
- }
2408
- }
2409
-
2410
-
2411
- void MacroAssembler::AllocateInNewSpace(int object_size,
2412
- Register result,
2413
- Register result_end,
2414
- Register scratch,
2415
- Label* gc_required,
2416
- AllocationFlags flags) {
2417
- if (!FLAG_inline_new) {
2418
- if (emit_debug_code()) {
2419
- // Trash the registers to simulate an allocation failure.
2420
- movl(result, Immediate(0x7091));
2421
- if (result_end.is_valid()) {
2422
- movl(result_end, Immediate(0x7191));
2423
- }
2424
- if (scratch.is_valid()) {
2425
- movl(scratch, Immediate(0x7291));
2426
- }
2427
- }
2428
- jmp(gc_required);
2429
- return;
2430
- }
2431
- ASSERT(!result.is(result_end));
2432
-
2433
- // Load address of new object into result.
2434
- LoadAllocationTopHelper(result, scratch, flags);
2435
-
2436
- // Calculate new top and bail out if new space is exhausted.
2437
- ExternalReference new_space_allocation_limit =
2438
- ExternalReference::new_space_allocation_limit_address(isolate());
2439
-
2440
- Register top_reg = result_end.is_valid() ? result_end : result;
2441
-
2442
- if (!top_reg.is(result)) {
2443
- movq(top_reg, result);
2444
- }
2445
- addq(top_reg, Immediate(object_size));
2446
- j(carry, gc_required);
2447
- Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2448
- cmpq(top_reg, limit_operand);
2449
- j(above, gc_required);
2450
-
2451
- // Update allocation top.
2452
- UpdateAllocationTopHelper(top_reg, scratch);
2453
-
2454
- if (top_reg.is(result)) {
2455
- if ((flags & TAG_OBJECT) != 0) {
2456
- subq(result, Immediate(object_size - kHeapObjectTag));
2457
- } else {
2458
- subq(result, Immediate(object_size));
2459
- }
2460
- } else if ((flags & TAG_OBJECT) != 0) {
2461
- // Tag the result if requested.
2462
- addq(result, Immediate(kHeapObjectTag));
2463
- }
2464
- }
2465
-
2466
-
2467
- void MacroAssembler::AllocateInNewSpace(int header_size,
2468
- ScaleFactor element_size,
2469
- Register element_count,
2470
- Register result,
2471
- Register result_end,
2472
- Register scratch,
2473
- Label* gc_required,
2474
- AllocationFlags flags) {
2475
- if (!FLAG_inline_new) {
2476
- if (emit_debug_code()) {
2477
- // Trash the registers to simulate an allocation failure.
2478
- movl(result, Immediate(0x7091));
2479
- movl(result_end, Immediate(0x7191));
2480
- if (scratch.is_valid()) {
2481
- movl(scratch, Immediate(0x7291));
2482
- }
2483
- // Register element_count is not modified by the function.
2484
- }
2485
- jmp(gc_required);
2486
- return;
2487
- }
2488
- ASSERT(!result.is(result_end));
2489
-
2490
- // Load address of new object into result.
2491
- LoadAllocationTopHelper(result, scratch, flags);
2492
-
2493
- // Calculate new top and bail out if new space is exhausted.
2494
- ExternalReference new_space_allocation_limit =
2495
- ExternalReference::new_space_allocation_limit_address(isolate());
2496
-
2497
- // We assume that element_count*element_size + header_size does not
2498
- // overflow.
2499
- lea(result_end, Operand(element_count, element_size, header_size));
2500
- addq(result_end, result);
2501
- j(carry, gc_required);
2502
- Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2503
- cmpq(result_end, limit_operand);
2504
- j(above, gc_required);
2505
-
2506
- // Update allocation top.
2507
- UpdateAllocationTopHelper(result_end, scratch);
2508
-
2509
- // Tag the result if requested.
2510
- if ((flags & TAG_OBJECT) != 0) {
2511
- addq(result, Immediate(kHeapObjectTag));
2512
- }
2513
- }
2514
-
2515
-
2516
- void MacroAssembler::AllocateInNewSpace(Register object_size,
2517
- Register result,
2518
- Register result_end,
2519
- Register scratch,
2520
- Label* gc_required,
2521
- AllocationFlags flags) {
2522
- if (!FLAG_inline_new) {
2523
- if (emit_debug_code()) {
2524
- // Trash the registers to simulate an allocation failure.
2525
- movl(result, Immediate(0x7091));
2526
- movl(result_end, Immediate(0x7191));
2527
- if (scratch.is_valid()) {
2528
- movl(scratch, Immediate(0x7291));
2529
- }
2530
- // object_size is left unchanged by this function.
2531
- }
2532
- jmp(gc_required);
2533
- return;
2534
- }
2535
- ASSERT(!result.is(result_end));
2536
-
2537
- // Load address of new object into result.
2538
- LoadAllocationTopHelper(result, scratch, flags);
2539
-
2540
- // Calculate new top and bail out if new space is exhausted.
2541
- ExternalReference new_space_allocation_limit =
2542
- ExternalReference::new_space_allocation_limit_address(isolate());
2543
- if (!object_size.is(result_end)) {
2544
- movq(result_end, object_size);
2545
- }
2546
- addq(result_end, result);
2547
- j(carry, gc_required);
2548
- Operand limit_operand = ExternalOperand(new_space_allocation_limit);
2549
- cmpq(result_end, limit_operand);
2550
- j(above, gc_required);
2551
-
2552
- // Update allocation top.
2553
- UpdateAllocationTopHelper(result_end, scratch);
2554
-
2555
- // Tag the result if requested.
2556
- if ((flags & TAG_OBJECT) != 0) {
2557
- addq(result, Immediate(kHeapObjectTag));
2558
- }
2559
- }
2560
-
2561
-
2562
- void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2563
- ExternalReference new_space_allocation_top =
2564
- ExternalReference::new_space_allocation_top_address(isolate());
2565
-
2566
- // Make sure the object has no tag before resetting top.
2567
- and_(object, Immediate(~kHeapObjectTagMask));
2568
- Operand top_operand = ExternalOperand(new_space_allocation_top);
2569
- #ifdef DEBUG
2570
- cmpq(object, top_operand);
2571
- Check(below, "Undo allocation of non allocated memory");
2572
- #endif
2573
- movq(top_operand, object);
2574
- }
2575
-
2576
-
2577
- void MacroAssembler::AllocateHeapNumber(Register result,
2578
- Register scratch,
2579
- Label* gc_required) {
2580
- // Allocate heap number in new space.
2581
- AllocateInNewSpace(HeapNumber::kSize,
2582
- result,
2583
- scratch,
2584
- no_reg,
2585
- gc_required,
2586
- TAG_OBJECT);
2587
-
2588
- // Set the map.
2589
- LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2590
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2591
- }
2592
-
2593
-
2594
- void MacroAssembler::AllocateTwoByteString(Register result,
2595
- Register length,
2596
- Register scratch1,
2597
- Register scratch2,
2598
- Register scratch3,
2599
- Label* gc_required) {
2600
- // Calculate the number of bytes needed for the characters in the string while
2601
- // observing object alignment.
2602
- const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
2603
- kObjectAlignmentMask;
2604
- ASSERT(kShortSize == 2);
2605
- // scratch1 = length * 2 + kObjectAlignmentMask.
2606
- lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask +
2607
- kHeaderAlignment));
2608
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2609
- if (kHeaderAlignment > 0) {
2610
- subq(scratch1, Immediate(kHeaderAlignment));
2611
- }
2612
-
2613
- // Allocate two byte string in new space.
2614
- AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2615
- times_1,
2616
- scratch1,
2617
- result,
2618
- scratch2,
2619
- scratch3,
2620
- gc_required,
2621
- TAG_OBJECT);
2622
-
2623
- // Set the map, length and hash field.
2624
- LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2625
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2626
- Integer32ToSmi(scratch1, length);
2627
- movq(FieldOperand(result, String::kLengthOffset), scratch1);
2628
- movq(FieldOperand(result, String::kHashFieldOffset),
2629
- Immediate(String::kEmptyHashField));
2630
- }
2631
-
2632
-
2633
- void MacroAssembler::AllocateAsciiString(Register result,
2634
- Register length,
2635
- Register scratch1,
2636
- Register scratch2,
2637
- Register scratch3,
2638
- Label* gc_required) {
2639
- // Calculate the number of bytes needed for the characters in the string while
2640
- // observing object alignment.
2641
- const int kHeaderAlignment = SeqAsciiString::kHeaderSize &
2642
- kObjectAlignmentMask;
2643
- movl(scratch1, length);
2644
- ASSERT(kCharSize == 1);
2645
- addq(scratch1, Immediate(kObjectAlignmentMask + kHeaderAlignment));
2646
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2647
- if (kHeaderAlignment > 0) {
2648
- subq(scratch1, Immediate(kHeaderAlignment));
2649
- }
2650
-
2651
- // Allocate ascii string in new space.
2652
- AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2653
- times_1,
2654
- scratch1,
2655
- result,
2656
- scratch2,
2657
- scratch3,
2658
- gc_required,
2659
- TAG_OBJECT);
2660
-
2661
- // Set the map, length and hash field.
2662
- LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2663
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2664
- Integer32ToSmi(scratch1, length);
2665
- movq(FieldOperand(result, String::kLengthOffset), scratch1);
2666
- movq(FieldOperand(result, String::kHashFieldOffset),
2667
- Immediate(String::kEmptyHashField));
2668
- }
2669
-
2670
-
2671
- void MacroAssembler::AllocateConsString(Register result,
2672
- Register scratch1,
2673
- Register scratch2,
2674
- Label* gc_required) {
2675
- // Allocate heap number in new space.
2676
- AllocateInNewSpace(ConsString::kSize,
2677
- result,
2678
- scratch1,
2679
- scratch2,
2680
- gc_required,
2681
- TAG_OBJECT);
2682
-
2683
- // Set the map. The other fields are left uninitialized.
2684
- LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2685
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2686
- }
2687
-
2688
-
2689
- void MacroAssembler::AllocateAsciiConsString(Register result,
2690
- Register scratch1,
2691
- Register scratch2,
2692
- Label* gc_required) {
2693
- // Allocate heap number in new space.
2694
- AllocateInNewSpace(ConsString::kSize,
2695
- result,
2696
- scratch1,
2697
- scratch2,
2698
- gc_required,
2699
- TAG_OBJECT);
2700
-
2701
- // Set the map. The other fields are left uninitialized.
2702
- LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2703
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2704
- }
2705
-
2706
-
2707
- // Copy memory, byte-by-byte, from source to destination. Not optimized for
2708
- // long or aligned copies. The contents of scratch and length are destroyed.
2709
- // Destination is incremented by length, source, length and scratch are
2710
- // clobbered.
2711
- // A simpler loop is faster on small copies, but slower on large ones.
2712
- // The cld() instruction must have been emitted, to set the direction flag(),
2713
- // before calling this function.
2714
- void MacroAssembler::CopyBytes(Register destination,
2715
- Register source,
2716
- Register length,
2717
- int min_length,
2718
- Register scratch) {
2719
- ASSERT(min_length >= 0);
2720
- if (FLAG_debug_code) {
2721
- cmpl(length, Immediate(min_length));
2722
- Assert(greater_equal, "Invalid min_length");
2723
- }
2724
- Label loop, done, short_string, short_loop;
2725
-
2726
- const int kLongStringLimit = 20;
2727
- if (min_length <= kLongStringLimit) {
2728
- cmpl(length, Immediate(kLongStringLimit));
2729
- j(less_equal, &short_string);
2730
- }
2731
-
2732
- ASSERT(source.is(rsi));
2733
- ASSERT(destination.is(rdi));
2734
- ASSERT(length.is(rcx));
2735
-
2736
- // Because source is 8-byte aligned in our uses of this function,
2737
- // we keep source aligned for the rep movs operation by copying the odd bytes
2738
- // at the end of the ranges.
2739
- movq(scratch, length);
2740
- shrl(length, Immediate(3));
2741
- repmovsq();
2742
- // Move remaining bytes of length.
2743
- andl(scratch, Immediate(0x7));
2744
- movq(length, Operand(source, scratch, times_1, -8));
2745
- movq(Operand(destination, scratch, times_1, -8), length);
2746
- addq(destination, scratch);
2747
-
2748
- if (min_length <= kLongStringLimit) {
2749
- jmp(&done);
2750
-
2751
- bind(&short_string);
2752
- if (min_length == 0) {
2753
- testl(length, length);
2754
- j(zero, &done);
2755
- }
2756
- lea(scratch, Operand(destination, length, times_1, 0));
2757
-
2758
- bind(&short_loop);
2759
- movb(length, Operand(source, 0));
2760
- movb(Operand(destination, 0), length);
2761
- incq(source);
2762
- incq(destination);
2763
- cmpq(destination, scratch);
2764
- j(not_equal, &short_loop);
2765
-
2766
- bind(&done);
2767
- }
2768
- }
2769
-
2770
-
2771
- void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2772
- if (context_chain_length > 0) {
2773
- // Move up the chain of contexts to the context containing the slot.
2774
- movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2775
- // Load the function context (which is the incoming, outer context).
2776
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2777
- for (int i = 1; i < context_chain_length; i++) {
2778
- movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2779
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2780
- }
2781
- // The context may be an intermediate context, not a function context.
2782
- movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2783
- } else {
2784
- // Slot is in the current function context. Move it into the
2785
- // destination register in case we store into it (the write barrier
2786
- // cannot be allowed to destroy the context in rsi).
2787
- movq(dst, rsi);
2788
- }
2789
-
2790
- // We should not have found a 'with' context by walking the context chain
2791
- // (i.e., the static scope chain and runtime context chain do not agree).
2792
- // A variable occurring in such a scope should have slot type LOOKUP and
2793
- // not CONTEXT.
2794
- if (emit_debug_code()) {
2795
- cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2796
- Check(equal, "Yo dawg, I heard you liked function contexts "
2797
- "so I put function contexts in all your contexts");
2798
- }
2799
- }
2800
-
2801
- #ifdef _WIN64
2802
- static const int kRegisterPassedArguments = 4;
2803
- #else
2804
- static const int kRegisterPassedArguments = 6;
2805
- #endif
2806
-
2807
- void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2808
- // Load the global or builtins object from the current context.
2809
- movq(function, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2810
- // Load the global context from the global or builtins object.
2811
- movq(function, FieldOperand(function, GlobalObject::kGlobalContextOffset));
2812
- // Load the function from the global context.
2813
- movq(function, Operand(function, Context::SlotOffset(index)));
2814
- }
2815
-
2816
-
2817
- void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2818
- Register map) {
2819
- // Load the initial map. The global functions all have initial maps.
2820
- movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2821
- if (emit_debug_code()) {
2822
- Label ok, fail;
2823
- CheckMap(map, FACTORY->meta_map(), &fail, false);
2824
- jmp(&ok);
2825
- bind(&fail);
2826
- Abort("Global functions must have initial map");
2827
- bind(&ok);
2828
- }
2829
- }
2830
-
2831
-
2832
- int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
2833
- // On Windows 64 stack slots are reserved by the caller for all arguments
2834
- // including the ones passed in registers, and space is always allocated for
2835
- // the four register arguments even if the function takes fewer than four
2836
- // arguments.
2837
- // On AMD64 ABI (Linux/Mac) the first six arguments are passed in registers
2838
- // and the caller does not reserve stack slots for them.
2839
- ASSERT(num_arguments >= 0);
2840
- #ifdef _WIN64
2841
- const int kMinimumStackSlots = kRegisterPassedArguments;
2842
- if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
2843
- return num_arguments;
2844
- #else
2845
- if (num_arguments < kRegisterPassedArguments) return 0;
2846
- return num_arguments - kRegisterPassedArguments;
2847
- #endif
2848
- }
2849
-
2850
-
2851
- void MacroAssembler::PrepareCallCFunction(int num_arguments) {
2852
- int frame_alignment = OS::ActivationFrameAlignment();
2853
- ASSERT(frame_alignment != 0);
2854
- ASSERT(num_arguments >= 0);
2855
-
2856
- // Make stack end at alignment and allocate space for arguments and old rsp.
2857
- movq(kScratchRegister, rsp);
2858
- ASSERT(IsPowerOf2(frame_alignment));
2859
- int argument_slots_on_stack =
2860
- ArgumentStackSlotsForCFunctionCall(num_arguments);
2861
- subq(rsp, Immediate((argument_slots_on_stack + 1) * kPointerSize));
2862
- and_(rsp, Immediate(-frame_alignment));
2863
- movq(Operand(rsp, argument_slots_on_stack * kPointerSize), kScratchRegister);
2864
- }
2865
-
2866
-
2867
- void MacroAssembler::CallCFunction(ExternalReference function,
2868
- int num_arguments) {
2869
- LoadAddress(rax, function);
2870
- CallCFunction(rax, num_arguments);
2871
- }
2872
-
2873
-
2874
- void MacroAssembler::CallCFunction(Register function, int num_arguments) {
2875
- // Check stack alignment.
2876
- if (emit_debug_code()) {
2877
- CheckStackAlignment();
2878
- }
2879
-
2880
- call(function);
2881
- ASSERT(OS::ActivationFrameAlignment() != 0);
2882
- ASSERT(num_arguments >= 0);
2883
- int argument_slots_on_stack =
2884
- ArgumentStackSlotsForCFunctionCall(num_arguments);
2885
- movq(rsp, Operand(rsp, argument_slots_on_stack * kPointerSize));
2886
- }
2887
-
2888
-
2889
- CodePatcher::CodePatcher(byte* address, int size)
2890
- : address_(address),
2891
- size_(size),
2892
- masm_(Isolate::Current(), address, size + Assembler::kGap) {
2893
- // Create a new macro assembler pointing to the address of the code to patch.
2894
- // The size is adjusted with kGap on order for the assembler to generate size
2895
- // bytes of instructions without failing with buffer size constraints.
2896
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2897
- }
2898
-
2899
-
2900
- CodePatcher::~CodePatcher() {
2901
- // Indicate that code has changed.
2902
- CPU::FlushICache(address_, size_);
2903
-
2904
- // Check that the code was patched as expected.
2905
- ASSERT(masm_.pc_ == address_ + size_);
2906
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2907
- }
2908
-
2909
- } } // namespace v8::internal
2910
-
2911
- #endif // V8_TARGET_ARCH_X64