therubyracer 0.9.0beta4 → 0.9.0beta5

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer has been flagged as potentially problematic; see the advisory details on the release page for more information.

Files changed (771):
  1. data/.gitignore +1 -0
  2. data/Rakefile +0 -11
  3. data/ext/v8/extconf.rb +9 -9
  4. data/ext/v8/v8_external.cpp +40 -10
  5. data/lib/v8/cli.rb +2 -1
  6. data/lib/v8/version.rb +1 -1
  7. data/spec/redjs/.gitignore +1 -0
  8. data/therubyracer.gemspec +2 -3
  9. metadata +33 -779
  10. data/ext/v8/upstream/Makefile +0 -38
  11. data/ext/v8/upstream/detect_cpu.rb +0 -27
  12. data/ext/v8/upstream/fpic-on-linux-amd64.patch +0 -13
  13. data/ext/v8/upstream/scons/CHANGES.txt +0 -5183
  14. data/ext/v8/upstream/scons/LICENSE.txt +0 -20
  15. data/ext/v8/upstream/scons/MANIFEST +0 -202
  16. data/ext/v8/upstream/scons/PKG-INFO +0 -13
  17. data/ext/v8/upstream/scons/README.txt +0 -273
  18. data/ext/v8/upstream/scons/RELEASE.txt +0 -1040
  19. data/ext/v8/upstream/scons/engine/SCons/Action.py +0 -1256
  20. data/ext/v8/upstream/scons/engine/SCons/Builder.py +0 -868
  21. data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +0 -217
  22. data/ext/v8/upstream/scons/engine/SCons/Conftest.py +0 -794
  23. data/ext/v8/upstream/scons/engine/SCons/Debug.py +0 -237
  24. data/ext/v8/upstream/scons/engine/SCons/Defaults.py +0 -485
  25. data/ext/v8/upstream/scons/engine/SCons/Environment.py +0 -2327
  26. data/ext/v8/upstream/scons/engine/SCons/Errors.py +0 -207
  27. data/ext/v8/upstream/scons/engine/SCons/Executor.py +0 -636
  28. data/ext/v8/upstream/scons/engine/SCons/Job.py +0 -435
  29. data/ext/v8/upstream/scons/engine/SCons/Memoize.py +0 -292
  30. data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +0 -153
  31. data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +0 -3220
  32. data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +0 -128
  33. data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +0 -1341
  34. data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +0 -50
  35. data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +0 -50
  36. data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +0 -50
  37. data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +0 -50
  38. data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +0 -76
  39. data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +0 -74
  40. data/ext/v8/upstream/scons/engine/SCons/PathList.py +0 -232
  41. data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +0 -236
  42. data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +0 -70
  43. data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +0 -55
  44. data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +0 -46
  45. data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +0 -46
  46. data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +0 -44
  47. data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +0 -58
  48. data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +0 -264
  49. data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +0 -50
  50. data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +0 -386
  51. data/ext/v8/upstream/scons/engine/SCons/SConf.py +0 -1038
  52. data/ext/v8/upstream/scons/engine/SCons/SConsign.py +0 -381
  53. data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +0 -132
  54. data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +0 -74
  55. data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +0 -111
  56. data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +0 -320
  57. data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +0 -48
  58. data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +0 -378
  59. data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +0 -103
  60. data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +0 -55
  61. data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +0 -415
  62. data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +0 -386
  63. data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +0 -1360
  64. data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +0 -944
  65. data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +0 -642
  66. data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +0 -414
  67. data/ext/v8/upstream/scons/engine/SCons/Sig.py +0 -63
  68. data/ext/v8/upstream/scons/engine/SCons/Subst.py +0 -911
  69. data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +0 -1030
  70. data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +0 -61
  71. data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +0 -65
  72. data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +0 -73
  73. data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +0 -247
  74. data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +0 -324
  75. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +0 -56
  76. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +0 -61
  77. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +0 -210
  78. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +0 -84
  79. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +0 -321
  80. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +0 -367
  81. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +0 -497
  82. data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +0 -104
  83. data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +0 -138
  84. data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +0 -64
  85. data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +0 -64
  86. data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +0 -71
  87. data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +0 -675
  88. data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +0 -82
  89. data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +0 -74
  90. data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +0 -80
  91. data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +0 -76
  92. data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +0 -71
  93. data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +0 -63
  94. data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +0 -78
  95. data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +0 -82
  96. data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +0 -99
  97. data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +0 -114
  98. data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +0 -58
  99. data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +0 -50
  100. data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +0 -224
  101. data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +0 -64
  102. data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +0 -125
  103. data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +0 -94
  104. data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +0 -62
  105. data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +0 -62
  106. data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +0 -63
  107. data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +0 -98
  108. data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +0 -63
  109. data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +0 -90
  110. data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +0 -73
  111. data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +0 -53
  112. data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +0 -80
  113. data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +0 -64
  114. data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +0 -63
  115. data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +0 -81
  116. data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +0 -85
  117. data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +0 -53
  118. data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +0 -77
  119. data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +0 -59
  120. data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +0 -52
  121. data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +0 -72
  122. data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +0 -90
  123. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +0 -59
  124. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +0 -60
  125. data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +0 -229
  126. data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +0 -490
  127. data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +0 -71
  128. data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +0 -110
  129. data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +0 -234
  130. data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +0 -138
  131. data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +0 -79
  132. data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +0 -99
  133. data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +0 -121
  134. data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +0 -112
  135. data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +0 -63
  136. data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +0 -77
  137. data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +0 -90
  138. data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +0 -159
  139. data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +0 -64
  140. data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +0 -266
  141. data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +0 -50
  142. data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +0 -269
  143. data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +0 -1439
  144. data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +0 -208
  145. data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +0 -107
  146. data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +0 -72
  147. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +0 -314
  148. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +0 -185
  149. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +0 -526
  150. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +0 -367
  151. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +0 -43
  152. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +0 -43
  153. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +0 -43
  154. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +0 -44
  155. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +0 -44
  156. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +0 -44
  157. data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +0 -78
  158. data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +0 -83
  159. data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +0 -108
  160. data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +0 -336
  161. data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +0 -121
  162. data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +0 -70
  163. data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +0 -132
  164. data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +0 -68
  165. data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +0 -58
  166. data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +0 -53
  167. data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +0 -63
  168. data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +0 -67
  169. data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +0 -142
  170. data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +0 -58
  171. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +0 -63
  172. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +0 -64
  173. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +0 -64
  174. data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +0 -77
  175. data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +0 -186
  176. data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +0 -73
  177. data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +0 -805
  178. data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +0 -175
  179. data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +0 -53
  180. data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +0 -100
  181. data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +0 -131
  182. data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +0 -100
  183. data/ext/v8/upstream/scons/engine/SCons/Util.py +0 -1645
  184. data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +0 -91
  185. data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +0 -107
  186. data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +0 -139
  187. data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +0 -109
  188. data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +0 -147
  189. data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +0 -317
  190. data/ext/v8/upstream/scons/engine/SCons/Warnings.py +0 -228
  191. data/ext/v8/upstream/scons/engine/SCons/__init__.py +0 -49
  192. data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +0 -302
  193. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +0 -98
  194. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +0 -91
  195. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +0 -124
  196. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +0 -1725
  197. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +0 -583
  198. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +0 -176
  199. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +0 -325
  200. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +0 -1296
  201. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +0 -382
  202. data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +0 -187
  203. data/ext/v8/upstream/scons/engine/SCons/cpp.py +0 -598
  204. data/ext/v8/upstream/scons/engine/SCons/dblite.py +0 -248
  205. data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +0 -77
  206. data/ext/v8/upstream/scons/os_spawnv_fix.diff +0 -83
  207. data/ext/v8/upstream/scons/scons-time.1 +0 -1017
  208. data/ext/v8/upstream/scons/scons.1 +0 -15179
  209. data/ext/v8/upstream/scons/sconsign.1 +0 -208
  210. data/ext/v8/upstream/scons/script/scons +0 -184
  211. data/ext/v8/upstream/scons/script/scons-time +0 -1529
  212. data/ext/v8/upstream/scons/script/scons.bat +0 -31
  213. data/ext/v8/upstream/scons/script/sconsign +0 -508
  214. data/ext/v8/upstream/scons/setup.cfg +0 -6
  215. data/ext/v8/upstream/scons/setup.py +0 -427
  216. data/ext/v8/upstream/v8/.gitignore +0 -33
  217. data/ext/v8/upstream/v8/AUTHORS +0 -42
  218. data/ext/v8/upstream/v8/ChangeLog +0 -2663
  219. data/ext/v8/upstream/v8/LICENSE +0 -52
  220. data/ext/v8/upstream/v8/LICENSE.strongtalk +0 -29
  221. data/ext/v8/upstream/v8/LICENSE.v8 +0 -26
  222. data/ext/v8/upstream/v8/LICENSE.valgrind +0 -45
  223. data/ext/v8/upstream/v8/SConstruct +0 -1473
  224. data/ext/v8/upstream/v8/build/README.txt +0 -25
  225. data/ext/v8/upstream/v8/build/all.gyp +0 -18
  226. data/ext/v8/upstream/v8/build/armu.gypi +0 -32
  227. data/ext/v8/upstream/v8/build/common.gypi +0 -82
  228. data/ext/v8/upstream/v8/build/gyp_v8 +0 -145
  229. data/ext/v8/upstream/v8/include/v8-debug.h +0 -394
  230. data/ext/v8/upstream/v8/include/v8-preparser.h +0 -116
  231. data/ext/v8/upstream/v8/include/v8-profiler.h +0 -505
  232. data/ext/v8/upstream/v8/include/v8-testing.h +0 -104
  233. data/ext/v8/upstream/v8/include/v8.h +0 -4000
  234. data/ext/v8/upstream/v8/include/v8stdint.h +0 -53
  235. data/ext/v8/upstream/v8/preparser/SConscript +0 -38
  236. data/ext/v8/upstream/v8/preparser/preparser-process.cc +0 -169
  237. data/ext/v8/upstream/v8/src/SConscript +0 -380
  238. data/ext/v8/upstream/v8/src/accessors.cc +0 -766
  239. data/ext/v8/upstream/v8/src/accessors.h +0 -121
  240. data/ext/v8/upstream/v8/src/allocation-inl.h +0 -49
  241. data/ext/v8/upstream/v8/src/allocation.cc +0 -122
  242. data/ext/v8/upstream/v8/src/allocation.h +0 -143
  243. data/ext/v8/upstream/v8/src/api.cc +0 -5678
  244. data/ext/v8/upstream/v8/src/api.h +0 -572
  245. data/ext/v8/upstream/v8/src/apinatives.js +0 -110
  246. data/ext/v8/upstream/v8/src/apiutils.h +0 -73
  247. data/ext/v8/upstream/v8/src/arguments.h +0 -116
  248. data/ext/v8/upstream/v8/src/arm/assembler-arm-inl.h +0 -353
  249. data/ext/v8/upstream/v8/src/arm/assembler-arm.cc +0 -2877
  250. data/ext/v8/upstream/v8/src/arm/assembler-arm.h +0 -1382
  251. data/ext/v8/upstream/v8/src/arm/builtins-arm.cc +0 -1634
  252. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.cc +0 -6917
  253. data/ext/v8/upstream/v8/src/arm/code-stubs-arm.h +0 -623
  254. data/ext/v8/upstream/v8/src/arm/codegen-arm-inl.h +0 -48
  255. data/ext/v8/upstream/v8/src/arm/codegen-arm.cc +0 -7437
  256. data/ext/v8/upstream/v8/src/arm/codegen-arm.h +0 -595
  257. data/ext/v8/upstream/v8/src/arm/constants-arm.cc +0 -152
  258. data/ext/v8/upstream/v8/src/arm/constants-arm.h +0 -778
  259. data/ext/v8/upstream/v8/src/arm/cpu-arm.cc +0 -149
  260. data/ext/v8/upstream/v8/src/arm/debug-arm.cc +0 -317
  261. data/ext/v8/upstream/v8/src/arm/deoptimizer-arm.cc +0 -737
  262. data/ext/v8/upstream/v8/src/arm/disasm-arm.cc +0 -1503
  263. data/ext/v8/upstream/v8/src/arm/frames-arm.cc +0 -45
  264. data/ext/v8/upstream/v8/src/arm/frames-arm.h +0 -168
  265. data/ext/v8/upstream/v8/src/arm/full-codegen-arm.cc +0 -4374
  266. data/ext/v8/upstream/v8/src/arm/ic-arm.cc +0 -1793
  267. data/ext/v8/upstream/v8/src/arm/jump-target-arm.cc +0 -174
  268. data/ext/v8/upstream/v8/src/arm/lithium-arm.cc +0 -2120
  269. data/ext/v8/upstream/v8/src/arm/lithium-arm.h +0 -2179
  270. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.cc +0 -4132
  271. data/ext/v8/upstream/v8/src/arm/lithium-codegen-arm.h +0 -329
  272. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.cc +0 -305
  273. data/ext/v8/upstream/v8/src/arm/lithium-gap-resolver-arm.h +0 -84
  274. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.cc +0 -2939
  275. data/ext/v8/upstream/v8/src/arm/macro-assembler-arm.h +0 -1071
  276. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.cc +0 -1287
  277. data/ext/v8/upstream/v8/src/arm/regexp-macro-assembler-arm.h +0 -253
  278. data/ext/v8/upstream/v8/src/arm/register-allocator-arm-inl.h +0 -100
  279. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.cc +0 -63
  280. data/ext/v8/upstream/v8/src/arm/register-allocator-arm.h +0 -44
  281. data/ext/v8/upstream/v8/src/arm/simulator-arm.cc +0 -3288
  282. data/ext/v8/upstream/v8/src/arm/simulator-arm.h +0 -413
  283. data/ext/v8/upstream/v8/src/arm/stub-cache-arm.cc +0 -4034
  284. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm-inl.h +0 -59
  285. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.cc +0 -843
  286. data/ext/v8/upstream/v8/src/arm/virtual-frame-arm.h +0 -523
  287. data/ext/v8/upstream/v8/src/array.js +0 -1249
  288. data/ext/v8/upstream/v8/src/assembler.cc +0 -1067
  289. data/ext/v8/upstream/v8/src/assembler.h +0 -823
  290. data/ext/v8/upstream/v8/src/ast-inl.h +0 -112
  291. data/ext/v8/upstream/v8/src/ast.cc +0 -1078
  292. data/ext/v8/upstream/v8/src/ast.h +0 -2234
  293. data/ext/v8/upstream/v8/src/atomicops.h +0 -167
  294. data/ext/v8/upstream/v8/src/atomicops_internals_arm_gcc.h +0 -145
  295. data/ext/v8/upstream/v8/src/atomicops_internals_mips_gcc.h +0 -169
  296. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.cc +0 -126
  297. data/ext/v8/upstream/v8/src/atomicops_internals_x86_gcc.h +0 -287
  298. data/ext/v8/upstream/v8/src/atomicops_internals_x86_macosx.h +0 -301
  299. data/ext/v8/upstream/v8/src/atomicops_internals_x86_msvc.h +0 -203
  300. data/ext/v8/upstream/v8/src/bignum-dtoa.cc +0 -655
  301. data/ext/v8/upstream/v8/src/bignum-dtoa.h +0 -81
  302. data/ext/v8/upstream/v8/src/bignum.cc +0 -768
  303. data/ext/v8/upstream/v8/src/bignum.h +0 -140
  304. data/ext/v8/upstream/v8/src/bootstrapper.cc +0 -2138
  305. data/ext/v8/upstream/v8/src/bootstrapper.h +0 -185
  306. data/ext/v8/upstream/v8/src/builtins.cc +0 -1708
  307. data/ext/v8/upstream/v8/src/builtins.h +0 -368
  308. data/ext/v8/upstream/v8/src/bytecodes-irregexp.h +0 -105
  309. data/ext/v8/upstream/v8/src/cached-powers.cc +0 -177
  310. data/ext/v8/upstream/v8/src/cached-powers.h +0 -65
  311. data/ext/v8/upstream/v8/src/char-predicates-inl.h +0 -94
  312. data/ext/v8/upstream/v8/src/char-predicates.h +0 -65
  313. data/ext/v8/upstream/v8/src/checks.cc +0 -110
  314. data/ext/v8/upstream/v8/src/checks.h +0 -296
  315. data/ext/v8/upstream/v8/src/circular-queue-inl.h +0 -53
  316. data/ext/v8/upstream/v8/src/circular-queue.cc +0 -122
  317. data/ext/v8/upstream/v8/src/circular-queue.h +0 -103
  318. data/ext/v8/upstream/v8/src/code-stubs.cc +0 -240
  319. data/ext/v8/upstream/v8/src/code-stubs.h +0 -971
  320. data/ext/v8/upstream/v8/src/code.h +0 -68
  321. data/ext/v8/upstream/v8/src/codegen-inl.h +0 -68
  322. data/ext/v8/upstream/v8/src/codegen.cc +0 -505
  323. data/ext/v8/upstream/v8/src/codegen.h +0 -245
  324. data/ext/v8/upstream/v8/src/compilation-cache.cc +0 -540
  325. data/ext/v8/upstream/v8/src/compilation-cache.h +0 -287
  326. data/ext/v8/upstream/v8/src/compiler.cc +0 -792
  327. data/ext/v8/upstream/v8/src/compiler.h +0 -307
  328. data/ext/v8/upstream/v8/src/contexts.cc +0 -327
  329. data/ext/v8/upstream/v8/src/contexts.h +0 -382
  330. data/ext/v8/upstream/v8/src/conversions-inl.h +0 -110
  331. data/ext/v8/upstream/v8/src/conversions.cc +0 -1125
  332. data/ext/v8/upstream/v8/src/conversions.h +0 -122
  333. data/ext/v8/upstream/v8/src/counters.cc +0 -93
  334. data/ext/v8/upstream/v8/src/counters.h +0 -254
  335. data/ext/v8/upstream/v8/src/cpu-profiler-inl.h +0 -101
  336. data/ext/v8/upstream/v8/src/cpu-profiler.cc +0 -606
  337. data/ext/v8/upstream/v8/src/cpu-profiler.h +0 -305
  338. data/ext/v8/upstream/v8/src/cpu.h +0 -67
  339. data/ext/v8/upstream/v8/src/d8-debug.cc +0 -367
  340. data/ext/v8/upstream/v8/src/d8-debug.h +0 -158
  341. data/ext/v8/upstream/v8/src/d8-posix.cc +0 -695
  342. data/ext/v8/upstream/v8/src/d8-readline.cc +0 -128
  343. data/ext/v8/upstream/v8/src/d8-windows.cc +0 -42
  344. data/ext/v8/upstream/v8/src/d8.cc +0 -796
  345. data/ext/v8/upstream/v8/src/d8.gyp +0 -88
  346. data/ext/v8/upstream/v8/src/d8.h +0 -231
  347. data/ext/v8/upstream/v8/src/d8.js +0 -2798
  348. data/ext/v8/upstream/v8/src/data-flow.cc +0 -545
  349. data/ext/v8/upstream/v8/src/data-flow.h +0 -379
  350. data/ext/v8/upstream/v8/src/date.js +0 -1103
  351. data/ext/v8/upstream/v8/src/dateparser-inl.h +0 -125
  352. data/ext/v8/upstream/v8/src/dateparser.cc +0 -178
  353. data/ext/v8/upstream/v8/src/dateparser.h +0 -265
  354. data/ext/v8/upstream/v8/src/debug-agent.cc +0 -447
  355. data/ext/v8/upstream/v8/src/debug-agent.h +0 -129
  356. data/ext/v8/upstream/v8/src/debug-debugger.js +0 -2569
  357. data/ext/v8/upstream/v8/src/debug.cc +0 -3188
  358. data/ext/v8/upstream/v8/src/debug.h +0 -1055
  359. data/ext/v8/upstream/v8/src/deoptimizer.cc +0 -1296
  360. data/ext/v8/upstream/v8/src/deoptimizer.h +0 -629
  361. data/ext/v8/upstream/v8/src/disasm.h +0 -80
  362. data/ext/v8/upstream/v8/src/disassembler.cc +0 -339
  363. data/ext/v8/upstream/v8/src/disassembler.h +0 -56
  364. data/ext/v8/upstream/v8/src/diy-fp.cc +0 -58
  365. data/ext/v8/upstream/v8/src/diy-fp.h +0 -117
  366. data/ext/v8/upstream/v8/src/double.h +0 -238
  367. data/ext/v8/upstream/v8/src/dtoa.cc +0 -103
  368. data/ext/v8/upstream/v8/src/dtoa.h +0 -85
  369. data/ext/v8/upstream/v8/src/execution.cc +0 -791
  370. data/ext/v8/upstream/v8/src/execution.h +0 -291
  371. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.cc +0 -250
  372. data/ext/v8/upstream/v8/src/extensions/experimental/break-iterator.h +0 -89
  373. data/ext/v8/upstream/v8/src/extensions/experimental/experimental.gyp +0 -55
  374. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.cc +0 -284
  375. data/ext/v8/upstream/v8/src/extensions/experimental/i18n-extension.h +0 -64
  376. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.cc +0 -141
  377. data/ext/v8/upstream/v8/src/extensions/externalize-string-extension.h +0 -50
  378. data/ext/v8/upstream/v8/src/extensions/gc-extension.cc +0 -58
  379. data/ext/v8/upstream/v8/src/extensions/gc-extension.h +0 -49
  380. data/ext/v8/upstream/v8/src/factory.cc +0 -1194
  381. data/ext/v8/upstream/v8/src/factory.h +0 -436
  382. data/ext/v8/upstream/v8/src/fast-dtoa.cc +0 -736
  383. data/ext/v8/upstream/v8/src/fast-dtoa.h +0 -83
  384. data/ext/v8/upstream/v8/src/fixed-dtoa.cc +0 -405
  385. data/ext/v8/upstream/v8/src/fixed-dtoa.h +0 -55
  386. data/ext/v8/upstream/v8/src/flag-definitions.h +0 -556
  387. data/ext/v8/upstream/v8/src/flags.cc +0 -551
  388. data/ext/v8/upstream/v8/src/flags.h +0 -79
  389. data/ext/v8/upstream/v8/src/frame-element.cc +0 -37
  390. data/ext/v8/upstream/v8/src/frame-element.h +0 -269
  391. data/ext/v8/upstream/v8/src/frames-inl.h +0 -236
  392. data/ext/v8/upstream/v8/src/frames.cc +0 -1273
  393. data/ext/v8/upstream/v8/src/frames.h +0 -854
  394. data/ext/v8/upstream/v8/src/full-codegen.cc +0 -1385
  395. data/ext/v8/upstream/v8/src/full-codegen.h +0 -753
  396. data/ext/v8/upstream/v8/src/func-name-inferrer.cc +0 -91
  397. data/ext/v8/upstream/v8/src/func-name-inferrer.h +0 -111
  398. data/ext/v8/upstream/v8/src/gdb-jit.cc +0 -1548
  399. data/ext/v8/upstream/v8/src/gdb-jit.h +0 -138
  400. data/ext/v8/upstream/v8/src/global-handles.cc +0 -596
  401. data/ext/v8/upstream/v8/src/global-handles.h +0 -239
  402. data/ext/v8/upstream/v8/src/globals.h +0 -325
  403. data/ext/v8/upstream/v8/src/handles-inl.h +0 -177
  404. data/ext/v8/upstream/v8/src/handles.cc +0 -965
  405. data/ext/v8/upstream/v8/src/handles.h +0 -372
  406. data/ext/v8/upstream/v8/src/hashmap.cc +0 -230
  407. data/ext/v8/upstream/v8/src/hashmap.h +0 -121
  408. data/ext/v8/upstream/v8/src/heap-inl.h +0 -703
  409. data/ext/v8/upstream/v8/src/heap-profiler.cc +0 -1173
  410. data/ext/v8/upstream/v8/src/heap-profiler.h +0 -396
  411. data/ext/v8/upstream/v8/src/heap.cc +0 -5856
  412. data/ext/v8/upstream/v8/src/heap.h +0 -2264
  413. data/ext/v8/upstream/v8/src/hydrogen-instructions.cc +0 -1639
  414. data/ext/v8/upstream/v8/src/hydrogen-instructions.h +0 -3657
  415. data/ext/v8/upstream/v8/src/hydrogen.cc +0 -6011
  416. data/ext/v8/upstream/v8/src/hydrogen.h +0 -1137
  417. data/ext/v8/upstream/v8/src/ia32/assembler-ia32-inl.h +0 -430
  418. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.cc +0 -2846
  419. data/ext/v8/upstream/v8/src/ia32/assembler-ia32.h +0 -1159
  420. data/ext/v8/upstream/v8/src/ia32/builtins-ia32.cc +0 -1596
  421. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.cc +0 -6549
  422. data/ext/v8/upstream/v8/src/ia32/code-stubs-ia32.h +0 -495
  423. data/ext/v8/upstream/v8/src/ia32/codegen-ia32-inl.h +0 -46
  424. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.cc +0 -10385
  425. data/ext/v8/upstream/v8/src/ia32/codegen-ia32.h +0 -801
  426. data/ext/v8/upstream/v8/src/ia32/cpu-ia32.cc +0 -88
  427. data/ext/v8/upstream/v8/src/ia32/debug-ia32.cc +0 -312
  428. data/ext/v8/upstream/v8/src/ia32/deoptimizer-ia32.cc +0 -774
  429. data/ext/v8/upstream/v8/src/ia32/disasm-ia32.cc +0 -1620
  430. data/ext/v8/upstream/v8/src/ia32/frames-ia32.cc +0 -45
  431. data/ext/v8/upstream/v8/src/ia32/frames-ia32.h +0 -140
  432. data/ext/v8/upstream/v8/src/ia32/full-codegen-ia32.cc +0 -4357
  433. data/ext/v8/upstream/v8/src/ia32/ic-ia32.cc +0 -1779
  434. data/ext/v8/upstream/v8/src/ia32/jump-target-ia32.cc +0 -437
  435. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.cc +0 -4158
  436. data/ext/v8/upstream/v8/src/ia32/lithium-codegen-ia32.h +0 -318
  437. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.cc +0 -466
  438. data/ext/v8/upstream/v8/src/ia32/lithium-gap-resolver-ia32.h +0 -110
  439. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.cc +0 -2181
  440. data/ext/v8/upstream/v8/src/ia32/lithium-ia32.h +0 -2235
  441. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.cc +0 -2056
  442. data/ext/v8/upstream/v8/src/ia32/macro-assembler-ia32.h +0 -807
  443. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.cc +0 -1264
  444. data/ext/v8/upstream/v8/src/ia32/regexp-macro-assembler-ia32.h +0 -216
  445. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32-inl.h +0 -82
  446. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.cc +0 -157
  447. data/ext/v8/upstream/v8/src/ia32/register-allocator-ia32.h +0 -43
  448. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.cc +0 -30
  449. data/ext/v8/upstream/v8/src/ia32/simulator-ia32.h +0 -72
  450. data/ext/v8/upstream/v8/src/ia32/stub-cache-ia32.cc +0 -3711
  451. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.cc +0 -1366
  452. data/ext/v8/upstream/v8/src/ia32/virtual-frame-ia32.h +0 -650
  453. data/ext/v8/upstream/v8/src/ic-inl.h +0 -130
  454. data/ext/v8/upstream/v8/src/ic.cc +0 -2389
  455. data/ext/v8/upstream/v8/src/ic.h +0 -675
  456. data/ext/v8/upstream/v8/src/inspector.cc +0 -63
  457. data/ext/v8/upstream/v8/src/inspector.h +0 -62
  458. data/ext/v8/upstream/v8/src/interpreter-irregexp.cc +0 -659
  459. data/ext/v8/upstream/v8/src/interpreter-irregexp.h +0 -49
  460. data/ext/v8/upstream/v8/src/isolate.cc +0 -883
  461. data/ext/v8/upstream/v8/src/isolate.h +0 -1306
  462. data/ext/v8/upstream/v8/src/json.js +0 -342
  463. data/ext/v8/upstream/v8/src/jsregexp.cc +0 -5371
  464. data/ext/v8/upstream/v8/src/jsregexp.h +0 -1483
  465. data/ext/v8/upstream/v8/src/jump-target-heavy-inl.h +0 -51
  466. data/ext/v8/upstream/v8/src/jump-target-heavy.cc +0 -427
  467. data/ext/v8/upstream/v8/src/jump-target-heavy.h +0 -238
  468. data/ext/v8/upstream/v8/src/jump-target-inl.h +0 -48
  469. data/ext/v8/upstream/v8/src/jump-target-light-inl.h +0 -56
  470. data/ext/v8/upstream/v8/src/jump-target-light.cc +0 -111
  471. data/ext/v8/upstream/v8/src/jump-target-light.h +0 -193
  472. data/ext/v8/upstream/v8/src/jump-target.cc +0 -91
  473. data/ext/v8/upstream/v8/src/jump-target.h +0 -90
  474. data/ext/v8/upstream/v8/src/list-inl.h +0 -206
  475. data/ext/v8/upstream/v8/src/list.h +0 -164
  476. data/ext/v8/upstream/v8/src/lithium-allocator-inl.h +0 -142
  477. data/ext/v8/upstream/v8/src/lithium-allocator.cc +0 -2105
  478. data/ext/v8/upstream/v8/src/lithium-allocator.h +0 -630
  479. data/ext/v8/upstream/v8/src/lithium.cc +0 -169
  480. data/ext/v8/upstream/v8/src/lithium.h +0 -592
  481. data/ext/v8/upstream/v8/src/liveedit-debugger.js +0 -1082
  482. data/ext/v8/upstream/v8/src/liveedit.cc +0 -1693
  483. data/ext/v8/upstream/v8/src/liveedit.h +0 -179
  484. data/ext/v8/upstream/v8/src/liveobjectlist-inl.h +0 -126
  485. data/ext/v8/upstream/v8/src/liveobjectlist.cc +0 -2589
  486. data/ext/v8/upstream/v8/src/liveobjectlist.h +0 -322
  487. data/ext/v8/upstream/v8/src/log-inl.h +0 -59
  488. data/ext/v8/upstream/v8/src/log-utils.cc +0 -423
  489. data/ext/v8/upstream/v8/src/log-utils.h +0 -229
  490. data/ext/v8/upstream/v8/src/log.cc +0 -1666
  491. data/ext/v8/upstream/v8/src/log.h +0 -446
  492. data/ext/v8/upstream/v8/src/macro-assembler.h +0 -120
  493. data/ext/v8/upstream/v8/src/macros.py +0 -178
  494. data/ext/v8/upstream/v8/src/mark-compact.cc +0 -3092
  495. data/ext/v8/upstream/v8/src/mark-compact.h +0 -506
  496. data/ext/v8/upstream/v8/src/math.js +0 -264
  497. data/ext/v8/upstream/v8/src/messages.cc +0 -166
  498. data/ext/v8/upstream/v8/src/messages.h +0 -114
  499. data/ext/v8/upstream/v8/src/messages.js +0 -1090
  500. data/ext/v8/upstream/v8/src/mips/assembler-mips-inl.h +0 -335
  501. data/ext/v8/upstream/v8/src/mips/assembler-mips.cc +0 -2093
  502. data/ext/v8/upstream/v8/src/mips/assembler-mips.h +0 -1066
  503. data/ext/v8/upstream/v8/src/mips/builtins-mips.cc +0 -148
  504. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.cc +0 -752
  505. data/ext/v8/upstream/v8/src/mips/code-stubs-mips.h +0 -511
  506. data/ext/v8/upstream/v8/src/mips/codegen-mips-inl.h +0 -64
  507. data/ext/v8/upstream/v8/src/mips/codegen-mips.cc +0 -1213
  508. data/ext/v8/upstream/v8/src/mips/codegen-mips.h +0 -633
  509. data/ext/v8/upstream/v8/src/mips/constants-mips.cc +0 -352
  510. data/ext/v8/upstream/v8/src/mips/constants-mips.h +0 -723
  511. data/ext/v8/upstream/v8/src/mips/cpu-mips.cc +0 -90
  512. data/ext/v8/upstream/v8/src/mips/debug-mips.cc +0 -155
  513. data/ext/v8/upstream/v8/src/mips/deoptimizer-mips.cc +0 -91
  514. data/ext/v8/upstream/v8/src/mips/disasm-mips.cc +0 -1023
  515. data/ext/v8/upstream/v8/src/mips/frames-mips.cc +0 -48
  516. data/ext/v8/upstream/v8/src/mips/frames-mips.h +0 -179
  517. data/ext/v8/upstream/v8/src/mips/full-codegen-mips.cc +0 -727
  518. data/ext/v8/upstream/v8/src/mips/ic-mips.cc +0 -244
  519. data/ext/v8/upstream/v8/src/mips/jump-target-mips.cc +0 -80
  520. data/ext/v8/upstream/v8/src/mips/lithium-codegen-mips.h +0 -65
  521. data/ext/v8/upstream/v8/src/mips/lithium-mips.h +0 -304
  522. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.cc +0 -3327
  523. data/ext/v8/upstream/v8/src/mips/macro-assembler-mips.h +0 -1058
  524. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.cc +0 -478
  525. data/ext/v8/upstream/v8/src/mips/regexp-macro-assembler-mips.h +0 -250
  526. data/ext/v8/upstream/v8/src/mips/register-allocator-mips-inl.h +0 -134
  527. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.cc +0 -63
  528. data/ext/v8/upstream/v8/src/mips/register-allocator-mips.h +0 -47
  529. data/ext/v8/upstream/v8/src/mips/simulator-mips.cc +0 -2438
  530. data/ext/v8/upstream/v8/src/mips/simulator-mips.h +0 -394
  531. data/ext/v8/upstream/v8/src/mips/stub-cache-mips.cc +0 -601
  532. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips-inl.h +0 -58
  533. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.cc +0 -307
  534. data/ext/v8/upstream/v8/src/mips/virtual-frame-mips.h +0 -530
  535. data/ext/v8/upstream/v8/src/mirror-debugger.js +0 -2381
  536. data/ext/v8/upstream/v8/src/mksnapshot.cc +0 -256
  537. data/ext/v8/upstream/v8/src/natives.h +0 -63
  538. data/ext/v8/upstream/v8/src/objects-debug.cc +0 -722
  539. data/ext/v8/upstream/v8/src/objects-inl.h +0 -4166
  540. data/ext/v8/upstream/v8/src/objects-printer.cc +0 -801
  541. data/ext/v8/upstream/v8/src/objects-visiting.cc +0 -142
  542. data/ext/v8/upstream/v8/src/objects-visiting.h +0 -422
  543. data/ext/v8/upstream/v8/src/objects.cc +0 -10296
  544. data/ext/v8/upstream/v8/src/objects.h +0 -6662
  545. data/ext/v8/upstream/v8/src/parser.cc +0 -5168
  546. data/ext/v8/upstream/v8/src/parser.h +0 -823
  547. data/ext/v8/upstream/v8/src/platform-cygwin.cc +0 -811
  548. data/ext/v8/upstream/v8/src/platform-freebsd.cc +0 -854
  549. data/ext/v8/upstream/v8/src/platform-linux.cc +0 -1120
  550. data/ext/v8/upstream/v8/src/platform-macos.cc +0 -865
  551. data/ext/v8/upstream/v8/src/platform-nullos.cc +0 -504
  552. data/ext/v8/upstream/v8/src/platform-openbsd.cc +0 -672
  553. data/ext/v8/upstream/v8/src/platform-posix.cc +0 -424
  554. data/ext/v8/upstream/v8/src/platform-solaris.cc +0 -796
  555. data/ext/v8/upstream/v8/src/platform-tls-mac.h +0 -62
  556. data/ext/v8/upstream/v8/src/platform-tls-win32.h +0 -62
  557. data/ext/v8/upstream/v8/src/platform-tls.h +0 -50
  558. data/ext/v8/upstream/v8/src/platform-win32.cc +0 -2072
  559. data/ext/v8/upstream/v8/src/platform.h +0 -693
  560. data/ext/v8/upstream/v8/src/preparse-data.cc +0 -185
  561. data/ext/v8/upstream/v8/src/preparse-data.h +0 -249
  562. data/ext/v8/upstream/v8/src/preparser-api.cc +0 -219
  563. data/ext/v8/upstream/v8/src/preparser.cc +0 -1205
  564. data/ext/v8/upstream/v8/src/preparser.h +0 -278
  565. data/ext/v8/upstream/v8/src/prettyprinter.cc +0 -1530
  566. data/ext/v8/upstream/v8/src/prettyprinter.h +0 -223
  567. data/ext/v8/upstream/v8/src/profile-generator-inl.h +0 -128
  568. data/ext/v8/upstream/v8/src/profile-generator.cc +0 -3095
  569. data/ext/v8/upstream/v8/src/profile-generator.h +0 -1125
  570. data/ext/v8/upstream/v8/src/property.cc +0 -102
  571. data/ext/v8/upstream/v8/src/property.h +0 -348
  572. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp-inl.h +0 -78
  573. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.cc +0 -470
  574. data/ext/v8/upstream/v8/src/regexp-macro-assembler-irregexp.h +0 -142
  575. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.cc +0 -373
  576. data/ext/v8/upstream/v8/src/regexp-macro-assembler-tracer.h +0 -104
  577. data/ext/v8/upstream/v8/src/regexp-macro-assembler.cc +0 -266
  578. data/ext/v8/upstream/v8/src/regexp-macro-assembler.h +0 -236
  579. data/ext/v8/upstream/v8/src/regexp-stack.cc +0 -111
  580. data/ext/v8/upstream/v8/src/regexp-stack.h +0 -147
  581. data/ext/v8/upstream/v8/src/regexp.js +0 -483
  582. data/ext/v8/upstream/v8/src/register-allocator-inl.h +0 -141
  583. data/ext/v8/upstream/v8/src/register-allocator.cc +0 -98
  584. data/ext/v8/upstream/v8/src/register-allocator.h +0 -310
  585. data/ext/v8/upstream/v8/src/rewriter.cc +0 -1024
  586. data/ext/v8/upstream/v8/src/rewriter.h +0 -59
  587. data/ext/v8/upstream/v8/src/runtime-profiler.cc +0 -478
  588. data/ext/v8/upstream/v8/src/runtime-profiler.h +0 -192
  589. data/ext/v8/upstream/v8/src/runtime.cc +0 -11949
  590. data/ext/v8/upstream/v8/src/runtime.h +0 -643
  591. data/ext/v8/upstream/v8/src/runtime.js +0 -643
  592. data/ext/v8/upstream/v8/src/safepoint-table.cc +0 -256
  593. data/ext/v8/upstream/v8/src/safepoint-table.h +0 -269
  594. data/ext/v8/upstream/v8/src/scanner-base.cc +0 -964
  595. data/ext/v8/upstream/v8/src/scanner-base.h +0 -664
  596. data/ext/v8/upstream/v8/src/scanner.cc +0 -584
  597. data/ext/v8/upstream/v8/src/scanner.h +0 -196
  598. data/ext/v8/upstream/v8/src/scopeinfo.cc +0 -631
  599. data/ext/v8/upstream/v8/src/scopeinfo.h +0 -249
  600. data/ext/v8/upstream/v8/src/scopes.cc +0 -1093
  601. data/ext/v8/upstream/v8/src/scopes.h +0 -508
  602. data/ext/v8/upstream/v8/src/serialize.cc +0 -1574
  603. data/ext/v8/upstream/v8/src/serialize.h +0 -589
  604. data/ext/v8/upstream/v8/src/shell.h +0 -55
  605. data/ext/v8/upstream/v8/src/simulator.h +0 -43
  606. data/ext/v8/upstream/v8/src/small-pointer-list.h +0 -163
  607. data/ext/v8/upstream/v8/src/smart-pointer.h +0 -109
  608. data/ext/v8/upstream/v8/src/snapshot-common.cc +0 -82
  609. data/ext/v8/upstream/v8/src/snapshot-empty.cc +0 -50
  610. data/ext/v8/upstream/v8/src/snapshot.h +0 -73
  611. data/ext/v8/upstream/v8/src/spaces-inl.h +0 -529
  612. data/ext/v8/upstream/v8/src/spaces.cc +0 -3147
  613. data/ext/v8/upstream/v8/src/spaces.h +0 -2368
  614. data/ext/v8/upstream/v8/src/splay-tree-inl.h +0 -310
  615. data/ext/v8/upstream/v8/src/splay-tree.h +0 -203
  616. data/ext/v8/upstream/v8/src/string-search.cc +0 -41
  617. data/ext/v8/upstream/v8/src/string-search.h +0 -568
  618. data/ext/v8/upstream/v8/src/string-stream.cc +0 -592
  619. data/ext/v8/upstream/v8/src/string-stream.h +0 -191
  620. data/ext/v8/upstream/v8/src/string.js +0 -915
  621. data/ext/v8/upstream/v8/src/strtod.cc +0 -440
  622. data/ext/v8/upstream/v8/src/strtod.h +0 -40
  623. data/ext/v8/upstream/v8/src/stub-cache.cc +0 -1940
  624. data/ext/v8/upstream/v8/src/stub-cache.h +0 -866
  625. data/ext/v8/upstream/v8/src/third_party/valgrind/valgrind.h +0 -3925
  626. data/ext/v8/upstream/v8/src/token.cc +0 -63
  627. data/ext/v8/upstream/v8/src/token.h +0 -288
  628. data/ext/v8/upstream/v8/src/top.cc +0 -983
  629. data/ext/v8/upstream/v8/src/type-info.cc +0 -472
  630. data/ext/v8/upstream/v8/src/type-info.h +0 -290
  631. data/ext/v8/upstream/v8/src/unbound-queue-inl.h +0 -95
  632. data/ext/v8/upstream/v8/src/unbound-queue.h +0 -67
  633. data/ext/v8/upstream/v8/src/unicode-inl.h +0 -238
  634. data/ext/v8/upstream/v8/src/unicode.cc +0 -1624
  635. data/ext/v8/upstream/v8/src/unicode.h +0 -280
  636. data/ext/v8/upstream/v8/src/uri.js +0 -402
  637. data/ext/v8/upstream/v8/src/utils.cc +0 -371
  638. data/ext/v8/upstream/v8/src/utils.h +0 -796
  639. data/ext/v8/upstream/v8/src/v8-counters.cc +0 -62
  640. data/ext/v8/upstream/v8/src/v8-counters.h +0 -311
  641. data/ext/v8/upstream/v8/src/v8.cc +0 -215
  642. data/ext/v8/upstream/v8/src/v8.h +0 -130
  643. data/ext/v8/upstream/v8/src/v8checks.h +0 -64
  644. data/ext/v8/upstream/v8/src/v8dll-main.cc +0 -39
  645. data/ext/v8/upstream/v8/src/v8globals.h +0 -486
  646. data/ext/v8/upstream/v8/src/v8memory.h +0 -82
  647. data/ext/v8/upstream/v8/src/v8natives.js +0 -1293
  648. data/ext/v8/upstream/v8/src/v8preparserdll-main.cc +0 -39
  649. data/ext/v8/upstream/v8/src/v8threads.cc +0 -453
  650. data/ext/v8/upstream/v8/src/v8threads.h +0 -164
  651. data/ext/v8/upstream/v8/src/v8utils.h +0 -317
  652. data/ext/v8/upstream/v8/src/variables.cc +0 -132
  653. data/ext/v8/upstream/v8/src/variables.h +0 -212
  654. data/ext/v8/upstream/v8/src/version.cc +0 -116
  655. data/ext/v8/upstream/v8/src/version.h +0 -68
  656. data/ext/v8/upstream/v8/src/virtual-frame-heavy-inl.h +0 -190
  657. data/ext/v8/upstream/v8/src/virtual-frame-heavy.cc +0 -312
  658. data/ext/v8/upstream/v8/src/virtual-frame-inl.h +0 -39
  659. data/ext/v8/upstream/v8/src/virtual-frame-light-inl.h +0 -171
  660. data/ext/v8/upstream/v8/src/virtual-frame-light.cc +0 -52
  661. data/ext/v8/upstream/v8/src/virtual-frame.cc +0 -49
  662. data/ext/v8/upstream/v8/src/virtual-frame.h +0 -59
  663. data/ext/v8/upstream/v8/src/vm-state-inl.h +0 -138
  664. data/ext/v8/upstream/v8/src/vm-state.h +0 -70
  665. data/ext/v8/upstream/v8/src/win32-headers.h +0 -96
  666. data/ext/v8/upstream/v8/src/x64/assembler-x64-inl.h +0 -456
  667. data/ext/v8/upstream/v8/src/x64/assembler-x64.cc +0 -2954
  668. data/ext/v8/upstream/v8/src/x64/assembler-x64.h +0 -1630
  669. data/ext/v8/upstream/v8/src/x64/builtins-x64.cc +0 -1493
  670. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.cc +0 -5132
  671. data/ext/v8/upstream/v8/src/x64/code-stubs-x64.h +0 -477
  672. data/ext/v8/upstream/v8/src/x64/codegen-x64-inl.h +0 -46
  673. data/ext/v8/upstream/v8/src/x64/codegen-x64.cc +0 -8843
  674. data/ext/v8/upstream/v8/src/x64/codegen-x64.h +0 -753
  675. data/ext/v8/upstream/v8/src/x64/cpu-x64.cc +0 -88
  676. data/ext/v8/upstream/v8/src/x64/debug-x64.cc +0 -318
  677. data/ext/v8/upstream/v8/src/x64/deoptimizer-x64.cc +0 -815
  678. data/ext/v8/upstream/v8/src/x64/disasm-x64.cc +0 -1752
  679. data/ext/v8/upstream/v8/src/x64/frames-x64.cc +0 -45
  680. data/ext/v8/upstream/v8/src/x64/frames-x64.h +0 -130
  681. data/ext/v8/upstream/v8/src/x64/full-codegen-x64.cc +0 -4339
  682. data/ext/v8/upstream/v8/src/x64/ic-x64.cc +0 -1752
  683. data/ext/v8/upstream/v8/src/x64/jump-target-x64.cc +0 -437
  684. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.cc +0 -3970
  685. data/ext/v8/upstream/v8/src/x64/lithium-codegen-x64.h +0 -318
  686. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.cc +0 -320
  687. data/ext/v8/upstream/v8/src/x64/lithium-gap-resolver-x64.h +0 -74
  688. data/ext/v8/upstream/v8/src/x64/lithium-x64.cc +0 -2115
  689. data/ext/v8/upstream/v8/src/x64/lithium-x64.h +0 -2161
  690. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.cc +0 -2911
  691. data/ext/v8/upstream/v8/src/x64/macro-assembler-x64.h +0 -1984
  692. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.cc +0 -1398
  693. data/ext/v8/upstream/v8/src/x64/regexp-macro-assembler-x64.h +0 -282
  694. data/ext/v8/upstream/v8/src/x64/register-allocator-x64-inl.h +0 -87
  695. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.cc +0 -95
  696. data/ext/v8/upstream/v8/src/x64/register-allocator-x64.h +0 -43
  697. data/ext/v8/upstream/v8/src/x64/simulator-x64.cc +0 -27
  698. data/ext/v8/upstream/v8/src/x64/simulator-x64.h +0 -71
  699. data/ext/v8/upstream/v8/src/x64/stub-cache-x64.cc +0 -3460
  700. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.cc +0 -1296
  701. data/ext/v8/upstream/v8/src/x64/virtual-frame-x64.h +0 -597
  702. data/ext/v8/upstream/v8/src/zone-inl.h +0 -129
  703. data/ext/v8/upstream/v8/src/zone.cc +0 -196
  704. data/ext/v8/upstream/v8/src/zone.h +0 -236
  705. data/ext/v8/upstream/v8/tools/codemap.js +0 -265
  706. data/ext/v8/upstream/v8/tools/consarray.js +0 -93
  707. data/ext/v8/upstream/v8/tools/csvparser.js +0 -78
  708. data/ext/v8/upstream/v8/tools/disasm.py +0 -92
  709. data/ext/v8/upstream/v8/tools/freebsd-tick-processor +0 -10
  710. data/ext/v8/upstream/v8/tools/gc-nvp-trace-processor.py +0 -328
  711. data/ext/v8/upstream/v8/tools/generate-ten-powers.scm +0 -286
  712. data/ext/v8/upstream/v8/tools/grokdump.py +0 -840
  713. data/ext/v8/upstream/v8/tools/gyp/v8.gyp +0 -844
  714. data/ext/v8/upstream/v8/tools/js2c.py +0 -380
  715. data/ext/v8/upstream/v8/tools/jsmin.py +0 -280
  716. data/ext/v8/upstream/v8/tools/linux-tick-processor +0 -35
  717. data/ext/v8/upstream/v8/tools/ll_prof.py +0 -919
  718. data/ext/v8/upstream/v8/tools/logreader.js +0 -185
  719. data/ext/v8/upstream/v8/tools/mac-nm +0 -18
  720. data/ext/v8/upstream/v8/tools/mac-tick-processor +0 -6
  721. data/ext/v8/upstream/v8/tools/oom_dump/README +0 -31
  722. data/ext/v8/upstream/v8/tools/oom_dump/SConstruct +0 -42
  723. data/ext/v8/upstream/v8/tools/oom_dump/oom_dump.cc +0 -288
  724. data/ext/v8/upstream/v8/tools/presubmit.py +0 -305
  725. data/ext/v8/upstream/v8/tools/process-heap-prof.py +0 -120
  726. data/ext/v8/upstream/v8/tools/profile.js +0 -751
  727. data/ext/v8/upstream/v8/tools/profile_view.js +0 -219
  728. data/ext/v8/upstream/v8/tools/run-valgrind.py +0 -77
  729. data/ext/v8/upstream/v8/tools/splaytree.js +0 -316
  730. data/ext/v8/upstream/v8/tools/stats-viewer.py +0 -468
  731. data/ext/v8/upstream/v8/tools/test.py +0 -1490
  732. data/ext/v8/upstream/v8/tools/tickprocessor-driver.js +0 -59
  733. data/ext/v8/upstream/v8/tools/tickprocessor.js +0 -877
  734. data/ext/v8/upstream/v8/tools/utils.py +0 -96
  735. data/ext/v8/upstream/v8/tools/visual_studio/README.txt +0 -70
  736. data/ext/v8/upstream/v8/tools/visual_studio/arm.vsprops +0 -14
  737. data/ext/v8/upstream/v8/tools/visual_studio/common.vsprops +0 -34
  738. data/ext/v8/upstream/v8/tools/visual_studio/d8.vcproj +0 -193
  739. data/ext/v8/upstream/v8/tools/visual_studio/d8_arm.vcproj +0 -193
  740. data/ext/v8/upstream/v8/tools/visual_studio/d8_x64.vcproj +0 -209
  741. data/ext/v8/upstream/v8/tools/visual_studio/d8js2c.cmd +0 -6
  742. data/ext/v8/upstream/v8/tools/visual_studio/debug.vsprops +0 -17
  743. data/ext/v8/upstream/v8/tools/visual_studio/ia32.vsprops +0 -17
  744. data/ext/v8/upstream/v8/tools/visual_studio/js2c.cmd +0 -6
  745. data/ext/v8/upstream/v8/tools/visual_studio/release.vsprops +0 -24
  746. data/ext/v8/upstream/v8/tools/visual_studio/v8.sln +0 -101
  747. data/ext/v8/upstream/v8/tools/visual_studio/v8.vcproj +0 -227
  748. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.sln +0 -74
  749. data/ext/v8/upstream/v8/tools/visual_studio/v8_arm.vcproj +0 -227
  750. data/ext/v8/upstream/v8/tools/visual_studio/v8_base.vcproj +0 -1308
  751. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_arm.vcproj +0 -1238
  752. data/ext/v8/upstream/v8/tools/visual_studio/v8_base_x64.vcproj +0 -1300
  753. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest.vcproj +0 -265
  754. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_arm.vcproj +0 -249
  755. data/ext/v8/upstream/v8/tools/visual_studio/v8_cctest_x64.vcproj +0 -257
  756. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot.vcproj +0 -145
  757. data/ext/v8/upstream/v8/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -145
  758. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample.vcproj +0 -145
  759. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_arm.vcproj +0 -145
  760. data/ext/v8/upstream/v8/tools/visual_studio/v8_process_sample_x64.vcproj +0 -161
  761. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample.vcproj +0 -147
  762. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -147
  763. data/ext/v8/upstream/v8/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -163
  764. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot.vcproj +0 -142
  765. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc.vcproj +0 -92
  766. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -92
  767. data/ext/v8/upstream/v8/tools/visual_studio/v8_snapshot_x64.vcproj +0 -142
  768. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.sln +0 -101
  769. data/ext/v8/upstream/v8/tools/visual_studio/v8_x64.vcproj +0 -227
  770. data/ext/v8/upstream/v8/tools/visual_studio/x64.vsprops +0 -18
  771. data/ext/v8/upstream/v8/tools/windows-tick-processor.bat +0 -30
@@ -1,437 +0,0 @@
1
- // Copyright 2008 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_IA32)
31
-
32
- #include "codegen-inl.h"
33
- #include "jump-target-inl.h"
34
- #include "register-allocator-inl.h"
35
- #include "virtual-frame-inl.h"
36
-
37
- namespace v8 {
38
- namespace internal {
39
-
40
- // -------------------------------------------------------------------------
41
- // JumpTarget implementation.
42
-
43
- #define __ ACCESS_MASM(cgen()->masm())
44
-
45
- void JumpTarget::DoJump() {
46
- ASSERT(cgen()->has_valid_frame());
47
- // Live non-frame registers are not allowed at unconditional jumps
48
- // because we have no way of invalidating the corresponding results
49
- // which are still live in the C++ code.
50
- ASSERT(cgen()->HasValidEntryRegisters());
51
-
52
- if (is_bound()) {
53
- // Backward jump. There is an expected frame to merge to.
54
- ASSERT(direction_ == BIDIRECTIONAL);
55
- cgen()->frame()->PrepareMergeTo(entry_frame_);
56
- cgen()->frame()->MergeTo(entry_frame_);
57
- cgen()->DeleteFrame();
58
- __ jmp(&entry_label_);
59
- } else if (entry_frame_ != NULL) {
60
- // Forward jump with a preconfigured entry frame. Assert the
61
- // current frame matches the expected one and jump to the block.
62
- ASSERT(cgen()->frame()->Equals(entry_frame_));
63
- cgen()->DeleteFrame();
64
- __ jmp(&entry_label_);
65
- } else {
66
- // Forward jump. Remember the current frame and emit a jump to
67
- // its merge code.
68
- AddReachingFrame(cgen()->frame());
69
- RegisterFile empty;
70
- cgen()->SetFrame(NULL, &empty);
71
- __ jmp(&merge_labels_.last());
72
- }
73
- }
74
-
75
-
76
- void JumpTarget::DoBranch(Condition cc, Hint hint) {
77
- ASSERT(cgen() != NULL);
78
- ASSERT(cgen()->has_valid_frame());
79
-
80
- if (is_bound()) {
81
- ASSERT(direction_ == BIDIRECTIONAL);
82
- // Backward branch. We have an expected frame to merge to on the
83
- // backward edge.
84
-
85
- // Swap the current frame for a copy (we do the swapping to get
86
- // the off-frame registers off the fall through) to use for the
87
- // branch.
88
- VirtualFrame* fall_through_frame = cgen()->frame();
89
- VirtualFrame* branch_frame = new VirtualFrame(fall_through_frame);
90
- RegisterFile non_frame_registers;
91
- cgen()->SetFrame(branch_frame, &non_frame_registers);
92
-
93
- // Check if we can avoid merge code.
94
- cgen()->frame()->PrepareMergeTo(entry_frame_);
95
- if (cgen()->frame()->Equals(entry_frame_)) {
96
- // Branch right in to the block.
97
- cgen()->DeleteFrame();
98
- __ j(cc, &entry_label_, hint);
99
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
100
- return;
101
- }
102
-
103
- // Check if we can reuse existing merge code.
104
- for (int i = 0; i < reaching_frames_.length(); i++) {
105
- if (reaching_frames_[i] != NULL &&
106
- cgen()->frame()->Equals(reaching_frames_[i])) {
107
- // Branch to the merge code.
108
- cgen()->DeleteFrame();
109
- __ j(cc, &merge_labels_[i], hint);
110
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
111
- return;
112
- }
113
- }
114
-
115
- // To emit the merge code here, we negate the condition and branch
116
- // around the merge code on the fall through path.
117
- Label original_fall_through;
118
- __ j(NegateCondition(cc), &original_fall_through, NegateHint(hint));
119
- cgen()->frame()->MergeTo(entry_frame_);
120
- cgen()->DeleteFrame();
121
- __ jmp(&entry_label_);
122
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
123
- __ bind(&original_fall_through);
124
-
125
- } else if (entry_frame_ != NULL) {
126
- // Forward branch with a preconfigured entry frame. Assert the
127
- // current frame matches the expected one and branch to the block.
128
- ASSERT(cgen()->frame()->Equals(entry_frame_));
129
- // Explicitly use the macro assembler instead of __ as forward
130
- // branches are expected to be a fixed size (no inserted
131
- // coverage-checking instructions please). This is used in
132
- // Reference::GetValue.
133
- cgen()->masm()->j(cc, &entry_label_, hint);
134
-
135
- } else {
136
- // Forward branch. A copy of the current frame is remembered and
137
- // a branch to the merge code is emitted. Explicitly use the
138
- // macro assembler instead of __ as forward branches are expected
139
- // to be a fixed size (no inserted coverage-checking instructions
140
- // please). This is used in Reference::GetValue.
141
- AddReachingFrame(new VirtualFrame(cgen()->frame()));
142
- cgen()->masm()->j(cc, &merge_labels_.last(), hint);
143
- }
144
- }
145
-
146
-
147
- void JumpTarget::Call() {
148
- // Call is used to push the address of the catch block on the stack as
149
- // a return address when compiling try/catch and try/finally. We
150
- // fully spill the frame before making the call. The expected frame
151
- // at the label (which should be the only one) is the spilled current
152
- // frame plus an in-memory return address. The "fall-through" frame
153
- // at the return site is the spilled current frame.
154
- ASSERT(cgen() != NULL);
155
- ASSERT(cgen()->has_valid_frame());
156
- // There are no non-frame references across the call.
157
- ASSERT(cgen()->HasValidEntryRegisters());
158
- ASSERT(!is_linked());
159
-
160
- cgen()->frame()->SpillAll();
161
- VirtualFrame* target_frame = new VirtualFrame(cgen()->frame());
162
- target_frame->Adjust(1);
163
- // We do not expect a call with a preconfigured entry frame.
164
- ASSERT(entry_frame_ == NULL);
165
- AddReachingFrame(target_frame);
166
- __ call(&merge_labels_.last());
167
- }
168
-
169
-
170
- void JumpTarget::DoBind() {
171
- ASSERT(cgen() != NULL);
172
- ASSERT(!is_bound());
173
-
174
- // Live non-frame registers are not allowed at the start of a basic
175
- // block.
176
- ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
177
-
178
- // Fast case: the jump target was manually configured with an entry
179
- // frame to use.
180
- if (entry_frame_ != NULL) {
181
- // Assert no reaching frames to deal with.
182
- ASSERT(reaching_frames_.is_empty());
183
- ASSERT(!cgen()->has_valid_frame());
184
-
185
- RegisterFile empty;
186
- if (direction_ == BIDIRECTIONAL) {
187
- // Copy the entry frame so the original can be used for a
188
- // possible backward jump.
189
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
190
- } else {
191
- // Take ownership of the entry frame.
192
- cgen()->SetFrame(entry_frame_, &empty);
193
- entry_frame_ = NULL;
194
- }
195
- __ bind(&entry_label_);
196
- return;
197
- }
198
-
199
- if (!is_linked()) {
200
- ASSERT(cgen()->has_valid_frame());
201
- if (direction_ == FORWARD_ONLY) {
202
- // Fast case: no forward jumps and no possible backward jumps.
203
- // The stack pointer can be floating above the top of the
204
- // virtual frame before the bind. Afterward, it should not.
205
- VirtualFrame* frame = cgen()->frame();
206
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
207
- if (difference > 0) {
208
- frame->stack_pointer_ -= difference;
209
- __ add(Operand(esp), Immediate(difference * kPointerSize));
210
- }
211
- } else {
212
- ASSERT(direction_ == BIDIRECTIONAL);
213
- // Fast case: no forward jumps, possible backward ones. Remove
214
- // constants and copies above the watermark on the fall-through
215
- // frame and use it as the entry frame.
216
- cgen()->frame()->MakeMergable();
217
- entry_frame_ = new VirtualFrame(cgen()->frame());
218
- }
219
- __ bind(&entry_label_);
220
- return;
221
- }
222
-
223
- if (direction_ == FORWARD_ONLY &&
224
- !cgen()->has_valid_frame() &&
225
- reaching_frames_.length() == 1) {
226
- // Fast case: no fall-through, a single forward jump, and no
227
- // possible backward jumps. Pick up the only reaching frame, take
228
- // ownership of it, and use it for the block about to be emitted.
229
- VirtualFrame* frame = reaching_frames_[0];
230
- RegisterFile empty;
231
- cgen()->SetFrame(frame, &empty);
232
- reaching_frames_[0] = NULL;
233
- __ bind(&merge_labels_[0]);
234
-
235
- // The stack pointer can be floating above the top of the
236
- // virtual frame before the bind. Afterward, it should not.
237
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
238
- if (difference > 0) {
239
- frame->stack_pointer_ -= difference;
240
- __ add(Operand(esp), Immediate(difference * kPointerSize));
241
- }
242
-
243
- __ bind(&entry_label_);
244
- return;
245
- }
246
-
247
- // If there is a current frame, record it as the fall-through. It
248
- // is owned by the reaching frames for now.
249
- bool had_fall_through = false;
250
- if (cgen()->has_valid_frame()) {
251
- had_fall_through = true;
252
- AddReachingFrame(cgen()->frame()); // Return value ignored.
253
- RegisterFile empty;
254
- cgen()->SetFrame(NULL, &empty);
255
- }
256
-
257
- // Compute the frame to use for entry to the block.
258
- ComputeEntryFrame();
259
-
260
- // Some moves required to merge to an expected frame require purely
261
- // frame state changes, and do not require any code generation.
262
- // Perform those first to increase the possibility of finding equal
263
- // frames below.
264
- for (int i = 0; i < reaching_frames_.length(); i++) {
265
- if (reaching_frames_[i] != NULL) {
266
- reaching_frames_[i]->PrepareMergeTo(entry_frame_);
267
- }
268
- }
269
-
270
- if (is_linked()) {
271
- // There were forward jumps. Handle merging the reaching frames
272
- // to the entry frame.
273
-
274
- // Loop over the (non-null) reaching frames and process any that
275
- // need merge code. Iterate backwards through the list to handle
276
- // the fall-through frame first. Set frames that will be
277
- // processed after 'i' to NULL if we want to avoid processing
278
- // them.
279
- for (int i = reaching_frames_.length() - 1; i >= 0; i--) {
280
- VirtualFrame* frame = reaching_frames_[i];
281
-
282
- if (frame != NULL) {
283
- // Does the frame (probably) need merge code?
284
- if (!frame->Equals(entry_frame_)) {
285
- // We could have a valid frame as the fall through to the
286
- // binding site or as the fall through from a previous merge
287
- // code block. Jump around the code we are about to
288
- // generate.
289
- if (cgen()->has_valid_frame()) {
290
- cgen()->DeleteFrame();
291
- __ jmp(&entry_label_);
292
- }
293
- // Pick up the frame for this block. Assume ownership if
294
- // there cannot be backward jumps.
295
- RegisterFile empty;
296
- if (direction_ == BIDIRECTIONAL) {
297
- cgen()->SetFrame(new VirtualFrame(frame), &empty);
298
- } else {
299
- cgen()->SetFrame(frame, &empty);
300
- reaching_frames_[i] = NULL;
301
- }
302
- __ bind(&merge_labels_[i]);
303
-
304
- // Loop over the remaining (non-null) reaching frames,
305
- // looking for any that can share merge code with this one.
306
- for (int j = 0; j < i; j++) {
307
- VirtualFrame* other = reaching_frames_[j];
308
- if (other != NULL && other->Equals(cgen()->frame())) {
309
- // Set the reaching frame element to null to avoid
310
- // processing it later, and then bind its entry label.
311
- reaching_frames_[j] = NULL;
312
- __ bind(&merge_labels_[j]);
313
- }
314
- }
315
-
316
- // Emit the merge code.
317
- cgen()->frame()->MergeTo(entry_frame_);
318
- } else if (i == reaching_frames_.length() - 1 && had_fall_through) {
319
- // If this is the fall through frame, and it didn't need
320
- // merge code, we need to pick up the frame so we can jump
321
- // around subsequent merge blocks if necessary.
322
- RegisterFile empty;
323
- cgen()->SetFrame(frame, &empty);
324
- reaching_frames_[i] = NULL;
325
- }
326
- }
327
- }
328
-
329
- // The code generator may not have a current frame if there was no
330
- // fall through and none of the reaching frames needed merging.
331
- // In that case, clone the entry frame as the current frame.
332
- if (!cgen()->has_valid_frame()) {
333
- RegisterFile empty;
334
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
335
- }
336
-
337
- // There may be unprocessed reaching frames that did not need
338
- // merge code. They will have unbound merge labels. Bind their
339
- // merge labels to be the same as the entry label and deallocate
340
- // them.
341
- for (int i = 0; i < reaching_frames_.length(); i++) {
342
- if (!merge_labels_[i].is_bound()) {
343
- reaching_frames_[i] = NULL;
344
- __ bind(&merge_labels_[i]);
345
- }
346
- }
347
-
348
- // There are non-NULL reaching frames with bound labels for each
349
- // merge block, but only on backward targets.
350
- } else {
351
- // There were no forward jumps. There must be a current frame and
352
- // this must be a bidirectional target.
353
- ASSERT(reaching_frames_.length() == 1);
354
- ASSERT(reaching_frames_[0] != NULL);
355
- ASSERT(direction_ == BIDIRECTIONAL);
356
-
357
- // Use a copy of the reaching frame so the original can be saved
358
- // for possible reuse as a backward merge block.
359
- RegisterFile empty;
360
- cgen()->SetFrame(new VirtualFrame(reaching_frames_[0]), &empty);
361
- __ bind(&merge_labels_[0]);
362
- cgen()->frame()->MergeTo(entry_frame_);
363
- }
364
-
365
- __ bind(&entry_label_);
366
- }
367
-
368
-
369
- void BreakTarget::Jump() {
370
- // Drop leftover statement state from the frame before merging, without
371
- // emitting code.
372
- ASSERT(cgen()->has_valid_frame());
373
- int count = cgen()->frame()->height() - expected_height_;
374
- cgen()->frame()->ForgetElements(count);
375
- DoJump();
376
- }
377
-
378
-
379
- void BreakTarget::Jump(Result* arg) {
380
- // Drop leftover statement state from the frame before merging, without
381
- // emitting code.
382
- ASSERT(cgen()->has_valid_frame());
383
- int count = cgen()->frame()->height() - expected_height_;
384
- cgen()->frame()->ForgetElements(count);
385
- cgen()->frame()->Push(arg);
386
- DoJump();
387
- }
388
-
389
-
390
- void BreakTarget::Bind() {
391
- #ifdef DEBUG
392
- // All the forward-reaching frames should have been adjusted at the
393
- // jumps to this target.
394
- for (int i = 0; i < reaching_frames_.length(); i++) {
395
- ASSERT(reaching_frames_[i] == NULL ||
396
- reaching_frames_[i]->height() == expected_height_);
397
- }
398
- #endif
399
- // Drop leftover statement state from the frame before merging, even on
400
- // the fall through. This is so we can bind the return target with state
401
- // on the frame.
402
- if (cgen()->has_valid_frame()) {
403
- int count = cgen()->frame()->height() - expected_height_;
404
- cgen()->frame()->ForgetElements(count);
405
- }
406
- DoBind();
407
- }
408
-
409
-
410
- void BreakTarget::Bind(Result* arg) {
411
- #ifdef DEBUG
412
- // All the forward-reaching frames should have been adjusted at the
413
- // jumps to this target.
414
- for (int i = 0; i < reaching_frames_.length(); i++) {
415
- ASSERT(reaching_frames_[i] == NULL ||
416
- reaching_frames_[i]->height() == expected_height_ + 1);
417
- }
418
- #endif
419
- // Drop leftover statement state from the frame before merging, even on
420
- // the fall through. This is so we can bind the return target with state
421
- // on the frame.
422
- if (cgen()->has_valid_frame()) {
423
- int count = cgen()->frame()->height() - expected_height_;
424
- cgen()->frame()->ForgetElements(count);
425
- cgen()->frame()->Push(arg);
426
- }
427
- DoBind();
428
- *arg = cgen()->frame()->Pop();
429
- }
430
-
431
-
432
- #undef __
433
-
434
-
435
- } } // namespace v8::internal
436
-
437
- #endif // V8_TARGET_ARCH_IA32
@@ -1,4158 +0,0 @@
1
- // Copyright 2011 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #if defined(V8_TARGET_ARCH_IA32)
31
-
32
- #include "ia32/lithium-codegen-ia32.h"
33
- #include "code-stubs.h"
34
- #include "deoptimizer.h"
35
- #include "stub-cache.h"
36
-
37
- namespace v8 {
38
- namespace internal {
39
-
40
-
41
- // When invoking builtins, we need to record the safepoint in the middle of
42
- // the invoke instruction sequence generated by the macro assembler.
43
- class SafepointGenerator : public PostCallGenerator {
44
- public:
45
- SafepointGenerator(LCodeGen* codegen,
46
- LPointerMap* pointers,
47
- int deoptimization_index)
48
- : codegen_(codegen),
49
- pointers_(pointers),
50
- deoptimization_index_(deoptimization_index) {}
51
- virtual ~SafepointGenerator() { }
52
-
53
- virtual void Generate() {
54
- codegen_->RecordSafepoint(pointers_, deoptimization_index_);
55
- }
56
-
57
- private:
58
- LCodeGen* codegen_;
59
- LPointerMap* pointers_;
60
- int deoptimization_index_;
61
- };
62
-
63
-
64
- #define __ masm()->
65
-
66
- bool LCodeGen::GenerateCode() {
67
- HPhase phase("Code generation", chunk());
68
- ASSERT(is_unused());
69
- status_ = GENERATING;
70
- CpuFeatures::Scope scope(SSE2);
71
- return GeneratePrologue() &&
72
- GenerateBody() &&
73
- GenerateDeferredCode() &&
74
- GenerateSafepointTable();
75
- }
76
-
77
-
78
- void LCodeGen::FinishCode(Handle<Code> code) {
79
- ASSERT(is_done());
80
- code->set_stack_slots(StackSlotCount());
81
- code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
82
- PopulateDeoptimizationData(code);
83
- Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
84
- }
85
-
86
-
87
- void LCodeGen::Abort(const char* format, ...) {
88
- if (FLAG_trace_bailout) {
89
- SmartPointer<char> name(info()->shared_info()->DebugName()->ToCString());
90
- PrintF("Aborting LCodeGen in @\"%s\": ", *name);
91
- va_list arguments;
92
- va_start(arguments, format);
93
- OS::VPrint(format, arguments);
94
- va_end(arguments);
95
- PrintF("\n");
96
- }
97
- status_ = ABORTED;
98
- }
99
-
100
-
101
- void LCodeGen::Comment(const char* format, ...) {
102
- if (!FLAG_code_comments) return;
103
- char buffer[4 * KB];
104
- StringBuilder builder(buffer, ARRAY_SIZE(buffer));
105
- va_list arguments;
106
- va_start(arguments, format);
107
- builder.AddFormattedList(format, arguments);
108
- va_end(arguments);
109
-
110
- // Copy the string before recording it in the assembler to avoid
111
- // issues when the stack allocated buffer goes out of scope.
112
- size_t length = builder.position();
113
- Vector<char> copy = Vector<char>::New(length + 1);
114
- memcpy(copy.start(), builder.Finalize(), copy.length());
115
- masm()->RecordComment(copy.start());
116
- }
117
-
118
-
119
- bool LCodeGen::GeneratePrologue() {
120
- ASSERT(is_generating());
121
-
122
- #ifdef DEBUG
123
- if (strlen(FLAG_stop_at) > 0 &&
124
- info_->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
125
- __ int3();
126
- }
127
- #endif
128
-
129
- __ push(ebp); // Caller's frame pointer.
130
- __ mov(ebp, esp);
131
- __ push(esi); // Callee's context.
132
- __ push(edi); // Callee's JS function.
133
-
134
- // Reserve space for the stack slots needed by the code.
135
- int slots = StackSlotCount();
136
- if (slots > 0) {
137
- if (FLAG_debug_code) {
138
- __ mov(Operand(eax), Immediate(slots));
139
- Label loop;
140
- __ bind(&loop);
141
- __ push(Immediate(kSlotsZapValue));
142
- __ dec(eax);
143
- __ j(not_zero, &loop);
144
- } else {
145
- __ sub(Operand(esp), Immediate(slots * kPointerSize));
146
- #ifdef _MSC_VER
147
- // On windows, you may not access the stack more than one page below
148
- // the most recently mapped page. To make the allocated area randomly
149
- // accessible, we write to each page in turn (the value is irrelevant).
150
- const int kPageSize = 4 * KB;
151
- for (int offset = slots * kPointerSize - kPageSize;
152
- offset > 0;
153
- offset -= kPageSize) {
154
- __ mov(Operand(esp, offset), eax);
155
- }
156
- #endif
157
- }
158
- }
159
-
160
- // Possibly allocate a local context.
161
- int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
162
- if (heap_slots > 0) {
163
- Comment(";;; Allocate local context");
164
- // Argument to NewContext is the function, which is still in edi.
165
- __ push(edi);
166
- if (heap_slots <= FastNewContextStub::kMaximumSlots) {
167
- FastNewContextStub stub(heap_slots);
168
- __ CallStub(&stub);
169
- } else {
170
- __ CallRuntime(Runtime::kNewContext, 1);
171
- }
172
- RecordSafepoint(Safepoint::kNoDeoptimizationIndex);
173
- // Context is returned in both eax and esi. It replaces the context
174
- // passed to us. It's saved in the stack and kept live in esi.
175
- __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
176
-
177
- // Copy parameters into context if necessary.
178
- int num_parameters = scope()->num_parameters();
179
- for (int i = 0; i < num_parameters; i++) {
180
- Slot* slot = scope()->parameter(i)->AsSlot();
181
- if (slot != NULL && slot->type() == Slot::CONTEXT) {
182
- int parameter_offset = StandardFrameConstants::kCallerSPOffset +
183
- (num_parameters - 1 - i) * kPointerSize;
184
- // Load parameter from stack.
185
- __ mov(eax, Operand(ebp, parameter_offset));
186
- // Store it in the context.
187
- int context_offset = Context::SlotOffset(slot->index());
188
- __ mov(Operand(esi, context_offset), eax);
189
- // Update the write barrier. This clobbers all involved
190
- // registers, so we have to use a third register to avoid
191
- // clobbering esi.
192
- __ mov(ecx, esi);
193
- __ RecordWrite(ecx, context_offset, eax, ebx);
194
- }
195
- }
196
- Comment(";;; End allocate local context");
197
- }
198
-
199
- // Trace the call.
200
- if (FLAG_trace) {
201
- // We have not executed any compiled code yet, so esi still holds the
202
- // incoming context.
203
- __ CallRuntime(Runtime::kTraceEnter, 0);
204
- }
205
- return !is_aborted();
206
- }
207
-
208
-
209
- bool LCodeGen::GenerateBody() {
210
- ASSERT(is_generating());
211
- bool emit_instructions = true;
212
- for (current_instruction_ = 0;
213
- !is_aborted() && current_instruction_ < instructions_->length();
214
- current_instruction_++) {
215
- LInstruction* instr = instructions_->at(current_instruction_);
216
- if (instr->IsLabel()) {
217
- LLabel* label = LLabel::cast(instr);
218
- emit_instructions = !label->HasReplacement();
219
- }
220
-
221
- if (emit_instructions) {
222
- Comment(";;; @%d: %s.", current_instruction_, instr->Mnemonic());
223
- instr->CompileToNative(this);
224
- }
225
- }
226
- return !is_aborted();
227
- }
228
-
229
-
230
- LInstruction* LCodeGen::GetNextInstruction() {
231
- if (current_instruction_ < instructions_->length() - 1) {
232
- return instructions_->at(current_instruction_ + 1);
233
- } else {
234
- return NULL;
235
- }
236
- }
237
-
238
-
239
- bool LCodeGen::GenerateDeferredCode() {
240
- ASSERT(is_generating());
241
- for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
242
- LDeferredCode* code = deferred_[i];
243
- __ bind(code->entry());
244
- code->Generate();
245
- __ jmp(code->exit());
246
- }
247
-
248
- // Deferred code is the last part of the instruction sequence. Mark
249
- // the generated code as done unless we bailed out.
250
- if (!is_aborted()) status_ = DONE;
251
- return !is_aborted();
252
- }
253
-
254
-
255
- bool LCodeGen::GenerateSafepointTable() {
256
- ASSERT(is_done());
257
- safepoints_.Emit(masm(), StackSlotCount());
258
- return !is_aborted();
259
- }
260
-
261
-
262
- Register LCodeGen::ToRegister(int index) const {
263
- return Register::FromAllocationIndex(index);
264
- }
265
-
266
-
267
- XMMRegister LCodeGen::ToDoubleRegister(int index) const {
268
- return XMMRegister::FromAllocationIndex(index);
269
- }
270
-
271
-
272
- Register LCodeGen::ToRegister(LOperand* op) const {
273
- ASSERT(op->IsRegister());
274
- return ToRegister(op->index());
275
- }
276
-
277
-
278
- XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
279
- ASSERT(op->IsDoubleRegister());
280
- return ToDoubleRegister(op->index());
281
- }
282
-
283
-
284
- int LCodeGen::ToInteger32(LConstantOperand* op) const {
285
- Handle<Object> value = chunk_->LookupLiteral(op);
286
- ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32());
287
- ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) ==
288
- value->Number());
289
- return static_cast<int32_t>(value->Number());
290
- }
291
-
292
-
293
- Immediate LCodeGen::ToImmediate(LOperand* op) {
294
- LConstantOperand* const_op = LConstantOperand::cast(op);
295
- Handle<Object> literal = chunk_->LookupLiteral(const_op);
296
- Representation r = chunk_->LookupLiteralRepresentation(const_op);
297
- if (r.IsInteger32()) {
298
- ASSERT(literal->IsNumber());
299
- return Immediate(static_cast<int32_t>(literal->Number()));
300
- } else if (r.IsDouble()) {
301
- Abort("unsupported double immediate");
302
- }
303
- ASSERT(r.IsTagged());
304
- return Immediate(literal);
305
- }
306
-
307
-
308
- Operand LCodeGen::ToOperand(LOperand* op) const {
309
- if (op->IsRegister()) return Operand(ToRegister(op));
310
- if (op->IsDoubleRegister()) return Operand(ToDoubleRegister(op));
311
- ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot());
312
- int index = op->index();
313
- if (index >= 0) {
314
- // Local or spill slot. Skip the frame pointer, function, and
315
- // context in the fixed part of the frame.
316
- return Operand(ebp, -(index + 3) * kPointerSize);
317
- } else {
318
- // Incoming parameter. Skip the return address.
319
- return Operand(ebp, -(index - 1) * kPointerSize);
320
- }
321
- }
322
-
323
-
324
- Operand LCodeGen::HighOperand(LOperand* op) {
325
- ASSERT(op->IsDoubleStackSlot());
326
- int index = op->index();
327
- int offset = (index >= 0) ? index + 3 : index - 1;
328
- return Operand(ebp, -offset * kPointerSize);
329
- }
330
-
331
-
332
- void LCodeGen::WriteTranslation(LEnvironment* environment,
333
- Translation* translation) {
334
- if (environment == NULL) return;
335
-
336
- // The translation includes one command per value in the environment.
337
- int translation_size = environment->values()->length();
338
- // The output frame height does not include the parameters.
339
- int height = translation_size - environment->parameter_count();
340
-
341
- WriteTranslation(environment->outer(), translation);
342
- int closure_id = DefineDeoptimizationLiteral(environment->closure());
343
- translation->BeginFrame(environment->ast_id(), closure_id, height);
344
- for (int i = 0; i < translation_size; ++i) {
345
- LOperand* value = environment->values()->at(i);
346
- // spilled_registers_ and spilled_double_registers_ are either
347
- // both NULL or both set.
348
- if (environment->spilled_registers() != NULL && value != NULL) {
349
- if (value->IsRegister() &&
350
- environment->spilled_registers()[value->index()] != NULL) {
351
- translation->MarkDuplicate();
352
- AddToTranslation(translation,
353
- environment->spilled_registers()[value->index()],
354
- environment->HasTaggedValueAt(i));
355
- } else if (
356
- value->IsDoubleRegister() &&
357
- environment->spilled_double_registers()[value->index()] != NULL) {
358
- translation->MarkDuplicate();
359
- AddToTranslation(
360
- translation,
361
- environment->spilled_double_registers()[value->index()],
362
- false);
363
- }
364
- }
365
-
366
- AddToTranslation(translation, value, environment->HasTaggedValueAt(i));
367
- }
368
- }
369
-
370
-
371
- void LCodeGen::AddToTranslation(Translation* translation,
372
- LOperand* op,
373
- bool is_tagged) {
374
- if (op == NULL) {
375
- // TODO(twuerthinger): Introduce marker operands to indicate that this value
376
- // is not present and must be reconstructed from the deoptimizer. Currently
377
- // this is only used for the arguments object.
378
- translation->StoreArgumentsObject();
379
- } else if (op->IsStackSlot()) {
380
- if (is_tagged) {
381
- translation->StoreStackSlot(op->index());
382
- } else {
383
- translation->StoreInt32StackSlot(op->index());
384
- }
385
- } else if (op->IsDoubleStackSlot()) {
386
- translation->StoreDoubleStackSlot(op->index());
387
- } else if (op->IsArgument()) {
388
- ASSERT(is_tagged);
389
- int src_index = StackSlotCount() + op->index();
390
- translation->StoreStackSlot(src_index);
391
- } else if (op->IsRegister()) {
392
- Register reg = ToRegister(op);
393
- if (is_tagged) {
394
- translation->StoreRegister(reg);
395
- } else {
396
- translation->StoreInt32Register(reg);
397
- }
398
- } else if (op->IsDoubleRegister()) {
399
- XMMRegister reg = ToDoubleRegister(op);
400
- translation->StoreDoubleRegister(reg);
401
- } else if (op->IsConstantOperand()) {
402
- Handle<Object> literal = chunk()->LookupLiteral(LConstantOperand::cast(op));
403
- int src_index = DefineDeoptimizationLiteral(literal);
404
- translation->StoreLiteral(src_index);
405
- } else {
406
- UNREACHABLE();
407
- }
408
- }
409
-
410
-
411
- void LCodeGen::CallCode(Handle<Code> code,
412
- RelocInfo::Mode mode,
413
- LInstruction* instr,
414
- bool adjusted) {
415
- ASSERT(instr != NULL);
416
- LPointerMap* pointers = instr->pointer_map();
417
- RecordPosition(pointers->position());
418
-
419
- if (!adjusted) {
420
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
421
- }
422
- __ call(code, mode);
423
-
424
- RegisterLazyDeoptimization(instr);
425
-
426
- // Signal that we don't inline smi code before these stubs in the
427
- // optimizing code generator.
428
- if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
429
- code->kind() == Code::COMPARE_IC) {
430
- __ nop();
431
- }
432
- }
433
-
434
-
435
- void LCodeGen::CallRuntime(const Runtime::Function* fun,
436
- int argc,
437
- LInstruction* instr,
438
- bool adjusted) {
439
- ASSERT(instr != NULL);
440
- ASSERT(instr->HasPointerMap());
441
- LPointerMap* pointers = instr->pointer_map();
442
- RecordPosition(pointers->position());
443
-
444
- if (!adjusted) {
445
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
446
- }
447
- __ CallRuntime(fun, argc);
448
-
449
- RegisterLazyDeoptimization(instr);
450
- }
451
-
452
-
453
- void LCodeGen::RegisterLazyDeoptimization(LInstruction* instr) {
454
- // Create the environment to bailout to. If the call has side effects
455
- // execution has to continue after the call otherwise execution can continue
456
- // from a previous bailout point repeating the call.
457
- LEnvironment* deoptimization_environment;
458
- if (instr->HasDeoptimizationEnvironment()) {
459
- deoptimization_environment = instr->deoptimization_environment();
460
- } else {
461
- deoptimization_environment = instr->environment();
462
- }
463
-
464
- RegisterEnvironmentForDeoptimization(deoptimization_environment);
465
- RecordSafepoint(instr->pointer_map(),
466
- deoptimization_environment->deoptimization_index());
467
- }
468
-
469
-
470
- void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment) {
471
- if (!environment->HasBeenRegistered()) {
472
- // Physical stack frame layout:
473
- // -x ............. -4 0 ..................................... y
474
- // [incoming arguments] [spill slots] [pushed outgoing arguments]
475
-
476
- // Layout of the environment:
477
- // 0 ..................................................... size-1
478
- // [parameters] [locals] [expression stack including arguments]
479
-
480
- // Layout of the translation:
481
- // 0 ........................................................ size - 1 + 4
482
- // [expression stack including arguments] [locals] [4 words] [parameters]
483
- // |>------------ translation_size ------------<|
484
-
485
- int frame_count = 0;
486
- for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
487
- ++frame_count;
488
- }
489
- Translation translation(&translations_, frame_count);
490
- WriteTranslation(environment, &translation);
491
- int deoptimization_index = deoptimizations_.length();
492
- environment->Register(deoptimization_index, translation.index());
493
- deoptimizations_.Add(environment);
494
- }
495
- }
496
-
497
-
498
- void LCodeGen::DeoptimizeIf(Condition cc, LEnvironment* environment) {
499
- RegisterEnvironmentForDeoptimization(environment);
500
- ASSERT(environment->HasBeenRegistered());
501
- int id = environment->deoptimization_index();
502
- Address entry = Deoptimizer::GetDeoptimizationEntry(id, Deoptimizer::EAGER);
503
- ASSERT(entry != NULL);
504
- if (entry == NULL) {
505
- Abort("bailout was not prepared");
506
- return;
507
- }
508
-
509
- if (FLAG_deopt_every_n_times != 0) {
510
- Handle<SharedFunctionInfo> shared(info_->shared_info());
511
- Label no_deopt;
512
- __ pushfd();
513
- __ push(eax);
514
- __ push(ebx);
515
- __ mov(ebx, shared);
516
- __ mov(eax, FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset));
517
- __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
518
- __ j(not_zero, &no_deopt);
519
- if (FLAG_trap_on_deopt) __ int3();
520
- __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
521
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
522
- __ pop(ebx);
523
- __ pop(eax);
524
- __ popfd();
525
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
526
-
527
- __ bind(&no_deopt);
528
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kDeoptCounterOffset), eax);
529
- __ pop(ebx);
530
- __ pop(eax);
531
- __ popfd();
532
- }
533
-
534
- if (cc == no_condition) {
535
- if (FLAG_trap_on_deopt) __ int3();
536
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
537
- } else {
538
- if (FLAG_trap_on_deopt) {
539
- NearLabel done;
540
- __ j(NegateCondition(cc), &done);
541
- __ int3();
542
- __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
543
- __ bind(&done);
544
- } else {
545
- __ j(cc, entry, RelocInfo::RUNTIME_ENTRY, not_taken);
546
- }
547
- }
548
- }
549
-
550
-
551
- void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
552
- int length = deoptimizations_.length();
553
- if (length == 0) return;
554
- ASSERT(FLAG_deopt);
555
- Handle<DeoptimizationInputData> data =
556
- factory()->NewDeoptimizationInputData(length, TENURED);
557
-
558
- Handle<ByteArray> translations = translations_.CreateByteArray();
559
- data->SetTranslationByteArray(*translations);
560
- data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
561
-
562
- Handle<FixedArray> literals =
563
- factory()->NewFixedArray(deoptimization_literals_.length(), TENURED);
564
- for (int i = 0; i < deoptimization_literals_.length(); i++) {
565
- literals->set(i, *deoptimization_literals_[i]);
566
- }
567
- data->SetLiteralArray(*literals);
568
-
569
- data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id()));
570
- data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
571
-
572
- // Populate the deoptimization entries.
573
- for (int i = 0; i < length; i++) {
574
- LEnvironment* env = deoptimizations_[i];
575
- data->SetAstId(i, Smi::FromInt(env->ast_id()));
576
- data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
577
- data->SetArgumentsStackHeight(i,
578
- Smi::FromInt(env->arguments_stack_height()));
579
- }
580
- code->set_deoptimization_data(*data);
581
- }
582
-
583
-
584
- int LCodeGen::DefineDeoptimizationLiteral(Handle<Object> literal) {
585
- int result = deoptimization_literals_.length();
586
- for (int i = 0; i < deoptimization_literals_.length(); ++i) {
587
- if (deoptimization_literals_[i].is_identical_to(literal)) return i;
588
- }
589
- deoptimization_literals_.Add(literal);
590
- return result;
591
- }
592
-
593
-
594
- void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
595
- ASSERT(deoptimization_literals_.length() == 0);
596
-
597
- const ZoneList<Handle<JSFunction> >* inlined_closures =
598
- chunk()->inlined_closures();
599
-
600
- for (int i = 0, length = inlined_closures->length();
601
- i < length;
602
- i++) {
603
- DefineDeoptimizationLiteral(inlined_closures->at(i));
604
- }
605
-
606
- inlined_function_count_ = deoptimization_literals_.length();
607
- }
608
-
609
-
610
- void LCodeGen::RecordSafepoint(
611
- LPointerMap* pointers,
612
- Safepoint::Kind kind,
613
- int arguments,
614
- int deoptimization_index) {
615
- const ZoneList<LOperand*>* operands = pointers->operands();
616
- Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
617
- kind, arguments, deoptimization_index);
618
- for (int i = 0; i < operands->length(); i++) {
619
- LOperand* pointer = operands->at(i);
620
- if (pointer->IsStackSlot()) {
621
- safepoint.DefinePointerSlot(pointer->index());
622
- } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
623
- safepoint.DefinePointerRegister(ToRegister(pointer));
624
- }
625
- }
626
- }
627
-
628
-
629
- void LCodeGen::RecordSafepoint(LPointerMap* pointers,
630
- int deoptimization_index) {
631
- RecordSafepoint(pointers, Safepoint::kSimple, 0, deoptimization_index);
632
- }
633
-
634
-
635
- void LCodeGen::RecordSafepoint(int deoptimization_index) {
636
- LPointerMap empty_pointers(RelocInfo::kNoPosition);
637
- RecordSafepoint(&empty_pointers, deoptimization_index);
638
- }
639
-
640
-
641
- void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
642
- int arguments,
643
- int deoptimization_index) {
644
- RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments,
645
- deoptimization_index);
646
- }
647
-
648
-
649
- void LCodeGen::RecordPosition(int position) {
650
- if (!FLAG_debug_info || position == RelocInfo::kNoPosition) return;
651
- masm()->positions_recorder()->RecordPosition(position);
652
- }
653
-
654
-
655
- void LCodeGen::DoLabel(LLabel* label) {
656
- if (label->is_loop_header()) {
657
- Comment(";;; B%d - LOOP entry", label->block_id());
658
- } else {
659
- Comment(";;; B%d", label->block_id());
660
- }
661
- __ bind(label->label());
662
- current_block_ = label->block_id();
663
- LCodeGen::DoGap(label);
664
- }
665
-
666
-
667
- void LCodeGen::DoParallelMove(LParallelMove* move) {
668
- resolver_.Resolve(move);
669
- }
670
-
671
-
672
- void LCodeGen::DoGap(LGap* gap) {
673
- for (int i = LGap::FIRST_INNER_POSITION;
674
- i <= LGap::LAST_INNER_POSITION;
675
- i++) {
676
- LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
677
- LParallelMove* move = gap->GetParallelMove(inner_pos);
678
- if (move != NULL) DoParallelMove(move);
679
- }
680
-
681
- LInstruction* next = GetNextInstruction();
682
- if (next != NULL && next->IsLazyBailout()) {
683
- int pc = masm()->pc_offset();
684
- safepoints_.SetPcAfterGap(pc);
685
- }
686
- }
687
-
688
-
689
- void LCodeGen::DoParameter(LParameter* instr) {
690
- // Nothing to do.
691
- }
692
-
693
-
694
- void LCodeGen::DoCallStub(LCallStub* instr) {
695
- ASSERT(ToRegister(instr->context()).is(esi));
696
- ASSERT(ToRegister(instr->result()).is(eax));
697
- switch (instr->hydrogen()->major_key()) {
698
- case CodeStub::RegExpConstructResult: {
699
- RegExpConstructResultStub stub;
700
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
701
- break;
702
- }
703
- case CodeStub::RegExpExec: {
704
- RegExpExecStub stub;
705
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
706
- break;
707
- }
708
- case CodeStub::SubString: {
709
- SubStringStub stub;
710
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
711
- break;
712
- }
713
- case CodeStub::NumberToString: {
714
- NumberToStringStub stub;
715
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
716
- break;
717
- }
718
- case CodeStub::StringAdd: {
719
- StringAddStub stub(NO_STRING_ADD_FLAGS);
720
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
721
- break;
722
- }
723
- case CodeStub::StringCompare: {
724
- StringCompareStub stub;
725
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
726
- break;
727
- }
728
- case CodeStub::TranscendentalCache: {
729
- TranscendentalCacheStub stub(instr->transcendental_type(),
730
- TranscendentalCacheStub::TAGGED);
731
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
732
- break;
733
- }
734
- default:
735
- UNREACHABLE();
736
- }
737
- }
738
-
739
-
740
- void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
741
- // Nothing to do.
742
- }
743
-
744
-
745
- void LCodeGen::DoModI(LModI* instr) {
746
- if (instr->hydrogen()->HasPowerOf2Divisor()) {
747
- Register dividend = ToRegister(instr->InputAt(0));
748
-
749
- int32_t divisor =
750
- HConstant::cast(instr->hydrogen()->right())->Integer32Value();
751
-
752
- if (divisor < 0) divisor = -divisor;
753
-
754
- NearLabel positive_dividend, done;
755
- __ test(dividend, Operand(dividend));
756
- __ j(not_sign, &positive_dividend);
757
- __ neg(dividend);
758
- __ and_(dividend, divisor - 1);
759
- __ neg(dividend);
760
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
761
- __ j(not_zero, &done);
762
- DeoptimizeIf(no_condition, instr->environment());
763
- }
764
- __ bind(&positive_dividend);
765
- __ and_(dividend, divisor - 1);
766
- __ bind(&done);
767
- } else {
768
- LOperand* right = instr->InputAt(1);
769
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
770
- ASSERT(ToRegister(instr->result()).is(edx));
771
-
772
- Register right_reg = ToRegister(right);
773
- ASSERT(!right_reg.is(eax));
774
- ASSERT(!right_reg.is(edx));
775
-
776
- // Check for x % 0.
777
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
778
- __ test(right_reg, ToOperand(right));
779
- DeoptimizeIf(zero, instr->environment());
780
- }
781
-
782
- // Sign extend to edx.
783
- __ cdq();
784
-
785
- // Check for (0 % -x) that will produce negative zero.
786
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
787
- NearLabel positive_left;
788
- NearLabel done;
789
- __ test(eax, Operand(eax));
790
- __ j(not_sign, &positive_left);
791
- __ idiv(right_reg);
792
-
793
- // Test the remainder for 0, because then the result would be -0.
794
- __ test(edx, Operand(edx));
795
- __ j(not_zero, &done);
796
-
797
- DeoptimizeIf(no_condition, instr->environment());
798
- __ bind(&positive_left);
799
- __ idiv(right_reg);
800
- __ bind(&done);
801
- } else {
802
- __ idiv(right_reg);
803
- }
804
- }
805
- }
806
-
807
-
808
- void LCodeGen::DoDivI(LDivI* instr) {
809
- LOperand* right = instr->InputAt(1);
810
- ASSERT(ToRegister(instr->result()).is(eax));
811
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
812
- ASSERT(!ToRegister(instr->InputAt(1)).is(eax));
813
- ASSERT(!ToRegister(instr->InputAt(1)).is(edx));
814
-
815
- Register left_reg = eax;
816
-
817
- // Check for x / 0.
818
- Register right_reg = ToRegister(right);
819
- if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
820
- __ test(right_reg, ToOperand(right));
821
- DeoptimizeIf(zero, instr->environment());
822
- }
823
-
824
- // Check for (0 / -x) that will produce negative zero.
825
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
826
- NearLabel left_not_zero;
827
- __ test(left_reg, Operand(left_reg));
828
- __ j(not_zero, &left_not_zero);
829
- __ test(right_reg, ToOperand(right));
830
- DeoptimizeIf(sign, instr->environment());
831
- __ bind(&left_not_zero);
832
- }
833
-
834
- // Check for (-kMinInt / -1).
835
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
836
- NearLabel left_not_min_int;
837
- __ cmp(left_reg, kMinInt);
838
- __ j(not_zero, &left_not_min_int);
839
- __ cmp(right_reg, -1);
840
- DeoptimizeIf(zero, instr->environment());
841
- __ bind(&left_not_min_int);
842
- }
843
-
844
- // Sign extend to edx.
845
- __ cdq();
846
- __ idiv(right_reg);
847
-
848
- // Deoptimize if remainder is not 0.
849
- __ test(edx, Operand(edx));
850
- DeoptimizeIf(not_zero, instr->environment());
851
- }
852
-
853
-
854
- void LCodeGen::DoMulI(LMulI* instr) {
855
- Register left = ToRegister(instr->InputAt(0));
856
- LOperand* right = instr->InputAt(1);
857
-
858
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
859
- __ mov(ToRegister(instr->TempAt(0)), left);
860
- }
861
-
862
- if (right->IsConstantOperand()) {
863
- // Try strength reductions on the multiplication.
864
- // All replacement instructions are at most as long as the imul
865
- // and have better latency.
866
- int constant = ToInteger32(LConstantOperand::cast(right));
867
- if (constant == -1) {
868
- __ neg(left);
869
- } else if (constant == 0) {
870
- __ xor_(left, Operand(left));
871
- } else if (constant == 2) {
872
- __ add(left, Operand(left));
873
- } else if (!instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
874
- // If we know that the multiplication can't overflow, it's safe to
875
- // use instructions that don't set the overflow flag for the
876
- // multiplication.
877
- switch (constant) {
878
- case 1:
879
- // Do nothing.
880
- break;
881
- case 3:
882
- __ lea(left, Operand(left, left, times_2, 0));
883
- break;
884
- case 4:
885
- __ shl(left, 2);
886
- break;
887
- case 5:
888
- __ lea(left, Operand(left, left, times_4, 0));
889
- break;
890
- case 8:
891
- __ shl(left, 3);
892
- break;
893
- case 9:
894
- __ lea(left, Operand(left, left, times_8, 0));
895
- break;
896
- case 16:
897
- __ shl(left, 4);
898
- break;
899
- default:
900
- __ imul(left, left, constant);
901
- break;
902
- }
903
- } else {
904
- __ imul(left, left, constant);
905
- }
906
- } else {
907
- __ imul(left, ToOperand(right));
908
- }
909
-
910
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
911
- DeoptimizeIf(overflow, instr->environment());
912
- }
913
-
914
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
915
- // Bail out if the result is supposed to be negative zero.
916
- NearLabel done;
917
- __ test(left, Operand(left));
918
- __ j(not_zero, &done);
919
- if (right->IsConstantOperand()) {
920
- if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
921
- DeoptimizeIf(no_condition, instr->environment());
922
- }
923
- } else {
924
- // Test the non-zero operand for negative sign.
925
- __ or_(ToRegister(instr->TempAt(0)), ToOperand(right));
926
- DeoptimizeIf(sign, instr->environment());
927
- }
928
- __ bind(&done);
929
- }
930
- }
931
-
932
-
933
- void LCodeGen::DoBitI(LBitI* instr) {
934
- LOperand* left = instr->InputAt(0);
935
- LOperand* right = instr->InputAt(1);
936
- ASSERT(left->Equals(instr->result()));
937
- ASSERT(left->IsRegister());
938
-
939
- if (right->IsConstantOperand()) {
940
- int right_operand = ToInteger32(LConstantOperand::cast(right));
941
- switch (instr->op()) {
942
- case Token::BIT_AND:
943
- __ and_(ToRegister(left), right_operand);
944
- break;
945
- case Token::BIT_OR:
946
- __ or_(ToRegister(left), right_operand);
947
- break;
948
- case Token::BIT_XOR:
949
- __ xor_(ToRegister(left), right_operand);
950
- break;
951
- default:
952
- UNREACHABLE();
953
- break;
954
- }
955
- } else {
956
- switch (instr->op()) {
957
- case Token::BIT_AND:
958
- __ and_(ToRegister(left), ToOperand(right));
959
- break;
960
- case Token::BIT_OR:
961
- __ or_(ToRegister(left), ToOperand(right));
962
- break;
963
- case Token::BIT_XOR:
964
- __ xor_(ToRegister(left), ToOperand(right));
965
- break;
966
- default:
967
- UNREACHABLE();
968
- break;
969
- }
970
- }
971
- }
972
-
973
-
974
- void LCodeGen::DoShiftI(LShiftI* instr) {
975
- LOperand* left = instr->InputAt(0);
976
- LOperand* right = instr->InputAt(1);
977
- ASSERT(left->Equals(instr->result()));
978
- ASSERT(left->IsRegister());
979
- if (right->IsRegister()) {
980
- ASSERT(ToRegister(right).is(ecx));
981
-
982
- switch (instr->op()) {
983
- case Token::SAR:
984
- __ sar_cl(ToRegister(left));
985
- break;
986
- case Token::SHR:
987
- __ shr_cl(ToRegister(left));
988
- if (instr->can_deopt()) {
989
- __ test(ToRegister(left), Immediate(0x80000000));
990
- DeoptimizeIf(not_zero, instr->environment());
991
- }
992
- break;
993
- case Token::SHL:
994
- __ shl_cl(ToRegister(left));
995
- break;
996
- default:
997
- UNREACHABLE();
998
- break;
999
- }
1000
- } else {
1001
- int value = ToInteger32(LConstantOperand::cast(right));
1002
- uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
1003
- switch (instr->op()) {
1004
- case Token::SAR:
1005
- if (shift_count != 0) {
1006
- __ sar(ToRegister(left), shift_count);
1007
- }
1008
- break;
1009
- case Token::SHR:
1010
- if (shift_count == 0 && instr->can_deopt()) {
1011
- __ test(ToRegister(left), Immediate(0x80000000));
1012
- DeoptimizeIf(not_zero, instr->environment());
1013
- } else {
1014
- __ shr(ToRegister(left), shift_count);
1015
- }
1016
- break;
1017
- case Token::SHL:
1018
- if (shift_count != 0) {
1019
- __ shl(ToRegister(left), shift_count);
1020
- }
1021
- break;
1022
- default:
1023
- UNREACHABLE();
1024
- break;
1025
- }
1026
- }
1027
- }
1028
-
1029
-
1030
- void LCodeGen::DoSubI(LSubI* instr) {
1031
- LOperand* left = instr->InputAt(0);
1032
- LOperand* right = instr->InputAt(1);
1033
- ASSERT(left->Equals(instr->result()));
1034
-
1035
- if (right->IsConstantOperand()) {
1036
- __ sub(ToOperand(left), ToImmediate(right));
1037
- } else {
1038
- __ sub(ToRegister(left), ToOperand(right));
1039
- }
1040
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1041
- DeoptimizeIf(overflow, instr->environment());
1042
- }
1043
- }
1044
-
1045
-
1046
- void LCodeGen::DoConstantI(LConstantI* instr) {
1047
- ASSERT(instr->result()->IsRegister());
1048
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
1049
- }
1050
-
1051
-
1052
- void LCodeGen::DoConstantD(LConstantD* instr) {
1053
- ASSERT(instr->result()->IsDoubleRegister());
1054
- XMMRegister res = ToDoubleRegister(instr->result());
1055
- double v = instr->value();
1056
- // Use xor to produce +0.0 in a fast and compact way, but avoid to
1057
- // do so if the constant is -0.0.
1058
- if (BitCast<uint64_t, double>(v) == 0) {
1059
- __ xorpd(res, res);
1060
- } else {
1061
- Register temp = ToRegister(instr->TempAt(0));
1062
- uint64_t int_val = BitCast<uint64_t, double>(v);
1063
- int32_t lower = static_cast<int32_t>(int_val);
1064
- int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
1065
- if (CpuFeatures::IsSupported(SSE4_1)) {
1066
- CpuFeatures::Scope scope(SSE4_1);
1067
- if (lower != 0) {
1068
- __ Set(temp, Immediate(lower));
1069
- __ movd(res, Operand(temp));
1070
- __ Set(temp, Immediate(upper));
1071
- __ pinsrd(res, Operand(temp), 1);
1072
- } else {
1073
- __ xorpd(res, res);
1074
- __ Set(temp, Immediate(upper));
1075
- __ pinsrd(res, Operand(temp), 1);
1076
- }
1077
- } else {
1078
- __ Set(temp, Immediate(upper));
1079
- __ movd(res, Operand(temp));
1080
- __ psllq(res, 32);
1081
- if (lower != 0) {
1082
- __ Set(temp, Immediate(lower));
1083
- __ movd(xmm0, Operand(temp));
1084
- __ por(res, xmm0);
1085
- }
1086
- }
1087
- }
1088
- }
1089
-
1090
-
1091
- void LCodeGen::DoConstantT(LConstantT* instr) {
1092
- ASSERT(instr->result()->IsRegister());
1093
- __ Set(ToRegister(instr->result()), Immediate(instr->value()));
1094
- }
1095
-
1096
-
1097
- void LCodeGen::DoJSArrayLength(LJSArrayLength* instr) {
1098
- Register result = ToRegister(instr->result());
1099
- Register array = ToRegister(instr->InputAt(0));
1100
- __ mov(result, FieldOperand(array, JSArray::kLengthOffset));
1101
- }
1102
-
1103
-
1104
- void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
1105
- Register result = ToRegister(instr->result());
1106
- Register array = ToRegister(instr->InputAt(0));
1107
- __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
1108
- }
1109
-
1110
-
1111
- void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
1112
- Register result = ToRegister(instr->result());
1113
- Register array = ToRegister(instr->InputAt(0));
1114
- __ mov(result, FieldOperand(array, ExternalArray::kLengthOffset));
1115
- }
1116
-
1117
-
1118
- void LCodeGen::DoValueOf(LValueOf* instr) {
1119
- Register input = ToRegister(instr->InputAt(0));
1120
- Register result = ToRegister(instr->result());
1121
- Register map = ToRegister(instr->TempAt(0));
1122
- ASSERT(input.is(result));
1123
- NearLabel done;
1124
- // If the object is a smi return the object.
1125
- __ test(input, Immediate(kSmiTagMask));
1126
- __ j(zero, &done);
1127
-
1128
- // If the object is not a value type, return the object.
1129
- __ CmpObjectType(input, JS_VALUE_TYPE, map);
1130
- __ j(not_equal, &done);
1131
- __ mov(result, FieldOperand(input, JSValue::kValueOffset));
1132
-
1133
- __ bind(&done);
1134
- }
1135
-
1136
-
1137
- void LCodeGen::DoBitNotI(LBitNotI* instr) {
1138
- LOperand* input = instr->InputAt(0);
1139
- ASSERT(input->Equals(instr->result()));
1140
- __ not_(ToRegister(input));
1141
- }
1142
-
1143
-
1144
- void LCodeGen::DoThrow(LThrow* instr) {
1145
- __ push(ToOperand(instr->InputAt(0)));
1146
- CallRuntime(Runtime::kThrow, 1, instr, false);
1147
-
1148
- if (FLAG_debug_code) {
1149
- Comment("Unreachable code.");
1150
- __ int3();
1151
- }
1152
- }
1153
-
1154
-
1155
- void LCodeGen::DoAddI(LAddI* instr) {
1156
- LOperand* left = instr->InputAt(0);
1157
- LOperand* right = instr->InputAt(1);
1158
- ASSERT(left->Equals(instr->result()));
1159
-
1160
- if (right->IsConstantOperand()) {
1161
- __ add(ToOperand(left), ToImmediate(right));
1162
- } else {
1163
- __ add(ToRegister(left), ToOperand(right));
1164
- }
1165
-
1166
- if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
1167
- DeoptimizeIf(overflow, instr->environment());
1168
- }
1169
- }
1170
-
1171
-
1172
- void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1173
- XMMRegister left = ToDoubleRegister(instr->InputAt(0));
1174
- XMMRegister right = ToDoubleRegister(instr->InputAt(1));
1175
- XMMRegister result = ToDoubleRegister(instr->result());
1176
- // Modulo uses a fixed result register.
1177
- ASSERT(instr->op() == Token::MOD || left.is(result));
1178
- switch (instr->op()) {
1179
- case Token::ADD:
1180
- __ addsd(left, right);
1181
- break;
1182
- case Token::SUB:
1183
- __ subsd(left, right);
1184
- break;
1185
- case Token::MUL:
1186
- __ mulsd(left, right);
1187
- break;
1188
- case Token::DIV:
1189
- __ divsd(left, right);
1190
- break;
1191
- case Token::MOD: {
1192
- // Pass two doubles as arguments on the stack.
1193
- __ PrepareCallCFunction(4, eax);
1194
- __ movdbl(Operand(esp, 0 * kDoubleSize), left);
1195
- __ movdbl(Operand(esp, 1 * kDoubleSize), right);
1196
- __ CallCFunction(
1197
- ExternalReference::double_fp_operation(Token::MOD, isolate()),
1198
- 4);
1199
-
1200
- // Return value is in st(0) on ia32.
1201
- // Store it into the (fixed) result register.
1202
- __ sub(Operand(esp), Immediate(kDoubleSize));
1203
- __ fstp_d(Operand(esp, 0));
1204
- __ movdbl(result, Operand(esp, 0));
1205
- __ add(Operand(esp), Immediate(kDoubleSize));
1206
- break;
1207
- }
1208
- default:
1209
- UNREACHABLE();
1210
- break;
1211
- }
1212
- }
1213
-
1214
-
1215
- void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1216
- ASSERT(ToRegister(instr->InputAt(0)).is(edx));
1217
- ASSERT(ToRegister(instr->InputAt(1)).is(eax));
1218
- ASSERT(ToRegister(instr->result()).is(eax));
1219
-
1220
- TypeRecordingBinaryOpStub stub(instr->op(), NO_OVERWRITE);
1221
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
1222
- }
1223
-
1224
-
1225
- int LCodeGen::GetNextEmittedBlock(int block) {
1226
- for (int i = block + 1; i < graph()->blocks()->length(); ++i) {
1227
- LLabel* label = chunk_->GetLabel(i);
1228
- if (!label->HasReplacement()) return i;
1229
- }
1230
- return -1;
1231
- }
1232
-
1233
-
1234
- void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) {
1235
- int next_block = GetNextEmittedBlock(current_block_);
1236
- right_block = chunk_->LookupDestination(right_block);
1237
- left_block = chunk_->LookupDestination(left_block);
1238
-
1239
- if (right_block == left_block) {
1240
- EmitGoto(left_block);
1241
- } else if (left_block == next_block) {
1242
- __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
1243
- } else if (right_block == next_block) {
1244
- __ j(cc, chunk_->GetAssemblyLabel(left_block));
1245
- } else {
1246
- __ j(cc, chunk_->GetAssemblyLabel(left_block));
1247
- __ jmp(chunk_->GetAssemblyLabel(right_block));
1248
- }
1249
- }
1250
-
1251
-
1252
- void LCodeGen::DoBranch(LBranch* instr) {
1253
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1254
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1255
-
1256
- Representation r = instr->hydrogen()->representation();
1257
- if (r.IsInteger32()) {
1258
- Register reg = ToRegister(instr->InputAt(0));
1259
- __ test(reg, Operand(reg));
1260
- EmitBranch(true_block, false_block, not_zero);
1261
- } else if (r.IsDouble()) {
1262
- XMMRegister reg = ToDoubleRegister(instr->InputAt(0));
1263
- __ xorpd(xmm0, xmm0);
1264
- __ ucomisd(reg, xmm0);
1265
- EmitBranch(true_block, false_block, not_equal);
1266
- } else {
1267
- ASSERT(r.IsTagged());
1268
- Register reg = ToRegister(instr->InputAt(0));
1269
- if (instr->hydrogen()->type().IsBoolean()) {
1270
- __ cmp(reg, factory()->true_value());
1271
- EmitBranch(true_block, false_block, equal);
1272
- } else {
1273
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1274
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1275
-
1276
- __ cmp(reg, factory()->undefined_value());
1277
- __ j(equal, false_label);
1278
- __ cmp(reg, factory()->true_value());
1279
- __ j(equal, true_label);
1280
- __ cmp(reg, factory()->false_value());
1281
- __ j(equal, false_label);
1282
- __ test(reg, Operand(reg));
1283
- __ j(equal, false_label);
1284
- __ test(reg, Immediate(kSmiTagMask));
1285
- __ j(zero, true_label);
1286
-
1287
- // Test for double values. Zero is false.
1288
- NearLabel call_stub;
1289
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
1290
- factory()->heap_number_map());
1291
- __ j(not_equal, &call_stub);
1292
- __ fldz();
1293
- __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
1294
- __ FCmp();
1295
- __ j(zero, false_label);
1296
- __ jmp(true_label);
1297
-
1298
- // The conversion stub doesn't cause garbage collections so it's
1299
- // safe to not record a safepoint after the call.
1300
- __ bind(&call_stub);
1301
- ToBooleanStub stub;
1302
- __ pushad();
1303
- __ push(reg);
1304
- __ CallStub(&stub);
1305
- __ test(eax, Operand(eax));
1306
- __ popad();
1307
- EmitBranch(true_block, false_block, not_zero);
1308
- }
1309
- }
1310
- }
1311
-
1312
-
1313
- void LCodeGen::EmitGoto(int block, LDeferredCode* deferred_stack_check) {
1314
- block = chunk_->LookupDestination(block);
1315
- int next_block = GetNextEmittedBlock(current_block_);
1316
- if (block != next_block) {
1317
- // Perform stack overflow check if this goto needs it before jumping.
1318
- if (deferred_stack_check != NULL) {
1319
- ExternalReference stack_limit =
1320
- ExternalReference::address_of_stack_limit(isolate());
1321
- __ cmp(esp, Operand::StaticVariable(stack_limit));
1322
- __ j(above_equal, chunk_->GetAssemblyLabel(block));
1323
- __ jmp(deferred_stack_check->entry());
1324
- deferred_stack_check->SetExit(chunk_->GetAssemblyLabel(block));
1325
- } else {
1326
- __ jmp(chunk_->GetAssemblyLabel(block));
1327
- }
1328
- }
1329
- }
1330
-
1331
-
1332
- void LCodeGen::DoDeferredStackCheck(LGoto* instr) {
1333
- __ pushad();
1334
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
1335
- __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
1336
- RecordSafepointWithRegisters(
1337
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
1338
- __ popad();
1339
- }
1340
-
1341
- void LCodeGen::DoGoto(LGoto* instr) {
1342
- class DeferredStackCheck: public LDeferredCode {
1343
- public:
1344
- DeferredStackCheck(LCodeGen* codegen, LGoto* instr)
1345
- : LDeferredCode(codegen), instr_(instr) { }
1346
- virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
1347
- private:
1348
- LGoto* instr_;
1349
- };
1350
-
1351
- DeferredStackCheck* deferred = NULL;
1352
- if (instr->include_stack_check()) {
1353
- deferred = new DeferredStackCheck(this, instr);
1354
- }
1355
- EmitGoto(instr->block_id(), deferred);
1356
- }
1357
-
1358
-
1359
- Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
1360
- Condition cond = no_condition;
1361
- switch (op) {
1362
- case Token::EQ:
1363
- case Token::EQ_STRICT:
1364
- cond = equal;
1365
- break;
1366
- case Token::LT:
1367
- cond = is_unsigned ? below : less;
1368
- break;
1369
- case Token::GT:
1370
- cond = is_unsigned ? above : greater;
1371
- break;
1372
- case Token::LTE:
1373
- cond = is_unsigned ? below_equal : less_equal;
1374
- break;
1375
- case Token::GTE:
1376
- cond = is_unsigned ? above_equal : greater_equal;
1377
- break;
1378
- case Token::IN:
1379
- case Token::INSTANCEOF:
1380
- default:
1381
- UNREACHABLE();
1382
- }
1383
- return cond;
1384
- }
1385
-
1386
-
1387
- void LCodeGen::EmitCmpI(LOperand* left, LOperand* right) {
1388
- if (right->IsConstantOperand()) {
1389
- __ cmp(ToOperand(left), ToImmediate(right));
1390
- } else {
1391
- __ cmp(ToRegister(left), ToOperand(right));
1392
- }
1393
- }
1394
-
1395
-
1396
- void LCodeGen::DoCmpID(LCmpID* instr) {
1397
- LOperand* left = instr->InputAt(0);
1398
- LOperand* right = instr->InputAt(1);
1399
- LOperand* result = instr->result();
1400
-
1401
- NearLabel unordered;
1402
- if (instr->is_double()) {
1403
- // Don't base result on EFLAGS when a NaN is involved. Instead
1404
- // jump to the unordered case, which produces a false value.
1405
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1406
- __ j(parity_even, &unordered, not_taken);
1407
- } else {
1408
- EmitCmpI(left, right);
1409
- }
1410
-
1411
- NearLabel done;
1412
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1413
- __ mov(ToRegister(result), factory()->true_value());
1414
- __ j(cc, &done);
1415
-
1416
- __ bind(&unordered);
1417
- __ mov(ToRegister(result), factory()->false_value());
1418
- __ bind(&done);
1419
- }
1420
-
1421
-
1422
- void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1423
- LOperand* left = instr->InputAt(0);
1424
- LOperand* right = instr->InputAt(1);
1425
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1426
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1427
-
1428
- if (instr->is_double()) {
1429
- // Don't base result on EFLAGS when a NaN is involved. Instead
1430
- // jump to the false block.
1431
- __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1432
- __ j(parity_even, chunk_->GetAssemblyLabel(false_block));
1433
- } else {
1434
- EmitCmpI(left, right);
1435
- }
1436
-
1437
- Condition cc = TokenToCondition(instr->op(), instr->is_double());
1438
- EmitBranch(true_block, false_block, cc);
1439
- }
1440
-
1441
-
1442
- void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1443
- Register left = ToRegister(instr->InputAt(0));
1444
- Register right = ToRegister(instr->InputAt(1));
1445
- Register result = ToRegister(instr->result());
1446
-
1447
- __ cmp(left, Operand(right));
1448
- __ mov(result, factory()->true_value());
1449
- NearLabel done;
1450
- __ j(equal, &done);
1451
- __ mov(result, factory()->false_value());
1452
- __ bind(&done);
1453
- }
1454
-
1455
-
1456
- void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1457
- Register left = ToRegister(instr->InputAt(0));
1458
- Register right = ToRegister(instr->InputAt(1));
1459
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1460
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1461
-
1462
- __ cmp(left, Operand(right));
1463
- EmitBranch(true_block, false_block, equal);
1464
- }
1465
-
1466
-
1467
- void LCodeGen::DoIsNull(LIsNull* instr) {
1468
- Register reg = ToRegister(instr->InputAt(0));
1469
- Register result = ToRegister(instr->result());
1470
-
1471
- // TODO(fsc): If the expression is known to be a smi, then it's
1472
- // definitely not null. Materialize false.
1473
-
1474
- __ cmp(reg, factory()->null_value());
1475
- if (instr->is_strict()) {
1476
- __ mov(result, factory()->true_value());
1477
- NearLabel done;
1478
- __ j(equal, &done);
1479
- __ mov(result, factory()->false_value());
1480
- __ bind(&done);
1481
- } else {
1482
- NearLabel true_value, false_value, done;
1483
- __ j(equal, &true_value);
1484
- __ cmp(reg, factory()->undefined_value());
1485
- __ j(equal, &true_value);
1486
- __ test(reg, Immediate(kSmiTagMask));
1487
- __ j(zero, &false_value);
1488
- // Check for undetectable objects by looking in the bit field in
1489
- // the map. The object has already been smi checked.
1490
- Register scratch = result;
1491
- __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1492
- __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1493
- __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1494
- __ j(not_zero, &true_value);
1495
- __ bind(&false_value);
1496
- __ mov(result, factory()->false_value());
1497
- __ jmp(&done);
1498
- __ bind(&true_value);
1499
- __ mov(result, factory()->true_value());
1500
- __ bind(&done);
1501
- }
1502
- }
1503
-
1504
-
1505
- void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1506
- Register reg = ToRegister(instr->InputAt(0));
1507
-
1508
- // TODO(fsc): If the expression is known to be a smi, then it's
1509
- // definitely not null. Jump to the false block.
1510
-
1511
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1512
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1513
-
1514
- __ cmp(reg, factory()->null_value());
1515
- if (instr->is_strict()) {
1516
- EmitBranch(true_block, false_block, equal);
1517
- } else {
1518
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1519
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1520
- __ j(equal, true_label);
1521
- __ cmp(reg, factory()->undefined_value());
1522
- __ j(equal, true_label);
1523
- __ test(reg, Immediate(kSmiTagMask));
1524
- __ j(zero, false_label);
1525
- // Check for undetectable objects by looking in the bit field in
1526
- // the map. The object has already been smi checked.
1527
- Register scratch = ToRegister(instr->TempAt(0));
1528
- __ mov(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1529
- __ movzx_b(scratch, FieldOperand(scratch, Map::kBitFieldOffset));
1530
- __ test(scratch, Immediate(1 << Map::kIsUndetectable));
1531
- EmitBranch(true_block, false_block, not_zero);
1532
- }
1533
- }
1534
-
1535
-
1536
- Condition LCodeGen::EmitIsObject(Register input,
1537
- Register temp1,
1538
- Register temp2,
1539
- Label* is_not_object,
1540
- Label* is_object) {
1541
- ASSERT(!input.is(temp1));
1542
- ASSERT(!input.is(temp2));
1543
- ASSERT(!temp1.is(temp2));
1544
-
1545
- __ test(input, Immediate(kSmiTagMask));
1546
- __ j(equal, is_not_object);
1547
-
1548
- __ cmp(input, isolate()->factory()->null_value());
1549
- __ j(equal, is_object);
1550
-
1551
- __ mov(temp1, FieldOperand(input, HeapObject::kMapOffset));
1552
- // Undetectable objects behave like undefined.
1553
- __ movzx_b(temp2, FieldOperand(temp1, Map::kBitFieldOffset));
1554
- __ test(temp2, Immediate(1 << Map::kIsUndetectable));
1555
- __ j(not_zero, is_not_object);
1556
-
1557
- __ movzx_b(temp2, FieldOperand(temp1, Map::kInstanceTypeOffset));
1558
- __ cmp(temp2, FIRST_JS_OBJECT_TYPE);
1559
- __ j(below, is_not_object);
1560
- __ cmp(temp2, LAST_JS_OBJECT_TYPE);
1561
- return below_equal;
1562
- }
1563
-
1564
-
1565
- void LCodeGen::DoIsObject(LIsObject* instr) {
1566
- Register reg = ToRegister(instr->InputAt(0));
1567
- Register result = ToRegister(instr->result());
1568
- Register temp = ToRegister(instr->TempAt(0));
1569
- Label is_false, is_true, done;
1570
-
1571
- Condition true_cond = EmitIsObject(reg, result, temp, &is_false, &is_true);
1572
- __ j(true_cond, &is_true);
1573
-
1574
- __ bind(&is_false);
1575
- __ mov(result, factory()->false_value());
1576
- __ jmp(&done);
1577
-
1578
- __ bind(&is_true);
1579
- __ mov(result, factory()->true_value());
1580
-
1581
- __ bind(&done);
1582
- }
1583
-
1584
-
1585
- void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
1586
- Register reg = ToRegister(instr->InputAt(0));
1587
- Register temp = ToRegister(instr->TempAt(0));
1588
- Register temp2 = ToRegister(instr->TempAt(1));
1589
-
1590
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1591
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1592
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1593
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1594
-
1595
- Condition true_cond = EmitIsObject(reg, temp, temp2, false_label, true_label);
1596
-
1597
- EmitBranch(true_block, false_block, true_cond);
1598
- }
1599
-
1600
-
1601
- void LCodeGen::DoIsSmi(LIsSmi* instr) {
1602
- Operand input = ToOperand(instr->InputAt(0));
1603
- Register result = ToRegister(instr->result());
1604
-
1605
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1606
- __ test(input, Immediate(kSmiTagMask));
1607
- __ mov(result, factory()->true_value());
1608
- NearLabel done;
1609
- __ j(zero, &done);
1610
- __ mov(result, factory()->false_value());
1611
- __ bind(&done);
1612
- }
1613
-
1614
-
1615
- void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
1616
- Operand input = ToOperand(instr->InputAt(0));
1617
-
1618
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1619
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1620
-
1621
- __ test(input, Immediate(kSmiTagMask));
1622
- EmitBranch(true_block, false_block, zero);
1623
- }
1624
-
1625
-
1626
- static InstanceType TestType(HHasInstanceType* instr) {
1627
- InstanceType from = instr->from();
1628
- InstanceType to = instr->to();
1629
- if (from == FIRST_TYPE) return to;
1630
- ASSERT(from == to || to == LAST_TYPE);
1631
- return from;
1632
- }
1633
-
1634
-
1635
- static Condition BranchCondition(HHasInstanceType* instr) {
1636
- InstanceType from = instr->from();
1637
- InstanceType to = instr->to();
1638
- if (from == to) return equal;
1639
- if (to == LAST_TYPE) return above_equal;
1640
- if (from == FIRST_TYPE) return below_equal;
1641
- UNREACHABLE();
1642
- return equal;
1643
- }
1644
-
1645
-
1646
- void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1647
- Register input = ToRegister(instr->InputAt(0));
1648
- Register result = ToRegister(instr->result());
1649
-
1650
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1651
- __ test(input, Immediate(kSmiTagMask));
1652
- NearLabel done, is_false;
1653
- __ j(zero, &is_false);
1654
- __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1655
- __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false);
1656
- __ mov(result, factory()->true_value());
1657
- __ jmp(&done);
1658
- __ bind(&is_false);
1659
- __ mov(result, factory()->false_value());
1660
- __ bind(&done);
1661
- }
1662
-
1663
-
1664
- void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1665
- Register input = ToRegister(instr->InputAt(0));
1666
- Register temp = ToRegister(instr->TempAt(0));
1667
-
1668
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1669
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1670
-
1671
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1672
-
1673
- __ test(input, Immediate(kSmiTagMask));
1674
- __ j(zero, false_label);
1675
-
1676
- __ CmpObjectType(input, TestType(instr->hydrogen()), temp);
1677
- EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen()));
1678
- }
1679
-
1680
-
1681
- void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
1682
- Register input = ToRegister(instr->InputAt(0));
1683
- Register result = ToRegister(instr->result());
1684
-
1685
- if (FLAG_debug_code) {
1686
- __ AbortIfNotString(input);
1687
- }
1688
-
1689
- __ mov(result, FieldOperand(input, String::kHashFieldOffset));
1690
- __ IndexFromHash(result, result);
1691
- }
1692
-
1693
-
1694
- void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1695
- Register input = ToRegister(instr->InputAt(0));
1696
- Register result = ToRegister(instr->result());
1697
-
1698
- ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1699
- __ mov(result, factory()->true_value());
1700
- __ test(FieldOperand(input, String::kHashFieldOffset),
1701
- Immediate(String::kContainsCachedArrayIndexMask));
1702
- NearLabel done;
1703
- __ j(zero, &done);
1704
- __ mov(result, factory()->false_value());
1705
- __ bind(&done);
1706
- }
1707
-
1708
-
1709
- void LCodeGen::DoHasCachedArrayIndexAndBranch(
1710
- LHasCachedArrayIndexAndBranch* instr) {
1711
- Register input = ToRegister(instr->InputAt(0));
1712
-
1713
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1714
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1715
-
1716
- __ test(FieldOperand(input, String::kHashFieldOffset),
1717
- Immediate(String::kContainsCachedArrayIndexMask));
1718
- EmitBranch(true_block, false_block, equal);
1719
- }
1720
-
1721
-
1722
- // Branches to a label or falls through with the answer in the z flag. Trashes
1723
- // the temp registers, but not the input. Only input and temp2 may alias.
1724
- void LCodeGen::EmitClassOfTest(Label* is_true,
1725
- Label* is_false,
1726
- Handle<String>class_name,
1727
- Register input,
1728
- Register temp,
1729
- Register temp2) {
1730
- ASSERT(!input.is(temp));
1731
- ASSERT(!temp.is(temp2)); // But input and temp2 may be the same register.
1732
- __ test(input, Immediate(kSmiTagMask));
1733
- __ j(zero, is_false);
1734
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, temp);
1735
- __ j(below, is_false);
1736
-
1737
- // Map is now in temp.
1738
- // Functions have class 'Function'.
1739
- __ CmpInstanceType(temp, JS_FUNCTION_TYPE);
1740
- if (class_name->IsEqualTo(CStrVector("Function"))) {
1741
- __ j(equal, is_true);
1742
- } else {
1743
- __ j(equal, is_false);
1744
- }
1745
-
1746
- // Check if the constructor in the map is a function.
1747
- __ mov(temp, FieldOperand(temp, Map::kConstructorOffset));
1748
-
1749
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
1750
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
1751
- // LAST_JS_OBJECT_TYPE.
1752
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
1753
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
1754
-
1755
- // Objects with a non-function constructor have class 'Object'.
1756
- __ CmpObjectType(temp, JS_FUNCTION_TYPE, temp2);
1757
- if (class_name->IsEqualTo(CStrVector("Object"))) {
1758
- __ j(not_equal, is_true);
1759
- } else {
1760
- __ j(not_equal, is_false);
1761
- }
1762
-
1763
- // temp now contains the constructor function. Grab the
1764
- // instance class name from there.
1765
- __ mov(temp, FieldOperand(temp, JSFunction::kSharedFunctionInfoOffset));
1766
- __ mov(temp, FieldOperand(temp,
1767
- SharedFunctionInfo::kInstanceClassNameOffset));
1768
- // The class name we are testing against is a symbol because it's a literal.
1769
- // The name in the constructor is a symbol because of the way the context is
1770
- // booted. This routine isn't expected to work for random API-created
1771
- // classes and it doesn't have to because you can't access it with natives
1772
- // syntax. Since both sides are symbols it is sufficient to use an identity
1773
- // comparison.
1774
- __ cmp(temp, class_name);
1775
- // End with the answer in the z flag.
1776
- }
1777
-
1778
-
1779
- void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
1780
- Register input = ToRegister(instr->InputAt(0));
1781
- Register result = ToRegister(instr->result());
1782
- ASSERT(input.is(result));
1783
- Register temp = ToRegister(instr->TempAt(0));
1784
- Handle<String> class_name = instr->hydrogen()->class_name();
1785
- NearLabel done;
1786
- Label is_true, is_false;
1787
-
1788
- EmitClassOfTest(&is_true, &is_false, class_name, input, temp, input);
1789
-
1790
- __ j(not_equal, &is_false);
1791
-
1792
- __ bind(&is_true);
1793
- __ mov(result, factory()->true_value());
1794
- __ jmp(&done);
1795
-
1796
- __ bind(&is_false);
1797
- __ mov(result, factory()->false_value());
1798
- __ bind(&done);
1799
- }
1800
-
1801
-
1802
- void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1803
- Register input = ToRegister(instr->InputAt(0));
1804
- Register temp = ToRegister(instr->TempAt(0));
1805
- Register temp2 = ToRegister(instr->TempAt(1));
1806
- if (input.is(temp)) {
1807
- // Swap.
1808
- Register swapper = temp;
1809
- temp = temp2;
1810
- temp2 = swapper;
1811
- }
1812
- Handle<String> class_name = instr->hydrogen()->class_name();
1813
-
1814
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1815
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1816
-
1817
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
1818
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
1819
-
1820
- EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2);
1821
-
1822
- EmitBranch(true_block, false_block, equal);
1823
- }
1824
-
1825
-
1826
- void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
1827
- Register reg = ToRegister(instr->InputAt(0));
1828
- int true_block = instr->true_block_id();
1829
- int false_block = instr->false_block_id();
1830
-
1831
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1832
- EmitBranch(true_block, false_block, equal);
1833
- }
1834
-
1835
-
1836
- void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1837
- // Object and function are in fixed registers defined by the stub.
1838
- ASSERT(ToRegister(instr->context()).is(esi));
1839
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1840
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1841
-
1842
- NearLabel true_value, done;
1843
- __ test(eax, Operand(eax));
1844
- __ j(zero, &true_value);
1845
- __ mov(ToRegister(instr->result()), factory()->false_value());
1846
- __ jmp(&done);
1847
- __ bind(&true_value);
1848
- __ mov(ToRegister(instr->result()), factory()->true_value());
1849
- __ bind(&done);
1850
- }
1851
-
1852
-
1853
- void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1854
- ASSERT(ToRegister(instr->context()).is(esi));
1855
- int true_block = chunk_->LookupDestination(instr->true_block_id());
1856
- int false_block = chunk_->LookupDestination(instr->false_block_id());
1857
-
1858
- InstanceofStub stub(InstanceofStub::kArgsInRegisters);
1859
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1860
- __ test(eax, Operand(eax));
1861
- EmitBranch(true_block, false_block, zero);
1862
- }
1863
-
1864
-
1865
- void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
1866
- class DeferredInstanceOfKnownGlobal: public LDeferredCode {
1867
- public:
1868
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
1869
- LInstanceOfKnownGlobal* instr)
1870
- : LDeferredCode(codegen), instr_(instr) { }
1871
- virtual void Generate() {
1872
- codegen()->DoDeferredLInstanceOfKnownGlobal(instr_, &map_check_);
1873
- }
1874
-
1875
- Label* map_check() { return &map_check_; }
1876
-
1877
- private:
1878
- LInstanceOfKnownGlobal* instr_;
1879
- Label map_check_;
1880
- };
1881
-
1882
- DeferredInstanceOfKnownGlobal* deferred;
1883
- deferred = new DeferredInstanceOfKnownGlobal(this, instr);
1884
-
1885
- Label done, false_result;
1886
- Register object = ToRegister(instr->InputAt(0));
1887
- Register temp = ToRegister(instr->TempAt(0));
1888
-
1889
- // A Smi is not an instance of anything.
1890
- __ test(object, Immediate(kSmiTagMask));
1891
- __ j(zero, &false_result, not_taken);
1892
-
1893
- // This is the inlined call site instanceof cache. The two occurences of the
1894
- // hole value will be patched to the last map/result pair generated by the
1895
- // instanceof stub.
1896
- NearLabel cache_miss;
1897
- Register map = ToRegister(instr->TempAt(0));
1898
- __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
1899
- __ bind(deferred->map_check()); // Label for calculating code patching.
1900
- __ cmp(map, factory()->the_hole_value()); // Patched to cached map.
1901
- __ j(not_equal, &cache_miss, not_taken);
1902
- __ mov(eax, factory()->the_hole_value()); // Patched to either true or false.
1903
- __ jmp(&done);
1904
-
1905
- // The inlined call site cache did not match. Check for null and string
1906
- // before calling the deferred code.
1907
- __ bind(&cache_miss);
1908
- // Null is not an instance of anything.
1909
- __ cmp(object, factory()->null_value());
1910
- __ j(equal, &false_result);
1911
-
1912
- // String values are not instances of anything.
1913
- Condition is_string = masm_->IsObjectStringType(object, temp, temp);
1914
- __ j(is_string, &false_result);
1915
-
1916
- // Go to the deferred code.
1917
- __ jmp(deferred->entry());
1918
-
1919
- __ bind(&false_result);
1920
- __ mov(ToRegister(instr->result()), factory()->false_value());
1921
-
1922
- // Here result has either true or false. Deferred code also produces true or
1923
- // false object.
1924
- __ bind(deferred->exit());
1925
- __ bind(&done);
1926
- }
1927
-
1928
-
1929
- void LCodeGen::DoDeferredLInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
1930
- Label* map_check) {
1931
- __ PushSafepointRegisters();
1932
-
1933
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
1934
- flags = static_cast<InstanceofStub::Flags>(
1935
- flags | InstanceofStub::kArgsInRegisters);
1936
- flags = static_cast<InstanceofStub::Flags>(
1937
- flags | InstanceofStub::kCallSiteInlineCheck);
1938
- flags = static_cast<InstanceofStub::Flags>(
1939
- flags | InstanceofStub::kReturnTrueFalseObject);
1940
- InstanceofStub stub(flags);
1941
-
1942
- // Get the temp register reserved by the instruction. This needs to be edi as
1943
- // its slot of the pushing of safepoint registers is used to communicate the
1944
- // offset to the location of the map check.
1945
- Register temp = ToRegister(instr->TempAt(0));
1946
- ASSERT(temp.is(edi));
1947
- __ mov(InstanceofStub::right(), Immediate(instr->function()));
1948
- static const int kAdditionalDelta = 16;
1949
- int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
1950
- __ mov(temp, Immediate(delta));
1951
- __ StoreToSafepointRegisterSlot(temp, temp);
1952
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
1953
- // Put the result value into the eax slot and restore all registers.
1954
- __ StoreToSafepointRegisterSlot(eax, eax);
1955
- __ PopSafepointRegisters();
1956
- }
1957
-
1958
-
1959
- static Condition ComputeCompareCondition(Token::Value op) {
1960
- switch (op) {
1961
- case Token::EQ_STRICT:
1962
- case Token::EQ:
1963
- return equal;
1964
- case Token::LT:
1965
- return less;
1966
- case Token::GT:
1967
- return greater;
1968
- case Token::LTE:
1969
- return less_equal;
1970
- case Token::GTE:
1971
- return greater_equal;
1972
- default:
1973
- UNREACHABLE();
1974
- return no_condition;
1975
- }
1976
- }
1977
-
1978
-
1979
- void LCodeGen::DoCmpT(LCmpT* instr) {
1980
- Token::Value op = instr->op();
1981
-
1982
- Handle<Code> ic = CompareIC::GetUninitialized(op);
1983
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
1984
-
1985
- Condition condition = ComputeCompareCondition(op);
1986
- if (op == Token::GT || op == Token::LTE) {
1987
- condition = ReverseCondition(condition);
1988
- }
1989
- NearLabel true_value, done;
1990
- __ test(eax, Operand(eax));
1991
- __ j(condition, &true_value);
1992
- __ mov(ToRegister(instr->result()), factory()->false_value());
1993
- __ jmp(&done);
1994
- __ bind(&true_value);
1995
- __ mov(ToRegister(instr->result()), factory()->true_value());
1996
- __ bind(&done);
1997
- }
1998
-
1999
-
2000
- void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
2001
- Token::Value op = instr->op();
2002
- int true_block = chunk_->LookupDestination(instr->true_block_id());
2003
- int false_block = chunk_->LookupDestination(instr->false_block_id());
2004
-
2005
- Handle<Code> ic = CompareIC::GetUninitialized(op);
2006
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
2007
-
2008
- // The compare stub expects compare condition and the input operands
2009
- // reversed for GT and LTE.
2010
- Condition condition = ComputeCompareCondition(op);
2011
- if (op == Token::GT || op == Token::LTE) {
2012
- condition = ReverseCondition(condition);
2013
- }
2014
- __ test(eax, Operand(eax));
2015
- EmitBranch(true_block, false_block, condition);
2016
- }
2017
-
2018
-
2019
- void LCodeGen::DoReturn(LReturn* instr) {
2020
- if (FLAG_trace) {
2021
- // Preserve the return value on the stack and rely on the runtime call
2022
- // to return the value in the same register. We're leaving the code
2023
- // managed by the register allocator and tearing down the frame, it's
2024
- // safe to write to the context register.
2025
- __ push(eax);
2026
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2027
- __ CallRuntime(Runtime::kTraceExit, 1);
2028
- }
2029
- __ mov(esp, ebp);
2030
- __ pop(ebp);
2031
- __ Ret((ParameterCount() + 1) * kPointerSize, ecx);
2032
- }
2033
-
2034
-
2035
- void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2036
- Register result = ToRegister(instr->result());
2037
- __ mov(result, Operand::Cell(instr->hydrogen()->cell()));
2038
- if (instr->hydrogen()->check_hole_value()) {
2039
- __ cmp(result, factory()->the_hole_value());
2040
- DeoptimizeIf(equal, instr->environment());
2041
- }
2042
- }
2043
-
2044
-
2045
- void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
2046
- ASSERT(ToRegister(instr->context()).is(esi));
2047
- ASSERT(ToRegister(instr->global_object()).is(eax));
2048
- ASSERT(ToRegister(instr->result()).is(eax));
2049
-
2050
- __ mov(ecx, instr->name());
2051
- RelocInfo::Mode mode = instr->for_typeof() ? RelocInfo::CODE_TARGET :
2052
- RelocInfo::CODE_TARGET_CONTEXT;
2053
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2054
- CallCode(ic, mode, instr);
2055
- }
2056
-
2057
-
2058
- void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
2059
- Register value = ToRegister(instr->InputAt(0));
2060
- Operand cell_operand = Operand::Cell(instr->hydrogen()->cell());
2061
-
2062
- // If the cell we are storing to contains the hole it could have
2063
- // been deleted from the property dictionary. In that case, we need
2064
- // to update the property details in the property dictionary to mark
2065
- // it as no longer deleted. We deoptimize in that case.
2066
- if (instr->hydrogen()->check_hole_value()) {
2067
- __ cmp(cell_operand, factory()->the_hole_value());
2068
- DeoptimizeIf(equal, instr->environment());
2069
- }
2070
-
2071
- // Store the value.
2072
- __ mov(cell_operand, value);
2073
- }
2074
-
2075
-
2076
- void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
2077
- ASSERT(ToRegister(instr->context()).is(esi));
2078
- ASSERT(ToRegister(instr->global_object()).is(edx));
2079
- ASSERT(ToRegister(instr->value()).is(eax));
2080
-
2081
- __ mov(ecx, instr->name());
2082
- Handle<Code> ic = isolate()->builtins()->StoreIC_Initialize();
2083
- CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2084
- }
2085
-
2086
-
2087
- void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
2088
- Register context = ToRegister(instr->context());
2089
- Register result = ToRegister(instr->result());
2090
- __ mov(result, ContextOperand(context, instr->slot_index()));
2091
- }
2092
-
2093
-
2094
- void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
2095
- Register context = ToRegister(instr->context());
2096
- Register value = ToRegister(instr->value());
2097
- __ mov(ContextOperand(context, instr->slot_index()), value);
2098
- if (instr->needs_write_barrier()) {
2099
- Register temp = ToRegister(instr->TempAt(0));
2100
- int offset = Context::SlotOffset(instr->slot_index());
2101
- __ RecordWrite(context, offset, value, temp);
2102
- }
2103
- }
2104
-
2105
-
2106
- void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
2107
- Register object = ToRegister(instr->object());
2108
- Register result = ToRegister(instr->result());
2109
- if (instr->hydrogen()->is_in_object()) {
2110
- __ mov(result, FieldOperand(object, instr->hydrogen()->offset()));
2111
- } else {
2112
- __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2113
- __ mov(result, FieldOperand(result, instr->hydrogen()->offset()));
2114
- }
2115
- }
2116
-
2117
-
2118
- void LCodeGen::EmitLoadField(Register result,
2119
- Register object,
2120
- Handle<Map> type,
2121
- Handle<String> name) {
2122
- LookupResult lookup;
2123
- type->LookupInDescriptors(NULL, *name, &lookup);
2124
- ASSERT(lookup.IsProperty() && lookup.type() == FIELD);
2125
- int index = lookup.GetLocalFieldIndexFromMap(*type);
2126
- int offset = index * kPointerSize;
2127
- if (index < 0) {
2128
- // Negative property indices are in-object properties, indexed
2129
- // from the end of the fixed part of the object.
2130
- __ mov(result, FieldOperand(object, offset + type->instance_size()));
2131
- } else {
2132
- // Non-negative property indices are in the properties array.
2133
- __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2134
- __ mov(result, FieldOperand(result, offset + FixedArray::kHeaderSize));
2135
- }
2136
- }
2137
-
2138
-
2139
- void LCodeGen::DoLoadNamedFieldPolymorphic(LLoadNamedFieldPolymorphic* instr) {
2140
- Register object = ToRegister(instr->object());
2141
- Register result = ToRegister(instr->result());
2142
-
2143
- int map_count = instr->hydrogen()->types()->length();
2144
- Handle<String> name = instr->hydrogen()->name();
2145
- if (map_count == 0) {
2146
- ASSERT(instr->hydrogen()->need_generic());
2147
- __ mov(ecx, name);
2148
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2149
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
2150
- } else {
2151
- NearLabel done;
2152
- for (int i = 0; i < map_count - 1; ++i) {
2153
- Handle<Map> map = instr->hydrogen()->types()->at(i);
2154
- NearLabel next;
2155
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2156
- __ j(not_equal, &next);
2157
- EmitLoadField(result, object, map, name);
2158
- __ jmp(&done);
2159
- __ bind(&next);
2160
- }
2161
- Handle<Map> map = instr->hydrogen()->types()->last();
2162
- __ cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2163
- if (instr->hydrogen()->need_generic()) {
2164
- NearLabel generic;
2165
- __ j(not_equal, &generic);
2166
- EmitLoadField(result, object, map, name);
2167
- __ jmp(&done);
2168
- __ bind(&generic);
2169
- __ mov(ecx, name);
2170
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2171
- CallCode(ic, RelocInfo::CODE_TARGET, instr, false);
2172
- } else {
2173
- DeoptimizeIf(not_equal, instr->environment());
2174
- EmitLoadField(result, object, map, name);
2175
- }
2176
- __ bind(&done);
2177
- }
2178
- }
2179
-
2180
-
2181
- void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
2182
- ASSERT(ToRegister(instr->context()).is(esi));
2183
- ASSERT(ToRegister(instr->object()).is(eax));
2184
- ASSERT(ToRegister(instr->result()).is(eax));
2185
-
2186
- __ mov(ecx, instr->name());
2187
- Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2188
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2189
- }
2190
-
2191
-
2192
- void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2193
- Register function = ToRegister(instr->function());
2194
- Register temp = ToRegister(instr->TempAt(0));
2195
- Register result = ToRegister(instr->result());
2196
-
2197
- // Check that the function really is a function.
2198
- __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
2199
- DeoptimizeIf(not_equal, instr->environment());
2200
-
2201
- // Check whether the function has an instance prototype.
2202
- NearLabel non_instance;
2203
- __ test_b(FieldOperand(result, Map::kBitFieldOffset),
2204
- 1 << Map::kHasNonInstancePrototype);
2205
- __ j(not_zero, &non_instance);
2206
-
2207
- // Get the prototype or initial map from the function.
2208
- __ mov(result,
2209
- FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2210
-
2211
- // Check that the function has a prototype or an initial map.
2212
- __ cmp(Operand(result), Immediate(factory()->the_hole_value()));
2213
- DeoptimizeIf(equal, instr->environment());
2214
-
2215
- // If the function does not have an initial map, we're done.
2216
- NearLabel done;
2217
- __ CmpObjectType(result, MAP_TYPE, temp);
2218
- __ j(not_equal, &done);
2219
-
2220
- // Get the prototype from the initial map.
2221
- __ mov(result, FieldOperand(result, Map::kPrototypeOffset));
2222
- __ jmp(&done);
2223
-
2224
- // Non-instance prototype: Fetch prototype from constructor field
2225
- // in the function's map.
2226
- __ bind(&non_instance);
2227
- __ mov(result, FieldOperand(result, Map::kConstructorOffset));
2228
-
2229
- // All done.
2230
- __ bind(&done);
2231
- }
2232
-
2233
-
2234
- void LCodeGen::DoLoadElements(LLoadElements* instr) {
2235
- Register result = ToRegister(instr->result());
2236
- Register input = ToRegister(instr->InputAt(0));
2237
- __ mov(result, FieldOperand(input, JSObject::kElementsOffset));
2238
- if (FLAG_debug_code) {
2239
- NearLabel done;
2240
- __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2241
- Immediate(factory()->fixed_array_map()));
2242
- __ j(equal, &done);
2243
- __ cmp(FieldOperand(result, HeapObject::kMapOffset),
2244
- Immediate(factory()->fixed_cow_array_map()));
2245
- __ j(equal, &done);
2246
- Register temp((result.is(eax)) ? ebx : eax);
2247
- __ push(temp);
2248
- __ mov(temp, FieldOperand(result, HeapObject::kMapOffset));
2249
- __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
2250
- __ sub(Operand(temp), Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
2251
- __ cmp(Operand(temp), Immediate(kExternalArrayTypeCount));
2252
- __ pop(temp);
2253
- __ Check(below, "Check for fast elements or pixel array failed.");
2254
- __ bind(&done);
2255
- }
2256
- }
2257
-
2258
-
2259
- void LCodeGen::DoLoadExternalArrayPointer(
2260
- LLoadExternalArrayPointer* instr) {
2261
- Register result = ToRegister(instr->result());
2262
- Register input = ToRegister(instr->InputAt(0));
2263
- __ mov(result, FieldOperand(input,
2264
- ExternalArray::kExternalPointerOffset));
2265
- }
2266
-
2267
-
2268
- void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
2269
- Register arguments = ToRegister(instr->arguments());
2270
- Register length = ToRegister(instr->length());
2271
- Operand index = ToOperand(instr->index());
2272
- Register result = ToRegister(instr->result());
2273
-
2274
- __ sub(length, index);
2275
- DeoptimizeIf(below_equal, instr->environment());
2276
-
2277
- // There are two words between the frame pointer and the last argument.
2278
- // Subtracting from length accounts for one of them add one more.
2279
- __ mov(result, Operand(arguments, length, times_4, kPointerSize));
2280
- }
2281
-
2282
-
2283
- void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
2284
- Register elements = ToRegister(instr->elements());
2285
- Register key = ToRegister(instr->key());
2286
- Register result = ToRegister(instr->result());
2287
- ASSERT(result.is(elements));
2288
-
2289
- // Load the result.
2290
- __ mov(result, FieldOperand(elements,
2291
- key,
2292
- times_pointer_size,
2293
- FixedArray::kHeaderSize));
2294
-
2295
- // Check for the hole value.
2296
- __ cmp(result, factory()->the_hole_value());
2297
- DeoptimizeIf(equal, instr->environment());
2298
- }
2299
-
2300
-
2301
- void LCodeGen::DoLoadKeyedSpecializedArrayElement(
2302
- LLoadKeyedSpecializedArrayElement* instr) {
2303
- Register external_pointer = ToRegister(instr->external_pointer());
2304
- Register key = ToRegister(instr->key());
2305
- ExternalArrayType array_type = instr->array_type();
2306
- if (array_type == kExternalFloatArray) {
2307
- XMMRegister result(ToDoubleRegister(instr->result()));
2308
- __ movss(result, Operand(external_pointer, key, times_4, 0));
2309
- __ cvtss2sd(result, result);
2310
- } else {
2311
- Register result(ToRegister(instr->result()));
2312
- switch (array_type) {
2313
- case kExternalByteArray:
2314
- __ movsx_b(result, Operand(external_pointer, key, times_1, 0));
2315
- break;
2316
- case kExternalUnsignedByteArray:
2317
- case kExternalPixelArray:
2318
- __ movzx_b(result, Operand(external_pointer, key, times_1, 0));
2319
- break;
2320
- case kExternalShortArray:
2321
- __ movsx_w(result, Operand(external_pointer, key, times_2, 0));
2322
- break;
2323
- case kExternalUnsignedShortArray:
2324
- __ movzx_w(result, Operand(external_pointer, key, times_2, 0));
2325
- break;
2326
- case kExternalIntArray:
2327
- __ mov(result, Operand(external_pointer, key, times_4, 0));
2328
- break;
2329
- case kExternalUnsignedIntArray:
2330
- __ mov(result, Operand(external_pointer, key, times_4, 0));
2331
- __ test(result, Operand(result));
2332
- // TODO(danno): we could be more clever here, perhaps having a special
2333
- // version of the stub that detects if the overflow case actually
2334
- // happens, and generate code that returns a double rather than int.
2335
- DeoptimizeIf(negative, instr->environment());
2336
- break;
2337
- case kExternalFloatArray:
2338
- UNREACHABLE();
2339
- break;
2340
- }
2341
- }
2342
- }
2343
-
2344
-
2345
- void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
2346
- ASSERT(ToRegister(instr->context()).is(esi));
2347
- ASSERT(ToRegister(instr->object()).is(edx));
2348
- ASSERT(ToRegister(instr->key()).is(eax));
2349
-
2350
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2351
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2352
- }
2353
-
2354
-
2355
- void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2356
- Register result = ToRegister(instr->result());
2357
-
2358
- // Check for arguments adapter frame.
2359
- NearLabel done, adapted;
2360
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2361
- __ mov(result, Operand(result, StandardFrameConstants::kContextOffset));
2362
- __ cmp(Operand(result),
2363
- Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2364
- __ j(equal, &adapted);
2365
-
2366
- // No arguments adaptor frame.
2367
- __ mov(result, Operand(ebp));
2368
- __ jmp(&done);
2369
-
2370
- // Arguments adaptor frame present.
2371
- __ bind(&adapted);
2372
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2373
-
2374
- // Result is the frame pointer for the frame if not adapted and for the real
2375
- // frame below the adaptor frame if adapted.
2376
- __ bind(&done);
2377
- }
2378
-
2379
-
2380
- void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2381
- Operand elem = ToOperand(instr->InputAt(0));
2382
- Register result = ToRegister(instr->result());
2383
-
2384
- NearLabel done;
2385
-
2386
- // If no arguments adaptor frame the number of arguments is fixed.
2387
- __ cmp(ebp, elem);
2388
- __ mov(result, Immediate(scope()->num_parameters()));
2389
- __ j(equal, &done);
2390
-
2391
- // Arguments adaptor frame present. Get argument length from there.
2392
- __ mov(result, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2393
- __ mov(result, Operand(result,
2394
- ArgumentsAdaptorFrameConstants::kLengthOffset));
2395
- __ SmiUntag(result);
2396
-
2397
- // Argument length is in result register.
2398
- __ bind(&done);
2399
- }
2400
-
2401
-
2402
- void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2403
- Register receiver = ToRegister(instr->receiver());
2404
- Register function = ToRegister(instr->function());
2405
- Register length = ToRegister(instr->length());
2406
- Register elements = ToRegister(instr->elements());
2407
- Register scratch = ToRegister(instr->TempAt(0));
2408
- ASSERT(receiver.is(eax)); // Used for parameter count.
2409
- ASSERT(function.is(edi)); // Required by InvokeFunction.
2410
- ASSERT(ToRegister(instr->result()).is(eax));
2411
-
2412
- // If the receiver is null or undefined, we have to pass the global object
2413
- // as a receiver.
2414
- NearLabel global_object, receiver_ok;
2415
- __ cmp(receiver, factory()->null_value());
2416
- __ j(equal, &global_object);
2417
- __ cmp(receiver, factory()->undefined_value());
2418
- __ j(equal, &global_object);
2419
-
2420
- // The receiver should be a JS object.
2421
- __ test(receiver, Immediate(kSmiTagMask));
2422
- DeoptimizeIf(equal, instr->environment());
2423
- __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, scratch);
2424
- DeoptimizeIf(below, instr->environment());
2425
- __ jmp(&receiver_ok);
2426
-
2427
- __ bind(&global_object);
2428
- // TODO(kmillikin): We have a hydrogen value for the global object. See
2429
- // if it's better to use it than to explicitly fetch it from the context
2430
- // here.
2431
- __ mov(receiver, Operand(ebp, StandardFrameConstants::kContextOffset));
2432
- __ mov(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2433
- __ bind(&receiver_ok);
2434
-
2435
- // Copy the arguments to this function possibly from the
2436
- // adaptor frame below it.
2437
- const uint32_t kArgumentsLimit = 1 * KB;
2438
- __ cmp(length, kArgumentsLimit);
2439
- DeoptimizeIf(above, instr->environment());
2440
-
2441
- __ push(receiver);
2442
- __ mov(receiver, length);
2443
-
2444
- // Loop through the arguments pushing them onto the execution
2445
- // stack.
2446
- NearLabel invoke, loop;
2447
- // length is a small non-negative integer, due to the test above.
2448
- __ test(length, Operand(length));
2449
- __ j(zero, &invoke);
2450
- __ bind(&loop);
2451
- __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2452
- __ dec(length);
2453
- __ j(not_zero, &loop);
2454
-
2455
- // Invoke the function.
2456
- __ bind(&invoke);
2457
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2458
- LPointerMap* pointers = instr->pointer_map();
2459
- LEnvironment* env = instr->deoptimization_environment();
2460
- RecordPosition(pointers->position());
2461
- RegisterEnvironmentForDeoptimization(env);
2462
- SafepointGenerator safepoint_generator(this,
2463
- pointers,
2464
- env->deoptimization_index());
2465
- v8::internal::ParameterCount actual(eax);
2466
- __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
2467
- }
2468
-
2469
-
2470
- void LCodeGen::DoPushArgument(LPushArgument* instr) {
2471
- LOperand* argument = instr->InputAt(0);
2472
- if (argument->IsConstantOperand()) {
2473
- __ push(ToImmediate(argument));
2474
- } else {
2475
- __ push(ToOperand(argument));
2476
- }
2477
- }
2478
-
2479
-
2480
- void LCodeGen::DoContext(LContext* instr) {
2481
- Register result = ToRegister(instr->result());
2482
- __ mov(result, Operand(ebp, StandardFrameConstants::kContextOffset));
2483
- }
2484
-
2485
-
2486
- void LCodeGen::DoOuterContext(LOuterContext* instr) {
2487
- Register context = ToRegister(instr->context());
2488
- Register result = ToRegister(instr->result());
2489
- __ mov(result, Operand(context, Context::SlotOffset(Context::CLOSURE_INDEX)));
2490
- __ mov(result, FieldOperand(result, JSFunction::kContextOffset));
2491
- }
2492
-
2493
-
2494
- void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
2495
- Register context = ToRegister(instr->context());
2496
- Register result = ToRegister(instr->result());
2497
- __ mov(result, Operand(context, Context::SlotOffset(Context::GLOBAL_INDEX)));
2498
- }
2499
-
2500
-
2501
- void LCodeGen::DoGlobalReceiver(LGlobalReceiver* instr) {
2502
- Register global = ToRegister(instr->global());
2503
- Register result = ToRegister(instr->result());
2504
- __ mov(result, FieldOperand(global, GlobalObject::kGlobalReceiverOffset));
2505
- }
2506
-
2507
-
2508
- void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
2509
- int arity,
2510
- LInstruction* instr) {
2511
- // Change context if needed.
2512
- bool change_context =
2513
- (info()->closure()->context() != function->context()) ||
2514
- scope()->contains_with() ||
2515
- (scope()->num_heap_slots() > 0);
2516
- if (change_context) {
2517
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2518
- } else {
2519
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2520
- }
2521
-
2522
- // Set eax to arguments count if adaption is not needed. Assumes that eax
2523
- // is available to write to at this point.
2524
- if (!function->NeedsArgumentsAdaption()) {
2525
- __ mov(eax, arity);
2526
- }
2527
-
2528
- LPointerMap* pointers = instr->pointer_map();
2529
- RecordPosition(pointers->position());
2530
-
2531
- // Invoke function.
2532
- if (*function == *info()->closure()) {
2533
- __ CallSelf();
2534
- } else {
2535
- __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
2536
- }
2537
-
2538
- // Setup deoptimization.
2539
- RegisterLazyDeoptimization(instr);
2540
- }
2541
-
2542
-
2543
- void LCodeGen::DoCallConstantFunction(LCallConstantFunction* instr) {
2544
- ASSERT(ToRegister(instr->result()).is(eax));
2545
- __ mov(edi, instr->function());
2546
- CallKnownFunction(instr->function(), instr->arity(), instr);
2547
- }
2548
-
2549
-
2550
- void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr) {
2551
- Register input_reg = ToRegister(instr->InputAt(0));
2552
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
2553
- factory()->heap_number_map());
2554
- DeoptimizeIf(not_equal, instr->environment());
2555
-
2556
- Label done;
2557
- Register tmp = input_reg.is(eax) ? ecx : eax;
2558
- Register tmp2 = tmp.is(ecx) ? edx : input_reg.is(ecx) ? edx : ecx;
2559
-
2560
- // Preserve the value of all registers.
2561
- __ PushSafepointRegisters();
2562
-
2563
- Label negative;
2564
- __ mov(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2565
- // Check the sign of the argument. If the argument is positive, just
2566
- // return it. We do not need to patch the stack since |input| and
2567
- // |result| are the same register and |input| will be restored
2568
- // unchanged by popping safepoint registers.
2569
- __ test(tmp, Immediate(HeapNumber::kSignMask));
2570
- __ j(not_zero, &negative);
2571
- __ jmp(&done);
2572
-
2573
- __ bind(&negative);
2574
-
2575
- Label allocated, slow;
2576
- __ AllocateHeapNumber(tmp, tmp2, no_reg, &slow);
2577
- __ jmp(&allocated);
2578
-
2579
- // Slow case: Call the runtime system to do the number allocation.
2580
- __ bind(&slow);
2581
-
2582
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2583
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
2584
- RecordSafepointWithRegisters(
2585
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
2586
- // Set the pointer to the new heap number in tmp.
2587
- if (!tmp.is(eax)) __ mov(tmp, eax);
2588
-
2589
- // Restore input_reg after call to runtime.
2590
- __ LoadFromSafepointRegisterSlot(input_reg, input_reg);
2591
-
2592
- __ bind(&allocated);
2593
- __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kExponentOffset));
2594
- __ and_(tmp2, ~HeapNumber::kSignMask);
2595
- __ mov(FieldOperand(tmp, HeapNumber::kExponentOffset), tmp2);
2596
- __ mov(tmp2, FieldOperand(input_reg, HeapNumber::kMantissaOffset));
2597
- __ mov(FieldOperand(tmp, HeapNumber::kMantissaOffset), tmp2);
2598
- __ StoreToSafepointRegisterSlot(input_reg, tmp);
2599
-
2600
- __ bind(&done);
2601
- __ PopSafepointRegisters();
2602
- }
2603
-
2604
-
2605
- void LCodeGen::EmitIntegerMathAbs(LUnaryMathOperation* instr) {
2606
- Register input_reg = ToRegister(instr->InputAt(0));
2607
- __ test(input_reg, Operand(input_reg));
2608
- Label is_positive;
2609
- __ j(not_sign, &is_positive);
2610
- __ neg(input_reg);
2611
- __ test(input_reg, Operand(input_reg));
2612
- DeoptimizeIf(negative, instr->environment());
2613
- __ bind(&is_positive);
2614
- }
2615
-
2616
-
2617
- void LCodeGen::DoMathAbs(LUnaryMathOperation* instr) {
2618
- // Class for deferred case.
2619
- class DeferredMathAbsTaggedHeapNumber: public LDeferredCode {
2620
- public:
2621
- DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen,
2622
- LUnaryMathOperation* instr)
2623
- : LDeferredCode(codegen), instr_(instr) { }
2624
- virtual void Generate() {
2625
- codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
2626
- }
2627
- private:
2628
- LUnaryMathOperation* instr_;
2629
- };
2630
-
2631
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2632
- Representation r = instr->hydrogen()->value()->representation();
2633
-
2634
- if (r.IsDouble()) {
2635
- XMMRegister scratch = xmm0;
2636
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2637
- __ pxor(scratch, scratch);
2638
- __ subsd(scratch, input_reg);
2639
- __ pand(input_reg, scratch);
2640
- } else if (r.IsInteger32()) {
2641
- EmitIntegerMathAbs(instr);
2642
- } else { // Tagged case.
2643
- DeferredMathAbsTaggedHeapNumber* deferred =
2644
- new DeferredMathAbsTaggedHeapNumber(this, instr);
2645
- Register input_reg = ToRegister(instr->InputAt(0));
2646
- // Smi check.
2647
- __ test(input_reg, Immediate(kSmiTagMask));
2648
- __ j(not_zero, deferred->entry());
2649
- EmitIntegerMathAbs(instr);
2650
- __ bind(deferred->exit());
2651
- }
2652
- }
2653
-
2654
-
2655
- void LCodeGen::DoMathFloor(LUnaryMathOperation* instr) {
2656
- XMMRegister xmm_scratch = xmm0;
2657
- Register output_reg = ToRegister(instr->result());
2658
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2659
- __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2660
- __ ucomisd(input_reg, xmm_scratch);
2661
-
2662
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2663
- DeoptimizeIf(below_equal, instr->environment());
2664
- } else {
2665
- DeoptimizeIf(below, instr->environment());
2666
- }
2667
-
2668
- // Use truncating instruction (OK because input is positive).
2669
- __ cvttsd2si(output_reg, Operand(input_reg));
2670
-
2671
- // Overflow is signalled with minint.
2672
- __ cmp(output_reg, 0x80000000u);
2673
- DeoptimizeIf(equal, instr->environment());
2674
- }
2675
-
2676
-
2677
- void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2678
- XMMRegister xmm_scratch = xmm0;
2679
- Register output_reg = ToRegister(instr->result());
2680
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2681
-
2682
- // xmm_scratch = 0.5
2683
- ExternalReference one_half = ExternalReference::address_of_one_half();
2684
- __ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
2685
-
2686
- // input = input + 0.5
2687
- __ addsd(input_reg, xmm_scratch);
2688
-
2689
- // We need to return -0 for the input range [-0.5, 0[, otherwise
2690
- // compute Math.floor(value + 0.5).
2691
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
2692
- __ ucomisd(input_reg, xmm_scratch);
2693
- DeoptimizeIf(below_equal, instr->environment());
2694
- } else {
2695
- // If we don't need to bailout on -0, we check only bailout
2696
- // on negative inputs.
2697
- __ xorpd(xmm_scratch, xmm_scratch); // Zero the register.
2698
- __ ucomisd(input_reg, xmm_scratch);
2699
- DeoptimizeIf(below, instr->environment());
2700
- }
2701
-
2702
- // Compute Math.floor(value + 0.5).
2703
- // Use truncating instruction (OK because input is positive).
2704
- __ cvttsd2si(output_reg, Operand(input_reg));
2705
-
2706
- // Overflow is signalled with minint.
2707
- __ cmp(output_reg, 0x80000000u);
2708
- DeoptimizeIf(equal, instr->environment());
2709
- }
2710
-
2711
-
2712
- void LCodeGen::DoMathSqrt(LUnaryMathOperation* instr) {
2713
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2714
- ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2715
- __ sqrtsd(input_reg, input_reg);
2716
- }
2717
-
2718
-
2719
- void LCodeGen::DoMathPowHalf(LUnaryMathOperation* instr) {
2720
- XMMRegister xmm_scratch = xmm0;
2721
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2722
- ASSERT(ToDoubleRegister(instr->result()).is(input_reg));
2723
- __ xorpd(xmm_scratch, xmm_scratch);
2724
- __ addsd(input_reg, xmm_scratch); // Convert -0 to +0.
2725
- __ sqrtsd(input_reg, input_reg);
2726
- }
2727
-
2728
-
2729
- void LCodeGen::DoPower(LPower* instr) {
2730
- LOperand* left = instr->InputAt(0);
2731
- LOperand* right = instr->InputAt(1);
2732
- DoubleRegister result_reg = ToDoubleRegister(instr->result());
2733
- Representation exponent_type = instr->hydrogen()->right()->representation();
2734
-
2735
- if (exponent_type.IsDouble()) {
2736
- // It is safe to use ebx directly since the instruction is marked
2737
- // as a call.
2738
- __ PrepareCallCFunction(4, ebx);
2739
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2740
- __ movdbl(Operand(esp, 1 * kDoubleSize), ToDoubleRegister(right));
2741
- __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
2742
- 4);
2743
- } else if (exponent_type.IsInteger32()) {
2744
- // It is safe to use ebx directly since the instruction is marked
2745
- // as a call.
2746
- ASSERT(!ToRegister(right).is(ebx));
2747
- __ PrepareCallCFunction(4, ebx);
2748
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2749
- __ mov(Operand(esp, 1 * kDoubleSize), ToRegister(right));
2750
- __ CallCFunction(ExternalReference::power_double_int_function(isolate()),
2751
- 4);
2752
- } else {
2753
- ASSERT(exponent_type.IsTagged());
2754
- CpuFeatures::Scope scope(SSE2);
2755
- Register right_reg = ToRegister(right);
2756
-
2757
- Label non_smi, call;
2758
- __ test(right_reg, Immediate(kSmiTagMask));
2759
- __ j(not_zero, &non_smi);
2760
- __ SmiUntag(right_reg);
2761
- __ cvtsi2sd(result_reg, Operand(right_reg));
2762
- __ jmp(&call);
2763
-
2764
- __ bind(&non_smi);
2765
- // It is safe to use ebx directly since the instruction is marked
2766
- // as a call.
2767
- ASSERT(!right_reg.is(ebx));
2768
- __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE , ebx);
2769
- DeoptimizeIf(not_equal, instr->environment());
2770
- __ movdbl(result_reg, FieldOperand(right_reg, HeapNumber::kValueOffset));
2771
-
2772
- __ bind(&call);
2773
- __ PrepareCallCFunction(4, ebx);
2774
- __ movdbl(Operand(esp, 0 * kDoubleSize), ToDoubleRegister(left));
2775
- __ movdbl(Operand(esp, 1 * kDoubleSize), result_reg);
2776
- __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
2777
- 4);
2778
- }
2779
-
2780
- // Return value is in st(0) on ia32.
2781
- // Store it into the (fixed) result register.
2782
- __ sub(Operand(esp), Immediate(kDoubleSize));
2783
- __ fstp_d(Operand(esp, 0));
2784
- __ movdbl(result_reg, Operand(esp, 0));
2785
- __ add(Operand(esp), Immediate(kDoubleSize));
2786
- }
2787
-
2788
-
2789
- void LCodeGen::DoMathLog(LUnaryMathOperation* instr) {
2790
- ASSERT(instr->InputAt(0)->Equals(instr->result()));
2791
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2792
- NearLabel positive, done, zero, negative;
2793
- __ xorpd(xmm0, xmm0);
2794
- __ ucomisd(input_reg, xmm0);
2795
- __ j(above, &positive);
2796
- __ j(equal, &zero);
2797
- ExternalReference nan = ExternalReference::address_of_nan();
2798
- __ movdbl(input_reg, Operand::StaticVariable(nan));
2799
- __ jmp(&done);
2800
- __ bind(&zero);
2801
- __ push(Immediate(0xFFF00000));
2802
- __ push(Immediate(0));
2803
- __ movdbl(input_reg, Operand(esp, 0));
2804
- __ add(Operand(esp), Immediate(kDoubleSize));
2805
- __ jmp(&done);
2806
- __ bind(&positive);
2807
- __ fldln2();
2808
- __ sub(Operand(esp), Immediate(kDoubleSize));
2809
- __ movdbl(Operand(esp, 0), input_reg);
2810
- __ fld_d(Operand(esp, 0));
2811
- __ fyl2x();
2812
- __ fstp_d(Operand(esp, 0));
2813
- __ movdbl(input_reg, Operand(esp, 0));
2814
- __ add(Operand(esp), Immediate(kDoubleSize));
2815
- __ bind(&done);
2816
- }
2817
-
2818
-
2819
- void LCodeGen::DoMathCos(LUnaryMathOperation* instr) {
2820
- ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2821
- TranscendentalCacheStub stub(TranscendentalCache::COS,
2822
- TranscendentalCacheStub::UNTAGGED);
2823
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2824
- }
2825
-
2826
-
2827
- void LCodeGen::DoMathSin(LUnaryMathOperation* instr) {
2828
- ASSERT(ToDoubleRegister(instr->result()).is(xmm1));
2829
- TranscendentalCacheStub stub(TranscendentalCache::SIN,
2830
- TranscendentalCacheStub::UNTAGGED);
2831
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
2832
- }
2833
-
2834
-
2835
- void LCodeGen::DoUnaryMathOperation(LUnaryMathOperation* instr) {
2836
- switch (instr->op()) {
2837
- case kMathAbs:
2838
- DoMathAbs(instr);
2839
- break;
2840
- case kMathFloor:
2841
- DoMathFloor(instr);
2842
- break;
2843
- case kMathRound:
2844
- DoMathRound(instr);
2845
- break;
2846
- case kMathSqrt:
2847
- DoMathSqrt(instr);
2848
- break;
2849
- case kMathPowHalf:
2850
- DoMathPowHalf(instr);
2851
- break;
2852
- case kMathCos:
2853
- DoMathCos(instr);
2854
- break;
2855
- case kMathSin:
2856
- DoMathSin(instr);
2857
- break;
2858
- case kMathLog:
2859
- DoMathLog(instr);
2860
- break;
2861
-
2862
- default:
2863
- UNREACHABLE();
2864
- }
2865
- }
2866
-
2867
-
2868
- void LCodeGen::DoCallKeyed(LCallKeyed* instr) {
2869
- ASSERT(ToRegister(instr->context()).is(esi));
2870
- ASSERT(ToRegister(instr->key()).is(ecx));
2871
- ASSERT(ToRegister(instr->result()).is(eax));
2872
-
2873
- int arity = instr->arity();
2874
- Handle<Code> ic = isolate()->stub_cache()->
2875
- ComputeKeyedCallInitialize(arity, NOT_IN_LOOP);
2876
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2877
- }
2878
-
2879
-
2880
- void LCodeGen::DoCallNamed(LCallNamed* instr) {
2881
- ASSERT(ToRegister(instr->context()).is(esi));
2882
- ASSERT(ToRegister(instr->result()).is(eax));
2883
-
2884
- int arity = instr->arity();
2885
- Handle<Code> ic = isolate()->stub_cache()->
2886
- ComputeCallInitialize(arity, NOT_IN_LOOP);
2887
- __ mov(ecx, instr->name());
2888
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2889
- }
2890
-
2891
-
2892
- void LCodeGen::DoCallFunction(LCallFunction* instr) {
2893
- ASSERT(ToRegister(instr->context()).is(esi));
2894
- ASSERT(ToRegister(instr->result()).is(eax));
2895
-
2896
- int arity = instr->arity();
2897
- CallFunctionStub stub(arity, NOT_IN_LOOP, RECEIVER_MIGHT_BE_VALUE);
2898
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
2899
- __ Drop(1);
2900
- }
2901
-
2902
-
2903
- void LCodeGen::DoCallGlobal(LCallGlobal* instr) {
2904
- ASSERT(ToRegister(instr->context()).is(esi));
2905
- ASSERT(ToRegister(instr->result()).is(eax));
2906
-
2907
- int arity = instr->arity();
2908
- Handle<Code> ic = isolate()->stub_cache()->
2909
- ComputeCallInitialize(arity, NOT_IN_LOOP);
2910
- __ mov(ecx, instr->name());
2911
- CallCode(ic, RelocInfo::CODE_TARGET_CONTEXT, instr);
2912
- }
2913
-
2914
-
2915
- void LCodeGen::DoCallKnownGlobal(LCallKnownGlobal* instr) {
2916
- ASSERT(ToRegister(instr->result()).is(eax));
2917
- __ mov(edi, instr->target());
2918
- CallKnownFunction(instr->target(), instr->arity(), instr);
2919
- }
2920
-
2921
-
2922
- void LCodeGen::DoCallNew(LCallNew* instr) {
2923
- ASSERT(ToRegister(instr->context()).is(esi));
2924
- ASSERT(ToRegister(instr->constructor()).is(edi));
2925
- ASSERT(ToRegister(instr->result()).is(eax));
2926
-
2927
- Handle<Code> builtin = isolate()->builtins()->JSConstructCall();
2928
- __ Set(eax, Immediate(instr->arity()));
2929
- CallCode(builtin, RelocInfo::CONSTRUCT_CALL, instr);
2930
- }
2931
-
2932
-
2933
- void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2934
- CallRuntime(instr->function(), instr->arity(), instr, false);
2935
- }
2936
-
2937
-
2938
- void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
2939
- Register object = ToRegister(instr->object());
2940
- Register value = ToRegister(instr->value());
2941
- int offset = instr->offset();
2942
-
2943
- if (!instr->transition().is_null()) {
2944
- __ mov(FieldOperand(object, HeapObject::kMapOffset), instr->transition());
2945
- }
2946
-
2947
- // Do the store.
2948
- if (instr->is_in_object()) {
2949
- __ mov(FieldOperand(object, offset), value);
2950
- if (instr->needs_write_barrier()) {
2951
- Register temp = ToRegister(instr->TempAt(0));
2952
- // Update the write barrier for the object for in-object properties.
2953
- __ RecordWrite(object, offset, value, temp);
2954
- }
2955
- } else {
2956
- Register temp = ToRegister(instr->TempAt(0));
2957
- __ mov(temp, FieldOperand(object, JSObject::kPropertiesOffset));
2958
- __ mov(FieldOperand(temp, offset), value);
2959
- if (instr->needs_write_barrier()) {
2960
- // Update the write barrier for the properties array.
2961
- // object is used as a scratch register.
2962
- __ RecordWrite(temp, offset, value, object);
2963
- }
2964
- }
2965
- }
2966
-
2967
-
2968
- void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
2969
- ASSERT(ToRegister(instr->context()).is(esi));
2970
- ASSERT(ToRegister(instr->object()).is(edx));
2971
- ASSERT(ToRegister(instr->value()).is(eax));
2972
-
2973
- __ mov(ecx, instr->name());
2974
- Handle<Code> ic = info_->is_strict()
2975
- ? isolate()->builtins()->StoreIC_Initialize_Strict()
2976
- : isolate()->builtins()->StoreIC_Initialize();
2977
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
2978
- }
2979
-
2980
-
2981
- void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
2982
- __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
2983
- DeoptimizeIf(above_equal, instr->environment());
2984
- }
2985
-
2986
-
2987
- void LCodeGen::DoStoreKeyedSpecializedArrayElement(
2988
- LStoreKeyedSpecializedArrayElement* instr) {
2989
- Register external_pointer = ToRegister(instr->external_pointer());
2990
- Register key = ToRegister(instr->key());
2991
- ExternalArrayType array_type = instr->array_type();
2992
- if (array_type == kExternalFloatArray) {
2993
- __ cvtsd2ss(xmm0, ToDoubleRegister(instr->value()));
2994
- __ movss(Operand(external_pointer, key, times_4, 0), xmm0);
2995
- } else {
2996
- Register value = ToRegister(instr->value());
2997
- switch (array_type) {
2998
- case kExternalPixelArray: {
2999
- // Clamp the value to [0..255].
3000
- Register temp = ToRegister(instr->TempAt(0));
3001
- // The dec_b below requires that the clamped value is in a byte
3002
- // register. eax is an arbitrary choice to satisfy this requirement, we
3003
- // hinted the register allocator to give us eax when building the
3004
- // instruction.
3005
- ASSERT(temp.is(eax));
3006
- __ mov(temp, ToRegister(instr->value()));
3007
- NearLabel done;
3008
- __ test(temp, Immediate(0xFFFFFF00));
3009
- __ j(zero, &done);
3010
- __ setcc(negative, temp); // 1 if negative, 0 if positive.
3011
- __ dec_b(temp); // 0 if negative, 255 if positive.
3012
- __ bind(&done);
3013
- __ mov_b(Operand(external_pointer, key, times_1, 0), temp);
3014
- break;
3015
- }
3016
- case kExternalByteArray:
3017
- case kExternalUnsignedByteArray:
3018
- __ mov_b(Operand(external_pointer, key, times_1, 0), value);
3019
- break;
3020
- case kExternalShortArray:
3021
- case kExternalUnsignedShortArray:
3022
- __ mov_w(Operand(external_pointer, key, times_2, 0), value);
3023
- break;
3024
- case kExternalIntArray:
3025
- case kExternalUnsignedIntArray:
3026
- __ mov(Operand(external_pointer, key, times_4, 0), value);
3027
- break;
3028
- case kExternalFloatArray:
3029
- UNREACHABLE();
3030
- break;
3031
- }
3032
- }
3033
- }
3034
-
3035
-
3036
- void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
3037
- Register value = ToRegister(instr->value());
3038
- Register elements = ToRegister(instr->object());
3039
- Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
3040
-
3041
- // Do the store.
3042
- if (instr->key()->IsConstantOperand()) {
3043
- ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
3044
- LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3045
- int offset =
3046
- ToInteger32(const_operand) * kPointerSize + FixedArray::kHeaderSize;
3047
- __ mov(FieldOperand(elements, offset), value);
3048
- } else {
3049
- __ mov(FieldOperand(elements,
3050
- key,
3051
- times_pointer_size,
3052
- FixedArray::kHeaderSize),
3053
- value);
3054
- }
3055
-
3056
- if (instr->hydrogen()->NeedsWriteBarrier()) {
3057
- // Compute address of modified element and store it into key register.
3058
- __ lea(key,
3059
- FieldOperand(elements,
3060
- key,
3061
- times_pointer_size,
3062
- FixedArray::kHeaderSize));
3063
- __ RecordWrite(elements, key, value);
3064
- }
3065
- }
3066
-
3067
-
3068
- void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
3069
- ASSERT(ToRegister(instr->context()).is(esi));
3070
- ASSERT(ToRegister(instr->object()).is(edx));
3071
- ASSERT(ToRegister(instr->key()).is(ecx));
3072
- ASSERT(ToRegister(instr->value()).is(eax));
3073
-
3074
- Handle<Code> ic = info_->is_strict()
3075
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3076
- : isolate()->builtins()->KeyedStoreIC_Initialize();
3077
- CallCode(ic, RelocInfo::CODE_TARGET, instr);
3078
- }
3079
-
3080
-
3081
- void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
3082
- class DeferredStringCharCodeAt: public LDeferredCode {
3083
- public:
3084
- DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
3085
- : LDeferredCode(codegen), instr_(instr) { }
3086
- virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
3087
- private:
3088
- LStringCharCodeAt* instr_;
3089
- };
3090
-
3091
- Register string = ToRegister(instr->string());
3092
- Register index = no_reg;
3093
- int const_index = -1;
3094
- if (instr->index()->IsConstantOperand()) {
3095
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3096
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3097
- if (!Smi::IsValid(const_index)) {
3098
- // Guaranteed to be out of bounds because of the assert above.
3099
- // So the bounds check that must dominate this instruction must
3100
- // have deoptimized already.
3101
- if (FLAG_debug_code) {
3102
- __ Abort("StringCharCodeAt: out of bounds index.");
3103
- }
3104
- // No code needs to be generated.
3105
- return;
3106
- }
3107
- } else {
3108
- index = ToRegister(instr->index());
3109
- }
3110
- Register result = ToRegister(instr->result());
3111
-
3112
- DeferredStringCharCodeAt* deferred =
3113
- new DeferredStringCharCodeAt(this, instr);
3114
-
3115
- NearLabel flat_string, ascii_string, done;
3116
-
3117
- // Fetch the instance type of the receiver into result register.
3118
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3119
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3120
-
3121
- // We need special handling for non-flat strings.
3122
- STATIC_ASSERT(kSeqStringTag == 0);
3123
- __ test(result, Immediate(kStringRepresentationMask));
3124
- __ j(zero, &flat_string);
3125
-
3126
- // Handle non-flat strings.
3127
- __ test(result, Immediate(kIsConsStringMask));
3128
- __ j(zero, deferred->entry());
3129
-
3130
- // ConsString.
3131
- // Check whether the right hand side is the empty string (i.e. if
3132
- // this is really a flat string in a cons string). If that is not
3133
- // the case we would rather go to the runtime system now to flatten
3134
- // the string.
3135
- __ cmp(FieldOperand(string, ConsString::kSecondOffset),
3136
- Immediate(factory()->empty_string()));
3137
- __ j(not_equal, deferred->entry());
3138
- // Get the first of the two strings and load its instance type.
3139
- __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
3140
- __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
3141
- __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
3142
- // If the first cons component is also non-flat, then go to runtime.
3143
- STATIC_ASSERT(kSeqStringTag == 0);
3144
- __ test(result, Immediate(kStringRepresentationMask));
3145
- __ j(not_zero, deferred->entry());
3146
-
3147
- // Check for ASCII or two-byte string.
3148
- __ bind(&flat_string);
3149
- STATIC_ASSERT(kAsciiStringTag != 0);
3150
- __ test(result, Immediate(kStringEncodingMask));
3151
- __ j(not_zero, &ascii_string);
3152
-
3153
- // Two-byte string.
3154
- // Load the two-byte character code into the result register.
3155
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3156
- if (instr->index()->IsConstantOperand()) {
3157
- __ movzx_w(result,
3158
- FieldOperand(string,
3159
- SeqTwoByteString::kHeaderSize +
3160
- (kUC16Size * const_index)));
3161
- } else {
3162
- __ movzx_w(result, FieldOperand(string,
3163
- index,
3164
- times_2,
3165
- SeqTwoByteString::kHeaderSize));
3166
- }
3167
- __ jmp(&done);
3168
-
3169
- // ASCII string.
3170
- // Load the byte into the result register.
3171
- __ bind(&ascii_string);
3172
- if (instr->index()->IsConstantOperand()) {
3173
- __ movzx_b(result, FieldOperand(string,
3174
- SeqAsciiString::kHeaderSize + const_index));
3175
- } else {
3176
- __ movzx_b(result, FieldOperand(string,
3177
- index,
3178
- times_1,
3179
- SeqAsciiString::kHeaderSize));
3180
- }
3181
- __ bind(&done);
3182
- __ bind(deferred->exit());
3183
- }
3184
-
3185
-
3186
- void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
3187
- Register string = ToRegister(instr->string());
3188
- Register result = ToRegister(instr->result());
3189
-
3190
- // TODO(3095996): Get rid of this. For now, we need to make the
3191
- // result register contain a valid pointer because it is already
3192
- // contained in the register pointer map.
3193
- __ Set(result, Immediate(0));
3194
-
3195
- __ PushSafepointRegisters();
3196
- __ push(string);
3197
- // Push the index as a smi. This is safe because of the checks in
3198
- // DoStringCharCodeAt above.
3199
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
3200
- if (instr->index()->IsConstantOperand()) {
3201
- int const_index = ToInteger32(LConstantOperand::cast(instr->index()));
3202
- __ push(Immediate(Smi::FromInt(const_index)));
3203
- } else {
3204
- Register index = ToRegister(instr->index());
3205
- __ SmiTag(index);
3206
- __ push(index);
3207
- }
3208
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3209
- __ CallRuntimeSaveDoubles(Runtime::kStringCharCodeAt);
3210
- RecordSafepointWithRegisters(
3211
- instr->pointer_map(), 2, Safepoint::kNoDeoptimizationIndex);
3212
- if (FLAG_debug_code) {
3213
- __ AbortIfNotSmi(eax);
3214
- }
3215
- __ SmiUntag(eax);
3216
- __ StoreToSafepointRegisterSlot(result, eax);
3217
- __ PopSafepointRegisters();
3218
- }
3219
-
3220
-
3221
- void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
3222
- class DeferredStringCharFromCode: public LDeferredCode {
3223
- public:
3224
- DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
3225
- : LDeferredCode(codegen), instr_(instr) { }
3226
- virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
3227
- private:
3228
- LStringCharFromCode* instr_;
3229
- };
3230
-
3231
- DeferredStringCharFromCode* deferred =
3232
- new DeferredStringCharFromCode(this, instr);
3233
-
3234
- ASSERT(instr->hydrogen()->value()->representation().IsInteger32());
3235
- Register char_code = ToRegister(instr->char_code());
3236
- Register result = ToRegister(instr->result());
3237
- ASSERT(!char_code.is(result));
3238
-
3239
- __ cmp(char_code, String::kMaxAsciiCharCode);
3240
- __ j(above, deferred->entry());
3241
- __ Set(result, Immediate(factory()->single_character_string_cache()));
3242
- __ mov(result, FieldOperand(result,
3243
- char_code, times_pointer_size,
3244
- FixedArray::kHeaderSize));
3245
- __ cmp(result, factory()->undefined_value());
3246
- __ j(equal, deferred->entry());
3247
- __ bind(deferred->exit());
3248
- }
3249
-
3250
-
3251
- void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
3252
- Register char_code = ToRegister(instr->char_code());
3253
- Register result = ToRegister(instr->result());
3254
-
3255
- // TODO(3095996): Get rid of this. For now, we need to make the
3256
- // result register contain a valid pointer because it is already
3257
- // contained in the register pointer map.
3258
- __ Set(result, Immediate(0));
3259
-
3260
- __ PushSafepointRegisters();
3261
- __ SmiTag(char_code);
3262
- __ push(char_code);
3263
- __ CallRuntimeSaveDoubles(Runtime::kCharFromCode);
3264
- RecordSafepointWithRegisters(
3265
- instr->pointer_map(), 1, Safepoint::kNoDeoptimizationIndex);
3266
- __ StoreToSafepointRegisterSlot(result, eax);
3267
- __ PopSafepointRegisters();
3268
- }
3269
-
3270
-
3271
- void LCodeGen::DoStringLength(LStringLength* instr) {
3272
- Register string = ToRegister(instr->string());
3273
- Register result = ToRegister(instr->result());
3274
- __ mov(result, FieldOperand(string, String::kLengthOffset));
3275
- }
3276
-
3277
-
3278
- void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3279
- LOperand* input = instr->InputAt(0);
3280
- ASSERT(input->IsRegister() || input->IsStackSlot());
3281
- LOperand* output = instr->result();
3282
- ASSERT(output->IsDoubleRegister());
3283
- __ cvtsi2sd(ToDoubleRegister(output), ToOperand(input));
3284
- }
3285
-
3286
-
3287
- void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
3288
- class DeferredNumberTagI: public LDeferredCode {
3289
- public:
3290
- DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
3291
- : LDeferredCode(codegen), instr_(instr) { }
3292
- virtual void Generate() { codegen()->DoDeferredNumberTagI(instr_); }
3293
- private:
3294
- LNumberTagI* instr_;
3295
- };
3296
-
3297
- LOperand* input = instr->InputAt(0);
3298
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3299
- Register reg = ToRegister(input);
3300
-
3301
- DeferredNumberTagI* deferred = new DeferredNumberTagI(this, instr);
3302
- __ SmiTag(reg);
3303
- __ j(overflow, deferred->entry());
3304
- __ bind(deferred->exit());
3305
- }
3306
-
3307
-
3308
- void LCodeGen::DoDeferredNumberTagI(LNumberTagI* instr) {
3309
- Label slow;
3310
- Register reg = ToRegister(instr->InputAt(0));
3311
- Register tmp = reg.is(eax) ? ecx : eax;
3312
-
3313
- // Preserve the value of all registers.
3314
- __ PushSafepointRegisters();
3315
-
3316
- // There was overflow, so bits 30 and 31 of the original integer
3317
- // disagree. Try to allocate a heap number in new space and store
3318
- // the value in there. If that fails, call the runtime system.
3319
- NearLabel done;
3320
- __ SmiUntag(reg);
3321
- __ xor_(reg, 0x80000000);
3322
- __ cvtsi2sd(xmm0, Operand(reg));
3323
- if (FLAG_inline_new) {
3324
- __ AllocateHeapNumber(reg, tmp, no_reg, &slow);
3325
- __ jmp(&done);
3326
- }
3327
-
3328
- // Slow case: Call the runtime system to do the number allocation.
3329
- __ bind(&slow);
3330
-
3331
- // TODO(3095996): Put a valid pointer value in the stack slot where the result
3332
- // register is stored, as this register is in the pointer map, but contains an
3333
- // integer value.
3334
- __ StoreToSafepointRegisterSlot(reg, Immediate(0));
3335
-
3336
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3337
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3338
- RecordSafepointWithRegisters(
3339
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3340
- if (!reg.is(eax)) __ mov(reg, eax);
3341
-
3342
- // Done. Put the value in xmm0 into the value of the allocated heap
3343
- // number.
3344
- __ bind(&done);
3345
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), xmm0);
3346
- __ StoreToSafepointRegisterSlot(reg, reg);
3347
- __ PopSafepointRegisters();
3348
- }
3349
-
3350
-
3351
- void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
3352
- class DeferredNumberTagD: public LDeferredCode {
3353
- public:
3354
- DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
3355
- : LDeferredCode(codegen), instr_(instr) { }
3356
- virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
3357
- private:
3358
- LNumberTagD* instr_;
3359
- };
3360
-
3361
- XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
3362
- Register reg = ToRegister(instr->result());
3363
- Register tmp = ToRegister(instr->TempAt(0));
3364
-
3365
- DeferredNumberTagD* deferred = new DeferredNumberTagD(this, instr);
3366
- if (FLAG_inline_new) {
3367
- __ AllocateHeapNumber(reg, tmp, no_reg, deferred->entry());
3368
- } else {
3369
- __ jmp(deferred->entry());
3370
- }
3371
- __ bind(deferred->exit());
3372
- __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
3373
- }
3374
-
3375
-
3376
- void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
3377
- // TODO(3095996): Get rid of this. For now, we need to make the
3378
- // result register contain a valid pointer because it is already
3379
- // contained in the register pointer map.
3380
- Register reg = ToRegister(instr->result());
3381
- __ Set(reg, Immediate(0));
3382
-
3383
- __ PushSafepointRegisters();
3384
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3385
- __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
3386
- RecordSafepointWithRegisters(
3387
- instr->pointer_map(), 0, Safepoint::kNoDeoptimizationIndex);
3388
- __ StoreToSafepointRegisterSlot(reg, eax);
3389
- __ PopSafepointRegisters();
3390
- }
3391
-
3392
-
3393
- void LCodeGen::DoSmiTag(LSmiTag* instr) {
3394
- LOperand* input = instr->InputAt(0);
3395
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3396
- ASSERT(!instr->hydrogen_value()->CheckFlag(HValue::kCanOverflow));
3397
- __ SmiTag(ToRegister(input));
3398
- }
3399
-
3400
-
3401
- void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
3402
- LOperand* input = instr->InputAt(0);
3403
- ASSERT(input->IsRegister() && input->Equals(instr->result()));
3404
- if (instr->needs_check()) {
3405
- __ test(ToRegister(input), Immediate(kSmiTagMask));
3406
- DeoptimizeIf(not_zero, instr->environment());
3407
- }
3408
- __ SmiUntag(ToRegister(input));
3409
- }
3410
-
3411
-
3412
- void LCodeGen::EmitNumberUntagD(Register input_reg,
3413
- XMMRegister result_reg,
3414
- LEnvironment* env) {
3415
- NearLabel load_smi, heap_number, done;
3416
-
3417
- // Smi check.
3418
- __ test(input_reg, Immediate(kSmiTagMask));
3419
- __ j(zero, &load_smi, not_taken);
3420
-
3421
- // Heap number map check.
3422
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3423
- factory()->heap_number_map());
3424
- __ j(equal, &heap_number);
3425
-
3426
- __ cmp(input_reg, factory()->undefined_value());
3427
- DeoptimizeIf(not_equal, env);
3428
-
3429
- // Convert undefined to NaN.
3430
- ExternalReference nan = ExternalReference::address_of_nan();
3431
- __ movdbl(result_reg, Operand::StaticVariable(nan));
3432
- __ jmp(&done);
3433
-
3434
- // Heap number to XMM conversion.
3435
- __ bind(&heap_number);
3436
- __ movdbl(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3437
- __ jmp(&done);
3438
-
3439
- // Smi to XMM conversion
3440
- __ bind(&load_smi);
3441
- __ SmiUntag(input_reg); // Untag smi before converting to float.
3442
- __ cvtsi2sd(result_reg, Operand(input_reg));
3443
- __ SmiTag(input_reg); // Retag smi.
3444
- __ bind(&done);
3445
- }
3446
-
3447
-
3448
- class DeferredTaggedToI: public LDeferredCode {
3449
- public:
3450
- DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3451
- : LDeferredCode(codegen), instr_(instr) { }
3452
- virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3453
- private:
3454
- LTaggedToI* instr_;
3455
- };
3456
-
3457
-
3458
- void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3459
- NearLabel done, heap_number;
3460
- Register input_reg = ToRegister(instr->InputAt(0));
3461
-
3462
- // Heap number map check.
3463
- __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
3464
- factory()->heap_number_map());
3465
-
3466
- if (instr->truncating()) {
3467
- __ j(equal, &heap_number);
3468
- // Check for undefined. Undefined is converted to zero for truncating
3469
- // conversions.
3470
- __ cmp(input_reg, factory()->undefined_value());
3471
- DeoptimizeIf(not_equal, instr->environment());
3472
- __ mov(input_reg, 0);
3473
- __ jmp(&done);
3474
-
3475
- __ bind(&heap_number);
3476
- if (CpuFeatures::IsSupported(SSE3)) {
3477
- CpuFeatures::Scope scope(SSE3);
3478
- NearLabel convert;
3479
- // Use more powerful conversion when sse3 is available.
3480
- // Load x87 register with heap number.
3481
- __ fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
3482
- // Get exponent alone and check for too-big exponent.
3483
- __ mov(input_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
3484
- __ and_(input_reg, HeapNumber::kExponentMask);
3485
- const uint32_t kTooBigExponent =
3486
- (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3487
- __ cmp(Operand(input_reg), Immediate(kTooBigExponent));
3488
- __ j(less, &convert);
3489
- // Pop FPU stack before deoptimizing.
3490
- __ ffree(0);
3491
- __ fincstp();
3492
- DeoptimizeIf(no_condition, instr->environment());
3493
-
3494
- // Reserve space for 64 bit answer.
3495
- __ bind(&convert);
3496
- __ sub(Operand(esp), Immediate(kDoubleSize));
3497
- // Do conversion, which cannot fail because we checked the exponent.
3498
- __ fisttp_d(Operand(esp, 0));
3499
- __ mov(input_reg, Operand(esp, 0)); // Low word of answer is the result.
3500
- __ add(Operand(esp), Immediate(kDoubleSize));
3501
- } else {
3502
- NearLabel deopt;
3503
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3504
- __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3505
- __ cvttsd2si(input_reg, Operand(xmm0));
3506
- __ cmp(input_reg, 0x80000000u);
3507
- __ j(not_equal, &done);
3508
- // Check if the input was 0x8000000 (kMinInt).
3509
- // If no, then we got an overflow and we deoptimize.
3510
- ExternalReference min_int = ExternalReference::address_of_min_int();
3511
- __ movdbl(xmm_temp, Operand::StaticVariable(min_int));
3512
- __ ucomisd(xmm_temp, xmm0);
3513
- DeoptimizeIf(not_equal, instr->environment());
3514
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3515
- }
3516
- } else {
3517
- // Deoptimize if we don't have a heap number.
3518
- DeoptimizeIf(not_equal, instr->environment());
3519
-
3520
- XMMRegister xmm_temp = ToDoubleRegister(instr->TempAt(0));
3521
- __ movdbl(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3522
- __ cvttsd2si(input_reg, Operand(xmm0));
3523
- __ cvtsi2sd(xmm_temp, Operand(input_reg));
3524
- __ ucomisd(xmm0, xmm_temp);
3525
- DeoptimizeIf(not_equal, instr->environment());
3526
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3527
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3528
- __ test(input_reg, Operand(input_reg));
3529
- __ j(not_zero, &done);
3530
- __ movmskpd(input_reg, xmm0);
3531
- __ and_(input_reg, 1);
3532
- DeoptimizeIf(not_zero, instr->environment());
3533
- }
3534
- }
3535
- __ bind(&done);
3536
- }
3537
-
3538
-
3539
- void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
3540
- LOperand* input = instr->InputAt(0);
3541
- ASSERT(input->IsRegister());
3542
- ASSERT(input->Equals(instr->result()));
3543
-
3544
- Register input_reg = ToRegister(input);
3545
-
3546
- DeferredTaggedToI* deferred = new DeferredTaggedToI(this, instr);
3547
-
3548
- // Smi check.
3549
- __ test(input_reg, Immediate(kSmiTagMask));
3550
- __ j(not_zero, deferred->entry());
3551
-
3552
- // Smi to int32 conversion
3553
- __ SmiUntag(input_reg); // Untag smi.
3554
-
3555
- __ bind(deferred->exit());
3556
- }
3557
-
3558
-
3559
- void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
3560
- LOperand* input = instr->InputAt(0);
3561
- ASSERT(input->IsRegister());
3562
- LOperand* result = instr->result();
3563
- ASSERT(result->IsDoubleRegister());
3564
-
3565
- Register input_reg = ToRegister(input);
3566
- XMMRegister result_reg = ToDoubleRegister(result);
3567
-
3568
- EmitNumberUntagD(input_reg, result_reg, instr->environment());
3569
- }
3570
-
3571
-
3572
- void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
3573
- LOperand* input = instr->InputAt(0);
3574
- ASSERT(input->IsDoubleRegister());
3575
- LOperand* result = instr->result();
3576
- ASSERT(result->IsRegister());
3577
-
3578
- XMMRegister input_reg = ToDoubleRegister(input);
3579
- Register result_reg = ToRegister(result);
3580
-
3581
- if (instr->truncating()) {
3582
- // Performs a truncating conversion of a floating point number as used by
3583
- // the JS bitwise operations.
3584
- __ cvttsd2si(result_reg, Operand(input_reg));
3585
- __ cmp(result_reg, 0x80000000u);
3586
- if (CpuFeatures::IsSupported(SSE3)) {
3587
- // This will deoptimize if the exponent of the input in out of range.
3588
- CpuFeatures::Scope scope(SSE3);
3589
- NearLabel convert, done;
3590
- __ j(not_equal, &done);
3591
- __ sub(Operand(esp), Immediate(kDoubleSize));
3592
- __ movdbl(Operand(esp, 0), input_reg);
3593
- // Get exponent alone and check for too-big exponent.
3594
- __ mov(result_reg, Operand(esp, sizeof(int32_t)));
3595
- __ and_(result_reg, HeapNumber::kExponentMask);
3596
- const uint32_t kTooBigExponent =
3597
- (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
3598
- __ cmp(Operand(result_reg), Immediate(kTooBigExponent));
3599
- __ j(less, &convert);
3600
- __ add(Operand(esp), Immediate(kDoubleSize));
3601
- DeoptimizeIf(no_condition, instr->environment());
3602
- __ bind(&convert);
3603
- // Do conversion, which cannot fail because we checked the exponent.
3604
- __ fld_d(Operand(esp, 0));
3605
- __ fisttp_d(Operand(esp, 0));
3606
- __ mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
3607
- __ add(Operand(esp), Immediate(kDoubleSize));
3608
- __ bind(&done);
3609
- } else {
3610
- NearLabel done;
3611
- Register temp_reg = ToRegister(instr->TempAt(0));
3612
- XMMRegister xmm_scratch = xmm0;
3613
-
3614
- // If cvttsd2si succeeded, we're done. Otherwise, we attempt
3615
- // manual conversion.
3616
- __ j(not_equal, &done);
3617
-
3618
- // Get high 32 bits of the input in result_reg and temp_reg.
3619
- __ pshufd(xmm_scratch, input_reg, 1);
3620
- __ movd(Operand(temp_reg), xmm_scratch);
3621
- __ mov(result_reg, temp_reg);
3622
-
3623
- // Prepare negation mask in temp_reg.
3624
- __ sar(temp_reg, kBitsPerInt - 1);
3625
-
3626
- // Extract the exponent from result_reg and subtract adjusted
3627
- // bias from it. The adjustment is selected in a way such that
3628
- // when the difference is zero, the answer is in the low 32 bits
3629
- // of the input, otherwise a shift has to be performed.
3630
- __ shr(result_reg, HeapNumber::kExponentShift);
3631
- __ and_(result_reg,
3632
- HeapNumber::kExponentMask >> HeapNumber::kExponentShift);
3633
- __ sub(Operand(result_reg),
3634
- Immediate(HeapNumber::kExponentBias +
3635
- HeapNumber::kExponentBits +
3636
- HeapNumber::kMantissaBits));
3637
- // Don't handle big (> kMantissaBits + kExponentBits == 63) or
3638
- // special exponents.
3639
- DeoptimizeIf(greater, instr->environment());
3640
-
3641
- // Zero out the sign and the exponent in the input (by shifting
3642
- // it to the left) and restore the implicit mantissa bit,
3643
- // i.e. convert the input to unsigned int64 shifted left by
3644
- // kExponentBits.
3645
- ExternalReference minus_zero = ExternalReference::address_of_minus_zero();
3646
- // Minus zero has the most significant bit set and the other
3647
- // bits cleared.
3648
- __ movdbl(xmm_scratch, Operand::StaticVariable(minus_zero));
3649
- __ psllq(input_reg, HeapNumber::kExponentBits);
3650
- __ por(input_reg, xmm_scratch);
3651
-
3652
- // Get the amount to shift the input right in xmm_scratch.
3653
- __ neg(result_reg);
3654
- __ movd(xmm_scratch, Operand(result_reg));
3655
-
3656
- // Shift the input right and extract low 32 bits.
3657
- __ psrlq(input_reg, xmm_scratch);
3658
- __ movd(Operand(result_reg), input_reg);
3659
-
3660
- // Use the prepared mask in temp_reg to negate the result if necessary.
3661
- __ xor_(result_reg, Operand(temp_reg));
3662
- __ sub(result_reg, Operand(temp_reg));
3663
- __ bind(&done);
3664
- }
3665
- } else {
3666
- NearLabel done;
3667
- __ cvttsd2si(result_reg, Operand(input_reg));
3668
- __ cvtsi2sd(xmm0, Operand(result_reg));
3669
- __ ucomisd(xmm0, input_reg);
3670
- DeoptimizeIf(not_equal, instr->environment());
3671
- DeoptimizeIf(parity_even, instr->environment()); // NaN.
3672
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3673
- // The integer converted back is equal to the original. We
3674
- // only have to test if we got -0 as an input.
3675
- __ test(result_reg, Operand(result_reg));
3676
- __ j(not_zero, &done);
3677
- __ movmskpd(result_reg, input_reg);
3678
- // Bit 0 contains the sign of the double in input_reg.
3679
- // If input was positive, we are ok and return 0, otherwise
3680
- // deoptimize.
3681
- __ and_(result_reg, 1);
3682
- DeoptimizeIf(not_zero, instr->environment());
3683
- }
3684
- __ bind(&done);
3685
- }
3686
- }
3687
-
3688
-
3689
- void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
3690
- LOperand* input = instr->InputAt(0);
3691
- __ test(ToRegister(input), Immediate(kSmiTagMask));
3692
- DeoptimizeIf(not_zero, instr->environment());
3693
- }
3694
-
3695
-
3696
- void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
3697
- LOperand* input = instr->InputAt(0);
3698
- __ test(ToRegister(input), Immediate(kSmiTagMask));
3699
- DeoptimizeIf(zero, instr->environment());
3700
- }
3701
-
3702
-
3703
- void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
3704
- Register input = ToRegister(instr->InputAt(0));
3705
- Register temp = ToRegister(instr->TempAt(0));
3706
- InstanceType first = instr->hydrogen()->first();
3707
- InstanceType last = instr->hydrogen()->last();
3708
-
3709
- __ mov(temp, FieldOperand(input, HeapObject::kMapOffset));
3710
-
3711
- // If there is only one type in the interval check for equality.
3712
- if (first == last) {
3713
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3714
- static_cast<int8_t>(first));
3715
- DeoptimizeIf(not_equal, instr->environment());
3716
- } else if (first == FIRST_STRING_TYPE && last == LAST_STRING_TYPE) {
3717
- // String has a dedicated bit in instance type.
3718
- __ test_b(FieldOperand(temp, Map::kInstanceTypeOffset), kIsNotStringMask);
3719
- DeoptimizeIf(not_zero, instr->environment());
3720
- } else {
3721
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3722
- static_cast<int8_t>(first));
3723
- DeoptimizeIf(below, instr->environment());
3724
- // Omit check for the last type.
3725
- if (last != LAST_TYPE) {
3726
- __ cmpb(FieldOperand(temp, Map::kInstanceTypeOffset),
3727
- static_cast<int8_t>(last));
3728
- DeoptimizeIf(above, instr->environment());
3729
- }
3730
- }
3731
- }
3732
-
3733
-
3734
- void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
3735
- ASSERT(instr->InputAt(0)->IsRegister());
3736
- Register reg = ToRegister(instr->InputAt(0));
3737
- __ cmp(reg, instr->hydrogen()->target());
3738
- DeoptimizeIf(not_equal, instr->environment());
3739
- }
3740
-
3741
-
3742
- void LCodeGen::DoCheckMap(LCheckMap* instr) {
3743
- LOperand* input = instr->InputAt(0);
3744
- ASSERT(input->IsRegister());
3745
- Register reg = ToRegister(input);
3746
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3747
- instr->hydrogen()->map());
3748
- DeoptimizeIf(not_equal, instr->environment());
3749
- }
3750
-
3751
-
3752
- void LCodeGen::LoadHeapObject(Register result, Handle<HeapObject> object) {
3753
- if (isolate()->heap()->InNewSpace(*object)) {
3754
- Handle<JSGlobalPropertyCell> cell =
3755
- isolate()->factory()->NewJSGlobalPropertyCell(object);
3756
- __ mov(result, Operand::Cell(cell));
3757
- } else {
3758
- __ mov(result, object);
3759
- }
3760
- }
3761
-
3762
-
3763
- void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
3764
- Register reg = ToRegister(instr->TempAt(0));
3765
-
3766
- Handle<JSObject> holder = instr->holder();
3767
- Handle<JSObject> current_prototype = instr->prototype();
3768
-
3769
- // Load prototype object.
3770
- LoadHeapObject(reg, current_prototype);
3771
-
3772
- // Check prototype maps up to the holder.
3773
- while (!current_prototype.is_identical_to(holder)) {
3774
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3775
- Handle<Map>(current_prototype->map()));
3776
- DeoptimizeIf(not_equal, instr->environment());
3777
- current_prototype =
3778
- Handle<JSObject>(JSObject::cast(current_prototype->GetPrototype()));
3779
- // Load next prototype object.
3780
- LoadHeapObject(reg, current_prototype);
3781
- }
3782
-
3783
- // Check the holder map.
3784
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
3785
- Handle<Map>(current_prototype->map()));
3786
- DeoptimizeIf(not_equal, instr->environment());
3787
- }
3788
-
3789
-
3790
- void LCodeGen::DoArrayLiteral(LArrayLiteral* instr) {
3791
- // Setup the parameters to the stub/runtime call.
3792
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3793
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3794
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3795
- __ push(Immediate(instr->hydrogen()->constant_elements()));
3796
-
3797
- // Pick the right runtime function or stub to call.
3798
- int length = instr->hydrogen()->length();
3799
- if (instr->hydrogen()->IsCopyOnWrite()) {
3800
- ASSERT(instr->hydrogen()->depth() == 1);
3801
- FastCloneShallowArrayStub::Mode mode =
3802
- FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS;
3803
- FastCloneShallowArrayStub stub(mode, length);
3804
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3805
- } else if (instr->hydrogen()->depth() > 1) {
3806
- CallRuntime(Runtime::kCreateArrayLiteral, 3, instr, false);
3807
- } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
3808
- CallRuntime(Runtime::kCreateArrayLiteralShallow, 3, instr, false);
3809
- } else {
3810
- FastCloneShallowArrayStub::Mode mode =
3811
- FastCloneShallowArrayStub::CLONE_ELEMENTS;
3812
- FastCloneShallowArrayStub stub(mode, length);
3813
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3814
- }
3815
- }
3816
-
3817
-
3818
- void LCodeGen::DoObjectLiteral(LObjectLiteral* instr) {
3819
- ASSERT(ToRegister(instr->context()).is(esi));
3820
- // Setup the parameters to the stub/runtime call.
3821
- __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3822
- __ push(FieldOperand(eax, JSFunction::kLiteralsOffset));
3823
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3824
- __ push(Immediate(instr->hydrogen()->constant_properties()));
3825
- int flags = instr->hydrogen()->fast_elements()
3826
- ? ObjectLiteral::kFastElements
3827
- : ObjectLiteral::kNoFlags;
3828
- flags |= instr->hydrogen()->has_function()
3829
- ? ObjectLiteral::kHasFunction
3830
- : ObjectLiteral::kNoFlags;
3831
- __ push(Immediate(Smi::FromInt(flags)));
3832
-
3833
- // Pick the right runtime function to call.
3834
- if (instr->hydrogen()->depth() > 1) {
3835
- CallRuntime(Runtime::kCreateObjectLiteral, 4, instr);
3836
- } else {
3837
- CallRuntime(Runtime::kCreateObjectLiteralShallow, 4, instr);
3838
- }
3839
- }
3840
-
3841
-
3842
- void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
3843
- ASSERT(ToRegister(instr->InputAt(0)).is(eax));
3844
- __ push(eax);
3845
- CallRuntime(Runtime::kToFastProperties, 1, instr);
3846
- }
3847
-
3848
-
3849
- void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3850
- NearLabel materialized;
3851
- // Registers will be used as follows:
3852
- // edi = JS function.
3853
- // ecx = literals array.
3854
- // ebx = regexp literal.
3855
- // eax = regexp literal clone.
3856
- __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
3857
- __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
3858
- int literal_offset = FixedArray::kHeaderSize +
3859
- instr->hydrogen()->literal_index() * kPointerSize;
3860
- __ mov(ebx, FieldOperand(ecx, literal_offset));
3861
- __ cmp(ebx, factory()->undefined_value());
3862
- __ j(not_equal, &materialized);
3863
-
3864
- // Create regexp literal using runtime function
3865
- // Result will be in eax.
3866
- __ push(ecx);
3867
- __ push(Immediate(Smi::FromInt(instr->hydrogen()->literal_index())));
3868
- __ push(Immediate(instr->hydrogen()->pattern()));
3869
- __ push(Immediate(instr->hydrogen()->flags()));
3870
- CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr, false);
3871
- __ mov(ebx, eax);
3872
-
3873
- __ bind(&materialized);
3874
- int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
3875
- Label allocated, runtime_allocate;
3876
- __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
3877
- __ jmp(&allocated);
3878
-
3879
- __ bind(&runtime_allocate);
3880
- __ push(ebx);
3881
- __ push(Immediate(Smi::FromInt(size)));
3882
- CallRuntime(Runtime::kAllocateInNewSpace, 1, instr, false);
3883
- __ pop(ebx);
3884
-
3885
- __ bind(&allocated);
3886
- // Copy the content into the newly allocated memory.
3887
- // (Unroll copy loop once for better throughput).
3888
- for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
3889
- __ mov(edx, FieldOperand(ebx, i));
3890
- __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
3891
- __ mov(FieldOperand(eax, i), edx);
3892
- __ mov(FieldOperand(eax, i + kPointerSize), ecx);
3893
- }
3894
- if ((size % (2 * kPointerSize)) != 0) {
3895
- __ mov(edx, FieldOperand(ebx, size - kPointerSize));
3896
- __ mov(FieldOperand(eax, size - kPointerSize), edx);
3897
- }
3898
- }
3899
-
3900
-
3901
- void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
3902
- // Use the fast case closure allocation code that allocates in new
3903
- // space for nested functions that don't need literals cloning.
3904
- Handle<SharedFunctionInfo> shared_info = instr->shared_info();
3905
- bool pretenure = instr->hydrogen()->pretenure();
3906
- if (!pretenure && shared_info->num_literals() == 0) {
3907
- FastNewClosureStub stub(
3908
- shared_info->strict_mode() ? kStrictMode : kNonStrictMode);
3909
- __ push(Immediate(shared_info));
3910
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
3911
- } else {
3912
- __ push(Operand(ebp, StandardFrameConstants::kContextOffset));
3913
- __ push(Immediate(shared_info));
3914
- __ push(Immediate(pretenure
3915
- ? factory()->true_value()
3916
- : factory()->false_value()));
3917
- CallRuntime(Runtime::kNewClosure, 3, instr, false);
3918
- }
3919
- }
3920
-
3921
-
3922
- void LCodeGen::DoTypeof(LTypeof* instr) {
3923
- LOperand* input = instr->InputAt(0);
3924
- if (input->IsConstantOperand()) {
3925
- __ push(ToImmediate(input));
3926
- } else {
3927
- __ push(ToOperand(input));
3928
- }
3929
- CallRuntime(Runtime::kTypeof, 1, instr, false);
3930
- }
3931
-
3932
-
3933
- void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3934
- Register input = ToRegister(instr->InputAt(0));
3935
- Register result = ToRegister(instr->result());
3936
- Label true_label;
3937
- Label false_label;
3938
- NearLabel done;
3939
-
3940
- Condition final_branch_condition = EmitTypeofIs(&true_label,
3941
- &false_label,
3942
- input,
3943
- instr->type_literal());
3944
- __ j(final_branch_condition, &true_label);
3945
- __ bind(&false_label);
3946
- __ mov(result, factory()->false_value());
3947
- __ jmp(&done);
3948
-
3949
- __ bind(&true_label);
3950
- __ mov(result, factory()->true_value());
3951
-
3952
- __ bind(&done);
3953
- }
3954
-
3955
-
3956
- void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
3957
- Register input = ToRegister(instr->InputAt(0));
3958
- int true_block = chunk_->LookupDestination(instr->true_block_id());
3959
- int false_block = chunk_->LookupDestination(instr->false_block_id());
3960
- Label* true_label = chunk_->GetAssemblyLabel(true_block);
3961
- Label* false_label = chunk_->GetAssemblyLabel(false_block);
3962
-
3963
- Condition final_branch_condition = EmitTypeofIs(true_label,
3964
- false_label,
3965
- input,
3966
- instr->type_literal());
3967
-
3968
- EmitBranch(true_block, false_block, final_branch_condition);
3969
- }
3970
-
3971
-
3972
- Condition LCodeGen::EmitTypeofIs(Label* true_label,
3973
- Label* false_label,
3974
- Register input,
3975
- Handle<String> type_name) {
3976
- Condition final_branch_condition = no_condition;
3977
- if (type_name->Equals(heap()->number_symbol())) {
3978
- __ JumpIfSmi(input, true_label);
3979
- __ cmp(FieldOperand(input, HeapObject::kMapOffset),
3980
- factory()->heap_number_map());
3981
- final_branch_condition = equal;
3982
-
3983
- } else if (type_name->Equals(heap()->string_symbol())) {
3984
- __ JumpIfSmi(input, false_label);
3985
- __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
3986
- __ j(above_equal, false_label);
3987
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
3988
- 1 << Map::kIsUndetectable);
3989
- final_branch_condition = zero;
3990
-
3991
- } else if (type_name->Equals(heap()->boolean_symbol())) {
3992
- __ cmp(input, factory()->true_value());
3993
- __ j(equal, true_label);
3994
- __ cmp(input, factory()->false_value());
3995
- final_branch_condition = equal;
3996
-
3997
- } else if (type_name->Equals(heap()->undefined_symbol())) {
3998
- __ cmp(input, factory()->undefined_value());
3999
- __ j(equal, true_label);
4000
- __ JumpIfSmi(input, false_label);
4001
- // Check for undetectable objects => true.
4002
- __ mov(input, FieldOperand(input, HeapObject::kMapOffset));
4003
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
4004
- 1 << Map::kIsUndetectable);
4005
- final_branch_condition = not_zero;
4006
-
4007
- } else if (type_name->Equals(heap()->function_symbol())) {
4008
- __ JumpIfSmi(input, false_label);
4009
- __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
4010
- __ j(equal, true_label);
4011
- // Regular expressions => 'function' (they are callable).
4012
- __ CmpInstanceType(input, JS_REGEXP_TYPE);
4013
- final_branch_condition = equal;
4014
-
4015
- } else if (type_name->Equals(heap()->object_symbol())) {
4016
- __ JumpIfSmi(input, false_label);
4017
- __ cmp(input, factory()->null_value());
4018
- __ j(equal, true_label);
4019
- // Regular expressions => 'function', not 'object'.
4020
- __ CmpObjectType(input, FIRST_JS_OBJECT_TYPE, input);
4021
- __ j(below, false_label);
4022
- __ CmpInstanceType(input, FIRST_FUNCTION_CLASS_TYPE);
4023
- __ j(above_equal, false_label);
4024
- // Check for undetectable objects => false.
4025
- __ test_b(FieldOperand(input, Map::kBitFieldOffset),
4026
- 1 << Map::kIsUndetectable);
4027
- final_branch_condition = zero;
4028
-
4029
- } else {
4030
- final_branch_condition = not_equal;
4031
- __ jmp(false_label);
4032
- // A dead branch instruction will be generated after this point.
4033
- }
4034
-
4035
- return final_branch_condition;
4036
- }
4037
-
4038
-
4039
- void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
4040
- Register result = ToRegister(instr->result());
4041
- NearLabel true_label;
4042
- NearLabel false_label;
4043
- NearLabel done;
4044
-
4045
- EmitIsConstructCall(result);
4046
- __ j(equal, &true_label);
4047
-
4048
- __ mov(result, factory()->false_value());
4049
- __ jmp(&done);
4050
-
4051
- __ bind(&true_label);
4052
- __ mov(result, factory()->true_value());
4053
-
4054
- __ bind(&done);
4055
- }
4056
-
4057
-
4058
- void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4059
- Register temp = ToRegister(instr->TempAt(0));
4060
- int true_block = chunk_->LookupDestination(instr->true_block_id());
4061
- int false_block = chunk_->LookupDestination(instr->false_block_id());
4062
-
4063
- EmitIsConstructCall(temp);
4064
- EmitBranch(true_block, false_block, equal);
4065
- }
4066
-
4067
-
4068
- void LCodeGen::EmitIsConstructCall(Register temp) {
4069
- // Get the frame pointer for the calling frame.
4070
- __ mov(temp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4071
-
4072
- // Skip the arguments adaptor frame if it exists.
4073
- NearLabel check_frame_marker;
4074
- __ cmp(Operand(temp, StandardFrameConstants::kContextOffset),
4075
- Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4076
- __ j(not_equal, &check_frame_marker);
4077
- __ mov(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));
4078
-
4079
- // Check the marker in the calling frame.
4080
- __ bind(&check_frame_marker);
4081
- __ cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
4082
- Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
4083
- }
4084
-
4085
-
4086
- void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
4087
- // No code for lazy bailout instruction. Used to capture environment after a
4088
- // call for populating the safepoint data with deoptimization data.
4089
- }
4090
-
4091
-
4092
- void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
4093
- DeoptimizeIf(no_condition, instr->environment());
4094
- }
4095
-
4096
-
4097
- void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
4098
- LOperand* obj = instr->object();
4099
- LOperand* key = instr->key();
4100
- __ push(ToOperand(obj));
4101
- if (key->IsConstantOperand()) {
4102
- __ push(ToImmediate(key));
4103
- } else {
4104
- __ push(ToOperand(key));
4105
- }
4106
- ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
4107
- LPointerMap* pointers = instr->pointer_map();
4108
- LEnvironment* env = instr->deoptimization_environment();
4109
- RecordPosition(pointers->position());
4110
- RegisterEnvironmentForDeoptimization(env);
4111
- // Create safepoint generator that will also ensure enough space in the
4112
- // reloc info for patching in deoptimization (since this is invoking a
4113
- // builtin)
4114
- SafepointGenerator safepoint_generator(this,
4115
- pointers,
4116
- env->deoptimization_index());
4117
- __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
4118
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
4119
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
4120
- }
4121
-
4122
-
4123
- void LCodeGen::DoStackCheck(LStackCheck* instr) {
4124
- // Perform stack overflow check.
4125
- NearLabel done;
4126
- ExternalReference stack_limit =
4127
- ExternalReference::address_of_stack_limit(isolate());
4128
- __ cmp(esp, Operand::StaticVariable(stack_limit));
4129
- __ j(above_equal, &done);
4130
-
4131
- StackCheckStub stub;
4132
- CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr, false);
4133
- __ bind(&done);
4134
- }
4135
-
4136
-
4137
- void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4138
- // This is a pseudo-instruction that ensures that the environment here is
4139
- // properly registered for deoptimization and records the assembler's PC
4140
- // offset.
4141
- LEnvironment* environment = instr->environment();
4142
- environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4143
- instr->SpilledDoubleRegisterArray());
4144
-
4145
- // If the environment were already registered, we would have no way of
4146
- // backpatching it with the spill slot operands.
4147
- ASSERT(!environment->HasBeenRegistered());
4148
- RegisterEnvironmentForDeoptimization(environment);
4149
- ASSERT(osr_pc_offset_ == -1);
4150
- osr_pc_offset_ = masm()->pc_offset();
4151
- }
4152
-
4153
-
4154
- #undef __
4155
-
4156
- } } // namespace v8::internal
4157
-
4158
- #endif // V8_TARGET_ARCH_IA32