therubyracer 0.5.0-x86-linux → 0.11.0beta5-x86-linux

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of therubyracer might be problematic. Click here for more details.

Files changed (721) [hide] [show]
  1. data/.gitignore +23 -11
  2. data/.travis.yml +10 -0
  3. data/Changelog.md +242 -0
  4. data/Gemfile +16 -0
  5. data/README.md +185 -0
  6. data/Rakefile +42 -51
  7. data/benchmarks.rb +217 -0
  8. data/ext/v8/accessor.cc +181 -0
  9. data/ext/v8/array.cc +26 -0
  10. data/ext/v8/backref.cc +56 -0
  11. data/ext/v8/build.rb +52 -0
  12. data/ext/v8/constants.cc +34 -0
  13. data/ext/v8/constraints.cc +52 -0
  14. data/ext/v8/context.cc +130 -0
  15. data/ext/v8/date.cc +18 -0
  16. data/ext/v8/exception.cc +38 -0
  17. data/ext/v8/extconf.rb +16 -29
  18. data/ext/v8/external.cc +43 -0
  19. data/ext/v8/function.cc +58 -0
  20. data/ext/v8/gc.cc +43 -0
  21. data/ext/v8/handles.cc +34 -0
  22. data/ext/v8/heap.cc +31 -0
  23. data/ext/v8/init.cc +39 -0
  24. data/ext/v8/init.so +0 -0
  25. data/ext/v8/invocation.cc +86 -0
  26. data/ext/v8/locker.cc +77 -0
  27. data/ext/v8/message.cc +51 -0
  28. data/ext/v8/object.cc +334 -0
  29. data/ext/v8/primitive.cc +8 -0
  30. data/ext/v8/rr.cc +83 -0
  31. data/ext/v8/rr.h +883 -0
  32. data/ext/v8/script.cc +80 -0
  33. data/ext/v8/signature.cc +18 -0
  34. data/ext/v8/stack.cc +75 -0
  35. data/ext/v8/string.cc +47 -0
  36. data/ext/v8/template.cc +175 -0
  37. data/ext/v8/trycatch.cc +86 -0
  38. data/ext/v8/v8.cc +87 -0
  39. data/ext/v8/value.cc +239 -0
  40. data/lib/v8.rb +30 -9
  41. data/lib/v8/access.rb +5 -0
  42. data/lib/v8/access/indices.rb +40 -0
  43. data/lib/v8/access/invocation.rb +47 -0
  44. data/lib/v8/access/names.rb +65 -0
  45. data/lib/v8/array.rb +26 -0
  46. data/lib/v8/context.rb +217 -75
  47. data/lib/v8/conversion.rb +35 -0
  48. data/lib/v8/conversion/array.rb +11 -0
  49. data/lib/v8/conversion/class.rb +120 -0
  50. data/lib/v8/conversion/code.rb +38 -0
  51. data/lib/v8/conversion/fundamental.rb +11 -0
  52. data/lib/v8/conversion/hash.rb +11 -0
  53. data/lib/v8/conversion/indentity.rb +31 -0
  54. data/lib/v8/conversion/method.rb +26 -0
  55. data/lib/v8/conversion/object.rb +28 -0
  56. data/lib/v8/conversion/primitive.rb +7 -0
  57. data/lib/v8/conversion/proc.rb +5 -0
  58. data/lib/v8/conversion/reference.rb +16 -0
  59. data/lib/v8/conversion/string.rb +12 -0
  60. data/lib/v8/conversion/symbol.rb +7 -0
  61. data/lib/v8/conversion/time.rb +13 -0
  62. data/lib/v8/error.rb +25 -0
  63. data/lib/v8/error/protect.rb +20 -0
  64. data/lib/v8/error/try.rb +15 -0
  65. data/lib/v8/function.rb +28 -0
  66. data/lib/v8/object.rb +69 -28
  67. data/lib/v8/util/weakcell.rb +29 -0
  68. data/lib/v8/version.rb +3 -0
  69. data/spec/c/array_spec.rb +17 -0
  70. data/spec/c/constants_spec.rb +20 -0
  71. data/spec/c/exception_spec.rb +26 -0
  72. data/spec/c/external_spec.rb +9 -0
  73. data/spec/c/function_spec.rb +46 -0
  74. data/spec/c/handles_spec.rb +35 -0
  75. data/spec/c/locker_spec.rb +38 -0
  76. data/spec/c/object_spec.rb +46 -0
  77. data/spec/c/script_spec.rb +28 -0
  78. data/spec/c/string_spec.rb +16 -0
  79. data/spec/c/template_spec.rb +30 -0
  80. data/spec/c/trycatch_spec.rb +51 -0
  81. data/spec/mem/blunt_spec.rb +42 -0
  82. data/spec/redjs_spec.rb +10 -0
  83. data/spec/spec_helper.rb +43 -12
  84. data/spec/threading_spec.rb +52 -0
  85. data/spec/v8/context_spec.rb +19 -0
  86. data/spec/v8/conversion_spec.rb +9 -0
  87. data/spec/v8/error_spec.rb +21 -0
  88. data/spec/v8/function_spec.rb +9 -0
  89. data/spec/v8/object_spec.rb +15 -0
  90. data/thefrontside.png +0 -0
  91. data/therubyracer.gemspec +15 -676
  92. metadata +146 -680
  93. data/.gitmodules +0 -3
  94. data/Doxyfile +0 -1514
  95. data/History.txt +0 -51
  96. data/README.rdoc +0 -158
  97. data/docs/data_conversion.txt +0 -18
  98. data/ext/v8/callbacks.cpp +0 -160
  99. data/ext/v8/callbacks.h +0 -14
  100. data/ext/v8/convert_ruby.cpp +0 -8
  101. data/ext/v8/convert_ruby.h +0 -99
  102. data/ext/v8/convert_string.cpp +0 -10
  103. data/ext/v8/convert_string.h +0 -73
  104. data/ext/v8/convert_v8.cpp +0 -9
  105. data/ext/v8/convert_v8.h +0 -121
  106. data/ext/v8/converters.cpp +0 -83
  107. data/ext/v8/converters.h +0 -23
  108. data/ext/v8/upstream/2.0.6/.gitignore +0 -26
  109. data/ext/v8/upstream/2.0.6/AUTHORS +0 -23
  110. data/ext/v8/upstream/2.0.6/ChangeLog +0 -1479
  111. data/ext/v8/upstream/2.0.6/LICENSE +0 -55
  112. data/ext/v8/upstream/2.0.6/SConstruct +0 -1028
  113. data/ext/v8/upstream/2.0.6/include/v8-debug.h +0 -275
  114. data/ext/v8/upstream/2.0.6/include/v8.h +0 -3236
  115. data/ext/v8/upstream/2.0.6/src/SConscript +0 -283
  116. data/ext/v8/upstream/2.0.6/src/accessors.cc +0 -695
  117. data/ext/v8/upstream/2.0.6/src/accessors.h +0 -114
  118. data/ext/v8/upstream/2.0.6/src/allocation.cc +0 -198
  119. data/ext/v8/upstream/2.0.6/src/allocation.h +0 -169
  120. data/ext/v8/upstream/2.0.6/src/api.cc +0 -3831
  121. data/ext/v8/upstream/2.0.6/src/api.h +0 -479
  122. data/ext/v8/upstream/2.0.6/src/apinatives.js +0 -110
  123. data/ext/v8/upstream/2.0.6/src/apiutils.h +0 -69
  124. data/ext/v8/upstream/2.0.6/src/arguments.h +0 -97
  125. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm-inl.h +0 -277
  126. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.cc +0 -1821
  127. data/ext/v8/upstream/2.0.6/src/arm/assembler-arm.h +0 -1027
  128. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2-inl.h +0 -267
  129. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.cc +0 -1821
  130. data/ext/v8/upstream/2.0.6/src/arm/assembler-thumb2.h +0 -1027
  131. data/ext/v8/upstream/2.0.6/src/arm/builtins-arm.cc +0 -1271
  132. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm-inl.h +0 -74
  133. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.cc +0 -6682
  134. data/ext/v8/upstream/2.0.6/src/arm/codegen-arm.h +0 -535
  135. data/ext/v8/upstream/2.0.6/src/arm/constants-arm.cc +0 -112
  136. data/ext/v8/upstream/2.0.6/src/arm/constants-arm.h +0 -347
  137. data/ext/v8/upstream/2.0.6/src/arm/cpu-arm.cc +0 -132
  138. data/ext/v8/upstream/2.0.6/src/arm/debug-arm.cc +0 -213
  139. data/ext/v8/upstream/2.0.6/src/arm/disasm-arm.cc +0 -1166
  140. data/ext/v8/upstream/2.0.6/src/arm/fast-codegen-arm.cc +0 -1698
  141. data/ext/v8/upstream/2.0.6/src/arm/frames-arm.cc +0 -123
  142. data/ext/v8/upstream/2.0.6/src/arm/frames-arm.h +0 -162
  143. data/ext/v8/upstream/2.0.6/src/arm/ic-arm.cc +0 -849
  144. data/ext/v8/upstream/2.0.6/src/arm/jump-target-arm.cc +0 -238
  145. data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.cc +0 -1259
  146. data/ext/v8/upstream/2.0.6/src/arm/macro-assembler-arm.h +0 -423
  147. data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.cc +0 -1266
  148. data/ext/v8/upstream/2.0.6/src/arm/regexp-macro-assembler-arm.h +0 -282
  149. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm-inl.h +0 -103
  150. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.cc +0 -59
  151. data/ext/v8/upstream/2.0.6/src/arm/register-allocator-arm.h +0 -43
  152. data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.cc +0 -2264
  153. data/ext/v8/upstream/2.0.6/src/arm/simulator-arm.h +0 -306
  154. data/ext/v8/upstream/2.0.6/src/arm/stub-cache-arm.cc +0 -1516
  155. data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.cc +0 -412
  156. data/ext/v8/upstream/2.0.6/src/arm/virtual-frame-arm.h +0 -532
  157. data/ext/v8/upstream/2.0.6/src/array.js +0 -1154
  158. data/ext/v8/upstream/2.0.6/src/assembler.cc +0 -772
  159. data/ext/v8/upstream/2.0.6/src/assembler.h +0 -525
  160. data/ext/v8/upstream/2.0.6/src/ast.cc +0 -512
  161. data/ext/v8/upstream/2.0.6/src/ast.h +0 -1820
  162. data/ext/v8/upstream/2.0.6/src/bootstrapper.cc +0 -1680
  163. data/ext/v8/upstream/2.0.6/src/bootstrapper.h +0 -103
  164. data/ext/v8/upstream/2.0.6/src/builtins.cc +0 -851
  165. data/ext/v8/upstream/2.0.6/src/builtins.h +0 -245
  166. data/ext/v8/upstream/2.0.6/src/bytecodes-irregexp.h +0 -104
  167. data/ext/v8/upstream/2.0.6/src/char-predicates-inl.h +0 -86
  168. data/ext/v8/upstream/2.0.6/src/char-predicates.h +0 -65
  169. data/ext/v8/upstream/2.0.6/src/checks.cc +0 -100
  170. data/ext/v8/upstream/2.0.6/src/checks.h +0 -284
  171. data/ext/v8/upstream/2.0.6/src/code-stubs.cc +0 -164
  172. data/ext/v8/upstream/2.0.6/src/code-stubs.h +0 -164
  173. data/ext/v8/upstream/2.0.6/src/code.h +0 -68
  174. data/ext/v8/upstream/2.0.6/src/codegen-inl.h +0 -88
  175. data/ext/v8/upstream/2.0.6/src/codegen.cc +0 -504
  176. data/ext/v8/upstream/2.0.6/src/codegen.h +0 -522
  177. data/ext/v8/upstream/2.0.6/src/compilation-cache.cc +0 -490
  178. data/ext/v8/upstream/2.0.6/src/compilation-cache.h +0 -98
  179. data/ext/v8/upstream/2.0.6/src/compiler.cc +0 -1132
  180. data/ext/v8/upstream/2.0.6/src/compiler.h +0 -107
  181. data/ext/v8/upstream/2.0.6/src/contexts.cc +0 -256
  182. data/ext/v8/upstream/2.0.6/src/contexts.h +0 -345
  183. data/ext/v8/upstream/2.0.6/src/conversions-inl.h +0 -95
  184. data/ext/v8/upstream/2.0.6/src/conversions.cc +0 -709
  185. data/ext/v8/upstream/2.0.6/src/conversions.h +0 -118
  186. data/ext/v8/upstream/2.0.6/src/counters.cc +0 -78
  187. data/ext/v8/upstream/2.0.6/src/counters.h +0 -239
  188. data/ext/v8/upstream/2.0.6/src/cpu.h +0 -65
  189. data/ext/v8/upstream/2.0.6/src/d8-debug.cc +0 -345
  190. data/ext/v8/upstream/2.0.6/src/d8-debug.h +0 -155
  191. data/ext/v8/upstream/2.0.6/src/d8-posix.cc +0 -675
  192. data/ext/v8/upstream/2.0.6/src/d8-readline.cc +0 -128
  193. data/ext/v8/upstream/2.0.6/src/d8-windows.cc +0 -42
  194. data/ext/v8/upstream/2.0.6/src/d8.cc +0 -776
  195. data/ext/v8/upstream/2.0.6/src/d8.h +0 -225
  196. data/ext/v8/upstream/2.0.6/src/d8.js +0 -1625
  197. data/ext/v8/upstream/2.0.6/src/date-delay.js +0 -1138
  198. data/ext/v8/upstream/2.0.6/src/dateparser-inl.h +0 -114
  199. data/ext/v8/upstream/2.0.6/src/dateparser.cc +0 -186
  200. data/ext/v8/upstream/2.0.6/src/dateparser.h +0 -240
  201. data/ext/v8/upstream/2.0.6/src/debug-agent.cc +0 -425
  202. data/ext/v8/upstream/2.0.6/src/debug-agent.h +0 -129
  203. data/ext/v8/upstream/2.0.6/src/debug-delay.js +0 -2073
  204. data/ext/v8/upstream/2.0.6/src/debug.cc +0 -2751
  205. data/ext/v8/upstream/2.0.6/src/debug.h +0 -866
  206. data/ext/v8/upstream/2.0.6/src/disasm.h +0 -77
  207. data/ext/v8/upstream/2.0.6/src/disassembler.cc +0 -318
  208. data/ext/v8/upstream/2.0.6/src/disassembler.h +0 -56
  209. data/ext/v8/upstream/2.0.6/src/dtoa-config.c +0 -91
  210. data/ext/v8/upstream/2.0.6/src/execution.cc +0 -701
  211. data/ext/v8/upstream/2.0.6/src/execution.h +0 -312
  212. data/ext/v8/upstream/2.0.6/src/factory.cc +0 -957
  213. data/ext/v8/upstream/2.0.6/src/factory.h +0 -393
  214. data/ext/v8/upstream/2.0.6/src/fast-codegen.cc +0 -725
  215. data/ext/v8/upstream/2.0.6/src/fast-codegen.h +0 -371
  216. data/ext/v8/upstream/2.0.6/src/flag-definitions.h +0 -426
  217. data/ext/v8/upstream/2.0.6/src/flags.cc +0 -555
  218. data/ext/v8/upstream/2.0.6/src/flags.h +0 -81
  219. data/ext/v8/upstream/2.0.6/src/frame-element.cc +0 -45
  220. data/ext/v8/upstream/2.0.6/src/frame-element.h +0 -235
  221. data/ext/v8/upstream/2.0.6/src/frames-inl.h +0 -215
  222. data/ext/v8/upstream/2.0.6/src/frames.cc +0 -749
  223. data/ext/v8/upstream/2.0.6/src/frames.h +0 -659
  224. data/ext/v8/upstream/2.0.6/src/func-name-inferrer.cc +0 -76
  225. data/ext/v8/upstream/2.0.6/src/func-name-inferrer.h +0 -135
  226. data/ext/v8/upstream/2.0.6/src/global-handles.cc +0 -516
  227. data/ext/v8/upstream/2.0.6/src/global-handles.h +0 -180
  228. data/ext/v8/upstream/2.0.6/src/globals.h +0 -608
  229. data/ext/v8/upstream/2.0.6/src/handles-inl.h +0 -76
  230. data/ext/v8/upstream/2.0.6/src/handles.cc +0 -811
  231. data/ext/v8/upstream/2.0.6/src/handles.h +0 -367
  232. data/ext/v8/upstream/2.0.6/src/hashmap.cc +0 -226
  233. data/ext/v8/upstream/2.0.6/src/hashmap.h +0 -120
  234. data/ext/v8/upstream/2.0.6/src/heap-inl.h +0 -407
  235. data/ext/v8/upstream/2.0.6/src/heap-profiler.cc +0 -695
  236. data/ext/v8/upstream/2.0.6/src/heap-profiler.h +0 -277
  237. data/ext/v8/upstream/2.0.6/src/heap.cc +0 -4204
  238. data/ext/v8/upstream/2.0.6/src/heap.h +0 -1704
  239. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32-inl.h +0 -325
  240. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.cc +0 -2375
  241. data/ext/v8/upstream/2.0.6/src/ia32/assembler-ia32.h +0 -914
  242. data/ext/v8/upstream/2.0.6/src/ia32/builtins-ia32.cc +0 -1222
  243. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32-inl.h +0 -46
  244. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.cc +0 -9770
  245. data/ext/v8/upstream/2.0.6/src/ia32/codegen-ia32.h +0 -834
  246. data/ext/v8/upstream/2.0.6/src/ia32/cpu-ia32.cc +0 -79
  247. data/ext/v8/upstream/2.0.6/src/ia32/debug-ia32.cc +0 -208
  248. data/ext/v8/upstream/2.0.6/src/ia32/disasm-ia32.cc +0 -1357
  249. data/ext/v8/upstream/2.0.6/src/ia32/fast-codegen-ia32.cc +0 -1813
  250. data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.cc +0 -111
  251. data/ext/v8/upstream/2.0.6/src/ia32/frames-ia32.h +0 -135
  252. data/ext/v8/upstream/2.0.6/src/ia32/ic-ia32.cc +0 -1490
  253. data/ext/v8/upstream/2.0.6/src/ia32/jump-target-ia32.cc +0 -432
  254. data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.cc +0 -1517
  255. data/ext/v8/upstream/2.0.6/src/ia32/macro-assembler-ia32.h +0 -528
  256. data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.cc +0 -1219
  257. data/ext/v8/upstream/2.0.6/src/ia32/regexp-macro-assembler-ia32.h +0 -230
  258. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32-inl.h +0 -82
  259. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.cc +0 -99
  260. data/ext/v8/upstream/2.0.6/src/ia32/register-allocator-ia32.h +0 -43
  261. data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.cc +0 -30
  262. data/ext/v8/upstream/2.0.6/src/ia32/simulator-ia32.h +0 -62
  263. data/ext/v8/upstream/2.0.6/src/ia32/stub-cache-ia32.cc +0 -1961
  264. data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.cc +0 -1105
  265. data/ext/v8/upstream/2.0.6/src/ia32/virtual-frame-ia32.h +0 -580
  266. data/ext/v8/upstream/2.0.6/src/ic-inl.h +0 -93
  267. data/ext/v8/upstream/2.0.6/src/ic.cc +0 -1426
  268. data/ext/v8/upstream/2.0.6/src/ic.h +0 -443
  269. data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.cc +0 -646
  270. data/ext/v8/upstream/2.0.6/src/interpreter-irregexp.h +0 -48
  271. data/ext/v8/upstream/2.0.6/src/json-delay.js +0 -254
  272. data/ext/v8/upstream/2.0.6/src/jsregexp.cc +0 -5234
  273. data/ext/v8/upstream/2.0.6/src/jsregexp.h +0 -1439
  274. data/ext/v8/upstream/2.0.6/src/jump-target-inl.h +0 -49
  275. data/ext/v8/upstream/2.0.6/src/jump-target.cc +0 -383
  276. data/ext/v8/upstream/2.0.6/src/jump-target.h +0 -280
  277. data/ext/v8/upstream/2.0.6/src/list-inl.h +0 -166
  278. data/ext/v8/upstream/2.0.6/src/list.h +0 -158
  279. data/ext/v8/upstream/2.0.6/src/log-inl.h +0 -126
  280. data/ext/v8/upstream/2.0.6/src/log-utils.cc +0 -503
  281. data/ext/v8/upstream/2.0.6/src/log-utils.h +0 -292
  282. data/ext/v8/upstream/2.0.6/src/log.cc +0 -1457
  283. data/ext/v8/upstream/2.0.6/src/log.h +0 -371
  284. data/ext/v8/upstream/2.0.6/src/macro-assembler.h +0 -93
  285. data/ext/v8/upstream/2.0.6/src/macros.py +0 -137
  286. data/ext/v8/upstream/2.0.6/src/mark-compact.cc +0 -2007
  287. data/ext/v8/upstream/2.0.6/src/mark-compact.h +0 -442
  288. data/ext/v8/upstream/2.0.6/src/math.js +0 -263
  289. data/ext/v8/upstream/2.0.6/src/memory.h +0 -74
  290. data/ext/v8/upstream/2.0.6/src/messages.cc +0 -177
  291. data/ext/v8/upstream/2.0.6/src/messages.h +0 -112
  292. data/ext/v8/upstream/2.0.6/src/messages.js +0 -937
  293. data/ext/v8/upstream/2.0.6/src/mirror-delay.js +0 -2332
  294. data/ext/v8/upstream/2.0.6/src/mksnapshot.cc +0 -169
  295. data/ext/v8/upstream/2.0.6/src/natives.h +0 -63
  296. data/ext/v8/upstream/2.0.6/src/objects-debug.cc +0 -1317
  297. data/ext/v8/upstream/2.0.6/src/objects-inl.h +0 -3044
  298. data/ext/v8/upstream/2.0.6/src/objects.cc +0 -8306
  299. data/ext/v8/upstream/2.0.6/src/objects.h +0 -4960
  300. data/ext/v8/upstream/2.0.6/src/oprofile-agent.cc +0 -116
  301. data/ext/v8/upstream/2.0.6/src/oprofile-agent.h +0 -69
  302. data/ext/v8/upstream/2.0.6/src/parser.cc +0 -4810
  303. data/ext/v8/upstream/2.0.6/src/parser.h +0 -195
  304. data/ext/v8/upstream/2.0.6/src/platform-freebsd.cc +0 -645
  305. data/ext/v8/upstream/2.0.6/src/platform-linux.cc +0 -808
  306. data/ext/v8/upstream/2.0.6/src/platform-macos.cc +0 -643
  307. data/ext/v8/upstream/2.0.6/src/platform-nullos.cc +0 -454
  308. data/ext/v8/upstream/2.0.6/src/platform-openbsd.cc +0 -597
  309. data/ext/v8/upstream/2.0.6/src/platform-posix.cc +0 -380
  310. data/ext/v8/upstream/2.0.6/src/platform-win32.cc +0 -1908
  311. data/ext/v8/upstream/2.0.6/src/platform.h +0 -556
  312. data/ext/v8/upstream/2.0.6/src/prettyprinter.cc +0 -1511
  313. data/ext/v8/upstream/2.0.6/src/prettyprinter.h +0 -219
  314. data/ext/v8/upstream/2.0.6/src/property.cc +0 -96
  315. data/ext/v8/upstream/2.0.6/src/property.h +0 -327
  316. data/ext/v8/upstream/2.0.6/src/regexp-delay.js +0 -406
  317. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp-inl.h +0 -78
  318. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.cc +0 -464
  319. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-irregexp.h +0 -141
  320. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.cc +0 -356
  321. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler-tracer.h +0 -103
  322. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.cc +0 -240
  323. data/ext/v8/upstream/2.0.6/src/regexp-macro-assembler.h +0 -220
  324. data/ext/v8/upstream/2.0.6/src/regexp-stack.cc +0 -103
  325. data/ext/v8/upstream/2.0.6/src/regexp-stack.h +0 -123
  326. data/ext/v8/upstream/2.0.6/src/register-allocator-inl.h +0 -74
  327. data/ext/v8/upstream/2.0.6/src/register-allocator.cc +0 -100
  328. data/ext/v8/upstream/2.0.6/src/register-allocator.h +0 -295
  329. data/ext/v8/upstream/2.0.6/src/rewriter.cc +0 -855
  330. data/ext/v8/upstream/2.0.6/src/rewriter.h +0 -54
  331. data/ext/v8/upstream/2.0.6/src/runtime.cc +0 -8163
  332. data/ext/v8/upstream/2.0.6/src/runtime.h +0 -432
  333. data/ext/v8/upstream/2.0.6/src/runtime.js +0 -626
  334. data/ext/v8/upstream/2.0.6/src/scanner.cc +0 -1098
  335. data/ext/v8/upstream/2.0.6/src/scanner.h +0 -425
  336. data/ext/v8/upstream/2.0.6/src/scopeinfo.cc +0 -649
  337. data/ext/v8/upstream/2.0.6/src/scopeinfo.h +0 -236
  338. data/ext/v8/upstream/2.0.6/src/scopes.cc +0 -963
  339. data/ext/v8/upstream/2.0.6/src/scopes.h +0 -401
  340. data/ext/v8/upstream/2.0.6/src/serialize.cc +0 -1260
  341. data/ext/v8/upstream/2.0.6/src/serialize.h +0 -404
  342. data/ext/v8/upstream/2.0.6/src/shell.h +0 -55
  343. data/ext/v8/upstream/2.0.6/src/simulator.h +0 -41
  344. data/ext/v8/upstream/2.0.6/src/smart-pointer.h +0 -109
  345. data/ext/v8/upstream/2.0.6/src/snapshot-common.cc +0 -97
  346. data/ext/v8/upstream/2.0.6/src/snapshot-empty.cc +0 -40
  347. data/ext/v8/upstream/2.0.6/src/snapshot.h +0 -59
  348. data/ext/v8/upstream/2.0.6/src/spaces-inl.h +0 -372
  349. data/ext/v8/upstream/2.0.6/src/spaces.cc +0 -2864
  350. data/ext/v8/upstream/2.0.6/src/spaces.h +0 -2072
  351. data/ext/v8/upstream/2.0.6/src/string-stream.cc +0 -584
  352. data/ext/v8/upstream/2.0.6/src/string-stream.h +0 -189
  353. data/ext/v8/upstream/2.0.6/src/string.js +0 -901
  354. data/ext/v8/upstream/2.0.6/src/stub-cache.cc +0 -1108
  355. data/ext/v8/upstream/2.0.6/src/stub-cache.h +0 -578
  356. data/ext/v8/upstream/2.0.6/src/third_party/dtoa/COPYING +0 -15
  357. data/ext/v8/upstream/2.0.6/src/third_party/dtoa/dtoa.c +0 -3330
  358. data/ext/v8/upstream/2.0.6/src/third_party/valgrind/valgrind.h +0 -3925
  359. data/ext/v8/upstream/2.0.6/src/token.cc +0 -56
  360. data/ext/v8/upstream/2.0.6/src/token.h +0 -270
  361. data/ext/v8/upstream/2.0.6/src/top.cc +0 -991
  362. data/ext/v8/upstream/2.0.6/src/top.h +0 -459
  363. data/ext/v8/upstream/2.0.6/src/unicode-inl.h +0 -238
  364. data/ext/v8/upstream/2.0.6/src/unicode.cc +0 -749
  365. data/ext/v8/upstream/2.0.6/src/unicode.h +0 -279
  366. data/ext/v8/upstream/2.0.6/src/uri.js +0 -415
  367. data/ext/v8/upstream/2.0.6/src/usage-analyzer.cc +0 -426
  368. data/ext/v8/upstream/2.0.6/src/usage-analyzer.h +0 -40
  369. data/ext/v8/upstream/2.0.6/src/utils.cc +0 -322
  370. data/ext/v8/upstream/2.0.6/src/utils.h +0 -592
  371. data/ext/v8/upstream/2.0.6/src/v8-counters.cc +0 -55
  372. data/ext/v8/upstream/2.0.6/src/v8-counters.h +0 -198
  373. data/ext/v8/upstream/2.0.6/src/v8.cc +0 -193
  374. data/ext/v8/upstream/2.0.6/src/v8.h +0 -119
  375. data/ext/v8/upstream/2.0.6/src/v8natives.js +0 -846
  376. data/ext/v8/upstream/2.0.6/src/v8threads.cc +0 -450
  377. data/ext/v8/upstream/2.0.6/src/v8threads.h +0 -144
  378. data/ext/v8/upstream/2.0.6/src/variables.cc +0 -163
  379. data/ext/v8/upstream/2.0.6/src/variables.h +0 -235
  380. data/ext/v8/upstream/2.0.6/src/version.cc +0 -88
  381. data/ext/v8/upstream/2.0.6/src/version.h +0 -64
  382. data/ext/v8/upstream/2.0.6/src/virtual-frame.cc +0 -381
  383. data/ext/v8/upstream/2.0.6/src/virtual-frame.h +0 -44
  384. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64-inl.h +0 -352
  385. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.cc +0 -2539
  386. data/ext/v8/upstream/2.0.6/src/x64/assembler-x64.h +0 -1399
  387. data/ext/v8/upstream/2.0.6/src/x64/builtins-x64.cc +0 -1255
  388. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64-inl.h +0 -46
  389. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.cc +0 -8223
  390. data/ext/v8/upstream/2.0.6/src/x64/codegen-x64.h +0 -785
  391. data/ext/v8/upstream/2.0.6/src/x64/cpu-x64.cc +0 -79
  392. data/ext/v8/upstream/2.0.6/src/x64/debug-x64.cc +0 -202
  393. data/ext/v8/upstream/2.0.6/src/x64/disasm-x64.cc +0 -1596
  394. data/ext/v8/upstream/2.0.6/src/x64/fast-codegen-x64.cc +0 -1820
  395. data/ext/v8/upstream/2.0.6/src/x64/frames-x64.cc +0 -109
  396. data/ext/v8/upstream/2.0.6/src/x64/frames-x64.h +0 -121
  397. data/ext/v8/upstream/2.0.6/src/x64/ic-x64.cc +0 -1392
  398. data/ext/v8/upstream/2.0.6/src/x64/jump-target-x64.cc +0 -432
  399. data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.cc +0 -2409
  400. data/ext/v8/upstream/2.0.6/src/x64/macro-assembler-x64.h +0 -765
  401. data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.cc +0 -1337
  402. data/ext/v8/upstream/2.0.6/src/x64/regexp-macro-assembler-x64.h +0 -295
  403. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64-inl.h +0 -86
  404. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.cc +0 -84
  405. data/ext/v8/upstream/2.0.6/src/x64/register-allocator-x64.h +0 -43
  406. data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.cc +0 -27
  407. data/ext/v8/upstream/2.0.6/src/x64/simulator-x64.h +0 -63
  408. data/ext/v8/upstream/2.0.6/src/x64/stub-cache-x64.cc +0 -1884
  409. data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.cc +0 -1089
  410. data/ext/v8/upstream/2.0.6/src/x64/virtual-frame-x64.h +0 -560
  411. data/ext/v8/upstream/2.0.6/src/zone-inl.h +0 -297
  412. data/ext/v8/upstream/2.0.6/src/zone.cc +0 -193
  413. data/ext/v8/upstream/2.0.6/src/zone.h +0 -305
  414. data/ext/v8/upstream/2.0.6/tools/codemap.js +0 -258
  415. data/ext/v8/upstream/2.0.6/tools/consarray.js +0 -93
  416. data/ext/v8/upstream/2.0.6/tools/csvparser.js +0 -98
  417. data/ext/v8/upstream/2.0.6/tools/gyp/v8.gyp +0 -620
  418. data/ext/v8/upstream/2.0.6/tools/js2c.py +0 -376
  419. data/ext/v8/upstream/2.0.6/tools/jsmin.py +0 -280
  420. data/ext/v8/upstream/2.0.6/tools/linux-tick-processor +0 -24
  421. data/ext/v8/upstream/2.0.6/tools/linux-tick-processor.py +0 -78
  422. data/ext/v8/upstream/2.0.6/tools/logreader.js +0 -320
  423. data/ext/v8/upstream/2.0.6/tools/mac-nm +0 -18
  424. data/ext/v8/upstream/2.0.6/tools/mac-tick-processor +0 -6
  425. data/ext/v8/upstream/2.0.6/tools/oprofile/annotate +0 -7
  426. data/ext/v8/upstream/2.0.6/tools/oprofile/common +0 -19
  427. data/ext/v8/upstream/2.0.6/tools/oprofile/dump +0 -7
  428. data/ext/v8/upstream/2.0.6/tools/oprofile/report +0 -7
  429. data/ext/v8/upstream/2.0.6/tools/oprofile/reset +0 -7
  430. data/ext/v8/upstream/2.0.6/tools/oprofile/run +0 -14
  431. data/ext/v8/upstream/2.0.6/tools/oprofile/shutdown +0 -7
  432. data/ext/v8/upstream/2.0.6/tools/oprofile/start +0 -7
  433. data/ext/v8/upstream/2.0.6/tools/presubmit.py +0 -299
  434. data/ext/v8/upstream/2.0.6/tools/process-heap-prof.py +0 -120
  435. data/ext/v8/upstream/2.0.6/tools/profile.js +0 -621
  436. data/ext/v8/upstream/2.0.6/tools/profile_view.js +0 -224
  437. data/ext/v8/upstream/2.0.6/tools/run-valgrind.py +0 -77
  438. data/ext/v8/upstream/2.0.6/tools/splaytree.js +0 -322
  439. data/ext/v8/upstream/2.0.6/tools/splaytree.py +0 -226
  440. data/ext/v8/upstream/2.0.6/tools/stats-viewer.py +0 -456
  441. data/ext/v8/upstream/2.0.6/tools/test.py +0 -1370
  442. data/ext/v8/upstream/2.0.6/tools/tickprocessor-driver.js +0 -53
  443. data/ext/v8/upstream/2.0.6/tools/tickprocessor.js +0 -731
  444. data/ext/v8/upstream/2.0.6/tools/tickprocessor.py +0 -535
  445. data/ext/v8/upstream/2.0.6/tools/utils.py +0 -82
  446. data/ext/v8/upstream/2.0.6/tools/visual_studio/README.txt +0 -71
  447. data/ext/v8/upstream/2.0.6/tools/visual_studio/arm.vsprops +0 -14
  448. data/ext/v8/upstream/2.0.6/tools/visual_studio/common.vsprops +0 -35
  449. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8.vcproj +0 -199
  450. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_arm.vcproj +0 -199
  451. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8_x64.vcproj +0 -201
  452. data/ext/v8/upstream/2.0.6/tools/visual_studio/d8js2c.cmd +0 -6
  453. data/ext/v8/upstream/2.0.6/tools/visual_studio/debug.vsprops +0 -17
  454. data/ext/v8/upstream/2.0.6/tools/visual_studio/ia32.vsprops +0 -13
  455. data/ext/v8/upstream/2.0.6/tools/visual_studio/js2c.cmd +0 -6
  456. data/ext/v8/upstream/2.0.6/tools/visual_studio/release.vsprops +0 -24
  457. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.sln +0 -101
  458. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8.vcproj +0 -223
  459. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.sln +0 -74
  460. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_arm.vcproj +0 -223
  461. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base.vcproj +0 -971
  462. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_arm.vcproj +0 -983
  463. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_base_x64.vcproj +0 -959
  464. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest.vcproj +0 -255
  465. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_arm.vcproj +0 -243
  466. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_cctest_x64.vcproj +0 -257
  467. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot.vcproj +0 -151
  468. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_mksnapshot_x64.vcproj +0 -151
  469. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample.vcproj +0 -151
  470. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_arm.vcproj +0 -151
  471. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_process_sample_x64.vcproj +0 -151
  472. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample.vcproj +0 -151
  473. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_arm.vcproj +0 -151
  474. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_shell_sample_x64.vcproj +0 -153
  475. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot.vcproj +0 -142
  476. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc.vcproj +0 -92
  477. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_cc_x64.vcproj +0 -92
  478. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_snapshot_x64.vcproj +0 -142
  479. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.sln +0 -101
  480. data/ext/v8/upstream/2.0.6/tools/visual_studio/v8_x64.vcproj +0 -223
  481. data/ext/v8/upstream/2.0.6/tools/visual_studio/x64.vsprops +0 -13
  482. data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.bat +0 -5
  483. data/ext/v8/upstream/2.0.6/tools/windows-tick-processor.py +0 -137
  484. data/ext/v8/upstream/Makefile +0 -32
  485. data/ext/v8/upstream/fpic-on-linux-amd64.patch +0 -13
  486. data/ext/v8/upstream/no-strict-aliasing.patch +0 -13
  487. data/ext/v8/upstream/scons/CHANGES.txt +0 -5183
  488. data/ext/v8/upstream/scons/LICENSE.txt +0 -20
  489. data/ext/v8/upstream/scons/MANIFEST +0 -202
  490. data/ext/v8/upstream/scons/PKG-INFO +0 -13
  491. data/ext/v8/upstream/scons/README.txt +0 -273
  492. data/ext/v8/upstream/scons/RELEASE.txt +0 -1040
  493. data/ext/v8/upstream/scons/engine/SCons/Action.py +0 -1256
  494. data/ext/v8/upstream/scons/engine/SCons/Builder.py +0 -868
  495. data/ext/v8/upstream/scons/engine/SCons/CacheDir.py +0 -217
  496. data/ext/v8/upstream/scons/engine/SCons/Conftest.py +0 -794
  497. data/ext/v8/upstream/scons/engine/SCons/Debug.py +0 -237
  498. data/ext/v8/upstream/scons/engine/SCons/Defaults.py +0 -485
  499. data/ext/v8/upstream/scons/engine/SCons/Environment.py +0 -2327
  500. data/ext/v8/upstream/scons/engine/SCons/Errors.py +0 -207
  501. data/ext/v8/upstream/scons/engine/SCons/Executor.py +0 -636
  502. data/ext/v8/upstream/scons/engine/SCons/Job.py +0 -435
  503. data/ext/v8/upstream/scons/engine/SCons/Memoize.py +0 -292
  504. data/ext/v8/upstream/scons/engine/SCons/Node/Alias.py +0 -153
  505. data/ext/v8/upstream/scons/engine/SCons/Node/FS.py +0 -3220
  506. data/ext/v8/upstream/scons/engine/SCons/Node/Python.py +0 -128
  507. data/ext/v8/upstream/scons/engine/SCons/Node/__init__.py +0 -1341
  508. data/ext/v8/upstream/scons/engine/SCons/Options/BoolOption.py +0 -50
  509. data/ext/v8/upstream/scons/engine/SCons/Options/EnumOption.py +0 -50
  510. data/ext/v8/upstream/scons/engine/SCons/Options/ListOption.py +0 -50
  511. data/ext/v8/upstream/scons/engine/SCons/Options/PackageOption.py +0 -50
  512. data/ext/v8/upstream/scons/engine/SCons/Options/PathOption.py +0 -76
  513. data/ext/v8/upstream/scons/engine/SCons/Options/__init__.py +0 -74
  514. data/ext/v8/upstream/scons/engine/SCons/PathList.py +0 -232
  515. data/ext/v8/upstream/scons/engine/SCons/Platform/__init__.py +0 -236
  516. data/ext/v8/upstream/scons/engine/SCons/Platform/aix.py +0 -70
  517. data/ext/v8/upstream/scons/engine/SCons/Platform/cygwin.py +0 -55
  518. data/ext/v8/upstream/scons/engine/SCons/Platform/darwin.py +0 -46
  519. data/ext/v8/upstream/scons/engine/SCons/Platform/hpux.py +0 -46
  520. data/ext/v8/upstream/scons/engine/SCons/Platform/irix.py +0 -44
  521. data/ext/v8/upstream/scons/engine/SCons/Platform/os2.py +0 -58
  522. data/ext/v8/upstream/scons/engine/SCons/Platform/posix.py +0 -264
  523. data/ext/v8/upstream/scons/engine/SCons/Platform/sunos.py +0 -50
  524. data/ext/v8/upstream/scons/engine/SCons/Platform/win32.py +0 -386
  525. data/ext/v8/upstream/scons/engine/SCons/SConf.py +0 -1038
  526. data/ext/v8/upstream/scons/engine/SCons/SConsign.py +0 -381
  527. data/ext/v8/upstream/scons/engine/SCons/Scanner/C.py +0 -132
  528. data/ext/v8/upstream/scons/engine/SCons/Scanner/D.py +0 -74
  529. data/ext/v8/upstream/scons/engine/SCons/Scanner/Dir.py +0 -111
  530. data/ext/v8/upstream/scons/engine/SCons/Scanner/Fortran.py +0 -320
  531. data/ext/v8/upstream/scons/engine/SCons/Scanner/IDL.py +0 -48
  532. data/ext/v8/upstream/scons/engine/SCons/Scanner/LaTeX.py +0 -378
  533. data/ext/v8/upstream/scons/engine/SCons/Scanner/Prog.py +0 -103
  534. data/ext/v8/upstream/scons/engine/SCons/Scanner/RC.py +0 -55
  535. data/ext/v8/upstream/scons/engine/SCons/Scanner/__init__.py +0 -415
  536. data/ext/v8/upstream/scons/engine/SCons/Script/Interactive.py +0 -386
  537. data/ext/v8/upstream/scons/engine/SCons/Script/Main.py +0 -1360
  538. data/ext/v8/upstream/scons/engine/SCons/Script/SConsOptions.py +0 -944
  539. data/ext/v8/upstream/scons/engine/SCons/Script/SConscript.py +0 -642
  540. data/ext/v8/upstream/scons/engine/SCons/Script/__init__.py +0 -414
  541. data/ext/v8/upstream/scons/engine/SCons/Sig.py +0 -63
  542. data/ext/v8/upstream/scons/engine/SCons/Subst.py +0 -911
  543. data/ext/v8/upstream/scons/engine/SCons/Taskmaster.py +0 -1030
  544. data/ext/v8/upstream/scons/engine/SCons/Tool/386asm.py +0 -61
  545. data/ext/v8/upstream/scons/engine/SCons/Tool/BitKeeper.py +0 -65
  546. data/ext/v8/upstream/scons/engine/SCons/Tool/CVS.py +0 -73
  547. data/ext/v8/upstream/scons/engine/SCons/Tool/FortranCommon.py +0 -247
  548. data/ext/v8/upstream/scons/engine/SCons/Tool/JavaCommon.py +0 -324
  549. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/__init__.py +0 -56
  550. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/arch.py +0 -61
  551. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/common.py +0 -210
  552. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/netframework.py +0 -84
  553. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/sdk.py +0 -321
  554. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vc.py +0 -367
  555. data/ext/v8/upstream/scons/engine/SCons/Tool/MSCommon/vs.py +0 -497
  556. data/ext/v8/upstream/scons/engine/SCons/Tool/Perforce.py +0 -104
  557. data/ext/v8/upstream/scons/engine/SCons/Tool/PharLapCommon.py +0 -138
  558. data/ext/v8/upstream/scons/engine/SCons/Tool/RCS.py +0 -64
  559. data/ext/v8/upstream/scons/engine/SCons/Tool/SCCS.py +0 -64
  560. data/ext/v8/upstream/scons/engine/SCons/Tool/Subversion.py +0 -71
  561. data/ext/v8/upstream/scons/engine/SCons/Tool/__init__.py +0 -675
  562. data/ext/v8/upstream/scons/engine/SCons/Tool/aixc++.py +0 -82
  563. data/ext/v8/upstream/scons/engine/SCons/Tool/aixcc.py +0 -74
  564. data/ext/v8/upstream/scons/engine/SCons/Tool/aixf77.py +0 -80
  565. data/ext/v8/upstream/scons/engine/SCons/Tool/aixlink.py +0 -76
  566. data/ext/v8/upstream/scons/engine/SCons/Tool/applelink.py +0 -71
  567. data/ext/v8/upstream/scons/engine/SCons/Tool/ar.py +0 -63
  568. data/ext/v8/upstream/scons/engine/SCons/Tool/as.py +0 -78
  569. data/ext/v8/upstream/scons/engine/SCons/Tool/bcc32.py +0 -82
  570. data/ext/v8/upstream/scons/engine/SCons/Tool/c++.py +0 -99
  571. data/ext/v8/upstream/scons/engine/SCons/Tool/cc.py +0 -114
  572. data/ext/v8/upstream/scons/engine/SCons/Tool/cvf.py +0 -58
  573. data/ext/v8/upstream/scons/engine/SCons/Tool/default.py +0 -50
  574. data/ext/v8/upstream/scons/engine/SCons/Tool/dmd.py +0 -224
  575. data/ext/v8/upstream/scons/engine/SCons/Tool/dvi.py +0 -64
  576. data/ext/v8/upstream/scons/engine/SCons/Tool/dvipdf.py +0 -125
  577. data/ext/v8/upstream/scons/engine/SCons/Tool/dvips.py +0 -94
  578. data/ext/v8/upstream/scons/engine/SCons/Tool/f77.py +0 -62
  579. data/ext/v8/upstream/scons/engine/SCons/Tool/f90.py +0 -62
  580. data/ext/v8/upstream/scons/engine/SCons/Tool/f95.py +0 -63
  581. data/ext/v8/upstream/scons/engine/SCons/Tool/filesystem.py +0 -98
  582. data/ext/v8/upstream/scons/engine/SCons/Tool/fortran.py +0 -63
  583. data/ext/v8/upstream/scons/engine/SCons/Tool/g++.py +0 -90
  584. data/ext/v8/upstream/scons/engine/SCons/Tool/g77.py +0 -73
  585. data/ext/v8/upstream/scons/engine/SCons/Tool/gas.py +0 -53
  586. data/ext/v8/upstream/scons/engine/SCons/Tool/gcc.py +0 -80
  587. data/ext/v8/upstream/scons/engine/SCons/Tool/gfortran.py +0 -64
  588. data/ext/v8/upstream/scons/engine/SCons/Tool/gnulink.py +0 -63
  589. data/ext/v8/upstream/scons/engine/SCons/Tool/gs.py +0 -81
  590. data/ext/v8/upstream/scons/engine/SCons/Tool/hpc++.py +0 -85
  591. data/ext/v8/upstream/scons/engine/SCons/Tool/hpcc.py +0 -53
  592. data/ext/v8/upstream/scons/engine/SCons/Tool/hplink.py +0 -77
  593. data/ext/v8/upstream/scons/engine/SCons/Tool/icc.py +0 -59
  594. data/ext/v8/upstream/scons/engine/SCons/Tool/icl.py +0 -52
  595. data/ext/v8/upstream/scons/engine/SCons/Tool/ifl.py +0 -72
  596. data/ext/v8/upstream/scons/engine/SCons/Tool/ifort.py +0 -90
  597. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink.py +0 -59
  598. data/ext/v8/upstream/scons/engine/SCons/Tool/ilink32.py +0 -60
  599. data/ext/v8/upstream/scons/engine/SCons/Tool/install.py +0 -229
  600. data/ext/v8/upstream/scons/engine/SCons/Tool/intelc.py +0 -490
  601. data/ext/v8/upstream/scons/engine/SCons/Tool/ipkg.py +0 -71
  602. data/ext/v8/upstream/scons/engine/SCons/Tool/jar.py +0 -110
  603. data/ext/v8/upstream/scons/engine/SCons/Tool/javac.py +0 -234
  604. data/ext/v8/upstream/scons/engine/SCons/Tool/javah.py +0 -138
  605. data/ext/v8/upstream/scons/engine/SCons/Tool/latex.py +0 -79
  606. data/ext/v8/upstream/scons/engine/SCons/Tool/lex.py +0 -99
  607. data/ext/v8/upstream/scons/engine/SCons/Tool/link.py +0 -121
  608. data/ext/v8/upstream/scons/engine/SCons/Tool/linkloc.py +0 -112
  609. data/ext/v8/upstream/scons/engine/SCons/Tool/m4.py +0 -63
  610. data/ext/v8/upstream/scons/engine/SCons/Tool/masm.py +0 -77
  611. data/ext/v8/upstream/scons/engine/SCons/Tool/midl.py +0 -90
  612. data/ext/v8/upstream/scons/engine/SCons/Tool/mingw.py +0 -159
  613. data/ext/v8/upstream/scons/engine/SCons/Tool/mslib.py +0 -64
  614. data/ext/v8/upstream/scons/engine/SCons/Tool/mslink.py +0 -266
  615. data/ext/v8/upstream/scons/engine/SCons/Tool/mssdk.py +0 -50
  616. data/ext/v8/upstream/scons/engine/SCons/Tool/msvc.py +0 -269
  617. data/ext/v8/upstream/scons/engine/SCons/Tool/msvs.py +0 -1439
  618. data/ext/v8/upstream/scons/engine/SCons/Tool/mwcc.py +0 -208
  619. data/ext/v8/upstream/scons/engine/SCons/Tool/mwld.py +0 -107
  620. data/ext/v8/upstream/scons/engine/SCons/Tool/nasm.py +0 -72
  621. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/__init__.py +0 -314
  622. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/ipk.py +0 -185
  623. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/msi.py +0 -526
  624. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/rpm.py +0 -367
  625. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_tarbz2.py +0 -43
  626. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_targz.py +0 -43
  627. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/src_zip.py +0 -43
  628. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/tarbz2.py +0 -44
  629. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/targz.py +0 -44
  630. data/ext/v8/upstream/scons/engine/SCons/Tool/packaging/zip.py +0 -44
  631. data/ext/v8/upstream/scons/engine/SCons/Tool/pdf.py +0 -78
  632. data/ext/v8/upstream/scons/engine/SCons/Tool/pdflatex.py +0 -83
  633. data/ext/v8/upstream/scons/engine/SCons/Tool/pdftex.py +0 -108
  634. data/ext/v8/upstream/scons/engine/SCons/Tool/qt.py +0 -336
  635. data/ext/v8/upstream/scons/engine/SCons/Tool/rmic.py +0 -121
  636. data/ext/v8/upstream/scons/engine/SCons/Tool/rpcgen.py +0 -70
  637. data/ext/v8/upstream/scons/engine/SCons/Tool/rpm.py +0 -132
  638. data/ext/v8/upstream/scons/engine/SCons/Tool/sgiar.py +0 -68
  639. data/ext/v8/upstream/scons/engine/SCons/Tool/sgic++.py +0 -58
  640. data/ext/v8/upstream/scons/engine/SCons/Tool/sgicc.py +0 -53
  641. data/ext/v8/upstream/scons/engine/SCons/Tool/sgilink.py +0 -63
  642. data/ext/v8/upstream/scons/engine/SCons/Tool/sunar.py +0 -67
  643. data/ext/v8/upstream/scons/engine/SCons/Tool/sunc++.py +0 -142
  644. data/ext/v8/upstream/scons/engine/SCons/Tool/suncc.py +0 -58
  645. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf77.py +0 -63
  646. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf90.py +0 -64
  647. data/ext/v8/upstream/scons/engine/SCons/Tool/sunf95.py +0 -64
  648. data/ext/v8/upstream/scons/engine/SCons/Tool/sunlink.py +0 -77
  649. data/ext/v8/upstream/scons/engine/SCons/Tool/swig.py +0 -186
  650. data/ext/v8/upstream/scons/engine/SCons/Tool/tar.py +0 -73
  651. data/ext/v8/upstream/scons/engine/SCons/Tool/tex.py +0 -805
  652. data/ext/v8/upstream/scons/engine/SCons/Tool/textfile.py +0 -175
  653. data/ext/v8/upstream/scons/engine/SCons/Tool/tlib.py +0 -53
  654. data/ext/v8/upstream/scons/engine/SCons/Tool/wix.py +0 -100
  655. data/ext/v8/upstream/scons/engine/SCons/Tool/yacc.py +0 -131
  656. data/ext/v8/upstream/scons/engine/SCons/Tool/zip.py +0 -100
  657. data/ext/v8/upstream/scons/engine/SCons/Util.py +0 -1645
  658. data/ext/v8/upstream/scons/engine/SCons/Variables/BoolVariable.py +0 -91
  659. data/ext/v8/upstream/scons/engine/SCons/Variables/EnumVariable.py +0 -107
  660. data/ext/v8/upstream/scons/engine/SCons/Variables/ListVariable.py +0 -139
  661. data/ext/v8/upstream/scons/engine/SCons/Variables/PackageVariable.py +0 -109
  662. data/ext/v8/upstream/scons/engine/SCons/Variables/PathVariable.py +0 -147
  663. data/ext/v8/upstream/scons/engine/SCons/Variables/__init__.py +0 -317
  664. data/ext/v8/upstream/scons/engine/SCons/Warnings.py +0 -228
  665. data/ext/v8/upstream/scons/engine/SCons/__init__.py +0 -49
  666. data/ext/v8/upstream/scons/engine/SCons/compat/__init__.py +0 -302
  667. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_UserString.py +0 -98
  668. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_hashlib.py +0 -91
  669. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_itertools.py +0 -124
  670. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_optparse.py +0 -1725
  671. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets.py +0 -583
  672. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_sets15.py +0 -176
  673. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_shlex.py +0 -325
  674. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_subprocess.py +0 -1296
  675. data/ext/v8/upstream/scons/engine/SCons/compat/_scons_textwrap.py +0 -382
  676. data/ext/v8/upstream/scons/engine/SCons/compat/builtins.py +0 -187
  677. data/ext/v8/upstream/scons/engine/SCons/cpp.py +0 -598
  678. data/ext/v8/upstream/scons/engine/SCons/dblite.py +0 -248
  679. data/ext/v8/upstream/scons/engine/SCons/exitfuncs.py +0 -77
  680. data/ext/v8/upstream/scons/os_spawnv_fix.diff +0 -83
  681. data/ext/v8/upstream/scons/scons-time.1 +0 -1017
  682. data/ext/v8/upstream/scons/scons.1 +0 -15179
  683. data/ext/v8/upstream/scons/sconsign.1 +0 -208
  684. data/ext/v8/upstream/scons/script/scons +0 -184
  685. data/ext/v8/upstream/scons/script/scons-time +0 -1529
  686. data/ext/v8/upstream/scons/script/scons.bat +0 -31
  687. data/ext/v8/upstream/scons/script/sconsign +0 -508
  688. data/ext/v8/upstream/scons/setup.cfg +0 -6
  689. data/ext/v8/upstream/scons/setup.py +0 -427
  690. data/ext/v8/v8.cpp +0 -89
  691. data/ext/v8/v8_cxt.cpp +0 -92
  692. data/ext/v8/v8_cxt.h +0 -20
  693. data/ext/v8/v8_func.cpp +0 -10
  694. data/ext/v8/v8_func.h +0 -11
  695. data/ext/v8/v8_msg.cpp +0 -54
  696. data/ext/v8/v8_msg.h +0 -18
  697. data/ext/v8/v8_obj.cpp +0 -52
  698. data/ext/v8/v8_obj.h +0 -13
  699. data/ext/v8/v8_ref.cpp +0 -26
  700. data/ext/v8/v8_ref.h +0 -31
  701. data/ext/v8/v8_script.cpp +0 -20
  702. data/ext/v8/v8_script.h +0 -8
  703. data/ext/v8/v8_standalone.cpp +0 -69
  704. data/ext/v8/v8_standalone.h +0 -31
  705. data/ext/v8/v8_str.cpp +0 -17
  706. data/ext/v8/v8_str.h +0 -9
  707. data/ext/v8/v8_template.cpp +0 -53
  708. data/ext/v8/v8_template.h +0 -13
  709. data/lib/v8/to.rb +0 -33
  710. data/lib/v8/v8.so +0 -0
  711. data/script/console +0 -10
  712. data/script/destroy +0 -14
  713. data/script/generate +0 -14
  714. data/spec/ext/cxt_spec.rb +0 -25
  715. data/spec/ext/obj_spec.rb +0 -13
  716. data/spec/redjs/jsapi_spec.rb +0 -405
  717. data/spec/redjs/tap.rb +0 -8
  718. data/spec/redjs_helper.rb +0 -3
  719. data/spec/spec.opts +0 -1
  720. data/spec/v8/to_spec.rb +0 -15
  721. data/tasks/rspec.rake +0 -21
@@ -1,432 +0,0 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #include "codegen-inl.h"
31
- #include "jump-target-inl.h"
32
- #include "register-allocator-inl.h"
33
-
34
- namespace v8 {
35
- namespace internal {
36
-
37
- // -------------------------------------------------------------------------
38
- // JumpTarget implementation.
39
-
40
- #define __ ACCESS_MASM(cgen()->masm())
41
-
42
- void JumpTarget::DoJump() {
43
- ASSERT(cgen()->has_valid_frame());
44
- // Live non-frame registers are not allowed at unconditional jumps
45
- // because we have no way of invalidating the corresponding results
46
- // which are still live in the C++ code.
47
- ASSERT(cgen()->HasValidEntryRegisters());
48
-
49
- if (is_bound()) {
50
- // Backward jump. There is an expected frame to merge to.
51
- ASSERT(direction_ == BIDIRECTIONAL);
52
- cgen()->frame()->PrepareMergeTo(entry_frame_);
53
- cgen()->frame()->MergeTo(entry_frame_);
54
- cgen()->DeleteFrame();
55
- __ jmp(&entry_label_);
56
- } else if (entry_frame_ != NULL) {
57
- // Forward jump with a preconfigured entry frame. Assert the
58
- // current frame matches the expected one and jump to the block.
59
- ASSERT(cgen()->frame()->Equals(entry_frame_));
60
- cgen()->DeleteFrame();
61
- __ jmp(&entry_label_);
62
- } else {
63
- // Forward jump. Remember the current frame and emit a jump to
64
- // its merge code.
65
- AddReachingFrame(cgen()->frame());
66
- RegisterFile empty;
67
- cgen()->SetFrame(NULL, &empty);
68
- __ jmp(&merge_labels_.last());
69
- }
70
- }
71
-
72
-
73
- void JumpTarget::DoBranch(Condition cc, Hint b) {
74
- ASSERT(cgen() != NULL);
75
- ASSERT(cgen()->has_valid_frame());
76
-
77
- if (is_bound()) {
78
- ASSERT(direction_ == BIDIRECTIONAL);
79
- // Backward branch. We have an expected frame to merge to on the
80
- // backward edge.
81
-
82
- // Swap the current frame for a copy (we do the swapping to get
83
- // the off-frame registers off the fall through) to use for the
84
- // branch.
85
- VirtualFrame* fall_through_frame = cgen()->frame();
86
- VirtualFrame* branch_frame = new VirtualFrame(fall_through_frame);
87
- RegisterFile non_frame_registers;
88
- cgen()->SetFrame(branch_frame, &non_frame_registers);
89
-
90
- // Check if we can avoid merge code.
91
- cgen()->frame()->PrepareMergeTo(entry_frame_);
92
- if (cgen()->frame()->Equals(entry_frame_)) {
93
- // Branch right in to the block.
94
- cgen()->DeleteFrame();
95
- __ j(cc, &entry_label_);
96
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
97
- return;
98
- }
99
-
100
- // Check if we can reuse existing merge code.
101
- for (int i = 0; i < reaching_frames_.length(); i++) {
102
- if (reaching_frames_[i] != NULL &&
103
- cgen()->frame()->Equals(reaching_frames_[i])) {
104
- // Branch to the merge code.
105
- cgen()->DeleteFrame();
106
- __ j(cc, &merge_labels_[i]);
107
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
108
- return;
109
- }
110
- }
111
-
112
- // To emit the merge code here, we negate the condition and branch
113
- // around the merge code on the fall through path.
114
- Label original_fall_through;
115
- __ j(NegateCondition(cc), &original_fall_through);
116
- cgen()->frame()->MergeTo(entry_frame_);
117
- cgen()->DeleteFrame();
118
- __ jmp(&entry_label_);
119
- cgen()->SetFrame(fall_through_frame, &non_frame_registers);
120
- __ bind(&original_fall_through);
121
-
122
- } else if (entry_frame_ != NULL) {
123
- // Forward branch with a preconfigured entry frame. Assert the
124
- // current frame matches the expected one and branch to the block.
125
- ASSERT(cgen()->frame()->Equals(entry_frame_));
126
- // Explicitly use the macro assembler instead of __ as forward
127
- // branches are expected to be a fixed size (no inserted
128
- // coverage-checking instructions please). This is used in
129
- // Reference::GetValue.
130
- cgen()->masm()->j(cc, &entry_label_);
131
-
132
- } else {
133
- // Forward branch. A copy of the current frame is remembered and
134
- // a branch to the merge code is emitted. Explicitly use the
135
- // macro assembler instead of __ as forward branches are expected
136
- // to be a fixed size (no inserted coverage-checking instructions
137
- // please). This is used in Reference::GetValue.
138
- AddReachingFrame(new VirtualFrame(cgen()->frame()));
139
- cgen()->masm()->j(cc, &merge_labels_.last());
140
- }
141
- }
142
-
143
-
144
- void JumpTarget::Call() {
145
- // Call is used to push the address of the catch block on the stack as
146
- // a return address when compiling try/catch and try/finally. We
147
- // fully spill the frame before making the call. The expected frame
148
- // at the label (which should be the only one) is the spilled current
149
- // frame plus an in-memory return address. The "fall-through" frame
150
- // at the return site is the spilled current frame.
151
- ASSERT(cgen() != NULL);
152
- ASSERT(cgen()->has_valid_frame());
153
- // There are no non-frame references across the call.
154
- ASSERT(cgen()->HasValidEntryRegisters());
155
- ASSERT(!is_linked());
156
-
157
- cgen()->frame()->SpillAll();
158
- VirtualFrame* target_frame = new VirtualFrame(cgen()->frame());
159
- target_frame->Adjust(1);
160
- // We do not expect a call with a preconfigured entry frame.
161
- ASSERT(entry_frame_ == NULL);
162
- AddReachingFrame(target_frame);
163
- __ call(&merge_labels_.last());
164
- }
165
-
166
-
167
- void JumpTarget::DoBind() {
168
- ASSERT(cgen() != NULL);
169
- ASSERT(!is_bound());
170
-
171
- // Live non-frame registers are not allowed at the start of a basic
172
- // block.
173
- ASSERT(!cgen()->has_valid_frame() || cgen()->HasValidEntryRegisters());
174
-
175
- // Fast case: the jump target was manually configured with an entry
176
- // frame to use.
177
- if (entry_frame_ != NULL) {
178
- // Assert no reaching frames to deal with.
179
- ASSERT(reaching_frames_.is_empty());
180
- ASSERT(!cgen()->has_valid_frame());
181
-
182
- RegisterFile empty;
183
- if (direction_ == BIDIRECTIONAL) {
184
- // Copy the entry frame so the original can be used for a
185
- // possible backward jump.
186
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
187
- } else {
188
- // Take ownership of the entry frame.
189
- cgen()->SetFrame(entry_frame_, &empty);
190
- entry_frame_ = NULL;
191
- }
192
- __ bind(&entry_label_);
193
- return;
194
- }
195
-
196
- if (!is_linked()) {
197
- ASSERT(cgen()->has_valid_frame());
198
- if (direction_ == FORWARD_ONLY) {
199
- // Fast case: no forward jumps and no possible backward jumps.
200
- // The stack pointer can be floating above the top of the
201
- // virtual frame before the bind. Afterward, it should not.
202
- VirtualFrame* frame = cgen()->frame();
203
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
204
- if (difference > 0) {
205
- frame->stack_pointer_ -= difference;
206
- __ addq(rsp, Immediate(difference * kPointerSize));
207
- }
208
- } else {
209
- ASSERT(direction_ == BIDIRECTIONAL);
210
- // Fast case: no forward jumps, possible backward ones. Remove
211
- // constants and copies above the watermark on the fall-through
212
- // frame and use it as the entry frame.
213
- cgen()->frame()->MakeMergable();
214
- entry_frame_ = new VirtualFrame(cgen()->frame());
215
- }
216
- __ bind(&entry_label_);
217
- return;
218
- }
219
-
220
- if (direction_ == FORWARD_ONLY &&
221
- !cgen()->has_valid_frame() &&
222
- reaching_frames_.length() == 1) {
223
- // Fast case: no fall-through, a single forward jump, and no
224
- // possible backward jumps. Pick up the only reaching frame, take
225
- // ownership of it, and use it for the block about to be emitted.
226
- VirtualFrame* frame = reaching_frames_[0];
227
- RegisterFile empty;
228
- cgen()->SetFrame(frame, &empty);
229
- reaching_frames_[0] = NULL;
230
- __ bind(&merge_labels_[0]);
231
-
232
- // The stack pointer can be floating above the top of the
233
- // virtual frame before the bind. Afterward, it should not.
234
- int difference = frame->stack_pointer_ - (frame->element_count() - 1);
235
- if (difference > 0) {
236
- frame->stack_pointer_ -= difference;
237
- __ addq(rsp, Immediate(difference * kPointerSize));
238
- }
239
-
240
- __ bind(&entry_label_);
241
- return;
242
- }
243
-
244
- // If there is a current frame, record it as the fall-through. It
245
- // is owned by the reaching frames for now.
246
- bool had_fall_through = false;
247
- if (cgen()->has_valid_frame()) {
248
- had_fall_through = true;
249
- AddReachingFrame(cgen()->frame()); // Return value ignored.
250
- RegisterFile empty;
251
- cgen()->SetFrame(NULL, &empty);
252
- }
253
-
254
- // Compute the frame to use for entry to the block.
255
- ComputeEntryFrame();
256
-
257
- // Some moves required to merge to an expected frame require purely
258
- // frame state changes, and do not require any code generation.
259
- // Perform those first to increase the possibility of finding equal
260
- // frames below.
261
- for (int i = 0; i < reaching_frames_.length(); i++) {
262
- if (reaching_frames_[i] != NULL) {
263
- reaching_frames_[i]->PrepareMergeTo(entry_frame_);
264
- }
265
- }
266
-
267
- if (is_linked()) {
268
- // There were forward jumps. Handle merging the reaching frames
269
- // to the entry frame.
270
-
271
- // Loop over the (non-null) reaching frames and process any that
272
- // need merge code. Iterate backwards through the list to handle
273
- // the fall-through frame first. Set frames that will be
274
- // processed after 'i' to NULL if we want to avoid processing
275
- // them.
276
- for (int i = reaching_frames_.length() - 1; i >= 0; i--) {
277
- VirtualFrame* frame = reaching_frames_[i];
278
-
279
- if (frame != NULL) {
280
- // Does the frame (probably) need merge code?
281
- if (!frame->Equals(entry_frame_)) {
282
- // We could have a valid frame as the fall through to the
283
- // binding site or as the fall through from a previous merge
284
- // code block. Jump around the code we are about to
285
- // generate.
286
- if (cgen()->has_valid_frame()) {
287
- cgen()->DeleteFrame();
288
- __ jmp(&entry_label_);
289
- }
290
- // Pick up the frame for this block. Assume ownership if
291
- // there cannot be backward jumps.
292
- RegisterFile empty;
293
- if (direction_ == BIDIRECTIONAL) {
294
- cgen()->SetFrame(new VirtualFrame(frame), &empty);
295
- } else {
296
- cgen()->SetFrame(frame, &empty);
297
- reaching_frames_[i] = NULL;
298
- }
299
- __ bind(&merge_labels_[i]);
300
-
301
- // Loop over the remaining (non-null) reaching frames,
302
- // looking for any that can share merge code with this one.
303
- for (int j = 0; j < i; j++) {
304
- VirtualFrame* other = reaching_frames_[j];
305
- if (other != NULL && other->Equals(cgen()->frame())) {
306
- // Set the reaching frame element to null to avoid
307
- // processing it later, and then bind its entry label.
308
- reaching_frames_[j] = NULL;
309
- __ bind(&merge_labels_[j]);
310
- }
311
- }
312
-
313
- // Emit the merge code.
314
- cgen()->frame()->MergeTo(entry_frame_);
315
- } else if (i == reaching_frames_.length() - 1 && had_fall_through) {
316
- // If this is the fall through frame, and it didn't need
317
- // merge code, we need to pick up the frame so we can jump
318
- // around subsequent merge blocks if necessary.
319
- RegisterFile empty;
320
- cgen()->SetFrame(frame, &empty);
321
- reaching_frames_[i] = NULL;
322
- }
323
- }
324
- }
325
-
326
- // The code generator may not have a current frame if there was no
327
- // fall through and none of the reaching frames needed merging.
328
- // In that case, clone the entry frame as the current frame.
329
- if (!cgen()->has_valid_frame()) {
330
- RegisterFile empty;
331
- cgen()->SetFrame(new VirtualFrame(entry_frame_), &empty);
332
- }
333
-
334
- // There may be unprocessed reaching frames that did not need
335
- // merge code. They will have unbound merge labels. Bind their
336
- // merge labels to be the same as the entry label and deallocate
337
- // them.
338
- for (int i = 0; i < reaching_frames_.length(); i++) {
339
- if (!merge_labels_[i].is_bound()) {
340
- reaching_frames_[i] = NULL;
341
- __ bind(&merge_labels_[i]);
342
- }
343
- }
344
-
345
- // There are non-NULL reaching frames with bound labels for each
346
- // merge block, but only on backward targets.
347
- } else {
348
- // There were no forward jumps. There must be a current frame and
349
- // this must be a bidirectional target.
350
- ASSERT(reaching_frames_.length() == 1);
351
- ASSERT(reaching_frames_[0] != NULL);
352
- ASSERT(direction_ == BIDIRECTIONAL);
353
-
354
- // Use a copy of the reaching frame so the original can be saved
355
- // for possible reuse as a backward merge block.
356
- RegisterFile empty;
357
- cgen()->SetFrame(new VirtualFrame(reaching_frames_[0]), &empty);
358
- __ bind(&merge_labels_[0]);
359
- cgen()->frame()->MergeTo(entry_frame_);
360
- }
361
-
362
- __ bind(&entry_label_);
363
- }
364
-
365
-
366
- void BreakTarget::Jump() {
367
- // Drop leftover statement state from the frame before merging, without
368
- // emitting code.
369
- ASSERT(cgen()->has_valid_frame());
370
- int count = cgen()->frame()->height() - expected_height_;
371
- cgen()->frame()->ForgetElements(count);
372
- DoJump();
373
- }
374
-
375
-
376
- void BreakTarget::Jump(Result* arg) {
377
- // Drop leftover statement state from the frame before merging, without
378
- // emitting code.
379
- ASSERT(cgen()->has_valid_frame());
380
- int count = cgen()->frame()->height() - expected_height_;
381
- cgen()->frame()->ForgetElements(count);
382
- cgen()->frame()->Push(arg);
383
- DoJump();
384
- }
385
-
386
-
387
- void BreakTarget::Bind() {
388
- #ifdef DEBUG
389
- // All the forward-reaching frames should have been adjusted at the
390
- // jumps to this target.
391
- for (int i = 0; i < reaching_frames_.length(); i++) {
392
- ASSERT(reaching_frames_[i] == NULL ||
393
- reaching_frames_[i]->height() == expected_height_);
394
- }
395
- #endif
396
- // Drop leftover statement state from the frame before merging, even on
397
- // the fall through. This is so we can bind the return target with state
398
- // on the frame.
399
- if (cgen()->has_valid_frame()) {
400
- int count = cgen()->frame()->height() - expected_height_;
401
- cgen()->frame()->ForgetElements(count);
402
- }
403
- DoBind();
404
- }
405
-
406
-
407
- void BreakTarget::Bind(Result* arg) {
408
- #ifdef DEBUG
409
- // All the forward-reaching frames should have been adjusted at the
410
- // jumps to this target.
411
- for (int i = 0; i < reaching_frames_.length(); i++) {
412
- ASSERT(reaching_frames_[i] == NULL ||
413
- reaching_frames_[i]->height() == expected_height_ + 1);
414
- }
415
- #endif
416
- // Drop leftover statement state from the frame before merging, even on
417
- // the fall through. This is so we can bind the return target with state
418
- // on the frame.
419
- if (cgen()->has_valid_frame()) {
420
- int count = cgen()->frame()->height() - expected_height_;
421
- cgen()->frame()->ForgetElements(count);
422
- cgen()->frame()->Push(arg);
423
- }
424
- DoBind();
425
- *arg = cgen()->frame()->Pop();
426
- }
427
-
428
-
429
- #undef __
430
-
431
-
432
- } } // namespace v8::internal
@@ -1,2409 +0,0 @@
1
- // Copyright 2009 the V8 project authors. All rights reserved.
2
- // Redistribution and use in source and binary forms, with or without
3
- // modification, are permitted provided that the following conditions are
4
- // met:
5
- //
6
- // * Redistributions of source code must retain the above copyright
7
- // notice, this list of conditions and the following disclaimer.
8
- // * Redistributions in binary form must reproduce the above
9
- // copyright notice, this list of conditions and the following
10
- // disclaimer in the documentation and/or other materials provided
11
- // with the distribution.
12
- // * Neither the name of Google Inc. nor the names of its
13
- // contributors may be used to endorse or promote products derived
14
- // from this software without specific prior written permission.
15
- //
16
- // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17
- // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18
- // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19
- // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20
- // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21
- // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22
- // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23
- // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24
- // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25
- // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26
- // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
-
28
- #include "v8.h"
29
-
30
- #include "bootstrapper.h"
31
- #include "codegen-inl.h"
32
- #include "assembler-x64.h"
33
- #include "macro-assembler-x64.h"
34
- #include "serialize.h"
35
- #include "debug.h"
36
-
37
- namespace v8 {
38
- namespace internal {
39
-
40
- MacroAssembler::MacroAssembler(void* buffer, int size)
41
- : Assembler(buffer, size),
42
- unresolved_(0),
43
- generating_stub_(false),
44
- allow_stub_calls_(true),
45
- code_object_(Heap::undefined_value()) {
46
- }
47
-
48
-
49
- void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
50
- movq(destination, Operand(r13, index << kPointerSizeLog2));
51
- }
52
-
53
-
54
- void MacroAssembler::PushRoot(Heap::RootListIndex index) {
55
- push(Operand(r13, index << kPointerSizeLog2));
56
- }
57
-
58
-
59
- void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
60
- cmpq(with, Operand(r13, index << kPointerSizeLog2));
61
- }
62
-
63
-
64
- void MacroAssembler::CompareRoot(Operand with, Heap::RootListIndex index) {
65
- LoadRoot(kScratchRegister, index);
66
- cmpq(with, kScratchRegister);
67
- }
68
-
69
-
70
- void MacroAssembler::StackLimitCheck(Label* on_stack_overflow) {
71
- CompareRoot(rsp, Heap::kStackLimitRootIndex);
72
- j(below, on_stack_overflow);
73
- }
74
-
75
-
76
- static void RecordWriteHelper(MacroAssembler* masm,
77
- Register object,
78
- Register addr,
79
- Register scratch) {
80
- Label fast;
81
-
82
- // Compute the page start address from the heap object pointer, and reuse
83
- // the 'object' register for it.
84
- ASSERT(is_int32(~Page::kPageAlignmentMask));
85
- masm->and_(object,
86
- Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
87
- Register page_start = object;
88
-
89
- // Compute the bit addr in the remembered set/index of the pointer in the
90
- // page. Reuse 'addr' as pointer_offset.
91
- masm->subq(addr, page_start);
92
- masm->shr(addr, Immediate(kPointerSizeLog2));
93
- Register pointer_offset = addr;
94
-
95
- // If the bit offset lies beyond the normal remembered set range, it is in
96
- // the extra remembered set area of a large object.
97
- masm->cmpq(pointer_offset, Immediate(Page::kPageSize / kPointerSize));
98
- masm->j(less, &fast);
99
-
100
- // Adjust 'page_start' so that addressing using 'pointer_offset' hits the
101
- // extra remembered set after the large object.
102
-
103
- // Load the array length into 'scratch'.
104
- masm->movl(scratch,
105
- Operand(page_start,
106
- Page::kObjectStartOffset + FixedArray::kLengthOffset));
107
- Register array_length = scratch;
108
-
109
- // Extra remembered set starts right after the large object (a FixedArray), at
110
- // page_start + kObjectStartOffset + objectSize
111
- // where objectSize is FixedArray::kHeaderSize + kPointerSize * array_length.
112
- // Add the delta between the end of the normal RSet and the start of the
113
- // extra RSet to 'page_start', so that addressing the bit using
114
- // 'pointer_offset' hits the extra RSet words.
115
- masm->lea(page_start,
116
- Operand(page_start, array_length, times_pointer_size,
117
- Page::kObjectStartOffset + FixedArray::kHeaderSize
118
- - Page::kRSetEndOffset));
119
-
120
- // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
121
- // to limit code size. We should probably evaluate this decision by
122
- // measuring the performance of an equivalent implementation using
123
- // "simpler" instructions
124
- masm->bind(&fast);
125
- masm->bts(Operand(page_start, Page::kRSetOffset), pointer_offset);
126
- }
127
-
128
-
129
- class RecordWriteStub : public CodeStub {
130
- public:
131
- RecordWriteStub(Register object, Register addr, Register scratch)
132
- : object_(object), addr_(addr), scratch_(scratch) { }
133
-
134
- void Generate(MacroAssembler* masm);
135
-
136
- private:
137
- Register object_;
138
- Register addr_;
139
- Register scratch_;
140
-
141
- #ifdef DEBUG
142
- void Print() {
143
- PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
144
- object_.code(), addr_.code(), scratch_.code());
145
- }
146
- #endif
147
-
148
- // Minor key encoding in 12 bits of three registers (object, address and
149
- // scratch) OOOOAAAASSSS.
150
- class ScratchBits : public BitField<uint32_t, 0, 4> {};
151
- class AddressBits : public BitField<uint32_t, 4, 4> {};
152
- class ObjectBits : public BitField<uint32_t, 8, 4> {};
153
-
154
- Major MajorKey() { return RecordWrite; }
155
-
156
- int MinorKey() {
157
- // Encode the registers.
158
- return ObjectBits::encode(object_.code()) |
159
- AddressBits::encode(addr_.code()) |
160
- ScratchBits::encode(scratch_.code());
161
- }
162
- };
163
-
164
-
165
- void RecordWriteStub::Generate(MacroAssembler* masm) {
166
- RecordWriteHelper(masm, object_, addr_, scratch_);
167
- masm->ret(0);
168
- }
169
-
170
-
171
- // Set the remembered set bit for [object+offset].
172
- // object is the object being stored into, value is the object being stored.
173
- // If offset is zero, then the smi_index register contains the array index into
174
- // the elements array represented as a smi. Otherwise it can be used as a
175
- // scratch register.
176
- // All registers are clobbered by the operation.
177
- void MacroAssembler::RecordWrite(Register object,
178
- int offset,
179
- Register value,
180
- Register smi_index) {
181
- // First, check if a remembered set write is even needed. The tests below
182
- // catch stores of Smis and stores into young gen (which does not have space
183
- // for the remembered set bits.
184
- Label done;
185
- JumpIfSmi(value, &done);
186
-
187
- RecordWriteNonSmi(object, offset, value, smi_index);
188
- bind(&done);
189
- }
190
-
191
-
192
- void MacroAssembler::RecordWriteNonSmi(Register object,
193
- int offset,
194
- Register scratch,
195
- Register smi_index) {
196
- Label done;
197
- // Test that the object address is not in the new space. We cannot
198
- // set remembered set bits in the new space.
199
- movq(scratch, object);
200
- ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
201
- and_(scratch, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
202
- movq(kScratchRegister, ExternalReference::new_space_start());
203
- cmpq(scratch, kScratchRegister);
204
- j(equal, &done);
205
-
206
- if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
207
- // Compute the bit offset in the remembered set, leave it in 'value'.
208
- lea(scratch, Operand(object, offset));
209
- ASSERT(is_int32(Page::kPageAlignmentMask));
210
- and_(scratch, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
211
- shr(scratch, Immediate(kObjectAlignmentBits));
212
-
213
- // Compute the page address from the heap object pointer, leave it in
214
- // 'object' (immediate value is sign extended).
215
- and_(object, Immediate(~Page::kPageAlignmentMask));
216
-
217
- // NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
218
- // to limit code size. We should probably evaluate this decision by
219
- // measuring the performance of an equivalent implementation using
220
- // "simpler" instructions
221
- bts(Operand(object, Page::kRSetOffset), scratch);
222
- } else {
223
- Register dst = smi_index;
224
- if (offset != 0) {
225
- lea(dst, Operand(object, offset));
226
- } else {
227
- // array access: calculate the destination address in the same manner as
228
- // KeyedStoreIC::GenerateGeneric.
229
- SmiIndex index = SmiToIndex(smi_index, smi_index, kPointerSizeLog2);
230
- lea(dst, Operand(object,
231
- index.reg,
232
- index.scale,
233
- FixedArray::kHeaderSize - kHeapObjectTag));
234
- }
235
- // If we are already generating a shared stub, not inlining the
236
- // record write code isn't going to save us any memory.
237
- if (generating_stub()) {
238
- RecordWriteHelper(this, object, dst, scratch);
239
- } else {
240
- RecordWriteStub stub(object, dst, scratch);
241
- CallStub(&stub);
242
- }
243
- }
244
-
245
- bind(&done);
246
- }
247
-
248
-
249
- void MacroAssembler::Assert(Condition cc, const char* msg) {
250
- if (FLAG_debug_code) Check(cc, msg);
251
- }
252
-
253
-
254
- void MacroAssembler::Check(Condition cc, const char* msg) {
255
- Label L;
256
- j(cc, &L);
257
- Abort(msg);
258
- // will not return here
259
- bind(&L);
260
- }
261
-
262
-
263
- void MacroAssembler::NegativeZeroTest(Register result,
264
- Register op,
265
- Label* then_label) {
266
- Label ok;
267
- testl(result, result);
268
- j(not_zero, &ok);
269
- testl(op, op);
270
- j(sign, then_label);
271
- bind(&ok);
272
- }
273
-
274
-
275
- void MacroAssembler::Abort(const char* msg) {
276
- // We want to pass the msg string like a smi to avoid GC
277
- // problems, however msg is not guaranteed to be aligned
278
- // properly. Instead, we pass an aligned pointer that is
279
- // a proper v8 smi, but also pass the alignment difference
280
- // from the real pointer as a smi.
281
- intptr_t p1 = reinterpret_cast<intptr_t>(msg);
282
- intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
283
- // Note: p0 might not be a valid Smi *value*, but it has a valid Smi tag.
284
- ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
285
- #ifdef DEBUG
286
- if (msg != NULL) {
287
- RecordComment("Abort message: ");
288
- RecordComment(msg);
289
- }
290
- #endif
291
- // Disable stub call restrictions to always allow calls to abort.
292
- set_allow_stub_calls(true);
293
-
294
- push(rax);
295
- movq(kScratchRegister, p0, RelocInfo::NONE);
296
- push(kScratchRegister);
297
- movq(kScratchRegister,
298
- reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))),
299
- RelocInfo::NONE);
300
- push(kScratchRegister);
301
- CallRuntime(Runtime::kAbort, 2);
302
- // will not return here
303
- int3();
304
- }
305
-
306
-
307
- void MacroAssembler::CallStub(CodeStub* stub) {
308
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
309
- Call(stub->GetCode(), RelocInfo::CODE_TARGET);
310
- }
311
-
312
-
313
- void MacroAssembler::TailCallStub(CodeStub* stub) {
314
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
315
- Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
316
- }
317
-
318
-
319
- void MacroAssembler::StubReturn(int argc) {
320
- ASSERT(argc >= 1 && generating_stub());
321
- ret((argc - 1) * kPointerSize);
322
- }
323
-
324
-
325
- void MacroAssembler::IllegalOperation(int num_arguments) {
326
- if (num_arguments > 0) {
327
- addq(rsp, Immediate(num_arguments * kPointerSize));
328
- }
329
- LoadRoot(rax, Heap::kUndefinedValueRootIndex);
330
- }
331
-
332
-
333
- void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
334
- CallRuntime(Runtime::FunctionForId(id), num_arguments);
335
- }
336
-
337
-
338
- void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
339
- // If the expected number of arguments of the runtime function is
340
- // constant, we check that the actual number of arguments match the
341
- // expectation.
342
- if (f->nargs >= 0 && f->nargs != num_arguments) {
343
- IllegalOperation(num_arguments);
344
- return;
345
- }
346
-
347
- Runtime::FunctionId function_id =
348
- static_cast<Runtime::FunctionId>(f->stub_id);
349
- RuntimeStub stub(function_id, num_arguments);
350
- CallStub(&stub);
351
- }
352
-
353
-
354
- void MacroAssembler::TailCallRuntime(ExternalReference const& ext,
355
- int num_arguments,
356
- int result_size) {
357
- // ----------- S t a t e -------------
358
- // -- rsp[0] : return address
359
- // -- rsp[8] : argument num_arguments - 1
360
- // ...
361
- // -- rsp[8 * num_arguments] : argument 0 (receiver)
362
- // -----------------------------------
363
-
364
- // TODO(1236192): Most runtime routines don't need the number of
365
- // arguments passed in because it is constant. At some point we
366
- // should remove this need and make the runtime routine entry code
367
- // smarter.
368
- movq(rax, Immediate(num_arguments));
369
- JumpToRuntime(ext, result_size);
370
- }
371
-
372
-
373
- void MacroAssembler::JumpToRuntime(const ExternalReference& ext,
374
- int result_size) {
375
- // Set the entry point and jump to the C entry runtime stub.
376
- movq(rbx, ext);
377
- CEntryStub ces(result_size);
378
- jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
379
- }
380
-
381
-
382
- void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
383
- bool resolved;
384
- Handle<Code> code = ResolveBuiltin(id, &resolved);
385
-
386
- const char* name = Builtins::GetName(id);
387
- int argc = Builtins::GetArgumentsCount(id);
388
-
389
- movq(target, code, RelocInfo::EMBEDDED_OBJECT);
390
- if (!resolved) {
391
- uint32_t flags =
392
- Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
393
- Bootstrapper::FixupFlagsUseCodeObject::encode(true);
394
- Unresolved entry = { pc_offset() - sizeof(intptr_t), flags, name };
395
- unresolved_.Add(entry);
396
- }
397
- addq(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
398
- }
399
-
400
- Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id,
401
- bool* resolved) {
402
- // Move the builtin function into the temporary function slot by
403
- // reading it from the builtins object. NOTE: We should be able to
404
- // reduce this to two instructions by putting the function table in
405
- // the global object instead of the "builtins" object and by using a
406
- // real register for the function.
407
- movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
408
- movq(rdx, FieldOperand(rdx, GlobalObject::kBuiltinsOffset));
409
- int builtins_offset =
410
- JSBuiltinsObject::kJSBuiltinsOffset + (id * kPointerSize);
411
- movq(rdi, FieldOperand(rdx, builtins_offset));
412
-
413
- return Builtins::GetCode(id, resolved);
414
- }
415
-
416
-
417
- void MacroAssembler::Set(Register dst, int64_t x) {
418
- if (x == 0) {
419
- xor_(dst, dst);
420
- } else if (is_int32(x)) {
421
- movq(dst, Immediate(static_cast<int32_t>(x)));
422
- } else if (is_uint32(x)) {
423
- movl(dst, Immediate(static_cast<uint32_t>(x)));
424
- } else {
425
- movq(dst, x, RelocInfo::NONE);
426
- }
427
- }
428
-
429
-
430
- void MacroAssembler::Set(const Operand& dst, int64_t x) {
431
- if (x == 0) {
432
- xor_(kScratchRegister, kScratchRegister);
433
- movq(dst, kScratchRegister);
434
- } else if (is_int32(x)) {
435
- movq(dst, Immediate(static_cast<int32_t>(x)));
436
- } else if (is_uint32(x)) {
437
- movl(dst, Immediate(static_cast<uint32_t>(x)));
438
- } else {
439
- movq(kScratchRegister, x, RelocInfo::NONE);
440
- movq(dst, kScratchRegister);
441
- }
442
- }
443
-
444
- // ----------------------------------------------------------------------------
445
- // Smi tagging, untagging and tag detection.
446
-
447
- static int kSmiShift = kSmiTagSize + kSmiShiftSize;
448
-
449
- void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
450
- ASSERT_EQ(0, kSmiTag);
451
- if (!dst.is(src)) {
452
- movl(dst, src);
453
- }
454
- shl(dst, Immediate(kSmiShift));
455
- }
456
-
457
-
458
- void MacroAssembler::Integer32ToSmi(Register dst,
459
- Register src,
460
- Label* on_overflow) {
461
- ASSERT_EQ(0, kSmiTag);
462
- // 32-bit integer always fits in a long smi.
463
- if (!dst.is(src)) {
464
- movl(dst, src);
465
- }
466
- shl(dst, Immediate(kSmiShift));
467
- }
468
-
469
-
470
- void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
471
- Register src,
472
- int constant) {
473
- if (dst.is(src)) {
474
- addq(dst, Immediate(constant));
475
- } else {
476
- lea(dst, Operand(src, constant));
477
- }
478
- shl(dst, Immediate(kSmiShift));
479
- }
480
-
481
-
482
- void MacroAssembler::SmiToInteger32(Register dst, Register src) {
483
- ASSERT_EQ(0, kSmiTag);
484
- if (!dst.is(src)) {
485
- movq(dst, src);
486
- }
487
- shr(dst, Immediate(kSmiShift));
488
- }
489
-
490
-
491
- void MacroAssembler::SmiToInteger64(Register dst, Register src) {
492
- ASSERT_EQ(0, kSmiTag);
493
- if (!dst.is(src)) {
494
- movq(dst, src);
495
- }
496
- sar(dst, Immediate(kSmiShift));
497
- }
498
-
499
-
500
- void MacroAssembler::SmiTest(Register src) {
501
- testq(src, src);
502
- }
503
-
504
-
505
- void MacroAssembler::SmiCompare(Register dst, Register src) {
506
- cmpq(dst, src);
507
- }
508
-
509
-
510
- void MacroAssembler::SmiCompare(Register dst, Smi* src) {
511
- ASSERT(!dst.is(kScratchRegister));
512
- if (src->value() == 0) {
513
- testq(dst, dst);
514
- } else {
515
- Move(kScratchRegister, src);
516
- cmpq(dst, kScratchRegister);
517
- }
518
- }
519
-
520
-
521
- void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
522
- cmpq(dst, src);
523
- }
524
-
525
-
526
- void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
527
- if (src->value() == 0) {
528
- // Only tagged long smi to have 32-bit representation.
529
- cmpq(dst, Immediate(0));
530
- } else {
531
- Move(kScratchRegister, src);
532
- cmpq(dst, kScratchRegister);
533
- }
534
- }
535
-
536
-
537
- void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
538
- Register src,
539
- int power) {
540
- ASSERT(power >= 0);
541
- ASSERT(power < 64);
542
- if (power == 0) {
543
- SmiToInteger64(dst, src);
544
- return;
545
- }
546
- if (!dst.is(src)) {
547
- movq(dst, src);
548
- }
549
- if (power < kSmiShift) {
550
- sar(dst, Immediate(kSmiShift - power));
551
- } else if (power > kSmiShift) {
552
- shl(dst, Immediate(power - kSmiShift));
553
- }
554
- }
555
-
556
-
557
- Condition MacroAssembler::CheckSmi(Register src) {
558
- ASSERT_EQ(0, kSmiTag);
559
- testb(src, Immediate(kSmiTagMask));
560
- return zero;
561
- }
562
-
563
-
564
- Condition MacroAssembler::CheckPositiveSmi(Register src) {
565
- ASSERT_EQ(0, kSmiTag);
566
- movq(kScratchRegister, src);
567
- rol(kScratchRegister, Immediate(1));
568
- testl(kScratchRegister, Immediate(0x03));
569
- return zero;
570
- }
571
-
572
-
573
- Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
574
- if (first.is(second)) {
575
- return CheckSmi(first);
576
- }
577
- movl(kScratchRegister, first);
578
- orl(kScratchRegister, second);
579
- testb(kScratchRegister, Immediate(kSmiTagMask));
580
- return zero;
581
- }
582
-
583
-
584
- Condition MacroAssembler::CheckIsMinSmi(Register src) {
585
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
586
- movq(kScratchRegister, src);
587
- rol(kScratchRegister, Immediate(1));
588
- cmpq(kScratchRegister, Immediate(1));
589
- return equal;
590
- }
591
-
592
-
593
- Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
594
- // A 32-bit integer value can always be converted to a smi.
595
- return always;
596
- }
597
-
598
-
599
- Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
600
- // An unsigned 32-bit integer value is valid as long as the high bit
601
- // is not set.
602
- testq(src, Immediate(0x80000000));
603
- return zero;
604
- }
605
-
606
-
607
- void MacroAssembler::SmiNeg(Register dst, Register src, Label* on_smi_result) {
608
- if (dst.is(src)) {
609
- ASSERT(!dst.is(kScratchRegister));
610
- movq(kScratchRegister, src);
611
- neg(dst); // Low 32 bits are retained as zero by negation.
612
- // Test if result is zero or Smi::kMinValue.
613
- cmpq(dst, kScratchRegister);
614
- j(not_equal, on_smi_result);
615
- movq(src, kScratchRegister);
616
- } else {
617
- movq(dst, src);
618
- neg(dst);
619
- cmpq(dst, src);
620
- // If the result is zero or Smi::kMinValue, negation failed to create a smi.
621
- j(not_equal, on_smi_result);
622
- }
623
- }
624
-
625
-
626
- void MacroAssembler::SmiAdd(Register dst,
627
- Register src1,
628
- Register src2,
629
- Label* on_not_smi_result) {
630
- ASSERT(!dst.is(src2));
631
- if (dst.is(src1)) {
632
- addq(dst, src2);
633
- Label smi_result;
634
- j(no_overflow, &smi_result);
635
- // Restore src1.
636
- subq(src1, src2);
637
- jmp(on_not_smi_result);
638
- bind(&smi_result);
639
- } else {
640
- movq(dst, src1);
641
- addq(dst, src2);
642
- j(overflow, on_not_smi_result);
643
- }
644
- }
645
-
646
-
647
- void MacroAssembler::SmiSub(Register dst,
648
- Register src1,
649
- Register src2,
650
- Label* on_not_smi_result) {
651
- ASSERT(!dst.is(src2));
652
- if (dst.is(src1)) {
653
- subq(dst, src2);
654
- Label smi_result;
655
- j(no_overflow, &smi_result);
656
- // Restore src1.
657
- addq(src1, src2);
658
- jmp(on_not_smi_result);
659
- bind(&smi_result);
660
- } else {
661
- movq(dst, src1);
662
- subq(dst, src2);
663
- j(overflow, on_not_smi_result);
664
- }
665
- }
666
-
667
-
668
- void MacroAssembler::SmiMul(Register dst,
669
- Register src1,
670
- Register src2,
671
- Label* on_not_smi_result) {
672
- ASSERT(!dst.is(src2));
673
- ASSERT(!dst.is(kScratchRegister));
674
- ASSERT(!src1.is(kScratchRegister));
675
- ASSERT(!src2.is(kScratchRegister));
676
-
677
- if (dst.is(src1)) {
678
- Label failure, zero_correct_result;
679
- movq(kScratchRegister, src1); // Create backup for later testing.
680
- SmiToInteger64(dst, src1);
681
- imul(dst, src2);
682
- j(overflow, &failure);
683
-
684
- // Check for negative zero result. If product is zero, and one
685
- // argument is negative, go to slow case.
686
- Label correct_result;
687
- testq(dst, dst);
688
- j(not_zero, &correct_result);
689
-
690
- movq(dst, kScratchRegister);
691
- xor_(dst, src2);
692
- j(positive, &zero_correct_result); // Result was positive zero.
693
-
694
- bind(&failure); // Reused failure exit, restores src1.
695
- movq(src1, kScratchRegister);
696
- jmp(on_not_smi_result);
697
-
698
- bind(&zero_correct_result);
699
- xor_(dst, dst);
700
-
701
- bind(&correct_result);
702
- } else {
703
- SmiToInteger64(dst, src1);
704
- imul(dst, src2);
705
- j(overflow, on_not_smi_result);
706
- // Check for negative zero result. If product is zero, and one
707
- // argument is negative, go to slow case.
708
- Label correct_result;
709
- testq(dst, dst);
710
- j(not_zero, &correct_result);
711
- // One of src1 and src2 is zero, the check whether the other is
712
- // negative.
713
- movq(kScratchRegister, src1);
714
- xor_(kScratchRegister, src2);
715
- j(negative, on_not_smi_result);
716
- bind(&correct_result);
717
- }
718
- }
719
-
720
-
721
- void MacroAssembler::SmiTryAddConstant(Register dst,
722
- Register src,
723
- Smi* constant,
724
- Label* on_not_smi_result) {
725
- // Does not assume that src is a smi.
726
- ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
727
- ASSERT_EQ(0, kSmiTag);
728
- ASSERT(!dst.is(kScratchRegister));
729
- ASSERT(!src.is(kScratchRegister));
730
-
731
- JumpIfNotSmi(src, on_not_smi_result);
732
- Register tmp = (dst.is(src) ? kScratchRegister : dst);
733
- Move(tmp, constant);
734
- addq(tmp, src);
735
- j(overflow, on_not_smi_result);
736
- if (dst.is(src)) {
737
- movq(dst, tmp);
738
- }
739
- }
740
-
741
-
742
- void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
743
- if (constant->value() == 0) {
744
- if (!dst.is(src)) {
745
- movq(dst, src);
746
- }
747
- } else if (dst.is(src)) {
748
- ASSERT(!dst.is(kScratchRegister));
749
-
750
- Move(kScratchRegister, constant);
751
- addq(dst, kScratchRegister);
752
- } else {
753
- Move(dst, constant);
754
- addq(dst, src);
755
- }
756
- }
757
-
758
-
759
- void MacroAssembler::SmiAddConstant(Register dst,
760
- Register src,
761
- Smi* constant,
762
- Label* on_not_smi_result) {
763
- if (constant->value() == 0) {
764
- if (!dst.is(src)) {
765
- movq(dst, src);
766
- }
767
- } else if (dst.is(src)) {
768
- ASSERT(!dst.is(kScratchRegister));
769
-
770
- Move(kScratchRegister, constant);
771
- addq(dst, kScratchRegister);
772
- Label result_ok;
773
- j(no_overflow, &result_ok);
774
- subq(dst, kScratchRegister);
775
- jmp(on_not_smi_result);
776
- bind(&result_ok);
777
- } else {
778
- Move(dst, constant);
779
- addq(dst, src);
780
- j(overflow, on_not_smi_result);
781
- }
782
- }
783
-
784
-
785
- void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
786
- if (constant->value() == 0) {
787
- if (!dst.is(src)) {
788
- movq(dst, src);
789
- }
790
- } else if (dst.is(src)) {
791
- ASSERT(!dst.is(kScratchRegister));
792
-
793
- Move(kScratchRegister, constant);
794
- subq(dst, kScratchRegister);
795
- } else {
796
- // Subtract by adding the negative, to do it in two operations.
797
- if (constant->value() == Smi::kMinValue) {
798
- Move(kScratchRegister, constant);
799
- movq(dst, src);
800
- subq(dst, kScratchRegister);
801
- } else {
802
- Move(dst, Smi::FromInt(-constant->value()));
803
- addq(dst, src);
804
- }
805
- }
806
- }
807
-
808
-
809
- void MacroAssembler::SmiSubConstant(Register dst,
810
- Register src,
811
- Smi* constant,
812
- Label* on_not_smi_result) {
813
- if (constant->value() == 0) {
814
- if (!dst.is(src)) {
815
- movq(dst, src);
816
- }
817
- } else if (dst.is(src)) {
818
- ASSERT(!dst.is(kScratchRegister));
819
-
820
- Move(kScratchRegister, constant);
821
- subq(dst, kScratchRegister);
822
- Label sub_success;
823
- j(no_overflow, &sub_success);
824
- addq(src, kScratchRegister);
825
- jmp(on_not_smi_result);
826
- bind(&sub_success);
827
- } else {
828
- if (constant->value() == Smi::kMinValue) {
829
- Move(kScratchRegister, constant);
830
- movq(dst, src);
831
- subq(dst, kScratchRegister);
832
- j(overflow, on_not_smi_result);
833
- } else {
834
- Move(dst, Smi::FromInt(-(constant->value())));
835
- addq(dst, src);
836
- j(overflow, on_not_smi_result);
837
- }
838
- }
839
- }
840
-
841
-
842
- void MacroAssembler::SmiDiv(Register dst,
843
- Register src1,
844
- Register src2,
845
- Label* on_not_smi_result) {
846
- ASSERT(!src1.is(kScratchRegister));
847
- ASSERT(!src2.is(kScratchRegister));
848
- ASSERT(!dst.is(kScratchRegister));
849
- ASSERT(!src2.is(rax));
850
- ASSERT(!src2.is(rdx));
851
- ASSERT(!src1.is(rdx));
852
-
853
- // Check for 0 divisor (result is +/-Infinity).
854
- Label positive_divisor;
855
- testq(src2, src2);
856
- j(zero, on_not_smi_result);
857
-
858
- if (src1.is(rax)) {
859
- movq(kScratchRegister, src1);
860
- }
861
- SmiToInteger32(rax, src1);
862
- // We need to rule out dividing Smi::kMinValue by -1, since that would
863
- // overflow in idiv and raise an exception.
864
- // We combine this with negative zero test (negative zero only happens
865
- // when dividing zero by a negative number).
866
-
867
- // We overshoot a little and go to slow case if we divide min-value
868
- // by any negative value, not just -1.
869
- Label safe_div;
870
- testl(rax, Immediate(0x7fffffff));
871
- j(not_zero, &safe_div);
872
- testq(src2, src2);
873
- if (src1.is(rax)) {
874
- j(positive, &safe_div);
875
- movq(src1, kScratchRegister);
876
- jmp(on_not_smi_result);
877
- } else {
878
- j(negative, on_not_smi_result);
879
- }
880
- bind(&safe_div);
881
-
882
- SmiToInteger32(src2, src2);
883
- // Sign extend src1 into edx:eax.
884
- cdq();
885
- idivl(src2);
886
- Integer32ToSmi(src2, src2);
887
- // Check that the remainder is zero.
888
- testl(rdx, rdx);
889
- if (src1.is(rax)) {
890
- Label smi_result;
891
- j(zero, &smi_result);
892
- movq(src1, kScratchRegister);
893
- jmp(on_not_smi_result);
894
- bind(&smi_result);
895
- } else {
896
- j(not_zero, on_not_smi_result);
897
- }
898
- if (!dst.is(src1) && src1.is(rax)) {
899
- movq(src1, kScratchRegister);
900
- }
901
- Integer32ToSmi(dst, rax);
902
- }
903
-
904
-
905
- void MacroAssembler::SmiMod(Register dst,
906
- Register src1,
907
- Register src2,
908
- Label* on_not_smi_result) {
909
- ASSERT(!dst.is(kScratchRegister));
910
- ASSERT(!src1.is(kScratchRegister));
911
- ASSERT(!src2.is(kScratchRegister));
912
- ASSERT(!src2.is(rax));
913
- ASSERT(!src2.is(rdx));
914
- ASSERT(!src1.is(rdx));
915
- ASSERT(!src1.is(src2));
916
-
917
- testq(src2, src2);
918
- j(zero, on_not_smi_result);
919
-
920
- if (src1.is(rax)) {
921
- movq(kScratchRegister, src1);
922
- }
923
- SmiToInteger32(rax, src1);
924
- SmiToInteger32(src2, src2);
925
-
926
- // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
927
- Label safe_div;
928
- cmpl(rax, Immediate(Smi::kMinValue));
929
- j(not_equal, &safe_div);
930
- cmpl(src2, Immediate(-1));
931
- j(not_equal, &safe_div);
932
- // Retag inputs and go slow case.
933
- Integer32ToSmi(src2, src2);
934
- if (src1.is(rax)) {
935
- movq(src1, kScratchRegister);
936
- }
937
- jmp(on_not_smi_result);
938
- bind(&safe_div);
939
-
940
- // Sign extend eax into edx:eax.
941
- cdq();
942
- idivl(src2);
943
- // Restore smi tags on inputs.
944
- Integer32ToSmi(src2, src2);
945
- if (src1.is(rax)) {
946
- movq(src1, kScratchRegister);
947
- }
948
- // Check for a negative zero result. If the result is zero, and the
949
- // dividend is negative, go slow to return a floating point negative zero.
950
- Label smi_result;
951
- testl(rdx, rdx);
952
- j(not_zero, &smi_result);
953
- testq(src1, src1);
954
- j(negative, on_not_smi_result);
955
- bind(&smi_result);
956
- Integer32ToSmi(dst, rdx);
957
- }
958
-
959
-
960
- void MacroAssembler::SmiNot(Register dst, Register src) {
961
- ASSERT(!dst.is(kScratchRegister));
962
- ASSERT(!src.is(kScratchRegister));
963
- // Set tag and padding bits before negating, so that they are zero afterwards.
964
- movl(kScratchRegister, Immediate(~0));
965
- if (dst.is(src)) {
966
- xor_(dst, kScratchRegister);
967
- } else {
968
- lea(dst, Operand(src, kScratchRegister, times_1, 0));
969
- }
970
- not_(dst);
971
- }
972
-
973
-
974
- void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
975
- ASSERT(!dst.is(src2));
976
- if (!dst.is(src1)) {
977
- movq(dst, src1);
978
- }
979
- and_(dst, src2);
980
- }
981
-
982
-
983
- void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
984
- if (constant->value() == 0) {
985
- xor_(dst, dst);
986
- } else if (dst.is(src)) {
987
- ASSERT(!dst.is(kScratchRegister));
988
- Move(kScratchRegister, constant);
989
- and_(dst, kScratchRegister);
990
- } else {
991
- Move(dst, constant);
992
- and_(dst, src);
993
- }
994
- }
995
-
996
-
997
- void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
998
- if (!dst.is(src1)) {
999
- movq(dst, src1);
1000
- }
1001
- or_(dst, src2);
1002
- }
1003
-
1004
-
1005
- void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
1006
- if (dst.is(src)) {
1007
- ASSERT(!dst.is(kScratchRegister));
1008
- Move(kScratchRegister, constant);
1009
- or_(dst, kScratchRegister);
1010
- } else {
1011
- Move(dst, constant);
1012
- or_(dst, src);
1013
- }
1014
- }
1015
-
1016
-
1017
- void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
1018
- if (!dst.is(src1)) {
1019
- movq(dst, src1);
1020
- }
1021
- xor_(dst, src2);
1022
- }
1023
-
1024
-
1025
- void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
1026
- if (dst.is(src)) {
1027
- ASSERT(!dst.is(kScratchRegister));
1028
- Move(kScratchRegister, constant);
1029
- xor_(dst, kScratchRegister);
1030
- } else {
1031
- Move(dst, constant);
1032
- xor_(dst, src);
1033
- }
1034
- }
1035
-
1036
-
1037
- void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
1038
- Register src,
1039
- int shift_value) {
1040
- ASSERT(is_uint5(shift_value));
1041
- if (shift_value > 0) {
1042
- if (dst.is(src)) {
1043
- sar(dst, Immediate(shift_value + kSmiShift));
1044
- shl(dst, Immediate(kSmiShift));
1045
- } else {
1046
- UNIMPLEMENTED(); // Not used.
1047
- }
1048
- }
1049
- }
1050
-
1051
-
1052
- void MacroAssembler::SmiShiftLogicalRightConstant(Register dst,
1053
- Register src,
1054
- int shift_value,
1055
- Label* on_not_smi_result) {
1056
- // Logic right shift interprets its result as an *unsigned* number.
1057
- if (dst.is(src)) {
1058
- UNIMPLEMENTED(); // Not used.
1059
- } else {
1060
- movq(dst, src);
1061
- if (shift_value == 0) {
1062
- testq(dst, dst);
1063
- j(negative, on_not_smi_result);
1064
- }
1065
- shr(dst, Immediate(shift_value + kSmiShift));
1066
- shl(dst, Immediate(kSmiShift));
1067
- }
1068
- }
1069
-
1070
-
1071
- void MacroAssembler::SmiShiftLeftConstant(Register dst,
1072
- Register src,
1073
- int shift_value,
1074
- Label* on_not_smi_result) {
1075
- if (!dst.is(src)) {
1076
- movq(dst, src);
1077
- }
1078
- if (shift_value > 0) {
1079
- shl(dst, Immediate(shift_value));
1080
- }
1081
- }
1082
-
1083
-
1084
- void MacroAssembler::SmiShiftLeft(Register dst,
1085
- Register src1,
1086
- Register src2,
1087
- Label* on_not_smi_result) {
1088
- ASSERT(!dst.is(rcx));
1089
- Label result_ok;
1090
- // Untag shift amount.
1091
- if (!dst.is(src1)) {
1092
- movq(dst, src1);
1093
- }
1094
- SmiToInteger32(rcx, src2);
1095
- // Shift amount specified by lower 5 bits, not six as the shl opcode.
1096
- and_(rcx, Immediate(0x1f));
1097
- shl_cl(dst);
1098
- }
1099
-
1100
-
1101
- void MacroAssembler::SmiShiftLogicalRight(Register dst,
1102
- Register src1,
1103
- Register src2,
1104
- Label* on_not_smi_result) {
1105
- ASSERT(!dst.is(kScratchRegister));
1106
- ASSERT(!src1.is(kScratchRegister));
1107
- ASSERT(!src2.is(kScratchRegister));
1108
- ASSERT(!dst.is(rcx));
1109
- Label result_ok;
1110
- if (src1.is(rcx) || src2.is(rcx)) {
1111
- movq(kScratchRegister, rcx);
1112
- }
1113
- if (!dst.is(src1)) {
1114
- movq(dst, src1);
1115
- }
1116
- SmiToInteger32(rcx, src2);
1117
- orl(rcx, Immediate(kSmiShift));
1118
- shr_cl(dst); // Shift is rcx modulo 0x1f + 32.
1119
- shl(dst, Immediate(kSmiShift));
1120
- testq(dst, dst);
1121
- if (src1.is(rcx) || src2.is(rcx)) {
1122
- Label positive_result;
1123
- j(positive, &positive_result);
1124
- if (src1.is(rcx)) {
1125
- movq(src1, kScratchRegister);
1126
- } else {
1127
- movq(src2, kScratchRegister);
1128
- }
1129
- jmp(on_not_smi_result);
1130
- bind(&positive_result);
1131
- } else {
1132
- j(negative, on_not_smi_result); // src2 was zero and src1 negative.
1133
- }
1134
- }
1135
-
1136
-
1137
- void MacroAssembler::SmiShiftArithmeticRight(Register dst,
1138
- Register src1,
1139
- Register src2) {
1140
- ASSERT(!dst.is(kScratchRegister));
1141
- ASSERT(!src1.is(kScratchRegister));
1142
- ASSERT(!src2.is(kScratchRegister));
1143
- ASSERT(!dst.is(rcx));
1144
- if (src1.is(rcx)) {
1145
- movq(kScratchRegister, src1);
1146
- } else if (src2.is(rcx)) {
1147
- movq(kScratchRegister, src2);
1148
- }
1149
- if (!dst.is(src1)) {
1150
- movq(dst, src1);
1151
- }
1152
- SmiToInteger32(rcx, src2);
1153
- orl(rcx, Immediate(kSmiShift));
1154
- sar_cl(dst); // Shift 32 + original rcx & 0x1f.
1155
- shl(dst, Immediate(kSmiShift));
1156
- if (src1.is(rcx)) {
1157
- movq(src1, kScratchRegister);
1158
- } else if (src2.is(rcx)) {
1159
- movq(src2, kScratchRegister);
1160
- }
1161
- }
1162
-
1163
-
1164
- void MacroAssembler::SelectNonSmi(Register dst,
1165
- Register src1,
1166
- Register src2,
1167
- Label* on_not_smis) {
1168
- ASSERT(!dst.is(kScratchRegister));
1169
- ASSERT(!src1.is(kScratchRegister));
1170
- ASSERT(!src2.is(kScratchRegister));
1171
- ASSERT(!dst.is(src1));
1172
- ASSERT(!dst.is(src2));
1173
- // Both operands must not be smis.
1174
- #ifdef DEBUG
1175
- if (allow_stub_calls()) { // Check contains a stub call.
1176
- Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
1177
- Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
1178
- }
1179
- #endif
1180
- ASSERT_EQ(0, kSmiTag);
1181
- ASSERT_EQ(0, Smi::FromInt(0));
1182
- movl(kScratchRegister, Immediate(kSmiTagMask));
1183
- and_(kScratchRegister, src1);
1184
- testl(kScratchRegister, src2);
1185
- // If non-zero then both are smis.
1186
- j(not_zero, on_not_smis);
1187
-
1188
- // Exactly one operand is a smi.
1189
- ASSERT_EQ(1, static_cast<int>(kSmiTagMask));
1190
- // kScratchRegister still holds src1 & kSmiTag, which is either zero or one.
1191
- subq(kScratchRegister, Immediate(1));
1192
- // If src1 is a smi, then scratch register all 1s, else it is all 0s.
1193
- movq(dst, src1);
1194
- xor_(dst, src2);
1195
- and_(dst, kScratchRegister);
1196
- // If src1 is a smi, dst holds src1 ^ src2, else it is zero.
1197
- xor_(dst, src1);
1198
- // If src1 is a smi, dst is src2, else it is src1, i.e., the non-smi.
1199
- }
1200
-
1201
- SmiIndex MacroAssembler::SmiToIndex(Register dst,
1202
- Register src,
1203
- int shift) {
1204
- ASSERT(is_uint6(shift));
1205
- // There is a possible optimization if shift is in the range 60-63, but that
1206
- // will (and must) never happen.
1207
- if (!dst.is(src)) {
1208
- movq(dst, src);
1209
- }
1210
- if (shift < kSmiShift) {
1211
- sar(dst, Immediate(kSmiShift - shift));
1212
- } else {
1213
- shl(dst, Immediate(shift - kSmiShift));
1214
- }
1215
- return SmiIndex(dst, times_1);
1216
- }
1217
-
1218
- SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
1219
- Register src,
1220
- int shift) {
1221
- // Register src holds a positive smi.
1222
- ASSERT(is_uint6(shift));
1223
- if (!dst.is(src)) {
1224
- movq(dst, src);
1225
- }
1226
- neg(dst);
1227
- if (shift < kSmiShift) {
1228
- sar(dst, Immediate(kSmiShift - shift));
1229
- } else {
1230
- shl(dst, Immediate(shift - kSmiShift));
1231
- }
1232
- return SmiIndex(dst, times_1);
1233
- }
1234
-
1235
-
1236
- void MacroAssembler::JumpIfSmi(Register src, Label* on_smi) {
1237
- ASSERT_EQ(0, kSmiTag);
1238
- Condition smi = CheckSmi(src);
1239
- j(smi, on_smi);
1240
- }
1241
-
1242
-
1243
- void MacroAssembler::JumpIfNotSmi(Register src, Label* on_not_smi) {
1244
- Condition smi = CheckSmi(src);
1245
- j(NegateCondition(smi), on_not_smi);
1246
- }
1247
-
1248
-
1249
- void MacroAssembler::JumpIfNotPositiveSmi(Register src,
1250
- Label* on_not_positive_smi) {
1251
- Condition positive_smi = CheckPositiveSmi(src);
1252
- j(NegateCondition(positive_smi), on_not_positive_smi);
1253
- }
1254
-
1255
-
1256
- void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
1257
- Smi* constant,
1258
- Label* on_equals) {
1259
- SmiCompare(src, constant);
1260
- j(equal, on_equals);
1261
- }
1262
-
1263
-
1264
- void MacroAssembler::JumpIfNotValidSmiValue(Register src, Label* on_invalid) {
1265
- Condition is_valid = CheckInteger32ValidSmiValue(src);
1266
- j(NegateCondition(is_valid), on_invalid);
1267
- }
1268
-
1269
-
1270
- void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
1271
- Label* on_invalid) {
1272
- Condition is_valid = CheckUInteger32ValidSmiValue(src);
1273
- j(NegateCondition(is_valid), on_invalid);
1274
- }
1275
-
1276
-
1277
- void MacroAssembler::JumpIfNotBothSmi(Register src1, Register src2,
1278
- Label* on_not_both_smi) {
1279
- Condition both_smi = CheckBothSmi(src1, src2);
1280
- j(NegateCondition(both_smi), on_not_both_smi);
1281
- }
1282
-
1283
-
1284
- void MacroAssembler::Move(Register dst, Handle<Object> source) {
1285
- ASSERT(!source->IsFailure());
1286
- if (source->IsSmi()) {
1287
- Move(dst, Smi::cast(*source));
1288
- } else {
1289
- movq(dst, source, RelocInfo::EMBEDDED_OBJECT);
1290
- }
1291
- }
1292
-
1293
-
1294
- void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
1295
- ASSERT(!source->IsFailure());
1296
- if (source->IsSmi()) {
1297
- Move(dst, Smi::cast(*source));
1298
- } else {
1299
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1300
- movq(dst, kScratchRegister);
1301
- }
1302
- }
1303
-
1304
-
1305
- void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
1306
- if (source->IsSmi()) {
1307
- SmiCompare(dst, Smi::cast(*source));
1308
- } else {
1309
- Move(kScratchRegister, source);
1310
- cmpq(dst, kScratchRegister);
1311
- }
1312
- }
1313
-
1314
-
1315
- void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
1316
- if (source->IsSmi()) {
1317
- SmiCompare(dst, Smi::cast(*source));
1318
- } else {
1319
- ASSERT(source->IsHeapObject());
1320
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1321
- cmpq(dst, kScratchRegister);
1322
- }
1323
- }
1324
-
1325
-
1326
- void MacroAssembler::Push(Handle<Object> source) {
1327
- if (source->IsSmi()) {
1328
- Push(Smi::cast(*source));
1329
- } else {
1330
- ASSERT(source->IsHeapObject());
1331
- movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
1332
- push(kScratchRegister);
1333
- }
1334
- }
1335
-
1336
-
1337
- void MacroAssembler::Push(Smi* source) {
1338
- intptr_t smi = reinterpret_cast<intptr_t>(source);
1339
- if (is_int32(smi)) {
1340
- push(Immediate(static_cast<int32_t>(smi)));
1341
- } else {
1342
- Set(kScratchRegister, smi);
1343
- push(kScratchRegister);
1344
- }
1345
- }
1346
-
1347
-
1348
- void MacroAssembler::Drop(int stack_elements) {
1349
- if (stack_elements > 0) {
1350
- addq(rsp, Immediate(stack_elements * kPointerSize));
1351
- }
1352
- }
1353
-
1354
-
1355
- void MacroAssembler::Test(const Operand& src, Smi* source) {
1356
- intptr_t smi = reinterpret_cast<intptr_t>(source);
1357
- if (is_int32(smi)) {
1358
- testl(src, Immediate(static_cast<int32_t>(smi)));
1359
- } else {
1360
- Move(kScratchRegister, source);
1361
- testq(src, kScratchRegister);
1362
- }
1363
- }
1364
-
1365
-
1366
- void MacroAssembler::Jump(ExternalReference ext) {
1367
- movq(kScratchRegister, ext);
1368
- jmp(kScratchRegister);
1369
- }
1370
-
1371
-
1372
- void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
1373
- movq(kScratchRegister, destination, rmode);
1374
- jmp(kScratchRegister);
1375
- }
1376
-
1377
-
1378
- void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
1379
- // TODO(X64): Inline this
1380
- jmp(code_object, rmode);
1381
- }
1382
-
1383
-
1384
- void MacroAssembler::Call(ExternalReference ext) {
1385
- movq(kScratchRegister, ext);
1386
- call(kScratchRegister);
1387
- }
1388
-
1389
-
1390
- void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
1391
- movq(kScratchRegister, destination, rmode);
1392
- call(kScratchRegister);
1393
- }
1394
-
1395
-
1396
- void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
1397
- ASSERT(RelocInfo::IsCodeTarget(rmode));
1398
- WriteRecordedPositions();
1399
- call(code_object, rmode);
1400
- }
1401
-
1402
-
1403
- void MacroAssembler::PushTryHandler(CodeLocation try_location,
1404
- HandlerType type) {
1405
- // Adjust this code if not the case.
1406
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
1407
-
1408
- // The pc (return address) is already on TOS. This code pushes state,
1409
- // frame pointer and current handler. Check that they are expected
1410
- // next on the stack, in that order.
1411
- ASSERT_EQ(StackHandlerConstants::kStateOffset,
1412
- StackHandlerConstants::kPCOffset - kPointerSize);
1413
- ASSERT_EQ(StackHandlerConstants::kFPOffset,
1414
- StackHandlerConstants::kStateOffset - kPointerSize);
1415
- ASSERT_EQ(StackHandlerConstants::kNextOffset,
1416
- StackHandlerConstants::kFPOffset - kPointerSize);
1417
-
1418
- if (try_location == IN_JAVASCRIPT) {
1419
- if (type == TRY_CATCH_HANDLER) {
1420
- push(Immediate(StackHandler::TRY_CATCH));
1421
- } else {
1422
- push(Immediate(StackHandler::TRY_FINALLY));
1423
- }
1424
- push(rbp);
1425
- } else {
1426
- ASSERT(try_location == IN_JS_ENTRY);
1427
- // The frame pointer does not point to a JS frame so we save NULL
1428
- // for rbp. We expect the code throwing an exception to check rbp
1429
- // before dereferencing it to restore the context.
1430
- push(Immediate(StackHandler::ENTRY));
1431
- push(Immediate(0)); // NULL frame pointer.
1432
- }
1433
- // Save the current handler.
1434
- movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1435
- push(Operand(kScratchRegister, 0));
1436
- // Link this handler.
1437
- movq(Operand(kScratchRegister, 0), rsp);
1438
- }
1439
-
1440
-
1441
- void MacroAssembler::PopTryHandler() {
1442
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
1443
- // Unlink this handler.
1444
- movq(kScratchRegister, ExternalReference(Top::k_handler_address));
1445
- pop(Operand(kScratchRegister, 0));
1446
- // Remove the remaining fields.
1447
- addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1448
- }
1449
-
1450
-
1451
- void MacroAssembler::Ret() {
1452
- ret(0);
1453
- }
1454
-
1455
-
1456
- void MacroAssembler::FCmp() {
1457
- fucomip();
1458
- ffree(0);
1459
- fincstp();
1460
- }
1461
-
1462
-
1463
- void MacroAssembler::CmpObjectType(Register heap_object,
1464
- InstanceType type,
1465
- Register map) {
1466
- movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
1467
- CmpInstanceType(map, type);
1468
- }
1469
-
1470
-
1471
- void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
1472
- cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
1473
- Immediate(static_cast<int8_t>(type)));
1474
- }
1475
-
1476
-
1477
- void MacroAssembler::TryGetFunctionPrototype(Register function,
1478
- Register result,
1479
- Label* miss) {
1480
- // Check that the receiver isn't a smi.
1481
- testl(function, Immediate(kSmiTagMask));
1482
- j(zero, miss);
1483
-
1484
- // Check that the function really is a function.
1485
- CmpObjectType(function, JS_FUNCTION_TYPE, result);
1486
- j(not_equal, miss);
1487
-
1488
- // Make sure that the function has an instance prototype.
1489
- Label non_instance;
1490
- testb(FieldOperand(result, Map::kBitFieldOffset),
1491
- Immediate(1 << Map::kHasNonInstancePrototype));
1492
- j(not_zero, &non_instance);
1493
-
1494
- // Get the prototype or initial map from the function.
1495
- movq(result,
1496
- FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1497
-
1498
- // If the prototype or initial map is the hole, don't return it and
1499
- // simply miss the cache instead. This will allow us to allocate a
1500
- // prototype object on-demand in the runtime system.
1501
- CompareRoot(result, Heap::kTheHoleValueRootIndex);
1502
- j(equal, miss);
1503
-
1504
- // If the function does not have an initial map, we're done.
1505
- Label done;
1506
- CmpObjectType(result, MAP_TYPE, kScratchRegister);
1507
- j(not_equal, &done);
1508
-
1509
- // Get the prototype from the initial map.
1510
- movq(result, FieldOperand(result, Map::kPrototypeOffset));
1511
- jmp(&done);
1512
-
1513
- // Non-instance prototype: Fetch prototype from constructor field
1514
- // in initial map.
1515
- bind(&non_instance);
1516
- movq(result, FieldOperand(result, Map::kConstructorOffset));
1517
-
1518
- // All done.
1519
- bind(&done);
1520
- }
1521
-
1522
-
1523
- void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
1524
- if (FLAG_native_code_counters && counter->Enabled()) {
1525
- movq(kScratchRegister, ExternalReference(counter));
1526
- movl(Operand(kScratchRegister, 0), Immediate(value));
1527
- }
1528
- }
1529
-
1530
-
1531
- void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
1532
- ASSERT(value > 0);
1533
- if (FLAG_native_code_counters && counter->Enabled()) {
1534
- movq(kScratchRegister, ExternalReference(counter));
1535
- Operand operand(kScratchRegister, 0);
1536
- if (value == 1) {
1537
- incl(operand);
1538
- } else {
1539
- addl(operand, Immediate(value));
1540
- }
1541
- }
1542
- }
1543
-
1544
-
1545
- void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
1546
- ASSERT(value > 0);
1547
- if (FLAG_native_code_counters && counter->Enabled()) {
1548
- movq(kScratchRegister, ExternalReference(counter));
1549
- Operand operand(kScratchRegister, 0);
1550
- if (value == 1) {
1551
- decl(operand);
1552
- } else {
1553
- subl(operand, Immediate(value));
1554
- }
1555
- }
1556
- }
1557
-
1558
- #ifdef ENABLE_DEBUGGER_SUPPORT
1559
-
1560
- void MacroAssembler::PushRegistersFromMemory(RegList regs) {
1561
- ASSERT((regs & ~kJSCallerSaved) == 0);
1562
- // Push the content of the memory location to the stack.
1563
- for (int i = 0; i < kNumJSCallerSaved; i++) {
1564
- int r = JSCallerSavedCode(i);
1565
- if ((regs & (1 << r)) != 0) {
1566
- ExternalReference reg_addr =
1567
- ExternalReference(Debug_Address::Register(i));
1568
- movq(kScratchRegister, reg_addr);
1569
- push(Operand(kScratchRegister, 0));
1570
- }
1571
- }
1572
- }
1573
-
1574
-
1575
- void MacroAssembler::SaveRegistersToMemory(RegList regs) {
1576
- ASSERT((regs & ~kJSCallerSaved) == 0);
1577
- // Copy the content of registers to memory location.
1578
- for (int i = 0; i < kNumJSCallerSaved; i++) {
1579
- int r = JSCallerSavedCode(i);
1580
- if ((regs & (1 << r)) != 0) {
1581
- Register reg = { r };
1582
- ExternalReference reg_addr =
1583
- ExternalReference(Debug_Address::Register(i));
1584
- movq(kScratchRegister, reg_addr);
1585
- movq(Operand(kScratchRegister, 0), reg);
1586
- }
1587
- }
1588
- }
1589
-
1590
-
1591
- void MacroAssembler::RestoreRegistersFromMemory(RegList regs) {
1592
- ASSERT((regs & ~kJSCallerSaved) == 0);
1593
- // Copy the content of memory location to registers.
1594
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1595
- int r = JSCallerSavedCode(i);
1596
- if ((regs & (1 << r)) != 0) {
1597
- Register reg = { r };
1598
- ExternalReference reg_addr =
1599
- ExternalReference(Debug_Address::Register(i));
1600
- movq(kScratchRegister, reg_addr);
1601
- movq(reg, Operand(kScratchRegister, 0));
1602
- }
1603
- }
1604
- }
1605
-
1606
-
1607
- void MacroAssembler::PopRegistersToMemory(RegList regs) {
1608
- ASSERT((regs & ~kJSCallerSaved) == 0);
1609
- // Pop the content from the stack to the memory location.
1610
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1611
- int r = JSCallerSavedCode(i);
1612
- if ((regs & (1 << r)) != 0) {
1613
- ExternalReference reg_addr =
1614
- ExternalReference(Debug_Address::Register(i));
1615
- movq(kScratchRegister, reg_addr);
1616
- pop(Operand(kScratchRegister, 0));
1617
- }
1618
- }
1619
- }
1620
-
1621
-
1622
- void MacroAssembler::CopyRegistersFromStackToMemory(Register base,
1623
- Register scratch,
1624
- RegList regs) {
1625
- ASSERT(!scratch.is(kScratchRegister));
1626
- ASSERT(!base.is(kScratchRegister));
1627
- ASSERT(!base.is(scratch));
1628
- ASSERT((regs & ~kJSCallerSaved) == 0);
1629
- // Copy the content of the stack to the memory location and adjust base.
1630
- for (int i = kNumJSCallerSaved - 1; i >= 0; i--) {
1631
- int r = JSCallerSavedCode(i);
1632
- if ((regs & (1 << r)) != 0) {
1633
- movq(scratch, Operand(base, 0));
1634
- ExternalReference reg_addr =
1635
- ExternalReference(Debug_Address::Register(i));
1636
- movq(kScratchRegister, reg_addr);
1637
- movq(Operand(kScratchRegister, 0), scratch);
1638
- lea(base, Operand(base, kPointerSize));
1639
- }
1640
- }
1641
- }
1642
-
1643
- #endif // ENABLE_DEBUGGER_SUPPORT
1644
-
1645
-
1646
- void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) {
1647
- bool resolved;
1648
- Handle<Code> code = ResolveBuiltin(id, &resolved);
1649
-
1650
- // Calls are not allowed in some stubs.
1651
- ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());
1652
-
1653
- // Rely on the assertion to check that the number of provided
1654
- // arguments match the expected number of arguments. Fake a
1655
- // parameter count to avoid emitting code to do the check.
1656
- ParameterCount expected(0);
1657
- InvokeCode(Handle<Code>(code),
1658
- expected,
1659
- expected,
1660
- RelocInfo::CODE_TARGET,
1661
- flag);
1662
-
1663
- const char* name = Builtins::GetName(id);
1664
- int argc = Builtins::GetArgumentsCount(id);
1665
- // The target address for the jump is stored as an immediate at offset
1666
- // kInvokeCodeAddressOffset.
1667
- if (!resolved) {
1668
- uint32_t flags =
1669
- Bootstrapper::FixupFlagsArgumentsCount::encode(argc) |
1670
- Bootstrapper::FixupFlagsUseCodeObject::encode(false);
1671
- Unresolved entry =
1672
- { pc_offset() - kCallTargetAddressOffset, flags, name };
1673
- unresolved_.Add(entry);
1674
- }
1675
- }
1676
-
1677
-
1678
- void MacroAssembler::InvokePrologue(const ParameterCount& expected,
1679
- const ParameterCount& actual,
1680
- Handle<Code> code_constant,
1681
- Register code_register,
1682
- Label* done,
1683
- InvokeFlag flag) {
1684
- bool definitely_matches = false;
1685
- Label invoke;
1686
- if (expected.is_immediate()) {
1687
- ASSERT(actual.is_immediate());
1688
- if (expected.immediate() == actual.immediate()) {
1689
- definitely_matches = true;
1690
- } else {
1691
- movq(rax, Immediate(actual.immediate()));
1692
- if (expected.immediate() ==
1693
- SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
1694
- // Don't worry about adapting arguments for built-ins that
1695
- // don't want that done. Skip adaption code by making it look
1696
- // like we have a match between expected and actual number of
1697
- // arguments.
1698
- definitely_matches = true;
1699
- } else {
1700
- movq(rbx, Immediate(expected.immediate()));
1701
- }
1702
- }
1703
- } else {
1704
- if (actual.is_immediate()) {
1705
- // Expected is in register, actual is immediate. This is the
1706
- // case when we invoke function values without going through the
1707
- // IC mechanism.
1708
- cmpq(expected.reg(), Immediate(actual.immediate()));
1709
- j(equal, &invoke);
1710
- ASSERT(expected.reg().is(rbx));
1711
- movq(rax, Immediate(actual.immediate()));
1712
- } else if (!expected.reg().is(actual.reg())) {
1713
- // Both expected and actual are in (different) registers. This
1714
- // is the case when we invoke functions using call and apply.
1715
- cmpq(expected.reg(), actual.reg());
1716
- j(equal, &invoke);
1717
- ASSERT(actual.reg().is(rax));
1718
- ASSERT(expected.reg().is(rbx));
1719
- }
1720
- }
1721
-
1722
- if (!definitely_matches) {
1723
- Handle<Code> adaptor =
1724
- Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
1725
- if (!code_constant.is_null()) {
1726
- movq(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
1727
- addq(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1728
- } else if (!code_register.is(rdx)) {
1729
- movq(rdx, code_register);
1730
- }
1731
-
1732
- if (flag == CALL_FUNCTION) {
1733
- Call(adaptor, RelocInfo::CODE_TARGET);
1734
- jmp(done);
1735
- } else {
1736
- Jump(adaptor, RelocInfo::CODE_TARGET);
1737
- }
1738
- bind(&invoke);
1739
- }
1740
- }
1741
-
1742
-
1743
- void MacroAssembler::InvokeCode(Register code,
1744
- const ParameterCount& expected,
1745
- const ParameterCount& actual,
1746
- InvokeFlag flag) {
1747
- Label done;
1748
- InvokePrologue(expected, actual, Handle<Code>::null(), code, &done, flag);
1749
- if (flag == CALL_FUNCTION) {
1750
- call(code);
1751
- } else {
1752
- ASSERT(flag == JUMP_FUNCTION);
1753
- jmp(code);
1754
- }
1755
- bind(&done);
1756
- }
1757
-
1758
-
1759
- void MacroAssembler::InvokeCode(Handle<Code> code,
1760
- const ParameterCount& expected,
1761
- const ParameterCount& actual,
1762
- RelocInfo::Mode rmode,
1763
- InvokeFlag flag) {
1764
- Label done;
1765
- Register dummy = rax;
1766
- InvokePrologue(expected, actual, code, dummy, &done, flag);
1767
- if (flag == CALL_FUNCTION) {
1768
- Call(code, rmode);
1769
- } else {
1770
- ASSERT(flag == JUMP_FUNCTION);
1771
- Jump(code, rmode);
1772
- }
1773
- bind(&done);
1774
- }
1775
-
1776
-
1777
- void MacroAssembler::InvokeFunction(Register function,
1778
- const ParameterCount& actual,
1779
- InvokeFlag flag) {
1780
- ASSERT(function.is(rdi));
1781
- movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1782
- movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
1783
- movsxlq(rbx,
1784
- FieldOperand(rdx, SharedFunctionInfo::kFormalParameterCountOffset));
1785
- movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset));
1786
- // Advances rdx to the end of the Code object header, to the start of
1787
- // the executable code.
1788
- lea(rdx, FieldOperand(rdx, Code::kHeaderSize));
1789
-
1790
- ParameterCount expected(rbx);
1791
- InvokeCode(rdx, expected, actual, flag);
1792
- }
1793
-
1794
-
1795
- void MacroAssembler::EnterFrame(StackFrame::Type type) {
1796
- push(rbp);
1797
- movq(rbp, rsp);
1798
- push(rsi); // Context.
1799
- Push(Smi::FromInt(type));
1800
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1801
- push(kScratchRegister);
1802
- if (FLAG_debug_code) {
1803
- movq(kScratchRegister,
1804
- Factory::undefined_value(),
1805
- RelocInfo::EMBEDDED_OBJECT);
1806
- cmpq(Operand(rsp, 0), kScratchRegister);
1807
- Check(not_equal, "code object not properly patched");
1808
- }
1809
- }
1810
-
1811
-
1812
- void MacroAssembler::LeaveFrame(StackFrame::Type type) {
1813
- if (FLAG_debug_code) {
1814
- Move(kScratchRegister, Smi::FromInt(type));
1815
- cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
1816
- Check(equal, "stack frame types must match");
1817
- }
1818
- movq(rsp, rbp);
1819
- pop(rbp);
1820
- }
1821
-
1822
-
1823
- void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) {
1824
- // Setup the frame structure on the stack.
1825
- // All constants are relative to the frame pointer of the exit frame.
1826
- ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
1827
- ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
1828
- ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
1829
- push(rbp);
1830
- movq(rbp, rsp);
1831
-
1832
- // Reserve room for entry stack pointer and push the debug marker.
1833
- ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
1834
- push(Immediate(0)); // saved entry sp, patched before call
1835
- if (mode == ExitFrame::MODE_DEBUG) {
1836
- push(Immediate(0));
1837
- } else {
1838
- movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
1839
- push(kScratchRegister);
1840
- }
1841
-
1842
- // Save the frame pointer and the context in top.
1843
- ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1844
- ExternalReference context_address(Top::k_context_address);
1845
- movq(r14, rax); // Backup rax before we use it.
1846
-
1847
- movq(rax, rbp);
1848
- store_rax(c_entry_fp_address);
1849
- movq(rax, rsi);
1850
- store_rax(context_address);
1851
-
1852
- // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
1853
- // so it must be retained across the C-call.
1854
- int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
1855
- lea(r15, Operand(rbp, r14, times_pointer_size, offset));
1856
-
1857
- #ifdef ENABLE_DEBUGGER_SUPPORT
1858
- // Save the state of all registers to the stack from the memory
1859
- // location. This is needed to allow nested break points.
1860
- if (mode == ExitFrame::MODE_DEBUG) {
1861
- // TODO(1243899): This should be symmetric to
1862
- // CopyRegistersFromStackToMemory() but it isn't! esp is assumed
1863
- // correct here, but computed for the other call. Very error
1864
- // prone! FIX THIS. Actually there are deeper problems with
1865
- // register saving than this asymmetry (see the bug report
1866
- // associated with this issue).
1867
- PushRegistersFromMemory(kJSCallerSaved);
1868
- }
1869
- #endif
1870
-
1871
- #ifdef _WIN64
1872
- // Reserve space on stack for result and argument structures, if necessary.
1873
- int result_stack_space = (result_size < 2) ? 0 : result_size * kPointerSize;
1874
- // Reserve space for the Arguments object. The Windows 64-bit ABI
1875
- // requires us to pass this structure as a pointer to its location on
1876
- // the stack. The structure contains 2 values.
1877
- int argument_stack_space = 2 * kPointerSize;
1878
- // We also need backing space for 4 parameters, even though
1879
- // we only pass one or two parameter, and it is in a register.
1880
- int argument_mirror_space = 4 * kPointerSize;
1881
- int total_stack_space =
1882
- argument_mirror_space + argument_stack_space + result_stack_space;
1883
- subq(rsp, Immediate(total_stack_space));
1884
- #endif
1885
-
1886
- // Get the required frame alignment for the OS.
1887
- static const int kFrameAlignment = OS::ActivationFrameAlignment();
1888
- if (kFrameAlignment > 0) {
1889
- ASSERT(IsPowerOf2(kFrameAlignment));
1890
- movq(kScratchRegister, Immediate(-kFrameAlignment));
1891
- and_(rsp, kScratchRegister);
1892
- }
1893
-
1894
- // Patch the saved entry sp.
1895
- movq(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
1896
- }
1897
-
1898
-
1899
- void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) {
1900
- // Registers:
1901
- // r15 : argv
1902
- #ifdef ENABLE_DEBUGGER_SUPPORT
1903
- // Restore the memory copy of the registers by digging them out from
1904
- // the stack. This is needed to allow nested break points.
1905
- if (mode == ExitFrame::MODE_DEBUG) {
1906
- // It's okay to clobber register rbx below because we don't need
1907
- // the function pointer after this.
1908
- const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize;
1909
- int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize;
1910
- lea(rbx, Operand(rbp, kOffset));
1911
- CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved);
1912
- }
1913
- #endif
1914
-
1915
- // Get the return address from the stack and restore the frame pointer.
1916
- movq(rcx, Operand(rbp, 1 * kPointerSize));
1917
- movq(rbp, Operand(rbp, 0 * kPointerSize));
1918
-
1919
- // Pop everything up to and including the arguments and the receiver
1920
- // from the caller stack.
1921
- lea(rsp, Operand(r15, 1 * kPointerSize));
1922
-
1923
- // Restore current context from top and clear it in debug mode.
1924
- ExternalReference context_address(Top::k_context_address);
1925
- movq(kScratchRegister, context_address);
1926
- movq(rsi, Operand(kScratchRegister, 0));
1927
- #ifdef DEBUG
1928
- movq(Operand(kScratchRegister, 0), Immediate(0));
1929
- #endif
1930
-
1931
- // Push the return address to get ready to return.
1932
- push(rcx);
1933
-
1934
- // Clear the top frame.
1935
- ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
1936
- movq(kScratchRegister, c_entry_fp_address);
1937
- movq(Operand(kScratchRegister, 0), Immediate(0));
1938
- }
1939
-
1940
-
1941
- Register MacroAssembler::CheckMaps(JSObject* object,
1942
- Register object_reg,
1943
- JSObject* holder,
1944
- Register holder_reg,
1945
- Register scratch,
1946
- Label* miss) {
1947
- // Make sure there's no overlap between scratch and the other
1948
- // registers.
1949
- ASSERT(!scratch.is(object_reg) && !scratch.is(holder_reg));
1950
-
1951
- // Keep track of the current object in register reg. On the first
1952
- // iteration, reg is an alias for object_reg, on later iterations,
1953
- // it is an alias for holder_reg.
1954
- Register reg = object_reg;
1955
- int depth = 1;
1956
-
1957
- // Check the maps in the prototype chain.
1958
- // Traverse the prototype chain from the object and do map checks.
1959
- while (object != holder) {
1960
- depth++;
1961
-
1962
- // Only global objects and objects that do not require access
1963
- // checks are allowed in stubs.
1964
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
1965
-
1966
- JSObject* prototype = JSObject::cast(object->GetPrototype());
1967
- if (Heap::InNewSpace(prototype)) {
1968
- // Get the map of the current object.
1969
- movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1970
- Cmp(scratch, Handle<Map>(object->map()));
1971
- // Branch on the result of the map check.
1972
- j(not_equal, miss);
1973
- // Check access rights to the global object. This has to happen
1974
- // after the map check so that we know that the object is
1975
- // actually a global object.
1976
- if (object->IsJSGlobalProxy()) {
1977
- CheckAccessGlobalProxy(reg, scratch, miss);
1978
-
1979
- // Restore scratch register to be the map of the object.
1980
- // We load the prototype from the map in the scratch register.
1981
- movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1982
- }
1983
- // The prototype is in new space; we cannot store a reference
1984
- // to it in the code. Load it from the map.
1985
- reg = holder_reg; // from now the object is in holder_reg
1986
- movq(reg, FieldOperand(scratch, Map::kPrototypeOffset));
1987
-
1988
- } else {
1989
- // Check the map of the current object.
1990
- Cmp(FieldOperand(reg, HeapObject::kMapOffset),
1991
- Handle<Map>(object->map()));
1992
- // Branch on the result of the map check.
1993
- j(not_equal, miss);
1994
- // Check access rights to the global object. This has to happen
1995
- // after the map check so that we know that the object is
1996
- // actually a global object.
1997
- if (object->IsJSGlobalProxy()) {
1998
- CheckAccessGlobalProxy(reg, scratch, miss);
1999
- }
2000
- // The prototype is in old space; load it directly.
2001
- reg = holder_reg; // from now the object is in holder_reg
2002
- Move(reg, Handle<JSObject>(prototype));
2003
- }
2004
-
2005
- // Go to the next object in the prototype chain.
2006
- object = prototype;
2007
- }
2008
-
2009
- // Check the holder map.
2010
- Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
2011
- j(not_equal, miss);
2012
-
2013
- // Log the check depth.
2014
- LOG(IntEvent("check-maps-depth", depth));
2015
-
2016
- // Perform security check for access to the global object and return
2017
- // the holder register.
2018
- ASSERT(object == holder);
2019
- ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
2020
- if (object->IsJSGlobalProxy()) {
2021
- CheckAccessGlobalProxy(reg, scratch, miss);
2022
- }
2023
- return reg;
2024
- }
2025
-
2026
-
2027
- void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
2028
- Register scratch,
2029
- Label* miss) {
2030
- Label same_contexts;
2031
-
2032
- ASSERT(!holder_reg.is(scratch));
2033
- ASSERT(!scratch.is(kScratchRegister));
2034
- // Load current lexical context from the stack frame.
2035
- movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));
2036
-
2037
- // When generating debug code, make sure the lexical context is set.
2038
- if (FLAG_debug_code) {
2039
- cmpq(scratch, Immediate(0));
2040
- Check(not_equal, "we should not have an empty lexical context");
2041
- }
2042
- // Load the global context of the current context.
2043
- int offset = Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
2044
- movq(scratch, FieldOperand(scratch, offset));
2045
- movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));
2046
-
2047
- // Check the context is a global context.
2048
- if (FLAG_debug_code) {
2049
- Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
2050
- Factory::global_context_map());
2051
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2052
- }
2053
-
2054
- // Check if both contexts are the same.
2055
- cmpq(scratch, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2056
- j(equal, &same_contexts);
2057
-
2058
- // Compare security tokens.
2059
- // Check that the security token in the calling global object is
2060
- // compatible with the security token in the receiving global
2061
- // object.
2062
-
2063
- // Check the context is a global context.
2064
- if (FLAG_debug_code) {
2065
- // Preserve original value of holder_reg.
2066
- push(holder_reg);
2067
- movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2068
- CompareRoot(holder_reg, Heap::kNullValueRootIndex);
2069
- Check(not_equal, "JSGlobalProxy::context() should not be null.");
2070
-
2071
- // Read the first word and compare to global_context_map(),
2072
- movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
2073
- CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
2074
- Check(equal, "JSGlobalObject::global_context should be a global context.");
2075
- pop(holder_reg);
2076
- }
2077
-
2078
- movq(kScratchRegister,
2079
- FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
2080
- int token_offset =
2081
- Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
2082
- movq(scratch, FieldOperand(scratch, token_offset));
2083
- cmpq(scratch, FieldOperand(kScratchRegister, token_offset));
2084
- j(not_equal, miss);
2085
-
2086
- bind(&same_contexts);
2087
- }
2088
-
2089
-
2090
- void MacroAssembler::LoadAllocationTopHelper(Register result,
2091
- Register result_end,
2092
- Register scratch,
2093
- AllocationFlags flags) {
2094
- ExternalReference new_space_allocation_top =
2095
- ExternalReference::new_space_allocation_top_address();
2096
-
2097
- // Just return if allocation top is already known.
2098
- if ((flags & RESULT_CONTAINS_TOP) != 0) {
2099
- // No use of scratch if allocation top is provided.
2100
- ASSERT(scratch.is(no_reg));
2101
- #ifdef DEBUG
2102
- // Assert that result actually contains top on entry.
2103
- movq(kScratchRegister, new_space_allocation_top);
2104
- cmpq(result, Operand(kScratchRegister, 0));
2105
- Check(equal, "Unexpected allocation top");
2106
- #endif
2107
- return;
2108
- }
2109
-
2110
- // Move address of new object to result. Use scratch register if available.
2111
- if (scratch.is(no_reg)) {
2112
- movq(kScratchRegister, new_space_allocation_top);
2113
- movq(result, Operand(kScratchRegister, 0));
2114
- } else {
2115
- ASSERT(!scratch.is(result_end));
2116
- movq(scratch, new_space_allocation_top);
2117
- movq(result, Operand(scratch, 0));
2118
- }
2119
- }
2120
-
2121
-
2122
- void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
2123
- Register scratch) {
2124
- if (FLAG_debug_code) {
2125
- testq(result_end, Immediate(kObjectAlignmentMask));
2126
- Check(zero, "Unaligned allocation in new space");
2127
- }
2128
-
2129
- ExternalReference new_space_allocation_top =
2130
- ExternalReference::new_space_allocation_top_address();
2131
-
2132
- // Update new top.
2133
- if (result_end.is(rax)) {
2134
- // rax can be stored directly to a memory location.
2135
- store_rax(new_space_allocation_top);
2136
- } else {
2137
- // Register required - use scratch provided if available.
2138
- if (scratch.is(no_reg)) {
2139
- movq(kScratchRegister, new_space_allocation_top);
2140
- movq(Operand(kScratchRegister, 0), result_end);
2141
- } else {
2142
- movq(Operand(scratch, 0), result_end);
2143
- }
2144
- }
2145
- }
2146
-
2147
-
2148
- void MacroAssembler::AllocateInNewSpace(int object_size,
2149
- Register result,
2150
- Register result_end,
2151
- Register scratch,
2152
- Label* gc_required,
2153
- AllocationFlags flags) {
2154
- ASSERT(!result.is(result_end));
2155
-
2156
- // Load address of new object into result.
2157
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2158
-
2159
- // Calculate new top and bail out if new space is exhausted.
2160
- ExternalReference new_space_allocation_limit =
2161
- ExternalReference::new_space_allocation_limit_address();
2162
- lea(result_end, Operand(result, object_size));
2163
- movq(kScratchRegister, new_space_allocation_limit);
2164
- cmpq(result_end, Operand(kScratchRegister, 0));
2165
- j(above, gc_required);
2166
-
2167
- // Update allocation top.
2168
- UpdateAllocationTopHelper(result_end, scratch);
2169
-
2170
- // Tag the result if requested.
2171
- if ((flags & TAG_OBJECT) != 0) {
2172
- addq(result, Immediate(kHeapObjectTag));
2173
- }
2174
- }
2175
-
2176
-
2177
- void MacroAssembler::AllocateInNewSpace(int header_size,
2178
- ScaleFactor element_size,
2179
- Register element_count,
2180
- Register result,
2181
- Register result_end,
2182
- Register scratch,
2183
- Label* gc_required,
2184
- AllocationFlags flags) {
2185
- ASSERT(!result.is(result_end));
2186
-
2187
- // Load address of new object into result.
2188
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2189
-
2190
- // Calculate new top and bail out if new space is exhausted.
2191
- ExternalReference new_space_allocation_limit =
2192
- ExternalReference::new_space_allocation_limit_address();
2193
- lea(result_end, Operand(result, element_count, element_size, header_size));
2194
- movq(kScratchRegister, new_space_allocation_limit);
2195
- cmpq(result_end, Operand(kScratchRegister, 0));
2196
- j(above, gc_required);
2197
-
2198
- // Update allocation top.
2199
- UpdateAllocationTopHelper(result_end, scratch);
2200
-
2201
- // Tag the result if requested.
2202
- if ((flags & TAG_OBJECT) != 0) {
2203
- addq(result, Immediate(kHeapObjectTag));
2204
- }
2205
- }
2206
-
2207
-
2208
- void MacroAssembler::AllocateInNewSpace(Register object_size,
2209
- Register result,
2210
- Register result_end,
2211
- Register scratch,
2212
- Label* gc_required,
2213
- AllocationFlags flags) {
2214
- // Load address of new object into result.
2215
- LoadAllocationTopHelper(result, result_end, scratch, flags);
2216
-
2217
- // Calculate new top and bail out if new space is exhausted.
2218
- ExternalReference new_space_allocation_limit =
2219
- ExternalReference::new_space_allocation_limit_address();
2220
- if (!object_size.is(result_end)) {
2221
- movq(result_end, object_size);
2222
- }
2223
- addq(result_end, result);
2224
- movq(kScratchRegister, new_space_allocation_limit);
2225
- cmpq(result_end, Operand(kScratchRegister, 0));
2226
- j(above, gc_required);
2227
-
2228
- // Update allocation top.
2229
- UpdateAllocationTopHelper(result_end, scratch);
2230
-
2231
- // Tag the result if requested.
2232
- if ((flags & TAG_OBJECT) != 0) {
2233
- addq(result, Immediate(kHeapObjectTag));
2234
- }
2235
- }
2236
-
2237
-
2238
- void MacroAssembler::UndoAllocationInNewSpace(Register object) {
2239
- ExternalReference new_space_allocation_top =
2240
- ExternalReference::new_space_allocation_top_address();
2241
-
2242
- // Make sure the object has no tag before resetting top.
2243
- and_(object, Immediate(~kHeapObjectTagMask));
2244
- movq(kScratchRegister, new_space_allocation_top);
2245
- #ifdef DEBUG
2246
- cmpq(object, Operand(kScratchRegister, 0));
2247
- Check(below, "Undo allocation of non allocated memory");
2248
- #endif
2249
- movq(Operand(kScratchRegister, 0), object);
2250
- }
2251
-
2252
-
2253
- void MacroAssembler::AllocateHeapNumber(Register result,
2254
- Register scratch,
2255
- Label* gc_required) {
2256
- // Allocate heap number in new space.
2257
- AllocateInNewSpace(HeapNumber::kSize,
2258
- result,
2259
- scratch,
2260
- no_reg,
2261
- gc_required,
2262
- TAG_OBJECT);
2263
-
2264
- // Set the map.
2265
- LoadRoot(kScratchRegister, Heap::kHeapNumberMapRootIndex);
2266
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2267
- }
2268
-
2269
-
2270
- void MacroAssembler::AllocateTwoByteString(Register result,
2271
- Register length,
2272
- Register scratch1,
2273
- Register scratch2,
2274
- Register scratch3,
2275
- Label* gc_required) {
2276
- // Calculate the number of bytes needed for the characters in the string while
2277
- // observing object alignment.
2278
- ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
2279
- ASSERT(kShortSize == 2);
2280
- // scratch1 = length * 2 + kObjectAlignmentMask.
2281
- lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
2282
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2283
-
2284
- // Allocate two byte string in new space.
2285
- AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
2286
- times_1,
2287
- scratch1,
2288
- result,
2289
- scratch2,
2290
- scratch3,
2291
- gc_required,
2292
- TAG_OBJECT);
2293
-
2294
- // Set the map, length and hash field.
2295
- LoadRoot(kScratchRegister, Heap::kStringMapRootIndex);
2296
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2297
- movl(FieldOperand(result, String::kLengthOffset), length);
2298
- movl(FieldOperand(result, String::kHashFieldOffset),
2299
- Immediate(String::kEmptyHashField));
2300
- }
2301
-
2302
-
2303
- void MacroAssembler::AllocateAsciiString(Register result,
2304
- Register length,
2305
- Register scratch1,
2306
- Register scratch2,
2307
- Register scratch3,
2308
- Label* gc_required) {
2309
- // Calculate the number of bytes needed for the characters in the string while
2310
- // observing object alignment.
2311
- ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
2312
- movl(scratch1, length);
2313
- ASSERT(kCharSize == 1);
2314
- addq(scratch1, Immediate(kObjectAlignmentMask));
2315
- and_(scratch1, Immediate(~kObjectAlignmentMask));
2316
-
2317
- // Allocate ascii string in new space.
2318
- AllocateInNewSpace(SeqAsciiString::kHeaderSize,
2319
- times_1,
2320
- scratch1,
2321
- result,
2322
- scratch2,
2323
- scratch3,
2324
- gc_required,
2325
- TAG_OBJECT);
2326
-
2327
- // Set the map, length and hash field.
2328
- LoadRoot(kScratchRegister, Heap::kAsciiStringMapRootIndex);
2329
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2330
- movl(FieldOperand(result, String::kLengthOffset), length);
2331
- movl(FieldOperand(result, String::kHashFieldOffset),
2332
- Immediate(String::kEmptyHashField));
2333
- }
2334
-
2335
-
2336
- void MacroAssembler::AllocateConsString(Register result,
2337
- Register scratch1,
2338
- Register scratch2,
2339
- Label* gc_required) {
2340
- // Allocate heap number in new space.
2341
- AllocateInNewSpace(ConsString::kSize,
2342
- result,
2343
- scratch1,
2344
- scratch2,
2345
- gc_required,
2346
- TAG_OBJECT);
2347
-
2348
- // Set the map. The other fields are left uninitialized.
2349
- LoadRoot(kScratchRegister, Heap::kConsStringMapRootIndex);
2350
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2351
- }
2352
-
2353
-
2354
- void MacroAssembler::AllocateAsciiConsString(Register result,
2355
- Register scratch1,
2356
- Register scratch2,
2357
- Label* gc_required) {
2358
- // Allocate heap number in new space.
2359
- AllocateInNewSpace(ConsString::kSize,
2360
- result,
2361
- scratch1,
2362
- scratch2,
2363
- gc_required,
2364
- TAG_OBJECT);
2365
-
2366
- // Set the map. The other fields are left uninitialized.
2367
- LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);
2368
- movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
2369
- }
2370
-
2371
-
2372
- void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2373
- if (context_chain_length > 0) {
2374
- // Move up the chain of contexts to the context containing the slot.
2375
- movq(dst, Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX)));
2376
- // Load the function context (which is the incoming, outer context).
2377
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2378
- for (int i = 1; i < context_chain_length; i++) {
2379
- movq(dst, Operand(dst, Context::SlotOffset(Context::CLOSURE_INDEX)));
2380
- movq(dst, FieldOperand(dst, JSFunction::kContextOffset));
2381
- }
2382
- // The context may be an intermediate context, not a function context.
2383
- movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2384
- } else { // context is the current function context.
2385
- // The context may be an intermediate context, not a function context.
2386
- movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
2387
- }
2388
- }
2389
-
2390
-
2391
- CodePatcher::CodePatcher(byte* address, int size)
2392
- : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
2393
- // Create a new macro assembler pointing to the address of the code to patch.
2394
- // The size is adjusted with kGap on order for the assembler to generate size
2395
- // bytes of instructions without failing with buffer size constraints.
2396
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2397
- }
2398
-
2399
-
2400
- CodePatcher::~CodePatcher() {
2401
- // Indicate that code has changed.
2402
- CPU::FlushICache(address_, size_);
2403
-
2404
- // Check that the code was patched as expected.
2405
- ASSERT(masm_.pc_ == address_ + size_);
2406
- ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2407
- }
2408
-
2409
- } } // namespace v8::internal