mustang 0.0.1 → 0.1.0

Files changed (560)
  1. data/.rspec +1 -0
  2. data/Isolate +9 -0
  3. data/README.md +6 -12
  4. data/Rakefile +30 -4
  5. data/TODO.md +9 -0
  6. data/ext/v8/extconf.rb +56 -0
  7. data/ext/v8/v8.cpp +37 -0
  8. data/ext/v8/v8_array.cpp +161 -0
  9. data/ext/v8/v8_array.h +17 -0
  10. data/ext/v8/v8_base.cpp +147 -0
  11. data/ext/v8/v8_base.h +23 -0
  12. data/ext/v8/v8_cast.cpp +151 -0
  13. data/ext/v8/v8_cast.h +64 -0
  14. data/ext/v8/v8_context.cpp +174 -0
  15. data/ext/v8/v8_context.h +12 -0
  16. data/ext/v8/v8_date.cpp +61 -0
  17. data/ext/v8/v8_date.h +16 -0
  18. data/ext/v8/v8_errors.cpp +147 -0
  19. data/ext/v8/v8_errors.h +19 -0
  20. data/ext/v8/v8_external.cpp +66 -0
  21. data/ext/v8/v8_external.h +16 -0
  22. data/ext/v8/v8_function.cpp +182 -0
  23. data/ext/v8/v8_function.h +14 -0
  24. data/ext/v8/v8_integer.cpp +70 -0
  25. data/ext/v8/v8_integer.h +16 -0
  26. data/ext/v8/v8_macros.h +30 -0
  27. data/ext/v8/v8_main.cpp +53 -0
  28. data/ext/v8/v8_main.h +13 -0
  29. data/ext/v8/v8_number.cpp +62 -0
  30. data/ext/v8/v8_number.h +16 -0
  31. data/ext/v8/v8_object.cpp +172 -0
  32. data/ext/v8/v8_object.h +17 -0
  33. data/ext/v8/v8_ref.cpp +72 -0
  34. data/ext/v8/v8_ref.h +43 -0
  35. data/ext/v8/v8_regexp.cpp +148 -0
  36. data/ext/v8/v8_regexp.h +16 -0
  37. data/ext/v8/v8_string.cpp +78 -0
  38. data/ext/v8/v8_string.h +16 -0
  39. data/ext/v8/v8_value.cpp +370 -0
  40. data/ext/v8/v8_value.h +19 -0
  41. data/gemspec.yml +2 -1
  42. data/lib/core_ext/class.rb +14 -0
  43. data/lib/core_ext/object.rb +12 -0
  44. data/lib/core_ext/symbol.rb +23 -0
  45. data/lib/mustang.rb +44 -0
  46. data/lib/mustang/context.rb +69 -0
  47. data/lib/mustang/errors.rb +36 -0
  48. data/lib/support/delegated.rb +25 -0
  49. data/lib/v8/array.rb +21 -0
  50. data/lib/v8/context.rb +13 -0
  51. data/lib/v8/date.rb +20 -0
  52. data/lib/v8/error.rb +15 -0
  53. data/lib/v8/external.rb +16 -0
  54. data/lib/v8/function.rb +11 -0
  55. data/lib/v8/integer.rb +16 -0
  56. data/lib/v8/number.rb +16 -0
  57. data/lib/v8/object.rb +66 -0
  58. data/lib/v8/regexp.rb +23 -0
  59. data/lib/v8/string.rb +27 -0
  60. data/mustang.gemspec +3 -0
  61. data/spec/core_ext/class_spec.rb +19 -0
  62. data/spec/core_ext/object_spec.rb +19 -0
  63. data/spec/core_ext/symbol_spec.rb +27 -0
  64. data/spec/fixtures/test1.js +2 -0
  65. data/spec/fixtures/test2.js +2 -0
  66. data/spec/spec_helper.rb +20 -0
  67. data/spec/v8/array_spec.rb +88 -0
  68. data/spec/v8/cast_spec.rb +151 -0
  69. data/spec/v8/context_spec.rb +78 -0
  70. data/spec/v8/data_spec.rb +39 -0
  71. data/spec/v8/date_spec.rb +45 -0
  72. data/spec/v8/empty_spec.rb +27 -0
  73. data/spec/v8/errors_spec.rb +142 -0
  74. data/spec/v8/external_spec.rb +44 -0
  75. data/spec/v8/function_spec.rb +170 -0
  76. data/spec/v8/integer_spec.rb +41 -0
  77. data/spec/v8/main_spec.rb +18 -0
  78. data/spec/v8/null_spec.rb +27 -0
  79. data/spec/v8/number_spec.rb +40 -0
  80. data/spec/v8/object_spec.rb +79 -0
  81. data/spec/v8/primitive_spec.rb +9 -0
  82. data/spec/v8/regexp_spec.rb +65 -0
  83. data/spec/v8/string_spec.rb +48 -0
  84. data/spec/v8/undefined_spec.rb +27 -0
  85. data/spec/v8/value_spec.rb +215 -0
  86. data/vendor/v8/.gitignore +2 -0
  87. data/vendor/v8/AUTHORS +3 -1
  88. data/vendor/v8/ChangeLog +117 -0
  89. data/vendor/v8/SConstruct +334 -53
  90. data/vendor/v8/include/v8-debug.h +21 -11
  91. data/vendor/v8/include/v8-preparser.h +1 -1
  92. data/vendor/v8/include/v8-profiler.h +122 -43
  93. data/vendor/v8/include/v8-testing.h +5 -0
  94. data/vendor/v8/include/v8.h +171 -17
  95. data/vendor/v8/preparser/SConscript +38 -0
  96. data/vendor/v8/preparser/preparser-process.cc +77 -114
  97. data/vendor/v8/samples/shell.cc +232 -46
  98. data/vendor/v8/src/SConscript +29 -5
  99. data/vendor/v8/src/accessors.cc +70 -211
  100. data/vendor/v8/{test/cctest/test-mips.cc → src/allocation-inl.h} +15 -18
  101. data/vendor/v8/src/allocation.cc +0 -82
  102. data/vendor/v8/src/allocation.h +9 -42
  103. data/vendor/v8/src/api.cc +1645 -1156
  104. data/vendor/v8/src/api.h +76 -12
  105. data/vendor/v8/src/apiutils.h +0 -7
  106. data/vendor/v8/src/arguments.h +15 -4
  107. data/vendor/v8/src/arm/assembler-arm-inl.h +10 -9
  108. data/vendor/v8/src/arm/assembler-arm.cc +62 -23
  109. data/vendor/v8/src/arm/assembler-arm.h +76 -11
  110. data/vendor/v8/src/arm/builtins-arm.cc +39 -33
  111. data/vendor/v8/src/arm/code-stubs-arm.cc +1182 -402
  112. data/vendor/v8/src/arm/code-stubs-arm.h +20 -54
  113. data/vendor/v8/src/arm/codegen-arm.cc +159 -106
  114. data/vendor/v8/src/arm/codegen-arm.h +6 -6
  115. data/vendor/v8/src/arm/constants-arm.h +16 -1
  116. data/vendor/v8/src/arm/cpu-arm.cc +7 -5
  117. data/vendor/v8/src/arm/debug-arm.cc +6 -4
  118. data/vendor/v8/src/arm/deoptimizer-arm.cc +51 -14
  119. data/vendor/v8/src/arm/disasm-arm.cc +47 -15
  120. data/vendor/v8/src/arm/frames-arm.h +1 -1
  121. data/vendor/v8/src/arm/full-codegen-arm.cc +724 -408
  122. data/vendor/v8/src/arm/ic-arm.cc +90 -85
  123. data/vendor/v8/src/arm/lithium-arm.cc +140 -69
  124. data/vendor/v8/src/arm/lithium-arm.h +161 -46
  125. data/vendor/v8/src/arm/lithium-codegen-arm.cc +567 -297
  126. data/vendor/v8/src/arm/lithium-codegen-arm.h +21 -9
  127. data/vendor/v8/src/arm/lithium-gap-resolver-arm.cc +2 -0
  128. data/vendor/v8/src/arm/macro-assembler-arm.cc +457 -96
  129. data/vendor/v8/src/arm/macro-assembler-arm.h +115 -18
  130. data/vendor/v8/src/arm/regexp-macro-assembler-arm.cc +20 -13
  131. data/vendor/v8/src/arm/regexp-macro-assembler-arm.h +1 -0
  132. data/vendor/v8/src/arm/simulator-arm.cc +184 -101
  133. data/vendor/v8/src/arm/simulator-arm.h +26 -21
  134. data/vendor/v8/src/arm/stub-cache-arm.cc +450 -467
  135. data/vendor/v8/src/arm/virtual-frame-arm.cc +14 -12
  136. data/vendor/v8/src/arm/virtual-frame-arm.h +11 -8
  137. data/vendor/v8/src/array.js +35 -18
  138. data/vendor/v8/src/assembler.cc +186 -92
  139. data/vendor/v8/src/assembler.h +106 -69
  140. data/vendor/v8/src/ast-inl.h +5 -0
  141. data/vendor/v8/src/ast.cc +46 -35
  142. data/vendor/v8/src/ast.h +107 -50
  143. data/vendor/v8/src/atomicops.h +2 -0
  144. data/vendor/v8/src/atomicops_internals_mips_gcc.h +169 -0
  145. data/vendor/v8/src/bootstrapper.cc +649 -399
  146. data/vendor/v8/src/bootstrapper.h +94 -27
  147. data/vendor/v8/src/builtins.cc +359 -227
  148. data/vendor/v8/src/builtins.h +157 -123
  149. data/vendor/v8/src/checks.cc +2 -2
  150. data/vendor/v8/src/checks.h +4 -0
  151. data/vendor/v8/src/code-stubs.cc +27 -17
  152. data/vendor/v8/src/code-stubs.h +38 -17
  153. data/vendor/v8/src/codegen-inl.h +5 -1
  154. data/vendor/v8/src/codegen.cc +27 -17
  155. data/vendor/v8/src/codegen.h +9 -9
  156. data/vendor/v8/src/compilation-cache.cc +92 -206
  157. data/vendor/v8/src/compilation-cache.h +205 -30
  158. data/vendor/v8/src/compiler.cc +107 -120
  159. data/vendor/v8/src/compiler.h +17 -2
  160. data/vendor/v8/src/contexts.cc +22 -15
  161. data/vendor/v8/src/contexts.h +14 -8
  162. data/vendor/v8/src/conversions.cc +86 -30
  163. data/vendor/v8/src/counters.cc +19 -4
  164. data/vendor/v8/src/counters.h +28 -16
  165. data/vendor/v8/src/cpu-profiler-inl.h +4 -3
  166. data/vendor/v8/src/cpu-profiler.cc +123 -72
  167. data/vendor/v8/src/cpu-profiler.h +33 -19
  168. data/vendor/v8/src/cpu.h +2 -0
  169. data/vendor/v8/src/d8-debug.cc +3 -3
  170. data/vendor/v8/src/d8-debug.h +7 -6
  171. data/vendor/v8/src/d8-posix.cc +2 -0
  172. data/vendor/v8/src/d8.cc +22 -12
  173. data/vendor/v8/src/d8.gyp +3 -0
  174. data/vendor/v8/src/d8.js +618 -0
  175. data/vendor/v8/src/data-flow.h +3 -3
  176. data/vendor/v8/src/dateparser.h +4 -2
  177. data/vendor/v8/src/debug-agent.cc +10 -9
  178. data/vendor/v8/src/debug-agent.h +9 -11
  179. data/vendor/v8/src/debug-debugger.js +121 -0
  180. data/vendor/v8/src/debug.cc +331 -227
  181. data/vendor/v8/src/debug.h +248 -219
  182. data/vendor/v8/src/deoptimizer.cc +173 -62
  183. data/vendor/v8/src/deoptimizer.h +119 -19
  184. data/vendor/v8/src/disasm.h +3 -0
  185. data/vendor/v8/src/disassembler.cc +10 -9
  186. data/vendor/v8/src/execution.cc +185 -129
  187. data/vendor/v8/src/execution.h +47 -78
  188. data/vendor/v8/src/extensions/experimental/break-iterator.cc +250 -0
  189. data/vendor/v8/src/extensions/experimental/break-iterator.h +89 -0
  190. data/vendor/v8/src/extensions/experimental/experimental.gyp +2 -0
  191. data/vendor/v8/src/extensions/experimental/i18n-extension.cc +22 -2
  192. data/vendor/v8/src/extensions/externalize-string-extension.cc +2 -2
  193. data/vendor/v8/src/extensions/gc-extension.cc +1 -1
  194. data/vendor/v8/src/factory.cc +261 -154
  195. data/vendor/v8/src/factory.h +162 -158
  196. data/vendor/v8/src/flag-definitions.h +17 -11
  197. data/vendor/v8/src/frame-element.cc +0 -5
  198. data/vendor/v8/src/frame-element.h +9 -13
  199. data/vendor/v8/src/frames-inl.h +7 -0
  200. data/vendor/v8/src/frames.cc +56 -46
  201. data/vendor/v8/src/frames.h +36 -25
  202. data/vendor/v8/src/full-codegen.cc +15 -24
  203. data/vendor/v8/src/full-codegen.h +13 -41
  204. data/vendor/v8/src/func-name-inferrer.cc +7 -6
  205. data/vendor/v8/src/func-name-inferrer.h +1 -1
  206. data/vendor/v8/src/gdb-jit.cc +1 -0
  207. data/vendor/v8/src/global-handles.cc +118 -56
  208. data/vendor/v8/src/global-handles.h +98 -40
  209. data/vendor/v8/src/globals.h +2 -2
  210. data/vendor/v8/src/handles-inl.h +106 -9
  211. data/vendor/v8/src/handles.cc +220 -157
  212. data/vendor/v8/src/handles.h +38 -59
  213. data/vendor/v8/src/hashmap.h +3 -3
  214. data/vendor/v8/src/heap-inl.h +141 -25
  215. data/vendor/v8/src/heap-profiler.cc +117 -63
  216. data/vendor/v8/src/heap-profiler.h +38 -21
  217. data/vendor/v8/src/heap.cc +805 -564
  218. data/vendor/v8/src/heap.h +640 -594
  219. data/vendor/v8/src/hydrogen-instructions.cc +216 -73
  220. data/vendor/v8/src/hydrogen-instructions.h +259 -124
  221. data/vendor/v8/src/hydrogen.cc +996 -1171
  222. data/vendor/v8/src/hydrogen.h +163 -144
  223. data/vendor/v8/src/ia32/assembler-ia32-inl.h +12 -11
  224. data/vendor/v8/src/ia32/assembler-ia32.cc +85 -39
  225. data/vendor/v8/src/ia32/assembler-ia32.h +82 -16
  226. data/vendor/v8/src/ia32/builtins-ia32.cc +64 -58
  227. data/vendor/v8/src/ia32/code-stubs-ia32.cc +248 -324
  228. data/vendor/v8/src/ia32/code-stubs-ia32.h +3 -44
  229. data/vendor/v8/src/ia32/codegen-ia32.cc +217 -165
  230. data/vendor/v8/src/ia32/codegen-ia32.h +3 -0
  231. data/vendor/v8/src/ia32/cpu-ia32.cc +6 -5
  232. data/vendor/v8/src/ia32/debug-ia32.cc +8 -5
  233. data/vendor/v8/src/ia32/deoptimizer-ia32.cc +124 -14
  234. data/vendor/v8/src/ia32/disasm-ia32.cc +85 -62
  235. data/vendor/v8/src/ia32/frames-ia32.h +1 -1
  236. data/vendor/v8/src/ia32/full-codegen-ia32.cc +348 -435
  237. data/vendor/v8/src/ia32/ic-ia32.cc +91 -91
  238. data/vendor/v8/src/ia32/lithium-codegen-ia32.cc +500 -255
  239. data/vendor/v8/src/ia32/lithium-codegen-ia32.h +13 -4
  240. data/vendor/v8/src/ia32/lithium-gap-resolver-ia32.cc +6 -0
  241. data/vendor/v8/src/ia32/lithium-ia32.cc +122 -45
  242. data/vendor/v8/src/ia32/lithium-ia32.h +128 -41
  243. data/vendor/v8/src/ia32/macro-assembler-ia32.cc +109 -84
  244. data/vendor/v8/src/ia32/macro-assembler-ia32.h +18 -9
  245. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.cc +26 -15
  246. data/vendor/v8/src/ia32/regexp-macro-assembler-ia32.h +1 -0
  247. data/vendor/v8/src/ia32/register-allocator-ia32.cc +30 -30
  248. data/vendor/v8/src/ia32/simulator-ia32.h +4 -4
  249. data/vendor/v8/src/ia32/stub-cache-ia32.cc +383 -400
  250. data/vendor/v8/src/ia32/virtual-frame-ia32.cc +36 -13
  251. data/vendor/v8/src/ia32/virtual-frame-ia32.h +11 -5
  252. data/vendor/v8/src/ic-inl.h +12 -2
  253. data/vendor/v8/src/ic.cc +304 -221
  254. data/vendor/v8/src/ic.h +115 -58
  255. data/vendor/v8/src/interpreter-irregexp.cc +25 -21
  256. data/vendor/v8/src/interpreter-irregexp.h +2 -1
  257. data/vendor/v8/src/isolate.cc +883 -0
  258. data/vendor/v8/src/isolate.h +1304 -0
  259. data/vendor/v8/src/json.js +10 -10
  260. data/vendor/v8/src/jsregexp.cc +111 -80
  261. data/vendor/v8/src/jsregexp.h +6 -7
  262. data/vendor/v8/src/jump-target-heavy.cc +5 -8
  263. data/vendor/v8/src/jump-target-heavy.h +0 -6
  264. data/vendor/v8/src/jump-target-inl.h +1 -1
  265. data/vendor/v8/src/jump-target-light.cc +3 -3
  266. data/vendor/v8/src/lithium-allocator-inl.h +2 -0
  267. data/vendor/v8/src/lithium-allocator.cc +42 -30
  268. data/vendor/v8/src/lithium-allocator.h +8 -22
  269. data/vendor/v8/src/lithium.cc +1 -0
  270. data/vendor/v8/src/liveedit.cc +141 -99
  271. data/vendor/v8/src/liveedit.h +7 -2
  272. data/vendor/v8/src/liveobjectlist-inl.h +90 -0
  273. data/vendor/v8/src/liveobjectlist.cc +2537 -1
  274. data/vendor/v8/src/liveobjectlist.h +245 -35
  275. data/vendor/v8/src/log-utils.cc +122 -35
  276. data/vendor/v8/src/log-utils.h +33 -36
  277. data/vendor/v8/src/log.cc +299 -241
  278. data/vendor/v8/src/log.h +177 -110
  279. data/vendor/v8/src/mark-compact.cc +612 -470
  280. data/vendor/v8/src/mark-compact.h +153 -80
  281. data/vendor/v8/src/messages.cc +16 -14
  282. data/vendor/v8/src/messages.js +30 -7
  283. data/vendor/v8/src/mips/assembler-mips-inl.h +155 -35
  284. data/vendor/v8/src/mips/assembler-mips.cc +1093 -219
  285. data/vendor/v8/src/mips/assembler-mips.h +552 -153
  286. data/vendor/v8/src/mips/builtins-mips.cc +43 -100
  287. data/vendor/v8/src/mips/code-stubs-mips.cc +752 -0
  288. data/vendor/v8/src/mips/code-stubs-mips.h +511 -0
  289. data/vendor/v8/src/mips/codegen-mips-inl.h +8 -14
  290. data/vendor/v8/src/mips/codegen-mips.cc +672 -896
  291. data/vendor/v8/src/mips/codegen-mips.h +271 -69
  292. data/vendor/v8/src/mips/constants-mips.cc +44 -20
  293. data/vendor/v8/src/mips/constants-mips.h +238 -40
  294. data/vendor/v8/src/mips/cpu-mips.cc +20 -3
  295. data/vendor/v8/src/mips/debug-mips.cc +35 -7
  296. data/vendor/v8/src/mips/deoptimizer-mips.cc +91 -0
  297. data/vendor/v8/src/mips/disasm-mips.cc +329 -93
  298. data/vendor/v8/src/mips/frames-mips.cc +2 -50
  299. data/vendor/v8/src/mips/frames-mips.h +24 -9
  300. data/vendor/v8/src/mips/full-codegen-mips.cc +473 -23
  301. data/vendor/v8/src/mips/ic-mips.cc +81 -45
  302. data/vendor/v8/src/mips/jump-target-mips.cc +11 -106
  303. data/vendor/v8/src/mips/lithium-codegen-mips.h +65 -0
  304. data/vendor/v8/src/mips/lithium-mips.h +304 -0
  305. data/vendor/v8/src/mips/macro-assembler-mips.cc +2391 -390
  306. data/vendor/v8/src/mips/macro-assembler-mips.h +718 -121
  307. data/vendor/v8/src/mips/regexp-macro-assembler-mips.cc +478 -0
  308. data/vendor/v8/src/mips/regexp-macro-assembler-mips.h +250 -0
  309. data/vendor/v8/src/mips/register-allocator-mips-inl.h +0 -3
  310. data/vendor/v8/src/mips/register-allocator-mips.h +3 -2
  311. data/vendor/v8/src/mips/simulator-mips.cc +1009 -221
  312. data/vendor/v8/src/mips/simulator-mips.h +119 -36
  313. data/vendor/v8/src/mips/stub-cache-mips.cc +331 -148
  314. data/vendor/v8/src/mips/{fast-codegen-mips.cc → virtual-frame-mips-inl.h} +11 -30
  315. data/vendor/v8/src/mips/virtual-frame-mips.cc +137 -149
  316. data/vendor/v8/src/mips/virtual-frame-mips.h +294 -312
  317. data/vendor/v8/src/mirror-debugger.js +9 -8
  318. data/vendor/v8/src/mksnapshot.cc +2 -2
  319. data/vendor/v8/src/objects-debug.cc +16 -16
  320. data/vendor/v8/src/objects-inl.h +421 -195
  321. data/vendor/v8/src/objects-printer.cc +7 -7
  322. data/vendor/v8/src/objects-visiting.cc +1 -1
  323. data/vendor/v8/src/objects-visiting.h +33 -12
  324. data/vendor/v8/src/objects.cc +935 -658
  325. data/vendor/v8/src/objects.h +234 -139
  326. data/vendor/v8/src/parser.cc +484 -439
  327. data/vendor/v8/src/parser.h +35 -14
  328. data/vendor/v8/src/platform-cygwin.cc +173 -107
  329. data/vendor/v8/src/platform-freebsd.cc +224 -72
  330. data/vendor/v8/src/platform-linux.cc +234 -95
  331. data/vendor/v8/src/platform-macos.cc +215 -82
  332. data/vendor/v8/src/platform-nullos.cc +9 -3
  333. data/vendor/v8/src/platform-openbsd.cc +22 -7
  334. data/vendor/v8/src/platform-posix.cc +30 -5
  335. data/vendor/v8/src/platform-solaris.cc +120 -38
  336. data/vendor/v8/src/platform-tls-mac.h +62 -0
  337. data/vendor/v8/src/platform-tls-win32.h +62 -0
  338. data/vendor/v8/src/platform-tls.h +50 -0
  339. data/vendor/v8/src/platform-win32.cc +195 -97
  340. data/vendor/v8/src/platform.h +72 -15
  341. data/vendor/v8/src/preparse-data.cc +2 -0
  342. data/vendor/v8/src/preparser-api.cc +8 -2
  343. data/vendor/v8/src/preparser.cc +1 -1
  344. data/vendor/v8/src/prettyprinter.cc +43 -52
  345. data/vendor/v8/src/prettyprinter.h +1 -1
  346. data/vendor/v8/src/profile-generator-inl.h +0 -28
  347. data/vendor/v8/src/profile-generator.cc +942 -685
  348. data/vendor/v8/src/profile-generator.h +210 -176
  349. data/vendor/v8/src/property.cc +6 -0
  350. data/vendor/v8/src/property.h +14 -3
  351. data/vendor/v8/src/regexp-macro-assembler-irregexp.cc +1 -1
  352. data/vendor/v8/src/regexp-macro-assembler.cc +28 -19
  353. data/vendor/v8/src/regexp-macro-assembler.h +11 -6
  354. data/vendor/v8/src/regexp-stack.cc +18 -10
  355. data/vendor/v8/src/regexp-stack.h +45 -21
  356. data/vendor/v8/src/regexp.js +3 -3
  357. data/vendor/v8/src/register-allocator-inl.h +3 -3
  358. data/vendor/v8/src/register-allocator.cc +1 -7
  359. data/vendor/v8/src/register-allocator.h +5 -15
  360. data/vendor/v8/src/rewriter.cc +2 -1
  361. data/vendor/v8/src/runtime-profiler.cc +158 -128
  362. data/vendor/v8/src/runtime-profiler.h +131 -15
  363. data/vendor/v8/src/runtime.cc +2409 -1692
  364. data/vendor/v8/src/runtime.h +93 -17
  365. data/vendor/v8/src/safepoint-table.cc +3 -0
  366. data/vendor/v8/src/safepoint-table.h +9 -3
  367. data/vendor/v8/src/scanner-base.cc +21 -28
  368. data/vendor/v8/src/scanner-base.h +22 -11
  369. data/vendor/v8/src/scanner.cc +3 -5
  370. data/vendor/v8/src/scanner.h +4 -2
  371. data/vendor/v8/src/scopeinfo.cc +11 -16
  372. data/vendor/v8/src/scopeinfo.h +26 -15
  373. data/vendor/v8/src/scopes.cc +67 -37
  374. data/vendor/v8/src/scopes.h +26 -12
  375. data/vendor/v8/src/serialize.cc +193 -154
  376. data/vendor/v8/src/serialize.h +41 -36
  377. data/vendor/v8/src/small-pointer-list.h +163 -0
  378. data/vendor/v8/src/snapshot-common.cc +1 -1
  379. data/vendor/v8/src/snapshot.h +3 -1
  380. data/vendor/v8/src/spaces-inl.h +30 -25
  381. data/vendor/v8/src/spaces.cc +263 -370
  382. data/vendor/v8/src/spaces.h +178 -166
  383. data/vendor/v8/src/string-search.cc +4 -3
  384. data/vendor/v8/src/string-search.h +21 -20
  385. data/vendor/v8/src/string-stream.cc +32 -24
  386. data/vendor/v8/src/string.js +7 -7
  387. data/vendor/v8/src/stub-cache.cc +324 -248
  388. data/vendor/v8/src/stub-cache.h +181 -155
  389. data/vendor/v8/src/token.cc +3 -3
  390. data/vendor/v8/src/token.h +3 -3
  391. data/vendor/v8/src/top.cc +218 -390
  392. data/vendor/v8/src/type-info.cc +98 -32
  393. data/vendor/v8/src/type-info.h +10 -3
  394. data/vendor/v8/src/unicode.cc +1 -1
  395. data/vendor/v8/src/unicode.h +1 -1
  396. data/vendor/v8/src/utils.h +3 -0
  397. data/vendor/v8/src/v8-counters.cc +18 -11
  398. data/vendor/v8/src/v8-counters.h +34 -13
  399. data/vendor/v8/src/v8.cc +66 -121
  400. data/vendor/v8/src/v8.h +7 -4
  401. data/vendor/v8/src/v8globals.h +18 -12
  402. data/vendor/v8/src/{memory.h → v8memory.h} +0 -0
  403. data/vendor/v8/src/v8natives.js +59 -18
  404. data/vendor/v8/src/v8threads.cc +127 -114
  405. data/vendor/v8/src/v8threads.h +42 -35
  406. data/vendor/v8/src/v8utils.h +2 -39
  407. data/vendor/v8/src/variables.h +1 -1
  408. data/vendor/v8/src/version.cc +26 -5
  409. data/vendor/v8/src/version.h +4 -0
  410. data/vendor/v8/src/virtual-frame-heavy-inl.h +2 -4
  411. data/vendor/v8/src/virtual-frame-light-inl.h +5 -4
  412. data/vendor/v8/src/vm-state-inl.h +21 -17
  413. data/vendor/v8/src/vm-state.h +7 -5
  414. data/vendor/v8/src/win32-headers.h +1 -0
  415. data/vendor/v8/src/x64/assembler-x64-inl.h +12 -11
  416. data/vendor/v8/src/x64/assembler-x64.cc +80 -40
  417. data/vendor/v8/src/x64/assembler-x64.h +67 -17
  418. data/vendor/v8/src/x64/builtins-x64.cc +34 -33
  419. data/vendor/v8/src/x64/code-stubs-x64.cc +636 -377
  420. data/vendor/v8/src/x64/code-stubs-x64.h +14 -48
  421. data/vendor/v8/src/x64/codegen-x64-inl.h +1 -1
  422. data/vendor/v8/src/x64/codegen-x64.cc +158 -136
  423. data/vendor/v8/src/x64/codegen-x64.h +4 -1
  424. data/vendor/v8/src/x64/cpu-x64.cc +7 -5
  425. data/vendor/v8/src/x64/debug-x64.cc +8 -6
  426. data/vendor/v8/src/x64/deoptimizer-x64.cc +195 -20
  427. data/vendor/v8/src/x64/disasm-x64.cc +42 -23
  428. data/vendor/v8/src/x64/frames-x64.cc +1 -1
  429. data/vendor/v8/src/x64/frames-x64.h +2 -2
  430. data/vendor/v8/src/x64/full-codegen-x64.cc +780 -218
  431. data/vendor/v8/src/x64/ic-x64.cc +77 -79
  432. data/vendor/v8/src/x64/jump-target-x64.cc +1 -1
  433. data/vendor/v8/src/x64/lithium-codegen-x64.cc +698 -181
  434. data/vendor/v8/src/x64/lithium-codegen-x64.h +31 -6
  435. data/vendor/v8/src/x64/lithium-x64.cc +136 -54
  436. data/vendor/v8/src/x64/lithium-x64.h +142 -51
  437. data/vendor/v8/src/x64/macro-assembler-x64.cc +456 -187
  438. data/vendor/v8/src/x64/macro-assembler-x64.h +166 -34
  439. data/vendor/v8/src/x64/regexp-macro-assembler-x64.cc +44 -28
  440. data/vendor/v8/src/x64/regexp-macro-assembler-x64.h +8 -4
  441. data/vendor/v8/src/x64/register-allocator-x64-inl.h +3 -3
  442. data/vendor/v8/src/x64/register-allocator-x64.cc +12 -8
  443. data/vendor/v8/src/x64/simulator-x64.h +5 -5
  444. data/vendor/v8/src/x64/stub-cache-x64.cc +299 -344
  445. data/vendor/v8/src/x64/virtual-frame-x64.cc +37 -13
  446. data/vendor/v8/src/x64/virtual-frame-x64.h +13 -7
  447. data/vendor/v8/src/zone-inl.h +49 -3
  448. data/vendor/v8/src/zone.cc +42 -41
  449. data/vendor/v8/src/zone.h +37 -34
  450. data/vendor/v8/test/benchmarks/testcfg.py +100 -0
  451. data/vendor/v8/test/cctest/SConscript +5 -4
  452. data/vendor/v8/test/cctest/cctest.h +3 -2
  453. data/vendor/v8/test/cctest/cctest.status +6 -11
  454. data/vendor/v8/test/cctest/test-accessors.cc +3 -3
  455. data/vendor/v8/test/cctest/test-alloc.cc +39 -33
  456. data/vendor/v8/test/cctest/test-api.cc +1092 -205
  457. data/vendor/v8/test/cctest/test-assembler-arm.cc +39 -25
  458. data/vendor/v8/test/cctest/test-assembler-ia32.cc +36 -37
  459. data/vendor/v8/test/cctest/test-assembler-mips.cc +1098 -40
  460. data/vendor/v8/test/cctest/test-assembler-x64.cc +32 -25
  461. data/vendor/v8/test/cctest/test-ast.cc +1 -0
  462. data/vendor/v8/test/cctest/test-circular-queue.cc +8 -5
  463. data/vendor/v8/test/cctest/test-compiler.cc +24 -24
  464. data/vendor/v8/test/cctest/test-cpu-profiler.cc +140 -5
  465. data/vendor/v8/test/cctest/test-dataflow.cc +1 -0
  466. data/vendor/v8/test/cctest/test-debug.cc +136 -77
  467. data/vendor/v8/test/cctest/test-decls.cc +1 -1
  468. data/vendor/v8/test/cctest/test-deoptimization.cc +25 -24
  469. data/vendor/v8/test/cctest/test-disasm-arm.cc +9 -4
  470. data/vendor/v8/test/cctest/test-disasm-ia32.cc +10 -8
  471. data/vendor/v8/test/cctest/test-func-name-inference.cc +10 -4
  472. data/vendor/v8/test/cctest/test-heap-profiler.cc +226 -164
  473. data/vendor/v8/test/cctest/test-heap.cc +240 -217
  474. data/vendor/v8/test/cctest/test-liveedit.cc +1 -0
  475. data/vendor/v8/test/cctest/test-log-stack-tracer.cc +18 -20
  476. data/vendor/v8/test/cctest/test-log.cc +114 -108
  477. data/vendor/v8/test/cctest/test-macro-assembler-x64.cc +247 -177
  478. data/vendor/v8/test/cctest/test-mark-compact.cc +129 -90
  479. data/vendor/v8/test/cctest/test-parsing.cc +15 -14
  480. data/vendor/v8/test/cctest/test-platform-linux.cc +1 -0
  481. data/vendor/v8/test/cctest/test-platform-tls.cc +66 -0
  482. data/vendor/v8/test/cctest/test-platform-win32.cc +1 -0
  483. data/vendor/v8/test/cctest/test-profile-generator.cc +1 -1
  484. data/vendor/v8/test/cctest/test-regexp.cc +53 -41
  485. data/vendor/v8/test/cctest/test-reloc-info.cc +18 -11
  486. data/vendor/v8/test/cctest/test-serialize.cc +44 -43
  487. data/vendor/v8/test/cctest/test-sockets.cc +8 -3
  488. data/vendor/v8/test/cctest/test-spaces.cc +47 -29
  489. data/vendor/v8/test/cctest/test-strings.cc +20 -20
  490. data/vendor/v8/test/cctest/test-thread-termination.cc +8 -3
  491. data/vendor/v8/test/cctest/test-threads.cc +5 -3
  492. data/vendor/v8/test/cctest/test-utils.cc +5 -4
  493. data/vendor/v8/test/cctest/testcfg.py +7 -3
  494. data/vendor/v8/test/es5conform/es5conform.status +2 -77
  495. data/vendor/v8/test/es5conform/testcfg.py +1 -1
  496. data/vendor/v8/test/message/testcfg.py +1 -1
  497. data/vendor/v8/test/mjsunit/accessors-on-global-object.js +3 -3
  498. data/vendor/v8/test/mjsunit/array-concat.js +43 -1
  499. data/vendor/v8/test/mjsunit/array-join.js +25 -0
  500. data/vendor/v8/test/mjsunit/bitops-info.js +7 -1
  501. data/vendor/v8/test/mjsunit/compiler/array-length.js +2 -2
  502. data/vendor/v8/test/mjsunit/compiler/global-accessors.js +47 -0
  503. data/vendor/v8/test/mjsunit/compiler/pic.js +1 -1
  504. data/vendor/v8/test/mjsunit/compiler/regress-loadfield.js +65 -0
  505. data/vendor/v8/test/mjsunit/math-sqrt.js +5 -1
  506. data/vendor/v8/test/mjsunit/mjsunit.js +59 -8
  507. data/vendor/v8/test/mjsunit/mjsunit.status +0 -12
  508. data/vendor/v8/test/mjsunit/mul-exhaustive.js +129 -11
  509. data/vendor/v8/test/mjsunit/negate-zero.js +1 -1
  510. data/vendor/v8/test/mjsunit/object-freeze.js +5 -13
  511. data/vendor/v8/test/mjsunit/object-prevent-extensions.js +9 -50
  512. data/vendor/v8/test/mjsunit/object-seal.js +4 -13
  513. data/vendor/v8/test/mjsunit/override-eval-with-non-function.js +36 -0
  514. data/vendor/v8/test/mjsunit/regress/regress-1145.js +54 -0
  515. data/vendor/v8/test/mjsunit/regress/regress-1172-bis.js +37 -0
  516. data/vendor/v8/test/mjsunit/regress/regress-1181.js +54 -0
  517. data/vendor/v8/test/mjsunit/regress/regress-1207.js +35 -0
  518. data/vendor/v8/test/mjsunit/regress/regress-1209.js +34 -0
  519. data/vendor/v8/test/mjsunit/regress/regress-1210.js +48 -0
  520. data/vendor/v8/test/mjsunit/regress/regress-1213.js +43 -0
  521. data/vendor/v8/test/mjsunit/regress/regress-1218.js +29 -0
  522. data/vendor/v8/test/mjsunit/regress/regress-1229.js +79 -0
  523. data/vendor/v8/test/mjsunit/regress/regress-1233.js +47 -0
  524. data/vendor/v8/test/mjsunit/regress/regress-1236.js +34 -0
  525. data/vendor/v8/test/mjsunit/regress/regress-1237.js +36 -0
  526. data/vendor/v8/test/mjsunit/regress/regress-1240.js +39 -0
  527. data/vendor/v8/test/mjsunit/regress/regress-1257.js +58 -0
  528. data/vendor/v8/test/mjsunit/regress/regress-1278.js +69 -0
  529. data/vendor/v8/test/mjsunit/regress/regress-create-exception.js +1 -0
  530. data/vendor/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js +52 -0
  531. data/vendor/v8/test/mjsunit/sin-cos.js +15 -10
  532. data/vendor/v8/test/mjsunit/smi-negative-zero.js +2 -2
  533. data/vendor/v8/test/mjsunit/str-to-num.js +1 -1
  534. data/vendor/v8/test/mjsunit/strict-mode.js +435 -0
  535. data/vendor/v8/test/mjsunit/testcfg.py +23 -6
  536. data/vendor/v8/test/mozilla/mozilla.status +0 -2
  537. data/vendor/v8/test/mozilla/testcfg.py +1 -1
  538. data/vendor/v8/test/preparser/empty.js +28 -0
  539. data/vendor/v8/test/preparser/functions-only.js +38 -0
  540. data/vendor/v8/test/preparser/non-alphanum.js +34 -0
  541. data/vendor/v8/test/preparser/symbols-only.js +49 -0
  542. data/vendor/v8/test/preparser/testcfg.py +90 -0
  543. data/vendor/v8/test/sputnik/testcfg.py +1 -1
  544. data/vendor/v8/test/test262/README +16 -0
  545. data/vendor/v8/test/test262/harness-adapt.js +80 -0
  546. data/vendor/v8/test/test262/test262.status +1506 -0
  547. data/vendor/v8/test/test262/testcfg.py +123 -0
  548. data/vendor/v8/tools/freebsd-tick-processor +10 -0
  549. data/vendor/v8/tools/gyp/v8.gyp +8 -33
  550. data/vendor/v8/tools/linux-tick-processor +5 -3
  551. data/vendor/v8/tools/test.py +37 -14
  552. data/vendor/v8/tools/tickprocessor.js +22 -8
  553. data/vendor/v8/tools/visual_studio/v8_base.vcproj +13 -1
  554. data/vendor/v8/tools/visual_studio/v8_base_arm.vcproj +5 -1
  555. data/vendor/v8/tools/visual_studio/v8_base_x64.vcproj +5 -1
  556. data/vendor/v8/tools/visual_studio/x64.vsprops +1 -0
  557. metadata +1495 -1341
  558. data/ext/extconf.rb +0 -22
  559. data/ext/mustang.cpp +0 -58
  560. data/vendor/v8/src/top.h +0 -608
@@ -70,8 +70,11 @@ class LCodeGen;
   V(CheckFunction) \
   V(CheckInstanceType) \
   V(CheckMap) \
+  V(CheckNonSmi) \
   V(CheckPrototypeMaps) \
   V(CheckSmi) \
+  V(ClassOfTest) \
+  V(ClassOfTestAndBranch) \
   V(CmpID) \
   V(CmpIDAndBranch) \
   V(CmpJSObjectEq) \
@@ -87,12 +90,18 @@ class LCodeGen;
   V(Deoptimize) \
   V(DivI) \
   V(DoubleToI) \
+  V(ExternalArrayLength) \
+  V(FixedArrayLength) \
   V(FunctionLiteral) \
   V(Gap) \
+  V(GetCachedArrayIndex) \
   V(GlobalObject) \
   V(GlobalReceiver) \
   V(Goto) \
-  V(FixedArrayLength) \
+  V(HasInstanceType) \
+  V(HasInstanceTypeAndBranch) \
+  V(HasCachedArrayIndex) \
+  V(HasCachedArrayIndexAndBranch) \
   V(InstanceOf) \
   V(InstanceOfAndBranch) \
   V(InstanceOfKnownGlobal) \
@@ -104,24 +113,20 @@ class LCodeGen;
   V(IsSmi) \
   V(IsSmiAndBranch) \
   V(JSArrayLength) \
-  V(HasInstanceType) \
-  V(HasInstanceTypeAndBranch) \
-  V(HasCachedArrayIndex) \
-  V(HasCachedArrayIndexAndBranch) \
-  V(ClassOfTest) \
-  V(ClassOfTestAndBranch) \
   V(Label) \
   V(LazyBailout) \
   V(LoadContextSlot) \
   V(LoadElements) \
-  V(LoadGlobal) \
+  V(LoadExternalArrayPointer) \
+  V(LoadGlobalCell) \
+  V(LoadGlobalGeneric) \
   V(LoadKeyedFastElement) \
   V(LoadKeyedGeneric) \
+  V(LoadKeyedSpecializedArrayElement) \
   V(LoadNamedField) \
+  V(LoadNamedFieldPolymorphic) \
   V(LoadNamedGeneric) \
   V(LoadFunctionPrototype) \
-  V(LoadPixelArrayElement) \
-  V(LoadPixelArrayExternalPointer) \
   V(ModI) \
   V(MulI) \
   V(NumberTagD) \
@@ -131,7 +136,6 @@ class LCodeGen;
   V(OsrEntry) \
   V(OuterContext) \
   V(Parameter) \
-  V(PixelArrayLength) \
   V(Power) \
   V(PushArgument) \
   V(RegExpLiteral) \
@@ -144,13 +148,15 @@ class LCodeGen;
   V(StoreGlobal) \
   V(StoreKeyedFastElement) \
   V(StoreKeyedGeneric) \
+  V(StoreKeyedSpecializedArrayElement) \
   V(StoreNamedField) \
   V(StoreNamedGeneric) \
-  V(StorePixelArrayElement) \
   V(StringCharCodeAt) \
+  V(StringCharFromCode) \
   V(StringLength) \
   V(SubI) \
   V(TaggedToI) \
+  V(ToFastProperties) \
   V(Throw) \
   V(Typeof) \
   V(TypeofIs) \
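The V(...) lists above are X-macros: each concrete lithium instruction is named exactly once, and the list is re-expanded with different definitions of V to generate the opcode enum, the predicates, and the dispatch tables. A minimal sketch of the pattern, with illustrative names rather than V8's actual macros:

  // Define the list once; each expansion supplies its own V.
  #define INSTRUCTION_LIST(V) \
    V(CheckSmi)               \
    V(CheckNonSmi)            \
    V(StringCharFromCode)

  // Expansion: an enum with one value per instruction.
  #define DECLARE_OPCODE(name) k##name,
  enum Opcode { INSTRUCTION_LIST(DECLARE_OPCODE) kNumberOfOpcodes };
  #undef DECLARE_OPCODE

This is why additions such as V(CheckNonSmi) or V(ToFastProperties) are one-line edits in this hunk: every enum and switch built from the list picks the new instruction up automatically.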
@@ -730,6 +736,17 @@ class LHasInstanceTypeAndBranch: public LControlInstruction<1, 0> {
 };


+class LGetCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LGetCachedArrayIndex(LOperand* value) {
+    inputs_[0] = value;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(GetCachedArrayIndex, "get-cached-array-index")
+  DECLARE_HYDROGEN_ACCESSOR(GetCachedArrayIndex)
+};
+
+
 class LHasCachedArrayIndex: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LHasCachedArrayIndex(LOperand* value) {
@@ -832,10 +849,11 @@ class LInstanceOfAndBranch: public LControlInstruction<2, 0> {
 };


-class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 0> {
+class LInstanceOfKnownGlobal: public LTemplateInstruction<1, 1, 1> {
  public:
-  explicit LInstanceOfKnownGlobal(LOperand* value) {
+  LInstanceOfKnownGlobal(LOperand* value, LOperand* temp) {
     inputs_[0] = value;
+    temps_[0] = temp;
   }

   DECLARE_CONCRETE_INSTRUCTION(InstanceOfKnownGlobal,
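The LInstanceOfKnownGlobal change shows what the three LTemplateInstruction arguments encode: result, input, and temp operand counts. Growing <1, 1, 0> to <1, 1, 1> reserves one scratch operand (temps_[0]) that the register allocator must satisfy before codegen runs. A rough sketch of the shape this implies; V8's real declaration uses a helper container so zero counts are legal, whereas the plain arrays below only compile for counts of one or more:

  class LOperand;

  // Sketch only: operand storage sized by template arguments.
  template <int kResults, int kInputs, int kTemps>
  class LTemplateInstructionSketch {
   public:
    LOperand* InputAt(int i) { return inputs_[i]; }
    LOperand* TempAt(int i) { return temps_[i]; }
   protected:
    LOperand* results_[kResults];
    LOperand* inputs_[kInputs];
    LOperand* temps_[kTemps];  // scratch registers codegen may clobber
  };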
@@ -984,14 +1002,14 @@ class LJSArrayLength: public LTemplateInstruction<1, 1, 0> {
 };


-class LPixelArrayLength: public LTemplateInstruction<1, 1, 0> {
+class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> {
  public:
-  explicit LPixelArrayLength(LOperand* value) {
+  explicit LExternalArrayLength(LOperand* value) {
     inputs_[0] = value;
   }

-  DECLARE_CONCRETE_INSTRUCTION(PixelArrayLength, "pixel-array-length")
-  DECLARE_HYDROGEN_ACCESSOR(PixelArrayLength)
+  DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length")
+  DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength)
 };


@@ -1118,6 +1136,19 @@ class LLoadNamedField: public LTemplateInstruction<1, 1, 0> {
 };


+class LLoadNamedFieldPolymorphic: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LLoadNamedFieldPolymorphic(LOperand* object) {
+    inputs_[0] = object;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(LoadNamedField, "load-named-field-polymorphic")
+  DECLARE_HYDROGEN_ACCESSOR(LoadNamedFieldPolymorphic)
+
+  LOperand* object() { return inputs_[0]; }
+};
+
+
 class LLoadNamedGeneric: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LLoadNamedGeneric(LOperand* object) {
@@ -1155,14 +1186,14 @@ class LLoadElements: public LTemplateInstruction<1, 1, 0> {
 };


-class LLoadPixelArrayExternalPointer: public LTemplateInstruction<1, 1, 0> {
+class LLoadExternalArrayPointer: public LTemplateInstruction<1, 1, 0> {
  public:
-  explicit LLoadPixelArrayExternalPointer(LOperand* object) {
+  explicit LLoadExternalArrayPointer(LOperand* object) {
     inputs_[0] = object;
   }

-  DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayExternalPointer,
-                               "load-pixel-array-external-pointer")
+  DECLARE_CONCRETE_INSTRUCTION(LoadExternalArrayPointer,
+                               "load-external-array-pointer")
 };


@@ -1181,19 +1212,23 @@ class LLoadKeyedFastElement: public LTemplateInstruction<1, 2, 0> {
 };


-class LLoadPixelArrayElement: public LTemplateInstruction<1, 2, 0> {
+class LLoadKeyedSpecializedArrayElement: public LTemplateInstruction<1, 2, 0> {
  public:
-  LLoadPixelArrayElement(LOperand* external_pointer, LOperand* key) {
+  LLoadKeyedSpecializedArrayElement(LOperand* external_pointer,
+                                    LOperand* key) {
     inputs_[0] = external_pointer;
     inputs_[1] = key;
   }

-  DECLARE_CONCRETE_INSTRUCTION(LoadPixelArrayElement,
-                               "load-pixel-array-element")
-  DECLARE_HYDROGEN_ACCESSOR(LoadPixelArrayElement)
+  DECLARE_CONCRETE_INSTRUCTION(LoadKeyedSpecializedArrayElement,
+                               "load-keyed-specialized-array-element")
+  DECLARE_HYDROGEN_ACCESSOR(LoadKeyedSpecializedArrayElement)

   LOperand* external_pointer() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
+  ExternalArrayType array_type() const {
+    return hydrogen()->array_type();
+  }
 };

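This group of renames generalizes the pixel-array-only instructions to all external (typed) arrays: the lithium instruction now carries an array_type() taken from its hydrogen counterpart, and the codegen selects the element width and load/store form from it. A hedged illustration of that dispatch; the enumerator names below are assumptions for the sketch, not V8's exact list:

  enum ExternalArrayTypeSketch {
    kSketchByteArray,
    kSketchShortArray,
    kSketchIntArray,
    kSketchFloatArray
  };

  // One lithium opcode, many element kinds: the codegen branches on the
  // array type instead of needing a per-kind instruction.
  int ElementSizeInBytes(ExternalArrayTypeSketch type) {
    switch (type) {
      case kSketchByteArray:  return 1;
      case kSketchShortArray: return 2;
      case kSketchIntArray:   return 4;
      case kSketchFloatArray: return 4;
    }
    return 0;  // unreachable
  }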
@@ -1211,10 +1246,25 @@ class LLoadKeyedGeneric: public LTemplateInstruction<1, 2, 0> {
 };


-class LLoadGlobal: public LTemplateInstruction<1, 0, 0> {
+class LLoadGlobalCell: public LTemplateInstruction<1, 0, 0> {
  public:
-  DECLARE_CONCRETE_INSTRUCTION(LoadGlobal, "load-global")
-  DECLARE_HYDROGEN_ACCESSOR(LoadGlobal)
+  DECLARE_CONCRETE_INSTRUCTION(LoadGlobalCell, "load-global-cell")
+  DECLARE_HYDROGEN_ACCESSOR(LoadGlobalCell)
+};
+
+
+class LLoadGlobalGeneric: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LLoadGlobalGeneric(LOperand* global_object) {
+    inputs_[0] = global_object;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(LoadGlobalGeneric, "load-global-generic")
+  DECLARE_HYDROGEN_ACCESSOR(LoadGlobalGeneric)
+
+  LOperand* global_object() { return inputs_[0]; }
+  Handle<Object> name() const { return hydrogen()->name(); }
+  bool for_typeof() const { return hydrogen()->for_typeof(); }
 };

@@ -1246,11 +1296,12 @@ class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
 };


-class LStoreContextSlot: public LTemplateInstruction<0, 2, 0> {
+class LStoreContextSlot: public LTemplateInstruction<0, 2, 1> {
  public:
-  LStoreContextSlot(LOperand* context, LOperand* value) {
+  LStoreContextSlot(LOperand* context, LOperand* value, LOperand* temp) {
     inputs_[0] = context;
     inputs_[1] = value;
+    temps_[0] = temp;
   }

   DECLARE_CONCRETE_INSTRUCTION(StoreContextSlot, "store-context-slot")
@@ -1299,9 +1350,15 @@ class LGlobalObject: public LTemplateInstruction<1, 0, 0> {
 };


-class LGlobalReceiver: public LTemplateInstruction<1, 0, 0> {
+class LGlobalReceiver: public LTemplateInstruction<1, 1, 0> {
  public:
+  explicit LGlobalReceiver(LOperand* global_object) {
+    inputs_[0] = global_object;
+  }
+
   DECLARE_CONCRETE_INSTRUCTION(GlobalReceiver, "global-receiver")
+
+  LOperand* global() { return InputAt(0); }
 };

@@ -1401,7 +1458,7 @@ class LCallRuntime: public LTemplateInstruction<1, 0, 0> {
   DECLARE_CONCRETE_INSTRUCTION(CallRuntime, "call-runtime")
   DECLARE_HYDROGEN_ACCESSOR(CallRuntime)

-  Runtime::Function* function() const { return hydrogen()->function(); }
+  const Runtime::Function* function() const { return hydrogen()->function(); }
   int arity() const { return hydrogen()->argument_count(); }
 };

@@ -1564,23 +1621,26 @@ class LStoreKeyedFastElement: public LTemplateInstruction<0, 3, 0> {
 };


-class LStorePixelArrayElement: public LTemplateInstruction<0, 3, 0> {
+class LStoreKeyedSpecializedArrayElement: public LTemplateInstruction<0, 3, 0> {
  public:
-  LStorePixelArrayElement(LOperand* external_pointer,
-                          LOperand* key,
-                          LOperand* val) {
+  LStoreKeyedSpecializedArrayElement(LOperand* external_pointer,
+                                     LOperand* key,
+                                     LOperand* val) {
     inputs_[0] = external_pointer;
     inputs_[1] = key;
     inputs_[2] = val;
   }

-  DECLARE_CONCRETE_INSTRUCTION(StorePixelArrayElement,
-                               "store-pixel-array-element")
-  DECLARE_HYDROGEN_ACCESSOR(StorePixelArrayElement)
+  DECLARE_CONCRETE_INSTRUCTION(StoreKeyedSpecializedArrayElement,
+                               "store-keyed-specialized-array-element")
+  DECLARE_HYDROGEN_ACCESSOR(StoreKeyedSpecializedArrayElement)

   LOperand* external_pointer() { return inputs_[0]; }
   LOperand* key() { return inputs_[1]; }
   LOperand* value() { return inputs_[2]; }
+  ExternalArrayType array_type() const {
+    return hydrogen()->array_type();
+  }
 };


@@ -1617,6 +1677,19 @@ class LStringCharCodeAt: public LTemplateInstruction<1, 2, 0> {
 };


+class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LStringCharFromCode(LOperand* char_code) {
+    inputs_[0] = char_code;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(StringCharFromCode, "string-char-from-code")
+  DECLARE_HYDROGEN_ACCESSOR(StringCharFromCode)
+
+  LOperand* char_code() { return inputs_[0]; }
+};
+
+
 class LStringLength: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LStringLength(LOperand* string) {
@@ -1679,20 +1752,21 @@ class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 1> {

 class LCheckSmi: public LTemplateInstruction<0, 1, 0> {
  public:
-  LCheckSmi(LOperand* value, Condition condition)
-      : condition_(condition) {
+  explicit LCheckSmi(LOperand* value) {
     inputs_[0] = value;
   }

-  Condition condition() const { return condition_; }
+  DECLARE_CONCRETE_INSTRUCTION(CheckSmi, "check-smi")
+};
+

-  virtual void CompileToNative(LCodeGen* generator);
-  virtual const char* Mnemonic() const {
-    return (condition_ == zero) ? "check-non-smi" : "check-smi";
+class LCheckNonSmi: public LTemplateInstruction<0, 1, 0> {
+ public:
+  explicit LCheckNonSmi(LOperand* value) {
+    inputs_[0] = value;
   }

-  private:
-  Condition condition_;
+  DECLARE_CONCRETE_INSTRUCTION(CheckNonSmi, "check-non-smi")
 };

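This split replaces one condition-parameterized instruction (with its own Condition field, CompileToNative, and Mnemonic) by two stateless concrete instructions that share the generic DECLARE_CONCRETE_INSTRUCTION machinery. Both compile down to the same smi tag test; as a sketch (the tag constants match V8's scheme, where a smi carries a zero low bit):

  const intptr_t kSmiTagMaskSketch = 1;  // low tag bit of a tagged value
  const intptr_t kSmiTagSketch = 0;      // smis are tagged with 0

  bool IsSmiSketch(intptr_t tagged_value) {
    return (tagged_value & kSmiTagMaskSketch) == kSmiTagSketch;
  }

LCheckSmi bails out to deoptimization when the predicate is false; LCheckNonSmi when it is true.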
@@ -1726,6 +1800,17 @@ class LFunctionLiteral: public LTemplateInstruction<1, 0, 0> {
 };


+class LToFastProperties: public LTemplateInstruction<1, 1, 0> {
+ public:
+  explicit LToFastProperties(LOperand* value) {
+    inputs_[0] = value;
+  }
+
+  DECLARE_CONCRETE_INSTRUCTION(ToFastProperties, "to-fast-properties")
+  DECLARE_HYDROGEN_ACCESSOR(ToFastProperties)
+};
+
+
 class LTypeof: public LTemplateInstruction<1, 1, 0> {
  public:
   explicit LTypeof(LOperand* value) {
@@ -1830,8 +1915,9 @@ class LStackCheck: public LTemplateInstruction<0, 0, 0> {
 class LChunkBuilder;
 class LChunk: public ZoneObject {
  public:
-  explicit LChunk(HGraph* graph)
+  explicit LChunk(CompilationInfo* info, HGraph* graph)
     : spill_slot_count_(0),
+      info_(info),
       graph_(graph),
       instructions_(32),
       pointer_maps_(8),
@@ -1848,6 +1934,7 @@ class LChunk: public ZoneObject {
   int ParameterAt(int index);
   int GetParameterStackSlot(int index) const;
   int spill_slot_count() const { return spill_slot_count_; }
+  CompilationInfo* info() const { return info_; }
   HGraph* graph() const { return graph_; }
   const ZoneList<LInstruction*>* instructions() const { return &instructions_; }
   void AddGapMove(int index, LOperand* from, LOperand* to);
@@ -1884,6 +1971,7 @@ class LChunk: public ZoneObject {

  private:
   int spill_slot_count_;
+  CompilationInfo* info_;
   HGraph* const graph_;
   ZoneList<LInstruction*> instructions_;
   ZoneList<LPointerMap*> pointer_maps_;
@@ -1893,8 +1981,9 @@ class LChunk: public ZoneObject {

 class LChunkBuilder BASE_EMBEDDED {
  public:
-  LChunkBuilder(HGraph* graph, LAllocator* allocator)
+  LChunkBuilder(CompilationInfo* info, HGraph* graph, LAllocator* allocator)
     : chunk_(NULL),
+      info_(info),
       graph_(graph),
       status_(UNUSED),
       current_instruction_(NULL),
@@ -1923,6 +2012,7 @@ class LChunkBuilder BASE_EMBEDDED {
   };

   LChunk* chunk() const { return chunk_; }
+  CompilationInfo* info() const { return info_; }
   HGraph* graph() const { return graph_; }

   bool is_unused() const { return status_ == UNUSED; }
@@ -2029,6 +2119,7 @@ class LChunkBuilder BASE_EMBEDDED {
                                HArithmeticBinaryOperation* instr);

   LChunk* chunk_;
+  CompilationInfo* info_;
   HGraph* const graph_;
   Status status_;
   HInstruction* current_instruction_;
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -40,36 +40,156 @@
 namespace v8 {
 namespace internal {

-MacroAssembler::MacroAssembler(void* buffer, int size)
-    : Assembler(buffer, size),
+MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
+    : Assembler(arg_isolate, buffer, size),
       generating_stub_(false),
       allow_stub_calls_(true),
-      code_object_(Heap::undefined_value()) {
+      root_array_available_(true) {
+  if (isolate() != NULL) {
+    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
+                                  isolate());
+  }
+}
+
+
+static intptr_t RootRegisterDelta(ExternalReference other, Isolate* isolate) {
+  Address roots_register_value = kRootRegisterBias +
+      reinterpret_cast<Address>(isolate->heap()->roots_address());
+  intptr_t delta = other.address() - roots_register_value;
+  return delta;
+}
+
+
+Operand MacroAssembler::ExternalOperand(ExternalReference target,
+                                        Register scratch) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(target, isolate());
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      return Operand(kRootRegister, static_cast<int32_t>(delta));
+    }
+  }
+  movq(scratch, target);
+  return Operand(scratch, 0);
+}
+
+
+void MacroAssembler::Load(Register destination, ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(source, isolate());
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
+      return;
+    }
+  }
+  // Safe code.
+  if (destination.is(rax)) {
+    load_rax(source);
+  } else {
+    movq(kScratchRegister, source);
+    movq(destination, Operand(kScratchRegister, 0));
+  }
+}
+
+
+void MacroAssembler::Store(ExternalReference destination, Register source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(destination, isolate());
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
+      return;
+    }
+  }
+  // Safe code.
+  if (source.is(rax)) {
+    store_rax(destination);
+  } else {
+    movq(kScratchRegister, destination);
+    movq(Operand(kScratchRegister, 0), source);
+  }
+}
+
+
+void MacroAssembler::LoadAddress(Register destination,
+                                 ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(source, isolate());
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
+      return;
+    }
+  }
+  // Safe code.
+  movq(destination, source);
+}
+
+
+int MacroAssembler::LoadAddressSize(ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    // This calculation depends on the internals of LoadAddress.
+    // It's correctness is ensured by the asserts in the Call
+    // instruction below.
+    intptr_t delta = RootRegisterDelta(source, isolate());
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      // Operand is lea(scratch, Operand(kRootRegister, delta));
+      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
+      int size = 4;
+      if (!is_int8(static_cast<int32_t>(delta))) {
+        size += 3;  // Need full four-byte displacement in lea.
+      }
+      return size;
+    }
+  }
+  // Size of movq(destination, src);
+  return 10;
 }


 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
-  movq(destination, Operand(kRootRegister, index << kPointerSizeLog2));
+  ASSERT(root_array_available_);
+  movq(destination, Operand(kRootRegister,
+                            (index << kPointerSizeLog2) - kRootRegisterBias));
+}
+
+
+void MacroAssembler::LoadRootIndexed(Register destination,
+                                     Register variable_offset,
+                                     int fixed_offset) {
+  ASSERT(root_array_available_);
+  movq(destination,
+       Operand(kRootRegister,
+               variable_offset, times_pointer_size,
+               (fixed_offset << kPointerSizeLog2) - kRootRegisterBias));
 }


 void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
-  movq(Operand(kRootRegister, index << kPointerSizeLog2), source);
+  ASSERT(root_array_available_);
+  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
+       source);
 }


 void MacroAssembler::PushRoot(Heap::RootListIndex index) {
-  push(Operand(kRootRegister, index << kPointerSizeLog2));
+  ASSERT(root_array_available_);
+  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
 }


 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
-  cmpq(with, Operand(kRootRegister, index << kPointerSizeLog2));
+  ASSERT(root_array_available_);
+  cmpq(with, Operand(kRootRegister,
+                     (index << kPointerSizeLog2) - kRootRegisterBias));
 }


 void MacroAssembler::CompareRoot(const Operand& with,
                                  Heap::RootListIndex index) {
+  ASSERT(root_array_available_);
   ASSERT(!with.AddressUsesRegister(kScratchRegister));
   LoadRoot(kScratchRegister, index);
   cmpq(with, kScratchRegister);
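All of the new root-register machinery above rests on one observation: an external reference can be addressed as a displacement off kRootRegister whenever its distance from the register's biased value fits in a signed 32-bit integer. The size accounting in LoadAddressSize then follows directly from x64 instruction encoding; a standalone sketch of both checks (names illustrative):

  #include <cstdint>

  bool FitsInInt32(intptr_t delta) {
    return delta == static_cast<intptr_t>(static_cast<int32_t>(delta));
  }

  // lea reg, [kRootRegister + delta] encodes as REX.W + 8D + ModRM + disp:
  // 4 bytes with an 8-bit displacement, 7 with a 32-bit one (the 4-or-7 in
  // LoadAddressSize). A movq of a full 64-bit immediate address is 10 bytes,
  // hence the fallback return value.
  int LeaSizeInBytes(intptr_t delta) {
    bool fits_in_int8 = (delta >= -128 && delta <= 127);
    return fits_in_int8 ? 4 : 7;
  }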
@@ -79,7 +199,7 @@ void MacroAssembler::CompareRoot(const Operand& with,
 void MacroAssembler::RecordWriteHelper(Register object,
                                        Register addr,
                                        Register scratch) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Check that the object is not in new space.
     NearLabel not_in_new_space;
     InNewSpace(object, scratch, not_equal, &not_in_new_space);
@@ -111,7 +231,7 @@ void MacroAssembler::RecordWrite(Register object,
   ASSERT(!object.is(rsi) && !value.is(rsi) && !index.is(rsi));

   // First, check if a write barrier is even needed. The tests below
-  // catch stores of Smis and stores into young gen.
+  // catch stores of smis and stores into the young generation.
   Label done;
   JumpIfSmi(value, &done);
@@ -123,7 +243,7 @@ void MacroAssembler::RecordWrite(Register object,
   // clobbering done inside RecordWriteNonSmi but it's necessary to
   // avoid having the fast case for smis leave the registers
   // unchanged.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
@@ -136,11 +256,11 @@ void MacroAssembler::RecordWrite(Register object,
                                  Register value) {
   // The compiled code assumes that record write doesn't change the
   // context register, so we check that none of the clobbered
-  // registers are esi.
+  // registers are rsi.
   ASSERT(!object.is(rsi) && !value.is(rsi) && !address.is(rsi));

   // First, check if a write barrier is even needed. The tests below
-  // catch stores of Smis and stores into young gen.
+  // catch stores of smis and stores into the young generation.
   Label done;
   JumpIfSmi(value, &done);
@@ -152,7 +272,7 @@ void MacroAssembler::RecordWrite(Register object,

   // Clobber all input registers when running with the debug-code flag
   // turned on to provoke errors.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(address, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(value, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
@@ -166,7 +286,7 @@ void MacroAssembler::RecordWriteNonSmi(Register object,
                                        Register index) {
   Label done;

-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     NearLabel okay;
     JumpIfNotSmi(object, &okay);
     Abort("MacroAssembler::RecordWriteNonSmi cannot deal with smis");
@@ -210,7 +330,7 @@ void MacroAssembler::RecordWriteNonSmi(Register object,

   // Clobber all input registers when running with the debug-code flag
   // turned on to provoke errors.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(object, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(scratch, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
     movq(index, BitCast<int64_t>(kZapValue), RelocInfo::NONE);
@@ -218,12 +338,12 @@ void MacroAssembler::RecordWriteNonSmi(Register object,
 }

 void MacroAssembler::Assert(Condition cc, const char* msg) {
-  if (FLAG_debug_code) Check(cc, msg);
+  if (emit_debug_code()) Check(cc, msg);
 }


 void MacroAssembler::AssertFastElements(Register elements) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     NearLabel ok;
     CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
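The recurring FLAG_debug_code → emit_debug_code() substitution in these hunks replaces a read of the process-global flag with a per-assembler accessor, consistent with the Isolate* now threaded through the MacroAssembler constructor. A sketch of the likely shape (an assumption about this era's implementation, not a quote from it):

  // The assembler captures the flag once at construction, so emitted code
  // depends on per-assembler state rather than a global read at emit time.
  class AssemblerBaseSketch {
   public:
    explicit AssemblerBaseSketch(bool debug_code)
        : emit_debug_code_(debug_code) {}
    bool emit_debug_code() const { return emit_debug_code_; }
   private:
    bool emit_debug_code_;
  };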
@@ -378,9 +498,9 @@ void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {


 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
-  Runtime::Function* function = Runtime::FunctionForId(id);
+  const Runtime::Function* function = Runtime::FunctionForId(id);
   Set(rax, function->nargs);
-  movq(rbx, ExternalReference(function));
+  LoadAddress(rbx, ExternalReference(function, isolate()));
   CEntryStub ces(1);
   ces.SaveDoubles();
   CallStub(&ces);
@@ -393,7 +513,8 @@ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::FunctionId id,
 }


-void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
+void MacroAssembler::CallRuntime(const Runtime::Function* f,
+                                 int num_arguments) {
   // If the expected number of arguments of the runtime function is
   // constant, we check that the actual number of arguments match the
   // expectation.
@@ -407,19 +528,19 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
   // should remove this need and make the runtime routine entry code
   // smarter.
   Set(rax, num_arguments);
-  movq(rbx, ExternalReference(f));
+  LoadAddress(rbx, ExternalReference(f, isolate()));
   CEntryStub ces(f->result_size);
   CallStub(&ces);
 }


-MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
+MaybeObject* MacroAssembler::TryCallRuntime(const Runtime::Function* f,
                                             int num_arguments) {
   if (f->nargs >= 0 && f->nargs != num_arguments) {
     IllegalOperation(num_arguments);
     // Since we did not call the stub, there was no allocation failure.
     // Return some non-failure object.
-    return Heap::undefined_value();
+    return HEAP->undefined_value();
   }

   // TODO(1236192): Most runtime routines don't need the number of
@@ -427,7 +548,7 @@ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
   // should remove this need and make the runtime routine entry code
   // smarter.
   Set(rax, num_arguments);
-  movq(rbx, ExternalReference(f));
+  LoadAddress(rbx, ExternalReference(f, isolate()));
   CEntryStub ces(f->result_size);
   return TryCallStub(&ces);
 }
@@ -436,7 +557,7 @@ MaybeObject* MacroAssembler::TryCallRuntime(Runtime::Function* f,
 void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                            int num_arguments) {
   Set(rax, num_arguments);
-  movq(rbx, ext);
+  LoadAddress(rbx, ext);

   CEntryStub stub(1);
   CallStub(&stub);
@@ -483,14 +604,16 @@ MaybeObject* MacroAssembler::TryTailCallExternalReference(
 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                      int num_arguments,
                                      int result_size) {
-  TailCallExternalReference(ExternalReference(fid), num_arguments, result_size);
+  TailCallExternalReference(ExternalReference(fid, isolate()),
+                            num_arguments,
+                            result_size);
 }


 MaybeObject* MacroAssembler::TryTailCallRuntime(Runtime::FunctionId fid,
                                                 int num_arguments,
                                                 int result_size) {
-  return TryTailCallExternalReference(ExternalReference(fid),
+  return TryTailCallExternalReference(ExternalReference(fid, isolate()),
                                       num_arguments,
                                       result_size);
 }
@@ -537,12 +660,12 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
537
660
  ExternalReference::handle_scope_level_address(),
538
661
  next_address);
539
662
  ExternalReference scheduled_exception_address =
540
- ExternalReference::scheduled_exception_address();
663
+ ExternalReference::scheduled_exception_address(isolate());
541
664
 
542
665
  // Allocate HandleScope in callee-save registers.
543
666
  Register prev_next_address_reg = r14;
544
667
  Register prev_limit_reg = rbx;
545
- Register base_reg = r12;
668
+ Register base_reg = r15;
546
669
  movq(base_reg, next_address);
547
670
  movq(prev_next_address_reg, Operand(base_reg, kNextOffset));
548
671
  movq(prev_limit_reg, Operand(base_reg, kLimitOffset));
@@ -574,7 +697,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
574
697
 
575
698
  // Check if the function scheduled an exception.
576
699
  movq(rsi, scheduled_exception_address);
577
- Cmp(Operand(rsi, 0), Factory::the_hole_value());
700
+ Cmp(Operand(rsi, 0), FACTORY->the_hole_value());
578
701
  j(not_equal, &promote_scheduled_exception);
579
702
 
580
703
  LeaveApiExitFrame();
@@ -589,14 +712,20 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
589
712
 
590
713
  bind(&empty_result);
591
714
  // It was zero; the result is undefined.
592
- Move(rax, Factory::undefined_value());
715
+ Move(rax, FACTORY->undefined_value());
593
716
  jmp(&prologue);
594
717
 
595
718
  // HandleScope limit has changed. Delete allocated extensions.
596
719
  bind(&delete_allocated_handles);
597
720
  movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
598
721
  movq(prev_limit_reg, rax);
599
- movq(rax, ExternalReference::delete_handle_scope_extensions());
722
+ #ifdef _WIN64
723
+ LoadAddress(rcx, ExternalReference::isolate_address());
724
+ #else
725
+ LoadAddress(rdi, ExternalReference::isolate_address());
726
+ #endif
727
+ LoadAddress(rax,
728
+ ExternalReference::delete_handle_scope_extensions(isolate()));
600
729
  call(rax);
601
730
  movq(rax, prev_limit_reg);
602
731
  jmp(&leave_exit_frame);
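
The #ifdef _WIN64 split above exists because the isolate must be passed as the first C argument to the extension-deletion routine, and the two x64 ABIs disagree on where that argument lives. For reference (standard ABI facts, not taken from this diff):

    // First integer/pointer arguments, in order:
    //   Windows x64:  rcx, rdx, r8, r9        (+ 32 bytes of shadow space)
    //   System V x64: rdi, rsi, rdx, rcx, r8, r9
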
@@ -608,7 +737,7 @@ MaybeObject* MacroAssembler::TryCallApiFunctionAndReturn(
 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                              int result_size) {
   // Set the entry point and jump to the C entry runtime stub.
-  movq(rbx, ext);
+  LoadAddress(rbx, ext);
   CEntryStub ces(result_size);
   jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
 }
@@ -617,7 +746,7 @@ void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
 MaybeObject* MacroAssembler::TryJumpToExternalReference(
     const ExternalReference& ext, int result_size) {
   // Set the entry point and jump to the C entry runtime stub.
-  movq(rbx, ext);
+  LoadAddress(rbx, ext);
   CEntryStub ces(result_size);
   return TryTailCallStub(&ces);
 }
@@ -625,7 +754,7 @@ MaybeObject* MacroAssembler::TryJumpToExternalReference(

 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                    InvokeFlag flag,
-                                   PostCallGenerator* post_call_generator) {
+                                   CallWrapper* call_wrapper) {
   // Calls are not allowed in some stubs.
   ASSERT(flag == JUMP_FUNCTION || allow_stub_calls());

@@ -634,7 +763,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
   // parameter count to avoid emitting code to do the check.
   ParameterCount expected(0);
   GetBuiltinEntry(rdx, id);
-  InvokeCode(rdx, expected, expected, flag, post_call_generator);
+  InvokeCode(rdx, expected, expected, flag, call_wrapper);
 }


@@ -694,7 +823,7 @@ Register MacroAssembler::GetSmiConstant(Smi* source) {
 }

 void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
    movq(dst,
         reinterpret_cast<uint64_t>(Smi::FromInt(kSmiConstantRegisterValue)),
         RelocInfo::NONE);
@@ -708,11 +837,11 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
      bind(&ok);
    }
  }
-  if (source->value() == 0) {
+  int value = source->value();
+  if (value == 0) {
    xorl(dst, dst);
    return;
  }
-  int value = source->value();
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

@@ -763,7 +892,7 @@ void MacroAssembler::Integer32ToSmi(Register dst, Register src) {


 void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     testb(dst, Immediate(0x01));
     NearLabel ok;
     j(zero, &ok);
@@ -783,9 +912,9 @@ void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                 Register src,
                                                 int constant) {
   if (dst.is(src)) {
-    addq(dst, Immediate(constant));
+    addl(dst, Immediate(constant));
   } else {
-    lea(dst, Operand(src, constant));
+    leal(dst, Operand(src, constant));
   }
   shl(dst, Immediate(kSmiShift));
 }
@@ -824,12 +953,24 @@ void MacroAssembler::SmiTest(Register src) {
 }


-void MacroAssembler::SmiCompare(Register dst, Register src) {
-  cmpq(dst, src);
+void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
+  if (emit_debug_code()) {
+    AbortIfNotSmi(smi1);
+    AbortIfNotSmi(smi2);
+  }
+  cmpq(smi1, smi2);
 }


 void MacroAssembler::SmiCompare(Register dst, Smi* src) {
+  if (emit_debug_code()) {
+    AbortIfNotSmi(dst);
+  }
+  Cmp(dst, src);
+}
+
+
+void MacroAssembler::Cmp(Register dst, Smi* src) {
   ASSERT(!dst.is(kScratchRegister));
   if (src->value() == 0) {
     testq(dst, dst);
@@ -841,20 +982,39 @@ void MacroAssembler::SmiCompare(Register dst, Smi* src) {


 void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
+  if (emit_debug_code()) {
+    AbortIfNotSmi(dst);
+    AbortIfNotSmi(src);
+  }
   cmpq(dst, src);
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
+  if (emit_debug_code()) {
+    AbortIfNotSmi(dst);
+    AbortIfNotSmi(src);
+  }
   cmpq(dst, src);
 }


 void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
+  if (emit_debug_code()) {
+    AbortIfNotSmi(dst);
+  }
   cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
 }


+void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
+  // The Operand cannot use the smi register.
+  Register smi_reg = GetSmiConstant(src);
+  ASSERT(!dst.AddressUsesRegister(smi_reg));
+  cmpq(dst, smi_reg);
+}
+
+
 void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
   cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
 }
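
These helpers lean on the x64 smi encoding: the 32-bit payload lives in the upper half of the word and the lower half is all tag (zero), so a smi field can be compared with a 32-bit cmpl at byte offset kSmiShift / kBitsPerByte = 4. A sketch of the encoding these routines assume:

    // Sketch of the x64 smi layout (kSmiShift == 32, kSmiTag == 0):
    //   bits 63..32: signed 32-bit value   bits 31..0: zero
    inline int64_t SmiEncode(int32_t value) {
      return static_cast<int64_t>(value) << 32;
    }
    inline int32_t SmiDecode(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);
    }
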
@@ -908,7 +1068,7 @@ Condition MacroAssembler::CheckSmi(const Operand& src) {

 Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
   ASSERT_EQ(0, kSmiTag);
-  // Make mask 0x8000000000000001 and test that both bits are zero.
+  // Test that both bits of the mask 0x8000000000000001 are zero.
   movq(kScratchRegister, src);
   rol(kScratchRegister, Immediate(1));
   testb(kScratchRegister, Immediate(3));
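
The reworded comment is worth unpacking: rotating the word left by one brings the sign bit into bit 0 and the smi tag bit into bit 1, so a single testb against 3 answers "smi and non-negative" in one instruction. The same check in portable C++ (illustrative only):

    // (rotl(x, 1) & 3) == 0  <=>  x has a zero tag bit and a zero sign bit.
    inline bool IsNonNegativeSmiWord(uint64_t word) {
      uint64_t rotated = (word << 1) | (word >> 63);  // rotate left by 1
      return (rotated & 3) == 0;
    }
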
@@ -1088,12 +1248,10 @@ void MacroAssembler::SmiAdd(Register dst,
   // No overflow checking. Use only when it's known that
   // overflowing is impossible.
   ASSERT(!dst.is(src2));
-  if (dst.is(src1)) {
-    addq(dst, src2);
-  } else {
+  if (!dst.is(src1)) {
     movq(dst, src1);
-    addq(dst, src2);
   }
+  addq(dst, src2);
   Assert(no_overflow, "Smi addition overflow");
 }

@@ -1102,12 +1260,10 @@ void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
   ASSERT(!dst.is(src2));
-  if (dst.is(src1)) {
-    subq(dst, src2);
-  } else {
+  if (!dst.is(src1)) {
     movq(dst, src1);
-    subq(dst, src2);
   }
+  subq(dst, src2);
   Assert(no_overflow, "Smi subtraction overflow");
 }

@@ -1117,12 +1273,10 @@ void MacroAssembler::SmiSub(Register dst,
                             const Operand& src2) {
   // No overflow checking. Use only when it's known that
   // overflowing is impossible (e.g., subtracting two positive smis).
-  if (dst.is(src1)) {
-    subq(dst, src2);
-  } else {
+  if (!dst.is(src1)) {
     movq(dst, src1);
-    subq(dst, src2);
   }
+  subq(dst, src2);
   Assert(no_overflow, "Smi subtraction overflow");
 }

@@ -1309,6 +1463,13 @@ SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
 }


+void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
+  ASSERT_EQ(0, kSmiShift % kBitsPerByte);
+  addl(dst, Operand(src, kSmiShift / kBitsPerByte));
+}
+
+
+
 void MacroAssembler::Move(Register dst, Register src) {
   if (!dst.is(src)) {
     movq(dst, src);
@@ -1339,7 +1500,7 @@ void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {

 void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
   if (source->IsSmi()) {
-    SmiCompare(dst, Smi::cast(*source));
+    Cmp(dst, Smi::cast(*source));
   } else {
     Move(kScratchRegister, source);
     cmpq(dst, kScratchRegister);
@@ -1349,7 +1510,7 @@ void MacroAssembler::Cmp(Register dst, Handle<Object> source) {

 void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
   if (source->IsSmi()) {
-    SmiCompare(dst, Smi::cast(*source));
+    Cmp(dst, Smi::cast(*source));
   } else {
     ASSERT(source->IsHeapObject());
     movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT);
@@ -1393,7 +1554,7 @@ void MacroAssembler::Test(const Operand& src, Smi* source) {


 void MacroAssembler::Jump(ExternalReference ext) {
-  movq(kScratchRegister, ext);
+  LoadAddress(kScratchRegister, ext);
   jmp(kScratchRegister);
 }

@@ -1410,21 +1571,46 @@ void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
 }


+int MacroAssembler::CallSize(ExternalReference ext) {
+  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
+  const int kCallInstructionSize = 3;
+  return LoadAddressSize(ext) + kCallInstructionSize;
+}
+
+
 void MacroAssembler::Call(ExternalReference ext) {
-  movq(kScratchRegister, ext);
+#ifdef DEBUG
+  int end_position = pc_offset() + CallSize(ext);
+#endif
+  LoadAddress(kScratchRegister, ext);
   call(kScratchRegister);
+#ifdef DEBUG
+  CHECK_EQ(end_position, pc_offset());
+#endif
 }


 void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
+#ifdef DEBUG
+  int end_position = pc_offset() + CallSize(destination, rmode);
+#endif
   movq(kScratchRegister, destination, rmode);
   call(kScratchRegister);
+#ifdef DEBUG
+  CHECK_EQ(pc_offset(), end_position);
+#endif
 }


 void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
+#ifdef DEBUG
+  int end_position = pc_offset() + CallSize(code_object);
+#endif
   ASSERT(RelocInfo::IsCodeTarget(rmode));
   call(code_object, rmode);
+#ifdef DEBUG
+  CHECK_EQ(end_position, pc_offset());
+#endif
 }
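
The new CallSize/CHECK_EQ pairs pin down an invariant: every Call must emit exactly as many bytes as CallSize predicts, since code patching and deoptimization assume call sites of a known size. Reduced to its skeleton (EmitCall is a hypothetical stand-in for the emission above):

    #ifdef DEBUG
      int end_position = pc_offset() + CallSize(target);  // predicted size
    #endif
      EmitCall(target);                                   // actual emission
    #ifdef DEBUG
      CHECK_EQ(end_position, pc_offset());                // must agree exactly
    #endif
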


@@ -1440,10 +1626,10 @@ void MacroAssembler::Pushad() {
   push(r9);
   // r10 is kScratchRegister.
   push(r11);
-  push(r12);
+  // r12 is kSmiConstantRegister.
   // r13 is kRootRegister.
   push(r14);
-  // r15 is kSmiConstantRegister
+  push(r15);
   STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
   // Use lea for symmetry with Popad.
   int sp_delta =
@@ -1457,8 +1643,8 @@ void MacroAssembler::Popad() {
   int sp_delta =
       (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
   lea(rsp, Operand(rsp, sp_delta));
+  pop(r15);
   pop(r14);
-  pop(r12);
   pop(r11);
   pop(r9);
   pop(r8);
@@ -1477,7 +1663,7 @@ void MacroAssembler::Dropad() {


 // Order general registers are pushed by Pushad:
-// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r12, r14.
+// rax, rcx, rdx, rbx, rsi, rdi, r8, r9, r11, r14, r15.
 int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
     0,
     1,
@@ -1491,10 +1677,10 @@ int MacroAssembler::kSafepointPushRegisterIndices[Register::kNumRegisters] = {
     7,
     -1,
     8,
-    9,
     -1,
-    10,
-    -1
+    -1,
+    9,
+    10
 };
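
The table maps each register code (rax = 0 through r15 = 15) to its slot in the block pushed by Pushad, with -1 marking registers that are never saved (rsp, rbp, and the reserved r10, r12, r13); the r12/r15 swap above follows kSmiConstantRegister moving from r15 to r12. A sketch of the lookup the safepoint code presumably performs:

    // Sketch: a register's safepoint slot is a direct table lookup.
    int SafepointRegisterStackIndex(int reg_code) {
      int index = kSafepointPushRegisterIndices[reg_code];
      ASSERT(index >= 0);  // -1 entries are never spilled by Pushad
      return index;
    }
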


@@ -1503,6 +1689,11 @@ void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
 }


+void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
+  movq(dst, SafepointRegisterSlot(src));
+}
+
+
 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
   return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
 }
@@ -1539,18 +1730,20 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
     push(Immediate(0));  // NULL frame pointer.
   }
   // Save the current handler.
-  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
-  push(Operand(kScratchRegister, 0));
+  Operand handler_operand =
+      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
+  push(handler_operand);
   // Link this handler.
-  movq(Operand(kScratchRegister, 0), rsp);
+  movq(handler_operand, rsp);
 }


 void MacroAssembler::PopTryHandler() {
   ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
   // Unlink this handler.
-  movq(kScratchRegister, ExternalReference(Top::k_handler_address));
-  pop(Operand(kScratchRegister, 0));
+  Operand handler_operand =
+      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
+  pop(handler_operand);
   // Remove the remaining fields.
   addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
 }
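
PushTryHandler and PopTryHandler now address the handler list through ExternalOperand instead of hand-loading the address into kScratchRegister. A plausible shape for that helper, under the assumption that it can address isolate data relative to kRootRegister when in range and otherwise falls back to a scratch register (a sketch, not V8's exact code; AddressDelta is a hypothetical helper):

    Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                            Register scratch) {
      intptr_t delta = AddressDelta(target, isolate());  // assumed helper
      if (is_int32(delta)) {
        return Operand(kRootRegister, static_cast<int32_t>(delta));
      }
      movq(scratch, target);
      return Operand(scratch, 0);
    }
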
@@ -1568,12 +1761,11 @@ void MacroAssembler::Throw(Register value) {
     movq(rax, value);
   }

-  ExternalReference handler_address(Top::k_handler_address);
-  movq(kScratchRegister, handler_address);
-  movq(rsp, Operand(kScratchRegister, 0));
+  ExternalReference handler_address(Isolate::k_handler_address, isolate());
+  Operand handler_operand = ExternalOperand(handler_address);
+  movq(rsp, handler_operand);
   // get next in chain
-  pop(rcx);
-  movq(Operand(kScratchRegister, 0), rcx);
+  pop(handler_operand);
   pop(rbp);  // pop frame pointer
   pop(rdx);  // remove state

@@ -1596,9 +1788,8 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
     movq(rax, value);
   }
   // Fetch top stack handler.
-  ExternalReference handler_address(Top::k_handler_address);
-  movq(kScratchRegister, handler_address);
-  movq(rsp, Operand(kScratchRegister, 0));
+  ExternalReference handler_address(Isolate::k_handler_address, isolate());
+  Load(rsp, handler_address);

   // Unwind the handlers until the ENTRY handler is found.
   NearLabel loop, done;
@@ -1614,19 +1805,21 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
   bind(&done);

   // Set the top handler address to next handler past the current ENTRY handler.
-  movq(kScratchRegister, handler_address);
-  pop(Operand(kScratchRegister, 0));
+  Operand handler_operand = ExternalOperand(handler_address);
+  pop(handler_operand);

   if (type == OUT_OF_MEMORY) {
     // Set external caught exception to false.
-    ExternalReference external_caught(Top::k_external_caught_exception_address);
+    ExternalReference external_caught(
+        Isolate::k_external_caught_exception_address, isolate());
     movq(rax, Immediate(false));
-    store_rax(external_caught);
+    Store(external_caught, rax);

     // Set pending exception and rax to out of memory exception.
-    ExternalReference pending_exception(Top::k_pending_exception_address);
+    ExternalReference pending_exception(Isolate::k_pending_exception_address,
                                        isolate());
     movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
-    store_rax(pending_exception);
+    Store(pending_exception, rax);
   }

   // Clear the context pointer.
@@ -1634,14 +1827,14 @@ void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,

   // Restore registers from handler.
   STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
-              StackHandlerConstants::kFPOffset);
+                StackHandlerConstants::kFPOffset);
   pop(rbp);  // FP
   STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
-              StackHandlerConstants::kStateOffset);
+                StackHandlerConstants::kStateOffset);
   pop(rdx);  // State

   STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
-              StackHandlerConstants::kPCOffset);
+                StackHandlerConstants::kPCOffset);
   ret(0);
 }
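
The STATIC_ASSERT chain (only re-indented in this hunk) is what keeps the unwind code honest: the pops only work if the handler fields sit in exactly this order. The layout they enforce, as a sketch (top of stack first, one pointer per field):

    //   [rsp + 0]   next handler        (StackHandlerConstants::kNextOffset)
    //   [rsp + 8]   saved frame pointer (kFPOffset)    -> pop(rbp)
    //   [rsp + 16]  state               (kStateOffset) -> pop(rdx)
    //   [rsp + 24]  return address      (kPCOffset)    -> ret(0)
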

@@ -1700,7 +1893,7 @@ void MacroAssembler::AbortIfNotNumber(Register object) {
   Condition is_smi = CheckSmi(object);
   j(is_smi, &ok);
   Cmp(FieldOperand(object, HeapObject::kMapOffset),
-      Factory::heap_number_map());
+      FACTORY->heap_number_map());
   Assert(equal, "Operand not a number");
   bind(&ok);
 }
@@ -1714,7 +1907,12 @@ void MacroAssembler::AbortIfSmi(Register object) {


 void MacroAssembler::AbortIfNotSmi(Register object) {
-  NearLabel ok;
+  Condition is_smi = CheckSmi(object);
+  Assert(is_smi, "Operand is not a smi");
+}
+
+
+void MacroAssembler::AbortIfNotSmi(const Operand& object) {
   Condition is_smi = CheckSmi(object);
   Assert(is_smi, "Operand is not a smi");
 }
@@ -1801,8 +1999,8 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,

 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
   if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    movl(Operand(kScratchRegister, 0), Immediate(value));
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
+    movq(counter_operand, Immediate(value));
   }
 }

@@ -1810,12 +2008,11 @@ void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
   ASSERT(value > 0);
   if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    Operand operand(kScratchRegister, 0);
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
     if (value == 1) {
-      incl(operand);
+      incl(counter_operand);
     } else {
-      addl(operand, Immediate(value));
+      addl(counter_operand, Immediate(value));
     }
   }
 }
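
SetCounter/IncrementCounter/DecrementCounter treat a StatsCounter as a plain int cell whose address is baked into the generated code, so an update compiles to a single memory instruction. As a C++ analogy (not the real StatsCounter class):

    struct CounterCell { int value; };
    inline void Bump(CounterCell* cell, int by) {
      cell->value += by;  // what the emitted incl/addl/decl/subl do
    }
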
@@ -1824,12 +2021,11 @@ void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
   ASSERT(value > 0);
   if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    Operand operand(kScratchRegister, 0);
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
     if (value == 1) {
-      decl(operand);
+      decl(counter_operand);
     } else {
-      subl(operand, Immediate(value));
+      subl(counter_operand, Immediate(value));
     }
   }
 }
@@ -1839,7 +2035,7 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
 void MacroAssembler::DebugBreak() {
   ASSERT(allow_stub_calls());
   Set(rax, 0);  // No arguments.
-  movq(rbx, ExternalReference(Runtime::kDebugBreak));
+  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
   CEntryStub ces(1);
   Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
 }
@@ -1850,7 +2046,7 @@ void MacroAssembler::InvokeCode(Register code,
                                 const ParameterCount& expected,
                                 const ParameterCount& actual,
                                 InvokeFlag flag,
-                                PostCallGenerator* post_call_generator) {
+                                CallWrapper* call_wrapper) {
   NearLabel done;
   InvokePrologue(expected,
                  actual,
@@ -1858,10 +2054,11 @@ void MacroAssembler::InvokeCode(Register code,
                  code,
                  &done,
                  flag,
-                 post_call_generator);
+                 call_wrapper);
   if (flag == CALL_FUNCTION) {
+    if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
     call(code);
-    if (post_call_generator != NULL) post_call_generator->Generate();
+    if (call_wrapper != NULL) call_wrapper->AfterCall();
   } else {
     ASSERT(flag == JUMP_FUNCTION);
     jmp(code);
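
PostCallGenerator (a post-call hook only) is generalized here to CallWrapper, which brackets the call and learns the call size up front via BeforeCall(CallSize(code)). The interface this diff implies (a sketch; the real declaration lives elsewhere in the tree):

    class CallWrapper {
     public:
      virtual ~CallWrapper() {}
      // Called just before the call is emitted, with its predicted size.
      virtual void BeforeCall(int call_size) const = 0;
      // Called just after the call instruction has been emitted.
      virtual void AfterCall() const = 0;
    };
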
@@ -1875,7 +2072,7 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
                                 const ParameterCount& actual,
                                 RelocInfo::Mode rmode,
                                 InvokeFlag flag,
-                                PostCallGenerator* post_call_generator) {
+                                CallWrapper* call_wrapper) {
   NearLabel done;
   Register dummy = rax;
   InvokePrologue(expected,
@@ -1884,10 +2081,11 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
                  dummy,
                  &done,
                  flag,
-                 post_call_generator);
+                 call_wrapper);
   if (flag == CALL_FUNCTION) {
+    if (call_wrapper != NULL) call_wrapper->BeforeCall(CallSize(code));
     Call(code, rmode);
-    if (post_call_generator != NULL) post_call_generator->Generate();
+    if (call_wrapper != NULL) call_wrapper->AfterCall();
   } else {
     ASSERT(flag == JUMP_FUNCTION);
     Jump(code, rmode);
@@ -1899,7 +2097,7 @@ void MacroAssembler::InvokeCode(Handle<Code> code,
 void MacroAssembler::InvokeFunction(Register function,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
-                                    PostCallGenerator* post_call_generator) {
+                                    CallWrapper* call_wrapper) {
   ASSERT(function.is(rdi));
   movq(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
   movq(rsi, FieldOperand(function, JSFunction::kContextOffset));
@@ -1910,14 +2108,14 @@ void MacroAssembler::InvokeFunction(Register function,
   movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

   ParameterCount expected(rbx);
-  InvokeCode(rdx, expected, actual, flag, post_call_generator);
+  InvokeCode(rdx, expected, actual, flag, call_wrapper);
 }


 void MacroAssembler::InvokeFunction(JSFunction* function,
                                     const ParameterCount& actual,
                                     InvokeFlag flag,
-                                    PostCallGenerator* post_call_generator) {
+                                    CallWrapper* call_wrapper) {
   ASSERT(function->is_compiled());
   // Get the function and setup the context.
   Move(rdi, Handle<JSFunction>(function));
@@ -1928,7 +2126,7 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
     // the Code object every time we call the function.
     movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
     ParameterCount expected(function->shared()->formal_parameter_count());
-    InvokeCode(rdx, expected, actual, flag, post_call_generator);
+    InvokeCode(rdx, expected, actual, flag, call_wrapper);
   } else {
     // Invoke the cached code.
     Handle<Code> code(function->code());
@@ -1938,7 +2136,7 @@ void MacroAssembler::InvokeFunction(JSFunction* function,
                actual,
                RelocInfo::CODE_TARGET,
                flag,
-               post_call_generator);
+               call_wrapper);
   }
 }

@@ -1950,9 +2148,9 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {
   Push(Smi::FromInt(type));
   movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
   push(kScratchRegister);
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     movq(kScratchRegister,
-         Factory::undefined_value(),
+         FACTORY->undefined_value(),
          RelocInfo::EMBEDDED_OBJECT);
     cmpq(Operand(rsp, 0), kScratchRegister);
     Check(not_equal, "code object not properly patched");
@@ -1961,7 +2159,7 @@ void MacroAssembler::EnterFrame(StackFrame::Type type) {


 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Move(kScratchRegister, Smi::FromInt(type));
     cmpq(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
     Check(equal, "stack frame types must match");
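
EnterFrame and this check agree on the standard frame layout, where a smi-encoded frame type sits at StandardFrameConstants::kMarkerOffset below the saved frame pointer. Sketched relative to rbp (the context push is assumed from the standard V8 frame, not shown in this hunk):

    //   [rbp + 8]   return address
    //   [rbp + 0]   caller's rbp
    //   [rbp - 8]   context (rsi)
    //   [rbp - 16]  frame-type marker, Smi::FromInt(type)  <- kMarkerOffset
    //   [rbp - 24]  code object (patched; hence the undefined_value check)
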
@@ -1987,16 +2185,12 @@ void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
   push(kScratchRegister);  // Accessed from EditFrame::code_slot.

   // Save the frame pointer and the context in top.
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
-  ExternalReference context_address(Top::k_context_address);
   if (save_rax) {
-    movq(r14, rax);  // Backup rax before we use it.
+    movq(r14, rax);  // Backup rax in callee-save register.
   }

-  movq(rax, rbp);
-  store_rax(c_entry_fp_address);
-  movq(rax, rsi);
-  store_rax(context_address);
+  Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
+  Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
 }


@@ -2022,7 +2216,7 @@ void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
   }

   // Get the required frame alignment for the OS.
-  static const int kFrameAlignment = OS::ActivationFrameAlignment();
+  const int kFrameAlignment = OS::ActivationFrameAlignment();
   if (kFrameAlignment > 0) {
     ASSERT(IsPowerOf2(kFrameAlignment));
     movq(kScratchRegister, Immediate(-kFrameAlignment));
@@ -2037,10 +2231,10 @@ void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
 void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
   EnterExitFramePrologue(true);

-  // Setup argv in callee-saved register r12. It is reused in LeaveExitFrame,
+  // Setup argv in callee-saved register r15. It is reused in LeaveExitFrame,
   // so it must be retained across the C-call.
   int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
-  lea(r12, Operand(rbp, r14, times_pointer_size, offset));
+  lea(r15, Operand(rbp, r14, times_pointer_size, offset));

   EnterExitFrameEpilogue(arg_stack_space, save_doubles);
 }
@@ -2054,7 +2248,7 @@ void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {

 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
   // Registers:
-  // r12 : argv
+  // r15 : argv
   if (save_doubles) {
     int offset = -2 * kPointerSize;
     for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
@@ -2068,7 +2262,7 @@ void MacroAssembler::LeaveExitFrame(bool save_doubles) {

   // Drop everything up to and including the arguments and the receiver
   // from the caller stack.
-  lea(rsp, Operand(r12, 1 * kPointerSize));
+  lea(rsp, Operand(r15, 1 * kPointerSize));

   // Push the return address to get ready to return.
   push(rcx);
@@ -2087,17 +2281,18 @@ void MacroAssembler::LeaveApiExitFrame() {

 void MacroAssembler::LeaveExitFrameEpilogue() {
   // Restore current context from top and clear it in debug mode.
-  ExternalReference context_address(Top::k_context_address);
-  movq(kScratchRegister, context_address);
-  movq(rsi, Operand(kScratchRegister, 0));
+  ExternalReference context_address(Isolate::k_context_address, isolate());
+  Operand context_operand = ExternalOperand(context_address);
+  movq(rsi, context_operand);
#ifdef DEBUG
-  movq(Operand(kScratchRegister, 0), Immediate(0));
+  movq(context_operand, Immediate(0));
#endif

   // Clear the top frame.
-  ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address);
-  movq(kScratchRegister, c_entry_fp_address);
-  movq(Operand(kScratchRegister, 0), Immediate(0));
+  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
+                                       isolate());
+  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
+  movq(c_entry_fp_operand, Immediate(0));
 }


@@ -2112,7 +2307,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   movq(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

   // When generating debug code, make sure the lexical context is set.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     cmpq(scratch, Immediate(0));
     Check(not_equal, "we should not have an empty lexical context");
   }
@@ -2122,9 +2317,9 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   movq(scratch, FieldOperand(scratch, GlobalObject::kGlobalContextOffset));

   // Check the context is a global context.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
-        Factory::global_context_map());
+        FACTORY->global_context_map());
     Check(equal, "JSGlobalObject::global_context should be a global context.");
   }

@@ -2138,7 +2333,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
   // object.

   // Check the context is a global context.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     // Preserve original value of holder_reg.
     push(holder_reg);
     movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
@@ -2168,7 +2363,7 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
   ExternalReference new_space_allocation_top =
-      ExternalReference::new_space_allocation_top_address();
+      ExternalReference::new_space_allocation_top_address(isolate());

   // Just return if allocation top is already known.
   if ((flags & RESULT_CONTAINS_TOP) != 0) {
@@ -2176,8 +2371,8 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
     ASSERT(!scratch.is_valid());
#ifdef DEBUG
     // Assert that result actually contains top on entry.
-    movq(kScratchRegister, new_space_allocation_top);
-    cmpq(result, Operand(kScratchRegister, 0));
+    Operand top_operand = ExternalOperand(new_space_allocation_top);
+    cmpq(result, top_operand);
     Check(equal, "Unexpected allocation top");
#endif
     return;
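
Everything from here through UndoAllocationInNewSpace implements one protocol: new-space allocation is a bump of the external "top" cell, checked against the external "limit" cell. In plain C++ the generated fast path is equivalent to this sketch (the real code branches to gc_required instead of returning null):

    char* AllocateInNewSpace(char** top, char** limit, size_t size) {
      char* result = *top;
      char* new_top = result + size;
      if (new_top > *limit) return nullptr;  // the gc_required path
      *top = new_top;                        // UpdateAllocationTopHelper
      return result;                         // untagged object start
    }
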
@@ -2186,39 +2381,30 @@ void MacroAssembler::LoadAllocationTopHelper(Register result,
   // Move address of new object to result. Use scratch register if available,
   // and keep address in scratch until call to UpdateAllocationTopHelper.
   if (scratch.is_valid()) {
-    movq(scratch, new_space_allocation_top);
+    LoadAddress(scratch, new_space_allocation_top);
     movq(result, Operand(scratch, 0));
-  } else if (result.is(rax)) {
-    load_rax(new_space_allocation_top);
   } else {
-    movq(kScratchRegister, new_space_allocation_top);
-    movq(result, Operand(kScratchRegister, 0));
+    Load(result, new_space_allocation_top);
   }
 }


 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                                Register scratch) {
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     testq(result_end, Immediate(kObjectAlignmentMask));
     Check(zero, "Unaligned allocation in new space");
   }

   ExternalReference new_space_allocation_top =
-      ExternalReference::new_space_allocation_top_address();
+      ExternalReference::new_space_allocation_top_address(isolate());

   // Update new top.
-  if (result_end.is(rax)) {
-    // rax can be stored directly to a memory location.
-    store_rax(new_space_allocation_top);
+  if (scratch.is_valid()) {
+    // Scratch already contains address of allocation top.
+    movq(Operand(scratch, 0), result_end);
   } else {
-    // Register required - use scratch provided if available.
-    if (scratch.is_valid()) {
-      movq(Operand(scratch, 0), result_end);
-    } else {
-      movq(kScratchRegister, new_space_allocation_top);
-      movq(Operand(kScratchRegister, 0), result_end);
-    }
+    Store(new_space_allocation_top, result_end);
   }
 }

@@ -2230,7 +2416,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       if (result_end.is_valid()) {
@@ -2250,7 +2436,7 @@ void MacroAssembler::AllocateInNewSpace(int object_size,

   // Calculate new top and bail out if new space is exhausted.
   ExternalReference new_space_allocation_limit =
-      ExternalReference::new_space_allocation_limit_address();
+      ExternalReference::new_space_allocation_limit_address(isolate());

   Register top_reg = result_end.is_valid() ? result_end : result;

@@ -2259,8 +2445,8 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
   }
   addq(top_reg, Immediate(object_size));
   j(carry, gc_required);
-  movq(kScratchRegister, new_space_allocation_limit);
-  cmpq(top_reg, Operand(kScratchRegister, 0));
+  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+  cmpq(top_reg, limit_operand);
   j(above, gc_required);

   // Update allocation top.
@@ -2288,7 +2474,7 @@ void MacroAssembler::AllocateInNewSpace(int header_size,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       movl(result_end, Immediate(0x7191));
@@ -2307,15 +2493,15 @@ void MacroAssembler::AllocateInNewSpace(int header_size,

   // Calculate new top and bail out if new space is exhausted.
   ExternalReference new_space_allocation_limit =
-      ExternalReference::new_space_allocation_limit_address();
+      ExternalReference::new_space_allocation_limit_address(isolate());

   // We assume that element_count*element_size + header_size does not
   // overflow.
   lea(result_end, Operand(element_count, element_size, header_size));
   addq(result_end, result);
   j(carry, gc_required);
-  movq(kScratchRegister, new_space_allocation_limit);
-  cmpq(result_end, Operand(kScratchRegister, 0));
+  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+  cmpq(result_end, limit_operand);
   j(above, gc_required);

   // Update allocation top.
@@ -2335,7 +2521,7 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,
                                         Label* gc_required,
                                         AllocationFlags flags) {
   if (!FLAG_inline_new) {
-    if (FLAG_debug_code) {
+    if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       movl(result, Immediate(0x7091));
       movl(result_end, Immediate(0x7191));
@@ -2354,14 +2540,14 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,

   // Calculate new top and bail out if new space is exhausted.
   ExternalReference new_space_allocation_limit =
-      ExternalReference::new_space_allocation_limit_address();
+      ExternalReference::new_space_allocation_limit_address(isolate());
   if (!object_size.is(result_end)) {
     movq(result_end, object_size);
   }
   addq(result_end, result);
   j(carry, gc_required);
-  movq(kScratchRegister, new_space_allocation_limit);
-  cmpq(result_end, Operand(kScratchRegister, 0));
+  Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+  cmpq(result_end, limit_operand);
   j(above, gc_required);

   // Update allocation top.
@@ -2376,16 +2562,16 @@ void MacroAssembler::AllocateInNewSpace(Register object_size,

 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
   ExternalReference new_space_allocation_top =
-      ExternalReference::new_space_allocation_top_address();
+      ExternalReference::new_space_allocation_top_address(isolate());

   // Make sure the object has no tag before resetting top.
   and_(object, Immediate(~kHeapObjectTagMask));
-  movq(kScratchRegister, new_space_allocation_top);
+  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
-  cmpq(object, Operand(kScratchRegister, 0));
+  cmpq(object, top_operand);
   Check(below, "Undo allocation of non allocated memory");
#endif
-  movq(Operand(kScratchRegister, 0), object);
+  movq(top_operand, object);
 }


@@ -2519,6 +2705,70 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
 }


+// Copy memory, byte-by-byte, from source to destination. Not optimized for
+// long or aligned copies. The contents of scratch and length are destroyed.
+// Destination is incremented by length, source, length and scratch are
+// clobbered.
+// A simpler loop is faster on small copies, but slower on large ones.
+// The cld() instruction must have been emitted, to set the direction flag(),
+// before calling this function.
+void MacroAssembler::CopyBytes(Register destination,
+                               Register source,
+                               Register length,
+                               int min_length,
+                               Register scratch) {
+  ASSERT(min_length >= 0);
+  if (FLAG_debug_code) {
+    cmpl(length, Immediate(min_length));
+    Assert(greater_equal, "Invalid min_length");
+  }
+  Label loop, done, short_string, short_loop;
+
+  const int kLongStringLimit = 20;
+  if (min_length <= kLongStringLimit) {
+    cmpl(length, Immediate(kLongStringLimit));
+    j(less_equal, &short_string);
+  }
+
+  ASSERT(source.is(rsi));
+  ASSERT(destination.is(rdi));
+  ASSERT(length.is(rcx));
+
+  // Because source is 8-byte aligned in our uses of this function,
+  // we keep source aligned for the rep movs operation by copying the odd bytes
+  // at the end of the ranges.
+  movq(scratch, length);
+  shrl(length, Immediate(3));
+  repmovsq();
+  // Move remaining bytes of length.
+  andl(scratch, Immediate(0x7));
+  movq(length, Operand(source, scratch, times_1, -8));
+  movq(Operand(destination, scratch, times_1, -8), length);
+  addq(destination, scratch);
+
+  if (min_length <= kLongStringLimit) {
+    jmp(&done);
+
+    bind(&short_string);
+    if (min_length == 0) {
+      testl(length, length);
+      j(zero, &done);
+    }
+    lea(scratch, Operand(destination, length, times_1, 0));
+
+    bind(&short_loop);
+    movb(length, Operand(source, 0));
+    movb(Operand(destination, 0), length);
+    incq(source);
+    incq(destination);
+    cmpq(destination, scratch);
+    j(not_equal, &short_loop);
+
+    bind(&done);
+  }
+}
+
+
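
CopyBytes splits at a 20-byte threshold: long copies run rep movsq over whole qwords, then re-copy the last eight bytes of the range so the 1-7 leftover bytes are picked up by an overlapping store; short copies use a plain byte loop. The long path, mirrored in standalone C++ (a sketch; assumes n >= 8 and that the overlapping tail store is acceptable, exactly as in the assembly):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    void CopyBytesLong(uint8_t* dst, const uint8_t* src, size_t n) {
      for (size_t i = 0; i + 8 <= n; i += 8) {
        std::memcpy(dst + i, src + i, 8);        // rep movsq
      }
      std::memcpy(dst + n - 8, src + n - 8, 8);  // overlapping 8-byte tail
    }
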
 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
   if (context_chain_length > 0) {
     // Move up the chain of contexts to the context containing the slot.
@@ -2531,12 +2781,29 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
     }
     // The context may be an intermediate context, not a function context.
     movq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
-  } else {  // context is the current function context.
-    // The context may be an intermediate context, not a function context.
-    movq(dst, Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+  } else {
+    // Slot is in the current function context. Move it into the
+    // destination register in case we store into it (the write barrier
+    // cannot be allowed to destroy the context in rsi).
+    movq(dst, rsi);
+  }
+
+  // We should not have found a 'with' context by walking the context chain
+  // (i.e., the static scope chain and runtime context chain do not agree).
+  // A variable occurring in such a scope should have slot type LOOKUP and
+  // not CONTEXT.
+  if (emit_debug_code()) {
+    cmpq(dst, Operand(dst, Context::SlotOffset(Context::FCONTEXT_INDEX)));
+    Check(equal, "Yo dawg, I heard you liked function contexts "
+                 "so I put function contexts in all your contexts");
   }
 }
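
The joke aside, the new check rests on a real invariant: a function context's FCONTEXT slot points back at the context itself, while intermediate ("with") contexts point at their enclosing function context. Stated as a sketch:

    // Invariant behind the emit_debug_code() check above:
    //   IsFunctionContext(ctx)  <=>  ctx->get(Context::FCONTEXT_INDEX) == ctx
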

+#ifdef _WIN64
+static const int kRegisterPassedArguments = 4;
+#else
+static const int kRegisterPassedArguments = 6;
+#endif

 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
   // Load the global or builtins object from the current context.
@@ -2552,9 +2819,9 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                   Register map) {
   // Load the initial map. The global functions all have initial maps.
   movq(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     Label ok, fail;
-    CheckMap(map, Factory::meta_map(), &fail, false);
+    CheckMap(map, FACTORY->meta_map(), &fail, false);
     jmp(&ok);
     bind(&fail);
     Abort("Global functions must have initial map");
@@ -2572,11 +2839,10 @@ int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
   // and the caller does not reserve stack slots for them.
   ASSERT(num_arguments >= 0);
#ifdef _WIN64
-  static const int kMinimumStackSlots = 4;
+  const int kMinimumStackSlots = kRegisterPassedArguments;
   if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
   return num_arguments;
#else
-  static const int kRegisterPassedArguments = 6;
   if (num_arguments < kRegisterPassedArguments) return 0;
   return num_arguments - kRegisterPassedArguments;
#endif
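
With kRegisterPassedArguments hoisted to file scope, the function reads as a direct statement of the two ABIs: Win64 always reserves at least four stack slots (the shadow space for rcx, rdx, r8, r9), while System V reserves slots only for arguments beyond the six register-passed ones. Worked examples, derived from the code above:

    //   Win64,    3 args -> 4 slots  (shadow-space minimum)
    //   Win64,    6 args -> 6 slots  (every argument gets a stack home)
    //   System V, 3 args -> 0 slots  (all in rdi, rsi, rdx)
    //   System V, 8 args -> 2 slots  (args 7 and 8 spill to the stack)
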
@@ -2587,6 +2853,7 @@ void MacroAssembler::PrepareCallCFunction(int num_arguments) {
   int frame_alignment = OS::ActivationFrameAlignment();
   ASSERT(frame_alignment != 0);
   ASSERT(num_arguments >= 0);
+
   // Make stack end at alignment and allocate space for arguments and old rsp.
   movq(kScratchRegister, rsp);
   ASSERT(IsPowerOf2(frame_alignment));
@@ -2600,14 +2867,14 @@ void MacroAssembler::PrepareCallCFunction(int num_arguments) {

 void MacroAssembler::CallCFunction(ExternalReference function,
                                    int num_arguments) {
-  movq(rax, function);
+  LoadAddress(rax, function);
   CallCFunction(rax, num_arguments);
 }


 void MacroAssembler::CallCFunction(Register function, int num_arguments) {
   // Check stack alignment.
-  if (FLAG_debug_code) {
+  if (emit_debug_code()) {
     CheckStackAlignment();
   }

@@ -2621,7 +2888,9 @@ void MacroAssembler::CallCFunction(Register function, int num_arguments) {


 CodePatcher::CodePatcher(byte* address, int size)
-    : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
+    : address_(address),
+      size_(size),
+      masm_(Isolate::Current(), address, size + Assembler::kGap) {
   // Create a new macro assembler pointing to the address of the code to patch.
   // The size is adjusted with kGap on order for the assembler to generate size
   // bytes of instructions without failing with buffer size constraints.
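
CodePatcher now constructs its internal MacroAssembler against Isolate::Current() explicitly. Typical use is unchanged: point a patcher at live code and emit replacement bytes through its masm. A hypothetical call site (the address, size, and Nop helper here are illustrative assumptions):

    {
      CodePatcher patcher(call_site_address, 5);  // 5-byte patch window
      patcher.masm()->Nop(5);                     // e.g. overwrite a call
    }  // destructor is assumed to flush the instruction cache for the window
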